Jungsub Lim jsrimr

  • SNU
  • Seoul, Dongjak-gu
@jsrimr
jsrimr / main.dart
Created September 28, 2022 07:52
speech2text continuous listening
import 'package:flutter/material.dart';
import 'package:speech_to_text/speech_recognition_error.dart';
import 'package:speech_to_text/speech_recognition_result.dart';
import 'package:speech_to_text/speech_to_text.dart';
void main() {
  runApp(const MyApp());
}
class MyApp extends StatelessWidget {
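The Dart preview cuts off at the widget class, but the heart of continuous listening is restarting recognition whenever a session ends. As a language-neutral sketch of that loop, here in Python with the third-party speech_recognition package rather than the Flutter plugin above (the package and calls are an analogy, not the gist's code):

import speech_recognition as sr

recognizer = sr.Recognizer()
with sr.Microphone() as source:
    recognizer.adjust_for_ambient_noise(source)
    while True:  # restart listening after every utterance
        audio = recognizer.listen(source, phrase_time_limit=5)
        try:
            print(recognizer.recognize_google(audio))
        except sr.UnknownValueError:
            pass  # nothing intelligible; keep listening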
@jsrimr
jsrimr / card_pair.py
Created April 7, 2022 15:30
Code I attempted for https://programmers.co.kr/learn/courses/30/lessons/72415. I had the skeleton laid out, but with 45 minutes already gone I couldn't manage to implement compute_needed_move, so I gave up. Before turning to permutations I had planned to do DFS, but since only a few permutations are needed anyway, I just hard-coded the permutations.
from itertools import permutations
def solution(board, r, c):
    answer = float('inf')
    n_set = set()
    for row in board:
        for el in row:
            n_set.add(el)

    def compute_needed_move(dst, r, c):
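The sketch below is one hedged guess at how compute_needed_move could be filled in: a BFS over cursor positions that counts single-step arrow moves. It takes the board as an extra parameter (the gist's version presumably closed over it), and it ignores the problem's Ctrl+arrow jump moves for brevity.

from collections import deque

def compute_needed_move(board, dst, r, c):
    # BFS from (r, c) to dst counting arrow-key moves only.
    # The real problem also allows Ctrl+arrow jumps (skip to the next
    # card or the board edge), which would add neighbors here.
    n_rows, n_cols = len(board), len(board[0])
    visited = {(r, c)}
    queue = deque([(r, c, 0)])
    while queue:
        cr, cc, moves = queue.popleft()
        if (cr, cc) == dst:
            return moves
        for dr, dc in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            nr, nc = cr + dr, cc + dc
            if 0 <= nr < n_rows and 0 <= nc < n_cols and (nr, nc) not in visited:
                visited.add((nr, nc))
                queue.append((nr, nc, moves + 1))
    return float('inf')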
@jsrimr
jsrimr / no_4_sol.py
Created March 14, 2022 11:10
BFS applied from each node
from collections import defaultdict, deque
def solution(edges):
    graph = defaultdict(list)
    for (n1, n2) in edges:
        graph[n1].append(n2)
        graph[n2].append(n1)
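The preview ends after building the adjacency list; below is a minimal sketch of the "BFS from each node" step the description refers to (accumulating reachable-node counts is my placeholder for whatever the original computes per node):

from collections import deque

def bfs_count(graph, start):
    # Standard BFS; returns how many nodes are reachable from `start`.
    visited = {start}
    queue = deque([start])
    while queue:
        node = queue.popleft()
        for nxt in graph[node]:
            if nxt not in visited:
                visited.add(nxt)
                queue.append(nxt)
    return len(visited)

# inside solution(): counts = {node: bfs_count(graph, node) for node in graph}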
@jsrimr
jsrimr / autoclick.py
Created February 23, 2022 12:14
autoclick
"""
ref : https://ccurity.tistory.com/231
"""
import datetime
import time
import pyautogui as m
import schedule
import argparse
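The preview stops at the imports, but they outline the whole tool: pyautogui performs the clicks and schedule drives the timing. A minimal sketch of the likely body follows (the position and interval are made-up placeholders, not the gist's values):

def click_once():
    # pyautogui was imported as `m` above; click an assumed fixed position
    m.click(x=500, y=300)
    print(f"clicked at {datetime.datetime.now()}")

schedule.every(10).minutes.do(click_once)

while True:
    schedule.run_pending()
    time.sleep(1)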
@jsrimr
jsrimr / wider_conv.py
Created September 5, 2021 13:24
function preservation
import numpy as np  # needed by the snippet below

def _wider_conv(self, teacher_w1, teacher_b1, teacher_w2, width_coeff, verification):
    new_width = int(width_coeff * teacher_w1.shape[3])
    # pick which existing filters to replicate for the extra width
    rand = np.random.randint(teacher_w1.shape[3], size=(new_width - teacher_w1.shape[3]))
    replication_factor = np.bincount(rand)
    student_w1 = teacher_w1.copy()
    student_w2 = teacher_w2.copy()
    student_b1 = teacher_b1.copy()

    # target layer update (i)
    for i in range(len(rand)):
        teacher_index = rand[i]
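The idea behind "function preservation" here is Net2WiderNet-style widening: each new filter copies a randomly chosen existing one, and the next layer's weights leaving a unit are divided by how many copies of it now exist, so the network's output is unchanged. A self-contained sketch on plain matrices (my own simplification, not the gist's conv version):

import numpy as np

def wider_linear(w1, w2, new_width):
    # Widen the hidden layer between w1 (in x hidden) and w2 (hidden x out)
    # while preserving the composed function x @ w1 @ w2.
    old_width = w1.shape[1]
    rand = np.random.randint(old_width, size=new_width - old_width)
    counts = np.bincount(rand, minlength=old_width) + 1  # copies per original unit
    student_w1 = np.concatenate([w1, w1[:, rand]], axis=1)  # replicate units
    student_w2 = np.concatenate([w2, w2[rand, :]], axis=0)  # replicate outgoing rows
    student_w2 = student_w2 / np.concatenate([counts, counts[rand]])[:, None]
    return student_w1, student_w2

# verification: the widened layers compute the same function
x = np.random.rand(3, 4)
w1, w2 = np.random.rand(4, 5), np.random.rand(5, 6)
s1, s2 = wider_linear(w1, w2, 8)
assert np.allclose(x @ w1 @ w2, x @ s1 @ s2)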
@jsrimr
jsrimr / tpu_up.sh
Created July 17, 2021 03:05
create 90 TPUs at once
ctpu up --noconf --preemptible --zone=us-central1-f --tf-version=1.15.3 --name=incremental-scale-width-232-1-0 --tpu-only &
sleep 15
ctpu up --noconf --preemptible --zone=us-central1-f --tf-version=1.15.3 --name=incremental-scale-width-232-2-0 --tpu-only &
sleep 15
ctpu up --noconf --preemptible --zone=us-central1-f --tf-version=1.15.3 --name=incremental-scale-width-232-3-0 --tpu-only &
sleep 15
ctpu up --noconf --preemptible --zone=us-central1-f --tf-version=1.15.3 --name=incremental-scale-width-232-4-0 --tpu-only &
sleep 15
ctpu up --noconf --preemptible --zone=us-central1-f --tf-version=1.15.3 --name=incremental-scale-width-232-5-0 --tpu-only &
sleep 15
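Since the full file repeats this block 90 times with only the name index changing, the same thing can be written as a loop. A sketch in Python (matching the other sketches on this page) that shells out to ctpu with the gist's staggered 15-second starts:

import subprocess
import time

for i in range(1, 91):
    subprocess.Popen([  # backgrounds the command, like the trailing `&`
        "ctpu", "up", "--noconf", "--preemptible",
        "--zone=us-central1-f", "--tf-version=1.15.3",
        f"--name=incremental-scale-width-232-{i}-0", "--tpu-only",
    ])
    time.sleep(15)  # stagger the requests, as the script does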
@jsrimr
jsrimr / Wscaling.py
Created March 30, 2021 07:29
fast and accurate scaling compared to compound scaling
# compound scaling
a, b, c = 1.2, 1.1, 1.15  # base depth/width/resolution coefficients
                          # (recoverable from the coeff = 1.0 output below)
scale_coeff = [0.5, 1.0, 1.5]
for coeff in scale_coeff:
    print(a ** coeff, b ** coeff, c ** coeff)
# 1.0954451150103321 1.0488088481701516 1.0723805294763609
# 1.2 1.1 1.15
# 1.3145341380123985 1.153689732987167 1.2332376088978148

# W scaling
alpha = .8
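Those base values are EfficientNet's compound-scaling constants, chosen (in that paper, not stated in the gist) so that raising all three to a coefficient phi roughly doubles FLOPs per unit of phi:

# EfficientNet's constraint on the compound-scaling constants:
# alpha * beta**2 * gamma**2 ~ 2, so FLOPs grow roughly as 2**phi.
a, b, c = 1.2, 1.1, 1.15
print(a * b ** 2 * c ** 2)  # ~1.92, close to 2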
@jsrimr
jsrimr / mnist_DDP.py
Created February 5, 2021 06:20
PyTorch MNIST training with multiple GPUs using DataParallel
from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
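The preview shows only the imports; the multi-GPU part the description refers to is essentially a one-line wrap around the model, with the rest matching single-GPU training. A hedged sketch (`Net` is assumed to be the standard MNIST convnet from pytorch/examples, not shown in this preview):

device = torch.device("cuda")
model = Net().to(device)
if torch.cuda.device_count() > 1:
    model = nn.DataParallel(model)  # replicate the model, split each batch across GPUs

optimizer = optim.Adadelta(model.parameters(), lr=1.0)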
@jsrimr
jsrimr / test_tpu_multiproc.py
Created February 5, 2021 06:16
MNIST training code using PyTorch on TPU (GCP)
"""
most code from
https://colab.research.google.com/github/pytorch/xla/blob/master/contrib/colab/mnist-training.ipynb#scrollTo=pTmxZL5ymp8P
"""
import math
from matplotlib import pyplot as plt
M, N = 4, 6
RESULT_IMG_PATH = '/tmp/test_result.png'
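The notebook this gist is based on drives training through torch_xla's multiprocessing launcher; a minimal sketch of that entry point follows (the function and flag names are mine; one process runs per TPU core):

import torch_xla.core.xla_model as xm
import torch_xla.distributed.xla_multiprocessing as xmp

def _mp_fn(index, flags):
    device = xm.xla_device()  # this process's TPU core
    # ... build the model and data loader, then train on `device` ...

xmp.spawn(_mp_fn, args=({},), nprocs=8, start_method='fork')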
@jsrimr
jsrimr / memory_in_multiprocessing.py
Created October 20, 2020 08:24
When you create a pipe with multiprocessing, each process can listen and speak on it. Even if one process closes the pipe's listen or speak end, the other process can still listen and speak through that pipe.
from multiprocessing import Process, Pipe
import time

remotes, work_remotes = zip(*[Pipe() for _ in range(10)])

def worker(remote, parent_remote):
    parent_remote.close()
    print(f"closed {id(parent_remote)}, {time.time()}")
    # print(f"In worker : {id(remotes[0])} is closed. {remotes[0].closed}")