View investigate_fastai_preds.py
from fastai.vision import load_learner, ImageList, open_image
from PIL import Image as PILImage
import numpy as np

learner = load_learner('../data/cropped_faces', 'effnet_test.pkl',
                       test=ImageList.from_folder('/home/josh/git/kgl_deepfake/data/cropped_faces/valid'))

# Opening an image with fastai
fastai_img = open_image('/home/josh/git/kgl_deepfake/data/cropped_faces/valid/vylzsyazmx.mp4_1_REAL.jpg')
print(fastai_img.shape)

# Opening an image manually
manual_img = PILImage.open('/home/josh/git/kgl_deepfake/data/cropped_faces/valid/vylzsyazmx.mp4_1_REAL.jpg')
manual_img_array = np.array(manual_img, dtype=np.float32) / 255.
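A quick sanity check on the two loading paths (not part of the original gist): fastai v1's open_image returns a CHW float tensor scaled to [0, 1], so transposing it to HWC should match the manually normalized array.

# Compare the fastai tensor (CHW, [0, 1]) against the manual HWC array
fastai_array = fastai_img.data.numpy().transpose(1, 2, 0)  # CHW -> HWC
print(fastai_array.shape, manual_img_array.shape)
print(np.allclose(fastai_array, manual_img_array, atol=1e-6))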
View image_loading.py
from PIL import Image
import numpy as np
import cv2

def get_img_pil(path):
    img = Image.open(path)
    arr = np.asarray(img)
    img.close()
    del img
    return arr

def get_img_cv2(path):
    img = cv2.imread(path)  # note: cv2 returns a BGR array, unlike PIL's RGB
    return img  # assumed return; the preview truncates here
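A hypothetical usage example (the path below is a placeholder, not from the gist) comparing the two loaders; cv2's BGR output is channel-reversed before the equality check.

sample_path = 'face.jpg'  # placeholder path
a = get_img_pil(sample_path)
b = get_img_cv2(sample_path)
print(a.shape, a.dtype, b.shape, b.dtype)
# cv2 keeps BGR channel order, so compare against a channel-reversed copy
print(np.array_equal(a, b[..., ::-1]))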
View secondApproach.py
learn.fit_one_cycle(10, max_lr=1e-2)
View firstApproach.py
learn = cnn_learner(data, models.resnet18, pretrained=False, metrics=[f_score])
learn.unfreeze()
learn.fit_one_cycle(10, max_lr=slice(1e-6, 1e-2))
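f_score is referenced but not defined in this preview; a plausible definition under fastai v1, assuming an F1-style metric was intended:

from fastai.metrics import FBeta

# Assumed definition of the f_score metric used above (not shown in the preview)
f_score = FBeta(beta=1)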
View probeModule.py
# Simply pass in a learner and the module you would like to instrument
def probeModule(learn, module):
    hook = StoreHook(learn, modules=flatten_model(module))
    learn.callbacks += [hook]
    return hook
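An illustrative call, assuming a fastai v1 cnn_learner whose model is a Sequential of (body, head); the layer choice is just an example.

# Instrument every sub-module of the model body, then train to collect stats
hook = probeModule(learn, learn.model[0])
learn.fit_one_cycle(1)
print(len(hook.hists))  # filled in by StoreHook during training (see below)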
View StoreHook.py
# Modified from: https://forums.fast.ai/t/confused-by-output-of-hook-output/29514/4
class StoreHook(HookCallback):
    def on_train_begin(self, **kwargs):
        super().on_train_begin(**kwargs)
        self.hists = []

    def hook(self, m, i, o):
        return o

    def on_batch_end(self, train, **kwargs):
        # Body truncated in the preview; assumed completion that records a
        # histogram of each hooked module's activations per training batch
        if train:
            self.hists.append([o.detach().float().cpu().histc(40, -10, 10)
                               for o in self.hooks.stored])
View RNN.py
class RNN:
    # ...
    def step(self, x):
        # update the hidden state
        self.h = np.tanh(np.dot(self.W_hh, self.h) + np.dot(self.W_xh, x))
        # compute the output vector
        y = np.dot(self.W_hy, self.h)
        return y
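The initialization is elided ("# ..." above); the sketch below fills it in with small random matrices purely for illustration (the sizes are arbitrary, not from the source) and steps the RNN over a short random sequence.

import numpy as np

hidden_size, input_size, output_size = 8, 5, 3
rnn = RNN()
rnn.h = np.zeros(hidden_size)
rnn.W_hh = np.random.randn(hidden_size, hidden_size) * 0.01  # hidden -> hidden
rnn.W_xh = np.random.randn(hidden_size, input_size) * 0.01   # input -> hidden
rnn.W_hy = np.random.randn(output_size, hidden_size) * 0.01  # hidden -> output

for t in range(4):
    x = np.random.randn(input_size)
    y = rnn.step(x)
print(y.shape)  # (output_size,)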
View keybindings.json
// Place your key bindings in this file to override the defaults
[
    {
        "key": "ctrl+k ctrl+o",
        "command": "workbench.action.files.openFolder"
    },
    {
        "key": "ctrl+o",
        "command": "workbench.action.files.openFile"
    },