renesax14 / simple_meta_lstm_meta_learner_parametrized_optimizer.py
Trainable optimizer with the PyTorch higher library
# Based on the paper "OPTIMIZATION AS A MODEL FOR FEW-SHOT LEARNING": https://openreview.net/pdf?id=rJY0-Kcll
from torch.optim import Optimizer

class EmptySimpleMetaLstm(Optimizer):
    def __init__(self, params, trainable_opt_model, trainable_opt_state, *args, **kwargs):
        # Store the meta-learner (the trainable optimizer) and its state in the param-group defaults.
        defaults = {
            'trainable_opt_model': trainable_opt_model,
            'trainable_opt_state': trainable_opt_state,
            'args': args,
            'kwargs': kwargs,
        }
        super().__init__(params, defaults)
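
On its own this wrapper never updates anything; with the higher library, the learned update rule lives in a companion DifferentiableOptimizer that is registered against the wrapper. The code below is a minimal, hypothetical sketch of that pattern, not the gist's full code: SimpleMetaLstmDiffOpt, the coordinate-wise meta_learner network, and all shapes and hyperparameters are illustrative assumptions, and it assumes higher.register_optim, higher.innerloop_ctx, and the DifferentiableOptimizer._update hook from the higher API.

# Hedged sketch (not the gist author's exact code): a coordinate-wise learned
# optimizer registered with higher so EmptySimpleMetaLstm can drive an inner loop.
import torch
import torch.nn as nn
import torch.nn.functional as F
import higher

class SimpleMetaLstmDiffOpt(higher.optim.DifferentiableOptimizer):
    def _update(self, grouped_grads, **kwargs):
        # self.meta_learner is attached by the training loop below; it maps each
        # gradient entry to an update entry (a coordinate-wise learned update rule).
        for group, grads in zip(self.param_groups, grouped_grads):
            for p_idx, (p, g) in enumerate(zip(group['params'], grads)):
                if g is None:
                    continue
                update = self.meta_learner(g.flatten().unsqueeze(-1)).view_as(p)
                group['params'][p_idx] = p - update

# Tell higher which differentiable optimizer corresponds to the dummy wrapper.
higher.register_optim(EmptySimpleMetaLstm, SimpleMetaLstmDiffOpt)

# Toy meta-training loop (illustrative data and hyperparameters).
model = nn.Linear(4, 1)
meta_learner = nn.Sequential(nn.Linear(1, 32), nn.ReLU(), nn.Linear(32, 1))
meta_opt = torch.optim.Adam(meta_learner.parameters(), lr=1e-3)
inner_opt = EmptySimpleMetaLstm(model.parameters(), meta_learner, None)

for episode in range(5):
    x_spt, y_spt = torch.randn(8, 4), torch.randn(8, 1)
    x_qry, y_qry = torch.randn(8, 4), torch.randn(8, 1)
    meta_opt.zero_grad()
    with higher.innerloop_ctx(model, inner_opt, copy_initial_weights=False) as (fmodel, diffopt):
        # Attach the live meta-learner so the outer backward pass reaches its
        # original parameters rather than a copy stored in the param groups.
        diffopt.meta_learner = meta_learner
        for _ in range(3):  # inner adaptation steps
            diffopt.step(F.mse_loss(fmodel(x_spt), y_spt))
        outer_loss = F.mse_loss(fmodel(x_qry), y_qry)
        outer_loss.backward()  # grads flow into meta_learner through the inner updates
    meta_opt.step()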
renesax14 / checking_copy_initial_weight.py
Checking that the gradients are not zero when copy_initial_weights is False
def test_training_initial_weights():
    import torch
    import torch.optim as optim
    import torch.nn as nn
    from collections import OrderedDict

    ## training config
    #device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    episodes = 5
    nb_inner_train_steps = 5
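
For context, "copy weights" here refers to the copy_initial_weights flag of higher.innerloop_ctx: when it is False, the patched model's initial fast weights are the original module parameters, so the outer backward pass should populate their .grad fields with non-zero values. Below is a minimal, hypothetical sketch of that check (toy model and random data; not necessarily this gist's own continuation).

# Hedged sketch: verify the original parameters receive non-zero gradients
# when copy_initial_weights=False.
import torch
import torch.nn as nn
import torch.nn.functional as F
import higher

model = nn.Linear(4, 1)
inner_opt = torch.optim.SGD(model.parameters(), lr=0.1)

x_spt, y_spt = torch.randn(8, 4), torch.randn(8, 1)
x_qry, y_qry = torch.randn(8, 4), torch.randn(8, 1)

with higher.innerloop_ctx(model, inner_opt, copy_initial_weights=False) as (fmodel, diffopt):
    for _ in range(5):  # nb_inner_train_steps
        diffopt.step(F.mse_loss(fmodel(x_spt), y_spt))
    outer_loss = F.mse_loss(fmodel(x_qry), y_qry)
    outer_loss.backward()  # backprops through the inner loop into model's params

for name, p in model.named_parameters():
    assert p.grad is not None and p.grad.abs().sum() > 0, f"zero grad for {name}"
    print(name, p.grad.norm())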