import pyflashlight
import pyflashlight.nn as nn
import pyflashlight.optim as optim
import random
import math

random.seed(1)

class MyModel(nn.Module):
    def __init__(self):
        super().__init__()
        # The preview truncates here; the layers below are a plausible sketch.
        self.fc1 = nn.Linear(1, 10)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x):
        return self.sigmoid(self.fc1(x))
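A hedged sketch of how such a model would be trained end to end; optim.SGD, model.parameters(), and the dataset placeholder below are assumptions based on the fragments later in this gist, not confirmed pyflashlight API:

model = MyModel()
criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.1)  # assumed signature

for epoch in range(10):
    for x, y in dataset:  # hypothetical iterable of (input, label) Tensors
        optimizer.zero_grad()          # clear gradients from the previous step
        loss = criterion(model(x), y)  # forward pass plus loss
        loss.backward()                # backpropagate through the autograd graph
        optimizer.step()               # apply the gradient update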
from abc import ABC
from norch.tensor import Tensor

class Optimizer(ABC):
    """
    Abstract class for optimizers
    """
    def __init__(self, parameters):
        if isinstance(parameters, Tensor):
            # A bare Tensor is rejected; an iterable of parameters is expected.
            raise TypeError("parameters should be an iterable of Tensors, got a single Tensor")
        # The preview truncates here; storing the parameter list is a plausible sketch.
        self.parameters = list(parameters)
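To make the abstract base concrete, here is a hypothetical SGD subclass; step() and the in-place Tensor update it performs are assumptions, not confirmed norch API:

class SGD(Optimizer):
    def __init__(self, parameters, lr=0.01):
        super().__init__(parameters)
        self.lr = lr

    def step(self):
        # Vanilla gradient descent: p <- p - lr * grad.
        # Assumes Tensor defines a mutating __isub__ and scalar __mul__.
        for parameter in self.parameters:
            if parameter.grad is not None:
                parameter -= parameter.grad * self.lr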
from .module import Module
import math

class Sigmoid(Module):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        # Elementwise sigmoid via Tensor operator overloads: 1 / (1 + e^(-x)).
        return 1.0 / (1.0 + math.e ** (-x))
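A quick sanity check, assuming Tensor implements the operator overloads that forward relies on (__neg__, __rpow__, __radd__, and so on):

from norch.tensor import Tensor

activation = Sigmoid()
out = activation.forward(Tensor([0.0]))
print(out)  # expected: roughly [0.5], since 1 / (1 + e^0) = 0.5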
from .module import Module

class MSELoss(Module):
    def __init__(self):
        super().__init__()

    def forward(self, predictions, labels):
        assert labels.shape == predictions.shape, \
            "Labels and predictions shape does not match: {} and {}".format(labels.shape, predictions.shape)
        # The preview truncates here; the mean of squared errors is a plausible sketch.
        return ((predictions - labels) ** 2).sum() / predictions.numel()
from ..module import Module
from ..parameter import Parameter

class Linear(Module):
    def __init__(self, input_dim, output_dim):
        super().__init__()
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.weight = Parameter(shape=[self.output_dim, self.input_dim])
        self.bias = Parameter(shape=[self.output_dim, 1])

    def forward(self, x):
        # The preview truncates here; the affine map below is a plausible sketch.
        return self.weight @ x + self.bias
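The shapes imply a column-vector convention: weight is [output_dim, input_dim] and bias is [output_dim, 1], so forward maps a [input_dim, 1] input to a [output_dim, 1] output. A hedged usage sketch, assuming Tensor supports nested-list construction and @:

from norch.tensor import Tensor

layer = Linear(input_dim=3, output_dim=2)
x = Tensor([[1.0], [2.0], [3.0]])  # column vector, shape [3, 1]
y = layer.forward(x)               # weight([2, 3]) @ x([3, 1]) + bias([2, 1]) -> shape [2, 1]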
from .parameter import Parameter
from collections import OrderedDict
from abc import ABC
import inspect

class Module(ABC):
    """
    Abstract class for modules
    """
    def __init__(self):
        # The preview truncates here; the registries below are a plausible sketch.
        self._modules = OrderedDict()
        self._params = OrderedDict()

    def __call__(self, *inputs):
        return self.forward(*inputs)

import random

def generate_random_list(shape):
    """
    Generate a list with random numbers and shape 'shape'
    [4, 2] --> [[rand1, rand2], [rand3, rand4], [rand5, rand6], [rand7, rand8]]
    """
    if len(shape) == 0:
        return []
    else:
        inner_shape = shape[1:]
        if len(inner_shape) == 0:
            # Leaf dimension; a uniform fill is a plausible completion of the truncated preview.
            return [random.uniform(-1, 1) for _ in range(shape[0])]
        else:
            return [generate_random_list(inner_shape) for _ in range(shape[0])]
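The helper is pure Python, so its shape behavior is easy to check directly:

nested = generate_random_list([4, 2])
print(len(nested), len(nested[0]))  # 4 2 -> four inner lists of two random floats each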
from norch.tensor import Tensor
from norch.utils import utils
import random

class Parameter(Tensor):
    """
    A parameter is a trainable tensor.
    """
    def __init__(self, shape):
        data = utils.generate_random_list(shape=shape)
        # The preview truncates here; initializing the underlying Tensor with
        # gradients enabled is a plausible sketch.
        super().__init__(data, requires_grad=True)
# The methods below appear to be excerpted from the Tensor class itself
# (Parameter inherits them), so they are shown as Tensor methods here.
class Tensor:
    def zero_grad(self):
        # Drop the accumulated gradient before the next backward pass.
        self.grad = None

    def detach(self):
        # Cut this tensor out of the autograd graph.
        self.grad = None
        self.grad_fn = None

    def backward(self, gradient=None):
        if not self.requires_grad:
            return
        if gradient is None:
            if self.shape == [1]:
                gradient = Tensor([1])  # dx/dx = 1 case
            else:
                raise RuntimeError("Gradient argument must be specified for non-scalar tensors.")
        # The preview truncates here; the full method accumulates self.grad and
        # walks grad_fn backwards through the graph.
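A hedged end-to-end sketch of the scalar branch above; it assumes Tensor defines an autograd-tracking __mul__ and that Parameter enables requires_grad as sketched earlier:

p = Parameter(shape=[1])  # trainable scalar-shaped tensor
loss = p * p              # assumed: elementwise multiply tracked by autograd
loss.backward()           # no gradient argument needed: loss.shape == [1]
print(p.grad)             # d(p*p)/dp = 2p
p.zero_grad()             # reset before the next backward pass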