Skip to content

Instantly share code, notes, and snippets.

View iacolippo's full-sized avatar

Iacopo Poli iacolippo

View GitHub Profile
@iacolippo
iacolippo / ray_deep_architect_ex2.py
Created April 24, 2020 10:07
Second example of using ray and deep_architect together - deepcopy protocol error
import os
import ray
from ray import tune
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
@iacolippo
iacolippo / ray_deep_architect_ex1.py
Last active April 24, 2020 10:06
First example of using ray and deep_architect together - ray logging does not work
import os
import ray
from ray import tune
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@iacolippo
iacolippo / ex.cpp
Created October 19, 2018 12:44
RuntimeError: "variable impl does not have is_contiguous" — PyTorch C++ extension
#include <torch/extension.h>
#include <cmath>
#include <iostream>
#include <vector>
at::Tensor ex_forward(
at::Tensor input
) {
auto n_samples = input.size(0);
from time import time
import torch
import torch.nn as nn
import torch.nn.functional as F
class ConvNetV0(nn.Module):
    """Skeleton convolutional network module (layers truncated in this gist preview)."""

    def __init__(self):
        # Zero-argument super() is equivalent to super(ConvNetV0, self) in Python 3.
        super().__init__()
from torchvision.datasets import MNIST
import torchvision.transforms as transforms
import torch
import torch.legacy.nn as lnn
import torch.legacy.optim as loptim
train_dataset = MNIST(root='./data',
train=True,
transform=transforms.ToTensor(),
from __future__ import print_function, division
import math
import torch
from torch.autograd import Function, Variable
import torch.nn as nn
import torch.nn.functional as F
from ErrorFeedback import EF
from FunctionErrorFeedback import ErrorFeedbackFunction
from SequentialSG import SequentialSG
from FunctionErrorFeedback import ErrorFeedbackFunction
from torch.autograd import Function, Variable
import torch
import torch.nn as nn
class EF(nn.Module):
    """Error-feedback module holding a fixed feedback matrix.

    The matrix is created uninitialized with shape (error_dim, layer_dim)
    and stored as a plain tensor attribute (not a registered parameter).
    """

    def __init__(self, layer_dim, error_dim):
        # Zero-argument super() is equivalent to super(EF, self) in Python 3.
        super().__init__()
        # NOTE(review): torch.Tensor(...) leaves values uninitialized — the
        # caller presumably fills or normalizes this afterwards; confirm.
        self.feedback = torch.Tensor(error_dim, layer_dim)
import torch
from torch.autograd import Function, Variable
class ErrorFeedbackFunction(Function):
    """Identity-forward autograd function used for error feedback.

    The forward pass simply passes ``input`` through unchanged; ``input``
    and ``feedback`` are stashed on the context so a (not shown here)
    backward pass can use the feedback matrix instead of the true gradient.
    """

    @staticmethod
    def forward(ctx, input, feedback):
        """Return ``input`` untouched, saving both tensors for backward."""
        ctx.save_for_backward(input, feedback)
        return input
import torch.nn as nn
class SequentialSG(nn.Sequential):
def accGradParameters(self, input, gradOutput, scale=1):
currentGradOutput = gradOutput
currentModule = self.modules[-1]
for i in range(len(self.modules)-1, 0, -1):
previousModule = self.modules[i]
if currentModule.__class__.name == 'ErrorFeedback':