Abhijit Mondal (funktor) - GitHub Gists
// Gist fragment: headers for a PyTorch C++ extension that mixes
// TBB and OpenMP for parallelism.
#include <Python.h>            // CPython C API
#include <torch/extension.h>   // PyTorch C++ extension API (ATen + pybind11)
#include <tbb/tbb.h>           // Intel oneTBB parallel algorithms
#include <unistd.h>
#include <stdio.h>
#include <iostream>
#include <deque>
#include <vector>
#include <random>
#include <omp.h>               // OpenMP runtime API
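The headers above point at a PyTorch C++ extension built with TBB and OpenMP. As a hedged sketch of how such a file is typically compiled and loaded from Python (the module name "par_ext", the source file "ext.cpp", and the compiler flags are assumptions for illustration, not funktor's actual setup):

from torch.utils.cpp_extension import load

# Hypothetical JIT build-and-load of the extension above; the name,
# source path, and flags are placeholders, not taken from the gist.
par_ext = load(
    name="par_ext",
    sources=["ext.cpp"],
    extra_cflags=["-fopenmp", "-O3"],
    extra_ldflags=["-ltbb", "-fopenmp"],
    verbose=True,
)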
# Gist fragment (imports only); suggests a PyTorch Lightning pipeline
# with numpy/pandas data handling and joblib persistence.
import math
import os
import numpy as np
import pandas as pd
import random
import uuid
import joblib
import pytorch_lightning as pl
import torch
# Gist fragment (imports only); suggests a PyTorch model/training script
# built on torch.nn, torch.optim, and PyTorch Lightning.
import math
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as data
import pytorch_lightning as pl
import numpy as np
funktor / autograd_simple.py
Last active April 9, 2024 11:46
A very simple implementation of forward-mode automatic differentiation.
from functools import wraps
import math

# Automatic differentiation of exp(1 + x^4*(cos(log(x)) + sin(log(x))))
class Node:
    # A node in the computation graph: holds a primitive function
    # and its derivative, to be chained via the rules of calculus.
    def __init__(self):
        self.function = None     # the primitive, e.g. math.exp
        self.d_function = None   # its derivative, e.g. also math.exp
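The preview cuts off after the Node stub, so here is a minimal sketch of the same idea, forward-mode AD via dual numbers, applied to the function named in the comment above. The Dual class and the d_* helpers are illustrative names of my own, not funktor's implementation: each value carries a (value, derivative) pair, and every primitive propagates both through the chain rule.

import math

class Dual:
    # Dual number (v, d): v is the value, d the derivative w.r.t. the input.
    def __init__(self, v, d=0.0):
        self.v, self.d = v, d

    def __add__(self, other):
        other = other if isinstance(other, Dual) else Dual(other)
        return Dual(self.v + other.v, self.d + other.d)
    __radd__ = __add__

    def __mul__(self, other):
        other = other if isinstance(other, Dual) else Dual(other)
        return Dual(self.v * other.v, self.d * other.v + self.v * other.d)
    __rmul__ = __mul__

    def __pow__(self, n):  # integer exponent is enough for x**4
        return Dual(self.v ** n, n * self.v ** (n - 1) * self.d)

# Primitives paired with their derivatives via the chain rule.
def d_exp(x): return Dual(math.exp(x.v), math.exp(x.v) * x.d)
def d_log(x): return Dual(math.log(x.v), x.d / x.v)
def d_sin(x): return Dual(math.sin(x.v), math.cos(x.v) * x.d)
def d_cos(x): return Dual(math.cos(x.v), -math.sin(x.v) * x.d)

def f(x):
    # exp(1 + x^4 * (cos(log(x)) + sin(log(x))))
    return d_exp(1 + x**4 * (d_cos(d_log(x)) + d_sin(d_log(x))))

y = f(Dual(2.0, 1.0))  # seed d=1.0 to differentiate w.r.t. x
print(y.v, y.d)        # f(2) and f'(2)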
import numpy as np

# Sieve of Eratosthenes: on return, arr[i] == 1 iff i is prime.
def sieve(n):
    arr = [1]*(n+1)
    arr[0], arr[1] = 0, 0
    sqrt_n = int(np.sqrt(n))
    for i in range(2, sqrt_n+1):
        if arr[i] == 1:
            # Mark every multiple of i as composite, starting at i^2;
            # smaller multiples were already crossed off by smaller primes.
            j = i**2
            while j <= n:
                arr[j] = 0
                j += i
    return arr
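Assuming the completed sieve above, a quick usage check:

print([i for i, is_p in enumerate(sieve(30)) if is_p])
# -> [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]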