Moritz Freidank (MFreidank)

  • Grenzach-Wyhlen, Germany

MFreidank / torch_determinism_flags.py
import torch

# Force cuDNN to choose deterministic convolution algorithms.
torch.backends.cudnn.deterministic = True
# Disable cuDNN benchmarking/autotuning so algorithm selection is reproducible across runs.
torch.backends.cudnn.benchmark = False

MFreidank / tf_determinism_patch.py
import tensorflow as tf
from tfdeterminism import patch

# Patch TensorFlow so that GPU ops run deterministically
# (provided by the tensorflow-determinism package).
patch()

@MFreidank
MFreidank / tf_deterministic_ops.py
Created Jun 27, 2020
Environment variable enabling deterministic op kernels in tensorflow>=2.1
import os

import tensorflow as tf

# Request deterministic op implementations (mainly on GPU) in tensorflow>=2.1.
os.environ["TF_DETERMINISTIC_OPS"] = "1"

@MFreidank
MFreidank / transformers_classifier_torchserve_handler.py
Last active Dec 6, 2020
A handler compatible with https://github.com/pytorch/serve (TorchServe) for huggingface/transformers sequence classification models.
from abc import ABC
import json
import logging
import os
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from ts.torch_handler.base_handler import BaseHandler
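
The preview above shows only the imports. Below is a minimal sketch of what such a handler typically looks like, following the ts.torch_handler.base_handler.BaseHandler contract (initialize/preprocess/inference/postprocess); the class name, request parsing and tokenizer settings are illustrative assumptions, not the gist's actual code.

from abc import ABC

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from ts.torch_handler.base_handler import BaseHandler


class TransformersClassifierHandler(BaseHandler, ABC):
    def initialize(self, ctx):
        # TorchServe passes a context pointing at the extracted model archive.
        model_dir = ctx.system_properties.get("model_dir")
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.tokenizer = AutoTokenizer.from_pretrained(model_dir)
        self.model = AutoModelForSequenceClassification.from_pretrained(model_dir)
        self.model.to(self.device).eval()
        self.initialized = True

    def preprocess(self, data):
        # Requests arrive as a list of dicts with "data" or "body" payloads.
        text = data[0].get("data") or data[0].get("body")
        if isinstance(text, (bytes, bytearray)):
            text = text.decode("utf-8")
        return self.tokenizer(text, return_tensors="pt").to(self.device)

    def inference(self, inputs):
        with torch.no_grad():
            logits = self.model(**inputs)[0]
        return logits.argmax(dim=-1)

    def postprocess(self, predictions):
        # TorchServe expects one response element per request in the batch.
        return predictions.tolist()
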
@MFreidank
MFreidank / guided_backprop.py
Created Oct 23, 2018
PyTorch guided backpropagation -- ResNet-compatible version
"""
Created on Thu Oct 26 11:23:47 2017
Original Author:
@author: Utku Ozbulak - github.com/utkuozbulak
Changes for ResNet Compatibility:
Moritz Freidank - github.com/MFreidank
"""
import torch
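
Only the module header of the gist is previewed above. For context, here is a minimal sketch of guided backpropagation in PyTorch: during the backward pass, gradients are clamped to zero at every ReLU so that only positive gradients flow back to the input. The hook-based structure (register_full_backward_hook, PyTorch >= 1.8) and the resnet18 example are illustrative assumptions, not the gist's code, which adapts utkuozbulak's implementation for ResNet.

import torch
import torch.nn as nn
from torchvision.models import resnet18


def _clamp_relu_grad(module, grad_input, grad_output):
    # Guided backprop: let only positive gradients flow back through ReLUs.
    return (torch.clamp(grad_input[0], min=0.0),)


def guided_backprop(model, image, target_class):
    # Disable in-place ReLUs so backward hooks behave correctly, then hook them.
    handles = []
    for module in model.modules():
        if isinstance(module, nn.ReLU):
            module.inplace = False
            handles.append(module.register_full_backward_hook(_clamp_relu_grad))

    image = image.clone().requires_grad_(True)
    output = model(image)
    model.zero_grad()
    output[0, target_class].backward()

    for handle in handles:
        handle.remove()
    return image.grad


# Illustration only: random weights and a random input image.
model = resnet18().eval()
saliency = guided_backprop(model, torch.randn(1, 3, 224, 224), target_class=0)
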
@MFreidank
MFreidank / Padam.py
Created Sep 4, 2018
Padam Keras Optimizer
from keras import backend as K
from keras.optimizers import Optimizer


class Padam(Optimizer):
    def __init__(self, lr=1e-1, beta_1=0.9, beta_2=0.999,
                 epsilon=1e-8, decay=0., amsgrad=False, partial=1. / 4., **kwargs):
        """ Partially adaptive momentum estimation optimizer.

        # Arguments
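
The preview cuts off inside the docstring. As a reference for the update rule, the sketch below shows one Padam step in plain numpy (function name and signature are illustrative; bias correction and the amsgrad variant are omitted): the moments are computed as in Adam, but the adaptive denominator is raised to a partial power 2*p with p in (0, 1/2], so p = 1/2 recovers Adam and p -> 0 approaches SGD with momentum.

import numpy as np


def padam_step(param, grad, m, v, lr=1e-1, beta_1=0.9, beta_2=0.999,
               epsilon=1e-8, partial=1. / 4.):
    # Exponential moving averages of the gradient and its square (as in Adam).
    m = beta_1 * m + (1. - beta_1) * grad
    v = beta_2 * v + (1. - beta_2) * np.square(grad)
    # Partially adaptive step: raise the denominator to the power 2 * partial.
    denom = (np.sqrt(v) + epsilon) ** (2. * partial)
    param = param - lr * m / denom
    return param, m, v
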
@MFreidank
MFreidank / infinite_dataloader.py
Last active Apr 27, 2021
A pytorch DataLoader that generates an unbounded/infinite number of minibatches from the dataset.
from torch.utils.data import DataLoader


class InfiniteDataLoader(DataLoader):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Initialize an iterator over the dataset.
        self.dataset_iterator = super().__iter__()

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self.dataset_iterator)
        except StopIteration:  # Dataset exhausted; start over with a fresh iterator.
            self.dataset_iterator = super().__iter__()
            return next(self.dataset_iterator)
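
A hypothetical usage example (my_dataset is a placeholder for any torch.utils.data.Dataset): since __iter__ returns the loader itself and __next__ never raises StopIteration, training can be driven by a fixed number of steps instead of epochs.

loader = InfiniteDataLoader(my_dataset, batch_size=32, shuffle=True)
for step, batch in zip(range(10_000), loader):
    ...  # one training step per minibatch, independent of epoch boundaries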