Kerem Turgutlu (KeremTurgutlu)
Public gists
@KeremTurgutlu
KeremTurgutlu / gpt_eval_templates.py
Created October 28, 2023 05:03
GPT-Eval Templates
gpt_eval_template_coherence = """
You will be given a title: [TITLE] and a description: [DESC], written from the information in a real estate listing in Turkish.
Your task is to rate the title and description on one metric.
Please make sure you read and understand these instructions carefully. Please keep this
document open while reviewing, and refer to it as needed.
Evaluation Criteria:
Coherence (1-5) - the collective quality of all sentences. We align this dimension with
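A hedged sketch of how such a template might be consumed: substitute the listing fields into the placeholders before sending the prompt to a grader model. The fill_template helper and the sample title/description below are hypothetical, not part of the gist.

# Hypothetical helper: substitute listing fields into the eval template.
def fill_template(template: str, title: str, desc: str) -> str:
    return template.replace("[TITLE]", title).replace("[DESC]", desc)

prompt = fill_template(gpt_eval_template_coherence,
                       title="Deniz manzarali 3+1 daire",
                       desc="Kadikoy'de genis ve ferah bir daire...")
# `prompt` would then go to the grader LLM, which returns a 1-5 coherence score.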
@KeremTurgutlu
KeremTurgutlu / multipack_sampler_flash_attn.py
Last active October 7, 2023 04:44
Multipack Sampler x Flash Attention
"""
Testing flash attention with multipacking, which packs sequences using https://github.com/imoneoi/multipack_sampler
and passes a single sequence of `1 x (bs x seqlen)` to the model to avoid padding.
An alternative is to pass a block-diagonal attention bias, but the following uses flash attention 2, which
is much faster.
Multipacking can be used to speed up both pretraining and finetuning.
"""
@KeremTurgutlu
KeremTurgutlu / ddp_batch_all_gather.py
Last active September 20, 2023 00:57
Debugging: Distributed InfoNCE Loss
# CLIP's contrastive loss is computed against the negative batch samples from all GPUs.
# How to implement that?
# For more info: https://github.com/openai/CLIP/issues/29
import os
import sys
import tempfile
import torch
import torch.distributed as dist
import torch.nn as nn
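The preview stops at the imports. The usual pattern for a distributed InfoNCE loss (a sketch under that assumption, with hypothetical helper names) is to all_gather the embeddings from every rank and splice the local tensor back in so gradients still flow:

def gather_features(feats):
    "All-gather embeddings across ranks, keeping the local gradient path."
    world_size, rank = dist.get_world_size(), dist.get_rank()
    gathered = [torch.zeros_like(feats) for _ in range(world_size)]
    dist.all_gather(gathered, feats)   # all_gather does not propagate gradients
    gathered[rank] = feats             # re-insert the local tensor, which has them
    return torch.cat(gathered, dim=0)

def info_nce(img_emb, txt_emb, scale, rank, bs):
    all_txt = gather_features(txt_emb)
    logits = scale * img_emb @ all_txt.t()  # local images vs. texts from all ranks
    labels = torch.arange(bs, device=logits.device) + rank * bs
    return nn.functional.cross_entropy(logits, labels)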
@KeremTurgutlu
KeremTurgutlu / nn_interpolate.py
Last active May 22, 2023 18:19
Nearest Neighbor Interpolation in Numpy
from collections import Counter
def nn_interpolate(A, new_size):
"""
Nearest Neighbor Interpolation, Step by Step
"""
# get sizes
old_size = A.shape
# calculate row and column ratios
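The preview cuts off at the ratio computation. One way the remaining steps could look (a sketch, not necessarily the gist's exact code): map every output index back to its nearest source index and fancy-index into A.

import numpy as np

def nn_interpolate_sketch(A, new_size):
    old_h, old_w = A.shape
    new_h, new_w = new_size
    row_ratio, col_ratio = new_h / old_h, new_w / old_w
    # nearest source row/column for every output position
    rows = np.floor(np.arange(new_h) / row_ratio).astype(int)
    cols = np.floor(np.arange(new_w) / col_ratio).astype(int)
    return A[rows][:, cols]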
KeremTurgutlu / reddit_comments.tsv
TsvHttpData-1.0
https://files.pushshift.io/reddit/comments/RC_2005-12.zst
@KeremTurgutlu
KeremTurgutlu / ema_swa.py
Last active July 26, 2022 03:10
EMA and SWA callbacks for different model averaging techniques
from fastai.vision.all import *
__all__ = ["EMA", "SWA"]
class EMA(Callback):
"https://fastai.github.io/timmdocs/training_modelEMA"
order,run_valid=5,False
def __init__(self, decay=0.9999):
super().__init__()
self.decay = decay
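For reference, the update such an EMA callback applies after every optimizer step is the standard exponential moving average; a minimal torch sketch, with the fastai callback wiring omitted:

import torch

@torch.no_grad()
def ema_update(ema_model, model, decay=0.9999):
    # ema_w <- decay * ema_w + (1 - decay) * w, applied parameter-wise
    for ema_p, p in zip(ema_model.parameters(), model.parameters()):
        ema_p.mul_(decay).add_(p, alpha=1 - decay)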
KeremTurgutlu / train_sam.py
from fastai.vision.all import *
from torch.cuda.amp import autocast, GradScaler
from torch.cuda.amp.grad_scaler import _refresh_per_optimizer_state
from sam import SAM
class FastaiSched:
def __init__(self, optimizer, max_lr):
self.optimizer = optimizer
self.lr_sched = combine_scheds([0.1,0.9], [SchedLin(1e-8,max_lr), SchedCos(max_lr,1e-8)])
self.update(0)
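Given the `from sam import SAM` import, the training step presumably performs SAM's two forward/backward passes. A hedged sketch assuming the first_step/second_step API of the widely used PyTorch SAM implementation; model, xb, yb, loss_fn, and optimizer are stand-ins:

# One SAM step: ascend to the adversarial weights, then descend.
loss = loss_fn(model(xb), yb)
loss.backward()
optimizer.first_step(zero_grad=True)   # perturb weights toward higher loss

loss_fn(model(xb), yb).backward()      # second pass at the perturbed weights
optimizer.second_step(zero_grad=True)  # update from the original weights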
@KeremTurgutlu
KeremTurgutlu / zero_training.py
Created March 11, 2021 01:53
ZeRO optimizer example
import wandb
from fastai.callback.wandb import WandbCallback
from fastai.distributed import *
torch.backends.cudnn.benchmark = True
from zero_optimizer import ZeroRedundancyOptimizer
@patch
def after_batch(self: WandbCallback):
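The preview cuts off inside the patched `after_batch`. Separately, an optimizer of the same name now ships in PyTorch itself, so a present-day sketch of the setup could use the built-in version; `model` is a stand-in for the DDP-wrapped network:

import torch
from torch.distributed.optim import ZeroRedundancyOptimizer

# Shards optimizer state across DDP ranks instead of replicating it.
opt = ZeroRedundancyOptimizer(model.parameters(),
                              optimizer_class=torch.optim.AdamW,
                              lr=1e-4)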
@KeremTurgutlu
KeremTurgutlu / distributed_wandb.py
Created March 2, 2021 22:54
Fastai WANDB Callback with DDP
@call_parse
def main(
size: Param("Image resolution", int)=224,
bs: Param("Batch Size", int)=128,
epochs: Param("Number of epochs for training", int)=1,
lr: Param("Learning rate for training", float)=5e-5):
WANDB = True
# start wandb
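Under DDP the usual trick is to initialise wandb on rank 0 only, so each process does not open its own run. A minimal sketch; the project name is hypothetical:

import os
import wandb

# Only the rank-0 process talks to wandb; other ranks train silently.
if int(os.environ.get("RANK", "0")) == 0:
    wandb.init(project="fastai-ddp")  # hypothetical project name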
KeremTurgutlu / basic_batch_all_gather.py
import os
import torch
import torch.distributed as dist
from torch.multiprocessing import Process
from torchvision import datasets, transforms
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import random
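The preview ends at the imports. A self-contained sketch of the batch all-gather being debugged, using the `gloo` backend so it runs on CPU:

def run(rank, world_size):
    os.environ["MASTER_ADDR"] = "127.0.0.1"
    os.environ["MASTER_PORT"] = "29500"
    dist.init_process_group("gloo", rank=rank, world_size=world_size)
    batch = torch.full((2, 3), float(rank))           # each rank's local batch
    gathered = [torch.zeros_like(batch) for _ in range(world_size)]
    dist.all_gather(gathered, batch)                  # collect every rank's batch
    print(rank, torch.cat(gathered).shape)            # (world_size * 2, 3)
    dist.destroy_process_group()

if __name__ == "__main__":
    world_size = 2
    procs = [Process(target=run, args=(r, world_size)) for r in range(world_size)]
    for p in procs: p.start()
    for p in procs: p.join()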