Ligeng Zhu (Lyken17)

Lyken17 / color_cheat.js
Created June 21, 2024 01:04
color plate cheat code
// https://www.shj.work/tools/secha/
// cheat code for the small color testing game
function processPlates() {
    const d = {};
    const plates = document.querySelectorAll("#box span");
    plates.forEach(p => {
        if (!(p.style.backgroundColor in d)) {
            d[p.style.backgroundColor] = [];
        }
        d[p.style.backgroundColor].push(p);
    });
    // the odd plate's color appears only once; click it
    for (const c in d) if (d[c].length === 1) d[c][0].click();
}
Lyken17 / wids.py
Created March 13, 2024 01:18
WIDS Usage example
from llava.wids import ShardListDataset

train_url = "https://storage.googleapis.com/webdataset/fake-imagenet/imagenet-train.json"
'''
{
    "__kind__": "wids-shard-index-v1",
    "wids_version": 1,
    "shardlist": [
        {
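The preview stops inside the shard-index JSON. Below is a minimal usage sketch, assuming the vendored llava.wids mirrors the upstream wids API (ShardListDataset accepts the index URL and returns samples as dicts keyed by extension, with ".jpg" decoded to a PIL image and the label under ".cls"); the transform, batch size, and worker count are illustrative.

import torch
import torchvision.transforms as T
from torch.utils.data import DataLoader
from llava.wids import ShardListDataset

train_url = "https://storage.googleapis.com/webdataset/fake-imagenet/imagenet-train.json"
dataset = ShardListDataset(train_url)      # builds an indexed view over the remote shards
print(len(dataset))                        # sample count comes from the index JSON

sample = dataset[0]                        # random access; the shard is fetched/cached on demand
print(sample["__key__"], list(sample.keys()))

to_tensor = T.Compose([T.Resize(256), T.CenterCrop(224), T.ToTensor()])

def collate(batch):
    # decode each wids sample dict into (image_tensor, label)
    images = torch.stack([to_tensor(s[".jpg"].convert("RGB")) for s in batch])
    labels = torch.tensor([int(s[".cls"]) for s in batch])
    return images, labels

loader = DataLoader(dataset, batch_size=8, num_workers=4, collate_fn=collate)
images, labels = next(iter(loader))
print(images.shape, labels.shape)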
Lyken17 / bench.py
Created February 19, 2024 08:37
single-model training benchmark
import time
from collections import defaultdict
import os, os.path as osp
from datasets import load_dataset
import torch
import json
from transformers import AutoTokenizer, AutoModel, AutoModelForCausalLM, AutoConfig
from tqdm import tqdm
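Only the imports of bench.py survive the preview. Below is a rough sketch of a single-model training-step benchmark in the spirit of those imports; "gpt2" is a stand-in model, and the batch size, sequence length, and step count are assumptions (the real script's dataset and tokenizer handling is replaced with random token ids).

import time
import torch
from transformers import AutoModelForCausalLM

model_name = "gpt2"                      # stand-in; swap in the model under test
device = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(model_name).to(device).train()
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-5)

batch_size, seq_len, steps = 4, 512, 20
input_ids = torch.randint(0, model.config.vocab_size, (batch_size, seq_len), device=device)

# one warmup step so kernel selection / allocator growth stays out of the timing
model(input_ids=input_ids, labels=input_ids).loss.backward()
optimizer.zero_grad(set_to_none=True)

if device == "cuda":
    torch.cuda.synchronize()
start = time.time()
for _ in range(steps):
    loss = model(input_ids=input_ids, labels=input_ids).loss
    loss.backward()
    optimizer.step()
    optimizer.zero_grad(set_to_none=True)
if device == "cuda":
    torch.cuda.synchronize()
elapsed = time.time() - start

tokens = batch_size * seq_len * steps
print(f"{steps / elapsed:.2f} steps/s, {tokens / elapsed:.0f} tokens/s")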
In [42]: k
Out[42]: 'https://www.danezon.com/wp-content/uploads/2021/01/The-Man-From-Toronto-Kevin-Hart-Jacket-510x612.jpg'
In [43]: f[k]
Out[43]:
{'query': '<image> Can you briefly explain the content in the image?',
'orig_text': 'Kevin Hart The Man From Toronto Bomber Jacket',
'output': "In the image, the renowned actor Kevin Hart is captured in a moment of quiet intensity. He stands in front of a vibrant market stall, his gaze directed off to the side, suggesting a moment of deep thought or contemplation. Dressed in a navy blue jacket and a green sweater, he exudes a casual yet stylish vibe. His hands are tucked away in his pockets, a common gesture that often signifies relaxation or introspection.\n\nThe market stall behind him is a riot of colors, with various items on display. The exact nature of these items is not clear from the image, but they add to the lively atmosphere of the scene. The background is filled with people, their faces blurred, indicating that they are not the focus of the image.\n\
Lyken17 / tar_dataset.py
Created December 25, 2023 11:13
tar dataset and imagefolder
import glob
import tarfile
import json
import os, os.path as osp
from io import BytesIO
from PIL import Image, ImageFile
import hashlib
from torch.utils.data import Dataset, get_worker_info, ConcatDataset
from multiprocessing.pool import ThreadPool as Pool
import torch
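tar_dataset.py is likewise cut off at the imports. Below is a sketch of the kind of tar-backed image dataset those imports suggest, exposing the image members of .tar shards like an ImageFolder; the class name, file-extension filter, label handling, and shard path are assumptions, not the gist's actual code.

import glob
import tarfile
from io import BytesIO

from PIL import Image
from torch.utils.data import Dataset, ConcatDataset


class TarImageDataset(Dataset):
    """Expose the image members of a single .tar as an indexable dataset."""

    def __init__(self, tar_path, transform=None):
        self.tar_path = tar_path
        self.transform = transform
        with tarfile.open(tar_path) as tf:
            # index image members up front; bytes are read lazily per item
            self.members = [m for m in tf.getmembers()
                            if m.isfile() and m.name.lower().endswith((".jpg", ".jpeg", ".png"))]
        self._tf = None  # opened lazily so each DataLoader worker gets its own handle

    def __len__(self):
        return len(self.members)

    def __getitem__(self, idx):
        if self._tf is None:
            self._tf = tarfile.open(self.tar_path)
        member = self.members[idx]
        image = Image.open(BytesIO(self._tf.extractfile(member).read())).convert("RGB")
        if self.transform is not None:
            image = self.transform(image)
        return image, member.name


# a folder of shards treated as one dataset, roughly mirroring an ImageFolder layout
tars = sorted(glob.glob("shards/*.tar"))  # hypothetical shard directory
dataset = ConcatDataset([TarImageDataset(t) for t in tars])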
from diffusers import DiffusionPipeline
from diffusers.models.attention_processor import LoRAAttnProcessor
from utils import print_gpu_utilization
# pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0")
pipe = DiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16, variant="fp16"
)
from typing import Any
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import autograd
def scientific_precision(number):
    suffix = ["KB", "MB", "GB", "TB", "PB"]
    for idx_, s in enumerate(suffix):  # assumed completion: scale a byte count to a readable unit
        number /= 1024
        if number < 1024 or s == suffix[-1]:
            return f"{number:.2f} {s}"
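The Any/autograd imports a few lines up are the usual scaffolding for a custom torch.autograd.Function, but the preview ends before any class body appears. The clamp-with-straight-through-gradient below is only an illustrative stand-in for that pattern, not the gist's actual function.

from typing import Any

import torch
from torch import autograd


class ClampSTE(autograd.Function):
    # clamp in forward, pass the gradient straight through in backward
    @staticmethod
    def forward(ctx, x: torch.Tensor, lo: float, hi: float) -> Any:
        return x.clamp(lo, hi)

    @staticmethod
    def backward(ctx, grad_output: torch.Tensor) -> Any:
        # no gradients for the lo/hi bounds
        return grad_output, None, None


x = torch.randn(4, requires_grad=True)
ClampSTE.apply(x, -0.5, 0.5).sum().backward()
print(x.grad)   # all ones; torch.clamp would zero the out-of-range entries instead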
import torch
from torch import nn

# net = nn.Linear(500, 500)
# input = torch.randn(64, 500)
net = nn.Conv2d(3, 3, kernel_size=3, padding=1)
input = torch.randn(1, 3, 32, 32)

# only calculate input grad, prints ('_saved_mat2', torch.Size([500, 500])) in the Linear case
for p in net.parameters():          # freeze parameters so autograd only needs the input grad
    p.requires_grad_(False)
out = net(input.requires_grad_(True))
for name in dir(out.grad_fn):       # dump the tensors saved for the backward pass
    value = getattr(out.grad_fn, name)
    if name.startswith("_saved") and torch.is_tensor(value):
        print((name, value.shape))
Lyken17 / gist:3ce9c32b5e6383b71c84869022de7ebf
Created April 20, 2023 02:00
alpaca_8xA6000_error_script
# e408b27
MODEL=facebook/opt-6.7b
torchrun --nproc_per_node=8 --master_port=24567 train.py \
    --model_name_or_path $MODEL \
    --data_path ./alpaca_data.json \
    --bf16 True \
    --output_dir ./output/$MODEL \
    --num_train_epochs 3 \
    --per_device_train_batch_size 2 \
Lyken17 / flash_attention.py
Created September 30, 2022 10:08
flash attention
batch_size, seq_length, embed_dim = x.size()
# B, T, D
qkv = self.qkv_proj(x) # B, T, 3xE
# head_dim = embed_dim // num_heads
# Separate Q, K, V from linear output
qkv = qkv.reshape(batch_size, seq_length, self.num_heads, 3 * self.head_dim) # B, T, H, 3xHD
qkv = qkv.permute(0, 2, 1, 3) # B, H, T, 3xHD
q, k, v = qkv.chunk(3, dim=-1) # B, H, T, HD
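The preview ends right after q, k, and v are split out. Below is a self-contained sketch of the same layout feeding a fused attention kernel; using torch.nn.functional.scaled_dot_product_attention (which dispatches to a FlashAttention-style kernel on supported hardware) is an assumption about the elided lines, as are the module name and tensor sizes.

import torch
import torch.nn as nn
import torch.nn.functional as F


class SelfAttention(nn.Module):
    def __init__(self, embed_dim: int, num_heads: int):
        super().__init__()
        assert embed_dim % num_heads == 0
        self.num_heads = num_heads
        self.head_dim = embed_dim // num_heads
        self.qkv_proj = nn.Linear(embed_dim, 3 * embed_dim)
        self.out_proj = nn.Linear(embed_dim, embed_dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        batch_size, seq_length, embed_dim = x.size()                              # B, T, D
        qkv = self.qkv_proj(x)                                                    # B, T, 3xE
        qkv = qkv.reshape(batch_size, seq_length, self.num_heads, 3 * self.head_dim)
        qkv = qkv.permute(0, 2, 1, 3)                                             # B, H, T, 3xHD
        q, k, v = qkv.chunk(3, dim=-1)                                            # B, H, T, HD
        # fused attention; uses a FlashAttention-style kernel when available
        out = F.scaled_dot_product_attention(q, k, v)                             # B, H, T, HD
        out = out.transpose(1, 2).reshape(batch_size, seq_length, embed_dim)      # B, T, D
        return self.out_proj(out)


attn = SelfAttention(embed_dim=64, num_heads=8)
print(attn(torch.randn(2, 16, 64)).shape)   # torch.Size([2, 16, 64])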