Skip to content

Instantly share code, notes, and snippets.

Avatar
🏠
Working from home

Ligeng Zhu Lyken17

🏠
Working from home
View GitHub Profile
@Lyken17
Lyken17 / pth-tvm-mbv2.py
Last active Jan 24, 2022
test mbv2 numerical issue
View pth-tvm-mbv2.py
import torch
import torch.nn as nn
import torchvision
from torchvision import models
import numpy as np
import tvm
from tvm import relay
from tvm.contrib import graph_executor
@Lyken17
Lyken17 / fuse-bn.py
Created Jan 10, 2022
[pytorch] Fuse Conv2d and BatchNorm at module level
View fuse-bn.py
import torch as th
import torch
import torch.nn as nn
import torch.nn.functional as F
def fuse_bn_to_conv(module):
module_output = module
if isinstance(module, (nn.Sequential)):
num_modules = len(module)
for idx in range(num_modules-1):
@Lyken17
Lyken17 / satori.md
Last active Nov 29, 2021
Satori Usage Tips
View satori.md

Launch an interactive Job

srun --gres=gpu:4 -N 1 --mem=1T --time 1:00:00 -I --pty /bin/bash

Submit a job

Cancel a job

scancel <jobnumber>

Check the job queue

View conv2d_gradient_tvm.py
import numpy as np
import tvm
from tvm import relay
from tvm import relay, auto_scheduler
from tvm.relay import testing
SEMVER = '#[version = "0.0.5"]\n'
@Lyken17
Lyken17 / conv2dt.py
Last active Nov 8, 2021
tvm_transpose_conv_issue
View conv2dt.py
import numpy as np
import tvm
from tvm import relay
from tvm import relay, auto_scheduler
from tvm.relay import testing
SEMVER = '#[version = "0.0.5"]\n'
View tvm_bug_11_03.py
import numpy as np
import tvm
from tvm import relay
import tvm.relay.testing
import pytest
from numpy import isclose
from typing import Union
from tvm import relay, te, auto_scheduler
@Lyken17
Lyken17 / fx trial.ipynb
Created Oct 2, 2021
Use torch.fx to count FLOPs
View fx trial.ipynb
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@Lyken17
Lyken17 / bench.sh
Last active Sep 9, 2021
Folder IO Test
View bench.sh
# Disk / folder IO benchmark using dd.
# Write speed benchmark: write ./tmp with O_DIRECT (bypass the page cache)
# and flush data before dd reports timing (conv=fdatasync). Three block
# sizes: 128 KiB, 128 MiB, 4 KiB.
dd if=/dev/zero of=./tmp oflag=direct bs=128k count=16k conv=fdatasync
dd if=/dev/zero of=./tmp oflag=direct bs=128M count=16 conv=fdatasync
dd if=/dev/zero of=./tmp oflag=direct bs=4k count=512k conv=fdatasync
# Read speed benchmark: O_DIRECT reads of ./tmp at the same block sizes.
# NOTE(review): the 4k read uses count=128k (512 MiB) while the 4k write
# above used count=512k (2 GiB) — presumably intentional to shorten the
# slow 4k pass, but confirm.
dd if=./tmp of=/dev/null iflag=direct bs=128k count=16k
dd if=./tmp of=/dev/null iflag=direct bs=128M count=16
dd if=./tmp of=/dev/null iflag=direct bs=4k count=128k
@Lyken17
Lyken17 / loader.py
Created Sep 5, 2021
Sample ImageNet DataLoader
View loader.py
import argparse
import os
import random
import shutil
import time
import warnings
import torch
import torch.nn as nn
import torch.nn.parallel
@Lyken17
Lyken17 / parallel_bash.sh
Created Aug 19, 2021
Examples of running bash commands in parallel
View parallel_bash.sh
# referenced from https://unix.stackexchange.com/questions/103920/parallelize-a-bash-for-loop/103922
# sequential baseline: each iteration runs in its own subshell ( ... ),
# but the loop still waits for each subshell to exit before starting the
# next — roughly 1s per item, ~7s total for a..g.
for thing in a b c d e f g; do
(
echo "$thing";
sleep 1
)
done
# asynchronous variant (truncated in this preview)