Skip to content

Instantly share code, notes, and snippets.

View dendisuhubdy's full-sized avatar
🚀
at the speed of light

Dendi Suhubdy dendisuhubdy

🚀
at the speed of light
View GitHub Profile
" Fisa-vim-config
" http://fisadev.github.io/fisa-vim-config/
" version: 8.3.1
" ============================================================================
" Vim-plug initialization
" Avoid modifying this section, unless you are very sure of what you are doing
" Indentation: real tab characters display as 8 columns, but editing
" (Tab key / autoindent) inserts 4 spaces instead of tabs.
set tabstop=8
set softtabstop=0
set expandtab
set shiftwidth=4
set smarttab
@dendisuhubdy
dendisuhubdy / untar.cpp
Created September 30, 2021 01:23
untar.cpp
#pragma once
// from https://github.com/libarchive/libarchive/blob/master/contrib/untar.c
/*
* This file is in the public domain. Use it as you see fit.
*/
/*
* "untar" is an extremely simple tar extractor:
* * A single C source file, so it should be easy to compile
@dendisuhubdy
dendisuhubdy / tar.cpp
Created September 30, 2021 01:23
tar.cpp
#define BUF_SZ 16384

// Decompress the gzip stream `src` into `dest`, copying in BUF_SZ chunks.
//
// Fixes vs. the original:
//  * the for-loop header was missing its closing ')' and did not compile;
//  * fwrite always wrote BUF_SZ bytes, so a short final gzread appended
//    up-to-16383 bytes of stale buffer garbage to `dest` — only the `sz`
//    bytes actually read may be written.
// gzread returns the byte count read, 0 at EOF, and -1 on error; the
// `sz > 0` condition therefore also stops (silently) on a read error.
void my_gunzip(gzFile_s *src, FILE *dest) {
    unsigned char buf[BUF_SZ];
    for (auto sz = gzread(src, buf, BUF_SZ); sz > 0; sz = gzread(src, buf, BUF_SZ)) {
        std::fwrite(buf, 1, static_cast<std::size_t>(sz), dest);
    }
}
@dendisuhubdy
dendisuhubdy / parse_json_redpanda.py
Created August 27, 2021 09:32
parse_json_redpanda.py
import json

if __name__ == "__main__":
    # Dump the first record line of a local Redpanda/Kafka log segment.
    line_count = 0
    with open("/Users/dendisuhubdy/Downloads/redpanda/data/2/kafka/btc_usdt_spot/0_111/0-1-v1.log", "rb") as fp:
        lines = fp.readlines()
        for line in lines:
            print(line)
            # Bug fix: the counter was never incremented, so the break below
            # never fired and the ENTIRE file was printed instead of one line.
            line_count += 1
            if line_count == 1:
                break
from ecdsa import SigningKey, SECP256k1
from sha3 import keccak_256
import click


@click.command()
@click.argument('count', type=click.types.IntRange(1, 1000), default=1)
def main(count):
    """Generate COUNT key pairs (1-1000, default 1).

    Delegates to generate_pair(), which is defined elsewhere in this file.
    """
    for _ in range(count):
        priv, addr = generate_pair()
@dendisuhubdy
dendisuhubdy / get_tokens.py
Created August 13, 2021 04:07 — forked from banteg/get_tokens.py
get all tokens a user has interacted with
from web3 import Web3, HTTPProvider
from eth_utils import encode_hex, event_signature_to_log_topic
from eth_abi import encode_single
# JSON-RPC connection to a local Ethereum node; 120 s timeout since
# log-scanning RPC calls can be slow.
w3 = Web3(HTTPProvider('http://127.0.0.1:8545', {'timeout': 120}))
# keccak topic hash of the ERC-20 `Transfer(address,address,uint256)` event,
# presumably used below to filter event logs — the consumer is outside this view.
transfer_topic = encode_hex(event_signature_to_log_topic('Transfer(address,address,uint256)'))
def get_tokens(address):
import requests
import click
# Local LTO Network node endpoint and its API key.
# NOTE(review): the key is a placeholder — replace before running.
LTO_NODE_URL = 'http://127.0.0.1:6869'
LTO_API_KEY = '<your api password>'
# Reusable HTTP session so the API-key header is sent on every request.
s = requests.session()
s.headers['X-API-Key'] = LTO_API_KEY
@dendisuhubdy
dendisuhubdy / deposit.py
Created August 13, 2021 04:04 — forked from banteg/deposit.py
eth2 mass deposit for lighthouse
# 1. generate keys with `lighthouse account validator new random N`
# 2. run `python deposit.py`, it skips the ones already deposited and deposits the rest
import json
import sys
from pathlib import Path
from getpass import getpass
from eth_utils import encode_hex, decode_hex
from web3 import Web3
from web3.middleware import construct_sign_and_send_raw_middleware
country_list = [{'Code': 'AF', 'Name': 'Afghanistan'}, {'Code': 'AX', 'Name': 'Åland Islands'}, {'Code': 'AL', 'Name': 'Albania'}, {'Code': 'DZ', 'Name': 'Algeria'}, {'Code': 'AS', 'Name': 'American Samoa'}, {'Code': 'AD', 'Name': 'Andorra'}, {'Code': 'AO', 'Name': 'Angola'}, {'Code': 'AI', 'Name': 'Anguilla'}, {'Code': 'AQ', 'Name': 'Antarctica'}, {'Code': 'AG', 'Name': 'Antigua and Barbuda'}, {'Code': 'AR', 'Name': 'Argentina'}, {'Code': 'AM', 'Name': 'Armenia'}, {'Code': 'AW', 'Name': 'Aruba'}, {'Code': 'AU', 'Name': 'Australia'}, {'Code': 'AT', 'Name': 'Austria'}, {'Code': 'AZ', 'Name': 'Azerbaijan'}, {'Code': 'BS', 'Name': 'Bahamas'}, {'Code': 'BH', 'Name': 'Bahrain'}, {'Code': 'BD', 'Name': 'Bangladesh'}, {'Code': 'BB', 'Name': 'Barbados'}, {'Code': 'BY', 'Name': 'Belarus'}, {'Code': 'BE', 'Name': 'Belgium'}, {'Code': 'BZ', 'Name': 'Belize'}, {'Code': 'BJ', 'Name': 'Benin'}, {'Code': 'BM', 'Name': 'Bermuda'}, {'Code': 'BT', 'Name': 'Bhutan'}, {'Code': 'BO', 'Name': 'Bolivia, Plurinational State of'}, {'
@dendisuhubdy
dendisuhubdy / inference.py
Created July 21, 2021 07:35
Deep Learning Fashion MNIST Quickstart
import torch
from torch import nn
from torch.utils.data import DataLoader
from torchvision import datasets
from torchvision.transforms import ToTensor, Lambda, Compose
from model import NeuralNetwork
# Download training data from open datasets.
training_data = datasets.FashionMNIST(
root="data",