Talk on high-performance, safe C++ engineering
Firsts achieved by the speaker:
- Discord initial infrastructure, in-game generative AI, Stadia
- 25+ years of C++ experience
Talk on high-performance, safe C++ engineering
Firsts achieved by the speaker:
import re, mmap | |
pattern = rb'pick_rsteps\s*:\s*(\d+)\s+place_rsteps\s*:\s*(\d+)' | |
expfnam = ["svfu", "svu", "mr"] | |
nf = 10 | |
for exp in expfnam: | |
means = [] | |
for i in range(nf): | |
with open(f"dat/{exp}{i}.txt", "r") as file: | |
with mmap.mmap(file.fileno(), 0, access=mmap.ACCESS_READ) as mmap_file: |
# newton raphson table generator | |
import math | |
def f(x):
    """Target function for the Newton-Raphson table: x^2 + 2x - 8.

    Roots are x = 2 and x = -4.
    """
    quadratic = x ** 2
    linear = 2 * x
    # Same left-to-right evaluation order as (x**2 + 2*x) - 8.
    return quadratic + linear - 8
def df(x):
    """First derivative of f: d/dx (x^2 + 2x - 8) = 2x + 2."""
    # x + x doubles exactly (same as 2*x for ints and IEEE floats),
    # then + 2 rounds identically to the original 2*x + 2.
    return x + x + 2
def newton_raphson(x0, f, df, n): |
class MoG: | |
def __init__(self, means, sigma, weights=None, td="cpu"):
    """Build an isotropic Gaussian mixture centered on the given means.

    means   -- (M, ...) tensor of component centers; detached so no
               gradient flows back through the mixture parameters
    sigma   -- shared scalar standard deviation for every component
    weights -- mixing proportions over the M components; defaults to uniform
    td      -- torch device string for tensors created here
    """
    n_components = means.shape[0]
    if weights is None:
        # No weights supplied: use a uniform mixture.
        weights = torch.ones(n_components, device=td) / n_components
    self.means = means.detach()
    scale = sigma * torch.ones(means.shape, device=td)
    # reinterpreted_batch_ndims=2 folds the trailing dims into the event,
    # so each of the M rows is treated as one mixture component.
    component_dist = D.Independent(D.Normal(self.means, scale), 2)
    self.mixture = D.MixtureSameFamily(D.Categorical(weights), component_dist)
def sample(self, n=None): |
from fpdf import FPDF | |
from PIL import Image | |
def makePdf(pdfFileName, listPages, dir = ''): | |
if (dir): | |
dir += "/" | |
cover = Image.open(dir + str(listPages[0])) | |
width, height = cover.size |
# Rewrite Obsidian-style image embeds (![[img]]) in every note so each one
# also links to the copy hosted under notes/media in the GitHub repo.
for note in ./notes/*.md; do
    # Quote "$note" so filenames containing spaces or globs survive intact.
    sed -i -e 's/!\[\[\([^]]*\)\]\]/!\[\[\1\]\]\(https:\/\/github.com\/mdvsh\/eecs281\/blob\/main\/notes\/media\/\1\)/g' "$note"
done
# BSD/macOS sed treats `-i -e` as an in-place edit with backup suffix "-e",
# leaving `*.md-e` files behind; sweep them up. Plain -f is enough — these
# are regular files, so recursive -rf was overkill.
find . -name '*.md-e' -exec rm -f {} +
const http = require("http"); | |
const fs = require("fs"); | |
const PORT = 4000; | |
const origin = `http://localhost:${PORT}`; | |
class Database { | |
constructor(fname) { | |
this.fn = `${fname}.json`; | |
let state = new Map(); |
"""Player-vs-computer Tic Tac Toe with a simple computer opponent."""
from random import randint

# Opening banner: the human always plays X, the computer always plays O.
greeting = "TicTacToe\nWelcome, You are X. The computer is O.\n\nLet's Start."
print(greeting)
"""Scrape the header of the stats table in bhai.html for later tabulation."""
from bs4 import BeautifulSoup
import re, csv, pandas

# Context manager guarantees the HTML file handle is closed (the original
# left `src` open for the life of the process).
with open('bhai.html', 'r') as src:
    soup = BeautifulSoup(src, 'lxml')

data = {}
# The stats table is identified by its DataTables 'stripe' CSS class.
table = soup.find("table", attrs={'class': 'stripe'})
table_head = table.thead.find_all("tr")

# First header row supplies the column names; drop embedded newlines and
# surrounding whitespace from each cell.
heading = [th.text.replace('\n', '').strip() for th in table_head[0].find_all("th")]
I hereby claim:
To claim this, I am signing this object: