Andrew Carr andrewnc
@andrewnc
andrewnc / soup.py
Created April 21, 2022 15:43
Code to soup (weight-average) two models in PyTorch
import copy

def soup_two_models(model, second_model):
    """Return a "souped" copy of `model` whose weights are the element-wise mean
    of the parameters of `model` and `second_model` (architectures must match)."""
    souped_model = copy.deepcopy(model)
    for name, param in souped_model.named_parameters():
        # Average the corresponding parameter tensors from both models.
        param.data = (model.state_dict()[name] + second_model.state_dict()[name]) / 2
    return souped_model
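A minimal usage sketch (the ResNet-18 models and checkpoint paths below are hypothetical, not part of the gist): load two separately fine-tuned checkpoints of the same architecture and soup them.

import torch
import torchvision

# Hypothetical: two checkpoints of the same architecture, fine-tuned separately.
model_a = torchvision.models.resnet18()
model_b = torchvision.models.resnet18()
model_a.load_state_dict(torch.load("run_a.pt"))
model_b.load_state_dict(torch.load("run_b.pt"))

souped = soup_two_models(model_a, model_b)  # same architecture, averaged weights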
import subprocess

def get_GPU_usage():
    """Query nvidia-smi and return the current utilization (%) of each visible GPU."""
    cmd = "nvidia-smi --query-gpu=utilization.gpu --format=csv,noheader,nounits"
    result = subprocess.check_output(cmd, shell=True).decode("utf-8")
    usages = list(map(int, result.strip().split("\n")))
    return usages
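A quick usage sketch (illustrative only): pick the least-loaded GPU before launching a job.

usages = get_GPU_usage()               # e.g. [87, 3, 45, 0]
least_busy = usages.index(min(usages))
print(f"GPU {least_busy} is least utilized ({usages[least_busy]}%)")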
@andrewnc
andrewnc / git_to_md.py
Created September 19, 2024 15:20
uv run git_to_md.py
import os
import sys
import subprocess
import tempfile
import shutil

def main():
    if len(sys.argv) != 2:
        print("Usage: python script.py <github_repo_url>")
        sys.exit(1)
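The preview cuts off here. As a hedged sketch only (an assumption based on the filename and imports, not the gist's actual continuation), a script like this typically shallow-clones the URL into a temporary directory and concatenates the repository's text files into a single Markdown document; the helper name below is hypothetical.

def dump_repo_to_markdown(repo_url, out_path="repo.md"):
    # Assumed behavior: shallow-clone into a temp dir, then write every readable
    # file into one Markdown document as its own fenced section.
    tmp = tempfile.mkdtemp()
    try:
        subprocess.run(["git", "clone", "--depth", "1", repo_url, tmp], check=True)
        with open(out_path, "w", encoding="utf-8") as out:
            for root, dirs, files in os.walk(tmp):
                dirs[:] = [d for d in dirs if d != ".git"]  # skip git metadata
                for fname in sorted(files):
                    path = os.path.join(root, fname)
                    rel = os.path.relpath(path, tmp)
                    try:
                        with open(path, "r", encoding="utf-8") as f:
                            contents = f.read()
                    except UnicodeDecodeError:
                        continue  # skip binary files
                    out.write(f"## {rel}\n\n```\n{contents}\n```\n\n")
    finally:
        shutil.rmtree(tmp)

Presumably main() would pass sys.argv[1] into a helper along these lines.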
#!/bin/bash
# ollama run llama3.2:1b
# chmod +x talk.sh
# ./talk.sh "Your question here"

# Check if a question is passed as an argument
if [ -z "$1" ]; then
    echo "Usage: ./talk.sh 'Your question here'"
    exit 1
fi
function theme_precmd {
    local TERMWIDTH=$(( COLUMNS - ${ZLE_RPROMPT_INDENT:-1} ))

    PR_FILLBAR=""
    PR_PWDLEN=""

    local promptsize=${#${(%):---(%n@%m:%l)---()--}}
    local rubypromptsize=${#${(%)$(ruby_prompt_info)}}
    local pwdsize=${#${(%):-%~}}
    local venvpromptsize=$((${#$(virtualenv_prompt_info)}))