# /// script
# dependencies = ["openai", "tqdm"]
# ///
# based on https://github.com/rasbt/LLMs-from-scratch/blob/aba7ed2eb1fce4ebbca28eeed11ab19687cb1764/ch07/03_model-evaluation/llm-instruction-eval-ollama.ipynb
from openai import OpenAI
from tqdm import tqdm
json_data = [
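(The preview cuts off at the evaluation data.) A minimal sketch of how the loop might continue, assuming an Ollama server exposing its OpenAI-compatible API at http://localhost:11434/v1 and a llama3 model, as in the referenced notebook; endpoint, key, and model name are all assumptions:

client = OpenAI(base_url="http://localhost:11434/v1", api_key="ollama")  # assumed local endpoint

responses = []
for entry in tqdm(json_data):  # each entry is assumed to carry an "instruction" key
    chat = client.chat.completions.create(
        model="llama3",  # assumed model name
        messages=[{"role": "user", "content": entry["instruction"]}],
    )
    responses.append(chat.choices[0].message.content)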
# /// script
# dependencies = ["Pillow"]
# ///
import argparse
from pathlib import Path
from PIL import Image
parser = argparse.ArgumentParser()
parser.add_argument("base_image", type=Path)
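The preview ends after the first argument; a hedged completion, assuming the script only needs to load the image it is given (the inspection line is illustrative, not from the gist):

args = parser.parse_args()
# Hypothetical continuation: open the base image and report its basics.
with Image.open(args.base_image) as image:
    print(image.format, image.size, image.mode)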
% mkdir -p practice/bin
% ln -s $HOME/.pyenv/versions/3.11.8/bin/python practice/bin/python
% mkdir -p practice/lib/python3.11/site-packages
% echo "home = $HOME/.pyenv/versions/3.11.8/bin" > practice/pyvenv.cfg
% echo 'include-system-site-packages = false' >> practice/pyvenv.cfg
% echo 'version = 3.11.8' >> practice/pyvenv.cfg
% echo "executable = $HOME/.pyenv/versions/3.11.8/bin/python3.11" >> practice/pyvenv.cfg
% echo "command = $HOME/.pyenv/versions/3.11.8/bin/python -m venv $PWD/practice" >> practice/pyvenv.cfg
# /// script
# dependencies = ["pikepdf"]
# ///
# Usage: pipx run unlock_pdf.py path/to/locked.pdf
# ref: https://methane.hatenablog.jp/entry/2024/01/11/PDF%E3%82%92%E3%82%BF%E3%83%96%E3%83%AC%E3%83%83%E3%83%88%E3%81%A7%E8%AA%AD%E3%82%80
import sys
from pathlib import Path
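The preview stops at the imports; a minimal sketch of the unlocking step, assuming the PDF has only an owner password (copy/print restrictions) and no user password — the output filename is an assumption:

import pikepdf

src = Path(sys.argv[1])
# Opening and re-saving with pikepdf drops owner-password restrictions;
# a user-password-protected file would need pikepdf.open(src, password=...).
with pikepdf.open(src) as pdf:
    pdf.save(src.with_stem(src.stem + "_unlocked"))  # assumed output name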
ftnext / kotoha_prototype.py
"Don't use list as an argument type hint"
import ast
class ArgumentConcreteTypeHintChecker(ast.NodeVisitor):
    def visit_arg(self, node):
        annotation = node.annotation
        # `list[int]` parses as Subscript(value=Name(id="list")); a bare
        # `list` parses as Name, and an unannotated arg has annotation=None.
        if isinstance(annotation, ast.Subscript):
            annotation = annotation.value
        if isinstance(annotation, ast.Name) and annotation.id in {"list", "dict", "set", "tuple"}:
            print(f"Fix at {node.lineno}:{node.col_offset}")
            print(ast.dump(node))
        self.generic_visit(node)
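A quick usage example (the sample source is made up):

sample = """
def first(numbers: list[int]) -> int:
    return numbers[0]
"""
tree = ast.parse(sample)
ArgumentConcreteTypeHintChecker().visit(tree)
# -> Fix at 2:10, followed by the dumped arg node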
# /// script
# dependencies = ["transformers[torch]"]
# ///
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# A quantized gguf model has no config.json, so it could not be loaded this way
model_name = "microsoft/Phi-3-mini-4k-instruct"
torch.random.manual_seed(0)
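A hedged sketch of how the snippet probably continues, following the usual Phi-3 model-card pattern; the generation arguments and prompt are assumptions:

model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype="auto", trust_remote_code=True
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

messages = [{"role": "user", "content": "Explain what a tokenizer does."}]
output = pipe(messages, max_new_tokens=128, do_sample=False)
print(output[0]["generated_text"])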
# /// script
# dependencies = ["llama-cpp-python"]
# ///
import os
from llama_cpp import Llama
llm = Llama(
    model_path=os.path.expanduser(
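The model path is cut off in the preview; a self-contained usage sketch with a placeholder path (the filename and parameters are hypothetical):

llm = Llama(
    model_path=os.path.expanduser("~/models/model.gguf"),  # placeholder path
    n_ctx=2048,
)
output = llm("Q: What does a gguf file contain? A:", max_tokens=32)
print(output["choices"][0]["text"])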
# /// script
# dependencies = ["pydantic"]
# ///
import argparse
from pathlib import Path
from pydantic.dataclasses import dataclass
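The pydantic variant's class body is not shown; a sketch of what pydantic.dataclasses.dataclass adds over the stdlib one — construction validates and coerces field types (the example value is made up):

@dataclass
class Args:
    path: Path

print(Args(path="pyproject.toml").path)  # the str is coerced to a Path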
import argparse
from dataclasses import dataclass
from pathlib import Path
@dataclass
class Args:
    path: Path
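Both variants wire up to argparse the same way; a minimal sketch of the glue, assuming a single positional path argument:

parser = argparse.ArgumentParser()
parser.add_argument("path", type=Path)
args = Args(**vars(parser.parse_args()))
print(args.path)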
import asyncio
async def single_request(i: int) -> int:
    print("start", i)
    await asyncio.sleep(i)
    print("end", i)
    return i
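A usage example: running several requests concurrently with asyncio.gather, so the total wall time is roughly max(i) rather than the sum:

async def main() -> None:
    results = await asyncio.gather(*(single_request(i) for i in (1, 2, 3)))
    print(results)  # [1, 2, 3] after about 3 seconds, not 6

if __name__ == "__main__":
    asyncio.run(main())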