nikkie ftnext
# /// script
# dependencies = ["pydantic"]
# ///
import argparse
from pathlib import Path

from pydantic.dataclasses import dataclass
# stdlib alternative that also works here: from dataclasses import dataclass


@dataclass
class Args:
    path: Path
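
A hedged sketch of how such an Args dataclass is typically populated (the wiring below is not in the original gist): argparse parses the command line, and the pydantic dataclass validates and coerces the result.

parser = argparse.ArgumentParser()
parser.add_argument("path", type=Path)
args = Args(**vars(parser.parse_args()))
print(args.path)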
import asyncio


async def single_request(i: int) -> int:
    print("start", i)
    await asyncio.sleep(i)
    print("end", i)
    return i
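
A minimal driver (not in the original gist) to show the coroutine running concurrently: asyncio.gather starts all requests at once, so total time is roughly max(i), not sum(i).

async def main() -> None:
    # start requests 1..3 concurrently; finishes in ~3s instead of ~6s
    results = await asyncio.gather(*(single_request(i) for i in range(1, 4)))
    print(results)  # [1, 2, 3]


asyncio.run(main())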
# optparse matches unambiguous abbreviations of long options
from optparse import OptionParser, make_option

option_list = [make_option("--no-cache-dir", action="store_true")]
parser = OptionParser(option_list=option_list)

options, args = parser.parse_args(["--no-cache-dir"])
assert options.no_cache_dir

options2, args2 = parser.parse_args(["--no-cache"])  # "--no-cache-d" also works
assert options2.no_cache_dir
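
For comparison, a sketch with argparse (an addition, not in the original gist), which applies the same prefix matching by default and can disable it with allow_abbrev=False:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--no-cache-dir", action="store_true")
assert parser.parse_args(["--no-cache"]).no_cache_dir  # abbreviation accepted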
# Rewrite with kfp v2 https://huggingface.co/blog/turhancan97/building-your-first-kubeflow-pipeline
from typing import List

from kfp import compiler, dsl


@dsl.component(base_image="python:3.9")
def read_data() -> List[int]:
    data = [1, 2, 3, 4, 5]
    return data
# https://speakerdeck.com/asei/kubeflow-pipelines-v2-debian-waruji-jie-xue-xi-paipurainkai-fa?slide=10
from kfp import compiler, dsl


@dsl.component(base_image="python:3.9")
def hello_world(text: str) -> str:
    print(text)
    return text
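
A minimal sketch of what the so-far-unused compiler import is for in kfp v2 (the pipeline and file names below are assumptions): wrap the component in a pipeline and compile it to a YAML spec.

@dsl.pipeline(name="hello-world-pipeline")
def hello_world_pipeline(text: str = "hello"):
    hello_world(text=text)


compiler.Compiler().compile(hello_world_pipeline, "hello_world_pipeline.yaml")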
// Usage: node practice.js example.md blocks.json
const { readFileSync, writeFileSync } = require('fs');
const { markdownToBlocks } = require('@tryfabric/martian');

// Convert the Markdown file to Notion block objects and dump them as JSON
const markdown = readFileSync(process.argv[2], { encoding: "utf8" });
const blocks = markdownToBlocks(markdown);
writeFileSync(process.argv[3], JSON.stringify(blocks, null, 2));
import os

from notion_client import Client

parent_page_id = "ID from URL (connect the page to the integration)"

notion = Client(auth=os.environ["NOTION_TOKEN"])  # integration secret
new_page = notion.pages.create(
    parent={"type": "page_id", "page_id": parent_page_id},
    properties={
        # Title payload for a page parent; the original snippet was cut off
        # here, so this completion and the title text are placeholders
        "title": [{"type": "text", "text": {"content": "New page"}}]
    },
)
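
Hypothetical glue between the two snippets above (an assumption, not in the original gists): load the blocks.json produced by the martian script and create the page with those blocks as its content.

import json

with open("blocks.json", encoding="utf8") as f:
    blocks = json.load(f)

new_page = notion.pages.create(
    parent={"type": "page_id", "page_id": parent_page_id},
    properties={"title": [{"type": "text", "text": {"content": "Imported from Markdown"}}]},
    children=blocks,
)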
from rouge_score.rouge_scorer import RougeScorer
from rouge_score.tokenize import SPACES_RE
from rouge_score.tokenizers import Tokenizer


class NonAlphaNumericSupportTokenizer(Tokenizer):
    """
    >>> NonAlphaNumericSupportTokenizer().tokenize("いぬ ねこ")
    ['いぬ', 'ねこ']
    """

    def tokenize(self, text):
        # Split on whitespace only, so non-alphanumeric tokens
        # (e.g. Japanese) survive instead of being stripped away
        return SPACES_RE.split(text)
"""
>>> HogeTranslator().foo(1)
MyTranslator foo
108
>>> HogeTranslator().foo(-1)
MyTranslator foo
Translator foo
42
"""