Last active
April 29, 2024 07:48
-
-
Save ftnext/94bc4f0560a798bf1c26adf32e58bdd4 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# /// script
# dependencies = ["transformers[torch]"]
# ///
"""Run greedy text generation with microsoft/Phi-3-mini-4k-instruct.

Loads the instruct model in float16 on Apple Silicon ("mps" device),
builds a text-generation pipeline, and prints the model's answer to a
single user prompt.
"""
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# NOTE: a quantized gguf model has no config.json and could not be loaded here,
# so we use the original (unquantized) checkpoint.
model_name = "microsoft/Phi-3-mini-4k-instruct"

torch.random.manual_seed(0)  # make generation reproducible

model = AutoModelForCausalLM.from_pretrained(
    model_name, torch_dtype=torch.float16, trust_remote_code=True
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
pipe = pipeline(
    "text-generation", model=model, tokenizer=tokenizer, device="mps"
)

prompt = "How to explain Internet to a medieval knight?"
generation_args = {
    "max_new_tokens": 256,
    "return_full_text": False,  # return only the newly generated tokens
    # temperature=0.0 is only meaningful with sampling disabled; recent
    # transformers versions raise if temperature is 0.0 while do_sample=True,
    # so state greedy decoding explicitly (matches the official Phi-3 sample).
    "do_sample": False,
    "temperature": 0.0,
}
output = pipe([{"role": "user", "content": prompt}], **generation_args)
print(output[0]["generated_text"])
Sign up for free to join this conversation on GitHub.
Already have an account? Sign in to comment.