trq54y654y
import sys

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, AutoImageProcessor


def load_model_auto(model_path):
    """Load a remote-code multimodal causal LM along with its tokenizer and image processor."""
    dtype = torch.float16
    # device_map="cuda" already places the weights on the GPU, so no extra .to() call is needed.
    model = AutoModelForCausalLM.from_pretrained(
        model_path, torch_dtype=dtype, trust_remote_code=True, device_map="cuda"
    ).eval()
    tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
    image_processor = AutoImageProcessor.from_pretrained(
        model_path, trust_remote_code=True
    )
    return tokenizer, model, image_processor


if __name__ == "__main__":
    load_model_auto(sys.argv[1])
    print("model loaded!")
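
A minimal usage sketch built on the loader above, assuming the checkpoint's remote code exposes the standard Hugging Face generate interface and accepts a plain text prompt; the real prompt/chat template and any image preprocessing via image_processor are model-specific and not shown here. It assumes this code is appended to the script above (or that load_model_auto is imported from it).

import sys

import torch

tokenizer, model, image_processor = load_model_auto(sys.argv[1])

# Encode a plain text prompt and move the tensors to the model's device.
inputs = tokenizer("Describe what this model can do.", return_tensors="pt").to(model.device)

# Greedy decoding with a small token budget; adjust generation settings as needed.
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=64)

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))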