Last active
November 5, 2024 09:46
-
-
Save mseri/1e53bdf4a770613fc7a2825491f984be to your computer and use it in GitHub Desktop.
Fetch the model list from an LM Studio server and format it for the `llm` CLI. On macOS the output goes to `Library/Application Support/io.datasette.llm/extra-openai-models.yaml`.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests | |
import subprocess | |
import yaml | |
def get_data_from_api(base_url="http://localhost:1234/v1", timeout=10):
    """Fetch the model list from an LM Studio server and render it as YAML.

    Args:
        base_url: Root URL of the OpenAI-compatible API
            (default: local LM Studio server).
        timeout: Seconds to wait for the HTTP request; without this,
            requests.get can block forever if the server is unreachable.

    Returns:
        A YAML string with one entry per model (api_base, model_name,
        model_id), suitable for llm's extra-openai-models.yaml, or None
        if the request did not return HTTP 200.
    """
    response = requests.get(base_url + "/models", timeout=timeout)
    if response.status_code == 200:
        models_data = [
            {
                "api_base": base_url,
                "model_name": entry["id"],
                "model_id": entry["id"],
            }
            for entry in response.json()["data"]
        ]
        return yaml.dump(models_data, default_flow_style=False)
    else:
        print("Failed to retrieve data from API. Status code: ", response.status_code)
        return None
def get_other_models_path():
    """Return llm's config directory, where extra-openai-models.yaml lives.

    Runs ``llm keys path`` and strips the trailing ``keys.json`` filename,
    keeping the trailing path separator so a filename can be appended
    directly by the caller.

    Returns:
        The config directory as a string (with trailing separator), or
        None if the ``llm`` CLI is not installed or the command failed.
    """
    try:
        result = subprocess.run(
            ["llm", "keys", "path"], capture_output=True, text=True
        )
    except FileNotFoundError:
        # `llm` is not on PATH at all; subprocess.run raises rather than
        # returning a nonzero exit code in this case.
        return None
    if result.returncode != 0:
        return None
    path = result.stdout.strip()
    # Only strip the suffix when it actually matches — the original blind
    # slice would corrupt any unexpected output.
    if path.endswith("keys.json"):
        path = path[: -len("keys.json")]
    return path
def main():
    """Fetch the model list and print YAML ready to paste into llm's config."""
    yaml_output = get_data_from_api()
    omp = get_other_models_path()
    if omp:
        print(f"Copy the following code to {omp + 'extra-openai-models.yaml'}\n")
    if yaml_output:
        print(yaml_output)


# Guard the entry point so importing this module has no side effects.
if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment