Skip to content

Instantly share code, notes, and snippets.

@litui
Created February 5, 2025 06:57
Show Gist options
  • Save litui/f2b106913174e14efb8ff02f774fce85 to your computer and use it in GitHub Desktop.
Open-WebUI pipeline script for DeepSeek-R1 as hosted at Together.AI
from typing import List, Union, Generator, Iterator
from pydantic import BaseModel
import requests
import os
class Pipeline:
    """Open-WebUI manifold pipeline that proxies chat-completion requests to
    DeepSeek-R1 as hosted at Together.AI."""

    class Valves(BaseModel):
        # Connection settings, normally supplied via environment variables.
        TOGETHER_DEEPSEEKR1_API_KEY: str
        TOGETHER_DEEPSEEKR1_ENDPOINT: str

    def __init__(self):
        # "manifold" pipelines expose one or more models under a common prefix.
        self.type = "manifold"
        self.name = "Together.AI "
        self.valves = self.Valves(
            TOGETHER_DEEPSEEKR1_API_KEY=os.getenv(
                "TOGETHER_DEEPSEEKR1_API_KEY",
                "your-together-deepseek-r1-api-key-here",
            ),
            TOGETHER_DEEPSEEKR1_ENDPOINT=os.getenv(
                "TOGETHER_DEEPSEEKR1_ENDPOINT",
                "your-together-deepseek-r1-endpoint-here",
            ),
        )
        self.set_pipelines()

    def set_pipelines(self):
        """Publish the list of models this manifold serves."""
        models = ['DeepSeek-R1']
        model_names = ['DeepSeek-R1']
        self.pipelines = [
            {"id": model, "name": name} for model, name in zip(models, model_names)
        ]
        print(f"together_deepseek_r1_pipeline - models: {self.pipelines}")

    async def on_valves_updated(self):
        # Rebuild the model list whenever valves are changed in the UI.
        self.set_pipelines()

    async def on_startup(self):
        # This function is called when the server is started.
        print(f"on_startup:{__name__}")

    async def on_shutdown(self):
        # This function is called when the server is stopped.
        print(f"on_shutdown:{__name__}")

    def pipe(
        self, user_message: str, model_id: str, messages: List[dict], body: dict
    ) -> Union[str, Generator, Iterator]:
        """Forward a chat-completion request to Together.AI.

        Returns the parsed JSON response (non-streaming), an iterator of
        response lines (streaming), or an error string on failure.
        """
        print(f"pipe:{__name__}")
        print(messages)
        print(user_message)
        headers = {
            "Authorization": f"Bearer {self.valves.TOGETHER_DEEPSEEKR1_API_KEY}",
            "Content-Type": "application/json",
        }
        url = f"{self.valves.TOGETHER_DEEPSEEKR1_ENDPOINT}/v1/chat/completions"
        print(url)
        print(body)
        # Whitelist of keys the chat-completions endpoint accepts; anything
        # else Open-WebUI adds to `body` is dropped before forwarding.
        allowed_params = {
            'messages', 'temperature', 'role', 'content', 'contentPart',
            'contentPartImage', 'enhancements', 'dataSources', 'n', 'stream',
            'stop', 'max_tokens', 'presence_penalty', 'frequency_penalty',
            # Fixed typo: was 'funcions', which silently dropped the valid
            # 'functions' parameter.
            'logit_bias', 'function_call', 'functions', 'tools',
            'tool_choice', 'top_p', 'log_probs', 'top_logprobs',
            'response_format', 'seed', 'model',
        }
        # Remap user field: Open-WebUI may pass a dict; the API wants a string.
        if "user" in body and not isinstance(body["user"], str):
            body["user"] = body["user"]["id"] if "id" in body["user"] else str(body["user"])
        # Fill in model field as per Together's api requirements
        body["model"] = f"deepseek-ai/{model_id}"
        filtered_body = {k: v for k, v in body.items() if k in allowed_params}
        # log fields that were filtered out as a single line
        if len(body) != len(filtered_body):
            print(f"Dropped params: {', '.join(set(body.keys()) - set(filtered_body.keys()))}")
        # Initialize so the except clause can safely test whether a response
        # exists (previously `r` could be unbound if requests.post raised).
        r = None
        try:
            r = requests.post(
                url=url,
                json=filtered_body,
                headers=headers,
                stream=True,
            )
            r.raise_for_status()
            # .get() avoids a KeyError when the caller omits "stream".
            if body.get("stream"):
                return r.iter_lines()
            return r.json()
        except Exception as e:
            # Must test identity: `if r:` uses Response.__bool__, which is
            # False for 4xx/5xx — exactly the raise_for_status failure case —
            # and would have discarded the error body.
            if r is not None:
                return f"Error: {e} ({r.text})"
            return f"Error: {e}"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment