File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/ols.py", line 4, in <module>
from model_context import get_watsonx_predictor
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/model_context.py", line 3, in <module>
from watsonx_langchain_wrapper import WatsonxLLM
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/watsonx_langchain_wrapper.py", line 17, in <module>
class WatsonxLLM(LLM, BaseModel):
TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases
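This TypeError is the usual pydantic version clash: in the langchain releases of this era, LLM is itself a pydantic v1 model, so pairing it with a BaseModel imported from pydantic v2 hands the derived class two incompatible metaclasses. The likely fix (an assumption here, not something the gist confirms) is to inherit from LLM alone, since it already brings the pydantic machinery, or to import BaseModel from langchain.pydantic_v1 so both bases share one metaclass. A minimal sketch of the first option:

from typing import Dict, Optional

from langchain.llms.base import LLM  # already a pydantic model in this langchain era


class WatsonxLLM(LLM):  # drop the explicit BaseModel base; no metaclass conflict
    credentials: Optional[Dict] = None
    model: Optional[str] = None
    params: Optional[Dict] = None
    project_id: Optional[str] = None
    # _llm_type / _call would be unchanged from the full class below

The service source below inlines the same WatsonxLLM(LLM, BaseModel) definition that watsonx_langchain_wrapper.py declared at its line 17, so importing it trips the same conflict.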
from typing import Any, Dict, List, Mapping, Optional, Union

from fastapi import FastAPI
from pydantic import BaseModel, Extra

## watsonx stuff
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from ibm_watson_machine_learning.foundation_models import Model

## internal stuff
#from model_context import get_watsonx_predictor
class LLMRequest(BaseModel):
    query: str
class WatsonxLLM(LLM, BaseModel):  # this dual inheritance is what raises the TypeError above
    credentials: Optional[Dict] = None
    model: Optional[str] = None
    params: Optional[Dict] = None
    project_id: Optional[str] = None

    class Config:
        extra = Extra.forbid

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        _params = self.params or {}
        return {
            **{"model": self.model},
            **{"params": _params},
        }

    @property
    def _llm_type(self) -> str:
        return "IBM WATSONX"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        # Instantiate a watsonx.ai model per call and generate a completion.
        params = self.params or {}
        model = Model(
            model_id=self.model,
            params=params,
            credentials=self.credentials,
            project_id=self.project_id,
        )
        text = model.generate_text(prompt)
        if stop is not None:
            text = enforce_stop_tokens(text, stop)
        return text
app = FastAPI()


@app.get("/healthz")
def read_root():
    return {"status": "1"}


@app.post("/base_llm_completion")
def base_llm_completion(llm_request: LLMRequest):
    # placeholder response; the WatsonxLLM call is not wired in yet
    return "x:y"
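For completeness, a quick in-process smoke test of the two routes, assuming the metaclass conflict has been resolved so the module imports cleanly; the module name ols is taken from the traceback paths and may differ:

from fastapi.testclient import TestClient

from ols import app  # module name assumed from the traceback

client = TestClient(app)

# Liveness probe returns the static status payload.
assert client.get("/healthz").json() == {"status": "1"}

# The completion route currently returns the placeholder string.
resp = client.post("/base_llm_completion", json={"query": "hello"})
assert resp.status_code == 200
print(resp.json())  # -> "x:y"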