Skip to content

Instantly share code, notes, and snippets.

View thoraxe's full-sized avatar

Erik Jacobs thoraxe

View GitHub Profile
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/3.9.16/lib/python3.9/runpy.py", line 197, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/home/thoraxe/.pyenv/versions/3.9.16/lib/python3.9/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/modules/docs_summarizer.py", line 141, in <module>
docs_summarizer.summarize(
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/modules/docs_summarizer.py", line 93, in summarize
summary = query_engine.query(query)
File "/home/thoraxe/.pyenv/versions/fastapi-ols-39/lib/python3.9/site-packages/llama_index/indices/query/base.py", line 31, in query
Indexing summary documents...
Parsing documents into nodes: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 6/6 [00:00<00:00, 3778.65it/s]
Generating embeddings: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 6/6 [00:00<00:00, 22.28it/s]
Indexing product documents...
Parsing documents into nodes: 100%|██████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 533/533 [00:00<00:00, 1216.73it/s]
Generating embeddings: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 731/731 [00:07<00:00, 95.32it/s]
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/3.9.16/lib/python3.9/runpy.py", line 197, in _run_module_as_main
return _run_co
from langchain.agents import load_tools
from langchain.agents import initialize_agent
from langchain.agents import AgentType, ZeroShotAgent, AgentExecutor
from langchain.llms import OpenAI
from model_context import get_watsonx_predictor
from langchain.llms import Ollama
from langchain.llms import HuggingFaceTextGenInference
import langchain
.error_wrappers.ValidationError: 1 validation error for ConversationChain
__root__
Got unexpected prompt input variables. The prompt expects ['input', 'original_query'], but got ['history'] as inputs from memory, and input as the normal input key. (type=value_error)
⌁63% θ63° [thoraxe:~/Red_Hat/ … /fastapi-lightspeed-service] [fastapi-ols-39] main(+20/-11)* 1 ±
⌁63% θ64° [thoraxe:~/Red_Hat/ … /fastapi-lightspeed-service] [fastapi-ols-39] main(+20/-11)* 1 ±
⌁63% θ64° [thoraxe:~/Red_Hat/ … /fastapi-lightspeed-service] [fastapi-ols-39] main(+20/-11)* 1 ±
⌁63% θ64° [thoraxe:~/Red_Hat/ … /fastapi-lightspeed-service] [fastapi-ols-39] main(+20/-11)* 1 ± python routertest.py
> Entering new MultiPromptChain chain...
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/3.9.16/envs/fastapi-ols-39/lib/python3.9/site-packages/uvicorn/protocols/http/h11_impl.py", line 408, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "/home/thoraxe/.pyenv/versions/3.9.16/envs/fastapi-ols-39/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py", line 84, in __call__
return await self.app(scope, receive, send)
File "/home/thoraxe/.pyenv/versions/3.9.16/envs/fastapi-ols-39/lib/python3.9/site-packages/fastapi/applications.py", line 292, in __call__
await super().__call__(scope, receive, send)
File "/home/thoraxe/.pyenv/versions/3.9.16/envs/fastapi-ols-39/lib/python3.9/site-packages/starlette/applications.py", line 122, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/thoraxe/.pyenv/versions/3.9.16/envs/fastapi-ols-39/lib/python3.9/site-packages/starlette/middleware/errors.py", line 184, in __call__
@thoraxe
thoraxe / error
Last active October 5, 2023 16:37
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/ols.py", line 4, in <module>
from model_context import get_watsonx_predictor
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/model_context.py", line 3, in <module>
from watsonx_langchain_wrapper import WatsonxLLM
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/fastapi-lightspeed-service/watsonx_langchain_wrapper.py", line 17, in <module>
class WatsonxLLM(LLM, BaseModel):
TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases
import llama_index
from llama_index import (
SimpleDirectoryReader,
VectorStoreIndex,
StorageContext,
load_index_from_storage,
)
from llama_index.vector_stores import RedisVectorStore
from llama_index.tools import QueryEngineTool, ToolMetadata
from model_context import get_falcon_tgis_context
@thoraxe
thoraxe / deploy.sh
Created September 14, 2023 12:22
ClearML on OpenShift Container Platform
#!/bin/bash
# Deploy prerequisites for the ClearML stack on OpenShift:
# a dedicated project plus the SCC grants its service accounts need.

# Dedicated namespace for the ClearML solution
oc new-project clearml

# These components run under arbitrary UIDs, so grant each the anyuid SCC
for sa in clearml-mongodb clearml-redis clearml-core; do
    oc adm policy add-scc-to-user anyuid -z "${sa}"
done

# Elasticsearch requires the privileged SCC
oc adm policy add-scc-to-user privileged -z clearml-elastic
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/routes.py", line 488, in run_predict
output = await app.get_blocks().process_api(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/blocks.py", line 1431, in process_api
result = await self.call_function(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/blocks.py", line 1103, in call_function
prediction = await anyio.to_thread.run_sync(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
version: "3.1"
intents:
- greet
- goodbye
- affirm
- deny
- mood_great
- mood_unhappy
- bot_challenge