Skip to content

Instantly share code, notes, and snippets.

@thoraxe
Created September 7, 2023 19:53
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save thoraxe/024ecae51b5132b226c3a5c62d277eda to your computer and use it in GitHub Desktop.
Traceback (most recent call last):
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/routes.py", line 488, in run_predict
output = await app.get_blocks().process_api(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/blocks.py", line 1431, in process_api
result = await self.call_function(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/blocks.py", line 1103, in call_function
prediction = await anyio.to_thread.run_sync(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/anyio/to_thread.py", line 33, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 877, in run_sync_in_worker_thread
return await future
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/anyio/_backends/_asyncio.py", line 807, in run
result = context.run(func, *args)
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/gradio/utils.py", line 707, in wrapper
response = f(*args, **kwargs)
File "/home/thoraxe/Red_Hat/openshift/llamaindex-experiments/ops-sop-chatbot/pdf_query.py", line 66, in user
response = index.as_query_engine(verbose=True, streaming=True).query(user_message)
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/indices/query/base.py", line 23, in query
response = self._query(str_or_query_bundle)
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/query_engine/retriever_query_engine.py", line 176, in _query
response = self._response_synthesizer.synthesize(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/response_synthesizers/base.py", line 125, in synthesize
response_str = self.get_response(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/response_synthesizers/compact_and_refine.py", line 34, in get_response
response = super().get_response(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/response_synthesizers/refine.py", line 116, in get_response
response = self._give_response_single(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/response_synthesizers/refine.py", line 184, in _give_response_single
response = self._service_context.llm_predictor.stream(
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/llm_predictor/base.py", line 169, in stream
prompt = self._extend_prompt(prompt)
File "/home/thoraxe/.pyenv/versions/llamaindex-39/lib/python3.9/site-packages/llama_index/llm_predictor/base.py", line 239, in _extend_prompt
if isinstance(default_template, PromptTemplate):
UnboundLocalError: local variable 'default_template' referenced before assignment
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment