@thoraxe
Created December 12, 2023 17:11
==================================== ERRORS ====================================
___________ ERROR collecting query_helpers/test_yes_no_classifier.py ___________
/opt/app-root/lib64/python3.11/site-packages/_pytest/runner.py:341: in from_call
result: Optional[TResult] = func()
/opt/app-root/lib64/python3.11/site-packages/_pytest/runner.py:372: in <lambda>
call = CallInfo.from_call(lambda: list(collector.collect()), "collect")
/opt/app-root/lib64/python3.11/site-packages/_pytest/python.py:531: in collect
self._inject_setup_module_fixture()
/opt/app-root/lib64/python3.11/site-packages/_pytest/python.py:545: in _inject_setup_module_fixture
self.obj, ("setUpModule", "setup_module")
/opt/app-root/lib64/python3.11/site-packages/_pytest/python.py:310: in obj
self._obj = obj = self._getobj()
/opt/app-root/lib64/python3.11/site-packages/_pytest/python.py:528: in _getobj
return self._importtestmodule()
/opt/app-root/lib64/python3.11/site-packages/_pytest/python.py:617: in _importtestmodule
mod = import_path(self.path, mode=importmode, root=self.config.rootpath)
/opt/app-root/lib64/python3.11/site-packages/_pytest/pathlib.py:567: in import_path
importlib.import_module(module_name)
/usr/lib64/python3.11/importlib/__init__.py:126: in import_module
return _bootstrap._gcd_import(name[level:], package, level)
<frozen importlib._bootstrap>:1204: in _gcd_import
???
<frozen importlib._bootstrap>:1176: in _find_and_load
???
<frozen importlib._bootstrap>:1147: in _find_and_load_unlocked
???
<frozen importlib._bootstrap>:690: in _load_unlocked
???
/opt/app-root/lib64/python3.11/site-packages/_pytest/assertion/rewrite.py:186: in exec_module
exec(co, module.__dict__)
tests/unit/query_helpers/test_yes_no_classifier.py:16: in <module>
import src.query_helpers.yes_no_classifier
src/query_helpers/yes_no_classifier.py:6: in <module>
from utils.model_context import get_watsonx_predictor
utils/model_context.py:2: in <module>
from llama_index.embeddings import TextEmbeddingsInference
/opt/app-root/lib64/python3.11/site-packages/llama_index/__init__.py:21: in <module>
from llama_index.indices import (
/opt/app-root/lib64/python3.11/site-packages/llama_index/indices/__init__.py:4: in <module>
from llama_index.indices.composability.graph import ComposableGraph
/opt/app-root/lib64/python3.11/site-packages/llama_index/indices/composability/__init__.py:4: in <module>
from llama_index.indices.composability.graph import ComposableGraph
/opt/app-root/lib64/python3.11/site-packages/llama_index/indices/composability/graph.py:7: in <module>
from llama_index.indices.base import BaseIndex
/opt/app-root/lib64/python3.11/site-packages/llama_index/indices/base.py:6: in <module>
from llama_index.chat_engine.types import BaseChatEngine, ChatMode
/opt/app-root/lib64/python3.11/site-packages/llama_index/chat_engine/__init__.py:1: in <module>
from llama_index.chat_engine.condense_plus_context import CondensePlusContextChatEngine
/opt/app-root/lib64/python3.11/site-packages/llama_index/chat_engine/condense_plus_context.py:7: in <module>
from llama_index.chat_engine.types import (
/opt/app-root/lib64/python3.11/site-packages/llama_index/chat_engine/types.py:11: in <module>
from llama_index.memory import BaseMemory
/opt/app-root/lib64/python3.11/site-packages/llama_index/memory/__init__.py:1: in <module>
from llama_index.memory.chat_memory_buffer import ChatMemoryBuffer
/opt/app-root/lib64/python3.11/site-packages/llama_index/memory/chat_memory_buffer.py:13: in <module>
class ChatMemoryBuffer(BaseMemory):
/opt/app-root/lib64/python3.11/site-packages/llama_index/memory/chat_memory_buffer.py:19: in ChatMemoryBuffer
default_factory=cast(Callable[[], Any], GlobalsHelper().tokenizer),
/opt/app-root/lib64/python3.11/site-packages/llama_index/utils.py:55: in tokenizer
enc = tiktoken.get_encoding("gpt2")
/opt/app-root/lib64/python3.11/site-packages/tiktoken/registry.py:73: in get_encoding
enc = Encoding(**constructor())
/opt/app-root/lib64/python3.11/site-packages/tiktoken_ext/openai_public.py:11: in gpt2
mergeable_ranks = data_gym_to_mergeable_bpe_ranks(
/opt/app-root/lib64/python3.11/site-packages/tiktoken/load.py:82: in data_gym_to_mergeable_bpe_ranks
vocab_bpe_contents = read_file_cached(vocab_bpe_file).decode()
/opt/app-root/lib64/python3.11/site-packages/tiktoken/load.py:50: in read_file_cached
contents = read_file(blobpath)
/opt/app-root/lib64/python3.11/site-packages/tiktoken/load.py:24: in read_file
resp = requests.get(blobpath)
/opt/app-root/lib64/python3.11/site-packages/requests/api.py:73: in get
return request("get", url, params=params, **kwargs)
/opt/app-root/lib64/python3.11/site-packages/requests/api.py:59: in request
return session.request(method=method, url=url, **kwargs)
/opt/app-root/lib64/python3.11/site-packages/requests/sessions.py:589: in request
resp = self.send(prep, **send_kwargs)
/opt/app-root/lib64/python3.11/site-packages/requests/sessions.py:703: in send
r = adapter.send(request, **kwargs)
/opt/app-root/lib64/python3.11/site-packages/requests/adapters.py:486: in send
resp = conn.urlopen(
/opt/app-root/lib64/python3.11/site-packages/urllib3/connectionpool.py:715: in urlopen
httplib_response = self._make_request(
/opt/app-root/lib64/python3.11/site-packages/urllib3/connectionpool.py:404: in _make_request
self._validate_conn(conn)
/opt/app-root/lib64/python3.11/site-packages/urllib3/connectionpool.py:1058: in _validate_conn
conn.connect()
/opt/app-root/lib64/python3.11/site-packages/urllib3/connection.py:363: in connect
self.sock = conn = self._new_conn()
/opt/app-root/lib64/python3.11/site-packages/urllib3/connection.py:174: in _new_conn
conn = connection.create_connection(
/opt/app-root/lib64/python3.11/site-packages/urllib3/util/connection.py:76: in create_connection
sock = socket.socket(af, socktype, proto)
tests/unit/query_helpers/test_yes_no_classifier.py:9: in guard
raise Exception("I told you not to use the Internet!")
E Exception: I told you not to use the Internet!
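
What the traceback shows: while collecting tests/unit/query_helpers/test_yes_no_classifier.py, pytest imports src.query_helpers.yes_no_classifier, which pulls in llama_index. At import time, llama_index's ChatMemoryBuffer calls tiktoken.get_encoding("gpt2"), and tiktoken tries to fetch its BPE vocabulary over HTTP. The test module blocks outbound network access by replacing socket.socket with a guard that raises, so collection fails before any test runs.

The guard itself is not included in the gist; based on the frames at tests/unit/query_helpers/test_yes_no_classifier.py:9 above, it is presumably something like the following sketch (the exact names and placement are assumptions):

# Hypothetical reconstruction of the network guard implied by the traceback:
# socket.socket is replaced so any attempted connection (here, tiktoken
# downloading the gpt2 BPE files) raises immediately during test collection.
import socket


def guard(*args, **kwargs):
    raise Exception("I told you not to use the Internet!")


socket.socket = guard  # any code that tries to open a socket now fails fast

Because the guard is installed at module import time, it fires as soon as any transitively imported library touches the network, which is why the failure appears as a collection error rather than a test failure.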