Skip to content

Instantly share code, notes, and snippets.

@tbenst
Created July 7, 2023 09:13
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save tbenst/2b6ef453b37f634f32d856bbdceed71e to your computer and use it in GitHub Desktop.
pytorch DDP hang
run.py(448): self.define(self._stderr_path, StringSeries([]))
--- modulename: string_series, funcname: __init__
string_series.py(51): if is_stringify_value(values):
--- modulename: __init__, funcname: is_stringify_value
__init__.py(123): return isinstance(var, StringifyValue)
string_series.py(54): if not is_collection(values):
--- modulename: __init__, funcname: is_collection
__init__.py(137): return isinstance(var, (list, set, tuple))
string_series.py(57): self._truncated = any([len(value) > MAX_STRING_SERIES_VALUE_LENGTH for value in values])
--- modulename: string_series, funcname: <listcomp>
string_series.py(57): self._truncated = any([len(value) > MAX_STRING_SERIES_VALUE_LENGTH for value in values])
string_series.py(58): self._values = [value[:MAX_STRING_SERIES_VALUE_LENGTH] for value in values]
--- modulename: string_series, funcname: <listcomp>
string_series.py(58): self._values = [value[:MAX_STRING_SERIES_VALUE_LENGTH] for value in values]
string_series.py(60): if steps is None:
string_series.py(61): self._steps = cycle([None])
string_series.py(66): if timestamps is None:
string_series.py(67): self._timestamps = cycle([time.time()])
--- modulename: metadata_container, funcname: define
metadata_container.py(358): with self._lock:
metadata_container.py(359): old_attr = self.get_attribute(path)
--- modulename: metadata_container, funcname: get_attribute
metadata_container.py(374): with self._lock:
metadata_container.py(375): return self._structure.get(parse_path(path))
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: get
container_structure.py(68): ref = self._structure
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(78): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(78): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(77): return None
metadata_container.py(374): with self._lock:
metadata_container.py(360): if old_attr is not None:
metadata_container.py(363): neptune_value = cast_value(value)
--- modulename: type_casting, funcname: cast_value
type_casting.py(57): from neptune.handler import Handler
type_casting.py(59): from_stringify_value = False
type_casting.py(60): if is_stringify_value(value):
--- modulename: __init__, funcname: is_stringify_value
__init__.py(123): return isinstance(var, StringifyValue)
type_casting.py(63): if isinstance(value, Value):
type_casting.py(64): return value
metadata_container.py(364): if neptune_value is None:
metadata_container.py(368): attr = ValueToAttributeVisitor(self, parse_path(path)).visit(neptune_value)
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: value_to_attribute_visitor, funcname: __init__
value_to_attribute_visitor.py(63): self._container = container
value_to_attribute_visitor.py(64): self._path = path
--- modulename: value_visitor, funcname: visit
value_visitor.py(50): return value.accept(self)
--- modulename: string_series, funcname: accept
string_series.py(81): return visitor.visit_string_series(self)
--- modulename: value_to_attribute_visitor, funcname: visit_string_series
value_to_attribute_visitor.py(94): return StringSeriesAttr(self._container, self._path)
--- modulename: string_series, funcname: __init__
string_series.py(51): super().__init__(container, path)
--- modulename: attribute, funcname: __init__
attribute.py(37): super().__init__()
attribute.py(38): self._container = container
attribute.py(39): self._path = path
string_series.py(52): self._value_truncation_occurred = False
metadata_container.py(369): self.set_attribute(path, attr)
--- modulename: metadata_container, funcname: set_attribute
metadata_container.py(378): with self._lock:
metadata_container.py(379): return self._structure.set(parse_path(path), attribute)
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: set
container_structure.py(83): ref = self._structure
container_structure.py(84): location, attribute_name = path[:-1], path[-1]
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(87): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(89): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(90): if not isinstance(ref, self._node_type):
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(87): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(89): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(90): if not isinstance(ref, self._node_type):
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(96): if attribute_name in ref and isinstance(ref[attribute_name], self._node_type):
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(102): ref[attribute_name] = attr
--- modulename: namespace, funcname: __setitem__
namespace.py(58): self._attributes[k] = v
metadata_container.py(378): with self._lock:
metadata_container.py(370): attr.process_assignment(neptune_value, wait=wait)
--- modulename: attribute, funcname: process_assignment
attribute.py(63): if isinstance(value, ValueCopy):
attribute.py(66): return self.assign(value, wait=wait)
--- modulename: series, funcname: assign
series.py(84): if not self._is_value_type(value):
--- modulename: string_series, funcname: _is_value_type
string_series.py(84): return isinstance(value, StringSeriesVal)
series.py(86): clear_op = self._get_clear_operation()
--- modulename: string_series, funcname: _get_clear_operation
string_series.py(72): return ClearStringLog(self._path)
--- modulename: operation, funcname: __init__
<string>(3): series.py(87): config_op = self._get_config_operation_from_value(value)
--- modulename: series, funcname: _get_config_operation_from_value
series.py(66): return None
series.py(88): with self._container.lock():
--- modulename: metadata_container, funcname: lock
metadata_container.py(417): return self._lock
series.py(89): if config_op:
series.py(91): if not value.values:
--- modulename: string_series, funcname: values
string_series.py(85): return self._values
series.py(92): self._enqueue_operation(clear_op, wait=wait)
--- modulename: attribute, funcname: _enqueue_operation
attribute.py(45): self._container._op_processor.enqueue_operation(operation, wait=wait)
--- modulename: async_operation_processor, funcname: enqueue_operation
async_operation_processor.py(103): if self._drop_operations:
async_operation_processor.py(105): self._last_version = self._queue.put(op)
--- modulename: disk_queue, funcname: put
disk_queue.py(92): version = self._last_put_file.read_local() + 1
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
disk_queue.py(93): _json = json.dumps(self._serialize(obj, version))
--- modulename: disk_queue, funcname: _serialize
disk_queue.py(246): return {"obj": self._to_dict(obj), "version": version}
--- modulename: async_operation_processor, funcname: <lambda>
async_operation_processor.py(68): to_dict=lambda x: x.to_dict(),
--- modulename: operation, funcname: to_dict
operation.py(67): return {"type": self.__class__.__name__, "path": self.path}
--- modulename: __init__, funcname: dumps
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(229): cls is None and indent is None and separators is None and
__init__.py(230): default is None and not sort_keys and not kw):
__init__.py(231): return _default_encoder.encode(obj)
--- modulename: encoder, funcname: encode
encoder.py(191): if isinstance(o, str):
encoder.py(199): chunks = self.iterencode(o, _one_shot=True)
--- modulename: encoder, funcname: iterencode
encoder.py(214): if self.check_circular:
encoder.py(215): markers = {}
encoder.py(218): if self.ensure_ascii:
encoder.py(219): _encoder = encode_basestring_ascii
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(224): _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY):
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(246): if (_one_shot and c_make_encoder is not None
encoder.py(247): and self.indent is None):
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(249): markers, self.default, _encoder, self.indent,
encoder.py(250): self.key_separator, self.item_separator, self.sort_keys,
encoder.py(251): self.skipkeys, self.allow_nan)
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(257): return _iterencode(o, 0)
encoder.py(200): if not isinstance(chunks, (list, tuple)):
encoder.py(202): return ''.join(chunks)
disk_queue.py(94): if self._file_size + len(_json) > self._max_file_size:
disk_queue.py(100): self._writer.write(_json + "\n")
disk_queue.py(101): self._last_put_file.write(version)
--- modulename: sync_offset_file, funcname: write
sync_offset_file.py(30): self._file.seek(0)
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
--- modulename: codecs, funcname: reset
codecs.py(203): def reset(self):
sync_offset_file.py(31): self._file.write(str(offset))
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
sync_offset_file.py(32): self._file.truncate()
sync_offset_file.py(33): self._file.flush()
sync_offset_file.py(34): self._last = offset
disk_queue.py(102): self._file_size += len(_json) + 1
disk_queue.py(103): return version
async_operation_processor.py(106): if self._queue.size() > self._batch_size / 2:
--- modulename: disk_queue, funcname: size
disk_queue.py(223): return self._last_put_file.read_local() - self._last_ack_file.read_local()
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
async_operation_processor.py(108): if wait:
series.py(88): with self._container.lock():
metadata_container.py(371): return attr
metadata_container.py(358): with self._lock:
run.py(450): if self._with_id is None or self._source_files is not None:
run.py(452): upload_source_code(source_files=self._source_files, run=self)
--- modulename: source_code, funcname: upload_source_code
source_code.py(43): entrypoint_filepath = get_path_executed_script()
--- modulename: lib_programname, funcname: get_path_executed_script
lib_programname.py(78): path_candidate = get_fullpath_from_main_file()
--- modulename: lib_programname, funcname: get_fullpath_from_main_file
lib_programname.py(100): if not hasattr(sys.modules["__main__"], "__file__"):
lib_programname.py(103): arg_string = str(sys.modules["__main__"].__file__)
lib_programname.py(104): valid_executable_path = get_valid_executable_path_or_empty_path(arg_string)
--- modulename: lib_programname, funcname: get_valid_executable_path_or_empty_path
lib_programname.py(136): arg_string = remove_doctest_and_docrunner_parameters(arg_string)
--- modulename: lib_programname, funcname: remove_doctest_and_docrunner_parameters
lib_programname.py(159): path = arg_string.split("::", 1)[0]
lib_programname.py(160): return path
lib_programname.py(137): arg_string = add_python_extension_if_not_there(arg_string)
--- modulename: lib_programname, funcname: add_python_extension_if_not_there
lib_programname.py(175): if not arg_string.endswith(".py"):
lib_programname.py(177): return arg_string
lib_programname.py(138): path = pathlib.Path(arg_string)
--- modulename: pathlib, funcname: __new__
pathlib.py(958): if cls is Path:
pathlib.py(959): cls = WindowsPath if os.name == 'nt' else PosixPath
pathlib.py(960): self = cls._from_parts(args)
--- modulename: pathlib, funcname: _from_parts
pathlib.py(593): self = object.__new__(cls)
pathlib.py(594): drv, root, parts = self._parse_args(args)
--- modulename: pathlib, funcname: _parse_args
pathlib.py(573): parts = []
pathlib.py(574): for a in args:
pathlib.py(575): if isinstance(a, PurePath):
pathlib.py(578): a = os.fspath(a)
pathlib.py(579): if isinstance(a, str):
pathlib.py(581): parts.append(str(a))
pathlib.py(574): for a in args:
pathlib.py(587): return cls._flavour.parse_parts(parts)
--- modulename: pathlib, funcname: parse_parts
pathlib.py(57): parsed = []
pathlib.py(58): sep = self.sep
pathlib.py(59): altsep = self.altsep
pathlib.py(60): drv = root = ''
pathlib.py(61): it = reversed(parts)
pathlib.py(62): for part in it:
pathlib.py(63): if not part:
pathlib.py(65): if altsep:
pathlib.py(67): drv, root, rel = self.splitroot(part)
--- modulename: pathlib, funcname: splitroot
pathlib.py(240): if part and part[0] == sep:
pathlib.py(241): stripped_part = part.lstrip(sep)
pathlib.py(247): if len(part) - len(stripped_part) == 2:
pathlib.py(250): return '', sep, stripped_part
pathlib.py(68): if sep in rel:
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(75): if drv or root:
pathlib.py(76): if not drv:
pathlib.py(80): for part in it:
pathlib.py(88): break
pathlib.py(89): if drv or root:
pathlib.py(90): parsed.append(drv + root)
pathlib.py(91): parsed.reverse()
pathlib.py(92): return drv, root, parsed
pathlib.py(595): self._drv = drv
pathlib.py(596): self._root = root
pathlib.py(597): self._parts = parts
pathlib.py(598): return self
pathlib.py(961): if not self._flavour.is_supported:
pathlib.py(964): return self
lib_programname.py(139): try:
lib_programname.py(140): if path.is_file():
--- modulename: pathlib, funcname: is_file
pathlib.py(1321): try:
pathlib.py(1322): return S_ISREG(self.stat().st_mode)
--- modulename: pathlib, funcname: stat
pathlib.py(1097): return self._accessor.stat(self, follow_symlinks=follow_symlinks)
--- modulename: pathlib, funcname: __fspath__
pathlib.py(632): return str(self)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
pathlib.py(626): except AttributeError:
pathlib.py(627): self._str = self._format_parsed_parts(self._drv, self._root,
pathlib.py(628): self._parts) or '.'
pathlib.py(627): self._str = self._format_parsed_parts(self._drv, self._root,
--- modulename: pathlib, funcname: _format_parsed_parts
pathlib.py(610): if drv or root:
pathlib.py(611): return drv + root + cls._flavour.join(parts[1:])
pathlib.py(629): return self._str
lib_programname.py(141): path = path.resolve() # .resolve does not work on a non existing file in python 3.5
--- modulename: pathlib, funcname: resolve
pathlib.py(1071): def check_eloop(e):
pathlib.py(1076): try:
pathlib.py(1077): s = self._accessor.realpath(self, strict=strict)
--- modulename: posixpath, funcname: realpath
posixpath.py(395): filename = os.fspath(filename)
--- modulename: pathlib, funcname: __fspath__
pathlib.py(632): return str(self)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
posixpath.py(396): path, ok = _joinrealpath(filename[:0], filename, strict, {})
--- modulename: posixpath, funcname: _joinrealpath
posixpath.py(402): if isinstance(path, bytes):
posixpath.py(407): sep = '/'
posixpath.py(408): curdir = '.'
posixpath.py(409): pardir = '..'
posixpath.py(411): if isabs(rest):
--- modulename: posixpath, funcname: isabs
posixpath.py(62): s = os.fspath(s)
posixpath.py(63): sep = _get_sep(s)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(64): return s.startswith(sep)
posixpath.py(412): rest = rest[1:]
posixpath.py(413): path = sep
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(86): path += b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(416): name, _, rest = rest.partition(sep)
posixpath.py(417): if not name or name == curdir:
posixpath.py(420): if name == pardir:
posixpath.py(429): newpath = join(path, name)
--- modulename: posixpath, funcname: join
posixpath.py(76): a = os.fspath(a)
posixpath.py(77): sep = _get_sep(a)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(78): path = a
posixpath.py(79): try:
posixpath.py(80): if not p:
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(83): if b.startswith(sep):
posixpath.py(85): elif not path or path.endswith(sep):
posixpath.py(88): path += sep + b
posixpath.py(82): for b in map(os.fspath, p):
posixpath.py(92): return path
posixpath.py(430): try:
posixpath.py(431): st = os.lstat(newpath)
posixpath.py(437): is_link = stat.S_ISLNK(st.st_mode)
posixpath.py(438): if not is_link:
posixpath.py(439): path = newpath
posixpath.py(440): continue
posixpath.py(415): while rest:
posixpath.py(461): return path, True
posixpath.py(397): return abspath(path)
--- modulename: posixpath, funcname: abspath
posixpath.py(379): path = os.fspath(path)
posixpath.py(380): if not isabs(path):
--- modulename: posixpath, funcname: isabs
posixpath.py(62): s = os.fspath(s)
posixpath.py(63): sep = _get_sep(s)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(64): return s.startswith(sep)
posixpath.py(386): return normpath(path)
--- modulename: posixpath, funcname: normpath
posixpath.py(340): path = os.fspath(path)
posixpath.py(341): if isinstance(path, bytes):
posixpath.py(347): sep = '/'
posixpath.py(348): empty = ''
posixpath.py(349): dot = '.'
posixpath.py(350): dotdot = '..'
posixpath.py(351): if path == empty:
posixpath.py(353): initial_slashes = path.startswith(sep)
posixpath.py(357): if (initial_slashes and
posixpath.py(358): path.startswith(sep*2) and not path.startswith(sep*3)):
posixpath.py(357): if (initial_slashes and
posixpath.py(360): comps = path.split(sep)
posixpath.py(361): new_comps = []
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(364): continue
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(370): comps = new_comps
posixpath.py(371): path = sep.join(comps)
posixpath.py(372): if initial_slashes:
posixpath.py(373): path = sep*initial_slashes + path
posixpath.py(374): return path or dot
pathlib.py(1081): p = self._from_parts((s,))
--- modulename: pathlib, funcname: _from_parts
pathlib.py(593): self = object.__new__(cls)
pathlib.py(594): drv, root, parts = self._parse_args(args)
--- modulename: pathlib, funcname: _parse_args
pathlib.py(573): parts = []
pathlib.py(574): for a in args:
pathlib.py(575): if isinstance(a, PurePath):
pathlib.py(578): a = os.fspath(a)
pathlib.py(579): if isinstance(a, str):
pathlib.py(581): parts.append(str(a))
pathlib.py(574): for a in args:
pathlib.py(587): return cls._flavour.parse_parts(parts)
--- modulename: pathlib, funcname: parse_parts
pathlib.py(57): parsed = []
pathlib.py(58): sep = self.sep
pathlib.py(59): altsep = self.altsep
pathlib.py(60): drv = root = ''
pathlib.py(61): it = reversed(parts)
pathlib.py(62): for part in it:
pathlib.py(63): if not part:
pathlib.py(65): if altsep:
pathlib.py(67): drv, root, rel = self.splitroot(part)
--- modulename: pathlib, funcname: splitroot
pathlib.py(240): if part and part[0] == sep:
pathlib.py(241): stripped_part = part.lstrip(sep)
pathlib.py(247): if len(part) - len(stripped_part) == 2:
pathlib.py(250): return '', sep, stripped_part
pathlib.py(68): if sep in rel:
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(70): if x and x != '.':
pathlib.py(71): parsed.append(sys.intern(x))
pathlib.py(69): for x in reversed(rel.split(sep)):
pathlib.py(75): if drv or root:
pathlib.py(76): if not drv:
pathlib.py(80): for part in it:
pathlib.py(88): break
pathlib.py(89): if drv or root:
pathlib.py(90): parsed.append(drv + root)
pathlib.py(91): parsed.reverse()
pathlib.py(92): return drv, root, parsed
pathlib.py(595): self._drv = drv
pathlib.py(596): self._root = root
pathlib.py(597): self._parts = parts
pathlib.py(598): return self
pathlib.py(1085): if not strict:
pathlib.py(1086): try:
pathlib.py(1087): p.stat()
--- modulename: pathlib, funcname: stat
pathlib.py(1097): return self._accessor.stat(self, follow_symlinks=follow_symlinks)
--- modulename: pathlib, funcname: __fspath__
pathlib.py(632): return str(self)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
pathlib.py(626): except AttributeError:
pathlib.py(627): self._str = self._format_parsed_parts(self._drv, self._root,
pathlib.py(628): self._parts) or '.'
pathlib.py(627): self._str = self._format_parsed_parts(self._drv, self._root,
--- modulename: pathlib, funcname: _format_parsed_parts
pathlib.py(610): if drv or root:
pathlib.py(611): return drv + root + cls._flavour.join(parts[1:])
pathlib.py(629): return self._str
pathlib.py(1090): return p
lib_programname.py(142): return path
lib_programname.py(105): return valid_executable_path
lib_programname.py(79): if path_candidate != empty_path:
--- modulename: pathlib, funcname: __eq__
pathlib.py(664): if not isinstance(other, PurePath):
pathlib.py(666): return self._cparts == other._cparts and self._flavour is other._flavour
--- modulename: pathlib, funcname: _cparts
pathlib.py(657): try:
pathlib.py(658): return self._cached_cparts
pathlib.py(659): except AttributeError:
pathlib.py(660): self._cached_cparts = self._flavour.casefold_parts(self._parts)
--- modulename: pathlib, funcname: casefold_parts
pathlib.py(258): return parts
pathlib.py(661): return self._cached_cparts
--- modulename: pathlib, funcname: _cparts
pathlib.py(657): try:
pathlib.py(658): return self._cached_cparts
lib_programname.py(80): return path_candidate
source_code.py(45): if not is_ipython() and entrypoint_filepath != empty_path and os.path.isfile(entrypoint_filepath):
--- modulename: utils, funcname: is_ipython
utils.py(218): try:
utils.py(219): import IPython
utils.py(221): ipython = IPython.core.getipython.get_ipython()
--- modulename: getipython, funcname: get_ipython
getipython.py(22): from IPython.core.interactiveshell import InteractiveShell
getipython.py(23): if InteractiveShell.initialized():
--- modulename: configurable, funcname: initialized
configurable.py(568): return hasattr(cls, "_instance") and cls._instance is not None
utils.py(222): return ipython is not None
--- modulename: pathlib, funcname: __eq__
pathlib.py(664): if not isinstance(other, PurePath):
pathlib.py(666): return self._cparts == other._cparts and self._flavour is other._flavour
--- modulename: pathlib, funcname: _cparts
pathlib.py(657): try:
pathlib.py(658): return self._cached_cparts
--- modulename: pathlib, funcname: _cparts
pathlib.py(657): try:
pathlib.py(658): return self._cached_cparts
--- modulename: genericpath, funcname: isfile
genericpath.py(29): try:
genericpath.py(30): st = os.stat(path)
--- modulename: pathlib, funcname: __fspath__
pathlib.py(632): return str(self)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
genericpath.py(33): return stat.S_ISREG(st.st_mode)
source_code.py(46): if source_files is None:
source_code.py(47): entrypoint = os.path.basename(entrypoint_filepath)
--- modulename: posixpath, funcname: basename
posixpath.py(142): p = os.fspath(p)
--- modulename: pathlib, funcname: __fspath__
pathlib.py(632): return str(self)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
posixpath.py(143): sep = _get_sep(p)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(144): i = p.rfind(sep) + 1
posixpath.py(145): return p[i:]
source_code.py(48): source_files = str(entrypoint_filepath)
--- modulename: pathlib, funcname: __str__
pathlib.py(624): try:
pathlib.py(625): return self._str
source_code.py(60): run[attr_consts.SOURCE_CODE_ENTRYPOINT_ATTRIBUTE_PATH] = entrypoint
--- modulename: metadata_container, funcname: inner_fun
metadata_container.py(83): self._raise_if_stopped()
--- modulename: run, funcname: _raise_if_stopped
run.py(459): if self._state == ContainerState.STOPPED:
metadata_container.py(84): return fun(self, *args, **kwargs)
--- modulename: metadata_container, funcname: __setitem__
metadata_container.py(188): self.__getitem__(key).assign(value)
--- modulename: metadata_container, funcname: inner_fun
metadata_container.py(83): self._raise_if_stopped()
--- modulename: run, funcname: _raise_if_stopped
run.py(459): if self._state == ContainerState.STOPPED:
metadata_container.py(84): return fun(self, *args, **kwargs)
--- modulename: metadata_container, funcname: __getitem__
metadata_container.py(184): return Handler(self, path)
--- modulename: handler, funcname: __init__
handler.py(101): super().__init__()
handler.py(102): self._container = container
handler.py(103): self._path = path
--- modulename: handler, funcname: inner_fun
handler.py(85): validate_path_not_protected(self._path, self)
--- modulename: handler, funcname: validate_path_not_protected
handler.py(77): path_protection_exception = handler._PROTECTED_PATHS.get(target_path)
handler.py(78): if path_protection_exception:
handler.py(86): return fun(self, *args, **kwargs)
--- modulename: handler, funcname: assign
handler.py(208): with self._container.lock():
--- modulename: metadata_container, funcname: lock
metadata_container.py(417): return self._lock
handler.py(209): attr = self._container.get_attribute(self._path)
--- modulename: metadata_container, funcname: get_attribute
metadata_container.py(374): with self._lock:
metadata_container.py(375): return self._structure.get(parse_path(path))
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: get
container_structure.py(68): ref = self._structure
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(77): return None
metadata_container.py(374): with self._lock:
handler.py(210): if attr is None:
handler.py(211): self._container.define(self._path, value)
--- modulename: metadata_container, funcname: define
metadata_container.py(358): with self._lock:
metadata_container.py(359): old_attr = self.get_attribute(path)
--- modulename: metadata_container, funcname: get_attribute
metadata_container.py(374): with self._lock:
metadata_container.py(375): return self._structure.get(parse_path(path))
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: get
container_structure.py(68): ref = self._structure
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(77): return None
metadata_container.py(374): with self._lock:
metadata_container.py(360): if old_attr is not None:
metadata_container.py(363): neptune_value = cast_value(value)
--- modulename: type_casting, funcname: cast_value
type_casting.py(57): from neptune.handler import Handler
type_casting.py(59): from_stringify_value = False
type_casting.py(60): if is_stringify_value(value):
--- modulename: __init__, funcname: is_stringify_value
__init__.py(123): return isinstance(var, StringifyValue)
type_casting.py(63): if isinstance(value, Value):
type_casting.py(65): elif isinstance(value, Handler):
--- modulename: abc, funcname: __instancecheck__
abc.py(119): return _abc_instancecheck(cls, instance)
type_casting.py(67): elif isinstance(value, argparse.Namespace):
type_casting.py(69): elif File.is_convertable_to_image(value):
--- modulename: file, funcname: is_convertable_to_image
file.py(315): convertable_to_img_predicates = (is_pil_image, is_matplotlib_figure)
file.py(316): return any(predicate(value) for predicate in convertable_to_img_predicates)
--- modulename: file, funcname: <genexpr>
file.py(316): return any(predicate(value) for predicate in convertable_to_img_predicates)
--- modulename: images, funcname: is_pil_image
images.py(261): return PILImage is not None and isinstance(image, PILImage)
--- modulename: file, funcname: <genexpr>
file.py(316): return any(predicate(value) for predicate in convertable_to_img_predicates)
--- modulename: images, funcname: is_matplotlib_figure
images.py(265): return image.__class__.__module__.startswith("matplotlib.") and image.__class__.__name__ == "Figure"
--- modulename: file, funcname: <genexpr>
file.py(316): return any(predicate(value) for predicate in convertable_to_img_predicates)
type_casting.py(71): elif File.is_convertable_to_html(value):
--- modulename: file, funcname: is_convertable_to_html
file.py(320): convertable_to_html_predicates = (is_altair_chart, is_bokeh_figure, is_plotly_figure)
file.py(321): return any(predicate(value) for predicate in convertable_to_html_predicates)
--- modulename: file, funcname: <genexpr>
file.py(321): return any(predicate(value) for predicate in convertable_to_html_predicates)
--- modulename: images, funcname: is_altair_chart
images.py(273): return chart.__class__.__module__.startswith("altair.") and "Chart" in chart.__class__.__name__
--- modulename: file, funcname: <genexpr>
file.py(321): return any(predicate(value) for predicate in convertable_to_html_predicates)
--- modulename: images, funcname: is_bokeh_figure
images.py(277): return chart.__class__.__module__.startswith("bokeh.") and chart.__class__.__name__.lower() == "figure"
--- modulename: file, funcname: <genexpr>
file.py(321): return any(predicate(value) for predicate in convertable_to_html_predicates)
--- modulename: images, funcname: is_plotly_figure
images.py(269): return chart.__class__.__module__.startswith("plotly.") and chart.__class__.__name__ == "Figure"
--- modulename: file, funcname: <genexpr>
file.py(321): return any(predicate(value) for predicate in convertable_to_html_predicates)
type_casting.py(73): elif is_bool(value):
--- modulename: __init__, funcname: is_bool
__init__.py(87): return isinstance(var, bool)
type_casting.py(75): elif is_int(value):
--- modulename: __init__, funcname: is_int
__init__.py(91): return isinstance(var, int)
type_casting.py(77): elif is_float(value):
--- modulename: __init__, funcname: is_float
__init__.py(95): return isinstance(var, (float, int))
type_casting.py(79): elif is_string(value):
--- modulename: __init__, funcname: is_string
__init__.py(99): return isinstance(var, str)
type_casting.py(80): return String(value)
--- modulename: string, funcname: __init__
string.py(45): verify_type("value", value, (str, type(None), StringifyValue))
--- modulename: __init__, funcname: verify_type
__init__.py(66): try:
__init__.py(67): if isinstance(expected_type, tuple):
__init__.py(68): type_name = " or ".join(get_type_name(t) for t in expected_type)
--- modulename: __init__, funcname: <genexpr>
__init__.py(68): type_name = " or ".join(get_type_name(t) for t in expected_type)
--- modulename: __init__, funcname: get_type_name
__init__.py(127): return _type.__name__ if hasattr(_type, "__name__") else str(_type)
--- modulename: __init__, funcname: <genexpr>
__init__.py(68): type_name = " or ".join(get_type_name(t) for t in expected_type)
--- modulename: __init__, funcname: get_type_name
__init__.py(127): return _type.__name__ if hasattr(_type, "__name__") else str(_type)
--- modulename: __init__, funcname: <genexpr>
__init__.py(68): type_name = " or ".join(get_type_name(t) for t in expected_type)
--- modulename: __init__, funcname: get_type_name
__init__.py(127): return _type.__name__ if hasattr(_type, "__name__") else str(_type)
--- modulename: __init__, funcname: <genexpr>
__init__.py(68): type_name = " or ".join(get_type_name(t) for t in expected_type)
__init__.py(75): if not isinstance(var, expected_type):
__init__.py(78): if isinstance(var, IOBase) and not hasattr(var, "read"):
--- modulename: abc, funcname: __instancecheck__
abc.py(119): return _abc_instancecheck(cls, instance)
string.py(47): self.value = str(value.value) if is_stringify_value(value) else value
--- modulename: __init__, funcname: is_stringify_value
__init__.py(123): return isinstance(var, StringifyValue)
metadata_container.py(364): if neptune_value is None:
metadata_container.py(368): attr = ValueToAttributeVisitor(self, parse_path(path)).visit(neptune_value)
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: value_to_attribute_visitor, funcname: __init__
value_to_attribute_visitor.py(63): self._container = container
value_to_attribute_visitor.py(64): self._path = path
--- modulename: value_visitor, funcname: visit
value_visitor.py(50): return value.accept(self)
--- modulename: string, funcname: accept
string.py(50): return visitor.visit_string(self)
--- modulename: value_to_attribute_visitor, funcname: visit_string
value_to_attribute_visitor.py(76): return StringAttr(self._container, self._path)
--- modulename: string, funcname: __init__
string.py(37): super().__init__(container, path)
--- modulename: attribute, funcname: __init__
attribute.py(37): super().__init__()
attribute.py(38): self._container = container
attribute.py(39): self._path = path
string.py(38): self._value_truncation_occurred = False
metadata_container.py(369): self.set_attribute(path, attr)
--- modulename: metadata_container, funcname: set_attribute
metadata_container.py(378): with self._lock:
metadata_container.py(379): return self._structure.set(parse_path(path), attribute)
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: set
container_structure.py(83): ref = self._structure
container_structure.py(84): location, attribute_name = path[:-1], path[-1]
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(87): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(88): ref[part] = self._node_factory(location[: idx + 1])
--- modulename: namespace, funcname: __call__
namespace.py(129): return Namespace(self._run, path)
--- modulename: namespace, funcname: __init__
namespace.py(53): Attribute.__init__(self, container, path)
--- modulename: attribute, funcname: __init__
attribute.py(37): super().__init__()
attribute.py(38): self._container = container
attribute.py(39): self._path = path
namespace.py(54): self._attributes = {}
namespace.py(55): self._str_path = path_to_str(path)
--- modulename: paths, funcname: path_to_str
paths.py(30): return "/".join(_remove_empty_paths(path))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: namespace, funcname: __setitem__
namespace.py(58): self._attributes[k] = v
container_structure.py(89): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(90): if not isinstance(ref, self._node_type):
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(96): if attribute_name in ref and isinstance(ref[attribute_name], self._node_type):
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(102): ref[attribute_name] = attr
--- modulename: namespace, funcname: __setitem__
namespace.py(58): self._attributes[k] = v
metadata_container.py(378): with self._lock:
metadata_container.py(370): attr.process_assignment(neptune_value, wait=wait)
--- modulename: attribute, funcname: process_assignment
attribute.py(63): if isinstance(value, ValueCopy):
attribute.py(66): return self.assign(value, wait=wait)
--- modulename: string, funcname: assign
string.py(55): if not isinstance(value, StringVal):
string.py(58): if len(value.value) > String.MAX_VALUE_LENGTH:
string.py(72): with self._container.lock():
--- modulename: metadata_container, funcname: lock
metadata_container.py(417): return self._lock
string.py(73): self._enqueue_operation(self.create_assignment_operation(self._path, value.value), wait=wait)
--- modulename: string, funcname: create_assignment_operation
string.py(42): return AssignString(path, value)
--- modulename: operation, funcname: __init__
<string>(3): <string>(4): --- modulename: attribute, funcname: _enqueue_operation
attribute.py(45): self._container._op_processor.enqueue_operation(operation, wait=wait)
--- modulename: async_operation_processor, funcname: enqueue_operation
async_operation_processor.py(103): if self._drop_operations:
async_operation_processor.py(105): self._last_version = self._queue.put(op)
--- modulename: disk_queue, funcname: put
disk_queue.py(92): version = self._last_put_file.read_local() + 1
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
disk_queue.py(93): _json = json.dumps(self._serialize(obj, version))
--- modulename: disk_queue, funcname: _serialize
disk_queue.py(246): return {"obj": self._to_dict(obj), "version": version}
--- modulename: async_operation_processor, funcname: <lambda>
async_operation_processor.py(68): to_dict=lambda x: x.to_dict(),
--- modulename: operation, funcname: to_dict
operation.py(142): ret = super().to_dict()
--- modulename: operation, funcname: to_dict
operation.py(67): return {"type": self.__class__.__name__, "path": self.path}
operation.py(143): ret["value"] = self.value
operation.py(144): return ret
--- modulename: __init__, funcname: dumps
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(229): cls is None and indent is None and separators is None and
__init__.py(230): default is None and not sort_keys and not kw):
__init__.py(231): return _default_encoder.encode(obj)
--- modulename: encoder, funcname: encode
encoder.py(191): if isinstance(o, str):
encoder.py(199): chunks = self.iterencode(o, _one_shot=True)
--- modulename: encoder, funcname: iterencode
encoder.py(214): if self.check_circular:
encoder.py(215): markers = {}
encoder.py(218): if self.ensure_ascii:
encoder.py(219): _encoder = encode_basestring_ascii
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(224): _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY):
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(246): if (_one_shot and c_make_encoder is not None
encoder.py(247): and self.indent is None):
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(249): markers, self.default, _encoder, self.indent,
encoder.py(250): self.key_separator, self.item_separator, self.sort_keys,
encoder.py(251): self.skipkeys, self.allow_nan)
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(257): return _iterencode(o, 0)
encoder.py(200): if not isinstance(chunks, (list, tuple)):
encoder.py(202): return ''.join(chunks)
disk_queue.py(94): if self._file_size + len(_json) > self._max_file_size:
disk_queue.py(100): self._writer.write(_json + "\n")
disk_queue.py(101): self._last_put_file.write(version)
--- modulename: sync_offset_file, funcname: write
sync_offset_file.py(30): self._file.seek(0)
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
--- modulename: codecs, funcname: reset
codecs.py(203): def reset(self):
sync_offset_file.py(31): self._file.write(str(offset))
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
sync_offset_file.py(32): self._file.truncate()
sync_offset_file.py(33): self._file.flush()
sync_offset_file.py(34): self._last = offset
disk_queue.py(102): self._file_size += len(_json) + 1
disk_queue.py(103): return version
async_operation_processor.py(106): if self._queue.size() > self._batch_size / 2:
--- modulename: disk_queue, funcname: size
disk_queue.py(223): return self._last_put_file.read_local() - self._last_ack_file.read_local()
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
async_operation_processor.py(108): if wait:
string.py(72): with self._container.lock():
metadata_container.py(371): return attr
metadata_container.py(358): with self._lock:
handler.py(208): with self._container.lock():
source_code.py(62): if source_files is not None:
source_code.py(63): run[attr_consts.SOURCE_CODE_FILES_ATTRIBUTE_PATH].upload_files(source_files)
--- modulename: metadata_container, funcname: inner_fun
metadata_container.py(83): self._raise_if_stopped()
--- modulename: run, funcname: _raise_if_stopped
run.py(459): if self._state == ContainerState.STOPPED:
metadata_container.py(84): return fun(self, *args, **kwargs)
--- modulename: metadata_container, funcname: __getitem__
metadata_container.py(184): return Handler(self, path)
--- modulename: handler, funcname: __init__
handler.py(101): super().__init__()
handler.py(102): self._container = container
handler.py(103): self._path = path
--- modulename: handler, funcname: inner_fun
handler.py(85): validate_path_not_protected(self._path, self)
--- modulename: handler, funcname: validate_path_not_protected
handler.py(77): path_protection_exception = handler._PROTECTED_PATHS.get(target_path)
handler.py(78): if path_protection_exception:
handler.py(86): return fun(self, *args, **kwargs)
--- modulename: handler, funcname: upload_files
handler.py(260): if is_collection(value):
--- modulename: __init__, funcname: is_collection
__init__.py(137): return isinstance(var, (list, set, tuple))
handler.py(263): verify_type("value", value, str)
--- modulename: __init__, funcname: verify_type
__init__.py(66): try:
__init__.py(67): if isinstance(expected_type, tuple):
__init__.py(70): type_name = get_type_name(expected_type)
--- modulename: __init__, funcname: get_type_name
__init__.py(127): return _type.__name__ if hasattr(_type, "__name__") else str(_type)
__init__.py(75): if not isinstance(var, expected_type):
__init__.py(78): if isinstance(var, IOBase) and not hasattr(var, "read"):
--- modulename: abc, funcname: __instancecheck__
abc.py(119): return _abc_instancecheck(cls, instance)
handler.py(265): with self._container.lock():
--- modulename: metadata_container, funcname: lock
metadata_container.py(417): return self._lock
handler.py(266): attr = self._container.get_attribute(self._path)
--- modulename: metadata_container, funcname: get_attribute
metadata_container.py(374): with self._lock:
metadata_container.py(375): return self._structure.get(parse_path(path))
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: get
container_structure.py(68): ref = self._structure
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(78): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(70): for index, part in enumerate(path):
container_structure.py(71): if not isinstance(ref, self._node_type):
container_structure.py(76): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(77): return None
metadata_container.py(374): with self._lock:
handler.py(267): if attr is None:
handler.py(268): attr = FileSet(self._container, parse_path(self._path))
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: attribute, funcname: __init__
attribute.py(37): super().__init__()
attribute.py(38): self._container = container
attribute.py(39): self._path = path
handler.py(269): self._container.set_attribute(self._path, attr)
--- modulename: metadata_container, funcname: set_attribute
metadata_container.py(378): with self._lock:
metadata_container.py(379): return self._structure.set(parse_path(path), attribute)
--- modulename: paths, funcname: parse_path
paths.py(26): return _remove_empty_paths(path.split("/"))
--- modulename: paths, funcname: _remove_empty_paths
paths.py(22): return list(filter(bool, paths))
--- modulename: container_structure, funcname: set
container_structure.py(83): ref = self._structure
container_structure.py(84): location, attribute_name = path[:-1], path[-1]
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(87): if part not in ref:
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(834): return True
container_structure.py(89): ref = ref[part]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
container_structure.py(90): if not isinstance(ref, self._node_type):
container_structure.py(86): for idx, part in enumerate(location):
container_structure.py(96): if attribute_name in ref and isinstance(ref[attribute_name], self._node_type):
--- modulename: _collections_abc, funcname: __contains__
_collections_abc.py(829): try:
_collections_abc.py(830): self[key]
--- modulename: namespace, funcname: __getitem__
namespace.py(64): return self._attributes[k]
_collections_abc.py(831): except KeyError:
_collections_abc.py(832): return False
container_structure.py(102): ref[attribute_name] = attr
--- modulename: namespace, funcname: __setitem__
namespace.py(58): self._attributes[k] = v
metadata_container.py(378): with self._lock:
handler.py(270): attr.upload_files(value, wait=wait)
--- modulename: file_set, funcname: upload_files
file_set.py(49): if isinstance(globs, str):
file_set.py(50): globs = [globs]
file_set.py(53): self._enqueue_upload_operation(globs, reset=False, wait=wait)
--- modulename: file_set, funcname: _enqueue_upload_operation
file_set.py(64): with self._container.lock():
--- modulename: metadata_container, funcname: lock
metadata_container.py(417): return self._lock
file_set.py(65): abs_file_globs = list(os.path.abspath(file_glob) for file_glob in globs)
--- modulename: file_set, funcname: <genexpr>
file_set.py(65): abs_file_globs = list(os.path.abspath(file_glob) for file_glob in globs)
--- modulename: posixpath, funcname: abspath
posixpath.py(379): path = os.fspath(path)
posixpath.py(380): if not isabs(path):
--- modulename: posixpath, funcname: isabs
posixpath.py(62): s = os.fspath(s)
posixpath.py(63): sep = _get_sep(s)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(64): return s.startswith(sep)
posixpath.py(386): return normpath(path)
--- modulename: posixpath, funcname: normpath
posixpath.py(340): path = os.fspath(path)
posixpath.py(341): if isinstance(path, bytes):
posixpath.py(347): sep = '/'
posixpath.py(348): empty = ''
posixpath.py(349): dot = '.'
posixpath.py(350): dotdot = '..'
posixpath.py(351): if path == empty:
posixpath.py(353): initial_slashes = path.startswith(sep)
posixpath.py(357): if (initial_slashes and
posixpath.py(358): path.startswith(sep*2) and not path.startswith(sep*3)):
posixpath.py(357): if (initial_slashes and
posixpath.py(360): comps = path.split(sep)
posixpath.py(361): new_comps = []
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(364): continue
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(363): if comp in (empty, dot):
posixpath.py(365): if (comp != dotdot or (not initial_slashes and not new_comps) or
posixpath.py(367): new_comps.append(comp)
posixpath.py(362): for comp in comps:
posixpath.py(370): comps = new_comps
posixpath.py(371): path = sep.join(comps)
posixpath.py(372): if initial_slashes:
posixpath.py(373): path = sep*initial_slashes + path
posixpath.py(374): return path or dot
--- modulename: file_set, funcname: <genexpr>
file_set.py(65): abs_file_globs = list(os.path.abspath(file_glob) for file_glob in globs)
file_set.py(66): self._enqueue_operation(UploadFileSet(self._path, abs_file_globs, reset=reset), wait=wait)
--- modulename: operation, funcname: __init__
<string>(3): <string>(4): <string>(5): --- modulename: attribute, funcname: _enqueue_operation
attribute.py(45): self._container._op_processor.enqueue_operation(operation, wait=wait)
--- modulename: async_operation_processor, funcname: enqueue_operation
async_operation_processor.py(103): if self._drop_operations:
async_operation_processor.py(105): self._last_version = self._queue.put(op)
--- modulename: disk_queue, funcname: put
disk_queue.py(92): version = self._last_put_file.read_local() + 1
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
disk_queue.py(93): _json = json.dumps(self._serialize(obj, version))
--- modulename: disk_queue, funcname: _serialize
disk_queue.py(246): return {"obj": self._to_dict(obj), "version": version}
--- modulename: async_operation_processor, funcname: <lambda>
async_operation_processor.py(68): to_dict=lambda x: x.to_dict(),
--- modulename: operation, funcname: to_dict
operation.py(284): ret = super().to_dict()
--- modulename: operation, funcname: to_dict
operation.py(67): return {"type": self.__class__.__name__, "path": self.path}
operation.py(285): ret["file_globs"] = self.file_globs
operation.py(286): ret["reset"] = str(self.reset)
operation.py(287): return ret
--- modulename: __init__, funcname: dumps
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(228): check_circular and allow_nan and
__init__.py(227): if (not skipkeys and ensure_ascii and
__init__.py(229): cls is None and indent is None and separators is None and
__init__.py(230): default is None and not sort_keys and not kw):
__init__.py(231): return _default_encoder.encode(obj)
--- modulename: encoder, funcname: encode
encoder.py(191): if isinstance(o, str):
encoder.py(199): chunks = self.iterencode(o, _one_shot=True)
--- modulename: encoder, funcname: iterencode
encoder.py(214): if self.check_circular:
encoder.py(215): markers = {}
encoder.py(218): if self.ensure_ascii:
encoder.py(219): _encoder = encode_basestring_ascii
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(224): _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY):
encoder.py(223): def floatstr(o, allow_nan=self.allow_nan,
encoder.py(246): if (_one_shot and c_make_encoder is not None
encoder.py(247): and self.indent is None):
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(249): markers, self.default, _encoder, self.indent,
encoder.py(250): self.key_separator, self.item_separator, self.sort_keys,
encoder.py(251): self.skipkeys, self.allow_nan)
encoder.py(248): _iterencode = c_make_encoder(
encoder.py(257): return _iterencode(o, 0)
encoder.py(200): if not isinstance(chunks, (list, tuple)):
encoder.py(202): return ''.join(chunks)
disk_queue.py(94): if self._file_size + len(_json) > self._max_file_size:
disk_queue.py(100): self._writer.write(_json + "\n")
disk_queue.py(101): self._last_put_file.write(version)
--- modulename: sync_offset_file, funcname: write
sync_offset_file.py(30): self._file.seek(0)
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
--- modulename: codecs, funcname: reset
codecs.py(203): def reset(self):
sync_offset_file.py(31): self._file.write(str(offset))
--- modulename: codecs, funcname: reset
codecs.py(328): IncrementalDecoder.reset(self)
--- modulename: codecs, funcname: reset
codecs.py(276): def reset(self):
codecs.py(329): self.buffer = b""
sync_offset_file.py(32): self._file.truncate()
sync_offset_file.py(33): self._file.flush()
sync_offset_file.py(34): self._last = offset
disk_queue.py(102): self._file_size += len(_json) + 1
disk_queue.py(103): return version
async_operation_processor.py(106): if self._queue.size() > self._batch_size / 2:
--- modulename: disk_queue, funcname: size
disk_queue.py(223): return self._last_put_file.read_local() - self._last_ack_file.read_local()
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
--- modulename: sync_offset_file, funcname: read_local
sync_offset_file.py(44): return self._last
async_operation_processor.py(108): if wait:
file_set.py(64): with self._container.lock():
handler.py(265): with self._container.lock():
metadata_container.py(144): self._startup(debug_mode=mode == Mode.DEBUG)
--- modulename: metadata_container, funcname: _startup
metadata_container.py(483): if not debug_mode:
metadata_container.py(484): logger.info(self.get_url())
--- modulename: run, funcname: get_url
run.py(464): return self._backend.get_run_url(
run.py(465): run_id=self._id,
run.py(466): workspace=self._workspace,
run.py(467): project_name=self._project_name,
run.py(468): sys_id=self._sys_id,
run.py(464): return self._backend.get_run_url(
--- modulename: hosted_neptune_backend, funcname: get_run_url
hosted_neptune_backend.py(1052): base_url = self.get_display_address()
--- modulename: hosted_neptune_backend, funcname: get_display_address
hosted_neptune_backend.py(174): return self._client_config.display_url
hosted_neptune_backend.py(1053): return f"{base_url}/{workspace}/{project_name}/e/{sys_id}"
--- modulename: __init__, funcname: info
__init__.py(1476): if self.isEnabledFor(INFO):
--- modulename: __init__, funcname: isEnabledFor
__init__.py(1728): if self.disabled:
__init__.py(1731): try:
__init__.py(1732): return self._cache[level]
__init__.py(1733): except KeyError:
__init__.py(1734): _acquireLock()
--- modulename: __init__, funcname: _acquireLock
__init__.py(225): if _lock:
__init__.py(226): _lock.acquire()
__init__.py(1735): try:
__init__.py(1736): if self.manager.disable >= level:
--- modulename: __init__, funcname: disable
__init__.py(1309): return self._disable
__init__.py(1740): level >= self.getEffectiveLevel()
--- modulename: __init__, funcname: getEffectiveLevel
__init__.py(1717): logger = self
__init__.py(1718): while logger:
__init__.py(1719): if logger.level:
__init__.py(1720): return logger.level
__init__.py(1739): is_enabled = self._cache[level] = (
__init__.py(1743): _releaseLock()
--- modulename: __init__, funcname: _releaseLock
__init__.py(232): if _lock:
__init__.py(233): _lock.release()
__init__.py(1744): return is_enabled
__init__.py(1477): self._log(INFO, msg, args, **kwargs)
--- modulename: __init__, funcname: _log
__init__.py(1606): sinfo = None
__init__.py(1607): if _srcfile:
__init__.py(1611): try:
__init__.py(1612): fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
--- modulename: __init__, funcname: findCaller
__init__.py(1554): f = currentframe()
--- modulename: __init__, funcname: <lambda>
__init__.py(160): currentframe = lambda: sys._getframe(3)
__init__.py(1557): if f is not None:
__init__.py(1558): f = f.f_back
__init__.py(1559): orig_f = f
__init__.py(1560): while f and stacklevel > 1:
__init__.py(1563): if not f:
__init__.py(1565): rv = "(unknown file)", 0, "(unknown function)", None
__init__.py(1566): while hasattr(f, "f_code"):
__init__.py(1567): co = f.f_code
__init__.py(1568): filename = os.path.normcase(co.co_filename)
--- modulename: posixpath, funcname: normcase
posixpath.py(54): return os.fspath(s)
__init__.py(1569): if filename == _srcfile:
__init__.py(1572): sinfo = None
__init__.py(1573): if stack_info:
__init__.py(1581): rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
__init__.py(1582): break
__init__.py(1583): return rv
__init__.py(1617): if exc_info:
__init__.py(1622): record = self.makeRecord(self.name, level, fn, lno, msg, args,
__init__.py(1623): exc_info, func, extra, sinfo)
__init__.py(1622): record = self.makeRecord(self.name, level, fn, lno, msg, args,
--- modulename: __init__, funcname: makeRecord
__init__.py(1591): rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
__init__.py(1592): sinfo)
__init__.py(1591): rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
--- modulename: __init__, funcname: __init__
__init__.py(288): ct = time.time()
__init__.py(289): self.name = name
__init__.py(290): self.msg = msg
__init__.py(309): if (args and len(args) == 1 and isinstance(args[0], collections.abc.Mapping)
__init__.py(312): self.args = args
__init__.py(313): self.levelname = getLevelName(level)
--- modulename: __init__, funcname: getLevelName
__init__.py(138): result = _levelToName.get(level)
__init__.py(139): if result is not None:
__init__.py(140): return result
__init__.py(314): self.levelno = level
__init__.py(315): self.pathname = pathname
__init__.py(316): try:
__init__.py(317): self.filename = os.path.basename(pathname)
--- modulename: posixpath, funcname: basename
posixpath.py(142): p = os.fspath(p)
posixpath.py(143): sep = _get_sep(p)
--- modulename: posixpath, funcname: _get_sep
posixpath.py(42): if isinstance(path, bytes):
posixpath.py(45): return '/'
posixpath.py(144): i = p.rfind(sep) + 1
posixpath.py(145): return p[i:]
__init__.py(318): self.module = os.path.splitext(self.filename)[0]
--- modulename: posixpath, funcname: splitext
posixpath.py(118): p = os.fspath(p)
posixpath.py(119): if isinstance(p, bytes):
posixpath.py(123): sep = '/'
posixpath.py(124): extsep = '.'
posixpath.py(125): return genericpath._splitext(p, sep, None, extsep)
--- modulename: genericpath, funcname: _splitext
genericpath.py(128): sepIndex = p.rfind(sep)
genericpath.py(129): if altsep:
genericpath.py(133): dotIndex = p.rfind(extsep)
genericpath.py(134): if dotIndex > sepIndex:
genericpath.py(136): filenameIndex = sepIndex + 1
genericpath.py(137): while filenameIndex < dotIndex:
genericpath.py(138): if p[filenameIndex:filenameIndex+1] != extsep:
genericpath.py(139): return p[:dotIndex], p[dotIndex:]
__init__.py(322): self.exc_info = exc_info
__init__.py(323): self.exc_text = None # used to cache the traceback text
__init__.py(324): self.stack_info = sinfo
__init__.py(325): self.lineno = lineno
__init__.py(326): self.funcName = func
__init__.py(327): self.created = ct
__init__.py(328): self.msecs = int((ct - int(ct)) * 1000) + 0.0 # see gh-89047
__init__.py(329): self.relativeCreated = (self.created - _startTime) * 1000
__init__.py(330): if logThreads:
__init__.py(331): self.thread = threading.get_ident()
__init__.py(332): self.threadName = threading.current_thread().name
--- modulename: threading, funcname: current_thread
threading.py(1437): try:
threading.py(1438): return _active[get_ident()]
--- modulename: threading, funcname: name
threading.py(1137): assert self._initialized, "Thread.__init__() not called"
threading.py(1138): return self._name
__init__.py(336): if not logMultiprocessing: # pragma: no cover
__init__.py(339): self.processName = 'MainProcess'
__init__.py(340): mp = sys.modules.get('multiprocessing')
__init__.py(341): if mp is not None:
__init__.py(346): try:
__init__.py(347): self.processName = mp.current_process().name
--- modulename: process, funcname: current_process
process.py(41): return _current_process
--- modulename: process, funcname: name
process.py(191): return self._name
__init__.py(350): if logProcesses and hasattr(os, 'getpid'):
__init__.py(351): self.process = os.getpid()
__init__.py(1593): if extra is not None:
__init__.py(1598): return rv
__init__.py(1624): self.handle(record)
--- modulename: __init__, funcname: handle
__init__.py(1633): if (not self.disabled) and self.filter(record):
--- modulename: __init__, funcname: filter
__init__.py(818): rv = True
__init__.py(819): for f in self.filters:
__init__.py(827): return rv
__init__.py(1634): self.callHandlers(record)
--- modulename: __init__, funcname: callHandlers
__init__.py(1690): c = self
__init__.py(1691): found = 0
__init__.py(1692): while c:
__init__.py(1693): for hdlr in c.handlers:
__init__.py(1694): found = found + 1
__init__.py(1695): if record.levelno >= hdlr.level:
__init__.py(1696): hdlr.handle(record)
--- modulename: __init__, funcname: handle
__init__.py(964): rv = self.filter(record)
--- modulename: __init__, funcname: filter
__init__.py(818): rv = True
__init__.py(819): for f in self.filters:
__init__.py(827): return rv
__init__.py(965): if rv:
__init__.py(966): self.acquire()
--- modulename: __init__, funcname: acquire
__init__.py(916): if self.lock:
__init__.py(917): self.lock.acquire()
__init__.py(967): try:
__init__.py(968): self.emit(record)
--- modulename: __init__, funcname: emit
__init__.py(1099): try:
__init__.py(1100): msg = self.format(record)
--- modulename: __init__, funcname: format
__init__.py(939): if self.formatter:
__init__.py(940): fmt = self.formatter
__init__.py(943): return fmt.format(record)
--- modulename: __init__, funcname: format
__init__.py(678): record.message = record.getMessage()
--- modulename: __init__, funcname: getMessage
__init__.py(366): msg = str(self.msg)
__init__.py(367): if self.args:
__init__.py(369): return msg
__init__.py(679): if self.usesTime():
--- modulename: __init__, funcname: usesTime
__init__.py(647): return self._style.usesTime()
--- modulename: __init__, funcname: usesTime
__init__.py(424): return self._fmt.find(self.asctime_search) >= 0
__init__.py(681): s = self.formatMessage(record)
--- modulename: __init__, funcname: formatMessage
__init__.py(650): return self._style.format(record)
--- modulename: __init__, funcname: format
__init__.py(439): try:
__init__.py(440): return self._format(record)
--- modulename: __init__, funcname: _format
__init__.py(432): if defaults := self._defaults:
__init__.py(435): values = record.__dict__
__init__.py(436): return self._fmt % values
__init__.py(682): if record.exc_info:
__init__.py(687): if record.exc_text:
__init__.py(691): if record.stack_info:
__init__.py(695): return s
__init__.py(1101): stream = self.stream
--- modulename: logger, funcname: stream
logger.py(39): return sys.stdout
__init__.py(1103): stream.write(msg + self.terminator)
https://ui.neptune.ai/neuro/Gaddy/e/GAD-333
__init__.py(1104): self.flush()
--- modulename: __init__, funcname: flush
__init__.py(1081): self.acquire()
--- modulename: __init__, funcname: acquire
__init__.py(916): if self.lock:
__init__.py(917): self.lock.acquire()
__init__.py(1082): try:
__init__.py(1083): if self.stream and hasattr(self.stream, "flush"):
--- modulename: logger, funcname: stream
logger.py(39): return sys.stdout
--- modulename: logger, funcname: stream
logger.py(39): return sys.stdout
__init__.py(1084): self.stream.flush()
--- modulename: logger, funcname: stream
logger.py(39): return sys.stdout
__init__.py(1086): self.release()
--- modulename: __init__, funcname: release
__init__.py(923): if self.lock:
__init__.py(924): self.lock.release()
__init__.py(970): self.release()
--- modulename: __init__, funcname: release
__init__.py(923): if self.lock:
__init__.py(924): self.lock.release()
__init__.py(971): return rv
__init__.py(1693): for hdlr in c.handlers:
__init__.py(1697): if not c.propagate:
__init__.py(1698): c = None #break out
__init__.py(1692): while c:
__init__.py(1701): if (found == 0):
metadata_container.py(486): self.start()
--- modulename: metadata_container, funcname: start
metadata_container.py(259): atexit.register(self._shutdown_hook)
metadata_container.py(260): self._op_processor.start()
--- modulename: async_operation_processor, funcname: start
async_operation_processor.py(128): self._consumer.start()
--- modulename: threading, funcname: start
threading.py(926): if not self._initialized:
threading.py(929): if self._started.is_set():
--- modulename: threading, funcname: is_set
threading.py(555): return self._flag
threading.py(932): with _active_limbo_lock:
threading.py(933): _limbo[self] = self
threading.py(932): with _active_limbo_lock:
threading.py(934): try:
threading.py(935): _start_new_thread(self._bootstrap, ())
threading.py(940): self._started.wait()
--- modulename: threading, funcname: wait
threading.py(604): with self._cond:
--- modulename: threading, funcname: __enter__
threading.py(265): return self._lock.__enter__()
threading.py(605): signaled = self._flag
threading.py(606): if not signaled:
threading.py(607): signaled = self._cond.wait(timeout)
--- modulename: threading, funcname: wait
threading.py(311): if not self._is_owned():
--- modulename: threading, funcname: _is_owned
threading.py(282): if self._lock.acquire(False):
threading.py(286): return True
threading.py(313): waiter = _allocate_lock()
threading.py(314): waiter.acquire()
threading.py(315): self._waiters.append(waiter)
threading.py(316): saved_state = self._release_save()
--- modulename: threading, funcname: _release_save
threading.py(274): self._lock.release() # No state to save
threading.py(317): gotit = False
threading.py(318): try: # restore state no matter what (e.g., KeyboardInterrupt)
threading.py(319): if timeout is None:
threading.py(320): waiter.acquire()
--- modulename: async_operation_processor, funcname: run
threading.py(321): gotit = True
async_operation_processor.py(229): try:
threading.py(327): return gotit
async_operation_processor.py(230): super().run()
threading.py(329): self._acquire_restore(saved_state)
--- modulename: daemon, funcname: run
--- modulename: threading, funcname: _acquire_restore
daemon.py(50): self._is_running = True
threading.py(277): self._lock.acquire() # Ignore saved state
daemon.py(51): try:
threading.py(330): if not gotit:
daemon.py(52): while not self._interrupted:
threading.py(608): return signaled
daemon.py(53): self.work()
threading.py(604): with self._cond:
--- modulename: async_operation_processor, funcname: work
--- modulename: threading, funcname: __exit__
async_operation_processor.py(237): ts = time()
threading.py(268): return self._lock.__exit__(*args)
async_operation_processor.py(238): if ts - self._last_flush >= self._sleep_time:
metadata_container.py(261): self._bg_job.start(self)
async_operation_processor.py(239): self._last_flush = ts
--- modulename: backgroud_job_list, funcname: start
async_operation_processor.py(240): self._processor._queue.flush()
backgroud_job_list.py(36): for job in self._jobs:
--- modulename: disk_queue, funcname: flush
backgroud_job_list.py(37): job.start(container)
disk_queue.py(165): self._writer.flush()
--- modulename: ping_background_job, funcname: start
ping_background_job.py(40): self._thread = self.ReportingThread(self._period, container)
--- modulename: ping_background_job, funcname: __init__
ping_background_job.py(56): super().__init__(sleep_time=period, name="NeptunePing")
--- modulename: daemon, funcname: __init__
daemon.py(29): super().__init__(daemon=True, name=name)
--- modulename: threading, funcname: __init__
threading.py(850): assert group is None, "group argument must be None for now"
threading.py(851): if kwargs is None:
threading.py(852): kwargs = {}
threading.py(853): if name:
threading.py(854): name = str(name)
threading.py(864): self._target = target
threading.py(865): self._name = name
threading.py(866): self._args = args
threading.py(867): self._kwargs = kwargs
threading.py(868): if daemon is not None:
threading.py(869): self._daemonic = daemon
threading.py(872): self._ident = None
threading.py(873): if _HAVE_THREAD_NATIVE_ID:
threading.py(874): self._native_id = None
threading.py(875): self._tstate_lock = None
threading.py(876): self._started = Event()
--- modulename: threading, funcname: __init__
threading.py(546): self._cond = Condition(Lock())
disk_queue.py(166): self._last_ack_file.flush()
--- modulename: threading, funcname: __init__
--- modulename: sync_offset_file, funcname: flush
threading.py(237): if lock is None:
sync_offset_file.py(47): self._file.flush()
threading.py(239): self._lock = lock
disk_queue.py(167): self._last_put_file.flush()
threading.py(241): self.acquire = lock.acquire
--- modulename: sync_offset_file, funcname: flush
threading.py(242): self.release = lock.release
sync_offset_file.py(47): self._file.flush()
threading.py(246): try:
threading.py(247): self._release_save = lock._release_save
threading.py(248): except AttributeError:
threading.py(249): pass
threading.py(250): try:
threading.py(251): self._acquire_restore = lock._acquire_restore
async_operation_processor.py(242): while True:
threading.py(252): except AttributeError:
async_operation_processor.py(243): batch = self._processor._queue.get_batch(self._batch_size)
threading.py(253): pass
--- modulename: disk_queue, funcname: get_batch
threading.py(254): try:
disk_queue.py(144): if self._should_skip_to_ack:
threading.py(255): self._is_owned = lock._is_owned
disk_queue.py(145): first = self._skip_and_get()
threading.py(256): except AttributeError:
--- modulename: disk_queue, funcname: _skip_and_get
threading.py(257): pass
disk_queue.py(112): ack_version = self._last_ack_file.read_local()
threading.py(258): self._waiters = _deque()
--- modulename: sync_offset_file, funcname: read_local
threading.py(547): self._flag = False
sync_offset_file.py(44): return self._last
threading.py(877): self._is_stopped = False
disk_queue.py(113): while True:
threading.py(878): self._initialized = True
disk_queue.py(114): top_element = self._get()
threading.py(880): self._stderr = _sys.stderr
--- modulename: disk_queue, funcname: _get
threading.py(881): self._invoke_excepthook = _make_invoke_excepthook()
disk_queue.py(128): _json, size = self._reader.get_with_size()
--- modulename: threading, funcname: _make_invoke_excepthook
--- modulename: json_file_splitter, funcname: get_with_size
threading.py(1306): old_excepthook = excepthook
json_file_splitter.py(47): if self._parsed_queue:
threading.py(1307): old_sys_excepthook = _sys.excepthook
json_file_splitter.py(49): self._read_data()
threading.py(1308): if old_excepthook is None:
--- modulename: json_file_splitter, funcname: _read_data
threading.py(1310): if old_sys_excepthook is None:
json_file_splitter.py(55): if self._part_buffer.tell() < self.MAX_PART_READ:
threading.py(1313): sys_exc_info = _sys.exc_info
json_file_splitter.py(56): data = self._file.read(self.BUFFER_SIZE)
threading.py(1314): local_print = print
--- modulename: codecs, funcname: getstate
threading.py(1315): local_sys = _sys
codecs.py(333): return (self.buffer, 0)
threading.py(1317): def invoke_excepthook(thread):
threading.py(1349): return invoke_excepthook
threading.py(883): _dangling.add(self)
--- modulename: _weakrefset, funcname: add
_weakrefset.py(87): if self._pending_removals:
--- modulename: codecs, funcname: decode
_weakrefset.py(89): self.data.add(ref(item, self._remove))
codecs.py(321): data = self.buffer + input
daemon.py(30): self._sleep_time = sleep_time
codecs.py(322): (result, consumed) = self._buffer_decode(data, self.errors, final)
daemon.py(31): self._interrupted = False
codecs.py(324): self.buffer = data[consumed:]
daemon.py(32): self._event = threading.Event()
codecs.py(325): return result
--- modulename: threading, funcname: __init__
--- modulename: codecs, funcname: getstate
threading.py(546): self._cond = Condition(Lock())
codecs.py(333): return (self.buffer, 0)
--- modulename: threading, funcname: __init__
--- modulename: codecs, funcname: decode
threading.py(237): if lock is None:
codecs.py(321): data = self.buffer + input
threading.py(239): self._lock = lock
codecs.py(322): (result, consumed) = self._buffer_decode(data, self.errors, final)
threading.py(241): self.acquire = lock.acquire
codecs.py(324): self.buffer = data[consumed:]
threading.py(242): self.release = lock.release
codecs.py(325): return result
threading.py(246): try:
json_file_splitter.py(57): if not data:
threading.py(247): self._release_save = lock._release_save
json_file_splitter.py(59): if self._part_buffer.tell() > 0:
threading.py(248): except AttributeError:
json_file_splitter.py(61): self._decode(data)
threading.py(249): pass
--- modulename: json_file_splitter, funcname: _decode
threading.py(250): try:
json_file_splitter.py(72): start = self._json_start(data)
threading.py(251): self._acquire_restore = lock._acquire_restore
--- modulename: json_file_splitter, funcname: _json_start
threading.py(252): except AttributeError:
json_file_splitter.py(87): try:
threading.py(253): pass
json_file_splitter.py(88): return data.index("{", start)
threading.py(254): try:
json_file_splitter.py(73): while start is not None:
threading.py(255): self._is_owned = lock._is_owned
json_file_splitter.py(74): try:
threading.py(256): except AttributeError:
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(257): pass
--- modulename: decoder, funcname: raw_decode
threading.py(258): self._waiters = _deque()
decoder.py(352): try:
threading.py(547): self._flag = False
decoder.py(353): obj, end = self.scan_once(s, idx)
daemon.py(33): self._is_running = False
decoder.py(356): return obj, end
daemon.py(34): self.last_backoff_time = 0 # used only with ConnectionRetryWrapper decorator
json_file_splitter.py(76): size = new_start - start
ping_background_job.py(57): self._container = container
json_file_splitter.py(77): start = new_start
ping_background_job.py(41): self._thread.start()
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
--- modulename: threading, funcname: start
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(926): if not self._initialized:
--- modulename: json_file_splitter, funcname: _json_start
threading.py(929): if self._started.is_set():
json_file_splitter.py(87): try:
--- modulename: threading, funcname: is_set
json_file_splitter.py(88): return data.index("{", start)
threading.py(555): return self._flag
json_file_splitter.py(73): while start is not None:
threading.py(932): with _active_limbo_lock:
json_file_splitter.py(74): try:
threading.py(933): _limbo[self] = self
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(932): with _active_limbo_lock:
--- modulename: decoder, funcname: raw_decode
threading.py(934): try:
decoder.py(352): try:
threading.py(935): _start_new_thread(self._bootstrap, ())
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(940): self._started.wait()
decoder.py(356): return obj, end
--- modulename: daemon, funcname: run
json_file_splitter.py(76): size = new_start - start
--- modulename: threading, funcname: wait
json_file_splitter.py(77): start = new_start
threading.py(604): with self._cond:
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
--- modulename: threading, funcname: __enter__
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(265): return self._lock.__enter__()
--- modulename: json_file_splitter, funcname: _json_start
threading.py(605): signaled = self._flag
json_file_splitter.py(87): try:
threading.py(606): if not signaled:
json_file_splitter.py(88): return data.index("{", start)
threading.py(608): return signaled
json_file_splitter.py(73): while start is not None:
threading.py(604): with self._cond:
json_file_splitter.py(74): try:
--- modulename: threading, funcname: __exit__
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(268): return self._lock.__exit__(*args)
--- modulename: decoder, funcname: raw_decode
ping_background_job.py(42): self._started = True
decoder.py(352): try:
backgroud_job_list.py(36): for job in self._jobs:
decoder.py(353): obj, end = self.scan_once(s, idx)
backgroud_job_list.py(37): job.start(container)
decoder.py(356): return obj, end
--- modulename: websocket_signals_background_job, funcname: start
json_file_splitter.py(76): size = new_start - start
websocket_signals_background_job.py(54): self._thread = self._ListenerThread(container, self._ws_factory.create())
json_file_splitter.py(77): start = new_start
--- modulename: websockets_factory, funcname: create
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
websockets_factory.py(33): return ReconnectingWebsocket(
json_file_splitter.py(83): start = self._json_start(data, start)
websockets_factory.py(34): url=self._url,
--- modulename: json_file_splitter, funcname: _json_start
websockets_factory.py(35): oauth2_session=self._session,
json_file_splitter.py(87): try:
websockets_factory.py(36): shutdown_event=threading.Event(),
json_file_splitter.py(88): return data.index("{", start)
--- modulename: threading, funcname: __init__
json_file_splitter.py(73): while start is not None:
threading.py(546): self._cond = Condition(Lock())
json_file_splitter.py(74): try:
--- modulename: threading, funcname: __init__
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(237): if lock is None:
--- modulename: decoder, funcname: raw_decode
threading.py(239): self._lock = lock
decoder.py(352): try:
threading.py(241): self.acquire = lock.acquire
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(242): self.release = lock.release
decoder.py(356): return obj, end
threading.py(246): try:
json_file_splitter.py(76): size = new_start - start
threading.py(247): self._release_save = lock._release_save
json_file_splitter.py(77): start = new_start
threading.py(248): except AttributeError:
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
threading.py(249): pass
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(250): try:
--- modulename: json_file_splitter, funcname: _json_start
threading.py(251): self._acquire_restore = lock._acquire_restore
json_file_splitter.py(87): try:
threading.py(252): except AttributeError:
json_file_splitter.py(88): return data.index("{", start)
threading.py(253): pass
json_file_splitter.py(73): while start is not None:
threading.py(254): try:
json_file_splitter.py(74): try:
threading.py(255): self._is_owned = lock._is_owned
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(256): except AttributeError:
--- modulename: decoder, funcname: raw_decode
threading.py(257): pass
decoder.py(352): try:
threading.py(258): self._waiters = _deque()
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(547): self._flag = False
decoder.py(356): return obj, end
websockets_factory.py(37): proxies=self._proxies,
json_file_splitter.py(76): size = new_start - start
websockets_factory.py(33): return ReconnectingWebsocket(
json_file_splitter.py(77): start = new_start
--- modulename: reconnecting_websocket, funcname: __init__
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
reconnecting_websocket.py(32): self.url = url
json_file_splitter.py(83): start = self._json_start(data, start)
reconnecting_websocket.py(33): self.client = WebsocketClientAdapter()
--- modulename: json_file_splitter, funcname: _json_start
--- modulename: websocket_client_adapter, funcname: __init__
json_file_splitter.py(87): try:
websocket_client_adapter.py(30): self._ws_client = None
json_file_splitter.py(88): return data.index("{", start)
reconnecting_websocket.py(34): self._shutdown_event = shutdown_event
json_file_splitter.py(73): while start is not None:
reconnecting_websocket.py(35): self._oauth2_session = oauth2_session
json_file_splitter.py(74): try:
reconnecting_websocket.py(36): self._reconnect_counter = ReconnectCounter()
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
--- modulename: reconnecting_websocket, funcname: __init__
--- modulename: decoder, funcname: raw_decode
reconnecting_websocket.py(95): self.retries = 0
decoder.py(352): try:
reconnecting_websocket.py(37): self._token = oauth2_session.token
decoder.py(353): obj, end = self.scan_once(s, idx)
--- modulename: oauth2_session, funcname: token
decoder.py(356): return obj, end
oauth2_session.py(124): return getattr(self._client, "token", None)
json_file_splitter.py(76): size = new_start - start
reconnecting_websocket.py(38): self._proxies = proxies
json_file_splitter.py(77): start = new_start
--- modulename: websocket_signals_background_job, funcname: __init__
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
websocket_signals_background_job.py(73): super().__init__(sleep_time=0, name="NeptuneWebhooks")
json_file_splitter.py(83): start = self._json_start(data, start)
--- modulename: daemon, funcname: __init__
--- modulename: json_file_splitter, funcname: _json_start
daemon.py(29): super().__init__(daemon=True, name=name)
json_file_splitter.py(87): try:
--- modulename: threading, funcname: __init__
json_file_splitter.py(88): return data.index("{", start)
threading.py(850): assert group is None, "group argument must be None for now"
json_file_splitter.py(73): while start is not None:
threading.py(851): if kwargs is None:
json_file_splitter.py(74): try:
threading.py(852): kwargs = {}
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(853): if name:
--- modulename: decoder, funcname: raw_decode
threading.py(854): name = str(name)
decoder.py(352): try:
threading.py(864): self._target = target
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(865): self._name = name
decoder.py(356): return obj, end
threading.py(866): self._args = args
json_file_splitter.py(76): size = new_start - start
threading.py(867): self._kwargs = kwargs
json_file_splitter.py(77): start = new_start
threading.py(868): if daemon is not None:
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
threading.py(869): self._daemonic = daemon
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(872): self._ident = None
--- modulename: json_file_splitter, funcname: _json_start
threading.py(873): if _HAVE_THREAD_NATIVE_ID:
json_file_splitter.py(87): try:
threading.py(874): self._native_id = None
json_file_splitter.py(88): return data.index("{", start)
threading.py(875): self._tstate_lock = None
json_file_splitter.py(73): while start is not None:
threading.py(876): self._started = Event()
json_file_splitter.py(74): try:
--- modulename: threading, funcname: __init__
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(546): self._cond = Condition(Lock())
--- modulename: decoder, funcname: raw_decode
--- modulename: threading, funcname: __init__
decoder.py(352): try:
threading.py(237): if lock is None:
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(239): self._lock = lock
decoder.py(356): return obj, end
threading.py(241): self.acquire = lock.acquire
json_file_splitter.py(76): size = new_start - start
threading.py(242): self.release = lock.release
json_file_splitter.py(77): start = new_start
threading.py(246): try:
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
threading.py(247): self._release_save = lock._release_save
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(248): except AttributeError:
--- modulename: json_file_splitter, funcname: _json_start
threading.py(249): pass
json_file_splitter.py(87): try:
threading.py(250): try:
json_file_splitter.py(88): return data.index("{", start)
threading.py(251): self._acquire_restore = lock._acquire_restore
json_file_splitter.py(73): while start is not None:
threading.py(252): except AttributeError:
json_file_splitter.py(74): try:
threading.py(253): pass
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(254): try:
--- modulename: decoder, funcname: raw_decode
threading.py(255): self._is_owned = lock._is_owned
decoder.py(352): try:
threading.py(256): except AttributeError:
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(257): pass
decoder.py(356): return obj, end
threading.py(258): self._waiters = _deque()
json_file_splitter.py(76): size = new_start - start
threading.py(547): self._flag = False
json_file_splitter.py(77): start = new_start
threading.py(877): self._is_stopped = False
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
threading.py(878): self._initialized = True
daemon.py(50): self._is_running = True
threading.py(880): self._stderr = _sys.stderr
threading.py(881): self._invoke_excepthook = _make_invoke_excepthook()
json_file_splitter.py(83): start = self._json_start(data, start)
daemon.py(51): try:
--- modulename: json_file_splitter, funcname: _json_start
--- modulename: threading, funcname: _make_invoke_excepthook
json_file_splitter.py(87): try:
threading.py(1306): old_excepthook = excepthook
json_file_splitter.py(88): return data.index("{", start)
threading.py(1307): old_sys_excepthook = _sys.excepthook
json_file_splitter.py(73): while start is not None:
threading.py(1308): if old_excepthook is None:
json_file_splitter.py(74): try:
threading.py(1310): if old_sys_excepthook is None:
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(1313): sys_exc_info = _sys.exc_info
--- modulename: decoder, funcname: raw_decode
threading.py(1314): local_print = print
decoder.py(352): try:
threading.py(1315): local_sys = _sys
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(1317): def invoke_excepthook(thread):
decoder.py(356): return obj, end
threading.py(1349): return invoke_excepthook
json_file_splitter.py(76): size = new_start - start
threading.py(883): _dangling.add(self)
json_file_splitter.py(77): start = new_start
--- modulename: _weakrefset, funcname: add
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
_weakrefset.py(87): if self._pending_removals:
json_file_splitter.py(83): start = self._json_start(data, start)
_weakrefset.py(89): self.data.add(ref(item, self._remove))
--- modulename: json_file_splitter, funcname: _json_start
daemon.py(30): self._sleep_time = sleep_time
json_file_splitter.py(87): try:
daemon.py(31): self._interrupted = False
json_file_splitter.py(88): return data.index("{", start)
daemon.py(32): self._event = threading.Event()
json_file_splitter.py(73): while start is not None:
--- modulename: threading, funcname: __init__
json_file_splitter.py(74): try:
threading.py(546): self._cond = Condition(Lock())
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
--- modulename: threading, funcname: __init__
--- modulename: decoder, funcname: raw_decode
threading.py(237): if lock is None:
decoder.py(352): try:
threading.py(239): self._lock = lock
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(241): self.acquire = lock.acquire
decoder.py(356): return obj, end
threading.py(242): self.release = lock.release
json_file_splitter.py(76): size = new_start - start
threading.py(246): try:
json_file_splitter.py(77): start = new_start
threading.py(247): self._release_save = lock._release_save
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
threading.py(248): except AttributeError:
json_file_splitter.py(83): start = self._json_start(data, start)
threading.py(249): pass
--- modulename: json_file_splitter, funcname: _json_start
threading.py(250): try:
json_file_splitter.py(87): try:
threading.py(251): self._acquire_restore = lock._acquire_restore
json_file_splitter.py(88): return data.index("{", start)
threading.py(252): except AttributeError:
json_file_splitter.py(73): while start is not None:
threading.py(253): pass
json_file_splitter.py(74): try:
threading.py(254): try:
json_file_splitter.py(75): json_data, new_start = self._decoder.raw_decode(data, start)
threading.py(255): self._is_owned = lock._is_owned
--- modulename: decoder, funcname: raw_decode
threading.py(256): except AttributeError:
decoder.py(352): try:
threading.py(257): pass
decoder.py(353): obj, end = self.scan_once(s, idx)
threading.py(258): self._waiters = _deque()
decoder.py(356): return obj, end
threading.py(547): self._flag = False
json_file_splitter.py(76): size = new_start - start
daemon.py(33): self._is_running = False
json_file_splitter.py(77): start = new_start
daemon.py(34): self.last_backoff_time = 0 # used only with ConnectionRetryWrapper decorator
json_file_splitter.py(82): self._parsed_queue.append((json_data, size))
websocket_signals_background_job.py(74): self._container = container
json_file_splitter.py(83): start = self._json_start(data, start)
websocket_signals_background_job.py(75): self._ws_client = ws_client
--- modulename: json_file_splitter, funcname: _json_start
websocket_signals_background_job.py(55): self._thread.start()
json_file_splitter.py(87): try:
--- modulename: threading, funcname: start
json_file_splitter.py(88): return data.index("{", start)
threading.py(926): if not self._initialized:
json_file_splitter.py(89): except ValueError:
threading.py(929): if self._started.is_set():
json_file_splitter.py(90): return None
--- modulename: threading, funcname: is_set
json_file_splitter.py(73): while start is not None:
threading.py(555): return self._flag
json_file_splitter.py(63): if not self._parsed_queue:
threading.py(932): with _active_limbo_lock:
json_file_splitter.py(50): if self._parsed_queue:
threading.py(933): _limbo[self] = self
json_file_splitter.py(51): return self._parsed_queue.popleft()
threading.py(932): with _active_limbo_lock:
disk_queue.py(129): if not _json:
threading.py(934): try:
disk_queue.py(137): try:
threading.py(935): _start_new_thread(self._bootstrap, ())
disk_queue.py(138): obj, ver = self._deserialize(_json)
threading.py(940): self._started.wait()
--- modulename: disk_queue, funcname: _deserialize
--- modulename: threading, funcname: wait
disk_queue.py(249): return self._from_dict(data["obj"]), data["version"]
--- modulename: daemon, funcname: run
daemon.py(52): while not self._interrupted:
threading.py(604): with self._cond:
daemon.py(50): self._is_running = True
--- modulename: operation, funcname: from_dict
daemon.py(53): self.work()
daemon.py(51): try:
--- modulename: threading, funcname: __enter__
--- modulename: daemon, funcname: wrapper
daemon.py(52): while not self._interrupted:
operation.py(71): if "type" not in data:
threading.py(265): return self._lock.__enter__()
daemon.py(74): while not self_._interrupted:
daemon.py(53): self.work()
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(605): signaled = self._flag
daemon.py(75): try:
--- modulename: websocket_signals_background_job, funcname: work
--- modulename: operation, funcname: all_subclasses
threading.py(606): if not signaled:
daemon.py(76): result = func(self_, *args, **kwargs)
websocket_signals_background_job.py(78): try:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(608): return signaled
--- modulename: ping_background_job, funcname: work
websocket_signals_background_job.py(79): raw_message = self._ws_client.recv()
--- modulename: operation, funcname: <listcomp>
threading.py(604): with self._cond:
ping_background_job.py(66): self._container.ping()
--- modulename: reconnecting_websocket, funcname: recv
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __exit__
--- modulename: metadata_container, funcname: ping
reconnecting_websocket.py(47): if not self.client.connected:
--- modulename: operation, funcname: all_subclasses
threading.py(268): return self._lock.__exit__(*args)
metadata_container.py(256): self._backend.ping(self._id, self.container_type)
--- modulename: websocket_client_adapter, funcname: connected
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
websocket_signals_background_job.py(56): self._started = True
--- modulename: utils, funcname: wrapper
websocket_client_adapter.py(69): return self._ws_client and self._ws_client.connected
--- modulename: operation, funcname: <listcomp>
backgroud_job_list.py(36): for job in self._jobs:
utils.py(58): last_exception = None
reconnecting_websocket.py(48): self._try_to_establish_connection()
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
backgroud_job_list.py(37): job.start(container)
utils.py(59): start_time = time.monotonic()
--- modulename: reconnecting_websocket, funcname: _try_to_establish_connection
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: std_capture_background_job, funcname: start
utils.py(60): for retry in itertools.count(0):
reconnecting_websocket.py(75): try:
--- modulename: operation, funcname: all_subclasses
std_capture_background_job.py(39): self._logger = StdoutCaptureLogger(container, self._attribute_name)
utils.py(61): if time.monotonic() - start_time > retries_timeout:
reconnecting_websocket.py(76): self._request_token_refresh()
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: std_stream_capture_logger, funcname: __init__
utils.py(64): try:
--- modulename: reconnecting_websocket, funcname: _request_token_refresh
--- modulename: operation, funcname: <listcomp>
std_stream_capture_logger.py(59): super().__init__(container, attribute_name, sys.stdout)
utils.py(65): return func(*args, **kwargs)
reconnecting_websocket.py(88): self._token = self._oauth2_session.refresh_token(token_url=self._oauth2_session.auto_refresh_url)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: std_stream_capture_logger, funcname: __init__
--- modulename: hosted_neptune_backend, funcname: ping
--- modulename: oauth2_session, funcname: refresh_token
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
std_stream_capture_logger.py(29): self._logger = NeptuneLogger(container, attribute_name)
hosted_neptune_backend.py(429): "_request_options": {
oauth2_session.py(409): if not token_url:
--- modulename: operation, funcname: all_subclasses
--- modulename: logger, funcname: __init__
hosted_neptune_backend.py(430): "timeout": 10,
oauth2_session.py(412): if not is_secure_transport(token_url):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
logger.py(27): self._container = container
hosted_neptune_backend.py(431): "connect_timeout": 10,
--- modulename: utils, funcname: is_secure_transport
--- modulename: operation, funcname: <listcomp>
logger.py(28): self._attribute_name = attribute_name
hosted_neptune_backend.py(429): "_request_options": {
utils.py(81): if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
std_stream_capture_logger.py(30): self.stream = stream
hosted_neptune_backend.py(428): request_kwargs = {
--- modulename: _collections_abc, funcname: get
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
std_stream_capture_logger.py(31): self._thread_local = threading.local()
hosted_neptune_backend.py(434): try:
_collections_abc.py(823): try:
--- modulename: operation, funcname: all_subclasses
std_stream_capture_logger.py(32): self.enabled = True
hosted_neptune_backend.py(435): self.leaderboard_client.api.ping(
_collections_abc.py(824): return self[key]
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
std_stream_capture_logger.py(33): self._log_data_queue = Queue()
--- modulename: swagger_client_wrapper, funcname: __getattr__
--- modulename: os, funcname: __getitem__
--- modulename: operation, funcname: <listcomp>
--- modulename: queue, funcname: __init__
swagger_client_wrapper.py(110): return ApiMethodWrapper(getattr(self._api_obj, item))
os.py(676): try:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
queue.py(35): self.maxsize = maxsize
--- modulename: client, funcname: __getattr__
os.py(677): value = self._data[self.encodekey(key)]
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
queue.py(36): self._init(maxsize)
client.py(229): return CallableOperation(getattr(self.resource, name), self.also_return_response)
--- modulename: os, funcname: encode
--- modulename: operation, funcname: all_subclasses
--- modulename: queue, funcname: _init
--- modulename: resource, funcname: __getattr__
os.py(756): if not isinstance(value, str):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
queue.py(207): self.queue = deque()
resource.py(144): op = self.operations.get(item)
os.py(758): return value.encode(encoding, 'surrogateescape')
--- modulename: operation, funcname: <listcomp>
queue.py(42): self.mutex = threading.Lock()
resource.py(145): if not op:
os.py(678): except KeyError:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
queue.py(46): self.not_empty = threading.Condition(self.mutex)
resource.py(147): return op
os.py(680): raise KeyError(key) from None
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __init__
--- modulename: client, funcname: __init__
_collections_abc.py(825): except KeyError:
--- modulename: operation, funcname: all_subclasses
threading.py(237): if lock is None:
client.py(246): self.also_return_response = also_return_response
_collections_abc.py(826): return default
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(239): self._lock = lock
client.py(247): self.operation = operation
utils.py(83): return uri.lower().startswith('https://')
--- modulename: operation, funcname: <listcomp>
threading.py(241): self.acquire = lock.acquire
--- modulename: swagger_client_wrapper, funcname: __init__
oauth2_session.py(415): refresh_token = refresh_token or self.token.get("refresh_token")
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(242): self.release = lock.release
swagger_client_wrapper.py(42): self._api_method = api_method
--- modulename: oauth2_session, funcname: token
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(246): try:
hosted_neptune_backend.py(436): experimentId=container_id,
oauth2_session.py(124): return getattr(self._client, "token", None)
--- modulename: operation, funcname: all_subclasses
threading.py(247): self._release_save = lock._release_save
hosted_neptune_backend.py(435): self.leaderboard_client.api.ping(
oauth2_session.py(417): log.debug(
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(248): except AttributeError:
hosted_neptune_backend.py(437): **request_kwargs,
oauth2_session.py(418): "Adding auto refresh key word arguments %s.", self.auto_refresh_kwargs
--- modulename: operation, funcname: <listcomp>
threading.py(249): pass
hosted_neptune_backend.py(435): self.leaderboard_client.api.ping(
oauth2_session.py(417): log.debug(
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(250): try:
--- modulename: swagger_client_wrapper, funcname: __call__
--- modulename: __init__, funcname: debug
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(251): self._acquire_restore = lock._acquire_restore
swagger_client_wrapper.py(95): try:
__init__.py(1464): if self.isEnabledFor(DEBUG):
--- modulename: operation, funcname: all_subclasses
threading.py(252): except AttributeError:
swagger_client_wrapper.py(96): future = self._api_method(*args, **kwargs)
--- modulename: __init__, funcname: isEnabledFor
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(253): pass
--- modulename: client, funcname: __call__
__init__.py(1728): if self.disabled:
--- modulename: operation, funcname: <listcomp>
threading.py(254): try:
client.py(264): log.debug(u'%s(%s)', self.operation.operation_id, op_kwargs)
__init__.py(1731): try:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(255): self._is_owned = lock._is_owned
--- modulename: __init__, funcname: debug
__init__.py(1732): return self._cache[level]
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(256): except AttributeError:
__init__.py(1464): if self.isEnabledFor(DEBUG):
oauth2_session.py(420): kwargs.update(self.auto_refresh_kwargs)
--- modulename: operation, funcname: all_subclasses
threading.py(257): pass
--- modulename: __init__, funcname: isEnabledFor
oauth2_session.py(421): body = self._client.prepare_refresh_body(
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(258): self._waiters = _deque()
__init__.py(1728): if self.disabled:
oauth2_session.py(422): body=body, refresh_token=refresh_token, scope=self.scope, **kwargs
--- modulename: operation, funcname: <listcomp>
queue.py(50): self.not_full = threading.Condition(self.mutex)
__init__.py(1731): try:
oauth2_session.py(421): body = self._client.prepare_refresh_body(
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __init__
__init__.py(1732): return self._cache[level]
oauth2_session.py(422): body=body, refresh_token=refresh_token, scope=self.scope, **kwargs
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(237): if lock is None:
client.py(265): warn_for_deprecated_op(self.operation)
oauth2_session.py(421): body = self._client.prepare_refresh_body(
--- modulename: operation, funcname: all_subclasses
threading.py(239): self._lock = lock
--- modulename: warning, funcname: warn_for_deprecated_op
--- modulename: base, funcname: prepare_refresh_body
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(241): self.acquire = lock.acquire
warning.py(17): if op.op_spec.get('deprecated', False):
base.py(447): refresh_token = refresh_token or self.refresh_token
--- modulename: operation, funcname: <listcomp>
threading.py(242): self.release = lock.release
client.py(268): request_options = op_kwargs.pop('_request_options', {})
base.py(448): scope = self.scope if scope is None else scope
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(246): try:
client.py(269): request_config = RequestConfig(request_options, self.also_return_response)
base.py(449): return prepare_token_request(self.refresh_token_key, body=body, scope=scope,
--- modulename: operation, funcname: all_subclasses
threading.py(247): self._release_save = lock._release_save
--- modulename: config, funcname: __init__
base.py(450): refresh_token=refresh_token, **kwargs)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(248): except AttributeError:
config.py(81): request_options = request_options.copy() # don't modify the original object
base.py(449): return prepare_token_request(self.refresh_token_key, body=body, scope=scope,
--- modulename: operation, funcname: <listcomp>
threading.py(249): pass
config.py(82): self.also_return_response = also_return_response_default
base.py(450): refresh_token=refresh_token, **kwargs)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(250): try:
config.py(84): for key in list(request_options.keys()):
base.py(449): return prepare_token_request(self.refresh_token_key, body=body, scope=scope,
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(251): self._acquire_restore = lock._acquire_restore
config.py(85): if hasattr(self, key):
--- modulename: parameters, funcname: prepare_token_request
--- modulename: operation, funcname: all_subclasses
threading.py(252): except AttributeError:
config.py(86): setattr(self, key, request_options.pop(key))
parameters.py(146): params = [('grant_type', grant_type)]
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(253): pass
config.py(84): for key in list(request_options.keys()):
parameters.py(148): if 'scope' in kwargs:
--- modulename: operation, funcname: <listcomp>
threading.py(254): try:
config.py(85): if hasattr(self, key):
parameters.py(149): kwargs['scope'] = list_to_scope(kwargs['scope'])
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(255): self._is_owned = lock._is_owned
config.py(86): setattr(self, key, request_options.pop(key))
--- modulename: utils, funcname: list_to_scope
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(256): except AttributeError:
config.py(84): for key in list(request_options.keys()):
utils.py(16): if isinstance(scope, str) or scope is None:
--- modulename: operation, funcname: all_subclasses
threading.py(257): pass
config.py(88): self.additional_properties = request_options
utils.py(17): return scope
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(258): self._waiters = _deque()
client.py(271): request_params = construct_request(
parameters.py(152): client_id = kwargs.pop('client_id', None)
--- modulename: operation, funcname: <listcomp>
queue.py(54): self.all_tasks_done = threading.Condition(self.mutex)
client.py(272): self.operation, request_options, **op_kwargs)
parameters.py(153): if include_client_id:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __init__
client.py(271): request_params = construct_request(
parameters.py(154): if client_id is not None:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(237): if lock is None:
client.py(272): self.operation, request_options, **op_kwargs)
parameters.py(155): params.append(('client_id', client_id))
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(239): self._lock = lock
client.py(271): request_params = construct_request(
parameters.py(158): if code_verifier is not None:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(241): self.acquire = lock.acquire
--- modulename: client, funcname: construct_request
parameters.py(163): client_secret = kwargs.pop('client_secret', None)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(242): self.release = lock.release
client.py(294): url = operation.swagger_spec.api_url.rstrip('/') + operation.path_name
parameters.py(164): if client_secret is not None:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(246): try:
client.py(296): 'method': str(operation.http_method.upper()),
parameters.py(168): for k in kwargs:
--- modulename: operation, funcname: all_subclasses
threading.py(247): self._release_save = lock._release_save
client.py(297): 'url': url,
parameters.py(169): if kwargs[k]:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(248): except AttributeError:
client.py(298): 'params': {}, # filled in downstream
parameters.py(168): for k in kwargs:
--- modulename: operation, funcname: <listcomp>
threading.py(249): pass
client.py(301): if 'headers' in request_options else {}),
parameters.py(169): if kwargs[k]:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(250): try:
client.py(295): request = {
parameters.py(170): params.append((str(k), kwargs[k]))
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(251): self._acquire_restore = lock._acquire_restore
client.py(304): if request_options.get('use_msgpack', False):
parameters.py(168): for k in kwargs:
--- modulename: operation, funcname: all_subclasses
threading.py(252): except AttributeError:
client.py(308): for request_option in ('connect_timeout', 'timeout'):
parameters.py(172): return add_params_to_qs(body, params)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(253): pass
client.py(309): if request_option in request_options:
--- modulename: common, funcname: add_params_to_qs
--- modulename: operation, funcname: <listcomp>
threading.py(254): try:
client.py(310): request[request_option] = request_options[request_option]
common.py(235): if isinstance(params, dict):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(255): self._is_owned = lock._is_owned
client.py(308): for request_option in ('connect_timeout', 'timeout'):
common.py(237): queryparams = urlparse.parse_qsl(query, keep_blank_values=True)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(256): except AttributeError:
client.py(309): if request_option in request_options:
--- modulename: parse, funcname: parse_qsl
--- modulename: operation, funcname: all_subclasses
threading.py(257): pass
client.py(310): request[request_option] = request_options[request_option]
parse.py(742): qs, _coerce_result = _coerce_args(qs)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(258): self._waiters = _deque()
client.py(308): for request_option in ('connect_timeout', 'timeout'):
--- modulename: parse, funcname: _coerce_args
--- modulename: operation, funcname: <listcomp>
queue.py(55): self.unfinished_tasks = 0
client.py(312): construct_params(operation, request, op_kwargs)
parse.py(120): str_input = isinstance(args[0], str)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
std_stream_capture_logger.py(34): self._logging_thread = threading.Thread(target=self.__proces_logs, daemon=True)
--- modulename: client, funcname: construct_params
parse.py(121): for arg in args[1:]:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __init__
client.py(328): current_params = operation.params.copy()
parse.py(126): if str_input:
--- modulename: operation, funcname: all_subclasses
threading.py(850): assert group is None, "group argument must be None for now"
--- modulename: util, funcname: copy
parse.py(127): return args + (_noop,)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(851): if kwargs is None:
util.py(196): copied_dict = type(self)(self)
parse.py(743): separator, _ = _coerce_args(separator)
--- modulename: operation, funcname: <listcomp>
threading.py(852): kwargs = {}
--- modulename: util, funcname: __init__
--- modulename: parse, funcname: _coerce_args
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(853): if name:
util.py(157): super(AliasKeyDict, self).__init__(*args, **kwargs)
parse.py(120): str_input = isinstance(args[0], str)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(856): name = _newname("Thread-%d")
util.py(158): self.alias_to_key = {} # type: typing.Dict[typing.Text, typing.Any]
parse.py(121): for arg in args[1:]:
--- modulename: operation, funcname: all_subclasses
--- modulename: threading, funcname: _newname
util.py(197): copied_dict.alias_to_key = self.alias_to_key.copy()
parse.py(126): if str_input:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(783): return name_template % _counter()
util.py(198): return copied_dict
parse.py(127): return args + (_noop,)
--- modulename: operation, funcname: <listcomp>
threading.py(857): if target is not None:
client.py(329): for param_name, param_value in iteritems(op_kwargs):
parse.py(745): if not separator or (not isinstance(separator, (str, bytes))):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(858): try:
--- modulename: six, funcname: iteritems
parse.py(751): if max_num_fields is not None:
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(859): target_name = target.__name__
six.py(605): return iter(d.items(**kw))
parse.py(756): r = []
--- modulename: operation, funcname: all_subclasses
threading.py(860): name += f" ({target_name})"
client.py(330): param = current_params.pop(param_name, None)
parse.py(757): for name_value in qs.split(separator):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(864): self._target = target
--- modulename: util, funcname: pop
parse.py(758): if not name_value and not strict_parsing:
--- modulename: operation, funcname: <listcomp>
threading.py(865): self._name = name
util.py(177): return super(AliasKeyDict, self).pop(self.determine_key(key), default)
parse.py(759): continue
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(866): self._args = args
--- modulename: util, funcname: determine_key
parse.py(757): for name_value in qs.split(separator):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(867): self._kwargs = kwargs
util.py(167): if key in self.alias_to_key: # this will normally be False, optimize for it
parse.py(777): return r
--- modulename: operation, funcname: all_subclasses
threading.py(868): if daemon is not None:
util.py(169): return key
common.py(238): queryparams.extend(params)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(869): self._daemonic = daemon
client.py(331): if param is None:
common.py(239): return urlencode(queryparams)
--- modulename: operation, funcname: <listcomp>
threading.py(872): self._ident = None
client.py(335): marshal_param(param, param_value, request)
--- modulename: common, funcname: urlencode
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(873): if _HAVE_THREAD_NATIVE_ID:
--- modulename: param, funcname: marshal_param
common.py(64): utf8_params = encode_params_utf8(params)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(874): self._native_id = None
param.py(119): swagger_spec = param.swagger_spec
--- modulename: common, funcname: encode_params_utf8
--- modulename: operation, funcname: all_subclasses
threading.py(875): self._tstate_lock = None
param.py(120): deref = swagger_spec.deref
common.py(76): encoded = []
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(876): self._started = Event()
param.py(122): param_spec = deref(get_param_type_spec(param))
common.py(77): for k, v in params:
--- modulename: operation, funcname: <listcomp>
--- modulename: threading, funcname: __init__
--- modulename: param, funcname: get_param_type_spec
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(546): self._cond = Condition(Lock())
param.py(94): location = param.location
common.py(79): k.encode('utf-8') if isinstance(k, str) else k,
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
--- modulename: threading, funcname: __init__
--- modulename: param, funcname: location
common.py(80): v.encode('utf-8') if isinstance(v, str) else v))
--- modulename: operation, funcname: all_subclasses
threading.py(237): if lock is None:
param.py(65): return self.param_spec['in']
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(239): self._lock = lock
param.py(95): if location in ('path', 'query', 'header', 'formData'):
common.py(77): for k, v in params:
--- modulename: operation, funcname: <listcomp>
threading.py(241): self.acquire = lock.acquire
param.py(96): return param.param_spec
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(242): self.release = lock.release
--- modulename: spec, funcname: _force_deref
common.py(79): k.encode('utf-8') if isinstance(k, str) else k,
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(246): try:
spec.py(407): if ref_dict is None or not is_ref(ref_dict):
common.py(80): v.encode('utf-8') if isinstance(v, str) else v))
--- modulename: operation, funcname: all_subclasses
threading.py(247): self._release_save = lock._release_save
--- modulename: schema, funcname: is_ref
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(248): except AttributeError:
schema.py(74): try:
common.py(77): for k, v in params:
--- modulename: operation, funcname: <listcomp>
threading.py(249): pass
schema.py(75): return '$ref' in spec and is_dict_like(spec) and isinstance(spec['$ref'], string_types)
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(250): try:
spec.py(408): return ref_dict
common.py(79): k.encode('utf-8') if isinstance(k, str) else k,
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(251): self._acquire_restore = lock._acquire_restore
param.py(123): location = param.location
common.py(80): v.encode('utf-8') if isinstance(v, str) else v))
--- modulename: operation, funcname: all_subclasses
threading.py(252): except AttributeError:
--- modulename: param, funcname: location
common.py(78): encoded.append((
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(253): pass
param.py(65): return self.param_spec['in']
common.py(77): for k, v in params:
--- modulename: operation, funcname: <listcomp>
threading.py(254): try:
param.py(127): if value is None and not param.required:
common.py(81): return encoded
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(255): self._is_owned = lock._is_owned
param.py(130): value = marshal_schema_object(swagger_spec, param_spec, value)
common.py(65): urlencoded = _urlencode(utf8_params)
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(256): except AttributeError:
--- modulename: marshal, funcname: marshal_schema_object
--- modulename: parse, funcname: urlencode
--- modulename: operation, funcname: all_subclasses
threading.py(257): pass
marshal.py(53): marshaling_method = _get_marshaling_method(swagger_spec=swagger_spec, object_schema=schema_object_spec)
parse.py(927): if hasattr(query, "items"):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(258): self._waiters = _deque()
--- modulename: _decorators, funcname: wrapper
parse.py(932): try:
--- modulename: operation, funcname: <listcomp>
threading.py(547): self._flag = False
_decorators.py(80): try:
parse.py(935): if len(query) and not isinstance(query[0], tuple):
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(877): self._is_stopped = False
_decorators.py(81): return func(*args, **kwargs)
parse.py(946): l = []
operation.py(51): return set(cls.__subclasses__()).union([s for c in cls.__subclasses__() for s in all_subclasses(c)])
threading.py(878): self._initialized = True
--- modulename: util, funcname: wrapper
parse.py(947): if not doseq:
--- modulename: operation, funcname: <dictcomp>
threading.py(880): self._stderr = _sys.stderr
util.py(119): cache_key = make_key(*args, **kwargs)
parse.py(948): for k, v in query:
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(881): self._invoke_excepthook = _make_invoke_excepthook()
--- modulename: util, funcname: make_key
parse.py(949): if isinstance(k, bytes):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
--- modulename: threading, funcname: _make_invoke_excepthook
util.py(105): if args:
parse.py(950): k = quote_via(k, safe)
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1306): old_excepthook = excepthook
util.py(109): param_name_to_value_mapping = sorted(iteritems(dict(default_mapping, **kwargs)))
--- modulename: parse, funcname: quote_plus
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1307): old_sys_excepthook = _sys.excepthook
--- modulename: six, funcname: iteritems
parse.py(878): if ((isinstance(string, str) and ' ' not in string) or
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1308): if old_excepthook is None:
six.py(605): return iter(d.items(**kw))
parse.py(879): (isinstance(string, bytes) and b' ' not in string)):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1310): if old_sys_excepthook is None:
util.py(111): return tuple(
parse.py(878): if ((isinstance(string, str) and ' ' not in string) or
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1313): sys_exc_info = _sys.exc_info
util.py(113): for param_name, param_value in param_name_to_value_mapping
parse.py(879): (isinstance(string, bytes) and b' ' not in string)):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1314): local_print = print
util.py(111): return tuple(
parse.py(880): return quote(string, safe, encoding, errors)
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1315): local_sys = _sys
--- modulename: util, funcname: <genexpr>
--- modulename: parse, funcname: quote
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1317): def invoke_excepthook(thread):
util.py(111): return tuple(
parse.py(856): if isinstance(string, str):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(1349): return invoke_excepthook
util.py(113): for param_name, param_value in param_name_to_value_mapping
parse.py(865): if encoding is not None:
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(883): _dangling.add(self)
util.py(112): (param_name, id(param_value))
parse.py(867): if errors is not None:
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
--- modulename: _weakrefset, funcname: add
util.py(111): return tuple(
parse.py(869): return quote_from_bytes(string, safe)
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
_weakrefset.py(87): if self._pending_removals:
--- modulename: util, funcname: <genexpr>
--- modulename: parse, funcname: quote_from_bytes
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
_weakrefset.py(89): self.data.add(ref(item, self._remove))
util.py(111): return tuple(
parse.py(893): if not isinstance(bs, (bytes, bytearray)):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
std_stream_capture_logger.py(35): self._logging_thread.start()
util.py(113): for param_name, param_value in param_name_to_value_mapping
parse.py(895): if not bs:
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
--- modulename: threading, funcname: start
util.py(112): (param_name, id(param_value))
parse.py(897): if isinstance(safe, str):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(926): if not self._initialized:
util.py(111): return tuple(
parse.py(899): safe = safe.encode('ascii', 'ignore')
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(929): if self._started.is_set():
--- modulename: util, funcname: <genexpr>
parse.py(902): if not bs.rstrip(_ALWAYS_SAFE_BYTES + safe):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
--- modulename: threading, funcname: is_set
util.py(111): return tuple(
parse.py(903): return bs.decode()
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(555): return self._flag
util.py(113): for param_name, param_value in param_name_to_value_mapping
parse.py(954): if isinstance(v, bytes):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(932): with _active_limbo_lock:
util.py(112): (param_name, id(param_value))
parse.py(955): v = quote_via(v, safe)
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(933): _limbo[self] = self
util.py(111): return tuple(
--- modulename: parse, funcname: quote_plus
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(932): with _active_limbo_lock:
--- modulename: util, funcname: <genexpr>
parse.py(878): if ((isinstance(string, str) and ' ' not in string) or
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(934): try:
util.py(111): return tuple(
parse.py(879): (isinstance(string, bytes) and b' ' not in string)):
operation.py(73): sub_classes = {cls.__name__: cls for cls in all_subclasses(Operation)}
threading.py(935): _start_new_thread(self._bootstrap, ())
util.py(120): cached_value = cache.get(cache_key, _CACHE_MISS)
parse.py(878): if ((isinstance(string, str) and ' ' not in string) or
operation.py(74): if not data["type"] in sub_classes:
util.py(121): if cached_value is _CACHE_MISS:
threading.py(940): self._started.wait()
operation.py(76): return sub_classes[data["type"]].from_dict(data)
util.py(122): if cache_key in key_in_progress_set:
--- modulename: threading, funcname: run
parse.py(879): (isinstance(string, bytes) and b' ' not in string)):
util.py(124): key_in_progress_set.add(cache_key)
threading.py(951): try:
--- modulename: threading, funcname: wait
util.py(125): cached_value = func(*args, **kwargs)
threading.py(952): if self._target is not None:
--- modulename: operation, funcname: from_dict
parse.py(880): return quote(string, safe, encoding, errors)
threading.py(953): self._target(*self._args, **self._kwargs)
operation.py(148): return AssignString(data["path"], data["value"])
threading.py(604): with self._cond:
--- modulename: std_stream_capture_logger, funcname: __proces_logs
--- modulename: operation, funcname: __init__
--- modulename: threading, funcname: __enter__
std_stream_capture_logger.py(50): while True:
<string>(3): <string>(4): disk_queue.py(139): return QueueElement[T](obj, ver, size)
--- modulename: parse, funcname: quote
std_stream_capture_logger.py(51): data = self._log_data_queue.get()
--- modulename: typing, funcname: inner
parse.py(856): if isinstance(string, str):
--- modulename: queue, funcname: get
typing.py(308): try:
threading.py(265): return self._lock.__enter__()
queue.py(165): with self.not_empty:
typing.py(309): return cached(*args, **kwds)
threading.py(605): signaled = self._flag
--- modulename: threading, funcname: __enter__
--- modulename: typing, funcname: __call__
parse.py(865): if encoding is not None:
threading.py(265): return self._lock.__enter__()
threading.py(606): if not signaled:
parse.py(867): if errors is not None:
typing.py(954): if not self._inst:
threading.py(608): return signaled
parse.py(869): return quote_from_bytes(string, safe)
--- modulename: marshal, funcname: _get_marshaling_method
threading.py(604): with self._cond:
--- modulename: parse, funcname: quote_from_bytes
queue.py(166): if not block:
--- modulename: threading, funcname: __exit__
parse.py(893): if not isinstance(bs, (bytes, bytearray)):
typing.py(957): result = self.__origin__(*args, **kwargs)
threading.py(268): return self._lock.__exit__(*args)
parse.py(895): if not bs:
marshal.py(165): object_schema = swagger_spec.deref(object_schema)
std_stream_capture_logger.py(60): sys.stdout = self
parse.py(897): if isinstance(safe, str):
queue.py(169): elif timeout is None:
backgroud_job_list.py(36): for job in self._jobs:
parse.py(899): safe = safe.encode('ascii', 'ignore')
--- modulename: disk_queue, funcname: __init__
queue.py(170): while not self._qsize():
--- modulename: spec, funcname: _force_deref
<string>(3): spec.py(407): if ref_dict is None or not is_ref(ref_dict):
^CTraceback (most recent call last):
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/runpy.py", line 196, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/runpy.py", line 86, in _run_code
exec(code, run_globals)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/trace.py", line 740, in <module>
main()
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/trace.py", line 728, in main
t.runctx(code, globs, globs)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/trace.py", line 450, in runctx
exec(cmd, globals, locals)
File "2023-06-22_librispeech_contrastive.py", line 756, in <module>
neptune_logger.log_hyperparams(vars(config))
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/lightning_utilities/core/rank_zero.py", line 27, in wrapped_fn
return fn(*args, **kwargs)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/pytorch_lightning/loggers/neptune.py", line 406, in log_hyperparams
self.run[parameters_key] = params
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/lightning_fabric/loggers/logger.py", line 114, in experiment
return fn(self)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/pytorch_lightning/loggers/neptune.py", line 358, in run
self._run_instance = neptune.init_run(**self._neptune_init_args)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/metadata_containers/run.py", line 364, in __init__
super().__init__(project=project, api_token=api_token, mode=mode, flush_period=flush_period, proxies=proxies)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/metadata_containers/metadata_container.py", line 144, in __init__
self._startup(debug_mode=mode == Mode.DEBUG)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/metadata_containers/metadata_container.py", line 486, in _startup
self.start()
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/metadata_containers/metadata_container.py", line 261, in start
self._bg_job.start(self)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/internal/backgroud_job_list.py", line 36, in start
for job in self._jobs:
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/internal/backgroud_job_list.py", line 36, in start
for job in self._jobs:
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/trace.py", line 575, in localtrace_trace
print("%s(%d): %s" % (bname, lineno,
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/site-packages/neptune/internal/streams/std_stream_capture_logger.py", line 39, in write
self._log_data_queue.put_nowait(data)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/queue.py", line 191, in put_nowait
return self.put(item, block=False)
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/queue.py", line 133, in put
with self.not_full:
File "/home/tyler/opt/anaconda/envs/gaddy/lib/python3.10/threading.py", line 265, in __enter__
return self._lock.__enter__()
KeyboardInterrupt
[1] + 51627 killed python train.py
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment