diff --git a/conda/core/subdir_data.py b/conda/core/subdir_data.py
index ec4a60958..cde9b5b75 100644
--- a/conda/core/subdir_data.py
+++ b/conda/core/subdir_data.py
@@ -7,6 +7,7 @@ import bz2
 from collections import defaultdict
 from contextlib import closing
 from errno import EACCES, ENODEV, EPERM, EROFS
+from functools import partial
 from genericpath import getmtime, isfile
 import hashlib
 from io import open as io_open
@@ -26,7 +27,7 @@ from ..base.constants import CONDA_HOMEPAGE_URL, CONDA_PACKAGE_EXTENSION_V1, REPODATA_FN
 from ..base.context import context
 from ..common.compat import (ensure_binary, ensure_text_type, ensure_unicode, iteritems, iterkeys,
                              string_types, text_type, with_metaclass)
-from ..common.io import ThreadLimitedThreadPoolExecutor, as_completed
+from ..common.io import ThreadLimitedThreadPoolExecutor, DummyExecutor
 from ..common.url import join_url, maybe_unquote
 from ..core.package_cache_data import PackageCacheData
 from ..exceptions import (CondaDependencyError, CondaHTTPError, CondaUpgradeError,
@@ -77,17 +78,24 @@ class SubdirData(object):
     def query_all(package_ref_or_match_spec, channels=None, subdirs=None,
                   repodata_fn=REPODATA_FN):
         from .index import check_whitelist  # TODO: fix in-line import
+        # ensure that this is not called by threaded code
+        create_cache_dir()
         if channels is None:
             channels = context.channels
         if subdirs is None:
             subdirs = context.subdirs
         channel_urls = all_channel_urls(channels, subdirs=subdirs)
         check_whitelist(channel_urls)
-        with ThreadLimitedThreadPoolExecutor() as executor:
-            futures = tuple(executor.submit(
-                SubdirData(Channel(url), repodata_fn=repodata_fn).query, package_ref_or_match_spec
-            ) for url in channel_urls)
-            return tuple(concat(future.result() for future in as_completed(futures)))
+        subdir_query = lambda url: tuple(SubdirData(Channel(url), repodata_fn=repodata_fn).query(
+            package_ref_or_match_spec))
+
+        # TODO test timing with ProcessPoolExecutor
+        Executor = (DummyExecutor if context.debug or context.repodata_threads == 1
+                    else partial(ThreadLimitedThreadPoolExecutor,
+                                 max_workers=context.repodata_threads))
+        with Executor() as executor:
+            result = tuple(concat(executor.map(subdir_query, channel_urls)))
+        return result
 
     def query(self, package_ref_or_match_spec):
         if not self._loaded:
@@ -624,7 +632,11 @@ def cache_fn_url(url, repodata_fn=REPODATA_FN):
     # url must be right-padded with '/' to not invalidate any existing caches
     if not url.endswith('/'):
         url += '/'
-    url += repodata_fn
+    # add the repodata_fn in for uniqueness, but keep it off for standard stuff.
+    # It would be more sane to add it for everything, but old programs (Navigator)
+    # are looking for the cache under keys without this.
+    if repodata_fn != REPODATA_FN:
+        url += repodata_fn
     md5 = hashlib.md5(ensure_binary(url)).hexdigest()
     return '%s.json' % (md5[:8],)
 
@@ -636,6 +648,6 @@ def add_http_value_to_dict(resp, http_key, d, dict_key):
 
 
 def create_cache_dir():
-    cache_dir = join(PackageCacheData.first_writable(context.pkgs_dirs).pkgs_dir, 'cache')
+    cache_dir = join(PackageCacheData.first_writable().pkgs_dir, 'cache')
     mkdir_p_sudo_safe(cache_dir)
     return cache_dir
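
A note on the new executor selection in `query_all`: `DummyExecutor` (already in `conda.common.io`) runs each call in the calling thread, which keeps tracebacks and `pdb` usable under `--debug` or when `repodata_threads` is pinned to 1, and the `partial(...)` lets both branches be instantiated the same way. As a side effect, `executor.map` returns results in channel order, where the old `as_completed` loop returned them in completion order. Here is a minimal standard-library sketch of the same pattern; `SerialExecutor` and `pick_executor` are illustrative names, not conda's API:

```python
from concurrent.futures import Executor, Future, ThreadPoolExecutor
from functools import partial


class SerialExecutor(Executor):
    """In-thread stand-in for a pool: work runs synchronously, so
    exceptions surface with a plain, single-threaded traceback."""

    def submit(self, fn, *args, **kwargs):
        # Executor.map is built on submit, so overriding submit is enough
        future = Future()
        try:
            future.set_result(fn(*args, **kwargs))
        except BaseException as exc:
            future.set_exception(exc)
        return future


def pick_executor(debug=False, threads=None):
    # mirrors the Executor = (... if ... else partial(...)) selection above
    if debug or threads == 1:
        return SerialExecutor
    return partial(ThreadPoolExecutor, max_workers=threads)


with pick_executor(debug=True)() as executor:
    print(list(executor.map(lambda x: x * x, range(4))))  # -> [0, 1, 4, 9]
```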
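On the `cache_fn_url` hunk, a quick self-contained rerun of the patched logic makes the compatibility argument concrete; the function body is inlined from the diff above, with `ensure_binary` swapped for a plain `.encode()`:

```python
import hashlib

REPODATA_FN = 'repodata.json'


def cache_fn_url(url, repodata_fn=REPODATA_FN):
    # url must be right-padded with '/' to not invalidate any existing caches
    if not url.endswith('/'):
        url += '/'
    # only non-default filenames contribute to the cache key
    if repodata_fn != REPODATA_FN:
        url += repodata_fn
    md5 = hashlib.md5(url.encode('utf-8')).hexdigest()
    return '%s.json' % (md5[:8],)


url = 'https://repo.anaconda.com/pkgs/main/linux-64'
print(cache_fn_url(url))                           # hashes '.../linux-64/' only
print(cache_fn_url(url, 'current_repodata.json'))  # distinct cache file
```

With the default filename the hashed string is the bare slash-terminated URL, so cache keys computed by external tools such as Navigator keep working, while any alternate filename (e.g. `current_repodata.json`) gets its own `.json` cache entry.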
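Finally, the `create_cache_dir()` call added at the top of `query_all` pairs with its `# ensure that this is not called by threaded code` comment: the cache directory is created (and the first writable pkgs dir resolved) once on the main thread, so the worker threads never race to create it. Dropping the explicit `context.pkgs_dirs` argument relies on `PackageCacheData.first_writable()` presumably falling back to that same setting by default. A small sketch of the priming pattern, with a hypothetical stand-in helper:

```python
import os
import tempfile
from concurrent.futures import ThreadPoolExecutor


def create_cache_dir(base):
    # hypothetical stand-in for conda's helper: an idempotent mkdir
    cache_dir = os.path.join(base, 'cache')
    os.makedirs(cache_dir, exist_ok=True)
    return cache_dir


base = tempfile.mkdtemp()
create_cache_dir(base)  # primed once, before any worker threads exist
with ThreadPoolExecutor(max_workers=4) as pool:
    hits = list(pool.map(lambda _: os.path.isdir(os.path.join(base, 'cache')),
                         range(8)))
assert all(hits)  # every worker finds the directory already in place
```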