diff --git a/conda_build/environ.py b/conda_build/environ.py index 5a57359a4b..841b622ee7 100644 --- a/conda_build/environ.py +++ b/conda_build/environ.py @@ -14,9 +14,20 @@ from glob import glob from os.path import join, normpath -from .conda_interface import (CondaError, LinkError, LockError, NoPackagesFoundError, - PaddingError, UnsatisfiableError) -from .conda_interface import display_actions, execute_actions, execute_plan, install_actions +from .conda_interface import ( + CondaError, + LinkError, + LockError, + NoPackagesFoundError, + PaddingError, + UnsatisfiableError, +) +from .conda_interface import ( + display_actions, + execute_actions, + execute_plan, + install_actions, +) from .conda_interface import package_cache, TemporaryDirectory from .conda_interface import pkgs_dirs, root_dir, create_default_packages from .conda_interface import reset_context @@ -33,37 +44,47 @@ # these are things that we provide env vars for more explicitly. This list disables the # pass-through of variant values to env vars for these keys. -LANGUAGES = ('PERL', 'LUA', 'R', "NUMPY", 'PYTHON') -R_PACKAGES = ('r-base', 'mro-base', 'r-impl') +LANGUAGES = ("PERL", "LUA", "R", "NUMPY", "PYTHON") +R_PACKAGES = ("r-base", "mro-base", "r-impl") def get_perl_ver(config): - return '.'.join(config.variant.get('perl', get_default_variant(config)['perl']).split('.')[:2]) + return ".".join( + config.variant.get("perl", get_default_variant(config)["perl"]).split(".")[:2] + ) def get_lua_ver(config): - return '.'.join(config.variant.get('lua', get_default_variant(config)['lua']).split('.')[:2]) + return ".".join( + config.variant.get("lua", get_default_variant(config)["lua"]).split(".")[:2] + ) def get_py_ver(config): - py = config.variant.get('python', get_default_variant(config)['python']) - if not hasattr(py, 'split'): + py = config.variant.get("python", get_default_variant(config)["python"]) + if not hasattr(py, "split"): py = py[0] - return '.'.join(py.split('.')[:2]) + return ".".join(py.split(".")[:2]) def get_r_ver(config): - return '.'.join(config.variant.get('r_base', - get_default_variant(config)['r_base']).split('.')[:3]) + return ".".join( + config.variant.get("r_base", get_default_variant(config)["r_base"]).split(".")[ + :3 + ] + ) def get_npy_ver(config): - conda_npy = ''.join(str(config.variant.get('numpy') or - get_default_variant(config)['numpy']).split('.')) + conda_npy = "".join( + str(config.variant.get("numpy") or get_default_variant(config)["numpy"]).split( + "." + ) + ) # Convert int -> string, e.g. # 17 -> '1.7' # 110 -> '1.10' - return conda_npy[0] + '.' + conda_npy[1:] + return conda_npy[0] + "." 
+ conda_npy[1:] def get_lua_include_dir(config): @@ -71,8 +92,9 @@ def get_lua_include_dir(config): @lru_cache(maxsize=None) -def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=False, - expected_rev='HEAD'): +def verify_git_repo( + git_exe, git_dir, git_url, git_commits_since_tag, debug=False, expected_rev="HEAD" +): env = os.environ.copy() log = utils.get_logger(__name__) @@ -83,51 +105,61 @@ def verify_git_repo(git_exe, git_dir, git_url, git_commits_since_tag, debug=Fals OK = True - env['GIT_DIR'] = git_dir + env["GIT_DIR"] = git_dir try: # Verify current commit (minus our locally applied patches) matches expected commit - current_commit = utils.check_output_env([git_exe, - "log", - "-n1", - "--format=%H", - "HEAD" + "^" * git_commits_since_tag], - env=env, stderr=stderr) - current_commit = current_commit.decode('utf-8') - expected_tag_commit = utils.check_output_env([git_exe, "log", "-n1", "--format=%H", - expected_rev], - env=env, stderr=stderr) - expected_tag_commit = expected_tag_commit.decode('utf-8') + current_commit = utils.check_output_env( + [ + git_exe, + "log", + "-n1", + "--format=%H", + "HEAD" + "^" * git_commits_since_tag, + ], + env=env, + stderr=stderr, + ) + current_commit = current_commit.decode("utf-8") + expected_tag_commit = utils.check_output_env( + [git_exe, "log", "-n1", "--format=%H", expected_rev], env=env, stderr=stderr + ) + expected_tag_commit = expected_tag_commit.decode("utf-8") if current_commit != expected_tag_commit: return False # Verify correct remote url. Need to find the git cache directory, # and check the remote from there. - cache_details = utils.check_output_env([git_exe, "remote", "-v"], env=env, - stderr=stderr) - cache_details = cache_details.decode('utf-8') - cache_dir = cache_details.split('\n')[0].split()[1] + cache_details = utils.check_output_env( + [git_exe, "remote", "-v"], env=env, stderr=stderr + ) + cache_details = cache_details.decode("utf-8") + cache_dir = cache_details.split("\n")[0].split()[1] if not isinstance(cache_dir, str): # On Windows, subprocess env can't handle unicode. - cache_dir = cache_dir.encode(sys.getfilesystemencoding() or 'utf-8') + cache_dir = cache_dir.encode(sys.getfilesystemencoding() or "utf-8") try: - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) except subprocess.CalledProcessError: - if sys.platform == 'win32' and cache_dir.startswith('/'): + if sys.platform == "win32" and cache_dir.startswith("/"): cache_dir = utils.convert_unix_path_to_win(cache_dir) - remote_details = utils.check_output_env([git_exe, "--git-dir", cache_dir, - "remote", "-v"], - env=env, stderr=stderr) - remote_details = remote_details.decode('utf-8') - remote_url = remote_details.split('\n')[0].split()[1] + remote_details = utils.check_output_env( + [git_exe, "--git-dir", cache_dir, "remote", "-v"], + env=env, + stderr=stderr, + ) + remote_details = remote_details.decode("utf-8") + remote_url = remote_details.split("\n")[0].split()[1] # on windows, remote URL comes back to us as cygwin or msys format. Python doesn't # know how to normalize it. Need to convert it to a windows path. 
- if sys.platform == 'win32' and remote_url.startswith('/'): + if sys.platform == "win32" and remote_url.startswith("/"): remote_url = utils.convert_unix_path_to_win(git_url) if os.path.exists(remote_url): @@ -167,18 +199,21 @@ def get_git_info(git_exe, repo, debug): # grab information from describe env = os.environ.copy() - env['GIT_DIR'] = repo + env["GIT_DIR"] = repo keys = ["GIT_DESCRIBE_TAG", "GIT_DESCRIBE_NUMBER", "GIT_DESCRIBE_HASH"] try: - output = utils.check_output_env([git_exe, "describe", "--tags", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--tags", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: d.update(dict(zip(keys, parts))) - d['GIT_DESCRIBE_TAG_PEP440'] = str(get_version_from_git_tag(output)) + d["GIT_DESCRIBE_TAG_PEP440"] = str(get_version_from_git_tag(output)) except subprocess.CalledProcessError: msg = ( "Failed to obtain git tag information.\n" @@ -191,35 +226,42 @@ def get_git_info(git_exe, repo, debug): # Try to get the short hash from describing with all refs (not just the tags). if "GIT_DESCRIBE_HASH" not in d: try: - output = utils.check_output_env([git_exe, "describe", "--all", "--long", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - parts = output.rsplit('-', 2) + output = utils.check_output_env( + [git_exe, "describe", "--all", "--long", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + parts = output.rsplit("-", 2) if len(parts) == 3: # Don't save GIT_DESCRIBE_TAG and GIT_DESCRIBE_NUMBER because git (probably) # described a branch. We just want to save the short hash. - d['GIT_DESCRIBE_HASH'] = parts[-1] + d["GIT_DESCRIBE_HASH"] = parts[-1] except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) try: # get the _full_ hash of the current HEAD - output = utils.check_output_env([git_exe, "rev-parse", "HEAD"], - env=env, cwd=os.path.dirname(repo), - stderr=stderr).splitlines()[0] - output = output.decode('utf-8') - - d['GIT_FULL_HASH'] = output + output = utils.check_output_env( + [git_exe, "rev-parse", "HEAD"], + env=env, + cwd=os.path.dirname(repo), + stderr=stderr, + ).splitlines()[0] + output = output.decode("utf-8") + + d["GIT_FULL_HASH"] = output except subprocess.CalledProcessError as error: log.debug("Error obtaining git commit information. Error was: ") log.debug(str(error)) # set up the build string if "GIT_DESCRIBE_NUMBER" in d and "GIT_DESCRIBE_HASH" in d: - d['GIT_BUILD_STR'] = '{}_{}'.format(d["GIT_DESCRIBE_NUMBER"], - d["GIT_DESCRIBE_HASH"]) + d["GIT_BUILD_STR"] = "{}_{}".format( + d["GIT_DESCRIBE_NUMBER"], d["GIT_DESCRIBE_HASH"] + ) # issues on Windows with the next line of the command prompt being recorded here. 
assert not any("\n" in value for value in d.values()) @@ -228,29 +270,41 @@ def get_git_info(git_exe, repo, debug): def get_hg_build_info(repo): env = os.environ.copy() - env['HG_DIR'] = repo + env["HG_DIR"] = repo env = {str(key): str(value) for key, value in env.items()} d = {} - cmd = ["hg", "log", "--template", - "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", - "--rev", "."] + cmd = [ + "hg", + "log", + "--template", + "{rev}|{node|short}|{latesttag}|{latesttagdistance}|{branch}", + "--rev", + ".", + ] output = utils.check_output_env(cmd, env=env, cwd=os.path.dirname(repo)) - output = output.decode('utf-8') - rev, short_id, tag, distance, branch = output.split('|') - if tag != 'null': - d['HG_LATEST_TAG'] = tag + output = output.decode("utf-8") + rev, short_id, tag, distance, branch = output.split("|") + if tag != "null": + d["HG_LATEST_TAG"] = tag if branch == "": - branch = 'default' - d['HG_BRANCH'] = branch - d['HG_NUM_ID'] = rev - d['HG_LATEST_TAG_DISTANCE'] = distance - d['HG_SHORT_ID'] = short_id - d['HG_BUILD_STR'] = '{}_{}'.format(d['HG_NUM_ID'], d['HG_SHORT_ID']) + branch = "default" + d["HG_BRANCH"] = branch + d["HG_NUM_ID"] = rev + d["HG_LATEST_TAG_DISTANCE"] = distance + d["HG_SHORT_ID"] = short_id + d["HG_BUILD_STR"] = "{}_{}".format(d["HG_NUM_ID"], d["HG_SHORT_ID"]) return d -def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash=False, variant=None): +def get_dict( + m, + prefix=None, + for_env=True, + skip_build_id=False, + escape_backslash=False, + variant=None, +): if not prefix: prefix = m.config.host_prefix @@ -272,8 +326,7 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash d.update(os_vars(m, prefix)) # features - d.update({feat.upper(): str(int(value)) for feat, value in - feature_list}) + d.update({feat.upper(): str(int(value)) for feat, value in feature_list}) variant = variant or m.config.variant for k, v in variant.items(): @@ -283,34 +336,36 @@ def get_dict(m, prefix=None, for_env=True, skip_build_id=False, escape_backslash def conda_build_vars(prefix, config): - src_dir = config.test_dir if os.path.basename(prefix)[:2] == '_t' else config.work_dir + src_dir = ( + config.test_dir if os.path.basename(prefix)[:2] == "_t" else config.work_dir + ) return { - 'CONDA_BUILD': '1', - 'PYTHONNOUSERSITE': '1', - 'CONDA_DEFAULT_ENV': config.host_prefix, - 'ARCH': str(config.host_arch), + "CONDA_BUILD": "1", + "PYTHONNOUSERSITE": "1", + "CONDA_DEFAULT_ENV": config.host_prefix, + "ARCH": str(config.host_arch), # This is the one that is most important for where people put artifacts that get bundled. # It is fed from our function argument, and can be any of: # 1. Build prefix - when host requirements are not explicitly set, # then prefix = build prefix = host prefix # 2. Host prefix - when host requirements are explicitly set, prefix = host prefix # 3. Test prefix - during test runs, this points at the test prefix - 'PREFIX': prefix, + "PREFIX": prefix, # This is for things that are specifically build tools. Things that run on the build # platform, but probably should not be linked against, since they may not run on the # destination host platform # It can be equivalent to config.host_prefix if the host section is not explicitly set. 
- 'BUILD_PREFIX': config.build_prefix, - 'SYS_PREFIX': sys.prefix, - 'SYS_PYTHON': sys.executable, - 'SUBDIR': config.host_subdir, - 'build_platform': config.build_subdir, - 'SRC_DIR': src_dir, - 'HTTPS_PROXY': os.getenv('HTTPS_PROXY', ''), - 'HTTP_PROXY': os.getenv('HTTP_PROXY', ''), - 'REQUESTS_CA_BUNDLE': os.getenv('REQUESTS_CA_BUNDLE', ''), - 'DIRTY': '1' if config.dirty else '', - 'ROOT': root_dir, + "BUILD_PREFIX": config.build_prefix, + "SYS_PREFIX": sys.prefix, + "SYS_PYTHON": sys.executable, + "SUBDIR": config.host_subdir, + "build_platform": config.build_subdir, + "SRC_DIR": src_dir, + "HTTPS_PROXY": os.getenv("HTTPS_PROXY", ""), + "HTTP_PROXY": os.getenv("HTTP_PROXY", ""), + "REQUESTS_CA_BUNDLE": os.getenv("REQUESTS_CA_BUNDLE", ""), + "DIRTY": "1" if config.dirty else "", + "ROOT": root_dir, } @@ -320,140 +375,151 @@ def python_vars(metadata, prefix, escape_backslash): sp_dir = utils.get_site_packages(prefix, py_ver) if utils.on_win and escape_backslash: - stdlib_dir = stdlib_dir.replace('\\', '\\\\') - sp_dir = sp_dir.replace('\\', '\\\\') + stdlib_dir = stdlib_dir.replace("\\", "\\\\") + sp_dir = sp_dir.replace("\\", "\\\\") vars_ = { - 'CONDA_PY': ''.join(py_ver.split('.')[:2]), - 'PY3K': str(int(int(py_ver[0]) >= 3)), - 'PY_VER': py_ver, - 'STDLIB_DIR': stdlib_dir, - 'SP_DIR': sp_dir, - } - build_or_host = 'host' if metadata.is_cross else 'build' + "CONDA_PY": "".join(py_ver.split(".")[:2]), + "PY3K": str(int(int(py_ver[0]) >= 3)), + "PY_VER": py_ver, + "STDLIB_DIR": stdlib_dir, + "SP_DIR": sp_dir, + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'python' in deps or metadata.name(fail_ok=True) == 'python': + if "python" in deps or metadata.name(fail_ok=True) == "python": python_bin = metadata.config.python_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - python_bin = python_bin.replace('\\', '\\\\') + python_bin = python_bin.replace("\\", "\\\\") - vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PYTHON': python_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PYTHON": python_bin, + } + ) - np_ver = metadata.config.variant.get('numpy', get_default_variant(metadata.config)['numpy']) - vars_['NPY_VER'] = '.'.join(np_ver.split('.')[:2]) - vars_['CONDA_NPY'] = ''.join(np_ver.split('.')[:2]) - vars_['NPY_DISTUTILS_APPEND_FLAGS'] = '1' + np_ver = metadata.config.variant.get( + "numpy", get_default_variant(metadata.config)["numpy"] + ) + vars_["NPY_VER"] = ".".join(np_ver.split(".")[:2]) + vars_["CONDA_NPY"] = "".join(np_ver.split(".")[:2]) + vars_["NPY_DISTUTILS_APPEND_FLAGS"] = "1" return vars_ def perl_vars(metadata, prefix, escape_backslash): vars_ = { - 'PERL_VER': get_perl_ver(metadata.config), - 'CONDA_PERL': get_perl_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "PERL_VER": get_perl_ver(metadata.config), + "CONDA_PERL": get_perl_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'perl' in deps or metadata.name(fail_ok=True) == 'perl': + if "perl" in deps or metadata.name(fail_ok=True) == "perl": perl_bin = metadata.config.perl_bin(prefix, metadata.config.host_subdir) if utils.on_win and escape_backslash: - perl_bin = perl_bin.replace('\\', '\\\\') + perl_bin = perl_bin.replace("\\", 
"\\\\") - vars_.update({ - # host prefix is always fine, because it is the same as build when is_cross is False - 'PERL': perl_bin, - }) + vars_.update( + { + # host prefix is always fine, because it is the same as build when is_cross is False + "PERL": perl_bin, + } + ) return vars_ def lua_vars(metadata, prefix, escape_backslash): vars_ = { - 'LUA_VER': get_lua_ver(metadata.config), - 'CONDA_LUA': get_lua_ver(metadata.config), - } - build_or_host = 'host' if metadata.is_cross else 'build' + "LUA_VER": get_lua_ver(metadata.config), + "CONDA_LUA": get_lua_ver(metadata.config), + } + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if 'lua' in deps: + if "lua" in deps: lua_bin = metadata.config.lua_bin(prefix, metadata.config.host_subdir) lua_include_dir = get_lua_include_dir(metadata.config) if utils.on_win and escape_backslash: - lua_bin = lua_bin.replace('\\', '\\\\') - lua_include_dir = lua_include_dir.replace('\\', '\\\\') + lua_bin = lua_bin.replace("\\", "\\\\") + lua_include_dir = lua_include_dir.replace("\\", "\\\\") - vars_.update({ - 'LUA': lua_bin, - 'LUA_INCLUDE_DIR': lua_include_dir, - }) + vars_.update( + { + "LUA": lua_bin, + "LUA_INCLUDE_DIR": lua_include_dir, + } + ) return vars_ def r_vars(metadata, prefix, escape_backslash): vars_ = { - 'R_VER': get_r_ver(metadata.config), - 'CONDA_R': get_r_ver(metadata.config), - } + "R_VER": get_r_ver(metadata.config), + "CONDA_R": get_r_ver(metadata.config), + } - build_or_host = 'host' if metadata.is_cross else 'build' + build_or_host = "host" if metadata.is_cross else "build" deps = [str(ms.name) for ms in metadata.ms_depends(build_or_host)] - if any(r_pkg in deps for r_pkg in R_PACKAGES) or \ - metadata.name(fail_ok=True) in R_PACKAGES: + if ( + any(r_pkg in deps for r_pkg in R_PACKAGES) + or metadata.name(fail_ok=True) in R_PACKAGES + ): r_bin = metadata.config.r_bin(prefix, metadata.config.host_subdir) # set R_USER explicitly to prevent crosstalk with existing R_LIBS_USER packages - r_user = join(prefix, 'Libs', 'R') + r_user = join(prefix, "Libs", "R") if utils.on_win and escape_backslash: - r_bin = r_bin.replace('\\', '\\\\') + r_bin = r_bin.replace("\\", "\\\\") - vars_.update({ - 'R': r_bin, - 'R_USER': r_user, - }) + vars_.update( + { + "R": r_bin, + "R_USER": r_user, + } + ) return vars_ def meta_vars(meta, skip_build_id=False): d = {} - for var_name in ensure_list(meta.get_value('build/script_env', [])): - if '=' in var_name: - var_name, value = var_name.split('=', 1) + for var_name in ensure_list(meta.get_value("build/script_env", [])): + if "=" in var_name: + var_name, value = var_name.split("=", 1) else: value = os.getenv(var_name) if value is None: warnings.warn( - "The environment variable '%s' is undefined." % var_name, - UserWarning + "The environment variable '%s' is undefined." % var_name, UserWarning ) else: d[var_name] = value warnings.warn( "The environment variable '%s' is being passed through with value '%s'. " "If you are splitting build and test phases with --no-test, please ensure " - "that this value is also set similarly at test time." % - (var_name, "" if meta.config.suppress_variables else value), - UserWarning + "that this value is also set similarly at test time." 
+ % (var_name, "" if meta.config.suppress_variables else value), + UserWarning, ) - folder = meta.get_value('source/0/folder', '') + folder = meta.get_value("source/0/folder", "") repo_dir = join(meta.config.work_dir, folder) - git_dir = join(repo_dir, '.git') - hg_dir = join(repo_dir, '.hg') + git_dir = join(repo_dir, ".git") + hg_dir = join(repo_dir, ".hg") if not isinstance(git_dir, str): # On Windows, subprocess env can't handle unicode. - git_dir = git_dir.encode(sys.getfilesystemencoding() or 'utf-8') + git_dir = git_dir.encode(sys.getfilesystemencoding() or "utf-8") - git_exe = external.find_executable('git', meta.config.build_prefix) + git_exe = external.find_executable("git", meta.config.build_prefix) if git_exe and os.path.exists(git_dir): # We set all 'source' metavars using the FIRST source entry in meta.yaml. - git_url = meta.get_value('source/0/git_url') + git_url = meta.get_value("source/0/git_url") if os.path.exists(git_url): - if sys.platform == 'win32': + if sys.platform == "win32": git_url = utils.convert_unix_path_to_win(git_url) # If git_url is a relative path instead of a url, convert it to an abspath git_url = normpath(join(meta.path, git_url)) @@ -461,30 +527,34 @@ def meta_vars(meta, skip_build_id=False): _x = False if git_url: - _x = verify_git_repo(git_exe, - git_dir, - git_url, - meta.config.git_commits_since_tag, - meta.config.debug, - meta.get_value('source/0/git_rev', 'HEAD')) - - if _x or meta.get_value('source/0/path'): + _x = verify_git_repo( + git_exe, + git_dir, + git_url, + meta.config.git_commits_since_tag, + meta.config.debug, + meta.get_value("source/0/git_rev", "HEAD"), + ) + + if _x or meta.get_value("source/0/path"): d.update(get_git_info(git_exe, git_dir, meta.config.debug)) - elif external.find_executable('hg', meta.config.build_prefix) and os.path.exists(hg_dir): + elif external.find_executable("hg", meta.config.build_prefix) and os.path.exists( + hg_dir + ): d.update(get_hg_build_info(hg_dir)) # use `get_value` to prevent early exit while name is still unresolved during rendering - d['PKG_NAME'] = meta.get_value('package/name') - d['PKG_VERSION'] = meta.version() - d['PKG_BUILDNUM'] = str(meta.build_number()) + d["PKG_NAME"] = meta.get_value("package/name") + d["PKG_VERSION"] = meta.version() + d["PKG_BUILDNUM"] = str(meta.build_number()) if meta.final and not skip_build_id: - d['PKG_BUILD_STRING'] = str(meta.build_id()) - d['PKG_HASH'] = meta.hash_dependencies() + d["PKG_BUILD_STRING"] = str(meta.build_id()) + d["PKG_HASH"] = meta.hash_dependencies() else: - d['PKG_BUILD_STRING'] = 'placeholder' - d['PKG_HASH'] = '1234567' - d['RECIPE_DIR'] = meta.path + d["PKG_BUILD_STRING"] = "placeholder" + d["PKG_HASH"] = "1234567" + d["RECIPE_DIR"] = meta.path return d @@ -493,9 +563,10 @@ def get_cpu_count(): if sys.platform == "darwin": # multiprocessing.cpu_count() is not reliable on OSX # See issue #645 on github.com/conda/conda-build - out, _ = subprocess.Popen('sysctl -n hw.logicalcpu', shell=True, - stdout=subprocess.PIPE).communicate() - return out.decode('utf-8').strip() + out, _ = subprocess.Popen( + "sysctl -n hw.logicalcpu", shell=True, stdout=subprocess.PIPE + ).communicate() + return out.decode("utf-8").strip() else: try: return str(multiprocessing.cpu_count()) @@ -505,13 +576,13 @@ def get_cpu_count(): def get_shlib_ext(host_platform): # Return the shared library extension. 
- if host_platform.startswith('win'): - return '.dll' - elif host_platform in ['osx', 'darwin']: - return '.dylib' - elif host_platform.startswith('linux'): - return '.so' - elif host_platform == 'noarch': + if host_platform.startswith("win"): + return ".dll" + elif host_platform in ["osx", "darwin"]: + return ".dylib" + elif host_platform.startswith("linux"): + return ".so" + elif host_platform == "noarch": # noarch packages should not contain shared libraries, use the system # platform if this is requested return get_shlib_ext(sys.platform) @@ -522,89 +593,91 @@ def get_shlib_ext(host_platform): def windows_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" # We have gone for the clang values here. - win_arch = 'i386' if str(m.config.host_arch) == '32' else 'amd64' - win_msvc = '19.0.0' - library_prefix = join(prefix, 'Library') - drive, tail = m.config.host_prefix.split(':') - get_default('SCRIPTS', join(prefix, 'Scripts')) - get_default('LIBRARY_PREFIX', library_prefix) - get_default('LIBRARY_BIN', join(library_prefix, 'bin')) - get_default('LIBRARY_INC', join(library_prefix, 'include')) - get_default('LIBRARY_LIB', join(library_prefix, 'lib')) - get_default('CYGWIN_PREFIX', ''.join(('/cygdrive/', drive.lower(), tail.replace('\\', '/')))) + win_arch = "i386" if str(m.config.host_arch) == "32" else "amd64" + win_msvc = "19.0.0" + library_prefix = join(prefix, "Library") + drive, tail = m.config.host_prefix.split(":") + get_default("SCRIPTS", join(prefix, "Scripts")) + get_default("LIBRARY_PREFIX", library_prefix) + get_default("LIBRARY_BIN", join(library_prefix, "bin")) + get_default("LIBRARY_INC", join(library_prefix, "include")) + get_default("LIBRARY_LIB", join(library_prefix, "lib")) + get_default( + "CYGWIN_PREFIX", "".join(("/cygdrive/", drive.lower(), tail.replace("\\", "/"))) + ) # see https://en.wikipedia.org/wiki/Environment_variable#Default_values - get_default('ALLUSERSPROFILE') - get_default('APPDATA') - get_default('CommonProgramFiles') - get_default('CommonProgramFiles(x86)') - get_default('CommonProgramW6432') - get_default('COMPUTERNAME') - get_default('ComSpec') - get_default('HOMEDRIVE') - get_default('HOMEPATH') - get_default('LOCALAPPDATA') - get_default('LOGONSERVER') - get_default('NUMBER_OF_PROCESSORS') - get_default('PATHEXT') - get_default('ProgramData') - get_default('ProgramFiles') - get_default('ProgramFiles(x86)') - get_default('ProgramW6432') - get_default('PROMPT') - get_default('PSModulePath') - get_default('PUBLIC') - get_default('SystemDrive') - get_default('SystemRoot') - get_default('TEMP') - get_default('TMP') - get_default('USERDOMAIN') - get_default('USERNAME') - get_default('USERPROFILE') - get_default('windir') + get_default("ALLUSERSPROFILE") + get_default("APPDATA") + get_default("CommonProgramFiles") + get_default("CommonProgramFiles(x86)") + get_default("CommonProgramW6432") + get_default("COMPUTERNAME") + get_default("ComSpec") + get_default("HOMEDRIVE") + get_default("HOMEPATH") + get_default("LOCALAPPDATA") + get_default("LOGONSERVER") + get_default("NUMBER_OF_PROCESSORS") + get_default("PATHEXT") + get_default("ProgramData") + get_default("ProgramFiles") + get_default("ProgramFiles(x86)") + get_default("ProgramW6432") + get_default("PROMPT") + get_default("PSModulePath") + get_default("PUBLIC") + get_default("SystemDrive") + get_default("SystemRoot") + get_default("TEMP") + get_default("TMP") + get_default("USERDOMAIN") + get_default("USERNAME") + get_default("USERPROFILE") + 
get_default("windir") # CPU data, see https://github.com/conda/conda-build/issues/2064 - get_default('PROCESSOR_ARCHITEW6432') - get_default('PROCESSOR_ARCHITECTURE') - get_default('PROCESSOR_IDENTIFIER') - get_default('BUILD', win_arch + '-pc-windows-' + win_msvc) + get_default("PROCESSOR_ARCHITEW6432") + get_default("PROCESSOR_ARCHITECTURE") + get_default("PROCESSOR_IDENTIFIER") + get_default("BUILD", win_arch + "-pc-windows-" + win_msvc) for k in os.environ.keys(): - if re.match('VS[0-9]{2,3}COMNTOOLS', k): + if re.match("VS[0-9]{2,3}COMNTOOLS", k): get_default(k) - elif re.match('VS[0-9]{4}INSTALLDIR', k): + elif re.match("VS[0-9]{4}INSTALLDIR", k): get_default(k) def unix_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - get_default('HOME', 'UNKNOWN') - get_default('PKG_CONFIG_PATH', join(prefix, 'lib', 'pkgconfig')) - get_default('CMAKE_GENERATOR', 'Unix Makefiles') - get_default('SSL_CERT_FILE') + get_default("HOME", "UNKNOWN") + get_default("PKG_CONFIG_PATH", join(prefix, "lib", "pkgconfig")) + get_default("CMAKE_GENERATOR", "Unix Makefiles") + get_default("SSL_CERT_FILE") def osx_vars(m, get_default, prefix): """This is setting variables on a dict that is part of the get_default function""" - if str(m.config.host_arch) == '32': - OSX_ARCH = 'i386' + if str(m.config.host_arch) == "32": + OSX_ARCH = "i386" MACOSX_DEPLOYMENT_TARGET = 10.9 - elif str(m.config.host_arch) == 'arm64': - OSX_ARCH = 'arm64' + elif str(m.config.host_arch) == "arm64": + OSX_ARCH = "arm64" MACOSX_DEPLOYMENT_TARGET = 11.0 else: - OSX_ARCH = 'x86_64' + OSX_ARCH = "x86_64" MACOSX_DEPLOYMENT_TARGET = 10.9 - if str(m.config.arch) == '32': - BUILD = 'i386-apple-darwin13.4.0' - elif str(m.config.arch) == 'arm64': - BUILD = 'arm64-apple-darwin20.0.0' + if str(m.config.arch) == "32": + BUILD = "i386-apple-darwin13.4.0" + elif str(m.config.arch) == "arm64": + BUILD = "arm64-apple-darwin20.0.0" else: - BUILD = 'x86_64-apple-darwin13.4.0' + BUILD = "x86_64-apple-darwin13.4.0" # 10.7 install_name_tool -delete_rpath causes broken dylibs, I will revisit this ASAP. # rpath = ' -Wl,-rpath,%(PREFIX)s/lib' % d # SIP workaround, DYLD_* no longer works. # d['LDFLAGS'] = ldflags + rpath + ' -arch %(OSX_ARCH)s' % d - get_default('OSX_ARCH', OSX_ARCH) - get_default('MACOSX_DEPLOYMENT_TARGET', MACOSX_DEPLOYMENT_TARGET) - get_default('BUILD', BUILD) + get_default("OSX_ARCH", OSX_ARCH) + get_default("MACOSX_DEPLOYMENT_TARGET", MACOSX_DEPLOYMENT_TARGET) + get_default("BUILD", BUILD) @lru_cache(maxsize=None) @@ -618,32 +691,35 @@ def linux_vars(m, get_default, prefix): build_arch = platform_machine # Python reports x86_64 when running a i686 Python binary on a 64-bit CPU # unless run through linux32. Issue a warning when we detect this. - if build_arch == 'x86_64' and platform_architecture[0] == '32bit': + if build_arch == "x86_64" and platform_architecture[0] == "32bit": print("Warning: You are running 32-bit Python on a 64-bit linux installation") print(" but have not launched it via linux32. Various qeuries *will*") print(" give unexpected results (uname -m, platform.machine() etc)") - build_arch = 'i686' + build_arch = "i686" # the GNU triplet is powerpc, not ppc. This matters. 
- if build_arch.startswith('ppc'): - build_arch = build_arch.replace('ppc', 'powerpc') - if build_arch.startswith('powerpc') or build_arch.startswith('aarch64') \ - or build_arch.startswith('s390x'): - build_distro = 'cos7' + if build_arch.startswith("ppc"): + build_arch = build_arch.replace("ppc", "powerpc") + if ( + build_arch.startswith("powerpc") + or build_arch.startswith("aarch64") + or build_arch.startswith("s390x") + ): + build_distro = "cos7" else: - build_distro = 'cos6' + build_distro = "cos6" # There is also QEMU_SET_ENV, but that needs to be # filtered so it only contains the result of `linux_vars` # which, before this change was empty, and after it only # contains other QEMU env vars. - get_default('CFLAGS') - get_default('CXXFLAGS') - get_default('LDFLAGS') - get_default('QEMU_LD_PREFIX') - get_default('QEMU_UNAME') - get_default('DEJAGNU') - get_default('DISPLAY') - get_default('LD_RUN_PATH', prefix + '/lib') - get_default('BUILD', build_arch + '-conda_' + build_distro + '-linux-gnu') + get_default("CFLAGS") + get_default("CXXFLAGS") + get_default("LDFLAGS") + get_default("QEMU_LD_PREFIX") + get_default("QEMU_UNAME") + get_default("DEJAGNU") + get_default("DISPLAY") + get_default("LD_RUN_PATH", prefix + "/lib") + get_default("BUILD", build_arch + "-conda_" + build_distro + "-linux-gnu") def set_from_os_or_variant(out_dict, key, variant, default): @@ -666,19 +742,21 @@ def system_vars(env_dict, m, prefix): def os_vars(m, prefix): d = dict() # note the dictionary is passed in here - variables are set in that dict if they are non-null - get_default = lambda key, default='': set_from_os_or_variant(d, key, m.config.variant, default) + get_default = lambda key, default="": set_from_os_or_variant( + d, key, m.config.variant, default + ) - get_default('CPU_COUNT', get_cpu_count()) - get_default('LANG') - get_default('LC_ALL') - get_default('MAKEFLAGS') - d['SHLIB_EXT'] = get_shlib_ext(m.config.host_platform) - d['PATH'] = os.environ.copy()['PATH'] + get_default("CPU_COUNT", get_cpu_count()) + get_default("LANG") + get_default("LC_ALL") + get_default("MAKEFLAGS") + d["SHLIB_EXT"] = get_shlib_ext(m.config.host_platform) + d["PATH"] = os.environ.copy()["PATH"] if not m.config.activate: d = prepend_bin_path(d, m.config.host_prefix) - if sys.platform == 'win32': + if sys.platform == "win32": windows_vars(m, get_default, prefix) else: unix_vars(m, get_default, prefix) @@ -711,7 +789,7 @@ def _load_all_json(path): root, _, files = next(utils.walk(path)) result = {} for f in files: - if f.endswith('.json'): + if f.endswith(".json"): result[f] = _load_json(join(root, f)) return result @@ -725,11 +803,11 @@ def __init__(self, path): created. 
""" self.path = path - self._meta = join(path, 'conda-meta') + self._meta = join(path, "conda-meta") if os.path.isdir(path) and os.path.isdir(self._meta): self._packages = {} else: - raise InvalidEnvironment(f'Unable to load environment {path}') + raise InvalidEnvironment(f"Unable to load environment {path}") def _read_package_json(self): if not self._packages: @@ -743,8 +821,8 @@ def package_specs(self): json_objs = self._packages.values() specs = [] for i in json_objs: - p, v, b = i['name'], i['version'], i['build'] - specs.append(f'{p} {v} {b}') + p, v, b = i["name"], i["version"], i["build"] + specs.append(f"{p} {v} {b}") return specs @@ -752,10 +830,22 @@ def package_specs(self): last_index_ts = 0 -def get_install_actions(prefix, specs, env, retries=0, subdir=None, - verbose=True, debug=False, locking=True, - bldpkgs_dirs=None, timeout=900, disable_pip=False, - max_env_retry=3, output_folder=None, channel_urls=None): +def get_install_actions( + prefix, + specs, + env, + retries=0, + subdir=None, + verbose=True, + debug=False, + locking=True, + bldpkgs_dirs=None, + timeout=900, + disable_pip=False, + max_env_retry=3, + output_folder=None, + channel_urls=None, +): global cached_actions global last_index_ts actions = {} @@ -772,20 +862,34 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, capture = utils.capture for feature, value in feature_list: if value: - specs.append('%s@' % feature) + specs.append("%s@" % feature) bldpkgs_dirs = ensure_list(bldpkgs_dirs) - index, index_ts, _ = get_build_index(subdir, list(bldpkgs_dirs)[0], output_folder=output_folder, - channel_urls=channel_urls, debug=debug, verbose=verbose, - locking=locking, timeout=timeout) - specs = tuple(utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith('@')) + index, index_ts, _ = get_build_index( + subdir, + list(bldpkgs_dirs)[0], + output_folder=output_folder, + channel_urls=channel_urls, + debug=debug, + verbose=verbose, + locking=locking, + timeout=timeout, + ) + specs = tuple( + utils.ensure_valid_spec(spec) for spec in specs if not str(spec).endswith("@") + ) - if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions and - last_index_ts >= index_ts): + if ( + specs, + env, + subdir, + channel_urls, + disable_pip, + ) in cached_actions and last_index_ts >= index_ts: actions = cached_actions[(specs, env, subdir, channel_urls, disable_pip)].copy() if "PREFIX" in actions: - actions['PREFIX'] = prefix + actions["PREFIX"] = prefix elif specs: # this is hiding output like: # Fetching package metadata ........... @@ -796,60 +900,101 @@ def get_install_actions(prefix, specs, env, retries=0, subdir=None, actions = install_actions(prefix, index, specs, force=True) except (NoPackagesFoundError, UnsatisfiableError) as exc: raise DependencyNeedsBuildingError(exc, subdir=subdir) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, AssertionError, BuildLockError) as exc: - if 'lock' in str(exc): - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc) or - isinstance(exc, AssertionError)): - locks = utils.get_conda_operation_locks(locking, bldpkgs_dirs, timeout) + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + AssertionError, + BuildLockError, + ) as exc: + if "lock" in str(exc): + log.warn( + "failed to get install actions, retrying. 
exception was: %s", + str(exc), + ) + elif ( + "requires a minimum conda version" in str(exc) + or "link a source that does not" in str(exc) + or isinstance(exc, AssertionError) + ): + locks = utils.get_conda_operation_locks( + locking, bldpkgs_dirs, timeout + ) with utils.try_acquire_locks(locks, timeout=timeout): pkg_dir = str(exc) folder = 0 - while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: + while ( + os.path.dirname(pkg_dir) not in pkgs_dirs + and folder < 20 + ): pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retries < max_env_retry: - log.warn("failed to get install actions, retrying. exception was: %s", - str(exc)) - actions = get_install_actions(prefix, tuple(specs), env, - retries=retries + 1, - subdir=subdir, - verbose=verbose, - debug=debug, - locking=locking, - bldpkgs_dirs=tuple(bldpkgs_dirs), - timeout=timeout, - disable_pip=disable_pip, - max_env_retry=max_env_retry, - output_folder=output_folder, - channel_urls=tuple(channel_urls)) + log.warn( + "failed to get install actions, retrying. exception was: %s", + str(exc), + ) + actions = get_install_actions( + prefix, + tuple(specs), + env, + retries=retries + 1, + subdir=subdir, + verbose=verbose, + debug=debug, + locking=locking, + bldpkgs_dirs=tuple(bldpkgs_dirs), + timeout=timeout, + disable_pip=disable_pip, + max_env_retry=max_env_retry, + output_folder=output_folder, + channel_urls=tuple(channel_urls), + ) else: - log.error("Failed to get install actions, max retries exceeded.") + log.error( + "Failed to get install actions, max retries exceeded." + ) raise if disable_pip: - for pkg in ('pip', 'setuptools', 'wheel'): + for pkg in ("pip", "setuptools", "wheel"): # specs are the raw specifications, not the conda-derived actual specs # We're testing that pip etc. are manually specified - if not any(re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep)) for dep in specs): - actions['LINK'] = [spec for spec in actions['LINK'] if spec.name != pkg] + if not any( + re.match(r"^%s(?:$|[\s=].*)" % pkg, str(dep)) for dep in specs + ): + actions["LINK"] = [ + spec for spec in actions["LINK"] if spec.name != pkg + ] utils.trim_empty_keys(actions) cached_actions[(specs, env, subdir, channel_urls, disable_pip)] = actions.copy() last_index_ts = index_ts return actions -def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, retry=0, - locks=None, is_cross=False, is_conda=False): - ''' +def create_env( + prefix, + specs_or_actions, + env, + config, + subdir, + clear_cache=True, + retry=0, + locks=None, + is_cross=False, + is_conda=False, +): + """ Create a conda envrionment for the given prefix and specs. 
- ''' + """ if config.debug: external_logger_context = utils.LoggingContext(logging.DEBUG) else: @@ -874,85 +1019,141 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, try: with utils.try_acquire_locks(locks, timeout=config.timeout): # input is a list - it's specs in MatchSpec format - if not hasattr(specs_or_actions, 'keys'): + if not hasattr(specs_or_actions, "keys"): specs = list(set(specs_or_actions)) - actions = get_install_actions(prefix, tuple(specs), env, - subdir=subdir, - verbose=config.verbose, - debug=config.debug, - locking=config.locking, - bldpkgs_dirs=tuple(config.bldpkgs_dirs), - timeout=config.timeout, - disable_pip=config.disable_pip, - max_env_retry=config.max_env_retry, - output_folder=config.output_folder, - channel_urls=tuple(config.channel_urls)) + actions = get_install_actions( + prefix, + tuple(specs), + env, + subdir=subdir, + verbose=config.verbose, + debug=config.debug, + locking=config.locking, + bldpkgs_dirs=tuple(config.bldpkgs_dirs), + timeout=config.timeout, + disable_pip=config.disable_pip, + max_env_retry=config.max_env_retry, + output_folder=config.output_folder, + channel_urls=tuple(config.channel_urls), + ) else: actions = specs_or_actions - index, _, _ = get_build_index(subdir=subdir, - bldpkgs_dir=config.bldpkgs_dir, - output_folder=config.output_folder, - channel_urls=config.channel_urls, - debug=config.debug, - verbose=config.verbose, - locking=config.locking, - timeout=config.timeout) + index, _, _ = get_build_index( + subdir=subdir, + bldpkgs_dir=config.bldpkgs_dir, + output_folder=config.output_folder, + channel_urls=config.channel_urls, + debug=config.debug, + verbose=config.verbose, + locking=config.locking, + timeout=config.timeout, + ) utils.trim_empty_keys(actions) display_actions(actions, index) if utils.on_win: for k, v in os.environ.items(): os.environ[k] = str(v) - with env_var('CONDA_QUIET', not config.verbose, reset_context): - with env_var('CONDA_JSON', not config.verbose, reset_context): + with env_var("CONDA_QUIET", not config.verbose, reset_context): + with env_var("CONDA_JSON", not config.verbose, reset_context): execute_actions(actions, index) - except (SystemExit, PaddingError, LinkError, DependencyNeedsBuildingError, - CondaError, BuildLockError) as exc: - if (("too short in" in str(exc) or - re.search('post-link failed for: (?:[a-zA-Z]*::)?openssl', str(exc)) or - isinstance(exc, PaddingError)) and - config.prefix_length > 80): + except ( + SystemExit, + PaddingError, + LinkError, + DependencyNeedsBuildingError, + CondaError, + BuildLockError, + ) as exc: + if ( + "too short in" in str(exc) + or re.search( + "post-link failed for: (?:[a-zA-Z]*::)?openssl", str(exc) + ) + or isinstance(exc, PaddingError) + ) and config.prefix_length > 80: if config.prefix_length_fallback: - log.warn("Build prefix failed with prefix length %d", - config.prefix_length) + log.warn( + "Build prefix failed with prefix length %d", + config.prefix_length, + ) log.warn("Error was: ") log.warn(str(exc)) - log.warn("One or more of your package dependencies needs to be rebuilt " - "with a longer prefix length.") - log.warn("Falling back to legacy prefix length of 80 characters.") - log.warn("Your package will not install into prefixes > 80 characters.") + log.warn( + "One or more of your package dependencies needs to be rebuilt " + "with a longer prefix length." + ) + log.warn( + "Falling back to legacy prefix length of 80 characters." + ) + log.warn( + "Your package will not install into prefixes > 80 characters." 
+ ) config.prefix_length = 80 - host = '_h_env' in prefix + host = "_h_env" in prefix # Set this here and use to create environ # Setting this here is important because we use it below (symlink) prefix = config.host_prefix if host else config.build_prefix - actions['PREFIX'] = prefix - - create_env(prefix, actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, is_cross=is_cross) + actions["PREFIX"] = prefix + + create_env( + prefix, + actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + is_cross=is_cross, + ) else: raise - elif 'lock' in str(exc): + elif "lock" in str(exc): if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) - elif ('requires a minimum conda version' in str(exc) or - 'link a source that does not' in str(exc)): + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) + elif "requires a minimum conda version" in str( + exc + ) or "link a source that does not" in str(exc): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = str(exc) folder = 0 while os.path.dirname(pkg_dir) not in pkgs_dirs and folder < 20: pkg_dir = os.path.dirname(pkg_dir) folder += 1 - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. exception was: %s", + str(exc), + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -960,18 +1161,37 @@ def create_env(prefix, specs_or_actions, env, config, subdir, clear_cache=True, raise # HACK: some of the time, conda screws up somehow and incomplete packages result. # Just retry. - except (AssertionError, OSError, ValueError, RuntimeError, LockError) as exc: + except ( + AssertionError, + OSError, + ValueError, + RuntimeError, + LockError, + ) as exc: if isinstance(exc, AssertionError): with utils.try_acquire_locks(locks, timeout=config.timeout): pkg_dir = os.path.dirname(os.path.dirname(str(exc))) - log.warn("I think conda ended up with a partial extraction for %s. " - "Removing the folder and retrying", pkg_dir) + log.warn( + "I think conda ended up with a partial extraction for %s. " + "Removing the folder and retrying", + pkg_dir, + ) if os.path.isdir(pkg_dir): utils.rm_rf(pkg_dir) if retry < config.max_env_retry: - log.warn("failed to create env, retrying. exception was: %s", str(exc)) - create_env(prefix, specs_or_actions, config=config, subdir=subdir, env=env, - clear_cache=clear_cache, retry=retry + 1, is_cross=is_cross) + log.warn( + "failed to create env, retrying. 
exception was: %s", str(exc) + ) + create_env( + prefix, + specs_or_actions, + config=config, + subdir=subdir, + env=env, + clear_cache=clear_cache, + retry=retry + 1, + is_cross=is_cross, + ) else: log.error("Failed to create env, max retries exceeded.") raise @@ -989,7 +1209,7 @@ def remove_existing_packages(dirs, fns, config): for fn in fns: all_files = [fn] if not os.path.isabs(fn): - all_files = glob(os.path.join(folder, fn + '*')) + all_files = glob(os.path.join(folder, fn + "*")) for entry in all_files: utils.rm_rf(entry) @@ -1005,8 +1225,8 @@ def clean_pkg_cache(dist, config): locks = get_pkg_dirs_locks([config.bldpkgs_dir] + pkgs_dirs, config) with utils.try_acquire_locks(locks, timeout=config.timeout): rmplan = [ - 'RM_EXTRACTED {0} local::{0}'.format(dist), - 'RM_FETCHED {0} local::{0}'.format(dist), + "RM_EXTRACTED {0} local::{0}".format(dist), + "RM_FETCHED {0} local::{0}".format(dist), ] execute_plan(rmplan) @@ -1014,12 +1234,18 @@ def clean_pkg_cache(dist, config): # Conda's cleanup is still necessary - it keeps track of its own in-memory # list of downloaded things. for folder in pkgs_dirs: - if (os.path.exists(os.path.join(folder, dist)) or - os.path.exists(os.path.join(folder, dist + '.tar.bz2')) or - any(pkg_id in package_cache() for pkg_id in [dist, 'local::' + dist])): + if ( + os.path.exists(os.path.join(folder, dist)) + or os.path.exists(os.path.join(folder, dist + ".tar.bz2")) + or any( + pkg_id in package_cache() for pkg_id in [dist, "local::" + dist] + ) + ): log = utils.get_logger(__name__) - log.debug("Conda caching error: %s package remains in cache after removal", - dist) + log.debug( + "Conda caching error: %s package remains in cache after removal", + dist, + ) log.debug("manually removing to compensate") cache = package_cache() keys = [key for key in cache.keys() if dist in key] @@ -1033,18 +1259,23 @@ def clean_pkg_cache(dist, config): def get_pinned_deps(m, section): - with TemporaryDirectory(prefix='_') as tmpdir: - actions = get_install_actions(tmpdir, - tuple(m.ms_depends(section)), section, - subdir=m.config.target_subdir, - debug=m.config.debug, - verbose=m.config.verbose, - locking=m.config.locking, - bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), - timeout=m.config.timeout, - disable_pip=m.config.disable_pip, - max_env_retry=m.config.max_env_retry, - output_folder=m.config.output_folder, - channel_urls=tuple(m.config.channel_urls)) - runtime_deps = [' '.join(link.dist_name.rsplit('-', 2)) for link in actions.get('LINK', [])] + with TemporaryDirectory(prefix="_") as tmpdir: + actions = get_install_actions( + tmpdir, + tuple(m.ms_depends(section)), + section, + subdir=m.config.target_subdir, + debug=m.config.debug, + verbose=m.config.verbose, + locking=m.config.locking, + bldpkgs_dirs=tuple(m.config.bldpkgs_dirs), + timeout=m.config.timeout, + disable_pip=m.config.disable_pip, + max_env_retry=m.config.max_env_retry, + output_folder=m.config.output_folder, + channel_urls=tuple(m.config.channel_urls), + ) + runtime_deps = [ + " ".join(link.dist_name.rsplit("-", 2)) for link in actions.get("LINK", []) + ] return runtime_deps diff --git a/conda_build/utils.py b/conda_build/utils.py index ded2ac728a..5c8affae92 100644 --- a/conda_build/utils.py +++ b/conda_build/utils.py @@ -13,8 +13,18 @@ import logging.config import mmap import os -from os.path import (dirname, getmtime, getsize, isdir, join, isfile, abspath, islink, - expanduser, expandvars) +from os.path import ( + dirname, + getmtime, + getsize, + isdir, + join, + isfile, + abspath, + islink, + 
expanduser, + expandvars, +) import re import stat import subprocess @@ -37,26 +47,44 @@ import conda_package_handling.api try: - from conda.base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 + from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + CONDA_PACKAGE_EXTENSION_V1, + CONDA_PACKAGE_EXTENSION_V2, + ) except Exception: - from conda.base.constants import CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1 + from conda.base.constants import ( + CONDA_TARBALL_EXTENSION as CONDA_PACKAGE_EXTENSION_V1, + ) + CONDA_PACKAGE_EXTENSION_V2 = ".conda" CONDA_PACKAGE_EXTENSIONS = (CONDA_PACKAGE_EXTENSION_V2, CONDA_PACKAGE_EXTENSION_V1) -from conda.api import PackageCacheData # noqa +from conda.api import PackageCacheData # noqa + +from .conda_interface import ( + hashsum_file, + md5_file, + unix_path_to_win, + win_path_to_unix, +) # noqa +from .conda_interface import root_dir, pkgs_dirs # noqa +from .conda_interface import StringIO # noqa +from .conda_interface import VersionOrder, MatchSpec # noqa +from .conda_interface import cc_conda_build # noqa +from .conda_interface import Dist # noqa +from .conda_interface import context # noqa +from .conda_interface import ( + download, + TemporaryDirectory, + get_conda_channel, + CondaHTTPError, +) # noqa -from .conda_interface import hashsum_file, md5_file, unix_path_to_win, win_path_to_unix # noqa -from .conda_interface import root_dir, pkgs_dirs # noqa -from .conda_interface import StringIO # noqa -from .conda_interface import VersionOrder, MatchSpec # noqa -from .conda_interface import cc_conda_build # noqa -from .conda_interface import Dist # noqa -from .conda_interface import context # noqa -from .conda_interface import download, TemporaryDirectory, get_conda_channel, CondaHTTPError # noqa # NOQA because it is not used in this file. -from conda_build.conda_interface import rm_rf as _rm_rf # noqa -from conda_build.exceptions import BuildLockError # noqa -from conda_build.os_utils import external # noqa +from conda_build.conda_interface import rm_rf as _rm_rf # noqa +from conda_build.exceptions import BuildLockError # noqa +from conda_build.os_utils import external # noqa import urllib.parse as urlparse import urllib.request as urllib @@ -71,14 +99,15 @@ def glob(pathname, recursive=True): # NOQA because it is not used in this file. 
from contextlib import ExitStack # NOQA + PermissionError = PermissionError # NOQA FileNotFoundError = FileNotFoundError -on_win = (sys.platform == 'win32') +on_win = sys.platform == "win32" -codec = getpreferredencoding() or 'utf-8' +codec = getpreferredencoding() or "utf-8" on_win = sys.platform == "win32" -root_script_dir = os.path.join(root_dir, 'Scripts' if on_win else 'bin') +root_script_dir = os.path.join(root_dir, "Scripts" if on_win else "bin") mmap_MAP_PRIVATE = 0 if on_win else mmap.MAP_PRIVATE mmap_PROT_READ = 0 if on_win else mmap.PROT_READ mmap_PROT_WRITE = 0 if on_win else mmap.PROT_WRITE @@ -156,19 +185,20 @@ def directory_size_slow(path): def directory_size(path): - ''' - ''' + """ """ try: if on_win: command = 'dir /s "{}"' # Windows path can have spaces out = subprocess.check_output(command.format(path), shell=True) else: command = "du -s {}" - out = subprocess.check_output(command.format(path).split(), stderr=subprocess.PIPE) + out = subprocess.check_output( + command.format(path).split(), stderr=subprocess.PIPE + ) - if hasattr(out, 'decode'): + if hasattr(out, "decode"): try: - out = out.decode(errors='ignore') + out = out.decode(errors="ignore") # This isn't important anyway so give up. Don't try search on bytes. except (UnicodeDecodeError, IndexError): if on_win: @@ -177,10 +207,10 @@ def directory_size(path): pass if on_win: # Windows can give long output, we need only 2nd to last line - out = out.strip().rsplit('\r\n', 2)[-2] + out = out.strip().rsplit("\r\n", 2)[-2] pattern = r"\s([\d\W]+).+" # Language and punctuation neutral out = re.search(pattern, out.strip()).group(1).strip() - out = out.replace(',', '').replace('.', '').replace(' ', '') + out = out.replace(",", "").replace(".", "").replace(" ", "") else: out = out.split()[0] except subprocess.CalledProcessError: @@ -215,11 +245,11 @@ def _setup_rewrite_pipe(env): replacements[v] = k r_fd, w_fd = os.pipe() - r = os.fdopen(r_fd, 'rt') - if sys.platform == 'win32': - replacement_t = '%{}%' + r = os.fdopen(r_fd, "rt") + if sys.platform == "win32": + replacement_t = "%{}%" else: - replacement_t = '${}' + replacement_t = "${}" def rewriter(): while True: @@ -236,7 +266,7 @@ def rewriter(): except UnicodeDecodeError: try: txt = os.read(r, 10000) - sys.stdout.write(txt or '') + sys.stdout.write(txt or "") except TypeError: pass @@ -264,19 +294,26 @@ def __init__(self, *args, **kwargs): def _execute(self, *args, **kwargs): try: import psutil - psutil_exceptions = psutil.NoSuchProcess, psutil.AccessDenied, psutil.NoSuchProcess + + psutil_exceptions = ( + psutil.NoSuchProcess, + psutil.AccessDenied, + psutil.NoSuchProcess, + ) except ImportError as e: psutil = None psutil_exceptions = (OSError, ValueError) log = get_logger(__name__) log.warn(f"psutil import failed. Error was {e}") - log.warn("only disk usage and time statistics will be available. Install psutil to " - "get CPU time and memory usage statistics.") + log.warn( + "only disk usage and time statistics will be available. Install psutil to " + "get CPU time and memory usage statistics." 
+ ) # The polling interval (in seconds) - time_int = kwargs.pop('time_int', 2) + time_int = kwargs.pop("time_int", 2) - disk_usage_dir = kwargs.get('cwd', sys.prefix) + disk_usage_dir = kwargs.get("cwd", sys.prefix) # Create a process of this (the parent) process parent = psutil.Process(os.getpid()) if psutil else DummyPsutilProcess() @@ -285,7 +322,11 @@ def _execute(self, *args, **kwargs): # Using the convenience Popen class provided by psutil start_time = time.time() - _popen = psutil.Popen(*args, **kwargs) if psutil else subprocess.Popen(*args, **kwargs) + _popen = ( + psutil.Popen(*args, **kwargs) + if psutil + else subprocess.Popen(*args, **kwargs) + ) try: while self.returncode is None: # We need to get all of the children of our process since our @@ -306,8 +347,8 @@ def _execute(self, *args, **kwargs): # we are instead looping over children and getting each individually. # https://psutil.readthedocs.io/en/latest/#psutil.Process.cpu_times cpu_stats = child.cpu_times() - child_cpu_usage['sys'] = cpu_stats.system - child_cpu_usage['user'] = cpu_stats.user + child_cpu_usage["sys"] = cpu_stats.system + child_cpu_usage["user"] = cpu_stats.user cpu_usage[child.pid] = child_cpu_usage except psutil_exceptions: # process already died. Just ignore it. @@ -317,8 +358,8 @@ def _execute(self, *args, **kwargs): # Sum the memory usage of all the children together (2D columnwise sum) self.rss = max(rss, self.rss) self.vms = max(vms, self.vms) - self.cpu_sys = sum(child['sys'] for child in cpu_usage.values()) - self.cpu_user = sum(child['user'] for child in cpu_usage.values()) + self.cpu_sys = sum(child["sys"] for child in cpu_usage.values()) + self.cpu_user = sum(child["user"] for child in cpu_usage.values()) self.processes = max(processes, self.processes) # Get disk usage @@ -337,74 +378,83 @@ def _execute(self, *args, **kwargs): return _popen.stdout, _popen.stderr def __repr__(self): - return str({'elapsed': self.elapsed, - 'rss': self.rss, - 'vms': self.vms, - 'disk': self.disk, - 'processes': self.processes, - 'cpu_user': self.cpu_user, - 'cpu_sys': self.cpu_sys, - 'returncode': self.returncode}) + return str( + { + "elapsed": self.elapsed, + "rss": self.rss, + "vms": self.vms, + "disk": self.disk, + "processes": self.processes, + "cpu_user": self.cpu_user, + "cpu_sys": self.cpu_sys, + "returncode": self.returncode, + } + ) def _func_defaulting_env_to_os_environ(func, *popenargs, **kwargs): - if 'env' not in kwargs: + if "env" not in kwargs: kwargs = kwargs.copy() env_copy = os.environ.copy() - kwargs.update({'env': env_copy}) - kwargs['env'] = {str(key): str(value) for key, value in kwargs['env'].items()} + kwargs.update({"env": env_copy}) + kwargs["env"] = {str(key): str(value) for key, value in kwargs["env"].items()} _args = [] - if 'stdin' not in kwargs: - kwargs['stdin'] = subprocess.PIPE + if "stdin" not in kwargs: + kwargs["stdin"] = subprocess.PIPE for arg in popenargs: # arguments to subprocess need to be bytestrings - if sys.version_info.major < 3 and hasattr(arg, 'encode'): + if sys.version_info.major < 3 and hasattr(arg, "encode"): arg = arg.encode(codec) - elif sys.version_info.major >= 3 and hasattr(arg, 'decode'): + elif sys.version_info.major >= 3 and hasattr(arg, "decode"): arg = arg.decode(codec) _args.append(str(arg)) - stats = kwargs.get('stats') - if 'stats' in kwargs: - del kwargs['stats'] + stats = kwargs.get("stats") + if "stats" in kwargs: + del kwargs["stats"] - rewrite_stdout_env = kwargs.pop('rewrite_stdout_env', None) + rewrite_stdout_env = 
kwargs.pop("rewrite_stdout_env", None) if rewrite_stdout_env: - kwargs['stdout'] = _setup_rewrite_pipe(rewrite_stdout_env) + kwargs["stdout"] = _setup_rewrite_pipe(rewrite_stdout_env) out = None if stats is not None: proc = PopenWrapper(_args, **kwargs) - if func == 'output': + if func == "output": out = proc.out.read() if proc.returncode != 0: raise subprocess.CalledProcessError(proc.returncode, _args) - stats.update({'elapsed': proc.elapsed, - 'disk': proc.disk, - 'processes': proc.processes, - 'cpu_user': proc.cpu_user, - 'cpu_sys': proc.cpu_sys, - 'rss': proc.rss, - 'vms': proc.vms}) + stats.update( + { + "elapsed": proc.elapsed, + "disk": proc.disk, + "processes": proc.processes, + "cpu_user": proc.cpu_user, + "cpu_sys": proc.cpu_sys, + "rss": proc.rss, + "vms": proc.vms, + } + ) else: - if func == 'call': + if func == "call": subprocess.check_call(_args, **kwargs) else: - if 'stdout' in kwargs: - del kwargs['stdout'] + if "stdout" in kwargs: + del kwargs["stdout"] out = subprocess.check_output(_args, **kwargs) return out def check_call_env(popenargs, **kwargs): - return _func_defaulting_env_to_os_environ('call', *popenargs, **kwargs) + return _func_defaulting_env_to_os_environ("call", *popenargs, **kwargs) def check_output_env(popenargs, **kwargs): - return _func_defaulting_env_to_os_environ('output', stdout=subprocess.PIPE, - *popenargs, **kwargs).rstrip() + return _func_defaulting_env_to_os_environ( + "output", stdout=subprocess.PIPE, *popenargs, **kwargs + ).rstrip() def bytes2human(n): @@ -413,14 +463,14 @@ def bytes2human(n): # '9.8K' # >>> bytes2human(100001221) # '95.4M' - symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y') + symbols = ("K", "M", "G", "T", "P", "E", "Z", "Y") prefix = {} for i, s in enumerate(symbols): prefix[s] = 1 << (i + 1) * 10 for s in reversed(symbols): if n >= prefix[s]: value = float(n) / prefix[s] - return f'{value:.1f}{s}' + return f"{value:.1f}{s}" return "%sB" % n @@ -438,17 +488,20 @@ def get_recipe_abspath(recipe): and needs cleanup. """ if isfile(recipe): - if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): + if recipe.lower().endswith(decompressible_exts) or recipe.lower().endswith( + CONDA_PACKAGE_EXTENSIONS + ): recipe_dir = tempfile.mkdtemp() if recipe.lower().endswith(CONDA_PACKAGE_EXTENSIONS): import conda_package_handling.api + conda_package_handling.api.extract(recipe, recipe_dir) else: tar_xf(recipe, recipe_dir) # At some stage the old build system started to tar up recipes. - recipe_tarfile = os.path.join(recipe_dir, 'info', 'recipe.tar') + recipe_tarfile = os.path.join(recipe_dir, "info", "recipe.tar") if isfile(recipe_tarfile): - tar_xf(recipe_tarfile, os.path.join(recipe_dir, 'info')) + tar_xf(recipe_tarfile, os.path.join(recipe_dir, "info")) need_cleanup = True else: print("Ignoring non-recipe: %s" % recipe) @@ -471,7 +524,7 @@ def try_acquire_locks(locks, timeout): http://stackoverflow.com/questions/9814008/multiple-mutex-locking-strategies-and-why-libraries-dont-use-address-comparison """ t = time.time() - while (time.time() - t < timeout): + while time.time() - t < timeout: # Continuously try to acquire all locks. # By passing a short timeout to each individual lock, we give other # processes that might be trying to acquire the same locks (and may @@ -495,7 +548,7 @@ def try_acquire_locks(locks, timeout): # If we reach this point, we weren't able to acquire all locks within # the specified timeout. 
We shouldn't be holding any locks anymore at # this point, so we just raise an exception. - raise BuildLockError('Failed to acquire all locks') + raise BuildLockError("Failed to acquire all locks") try: yield @@ -518,8 +571,12 @@ def _copy_with_shell_fallback(src, dst): continue if not is_copied: try: - subprocess.check_call(f'cp -a {src} {dst}', shell=True, - stderr=subprocess.PIPE, stdout=subprocess.PIPE) + subprocess.check_call( + f"cp -a {src} {dst}", + shell=True, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) except subprocess.CalledProcessError as e: if not os.path.isfile(dst): raise OSError(f"Failed to copy {src} to {dst}. Error was: {e}") @@ -534,7 +591,9 @@ def get_prefix_replacement_paths(src, dst): return os.path.join(*ssplit), os.path.join(*dsplit) -def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False): +def copy_into( + src, dst, timeout=900, symlinks=False, lock=None, locking=True, clobber=False +): """Copy all the files and directories in src to the directory dst""" log = get_logger(__name__) if symlinks and islink(src): @@ -555,7 +614,15 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl except: pass # lchmod not available elif isdir(src): - merge_tree(src, dst, symlinks, timeout=timeout, lock=lock, locking=locking, clobber=clobber) + merge_tree( + src, + dst, + symlinks, + timeout=timeout, + lock=lock, + locking=locking, + clobber=clobber, + ) else: if isdir(dst): @@ -574,7 +641,7 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl src_folder = os.getcwd() if os.path.islink(src) and not os.path.exists(os.path.realpath(src)): - log.warn('path %s is a broken symlink - ignoring copy', src) + log.warn("path %s is a broken symlink - ignoring copy", src) return if not lock and locking: @@ -591,8 +658,9 @@ def copy_into(src, dst, timeout=900, symlinks=False, lock=None, locking=True, cl try: _copy_with_shell_fallback(src, dst_fn) except shutil.Error: - log.debug("skipping %s - already exists in %s", - os.path.basename(src), dst) + log.debug( + "skipping %s - already exists in %s", os.path.basename(src), dst + ) def move_with_fallback(src, dst): @@ -604,7 +672,9 @@ def move_with_fallback(src, dst): os.unlink(src) except PermissionError: log = get_logger(__name__) - log.debug(f"Failed to copy/remove path from {src} to {dst} due to permission error") + log.debug( + f"Failed to copy/remove path from {src} to {dst} due to permission error" + ) # http://stackoverflow.com/a/22331852/1170370 @@ -618,8 +688,8 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): lst = [x for x in lst if x not in excl] # do not copy lock files - if '.conda_lock' in lst: - lst.remove('.conda_lock') + if ".conda_lock" in lst: + lst.remove(".conda_lock") dst_lst = [os.path.join(dst, item) for item in lst] @@ -645,7 +715,9 @@ def copytree(src, dst, symlinks=False, ignore=None, dry_run=False): return dst_lst -def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False): +def merge_tree( + src, dst, symlinks=False, timeout=900, lock=None, locking=True, clobber=False +): """ Merge src into dst recursively by copying all files from src into dst. Return a list of all files copied. 
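
For readers skimming between these hunks: the helpers above compose in a fixed pattern. try_acquire_locks() is a context manager that either acquires every lock in the list within the timeout or raises BuildLockError after releasing whatever it did manage to grab, and copy_into() then performs the actual file or tree copy. A minimal usage sketch, not part of this diff: the paths and timeout are illustrative, get_lock() is the per-folder lock factory defined further down in this same module, and BuildLockError is assumed to be importable from conda_build.exceptions.

    from conda_build.exceptions import BuildLockError  # assumed import path
    from conda_build.utils import copy_into, get_lock, try_acquire_locks

    # One lock per on-disk location we are about to touch.
    locks = [get_lock("/tmp/some-build-folder", timeout=90)]
    try:
        with try_acquire_locks(locks, timeout=90):
            # We already hold the locks, so tell copy_into not to re-lock.
            copy_into("/tmp/src", "/tmp/dst", locking=False)
    except BuildLockError:
        print("could not acquire all locks within the timeout")

Passing locking=False while holding the locks yourself avoids the double-acquire that would otherwise occur, since copy_into() takes its own locks by default.
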
@@ -655,17 +727,20 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c """ dst = os.path.normpath(os.path.normcase(dst)) src = os.path.normpath(os.path.normcase(src)) - assert not dst.startswith(src), ("Can't merge/copy source into subdirectory of itself. " - "Please create separate spaces for these things.\n" - " src: {}\n" - " dst: {}".format(src, dst)) + assert not dst.startswith(src), ( + "Can't merge/copy source into subdirectory of itself. " + "Please create separate spaces for these things.\n" + " src: {}\n" + " dst: {}".format(src, dst) + ) new_files = copytree(src, dst, symlinks=symlinks, dry_run=True) existing = [f for f in new_files if isfile(f)] if existing and not clobber: - raise OSError("Can't merge {} into {}: file exists: " - "{}".format(src, dst, existing[0])) + raise OSError( + "Can't merge {} into {}: file exists: " "{}".format(src, dst, existing[0]) + ) locks = [] if locking: @@ -679,8 +754,10 @@ def merge_tree(src, dst, symlinks=False, timeout=900, lock=None, locking=True, c # purpose here is that we want *one* lock per location on disk. It can be locked or unlocked # at any time, but the lock within this process should all be tied to the same tracking # mechanism. -_lock_folders = (os.path.join(root_dir, 'locks'), - os.path.expanduser(os.path.join('~', '.conda_build_locks'))) +_lock_folders = ( + os.path.join(root_dir, "locks"), + os.path.expanduser(os.path.join("~", ".conda_build_locks")), +) def get_lock(folder, timeout=900): @@ -690,28 +767,30 @@ def get_lock(folder, timeout=900): except OSError: location = folder b_location = location - if hasattr(b_location, 'encode'): + if hasattr(b_location, "encode"): b_location = b_location.encode() # Hash the entire filename to avoid collisions. lock_filename = hashlib.sha256(b_location).hexdigest() - if hasattr(lock_filename, 'decode'): + if hasattr(lock_filename, "decode"): lock_filename = lock_filename.decode() for locks_dir in _lock_folders: try: if not os.path.isdir(locks_dir): os.makedirs(locks_dir) lock_file = os.path.join(locks_dir, lock_filename) - with open(lock_file, 'w') as f: + with open(lock_file, "w") as f: f.write("") fl = filelock.FileLock(lock_file, timeout) break except OSError: continue else: - raise RuntimeError("Could not write locks folder to either system location ({})" - "or user location ({}). Aborting.".format(*_lock_folders)) + raise RuntimeError( + "Could not write locks folder to either system location ({})" + "or user location ({}). 
Aborting.".format(*_lock_folders) + ) return fl @@ -728,48 +807,63 @@ def get_conda_operation_locks(locking=True, bldpkgs_dirs=None, timeout=900): lock = get_lock(folder, timeout=timeout) locks.append(lock) # lock used to generally indicate a conda operation occurring - locks.append(get_lock('conda-operation', timeout=timeout)) + locks.append(get_lock("conda-operation", timeout=timeout)) return locks -def relative(f, d='lib'): - assert not f.startswith('/'), f - assert not d.startswith('/'), d - d = d.strip('/').split('/') - if d == ['.']: +def relative(f, d="lib"): + assert not f.startswith("/"), f + assert not d.startswith("/"), d + d = d.strip("/").split("/") + if d == ["."]: d = [] - f = dirname(f).split('/') - if f == ['']: + f = dirname(f).split("/") + if f == [""]: f = [] while d and f and d[0] == f[0]: d.pop(0) f.pop(0) - return '/'.join(((['..'] * len(f)) if f else ['.']) + d) + return "/".join((([".."] * len(f)) if f else ["."]) + d) # This is the lowest common denominator of the formats supported by our libarchive/python-libarchive-c # packages across all platforms -decompressible_exts = ('.7z', '.tar', '.tar.bz2', '.tar.gz', '.tar.lzma', '.tar.xz', - '.tar.z', '.tar.zst', '.tgz', '.whl', '.zip', '.rpm', '.deb') - - -def _tar_xf_fallback(tarball, dir_path, mode='r:*'): - if tarball.lower().endswith('.tar.z'): - uncompress = external.find_executable('uncompress') +decompressible_exts = ( + ".7z", + ".tar", + ".tar.bz2", + ".tar.gz", + ".tar.lzma", + ".tar.xz", + ".tar.z", + ".tar.zst", + ".tgz", + ".whl", + ".zip", + ".rpm", + ".deb", +) + + +def _tar_xf_fallback(tarball, dir_path, mode="r:*"): + if tarball.lower().endswith(".tar.z"): + uncompress = external.find_executable("uncompress") if not uncompress: - uncompress = external.find_executable('gunzip') + uncompress = external.find_executable("gunzip") if not uncompress: - sys.exit("""\ + sys.exit( + """\ uncompress (or gunzip) is required to unarchive .z source files. 
-""") - check_call_env([uncompress, '-f', tarball]) +""" + ) + check_call_env([uncompress, "-f", tarball]) tarball = tarball[:-2] t = tarfile.open(tarball, mode) members = t.getmembers() for i, member in enumerate(members, 0): if os.path.isabs(member.name): - member.name = os.path.relpath(member.name, '/') + member.name = os.path.relpath(member.name, "/") cwd = os.path.realpath(os.getcwd()) if not os.path.realpath(member.name).startswith(cwd): member.name = member.name.replace("../", "") @@ -783,6 +877,7 @@ def _tar_xf_fallback(tarball, dir_path, mode='r:*'): def tar_xf_file(tarball, entries): from conda_build.utils import ensure_list + entries = ensure_list(entries) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) @@ -814,11 +909,13 @@ def tar_xf_getnames(tarball): def tar_xf(tarball, dir_path): - flags = libarchive.extract.EXTRACT_TIME | \ - libarchive.extract.EXTRACT_PERM | \ - libarchive.extract.EXTRACT_SECURE_NODOTDOT | \ - libarchive.extract.EXTRACT_SECURE_SYMLINKS | \ - libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + flags = ( + libarchive.extract.EXTRACT_TIME + | libarchive.extract.EXTRACT_PERM + | libarchive.extract.EXTRACT_SECURE_NODOTDOT + | libarchive.extract.EXTRACT_SECURE_SYMLINKS + | libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS + ) if not os.path.isabs(tarball): tarball = os.path.join(os.getcwd(), tarball) try: @@ -828,17 +925,21 @@ def tar_xf(tarball, dir_path): # try again, maybe we are on Windows and the archive contains symlinks # https://github.com/conda/conda-build/issues/3351 # https://github.com/libarchive/libarchive/pull/1030 - if tarball.lower().endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2', '.tar.z', '.tar.xz')): + if tarball.lower().endswith( + (".tar", ".tar.gz", ".tgz", ".tar.bz2", ".tar.z", ".tar.xz") + ): _tar_xf_fallback(tarball, dir_path) else: raise def file_info(path): - return {'size': getsize(path), - 'md5': md5_file(path), - 'sha256': hashsum_file(path, 'sha256'), - 'mtime': getmtime(path)} + return { + "size": getsize(path), + "md5": md5_file(path), + "sha256": hashsum_file(path, "sha256"), + "mtime": getmtime(path), + } def comma_join(items): @@ -854,7 +955,11 @@ def comma_join(items): >>> comma_join(['a', 'b', 'c']) 'a, b, and c' """ - return ' and '.join(items) if len(items) <= 2 else ', '.join(items[:-1]) + ', and ' + items[-1] + return ( + " and ".join(items) + if len(items) <= 2 + else ", ".join(items[:-1]) + ", and " + items[-1] + ) def safe_print_unicode(*args, **kwargs): @@ -867,12 +972,12 @@ def safe_print_unicode(*args, **kwargs): :param end: ending character (defaults to '\n') :param errors: error handler for encoding errors (defaults to 'replace') """ - sep = kwargs.pop('sep', ' ') - end = kwargs.pop('end', '\n') - errors = kwargs.pop('errors', 'replace') + sep = kwargs.pop("sep", " ") + end = kwargs.pop("end", "\n") + errors = kwargs.pop("errors", "replace") func = sys.stdout.buffer.write line = sep.join(args) + end - encoding = sys.stdout.encoding or 'utf8' + encoding = sys.stdout.encoding or "utf8" func(line.encode(encoding, errors)) @@ -901,7 +1006,7 @@ def rec_glob(path, patterns, ignores=None): def convert_unix_path_to_win(path): - if external.find_executable('cygpath'): + if external.find_executable("cygpath"): cmd = f"cygpath -w {path}" path = subprocess.getoutput(cmd) @@ -911,7 +1016,7 @@ def convert_unix_path_to_win(path): def convert_win_path_to_unix(path): - if external.find_executable('cygpath'): + if external.find_executable("cygpath"): cmd = f"cygpath -u {path}" path = 
subprocess.getoutput(cmd) @@ -923,25 +1028,25 @@ def convert_win_path_to_unix(path): # Used for translating local paths into url (file://) paths # http://stackoverflow.com/a/14298190/1170370 def path2url(path): - return urlparse.urljoin('file:', urllib.pathname2url(path)) + return urlparse.urljoin("file:", urllib.pathname2url(path)) def get_stdlib_dir(prefix, py_ver): - if sys.platform == 'win32': - lib_dir = os.path.join(prefix, 'Lib') + if sys.platform == "win32": + lib_dir = os.path.join(prefix, "Lib") else: - lib_dir = os.path.join(prefix, 'lib') - python_folder = glob(os.path.join(lib_dir, 'python?.*')) + lib_dir = os.path.join(prefix, "lib") + python_folder = glob(os.path.join(lib_dir, "python?.*")) python_folder = sorted(filterfalse(islink, python_folder)) if python_folder: lib_dir = os.path.join(lib_dir, python_folder[0]) else: - lib_dir = os.path.join(lib_dir, f'python{py_ver}') + lib_dir = os.path.join(lib_dir, f"python{py_ver}") return lib_dir def get_site_packages(prefix, py_ver): - return os.path.join(get_stdlib_dir(prefix, py_ver), 'site-packages') + return os.path.join(get_stdlib_dir(prefix, py_ver), "site-packages") def get_build_folders(croot): @@ -950,16 +1055,22 @@ def get_build_folders(croot): def prepend_bin_path(env, prefix, prepend_prefix=False): - env['PATH'] = join(prefix, "bin") + os.pathsep + env['PATH'] + env["PATH"] = join(prefix, "bin") + os.pathsep + env["PATH"] if sys.platform == "win32": - env['PATH'] = join(prefix, "Library", "mingw-w64", "bin") + os.pathsep + \ - join(prefix, "Library", "usr", "bin") + os.pathsep + \ - join(prefix, "Library", "bin") + os.pathsep + \ - join(prefix, "Scripts") + os.pathsep + \ - env['PATH'] + env["PATH"] = ( + join(prefix, "Library", "mingw-w64", "bin") + + os.pathsep + + join(prefix, "Library", "usr", "bin") + + os.pathsep + + join(prefix, "Library", "bin") + + os.pathsep + + join(prefix, "Scripts") + + os.pathsep + + env["PATH"] + ) prepend_prefix = True # windows has Python in the prefix. Use it. if prepend_prefix: - env['PATH'] = prefix + os.pathsep + env['PATH'] + env["PATH"] = prefix + os.pathsep + env["PATH"] return env @@ -970,13 +1081,13 @@ def prepend_bin_path(env, prefix, prepend_prefix=False): def sys_path_prepended(prefix): path_backup = sys.path[:] if on_win: - sys.path.insert(1, os.path.join(prefix, 'lib', 'site-packages')) + sys.path.insert(1, os.path.join(prefix, "lib", "site-packages")) else: - lib_dir = os.path.join(prefix, 'lib') - python_dir = glob(os.path.join(lib_dir, r'python[0-9\.]*')) + lib_dir = os.path.join(prefix, "lib") + python_dir = glob(os.path.join(lib_dir, r"python[0-9\.]*")) if python_dir: python_dir = python_dir[0] - sys.path.insert(1, os.path.join(python_dir, 'site-packages')) + sys.path.insert(1, os.path.join(python_dir, "site-packages")) try: yield finally: @@ -986,17 +1097,19 @@ def sys_path_prepended(prefix): @contextlib.contextmanager def path_prepended(prefix, prepend_prefix=True): # FIXME: Unclear why prepend_prefix=True for all platforms. 
- old_path = os.environ['PATH'] - os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)['PATH'] + old_path = os.environ["PATH"] + os.environ["PATH"] = prepend_bin_path(os.environ.copy(), prefix, prepend_prefix)[ + "PATH" + ] try: yield finally: - os.environ['PATH'] = old_path + os.environ["PATH"] = old_path -bin_dirname = 'Scripts' if sys.platform == 'win32' else 'bin' +bin_dirname = "Scripts" if sys.platform == "win32" else "bin" -entry_pat = re.compile(r'\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$') +entry_pat = re.compile(r"\s*([\w\-\.]+)\s*=\s*([\w.]+):([\w.]+)\s*$") def iter_entry_points(items): @@ -1008,22 +1121,24 @@ def iter_entry_points(items): def create_entry_point(path, module, func, config): - import_name = func.split('.')[0] - pyscript = PY_TMPL % { - 'module': module, 'func': func, 'import_name': import_name} + import_name = func.split(".")[0] + pyscript = PY_TMPL % {"module": module, "func": func, "import_name": import_name} if on_win: - with open(path + '-script.py', 'w') as fo: - if os.path.isfile(os.path.join(config.host_prefix, 'python_d.exe')): - fo.write('#!python_d\n') + with open(path + "-script.py", "w") as fo: + if os.path.isfile(os.path.join(config.host_prefix, "python_d.exe")): + fo.write("#!python_d\n") fo.write(pyscript) - copy_into(join(dirname(__file__), f'cli-{str(config.host_arch)}.exe'), - path + '.exe', config.timeout) + copy_into( + join(dirname(__file__), f"cli-{str(config.host_arch)}.exe"), + path + ".exe", + config.timeout, + ) else: if os.path.islink(path): os.remove(path) - with open(path, 'w') as fo: + with open(path, "w") as fo: if not config.noarch: - fo.write('#!%s\n' % config.host_python) + fo.write("#!%s\n" % config.host_python) fo.write(pyscript) os.chmod(path, 0o775) @@ -1051,36 +1166,45 @@ def get_ext_files(start_path, pattern): def convert_path_for_cygwin_or_msys2(exe, path): "If exe is a Cygwin or MSYS2 executable then filters it through `cygpath -u`" - if sys.platform != 'win32': + if sys.platform != "win32": return path if exe not in _posix_exes_cache: with open(exe, "rb") as exe_file: exe_binary = exe_file.read() - msys2_cygwin = re.findall(b'(cygwin1.dll|msys-2.0.dll)', exe_binary) + msys2_cygwin = re.findall(b"(cygwin1.dll|msys-2.0.dll)", exe_binary) _posix_exes_cache[exe] = True if msys2_cygwin else False if _posix_exes_cache[exe]: try: - path = check_output_env(['cygpath', '-u', - path]).splitlines()[0].decode(getpreferredencoding()) + path = ( + check_output_env(["cygpath", "-u", path]) + .splitlines()[0] + .decode(getpreferredencoding()) + ) except OSError: log = get_logger(__name__) - log.debug('cygpath executable not found. Passing native path. This is OK for msys2.') + log.debug( + "cygpath executable not found. Passing native path. This is OK for msys2." + ) return path def get_skip_message(m): - return ("Skipped: {} from {} defines build/skip for this configuration ({}).".format( - m.name(), m.path, - {k: m.config.variant[k] for k in m.get_used_vars()})) + return "Skipped: {} from {} defines build/skip for this configuration ({}).".format( + m.name(), m.path, {k: m.config.variant[k] for k in m.get_used_vars()} + ) -def package_has_file(package_path, file_path, refresh_mode='modified'): +def package_has_file(package_path, file_path, refresh_mode="modified"): # This version does nothing to the package cache. 
with TemporaryDirectory() as td: - if file_path.startswith('info'): - conda_package_handling.api.extract(package_path, dest_dir=td, components='info') + if file_path.startswith("info"): + conda_package_handling.api.extract( + package_path, dest_dir=td, components="info" + ) else: - conda_package_handling.api.extract(package_path, dest_dir=td, components=file_path) + conda_package_handling.api.extract( + package_path, dest_dir=td, components=file_path + ) resolved_file_path = os.path.join(td, file_path) if os.path.exists(resolved_file_path): # TODO :: Remove this text-mode load. Files are binary. @@ -1088,7 +1212,7 @@ def package_has_file(package_path, file_path, refresh_mode='modified'): with open(resolved_file_path) as f: content = f.read() except UnicodeDecodeError: - with open(resolved_file_path, 'rb') as f: + with open(resolved_file_path, "rb") as f: content = f.read() else: content = False @@ -1145,7 +1269,7 @@ def islist(arg, uniform=False, include_dict=True): :return: Whether `arg` is a `list` :rtype: bool """ - if isinstance(arg, str) or not hasattr(arg, '__iter__'): + if isinstance(arg, str) or not hasattr(arg, "__iter__"): # str and non-iterables are not lists return False elif not include_dict and isinstance(arg, dict): @@ -1204,13 +1328,13 @@ def expand_globs(path_list, root_dir): glob_files = glob(path) if not glob_files: log = get_logger(__name__) - log.error(f'Glob {path} did not match in root_dir {root_dir}') + log.error(f"Glob {path} did not match in root_dir {root_dir}") # https://docs.python.org/3/library/glob.html#glob.glob states that # "whether or not the results are sorted depends on the file system". # Avoid this potential ambiguity by sorting. (see #4185) files.extend(sorted(glob_files)) - prefix_path_re = re.compile('^' + re.escape(f'{root_dir}{os.path.sep}')) - files = [prefix_path_re.sub('', f, 1) for f in files] + prefix_path_re = re.compile("^" + re.escape(f"{root_dir}{os.path.sep}")) + files = [prefix_path_re.sub("", f, 1) for f in files] return files @@ -1228,12 +1352,16 @@ def find_recipe(path): if os.path.isfile(path): if os.path.basename(path) in VALID_METAS: return path - raise OSError("{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS))) + raise OSError( + "{} is not a valid meta file ({})".format(path, ", ".join(VALID_METAS)) + ) results = list(rec_glob(path, VALID_METAS, ignores=(".AppleDouble",))) if not results: - raise OSError("No meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "No meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) if len(results) == 1: return results[0] @@ -1243,20 +1371,40 @@ def find_recipe(path): metas = [m for m in VALID_METAS if os.path.isfile(os.path.join(path, m))] if len(metas) == 1: - get_logger(__name__).warn("Multiple meta files found. " - "The %s file in the base directory (%s) " - "will be used." % (metas[0], path)) + get_logger(__name__).warn( + "Multiple meta files found. " + "The %s file in the base directory (%s) " + "will be used." 
% (metas[0], path) + ) return os.path.join(path, metas[0]) - raise OSError("More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path)) + raise OSError( + "More than one meta files ({}) found in {}".format(", ".join(VALID_METAS), path) + ) class LoggingContext: - default_loggers = ['conda', 'binstar', 'install', 'conda.install', 'fetch', 'conda.instructions', - 'fetch.progress', 'print', 'progress', 'dotupdate', 'stdoutlog', 'requests', - 'conda.core.package_cache', 'conda.plan', 'conda.gateways.disk.delete', - 'conda_build', 'conda_build.index', 'conda_build.noarch_python', - 'urllib3.connectionpool'] + default_loggers = [ + "conda", + "binstar", + "install", + "conda.install", + "fetch", + "conda.instructions", + "fetch.progress", + "print", + "progress", + "dotupdate", + "stdoutlog", + "requests", + "conda.core.package_cache", + "conda.plan", + "conda.gateways.disk.delete", + "conda_build", + "conda_build.index", + "conda_build.noarch_python", + "urllib3.connectionpool", + ] def __init__(self, level=logging.WARN, handler=None, close=True, loggers=None): self.level = level @@ -1274,8 +1422,11 @@ def __enter__(self): if isinstance(logger, str): log = logging.getLogger(logger) self.old_levels[logger] = log.level - log.setLevel(self.level if ('install' not in logger or - self.level < logging.INFO) else self.level + 10) + log.setLevel( + self.level + if ("install" not in logger or self.level < logging.INFO) + else self.level + 10 + ) if self.handler: self.logger.addHandler(self.handler) @@ -1295,23 +1446,23 @@ def __exit__(self, et, ev, tb): def get_installed_packages(path): - ''' + """ Scan all json files in 'path' and return a dictionary with their contents. Files are assumed to be in 'index.json' format. - ''' + """ installed = dict() - for filename in glob(os.path.join(path, 'conda-meta', '*.json')): + for filename in glob(os.path.join(path, "conda-meta", "*.json")): with open(filename) as file: data = json.load(file) - installed[data['name']] = data + installed[data["name"]] = data return installed def _convert_lists_to_sets(_dict): for k, v in _dict.items(): - if hasattr(v, 'keys'): + if hasattr(v, "keys"): _dict[k] = HashableDict(_convert_lists_to_sets(v)) - elif hasattr(v, '__iter__') and not isinstance(v, str): + elif hasattr(v, "__iter__") and not isinstance(v, str): try: _dict[k] = sorted(list(set(v))) except TypeError: @@ -1320,8 +1471,8 @@ def _convert_lists_to_sets(_dict): class HashableDict(dict): - """use hashable frozen dictionaries for resources and resource types so that they can be in sets - """ + """use hashable frozen dictionaries for resources and resource types so that they can be in sets""" + def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self = _convert_lists_to_sets(self) @@ -1339,7 +1490,7 @@ def represent_hashabledict(dumper, data): value.append((node_key, node_value)) - return yaml.nodes.MappingNode('tag:yaml.org,2002:map', value) + return yaml.nodes.MappingNode("tag:yaml.org,2002:map", value) yaml.add_representer(HashableDict, represent_hashabledict) @@ -1349,6 +1500,7 @@ def represent_hashabledict(dumper, data): @contextlib.contextmanager def capture(): import sys + oldout, olderr = sys.stdout, sys.stderr try: out = [StringIO(), StringIO()] @@ -1383,19 +1535,19 @@ def env_var(name, value, callback=None): def trim_empty_keys(dict_): to_remove = set() - negative_means_empty = ('final', 'noarch_python', 'zip_keys') + negative_means_empty = ("final", "noarch_python", "zip_keys") for k, v in dict_.items(): - if 
hasattr(v, 'keys'): + if hasattr(v, "keys"): trim_empty_keys(v) # empty lists and empty strings, and None are always empty. - if v == list() or v == '' or v is None or v == dict(): + if v == list() or v == "" or v is None or v == dict(): to_remove.add(k) # other things that evaluate as False may not be "empty" - things can be manually set to # false, and we need to keep that setting. if not v and k in negative_means_empty: to_remove.add(k) - if 'zip_keys' in dict_ and not any(v for v in dict_['zip_keys']): - to_remove.add('zip_keys') + if "zip_keys" in dict_ and not any(v for v in dict_["zip_keys"]): + to_remove.add("zip_keys") for k in to_remove: del dict_[k] @@ -1403,17 +1555,17 @@ def trim_empty_keys(dict_): def _increment(version, alpha_ver): try: if alpha_ver: - suffix = 'a' + suffix = "a" else: - suffix = '.0a0' + suffix = ".0a0" last_version = str(int(version) + 1) + suffix except ValueError: last_version = chr(ord(version) + 1) return last_version -def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): - pins = [len(p.split('.')) if p else None for p in (min_pin, max_pin)] +def apply_pin_expressions(version, min_pin="x.x.x.x.x.x.x", max_pin="x"): + pins = [len(p.split(".")) if p else None for p in (min_pin, max_pin)] parsed_version = VersionOrder(version).version[1:] nesting_position = None flat_list = [] @@ -1423,9 +1575,9 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): flat_list.extend(item) else: flat_list.append(item) - if max_pin and len(max_pin.split('.')) > len(flat_list): + if max_pin and len(max_pin.split(".")) > len(flat_list): pins[1] = len(flat_list) - versions = ['', ''] + versions = ["", ""] # first idx is lower bound pin; second is upper bound pin. # pin value is number of places to pin. for p_idx, pin in enumerate(pins): @@ -1439,8 +1591,8 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): v = _increment(v, alpha_ver) versions[p_idx] += str(v) if v_idx != nesting_position: - versions[p_idx] += '.' - if versions[p_idx][-1] == '.': + versions[p_idx] += "." + if versions[p_idx][-1] == ".": versions[p_idx] = versions[p_idx][:-1] if versions[0]: if version.endswith(".*"): @@ -1452,55 +1604,68 @@ def apply_pin_expressions(version, min_pin='x.x.x.x.x.x.x', max_pin='x'): if version_order < VersionOrder(versions[0]): # If the minimum is greater than the version this is a pre-release build. 
# Use the version as the lower bound - versions[0] = '>=' + version + versions[0] = ">=" + version else: - versions[0] = '>=' + versions[0] + versions[0] = ">=" + versions[0] if versions[1]: - versions[1] = '<' + versions[1] - return ','.join([v for v in versions if v]) - - -def filter_files(files_list, prefix, filter_patterns=(r'(.*[\\/])?\.git[\\/].*', - r'(.*[\\/])?\.git$', - r'(.*)?\.DS_Store.*', - r'.*\.la$', - r'conda-meta.*', - r'.*\.conda_trash(?:_\d+)*$')): + versions[1] = "<" + versions[1] + return ",".join([v for v in versions if v]) + + +def filter_files( + files_list, + prefix, + filter_patterns=( + r"(.*[\\/])?\.git[\\/].*", + r"(.*[\\/])?\.git$", + r"(.*)?\.DS_Store.*", + r".*\.la$", + r"conda-meta.*", + r".*\.conda_trash(?:_\d+)*$", + ), +): """Remove things like the .git directory from the list of files to be copied""" for pattern in filter_patterns: r = re.compile(pattern) files_list = set(files_list) - set(filter(r.match, files_list)) - return [f for f in files_list - if not os.path.isdir(os.path.join(prefix, f)) or - os.path.islink(os.path.join(prefix, f))] + return [ + f + for f in files_list + if not os.path.isdir(os.path.join(prefix, f)) + or os.path.islink(os.path.join(prefix, f)) + ] def filter_info_files(files_list, prefix): - return filter_files(files_list, prefix, filter_patterns=( - 'info[\\\\/]index.json', - 'info[\\\\/]files', - 'info[\\\\/]paths.json', - 'info[\\\\/]about.json', - 'info[\\\\/]has_prefix', - 'info[\\\\/]hash_input_files', # legacy, not used anymore - 'info[\\\\/]hash_input.json', - 'info[\\\\/]run_exports.yaml', # legacy - 'info[\\\\/]run_exports.json', # current - 'info[\\\\/]git', - 'info[\\\\/]recipe[\\\\/].*', - 'info[\\\\/]recipe_log.json', - 'info[\\\\/]recipe.tar', - 'info[\\\\/]test[\\\\/].*', - 'info[\\\\/]LICENSE.txt', # legacy, some tests rely on this - 'info[\\\\/]licenses[\\\\/]*', - 'info[\\\\/]prelink_messages[\\\\/]*', - 'info[\\\\/]requires', - 'info[\\\\/]meta', - 'info[\\\\/]platform', - 'info[\\\\/]no_link', - 'info[\\\\/]link.json', - 'info[\\\\/]icon.png', - )) + return filter_files( + files_list, + prefix, + filter_patterns=( + "info[\\\\/]index.json", + "info[\\\\/]files", + "info[\\\\/]paths.json", + "info[\\\\/]about.json", + "info[\\\\/]has_prefix", + "info[\\\\/]hash_input_files", # legacy, not used anymore + "info[\\\\/]hash_input.json", + "info[\\\\/]run_exports.yaml", # legacy + "info[\\\\/]run_exports.json", # current + "info[\\\\/]git", + "info[\\\\/]recipe[\\\\/].*", + "info[\\\\/]recipe_log.json", + "info[\\\\/]recipe.tar", + "info[\\\\/]test[\\\\/].*", + "info[\\\\/]LICENSE.txt", # legacy, some tests rely on this + "info[\\\\/]licenses[\\\\/]*", + "info[\\\\/]prelink_messages[\\\\/]*", + "info[\\\\/]requires", + "info[\\\\/]meta", + "info[\\\\/]platform", + "info[\\\\/]no_link", + "info[\\\\/]link.json", + "info[\\\\/]icon.png", + ), + ) def rm_rf(path, config=None): @@ -1544,12 +1709,12 @@ def filter(self, record): warning_error_stderr_filter = GreaterThanFilter(logging.INFO) # set filelock's logger to only show warnings by default -logging.getLogger('filelock').setLevel(logging.WARN) +logging.getLogger("filelock").setLevel(logging.WARN) # quiet some of conda's less useful output -logging.getLogger('conda.core.linked_data').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.delete').setLevel(logging.WARN) -logging.getLogger('conda.gateways.disk.test').setLevel(logging.WARN) +logging.getLogger("conda.core.linked_data").setLevel(logging.WARN) 
+logging.getLogger("conda.gateways.disk.delete").setLevel(logging.WARN) +logging.getLogger("conda.gateways.disk.test").setLevel(logging.WARN) def reset_deduplicator(): @@ -1561,15 +1726,17 @@ def reset_deduplicator(): def get_logger(name, level=logging.INFO, dedupe=True, add_stdout_stderr_handlers=True): config_file = None - if cc_conda_build.get('log_config_file'): - config_file = abspath(expanduser(expandvars(cc_conda_build.get('log_config_file')))) + if cc_conda_build.get("log_config_file"): + config_file = abspath( + expanduser(expandvars(cc_conda_build.get("log_config_file"))) + ) # by loading config file here, and then only adding handlers later, people # should be able to override conda-build's logger settings here. if config_file: with open(config_file) as f: config_dict = yaml.safe_load(f) logging.config.dictConfig(config_dict) - level = config_dict.get('loggers', {}).get(name, {}).get('level', level) + level = config_dict.get("loggers", {}).get(name, {}).get("level", level) log = logging.getLogger(name) log.setLevel(level) if dedupe: @@ -1592,25 +1759,30 @@ def _equivalent(base_value, value, path): equivalent = value == base_value if isinstance(value, str) and isinstance(base_value, str): if not os.path.isabs(base_value): - base_value = os.path.abspath(os.path.normpath(os.path.join(path, base_value))) + base_value = os.path.abspath( + os.path.normpath(os.path.join(path, base_value)) + ) if not os.path.isabs(value): value = os.path.abspath(os.path.normpath(os.path.join(path, value))) equivalent |= base_value == value return equivalent -def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True): +def merge_or_update_dict( + base, new, path="", merge=True, raise_on_clobber=False, add_missing_keys=True +): if base == new: return base log = get_logger(__name__) for key, value in new.items(): if key in base or add_missing_keys: base_value = base.get(key, value) - if hasattr(value, 'keys'): - base_value = merge_or_update_dict(base_value, value, path, merge, - raise_on_clobber=raise_on_clobber) + if hasattr(value, "keys"): + base_value = merge_or_update_dict( + base_value, value, path, merge, raise_on_clobber=raise_on_clobber + ) base[key] = base_value - elif hasattr(value, '__iter__') and not isinstance(value, str): + elif hasattr(value, "__iter__") and not isinstance(value, str): if merge: if base_value != value: try: @@ -1624,10 +1796,17 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, else: base[key] = value else: - if (base_value and merge and not _equivalent(base_value, value, path) and - raise_on_clobber): - log.debug('clobbering key {} (original value {}) with value {}'.format(key, - base_value, value)) + if ( + base_value + and merge + and not _equivalent(base_value, value, path) + and raise_on_clobber + ): + log.debug( + "clobbering key {} (original value {}) with value {}".format( + key, base_value, value + ) + ) if value is None and key in base: del base[key] else: @@ -1636,49 +1815,60 @@ def merge_or_update_dict(base, new, path="", merge=True, raise_on_clobber=False, def merge_dicts_of_lists(dol1, dol2): - ''' + """ From Alex Martelli: https://stackoverflow.com/a/1495821/3257826 - ''' + """ keys = set(dol1).union(dol2) no = [] return {k: dol1.get(k, no) + dol2.get(k, no) for k in keys} def prefix_files(prefix): - ''' + """ Returns a set of all files in prefix. 
- ''' + """ res = set() prefix_rep = prefix + os.path.sep for root, dirs, files in walk(prefix): for fn in files: # this is relpath, just hacked to be faster - res.add(join(root, fn).replace(prefix_rep, '', 1)) + res.add(join(root, fn).replace(prefix_rep, "", 1)) for dn in dirs: path = join(root, dn) if islink(path): - res.add(path.replace(prefix_rep, '', 1)) - res.update(expand_globs((path, ), prefix)) + res.add(path.replace(prefix_rep, "", 1)) + res.update(expand_globs((path,), prefix)) return res -def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_PROT_WRITE, - access=None, offset=0): - ''' +def mmap_mmap( + fileno, + length, + tagname=None, + flags=0, + prot=mmap_PROT_READ | mmap_PROT_WRITE, + access=None, + offset=0, +): + """ Hides the differences between mmap.mmap on Windows and Unix. Windows has `tagname`. Unix does not, but makes up for it with `flags` and `prot`. On both, the default value for `access` is determined from how the file was opened so must not be passed in at all to get this default behaviour. - ''' + """ if on_win: if access: - return mmap.mmap(fileno, length, tagname=tagname, access=access, offset=offset) + return mmap.mmap( + fileno, length, tagname=tagname, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, tagname=tagname) else: if access: - return mmap.mmap(fileno, length, flags=flags, prot=prot, access=access, offset=offset) + return mmap.mmap( + fileno, length, flags=flags, prot=prot, access=access, offset=offset + ) else: return mmap.mmap(fileno, length, flags=flags, prot=prot) @@ -1686,21 +1876,21 @@ def mmap_mmap(fileno, length, tagname=None, flags=0, prot=mmap_PROT_READ | mmap_ def remove_pycache_from_scripts(build_prefix): """Remove pip created pycache directory from bin or Scripts.""" if on_win: - scripts_path = os.path.join(build_prefix, 'Scripts') + scripts_path = os.path.join(build_prefix, "Scripts") else: - scripts_path = os.path.join(build_prefix, 'bin') + scripts_path = os.path.join(build_prefix, "bin") if os.path.isdir(scripts_path): for entry in os.listdir(scripts_path): entry_path = os.path.join(scripts_path, entry) - if os.path.isdir(entry_path) and entry.strip(os.sep) == '__pycache__': + if os.path.isdir(entry_path) and entry.strip(os.sep) == "__pycache__": shutil.rmtree(entry_path) - elif os.path.isfile(entry_path) and entry_path.endswith('.pyc'): + elif os.path.isfile(entry_path) and entry_path.endswith(".pyc"): os.remove(entry_path) -def sort_list_in_nested_structure(dictionary, omissions=''): +def sort_list_in_nested_structure(dictionary, omissions=""): """Recurse through a nested dictionary and sort any lists that are found. If the list that is found contains anything but strings, it is skipped @@ -1713,9 +1903,11 @@ def sort_list_in_nested_structure(dictionary, omissions=''): section = dictionary[field][key] if isinstance(section, dict): sort_list_in_nested_structure(section) - elif (isinstance(section, list) and - '{}/{}' .format(field, key) not in omissions and - all(isinstance(item, str) for item in section)): + elif ( + isinstance(section, list) + and f"{field}/{key}" not in omissions + and all(isinstance(item, str) for item in section) + ): section.sort() # there's a possibility for nested lists containing dictionaries @@ -1738,51 +1930,63 @@ def sort_list_in_nested_structure(dictionary, omissions=''): # if you are seeing mysterious unsatisfiable errors, with the package you're building being the # unsatisfiable part, then you probably need to update this regex. 
-spec_needing_star_re = re.compile(r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$") # NOQA +spec_needing_star_re = re.compile( + r"([\w\d\.\-\_]+)\s+((?<=])[\w\d\.\-\_]+?(?!\*))(\s+[\w\d\.\_]+)?$" +) # NOQA spec_ver_needing_star_re = re.compile(r"^([0-9a-zA-Z\.]+)$") def ensure_valid_spec(spec, warn=False): if isinstance(spec, MatchSpec): - if (hasattr(spec, 'version') and spec.version and (not spec.get('build', '')) and - spec_ver_needing_star_re.match(str(spec.version))): - if str(spec.name) not in ('python', 'numpy') or str(spec.version) != 'x.x': - spec = MatchSpec("{} {}".format(str(spec.name), str(spec.version) + '.*')) + if ( + hasattr(spec, "version") + and spec.version + and (not spec.get("build", "")) + and spec_ver_needing_star_re.match(str(spec.version)) + ): + if str(spec.name) not in ("python", "numpy") or str(spec.version) != "x.x": + spec = MatchSpec( + "{} {}".format(str(spec.name), str(spec.version) + ".*") + ) else: match = spec_needing_star_re.match(spec) # ignore exact pins (would be a 3rd group) if match and not match.group(3): - if match.group(1) in ('python', 'numpy') and match.group(2) == 'x.x': + if match.group(1) in ("python", "numpy") and match.group(2) == "x.x": spec = spec_needing_star_re.sub(r"\1 \2", spec) else: if "*" not in spec: - if match.group(1) not in ('python', 'vc') and warn: + if match.group(1) not in ("python", "vc") and warn: log = get_logger(__name__) - log.warn("Adding .* to spec '{}' to ensure satisfiability. Please " - "consider putting {{{{ var_name }}}}.* or some relational " - "operator (>/=/<=) on this spec in meta.yaml, or if req is " - "also a build req, using {{{{ pin_compatible() }}}} jinja2 " - "function instead. See " - "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level" # NOQA - .format(spec)) + log.warn( + "Adding .* to spec '{}' to ensure satisfiability. Please " + "consider putting {{{{ var_name }}}}.* or some relational " + "operator (>/=/<=) on this spec in meta.yaml, or if req is " + "also a build req, using {{{{ pin_compatible() }}}} jinja2 " + "function instead. 
See " + "https://conda.io/docs/user-guide/tasks/build-packages/variants.html#pinning-at-the-variant-level".format( # NOQA + spec + ) + ) spec = spec_needing_star_re.sub(r"\1 \2.*", spec) return spec def insert_variant_versions(requirements_dict, variant, env): - build_deps = (ensure_list(requirements_dict.get('build')) + - ensure_list(requirements_dict.get('host'))) + build_deps = ensure_list(requirements_dict.get("build")) + ensure_list( + requirements_dict.get("host") + ) reqs = ensure_list(requirements_dict.get(env)) for key, val in variant.items(): - regex = re.compile(r'^(%s)(?:\s*$)' % key.replace('_', '[-_]')) + regex = re.compile(r"^(%s)(?:\s*$)" % key.replace("_", "[-_]")) matches = [regex.match(pkg) for pkg in reqs] if any(matches): for i, x in enumerate(matches): - if x and (env in ('build', 'host') or x.group(1) in build_deps): + if x and (env in ("build", "host") or x.group(1) in build_deps): del reqs[i] if not isinstance(val, str): val = val[0] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), val)))) + reqs.insert(i, ensure_valid_spec(" ".join((x.group(1), val)))) xx_re = re.compile(r"([0-9a-zA-Z\.\-\_]+)\s+x\.x") @@ -1791,7 +1995,10 @@ def insert_variant_versions(requirements_dict, variant, env): for i, x in enumerate(matches): if x: del reqs[i] - reqs.insert(i, ensure_valid_spec(' '.join((x.group(1), variant.get(x.group(1)))))) + reqs.insert( + i, + ensure_valid_spec(" ".join((x.group(1), variant.get(x.group(1))))), + ) if reqs: requirements_dict[env] = reqs @@ -1822,10 +2029,13 @@ def match_peer_job(target_matchspec, other_m, this_m=None): def expand_reqs(reqs_entry): - if not hasattr(reqs_entry, 'keys'): + if not hasattr(reqs_entry, "keys"): original = ensure_list(reqs_entry)[:] - reqs_entry = {'host': ensure_list(original), - 'run': ensure_list(original)} if original else {} + reqs_entry = ( + {"host": ensure_list(original), "run": ensure_list(original)} + if original + else {} + ) else: for sec in reqs_entry: reqs_entry[sec] = ensure_list(reqs_entry[sec]) @@ -1840,8 +2050,8 @@ def sha256_checksum(filename, buffersize=65536): if not isfile(filename): return None sha256 = hashlib.sha256() - with open(filename, 'rb') as f: - for block in iter(lambda: f.read(buffersize), b''): + with open(filename, "rb") as f: + for block in iter(lambda: f.read(buffersize), b""): sha256.update(block) return sha256.hexdigest() @@ -1865,11 +2075,11 @@ def write_bat_activation_text(file_handle, m): # exists to identify a valid conda environment # conda 4.6 changes this one final time, by adding a '--stack' flag to the 'activate' # command, and 'activate' does not stack environments by default without that flag - history_file = join(m.config.host_prefix, 'conda-meta', 'history') + history_file = join(m.config.host_prefix, "conda-meta", "history") if not isfile(history_file): if not isdir(dirname(history_file)): os.makedirs(dirname(history_file)) - open(history_file, 'a').close() + open(history_file, "a").close() file_handle.write( f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate "{m.config.host_prefix}"\n' @@ -1880,47 +2090,51 @@ def write_bat_activation_text(file_handle, m): f'call "{root_script_dir}\\..\\condabin\\conda.bat" activate --stack "{m.config.build_prefix}"\n' ) from conda_build.os_utils.external import find_executable - ccache = find_executable('ccache', m.config.build_prefix, False) + + ccache = find_executable("ccache", m.config.build_prefix, False) if ccache: if isinstance(ccache, list): ccache = ccache[0] ccache_methods = {} - ccache_methods['env_vars'] = 
False - ccache_methods['symlinks'] = False - ccache_methods['native'] = False - if hasattr(m.config, 'ccache_method'): + ccache_methods["env_vars"] = False + ccache_methods["symlinks"] = False + ccache_methods["native"] = False + if hasattr(m.config, "ccache_method"): ccache_methods[m.config.ccache_method] = True for method, value in ccache_methods.items(): if value: - if method == 'env_vars': + if method == "env_vars": file_handle.write(f'set "CC={ccache} %CC%"\n') file_handle.write(f'set "CXX={ccache} %CXX%"\n') - elif method == 'symlinks': - dirname_ccache_ln_bin = join(m.config.build_prefix, 'ccache-ln-bin') - file_handle.write(f'mkdir {dirname_ccache_ln_bin}\n') - file_handle.write(f'pushd {dirname_ccache_ln_bin}\n') + elif method == "symlinks": + dirname_ccache_ln_bin = join(m.config.build_prefix, "ccache-ln-bin") + file_handle.write(f"mkdir {dirname_ccache_ln_bin}\n") + file_handle.write(f"pushd {dirname_ccache_ln_bin}\n") # If you use mklink.exe instead of mklink here it breaks as it's a builtin. - for ext in ('.exe', ''): + for ext in (".exe", ""): # MSVC - file_handle.write(f'mklink cl{ext} {ccache}\n') - file_handle.write(f'mklink link{ext} {ccache}\n') + file_handle.write(f"mklink cl{ext} {ccache}\n") + file_handle.write(f"mklink link{ext} {ccache}\n") # GCC - file_handle.write(f'mklink gcc{ext} {ccache}\n') - file_handle.write(f'mklink g++{ext} {ccache}\n') - file_handle.write(f'mklink cc{ext} {ccache}\n') - file_handle.write(f'mklink c++{ext} {ccache}\n') - file_handle.write(f'mklink as{ext} {ccache}\n') - file_handle.write(f'mklink ar{ext} {ccache}\n') - file_handle.write(f'mklink nm{ext} {ccache}\n') - file_handle.write(f'mklink ranlib{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ar{ext} {ccache}\n') - file_handle.write(f'mklink gcc-nm{ext} {ccache}\n') - file_handle.write(f'mklink gcc-ranlib{ext} {ccache}\n') - file_handle.write('popd\n') - file_handle.write('set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n'.format( - dirname_ccache_ln=dirname_ccache_ln_bin, - dirname_ccache=os.path.dirname(ccache))) - elif method == 'native': + file_handle.write(f"mklink gcc{ext} {ccache}\n") + file_handle.write(f"mklink g++{ext} {ccache}\n") + file_handle.write(f"mklink cc{ext} {ccache}\n") + file_handle.write(f"mklink c++{ext} {ccache}\n") + file_handle.write(f"mklink as{ext} {ccache}\n") + file_handle.write(f"mklink ar{ext} {ccache}\n") + file_handle.write(f"mklink nm{ext} {ccache}\n") + file_handle.write(f"mklink ranlib{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ar{ext} {ccache}\n") + file_handle.write(f"mklink gcc-nm{ext} {ccache}\n") + file_handle.write(f"mklink gcc-ranlib{ext} {ccache}\n") + file_handle.write("popd\n") + file_handle.write( + "set PATH={dirname_ccache_ln};{dirname_ccache};%PATH%\n".format( + dirname_ccache_ln=dirname_ccache_ln_bin, + dirname_ccache=os.path.dirname(ccache), + ) + ) + elif method == "native": pass else: print("ccache method {} not implemented") @@ -1931,15 +2145,15 @@ def write_bat_activation_text(file_handle, m): def download_channeldata(channel_url): global channeldata_cache - if channel_url.startswith('file://') or channel_url not in channeldata_cache: + if channel_url.startswith("file://") or channel_url not in channeldata_cache: urls = get_conda_channel(channel_url).urls() - urls = {url.rsplit('/', 1)[0] for url in urls} + urls = {url.rsplit("/", 1)[0] for url in urls} data = {} for url in urls: with TemporaryDirectory() as td: tf = os.path.join(td, "channeldata.json") try: - download(url + '/channeldata.json', tf) + 
download(url + "/channeldata.json", tf) with open(tf) as f: new_channeldata = json.load(f) except (JSONDecodeError, CondaHTTPError): @@ -1977,20 +2191,26 @@ def shutil_move_more_retrying(src, dest, debug_name): log.info(f"shutil.move({debug_name})={src}, dest={dest})") shutil.move(src, dest) if attempts_left != 5: - log.warning("shutil.move({}={}, dest={}) succeeded on attempt number {}".format(debug_name, src, dest, - 6 - attempts_left)) + log.warning( + "shutil.move({}={}, dest={}) succeeded on attempt number {}".format( + debug_name, src, dest, 6 - attempts_left + ) + ) attempts_left = -1 except: attempts_left = attempts_left - 1 if attempts_left > 0: log.warning( "Failed to rename {} directory, check with strace, struss or procmon. " - "Will sleep for 3 seconds and try again!".format(debug_name)) + "Will sleep for 3 seconds and try again!".format(debug_name) + ) import time + time.sleep(3) elif attempts_left != -1: log.error( - f"Failed to rename {debug_name} directory despite sleeping and retrying.") + f"Failed to rename {debug_name} directory despite sleeping and retrying." + ) def is_conda_pkg(pkg_path: str) -> bool: @@ -1999,8 +2219,6 @@ def is_conda_pkg(pkg_path: str) -> bool: """ path = Path(pkg_path) - return ( - path.is_file() and ( - any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) - ) + return path.is_file() and ( + any(path.name.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) )