diff --git a/scripts/devops_tasks/build_packages.py b/scripts/devops_tasks/build_packages.py
index bc962c29e0a4..3a8d221935b5 100644
--- a/scripts/devops_tasks/build_packages.py
+++ b/scripts/devops_tasks/build_packages.py
@@ -12,7 +12,7 @@
 import sys
 import os
 
-from common_tasks import process_glob_string, run_check_call
+from common_tasks import process_glob_string, run_check_call, str_to_bool
 
 root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
 build_packing_script_location = os.path.join(root_dir, "build_package.py")
@@ -22,17 +22,6 @@
 sys.path.append(tox_path)
 from sanitize_setup import process_requires
 
-
-def str_to_bool(input_string):
-    if isinstance(input_string, bool):
-        return input_string
-    elif input_string.lower() in ("true", "t", "1"):
-        return True
-    elif input_string.lower() in ("false", "f", "0"):
-        return False
-    else:
-        return False
-
 def build_packages(targeted_packages, distribution_directory, is_dev_build=False):
     # run the build and distribution
     for package_root in targeted_packages:
diff --git a/scripts/devops_tasks/common_tasks.py b/scripts/devops_tasks/common_tasks.py
index c0ad19122385..3afb50e3461d 100644
--- a/scripts/devops_tasks/common_tasks.py
+++ b/scripts/devops_tasks/common_tasks.py
@@ -119,6 +119,15 @@ def clean_coverage(coverage_dir):
         else:
             raise
 
+def str_to_bool(input_string):
+    if isinstance(input_string, bool):
+        return input_string
+    elif input_string.lower() in ("true", "t", "1"):
+        return True
+    elif input_string.lower() in ("false", "f", "0"):
+        return False
+    else:
+        return False
 
 def parse_setup(setup_path):
     setup_filename = os.path.join(setup_path, "setup.py")
@@ -399,7 +408,7 @@ def extend_dev_requirements(dev_req_path, packages_to_include):
     # include any package given in included list. omit duplicate
     for requirement in packages_to_include:
         if requirement not in requirements:
-            requirements.append(requirement)
+            requirements.insert(0, requirement.rstrip() + '\n')
     logging.info("Extending dev requirements. New result:: {}".format(requirements))
 
     # create new dev requirements file with different name for filtered requirements
diff --git a/scripts/devops_tasks/test_regression.py b/scripts/devops_tasks/test_regression.py
index 934fbbf5e702..1c69c07cc0a0 100644
--- a/scripts/devops_tasks/test_regression.py
+++ b/scripts/devops_tasks/test_regression.py
@@ -23,7 +23,9 @@
     find_packages_missing_on_pypi,
     find_whl,
     find_tools_packages,
-    get_installed_packages
+    get_installed_packages,
+    extend_dev_requirements,
+    str_to_bool
 )
 
 from git_helper import get_release_tag, git_checkout_tag, git_checkout_branch, clone_repo
@@ -38,6 +40,8 @@
 AZURE_SDK_FOR_PYTHON_GIT_URL = "https://github.com/Azure/azure-sdk-for-python.git"
 TEMP_FOLDER_NAME = ".tmp_code_path"
 
+OLDEST_EXTENSION_PKGS = ['msrestazure','adal']
+
 logging.getLogger().setLevel(logging.INFO)
 
 class CustomVirtualEnv:
@@ -114,6 +118,7 @@ def run(self):
         logging.info("Dependent packages for [{0}]: {1}".format(pkg_name, dep_packages))
         for dep_pkg_path in dep_packages:
             dep_pkg_name, _, _, _ = parse_setup(dep_pkg_path)
+
             logging.info(
                 "Starting regression test of {0} against released {1}".format(
                     pkg_name, dep_pkg_name
@@ -235,6 +240,18 @@ def _install_packages(self, dependent_pkg_path, pkg_to_exclude):
             dependent_pkg_path, list_to_exclude, dependent_pkg_path
         )
 
+        # early versions of azure-sdk-tools had an unpinned version of azure-mgmt packages.
+        # that unpinned version hits a code path in azure-sdk-tools that triggers this error.
+        if filtered_dev_req_path and self.context.is_latest_depend_test == False:
+            logging.info(
+                "Extending dev requirements with {}".format(OLDEST_EXTENSION_PKGS)
+            )
+            extend_dev_requirements(
+                filtered_dev_req_path, OLDEST_EXTENSION_PKGS
+            )
+        else:
+            logging.info("Not extending dev requirements {} {}".format(filtered_dev_req_path, self.context.is_latest_depend_test))
+
         if filtered_dev_req_path:
             logging.info(
                 "Installing filtered dev requirements from {}".format(filtered_dev_req_path)
@@ -289,7 +306,6 @@ def find_package_dependency(glob_string, repo_root_dir):
 
 # This is the main function which identifies packages to test, find dependency matrix and trigger test
 def run_main(args):
-    temp_dir = ""
     if args.temp_dir:
         temp_dir = args.temp_dir
 
@@ -318,7 +334,7 @@ def run_main(args):
     if len(targeted_packages) == 0:
         exit(0)
 
-    # clone code repo only if it doesn't exists
+    # clone code repo only if it doesn't exist
     if not os.path.exists(code_repo_root):
         clone_repo(temp_dir, AZURE_SDK_FOR_PYTHON_GIT_URL)
     else:
@@ -333,7 +349,7 @@ def run_main(args):
 
     # Create regression text context. One context object will be reused for all packages
     context = RegressionContext(
-        args.whl_dir, temp_dir, args.verify_latest, args.mark_arg
+        args.whl_dir, temp_dir, str_to_bool(args.verify_latest), args.mark_arg
    )
 
     for pkg_path in targeted_packages:
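For context on why run_main now wraps args.verify_latest in str_to_bool: command-line and pipeline parameters arrive as strings, and any non-empty string (including "False") is truthy in Python, so the raw value cannot be used as a boolean directly. The snippet below is an illustrative sketch only, not part of the patch; the --verify-latest argument wiring is assumed for demonstration, while str_to_bool mirrors the helper that now lives in common_tasks.py.

# illustrative sketch -- not part of the patch above
import argparse

def str_to_bool(input_string):
    # mirror of the helper now exported from common_tasks.py
    if isinstance(input_string, bool):
        return input_string
    elif input_string.lower() in ("true", "t", "1"):
        return True
    elif input_string.lower() in ("false", "f", "0"):
        return False
    else:
        return False

parser = argparse.ArgumentParser()
# assumed flag name for illustration; the real script defines its own arguments
parser.add_argument("--verify-latest", default="True")
args = parser.parse_args(["--verify-latest", "False"])

print(bool(args.verify_latest))         # True  -- non-empty string is truthy
print(str_to_bool(args.verify_latest))  # False -- explicit conversion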