From 2f0b7f94bb37f42146204ff8df9713801aeeba05 Mon Sep 17 00:00:00 2001 From: Zachary Anderson Date: Sat, 4 Jun 2022 15:21:18 -0700 Subject: [PATCH] pylint a few python scripts and lint in lint.sh (#33675) --- .pylintrc | 254 ++++++++++++++++++++++++++ build/copy_info_plist.py | 29 +-- build/dart/tools/dart_package_name.py | 25 ++- build/dart/tools/dart_pkg.py | 48 ++--- build/generate_coverage.py | 205 +++++++++++---------- build/get_concurrent_jobs.py | 51 +++--- build/git_revision.py | 12 +- build/zip.py | 2 +- ci/deps_parser.py | 42 ++--- ci/firebase_testlab.py | 27 +-- ci/licenses_golden/licenses_flutter | 1 + ci/lint.sh | 8 + impeller/tools/build_metal_library.py | 86 ++++----- impeller/tools/check_licenses.py | 64 +++---- impeller/tools/xxd.py | 65 ++++--- tools/gn | 73 ++++---- 16 files changed, 632 insertions(+), 360 deletions(-) create mode 100644 .pylintrc diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000000000..431b9e51c99f2 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,254 @@ +[MASTER] + +# Specify a configuration file. +#rcfile= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Add files or directories to the blocklist. They should be base names, not +# paths. +ignore=CVS,.git,out + +# Pickle collected data for later comparisons. +persistent=yes + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +load-plugins= + pylint_quotes + + +# Configure quote preferences. +string-quote = single-avoid-escape +triple-quote = double +docstring-quote = double + + +[MESSAGES CONTROL] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time. +#enable= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). +disable= + exec-used, + fixme, + missing-class-docstring, + missing-function-docstring, + missing-module-docstring, + too-few-public-methods, + too-many-branches, + too-many-lines, + too-many-return-statements, + too-many-statements, + + +[REPORTS] + +# Set the output format. Available formats are text, parseable, colorized, msvs +# (visual studio) and html +output-format=text + +# Put messages in a separate file for each module / package specified on the +# command line instead of printing them on stdout. Reports (if any) will be +# written in a file name "pylint_global.[txt|html]". +files-output=no + +# Tells whether to display a full report or only the messages +# CHANGED: +reports=no + +# Activate the evaluation score. +score=no + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + + +[VARIABLES] + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# A regular expression matching the beginning of the name of dummy variables +# (i.e. not used). 
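+# For example, with the value below, names beginning with '_' or 'dummy'
+# (such as '_' itself, or a hypothetical 'dummy_arg') are treated as
+# intentionally unused and are not flagged.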
+dummy-variables-rgx=_|dummy + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid to define new builtins when possible. +additional-builtins= + + +[TYPECHECK] + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# List of classes names for which member attributes should not be checked +# (useful for classes with attributes dynamically set). +ignored-classes=SQLObject,twisted.internet.reactor,hashlib,google.appengine.api.memcache + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E0201 when accessed. Python regular +# expressions are accepted. +generated-members=REQUEST,acl_users,aq_parent,multiprocessing.managers.SyncManager + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME,XXX,TODO + + +[SIMILARITIES] + +# Minimum lines number of a similarity. +min-similarity-lines=10 + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + + +[FORMAT] + +# Maximum number of characters on a single line. +# yapf is configured (in .style.yapf) to format to a line length of 80, but +# sometimes it is not successful if a comment or string literal isn't already +# well-formatted. Therefore, we use pylint to put a hard limit somewhere +# further out at a point where manual formatting should be done. +max-line-length=100 + +# Maximum number of lines in a module +max-module-lines=1000 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +# CHANGED: +indent-string=' ' + + +[BASIC] + +# List of builtins function names that should not be used, separated by a comma +bad-functions=map,filter,apply,input + +# Regular expression which should only match correct module names +module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ + +# Regular expression which should only match correct module level names +const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ + +# Regular expression which should only match correct class names +class-rgx=[A-Z_][a-zA-Z0-9]+$ + +# Regular expression which should only match correct function names +function-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct method names +method-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct instance attribute names +attr-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct argument names +argument-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct variable names +variable-rgx=[a-z_][a-z0-9_]{2,30}$ + +# Regular expression which should only match correct list comprehension / +# generator expression variable names +inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ + +# Good variable names which should always be accepted, separated by a comma +good-names=i,j,k,ex,Run,_ + +# Bad variable names which should always be refused, separated by a comma +bad-names=foo,bar,baz,toto,tutu,tata + +# Regular expression which should only match functions or classes name which do +# not require a docstring +no-docstring-rgx=__.*__ + + +[DESIGN] + +# Maximum number of arguments for function / method +max-args=5 + +# Argument names that match this expression will be ignored. 
Default to name +# with leading underscore +ignored-argument-names=_.* + +# Maximum number of locals for function / method body +max-locals=25 + +# Maximum number of return / yield for function / method body +max-returns=6 + +# Maximum number of branch for function / method body +max-branchs=12 + +# Maximum number of statements in function / method body +max-statements=50 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__,__new__,setUp + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + + +[IMPORTS] + +# Deprecated modules which should not be used, separated by a comma +deprecated-modules=regsub,string,TERMIOS,Bastion,rexec + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled) +import-graph= + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled) +ext-import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled) +int-import-graph= + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception" +overgeneral-exceptions=Exception diff --git a/build/copy_info_plist.py b/build/copy_info_plist.py index 89f67a2e23b53..b589e7ce04c1b 100644 --- a/build/copy_info_plist.py +++ b/build/copy_info_plist.py @@ -10,33 +10,34 @@ Precondition: $CWD/../../flutter is the path to the flutter engine repo. 
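+example (paths and version below are hypothetical):
+    copy_info_plist.py --source Info.plist --destination out/Info.plist --minversion=11.0
+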
-usage: copy_info_plist.py --source --destination --bitcode --minversion= +usage: copy_info_plist.py --source --destination + --bitcode --minversion= """ +import argparse +import os import subprocess -import argparse -import sys import git_revision -import os -def GetClangVersion(bitcode): +def get_clang_version(bitcode): clang_executable = str( os.path.join( - "..", "..", "buildtools", "mac-x64", "clang", "bin", "clang++" + '..', '..', 'buildtools', 'mac-x64', 'clang', 'bin', 'clang++' ) ) if bitcode: - clang_executable = "clang++" - version = subprocess.check_output([clang_executable, "--version"]) + clang_executable = 'clang++' + version = subprocess.check_output([clang_executable, '--version']) return version.splitlines()[0] def main(): parser = argparse.ArgumentParser( - description='Copies the Info.plist and adds extra fields to it like the git hash of the engine' + description='Copies the Info.plist and adds extra fields to it like the ' + 'git hash of the engine' ) parser.add_argument( @@ -61,19 +62,19 @@ def main(): args = parser.parse_args() text = open(args.source).read() - engine_path = os.path.join(os.getcwd(), "..", "..", "flutter") - revision = git_revision.GetRepositoryVersion(engine_path) + engine_path = os.path.join(os.getcwd(), '..', '..', 'flutter') + revision = git_revision.get_repository_version(engine_path) bitcode = args.bitcode is not None - clang_version = GetClangVersion(bitcode) + clang_version = get_clang_version(bitcode) text = text.format( revision=revision, clang_version=clang_version, min_version=args.minversion ) - with open(args.destination, "w") as outfile: + with open(args.destination, 'w') as outfile: outfile.write(text) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/build/dart/tools/dart_package_name.py b/build/dart/tools/dart_package_name.py index c6e245009f1b3..54446248d8113 100755 --- a/build/dart/tools/dart_package_name.py +++ b/build/dart/tools/dart_package_name.py @@ -7,21 +7,20 @@ """This script outputs the package name specified in the pubspec.yaml""" import argparse -import os import sys # TODO(johnmccutchan): Use the yaml package to parse. -def PackageName(line): - assert line.startswith("name:") - return line.split(":")[1].strip() +def package_name(line): + assert line.startswith('name:') + return line.split(':')[1].strip() def main(pubspec_file): - source_file = open(pubspec_file, "r") + source_file = open(pubspec_file, 'r') for line in source_file: - if line.startswith("name:"): - print(PackageName(line)) + if line.startswith('name:'): + print(package_name(line)) return 0 source_file.close() # Couldn't find it. 
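+  # A doctest-style sketch of the line parse above (the package name
+  # 'my_package' is purely illustrative):
+  #
+  #   >>> package_name('name: my_package\n')
+  #   'my_package'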
@@ -30,16 +29,16 @@ def main(pubspec_file): if __name__ == '__main__': parser = argparse.ArgumentParser( - description="This script outputs the package name specified in the" - "pubspec.yaml" + description='This script outputs the package name specified in the ' + 'pubspec.yaml' ) parser.add_argument( - "--pubspec", - dest="pubspec_file", - metavar="", + '--pubspec', + dest='pubspec_file', + metavar='', type=str, required=True, - help="Path to pubspec file" + help='Path to pubspec file' ) args = parser.parse_args() sys.exit(main(args.pubspec_file)) diff --git a/build/dart/tools/dart_pkg.py b/build/dart/tools/dart_pkg.py index 1a8e2785ea8c2..ef5eec91e0522 100755 --- a/build/dart/tools/dart_pkg.py +++ b/build/dart/tools/dart_pkg.py @@ -14,10 +14,10 @@ import subprocess import sys -USE_LINKS = sys.platform != "win32" +USE_LINKS = sys.platform != 'win32' DART_ANALYZE = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "dart_analyze.py" + os.path.dirname(os.path.abspath(__file__)), 'dart_analyze.py' ) @@ -38,7 +38,7 @@ def ensure_dir_exists(path): def has_pubspec_yaml(paths): for path in paths: _, filename = os.path.split(path) - if 'pubspec.yaml' == filename: + if filename == 'pubspec.yaml': return True return False @@ -47,14 +47,14 @@ def link(from_root, to_root): ensure_dir_exists(os.path.dirname(to_root)) try: os.unlink(to_root) - except OSError as e: - if e.errno == errno.ENOENT: + except OSError as err: + if err.errno == errno.ENOENT: pass try: os.symlink(from_root, to_root) - except OSError as e: - if e.errno == errno.EEXIST: + except OSError as err: + if err.errno == errno.EEXIST: pass @@ -72,7 +72,7 @@ def copy(from_root, to_root, filter_func=None): # filter_func expects paths not names, so wrap it to make them absolute. wrapped_filter = None if filter_func: - wrapped_filter = lambda name: filter_func(os.path.join(root, name)) + wrapped_filter = lambda name, rt=root: filter_func(os.path.join(rt, name)) for name in filter(wrapped_filter, files): from_path = os.path.join(root, name) @@ -101,8 +101,8 @@ def link_if_possible(from_root, to_root): def remove_if_exists(path): try: os.remove(path) - except OSError as e: - if e.errno != errno.ENOENT: + except OSError as err: + if err.errno != errno.ENOENT: raise @@ -112,7 +112,7 @@ def list_files(from_root, filter_func=None): # filter_func expects paths not names, so wrap it to make them absolute. wrapped_filter = None if filter_func: - wrapped_filter = lambda name: filter_func(os.path.join(root, name)) + wrapped_filter = lambda name, rt=root: filter_func(os.path.join(rt, name)) for name in filter(wrapped_filter, files): path = os.path.join(root, name) file_list.append(path) @@ -125,9 +125,9 @@ def remove_broken_symlink(path): return try: link_path = os.readlink(path) - except OSError as e: + except OSError as err: # Path was not a symlink. 
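+    # (os.readlink raises OSError with errno EINVAL when the path exists but
+    # is a regular file or directory rather than a symlink.)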
- if e.errno == errno.EINVAL: + if err.errno == errno.EINVAL: pass else: if not os.path.exists(link_path): @@ -144,20 +144,20 @@ def remove_broken_symlinks(root_dir): def analyze_entrypoints(dart_sdk, package_root, entrypoints): - cmd = ["python", DART_ANALYZE] - cmd.append("--dart-sdk") + cmd = ['python', DART_ANALYZE] + cmd.append('--dart-sdk') cmd.append(dart_sdk) - cmd.append("--entrypoints") + cmd.append('--entrypoints') cmd.extend(entrypoints) - cmd.append("--package-root") + cmd.append('--package-root') cmd.append(package_root) - cmd.append("--no-hints") + cmd.append('--no-hints') try: subprocess.check_output(cmd, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: + except subprocess.CalledProcessError as err: print('Failed analyzing %s' % entrypoints) - print(e.output) - return e.returncode + print(err.output) + return err.returncode return 0 @@ -248,7 +248,7 @@ def main(): target_dir = os.path.join(args.pkg_directory, args.package_name) target_packages_dir = os.path.join(target_dir, 'packages') - lib_path = os.path.join(target_dir, "lib") + lib_path = os.path.join(target_dir, 'lib') ensure_dir_exists(lib_path) mappings = {} @@ -312,9 +312,9 @@ def main(): # If any entrypoints are defined, write them to disk so that the analyzer # test can find them. - with open(args.entries_file, 'w') as f: + with open(args.entries_file, 'w') as file: for entrypoint in entrypoint_targets: - f.write(entrypoint + '\n') + file.write(entrypoint + '\n') # Write stamp file. with open(args.stamp_file, 'w'): diff --git a/build/generate_coverage.py b/build/generate_coverage.py index 3279d301af878..7cad08a76d3ec 100755 --- a/build/generate_coverage.py +++ b/build/generate_coverage.py @@ -12,41 +12,106 @@ import shutil -def GetLLVMBinDirectory(): +def get_llvm_bin_directory(): buildtool_dir = os.path.join( - os.path.dirname(os.path.realpath(__file__)), "../../buildtools" + os.path.dirname(os.path.realpath(__file__)), '../../buildtools' ) - platform_dir = "" + platform_dir = '' if sys.platform.startswith('linux'): - platform_dir = "linux-x64" + platform_dir = 'linux-x64' elif sys.platform == 'darwin': - platform_dir = "mac-x64" + platform_dir = 'mac-x64' else: - raise Exception("Unknown/Unsupported platform.") + raise Exception('Unknown/Unsupported platform.') llvm_bin_dir = os.path.abspath( - os.path.join(buildtool_dir, platform_dir, "clang/bin") + os.path.join(buildtool_dir, platform_dir, 'clang/bin') ) if not os.path.exists(llvm_bin_dir): - raise Exception("LLVM directory %s double not be located." % llvm_bin_dir) + raise Exception('LLVM directory %s double not be located.' % llvm_bin_dir) return llvm_bin_dir -def MakeDirs(new_dir): +def make_dirs(new_dir): """A wrapper around os.makedirs() that emulates "mkdir -p".""" try: os.makedirs(new_dir) - except OSError as e: - if e.errno != errno.EEXIST: + except OSError as err: + if err.errno != errno.EEXIST: raise -def RemoveIfExists(path): +def remove_if_exists(path): if os.path.isdir(path) and not os.path.islink(path): shutil.rmtree(path) elif os.path.exists(path): os.remove(path) +def collect_profiles(args): + raw_profiles = [] + binaries = [] + + # Run all unit tests and collect raw profiles. + for test in args.tests: + absolute_test_path = os.path.abspath(test) + absolute_test_dir = os.path.dirname(absolute_test_path) + test_name = os.path.basename(absolute_test_path) + + if not os.path.exists(absolute_test_path): + print('Path %s does not exist.' 
% absolute_test_path) + return -1 + + unstripped_test_path = os.path.join( + absolute_test_dir, 'exe.unstripped', test_name + ) + + if os.path.exists(unstripped_test_path): + binaries.append(unstripped_test_path) + else: + binaries.append(absolute_test_path) + + raw_profile = absolute_test_path + '.rawprofile' + + remove_if_exists(raw_profile) + + print( + 'Running test %s to gather profile.' % + os.path.basename(absolute_test_path) + ) + + test_command = [absolute_test_path] + + test_args = ' '.join(args.test_args).split() + + if test_args is not None: + test_command += test_args + + subprocess.check_call(test_command, env={'LLVM_PROFILE_FILE': raw_profile}) + + if not os.path.exists(raw_profile): + print('Could not find raw profile data for unit test run %s.' % test) + print('Did you build with the --coverage flag?') + return -1 + + raw_profiles.append(raw_profile) + + return (binaries, raw_profiles) + + +def merge_profiles(llvm_bin_dir, raw_profiles, output): + # Merge all raw profiles into a single profile. + profdata_binary = os.path.join(llvm_bin_dir, 'llvm-profdata') + + print('Merging %d raw profile(s) into single profile.' % len(raw_profiles)) + merged_profile_path = os.path.join(output, 'all.profile') + remove_if_exists(merged_profile_path) + merge_command = [profdata_binary, 'merge', '-sparse' + ] + raw_profiles + ['-o', merged_profile_path] + subprocess.check_call(merge_command) + print('Done.') + return merged_profile_path + + def main(): parser = argparse.ArgumentParser() @@ -86,59 +151,14 @@ def main(): output = os.path.abspath(args.output) - MakeDirs(output) - - generate_all_reports = args.format == "all" - - raw_profiles = [] - binaries = [] - - # Run all unit tests and collect raw profiles. - for test in args.tests: - absolute_test_path = os.path.abspath(test) - absolute_test_dir = os.path.dirname(absolute_test_path) - test_name = os.path.basename(absolute_test_path) - - if not os.path.exists(absolute_test_path): - print("Path %s does not exist." % absolute_test_path) - return -1 - - unstripped_test_path = os.path.join( - absolute_test_dir, "exe.unstripped", test_name - ) - - if os.path.exists(unstripped_test_path): - binaries.append(unstripped_test_path) - else: - binaries.append(absolute_test_path) - - raw_profile = absolute_test_path + ".rawprofile" - - RemoveIfExists(raw_profile) - - print( - "Running test %s to gather profile." % - os.path.basename(absolute_test_path) - ) - - test_command = [absolute_test_path] - - test_args = ' '.join(args.test_args).split() - - if test_args is not None: - test_command += test_args - - subprocess.check_call(test_command, env={"LLVM_PROFILE_FILE": raw_profile}) + make_dirs(output) - if not os.path.exists(raw_profile): - print("Could not find raw profile data for unit test run %s." % test) - print("Did you build with the --coverage flag?") - return -1 + generate_all_reports = args.format == 'all' - raw_profiles.append(raw_profile) + binaries, raw_profiles = collect_profiles(args) if len(raw_profiles) == 0: - print("No raw profiles could be generated.") + print('No raw profiles could be generated.') return -1 binaries_flag = [] @@ -146,63 +166,52 @@ def main(): binaries_flag.append('-object') binaries_flag.append(binary) - llvm_bin_dir = GetLLVMBinDirectory() + llvm_bin_dir = get_llvm_bin_directory() - # Merge all raw profiles into a single profile. - profdata_binary = os.path.join(llvm_bin_dir, "llvm-profdata") - - print("Merging %d raw profile(s) into single profile." 
% len(raw_profiles)) - merged_profile_path = os.path.join(output, "all.profile") - RemoveIfExists(merged_profile_path) - merge_command = [profdata_binary, "merge", "-sparse" - ] + raw_profiles + ["-o", merged_profile_path] - subprocess.check_call(merge_command) - print("Done.") + merged_profile_path = merge_profiles(llvm_bin_dir, raw_profiles, output) if not os.path.exists(merged_profile_path): - print("Could not generate or find merged profile %s." % merged_profile_path) + print('Could not generate or find merged profile %s.' % merged_profile_path) return -1 - llvm_cov_binary = os.path.join(llvm_bin_dir, "llvm-cov") - instr_profile_flag = "-instr-profile=%s" % merged_profile_path - ignore_flags = "-ignore-filename-regex=third_party|unittest|fixture" + llvm_cov_binary = os.path.join(llvm_bin_dir, 'llvm-cov') + instr_profile_flag = '-instr-profile=%s' % merged_profile_path + ignore_flags = '-ignore-filename-regex=third_party|unittest|fixture' # Generate the HTML report if specified. if generate_all_reports or args.format == 'html': - print("Generating HTML report.") - show_command = [llvm_cov_binary, "show"] + binaries_flag + [ + print('Generating HTML report.') + subprocess.check_call([llvm_cov_binary, 'show'] + binaries_flag + [ instr_profile_flag, - "-format=html", - "-output-dir=%s" % output, - "-tab-size=2", + '-format=html', + '-output-dir=%s' % output, + '-tab-size=2', ignore_flags, - ] - subprocess.check_call(show_command) - print("Done.") + ]) + print('Done.') # Generate a report summary if specified. if generate_all_reports or args.format == 'summary': - print("Generating a summary report.") - report_command = [llvm_cov_binary, "report"] + binaries_flag + [ + print('Generating a summary report.') + subprocess.check_call([llvm_cov_binary, 'report'] + binaries_flag + [ instr_profile_flag, ignore_flags, - ] - subprocess.check_call(report_command) - print("Done.") + ]) + print('Done.') # Generate a lcov summary if specified. 
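+  # (llvm-cov `export -format=lcov` emits LCOV text records such as SF:, DA:,
+  # and end_of_record on stdout; the redirect below captures them in
+  # coverage.lcov.)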
if generate_all_reports or args.format == 'lcov': - print("Generating LCOV report.") + print('Generating LCOV report.') lcov_file = os.path.join(output, 'coverage.lcov') - RemoveIfExists(lcov_file) - lcov_command = [llvm_cov_binary, "export"] + binaries_flag + [ - instr_profile_flag, - ignore_flags, - "-format=lcov", - ] + remove_if_exists(lcov_file) with open(lcov_file, 'w') as lcov_redirect: - subprocess.check_call(lcov_command, stdout=lcov_redirect) - print("Done.") + subprocess.check_call([llvm_cov_binary, 'export'] + binaries_flag + [ + instr_profile_flag, + ignore_flags, + '-format=lcov', + ], + stdout=lcov_redirect) + print('Done.') return 0 diff --git a/build/get_concurrent_jobs.py b/build/get_concurrent_jobs.py index 24445ef7ff271..3559073c66dce 100755 --- a/build/get_concurrent_jobs.py +++ b/build/get_concurrent_jobs.py @@ -28,45 +28,46 @@ UNITS = {'B': 1, 'KB': 2**10, 'MB': 2**20, 'GB': 2**30, 'TB': 2**40} +# pylint: disable=line-too-long # See https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/nf-sysinfoapi-globalmemorystatusex # and https://docs.microsoft.com/en-us/windows/win32/api/sysinfoapi/ns-sysinfoapi-memorystatusex +# pylint: enable=line-too-long class MEMORYSTATUSEX(ctypes.Structure): _fields_ = [ - ("dwLength", ctypes.c_ulong), - ("dwMemoryLoad", ctypes.c_ulong), - ("ullTotalPhys", ctypes.c_ulonglong), - ("ullAvailPhys", ctypes.c_ulonglong), - ("ullTotalPageFile", ctypes.c_ulonglong), - ("ullAvailPageFile", ctypes.c_ulonglong), - ("ullTotalVirtual", ctypes.c_ulonglong), - ("ullAvailVirtual", ctypes.c_ulonglong), - ("sullAvailExtendedVirtual", ctypes.c_ulonglong), + ('dwLength', ctypes.c_ulong), + ('dwMemoryLoad', ctypes.c_ulong), + ('ullTotalPhys', ctypes.c_ulonglong), + ('ullAvailPhys', ctypes.c_ulonglong), + ('ullTotalPageFile', ctypes.c_ulonglong), + ('ullAvailPageFile', ctypes.c_ulonglong), + ('ullTotalVirtual', ctypes.c_ulonglong), + ('ullAvailVirtual', ctypes.c_ulonglong), + ('sullAvailExtendedVirtual', ctypes.c_ulonglong), ] -def GetTotalMemory(): +def get_total_memory(): if sys.platform in ('win32', 'cygwin'): stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX)) success = ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) return stat.ullTotalPhys if success else 0 - elif sys.platform.startswith('linux'): - if os.path.exists("/proc/meminfo"): - with open("/proc/meminfo") as meminfo: + if sys.platform.startswith('linux'): + if os.path.exists('/proc/meminfo'): + with open('/proc/meminfo') as meminfo: memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') for line in meminfo: match = memtotal_re.match(line) if match: return float(match.group(1)) * 2**10 - elif sys.platform == 'darwin': + if sys.platform == 'darwin': try: return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) - except Exception: + except: # pylint: disable=bare-except return 0 - else: - return 0 + return 0 -def ParseSize(string): +def parse_size(string): i = next(i for (i, c) in enumerate(string) if not c.isdigit()) number = string[:i].strip() unit = string[i:].strip() @@ -78,12 +79,12 @@ class ParseSizeAction(argparse.Action): def __call__(self, parser, args, values, option_string=None): sizes = getattr(args, self.dest, []) for value in values: - (k, v) = value.split('=', 1) - sizes.append((k, ParseSize(v))) + (k, val) = value.split('=', 1) + sizes.append((k, parse_size(val))) setattr(args, self.dest, sizes) -def Main(): +def main(): parser = argparse.ArgumentParser() parser.add_argument( '--memory-per-job', @@ -95,13 +96,13 @@ def Main(): ) 
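+  # parse_size (used by ParseSizeAction above and --reserve-memory below)
+  # understands the UNITS suffixes, e.g. parse_size('500MB') == 500 * 2**20.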
parser.add_argument( '--reserve-memory', - type=ParseSize, + type=parse_size, default=0, help='The amount of memory to be held out of the amount for jobs to use.' ) args = parser.parse_args() - total_memory = GetTotalMemory() + total_memory = get_total_memory() # Ensure the total memory used in the calculation below is at least 0 mem_total_bytes = max(0, total_memory - args.reserve_memory) @@ -109,7 +110,7 @@ def Main(): # Ensure the number of cpus used in the calculation below is at least 1 try: cpu_cap = multiprocessing.cpu_count() - except: + except: # pylint: disable=bare-except cpu_cap = 1 concurrent_jobs = {} @@ -126,4 +127,4 @@ def Main(): if __name__ == '__main__': - sys.exit(Main()) + sys.exit(main()) diff --git a/build/git_revision.py b/build/git_revision.py index a79119faff8c1..d84886d8b8427 100755 --- a/build/git_revision.py +++ b/build/git_revision.py @@ -12,18 +12,18 @@ import argparse -def IsWindows(): +def is_windows(): os_id = sys.platform return os_id.startswith('win32') or os_id.startswith('cygwin') -def GetRepositoryVersion(repository): - "Returns the Git HEAD for the supplied repository path as a string." +def get_repository_version(repository): + 'Returns the Git HEAD for the supplied repository path as a string.' if not os.path.exists(repository): - raise IOError("path doesn't exist") + raise IOError('path does not exist') git = 'git' - if IsWindows(): + if is_windows(): git = 'git.bat' version = subprocess.check_output([ git, @@ -48,7 +48,7 @@ def main(): args = parser.parse_args() repository = os.path.abspath(args.repository) - version = GetRepositoryVersion(repository) + version = get_repository_version(repository) print(version.strip()) return 0 diff --git a/build/zip.py b/build/zip.py index a56e11d784ec8..9b36dd2e537c1 100755 --- a/build/zip.py +++ b/build/zip.py @@ -14,7 +14,7 @@ def _zip_dir(path, zip_file, prefix): path = path.rstrip('/\\') - for root, dirs, files in os.walk(path): + for root, _, files in os.walk(path): for file in files: if os.path.islink(os.path.join(root, file)): add_symlink( diff --git a/ci/deps_parser.py b/ci/deps_parser.py index c8944a685671a..4e46c8d9cb75c 100644 --- a/ci/deps_parser.py +++ b/ci/deps_parser.py @@ -19,35 +19,35 @@ # Used in parsing the DEPS file. -class VarImpl(object): +class VarImpl: _env_vars = { - "host_cpu": "x64", - "host_os": "linux", + 'host_cpu': 'x64', + 'host_os': 'linux', } def __init__(self, local_scope): self._local_scope = local_scope - def Lookup(self, var_name): + def lookup(self, var_name): """Implements the Var syntax.""" - if var_name in self._local_scope.get("vars", {}): - return self._local_scope["vars"][var_name] + if var_name in self._local_scope.get('vars', {}): + return self._local_scope['vars'][var_name] # Inject default values for env variables if var_name in self._env_vars: return self._env_vars[var_name] - raise Exception("Var is not defined: %s" % var_name) + raise Exception('Var is not defined: %s' % var_name) -def ParseDepsFile(deps_file): +def parse_deps_file(deps_file): local_scope = {} var = VarImpl(local_scope) global_scope = { - 'Var': var.Lookup, + 'Var': var.lookup, 'deps_os': {}, } # Read the content. - with open(deps_file, 'r') as fp: - deps_content = fp.read() + with open(deps_file, 'r') as file: + deps_content = file.read() # Eval the content. exec(deps_content, global_scope, local_scope) @@ -55,22 +55,22 @@ def ParseDepsFile(deps_file): # Extract the deps and filter. 
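+  # An illustrative DEPS entry that survives the filter below is a plain URL
+  # string such as 'https://chromium.googlesource.com/foo.git@<revision>'
+  # (hypothetical); dict-valued entries (cipd packages) are skipped.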
deps = local_scope.get('deps', {}) filtered_deps = [] - for k, v in deps.items(): + for val in deps.values(): # We currently do not support packages or cipd which are represented # as dictionaries. - if isinstance(v, str): - filtered_deps.append(v) + if isinstance(val, str): + filtered_deps.append(val) return filtered_deps -def WriteManifest(deps, manifest_file): +def write_manifest(deps, manifest_file): print('\n'.join(sorted(deps))) with open(manifest_file, 'w') as manifest: manifest.write('\n'.join(sorted(deps))) -def ParseArgs(args): +def parse_args(args): args = args[1:] parser = argparse.ArgumentParser( description='A script to flatten a gclient DEPS file.' @@ -94,12 +94,12 @@ def ParseArgs(args): return parser.parse_args(args) -def Main(argv): - args = ParseArgs(argv) - deps = ParseDepsFile(args.deps) - WriteManifest(deps, args.output) +def main(argv): + args = parse_args(argv) + deps = parse_deps_file(args.deps) + write_manifest(deps, args.output) return 0 if __name__ == '__main__': - sys.exit(Main(sys.argv)) + sys.exit(main(sys.argv)) diff --git a/ci/firebase_testlab.py b/ci/firebase_testlab.py index c36d9c07d44a7..36b5d419b6471 100755 --- a/ci/firebase_testlab.py +++ b/ci/firebase_testlab.py @@ -11,14 +11,14 @@ import subprocess import sys +BUCKET = 'gs://flutter_firebase_testlab' script_dir = os.path.dirname(os.path.realpath(__file__)) buildroot_dir = os.path.abspath(os.path.join(script_dir, '..', '..')) out_dir = os.path.join(buildroot_dir, 'out') -bucket = 'gs://flutter_firebase_testlab' error_re = re.compile(r'[EF]/flutter.+') -def RunFirebaseTest(apk, results_dir): +def run_firebase_test(apk, results_dir): # game-loop tests are meant for OpenGL apps. # This type of test will give the application a handle to a file, and # we'll write the timeline JSON to that file. @@ -40,7 +40,7 @@ def RunFirebaseTest(apk, results_dir): '--timeout', '2m', '--results-bucket', - bucket, + BUCKET, '--results-dir', results_dir, '--device', @@ -53,10 +53,10 @@ def RunFirebaseTest(apk, results_dir): return process -def CheckLogcat(results_dir): +def check_logcat(results_dir): logcat = subprocess.check_output([ 'gsutil', 'cat', - '%s/%s/*/logcat' % (bucket, results_dir) + '%s/%s/*/logcat' % (BUCKET, results_dir) ]) if not logcat: sys.exit(1) @@ -68,13 +68,13 @@ def CheckLogcat(results_dir): sys.exit(1) -def CheckTimeline(results_dir): - du = subprocess.check_output([ +def check_timeline(results_dir): + gsutil_du = subprocess.check_output([ 'gsutil', 'du', '%s/%s/*/game_loop_results/results_scenario_0.json' % - (bucket, results_dir) + (BUCKET, results_dir) ]).strip() - if du == '0': + if gsutil_du == '0': print('Failed to produce a timeline.') sys.exit(1) @@ -107,15 +107,16 @@ def main(): cwd=script_dir).strip() results = [] + apk = None for apk in apks: results_dir = '%s/%s/%s' % ( os.path.basename(apk), git_revision, args.build_id ) - process = RunFirebaseTest(apk, results_dir) + process = run_firebase_test(apk, results_dir) results.append((results_dir, process)) for results_dir, process in results: - for line in iter(process.stdout.readline, ""): + for line in iter(process.stdout.readline, ''): print(line.strip()) return_code = process.wait() if return_code != 0: @@ -123,11 +124,11 @@ def main(): sys.exit(return_code) print('Checking logcat for %s' % results_dir) - CheckLogcat(results_dir) + check_logcat(results_dir) # scenario_app produces a timeline, but the android image test does not. 
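+    # (e.g. an APK named 'scenario_app.apk', an illustrative name, gets its
+    # timeline checked; other test APKs skip the check.)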
if 'scenario' in apk: print('Checking timeline for %s' % results_dir) - CheckTimeline(results_dir) + check_timeline(results_dir) return 0 diff --git a/ci/licenses_golden/licenses_flutter b/ci/licenses_golden/licenses_flutter index 36079fa1b8430..622b015fb6608 100644 --- a/ci/licenses_golden/licenses_flutter +++ b/ci/licenses_golden/licenses_flutter @@ -12,6 +12,7 @@ LIBRARY: txt ORIGIN: ../../../flutter/LICENSE TYPE: LicenseType.bsd FILE: ../../../flutter/.clang-tidy +FILE: ../../../flutter/.pylintrc FILE: ../../../flutter/.style.yapf FILE: ../../../flutter/DEPS FILE: ../../../flutter/assets/asset_manager.cc diff --git a/ci/lint.sh b/ci/lint.sh index 6355e637fd1e1..d42defb51f990 100755 --- a/ci/lint.sh +++ b/ci/lint.sh @@ -29,6 +29,7 @@ function follow_links() ( SCRIPT_DIR=$(follow_links "$(dirname -- "${BASH_SOURCE[0]}")") SRC_DIR="$(cd "$SCRIPT_DIR/../.."; pwd -P)" +FLUTTER_DIR="$(cd "$SCRIPT_DIR/.."; pwd -P)" DART_BIN="${SRC_DIR}/third_party/dart/tools/sdks/dart-sdk/bin" DART="${DART_BIN}/dart" @@ -43,3 +44,10 @@ cd "$SCRIPT_DIR" "$SRC_DIR/flutter/tools/clang_tidy/bin/main.dart" \ --src-dir="$SRC_DIR" \ "$@" + +cd "$FLUTTER_DIR" +pylint-2.7 --rcfile=.pylintrc \ + "build/" \ + "ci/" \ + "impeller/" \ + "tools/gn" diff --git a/impeller/tools/build_metal_library.py b/impeller/tools/build_metal_library.py index d5ae22d7c4707..c23ac736f51ea 100644 --- a/impeller/tools/build_metal_library.py +++ b/impeller/tools/build_metal_library.py @@ -10,7 +10,7 @@ import subprocess -def MakeDirectories(path): +def make_directories(path): try: os.makedirs(path) except OSError as exc: @@ -20,91 +20,91 @@ def MakeDirectories(path): raise -def Main(): +def main(): parser = argparse.ArgumentParser() parser.add_argument( - "--output", + '--output', type=str, required=True, - help="The location to generate the Metal library to." + help='The location to generate the Metal library to.' ) parser.add_argument( - "--depfile", type=str, required=True, help="The location of the depfile." + '--depfile', type=str, required=True, help='The location of the depfile.' ) parser.add_argument( - "--source", + '--source', type=str, - action="append", + action='append', required=True, - help="The source file to compile. Can be specified multiple times." + help='The source file to compile. Can be specified multiple times.' ) parser.add_argument( - "--optimize", - action="store_true", + '--optimize', + action='store_true', default=False, - help="If available optimizations must be applied to the compiled Metal sources." + help='If available optimizations must be applied to the compiled Metal sources.' ) parser.add_argument( - "--platform", + '--platform', required=True, - choices=["mac", "ios", "ios-simulator"], - help="Select the platform." + choices=['mac', 'ios', 'ios-simulator'], + help='Select the platform.' ) args = parser.parse_args() - MakeDirectories(os.path.dirname(args.depfile)) + make_directories(os.path.dirname(args.depfile)) command = [ - "xcrun", + 'xcrun', ] - if args.platform == "mac": + if args.platform == 'mac': command += [ - "-sdk", - "macosx", + '-sdk', + 'macosx', ] - elif args.platform == "ios": + elif args.platform == 'ios': command += [ - "-sdk", - "iphoneos", + '-sdk', + 'iphoneos', ] - elif args.platform == "ios-simulator": + elif args.platform == 'ios-simulator': command += [ - "-sdk", - "iphonesimulator", + '-sdk', + 'iphonesimulator', ] command += [ - "metal", + 'metal', # These warnings are from generated code and would make no sense to the GLSL # author. 
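+      # (An illustrative case: generated Metal code may declare temporaries
+      # that a given shader variant never reads.)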
- "-Wno-unused-variable", + '-Wno-unused-variable', # Both user and system header will be tracked. - "-MMD", - "-MF", + '-MMD', + '-MF', args.depfile, - "-o", + '-o', args.output, ] # The Metal standard must match the specification in impellerc. - if args.platform == "mac": + if args.platform == 'mac': command += [ - "--std=macos-metal1.2", + '--std=macos-metal1.2', ] - elif args.platform == "ios": + elif args.platform == 'ios': command += [ - "--std=ios-metal1.2", - "-mios-version-min=10.0", + '--std=ios-metal1.2', + '-mios-version-min=10.0', ] if args.optimize: command += [ # Like -Os (and thus -O2), but reduces code size further. - "-Oz", + '-Oz', # Allow aggressive, lossy floating-point optimizations. - "-ffast-math", + '-ffast-math', ] else: command += [ @@ -112,12 +112,12 @@ def Main(): # debugging but should be removed from release builds. # TODO(chinmaygarde): Use -frecord-sources when CI upgrades to # Xcode 13. - "-MO", + '-MO', # Assist the sampling profiler. - "-gline-tables-only", - "-g", + '-gline-tables-only', + '-g', # Optimize for debuggability. - "-Og", + '-Og', ] command += args.source @@ -127,5 +127,5 @@ def Main(): if __name__ == '__main__': if sys.platform != 'darwin': - raise Exception("This script only runs on Mac") - Main() + raise Exception('This script only runs on Mac') + main() diff --git a/impeller/tools/check_licenses.py b/impeller/tools/check_licenses.py index a366099404fa3..45607b7fb7f95 100644 --- a/impeller/tools/check_licenses.py +++ b/impeller/tools/check_licenses.py @@ -6,44 +6,44 @@ import os -def ContainsLicenseBlock(source_file): +def contains_license_block(source_file): # This check is somewhat easier than in the engine because all sources need to # have the same license. - py_license = '''# Copyright 2013 The Flutter Authors. All rights reserved. + py_license = """# Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file.''' - c_license = py_license.replace("#", "//") +# found in the LICENSE file.""" + c_license = py_license.replace('#', '//') # Make sure we don't read the entire file into memory. read_size = (max(len(py_license), len(c_license))) - for license in [c_license, py_license]: + for lic in [c_license, py_license]: with open(source_file) as source: - if source.read(read_size).startswith(license): + if source.read(read_size).startswith(lic): return True return False -def IsSourceFile(path): +def is_source_file(path): known_extensions = [ - ".cc", - ".cpp", - ".c", - ".h", - ".hpp", - ".py", - ".sh", - ".gn", - ".gni", - ".glsl", - ".sl.h", - ".vert", - ".frag", - ".tesc", - ".tese", - ".yaml", - ".dart", + '.cc', + '.cpp', + '.c', + '.h', + '.hpp', + '.py', + '.sh', + '.gn', + '.gni', + '.glsl', + '.sl.h', + '.vert', + '.frag', + '.tesc', + '.tese', + '.yaml', + '.dart', ] for extension in known_extensions: if os.path.basename(path).endswith(extension): @@ -52,29 +52,29 @@ def IsSourceFile(path): # Checks that all source files have the same license preamble. -def Main(): +def main(): parser = argparse.ArgumentParser() parser.add_argument( - "--source-root", type=str, required=True, help="The source root." + '--source-root', type=str, required=True, help='The source root.' 
) args = parser.parse_args() - assert (os.path.exists(args.source_root)) + assert os.path.exists(args.source_root) source_files = set() - for root, dirs, files in os.walk(os.path.abspath(args.source_root)): + for root, _, files in os.walk(os.path.abspath(args.source_root)): for file in files: file_path = os.path.join(root, file) - if IsSourceFile(file_path): + if is_source_file(file_path): source_files.add(file_path) for source_file in source_files: - if not ContainsLicenseBlock(source_file): + if not contains_license_block(source_file): raise Exception( - "Could not find valid license block in source ", source_file + 'Could not find valid license block in source ', source_file ) if __name__ == '__main__': - Main() + main() diff --git a/impeller/tools/xxd.py b/impeller/tools/xxd.py index 50d2fdd926d12..6a922103925bd 100644 --- a/impeller/tools/xxd.py +++ b/impeller/tools/xxd.py @@ -2,15 +2,12 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. -import sys - import argparse import errno import os -import struct -def MakeDirectories(path): +def make_directories(path): try: os.makedirs(path) except OSError as exc: @@ -22,77 +19,77 @@ def MakeDirectories(path): # Dump the bytes of file into a C translation unit. # This can be used to embed the file contents into a binary. -def Main(): +def main(): parser = argparse.ArgumentParser() parser.add_argument( - "--symbol-name", + '--symbol-name', type=str, required=True, - help="The name of the symbol referencing the data." + help='The name of the symbol referencing the data.' ) parser.add_argument( - "--output-header", + '--output-header', type=str, required=True, - help="The header file containing the symbol reference." + help='The header file containing the symbol reference.' ) parser.add_argument( - "--output-source", + '--output-source', type=str, required=True, - help="The source file containing the file bytes." + help='The source file containing the file bytes.' ) parser.add_argument( - "--source", + '--source', type=str, required=True, - help="The source file whose contents to embed in the output source file." + help='The source file whose contents to embed in the output source file.' 
) args = parser.parse_args() - assert (os.path.exists(args.source)) + assert os.path.exists(args.source) output_header = os.path.abspath(args.output_header) output_source = os.path.abspath(args.output_source) output_header_basename = output_header[output_header.rfind('/') + 1:] - MakeDirectories(os.path.dirname(output_header)) - MakeDirectories(os.path.dirname(output_source)) + make_directories(os.path.dirname(output_header)) + make_directories(os.path.dirname(output_source)) - with open(args.source, "rb") as source, open(output_source, "w") as output: + with open(args.source, 'rb') as source, open(output_source, 'w') as output: data_len = 0 - output.write(f"#include \"{output_header_basename}\"\n") - output.write(f"const unsigned char impeller_{args.symbol_name}_data[] =\n") - output.write("{\n") + output.write(f'#include "{output_header_basename}"\n') + output.write(f'const unsigned char impeller_{args.symbol_name}_data[] =\n') + output.write('{\n') while True: byte = source.read(1) if not byte: break data_len += 1 - output.write(f"{ord(byte)},") - output.write("};\n") + output.write(f'{ord(byte)},') + output.write('};\n') output.write( - f"const unsigned long impeller_{args.symbol_name}_length = {data_len};\n" + f'const unsigned long impeller_{args.symbol_name}_length = {data_len};\n' ) - with open(output_header, "w") as output: - output.write("#pragma once\n") - output.write("#ifdef __cplusplus\n") - output.write("extern \"C\" {\n") - output.write("#endif\n\n") + with open(output_header, 'w') as output: + output.write('#pragma once\n') + output.write('#ifdef __cplusplus\n') + output.write('extern "C" {\n') + output.write('#endif\n\n') output.write( - f"extern const unsigned char impeller_{args.symbol_name}_data[];\n" + f'extern const unsigned char impeller_{args.symbol_name}_data[];\n' ) output.write( - f"extern const unsigned long impeller_{args.symbol_name}_length;\n\n" + f'extern const unsigned long impeller_{args.symbol_name}_length;\n\n' ) - output.write("#ifdef __cplusplus\n") - output.write("}\n") - output.write("#endif\n") + output.write('#ifdef __cplusplus\n') + output.write('}\n') + output.write('#endif\n') if __name__ == '__main__': - Main() + main() diff --git a/tools/gn b/tools/gn index 0c0d078a843d9..65c0b0146cf0a 100755 --- a/tools/gn +++ b/tools/gn @@ -24,8 +24,6 @@ def get_out_dir(args): else: target_dir = ['host'] - runtime_mode = args.runtime_mode - target_dir.append(args.runtime_mode) if args.simulator: @@ -81,7 +79,7 @@ def to_command_line(gn_args): """ def merge(key, value): - if type(value) is bool: + if isinstance(value, bool): return '%s=%s' % (key, 'true' if value else 'false') return '%s="%s"' % (key, value) @@ -94,6 +92,7 @@ def cpu_for_target_arch(arch): return 'x86' if arch in ['x64', 'arm64', 'simarm64', 'simdbc64', 'armsimdbc64']: return 'x64' + return None def is_host_build(args): @@ -115,7 +114,7 @@ def can_use_prebuilt_dart(args): # platform agnostic artifacts (e.g. kernel snapshots), and a Dart SDK # targeting Fuchsia is not needed. So, it is safe to say that the prebuilt # Dart SDK in a Fuchsia build is the host prebuilt Dart SDK. 
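+  # (For example, a Windows host building host-only artifacts looks for the
+  # prebuilt SDK under flutter/prebuilts/windows-x64 below.)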
- if args.target_os == None or args.target_os == 'fuchsia': + if args.target_os is None or args.target_os == 'fuchsia': if sys.platform.startswith(('cygwin', 'win')): prebuilt = 'windows-x64' elif sys.platform == 'darwin': @@ -130,9 +129,9 @@ def can_use_prebuilt_dart(args): prebuilt = 'windows-%s' % args.windows_cpu prebuilts_dir = None - if prebuilt != None: + if prebuilt is not None: prebuilts_dir = os.path.join(SRC_ROOT, 'flutter', 'prebuilts', prebuilt) - return prebuilts_dir != None and os.path.isdir(prebuilts_dir) + return prebuilts_dir is not None and os.path.isdir(prebuilts_dir) # Returns the host machine operating system. @@ -148,28 +147,28 @@ def get_host_os(): # Rosetta. I.e., python3 is an x64 executable and we're on an arm64 Mac. def is_rosetta(): if platform.system() == 'Darwin': - p = subprocess.Popen(['sysctl', '-in', 'sysctl.proc_translated'], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - output, _ = p.communicate() + proc = subprocess.Popen(['sysctl', '-in', 'sysctl.proc_translated'], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + output, _ = proc.communicate() return output.decode('utf-8').strip() == '1' return False # Returns the host machine CPU architecture. -def get_host_cpu(args): +def get_host_cpu(): # If gn itself is running under Rosetta on an arm64 Mac, platform.machine() # will return x86_64; instead return the underlying host architecture. if is_rosetta(): return 'arm64' - m = platform.machine() - if m in ['aarch64', 'arm64']: + machine = platform.machine() + if machine in ['aarch64', 'arm64']: return 'arm64' - if m in ['x86_64', 'AMD64', 'x64']: + if machine in ['x86_64', 'AMD64', 'x64']: return 'x64' - if m in ['i686', 'i386', 'x86']: + if machine in ['i686', 'i386', 'x86']: return 'x86' - raise Exception('Unknown CPU architecture: %s' % m) + raise Exception('Unknown CPU architecture: %s' % machine) # Returns the target CPU architecture. @@ -182,8 +181,7 @@ def get_target_cpu(args): if args.target_os == 'ios': if args.simulator: return args.simulator_cpu - else: - return args.ios_cpu + return args.ios_cpu if args.target_os == 'mac': return args.mac_cpu if args.target_os == 'linux': @@ -276,7 +274,7 @@ def to_gn_args(args): # Set OS, CPU arch for host or target build. if is_host_build(args): gn_args['host_os'] = get_host_os() - gn_args['host_cpu'] = get_host_cpu(args) + gn_args['host_cpu'] = get_host_cpu() gn_args['target_os'] = gn_args['host_os'] gn_args['target_cpu'] = get_target_cpu(args) gn_args['dart_target_arch'] = gn_args['target_cpu'] @@ -394,7 +392,8 @@ def to_gn_args(args): else: if args.goma: print( - "GOMA usage was specified but can't be found, falling back to local builds. Set the GOMA_DIR environment variable to fix GOMA." + "GOMA usage was specified but can't be found, falling back to local " + 'builds. Set the GOMA_DIR environment variable to fix GOMA.' ) gn_args['use_goma'] = False gn_args['goma_dir'] = None @@ -411,7 +410,7 @@ def to_gn_args(args): # Bitcode enabled builds using the current version of the toolchain leak # C++ symbols decorated with the availability attribute. Disable these # attributes in release modes till the toolchain is updated. 
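+  # (i.e. Clang's __attribute__((availability(...))) annotations; the macro
+  # stays enabled for non-release runtime modes.)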
- gn_args['skia_enable_api_available_macro'] = args.runtime_mode != "release" + gn_args['skia_enable_api_available_macro'] = args.runtime_mode != 'release' if sys.platform == 'darwin' and args.target_os not in ['android', 'fuchsia', 'wasm']: @@ -519,8 +518,8 @@ def to_gn_args(args): elif args.target_os == 'fuchsia': # Read the default target api level from a file so we can update it with a roller with open(os.path.join(os.path.dirname(__file__), - 'fuchsia/target_api_level')) as f: - gn_args['fuchsia_target_api_level'] = int(f.read().strip()) + 'fuchsia/target_api_level')) as file: + gn_args['fuchsia_target_api_level'] = int(file.read().strip()) # Flags for Dart features: if args.use_mallinfo2: @@ -557,8 +556,9 @@ def parse_args(args): '--dart-debug', default=False, action='store_true', - help='Enables assertions in the Dart VM. ' + - 'Does not affect optimization levels. If you need to disable optimizations in Dart, use --full-dart-debug' + help='Enables assertions in the Dart VM. Does not affect optimization ' + 'levels. If you need to disable optimizations in Dart, use ' + '--full-dart-debug' ) parser.add_argument( '--no-dart-version-git-info', @@ -570,8 +570,8 @@ def parse_args(args): '--full-dart-debug', default=False, action='store_true', - help='Implies --dart-debug ' + - 'and also disables optimizations in the Dart VM making it easier to step through VM code in the debugger.' + help='Implies --dart-debug and also disables optimizations in the Dart ' + 'VM making it easier to step through VM code in the debugger.' ) parser.add_argument( @@ -635,12 +635,12 @@ def parse_args(args): parser.add_argument( '--xcode-symlinks', action='store_true', - help='Set to true for builds targeting macOS or iOS ' + - 'when using goma. If set, symlinks to the Xcode provided sysroot and SDKs will be created in a generated ' - + - 'folder, which will avoid potential backend errors in Fuchsia RBE. Instead of specifying the flag on each invocation ' - + - 'the FLUTTER_GOMA_CREATE_XCODE_SYMLINKS environment variable may be set to 1 to achieve the same effect.' + help='Set to true for builds targeting macOS or iOS when using goma. If ' + 'set, symlinks to the Xcode provided sysroot and SDKs will be ' + 'created in a generated folder, which will avoid potential backend ' + 'errors in Fuchsia RBE. Instead of specifying the flag on each ' + 'invocation the FLUTTER_GOMA_CREATE_XCODE_SYMLINKS environment ' + 'variable may be set to 1 to achieve the same effect.' ) parser.add_argument( '--no-xcode-symlinks', @@ -856,7 +856,8 @@ def parse_args(args): '--trace-gn', default=False, action='store_true', - help='Write a GN trace log (gn_trace.json) in the Chromium tracing format in the build directory.' + help='Write a GN trace log (gn_trace.json) in the Chromium tracing ' + 'format in the build directory.' ) # Verbose output. @@ -901,11 +902,11 @@ def main(argv): if args.verbose: command.append('-v') - print("Generating GN files in: %s" % out_dir) + print('Generating GN files in: %s' % out_dir) try: gn_call_result = subprocess.call(command, cwd=SRC_ROOT) except subprocess.CalledProcessError as exc: - print("Failed to generate gn files: ", exc.returncode, exc.output) + print('Failed to generate gn files: ', exc.returncode, exc.output) sys.exit(1) return gn_call_result