From a5a1c3b07f4f1f70817c4a2784b244ef6e8f5316 Mon Sep 17 00:00:00 2001 From: Mark Mentovai Date: Mon, 27 Apr 2020 09:43:35 -0400 Subject: [PATCH] =?UTF-8?q?Add=20.style.yapf=20and=20reformat=20according?= =?UTF-8?q?=20to=20yapf,=20using=20=E2=80=9Cgoogle=E2=80=9D=20style?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit % yapf --in-place $(git ls-files **/*.py) % yapf --version yapf 0.30.0 Note that this is not using the “chromium” yapf style because Chromium is moving to PEP-8. https://groups.google.com/a/chromium.org/d/topic/chromium-dev/RcJgJdkNIdg yapf 0.30.0 no longer recognizes “chromium” as a style option. https://github.com/google/yapf/commit/22ef70f3c497436adf59934b2ffa84ce6f962b88 Since this is a mass reformatting, it might as well move things all the way into the future all at once. This uses the “google” style, which is a superset of “pep8”. Change-Id: Ifa37371079ea1859e4afe8e31d2eef2cfd7af384 Reviewed-on: https://chromium-review.googlesource.com/c/crashpad/crashpad/+/2165637 Commit-Queue: Mark Mentovai Reviewed-by: Scott Graham --- .style.yapf | 16 + build/gyp_crashpad.py | 121 +-- build/gyp_crashpad_android.py | 73 +- build/install_linux_sysroot.py | 56 +- build/ios/convert_gn_xcodeproj.py | 376 +++++----- build/ios/setup-ios-gn.py | 524 +++++++------ build/run_fuchsia_qemu.py | 161 ++-- build/run_tests.py | 1012 +++++++++++++------------- doc/support/generate_doxygen.py | 29 +- infra/config/PRESUBMIT.py | 8 +- snapshot/win/end_to_end_test.py | 794 ++++++++++---------- third_party/fuchsia/runner.py | 3 + util/mach/mig.py | 3 +- util/mach/mig_fix.py | 38 +- util/mach/mig_gen.py | 47 +- util/net/generate_test_server_key.py | 41 +- 16 files changed, 1687 insertions(+), 1615 deletions(-) create mode 100644 .style.yapf mode change 100644 => 100755 third_party/fuchsia/runner.py diff --git a/.style.yapf b/.style.yapf new file mode 100644 index 00000000..8aaf9b62 --- /dev/null +++ b/.style.yapf @@ -0,0 +1,16 @@ +# 
Copyright 2020 The Crashpad Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[style] +based_on_style = google diff --git a/build/gyp_crashpad.py b/build/gyp_crashpad.py index 84856606..41ab4b94 100755 --- a/build/gyp_crashpad.py +++ b/build/gyp_crashpad.py @@ -19,83 +19,84 @@ import sys def ChooseDependencyPath(local_path, external_path): - """Chooses between a dependency located at local path and an external path. + """Chooses between a dependency located at local path and an external path. - The local path, used in standalone builds, is preferred. If it is not present - but the external path is, the external path will be used. If neither path is - present, the local path will be used, so that error messages uniformly refer - to the local path. + The local path, used in standalone builds, is preferred. If it is not + present but the external path is, the external path will be used. If neither + path is present, the local path will be used, so that error messages + uniformly refer to the local path. - Args: - local_path: The preferred local path to use for a standalone build. - external_path: The external path to fall back to. + Args: + local_path: The preferred local path to use for a standalone build. + external_path: The external path to fall back to. - Returns: - A 2-tuple. The first element is None or 'external', depending on whether - local_path or external_path was chosen. The second element is the chosen - path. 
- """ - if os.path.exists(local_path) or not os.path.exists(external_path): - return (None, local_path) - return ('external', external_path) + Returns: + A 2-tuple. The first element is None or 'external', depending on whether + local_path or external_path was chosen. The second element is the chosen + path. + """ + if os.path.exists(local_path) or not os.path.exists(external_path): + return (None, local_path) + return ('external', external_path) script_dir = os.path.dirname(__file__) -crashpad_dir = (os.path.dirname(script_dir) if script_dir not in ('', os.curdir) - else os.pardir) +crashpad_dir = (os.path.dirname(script_dir) + if script_dir not in ('', os.curdir) else os.pardir) -sys.path.insert(0, - ChooseDependencyPath(os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', - 'pylib'), - os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', - 'pylib'))[1]) +sys.path.insert( + 0, + ChooseDependencyPath( + os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 'pylib'), + os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', 'pylib'))[1]) import gyp def main(args): - if 'GYP_GENERATORS' not in os.environ: - os.environ['GYP_GENERATORS'] = 'ninja' + if 'GYP_GENERATORS' not in os.environ: + os.environ['GYP_GENERATORS'] = 'ninja' - crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir + crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir - (dependencies, mini_chromium_common_gypi) = (ChooseDependencyPath( - os.path.join(crashpad_dir, 'third_party', 'mini_chromium', - 'mini_chromium', 'build', 'common.gypi'), - os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium', - 'mini_chromium', 'build', 'common.gypi'))) - if dependencies is not None: - args.extend(['-D', 'crashpad_dependencies=%s' % dependencies]) - args.extend(['--include', mini_chromium_common_gypi]) - args.extend(['--depth', crashpad_dir_or_dot]) - args.append(os.path.join(crashpad_dir, 'crashpad.gyp')) + (dependencies, mini_chromium_common_gypi) = 
(ChooseDependencyPath( + os.path.join(crashpad_dir, 'third_party', 'mini_chromium', + 'mini_chromium', 'build', 'common.gypi'), + os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium', + 'mini_chromium', 'build', 'common.gypi'))) + if dependencies is not None: + args.extend(['-D', 'crashpad_dependencies=%s' % dependencies]) + args.extend(['--include', mini_chromium_common_gypi]) + args.extend(['--depth', crashpad_dir_or_dot]) + args.append(os.path.join(crashpad_dir, 'crashpad.gyp')) - result = gyp.main(args) - if result != 0: - return result - - if sys.platform == 'win32': - # Check to make sure that no target_arch was specified. target_arch may be - # set during a cross build, such as a cross build for Android. - has_target_arch = False - for arg_index in range(0, len(args)): - arg = args[arg_index] - if (arg.startswith('-Dtarget_arch=') or - (arg == '-D' and arg_index + 1 < len(args) and - args[arg_index + 1].startswith('target_arch='))): - has_target_arch = True - break - - if not has_target_arch: - # Also generate the x86 build. - result = gyp.main(args + ['-D', 'target_arch=ia32', '-G', 'config=Debug']) - if result != 0: + result = gyp.main(args) + if result != 0: return result - result = gyp.main( - args + ['-D', 'target_arch=ia32', '-G', 'config=Release']) - return result + if sys.platform == 'win32': + # Check to make sure that no target_arch was specified. target_arch may + # be set during a cross build, such as a cross build for Android. + has_target_arch = False + for arg_index in range(0, len(args)): + arg = args[arg_index] + if (arg.startswith('-Dtarget_arch=') or + (arg == '-D' and arg_index + 1 < len(args) and + args[arg_index + 1].startswith('target_arch='))): + has_target_arch = True + break + + if not has_target_arch: + # Also generate the x86 build. 
+ result = gyp.main(args + + ['-D', 'target_arch=ia32', '-G', 'config=Debug']) + if result != 0: + return result + result = gyp.main( + args + ['-D', 'target_arch=ia32', '-G', 'config=Release']) + + return result if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/build/gyp_crashpad_android.py b/build/gyp_crashpad_android.py index e17e7a26..327fa21f 100755 --- a/build/gyp_crashpad_android.py +++ b/build/gyp_crashpad_android.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# coding: utf-8 # Copyright 2017 The Crashpad Authors. All rights reserved. # @@ -25,52 +24,46 @@ import sys def main(args): - parser = argparse.ArgumentParser( - description='Set up an Android cross build', - epilog='Additional arguments will be passed to gyp_crashpad.py.') - parser.add_argument('--arch', required=True, help='Target architecture') - parser.add_argument('--api-level', required=True, help='Target API level') - parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain') - (parsed, extra_command_line_args) = parser.parse_known_args(args) + parser = argparse.ArgumentParser( + description='Set up an Android cross build', + epilog='Additional arguments will be passed to gyp_crashpad.py.') + parser.add_argument('--arch', required=True, help='Target architecture') + parser.add_argument('--api-level', required=True, help='Target API level') + parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain') + (parsed, extra_command_line_args) = parser.parse_known_args(args) - ndk_bin_dir = os.path.join(parsed.ndk, - 'toolchains', - 'llvm', - 'prebuilt', - 'linux-x86_64', - 'bin') - if not os.path.exists(ndk_bin_dir): - parser.error("missing toolchain") + ndk_bin_dir = os.path.join(parsed.ndk, 'toolchains', 'llvm', 'prebuilt', + 'linux-x86_64', 'bin') + if not os.path.exists(ndk_bin_dir): + parser.error("missing toolchain") - ARCH_TO_ARCH_TRIPLET = { - 'arm': 'armv7a-linux-androideabi', - 'arm64': 
'aarch64-linux-android', - 'ia32': 'i686-linux-android', - 'x64': 'x86_64-linux-android', - } + ARCH_TO_ARCH_TRIPLET = { + 'arm': 'armv7a-linux-androideabi', + 'arm64': 'aarch64-linux-android', + 'ia32': 'i686-linux-android', + 'x64': 'x86_64-linux-android', + } - clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level - os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang') - os.environ['CXX_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang++') + clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level + os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang') + os.environ['CXX_target'] = os.path.join(ndk_bin_dir, + clang_prefix + '-clang++') - extra_args = ['-D', 'android_api_level=' + parsed.api_level] + extra_args = ['-D', 'android_api_level=' + parsed.api_level] - # ARM only includes 'v7a' in the tool prefix for clang - tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' - else ARCH_TO_ARCH_TRIPLET[parsed.arch]) + # ARM only includes 'v7a' in the tool prefix for clang + tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' else + ARCH_TO_ARCH_TRIPLET[parsed.arch]) - for tool in ('ar', 'nm', 'readelf'): - os.environ['%s_target' % tool.upper()] = ( - os.path.join(ndk_bin_dir, '%s-%s' % (tool_prefix, tool))) + for tool in ('ar', 'nm', 'readelf'): + os.environ['%s_target' % tool.upper()] = (os.path.join( + ndk_bin_dir, '%s-%s' % (tool_prefix, tool))) - return gyp_crashpad.main( - ['-D', 'OS=android', - '-D', 'target_arch=%s' % parsed.arch, - '-D', 'clang=1', - '-f', 'ninja-android'] + - extra_args + - extra_command_line_args) + return gyp_crashpad.main([ + '-D', 'OS=android', '-D', + 'target_arch=%s' % parsed.arch, '-D', 'clang=1', '-f', 'ninja-android' + ] + extra_args + extra_command_line_args) if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/build/install_linux_sysroot.py b/build/install_linux_sysroot.py index 
afa88157..97f2c140 100755 --- a/build/install_linux_sysroot.py +++ b/build/install_linux_sysroot.py @@ -23,7 +23,6 @@ import subprocess import sys import urllib2 - SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) # Sysroot revision from: @@ -33,42 +32,43 @@ PATH = 'chrome-linux-sysroot/toolchain' REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66' FILENAME = 'debian_stretch_amd64_sysroot.tar.xz' + def main(): - url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME) + url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME) - sysroot = os.path.join(SCRIPT_DIR, os.pardir, - 'third_party', 'linux', 'sysroot') + sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux', + 'sysroot') - stamp = os.path.join(sysroot, '.stamp') - if os.path.exists(stamp): - with open(stamp) as s: - if s.read() == url: - return + stamp = os.path.join(sysroot, '.stamp') + if os.path.exists(stamp): + with open(stamp) as s: + if s.read() == url: + return - print 'Installing Debian root image from %s' % url + print 'Installing Debian root image from %s' % url - if os.path.isdir(sysroot): - shutil.rmtree(sysroot) - os.mkdir(sysroot) - tarball = os.path.join(sysroot, FILENAME) - print 'Downloading %s' % url + if os.path.isdir(sysroot): + shutil.rmtree(sysroot) + os.mkdir(sysroot) + tarball = os.path.join(sysroot, FILENAME) + print 'Downloading %s' % url - for _ in range(3): - response = urllib2.urlopen(url) - with open(tarball, 'wb') as f: - f.write(response.read()) - break - else: - raise Exception('Failed to download %s' % url) + for _ in range(3): + response = urllib2.urlopen(url) + with open(tarball, 'wb') as f: + f.write(response.read()) + break + else: + raise Exception('Failed to download %s' % url) - subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot]) + subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot]) - os.remove(tarball) + os.remove(tarball) - with open(stamp, 'w') as s: - s.write(url) + with open(stamp, 'w') as s: + s.write(url) if __name__ == 
'__main__': - main() - sys.exit(0) + main() + sys.exit(0) diff --git a/build/ios/convert_gn_xcodeproj.py b/build/ios/convert_gn_xcodeproj.py index b41d04ec..c00d3318 100755 --- a/build/ios/convert_gn_xcodeproj.py +++ b/build/ios/convert_gn_xcodeproj.py @@ -13,7 +13,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """Convert GN Xcode projects to platform and configuration independent targets. GN generates Xcode projects that build one configuration only. However, typical @@ -40,234 +39,245 @@ import tempfile class XcodeProject(object): - def __init__(self, objects, counter = 0): - self.objects = objects - self.counter = 0 + def __init__(self, objects, counter=0): + self.objects = objects + self.counter = 0 - def AddObject(self, parent_name, obj): - while True: - self.counter += 1 - str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter) - new_id = hashlib.sha1(str_id).hexdigest()[:24].upper() + def AddObject(self, parent_name, obj): + while True: + self.counter += 1 + str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter) + new_id = hashlib.sha1(str_id).hexdigest()[:24].upper() - # Make sure ID is unique. It's possible there could be an id conflict - # since this is run after GN runs. - if new_id not in self.objects: - self.objects[new_id] = obj - return new_id + # Make sure ID is unique. It's possible there could be an id + # conflict since this is run after GN runs. 
+ if new_id not in self.objects: + self.objects[new_id] = obj + return new_id def CopyFileIfChanged(source_path, target_path): - """Copy |source_path| to |target_path| is different.""" - target_dir = os.path.dirname(target_path) - if not os.path.isdir(target_dir): - os.makedirs(target_dir) - if not os.path.exists(target_path) or \ - not filecmp.cmp(source_path, target_path): - shutil.copyfile(source_path, target_path) + """Copy |source_path| to |target_path| is different.""" + target_dir = os.path.dirname(target_path) + if not os.path.isdir(target_dir): + os.makedirs(target_dir) + if (not os.path.exists(target_path) or + not filecmp.cmp(source_path, target_path)): + shutil.copyfile(source_path, target_path) def LoadXcodeProjectAsJSON(path): - """Return Xcode project at |path| as a JSON string.""" - return subprocess.check_output([ - 'plutil', '-convert', 'json', '-o', '-', path]) + """Return Xcode project at |path| as a JSON string.""" + return subprocess.check_output( + ['plutil', '-convert', 'json', '-o', '-', path]) def WriteXcodeProject(output_path, json_string): - """Save Xcode project to |output_path| as XML.""" - with tempfile.NamedTemporaryFile() as temp_file: - temp_file.write(json_string) - temp_file.flush() - subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name]) - CopyFileIfChanged(temp_file.name, output_path) + """Save Xcode project to |output_path| as XML.""" + with tempfile.NamedTemporaryFile() as temp_file: + temp_file.write(json_string) + temp_file.flush() + subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name]) + CopyFileIfChanged(temp_file.name, output_path) def UpdateProductsProject(file_input, file_output, configurations, root_dir): - """Update Xcode project to support multiple configurations. + """Update Xcode project to support multiple configurations. 
- Args: - file_input: path to the input Xcode project - file_output: path to the output file - configurations: list of string corresponding to the configurations that - need to be supported by the tweaked Xcode projects, must contains at - least one value. - """ - json_data = json.loads(LoadXcodeProjectAsJSON(file_input)) - project = XcodeProject(json_data['objects']) + Args: + file_input: path to the input Xcode project + file_output: path to the output file + configurations: list of string corresponding to the configurations that + need to be supported by the tweaked Xcode projects, must contains at + least one value. + """ + json_data = json.loads(LoadXcodeProjectAsJSON(file_input)) + project = XcodeProject(json_data['objects']) - objects_to_remove = [] - for value in project.objects.values(): - isa = value['isa'] + objects_to_remove = [] + for value in project.objects.values(): + isa = value['isa'] - # Teach build shell script to look for the configuration and platform. - if isa == 'PBXShellScriptBuildPhase': - value['shellScript'] = value['shellScript'].replace( - 'ninja -C .', - 'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"') + # Teach build shell script to look for the configuration and platform. + if isa == 'PBXShellScriptBuildPhase': + value['shellScript'] = value['shellScript'].replace( + 'ninja -C .', + 'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"') - # Add new configuration, using the first one as default. - if isa == 'XCConfigurationList': - value['defaultConfigurationName'] = configurations[0] - objects_to_remove.extend(value['buildConfigurations']) + # Add new configuration, using the first one as default. 
+ if isa == 'XCConfigurationList': + value['defaultConfigurationName'] = configurations[0] + objects_to_remove.extend(value['buildConfigurations']) - build_config_template = project.objects[value['buildConfigurations'][0]] - build_config_template['buildSettings']['CONFIGURATION_BUILD_DIR'] = \ - '$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)' - build_config_template['buildSettings']['CODE_SIGN_IDENTITY'] = '' + build_config_template = project.objects[value['buildConfigurations'] + [0]] + build_settings = build_config_template['buildSettings'] + build_settings['CONFIGURATION_BUILD_DIR'] = ( + '$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)') + build_settings['CODE_SIGN_IDENTITY'] = '' - value['buildConfigurations'] = [] - for configuration in configurations: - new_build_config = copy.copy(build_config_template) - new_build_config['name'] = configuration - value['buildConfigurations'].append( - project.AddObject('products', new_build_config)) + value['buildConfigurations'] = [] + for configuration in configurations: + new_build_config = copy.copy(build_config_template) + new_build_config['name'] = configuration + value['buildConfigurations'].append( + project.AddObject('products', new_build_config)) - for object_id in objects_to_remove: - del project.objects[object_id] + for object_id in objects_to_remove: + del project.objects[object_id] - AddMarkdownToProject(project, root_dir, json_data['rootObject']) + AddMarkdownToProject(project, root_dir, json_data['rootObject']) - objects = collections.OrderedDict(sorted(project.objects.iteritems())) - WriteXcodeProject(file_output, json.dumps(json_data)) + objects = collections.OrderedDict(sorted(project.objects.iteritems())) + WriteXcodeProject(file_output, json.dumps(json_data)) def AddMarkdownToProject(project, root_dir, root_object): - list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md'] - paths = subprocess.check_output(list_files_cmd).splitlines() - ios_internal_dir = 
os.path.join(root_dir, 'ios_internal') - if os.path.exists(ios_internal_dir): - list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md'] - ios_paths = subprocess.check_output(list_files_cmd).splitlines() - paths.extend(["ios_internal/" + path for path in ios_paths]) - for path in paths: - new_markdown_entry = { - "fileEncoding": "4", - "isa": "PBXFileReference", - "lastKnownFileType": "net.daringfireball.markdown", - "name": os.path.basename(path), - "path": path, - "sourceTree": "" - } - new_markdown_entry_id = project.AddObject('sources', new_markdown_entry) - folder = GetFolderForPath(project, root_object, os.path.dirname(path)) - folder['children'].append(new_markdown_entry_id) + list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md'] + paths = subprocess.check_output(list_files_cmd).splitlines() + ios_internal_dir = os.path.join(root_dir, 'ios_internal') + if os.path.exists(ios_internal_dir): + list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md'] + ios_paths = subprocess.check_output(list_files_cmd).splitlines() + paths.extend(["ios_internal/" + path for path in ios_paths]) + for path in paths: + new_markdown_entry = { + "fileEncoding": "4", + "isa": "PBXFileReference", + "lastKnownFileType": "net.daringfireball.markdown", + "name": os.path.basename(path), + "path": path, + "sourceTree": "" + } + new_markdown_entry_id = project.AddObject('sources', new_markdown_entry) + folder = GetFolderForPath(project, root_object, os.path.dirname(path)) + folder['children'].append(new_markdown_entry_id) def GetFolderForPath(project, rootObject, path): - objects = project.objects - # 'Sources' is always the first child of - # project->rootObject->mainGroup->children. - root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]] - if not path: + objects = project.objects + # 'Sources' is always the first child of + # project->rootObject->mainGroup->children. 
+ root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]] + if not path: + return root + for folder in path.split('/'): + children = root['children'] + new_root = None + for child in children: + if (objects[child]['isa'] == 'PBXGroup' and + objects[child]['name'] == folder): + new_root = objects[child] + break + if not new_root: + # If the folder isn't found we could just cram it into the leaf + # existing folder, but that leads to folders with tons of README.md + # inside. + new_group = { + "children": [], + "isa": "PBXGroup", + "name": folder, + "sourceTree": "" + } + new_group_id = project.AddObject('sources', new_group) + children.append(new_group_id) + new_root = objects[new_group_id] + root = new_root return root - for folder in path.split('/'): - children = root['children'] - new_root = None - for child in children: - if objects[child]['isa'] == 'PBXGroup' and \ - objects[child]['name'] == folder: - new_root = objects[child] - break - if not new_root: - # If the folder isn't found we could just cram it into the leaf existing - # folder, but that leads to folders with tons of README.md inside. 
- new_group = { - "children": [ - ], - "isa": "PBXGroup", - "name": folder, - "sourceTree": "" - } - new_group_id = project.AddObject('sources', new_group) - children.append(new_group_id) - new_root = objects[new_group_id] - root = new_root - return root def DisableNewBuildSystem(output_dir): - """Disables the new build system due to crbug.com/852522 """ - xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace', - 'xcshareddata', 'WorkspaceSettings.xcsettings') - if os.path.isfile(xcwspacesharedsettings): - json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings)) - else: - json_data = {} - json_data['BuildSystemType'] = 'Original' - WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data)) + """Disables the new build system due to crbug.com/852522 """ + xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace', + 'xcshareddata', + 'WorkspaceSettings.xcsettings') + if os.path.isfile(xcwspacesharedsettings): + json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings)) + else: + json_data = {} + json_data['BuildSystemType'] = 'Original' + WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data)) def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations): - '''Tweak the Xcode project generated by gn to support multiple configurations. + '''Tweak the Xcode project generated by gn to support multiple + configurations. - The Xcode projects generated by "gn gen --ide" only supports a single - platform and configuration (as the platform and configuration are set - per output directory). This method takes as input such projects and - add support for multiple configurations and platforms (to allow devs - to select them in Xcode). + The Xcode projects generated by "gn gen --ide" only supports a single + platform and configuration (as the platform and configuration are set per + output directory). 
This method takes as input such projects and add support + for multiple configurations and platforms (to allow devs to select them in + Xcode). - Args: - input_dir: directory containing the XCode projects created by "gn gen --ide" - output_dir: directory where the tweaked Xcode projects will be saved - configurations: list of string corresponding to the configurations that - need to be supported by the tweaked Xcode projects, must contains at - least one value. - ''' - # Update products project. - products = os.path.join('products.xcodeproj', 'project.pbxproj') - product_input = os.path.join(input_dir, products) - product_output = os.path.join(output_dir, products) - UpdateProductsProject(product_input, product_output, configurations, root_dir) + Args: + input_dir: directory containing the XCode projects created by "gn gen + --ide" + output_dir: directory where the tweaked Xcode projects will be saved + configurations: list of string corresponding to the configurations that + need to be supported by the tweaked Xcode projects, must contains at + least one value. + ''' + # Update products project. + products = os.path.join('products.xcodeproj', 'project.pbxproj') + product_input = os.path.join(input_dir, products) + product_output = os.path.join(output_dir, products) + UpdateProductsProject(product_input, product_output, configurations, + root_dir) - # Copy all workspace. - xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata') - CopyFileIfChanged(os.path.join(input_dir, xcwspace), - os.path.join(output_dir, xcwspace)) + # Copy all workspace. + xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata') + CopyFileIfChanged(os.path.join(input_dir, xcwspace), + os.path.join(output_dir, xcwspace)) - # TODO(crbug.com/852522): Disable new BuildSystemType. - DisableNewBuildSystem(output_dir) + # TODO(crbug.com/852522): Disable new BuildSystemType. 
+ DisableNewBuildSystem(output_dir) + + # TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj' + # and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is + # here to support both old and new projects setup and will be removed once + # gn has rolled past it. + sources = os.path.join('sources.xcodeproj', 'project.pbxproj') + if os.path.isfile(os.path.join(input_dir, sources)): + CopyFileIfChanged(os.path.join(input_dir, sources), + os.path.join(output_dir, sources)) - # TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj' - # and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is - # here to support both old and new projects setup and will be removed once gn - # has rolled past it. - sources = os.path.join('sources.xcodeproj', 'project.pbxproj') - if os.path.isfile(os.path.join(input_dir, sources)): - CopyFileIfChanged(os.path.join(input_dir, sources), - os.path.join(output_dir, sources)) def Main(args): - parser = argparse.ArgumentParser( - description='Convert GN Xcode projects for iOS.') - parser.add_argument( - 'input', - help='directory containing [product|all] Xcode projects.') - parser.add_argument( - 'output', - help='directory where to generate the iOS configuration.') - parser.add_argument( - '--add-config', dest='configurations', default=[], action='append', - help='configuration to add to the Xcode project') - parser.add_argument( - '--root', type=os.path.abspath, required=True, - help='root directory of the project') - args = parser.parse_args(args) + parser = argparse.ArgumentParser( + description='Convert GN Xcode projects for iOS.') + parser.add_argument( + 'input', help='directory containing [product|all] Xcode projects.') + parser.add_argument( + 'output', help='directory where to generate the iOS configuration.') + parser.add_argument('--add-config', + dest='configurations', + default=[], + action='append', + help='configuration to add to the Xcode project') + 
parser.add_argument('--root', + type=os.path.abspath, + required=True, + help='root directory of the project') + args = parser.parse_args(args) - if not os.path.isdir(args.input): - sys.stderr.write('Input directory does not exists.\n') - return 1 + if not os.path.isdir(args.input): + sys.stderr.write('Input directory does not exists.\n') + return 1 - required = set(['products.xcodeproj', 'all.xcworkspace']) - if not required.issubset(os.listdir(args.input)): - sys.stderr.write( - 'Input directory does not contain all necessary Xcode projects.\n') - return 1 + required = set(['products.xcodeproj', 'all.xcworkspace']) + if not required.issubset(os.listdir(args.input)): + sys.stderr.write( + 'Input directory does not contain all necessary Xcode projects.\n') + return 1 - if not args.configurations: - sys.stderr.write('At least one configuration required, see --add-config.\n') - return 1 + if not args.configurations: + sys.stderr.write( + 'At least one configuration required, see --add-config.\n') + return 1 + + ConvertGnXcodeProject(args.root, args.input, args.output, + args.configurations) - ConvertGnXcodeProject(args.root, args.input, args.output, args.configurations) if __name__ == '__main__': - sys.exit(Main(sys.argv[1:])) + sys.exit(Main(sys.argv[1:])) diff --git a/build/ios/setup-ios-gn.py b/build/ios/setup-ios-gn.py index 995e3c84..934b67c6 100755 --- a/build/ios/setup-ios-gn.py +++ b/build/ios/setup-ios-gn.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- import argparse import convert_gn_xcodeproj import errno @@ -27,326 +26,325 @@ import tempfile import ConfigParser try: - import cStringIO as StringIO + import cStringIO as StringIO except ImportError: - import StringIO - + import StringIO SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator') SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage') class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser): + '''A .ini file parser that supports strings and environment variables.''' - '''A .ini file parser that supports strings and environment variables.''' + ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)') - ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)') + def values(self, section): + return map(lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)), + ConfigParser.SafeConfigParser.items(self, section)) - def values(self, section): - return map( - lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)), - ConfigParser.SafeConfigParser.items(self, section)) + def getstring(self, section, option): + return self._UnquoteString(self._ExpandEnvVar(self.get(section, + option))) - def getstring(self, section, option): - return self._UnquoteString(self._ExpandEnvVar(self.get(section, option))) + def _UnquoteString(self, string): + if not string or string[0] != '"' or string[-1] != '"': + return string + return string[1:-1] - def _UnquoteString(self, string): - if not string or string[0] != '"' or string[-1] != '"': - return string - return string[1:-1] + def _ExpandEnvVar(self, value): + match = self.ENV_VAR_PATTERN.search(value) + if not match: + return value + name, (begin, end) = match.group(1), match.span(0) + prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:]) + return prefix + os.environ.get(name, '') + suffix - def _ExpandEnvVar(self, value): - match = self.ENV_VAR_PATTERN.search(value) - if not match: - return value - name, (begin, end) = match.group(1), match.span(0) - prefix, suffix = value[:begin], 
self._ExpandEnvVar(value[end:]) - return prefix + os.environ.get(name, '') + suffix class GnGenerator(object): + '''Holds configuration for a build and method to generate gn default + files.''' - '''Holds configuration for a build and method to generate gn default files.''' + FAT_BUILD_DEFAULT_ARCH = '64-bit' - FAT_BUILD_DEFAULT_ARCH = '64-bit' - - TARGET_CPU_VALUES = { - 'iphoneos': { - '32-bit': '"arm"', - '64-bit': '"arm64"', - }, - 'iphonesimulator': { - '32-bit': '"x86"', - '64-bit': '"x64"', + TARGET_CPU_VALUES = { + 'iphoneos': { + '32-bit': '"arm"', + '64-bit': '"arm64"', + }, + 'iphonesimulator': { + '32-bit': '"x86"', + '64-bit': '"x64"', + } } - } - def __init__(self, settings, config, target): - assert target in SUPPORTED_TARGETS - assert config in SUPPORTED_CONFIGS - self._settings = settings - self._config = config - self._target = target + def __init__(self, settings, config, target): + assert target in SUPPORTED_TARGETS + assert config in SUPPORTED_CONFIGS + self._settings = settings + self._config = config + self._target = target - def _GetGnArgs(self): - """Build the list of arguments to pass to gn. + def _GetGnArgs(self): + """Build the list of arguments to pass to gn. - Returns: - A list of tuple containing gn variable names and variable values (it - is not a dictionary as the order needs to be preserved). - """ - args = [] + Returns: + A list of tuple containing gn variable names and variable values (it + is not a dictionary as the order needs to be preserved). 
+ """ + args = [] - args.append(('is_debug', self._config in ('Debug', 'Coverage'))) + args.append(('is_debug', self._config in ('Debug', 'Coverage'))) - if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1': - args.append(('use_system_xcode', False)) + if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1': + args.append(('use_system_xcode', False)) - cpu_values = self.TARGET_CPU_VALUES[self._target] - build_arch = self._settings.getstring('build', 'arch') - if build_arch == 'fat': - target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH] - args.append(('target_cpu', target_cpu)) - args.append(('additional_target_cpus', - [cpu for cpu in cpu_values.itervalues() if cpu != target_cpu])) - else: - args.append(('target_cpu', cpu_values[build_arch])) - - # Add user overrides after the other configurations so that they can - # refer to them and override them. - args.extend(self._settings.items('gn_args')) - return args - - - def Generate(self, gn_path, root_path, out_path): - buf = StringIO.StringIO() - self.WriteArgsGn(buf) - WriteToFileIfChanged( - os.path.join(out_path, 'args.gn'), - buf.getvalue(), - overwrite=True) - - subprocess.check_call( - self.GetGnCommand(gn_path, root_path, out_path, True)) - - def CreateGnRules(self, gn_path, root_path, out_path): - buf = StringIO.StringIO() - self.WriteArgsGn(buf) - WriteToFileIfChanged( - os.path.join(out_path, 'args.gn'), - buf.getvalue(), - overwrite=True) - - buf = StringIO.StringIO() - gn_command = self.GetGnCommand(gn_path, root_path, out_path, False) - self.WriteBuildNinja(buf, gn_command) - WriteToFileIfChanged( - os.path.join(out_path, 'build.ninja'), - buf.getvalue(), - overwrite=False) - - buf = StringIO.StringIO() - self.WriteBuildNinjaDeps(buf) - WriteToFileIfChanged( - os.path.join(out_path, 'build.ninja.d'), - buf.getvalue(), - overwrite=False) - - def WriteArgsGn(self, stream): - stream.write('# This file was generated by setup-gn.py. 
Do not edit\n') - stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n') - stream.write('# to configure settings.\n') - stream.write('\n') - - if self._settings.has_section('$imports$'): - for import_rule in self._settings.values('$imports$'): - stream.write('import("%s")\n' % import_rule) - stream.write('\n') - - gn_args = self._GetGnArgs() - for name, value in gn_args: - if isinstance(value, bool): - stream.write('%s = %s\n' % (name, str(value).lower())) - elif isinstance(value, list): - stream.write('%s = [%s' % (name, '\n' if len(value) > 1 else '')) - if len(value) == 1: - prefix = ' ' - suffix = ' ' + cpu_values = self.TARGET_CPU_VALUES[self._target] + build_arch = self._settings.getstring('build', 'arch') + if build_arch == 'fat': + target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH] + args.append(('target_cpu', target_cpu)) + args.append( + ('additional_target_cpus', + [cpu for cpu in cpu_values.itervalues() if cpu != target_cpu])) else: - prefix = ' ' - suffix = ',\n' - for item in value: - if isinstance(item, bool): - stream.write('%s%s%s' % (prefix, str(item).lower(), suffix)) - else: - stream.write('%s%s%s' % (prefix, item, suffix)) - stream.write(']\n') - else: - stream.write('%s = %s\n' % (name, value)) + args.append(('target_cpu', cpu_values[build_arch])) - def WriteBuildNinja(self, stream, gn_command): - stream.write('rule gn\n') - stream.write(' command = %s\n' % NinjaEscapeCommand(gn_command)) - stream.write(' description = Regenerating ninja files\n') - stream.write('\n') - stream.write('build build.ninja: gn\n') - stream.write(' generator = 1\n') - stream.write(' depfile = build.ninja.d\n') + # Add user overrides after the other configurations so that they can + # refer to them and override them. 
+ args.extend(self._settings.items('gn_args')) + return args - def WriteBuildNinjaDeps(self, stream): - stream.write('build.ninja: nonexistant_file.gn\n') + def Generate(self, gn_path, root_path, out_path): + buf = StringIO.StringIO() + self.WriteArgsGn(buf) + WriteToFileIfChanged(os.path.join(out_path, 'args.gn'), + buf.getvalue(), + overwrite=True) - def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project): - gn_command = [ gn_path, '--root=%s' % os.path.realpath(src_path), '-q' ] - if generate_xcode_project: - gn_command.append('--ide=xcode') - gn_command.append('--root-target=gn_all') - if self._settings.getboolean('goma', 'enabled'): - ninja_jobs = self._settings.getint('xcode', 'jobs') or 200 - gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs) - if self._settings.has_section('filters'): - target_filters = self._settings.values('filters') - if target_filters: - gn_command.append('--filters=%s' % ';'.join(target_filters)) - # TODO(justincohen): --check is currently failing in crashpad. 
- # else: - # gn_command.append('--check') - gn_command.append('gen') - gn_command.append('//%s' % - os.path.relpath(os.path.abspath(out_path), os.path.abspath(src_path))) - return gn_command + subprocess.check_call( + self.GetGnCommand(gn_path, root_path, out_path, True)) + + def CreateGnRules(self, gn_path, root_path, out_path): + buf = StringIO.StringIO() + self.WriteArgsGn(buf) + WriteToFileIfChanged(os.path.join(out_path, 'args.gn'), + buf.getvalue(), + overwrite=True) + + buf = StringIO.StringIO() + gn_command = self.GetGnCommand(gn_path, root_path, out_path, False) + self.WriteBuildNinja(buf, gn_command) + WriteToFileIfChanged(os.path.join(out_path, 'build.ninja'), + buf.getvalue(), + overwrite=False) + + buf = StringIO.StringIO() + self.WriteBuildNinjaDeps(buf) + WriteToFileIfChanged(os.path.join(out_path, 'build.ninja.d'), + buf.getvalue(), + overwrite=False) + + def WriteArgsGn(self, stream): + stream.write('# This file was generated by setup-gn.py. Do not edit\n') + stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n') + stream.write('# to configure settings.\n') + stream.write('\n') + + if self._settings.has_section('$imports$'): + for import_rule in self._settings.values('$imports$'): + stream.write('import("%s")\n' % import_rule) + stream.write('\n') + + gn_args = self._GetGnArgs() + for name, value in gn_args: + if isinstance(value, bool): + stream.write('%s = %s\n' % (name, str(value).lower())) + elif isinstance(value, list): + stream.write('%s = [%s' % + (name, '\n' if len(value) > 1 else '')) + if len(value) == 1: + prefix = ' ' + suffix = ' ' + else: + prefix = ' ' + suffix = ',\n' + for item in value: + if isinstance(item, bool): + stream.write('%s%s%s' % + (prefix, str(item).lower(), suffix)) + else: + stream.write('%s%s%s' % (prefix, item, suffix)) + stream.write(']\n') + else: + stream.write('%s = %s\n' % (name, value)) + + def WriteBuildNinja(self, stream, gn_command): + stream.write('rule gn\n') + stream.write(' command = 
%s\n' % NinjaEscapeCommand(gn_command)) + stream.write(' description = Regenerating ninja files\n') + stream.write('\n') + stream.write('build build.ninja: gn\n') + stream.write(' generator = 1\n') + stream.write(' depfile = build.ninja.d\n') + + def WriteBuildNinjaDeps(self, stream): + stream.write('build.ninja: nonexistant_file.gn\n') + + def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project): + gn_command = [gn_path, '--root=%s' % os.path.realpath(src_path), '-q'] + if generate_xcode_project: + gn_command.append('--ide=xcode') + gn_command.append('--root-target=gn_all') + if self._settings.getboolean('goma', 'enabled'): + ninja_jobs = self._settings.getint('xcode', 'jobs') or 200 + gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs) + if self._settings.has_section('filters'): + target_filters = self._settings.values('filters') + if target_filters: + gn_command.append('--filters=%s' % ';'.join(target_filters)) + # TODO(justincohen): --check is currently failing in crashpad. + # else: + # gn_command.append('--check') + gn_command.append('gen') + gn_command.append('//%s' % os.path.relpath(os.path.abspath(out_path), + os.path.abspath(src_path))) + return gn_command def WriteToFileIfChanged(filename, content, overwrite): - '''Write |content| to |filename| if different. If |overwrite| is False - and the file already exists it is left untouched.''' - if os.path.exists(filename): - if not overwrite: - return - with open(filename) as file: - if file.read() == content: - return - if not os.path.isdir(os.path.dirname(filename)): - os.makedirs(os.path.dirname(filename)) - with open(filename, 'w') as file: - file.write(content) + '''Write |content| to |filename| if different. 
If |overwrite| is False + and the file already exists it is left untouched.''' + if os.path.exists(filename): + if not overwrite: + return + with open(filename) as file: + if file.read() == content: + return + if not os.path.isdir(os.path.dirname(filename)): + os.makedirs(os.path.dirname(filename)) + with open(filename, 'w') as file: + file.write(content) def NinjaNeedEscape(arg): - '''Returns True if |arg| needs to be escaped when written to .ninja file.''' - return ':' in arg or '*' in arg or ';' in arg + '''Returns True if |arg| needs to be escaped when written to .ninja file.''' + return ':' in arg or '*' in arg or ';' in arg def NinjaEscapeCommand(command): - '''Escapes |command| in order to write it to .ninja file.''' - result = [] - for arg in command: - if NinjaNeedEscape(arg): - arg = arg.replace(':', '$:') - arg = arg.replace(';', '\\;') - arg = arg.replace('*', '\\*') - else: - result.append(arg) - return ' '.join(result) + '''Escapes |command| in order to write it to .ninja file.''' + result = [] + for arg in command: + if NinjaNeedEscape(arg): + arg = arg.replace(':', '$:') + arg = arg.replace(';', '\\;') + arg = arg.replace('*', '\\*') + else: + result.append(arg) + return ' '.join(result) def FindGn(): - '''Returns absolute path to gn binary looking at the PATH env variable.''' - for path in os.environ['PATH'].split(os.path.pathsep): - gn_path = os.path.join(path, 'gn') - if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK): - return gn_path - return None + '''Returns absolute path to gn binary looking at the PATH env variable.''' + for path in os.environ['PATH'].split(os.path.pathsep): + gn_path = os.path.join(path, 'gn') + if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK): + return gn_path + return None def GenerateXcodeProject(gn_path, root_dir, out_dir, settings): - '''Convert GN generated Xcode project into multi-configuration Xcode - project.''' + '''Convert GN generated Xcode project into multi-configuration Xcode + project.''' 
- temp_path = tempfile.mkdtemp(prefix=os.path.abspath( - os.path.join(out_dir, '_temp'))) - try: - generator = GnGenerator(settings, 'Debug', 'iphonesimulator') - generator.Generate(gn_path, root_dir, temp_path) - convert_gn_xcodeproj.ConvertGnXcodeProject( - root_dir, - os.path.join(temp_path), - os.path.join(out_dir, 'build'), - SUPPORTED_CONFIGS) - finally: - if os.path.exists(temp_path): - shutil.rmtree(temp_path) + temp_path = tempfile.mkdtemp( + prefix=os.path.abspath(os.path.join(out_dir, '_temp'))) + try: + generator = GnGenerator(settings, 'Debug', 'iphonesimulator') + generator.Generate(gn_path, root_dir, temp_path) + convert_gn_xcodeproj.ConvertGnXcodeProject( + root_dir, os.path.join(temp_path), os.path.join(out_dir, 'build'), + SUPPORTED_CONFIGS) + finally: + if os.path.exists(temp_path): + shutil.rmtree(temp_path) def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings): - '''Generates all template configurations for gn.''' - for config in SUPPORTED_CONFIGS: - for target in SUPPORTED_TARGETS: - build_dir = os.path.join(out_dir, '%s-%s' % (config, target)) - generator = GnGenerator(settings, config, target) - generator.CreateGnRules(gn_path, root_dir, build_dir) + '''Generates all template configurations for gn.''' + for config in SUPPORTED_CONFIGS: + for target in SUPPORTED_TARGETS: + build_dir = os.path.join(out_dir, '%s-%s' % (config, target)) + generator = GnGenerator(settings, config, target) + generator.CreateGnRules(gn_path, root_dir, build_dir) def Main(args): - default_root = os.path.normpath(os.path.join( - os.path.dirname(__file__), os.pardir, os.pardir)) + default_root = os.path.normpath( + os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) - parser = argparse.ArgumentParser( - description='Generate build directories for use with gn.') - parser.add_argument( - 'root', default=default_root, nargs='?', - help='root directory where to generate multiple out configurations') - parser.add_argument( - '--import', action='append', 
dest='import_rules', default=[], - help='path to file defining default gn variables') - args = parser.parse_args(args) + parser = argparse.ArgumentParser( + description='Generate build directories for use with gn.') + parser.add_argument( + 'root', + default=default_root, + nargs='?', + help='root directory where to generate multiple out configurations') + parser.add_argument('--import', + action='append', + dest='import_rules', + default=[], + help='path to file defining default gn variables') + args = parser.parse_args(args) - # Load configuration (first global and then any user overrides). - settings = ConfigParserWithStringInterpolation() - settings.read([ - os.path.splitext(__file__)[0] + '.config', - os.path.expanduser('~/.setup-gn'), - ]) + # Load configuration (first global and then any user overrides). + settings = ConfigParserWithStringInterpolation() + settings.read([ + os.path.splitext(__file__)[0] + '.config', + os.path.expanduser('~/.setup-gn'), + ]) - # Add private sections corresponding to --import argument. - if args.import_rules: - settings.add_section('$imports$') - for i, import_rule in enumerate(args.import_rules): - if not import_rule.startswith('//'): - import_rule = '//%s' % os.path.relpath( - os.path.abspath(import_rule), os.path.abspath(args.root)) - settings.set('$imports$', '$rule%d$' % i, import_rule) + # Add private sections corresponding to --import argument. + if args.import_rules: + settings.add_section('$imports$') + for i, import_rule in enumerate(args.import_rules): + if not import_rule.startswith('//'): + import_rule = '//%s' % os.path.relpath( + os.path.abspath(import_rule), os.path.abspath(args.root)) + settings.set('$imports$', '$rule%d$' % i, import_rule) - # Validate settings. - if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'): - sys.stderr.write('ERROR: invalid value for build.arch: %s\n' % - settings.getstring('build', 'arch')) - sys.exit(1) + # Validate settings. 
+ if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'): + sys.stderr.write('ERROR: invalid value for build.arch: %s\n' % + settings.getstring('build', 'arch')) + sys.exit(1) - if settings.getboolean('goma', 'enabled'): - if settings.getint('xcode', 'jobs') < 0: - sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' % - settings.get('xcode', 'jobs')) - sys.exit(1) - goma_install = os.path.expanduser(settings.getstring('goma', 'install')) - if not os.path.isdir(goma_install): - sys.stderr.write('WARNING: goma.install directory not found: %s\n' % - settings.get('goma', 'install')) - sys.stderr.write('WARNING: disabling goma\n') - settings.set('goma', 'enabled', 'false') + if settings.getboolean('goma', 'enabled'): + if settings.getint('xcode', 'jobs') < 0: + sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' % + settings.get('xcode', 'jobs')) + sys.exit(1) + goma_install = os.path.expanduser(settings.getstring('goma', 'install')) + if not os.path.isdir(goma_install): + sys.stderr.write('WARNING: goma.install directory not found: %s\n' % + settings.get('goma', 'install')) + sys.stderr.write('WARNING: disabling goma\n') + settings.set('goma', 'enabled', 'false') - # Find gn binary in PATH. - gn_path = FindGn() - if gn_path is None: - sys.stderr.write('ERROR: cannot find gn in PATH\n') - sys.exit(1) + # Find gn binary in PATH. 
+ gn_path = FindGn() + if gn_path is None: + sys.stderr.write('ERROR: cannot find gn in PATH\n') + sys.exit(1) - out_dir = os.path.join(args.root, 'out') - if not os.path.isdir(out_dir): - os.makedirs(out_dir) + out_dir = os.path.join(args.root, 'out') + if not os.path.isdir(out_dir): + os.makedirs(out_dir) - GenerateXcodeProject(gn_path, args.root, out_dir, settings) - GenerateGnBuildRules(gn_path, args.root, out_dir, settings) + GenerateXcodeProject(gn_path, args.root, out_dir, settings) + GenerateGnBuildRules(gn_path, args.root, out_dir, settings) if __name__ == '__main__': - sys.exit(Main(sys.argv[1:])) + sys.exit(Main(sys.argv[1:])) diff --git a/build/run_fuchsia_qemu.py b/build/run_fuchsia_qemu.py index 135b314e..aff0efd8 100755 --- a/build/run_fuchsia_qemu.py +++ b/build/run_fuchsia_qemu.py @@ -13,7 +13,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used for running tests without a device. """ @@ -30,105 +29,117 @@ import tempfile import time try: - from subprocess import DEVNULL + from subprocess import DEVNULL except ImportError: - DEVNULL = open(os.devnull, 'r+b') + DEVNULL = open(os.devnull, 'r+b') CRASHPAD_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir) def _Stop(pid_file): - if os.path.isfile(pid_file): - with open(pid_file, 'rb') as f: - pid = int(f.read().strip()) - try: - os.kill(pid, signal.SIGTERM) - except: - print('Unable to kill pid %d, continuing' % pid, file=sys.stderr) - os.unlink(pid_file) + if os.path.isfile(pid_file): + with open(pid_file, 'rb') as f: + pid = int(f.read().strip()) + try: + os.kill(pid, signal.SIGTERM) + except: + print('Unable to kill pid %d, continuing' % pid, file=sys.stderr) + os.unlink(pid_file) def _CheckForTun(): - """Check for networking. 
TODO(scottmg): Currently, this is Linux-specific. - """ - returncode = subprocess.call( - ['tunctl', '-b', '-u', getpass.getuser(), '-t', 'qemu'], - stdout=DEVNULL, stderr=DEVNULL) - if returncode != 0: - print('To use QEMU with networking on Linux, configure TUN/TAP. See:', - file=sys.stderr) - print(' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only', - file=sys.stderr) - return 2 - return 0 + """Check for networking. TODO(scottmg): Currently, this is Linux-specific. + """ + returncode = subprocess.call( + ['tunctl', '-b', '-u', + getpass.getuser(), '-t', 'qemu'], + stdout=DEVNULL, + stderr=DEVNULL) + if returncode != 0: + print('To use QEMU with networking on Linux, configure TUN/TAP. See:', + file=sys.stderr) + print( + ' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only', + file=sys.stderr) + return 2 + return 0 def _Start(pid_file): - tun_result = _CheckForTun() - if tun_result != 0: - return tun_result + tun_result = _CheckForTun() + if tun_result != 0: + return tun_result - arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64' - fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia') - qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin', - 'qemu-system-x86_64') - kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64') - kernel_path = os.path.join(kernel_data_dir, 'zircon.bin') - initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin') + arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64' + fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia') + qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin', + 'qemu-system-x86_64') + kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64') + kernel_path = os.path.join(kernel_data_dir, 'zircon.bin') + initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin') - mac_tail = ':'.join('%02x' % random.randint(0, 
255) for x in range(3)) - instance_name = 'crashpad_qemu_' + \ - ''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)) + mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3)) + instance_name = ( + 'crashpad_qemu_' + + ''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8))) - # These arguments are from the Fuchsia repo in zircon/scripts/run-zircon. - popen = subprocess.Popen([ - qemu_path, - '-m', '2048', - '-nographic', - '-kernel', kernel_path, - '-initrd', initrd_path, - '-smp', '4', - '-serial', 'stdio', - '-monitor', 'none', - '-machine', 'q35', - '-cpu', 'host,migratable=no', - '-enable-kvm', - '-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0', - '-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail, - '-append', 'TERM=dumb zircon.nodename=' + instance_name, - ], stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL) + # These arguments are from the Fuchsia repo in zircon/scripts/run-zircon. - with open(pid_file, 'wb') as f: - f.write('%d\n' % popen.pid) + # yapf: disable + popen = subprocess.Popen([ + qemu_path, + '-m', '2048', + '-nographic', + '-kernel', kernel_path, + '-initrd', initrd_path, + '-smp', '4', + '-serial', 'stdio', + '-monitor', 'none', + '-machine', 'q35', + '-cpu', 'host,migratable=no', + '-enable-kvm', + '-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0', + '-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail, + '-append', 'TERM=dumb zircon.nodename=' + instance_name, + ], + stdin=DEVNULL, + stdout=DEVNULL, + stderr=DEVNULL) + # yapf: enable - for i in range(10): - netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', 'netaddr') - if subprocess.call([netaddr_path, '--nowait', instance_name], - stdout=open(os.devnull), stderr=open(os.devnull)) == 0: - break - time.sleep(.5) - else: - print('instance did not respond after start', file=sys.stderr) - return 1 + with open(pid_file, 'wb') as f: + f.write('%d\n' % popen.pid) - return 0 + for i in range(10): + 
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', + 'netaddr') + if subprocess.call([netaddr_path, '--nowait', instance_name], + stdout=open(os.devnull), + stderr=open(os.devnull)) == 0: + break + time.sleep(.5) + else: + print('instance did not respond after start', file=sys.stderr) + return 1 + + return 0 def main(args): - if len(args) != 1 or args[0] not in ('start', 'stop'): - print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr) - return 1 + if len(args) != 1 or args[0] not in ('start', 'stop'): + print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr) + return 1 - command = args[0] + command = args[0] - pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid') - _Stop(pid_file) - if command == 'start': - return _Start(pid_file) + pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid') + _Stop(pid_file) + if command == 'start': + return _Start(pid_file) - return 0 + return 0 if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/build/run_tests.py b/build/run_tests.py index 38473cd0..2f1919b5 100755 --- a/build/run_tests.py +++ b/build/run_tests.py @@ -33,579 +33,603 @@ IS_WINDOWS_HOST = sys.platform.startswith('win') def _FindGNFromBinaryDir(binary_dir): - """Attempts to determine the path to a GN binary used to generate the build - files in the given binary_dir. This is necessary because `gn` might not be in - the path or might be in a non-standard location, particularly on build - machines.""" + """Attempts to determine the path to a GN binary used to generate the build + files in the given binary_dir. 
This is necessary because `gn` might not be + in the path or might be in a non-standard location, particularly on build + machines.""" - build_ninja = os.path.join(binary_dir, 'build.ninja') - if os.path.isfile(build_ninja): - with open(build_ninja, 'rb') as f: - # Look for the always-generated regeneration rule of the form: - # - # rule gn - # command = ... arguments ... - # - # to extract the gn binary's full path. - found_rule_gn = False - for line in f: - if line.strip() == 'rule gn': - found_rule_gn = True - continue - if found_rule_gn: - if len(line) == 0 or line[0] != ' ': - return None - if line.startswith(' command = '): - gn_command_line_parts = line.strip().split(' ') - if len(gn_command_line_parts) > 2: - return os.path.join(binary_dir, gn_command_line_parts[2]) + build_ninja = os.path.join(binary_dir, 'build.ninja') + if os.path.isfile(build_ninja): + with open(build_ninja, 'rb') as f: + # Look for the always-generated regeneration rule of the form: + # + # rule gn + # command = ... arguments ... + # + # to extract the gn binary's full path. + found_rule_gn = False + for line in f: + if line.strip() == 'rule gn': + found_rule_gn = True + continue + if found_rule_gn: + if len(line) == 0 or line[0] != ' ': + return None + if line.startswith(' command = '): + gn_command_line_parts = line.strip().split(' ') + if len(gn_command_line_parts) > 2: + return os.path.join(binary_dir, + gn_command_line_parts[2]) - return None + return None def _BinaryDirTargetOS(binary_dir): - """Returns the apparent target OS of binary_dir, or None if none appear to be - explicitly specified.""" + """Returns the apparent target OS of binary_dir, or None if none appear to + be explicitly specified.""" - gn_path = _FindGNFromBinaryDir(binary_dir) + gn_path = _FindGNFromBinaryDir(binary_dir) - if gn_path: - # Look for a GN “target_os”. 
- popen = subprocess.Popen([gn_path, '--root=' + CRASHPAD_DIR, - 'args', binary_dir, - '--list=target_os', '--short'], - shell=IS_WINDOWS_HOST, - stdout=subprocess.PIPE, stderr=open(os.devnull)) - value = popen.communicate()[0] - if popen.returncode == 0: - match = re.match('target_os = "(.*)"$', value.decode('utf-8')) - if match: - return match.group(1) + if gn_path: + # Look for a GN “target_os”. + popen = subprocess.Popen([ + gn_path, '--root=' + CRASHPAD_DIR, 'args', binary_dir, + '--list=target_os', '--short' + ], + shell=IS_WINDOWS_HOST, + stdout=subprocess.PIPE, + stderr=open(os.devnull)) + value = popen.communicate()[0] + if popen.returncode == 0: + match = re.match('target_os = "(.*)"$', value.decode('utf-8')) + if match: + return match.group(1) - # For GYP with Ninja, look for the appearance of “linux-android” in the path - # to ar. This path is configured by gyp_crashpad_android.py. - build_ninja_path = os.path.join(binary_dir, 'build.ninja') - if os.path.exists(build_ninja_path): - with open(build_ninja_path) as build_ninja_file: - build_ninja_content = build_ninja_file.read() - match = re.search('-linux-android(eabi)?-ar$', - build_ninja_content, - re.MULTILINE) - if match: - return 'android' + # For GYP with Ninja, look for the appearance of “linux-android” in the path + # to ar. This path is configured by gyp_crashpad_android.py. + build_ninja_path = os.path.join(binary_dir, 'build.ninja') + if os.path.exists(build_ninja_path): + with open(build_ninja_path) as build_ninja_file: + build_ninja_content = build_ninja_file.read() + match = re.search('-linux-android(eabi)?-ar$', build_ninja_content, + re.MULTILINE) + if match: + return 'android' - return None + return None def _EnableVTProcessingOnWindowsConsole(): - """Enables virtual terminal processing for ANSI/VT100-style escape sequences - on a Windows console attached to standard output. Returns True on success. 
- Returns False if standard output is not a console or if virtual terminal - processing is not supported. The feature was introduced in Windows 10. - """ + """Enables virtual terminal processing for ANSI/VT100-style escape sequences + on a Windows console attached to standard output. Returns True on success. + Returns False if standard output is not a console or if virtual terminal + processing is not supported. The feature was introduced in Windows 10. + """ - import pywintypes - import win32console - import winerror + import pywintypes + import win32console + import winerror - stdout_console = win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE) - try: - console_mode = stdout_console.GetConsoleMode() - except pywintypes.error as e: - if e.winerror == winerror.ERROR_INVALID_HANDLE: - # Standard output is not a console. - return False - raise + stdout_console = win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE) + try: + console_mode = stdout_console.GetConsoleMode() + except pywintypes.error as e: + if e.winerror == winerror.ERROR_INVALID_HANDLE: + # Standard output is not a console. + return False + raise - try: - # From . This would be - # win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but it’s too new to be - # defined there. - ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 + try: + # From . This would be + # win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but it’s too new to + # be defined there. + ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 - stdout_console.SetConsoleMode(console_mode | - ENABLE_VIRTUAL_TERMINAL_PROCESSING) - except pywintypes.error as e: - if e.winerror == winerror.ERROR_INVALID_PARAMETER: - # ANSI/VT100-style escape sequence processing isn’t supported before - # Windows 10. 
- return False - raise + stdout_console.SetConsoleMode(console_mode | + ENABLE_VIRTUAL_TERMINAL_PROCESSING) + except pywintypes.error as e: + if e.winerror == winerror.ERROR_INVALID_PARAMETER: + # ANSI/VT100-style escape sequence processing isn’t supported before + # Windows 10. + return False + raise - return True + return True def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line): - local_test_path = os.path.join(binary_dir, test) - MAYBE_UNSUPPORTED_TESTS = ( - 'crashpad_client_test', - 'crashpad_handler_test', - 'crashpad_minidump_test', - 'crashpad_snapshot_test', - ) - if not os.path.exists(local_test_path) and test in MAYBE_UNSUPPORTED_TESTS: - print('This test is not present and may not be supported, skipping') - return + local_test_path = os.path.join(binary_dir, test) + MAYBE_UNSUPPORTED_TESTS = ( + 'crashpad_client_test', + 'crashpad_handler_test', + 'crashpad_minidump_test', + 'crashpad_snapshot_test', + ) + if not os.path.exists(local_test_path) and test in MAYBE_UNSUPPORTED_TESTS: + print('This test is not present and may not be supported, skipping') + return - def _adb(*args): - # Flush all of this script’s own buffered stdout output before running adb, - # which will likely produce its own output on stdout. - sys.stdout.flush() + def _adb(*args): + # Flush all of this script’s own buffered stdout output before running + # adb, which will likely produce its own output on stdout. 
+ sys.stdout.flush() - adb_command = ['adb', '-s', android_device] - adb_command.extend(args) - subprocess.check_call(adb_command, shell=IS_WINDOWS_HOST) + adb_command = ['adb', '-s', android_device] + adb_command.extend(args) + subprocess.check_call(adb_command, shell=IS_WINDOWS_HOST) - def _adb_push(sources, destination): - args = list(sources) - args.append(destination) - _adb('push', *args) + def _adb_push(sources, destination): + args = list(sources) + args.append(destination) + _adb('push', *args) - def _adb_shell(command_args, env={}): - # Build a command to execute via “sh -c” instead of invoking it directly. - # Here’s why: - # - # /system/bin/env isn’t normally present prior to Android 6.0 (M), where - # toybox was introduced (Android platform/manifest 9a2c01e8450b). Instead, - # set environment variables by using the shell’s internal “export” command. - # - # adbd prior to Android 7.0 (N), and the adb client prior to SDK - # platform-tools version 24, don’t know how to communicate a shell command’s - # exit status. This was added in Android platform/system/core 606835ae5c4b). - # With older adb servers and clients, adb will “exit 0” indicating success - # even if the command failed on the device. This makes - # subprocess.check_call() semantics difficult to implement directly. As a - # workaround, have the device send the command’s exit status over stdout and - # pick it back up in this function. - # - # Both workarounds are implemented by giving the device a simple script, - # which adbd will run as an “sh -c” argument. 
-    adb_command = ['adb', '-s', android_device, 'shell']
-    script_commands = []
-    for k, v in env.items():
-      script_commands.append('export %s=%s' % (pipes.quote(k), pipes.quote(v)))
-    script_commands.extend([
-        ' '.join(pipes.quote(x) for x in command_args),
-        'status=${?}',
-        'echo "status=${status}"',
-        'exit ${status}'])
-    adb_command.append('; '.join(script_commands))
-    child = subprocess.Popen(adb_command,
-                             shell=IS_WINDOWS_HOST,
-                             stdin=open(os.devnull),
-                             stdout=subprocess.PIPE)

+    def _adb_shell(command_args, env={}):
+        # Build a command to execute via “sh -c” instead of invoking it
+        # directly. Here’s why:
+        #
+        # /system/bin/env isn’t normally present prior to Android 6.0 (M), where
+        # toybox was introduced (Android platform/manifest 9a2c01e8450b).
+        # Instead, set environment variables by using the shell’s internal
+        # “export” command.
+        #
+        # adbd prior to Android 7.0 (N), and the adb client prior to SDK
+        # platform-tools version 24, don’t know how to communicate a shell
+        # command’s exit status. This was added in Android platform/system/core
+        # 606835ae5c4b. With older adb servers and clients, adb will “exit 0”
+        # indicating success even if the command failed on the device. This
+        # makes subprocess.check_call() semantics difficult to implement
+        # directly. As a workaround, have the device send the command’s exit
+        # status over stdout and pick it back up in this function.
+        #
+        # Both workarounds are implemented by giving the device a simple script,
+        # which adbd will run as an “sh -c” argument. 
+ adb_command = ['adb', '-s', android_device, 'shell'] + script_commands = [] + for k, v in env.items(): + script_commands.append('export %s=%s' % + (pipes.quote(k), pipes.quote(v))) + script_commands.extend([ + ' '.join(pipes.quote(x) for x in command_args), 'status=${?}', + 'echo "status=${status}"', 'exit ${status}' + ]) + adb_command.append('; '.join(script_commands)) + child = subprocess.Popen(adb_command, + shell=IS_WINDOWS_HOST, + stdin=open(os.devnull), + stdout=subprocess.PIPE) - FINAL_LINE_RE = re.compile('status=(\d+)$') - final_line = None - while True: - # Use readline so that the test output appears “live” when running. - data = child.stdout.readline().decode('utf-8') - if data == '': - break - if final_line is not None: - # It wasn’t really the final line. - print(final_line, end='') + FINAL_LINE_RE = re.compile('status=(\d+)$') final_line = None - if FINAL_LINE_RE.match(data.rstrip()): - final_line = data - else: - print(data, end='') + while True: + # Use readline so that the test output appears “live” when running. + data = child.stdout.readline().decode('utf-8') + if data == '': + break + if final_line is not None: + # It wasn’t really the final line. + print(final_line, end='') + final_line = None + if FINAL_LINE_RE.match(data.rstrip()): + final_line = data + else: + print(data, end='') - if final_line is None: - # Maybe there was some stderr output after the end of stdout. Old versions - # of adb, prior to when the exit status could be communicated, smush the - # two together. - raise subprocess.CalledProcessError(-1, adb_command) - status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1)) - if status != 0: - raise subprocess.CalledProcessError(status, adb_command) + if final_line is None: + # Maybe there was some stderr output after the end of stdout. Old + # versions of adb, prior to when the exit status could be + # communicated, smush the two together. 
+            raise subprocess.CalledProcessError(-1, adb_command)
+        status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1))
+        if status != 0:
+            raise subprocess.CalledProcessError(status, adb_command)

-    child.wait()
-    if child.returncode != 0:
-      raise subprocess.CalledProcessError(subprocess.returncode, adb_command)
+        child.wait()
+        if child.returncode != 0:
+            raise subprocess.CalledProcessError(child.returncode,
+                                                adb_command)

-  # /system/bin/mktemp isn’t normally present prior to Android 6.0 (M), where
-  # toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it with
-  # a host-generated name. This won’t retry if the name is in use, but with 122
-  # bits of randomness, it should be OK. This uses “mkdir” instead of “mkdir -p”
-  # because the latter will not indicate failure if the directory already
-  # exists.
-  device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex)
-  _adb_shell(['mkdir', device_temp_dir])
+    # /system/bin/mktemp isn’t normally present prior to Android 6.0 (M), where
+    # toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it
+    # with a host-generated name. This won’t retry if the name is in use, but
+    # with 122 bits of randomness, it should be OK. This uses “mkdir” instead of
+    # “mkdir -p” because the latter will not indicate failure if the directory
+    # already exists.
+    device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex)
+    _adb_shell(['mkdir', device_temp_dir])

-  try:
-    # Specify test dependencies that must be pushed to the device. This could be
-    # determined automatically in a GN build, following the example used for
-    # Fuchsia. Since nothing like that exists for GYP, hard-code it for
-    # supported tests.
-    test_build_artifacts = [test, 'crashpad_handler']
-    test_data = ['test/test_paths_test_data_root.txt']
+    try:
+        # Specify test dependencies that must be pushed to the device. This
+        # could be determined automatically in a GN build, following the example
+        # used for Fuchsia. 
Since nothing like that exists for GYP, hard-code it
+        # for supported tests.
+        test_build_artifacts = [test, 'crashpad_handler']
+        test_data = ['test/test_paths_test_data_root.txt']

-    if test == 'crashpad_test_test':
-      test_build_artifacts.append(
-          'crashpad_test_test_multiprocess_exec_test_child')
-    elif test == 'crashpad_util_test':
-      test_data.append('util/net/testdata/')
+        if test == 'crashpad_test_test':
+            test_build_artifacts.append(
+                'crashpad_test_test_multiprocess_exec_test_child')
+        elif test == 'crashpad_util_test':
+            test_data.append('util/net/testdata/')

-    # Establish the directory structure on the device.
-    device_out_dir = posixpath.join(device_temp_dir, 'out')
-    device_mkdirs = [device_out_dir]
-    for source_path in test_data:
-      # A trailing slash could reasonably mean to copy an entire directory, but
-      # will interfere with what’s needed from the path split. All parent
-      # directories of any source_path need to be be represented in
-      # device_mkdirs, but it’s important that no source_path itself wind up in
-      # device_mkdirs, even if source_path names a directory, because that would
-      # cause the “adb push” of the directory below to behave incorrectly.
-      if source_path.endswith(posixpath.sep):
-        source_path = source_path[:-1]
+        # Establish the directory structure on the device.
+        device_out_dir = posixpath.join(device_temp_dir, 'out')
+        device_mkdirs = [device_out_dir]
+        for source_path in test_data:
+            # A trailing slash could reasonably mean to copy an entire
+            # directory, but will interfere with what’s needed from the path
+            # split. All parent directories of any source_path need to be
+            # represented in device_mkdirs, but it’s important that no
+            # source_path itself wind up in device_mkdirs, even if source_path
+            # names a directory, because that would cause the “adb push” of the
+            # directory below to behave incorrectly. 
+ if source_path.endswith(posixpath.sep): + source_path = source_path[:-1] - device_source_path = posixpath.join(device_temp_dir, source_path) - device_mkdir = posixpath.split(device_source_path)[0] - if device_mkdir not in device_mkdirs: - device_mkdirs.append(device_mkdir) - adb_mkdir_command = ['mkdir', '-p'] - adb_mkdir_command.extend(device_mkdirs) - _adb_shell(adb_mkdir_command) + device_source_path = posixpath.join(device_temp_dir, source_path) + device_mkdir = posixpath.split(device_source_path)[0] + if device_mkdir not in device_mkdirs: + device_mkdirs.append(device_mkdir) + adb_mkdir_command = ['mkdir', '-p'] + adb_mkdir_command.extend(device_mkdirs) + _adb_shell(adb_mkdir_command) - # Push the test binary and any other build output to the device. - local_test_build_artifacts = [] - for artifact in test_build_artifacts: - local_test_build_artifacts.append(os.path.join(binary_dir, artifact)) - _adb_push(local_test_build_artifacts, device_out_dir) + # Push the test binary and any other build output to the device. + local_test_build_artifacts = [] + for artifact in test_build_artifacts: + local_test_build_artifacts.append(os.path.join( + binary_dir, artifact)) + _adb_push(local_test_build_artifacts, device_out_dir) - # Push test data to the device. - for source_path in test_data: - _adb_push([os.path.join(CRASHPAD_DIR, source_path)], - posixpath.join(device_temp_dir, source_path)) + # Push test data to the device. + for source_path in test_data: + _adb_push([os.path.join(CRASHPAD_DIR, source_path)], + posixpath.join(device_temp_dir, source_path)) - # Run the test on the device. Pass the test data root in the environment. - # - # Because the test will not run with its standard output attached to a - # pseudo-terminal device, gtest will not normally enable colored output, so - # mimic gtest’s own logic for deciding whether to enable color by checking - # this script’s own standard output connection. 
The whitelist of TERM values - # comes from gtest googletest/src/gtest.cc - # testing::internal::ShouldUseColor(). - env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir} - gtest_color = os.environ.get('GTEST_COLOR') - if gtest_color in ('auto', None): - if (sys.stdout.isatty() and - (os.environ.get('TERM') in - ('xterm', 'xterm-color', 'xterm-256color', 'screen', - 'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode', - 'rxvt-unicode-256color', 'linux', 'cygwin') or - (IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))): - gtest_color = 'yes' - else: - gtest_color = 'no' - env['GTEST_COLOR'] = gtest_color - _adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line, env) - finally: - _adb_shell(['rm', '-rf', device_temp_dir]) + # Run the test on the device. Pass the test data root in the + # environment. + # + # Because the test will not run with its standard output attached to a + # pseudo-terminal device, gtest will not normally enable colored output, + # so mimic gtest’s own logic for deciding whether to enable color by + # checking this script’s own standard output connection. The whitelist + # of TERM values comes from gtest googletest/src/gtest.cc + # testing::internal::ShouldUseColor(). 
+ env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir} + gtest_color = os.environ.get('GTEST_COLOR') + if gtest_color in ('auto', None): + if (sys.stdout.isatty() and + (os.environ.get('TERM') + in ('xterm', 'xterm-color', 'xterm-256color', 'screen', + 'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode', + 'rxvt-unicode-256color', 'linux', 'cygwin') or + (IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))): + gtest_color = 'yes' + else: + gtest_color = 'no' + env['GTEST_COLOR'] = gtest_color + _adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line, + env) + finally: + _adb_shell(['rm', '-rf', device_temp_dir]) def _GetFuchsiaSDKRoot(): - arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64' - return os.path.join(CRASHPAD_DIR, 'third_party', 'fuchsia', 'sdk', arch) + arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64' + return os.path.join(CRASHPAD_DIR, 'third_party', 'fuchsia', 'sdk', arch) def _GenerateFuchsiaRuntimeDepsFiles(binary_dir, tests): - """Ensures a /.runtime_deps file exists for each test.""" - targets_file = os.path.join(binary_dir, 'targets.txt') - with open(targets_file, 'wb') as f: - f.write('//:' + '\n//:'.join(tests) + '\n') - gn_path = _FindGNFromBinaryDir(binary_dir) - subprocess.check_call( - [gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir, - '--runtime-deps-list-file=' + targets_file]) + """Ensures a /.runtime_deps file exists for each test.""" + targets_file = os.path.join(binary_dir, 'targets.txt') + with open(targets_file, 'wb') as f: + f.write('//:' + '\n//:'.join(tests) + '\n') + gn_path = _FindGNFromBinaryDir(binary_dir) + subprocess.check_call([ + gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir, + '--runtime-deps-list-file=' + targets_file + ]) - # Run again so that --runtime-deps-list-file isn't in the regen rule. See - # https://crbug.com/814816. 
- subprocess.check_call( - [gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir]) + # Run again so that --runtime-deps-list-file isn't in the regen rule. See + # https://crbug.com/814816. + subprocess.check_call( + [gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir]) def _HandleOutputFromFuchsiaLogListener(process, done_message): - """Pass through the output from |process| (which should be an instance of - Fuchsia's loglistener) until a special termination |done_message| is - encountered. + """Pass through the output from |process| (which should be an instance of + Fuchsia's loglistener) until a special termination |done_message| is + encountered. - Also attempts to determine if any tests failed by inspecting the log output, - and returns False if there were failures. - """ - success = True - while True: - line = process.stdout.readline().rstrip() - if 'FAILED TEST' in line: - success = False - elif done_message in line and 'echo ' not in line: - break - print(line) - return success + Also attempts to determine if any tests failed by inspecting the log output, + and returns False if there were failures. + """ + success = True + while True: + line = process.stdout.readline().rstrip() + if 'FAILED TEST' in line: + success = False + elif done_message in line and 'echo ' not in line: + break + print(line) + return success def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line): - """Runs the given Fuchsia |test| executable on the given |device_name|. The - device must already be booted. + """Runs the given Fuchsia |test| executable on the given |device_name|. The + device must already be booted. - Copies the executable and its runtime dependencies as specified by GN to the - target in /tmp using `netcp`, runs the binary on the target, and logs output - back to stdout on this machine via `loglistener`. - """ - sdk_root = _GetFuchsiaSDKRoot() - - # Run loglistener and filter the output to know when the test is done. 
- loglistener_process = subprocess.Popen( - [os.path.join(sdk_root, 'tools', 'loglistener'), device_name], - stdout=subprocess.PIPE, stdin=open(os.devnull), stderr=open(os.devnull)) - - runtime_deps_file = os.path.join(binary_dir, test + '.runtime_deps') - with open(runtime_deps_file, 'rb') as f: - runtime_deps = f.read().splitlines() - - def netruncmd(*args): - """Runs a list of commands on the target device. Each command is escaped - by using pipes.quote(), and then each command is chained by shell ';'. + Copies the executable and its runtime dependencies as specified by GN to the + target in /tmp using `netcp`, runs the binary on the target, and logs output + back to stdout on this machine via `loglistener`. """ - netruncmd_path = os.path.join(sdk_root, 'tools', 'netruncmd') - final_args = ' ; '.join(' '.join(pipes.quote(x) for x in command) - for command in args) - subprocess.check_call([netruncmd_path, device_name, final_args]) + sdk_root = _GetFuchsiaSDKRoot() - try: - unique_id = uuid.uuid4().hex - test_root = '/tmp/%s_%s' % (test, unique_id) - tmp_root = test_root + '/tmp' - staging_root = test_root + '/pkg' + # Run loglistener and filter the output to know when the test is done. + loglistener_process = subprocess.Popen( + [os.path.join(sdk_root, 'tools', 'loglistener'), device_name], + stdout=subprocess.PIPE, + stdin=open(os.devnull), + stderr=open(os.devnull)) - # Make a staging directory tree on the target. - directories_to_create = [tmp_root, - '%s/bin' % staging_root, - '%s/assets' % staging_root] - netruncmd(['mkdir', '-p'] + directories_to_create) + runtime_deps_file = os.path.join(binary_dir, test + '.runtime_deps') + with open(runtime_deps_file, 'rb') as f: + runtime_deps = f.read().splitlines() - def netcp(local_path): - """Uses `netcp` to copy a file or directory to the device. Files located - inside the build dir are stored to /pkg/bin, otherwise to /pkg/assets. - .so files are stored somewhere completely different, into /boot/lib (!). 
- This is because the loader service does not yet correctly handle the - namespace in which the caller is being run, and so can only load .so files - from a couple hardcoded locations, the only writable one of which is - /boot/lib, so we copy all .so files there. This bug is filed upstream as - ZX-1619. - """ - in_binary_dir = local_path.startswith(binary_dir + '/') - if in_binary_dir: - if local_path.endswith('.so'): - target_path = os.path.join( - '/boot/lib', local_path[len(binary_dir)+1:]) - else: - target_path = os.path.join( - staging_root, 'bin', local_path[len(binary_dir)+1:]) - else: - relative_path = os.path.relpath(local_path, CRASHPAD_DIR) - target_path = os.path.join(staging_root, 'assets', relative_path) - netcp_path = os.path.join(sdk_root, 'tools', 'netcp') - subprocess.check_call([netcp_path, local_path, - device_name + ':' + target_path], - stderr=open(os.devnull)) + def netruncmd(*args): + """Runs a list of commands on the target device. Each command is escaped + by using pipes.quote(), and then each command is chained by shell ';'. + """ + netruncmd_path = os.path.join(sdk_root, 'tools', 'netruncmd') + final_args = ' ; '.join( + ' '.join(pipes.quote(x) for x in command) for command in args) + subprocess.check_call([netruncmd_path, device_name, final_args]) - # Copy runtime deps into the staging tree. 
- for dep in runtime_deps: - local_path = os.path.normpath(os.path.join(binary_dir, dep)) - if os.path.isdir(local_path): - for root, dirs, files in os.walk(local_path): - for f in files: - netcp(os.path.join(root, f)) - else: - netcp(local_path) + try: + unique_id = uuid.uuid4().hex + test_root = '/tmp/%s_%s' % (test, unique_id) + tmp_root = test_root + '/tmp' + staging_root = test_root + '/pkg' - done_message = 'TERMINATED: ' + unique_id - namespace_command = [ - 'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root, '/svc=/svc', - '--replace-child-argv0=/pkg/bin/' + test, '--', - staging_root + '/bin/' + test] + extra_command_line - netruncmd(namespace_command, ['echo', done_message]) + # Make a staging directory tree on the target. + directories_to_create = [ + tmp_root, + '%s/bin' % staging_root, + '%s/assets' % staging_root + ] + netruncmd(['mkdir', '-p'] + directories_to_create) - success = _HandleOutputFromFuchsiaLogListener( - loglistener_process, done_message) - if not success: - raise subprocess.CalledProcessError(1, test) - finally: - netruncmd(['rm', '-rf', test_root]) + def netcp(local_path): + """Uses `netcp` to copy a file or directory to the device. Files + located inside the build dir are stored to /pkg/bin, otherwise to + /pkg/assets. .so files are stored somewhere completely different, + into /boot/lib (!). This is because the loader service does not yet + correctly handle the namespace in which the caller is being run, and + so can only load .so files from a couple hardcoded locations, the + only writable one of which is /boot/lib, so we copy all .so files + there. This bug is filed upstream as ZX-1619. 
+ """ + in_binary_dir = local_path.startswith(binary_dir + '/') + if in_binary_dir: + if local_path.endswith('.so'): + target_path = os.path.join('/boot/lib', + local_path[len(binary_dir) + 1:]) + else: + target_path = os.path.join(staging_root, 'bin', + local_path[len(binary_dir) + 1:]) + else: + relative_path = os.path.relpath(local_path, CRASHPAD_DIR) + target_path = os.path.join(staging_root, 'assets', + relative_path) + netcp_path = os.path.join(sdk_root, 'tools', 'netcp') + subprocess.check_call( + [netcp_path, local_path, device_name + ':' + target_path], + stderr=open(os.devnull)) + + # Copy runtime deps into the staging tree. + for dep in runtime_deps: + local_path = os.path.normpath(os.path.join(binary_dir, dep)) + if os.path.isdir(local_path): + for root, dirs, files in os.walk(local_path): + for f in files: + netcp(os.path.join(root, f)) + else: + netcp(local_path) + + done_message = 'TERMINATED: ' + unique_id + namespace_command = [ + 'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root, + '/svc=/svc', '--replace-child-argv0=/pkg/bin/' + test, '--', + staging_root + '/bin/' + test + ] + extra_command_line + netruncmd(namespace_command, ['echo', done_message]) + + success = _HandleOutputFromFuchsiaLogListener(loglistener_process, + done_message) + if not success: + raise subprocess.CalledProcessError(1, test) + finally: + netruncmd(['rm', '-rf', test_root]) def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False): - """Runs the given iOS |test| app on iPhone 8 with the default OS version.""" + """Runs the given iOS |test| app on iPhone 8 with the default OS version.""" - def xctest(binary_dir, test): - """Returns a dict containing the xctestrun data needed to run an - XCTest-based test app.""" - test_path = os.path.join(CRASHPAD_DIR, binary_dir) - module_data = { - 'TestBundlePath': os.path.join(test_path, test + '_module.xctest'), - 'TestHostPath': os.path.join(test_path, test + '.app'), - 'TestingEnvironmentVariables': { - 'DYLD_FRAMEWORK_PATH': 
'__TESTROOT__/Debug-iphonesimulator:', - 'DYLD_INSERT_LIBRARIES': ( - '__PLATFORMS__/iPhoneSimulator.platform/Developer/' - 'usr/lib/libXCTestBundleInject.dylib'), - 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', - 'IDEiPhoneInternalTestBundleName': test + '.app', - 'XCInjectBundleInto': '__TESTHOST__/' + test, - } - } - return { test: module_data } + def xctest(binary_dir, test): + """Returns a dict containing the xctestrun data needed to run an + XCTest-based test app.""" + test_path = os.path.join(CRASHPAD_DIR, binary_dir) + module_data = { + 'TestBundlePath': os.path.join(test_path, test + '_module.xctest'), + 'TestHostPath': os.path.join(test_path, test + '.app'), + 'TestingEnvironmentVariables': { + 'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:', + 'DYLD_INSERT_LIBRARIES': + ('__PLATFORMS__/iPhoneSimulator.platform/Developer/' + 'usr/lib/libXCTestBundleInject.dylib'), + 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', + 'IDEiPhoneInternalTestBundleName': test + '.app', + 'XCInjectBundleInto': '__TESTHOST__/' + test, + } + } + return {test: module_data} - def xcuitest(binary_dir, test): - """Returns a dict containing the xctestrun data needed to run an - XCUITest-based test app.""" + def xcuitest(binary_dir, test): + """Returns a dict containing the xctestrun data needed to run an + XCUITest-based test app.""" - test_path = os.path.join(CRASHPAD_DIR, binary_dir) - runner_path = os.path.join(test_path, test + '_module-Runner.app') - bundle_path = os.path.join(runner_path, 'PlugIns', test + '_module.xctest') - target_app_path = os.path.join(test_path, test + '.app') - module_data = { - 'IsUITestBundle': True, - 'IsXCTRunnerHostedTestBundle': True, - 'TestBundlePath': bundle_path, - 'TestHostPath': runner_path, - 'UITargetAppPath': target_app_path, - 'DependentProductPaths': [ bundle_path, runner_path, target_app_path ], - 'TestingEnvironmentVariables': { - 'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:', - 
'DYLD_INSERT_LIBRARIES': ( - '__PLATFORMS__/iPhoneSimulator.platform/Developer/' - 'usr/lib/libXCTestBundleInject.dylib'), - 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', - 'XCInjectBundleInto': '__TESTHOST__/' + test + '_module-Runner', - }, - } - return { test: module_data } + test_path = os.path.join(CRASHPAD_DIR, binary_dir) + runner_path = os.path.join(test_path, test + '_module-Runner.app') + bundle_path = os.path.join(runner_path, 'PlugIns', + test + '_module.xctest') + target_app_path = os.path.join(test_path, test + '.app') + module_data = { + 'IsUITestBundle': True, + 'IsXCTRunnerHostedTestBundle': True, + 'TestBundlePath': bundle_path, + 'TestHostPath': runner_path, + 'UITargetAppPath': target_app_path, + 'DependentProductPaths': [ + bundle_path, runner_path, target_app_path + ], + 'TestingEnvironmentVariables': { + 'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:', + 'DYLD_INSERT_LIBRARIES': + ('__PLATFORMS__/iPhoneSimulator.platform/Developer/' + 'usr/lib/libXCTestBundleInject.dylib'), + 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', + 'XCInjectBundleInto': '__TESTHOST__/' + test + '_module-Runner', + }, + } + return {test: module_data} - with tempfile.NamedTemporaryFile() as f: - import plistlib + with tempfile.NamedTemporaryFile() as f: + import plistlib - xctestrun_path = f.name - print(xctestrun_path) - if is_xcuitest: - plistlib.writePlist(xcuitest(binary_dir, test), xctestrun_path) - else: - plistlib.writePlist(xctest(binary_dir, test), xctestrun_path) + xctestrun_path = f.name + print(xctestrun_path) + if is_xcuitest: + plistlib.writePlist(xcuitest(binary_dir, test), xctestrun_path) + else: + plistlib.writePlist(xctest(binary_dir, test), xctestrun_path) + + subprocess.check_call([ + 'xcodebuild', 'test-without-building', '-xctestrun', xctestrun_path, + '-destination', 'platform=iOS Simulator,name=iPhone 8' + ]) - subprocess.check_call(['xcodebuild', 'test-without-building', - '-xctestrun', xctestrun_path, 
'-destination', - 'platform=iOS Simulator,name=iPhone 8']) # This script is primarily used from the waterfall so that the list of tests # that are run is maintained in-tree, rather than in a separate infrastructure # location in the recipe. def main(args): - parser = argparse.ArgumentParser(description='Run Crashpad unittests.') - parser.add_argument('binary_dir', help='Root of build dir') - parser.add_argument('test', nargs='*', help='Specific test(s) to run.') - parser.add_argument('--gtest_filter', - help='GTest filter applied to GTest binary runs.') - args = parser.parse_args() + parser = argparse.ArgumentParser(description='Run Crashpad unittests.') + parser.add_argument('binary_dir', help='Root of build dir') + parser.add_argument('test', nargs='*', help='Specific test(s) to run.') + parser.add_argument('--gtest_filter', + help='GTest filter applied to GTest binary runs.') + args = parser.parse_args() - # Tell 64-bit Windows tests where to find 32-bit test executables, for - # cross-bitted testing. This relies on the fact that the GYP build by default - # uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64 for the - # 64-bit build. This is not a universally valid assumption, and if it’s not - # met, 64-bit tests that require 32-bit build output will disable themselves - # dynamically. - if (sys.platform == 'win32' and args.binary_dir.endswith('_x64') and - 'CRASHPAD_TEST_32_BIT_OUTPUT' not in os.environ): - binary_dir_32 = args.binary_dir[:-4] - if os.path.isdir(binary_dir_32): - os.environ['CRASHPAD_TEST_32_BIT_OUTPUT'] = binary_dir_32 + # Tell 64-bit Windows tests where to find 32-bit test executables, for + # cross-bitted testing. This relies on the fact that the GYP build by + # default uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64 + # for the 64-bit build. This is not a universally valid assumption, and if + # it’s not met, 64-bit tests that require 32-bit build output will disable + # themselves dynamically. 
+ if (sys.platform == 'win32' and args.binary_dir.endswith('_x64') and + 'CRASHPAD_TEST_32_BIT_OUTPUT' not in os.environ): + binary_dir_32 = args.binary_dir[:-4] + if os.path.isdir(binary_dir_32): + os.environ['CRASHPAD_TEST_32_BIT_OUTPUT'] = binary_dir_32 - target_os = _BinaryDirTargetOS(args.binary_dir) - is_android = target_os == 'android' - is_fuchsia = target_os == 'fuchsia' - is_ios = target_os == 'ios' + target_os = _BinaryDirTargetOS(args.binary_dir) + is_android = target_os == 'android' + is_fuchsia = target_os == 'fuchsia' + is_ios = target_os == 'ios' - tests = [ - 'crashpad_client_test', - 'crashpad_handler_test', - 'crashpad_minidump_test', - 'crashpad_snapshot_test', - 'crashpad_test_test', - 'crashpad_util_test', - ] + tests = [ + 'crashpad_client_test', + 'crashpad_handler_test', + 'crashpad_minidump_test', + 'crashpad_snapshot_test', + 'crashpad_test_test', + 'crashpad_util_test', + ] - if is_android: - android_device = os.environ.get('ANDROID_DEVICE') - if not android_device: - adb_devices = subprocess.check_output(['adb', 'devices'], - shell=IS_WINDOWS_HOST) - devices = [] - for line in adb_devices.splitlines(): - line = line.decode('utf-8') - if (line == 'List of devices attached' or - re.match('^\* daemon .+ \*$', line) or - line == ''): - continue - (device, ignore) = line.split('\t') - devices.append(device) - if len(devices) != 1: - print("Please set ANDROID_DEVICE to your device's id", file=sys.stderr) - return 2 - android_device = devices[0] - print('Using autodetected Android device:', android_device) - elif is_fuchsia: - zircon_nodename = os.environ.get('ZIRCON_NODENAME') - if not zircon_nodename: - netls = os.path.join(_GetFuchsiaSDKRoot(), 'tools', 'netls') - popen = subprocess.Popen([netls, '--nowait'], stdout=subprocess.PIPE) - devices = popen.communicate()[0].splitlines() - if popen.returncode != 0 or len(devices) != 1: - print("Please set ZIRCON_NODENAME to your device's hostname", - file=sys.stderr) - return 2 - zircon_nodename = 
devices[0].strip().split()[1] - print('Using autodetected Fuchsia device:', zircon_nodename) - _GenerateFuchsiaRuntimeDepsFiles( - args.binary_dir, [t for t in tests if not t.endswith('.py')]) - elif is_ios: - tests.append('ios_crash_xcuitests') - elif IS_WINDOWS_HOST: - tests.append('snapshot/win/end_to_end_test.py') + if is_android: + android_device = os.environ.get('ANDROID_DEVICE') + if not android_device: + adb_devices = subprocess.check_output(['adb', 'devices'], + shell=IS_WINDOWS_HOST) + devices = [] + for line in adb_devices.splitlines(): + line = line.decode('utf-8') + if (line == 'List of devices attached' or + re.match('^\* daemon .+ \*$', line) or line == ''): + continue + (device, ignore) = line.split('\t') + devices.append(device) + if len(devices) != 1: + print("Please set ANDROID_DEVICE to your device's id", + file=sys.stderr) + return 2 + android_device = devices[0] + print('Using autodetected Android device:', android_device) + elif is_fuchsia: + zircon_nodename = os.environ.get('ZIRCON_NODENAME') + if not zircon_nodename: + netls = os.path.join(_GetFuchsiaSDKRoot(), 'tools', 'netls') + popen = subprocess.Popen([netls, '--nowait'], + stdout=subprocess.PIPE) + devices = popen.communicate()[0].splitlines() + if popen.returncode != 0 or len(devices) != 1: + print("Please set ZIRCON_NODENAME to your device's hostname", + file=sys.stderr) + return 2 + zircon_nodename = devices[0].strip().split()[1] + print('Using autodetected Fuchsia device:', zircon_nodename) + _GenerateFuchsiaRuntimeDepsFiles( + args.binary_dir, [t for t in tests if not t.endswith('.py')]) + elif is_ios: + tests.append('ios_crash_xcuitests') + elif IS_WINDOWS_HOST: + tests.append('snapshot/win/end_to_end_test.py') - if args.test: - for t in args.test: - if t not in tests: - print('Unrecognized test:', t, file=sys.stderr) - return 3 - tests = args.test + if args.test: + for t in args.test: + if t not in tests: + print('Unrecognized test:', t, file=sys.stderr) + return 3 + tests = 
args.test - for test in tests: - print('-' * 80) - print(test) - print('-' * 80) - if test.endswith('.py'): - subprocess.check_call( - [sys.executable, os.path.join(CRASHPAD_DIR, test), args.binary_dir]) - else: - extra_command_line = [] - if args.gtest_filter: - extra_command_line.append('--gtest_filter=' + args.gtest_filter) - if is_android: - _RunOnAndroidTarget(args.binary_dir, test, android_device, - extra_command_line) - elif is_fuchsia: - _RunOnFuchsiaTarget(args.binary_dir, test, zircon_nodename, - extra_command_line) - elif is_ios: - _RunOnIOSTarget(args.binary_dir, test, - is_xcuitest=test.startswith('ios')) - else: - subprocess.check_call([os.path.join(args.binary_dir, test)] + - extra_command_line) + for test in tests: + print('-' * 80) + print(test) + print('-' * 80) + if test.endswith('.py'): + subprocess.check_call([ + sys.executable, + os.path.join(CRASHPAD_DIR, test), args.binary_dir + ]) + else: + extra_command_line = [] + if args.gtest_filter: + extra_command_line.append('--gtest_filter=' + args.gtest_filter) + if is_android: + _RunOnAndroidTarget(args.binary_dir, test, android_device, + extra_command_line) + elif is_fuchsia: + _RunOnFuchsiaTarget(args.binary_dir, test, zircon_nodename, + extra_command_line) + elif is_ios: + _RunOnIOSTarget(args.binary_dir, + test, + is_xcuitest=test.startswith('ios')) + else: + subprocess.check_call([os.path.join(args.binary_dir, test)] + + extra_command_line) - return 0 + return 0 if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/doc/support/generate_doxygen.py b/doc/support/generate_doxygen.py index a93028df..577b51e5 100755 --- a/doc/support/generate_doxygen.py +++ b/doc/support/generate_doxygen.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# coding: utf-8 # Copyright 2017 The Crashpad Authors. All rights reserved. 
# @@ -24,26 +23,26 @@ import sys def main(args): - script_dir = os.path.dirname(__file__) - crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir) + script_dir = os.path.dirname(__file__) + crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir) - # Run from the Crashpad project root directory. - os.chdir(crashpad_dir) + # Run from the Crashpad project root directory. + os.chdir(crashpad_dir) - output_dir = os.path.join('out', 'doc', 'doxygen') + output_dir = os.path.join('out', 'doc', 'doxygen') - if os.path.isdir(output_dir) and not os.path.islink(output_dir): - shutil.rmtree(output_dir) - elif os.path.exists(output_dir): - os.unlink(output_dir) + if os.path.isdir(output_dir) and not os.path.islink(output_dir): + shutil.rmtree(output_dir) + elif os.path.exists(output_dir): + os.unlink(output_dir) - os.makedirs(output_dir, 0o755) + os.makedirs(output_dir, 0o755) - doxy_file = os.path.join('doc', 'support', 'crashpad.doxy') - subprocess.check_call(['doxygen', doxy_file]) + doxy_file = os.path.join('doc', 'support', 'crashpad.doxy') + subprocess.check_call(['doxygen', doxy_file]) - return 0 + return 0 if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/infra/config/PRESUBMIT.py b/infra/config/PRESUBMIT.py index c2317e1b..ca044332 100644 --- a/infra/config/PRESUBMIT.py +++ b/infra/config/PRESUBMIT.py @@ -12,8 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ def CheckChangeOnUpload(input_api, output_api): - return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) + return input_api.canned_checks.CheckChangedLUCIConfigs( + input_api, output_api) + def CheckChangeOnCommit(input_api, output_api): - return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) + return input_api.canned_checks.CheckChangedLUCIConfigs( + input_api, output_api) diff --git a/snapshot/win/end_to_end_test.py b/snapshot/win/end_to_end_test.py index 8823785f..b1919ade 100755 --- a/snapshot/win/end_to_end_test.py +++ b/snapshot/win/end_to_end_test.py @@ -29,462 +29,452 @@ import win32con import win32pipe import winerror - g_temp_dirs = [] g_had_failures = False def MakeTempDir(): - global g_temp_dirs - new_dir = tempfile.mkdtemp() - g_temp_dirs.append(new_dir) - return new_dir + global g_temp_dirs + new_dir = tempfile.mkdtemp() + g_temp_dirs.append(new_dir) + return new_dir def CleanUpTempDirs(): - global g_temp_dirs - for d in g_temp_dirs: - subprocess.call(['rmdir', '/s', '/q', d], shell=True) + global g_temp_dirs + for d in g_temp_dirs: + subprocess.call(['rmdir', '/s', '/q', d], shell=True) def FindInstalledWindowsApplication(app_path): - search_paths = [os.getenv('PROGRAMFILES(X86)'), - os.getenv('PROGRAMFILES'), - os.getenv('PROGRAMW6432'), - os.getenv('LOCALAPPDATA')] - search_paths += os.getenv('PATH', '').split(os.pathsep) + search_paths = [ + os.getenv('PROGRAMFILES(X86)'), + os.getenv('PROGRAMFILES'), + os.getenv('PROGRAMW6432'), + os.getenv('LOCALAPPDATA') + ] + search_paths += os.getenv('PATH', '').split(os.pathsep) - for search_path in search_paths: - if not search_path: - continue - path = os.path.join(search_path, app_path) - if os.path.isfile(path): - return path + for search_path in search_paths: + if not search_path: + continue + path = os.path.join(search_path, app_path) + if os.path.isfile(path): + return path - return None - - -def GetCdbPath(): - """Search in some reasonable places to 
find cdb.exe. Searches x64 before x86 - and newer versions before older versions. - """ - possible_paths = ( - os.path.join('Windows Kits', '10', 'Debuggers', 'x64'), - os.path.join('Windows Kits', '10', 'Debuggers', 'x86'), - os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'), - os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'), - os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'), - os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'), - 'Debugging Tools For Windows (x64)', - 'Debugging Tools For Windows (x86)', - 'Debugging Tools For Windows',) - for possible_path in possible_paths: - app_path = os.path.join(possible_path, 'cdb.exe') - app_path = FindInstalledWindowsApplication(app_path) - if app_path: - return app_path - return None - - -def NamedPipeExistsAndReady(pipe_name): - """Returns False if pipe_name does not exist. If pipe_name does exist, blocks - until the pipe is ready to service clients, and then returns True. - - This is used as a drop-in replacement for os.path.exists() and os.access() to - test for the pipe's existence. Both of those calls tickle the pipe in a way - that appears to the server to be a client connecting, triggering error - messages when no data is received. - - Although this function only needs to test pipe existence (waiting for - CreateNamedPipe()), it actually winds up testing pipe readiness - (waiting for ConnectNamedPipe()). This is unnecessary but harmless. - """ - try: - win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER) - except pywintypes.error as e: - if e[0] == winerror.ERROR_FILE_NOT_FOUND: - return False - raise - return True - - -def GetDumpFromProgram( - out_dir, pipe_name, executable_name, expect_exit_code, *args): - """Initialize a crash database, and run |executable_name| connecting to a - crash handler. If pipe_name is set, crashpad_handler will be started first. If - pipe_name is empty, the executable is responsible for starting - crashpad_handler. 
*args will be passed after other arguments to - executable_name. If the child process does not exit with |expect_exit_code|, - an exception will be raised. Returns the path to the minidump generated by - crashpad_handler for further testing. - """ - test_database = MakeTempDir() - handler = None - - try: - subprocess.check_call( - [os.path.join(out_dir, 'crashpad_database_util.exe'), '--create', - '--database=' + test_database]) - - if pipe_name is not None: - handler = subprocess.Popen([ - os.path.join(out_dir, 'crashpad_handler.com'), - '--pipe-name=' + pipe_name, - '--database=' + test_database - ]) - - # Wait until the server is ready. - printed = False - while not NamedPipeExistsAndReady(pipe_name): - if not printed: - print('Waiting for crashpad_handler to be ready...') - printed = True - time.sleep(0.001) - - command = [os.path.join(out_dir, executable_name), pipe_name] + list(args) - else: - command = ([os.path.join(out_dir, executable_name), - os.path.join(out_dir, 'crashpad_handler.com'), - test_database] + - list(args)) - print('Running %s' % os.path.basename(command[0])) - exit_code = subprocess.call(command) - if exit_code != expect_exit_code: - raise subprocess.CalledProcessError(exit_code, executable_name) - - out = subprocess.check_output([ - os.path.join(out_dir, 'crashpad_database_util.exe'), - '--database=' + test_database, - '--show-pending-reports', - '--show-all-report-info', - ]) - for line in out.splitlines(): - if line.strip().startswith('Path:'): - return line.partition(':')[2].strip() - finally: - if handler: - handler.kill() - - -def GetDumpFromCrashyProgram(out_dir, pipe_name): - return GetDumpFromProgram(out_dir, - pipe_name, - 'crashy_program.exe', - win32con.EXCEPTION_ACCESS_VIOLATION) - - -def GetDumpFromOtherProgram(out_dir, pipe_name, *args): - return GetDumpFromProgram( - out_dir, pipe_name, 'crash_other_program.exe', 0, *args) - - -def GetDumpFromSignal(out_dir, pipe_name, *args): - STATUS_FATAL_APP_EXIT = 0x40000015 # Not known 
by win32con. - return GetDumpFromProgram(out_dir, - pipe_name, - 'crashy_signal.exe', - STATUS_FATAL_APP_EXIT, - *args) - - -def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name): - return GetDumpFromProgram(out_dir, - pipe_name, - 'self_destroying_program.exe', - win32con.EXCEPTION_BREAKPOINT) - - -def GetDumpFromZ7Program(out_dir, pipe_name): - return GetDumpFromProgram(out_dir, - pipe_name, - 'crashy_z7_loader.exe', - win32con.EXCEPTION_ACCESS_VIOLATION) - - -class CdbRun(object): - """Run cdb.exe passing it a cdb command and capturing the output. - `Check()` searches for regex patterns in sequence allowing verification of - expected output. - """ - - def __init__(self, cdb_path, dump_path, command): - # Run a command line that loads the dump, runs the specified cdb command, - # and then quits, and capturing stdout. - self.out = subprocess.check_output([ - cdb_path, - '-z', dump_path, - '-c', command + ';q' - ]) - - def Check(self, pattern, message, re_flags=0): - match_obj = re.search(pattern, self.out, re_flags) - if match_obj: - # Matched. Consume up to end of match. - self.out = self.out[match_obj.end(0):] - print('ok - %s' % message) - sys.stdout.flush() - else: - print('-' * 80, file=sys.stderr) - print('FAILED - %s' % message, file=sys.stderr) - print('-' * 80, file=sys.stderr) - print('did not match:\n %s' % pattern, file=sys.stderr) - print('-' * 80, file=sys.stderr) - print('remaining output was:\n %s' % self.out, file=sys.stderr) - print('-' * 80, file=sys.stderr) - sys.stderr.flush() - global g_had_failures - g_had_failures = True - - def Find(self, pattern, re_flags=0): - match_obj = re.search(pattern, self.out, re_flags) - if match_obj: - # Matched. Consume up to end of match. 
- self.out = self.out[match_obj.end(0):] - return match_obj return None -def RunTests(cdb_path, - dump_path, - start_handler_dump_path, - destroyed_dump_path, - z7_dump_path, - other_program_path, - other_program_no_exception_path, - sigabrt_main_path, - sigabrt_background_path, - pipe_name): - """Runs various tests in sequence. Runs a new cdb instance on the dump for - each block of tests to reduce the chances that output from one command is - confused for output from another. - """ - out = CdbRun(cdb_path, dump_path, '.ecxr') - out.Check('This dump file has an exception of interest stored in it', - 'captured exception') +def GetCdbPath(): + """Search in some reasonable places to find cdb.exe. Searches x64 before x86 + and newer versions before older versions. + """ + possible_paths = ( + os.path.join('Windows Kits', '10', 'Debuggers', 'x64'), + os.path.join('Windows Kits', '10', 'Debuggers', 'x86'), + os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'), + os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'), + os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'), + os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'), + 'Debugging Tools For Windows (x64)', + 'Debugging Tools For Windows (x86)', + 'Debugging Tools For Windows', + ) + for possible_path in possible_paths: + app_path = os.path.join(possible_path, 'cdb.exe') + app_path = FindInstalledWindowsApplication(app_path) + if app_path: + return app_path + return None - # When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as - # "`anonymous namespace'" and instead gives the decorated form. 
- out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' - 'SomeCrashyFunction', - 'exception at correct location') - out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr') - out.Check('This dump file has an exception of interest stored in it', - 'captured exception (using StartHandler())') - out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' - 'SomeCrashyFunction', - 'exception at correct location (using StartHandler())') +def NamedPipeExistsAndReady(pipe_name): + """Returns False if pipe_name does not exist. If pipe_name does exist, + blocks until the pipe is ready to service clients, and then returns True. - out = CdbRun(cdb_path, dump_path, '!peb') - out.Check(r'PEB at', 'found the PEB') - out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') - out.Check(r'Base TimeStamp Module', 'module list present') - pipe_name_escaped = pipe_name.replace('\\', '\\\\') - out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped, - 'some PEB data is correct') - out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured', - re.IGNORECASE) + This is used as a drop-in replacement for os.path.exists() and os.access() + to test for the pipe's existence. Both of those calls tickle the pipe in a + way that appears to the server to be a client connecting, triggering error + messages when no data is received. - out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters') - out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"', - 'PEB->ProcessParameters.ImagePathName string captured') - out.Check(' DesktopInfo *: ' - '_UNICODE_STRING "(?!--- memory read error at address ).*"', - 'PEB->ProcessParameters.DesktopInfo string captured') + Although this function only needs to test pipe existence (waiting for + CreateNamedPipe()), it actually winds up testing pipe readiness (waiting for + ConnectNamedPipe()). This is unnecessary but harmless. 
+ """ + try: + win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER) + except pywintypes.error as e: + if e[0] == winerror.ERROR_FILE_NOT_FOUND: + return False + raise + return True - out = CdbRun(cdb_path, dump_path, '!teb') - out.Check(r'TEB at', 'found the TEB') - out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data') - out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue') - out = CdbRun(cdb_path, dump_path, '!gle') - out.Check('LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the ' - 'file specified.', '!gle gets last error') - out.Check('LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The ' - 'file %hs does not exist.', '!gle gets last ntstatus') +def GetDumpFromProgram(out_dir, pipe_name, executable_name, expect_exit_code, + *args): + """Initialize a crash database, and run |executable_name| connecting to a + crash handler. If pipe_name is set, crashpad_handler will be started first. + If pipe_name is empty, the executable is responsible for starting + crashpad_handler. *args will be passed after other arguments to + executable_name. If the child process does not exit with |expect_exit_code|, + an exception will be raised. Returns the path to the minidump generated by + crashpad_handler for further testing. + """ + test_database = MakeTempDir() + handler = None - if False: - # TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList. - out = CdbRun(cdb_path, dump_path, '!locks') - out.Check(r'CritSec crashy_program!crashpad::`anonymous namespace\'::' - r'g_test_critical_section', 'lock was captured') - if platform.win32_ver()[0] != '7': - # We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7. 
- out.Check(r'\*\*\* Locked', 'lock debug info was captured, and is locked') + try: + subprocess.check_call([ + os.path.join(out_dir, 'crashpad_database_util.exe'), '--create', + '--database=' + test_database + ]) - out = CdbRun(cdb_path, dump_path, '!handle') - out.Check(r'\d+ Handles', 'captured handles') - out.Check(r'Event\s+\d+', 'capture some event handles') - out.Check(r'File\s+\d+', 'capture some file handles') + if pipe_name is not None: + handler = subprocess.Popen([ + os.path.join(out_dir, 'crashpad_handler.com'), + '--pipe-name=' + pipe_name, '--database=' + test_database + ]) - out = CdbRun(cdb_path, dump_path, 'lm') - out.Check(r'Unloaded modules:', 'captured some unloaded modules') - out.Check(r'lz32\.dll', 'found expected unloaded module lz32') - out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror') + # Wait until the server is ready. + printed = False + while not NamedPipeExistsAndReady(pipe_name): + if not printed: + print('Waiting for crashpad_handler to be ready...') + printed = True + time.sleep(0.001) - out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2') - out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') - out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE) + command = [os.path.join(out_dir, executable_name), pipe_name + ] + list(args) + else: + command = ([ + os.path.join(out_dir, executable_name), + os.path.join(out_dir, 'crashpad_handler.com'), test_database + ] + list(args)) + print('Running %s' % os.path.basename(command[0])) + exit_code = subprocess.call(command) + if exit_code != expect_exit_code: + raise subprocess.CalledProcessError(exit_code, executable_name) - # Check that there is no stack trace in the self-destroyed process. Confirm - # that the top is where we expect it (that's based only on IP), but subsequent - # stack entries will not be available. This confirms that we have a mostly - # valid dump, but that the stack was omitted. 
- out.Check(r'self_destroying_program!crashpad::`anonymous namespace\'::' - r'FreeOwnStackAndBreak.*\nquit:', - 'at correct location, no additional stack entries') + out = subprocess.check_output([ + os.path.join(out_dir, 'crashpad_database_util.exe'), + '--database=' + test_database, + '--show-pending-reports', + '--show-all-report-info', + ]) + for line in out.splitlines(): + if line.strip().startswith('Path:'): + return line.partition(':')[2].strip() + finally: + if handler: + handler.kill() - # Dump memory pointed to be EDI on the background suspended thread. We don't - # know the index of the thread because the system may have started other - # threads, so first do a run to extract the thread index that's suspended, and - # then another run to dump the data pointed to by EDI for that thread. - out = CdbRun(cdb_path, dump_path, '.ecxr;~') - match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:') - if match_obj: - thread = match_obj.group(1) - out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi') - out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50', - 'data pointed to by registers captured') - # Move up one stack frame after jumping to the exception, and examine memory. - out = CdbRun(cdb_path, dump_path, - '.ecxr; .f+; dd /c100 poi(offset_pointer)-20') - out.Check(r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e ' - r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 ' - r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c ' - r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 ' - r'80000094 00000095 80000096 00000097', - 'data pointed to by stack captured') +def GetDumpFromCrashyProgram(out_dir, pipe_name): + return GetDumpFromProgram(out_dir, pipe_name, 'crashy_program.exe', + win32con.EXCEPTION_ACCESS_VIOLATION) - # Attempt to retrieve the value of g_extra_memory_pointer (by name), and then - # examine the memory at which it points. 
Both should have been saved. - out = CdbRun(cdb_path, dump_path, - 'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 ' - 'L8') - out.Check(r'0000655e 0000656b 00006578 00006585', - 'extra memory range captured') - out = CdbRun(cdb_path, dump_path, '.dumpdebug') - out.Check(r'type \?\?\? \(333333\), size 00001000', - 'first user stream') - out.Check(r'type \?\?\? \(222222\), size 00000080', - 'second user stream') +def GetDumpFromOtherProgram(out_dir, pipe_name, *args): + return GetDumpFromProgram(out_dir, pipe_name, 'crash_other_program.exe', 0, + *args) - if z7_dump_path: - out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm') + +def GetDumpFromSignal(out_dir, pipe_name, *args): + STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con. + return GetDumpFromProgram(out_dir, pipe_name, 'crashy_signal.exe', + STATUS_FATAL_APP_EXIT, *args) + + +def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name): + return GetDumpFromProgram(out_dir, pipe_name, 'self_destroying_program.exe', + win32con.EXCEPTION_BREAKPOINT) + + +def GetDumpFromZ7Program(out_dir, pipe_name): + return GetDumpFromProgram(out_dir, pipe_name, 'crashy_z7_loader.exe', + win32con.EXCEPTION_ACCESS_VIOLATION) + + +class CdbRun(object): + """Run cdb.exe passing it a cdb command and capturing the output. + `Check()` searches for regex patterns in sequence allowing verification of + expected output. + """ + + def __init__(self, cdb_path, dump_path, command): + # Run a command line that loads the dump, runs the specified cdb + # command, and then quits, and capturing stdout. + self.out = subprocess.check_output( + [cdb_path, '-z', dump_path, '-c', command + ';q']) + + def Check(self, pattern, message, re_flags=0): + match_obj = re.search(pattern, self.out, re_flags) + if match_obj: + # Matched. Consume up to end of match. 
+ self.out = self.out[match_obj.end(0):] + print('ok - %s' % message) + sys.stdout.flush() + else: + print('-' * 80, file=sys.stderr) + print('FAILED - %s' % message, file=sys.stderr) + print('-' * 80, file=sys.stderr) + print('did not match:\n %s' % pattern, file=sys.stderr) + print('-' * 80, file=sys.stderr) + print('remaining output was:\n %s' % self.out, file=sys.stderr) + print('-' * 80, file=sys.stderr) + sys.stderr.flush() + global g_had_failures + g_had_failures = True + + def Find(self, pattern, re_flags=0): + match_obj = re.search(pattern, self.out, re_flags) + if match_obj: + # Matched. Consume up to end of match. + self.out = self.out[match_obj.end(0):] + return match_obj + return None + + +def RunTests(cdb_path, dump_path, start_handler_dump_path, destroyed_dump_path, + z7_dump_path, other_program_path, other_program_no_exception_path, + sigabrt_main_path, sigabrt_background_path, pipe_name): + """Runs various tests in sequence. Runs a new cdb instance on the dump for + each block of tests to reduce the chances that output from one command is + confused for output from another. + """ + out = CdbRun(cdb_path, dump_path, '.ecxr') out.Check('This dump file has an exception of interest stored in it', - 'captured exception in z7 module') - # Older versions of cdb display relative to exports for /Z7 modules, newer - # ones just display the offset. 
- out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):', - 'exception in z7 at correct location') - out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll', - 'expected non-pdb symbol format') + 'captured exception') - out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~') - out.Check('Unknown exception - code deadbea7', - 'other program dump exception code') - out.Check('!Sleep', 'other program reasonable location') - out.Check("hanging_program!`anonymous namespace'::Thread1", - 'other program dump right thread') - count = 0 - while True: - match_obj = out.Find(r'Id.*Suspend: (\d+) ') + # When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as + # "`anonymous namespace'" and instead gives the decorated form. + out.Check( + 'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' + 'SomeCrashyFunction', 'exception at correct location') + + out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr') + out.Check('This dump file has an exception of interest stored in it', + 'captured exception (using StartHandler())') + out.Check( + 'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' + 'SomeCrashyFunction', + 'exception at correct location (using StartHandler())') + + out = CdbRun(cdb_path, dump_path, '!peb') + out.Check(r'PEB at', 'found the PEB') + out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', + 'PEB_LDR_DATA saved') + out.Check(r'Base TimeStamp Module', + 'module list present') + pipe_name_escaped = pipe_name.replace('\\', '\\\\') + out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped, + 'some PEB data is correct') + out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured', + re.IGNORECASE) + + out = CdbRun(cdb_path, dump_path, '?? 
@$peb->ProcessParameters') + out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"', + 'PEB->ProcessParameters.ImagePathName string captured') + out.Check( + ' DesktopInfo *: ' + '_UNICODE_STRING "(?!--- memory read error at address ).*"', + 'PEB->ProcessParameters.DesktopInfo string captured') + + out = CdbRun(cdb_path, dump_path, '!teb') + out.Check(r'TEB at', 'found the TEB') + out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data') + out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue') + + out = CdbRun(cdb_path, dump_path, '!gle') + out.Check( + 'LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the ' + 'file specified.', '!gle gets last error') + out.Check( + 'LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The ' + 'file %hs does not exist.', '!gle gets last ntstatus') + + if False: + # TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList. + out = CdbRun(cdb_path, dump_path, '!locks') + out.Check( + r'CritSec crashy_program!crashpad::`anonymous namespace\'::' + r'g_test_critical_section', 'lock was captured') + if platform.win32_ver()[0] != '7': + # We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7. + out.Check(r'\*\*\* Locked', + 'lock debug info was captured, and is locked') + + out = CdbRun(cdb_path, dump_path, '!handle') + out.Check(r'\d+ Handles', 'captured handles') + out.Check(r'Event\s+\d+', 'capture some event handles') + out.Check(r'File\s+\d+', 'capture some file handles') + + out = CdbRun(cdb_path, dump_path, 'lm') + out.Check(r'Unloaded modules:', 'captured some unloaded modules') + out.Check(r'lz32\.dll', 'found expected unloaded module lz32') + out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror') + + out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2') + out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. 
\d+', + 'PEB_LDR_DATA saved') + out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE) + + # Check that there is no stack trace in the self-destroyed process. Confirm + # that the top is where we expect it (that's based only on IP), but + # subsequent stack entries will not be available. This confirms that we have + # a mostly valid dump, but that the stack was omitted. + out.Check( + r'self_destroying_program!crashpad::`anonymous namespace\'::' + r'FreeOwnStackAndBreak.*\nquit:', + 'at correct location, no additional stack entries') + + # Dump memory pointed to be EDI on the background suspended thread. We don't + # know the index of the thread because the system may have started other + # threads, so first do a run to extract the thread index that's suspended, + # and then another run to dump the data pointed to by EDI for that thread. + out = CdbRun(cdb_path, dump_path, '.ecxr;~') + match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:') if match_obj: - if match_obj.group(1) != '0': - out.Check(r'FAILED', 'all suspend counts should be 0') - else: - count += 1 - else: - break - assert count > 2 + thread = match_obj.group(1) + out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi') + out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50', + 'data pointed to by registers captured') - out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k') - out.Check('Unknown exception - code 0cca11ed', - 'other program with no exception given') - out.Check('!RaiseException', 'other program in RaiseException()') + # Move up one stack frame after jumping to the exception, and examine + # memory. 
+ out = CdbRun(cdb_path, dump_path, + '.ecxr; .f+; dd /c100 poi(offset_pointer)-20') + out.Check( + r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e ' + r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 ' + r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c ' + r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 ' + r'80000094 00000095 80000096 00000097', + 'data pointed to by stack captured') - out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr') - out.Check('code 40000015', 'got sigabrt signal') - out.Check('::HandleAbortSignal', ' stack in expected location') + # Attempt to retrieve the value of g_extra_memory_pointer (by name), and + # then examine the memory at which it points. Both should have been saved. + out = CdbRun( + cdb_path, dump_path, + 'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 ' + 'L8') + out.Check(r'0000655e 0000656b 00006578 00006585', + 'extra memory range captured') - out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr') - out.Check('code 40000015', 'got sigabrt signal from background thread') + out = CdbRun(cdb_path, dump_path, '.dumpdebug') + out.Check(r'type \?\?\? \(333333\), size 00001000', 'first user stream') + out.Check(r'type \?\?\? \(222222\), size 00000080', 'second user stream') + + if z7_dump_path: + out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm') + out.Check('This dump file has an exception of interest stored in it', + 'captured exception in z7 module') + # Older versions of cdb display relative to exports for /Z7 modules, + # newer ones just display the offset. 
+ out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):', + 'exception in z7 at correct location') + out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll', + 'expected non-pdb symbol format') + + out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~') + out.Check('Unknown exception - code deadbea7', + 'other program dump exception code') + out.Check('!Sleep', 'other program reasonable location') + out.Check("hanging_program!`anonymous namespace'::Thread1", + 'other program dump right thread') + count = 0 + while True: + match_obj = out.Find(r'Id.*Suspend: (\d+) ') + if match_obj: + if match_obj.group(1) != '0': + out.Check(r'FAILED', 'all suspend counts should be 0') + else: + count += 1 + else: + break + assert count > 2 + + out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k') + out.Check('Unknown exception - code 0cca11ed', + 'other program with no exception given') + out.Check('!RaiseException', 'other program in RaiseException()') + + out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr') + out.Check('code 40000015', 'got sigabrt signal') + out.Check('::HandleAbortSignal', ' stack in expected location') + + out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr') + out.Check('code 40000015', 'got sigabrt signal from background thread') def main(args): - try: - if len(args) != 1: - print('must supply binary dir', file=sys.stderr) - return 1 + try: + if len(args) != 1: + print('must supply binary dir', file=sys.stderr) + return 1 - cdb_path = GetCdbPath() - if not cdb_path: - print('could not find cdb', file=sys.stderr) - return 1 + cdb_path = GetCdbPath() + if not cdb_path: + print('could not find cdb', file=sys.stderr) + return 1 - # Make sure we can download Windows symbols. 
- if not os.environ.get('_NT_SYMBOL_PATH'): - symbol_dir = MakeTempDir() - protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http' - os.environ['_NT_SYMBOL_PATH'] = ( - 'SRV*' + symbol_dir + '*' + - protocol + '://msdl.microsoft.com/download/symbols') + # Make sure we can download Windows symbols. + if not os.environ.get('_NT_SYMBOL_PATH'): + symbol_dir = MakeTempDir() + protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http' + os.environ['_NT_SYMBOL_PATH'] = ( + 'SRV*' + symbol_dir + '*' + protocol + + '://msdl.microsoft.com/download/symbols') - pipe_name = r'\\.\pipe\end-to-end_%s_%s' % ( - os.getpid(), str(random.getrandbits(64))) + pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (os.getpid(), + str(random.getrandbits(64))) - crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name) - if not crashy_dump_path: - return 1 + crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name) + if not crashy_dump_path: + return 1 - start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None) - if not start_handler_dump_path: - return 1 + start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None) + if not start_handler_dump_path: + return 1 - destroyed_dump_path = GetDumpFromSelfDestroyingProgram(args[0], pipe_name) - if not destroyed_dump_path: - return 1 + destroyed_dump_path = GetDumpFromSelfDestroyingProgram( + args[0], pipe_name) + if not destroyed_dump_path: + return 1 - z7_dump_path = None - if not args[0].endswith('_x64'): - z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name) - if not z7_dump_path: - return 1 + z7_dump_path = None + if not args[0].endswith('_x64'): + z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name) + if not z7_dump_path: + return 1 - other_program_path = GetDumpFromOtherProgram(args[0], pipe_name) - if not other_program_path: - return 1 + other_program_path = GetDumpFromOtherProgram(args[0], pipe_name) + if not other_program_path: + return 1 - other_program_no_exception_path = GetDumpFromOtherProgram( - 
args[0], pipe_name, 'noexception') - if not other_program_no_exception_path: - return 1 + other_program_no_exception_path = GetDumpFromOtherProgram( + args[0], pipe_name, 'noexception') + if not other_program_no_exception_path: + return 1 - sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main') - if not sigabrt_main_path: - return 1 + sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main') + if not sigabrt_main_path: + return 1 - sigabrt_background_path = GetDumpFromSignal( - args[0], pipe_name, 'background') - if not sigabrt_background_path: - return 1 + sigabrt_background_path = GetDumpFromSignal(args[0], pipe_name, + 'background') + if not sigabrt_background_path: + return 1 - RunTests(cdb_path, - crashy_dump_path, - start_handler_dump_path, - destroyed_dump_path, - z7_dump_path, - other_program_path, - other_program_no_exception_path, - sigabrt_main_path, - sigabrt_background_path, - pipe_name) + RunTests(cdb_path, crashy_dump_path, start_handler_dump_path, + destroyed_dump_path, z7_dump_path, other_program_path, + other_program_no_exception_path, sigabrt_main_path, + sigabrt_background_path, pipe_name) - return 1 if g_had_failures else 0 - finally: - CleanUpTempDirs() + return 1 if g_had_failures else 0 + finally: + CleanUpTempDirs() if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) + sys.exit(main(sys.argv[1:])) diff --git a/third_party/fuchsia/runner.py b/third_party/fuchsia/runner.py old mode 100644 new mode 100755 index da4ec99a..229d8a42 --- a/third_party/fuchsia/runner.py +++ b/third_party/fuchsia/runner.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python + # Copyright 2018 The Crashpad Authors. All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,4 +16,5 @@ import os import sys + os.execv(sys.argv[1], sys.argv[1:]) diff --git a/util/mach/mig.py b/util/mach/mig.py index dc9b7c64..fa35e006 100755 --- a/util/mach/mig.py +++ b/util/mach/mig.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# coding: utf-8 # Copyright 2019 The Crashpad Authors. All rights reserved. # @@ -20,6 +19,7 @@ import sys import mig_fix import mig_gen + def main(args): parsed = mig_gen.parse_args(args) @@ -30,5 +30,6 @@ def main(args): parsed.migcom_path, parsed.arch) mig_fix.fix_interface(interface) + if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/util/mach/mig_fix.py b/util/mach/mig_fix.py index 8cd5e4f6..037746fa 100755 --- a/util/mach/mig_fix.py +++ b/util/mach/mig_fix.py @@ -22,18 +22,18 @@ import sys from mig_gen import MigInterface + def _fix_user_implementation(implementation, fixed_implementation, header, fixed_header): """Rewrites a MIG-generated user implementation (.c) file. - Rewrites the file at |implementation| by adding - “__attribute__((unused))” to the definition of any structure typedefed - as “__Reply” by searching for the pattern unique to those structure - definitions. These structures are in fact unused in the user - implementation file, and this will trigger a -Wunused-local-typedefs - warning in gcc unless removed or marked with the “unused” attribute. - Also changes header references to point to the new header filename, if - changed. + Rewrites the file at |implementation| by adding “__attribute__((unused))” to + the definition of any structure typedefed as “__Reply” by searching for the + pattern unique to those structure definitions. These structures are in fact + unused in the user implementation file, and this will trigger a + -Wunused-local-typedefs warning in gcc unless removed or marked with the + “unused” attribute. Also changes header references to point to the new + header filename, if changed. 
If |fixed_implementation| is None, overwrites the original; otherwise, puts the result in the file at |fixed_implementation|. @@ -59,6 +59,7 @@ def _fix_user_implementation(implementation, fixed_implementation, header, file.write(contents) file.close() + def _fix_server_implementation(implementation, fixed_implementation, header, fixed_header): """Rewrites a MIG-generated server implementation (.c) file. @@ -79,24 +80,25 @@ def _fix_server_implementation(implementation, fixed_implementation, header, contents = file.read() # Find interesting declarations. - declaration_pattern = \ - re.compile('^mig_internal (kern_return_t __MIG_check__.*)$', - re.MULTILINE) + declaration_pattern = re.compile( + '^mig_internal (kern_return_t __MIG_check__.*)$', re.MULTILINE) declarations = declaration_pattern.findall(contents) # Remove “__attribute__((__unused__))” from the declarations, and call them # “mig_external” or “extern” depending on whether “mig_external” is defined. attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ') - declarations = ['''\ + declarations = [ + '''\ #ifdef mig_external mig_external #else extern #endif -''' + attribute_pattern.sub('', x) + ';\n' for x in declarations] +''' + attribute_pattern.sub('', x) + ';\n' for x in declarations + ] # Rewrite the declarations in this file as “mig_external”. - contents = declaration_pattern.sub(r'mig_external \1', contents); + contents = declaration_pattern.sub(r'mig_external \1', contents) # Crashpad never implements the mach_msg_server() MIG callouts. To avoid # needing to provide stub implementations, set KERN_FAILURE as the RetCode @@ -125,6 +127,7 @@ extern file.close() return declarations + def _fix_header(header, fixed_header, declarations=[]): """Rewrites a MIG-generated header (.h) file. 
@@ -161,6 +164,7 @@ extern "C" { file.write(contents) file.close() + def fix_interface(interface, fixed_interface=None): if fixed_interface is None: fixed_interface = MigInterface(None, None, None, None) @@ -175,6 +179,7 @@ def fix_interface(interface, fixed_interface=None): _fix_header(interface.server_h, fixed_interface.server_h, server_declarations) + def main(args): parser = argparse.ArgumentParser() parser.add_argument('user_c') @@ -187,11 +192,12 @@ def main(args): parser.add_argument('--fixed_server_h', default=None) parsed = parser.parse_args(args) - interface = MigInterface(parsed.user_c, parsed.server_c, - parsed.user_h, parsed.server_h) + interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h, + parsed.server_h) fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c, parsed.fixed_user_h, parsed.fixed_server_h) fix_interface(interface, fixed_interface) + if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git a/util/mach/mig_gen.py b/util/mach/mig_gen.py index b3e9a5b4..dcbf8296 100755 --- a/util/mach/mig_gen.py +++ b/util/mach/mig_gen.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# coding: utf-8 # Copyright 2019 The Crashpad Authors. All rights reserved. 
# @@ -21,19 +20,30 @@ import os import subprocess import sys -MigInterface = collections.namedtuple('MigInterface', ['user_c', 'server_c', - 'user_h', 'server_h']) +MigInterface = collections.namedtuple( + 'MigInterface', ['user_c', 'server_c', 'user_h', 'server_h']) -def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None, - mig_path=None, migcom_path=None, arch=None): + +def generate_interface(defs, + interface, + includes=[], + sdk=None, + clang_path=None, + mig_path=None, + migcom_path=None, + arch=None): if mig_path is None: - mig_path = 'mig' - command = [mig_path, - '-user', interface.user_c, - '-server', interface.server_c, - '-header', interface.user_h, - '-sheader', interface.server_h, - ] + mig_path = 'mig' + + # yapf: disable + command = [ + mig_path, + '-user', interface.user_c, + '-server', interface.server_c, + '-header', interface.user_h, + '-sheader', interface.server_h, + ] + # yapf: enable if clang_path is not None: os.environ['MIGCC'] = clang_path @@ -48,6 +58,7 @@ def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None, command.append(defs) subprocess.check_call(command) + def parse_args(args): parser = argparse.ArgumentParser() parser.add_argument('--clang-path', help='Path to Clang') @@ -66,13 +77,15 @@ def parse_args(args): parser.add_argument('server_h') return parser.parse_args(args) + def main(args): parsed = parse_args(args) - interface = MigInterface(parsed.user_c, parsed.server_c, - parsed.user_h, parsed.server_h) - generate_interface(parsed.defs, interface, parsed.include, - parsed.sdk, parsed.clang_path, parsed.mig_path, - parsed.migcom_path, parsed.arch) + interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h, + parsed.server_h) + generate_interface(parsed.defs, interface, parsed.include, parsed.sdk, + parsed.clang_path, parsed.mig_path, parsed.migcom_path, + parsed.arch) + if __name__ == '__main__': sys.exit(main(sys.argv[1:])) diff --git 
a/util/net/generate_test_server_key.py b/util/net/generate_test_server_key.py index 31d73c9a..3db08ade 100755 --- a/util/net/generate_test_server_key.py +++ b/util/net/generate_test_server_key.py @@ -23,25 +23,28 @@ key = os.path.join(testdata, 'crashpad_util_test_key.pem') cert = os.path.join(testdata, 'crashpad_util_test_cert.pem') with open(cert, 'w') as cert_file, open(key, 'w') as key_file: - MESSAGE = 'DO NOT EDIT: This file was auto-generated by ' + __file__ + '\n\n' - cert_file.write(MESSAGE) - key_file.write(MESSAGE) + MESSAGE = ('DO NOT EDIT: This file was auto-generated by ' + __file__ + + '\n\n') + cert_file.write(MESSAGE) + key_file.write(MESSAGE) - proc = subprocess.Popen( - ['openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', - '-days', '3650', '-newkey', 'rsa:2048', '-keyout', '-'], - stderr=open(os.devnull, 'w'), stdout=subprocess.PIPE) + proc = subprocess.Popen([ + 'openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', '-days', + '3650', '-newkey', 'rsa:2048', '-keyout', '-' + ], + stderr=open(os.devnull, 'w'), + stdout=subprocess.PIPE) - contents = proc.communicate()[0] - dest = sys.stderr - for line in contents.splitlines(True): - if line.startswith("-----BEGIN PRIVATE KEY-----"): - dest = key_file - elif line.startswith("-----BEGIN CERTIFICATE-----"): - dest = cert_file - elif line.startswith("-----END"): - dest.write(line) - dest = sys.stderr - continue + contents = proc.communicate()[0] + dest = sys.stderr + for line in contents.splitlines(True): + if line.startswith("-----BEGIN PRIVATE KEY-----"): + dest = key_file + elif line.startswith("-----BEGIN CERTIFICATE-----"): + dest = cert_file + elif line.startswith("-----END"): + dest.write(line) + dest = sys.stderr + continue - dest.write(line) + dest.write(line)