Add .style.yapf and reformat according to yapf, using “google” style

% yapf --in-place $(git ls-files **/*.py)
% yapf --version
yapf 0.30.0

Note that this is not using the “chromium” yapf style because Chromium
is moving to PEP-8.
https://groups.google.com/a/chromium.org/d/topic/chromium-dev/RcJgJdkNIdg
yapf 0.30.0 no longer recognizes “chromium” as a style option (22ef70f3c4).
Since this is a mass reformatting, it might as well move things all the
way into the future all at once.

This uses the “google” style, which is a superset of “pep8”.
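
For anyone who wants to verify that the tree stays formatted after this
change, a check along the following lines could be run locally or wired into
presubmit. This is an illustrative sketch only, not part of this commit: it
assumes yapf is on PATH and relies on recent yapf versions exiting nonzero
from --diff when reformatting would be needed.

#!/usr/bin/env python
# Illustrative formatting check; not part of this change.
import subprocess
import sys


def main():
    # Tracked Python files, one per line (names with spaces are not expected).
    output = subprocess.check_output(['git', 'ls-files', '*.py'])
    files = output.decode('utf-8').splitlines()
    if not files:
        return 0
    # yapf --diff prints the needed changes and exits nonzero if there are any.
    result = subprocess.call(['yapf', '--diff'] + files)
    if result != 0:
        sys.stderr.write('Formatting differences found; run yapf --in-place.\n')
    return result


if __name__ == '__main__':
    sys.exit(main())
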

Change-Id: Ifa37371079ea1859e4afe8e31d2eef2cfd7af384
Reviewed-on: https://chromium-review.googlesource.com/c/crashpad/crashpad/+/2165637
Commit-Queue: Mark Mentovai <mark@chromium.org>
Reviewed-by: Scott Graham <scottmg@chromium.org>
Author: Mark Mentovai, 2020-04-27 09:43:35 -04:00 (committed by Commit Bot)
Parent: 29b1688c11
Commit: a5a1c3b07f
16 changed files with 1687 additions and 1615 deletions

.style.yapf (new file, 16 lines)

@ -0,0 +1,16 @@
# Copyright 2020 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
[style]
based_on_style = google
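
yapf looks for a .style.yapf by walking up from each file it formats, so
everything under the checkout picks this configuration up automatically. A
quick way to see what the “google” style does to a snippet is yapf's Python
API; the example below is illustrative only, and FormatCode's two-value
return matches yapf 0.30.x.

# Illustrative use of yapf's API with the same style this file selects.
from yapf.yapflib.yapf_api import FormatCode

source = "d = {  'a':37,'b':42 }\n"
formatted, changed = FormatCode(source, style_config='google')
print(formatted)  # d = {'a': 37, 'b': 42}
print(changed)    # True
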


@ -21,10 +21,10 @@ import sys
def ChooseDependencyPath(local_path, external_path): def ChooseDependencyPath(local_path, external_path):
"""Chooses between a dependency located at local path and an external path. """Chooses between a dependency located at local path and an external path.
The local path, used in standalone builds, is preferred. If it is not present The local path, used in standalone builds, is preferred. If it is not
but the external path is, the external path will be used. If neither path is present but the external path is, the external path will be used. If neither
present, the local path will be used, so that error messages uniformly refer path is present, the local path will be used, so that error messages
to the local path. uniformly refer to the local path.
Args: Args:
local_path: The preferred local path to use for a standalone build. local_path: The preferred local path to use for a standalone build.
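
The docstring in the hunk above describes the selection order: prefer the
local (standalone) path, fall back to the external path only when the local
one is missing, and default to the local path so error messages stay
consistent. A minimal standalone sketch of that logic (hypothetical helper,
not the implementation in this file):

import os


def _choose_dependency_path(local_path, external_path):
    # Prefer the local path; use the external path only if the local one is
    # absent and the external one exists; otherwise fall back to the local
    # path so error messages uniformly refer to it.
    if os.path.exists(local_path) or not os.path.exists(external_path):
        return local_path
    return external_path
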
@ -41,14 +41,14 @@ def ChooseDependencyPath(local_path, external_path):
script_dir = os.path.dirname(__file__) script_dir = os.path.dirname(__file__)
crashpad_dir = (os.path.dirname(script_dir) if script_dir not in ('', os.curdir) crashpad_dir = (os.path.dirname(script_dir)
else os.pardir) if script_dir not in ('', os.curdir) else os.pardir)
sys.path.insert(0, sys.path.insert(
ChooseDependencyPath(os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 0,
'pylib'), ChooseDependencyPath(
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 'pylib'),
'pylib'))[1]) os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', 'pylib'))[1])
import gyp import gyp
@ -75,8 +75,8 @@ def main(args):
return result return result
if sys.platform == 'win32': if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may be # Check to make sure that no target_arch was specified. target_arch may
# set during a cross build, such as a cross build for Android. # be set during a cross build, such as a cross build for Android.
has_target_arch = False has_target_arch = False
for arg_index in range(0, len(args)): for arg_index in range(0, len(args)):
arg = args[arg_index] arg = args[arg_index]
@ -88,7 +88,8 @@ def main(args):
if not has_target_arch: if not has_target_arch:
# Also generate the x86 build. # Also generate the x86 build.
result = gyp.main(args + ['-D', 'target_arch=ia32', '-G', 'config=Debug']) result = gyp.main(args +
['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0: if result != 0:
return result return result
result = gyp.main( result = gyp.main(


@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved. # Copyright 2017 The Crashpad Authors. All rights reserved.
# #
@ -33,12 +32,8 @@ def main(args):
parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain') parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain')
(parsed, extra_command_line_args) = parser.parse_known_args(args) (parsed, extra_command_line_args) = parser.parse_known_args(args)
ndk_bin_dir = os.path.join(parsed.ndk, ndk_bin_dir = os.path.join(parsed.ndk, 'toolchains', 'llvm', 'prebuilt',
'toolchains', 'linux-x86_64', 'bin')
'llvm',
'prebuilt',
'linux-x86_64',
'bin')
if not os.path.exists(ndk_bin_dir): if not os.path.exists(ndk_bin_dir):
parser.error("missing toolchain") parser.error("missing toolchain")
@ -51,25 +46,23 @@ def main(args):
clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level
os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang') os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang++') os.environ['CXX_target'] = os.path.join(ndk_bin_dir,
clang_prefix + '-clang++')
extra_args = ['-D', 'android_api_level=' + parsed.api_level] extra_args = ['-D', 'android_api_level=' + parsed.api_level]
# ARM only includes 'v7a' in the tool prefix for clang # ARM only includes 'v7a' in the tool prefix for clang
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' else
else ARCH_TO_ARCH_TRIPLET[parsed.arch]) ARCH_TO_ARCH_TRIPLET[parsed.arch])
for tool in ('ar', 'nm', 'readelf'): for tool in ('ar', 'nm', 'readelf'):
os.environ['%s_target' % tool.upper()] = ( os.environ['%s_target' % tool.upper()] = (os.path.join(
os.path.join(ndk_bin_dir, '%s-%s' % (tool_prefix, tool))) ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
return gyp_crashpad.main( return gyp_crashpad.main([
['-D', 'OS=android', '-D', 'OS=android', '-D',
'-D', 'target_arch=%s' % parsed.arch, 'target_arch=%s' % parsed.arch, '-D', 'clang=1', '-f', 'ninja-android'
'-D', 'clang=1', ] + extra_args + extra_command_line_args)
'-f', 'ninja-android'] +
extra_args +
extra_command_line_args)
if __name__ == '__main__': if __name__ == '__main__':


@ -23,7 +23,6 @@ import subprocess
import sys import sys
import urllib2 import urllib2
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sysroot revision from: # Sysroot revision from:
@ -33,11 +32,12 @@ PATH = 'chrome-linux-sysroot/toolchain'
REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66' REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66'
FILENAME = 'debian_stretch_amd64_sysroot.tar.xz' FILENAME = 'debian_stretch_amd64_sysroot.tar.xz'
def main(): def main():
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME) url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
sysroot = os.path.join(SCRIPT_DIR, os.pardir, sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux',
'third_party', 'linux', 'sysroot') 'sysroot')
stamp = os.path.join(sysroot, '.stamp') stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp): if os.path.exists(stamp):


@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Convert GN Xcode projects to platform and configuration independent targets. """Convert GN Xcode projects to platform and configuration independent targets.
GN generates Xcode projects that build one configuration only. However, typical GN generates Xcode projects that build one configuration only. However, typical
@ -40,7 +39,7 @@ import tempfile
class XcodeProject(object): class XcodeProject(object):
def __init__(self, objects, counter = 0): def __init__(self, objects, counter=0):
self.objects = objects self.objects = objects
self.counter = 0 self.counter = 0
@ -50,8 +49,8 @@ class XcodeProject(object):
str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter) str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter)
new_id = hashlib.sha1(str_id).hexdigest()[:24].upper() new_id = hashlib.sha1(str_id).hexdigest()[:24].upper()
# Make sure ID is unique. It's possible there could be an id conflict # Make sure ID is unique. It's possible there could be an id
# since this is run after GN runs. # conflict since this is run after GN runs.
if new_id not in self.objects: if new_id not in self.objects:
self.objects[new_id] = obj self.objects[new_id] = obj
return new_id return new_id
@ -62,15 +61,15 @@ def CopyFileIfChanged(source_path, target_path):
target_dir = os.path.dirname(target_path) target_dir = os.path.dirname(target_path)
if not os.path.isdir(target_dir): if not os.path.isdir(target_dir):
os.makedirs(target_dir) os.makedirs(target_dir)
if not os.path.exists(target_path) or \ if (not os.path.exists(target_path) or
not filecmp.cmp(source_path, target_path): not filecmp.cmp(source_path, target_path)):
shutil.copyfile(source_path, target_path) shutil.copyfile(source_path, target_path)
def LoadXcodeProjectAsJSON(path): def LoadXcodeProjectAsJSON(path):
"""Return Xcode project at |path| as a JSON string.""" """Return Xcode project at |path| as a JSON string."""
return subprocess.check_output([ return subprocess.check_output(
'plutil', '-convert', 'json', '-o', '-', path]) ['plutil', '-convert', 'json', '-o', '-', path])
def WriteXcodeProject(output_path, json_string): def WriteXcodeProject(output_path, json_string):
@ -110,10 +109,12 @@ def UpdateProductsProject(file_input, file_output, configurations, root_dir):
value['defaultConfigurationName'] = configurations[0] value['defaultConfigurationName'] = configurations[0]
objects_to_remove.extend(value['buildConfigurations']) objects_to_remove.extend(value['buildConfigurations'])
build_config_template = project.objects[value['buildConfigurations'][0]] build_config_template = project.objects[value['buildConfigurations']
build_config_template['buildSettings']['CONFIGURATION_BUILD_DIR'] = \ [0]]
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)' build_settings = build_config_template['buildSettings']
build_config_template['buildSettings']['CODE_SIGN_IDENTITY'] = '' build_settings['CONFIGURATION_BUILD_DIR'] = (
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)')
build_settings['CODE_SIGN_IDENTITY'] = ''
value['buildConfigurations'] = [] value['buildConfigurations'] = []
for configuration in configurations: for configuration in configurations:
@ -164,16 +165,16 @@ def GetFolderForPath(project, rootObject, path):
children = root['children'] children = root['children']
new_root = None new_root = None
for child in children: for child in children:
if objects[child]['isa'] == 'PBXGroup' and \ if (objects[child]['isa'] == 'PBXGroup' and
objects[child]['name'] == folder: objects[child]['name'] == folder):
new_root = objects[child] new_root = objects[child]
break break
if not new_root: if not new_root:
# If the folder isn't found we could just cram it into the leaf existing # If the folder isn't found we could just cram it into the leaf
# folder, but that leads to folders with tons of README.md inside. # existing folder, but that leads to folders with tons of README.md
# inside.
new_group = { new_group = {
"children": [ "children": [],
],
"isa": "PBXGroup", "isa": "PBXGroup",
"name": folder, "name": folder,
"sourceTree": "<group>" "sourceTree": "<group>"
@ -188,7 +189,8 @@ def GetFolderForPath(project, rootObject, path):
def DisableNewBuildSystem(output_dir): def DisableNewBuildSystem(output_dir):
"""Disables the new build system due to crbug.com/852522 """ """Disables the new build system due to crbug.com/852522 """
xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace', xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace',
'xcshareddata', 'WorkspaceSettings.xcsettings') 'xcshareddata',
'WorkspaceSettings.xcsettings')
if os.path.isfile(xcwspacesharedsettings): if os.path.isfile(xcwspacesharedsettings):
json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings)) json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings))
else: else:
@ -198,16 +200,18 @@ def DisableNewBuildSystem(output_dir):
def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations): def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
'''Tweak the Xcode project generated by gn to support multiple configurations. '''Tweak the Xcode project generated by gn to support multiple
configurations.
The Xcode projects generated by "gn gen --ide" only supports a single The Xcode projects generated by "gn gen --ide" only supports a single
platform and configuration (as the platform and configuration are set platform and configuration (as the platform and configuration are set per
per output directory). This method takes as input such projects and output directory). This method takes as input such projects and add support
add support for multiple configurations and platforms (to allow devs for multiple configurations and platforms (to allow devs to select them in
to select them in Xcode). Xcode).
Args: Args:
input_dir: directory containing the XCode projects created by "gn gen --ide" input_dir: directory containing the XCode projects created by "gn gen
--ide"
output_dir: directory where the tweaked Xcode projects will be saved output_dir: directory where the tweaked Xcode projects will be saved
configurations: list of string corresponding to the configurations that configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at need to be supported by the tweaked Xcode projects, must contains at
@ -217,7 +221,8 @@ def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
products = os.path.join('products.xcodeproj', 'project.pbxproj') products = os.path.join('products.xcodeproj', 'project.pbxproj')
product_input = os.path.join(input_dir, products) product_input = os.path.join(input_dir, products)
product_output = os.path.join(output_dir, products) product_output = os.path.join(output_dir, products)
UpdateProductsProject(product_input, product_output, configurations, root_dir) UpdateProductsProject(product_input, product_output, configurations,
root_dir)
# Copy all workspace. # Copy all workspace.
xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata') xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata')
@ -229,27 +234,29 @@ def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj' # TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is # and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once gn # here to support both old and new projects setup and will be removed once
# has rolled past it. # gn has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj') sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)): if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources), CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources)) os.path.join(output_dir, sources))
def Main(args): def Main(args):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Convert GN Xcode projects for iOS.') description='Convert GN Xcode projects for iOS.')
parser.add_argument( parser.add_argument(
'input', 'input', help='directory containing [product|all] Xcode projects.')
help='directory containing [product|all] Xcode projects.')
parser.add_argument( parser.add_argument(
'output', 'output', help='directory where to generate the iOS configuration.')
help='directory where to generate the iOS configuration.') parser.add_argument('--add-config',
parser.add_argument( dest='configurations',
'--add-config', dest='configurations', default=[], action='append', default=[],
action='append',
help='configuration to add to the Xcode project') help='configuration to add to the Xcode project')
parser.add_argument( parser.add_argument('--root',
'--root', type=os.path.abspath, required=True, type=os.path.abspath,
required=True,
help='root directory of the project') help='root directory of the project')
args = parser.parse_args(args) args = parser.parse_args(args)
@ -264,10 +271,13 @@ def Main(args):
return 1 return 1
if not args.configurations: if not args.configurations:
sys.stderr.write('At least one configuration required, see --add-config.\n') sys.stderr.write(
'At least one configuration required, see --add-config.\n')
return 1 return 1
ConvertGnXcodeProject(args.root, args.input, args.output, args.configurations) ConvertGnXcodeProject(args.root, args.input, args.output,
args.configurations)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main(sys.argv[1:])) sys.exit(Main(sys.argv[1:]))


@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import argparse import argparse
import convert_gn_xcodeproj import convert_gn_xcodeproj
import errno import errno
@ -31,24 +30,22 @@ try:
except ImportError: except ImportError:
import StringIO import StringIO
SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator') SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator')
SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage') SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage')
class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser): class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
'''A .ini file parser that supports strings and environment variables.''' '''A .ini file parser that supports strings and environment variables.'''
ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)') ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)')
def values(self, section): def values(self, section):
return map( return map(lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
ConfigParser.SafeConfigParser.items(self, section)) ConfigParser.SafeConfigParser.items(self, section))
def getstring(self, section, option): def getstring(self, section, option):
return self._UnquoteString(self._ExpandEnvVar(self.get(section, option))) return self._UnquoteString(self._ExpandEnvVar(self.get(section,
option)))
def _UnquoteString(self, string): def _UnquoteString(self, string):
if not string or string[0] != '"' or string[-1] != '"': if not string or string[0] != '"' or string[-1] != '"':
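
The parser in the hunk above expands $NAME references from the environment
and strips surrounding double quotes from .ini values. Below is a standalone
sketch of that value-processing step, with hypothetical names and none of the
ConfigParser plumbing:

import os
import re

_ENV_VAR_RE = re.compile(r'\$([A-Za-z0-9_]+)')


def _expand_and_unquote(value):
    # Replace each $NAME with its environment value (empty if unset), then
    # strip one pair of surrounding double quotes.
    expanded = _ENV_VAR_RE.sub(
        lambda match: os.environ.get(match.group(1), ''), value)
    if len(expanded) >= 2 and expanded[0] == '"' and expanded[-1] == '"':
        return expanded[1:-1]
    return expanded


print(_expand_and_unquote('"$HOME/build"'))  # e.g. /home/me/build
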
@ -63,9 +60,10 @@ class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:]) prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix return prefix + os.environ.get(name, '') + suffix
class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default files.''' class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default
files.'''
FAT_BUILD_DEFAULT_ARCH = '64-bit' FAT_BUILD_DEFAULT_ARCH = '64-bit'
@ -106,7 +104,8 @@ class GnGenerator(object):
if build_arch == 'fat': if build_arch == 'fat':
target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH] target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH]
args.append(('target_cpu', target_cpu)) args.append(('target_cpu', target_cpu))
args.append(('additional_target_cpus', args.append(
('additional_target_cpus',
[cpu for cpu in cpu_values.itervalues() if cpu != target_cpu])) [cpu for cpu in cpu_values.itervalues() if cpu != target_cpu]))
else: else:
args.append(('target_cpu', cpu_values[build_arch])) args.append(('target_cpu', cpu_values[build_arch]))
@ -116,12 +115,10 @@ class GnGenerator(object):
args.extend(self._settings.items('gn_args')) args.extend(self._settings.items('gn_args'))
return args return args
def Generate(self, gn_path, root_path, out_path): def Generate(self, gn_path, root_path, out_path):
buf = StringIO.StringIO() buf = StringIO.StringIO()
self.WriteArgsGn(buf) self.WriteArgsGn(buf)
WriteToFileIfChanged( WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
os.path.join(out_path, 'args.gn'),
buf.getvalue(), buf.getvalue(),
overwrite=True) overwrite=True)
@ -131,23 +128,20 @@ class GnGenerator(object):
def CreateGnRules(self, gn_path, root_path, out_path): def CreateGnRules(self, gn_path, root_path, out_path):
buf = StringIO.StringIO() buf = StringIO.StringIO()
self.WriteArgsGn(buf) self.WriteArgsGn(buf)
WriteToFileIfChanged( WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
os.path.join(out_path, 'args.gn'),
buf.getvalue(), buf.getvalue(),
overwrite=True) overwrite=True)
buf = StringIO.StringIO() buf = StringIO.StringIO()
gn_command = self.GetGnCommand(gn_path, root_path, out_path, False) gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
self.WriteBuildNinja(buf, gn_command) self.WriteBuildNinja(buf, gn_command)
WriteToFileIfChanged( WriteToFileIfChanged(os.path.join(out_path, 'build.ninja'),
os.path.join(out_path, 'build.ninja'),
buf.getvalue(), buf.getvalue(),
overwrite=False) overwrite=False)
buf = StringIO.StringIO() buf = StringIO.StringIO()
self.WriteBuildNinjaDeps(buf) self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged( WriteToFileIfChanged(os.path.join(out_path, 'build.ninja.d'),
os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(), buf.getvalue(),
overwrite=False) overwrite=False)
@ -167,7 +161,8 @@ class GnGenerator(object):
if isinstance(value, bool): if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower())) stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list): elif isinstance(value, list):
stream.write('%s = [%s' % (name, '\n' if len(value) > 1 else '')) stream.write('%s = [%s' %
(name, '\n' if len(value) > 1 else ''))
if len(value) == 1: if len(value) == 1:
prefix = ' ' prefix = ' '
suffix = ' ' suffix = ' '
@ -176,7 +171,8 @@ class GnGenerator(object):
suffix = ',\n' suffix = ',\n'
for item in value: for item in value:
if isinstance(item, bool): if isinstance(item, bool):
stream.write('%s%s%s' % (prefix, str(item).lower(), suffix)) stream.write('%s%s%s' %
(prefix, str(item).lower(), suffix))
else: else:
stream.write('%s%s%s' % (prefix, item, suffix)) stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n') stream.write(']\n')
@ -196,7 +192,7 @@ class GnGenerator(object):
stream.write('build.ninja: nonexistant_file.gn\n') stream.write('build.ninja: nonexistant_file.gn\n')
def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project): def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project):
gn_command = [ gn_path, '--root=%s' % os.path.realpath(src_path), '-q' ] gn_command = [gn_path, '--root=%s' % os.path.realpath(src_path), '-q']
if generate_xcode_project: if generate_xcode_project:
gn_command.append('--ide=xcode') gn_command.append('--ide=xcode')
gn_command.append('--root-target=gn_all') gn_command.append('--root-target=gn_all')
@ -211,8 +207,8 @@ class GnGenerator(object):
# else: # else:
# gn_command.append('--check') # gn_command.append('--check')
gn_command.append('gen') gn_command.append('gen')
gn_command.append('//%s' % gn_command.append('//%s' % os.path.relpath(os.path.abspath(out_path),
os.path.relpath(os.path.abspath(out_path), os.path.abspath(src_path))) os.path.abspath(src_path)))
return gn_command return gn_command
@ -262,15 +258,13 @@ def GenerateXcodeProject(gn_path, root_dir, out_dir, settings):
'''Convert GN generated Xcode project into multi-configuration Xcode '''Convert GN generated Xcode project into multi-configuration Xcode
project.''' project.'''
temp_path = tempfile.mkdtemp(prefix=os.path.abspath( temp_path = tempfile.mkdtemp(
os.path.join(out_dir, '_temp'))) prefix=os.path.abspath(os.path.join(out_dir, '_temp')))
try: try:
generator = GnGenerator(settings, 'Debug', 'iphonesimulator') generator = GnGenerator(settings, 'Debug', 'iphonesimulator')
generator.Generate(gn_path, root_dir, temp_path) generator.Generate(gn_path, root_dir, temp_path)
convert_gn_xcodeproj.ConvertGnXcodeProject( convert_gn_xcodeproj.ConvertGnXcodeProject(
root_dir, root_dir, os.path.join(temp_path), os.path.join(out_dir, 'build'),
os.path.join(temp_path),
os.path.join(out_dir, 'build'),
SUPPORTED_CONFIGS) SUPPORTED_CONFIGS)
finally: finally:
if os.path.exists(temp_path): if os.path.exists(temp_path):
@ -287,16 +281,20 @@ def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings):
def Main(args): def Main(args):
default_root = os.path.normpath(os.path.join( default_root = os.path.normpath(
os.path.dirname(__file__), os.pardir, os.pardir)) os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Generate build directories for use with gn.') description='Generate build directories for use with gn.')
parser.add_argument( parser.add_argument(
'root', default=default_root, nargs='?', 'root',
default=default_root,
nargs='?',
help='root directory where to generate multiple out configurations') help='root directory where to generate multiple out configurations')
parser.add_argument( parser.add_argument('--import',
'--import', action='append', dest='import_rules', default=[], action='append',
dest='import_rules',
default=[],
help='path to file defining default gn variables') help='path to file defining default gn variables')
args = parser.parse_args(args) args = parser.parse_args(args)


@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used """Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used
for running tests without a device. for running tests without a device.
""" """
@ -53,12 +52,15 @@ def _CheckForTun():
"""Check for networking. TODO(scottmg): Currently, this is Linux-specific. """Check for networking. TODO(scottmg): Currently, this is Linux-specific.
""" """
returncode = subprocess.call( returncode = subprocess.call(
['tunctl', '-b', '-u', getpass.getuser(), '-t', 'qemu'], ['tunctl', '-b', '-u',
stdout=DEVNULL, stderr=DEVNULL) getpass.getuser(), '-t', 'qemu'],
stdout=DEVNULL,
stderr=DEVNULL)
if returncode != 0: if returncode != 0:
print('To use QEMU with networking on Linux, configure TUN/TAP. See:', print('To use QEMU with networking on Linux, configure TUN/TAP. See:',
file=sys.stderr) file=sys.stderr)
print(' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only', print(
' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
file=sys.stderr) file=sys.stderr)
return 2 return 2
return 0 return 0
@ -78,10 +80,13 @@ def _Start(pid_file):
initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin') initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin')
mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3)) mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3))
instance_name = 'crashpad_qemu_' + \ instance_name = (
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)) 'crashpad_qemu_' +
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)))
# These arguments are from the Fuchsia repo in zircon/scripts/run-zircon. # These arguments are from the Fuchsia repo in zircon/scripts/run-zircon.
# yapf: disable
popen = subprocess.Popen([ popen = subprocess.Popen([
qemu_path, qemu_path,
'-m', '2048', '-m', '2048',
@ -97,15 +102,21 @@ def _Start(pid_file):
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0', '-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail, '-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name, '-append', 'TERM=dumb zircon.nodename=' + instance_name,
], stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL) ],
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL)
# yapf: enable
with open(pid_file, 'wb') as f: with open(pid_file, 'wb') as f:
f.write('%d\n' % popen.pid) f.write('%d\n' % popen.pid)
for i in range(10): for i in range(10):
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', 'netaddr') netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools',
'netaddr')
if subprocess.call([netaddr_path, '--nowait', instance_name], if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull), stderr=open(os.devnull)) == 0: stdout=open(os.devnull),
stderr=open(os.devnull)) == 0:
break break
time.sleep(.5) time.sleep(.5)
else: else:
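
One detail in the hunk above: the hand-aligned QEMU argument list is
bracketed with # yapf: disable and # yapf: enable comments so the reformatter
leaves its layout alone. A tiny illustration of that pragma (not code from
this file):

# yapf: disable
QEMU_ARGS = [
    '-m', '2048',
    '-smp', '4',
    '-nographic',
]
# yapf: enable
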


@ -34,8 +34,8 @@ IS_WINDOWS_HOST = sys.platform.startswith('win')
def _FindGNFromBinaryDir(binary_dir): def _FindGNFromBinaryDir(binary_dir):
"""Attempts to determine the path to a GN binary used to generate the build """Attempts to determine the path to a GN binary used to generate the build
files in the given binary_dir. This is necessary because `gn` might not be in files in the given binary_dir. This is necessary because `gn` might not be
the path or might be in a non-standard location, particularly on build in the path or might be in a non-standard location, particularly on build
machines.""" machines."""
build_ninja = os.path.join(binary_dir, 'build.ninja') build_ninja = os.path.join(binary_dir, 'build.ninja')
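
As the next hunk shows, the helper above recovers the gn binary's path from
the regen rule recorded in build.ninja rather than assuming gn is on PATH. A
simplified sketch of that idea (hypothetical helper; the file's own version
differs in details):

import os


def _find_gn_from_build_ninja(binary_dir):
    # The command that regenerates the build is recorded in build.ninja; the
    # token after "command =" is the gn binary, relative to binary_dir.
    build_ninja = os.path.join(binary_dir, 'build.ninja')
    if not os.path.exists(build_ninja):
        return None
    with open(build_ninja) as ninja_file:
        for line in ninja_file:
            if line.lstrip().startswith('command = '):
                parts = line.strip().split(' ')
                if len(parts) > 2:
                    return os.path.join(binary_dir, parts[2])
    return None
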
@ -58,24 +58,27 @@ def _FindGNFromBinaryDir(binary_dir):
if line.startswith(' command = '): if line.startswith(' command = '):
gn_command_line_parts = line.strip().split(' ') gn_command_line_parts = line.strip().split(' ')
if len(gn_command_line_parts) > 2: if len(gn_command_line_parts) > 2:
return os.path.join(binary_dir, gn_command_line_parts[2]) return os.path.join(binary_dir,
gn_command_line_parts[2])
return None return None
def _BinaryDirTargetOS(binary_dir): def _BinaryDirTargetOS(binary_dir):
"""Returns the apparent target OS of binary_dir, or None if none appear to be """Returns the apparent target OS of binary_dir, or None if none appear to
explicitly specified.""" be explicitly specified."""
gn_path = _FindGNFromBinaryDir(binary_dir) gn_path = _FindGNFromBinaryDir(binary_dir)
if gn_path: if gn_path:
# Look for a GN “target_os”. # Look for a GN “target_os”.
popen = subprocess.Popen([gn_path, '--root=' + CRASHPAD_DIR, popen = subprocess.Popen([
'args', binary_dir, gn_path, '--root=' + CRASHPAD_DIR, 'args', binary_dir,
'--list=target_os', '--short'], '--list=target_os', '--short'
],
shell=IS_WINDOWS_HOST, shell=IS_WINDOWS_HOST,
stdout=subprocess.PIPE, stderr=open(os.devnull)) stdout=subprocess.PIPE,
stderr=open(os.devnull))
value = popen.communicate()[0] value = popen.communicate()[0]
if popen.returncode == 0: if popen.returncode == 0:
match = re.match('target_os = "(.*)"$', value.decode('utf-8')) match = re.match('target_os = "(.*)"$', value.decode('utf-8'))
@ -88,8 +91,7 @@ def _BinaryDirTargetOS(binary_dir):
if os.path.exists(build_ninja_path): if os.path.exists(build_ninja_path):
with open(build_ninja_path) as build_ninja_file: with open(build_ninja_path) as build_ninja_file:
build_ninja_content = build_ninja_file.read() build_ninja_content = build_ninja_file.read()
match = re.search('-linux-android(eabi)?-ar$', match = re.search('-linux-android(eabi)?-ar$', build_ninja_content,
build_ninja_content,
re.MULTILINE) re.MULTILINE)
if match: if match:
return 'android' return 'android'
@ -119,8 +121,8 @@ def _EnableVTProcessingOnWindowsConsole():
try: try:
# From <wincon.h>. This would be # From <wincon.h>. This would be
# win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but its too new to be # win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but its too new to
# defined there. # be defined there.
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
stdout_console.SetConsoleMode(console_mode | stdout_console.SetConsoleMode(console_mode |
@ -148,8 +150,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
return return
def _adb(*args): def _adb(*args):
# Flush all of this scripts own buffered stdout output before running adb, # Flush all of this scripts own buffered stdout output before running
# which will likely produce its own output on stdout. # adb, which will likely produce its own output on stdout.
sys.stdout.flush() sys.stdout.flush()
adb_command = ['adb', '-s', android_device] adb_command = ['adb', '-s', android_device]
@ -162,33 +164,34 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
_adb('push', *args) _adb('push', *args)
def _adb_shell(command_args, env={}): def _adb_shell(command_args, env={}):
# Build a command to execute via “sh -c” instead of invoking it directly. # Build a command to execute via “sh -c” instead of invoking it
# Heres why: # directly. Heres why:
# #
# /system/bin/env isnt normally present prior to Android 6.0 (M), where # /system/bin/env isnt normally present prior to Android 6.0 (M), where
# toybox was introduced (Android platform/manifest 9a2c01e8450b). Instead, # toybox was introduced (Android platform/manifest 9a2c01e8450b).
# set environment variables by using the shells internal “export” command. # Instead, set environment variables by using the shells internal
# “export” command.
# #
# adbd prior to Android 7.0 (N), and the adb client prior to SDK # adbd prior to Android 7.0 (N), and the adb client prior to SDK
# platform-tools version 24, dont know how to communicate a shell commands # platform-tools version 24, dont know how to communicate a shell
# exit status. This was added in Android platform/system/core 606835ae5c4b). # commands exit status. This was added in Android platform/system/core
# With older adb servers and clients, adb will “exit 0” indicating success # 606835ae5c4b). With older adb servers and clients, adb will “exit 0”
# even if the command failed on the device. This makes # indicating success even if the command failed on the device. This
# subprocess.check_call() semantics difficult to implement directly. As a # makes subprocess.check_call() semantics difficult to implement
# workaround, have the device send the commands exit status over stdout and # directly. As a workaround, have the device send the commands exit
# pick it back up in this function. # status over stdout and pick it back up in this function.
# #
# Both workarounds are implemented by giving the device a simple script, # Both workarounds are implemented by giving the device a simple script,
# which adbd will run as an “sh -c” argument. # which adbd will run as an “sh -c” argument.
adb_command = ['adb', '-s', android_device, 'shell'] adb_command = ['adb', '-s', android_device, 'shell']
script_commands = [] script_commands = []
for k, v in env.items(): for k, v in env.items():
script_commands.append('export %s=%s' % (pipes.quote(k), pipes.quote(v))) script_commands.append('export %s=%s' %
(pipes.quote(k), pipes.quote(v)))
script_commands.extend([ script_commands.extend([
' '.join(pipes.quote(x) for x in command_args), ' '.join(pipes.quote(x) for x in command_args), 'status=${?}',
'status=${?}', 'echo "status=${status}"', 'exit ${status}'
'echo "status=${status}"', ])
'exit ${status}'])
adb_command.append('; '.join(script_commands)) adb_command.append('; '.join(script_commands))
child = subprocess.Popen(adb_command, child = subprocess.Popen(adb_command,
shell=IS_WINDOWS_HOST, shell=IS_WINDOWS_HOST,
@ -212,9 +215,9 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
print(data, end='') print(data, end='')
if final_line is None: if final_line is None:
# Maybe there was some stderr output after the end of stdout. Old versions # Maybe there was some stderr output after the end of stdout. Old
# of adb, prior to when the exit status could be communicated, smush the # versions of adb, prior to when the exit status could be
# two together. # communicated, smush the two together.
raise subprocess.CalledProcessError(-1, adb_command) raise subprocess.CalledProcessError(-1, adb_command)
status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1)) status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1))
if status != 0: if status != 0:
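
The block of comments above explains why the test runner smuggles the remote
command's exit status over stdout: older adb clients and servers always exit
0, so the status has to be echoed by the device and parsed back on the host.
A stripped-down sketch of that pattern (hypothetical helper, simplified from
what the hunk shows):

import pipes
import re
import subprocess

_STATUS_RE = re.compile(r'^status=(\d+)\s*$', re.MULTILINE)


def _run_via_adb_shell(device, command_args):
    # Run the command through `sh -c`, append its exit status to stdout, and
    # recover that status on the host side.
    script = '; '.join([
        ' '.join(pipes.quote(arg) for arg in command_args),
        'status=${?}',
        'echo "status=${status}"',
        'exit ${status}',
    ])
    output = subprocess.check_output(['adb', '-s', device, 'shell', script])
    match = _STATUS_RE.search(output.decode('utf-8'))
    status = int(match.group(1)) if match else -1
    if status != 0:
        raise subprocess.CalledProcessError(status, command_args)
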
@ -222,22 +225,23 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
child.wait() child.wait()
if child.returncode != 0: if child.returncode != 0:
raise subprocess.CalledProcessError(subprocess.returncode, adb_command) raise subprocess.CalledProcessError(subprocess.returncode,
adb_command)
# /system/bin/mktemp isnt normally present prior to Android 6.0 (M), where # /system/bin/mktemp isnt normally present prior to Android 6.0 (M), where
# toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it with # toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it
# a host-generated name. This wont retry if the name is in use, but with 122 # with a host-generated name. This wont retry if the name is in use, but
# bits of randomness, it should be OK. This uses “mkdir” instead of “mkdir -p” # with 122 bits of randomness, it should be OK. This uses “mkdir” instead of
# because the latter will not indicate failure if the directory already # “mkdir -p”because the latter will not indicate failure if the directory
# exists. # already exists.
device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex) device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex)
_adb_shell(['mkdir', device_temp_dir]) _adb_shell(['mkdir', device_temp_dir])
try: try:
# Specify test dependencies that must be pushed to the device. This could be # Specify test dependencies that must be pushed to the device. This
# determined automatically in a GN build, following the example used for # could be determined automatically in a GN build, following the example
# Fuchsia. Since nothing like that exists for GYP, hard-code it for # used for Fuchsia. Since nothing like that exists for GYP, hard-code it
# supported tests. # for supported tests.
test_build_artifacts = [test, 'crashpad_handler'] test_build_artifacts = [test, 'crashpad_handler']
test_data = ['test/test_paths_test_data_root.txt'] test_data = ['test/test_paths_test_data_root.txt']
@ -251,12 +255,13 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
device_out_dir = posixpath.join(device_temp_dir, 'out') device_out_dir = posixpath.join(device_temp_dir, 'out')
device_mkdirs = [device_out_dir] device_mkdirs = [device_out_dir]
for source_path in test_data: for source_path in test_data:
# A trailing slash could reasonably mean to copy an entire directory, but # A trailing slash could reasonably mean to copy an entire
# will interfere with whats needed from the path split. All parent # directory, but will interfere with whats needed from the path
# directories of any source_path need to be be represented in # split. All parent directories of any source_path need to be be
# device_mkdirs, but its important that no source_path itself wind up in # represented in device_mkdirs, but its important that no
# device_mkdirs, even if source_path names a directory, because that would # source_path itself wind up in device_mkdirs, even if source_path
# cause the “adb push” of the directory below to behave incorrectly. # names a directory, because that would cause the “adb push” of the
# directory below to behave incorrectly.
if source_path.endswith(posixpath.sep): if source_path.endswith(posixpath.sep):
source_path = source_path[:-1] source_path = source_path[:-1]
@ -271,7 +276,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
# Push the test binary and any other build output to the device. # Push the test binary and any other build output to the device.
local_test_build_artifacts = [] local_test_build_artifacts = []
for artifact in test_build_artifacts: for artifact in test_build_artifacts:
local_test_build_artifacts.append(os.path.join(binary_dir, artifact)) local_test_build_artifacts.append(os.path.join(
binary_dir, artifact))
_adb_push(local_test_build_artifacts, device_out_dir) _adb_push(local_test_build_artifacts, device_out_dir)
# Push test data to the device. # Push test data to the device.
@ -279,20 +285,21 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
_adb_push([os.path.join(CRASHPAD_DIR, source_path)], _adb_push([os.path.join(CRASHPAD_DIR, source_path)],
posixpath.join(device_temp_dir, source_path)) posixpath.join(device_temp_dir, source_path))
# Run the test on the device. Pass the test data root in the environment. # Run the test on the device. Pass the test data root in the
# environment.
# #
# Because the test will not run with its standard output attached to a # Because the test will not run with its standard output attached to a
# pseudo-terminal device, gtest will not normally enable colored output, so # pseudo-terminal device, gtest will not normally enable colored output,
# mimic gtests own logic for deciding whether to enable color by checking # so mimic gtests own logic for deciding whether to enable color by
# this scripts own standard output connection. The whitelist of TERM values # checking this scripts own standard output connection. The whitelist
# comes from gtest googletest/src/gtest.cc # of TERM values comes from gtest googletest/src/gtest.cc
# testing::internal::ShouldUseColor(). # testing::internal::ShouldUseColor().
env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir} env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir}
gtest_color = os.environ.get('GTEST_COLOR') gtest_color = os.environ.get('GTEST_COLOR')
if gtest_color in ('auto', None): if gtest_color in ('auto', None):
if (sys.stdout.isatty() and if (sys.stdout.isatty() and
(os.environ.get('TERM') in (os.environ.get('TERM')
('xterm', 'xterm-color', 'xterm-256color', 'screen', in ('xterm', 'xterm-color', 'xterm-256color', 'screen',
'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode', 'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode',
'rxvt-unicode-256color', 'linux', 'cygwin') or 'rxvt-unicode-256color', 'linux', 'cygwin') or
(IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))): (IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))):
@ -300,7 +307,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
else: else:
gtest_color = 'no' gtest_color = 'no'
env['GTEST_COLOR'] = gtest_color env['GTEST_COLOR'] = gtest_color
_adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line, env) _adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line,
env)
finally: finally:
_adb_shell(['rm', '-rf', device_temp_dir]) _adb_shell(['rm', '-rf', device_temp_dir])
@ -316,9 +324,10 @@ def _GenerateFuchsiaRuntimeDepsFiles(binary_dir, tests):
with open(targets_file, 'wb') as f: with open(targets_file, 'wb') as f:
f.write('//:' + '\n//:'.join(tests) + '\n') f.write('//:' + '\n//:'.join(tests) + '\n')
gn_path = _FindGNFromBinaryDir(binary_dir) gn_path = _FindGNFromBinaryDir(binary_dir)
subprocess.check_call( subprocess.check_call([
[gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir, gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir,
'--runtime-deps-list-file=' + targets_file]) '--runtime-deps-list-file=' + targets_file
])
# Run again so that --runtime-deps-list-file isn't in the regen rule. See # Run again so that --runtime-deps-list-file isn't in the regen rule. See
# https://crbug.com/814816. # https://crbug.com/814816.
@ -358,7 +367,9 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
# Run loglistener and filter the output to know when the test is done. # Run loglistener and filter the output to know when the test is done.
loglistener_process = subprocess.Popen( loglistener_process = subprocess.Popen(
[os.path.join(sdk_root, 'tools', 'loglistener'), device_name], [os.path.join(sdk_root, 'tools', 'loglistener'), device_name],
stdout=subprocess.PIPE, stdin=open(os.devnull), stderr=open(os.devnull)) stdout=subprocess.PIPE,
stdin=open(os.devnull),
stderr=open(os.devnull))
runtime_deps_file = os.path.join(binary_dir, test + '.runtime_deps') runtime_deps_file = os.path.join(binary_dir, test + '.runtime_deps')
with open(runtime_deps_file, 'rb') as f: with open(runtime_deps_file, 'rb') as f:
@ -369,8 +380,8 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
by using pipes.quote(), and then each command is chained by shell ';'. by using pipes.quote(), and then each command is chained by shell ';'.
""" """
netruncmd_path = os.path.join(sdk_root, 'tools', 'netruncmd') netruncmd_path = os.path.join(sdk_root, 'tools', 'netruncmd')
final_args = ' ; '.join(' '.join(pipes.quote(x) for x in command) final_args = ' ; '.join(
for command in args) ' '.join(pipes.quote(x) for x in command) for command in args)
subprocess.check_call([netruncmd_path, device_name, final_args]) subprocess.check_call([netruncmd_path, device_name, final_args])
try: try:
@ -380,35 +391,38 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
staging_root = test_root + '/pkg' staging_root = test_root + '/pkg'
# Make a staging directory tree on the target. # Make a staging directory tree on the target.
directories_to_create = [tmp_root, directories_to_create = [
tmp_root,
'%s/bin' % staging_root, '%s/bin' % staging_root,
'%s/assets' % staging_root] '%s/assets' % staging_root
]
netruncmd(['mkdir', '-p'] + directories_to_create) netruncmd(['mkdir', '-p'] + directories_to_create)
def netcp(local_path): def netcp(local_path):
"""Uses `netcp` to copy a file or directory to the device. Files located """Uses `netcp` to copy a file or directory to the device. Files
inside the build dir are stored to /pkg/bin, otherwise to /pkg/assets. located inside the build dir are stored to /pkg/bin, otherwise to
.so files are stored somewhere completely different, into /boot/lib (!). /pkg/assets. .so files are stored somewhere completely different,
This is because the loader service does not yet correctly handle the into /boot/lib (!). This is because the loader service does not yet
namespace in which the caller is being run, and so can only load .so files correctly handle the namespace in which the caller is being run, and
from a couple hardcoded locations, the only writable one of which is so can only load .so files from a couple hardcoded locations, the
/boot/lib, so we copy all .so files there. This bug is filed upstream as only writable one of which is /boot/lib, so we copy all .so files
ZX-1619. there. This bug is filed upstream as ZX-1619.
""" """
in_binary_dir = local_path.startswith(binary_dir + '/') in_binary_dir = local_path.startswith(binary_dir + '/')
if in_binary_dir: if in_binary_dir:
if local_path.endswith('.so'): if local_path.endswith('.so'):
target_path = os.path.join( target_path = os.path.join('/boot/lib',
'/boot/lib', local_path[len(binary_dir)+1:]) local_path[len(binary_dir) + 1:])
else: else:
target_path = os.path.join( target_path = os.path.join(staging_root, 'bin',
staging_root, 'bin', local_path[len(binary_dir)+1:]) local_path[len(binary_dir) + 1:])
else: else:
relative_path = os.path.relpath(local_path, CRASHPAD_DIR) relative_path = os.path.relpath(local_path, CRASHPAD_DIR)
target_path = os.path.join(staging_root, 'assets', relative_path) target_path = os.path.join(staging_root, 'assets',
relative_path)
netcp_path = os.path.join(sdk_root, 'tools', 'netcp') netcp_path = os.path.join(sdk_root, 'tools', 'netcp')
subprocess.check_call([netcp_path, local_path, subprocess.check_call(
device_name + ':' + target_path], [netcp_path, local_path, device_name + ':' + target_path],
stderr=open(os.devnull)) stderr=open(os.devnull))
# Copy runtime deps into the staging tree. # Copy runtime deps into the staging tree.
@ -423,13 +437,14 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
done_message = 'TERMINATED: ' + unique_id done_message = 'TERMINATED: ' + unique_id
namespace_command = [ namespace_command = [
'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root, '/svc=/svc', 'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root,
'--replace-child-argv0=/pkg/bin/' + test, '--', '/svc=/svc', '--replace-child-argv0=/pkg/bin/' + test, '--',
staging_root + '/bin/' + test] + extra_command_line staging_root + '/bin/' + test
] + extra_command_line
netruncmd(namespace_command, ['echo', done_message]) netruncmd(namespace_command, ['echo', done_message])
success = _HandleOutputFromFuchsiaLogListener( success = _HandleOutputFromFuchsiaLogListener(loglistener_process,
loglistener_process, done_message) done_message)
if not success: if not success:
raise subprocess.CalledProcessError(1, test) raise subprocess.CalledProcessError(1, test)
finally: finally:
@ -448,15 +463,15 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
'TestHostPath': os.path.join(test_path, test + '.app'), 'TestHostPath': os.path.join(test_path, test + '.app'),
'TestingEnvironmentVariables': { 'TestingEnvironmentVariables': {
'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:', 'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:',
'DYLD_INSERT_LIBRARIES': ( 'DYLD_INSERT_LIBRARIES':
'__PLATFORMS__/iPhoneSimulator.platform/Developer/' ('__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'usr/lib/libXCTestBundleInject.dylib'), 'usr/lib/libXCTestBundleInject.dylib'),
'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator',
'IDEiPhoneInternalTestBundleName': test + '.app', 'IDEiPhoneInternalTestBundleName': test + '.app',
'XCInjectBundleInto': '__TESTHOST__/' + test, 'XCInjectBundleInto': '__TESTHOST__/' + test,
} }
} }
return { test: module_data } return {test: module_data}
def xcuitest(binary_dir, test): def xcuitest(binary_dir, test):
"""Returns a dict containing the xctestrun data needed to run an """Returns a dict containing the xctestrun data needed to run an
@ -464,7 +479,8 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
test_path = os.path.join(CRASHPAD_DIR, binary_dir) test_path = os.path.join(CRASHPAD_DIR, binary_dir)
runner_path = os.path.join(test_path, test + '_module-Runner.app') runner_path = os.path.join(test_path, test + '_module-Runner.app')
bundle_path = os.path.join(runner_path, 'PlugIns', test + '_module.xctest') bundle_path = os.path.join(runner_path, 'PlugIns',
test + '_module.xctest')
target_app_path = os.path.join(test_path, test + '.app') target_app_path = os.path.join(test_path, test + '.app')
module_data = { module_data = {
'IsUITestBundle': True, 'IsUITestBundle': True,
@ -472,17 +488,19 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
'TestBundlePath': bundle_path, 'TestBundlePath': bundle_path,
'TestHostPath': runner_path, 'TestHostPath': runner_path,
'UITargetAppPath': target_app_path, 'UITargetAppPath': target_app_path,
'DependentProductPaths': [ bundle_path, runner_path, target_app_path ], 'DependentProductPaths': [
bundle_path, runner_path, target_app_path
],
'TestingEnvironmentVariables': { 'TestingEnvironmentVariables': {
'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:', 'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:',
'DYLD_INSERT_LIBRARIES': ( 'DYLD_INSERT_LIBRARIES':
'__PLATFORMS__/iPhoneSimulator.platform/Developer/' ('__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'usr/lib/libXCTestBundleInject.dylib'), 'usr/lib/libXCTestBundleInject.dylib'),
'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator', 'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator',
'XCInjectBundleInto': '__TESTHOST__/' + test + '_module-Runner', 'XCInjectBundleInto': '__TESTHOST__/' + test + '_module-Runner',
}, },
} }
return { test: module_data } return {test: module_data}
with tempfile.NamedTemporaryFile() as f: with tempfile.NamedTemporaryFile() as f:
import plistlib import plistlib
@ -494,9 +512,11 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
else: else:
plistlib.writePlist(xctest(binary_dir, test), xctestrun_path) plistlib.writePlist(xctest(binary_dir, test), xctestrun_path)
subprocess.check_call(['xcodebuild', 'test-without-building', subprocess.check_call([
'-xctestrun', xctestrun_path, '-destination', 'xcodebuild', 'test-without-building', '-xctestrun', xctestrun_path,
'platform=iOS Simulator,name=iPhone 8']) '-destination', 'platform=iOS Simulator,name=iPhone 8'
])
# This script is primarily used from the waterfall so that the list of tests # This script is primarily used from the waterfall so that the list of tests
# that are run is maintained in-tree, rather than in a separate infrastructure # that are run is maintained in-tree, rather than in a separate infrastructure
@ -510,11 +530,11 @@ def main(args):
args = parser.parse_args() args = parser.parse_args()
# Tell 64-bit Windows tests where to find 32-bit test executables, for # Tell 64-bit Windows tests where to find 32-bit test executables, for
# cross-bitted testing. This relies on the fact that the GYP build by default # cross-bitted testing. This relies on the fact that the GYP build by
# uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64 for the # default uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64
# 64-bit build. This is not a universally valid assumption, and if its not # for the 64-bit build. This is not a universally valid assumption, and if
# met, 64-bit tests that require 32-bit build output will disable themselves # its not met, 64-bit tests that require 32-bit build output will disable
# dynamically. # themselves dynamically.
if (sys.platform == 'win32' and args.binary_dir.endswith('_x64') and if (sys.platform == 'win32' and args.binary_dir.endswith('_x64') and
'CRASHPAD_TEST_32_BIT_OUTPUT' not in os.environ): 'CRASHPAD_TEST_32_BIT_OUTPUT' not in os.environ):
binary_dir_32 = args.binary_dir[:-4] binary_dir_32 = args.binary_dir[:-4]
@ -544,13 +564,13 @@ def main(args):
for line in adb_devices.splitlines(): for line in adb_devices.splitlines():
line = line.decode('utf-8') line = line.decode('utf-8')
if (line == 'List of devices attached' or if (line == 'List of devices attached' or
re.match('^\* daemon .+ \*$', line) or re.match('^\* daemon .+ \*$', line) or line == ''):
line == ''):
continue continue
(device, ignore) = line.split('\t') (device, ignore) = line.split('\t')
devices.append(device) devices.append(device)
if len(devices) != 1: if len(devices) != 1:
print("Please set ANDROID_DEVICE to your device's id", file=sys.stderr) print("Please set ANDROID_DEVICE to your device's id",
file=sys.stderr)
return 2 return 2
android_device = devices[0] android_device = devices[0]
print('Using autodetected Android device:', android_device) print('Using autodetected Android device:', android_device)
@ -558,7 +578,8 @@ def main(args):
zircon_nodename = os.environ.get('ZIRCON_NODENAME') zircon_nodename = os.environ.get('ZIRCON_NODENAME')
if not zircon_nodename: if not zircon_nodename:
netls = os.path.join(_GetFuchsiaSDKRoot(), 'tools', 'netls') netls = os.path.join(_GetFuchsiaSDKRoot(), 'tools', 'netls')
popen = subprocess.Popen([netls, '--nowait'], stdout=subprocess.PIPE) popen = subprocess.Popen([netls, '--nowait'],
stdout=subprocess.PIPE)
devices = popen.communicate()[0].splitlines() devices = popen.communicate()[0].splitlines()
if popen.returncode != 0 or len(devices) != 1: if popen.returncode != 0 or len(devices) != 1:
print("Please set ZIRCON_NODENAME to your device's hostname", print("Please set ZIRCON_NODENAME to your device's hostname",
@ -585,8 +606,10 @@ def main(args):
print(test) print(test)
print('-' * 80) print('-' * 80)
if test.endswith('.py'): if test.endswith('.py'):
subprocess.check_call( subprocess.check_call([
[sys.executable, os.path.join(CRASHPAD_DIR, test), args.binary_dir]) sys.executable,
os.path.join(CRASHPAD_DIR, test), args.binary_dir
])
else: else:
extra_command_line = [] extra_command_line = []
if args.gtest_filter: if args.gtest_filter:
@ -598,7 +621,8 @@ def main(args):
_RunOnFuchsiaTarget(args.binary_dir, test, zircon_nodename, _RunOnFuchsiaTarget(args.binary_dir, test, zircon_nodename,
extra_command_line) extra_command_line)
elif is_ios: elif is_ios:
_RunOnIOSTarget(args.binary_dir, test, _RunOnIOSTarget(args.binary_dir,
test,
is_xcuitest=test.startswith('ios')) is_xcuitest=test.startswith('ios'))
else: else:
subprocess.check_call([os.path.join(args.binary_dir, test)] + subprocess.check_call([os.path.join(args.binary_dir, test)] +


@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved. # Copyright 2017 The Crashpad Authors. All rights reserved.
# #


@ -12,8 +12,12 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnUpload(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)
def CheckChangeOnCommit(input_api, output_api): def CheckChangeOnCommit(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)


@ -29,7 +29,6 @@ import win32con
import win32pipe import win32pipe
import winerror import winerror
g_temp_dirs = [] g_temp_dirs = []
g_had_failures = False g_had_failures = False
@ -48,10 +47,12 @@ def CleanUpTempDirs():
def FindInstalledWindowsApplication(app_path): def FindInstalledWindowsApplication(app_path):
search_paths = [os.getenv('PROGRAMFILES(X86)'), search_paths = [
os.getenv('PROGRAMFILES(X86)'),
os.getenv('PROGRAMFILES'), os.getenv('PROGRAMFILES'),
os.getenv('PROGRAMW6432'), os.getenv('PROGRAMW6432'),
os.getenv('LOCALAPPDATA')] os.getenv('LOCALAPPDATA')
]
search_paths += os.getenv('PATH', '').split(os.pathsep) search_paths += os.getenv('PATH', '').split(os.pathsep)
for search_path in search_paths: for search_path in search_paths:
@ -77,7 +78,8 @@ def GetCdbPath():
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'), os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
'Debugging Tools For Windows (x64)', 'Debugging Tools For Windows (x64)',
'Debugging Tools For Windows (x86)', 'Debugging Tools For Windows (x86)',
'Debugging Tools For Windows',) 'Debugging Tools For Windows',
)
for possible_path in possible_paths: for possible_path in possible_paths:
app_path = os.path.join(possible_path, 'cdb.exe') app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path) app_path = FindInstalledWindowsApplication(app_path)
@ -87,17 +89,17 @@ def GetCdbPath():
def NamedPipeExistsAndReady(pipe_name): def NamedPipeExistsAndReady(pipe_name):
"""Returns False if pipe_name does not exist. If pipe_name does exist, blocks """Returns False if pipe_name does not exist. If pipe_name does exist,
until the pipe is ready to service clients, and then returns True. blocks until the pipe is ready to service clients, and then returns True.
This is used as a drop-in replacement for os.path.exists() and os.access() to This is used as a drop-in replacement for os.path.exists() and os.access()
test for the pipe's existence. Both of those calls tickle the pipe in a way to test for the pipe's existence. Both of those calls tickle the pipe in a
that appears to the server to be a client connecting, triggering error way that appears to the server to be a client connecting, triggering error
messages when no data is received. messages when no data is received.
Although this function only needs to test pipe existence (waiting for Although this function only needs to test pipe existence (waiting for
CreateNamedPipe()), it actually winds up testing pipe readiness CreateNamedPipe()), it actually winds up testing pipe readiness (waiting for
(waiting for ConnectNamedPipe()). This is unnecessary but harmless. ConnectNamedPipe()). This is unnecessary but harmless.
""" """
try: try:
win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER) win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
@ -108,11 +110,11 @@ def NamedPipeExistsAndReady(pipe_name):
return True return True
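In context, the helper above is used to poll until crashpad_handler has created its pipe, roughly as follows (the pipe name is a placeholder; the real loop also prints a one-time message while it waits):

import time

while not NamedPipeExistsAndReady(r'\\.\pipe\example-crashpad-pipe'):
    time.sleep(0.001)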
def GetDumpFromProgram( def GetDumpFromProgram(out_dir, pipe_name, executable_name, expect_exit_code,
out_dir, pipe_name, executable_name, expect_exit_code, *args): *args):
"""Initialize a crash database, and run |executable_name| connecting to a """Initialize a crash database, and run |executable_name| connecting to a
crash handler. If pipe_name is set, crashpad_handler will be started first. If crash handler. If pipe_name is set, crashpad_handler will be started first.
pipe_name is empty, the executable is responsible for starting If pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|, executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by an exception will be raised. Returns the path to the minidump generated by
@ -122,15 +124,15 @@ def GetDumpFromProgram(
handler = None handler = None
try: try:
subprocess.check_call( subprocess.check_call([
[os.path.join(out_dir, 'crashpad_database_util.exe'), '--create', os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database]) '--database=' + test_database
])
if pipe_name is not None: if pipe_name is not None:
handler = subprocess.Popen([ handler = subprocess.Popen([
os.path.join(out_dir, 'crashpad_handler.com'), os.path.join(out_dir, 'crashpad_handler.com'),
'--pipe-name=' + pipe_name, '--pipe-name=' + pipe_name, '--database=' + test_database
'--database=' + test_database
]) ])
# Wait until the server is ready. # Wait until the server is ready.
@ -141,12 +143,13 @@ def GetDumpFromProgram(
printed = True printed = True
time.sleep(0.001) time.sleep(0.001)
command = [os.path.join(out_dir, executable_name), pipe_name] + list(args) command = [os.path.join(out_dir, executable_name), pipe_name
] + list(args)
else: else:
command = ([os.path.join(out_dir, executable_name), command = ([
os.path.join(out_dir, 'crashpad_handler.com'), os.path.join(out_dir, executable_name),
test_database] + os.path.join(out_dir, 'crashpad_handler.com'), test_database
list(args)) ] + list(args))
print('Running %s' % os.path.basename(command[0])) print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command) exit_code = subprocess.call(command)
if exit_code != expect_exit_code: if exit_code != expect_exit_code:
@ -167,37 +170,28 @@ def GetDumpFromProgram(
def GetDumpFromCrashyProgram(out_dir, pipe_name): def GetDumpFromCrashyProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, return GetDumpFromProgram(out_dir, pipe_name, 'crashy_program.exe',
pipe_name,
'crashy_program.exe',
win32con.EXCEPTION_ACCESS_VIOLATION) win32con.EXCEPTION_ACCESS_VIOLATION)
def GetDumpFromOtherProgram(out_dir, pipe_name, *args): def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
return GetDumpFromProgram( return GetDumpFromProgram(out_dir, pipe_name, 'crash_other_program.exe', 0,
out_dir, pipe_name, 'crash_other_program.exe', 0, *args) *args)
def GetDumpFromSignal(out_dir, pipe_name, *args): def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con. STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir, return GetDumpFromProgram(out_dir, pipe_name, 'crashy_signal.exe',
pipe_name, STATUS_FATAL_APP_EXIT, *args)
'crashy_signal.exe',
STATUS_FATAL_APP_EXIT,
*args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name): def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, return GetDumpFromProgram(out_dir, pipe_name, 'self_destroying_program.exe',
pipe_name,
'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT) win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name): def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, return GetDumpFromProgram(out_dir, pipe_name, 'crashy_z7_loader.exe',
pipe_name,
'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION) win32con.EXCEPTION_ACCESS_VIOLATION)
@ -208,13 +202,10 @@ class CdbRun(object):
""" """
def __init__(self, cdb_path, dump_path, command): def __init__(self, cdb_path, dump_path, command):
# Run a command line that loads the dump, runs the specified cdb command, # Run a command line that loads the dump, runs the specified cdb
# and then quits, and capturing stdout. # command, and then quits, and capturing stdout.
self.out = subprocess.check_output([ self.out = subprocess.check_output(
cdb_path, [cdb_path, '-z', dump_path, '-c', command + ';q'])
'-z', dump_path,
'-c', command + ';q'
])
def Check(self, pattern, message, re_flags=0): def Check(self, pattern, message, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags) match_obj = re.search(pattern, self.out, re_flags)
@ -244,16 +235,9 @@ class CdbRun(object):
return None return None
def RunTests(cdb_path, def RunTests(cdb_path, dump_path, start_handler_dump_path, destroyed_dump_path,
dump_path, z7_dump_path, other_program_path, other_program_no_exception_path,
start_handler_dump_path, sigabrt_main_path, sigabrt_background_path, pipe_name):
destroyed_dump_path,
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name):
"""Runs various tests in sequence. Runs a new cdb instance on the dump for """Runs various tests in sequence. Runs a new cdb instance on the dump for
each block of tests to reduce the chances that output from one command is each block of tests to reduce the chances that output from one command is
confused for output from another. confused for output from another.
@ -264,21 +248,24 @@ def RunTests(cdb_path,
# When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as # When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
# "`anonymous namespace'" and instead gives the decorated form. # "`anonymous namespace'" and instead gives the decorated form.
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' out.Check(
'SomeCrashyFunction', 'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'exception at correct location') 'SomeCrashyFunction', 'exception at correct location')
out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr') out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it', out.Check('This dump file has an exception of interest stored in it',
'captured exception (using StartHandler())') 'captured exception (using StartHandler())')
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::' out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction', 'SomeCrashyFunction',
'exception at correct location (using StartHandler())') 'exception at correct location (using StartHandler())')
out = CdbRun(cdb_path, dump_path, '!peb') out = CdbRun(cdb_path, dump_path, '!peb')
out.Check(r'PEB at', 'found the PEB') out.Check(r'PEB at', 'found the PEB')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
out.Check(r'Base TimeStamp Module', 'module list present') 'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module',
'module list present')
pipe_name_escaped = pipe_name.replace('\\', '\\\\') pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped, out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct') 'some PEB data is correct')
@ -288,7 +275,8 @@ def RunTests(cdb_path,
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters') out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters')
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"', out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"',
'PEB->ProcessParameters.ImagePathName string captured') 'PEB->ProcessParameters.ImagePathName string captured')
out.Check(' DesktopInfo *: ' out.Check(
' DesktopInfo *: '
'_UNICODE_STRING "(?!--- memory read error at address ).*"', '_UNICODE_STRING "(?!--- memory read error at address ).*"',
'PEB->ProcessParameters.DesktopInfo string captured') 'PEB->ProcessParameters.DesktopInfo string captured')
@ -298,19 +286,23 @@ def RunTests(cdb_path,
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue') out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle') out = CdbRun(cdb_path, dump_path, '!gle')
out.Check('LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the ' out.Check(
'LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
'file specified.', '!gle gets last error') 'file specified.', '!gle gets last error')
out.Check('LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The ' out.Check(
'LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
'file %hs does not exist.', '!gle gets last ntstatus') 'file %hs does not exist.', '!gle gets last ntstatus')
if False: if False:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList. # TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList.
out = CdbRun(cdb_path, dump_path, '!locks') out = CdbRun(cdb_path, dump_path, '!locks')
out.Check(r'CritSec crashy_program!crashpad::`anonymous namespace\'::' out.Check(
r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
r'g_test_critical_section', 'lock was captured') r'g_test_critical_section', 'lock was captured')
if platform.win32_ver()[0] != '7': if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7. # We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked', 'lock debug info was captured, and is locked') out.Check(r'\*\*\* Locked',
'lock debug info was captured, and is locked')
out = CdbRun(cdb_path, dump_path, '!handle') out = CdbRun(cdb_path, dump_path, '!handle')
out.Check(r'\d+ Handles', 'captured handles') out.Check(r'\d+ Handles', 'captured handles')
@ -323,21 +315,23 @@ def RunTests(cdb_path,
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror') out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror')
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2') out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE) out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE)
# Check that there is no stack trace in the self-destroyed process. Confirm # Check that there is no stack trace in the self-destroyed process. Confirm
# that the top is where we expect it (that's based only on IP), but subsequent # that the top is where we expect it (that's based only on IP), but
# stack entries will not be available. This confirms that we have a mostly # subsequent stack entries will not be available. This confirms that we have
# valid dump, but that the stack was omitted. # a mostly valid dump, but that the stack was omitted.
out.Check(r'self_destroying_program!crashpad::`anonymous namespace\'::' out.Check(
r'self_destroying_program!crashpad::`anonymous namespace\'::'
r'FreeOwnStackAndBreak.*\nquit:', r'FreeOwnStackAndBreak.*\nquit:',
'at correct location, no additional stack entries') 'at correct location, no additional stack entries')
# Dump memory pointed to be EDI on the background suspended thread. We don't # Dump memory pointed to be EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other # know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended, and # threads, so first do a run to extract the thread index that's suspended,
# then another run to dump the data pointed to by EDI for that thread. # and then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~') out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:') match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj: if match_obj:
@ -346,36 +340,37 @@ def RunTests(cdb_path,
out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50', out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
'data pointed to by registers captured') 'data pointed to by registers captured')
# Move up one stack frame after jumping to the exception, and examine memory. # Move up one stack frame after jumping to the exception, and examine
# memory.
out = CdbRun(cdb_path, dump_path, out = CdbRun(cdb_path, dump_path,
'.ecxr; .f+; dd /c100 poi(offset_pointer)-20') '.ecxr; .f+; dd /c100 poi(offset_pointer)-20')
out.Check(r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e ' out.Check(
r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 ' r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c ' r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 ' r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097', r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured') 'data pointed to by stack captured')
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and then # Attempt to retrieve the value of g_extra_memory_pointer (by name), and
# examine the memory at which it points. Both should have been saved. # then examine the memory at which it points. Both should have been saved.
out = CdbRun(cdb_path, dump_path, out = CdbRun(
cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 ' 'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8') 'L8')
out.Check(r'0000655e 0000656b 00006578 00006585', out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured') 'extra memory range captured')
out = CdbRun(cdb_path, dump_path, '.dumpdebug') out = CdbRun(cdb_path, dump_path, '.dumpdebug')
out.Check(r'type \?\?\? \(333333\), size 00001000', out.Check(r'type \?\?\? \(333333\), size 00001000', 'first user stream')
'first user stream') out.Check(r'type \?\?\? \(222222\), size 00000080', 'second user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080',
'second user stream')
if z7_dump_path: if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm') out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm')
out.Check('This dump file has an exception of interest stored in it', out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module') 'captured exception in z7 module')
# Older versions of cdb display relative to exports for /Z7 modules, newer # Older versions of cdb display relative to exports for /Z7 modules,
# ones just display the offset. # newer ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):', out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location') 'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll', out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
@ -428,11 +423,11 @@ def main(args):
symbol_dir = MakeTempDir() symbol_dir = MakeTempDir()
protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http' protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http'
os.environ['_NT_SYMBOL_PATH'] = ( os.environ['_NT_SYMBOL_PATH'] = (
'SRV*' + symbol_dir + '*' + 'SRV*' + symbol_dir + '*' + protocol +
protocol + '://msdl.microsoft.com/download/symbols') '://msdl.microsoft.com/download/symbols')
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % ( pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (os.getpid(),
os.getpid(), str(random.getrandbits(64))) str(random.getrandbits(64)))
crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name) crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name)
if not crashy_dump_path: if not crashy_dump_path:
@ -442,7 +437,8 @@ def main(args):
if not start_handler_dump_path: if not start_handler_dump_path:
return 1 return 1
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(args[0], pipe_name) destroyed_dump_path = GetDumpFromSelfDestroyingProgram(
args[0], pipe_name)
if not destroyed_dump_path: if not destroyed_dump_path:
return 1 return 1
@ -465,21 +461,15 @@ def main(args):
if not sigabrt_main_path: if not sigabrt_main_path:
return 1 return 1
sigabrt_background_path = GetDumpFromSignal( sigabrt_background_path = GetDumpFromSignal(args[0], pipe_name,
args[0], pipe_name, 'background') 'background')
if not sigabrt_background_path: if not sigabrt_background_path:
return 1 return 1
RunTests(cdb_path, RunTests(cdb_path, crashy_dump_path, start_handler_dump_path,
crashy_dump_path, destroyed_dump_path, z7_dump_path, other_program_path,
start_handler_dump_path, other_program_no_exception_path, sigabrt_main_path,
destroyed_dump_path, sigabrt_background_path, pipe_name)
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name)
return 1 if g_had_failures else 0 return 1 if g_had_failures else 0
finally: finally:

third_party/fuchsia/runner.py (vendored; file mode changed from Normal file to Executable file)

@ -1,3 +1,5 @@
#!/usr/bin/env python
# Copyright 2018 The Crashpad Authors. All rights reserved. # Copyright 2018 The Crashpad Authors. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
@ -14,4 +16,5 @@
import os import os
import sys import sys
os.execv(sys.argv[1], sys.argv[1:]) os.execv(sys.argv[1], sys.argv[1:])


@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved. # Copyright 2019 The Crashpad Authors. All rights reserved.
# #
@ -20,6 +19,7 @@ import sys
import mig_fix import mig_fix
import mig_gen import mig_gen
def main(args): def main(args):
parsed = mig_gen.parse_args(args) parsed = mig_gen.parse_args(args)
@ -30,5 +30,6 @@ def main(args):
parsed.migcom_path, parsed.arch) parsed.migcom_path, parsed.arch)
mig_fix.fix_interface(interface) mig_fix.fix_interface(interface)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))


@ -22,18 +22,18 @@ import sys
from mig_gen import MigInterface from mig_gen import MigInterface
def _fix_user_implementation(implementation, fixed_implementation, header, def _fix_user_implementation(implementation, fixed_implementation, header,
fixed_header): fixed_header):
"""Rewrites a MIG-generated user implementation (.c) file. """Rewrites a MIG-generated user implementation (.c) file.
Rewrites the file at |implementation| by adding Rewrites the file at |implementation| by adding __attribute__((unused)) to
__attribute__((unused)) to the definition of any structure typedefed the definition of any structure typedefed as __Reply by searching for the
as __Reply by searching for the pattern unique to those structure pattern unique to those structure definitions. These structures are in fact
definitions. These structures are in fact unused in the user unused in the user implementation file, and this will trigger a
implementation file, and this will trigger a -Wunused-local-typedefs -Wunused-local-typedefs warning in gcc unless removed or marked with the
warning in gcc unless removed or marked with the unused attribute. unused attribute. Also changes header references to point to the new
Also changes header references to point to the new header filename, if header filename, if changed.
changed.
If |fixed_implementation| is None, overwrites the original; otherwise, puts If |fixed_implementation| is None, overwrites the original; otherwise, puts
the result in the file at |fixed_implementation|. the result in the file at |fixed_implementation|.
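The hunk elides the pattern that _fix_user_implementation actually searches for, so the following is only a hedged sketch of the kind of substitution the docstring describes; the regular expression is an assumption, not taken from mig_fix.py.

import re

def _mark_reply_typedefs_unused(contents):
    # Hypothetical pattern: tag the closing line of each "} __Reply__Foo_t;"
    # typedef with the unused attribute to silence -Wunused-local-typedefs.
    pattern = re.compile(r'^(\} __Reply__\w+_t)(;)$', re.MULTILINE)
    return pattern.sub(r'\1 __attribute__((unused))\2', contents)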
@ -59,6 +59,7 @@ def _fix_user_implementation(implementation, fixed_implementation, header,
file.write(contents) file.write(contents)
file.close() file.close()
def _fix_server_implementation(implementation, fixed_implementation, header, def _fix_server_implementation(implementation, fixed_implementation, header,
fixed_header): fixed_header):
"""Rewrites a MIG-generated server implementation (.c) file. """Rewrites a MIG-generated server implementation (.c) file.
@ -79,24 +80,25 @@ def _fix_server_implementation(implementation, fixed_implementation, header,
contents = file.read() contents = file.read()
# Find interesting declarations. # Find interesting declarations.
declaration_pattern = \ declaration_pattern = re.compile(
re.compile('^mig_internal (kern_return_t __MIG_check__.*)$', '^mig_internal (kern_return_t __MIG_check__.*)$', re.MULTILINE)
re.MULTILINE)
declarations = declaration_pattern.findall(contents) declarations = declaration_pattern.findall(contents)
# Remove “__attribute__((__unused__))” from the declarations, and call them # Remove “__attribute__((__unused__))” from the declarations, and call them
# “mig_external” or “extern” depending on whether “mig_external” is defined. # “mig_external” or “extern” depending on whether “mig_external” is defined.
attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ') attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ')
declarations = ['''\ declarations = [
'''\
#ifdef mig_external #ifdef mig_external
mig_external mig_external
#else #else
extern extern
#endif #endif
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations] ''' + attribute_pattern.sub('', x) + ';\n' for x in declarations
]
# Rewrite the declarations in this file as “mig_external”. # Rewrite the declarations in this file as “mig_external”.
contents = declaration_pattern.sub(r'mig_external \1', contents); contents = declaration_pattern.sub(r'mig_external \1', contents)
# Crashpad never implements the mach_msg_server() MIG callouts. To avoid # Crashpad never implements the mach_msg_server() MIG callouts. To avoid
# needing to provide stub implementations, set KERN_FAILURE as the RetCode # needing to provide stub implementations, set KERN_FAILURE as the RetCode
@ -125,6 +127,7 @@ extern
file.close() file.close()
return declarations return declarations
def _fix_header(header, fixed_header, declarations=[]): def _fix_header(header, fixed_header, declarations=[]):
"""Rewrites a MIG-generated header (.h) file. """Rewrites a MIG-generated header (.h) file.
@ -161,6 +164,7 @@ extern "C" {
file.write(contents) file.write(contents)
file.close() file.close()
def fix_interface(interface, fixed_interface=None): def fix_interface(interface, fixed_interface=None):
if fixed_interface is None: if fixed_interface is None:
fixed_interface = MigInterface(None, None, None, None) fixed_interface = MigInterface(None, None, None, None)
@ -175,6 +179,7 @@ def fix_interface(interface, fixed_interface=None):
_fix_header(interface.server_h, fixed_interface.server_h, _fix_header(interface.server_h, fixed_interface.server_h,
server_declarations) server_declarations)
def main(args): def main(args):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('user_c') parser.add_argument('user_c')
@ -187,11 +192,12 @@ def main(args):
parser.add_argument('--fixed_server_h', default=None) parser.add_argument('--fixed_server_h', default=None)
parsed = parser.parse_args(args) parsed = parser.parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c, interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.user_h, parsed.server_h) parsed.server_h)
fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c, fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c,
parsed.fixed_user_h, parsed.fixed_server_h) parsed.fixed_user_h, parsed.fixed_server_h)
fix_interface(interface, fixed_interface) fix_interface(interface, fixed_interface)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))


@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved. # Copyright 2019 The Crashpad Authors. All rights reserved.
# #
@ -21,19 +20,30 @@ import os
import subprocess import subprocess
import sys import sys
MigInterface = collections.namedtuple('MigInterface', ['user_c', 'server_c', MigInterface = collections.namedtuple(
'user_h', 'server_h']) 'MigInterface', ['user_c', 'server_c', 'user_h', 'server_h'])
def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
mig_path=None, migcom_path=None, arch=None): def generate_interface(defs,
interface,
includes=[],
sdk=None,
clang_path=None,
mig_path=None,
migcom_path=None,
arch=None):
if mig_path is None: if mig_path is None:
mig_path = 'mig' mig_path = 'mig'
command = [mig_path,
# yapf: disable
command = [
mig_path,
'-user', interface.user_c, '-user', interface.user_c,
'-server', interface.server_c, '-server', interface.server_c,
'-header', interface.user_h, '-header', interface.user_h,
'-sheader', interface.server_h, '-sheader', interface.server_h,
] ]
# yapf: enable
if clang_path is not None: if clang_path is not None:
os.environ['MIGCC'] = clang_path os.environ['MIGCC'] = clang_path
@ -48,6 +58,7 @@ def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
command.append(defs) command.append(defs)
subprocess.check_call(command) subprocess.check_call(command)
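A hypothetical call site for generate_interface, using the MigInterface namedtuple defined above; the .defs path and the four output filenames are placeholders, and every keyword argument is optional per the new signature.

interface = MigInterface(user_c='excUser.c', server_c='excServer.c',
                         user_h='exc.h', server_h='excServer.h')
generate_interface('/usr/include/mach/exc.defs', interface,
                   includes=['.'], arch='x86_64')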
def parse_args(args): def parse_args(args):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--clang-path', help='Path to Clang') parser.add_argument('--clang-path', help='Path to Clang')
@ -66,13 +77,15 @@ def parse_args(args):
parser.add_argument('server_h') parser.add_argument('server_h')
return parser.parse_args(args) return parser.parse_args(args)
def main(args): def main(args):
parsed = parse_args(args) parsed = parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c, interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.user_h, parsed.server_h) parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include, generate_interface(parsed.defs, interface, parsed.include, parsed.sdk,
parsed.sdk, parsed.clang_path, parsed.mig_path, parsed.clang_path, parsed.mig_path, parsed.migcom_path,
parsed.migcom_path, parsed.arch) parsed.arch)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))


@ -23,14 +23,17 @@ key = os.path.join(testdata, 'crashpad_util_test_key.pem')
cert = os.path.join(testdata, 'crashpad_util_test_cert.pem') cert = os.path.join(testdata, 'crashpad_util_test_cert.pem')
with open(cert, 'w') as cert_file, open(key, 'w') as key_file: with open(cert, 'w') as cert_file, open(key, 'w') as key_file:
MESSAGE = 'DO NOT EDIT: This file was auto-generated by ' + __file__ + '\n\n' MESSAGE = ('DO NOT EDIT: This file was auto-generated by ' + __file__ +
'\n\n')
cert_file.write(MESSAGE) cert_file.write(MESSAGE)
key_file.write(MESSAGE) key_file.write(MESSAGE)
proc = subprocess.Popen( proc = subprocess.Popen([
['openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', 'openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', '-days',
'-days', '3650', '-newkey', 'rsa:2048', '-keyout', '-'], '3650', '-newkey', 'rsa:2048', '-keyout', '-'
stderr=open(os.devnull, 'w'), stdout=subprocess.PIPE) ],
stderr=open(os.devnull, 'w'),
stdout=subprocess.PIPE)
contents = proc.communicate()[0] contents = proc.communicate()[0]
dest = sys.stderr dest = sys.stderr