Add .style.yapf and reformat according to yapf, using “google” style

% yapf --in-place $(git ls-files **/*.py)
% yapf --version
yapf 0.30.0

Note that this is not using the “chromium” yapf style because Chromium
is moving to PEP-8.
https://groups.google.com/a/chromium.org/d/topic/chromium-dev/RcJgJdkNIdg
yapf 0.30.0 no longer recognizes “chromium” as a style option.
22ef70f3c4
Since this is a mass reformatting, it might as well move things all the
way into the future all at once.

This uses the “google” style, which is a superset of “pep8”.

Change-Id: Ifa37371079ea1859e4afe8e31d2eef2cfd7af384
Reviewed-on: https://chromium-review.googlesource.com/c/crashpad/crashpad/+/2165637
Commit-Queue: Mark Mentovai <mark@chromium.org>
Reviewed-by: Scott Graham <scottmg@chromium.org>
This commit is contained in:
Mark Mentovai 2020-04-27 09:43:35 -04:00 committed by Commit Bot
parent 29b1688c11
commit a5a1c3b07f
16 changed files with 1687 additions and 1615 deletions

16
.style.yapf Normal file
View File

@ -0,0 +1,16 @@
# Copyright 2020 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
[style]
based_on_style = google

View File

@ -21,10 +21,10 @@ import sys
def ChooseDependencyPath(local_path, external_path):
"""Chooses between a dependency located at local path and an external path.
The local path, used in standalone builds, is preferred. If it is not present
but the external path is, the external path will be used. If neither path is
present, the local path will be used, so that error messages uniformly refer
to the local path.
The local path, used in standalone builds, is preferred. If it is not
present but the external path is, the external path will be used. If neither
path is present, the local path will be used, so that error messages
uniformly refer to the local path.
Args:
local_path: The preferred local path to use for a standalone build.
@ -41,14 +41,14 @@ def ChooseDependencyPath(local_path, external_path):
script_dir = os.path.dirname(__file__)
crashpad_dir = (os.path.dirname(script_dir) if script_dir not in ('', os.curdir)
else os.pardir)
crashpad_dir = (os.path.dirname(script_dir)
if script_dir not in ('', os.curdir) else os.pardir)
sys.path.insert(0,
ChooseDependencyPath(os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp',
'pylib'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp',
'pylib'))[1])
sys.path.insert(
0,
ChooseDependencyPath(
os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 'pylib'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', 'pylib'))[1])
import gyp
@ -75,8 +75,8 @@ def main(args):
return result
if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may be
# set during a cross build, such as a cross build for Android.
# Check to make sure that no target_arch was specified. target_arch may
# be set during a cross build, such as a cross build for Android.
has_target_arch = False
for arg_index in range(0, len(args)):
arg = args[arg_index]
@ -88,7 +88,8 @@ def main(args):
if not has_target_arch:
# Also generate the x86 build.
result = gyp.main(args + ['-D', 'target_arch=ia32', '-G', 'config=Debug'])
result = gyp.main(args +
['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0:
return result
result = gyp.main(

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved.
#
@ -33,12 +32,8 @@ def main(args):
parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain')
(parsed, extra_command_line_args) = parser.parse_known_args(args)
ndk_bin_dir = os.path.join(parsed.ndk,
'toolchains',
'llvm',
'prebuilt',
'linux-x86_64',
'bin')
ndk_bin_dir = os.path.join(parsed.ndk, 'toolchains', 'llvm', 'prebuilt',
'linux-x86_64', 'bin')
if not os.path.exists(ndk_bin_dir):
parser.error("missing toolchain")
@ -51,25 +46,23 @@ def main(args):
clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level
os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang++')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir,
clang_prefix + '-clang++')
extra_args = ['-D', 'android_api_level=' + parsed.api_level]
# ARM only includes 'v7a' in the tool prefix for clang
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm'
else ARCH_TO_ARCH_TRIPLET[parsed.arch])
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' else
ARCH_TO_ARCH_TRIPLET[parsed.arch])
for tool in ('ar', 'nm', 'readelf'):
os.environ['%s_target' % tool.upper()] = (
os.path.join(ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
os.environ['%s_target' % tool.upper()] = (os.path.join(
ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
return gyp_crashpad.main(
['-D', 'OS=android',
'-D', 'target_arch=%s' % parsed.arch,
'-D', 'clang=1',
'-f', 'ninja-android'] +
extra_args +
extra_command_line_args)
return gyp_crashpad.main([
'-D', 'OS=android', '-D',
'target_arch=%s' % parsed.arch, '-D', 'clang=1', '-f', 'ninja-android'
] + extra_args + extra_command_line_args)
if __name__ == '__main__':

View File

@ -23,7 +23,6 @@ import subprocess
import sys
import urllib2
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sysroot revision from:
@ -33,11 +32,12 @@ PATH = 'chrome-linux-sysroot/toolchain'
REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66'
FILENAME = 'debian_stretch_amd64_sysroot.tar.xz'
def main():
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
sysroot = os.path.join(SCRIPT_DIR, os.pardir,
'third_party', 'linux', 'sysroot')
sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux',
'sysroot')
stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp):

View File

@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert GN Xcode projects to platform and configuration independent targets.
GN generates Xcode projects that build one configuration only. However, typical
@ -50,8 +49,8 @@ class XcodeProject(object):
str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter)
new_id = hashlib.sha1(str_id).hexdigest()[:24].upper()
# Make sure ID is unique. It's possible there could be an id conflict
# since this is run after GN runs.
# Make sure ID is unique. It's possible there could be an id
# conflict since this is run after GN runs.
if new_id not in self.objects:
self.objects[new_id] = obj
return new_id
@ -62,15 +61,15 @@ def CopyFileIfChanged(source_path, target_path):
target_dir = os.path.dirname(target_path)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if not os.path.exists(target_path) or \
not filecmp.cmp(source_path, target_path):
if (not os.path.exists(target_path) or
not filecmp.cmp(source_path, target_path)):
shutil.copyfile(source_path, target_path)
def LoadXcodeProjectAsJSON(path):
"""Return Xcode project at |path| as a JSON string."""
return subprocess.check_output([
'plutil', '-convert', 'json', '-o', '-', path])
return subprocess.check_output(
['plutil', '-convert', 'json', '-o', '-', path])
def WriteXcodeProject(output_path, json_string):
@ -110,10 +109,12 @@ def UpdateProductsProject(file_input, file_output, configurations, root_dir):
value['defaultConfigurationName'] = configurations[0]
objects_to_remove.extend(value['buildConfigurations'])
build_config_template = project.objects[value['buildConfigurations'][0]]
build_config_template['buildSettings']['CONFIGURATION_BUILD_DIR'] = \
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)'
build_config_template['buildSettings']['CODE_SIGN_IDENTITY'] = ''
build_config_template = project.objects[value['buildConfigurations']
[0]]
build_settings = build_config_template['buildSettings']
build_settings['CONFIGURATION_BUILD_DIR'] = (
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)')
build_settings['CODE_SIGN_IDENTITY'] = ''
value['buildConfigurations'] = []
for configuration in configurations:
@ -164,16 +165,16 @@ def GetFolderForPath(project, rootObject, path):
children = root['children']
new_root = None
for child in children:
if objects[child]['isa'] == 'PBXGroup' and \
objects[child]['name'] == folder:
if (objects[child]['isa'] == 'PBXGroup' and
objects[child]['name'] == folder):
new_root = objects[child]
break
if not new_root:
# If the folder isn't found we could just cram it into the leaf existing
# folder, but that leads to folders with tons of README.md inside.
# If the folder isn't found we could just cram it into the leaf
# existing folder, but that leads to folders with tons of README.md
# inside.
new_group = {
"children": [
],
"children": [],
"isa": "PBXGroup",
"name": folder,
"sourceTree": "<group>"
@ -188,7 +189,8 @@ def GetFolderForPath(project, rootObject, path):
def DisableNewBuildSystem(output_dir):
"""Disables the new build system due to crbug.com/852522 """
xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace',
'xcshareddata', 'WorkspaceSettings.xcsettings')
'xcshareddata',
'WorkspaceSettings.xcsettings')
if os.path.isfile(xcwspacesharedsettings):
json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings))
else:
@ -198,16 +200,18 @@ def DisableNewBuildSystem(output_dir):
def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
'''Tweak the Xcode project generated by gn to support multiple configurations.
'''Tweak the Xcode project generated by gn to support multiple
configurations.
The Xcode projects generated by "gn gen --ide" only supports a single
platform and configuration (as the platform and configuration are set
per output directory). This method takes as input such projects and
add support for multiple configurations and platforms (to allow devs
to select them in Xcode).
platform and configuration (as the platform and configuration are set per
output directory). This method takes as input such projects and add support
for multiple configurations and platforms (to allow devs to select them in
Xcode).
Args:
input_dir: directory containing the XCode projects created by "gn gen --ide"
input_dir: directory containing the XCode projects created by "gn gen
--ide"
output_dir: directory where the tweaked Xcode projects will be saved
configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at
@ -217,7 +221,8 @@ def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
products = os.path.join('products.xcodeproj', 'project.pbxproj')
product_input = os.path.join(input_dir, products)
product_output = os.path.join(output_dir, products)
UpdateProductsProject(product_input, product_output, configurations, root_dir)
UpdateProductsProject(product_input, product_output, configurations,
root_dir)
# Copy all workspace.
xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata')
@ -229,27 +234,29 @@ def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once gn
# has rolled past it.
# here to support both old and new projects setup and will be removed once
# gn has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources))
def Main(args):
parser = argparse.ArgumentParser(
description='Convert GN Xcode projects for iOS.')
parser.add_argument(
'input',
help='directory containing [product|all] Xcode projects.')
'input', help='directory containing [product|all] Xcode projects.')
parser.add_argument(
'output',
help='directory where to generate the iOS configuration.')
parser.add_argument(
'--add-config', dest='configurations', default=[], action='append',
'output', help='directory where to generate the iOS configuration.')
parser.add_argument('--add-config',
dest='configurations',
default=[],
action='append',
help='configuration to add to the Xcode project')
parser.add_argument(
'--root', type=os.path.abspath, required=True,
parser.add_argument('--root',
type=os.path.abspath,
required=True,
help='root directory of the project')
args = parser.parse_args(args)
@ -264,10 +271,13 @@ def Main(args):
return 1
if not args.configurations:
sys.stderr.write('At least one configuration required, see --add-config.\n')
sys.stderr.write(
'At least one configuration required, see --add-config.\n')
return 1
ConvertGnXcodeProject(args.root, args.input, args.output, args.configurations)
ConvertGnXcodeProject(args.root, args.input, args.output,
args.configurations)
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))

View File

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import convert_gn_xcodeproj
import errno
@ -31,24 +30,22 @@ try:
except ImportError:
import StringIO
SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator')
SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage')
class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
'''A .ini file parser that supports strings and environment variables.'''
ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)')
def values(self, section):
return map(
lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
return map(lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
ConfigParser.SafeConfigParser.items(self, section))
def getstring(self, section, option):
return self._UnquoteString(self._ExpandEnvVar(self.get(section, option)))
return self._UnquoteString(self._ExpandEnvVar(self.get(section,
option)))
def _UnquoteString(self, string):
if not string or string[0] != '"' or string[-1] != '"':
@ -63,9 +60,10 @@ class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix
class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default files.'''
class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default
files.'''
FAT_BUILD_DEFAULT_ARCH = '64-bit'
@ -106,7 +104,8 @@ class GnGenerator(object):
if build_arch == 'fat':
target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH]
args.append(('target_cpu', target_cpu))
args.append(('additional_target_cpus',
args.append(
('additional_target_cpus',
[cpu for cpu in cpu_values.itervalues() if cpu != target_cpu]))
else:
args.append(('target_cpu', cpu_values[build_arch]))
@ -116,12 +115,10 @@ class GnGenerator(object):
args.extend(self._settings.items('gn_args'))
return args
def Generate(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
@ -131,23 +128,20 @@ class GnGenerator(object):
def CreateGnRules(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
buf = StringIO.StringIO()
gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
self.WriteBuildNinja(buf, gn_command)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja'),
WriteToFileIfChanged(os.path.join(out_path, 'build.ninja'),
buf.getvalue(),
overwrite=False)
buf = StringIO.StringIO()
self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja.d'),
WriteToFileIfChanged(os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(),
overwrite=False)
@ -167,7 +161,8 @@ class GnGenerator(object):
if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list):
stream.write('%s = [%s' % (name, '\n' if len(value) > 1 else ''))
stream.write('%s = [%s' %
(name, '\n' if len(value) > 1 else ''))
if len(value) == 1:
prefix = ' '
suffix = ' '
@ -176,7 +171,8 @@ class GnGenerator(object):
suffix = ',\n'
for item in value:
if isinstance(item, bool):
stream.write('%s%s%s' % (prefix, str(item).lower(), suffix))
stream.write('%s%s%s' %
(prefix, str(item).lower(), suffix))
else:
stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n')
@ -211,8 +207,8 @@ class GnGenerator(object):
# else:
# gn_command.append('--check')
gn_command.append('gen')
gn_command.append('//%s' %
os.path.relpath(os.path.abspath(out_path), os.path.abspath(src_path)))
gn_command.append('//%s' % os.path.relpath(os.path.abspath(out_path),
os.path.abspath(src_path)))
return gn_command
@ -262,15 +258,13 @@ def GenerateXcodeProject(gn_path, root_dir, out_dir, settings):
'''Convert GN generated Xcode project into multi-configuration Xcode
project.'''
temp_path = tempfile.mkdtemp(prefix=os.path.abspath(
os.path.join(out_dir, '_temp')))
temp_path = tempfile.mkdtemp(
prefix=os.path.abspath(os.path.join(out_dir, '_temp')))
try:
generator = GnGenerator(settings, 'Debug', 'iphonesimulator')
generator.Generate(gn_path, root_dir, temp_path)
convert_gn_xcodeproj.ConvertGnXcodeProject(
root_dir,
os.path.join(temp_path),
os.path.join(out_dir, 'build'),
root_dir, os.path.join(temp_path), os.path.join(out_dir, 'build'),
SUPPORTED_CONFIGS)
finally:
if os.path.exists(temp_path):
@ -287,16 +281,20 @@ def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings):
def Main(args):
default_root = os.path.normpath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir))
default_root = os.path.normpath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
parser = argparse.ArgumentParser(
description='Generate build directories for use with gn.')
parser.add_argument(
'root', default=default_root, nargs='?',
'root',
default=default_root,
nargs='?',
help='root directory where to generate multiple out configurations')
parser.add_argument(
'--import', action='append', dest='import_rules', default=[],
parser.add_argument('--import',
action='append',
dest='import_rules',
default=[],
help='path to file defining default gn variables')
args = parser.parse_args(args)

View File

@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used
for running tests without a device.
"""
@ -53,12 +52,15 @@ def _CheckForTun():
"""Check for networking. TODO(scottmg): Currently, this is Linux-specific.
"""
returncode = subprocess.call(
['tunctl', '-b', '-u', getpass.getuser(), '-t', 'qemu'],
stdout=DEVNULL, stderr=DEVNULL)
['tunctl', '-b', '-u',
getpass.getuser(), '-t', 'qemu'],
stdout=DEVNULL,
stderr=DEVNULL)
if returncode != 0:
print('To use QEMU with networking on Linux, configure TUN/TAP. See:',
file=sys.stderr)
print(' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
print(
' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
file=sys.stderr)
return 2
return 0
@ -78,10 +80,13 @@ def _Start(pid_file):
initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin')
mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3))
instance_name = 'crashpad_qemu_' + \
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8))
instance_name = (
'crashpad_qemu_' +
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)))
# These arguments are from the Fuchsia repo in zircon/scripts/run-zircon.
# yapf: disable
popen = subprocess.Popen([
qemu_path,
'-m', '2048',
@ -97,15 +102,21 @@ def _Start(pid_file):
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name,
], stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL)
],
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL)
# yapf: enable
with open(pid_file, 'wb') as f:
f.write('%d\n' % popen.pid)
for i in range(10):
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', 'netaddr')
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools',
'netaddr')
if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull), stderr=open(os.devnull)) == 0:
stdout=open(os.devnull),
stderr=open(os.devnull)) == 0:
break
time.sleep(.5)
else:

View File

@ -34,8 +34,8 @@ IS_WINDOWS_HOST = sys.platform.startswith('win')
def _FindGNFromBinaryDir(binary_dir):
"""Attempts to determine the path to a GN binary used to generate the build
files in the given binary_dir. This is necessary because `gn` might not be in
the path or might be in a non-standard location, particularly on build
files in the given binary_dir. This is necessary because `gn` might not be
in the path or might be in a non-standard location, particularly on build
machines."""
build_ninja = os.path.join(binary_dir, 'build.ninja')
@ -58,24 +58,27 @@ def _FindGNFromBinaryDir(binary_dir):
if line.startswith(' command = '):
gn_command_line_parts = line.strip().split(' ')
if len(gn_command_line_parts) > 2:
return os.path.join(binary_dir, gn_command_line_parts[2])
return os.path.join(binary_dir,
gn_command_line_parts[2])
return None
def _BinaryDirTargetOS(binary_dir):
"""Returns the apparent target OS of binary_dir, or None if none appear to be
explicitly specified."""
"""Returns the apparent target OS of binary_dir, or None if none appear to
be explicitly specified."""
gn_path = _FindGNFromBinaryDir(binary_dir)
if gn_path:
# Look for a GN “target_os”.
popen = subprocess.Popen([gn_path, '--root=' + CRASHPAD_DIR,
'args', binary_dir,
'--list=target_os', '--short'],
popen = subprocess.Popen([
gn_path, '--root=' + CRASHPAD_DIR, 'args', binary_dir,
'--list=target_os', '--short'
],
shell=IS_WINDOWS_HOST,
stdout=subprocess.PIPE, stderr=open(os.devnull))
stdout=subprocess.PIPE,
stderr=open(os.devnull))
value = popen.communicate()[0]
if popen.returncode == 0:
match = re.match('target_os = "(.*)"$', value.decode('utf-8'))
@ -88,8 +91,7 @@ def _BinaryDirTargetOS(binary_dir):
if os.path.exists(build_ninja_path):
with open(build_ninja_path) as build_ninja_file:
build_ninja_content = build_ninja_file.read()
match = re.search('-linux-android(eabi)?-ar$',
build_ninja_content,
match = re.search('-linux-android(eabi)?-ar$', build_ninja_content,
re.MULTILINE)
if match:
return 'android'
@ -119,8 +121,8 @@ def _EnableVTProcessingOnWindowsConsole():
try:
# From <wincon.h>. This would be
# win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but it’s too new to be
# defined there.
# win32console.ENABLE_VIRTUAL_TERMINAL_PROCESSING, but it’s too new to
# be defined there.
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
stdout_console.SetConsoleMode(console_mode |
@ -148,8 +150,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
return
def _adb(*args):
# Flush all of this script’s own buffered stdout output before running adb,
# which will likely produce its own output on stdout.
# Flush all of this script’s own buffered stdout output before running
# adb, which will likely produce its own output on stdout.
sys.stdout.flush()
adb_command = ['adb', '-s', android_device]
@ -162,33 +164,34 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
_adb('push', *args)
def _adb_shell(command_args, env={}):
# Build a command to execute via “sh -c” instead of invoking it directly.
# Here’s why:
# Build a command to execute via “sh -c” instead of invoking it
# directly. Here’s why:
#
# /system/bin/env isn’t normally present prior to Android 6.0 (M), where
# toybox was introduced (Android platform/manifest 9a2c01e8450b). Instead,
# set environment variables by using the shell’s internal “export” command.
# toybox was introduced (Android platform/manifest 9a2c01e8450b).
# Instead, set environment variables by using the shell’s internal
# “export” command.
#
# adbd prior to Android 7.0 (N), and the adb client prior to SDK
# platform-tools version 24, don’t know how to communicate a shell command’s
# exit status. This was added in Android platform/system/core 606835ae5c4b).
# With older adb servers and clients, adb will “exit 0” indicating success
# even if the command failed on the device. This makes
# subprocess.check_call() semantics difficult to implement directly. As a
# workaround, have the device send the command’s exit status over stdout and
# pick it back up in this function.
# platform-tools version 24, don’t know how to communicate a shell
# command’s exit status. This was added in Android platform/system/core
# 606835ae5c4b). With older adb servers and clients, adb will “exit 0”
# indicating success even if the command failed on the device. This
# makes subprocess.check_call() semantics difficult to implement
# directly. As a workaround, have the device send the command’s exit
# status over stdout and pick it back up in this function.
#
# Both workarounds are implemented by giving the device a simple script,
# which adbd will run as an “sh -c” argument.
adb_command = ['adb', '-s', android_device, 'shell']
script_commands = []
for k, v in env.items():
script_commands.append('export %s=%s' % (pipes.quote(k), pipes.quote(v)))
script_commands.append('export %s=%s' %
(pipes.quote(k), pipes.quote(v)))
script_commands.extend([
' '.join(pipes.quote(x) for x in command_args),
'status=${?}',
'echo "status=${status}"',
'exit ${status}'])
' '.join(pipes.quote(x) for x in command_args), 'status=${?}',
'echo "status=${status}"', 'exit ${status}'
])
adb_command.append('; '.join(script_commands))
child = subprocess.Popen(adb_command,
shell=IS_WINDOWS_HOST,
@ -212,9 +215,9 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
print(data, end='')
if final_line is None:
# Maybe there was some stderr output after the end of stdout. Old versions
# of adb, prior to when the exit status could be communicated, smush the
# two together.
# Maybe there was some stderr output after the end of stdout. Old
# versions of adb, prior to when the exit status could be
# communicated, smush the two together.
raise subprocess.CalledProcessError(-1, adb_command)
status = int(FINAL_LINE_RE.match(final_line.rstrip()).group(1))
if status != 0:
@ -222,22 +225,23 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
child.wait()
if child.returncode != 0:
raise subprocess.CalledProcessError(subprocess.returncode, adb_command)
raise subprocess.CalledProcessError(subprocess.returncode,
adb_command)
# /system/bin/mktemp isn’t normally present prior to Android 6.0 (M), where
# toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it with
# a host-generated name. This won’t retry if the name is in use, but with 122
# bits of randomness, it should be OK. This uses “mkdir” instead of “mkdir -p”
# because the latter will not indicate failure if the directory already
# exists.
# toybox was introduced (Android platform/manifest 9a2c01e8450b). Fake it
# with a host-generated name. This won’t retry if the name is in use, but
# with 122 bits of randomness, it should be OK. This uses “mkdir” instead of
# “mkdir -p” because the latter will not indicate failure if the directory
# already exists.
device_temp_dir = '/data/local/tmp/%s.%s' % (test, uuid.uuid4().hex)
_adb_shell(['mkdir', device_temp_dir])
try:
# Specify test dependencies that must be pushed to the device. This could be
# determined automatically in a GN build, following the example used for
# Fuchsia. Since nothing like that exists for GYP, hard-code it for
# supported tests.
# Specify test dependencies that must be pushed to the device. This
# could be determined automatically in a GN build, following the example
# used for Fuchsia. Since nothing like that exists for GYP, hard-code it
# for supported tests.
test_build_artifacts = [test, 'crashpad_handler']
test_data = ['test/test_paths_test_data_root.txt']
@ -251,12 +255,13 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
device_out_dir = posixpath.join(device_temp_dir, 'out')
device_mkdirs = [device_out_dir]
for source_path in test_data:
# A trailing slash could reasonably mean to copy an entire directory, but
# will interfere with what’s needed from the path split. All parent
# directories of any source_path need to be be represented in
# device_mkdirs, but it’s important that no source_path itself wind up in
# device_mkdirs, even if source_path names a directory, because that would
# cause the “adb push” of the directory below to behave incorrectly.
# A trailing slash could reasonably mean to copy an entire
# directory, but will interfere with what’s needed from the path
# split. All parent directories of any source_path need to be be
# represented in device_mkdirs, but it’s important that no
# source_path itself wind up in device_mkdirs, even if source_path
# names a directory, because that would cause the “adb push” of the
# directory below to behave incorrectly.
if source_path.endswith(posixpath.sep):
source_path = source_path[:-1]
@ -271,7 +276,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
# Push the test binary and any other build output to the device.
local_test_build_artifacts = []
for artifact in test_build_artifacts:
local_test_build_artifacts.append(os.path.join(binary_dir, artifact))
local_test_build_artifacts.append(os.path.join(
binary_dir, artifact))
_adb_push(local_test_build_artifacts, device_out_dir)
# Push test data to the device.
@ -279,20 +285,21 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
_adb_push([os.path.join(CRASHPAD_DIR, source_path)],
posixpath.join(device_temp_dir, source_path))
# Run the test on the device. Pass the test data root in the environment.
# Run the test on the device. Pass the test data root in the
# environment.
#
# Because the test will not run with its standard output attached to a
# pseudo-terminal device, gtest will not normally enable colored output, so
# mimic gtest’s own logic for deciding whether to enable color by checking
# this script’s own standard output connection. The whitelist of TERM values
# comes from gtest googletest/src/gtest.cc
# pseudo-terminal device, gtest will not normally enable colored output,
# so mimic gtest’s own logic for deciding whether to enable color by
# checking this script’s own standard output connection. The whitelist
# of TERM values comes from gtest googletest/src/gtest.cc
# testing::internal::ShouldUseColor().
env = {'CRASHPAD_TEST_DATA_ROOT': device_temp_dir}
gtest_color = os.environ.get('GTEST_COLOR')
if gtest_color in ('auto', None):
if (sys.stdout.isatty() and
(os.environ.get('TERM') in
('xterm', 'xterm-color', 'xterm-256color', 'screen',
(os.environ.get('TERM')
in ('xterm', 'xterm-color', 'xterm-256color', 'screen',
'screen-256color', 'tmux', 'tmux-256color', 'rxvt-unicode',
'rxvt-unicode-256color', 'linux', 'cygwin') or
(IS_WINDOWS_HOST and _EnableVTProcessingOnWindowsConsole()))):
@ -300,7 +307,8 @@ def _RunOnAndroidTarget(binary_dir, test, android_device, extra_command_line):
else:
gtest_color = 'no'
env['GTEST_COLOR'] = gtest_color
_adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line, env)
_adb_shell([posixpath.join(device_out_dir, test)] + extra_command_line,
env)
finally:
_adb_shell(['rm', '-rf', device_temp_dir])
@ -316,9 +324,10 @@ def _GenerateFuchsiaRuntimeDepsFiles(binary_dir, tests):
with open(targets_file, 'wb') as f:
f.write('//:' + '\n//:'.join(tests) + '\n')
gn_path = _FindGNFromBinaryDir(binary_dir)
subprocess.check_call(
[gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir,
'--runtime-deps-list-file=' + targets_file])
subprocess.check_call([
gn_path, '--root=' + CRASHPAD_DIR, 'gen', binary_dir,
'--runtime-deps-list-file=' + targets_file
])
# Run again so that --runtime-deps-list-file isn't in the regen rule. See
# https://crbug.com/814816.
@ -358,7 +367,9 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
# Run loglistener and filter the output to know when the test is done.
loglistener_process = subprocess.Popen(
[os.path.join(sdk_root, 'tools', 'loglistener'), device_name],
stdout=subprocess.PIPE, stdin=open(os.devnull), stderr=open(os.devnull))
stdout=subprocess.PIPE,
stdin=open(os.devnull),
stderr=open(os.devnull))
runtime_deps_file = os.path.join(binary_dir, test + '.runtime_deps')
with open(runtime_deps_file, 'rb') as f:
@ -369,8 +380,8 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
by using pipes.quote(), and then each command is chained by shell ';'.
"""
netruncmd_path = os.path.join(sdk_root, 'tools', 'netruncmd')
final_args = ' ; '.join(' '.join(pipes.quote(x) for x in command)
for command in args)
final_args = ' ; '.join(
' '.join(pipes.quote(x) for x in command) for command in args)
subprocess.check_call([netruncmd_path, device_name, final_args])
try:
@ -380,35 +391,38 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
staging_root = test_root + '/pkg'
# Make a staging directory tree on the target.
directories_to_create = [tmp_root,
directories_to_create = [
tmp_root,
'%s/bin' % staging_root,
'%s/assets' % staging_root]
'%s/assets' % staging_root
]
netruncmd(['mkdir', '-p'] + directories_to_create)
def netcp(local_path):
"""Uses `netcp` to copy a file or directory to the device. Files located
inside the build dir are stored to /pkg/bin, otherwise to /pkg/assets.
.so files are stored somewhere completely different, into /boot/lib (!).
This is because the loader service does not yet correctly handle the
namespace in which the caller is being run, and so can only load .so files
from a couple hardcoded locations, the only writable one of which is
/boot/lib, so we copy all .so files there. This bug is filed upstream as
ZX-1619.
"""Uses `netcp` to copy a file or directory to the device. Files
located inside the build dir are stored to /pkg/bin, otherwise to
/pkg/assets. .so files are stored somewhere completely different,
into /boot/lib (!). This is because the loader service does not yet
correctly handle the namespace in which the caller is being run, and
so can only load .so files from a couple hardcoded locations, the
only writable one of which is /boot/lib, so we copy all .so files
there. This bug is filed upstream as ZX-1619.
"""
in_binary_dir = local_path.startswith(binary_dir + '/')
if in_binary_dir:
if local_path.endswith('.so'):
target_path = os.path.join(
'/boot/lib', local_path[len(binary_dir)+1:])
target_path = os.path.join('/boot/lib',
local_path[len(binary_dir) + 1:])
else:
target_path = os.path.join(
staging_root, 'bin', local_path[len(binary_dir)+1:])
target_path = os.path.join(staging_root, 'bin',
local_path[len(binary_dir) + 1:])
else:
relative_path = os.path.relpath(local_path, CRASHPAD_DIR)
target_path = os.path.join(staging_root, 'assets', relative_path)
target_path = os.path.join(staging_root, 'assets',
relative_path)
netcp_path = os.path.join(sdk_root, 'tools', 'netcp')
subprocess.check_call([netcp_path, local_path,
device_name + ':' + target_path],
subprocess.check_call(
[netcp_path, local_path, device_name + ':' + target_path],
stderr=open(os.devnull))
# Copy runtime deps into the staging tree.
@ -423,13 +437,14 @@ def _RunOnFuchsiaTarget(binary_dir, test, device_name, extra_command_line):
done_message = 'TERMINATED: ' + unique_id
namespace_command = [
'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root, '/svc=/svc',
'--replace-child-argv0=/pkg/bin/' + test, '--',
staging_root + '/bin/' + test] + extra_command_line
'namespace', '/pkg=' + staging_root, '/tmp=' + tmp_root,
'/svc=/svc', '--replace-child-argv0=/pkg/bin/' + test, '--',
staging_root + '/bin/' + test
] + extra_command_line
netruncmd(namespace_command, ['echo', done_message])
success = _HandleOutputFromFuchsiaLogListener(
loglistener_process, done_message)
success = _HandleOutputFromFuchsiaLogListener(loglistener_process,
done_message)
if not success:
raise subprocess.CalledProcessError(1, test)
finally:
@ -448,8 +463,8 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
'TestHostPath': os.path.join(test_path, test + '.app'),
'TestingEnvironmentVariables': {
'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:',
'DYLD_INSERT_LIBRARIES': (
'__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'DYLD_INSERT_LIBRARIES':
('__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'usr/lib/libXCTestBundleInject.dylib'),
'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator',
'IDEiPhoneInternalTestBundleName': test + '.app',
@ -464,7 +479,8 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
test_path = os.path.join(CRASHPAD_DIR, binary_dir)
runner_path = os.path.join(test_path, test + '_module-Runner.app')
bundle_path = os.path.join(runner_path, 'PlugIns', test + '_module.xctest')
bundle_path = os.path.join(runner_path, 'PlugIns',
test + '_module.xctest')
target_app_path = os.path.join(test_path, test + '.app')
module_data = {
'IsUITestBundle': True,
@ -472,11 +488,13 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
'TestBundlePath': bundle_path,
'TestHostPath': runner_path,
'UITargetAppPath': target_app_path,
'DependentProductPaths': [ bundle_path, runner_path, target_app_path ],
'DependentProductPaths': [
bundle_path, runner_path, target_app_path
],
'TestingEnvironmentVariables': {
'DYLD_FRAMEWORK_PATH': '__TESTROOT__/Debug-iphonesimulator:',
'DYLD_INSERT_LIBRARIES': (
'__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'DYLD_INSERT_LIBRARIES':
('__PLATFORMS__/iPhoneSimulator.platform/Developer/'
'usr/lib/libXCTestBundleInject.dylib'),
'DYLD_LIBRARY_PATH': '__TESTROOT__/Debug-iphonesimulator',
'XCInjectBundleInto': '__TESTHOST__/' + test + '_module-Runner',
@ -494,9 +512,11 @@ def _RunOnIOSTarget(binary_dir, test, is_xcuitest=False):
else:
plistlib.writePlist(xctest(binary_dir, test), xctestrun_path)
subprocess.check_call(['xcodebuild', 'test-without-building',
'-xctestrun', xctestrun_path, '-destination',
'platform=iOS Simulator,name=iPhone 8'])
subprocess.check_call([
'xcodebuild', 'test-without-building', '-xctestrun', xctestrun_path,
'-destination', 'platform=iOS Simulator,name=iPhone 8'
])
# This script is primarily used from the waterfall so that the list of tests
# that are run is maintained in-tree, rather than in a separate infrastructure
@ -510,11 +530,11 @@ def main(args):
args = parser.parse_args()
# Tell 64-bit Windows tests where to find 32-bit test executables, for
# cross-bitted testing. This relies on the fact that the GYP build by default
# uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64 for the
# 64-bit build. This is not a universally valid assumption, and if its not
# met, 64-bit tests that require 32-bit build output will disable themselves
# dynamically.
# cross-bitted testing. This relies on the fact that the GYP build by
# default uses {Debug,Release} for the 32-bit build and {Debug,Release}_x64
# for the 64-bit build. This is not a universally valid assumption, and if
# its not met, 64-bit tests that require 32-bit build output will disable
# themselves dynamically.
if (sys.platform == 'win32' and args.binary_dir.endswith('_x64') and
'CRASHPAD_TEST_32_BIT_OUTPUT' not in os.environ):
binary_dir_32 = args.binary_dir[:-4]
@ -544,13 +564,13 @@ def main(args):
for line in adb_devices.splitlines():
line = line.decode('utf-8')
if (line == 'List of devices attached' or
re.match('^\* daemon .+ \*$', line) or
line == ''):
re.match('^\* daemon .+ \*$', line) or line == ''):
continue
(device, ignore) = line.split('\t')
devices.append(device)
if len(devices) != 1:
print("Please set ANDROID_DEVICE to your device's id", file=sys.stderr)
print("Please set ANDROID_DEVICE to your device's id",
file=sys.stderr)
return 2
android_device = devices[0]
print('Using autodetected Android device:', android_device)
@ -558,7 +578,8 @@ def main(args):
zircon_nodename = os.environ.get('ZIRCON_NODENAME')
if not zircon_nodename:
netls = os.path.join(_GetFuchsiaSDKRoot(), 'tools', 'netls')
popen = subprocess.Popen([netls, '--nowait'], stdout=subprocess.PIPE)
popen = subprocess.Popen([netls, '--nowait'],
stdout=subprocess.PIPE)
devices = popen.communicate()[0].splitlines()
if popen.returncode != 0 or len(devices) != 1:
print("Please set ZIRCON_NODENAME to your device's hostname",
@ -585,8 +606,10 @@ def main(args):
print(test)
print('-' * 80)
if test.endswith('.py'):
subprocess.check_call(
[sys.executable, os.path.join(CRASHPAD_DIR, test), args.binary_dir])
subprocess.check_call([
sys.executable,
os.path.join(CRASHPAD_DIR, test), args.binary_dir
])
else:
extra_command_line = []
if args.gtest_filter:
@ -598,7 +621,8 @@ def main(args):
_RunOnFuchsiaTarget(args.binary_dir, test, zircon_nodename,
extra_command_line)
elif is_ios:
_RunOnIOSTarget(args.binary_dir, test,
_RunOnIOSTarget(args.binary_dir,
test,
is_xcuitest=test.startswith('ios'))
else:
subprocess.check_call([os.path.join(args.binary_dir, test)] +

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved.
#

View File

@ -12,8 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
def CheckChangeOnUpload(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api)
return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api)
return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)

View File

@ -29,7 +29,6 @@ import win32con
import win32pipe
import winerror
g_temp_dirs = []
g_had_failures = False
@ -48,10 +47,12 @@ def CleanUpTempDirs():
def FindInstalledWindowsApplication(app_path):
search_paths = [os.getenv('PROGRAMFILES(X86)'),
search_paths = [
os.getenv('PROGRAMFILES(X86)'),
os.getenv('PROGRAMFILES'),
os.getenv('PROGRAMW6432'),
os.getenv('LOCALAPPDATA')]
os.getenv('LOCALAPPDATA')
]
search_paths += os.getenv('PATH', '').split(os.pathsep)
for search_path in search_paths:
@ -77,7 +78,8 @@ def GetCdbPath():
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
'Debugging Tools For Windows (x64)',
'Debugging Tools For Windows (x86)',
'Debugging Tools For Windows',)
'Debugging Tools For Windows',
)
for possible_path in possible_paths:
app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path)
@ -87,17 +89,17 @@ def GetCdbPath():
def NamedPipeExistsAndReady(pipe_name):
"""Returns False if pipe_name does not exist. If pipe_name does exist, blocks
until the pipe is ready to service clients, and then returns True.
"""Returns False if pipe_name does not exist. If pipe_name does exist,
blocks until the pipe is ready to service clients, and then returns True.
This is used as a drop-in replacement for os.path.exists() and os.access() to
test for the pipe's existence. Both of those calls tickle the pipe in a way
that appears to the server to be a client connecting, triggering error
This is used as a drop-in replacement for os.path.exists() and os.access()
to test for the pipe's existence. Both of those calls tickle the pipe in a
way that appears to the server to be a client connecting, triggering error
messages when no data is received.
Although this function only needs to test pipe existence (waiting for
CreateNamedPipe()), it actually winds up testing pipe readiness
(waiting for ConnectNamedPipe()). This is unnecessary but harmless.
CreateNamedPipe()), it actually winds up testing pipe readiness (waiting for
ConnectNamedPipe()). This is unnecessary but harmless.
"""
try:
win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
@ -108,11 +110,11 @@ def NamedPipeExistsAndReady(pipe_name):
return True
def GetDumpFromProgram(
out_dir, pipe_name, executable_name, expect_exit_code, *args):
def GetDumpFromProgram(out_dir, pipe_name, executable_name, expect_exit_code,
*args):
"""Initialize a crash database, and run |executable_name| connecting to a
crash handler. If pipe_name is set, crashpad_handler will be started first. If
pipe_name is empty, the executable is responsible for starting
crash handler. If pipe_name is set, crashpad_handler will be started first.
If pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by
@ -122,15 +124,15 @@ def GetDumpFromProgram(
handler = None
try:
subprocess.check_call(
[os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database])
subprocess.check_call([
os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database
])
if pipe_name is not None:
handler = subprocess.Popen([
os.path.join(out_dir, 'crashpad_handler.com'),
'--pipe-name=' + pipe_name,
'--database=' + test_database
'--pipe-name=' + pipe_name, '--database=' + test_database
])
# Wait until the server is ready.
@ -141,12 +143,13 @@ def GetDumpFromProgram(
printed = True
time.sleep(0.001)
command = [os.path.join(out_dir, executable_name), pipe_name] + list(args)
command = [os.path.join(out_dir, executable_name), pipe_name
] + list(args)
else:
command = ([os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'),
test_database] +
list(args))
command = ([
os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'), test_database
] + list(args))
print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command)
if exit_code != expect_exit_code:
@ -167,37 +170,28 @@ def GetDumpFromProgram(
def GetDumpFromCrashyProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_program.exe',
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_program.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
return GetDumpFromProgram(
out_dir, pipe_name, 'crash_other_program.exe', 0, *args)
return GetDumpFromProgram(out_dir, pipe_name, 'crash_other_program.exe', 0,
*args)
def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_signal.exe',
STATUS_FATAL_APP_EXIT,
*args)
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_signal.exe',
STATUS_FATAL_APP_EXIT, *args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'self_destroying_program.exe',
return GetDumpFromProgram(out_dir, pipe_name, 'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_z7_loader.exe',
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
@ -208,13 +202,10 @@ class CdbRun(object):
"""
def __init__(self, cdb_path, dump_path, command):
# Run a command line that loads the dump, runs the specified cdb command,
# and then quits, and capturing stdout.
self.out = subprocess.check_output([
cdb_path,
'-z', dump_path,
'-c', command + ';q'
])
# Run a command line that loads the dump, runs the specified cdb
# command, and then quits, and capturing stdout.
self.out = subprocess.check_output(
[cdb_path, '-z', dump_path, '-c', command + ';q'])
def Check(self, pattern, message, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
@ -244,16 +235,9 @@ class CdbRun(object):
return None
def RunTests(cdb_path,
dump_path,
start_handler_dump_path,
destroyed_dump_path,
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name):
def RunTests(cdb_path, dump_path, start_handler_dump_path, destroyed_dump_path,
z7_dump_path, other_program_path, other_program_no_exception_path,
sigabrt_main_path, sigabrt_background_path, pipe_name):
"""Runs various tests in sequence. Runs a new cdb instance on the dump for
each block of tests to reduce the chances that output from one command is
confused for output from another.
@ -264,21 +248,24 @@ def RunTests(cdb_path,
# When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
# "`anonymous namespace'" and instead gives the decorated form.
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location')
out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction', 'exception at correct location')
out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it',
'captured exception (using StartHandler())')
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location (using StartHandler())')
out = CdbRun(cdb_path, dump_path, '!peb')
out.Check(r'PEB at', 'found the PEB')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module', 'module list present')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module',
'module list present')
pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct')
@ -288,7 +275,8 @@ def RunTests(cdb_path,
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters')
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"',
'PEB->ProcessParameters.ImagePathName string captured')
out.Check(' DesktopInfo *: '
out.Check(
' DesktopInfo *: '
'_UNICODE_STRING "(?!--- memory read error at address ).*"',
'PEB->ProcessParameters.DesktopInfo string captured')
@ -298,19 +286,23 @@ def RunTests(cdb_path,
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle')
out.Check('LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
out.Check(
'LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
'file specified.', '!gle gets last error')
out.Check('LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
out.Check(
'LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
'file %hs does not exist.', '!gle gets last ntstatus')
if False:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList.
out = CdbRun(cdb_path, dump_path, '!locks')
out.Check(r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
out.Check(
r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
r'g_test_critical_section', 'lock was captured')
if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked', 'lock debug info was captured, and is locked')
out.Check(r'\*\*\* Locked',
'lock debug info was captured, and is locked')
out = CdbRun(cdb_path, dump_path, '!handle')
out.Check(r'\d+ Handles', 'captured handles')
@ -323,21 +315,23 @@ def RunTests(cdb_path,
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror')
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE)
# Check that there is no stack trace in the self-destroyed process. Confirm
# that the top is where we expect it (that's based only on IP), but subsequent
# stack entries will not be available. This confirms that we have a mostly
# valid dump, but that the stack was omitted.
out.Check(r'self_destroying_program!crashpad::`anonymous namespace\'::'
# that the top is where we expect it (that's based only on IP), but
# subsequent stack entries will not be available. This confirms that we have
# a mostly valid dump, but that the stack was omitted.
out.Check(
r'self_destroying_program!crashpad::`anonymous namespace\'::'
r'FreeOwnStackAndBreak.*\nquit:',
'at correct location, no additional stack entries')
# Dump memory pointed to be EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended, and
# then another run to dump the data pointed to by EDI for that thread.
# threads, so first do a run to extract the thread index that's suspended,
# and then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj:
@ -346,36 +340,37 @@ def RunTests(cdb_path,
out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
'data pointed to by registers captured')
# Move up one stack frame after jumping to the exception, and examine memory.
# Move up one stack frame after jumping to the exception, and examine
# memory.
out = CdbRun(cdb_path, dump_path,
'.ecxr; .f+; dd /c100 poi(offset_pointer)-20')
out.Check(r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
out.Check(
r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured')
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and then
# examine the memory at which it points. Both should have been saved.
out = CdbRun(cdb_path, dump_path,
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and
# then examine the memory at which it points. Both should have been saved.
out = CdbRun(
cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8')
out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured')
out = CdbRun(cdb_path, dump_path, '.dumpdebug')
out.Check(r'type \?\?\? \(333333\), size 00001000',
'first user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080',
'second user stream')
out.Check(r'type \?\?\? \(333333\), size 00001000', 'first user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080', 'second user stream')
if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm')
out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module')
# Older versions of cdb display relative to exports for /Z7 modules, newer
# ones just display the offset.
# Older versions of cdb display relative to exports for /Z7 modules,
# newer ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
@ -428,11 +423,11 @@ def main(args):
symbol_dir = MakeTempDir()
protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http'
os.environ['_NT_SYMBOL_PATH'] = (
'SRV*' + symbol_dir + '*' +
protocol + '://msdl.microsoft.com/download/symbols')
'SRV*' + symbol_dir + '*' + protocol +
'://msdl.microsoft.com/download/symbols')
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (
os.getpid(), str(random.getrandbits(64)))
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (os.getpid(),
str(random.getrandbits(64)))
crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name)
if not crashy_dump_path:
@ -442,7 +437,8 @@ def main(args):
if not start_handler_dump_path:
return 1
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(args[0], pipe_name)
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(
args[0], pipe_name)
if not destroyed_dump_path:
return 1
@ -465,21 +461,15 @@ def main(args):
if not sigabrt_main_path:
return 1
sigabrt_background_path = GetDumpFromSignal(
args[0], pipe_name, 'background')
sigabrt_background_path = GetDumpFromSignal(args[0], pipe_name,
'background')
if not sigabrt_background_path:
return 1
RunTests(cdb_path,
crashy_dump_path,
start_handler_dump_path,
destroyed_dump_path,
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name)
RunTests(cdb_path, crashy_dump_path, start_handler_dump_path,
destroyed_dump_path, z7_dump_path, other_program_path,
other_program_no_exception_path, sigabrt_main_path,
sigabrt_background_path, pipe_name)
return 1 if g_had_failures else 0
finally:

3
third_party/fuchsia/runner.py vendored Normal file → Executable file
View File

@ -1,3 +1,5 @@
#!/usr/bin/env python
# Copyright 2018 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@ -14,4 +16,5 @@
import os
import sys
os.execv(sys.argv[1], sys.argv[1:])

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved.
#
@ -20,6 +19,7 @@ import sys
import mig_fix
import mig_gen
def main(args):
parsed = mig_gen.parse_args(args)
@ -30,5 +30,6 @@ def main(args):
parsed.migcom_path, parsed.arch)
mig_fix.fix_interface(interface)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -22,18 +22,18 @@ import sys
from mig_gen import MigInterface
def _fix_user_implementation(implementation, fixed_implementation, header,
fixed_header):
"""Rewrites a MIG-generated user implementation (.c) file.
Rewrites the file at |implementation| by adding
__attribute__((unused)) to the definition of any structure typedefed
as __Reply by searching for the pattern unique to those structure
definitions. These structures are in fact unused in the user
implementation file, and this will trigger a -Wunused-local-typedefs
warning in gcc unless removed or marked with the unused attribute.
Also changes header references to point to the new header filename, if
changed.
Rewrites the file at |implementation| by adding __attribute__((unused)) to
the definition of any structure typedefed as __Reply by searching for the
pattern unique to those structure definitions. These structures are in fact
unused in the user implementation file, and this will trigger a
-Wunused-local-typedefs warning in gcc unless removed or marked with the
unused attribute. Also changes header references to point to the new
header filename, if changed.
If |fixed_implementation| is None, overwrites the original; otherwise, puts
the result in the file at |fixed_implementation|.
@ -59,6 +59,7 @@ def _fix_user_implementation(implementation, fixed_implementation, header,
file.write(contents)
file.close()
def _fix_server_implementation(implementation, fixed_implementation, header,
fixed_header):
"""Rewrites a MIG-generated server implementation (.c) file.
@ -79,24 +80,25 @@ def _fix_server_implementation(implementation, fixed_implementation, header,
contents = file.read()
# Find interesting declarations.
declaration_pattern = \
re.compile('^mig_internal (kern_return_t __MIG_check__.*)$',
re.MULTILINE)
declaration_pattern = re.compile(
'^mig_internal (kern_return_t __MIG_check__.*)$', re.MULTILINE)
declarations = declaration_pattern.findall(contents)
# Remove “__attribute__((__unused__))” from the declarations, and call them
# “mig_external” or “extern” depending on whether “mig_external” is defined.
attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ')
declarations = ['''\
declarations = [
'''\
#ifdef mig_external
mig_external
#else
extern
#endif
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations]
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations
]
# Rewrite the declarations in this file as “mig_external”.
contents = declaration_pattern.sub(r'mig_external \1', contents);
contents = declaration_pattern.sub(r'mig_external \1', contents)
# Crashpad never implements the mach_msg_server() MIG callouts. To avoid
# needing to provide stub implementations, set KERN_FAILURE as the RetCode
@ -125,6 +127,7 @@ extern
file.close()
return declarations
def _fix_header(header, fixed_header, declarations=[]):
"""Rewrites a MIG-generated header (.h) file.
@ -161,6 +164,7 @@ extern "C" {
file.write(contents)
file.close()
def fix_interface(interface, fixed_interface=None):
if fixed_interface is None:
fixed_interface = MigInterface(None, None, None, None)
@ -175,6 +179,7 @@ def fix_interface(interface, fixed_interface=None):
_fix_header(interface.server_h, fixed_interface.server_h,
server_declarations)
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument('user_c')
@ -187,11 +192,12 @@ def main(args):
parser.add_argument('--fixed_server_h', default=None)
parsed = parser.parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c,
parsed.user_h, parsed.server_h)
interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.server_h)
fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c,
parsed.fixed_user_h, parsed.fixed_server_h)
fix_interface(interface, fixed_interface)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved.
#
@ -21,19 +20,30 @@ import os
import subprocess
import sys
MigInterface = collections.namedtuple('MigInterface', ['user_c', 'server_c',
'user_h', 'server_h'])
MigInterface = collections.namedtuple(
'MigInterface', ['user_c', 'server_c', 'user_h', 'server_h'])
def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
mig_path=None, migcom_path=None, arch=None):
def generate_interface(defs,
interface,
includes=[],
sdk=None,
clang_path=None,
mig_path=None,
migcom_path=None,
arch=None):
if mig_path is None:
mig_path = 'mig'
command = [mig_path,
# yapf: disable
command = [
mig_path,
'-user', interface.user_c,
'-server', interface.server_c,
'-header', interface.user_h,
'-sheader', interface.server_h,
]
# yapf: enable
if clang_path is not None:
os.environ['MIGCC'] = clang_path
@ -48,6 +58,7 @@ def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
command.append(defs)
subprocess.check_call(command)
def parse_args(args):
parser = argparse.ArgumentParser()
parser.add_argument('--clang-path', help='Path to Clang')
@ -66,13 +77,15 @@ def parse_args(args):
parser.add_argument('server_h')
return parser.parse_args(args)
def main(args):
parsed = parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c,
parsed.user_h, parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include,
parsed.sdk, parsed.clang_path, parsed.mig_path,
parsed.migcom_path, parsed.arch)
interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include, parsed.sdk,
parsed.clang_path, parsed.mig_path, parsed.migcom_path,
parsed.arch)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))

View File

@ -23,14 +23,17 @@ key = os.path.join(testdata, 'crashpad_util_test_key.pem')
cert = os.path.join(testdata, 'crashpad_util_test_cert.pem')
with open(cert, 'w') as cert_file, open(key, 'w') as key_file:
MESSAGE = 'DO NOT EDIT: This file was auto-generated by ' + __file__ + '\n\n'
MESSAGE = ('DO NOT EDIT: This file was auto-generated by ' + __file__ +
'\n\n')
cert_file.write(MESSAGE)
key_file.write(MESSAGE)
proc = subprocess.Popen(
['openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost',
'-days', '3650', '-newkey', 'rsa:2048', '-keyout', '-'],
stderr=open(os.devnull, 'w'), stdout=subprocess.PIPE)
proc = subprocess.Popen([
'openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', '-days',
'3650', '-newkey', 'rsa:2048', '-keyout', '-'
],
stderr=open(os.devnull, 'w'),
stdout=subprocess.PIPE)
contents = proc.communicate()[0]
dest = sys.stderr