Add .style.yapf and reformat according to yapf, using “google” style

% yapf --in-place $(git ls-files **/*.py)
% yapf --version
yapf 0.30.0

Note that this is not using the “chromium” yapf style because Chromium
is moving to PEP-8.
https://groups.google.com/a/chromium.org/d/topic/chromium-dev/RcJgJdkNIdg
yapf 0.30.0 no longer recognizes “chromium” as a style option.
22ef70f3c4
Since this is a mass reformatting, it might as well move things all the
way into the future all at once.

This uses the “google” style, which is a superset of “pep8”.

Change-Id: Ifa37371079ea1859e4afe8e31d2eef2cfd7af384
Reviewed-on: https://chromium-review.googlesource.com/c/crashpad/crashpad/+/2165637
Commit-Queue: Mark Mentovai <mark@chromium.org>
Reviewed-by: Scott Graham <scottmg@chromium.org>
This commit is contained in:
Mark Mentovai 2020-04-27 09:43:35 -04:00 committed by Commit Bot
parent 29b1688c11
commit a5a1c3b07f
16 changed files with 1687 additions and 1615 deletions

16
.style.yapf Normal file
View File

@ -0,0 +1,16 @@
# Copyright 2020 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
[style]
based_on_style = google

View File

@ -19,83 +19,84 @@ import sys
def ChooseDependencyPath(local_path, external_path): def ChooseDependencyPath(local_path, external_path):
"""Chooses between a dependency located at local path and an external path. """Chooses between a dependency located at local path and an external path.
The local path, used in standalone builds, is preferred. If it is not present The local path, used in standalone builds, is preferred. If it is not
but the external path is, the external path will be used. If neither path is present but the external path is, the external path will be used. If neither
present, the local path will be used, so that error messages uniformly refer path is present, the local path will be used, so that error messages
to the local path. uniformly refer to the local path.
Args: Args:
local_path: The preferred local path to use for a standalone build. local_path: The preferred local path to use for a standalone build.
external_path: The external path to fall back to. external_path: The external path to fall back to.
Returns: Returns:
A 2-tuple. The first element is None or 'external', depending on whether A 2-tuple. The first element is None or 'external', depending on whether
local_path or external_path was chosen. The second element is the chosen local_path or external_path was chosen. The second element is the chosen
path. path.
""" """
if os.path.exists(local_path) or not os.path.exists(external_path): if os.path.exists(local_path) or not os.path.exists(external_path):
return (None, local_path) return (None, local_path)
return ('external', external_path) return ('external', external_path)
script_dir = os.path.dirname(__file__) script_dir = os.path.dirname(__file__)
crashpad_dir = (os.path.dirname(script_dir) if script_dir not in ('', os.curdir) crashpad_dir = (os.path.dirname(script_dir)
else os.pardir) if script_dir not in ('', os.curdir) else os.pardir)
sys.path.insert(0, sys.path.insert(
ChooseDependencyPath(os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 0,
'pylib'), ChooseDependencyPath(
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 'pylib'),
'pylib'))[1]) os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', 'pylib'))[1])
import gyp import gyp
def main(args): def main(args):
if 'GYP_GENERATORS' not in os.environ: if 'GYP_GENERATORS' not in os.environ:
os.environ['GYP_GENERATORS'] = 'ninja' os.environ['GYP_GENERATORS'] = 'ninja'
crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir
(dependencies, mini_chromium_common_gypi) = (ChooseDependencyPath( (dependencies, mini_chromium_common_gypi) = (ChooseDependencyPath(
os.path.join(crashpad_dir, 'third_party', 'mini_chromium', os.path.join(crashpad_dir, 'third_party', 'mini_chromium',
'mini_chromium', 'build', 'common.gypi'), 'mini_chromium', 'build', 'common.gypi'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium', os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium',
'mini_chromium', 'build', 'common.gypi'))) 'mini_chromium', 'build', 'common.gypi')))
if dependencies is not None: if dependencies is not None:
args.extend(['-D', 'crashpad_dependencies=%s' % dependencies]) args.extend(['-D', 'crashpad_dependencies=%s' % dependencies])
args.extend(['--include', mini_chromium_common_gypi]) args.extend(['--include', mini_chromium_common_gypi])
args.extend(['--depth', crashpad_dir_or_dot]) args.extend(['--depth', crashpad_dir_or_dot])
args.append(os.path.join(crashpad_dir, 'crashpad.gyp')) args.append(os.path.join(crashpad_dir, 'crashpad.gyp'))
result = gyp.main(args) result = gyp.main(args)
if result != 0: if result != 0:
return result
if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may be
# set during a cross build, such as a cross build for Android.
has_target_arch = False
for arg_index in range(0, len(args)):
arg = args[arg_index]
if (arg.startswith('-Dtarget_arch=') or
(arg == '-D' and arg_index + 1 < len(args) and
args[arg_index + 1].startswith('target_arch='))):
has_target_arch = True
break
if not has_target_arch:
# Also generate the x86 build.
result = gyp.main(args + ['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0:
return result return result
result = gyp.main(
args + ['-D', 'target_arch=ia32', '-G', 'config=Release'])
return result if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may
# be set during a cross build, such as a cross build for Android.
has_target_arch = False
for arg_index in range(0, len(args)):
arg = args[arg_index]
if (arg.startswith('-Dtarget_arch=') or
(arg == '-D' and arg_index + 1 < len(args) and
args[arg_index + 1].startswith('target_arch='))):
has_target_arch = True
break
if not has_target_arch:
# Also generate the x86 build.
result = gyp.main(args +
['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0:
return result
result = gyp.main(
args + ['-D', 'target_arch=ia32', '-G', 'config=Release'])
return result
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved. # Copyright 2017 The Crashpad Authors. All rights reserved.
# #
@ -25,52 +24,46 @@ import sys
def main(args): def main(args):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Set up an Android cross build', description='Set up an Android cross build',
epilog='Additional arguments will be passed to gyp_crashpad.py.') epilog='Additional arguments will be passed to gyp_crashpad.py.')
parser.add_argument('--arch', required=True, help='Target architecture') parser.add_argument('--arch', required=True, help='Target architecture')
parser.add_argument('--api-level', required=True, help='Target API level') parser.add_argument('--api-level', required=True, help='Target API level')
parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain') parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain')
(parsed, extra_command_line_args) = parser.parse_known_args(args) (parsed, extra_command_line_args) = parser.parse_known_args(args)
ndk_bin_dir = os.path.join(parsed.ndk, ndk_bin_dir = os.path.join(parsed.ndk, 'toolchains', 'llvm', 'prebuilt',
'toolchains', 'linux-x86_64', 'bin')
'llvm', if not os.path.exists(ndk_bin_dir):
'prebuilt', parser.error("missing toolchain")
'linux-x86_64',
'bin')
if not os.path.exists(ndk_bin_dir):
parser.error("missing toolchain")
ARCH_TO_ARCH_TRIPLET = { ARCH_TO_ARCH_TRIPLET = {
'arm': 'armv7a-linux-androideabi', 'arm': 'armv7a-linux-androideabi',
'arm64': 'aarch64-linux-android', 'arm64': 'aarch64-linux-android',
'ia32': 'i686-linux-android', 'ia32': 'i686-linux-android',
'x64': 'x86_64-linux-android', 'x64': 'x86_64-linux-android',
} }
clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level
os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang') os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang++') os.environ['CXX_target'] = os.path.join(ndk_bin_dir,
clang_prefix + '-clang++')
extra_args = ['-D', 'android_api_level=' + parsed.api_level] extra_args = ['-D', 'android_api_level=' + parsed.api_level]
# ARM only includes 'v7a' in the tool prefix for clang # ARM only includes 'v7a' in the tool prefix for clang
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' else
else ARCH_TO_ARCH_TRIPLET[parsed.arch]) ARCH_TO_ARCH_TRIPLET[parsed.arch])
for tool in ('ar', 'nm', 'readelf'): for tool in ('ar', 'nm', 'readelf'):
os.environ['%s_target' % tool.upper()] = ( os.environ['%s_target' % tool.upper()] = (os.path.join(
os.path.join(ndk_bin_dir, '%s-%s' % (tool_prefix, tool))) ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
return gyp_crashpad.main( return gyp_crashpad.main([
['-D', 'OS=android', '-D', 'OS=android', '-D',
'-D', 'target_arch=%s' % parsed.arch, 'target_arch=%s' % parsed.arch, '-D', 'clang=1', '-f', 'ninja-android'
'-D', 'clang=1', ] + extra_args + extra_command_line_args)
'-f', 'ninja-android'] +
extra_args +
extra_command_line_args)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -23,7 +23,6 @@ import subprocess
import sys import sys
import urllib2 import urllib2
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sysroot revision from: # Sysroot revision from:
@ -33,42 +32,43 @@ PATH = 'chrome-linux-sysroot/toolchain'
REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66' REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66'
FILENAME = 'debian_stretch_amd64_sysroot.tar.xz' FILENAME = 'debian_stretch_amd64_sysroot.tar.xz'
def main(): def main():
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME) url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
sysroot = os.path.join(SCRIPT_DIR, os.pardir, sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux',
'third_party', 'linux', 'sysroot') 'sysroot')
stamp = os.path.join(sysroot, '.stamp') stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp): if os.path.exists(stamp):
with open(stamp) as s: with open(stamp) as s:
if s.read() == url: if s.read() == url:
return return
print 'Installing Debian root image from %s' % url print 'Installing Debian root image from %s' % url
if os.path.isdir(sysroot): if os.path.isdir(sysroot):
shutil.rmtree(sysroot) shutil.rmtree(sysroot)
os.mkdir(sysroot) os.mkdir(sysroot)
tarball = os.path.join(sysroot, FILENAME) tarball = os.path.join(sysroot, FILENAME)
print 'Downloading %s' % url print 'Downloading %s' % url
for _ in range(3): for _ in range(3):
response = urllib2.urlopen(url) response = urllib2.urlopen(url)
with open(tarball, 'wb') as f: with open(tarball, 'wb') as f:
f.write(response.read()) f.write(response.read())
break break
else: else:
raise Exception('Failed to download %s' % url) raise Exception('Failed to download %s' % url)
subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot]) subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
os.remove(tarball) os.remove(tarball)
with open(stamp, 'w') as s: with open(stamp, 'w') as s:
s.write(url) s.write(url)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
sys.exit(0) sys.exit(0)

View File

@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Convert GN Xcode projects to platform and configuration independent targets. """Convert GN Xcode projects to platform and configuration independent targets.
GN generates Xcode projects that build one configuration only. However, typical GN generates Xcode projects that build one configuration only. However, typical
@ -40,234 +39,245 @@ import tempfile
class XcodeProject(object): class XcodeProject(object):
def __init__(self, objects, counter = 0): def __init__(self, objects, counter=0):
self.objects = objects self.objects = objects
self.counter = 0 self.counter = 0
def AddObject(self, parent_name, obj): def AddObject(self, parent_name, obj):
while True: while True:
self.counter += 1 self.counter += 1
str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter) str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter)
new_id = hashlib.sha1(str_id).hexdigest()[:24].upper() new_id = hashlib.sha1(str_id).hexdigest()[:24].upper()
# Make sure ID is unique. It's possible there could be an id conflict # Make sure ID is unique. It's possible there could be an id
# since this is run after GN runs. # conflict since this is run after GN runs.
if new_id not in self.objects: if new_id not in self.objects:
self.objects[new_id] = obj self.objects[new_id] = obj
return new_id return new_id
def CopyFileIfChanged(source_path, target_path): def CopyFileIfChanged(source_path, target_path):
"""Copy |source_path| to |target_path| is different.""" """Copy |source_path| to |target_path| is different."""
target_dir = os.path.dirname(target_path) target_dir = os.path.dirname(target_path)
if not os.path.isdir(target_dir): if not os.path.isdir(target_dir):
os.makedirs(target_dir) os.makedirs(target_dir)
if not os.path.exists(target_path) or \ if (not os.path.exists(target_path) or
not filecmp.cmp(source_path, target_path): not filecmp.cmp(source_path, target_path)):
shutil.copyfile(source_path, target_path) shutil.copyfile(source_path, target_path)
def LoadXcodeProjectAsJSON(path): def LoadXcodeProjectAsJSON(path):
"""Return Xcode project at |path| as a JSON string.""" """Return Xcode project at |path| as a JSON string."""
return subprocess.check_output([ return subprocess.check_output(
'plutil', '-convert', 'json', '-o', '-', path]) ['plutil', '-convert', 'json', '-o', '-', path])
def WriteXcodeProject(output_path, json_string): def WriteXcodeProject(output_path, json_string):
"""Save Xcode project to |output_path| as XML.""" """Save Xcode project to |output_path| as XML."""
with tempfile.NamedTemporaryFile() as temp_file: with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(json_string) temp_file.write(json_string)
temp_file.flush() temp_file.flush()
subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name]) subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name])
CopyFileIfChanged(temp_file.name, output_path) CopyFileIfChanged(temp_file.name, output_path)
def UpdateProductsProject(file_input, file_output, configurations, root_dir): def UpdateProductsProject(file_input, file_output, configurations, root_dir):
"""Update Xcode project to support multiple configurations. """Update Xcode project to support multiple configurations.
Args: Args:
file_input: path to the input Xcode project file_input: path to the input Xcode project
file_output: path to the output file file_output: path to the output file
configurations: list of string corresponding to the configurations that configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at need to be supported by the tweaked Xcode projects, must contains at
least one value. least one value.
""" """
json_data = json.loads(LoadXcodeProjectAsJSON(file_input)) json_data = json.loads(LoadXcodeProjectAsJSON(file_input))
project = XcodeProject(json_data['objects']) project = XcodeProject(json_data['objects'])
objects_to_remove = [] objects_to_remove = []
for value in project.objects.values(): for value in project.objects.values():
isa = value['isa'] isa = value['isa']
# Teach build shell script to look for the configuration and platform. # Teach build shell script to look for the configuration and platform.
if isa == 'PBXShellScriptBuildPhase': if isa == 'PBXShellScriptBuildPhase':
value['shellScript'] = value['shellScript'].replace( value['shellScript'] = value['shellScript'].replace(
'ninja -C .', 'ninja -C .',
'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"') 'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"')
# Add new configuration, using the first one as default. # Add new configuration, using the first one as default.
if isa == 'XCConfigurationList': if isa == 'XCConfigurationList':
value['defaultConfigurationName'] = configurations[0] value['defaultConfigurationName'] = configurations[0]
objects_to_remove.extend(value['buildConfigurations']) objects_to_remove.extend(value['buildConfigurations'])
build_config_template = project.objects[value['buildConfigurations'][0]] build_config_template = project.objects[value['buildConfigurations']
build_config_template['buildSettings']['CONFIGURATION_BUILD_DIR'] = \ [0]]
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)' build_settings = build_config_template['buildSettings']
build_config_template['buildSettings']['CODE_SIGN_IDENTITY'] = '' build_settings['CONFIGURATION_BUILD_DIR'] = (
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)')
build_settings['CODE_SIGN_IDENTITY'] = ''
value['buildConfigurations'] = [] value['buildConfigurations'] = []
for configuration in configurations: for configuration in configurations:
new_build_config = copy.copy(build_config_template) new_build_config = copy.copy(build_config_template)
new_build_config['name'] = configuration new_build_config['name'] = configuration
value['buildConfigurations'].append( value['buildConfigurations'].append(
project.AddObject('products', new_build_config)) project.AddObject('products', new_build_config))
for object_id in objects_to_remove: for object_id in objects_to_remove:
del project.objects[object_id] del project.objects[object_id]
AddMarkdownToProject(project, root_dir, json_data['rootObject']) AddMarkdownToProject(project, root_dir, json_data['rootObject'])
objects = collections.OrderedDict(sorted(project.objects.iteritems())) objects = collections.OrderedDict(sorted(project.objects.iteritems()))
WriteXcodeProject(file_output, json.dumps(json_data)) WriteXcodeProject(file_output, json.dumps(json_data))
def AddMarkdownToProject(project, root_dir, root_object): def AddMarkdownToProject(project, root_dir, root_object):
list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md'] list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md']
paths = subprocess.check_output(list_files_cmd).splitlines() paths = subprocess.check_output(list_files_cmd).splitlines()
ios_internal_dir = os.path.join(root_dir, 'ios_internal') ios_internal_dir = os.path.join(root_dir, 'ios_internal')
if os.path.exists(ios_internal_dir): if os.path.exists(ios_internal_dir):
list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md'] list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md']
ios_paths = subprocess.check_output(list_files_cmd).splitlines() ios_paths = subprocess.check_output(list_files_cmd).splitlines()
paths.extend(["ios_internal/" + path for path in ios_paths]) paths.extend(["ios_internal/" + path for path in ios_paths])
for path in paths: for path in paths:
new_markdown_entry = { new_markdown_entry = {
"fileEncoding": "4", "fileEncoding": "4",
"isa": "PBXFileReference", "isa": "PBXFileReference",
"lastKnownFileType": "net.daringfireball.markdown", "lastKnownFileType": "net.daringfireball.markdown",
"name": os.path.basename(path), "name": os.path.basename(path),
"path": path, "path": path,
"sourceTree": "<group>" "sourceTree": "<group>"
} }
new_markdown_entry_id = project.AddObject('sources', new_markdown_entry) new_markdown_entry_id = project.AddObject('sources', new_markdown_entry)
folder = GetFolderForPath(project, root_object, os.path.dirname(path)) folder = GetFolderForPath(project, root_object, os.path.dirname(path))
folder['children'].append(new_markdown_entry_id) folder['children'].append(new_markdown_entry_id)
def GetFolderForPath(project, rootObject, path): def GetFolderForPath(project, rootObject, path):
objects = project.objects objects = project.objects
# 'Sources' is always the first child of # 'Sources' is always the first child of
# project->rootObject->mainGroup->children. # project->rootObject->mainGroup->children.
root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]] root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]]
if not path: if not path:
return root
for folder in path.split('/'):
children = root['children']
new_root = None
for child in children:
if (objects[child]['isa'] == 'PBXGroup' and
objects[child]['name'] == folder):
new_root = objects[child]
break
if not new_root:
# If the folder isn't found we could just cram it into the leaf
# existing folder, but that leads to folders with tons of README.md
# inside.
new_group = {
"children": [],
"isa": "PBXGroup",
"name": folder,
"sourceTree": "<group>"
}
new_group_id = project.AddObject('sources', new_group)
children.append(new_group_id)
new_root = objects[new_group_id]
root = new_root
return root return root
for folder in path.split('/'):
children = root['children']
new_root = None
for child in children:
if objects[child]['isa'] == 'PBXGroup' and \
objects[child]['name'] == folder:
new_root = objects[child]
break
if not new_root:
# If the folder isn't found we could just cram it into the leaf existing
# folder, but that leads to folders with tons of README.md inside.
new_group = {
"children": [
],
"isa": "PBXGroup",
"name": folder,
"sourceTree": "<group>"
}
new_group_id = project.AddObject('sources', new_group)
children.append(new_group_id)
new_root = objects[new_group_id]
root = new_root
return root
def DisableNewBuildSystem(output_dir): def DisableNewBuildSystem(output_dir):
"""Disables the new build system due to crbug.com/852522 """ """Disables the new build system due to crbug.com/852522 """
xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace', xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace',
'xcshareddata', 'WorkspaceSettings.xcsettings') 'xcshareddata',
if os.path.isfile(xcwspacesharedsettings): 'WorkspaceSettings.xcsettings')
json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings)) if os.path.isfile(xcwspacesharedsettings):
else: json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings))
json_data = {} else:
json_data['BuildSystemType'] = 'Original' json_data = {}
WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data)) json_data['BuildSystemType'] = 'Original'
WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data))
def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations): def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
'''Tweak the Xcode project generated by gn to support multiple configurations. '''Tweak the Xcode project generated by gn to support multiple
configurations.
The Xcode projects generated by "gn gen --ide" only supports a single The Xcode projects generated by "gn gen --ide" only supports a single
platform and configuration (as the platform and configuration are set platform and configuration (as the platform and configuration are set per
per output directory). This method takes as input such projects and output directory). This method takes as input such projects and add support
add support for multiple configurations and platforms (to allow devs for multiple configurations and platforms (to allow devs to select them in
to select them in Xcode). Xcode).
Args: Args:
input_dir: directory containing the XCode projects created by "gn gen --ide" input_dir: directory containing the XCode projects created by "gn gen
output_dir: directory where the tweaked Xcode projects will be saved --ide"
configurations: list of string corresponding to the configurations that output_dir: directory where the tweaked Xcode projects will be saved
need to be supported by the tweaked Xcode projects, must contains at configurations: list of string corresponding to the configurations that
least one value. need to be supported by the tweaked Xcode projects, must contains at
''' least one value.
# Update products project. '''
products = os.path.join('products.xcodeproj', 'project.pbxproj') # Update products project.
product_input = os.path.join(input_dir, products) products = os.path.join('products.xcodeproj', 'project.pbxproj')
product_output = os.path.join(output_dir, products) product_input = os.path.join(input_dir, products)
UpdateProductsProject(product_input, product_output, configurations, root_dir) product_output = os.path.join(output_dir, products)
UpdateProductsProject(product_input, product_output, configurations,
root_dir)
# Copy all workspace. # Copy all workspace.
xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata') xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata')
CopyFileIfChanged(os.path.join(input_dir, xcwspace), CopyFileIfChanged(os.path.join(input_dir, xcwspace),
os.path.join(output_dir, xcwspace)) os.path.join(output_dir, xcwspace))
# TODO(crbug.com/852522): Disable new BuildSystemType. # TODO(crbug.com/852522): Disable new BuildSystemType.
DisableNewBuildSystem(output_dir) DisableNewBuildSystem(output_dir)
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once
# gn has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources))
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once gn
# has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources))
def Main(args): def Main(args):
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Convert GN Xcode projects for iOS.') description='Convert GN Xcode projects for iOS.')
parser.add_argument( parser.add_argument(
'input', 'input', help='directory containing [product|all] Xcode projects.')
help='directory containing [product|all] Xcode projects.') parser.add_argument(
parser.add_argument( 'output', help='directory where to generate the iOS configuration.')
'output', parser.add_argument('--add-config',
help='directory where to generate the iOS configuration.') dest='configurations',
parser.add_argument( default=[],
'--add-config', dest='configurations', default=[], action='append', action='append',
help='configuration to add to the Xcode project') help='configuration to add to the Xcode project')
parser.add_argument( parser.add_argument('--root',
'--root', type=os.path.abspath, required=True, type=os.path.abspath,
help='root directory of the project') required=True,
args = parser.parse_args(args) help='root directory of the project')
args = parser.parse_args(args)
if not os.path.isdir(args.input): if not os.path.isdir(args.input):
sys.stderr.write('Input directory does not exists.\n') sys.stderr.write('Input directory does not exists.\n')
return 1 return 1
required = set(['products.xcodeproj', 'all.xcworkspace']) required = set(['products.xcodeproj', 'all.xcworkspace'])
if not required.issubset(os.listdir(args.input)): if not required.issubset(os.listdir(args.input)):
sys.stderr.write( sys.stderr.write(
'Input directory does not contain all necessary Xcode projects.\n') 'Input directory does not contain all necessary Xcode projects.\n')
return 1 return 1
if not args.configurations: if not args.configurations:
sys.stderr.write('At least one configuration required, see --add-config.\n') sys.stderr.write(
return 1 'At least one configuration required, see --add-config.\n')
return 1
ConvertGnXcodeProject(args.root, args.input, args.output,
args.configurations)
ConvertGnXcodeProject(args.root, args.input, args.output, args.configurations)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main(sys.argv[1:])) sys.exit(Main(sys.argv[1:]))

View File

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import argparse import argparse
import convert_gn_xcodeproj import convert_gn_xcodeproj
import errno import errno
@ -27,326 +26,325 @@ import tempfile
import ConfigParser import ConfigParser
try: try:
import cStringIO as StringIO import cStringIO as StringIO
except ImportError: except ImportError:
import StringIO import StringIO
SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator') SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator')
SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage') SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage')
class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser): class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
'''A .ini file parser that supports strings and environment variables.'''
'''A .ini file parser that supports strings and environment variables.''' ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)')
ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)') def values(self, section):
return map(lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
ConfigParser.SafeConfigParser.items(self, section))
def values(self, section): def getstring(self, section, option):
return map( return self._UnquoteString(self._ExpandEnvVar(self.get(section,
lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)), option)))
ConfigParser.SafeConfigParser.items(self, section))
def getstring(self, section, option): def _UnquoteString(self, string):
return self._UnquoteString(self._ExpandEnvVar(self.get(section, option))) if not string or string[0] != '"' or string[-1] != '"':
return string
return string[1:-1]
def _UnquoteString(self, string): def _ExpandEnvVar(self, value):
if not string or string[0] != '"' or string[-1] != '"': match = self.ENV_VAR_PATTERN.search(value)
return string if not match:
return string[1:-1] return value
name, (begin, end) = match.group(1), match.span(0)
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix
def _ExpandEnvVar(self, value):
match = self.ENV_VAR_PATTERN.search(value)
if not match:
return value
name, (begin, end) = match.group(1), match.span(0)
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix
class GnGenerator(object): class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default
files.'''
'''Holds configuration for a build and method to generate gn default files.''' FAT_BUILD_DEFAULT_ARCH = '64-bit'
FAT_BUILD_DEFAULT_ARCH = '64-bit' TARGET_CPU_VALUES = {
'iphoneos': {
TARGET_CPU_VALUES = { '32-bit': '"arm"',
'iphoneos': { '64-bit': '"arm64"',
'32-bit': '"arm"', },
'64-bit': '"arm64"', 'iphonesimulator': {
}, '32-bit': '"x86"',
'iphonesimulator': { '64-bit': '"x64"',
'32-bit': '"x86"', }
'64-bit': '"x64"',
} }
}
def __init__(self, settings, config, target): def __init__(self, settings, config, target):
assert target in SUPPORTED_TARGETS assert target in SUPPORTED_TARGETS
assert config in SUPPORTED_CONFIGS assert config in SUPPORTED_CONFIGS
self._settings = settings self._settings = settings
self._config = config self._config = config
self._target = target self._target = target
def _GetGnArgs(self): def _GetGnArgs(self):
"""Build the list of arguments to pass to gn. """Build the list of arguments to pass to gn.
Returns: Returns:
A list of tuple containing gn variable names and variable values (it A list of tuple containing gn variable names and variable values (it
is not a dictionary as the order needs to be preserved). is not a dictionary as the order needs to be preserved).
""" """
args = [] args = []
args.append(('is_debug', self._config in ('Debug', 'Coverage'))) args.append(('is_debug', self._config in ('Debug', 'Coverage')))
if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1': if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1':
args.append(('use_system_xcode', False)) args.append(('use_system_xcode', False))
cpu_values = self.TARGET_CPU_VALUES[self._target] cpu_values = self.TARGET_CPU_VALUES[self._target]
build_arch = self._settings.getstring('build', 'arch') build_arch = self._settings.getstring('build', 'arch')
if build_arch == 'fat': if build_arch == 'fat':
target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH] target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH]
args.append(('target_cpu', target_cpu)) args.append(('target_cpu', target_cpu))
args.append(('additional_target_cpus', args.append(
[cpu for cpu in cpu_values.itervalues() if cpu != target_cpu])) ('additional_target_cpus',
else: [cpu for cpu in cpu_values.itervalues() if cpu != target_cpu]))
args.append(('target_cpu', cpu_values[build_arch]))
# Add user overrides after the other configurations so that they can
# refer to them and override them.
args.extend(self._settings.items('gn_args'))
return args
def Generate(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
subprocess.check_call(
self.GetGnCommand(gn_path, root_path, out_path, True))
def CreateGnRules(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
buf = StringIO.StringIO()
gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
self.WriteBuildNinja(buf, gn_command)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja'),
buf.getvalue(),
overwrite=False)
buf = StringIO.StringIO()
self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(),
overwrite=False)
def WriteArgsGn(self, stream):
stream.write('# This file was generated by setup-gn.py. Do not edit\n')
stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n')
stream.write('# to configure settings.\n')
stream.write('\n')
if self._settings.has_section('$imports$'):
for import_rule in self._settings.values('$imports$'):
stream.write('import("%s")\n' % import_rule)
stream.write('\n')
gn_args = self._GetGnArgs()
for name, value in gn_args:
if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list):
stream.write('%s = [%s' % (name, '\n' if len(value) > 1 else ''))
if len(value) == 1:
prefix = ' '
suffix = ' '
else: else:
prefix = ' ' args.append(('target_cpu', cpu_values[build_arch]))
suffix = ',\n'
for item in value:
if isinstance(item, bool):
stream.write('%s%s%s' % (prefix, str(item).lower(), suffix))
else:
stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n')
else:
stream.write('%s = %s\n' % (name, value))
def WriteBuildNinja(self, stream, gn_command): # Add user overrides after the other configurations so that they can
stream.write('rule gn\n') # refer to them and override them.
stream.write(' command = %s\n' % NinjaEscapeCommand(gn_command)) args.extend(self._settings.items('gn_args'))
stream.write(' description = Regenerating ninja files\n') return args
stream.write('\n')
stream.write('build build.ninja: gn\n')
stream.write(' generator = 1\n')
stream.write(' depfile = build.ninja.d\n')
def WriteBuildNinjaDeps(self, stream): def Generate(self, gn_path, root_path, out_path):
stream.write('build.ninja: nonexistant_file.gn\n') buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project): subprocess.check_call(
gn_command = [ gn_path, '--root=%s' % os.path.realpath(src_path), '-q' ] self.GetGnCommand(gn_path, root_path, out_path, True))
if generate_xcode_project:
gn_command.append('--ide=xcode') def CreateGnRules(self, gn_path, root_path, out_path):
gn_command.append('--root-target=gn_all') buf = StringIO.StringIO()
if self._settings.getboolean('goma', 'enabled'): self.WriteArgsGn(buf)
ninja_jobs = self._settings.getint('xcode', 'jobs') or 200 WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs) buf.getvalue(),
if self._settings.has_section('filters'): overwrite=True)
target_filters = self._settings.values('filters')
if target_filters: buf = StringIO.StringIO()
gn_command.append('--filters=%s' % ';'.join(target_filters)) gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
# TODO(justincohen): --check is currently failing in crashpad. self.WriteBuildNinja(buf, gn_command)
# else: WriteToFileIfChanged(os.path.join(out_path, 'build.ninja'),
# gn_command.append('--check') buf.getvalue(),
gn_command.append('gen') overwrite=False)
gn_command.append('//%s' %
os.path.relpath(os.path.abspath(out_path), os.path.abspath(src_path))) buf = StringIO.StringIO()
return gn_command self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged(os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(),
overwrite=False)
def WriteArgsGn(self, stream):
stream.write('# This file was generated by setup-gn.py. Do not edit\n')
stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n')
stream.write('# to configure settings.\n')
stream.write('\n')
if self._settings.has_section('$imports$'):
for import_rule in self._settings.values('$imports$'):
stream.write('import("%s")\n' % import_rule)
stream.write('\n')
gn_args = self._GetGnArgs()
for name, value in gn_args:
if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list):
stream.write('%s = [%s' %
(name, '\n' if len(value) > 1 else ''))
if len(value) == 1:
prefix = ' '
suffix = ' '
else:
prefix = ' '
suffix = ',\n'
for item in value:
if isinstance(item, bool):
stream.write('%s%s%s' %
(prefix, str(item).lower(), suffix))
else:
stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n')
else:
stream.write('%s = %s\n' % (name, value))
def WriteBuildNinja(self, stream, gn_command):
stream.write('rule gn\n')
stream.write(' command = %s\n' % NinjaEscapeCommand(gn_command))
stream.write(' description = Regenerating ninja files\n')
stream.write('\n')
stream.write('build build.ninja: gn\n')
stream.write(' generator = 1\n')
stream.write(' depfile = build.ninja.d\n')
def WriteBuildNinjaDeps(self, stream):
stream.write('build.ninja: nonexistant_file.gn\n')
def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project):
gn_command = [gn_path, '--root=%s' % os.path.realpath(src_path), '-q']
if generate_xcode_project:
gn_command.append('--ide=xcode')
gn_command.append('--root-target=gn_all')
if self._settings.getboolean('goma', 'enabled'):
ninja_jobs = self._settings.getint('xcode', 'jobs') or 200
gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs)
if self._settings.has_section('filters'):
target_filters = self._settings.values('filters')
if target_filters:
gn_command.append('--filters=%s' % ';'.join(target_filters))
# TODO(justincohen): --check is currently failing in crashpad.
# else:
# gn_command.append('--check')
gn_command.append('gen')
gn_command.append('//%s' % os.path.relpath(os.path.abspath(out_path),
os.path.abspath(src_path)))
return gn_command
def WriteToFileIfChanged(filename, content, overwrite): def WriteToFileIfChanged(filename, content, overwrite):
'''Write |content| to |filename| if different. If |overwrite| is False '''Write |content| to |filename| if different. If |overwrite| is False
and the file already exists it is left untouched.''' and the file already exists it is left untouched.'''
if os.path.exists(filename): if os.path.exists(filename):
if not overwrite: if not overwrite:
return return
with open(filename) as file: with open(filename) as file:
if file.read() == content: if file.read() == content:
return return
if not os.path.isdir(os.path.dirname(filename)): if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename)) os.makedirs(os.path.dirname(filename))
with open(filename, 'w') as file: with open(filename, 'w') as file:
file.write(content) file.write(content)
def NinjaNeedEscape(arg): def NinjaNeedEscape(arg):
'''Returns True if |arg| needs to be escaped when written to .ninja file.''' '''Returns True if |arg| needs to be escaped when written to .ninja file.'''
return ':' in arg or '*' in arg or ';' in arg return ':' in arg or '*' in arg or ';' in arg
def NinjaEscapeCommand(command): def NinjaEscapeCommand(command):
'''Escapes |command| in order to write it to .ninja file.''' '''Escapes |command| in order to write it to .ninja file.'''
result = [] result = []
for arg in command: for arg in command:
if NinjaNeedEscape(arg): if NinjaNeedEscape(arg):
arg = arg.replace(':', '$:') arg = arg.replace(':', '$:')
arg = arg.replace(';', '\\;') arg = arg.replace(';', '\\;')
arg = arg.replace('*', '\\*') arg = arg.replace('*', '\\*')
else: else:
result.append(arg) result.append(arg)
return ' '.join(result) return ' '.join(result)
def FindGn(): def FindGn():
'''Returns absolute path to gn binary looking at the PATH env variable.''' '''Returns absolute path to gn binary looking at the PATH env variable.'''
for path in os.environ['PATH'].split(os.path.pathsep): for path in os.environ['PATH'].split(os.path.pathsep):
gn_path = os.path.join(path, 'gn') gn_path = os.path.join(path, 'gn')
if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK): if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK):
return gn_path return gn_path
return None return None
def GenerateXcodeProject(gn_path, root_dir, out_dir, settings): def GenerateXcodeProject(gn_path, root_dir, out_dir, settings):
'''Convert GN generated Xcode project into multi-configuration Xcode '''Convert GN generated Xcode project into multi-configuration Xcode
project.''' project.'''
temp_path = tempfile.mkdtemp(prefix=os.path.abspath( temp_path = tempfile.mkdtemp(
os.path.join(out_dir, '_temp'))) prefix=os.path.abspath(os.path.join(out_dir, '_temp')))
try: try:
generator = GnGenerator(settings, 'Debug', 'iphonesimulator') generator = GnGenerator(settings, 'Debug', 'iphonesimulator')
generator.Generate(gn_path, root_dir, temp_path) generator.Generate(gn_path, root_dir, temp_path)
convert_gn_xcodeproj.ConvertGnXcodeProject( convert_gn_xcodeproj.ConvertGnXcodeProject(
root_dir, root_dir, os.path.join(temp_path), os.path.join(out_dir, 'build'),
os.path.join(temp_path), SUPPORTED_CONFIGS)
os.path.join(out_dir, 'build'), finally:
SUPPORTED_CONFIGS) if os.path.exists(temp_path):
finally: shutil.rmtree(temp_path)
if os.path.exists(temp_path):
shutil.rmtree(temp_path)
def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings): def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings):
'''Generates all template configurations for gn.''' '''Generates all template configurations for gn.'''
for config in SUPPORTED_CONFIGS: for config in SUPPORTED_CONFIGS:
for target in SUPPORTED_TARGETS: for target in SUPPORTED_TARGETS:
build_dir = os.path.join(out_dir, '%s-%s' % (config, target)) build_dir = os.path.join(out_dir, '%s-%s' % (config, target))
generator = GnGenerator(settings, config, target) generator = GnGenerator(settings, config, target)
generator.CreateGnRules(gn_path, root_dir, build_dir) generator.CreateGnRules(gn_path, root_dir, build_dir)
def Main(args): def Main(args):
default_root = os.path.normpath(os.path.join( default_root = os.path.normpath(
os.path.dirname(__file__), os.pardir, os.pardir)) os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
description='Generate build directories for use with gn.') description='Generate build directories for use with gn.')
parser.add_argument( parser.add_argument(
'root', default=default_root, nargs='?', 'root',
help='root directory where to generate multiple out configurations') default=default_root,
parser.add_argument( nargs='?',
'--import', action='append', dest='import_rules', default=[], help='root directory where to generate multiple out configurations')
help='path to file defining default gn variables') parser.add_argument('--import',
args = parser.parse_args(args) action='append',
dest='import_rules',
default=[],
help='path to file defining default gn variables')
args = parser.parse_args(args)
# Load configuration (first global and then any user overrides). # Load configuration (first global and then any user overrides).
settings = ConfigParserWithStringInterpolation() settings = ConfigParserWithStringInterpolation()
settings.read([ settings.read([
os.path.splitext(__file__)[0] + '.config', os.path.splitext(__file__)[0] + '.config',
os.path.expanduser('~/.setup-gn'), os.path.expanduser('~/.setup-gn'),
]) ])
# Add private sections corresponding to --import argument. # Add private sections corresponding to --import argument.
if args.import_rules: if args.import_rules:
settings.add_section('$imports$') settings.add_section('$imports$')
for i, import_rule in enumerate(args.import_rules): for i, import_rule in enumerate(args.import_rules):
if not import_rule.startswith('//'): if not import_rule.startswith('//'):
import_rule = '//%s' % os.path.relpath( import_rule = '//%s' % os.path.relpath(
os.path.abspath(import_rule), os.path.abspath(args.root)) os.path.abspath(import_rule), os.path.abspath(args.root))
settings.set('$imports$', '$rule%d$' % i, import_rule) settings.set('$imports$', '$rule%d$' % i, import_rule)
# Validate settings. # Validate settings.
if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'): if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'):
sys.stderr.write('ERROR: invalid value for build.arch: %s\n' % sys.stderr.write('ERROR: invalid value for build.arch: %s\n' %
settings.getstring('build', 'arch')) settings.getstring('build', 'arch'))
sys.exit(1) sys.exit(1)
if settings.getboolean('goma', 'enabled'): if settings.getboolean('goma', 'enabled'):
if settings.getint('xcode', 'jobs') < 0: if settings.getint('xcode', 'jobs') < 0:
sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' % sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' %
settings.get('xcode', 'jobs')) settings.get('xcode', 'jobs'))
sys.exit(1) sys.exit(1)
goma_install = os.path.expanduser(settings.getstring('goma', 'install')) goma_install = os.path.expanduser(settings.getstring('goma', 'install'))
if not os.path.isdir(goma_install): if not os.path.isdir(goma_install):
sys.stderr.write('WARNING: goma.install directory not found: %s\n' % sys.stderr.write('WARNING: goma.install directory not found: %s\n' %
settings.get('goma', 'install')) settings.get('goma', 'install'))
sys.stderr.write('WARNING: disabling goma\n') sys.stderr.write('WARNING: disabling goma\n')
settings.set('goma', 'enabled', 'false') settings.set('goma', 'enabled', 'false')
# Find gn binary in PATH. # Find gn binary in PATH.
gn_path = FindGn() gn_path = FindGn()
if gn_path is None: if gn_path is None:
sys.stderr.write('ERROR: cannot find gn in PATH\n') sys.stderr.write('ERROR: cannot find gn in PATH\n')
sys.exit(1) sys.exit(1)
out_dir = os.path.join(args.root, 'out') out_dir = os.path.join(args.root, 'out')
if not os.path.isdir(out_dir): if not os.path.isdir(out_dir):
os.makedirs(out_dir) os.makedirs(out_dir)
GenerateXcodeProject(gn_path, args.root, out_dir, settings) GenerateXcodeProject(gn_path, args.root, out_dir, settings)
GenerateGnBuildRules(gn_path, args.root, out_dir, settings) GenerateGnBuildRules(gn_path, args.root, out_dir, settings)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(Main(sys.argv[1:])) sys.exit(Main(sys.argv[1:]))

View File

@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
"""Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used """Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used
for running tests without a device. for running tests without a device.
""" """
@ -30,105 +29,117 @@ import tempfile
import time import time
try: try:
from subprocess import DEVNULL from subprocess import DEVNULL
except ImportError: except ImportError:
DEVNULL = open(os.devnull, 'r+b') DEVNULL = open(os.devnull, 'r+b')
CRASHPAD_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)), CRASHPAD_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir) os.pardir)
def _Stop(pid_file): def _Stop(pid_file):
if os.path.isfile(pid_file): if os.path.isfile(pid_file):
with open(pid_file, 'rb') as f: with open(pid_file, 'rb') as f:
pid = int(f.read().strip()) pid = int(f.read().strip())
try: try:
os.kill(pid, signal.SIGTERM) os.kill(pid, signal.SIGTERM)
except: except:
print('Unable to kill pid %d, continuing' % pid, file=sys.stderr) print('Unable to kill pid %d, continuing' % pid, file=sys.stderr)
os.unlink(pid_file) os.unlink(pid_file)
def _CheckForTun(): def _CheckForTun():
"""Check for networking. TODO(scottmg): Currently, this is Linux-specific. """Check for networking. TODO(scottmg): Currently, this is Linux-specific.
""" """
returncode = subprocess.call( returncode = subprocess.call(
['tunctl', '-b', '-u', getpass.getuser(), '-t', 'qemu'], ['tunctl', '-b', '-u',
stdout=DEVNULL, stderr=DEVNULL) getpass.getuser(), '-t', 'qemu'],
if returncode != 0: stdout=DEVNULL,
print('To use QEMU with networking on Linux, configure TUN/TAP. See:', stderr=DEVNULL)
file=sys.stderr) if returncode != 0:
print(' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only', print('To use QEMU with networking on Linux, configure TUN/TAP. See:',
file=sys.stderr) file=sys.stderr)
return 2 print(
return 0 ' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
file=sys.stderr)
return 2
return 0
def _Start(pid_file): def _Start(pid_file):
tun_result = _CheckForTun() tun_result = _CheckForTun()
if tun_result != 0: if tun_result != 0:
return tun_result return tun_result
arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64' arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64'
fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia') fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia')
qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin', qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin',
'qemu-system-x86_64') 'qemu-system-x86_64')
kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64') kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64')
kernel_path = os.path.join(kernel_data_dir, 'zircon.bin') kernel_path = os.path.join(kernel_data_dir, 'zircon.bin')
initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin') initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin')
mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3)) mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3))
instance_name = 'crashpad_qemu_' + \ instance_name = (
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)) 'crashpad_qemu_' +
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)))
# These arguments are from the Fuchsia repo in zircon/scripts/run-zircon. # These arguments are from the Fuchsia repo in zircon/scripts/run-zircon.
popen = subprocess.Popen([
qemu_path,
'-m', '2048',
'-nographic',
'-kernel', kernel_path,
'-initrd', initrd_path,
'-smp', '4',
'-serial', 'stdio',
'-monitor', 'none',
'-machine', 'q35',
'-cpu', 'host,migratable=no',
'-enable-kvm',
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name,
], stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL)
with open(pid_file, 'wb') as f: # yapf: disable
f.write('%d\n' % popen.pid) popen = subprocess.Popen([
qemu_path,
'-m', '2048',
'-nographic',
'-kernel', kernel_path,
'-initrd', initrd_path,
'-smp', '4',
'-serial', 'stdio',
'-monitor', 'none',
'-machine', 'q35',
'-cpu', 'host,migratable=no',
'-enable-kvm',
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name,
],
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL)
# yapf: enable
for i in range(10): with open(pid_file, 'wb') as f:
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', 'netaddr') f.write('%d\n' % popen.pid)
if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull), stderr=open(os.devnull)) == 0:
break
time.sleep(.5)
else:
print('instance did not respond after start', file=sys.stderr)
return 1
return 0 for i in range(10):
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools',
'netaddr')
if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull),
stderr=open(os.devnull)) == 0:
break
time.sleep(.5)
else:
print('instance did not respond after start', file=sys.stderr)
return 1
return 0
def main(args): def main(args):
if len(args) != 1 or args[0] not in ('start', 'stop'): if len(args) != 1 or args[0] not in ('start', 'stop'):
print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr) print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr)
return 1 return 1
command = args[0] command = args[0]
pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid') pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid')
_Stop(pid_file) _Stop(pid_file)
if command == 'start': if command == 'start':
return _Start(pid_file) return _Start(pid_file)
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved. # Copyright 2017 The Crashpad Authors. All rights reserved.
# #
@ -24,26 +23,26 @@ import sys
def main(args): def main(args):
script_dir = os.path.dirname(__file__) script_dir = os.path.dirname(__file__)
crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir) crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir)
# Run from the Crashpad project root directory. # Run from the Crashpad project root directory.
os.chdir(crashpad_dir) os.chdir(crashpad_dir)
output_dir = os.path.join('out', 'doc', 'doxygen') output_dir = os.path.join('out', 'doc', 'doxygen')
if os.path.isdir(output_dir) and not os.path.islink(output_dir): if os.path.isdir(output_dir) and not os.path.islink(output_dir):
shutil.rmtree(output_dir) shutil.rmtree(output_dir)
elif os.path.exists(output_dir): elif os.path.exists(output_dir):
os.unlink(output_dir) os.unlink(output_dir)
os.makedirs(output_dir, 0o755) os.makedirs(output_dir, 0o755)
doxy_file = os.path.join('doc', 'support', 'crashpad.doxy') doxy_file = os.path.join('doc', 'support', 'crashpad.doxy')
subprocess.check_call(['doxygen', doxy_file]) subprocess.check_call(['doxygen', doxy_file])
return 0 return 0
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -12,8 +12,12 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
def CheckChangeOnUpload(input_api, output_api): def CheckChangeOnUpload(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)
def CheckChangeOnCommit(input_api, output_api): def CheckChangeOnCommit(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api) return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)

View File

@ -29,462 +29,452 @@ import win32con
import win32pipe import win32pipe
import winerror import winerror
g_temp_dirs = [] g_temp_dirs = []
g_had_failures = False g_had_failures = False
def MakeTempDir(): def MakeTempDir():
global g_temp_dirs global g_temp_dirs
new_dir = tempfile.mkdtemp() new_dir = tempfile.mkdtemp()
g_temp_dirs.append(new_dir) g_temp_dirs.append(new_dir)
return new_dir return new_dir
def CleanUpTempDirs(): def CleanUpTempDirs():
global g_temp_dirs global g_temp_dirs
for d in g_temp_dirs: for d in g_temp_dirs:
subprocess.call(['rmdir', '/s', '/q', d], shell=True) subprocess.call(['rmdir', '/s', '/q', d], shell=True)
def FindInstalledWindowsApplication(app_path): def FindInstalledWindowsApplication(app_path):
search_paths = [os.getenv('PROGRAMFILES(X86)'), search_paths = [
os.getenv('PROGRAMFILES'), os.getenv('PROGRAMFILES(X86)'),
os.getenv('PROGRAMW6432'), os.getenv('PROGRAMFILES'),
os.getenv('LOCALAPPDATA')] os.getenv('PROGRAMW6432'),
search_paths += os.getenv('PATH', '').split(os.pathsep) os.getenv('LOCALAPPDATA')
]
search_paths += os.getenv('PATH', '').split(os.pathsep)
for search_path in search_paths: for search_path in search_paths:
if not search_path: if not search_path:
continue continue
path = os.path.join(search_path, app_path) path = os.path.join(search_path, app_path)
if os.path.isfile(path): if os.path.isfile(path):
return path return path
return None
def GetCdbPath():
"""Search in some reasonable places to find cdb.exe. Searches x64 before x86
and newer versions before older versions.
"""
possible_paths = (
os.path.join('Windows Kits', '10', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '10', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
'Debugging Tools For Windows (x64)',
'Debugging Tools For Windows (x86)',
'Debugging Tools For Windows',)
for possible_path in possible_paths:
app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path)
if app_path:
return app_path
return None
def NamedPipeExistsAndReady(pipe_name):
"""Returns False if pipe_name does not exist. If pipe_name does exist, blocks
until the pipe is ready to service clients, and then returns True.
This is used as a drop-in replacement for os.path.exists() and os.access() to
test for the pipe's existence. Both of those calls tickle the pipe in a way
that appears to the server to be a client connecting, triggering error
messages when no data is received.
Although this function only needs to test pipe existence (waiting for
CreateNamedPipe()), it actually winds up testing pipe readiness
(waiting for ConnectNamedPipe()). This is unnecessary but harmless.
"""
try:
win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
except pywintypes.error as e:
if e[0] == winerror.ERROR_FILE_NOT_FOUND:
return False
raise
return True
def GetDumpFromProgram(
out_dir, pipe_name, executable_name, expect_exit_code, *args):
"""Initialize a crash database, and run |executable_name| connecting to a
crash handler. If pipe_name is set, crashpad_handler will be started first. If
pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by
crashpad_handler for further testing.
"""
test_database = MakeTempDir()
handler = None
try:
subprocess.check_call(
[os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database])
if pipe_name is not None:
handler = subprocess.Popen([
os.path.join(out_dir, 'crashpad_handler.com'),
'--pipe-name=' + pipe_name,
'--database=' + test_database
])
# Wait until the server is ready.
printed = False
while not NamedPipeExistsAndReady(pipe_name):
if not printed:
print('Waiting for crashpad_handler to be ready...')
printed = True
time.sleep(0.001)
command = [os.path.join(out_dir, executable_name), pipe_name] + list(args)
else:
command = ([os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'),
test_database] +
list(args))
print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command)
if exit_code != expect_exit_code:
raise subprocess.CalledProcessError(exit_code, executable_name)
out = subprocess.check_output([
os.path.join(out_dir, 'crashpad_database_util.exe'),
'--database=' + test_database,
'--show-pending-reports',
'--show-all-report-info',
])
for line in out.splitlines():
if line.strip().startswith('Path:'):
return line.partition(':')[2].strip()
finally:
if handler:
handler.kill()
def GetDumpFromCrashyProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_program.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
return GetDumpFromProgram(
out_dir, pipe_name, 'crash_other_program.exe', 0, *args)
def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_signal.exe',
STATUS_FATAL_APP_EXIT,
*args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
class CdbRun(object):
    """Runs cdb.exe on a dump, capturing the output of one cdb command.

    `Check()` searches for regex patterns in sequence allowing verification
    of expected output.
    """

    def __init__(self, cdb_path, dump_path, command):
        # Build a command line that loads the dump, runs the specified cdb
        # command, and then quits, capturing stdout along the way.
        cdb_args = [cdb_path, '-z', dump_path, '-c', command + ';q']
        self.out = subprocess.check_output(cdb_args)

    def Check(self, pattern, message, re_flags=0):
        """Searches the remaining output for |pattern|, reporting ok/FAILED
        tagged with |message|. On a match, output is consumed through the end
        of the match; on a miss, the failure is logged to stderr and the
        module-wide failure flag is set."""
        found = re.search(pattern, self.out, re_flags)
        if not found:
            print('-' * 80, file=sys.stderr)
            print('FAILED - %s' % message, file=sys.stderr)
            print('-' * 80, file=sys.stderr)
            print('did not match:\n %s' % pattern, file=sys.stderr)
            print('-' * 80, file=sys.stderr)
            print('remaining output was:\n %s' % self.out, file=sys.stderr)
            print('-' * 80, file=sys.stderr)
            sys.stderr.flush()
            global g_had_failures
            g_had_failures = True
            return
        # Matched. Consume up to end of match.
        self.out = self.out[found.end(0):]
        print('ok - %s' % message)
        sys.stdout.flush()

    def Find(self, pattern, re_flags=0):
        """Like Check(), but silent: returns the match object (consuming
        output through the end of the match) or None if there is no match."""
        found = re.search(pattern, self.out, re_flags)
        if found is None:
            return None
        # Matched. Consume up to end of match.
        self.out = self.out[found.end(0):]
        return found
def RunTests(cdb_path, def GetCdbPath():
dump_path, """Search in some reasonable places to find cdb.exe. Searches x64 before x86
start_handler_dump_path, and newer versions before older versions.
destroyed_dump_path, """
z7_dump_path, possible_paths = (
other_program_path, os.path.join('Windows Kits', '10', 'Debuggers', 'x64'),
other_program_no_exception_path, os.path.join('Windows Kits', '10', 'Debuggers', 'x86'),
sigabrt_main_path, os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'),
sigabrt_background_path, os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'),
pipe_name): os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'),
"""Runs various tests in sequence. Runs a new cdb instance on the dump for os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
each block of tests to reduce the chances that output from one command is 'Debugging Tools For Windows (x64)',
confused for output from another. 'Debugging Tools For Windows (x86)',
""" 'Debugging Tools For Windows',
out = CdbRun(cdb_path, dump_path, '.ecxr') )
out.Check('This dump file has an exception of interest stored in it', for possible_path in possible_paths:
'captured exception') app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path)
if app_path:
return app_path
return None
# When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
# "`anonymous namespace'" and instead gives the decorated form.
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location')
out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr') def NamedPipeExistsAndReady(pipe_name):
out.Check('This dump file has an exception of interest stored in it', """Returns False if pipe_name does not exist. If pipe_name does exist,
'captured exception (using StartHandler())') blocks until the pipe is ready to service clients, and then returns True.
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location (using StartHandler())')
out = CdbRun(cdb_path, dump_path, '!peb') This is used as a drop-in replacement for os.path.exists() and os.access()
out.Check(r'PEB at', 'found the PEB') to test for the pipe's existence. Both of those calls tickle the pipe in a
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') way that appears to the server to be a client connecting, triggering error
out.Check(r'Base TimeStamp Module', 'module list present') messages when no data is received.
pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct')
out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured',
re.IGNORECASE)
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters') Although this function only needs to test pipe existence (waiting for
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"', CreateNamedPipe()), it actually winds up testing pipe readiness (waiting for
'PEB->ProcessParameters.ImagePathName string captured') ConnectNamedPipe()). This is unnecessary but harmless.
out.Check(' DesktopInfo *: ' """
'_UNICODE_STRING "(?!--- memory read error at address ).*"', try:
'PEB->ProcessParameters.DesktopInfo string captured') win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
except pywintypes.error as e:
if e[0] == winerror.ERROR_FILE_NOT_FOUND:
return False
raise
return True
out = CdbRun(cdb_path, dump_path, '!teb')
out.Check(r'TEB at', 'found the TEB')
out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data')
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle') def GetDumpFromProgram(out_dir, pipe_name, executable_name, expect_exit_code,
out.Check('LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the ' *args):
'file specified.', '!gle gets last error') """Initialize a crash database, and run |executable_name| connecting to a
out.Check('LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The ' crash handler. If pipe_name is set, crashpad_handler will be started first.
'file %hs does not exist.', '!gle gets last ntstatus') If pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by
crashpad_handler for further testing.
"""
test_database = MakeTempDir()
handler = None
if False: try:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList. subprocess.check_call([
out = CdbRun(cdb_path, dump_path, '!locks') os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
out.Check(r'CritSec crashy_program!crashpad::`anonymous namespace\'::' '--database=' + test_database
r'g_test_critical_section', 'lock was captured') ])
if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked', 'lock debug info was captured, and is locked')
out = CdbRun(cdb_path, dump_path, '!handle') if pipe_name is not None:
out.Check(r'\d+ Handles', 'captured handles') handler = subprocess.Popen([
out.Check(r'Event\s+\d+', 'capture some event handles') os.path.join(out_dir, 'crashpad_handler.com'),
out.Check(r'File\s+\d+', 'capture some file handles') '--pipe-name=' + pipe_name, '--database=' + test_database
])
out = CdbRun(cdb_path, dump_path, 'lm') # Wait until the server is ready.
out.Check(r'Unloaded modules:', 'captured some unloaded modules') printed = False
out.Check(r'lz32\.dll', 'found expected unloaded module lz32') while not NamedPipeExistsAndReady(pipe_name):
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror') if not printed:
print('Waiting for crashpad_handler to be ready...')
printed = True
time.sleep(0.001)
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2') command = [os.path.join(out_dir, executable_name), pipe_name
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved') ] + list(args)
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE) else:
command = ([
os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'), test_database
] + list(args))
print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command)
if exit_code != expect_exit_code:
raise subprocess.CalledProcessError(exit_code, executable_name)
# Check that there is no stack trace in the self-destroyed process. Confirm out = subprocess.check_output([
# that the top is where we expect it (that's based only on IP), but subsequent os.path.join(out_dir, 'crashpad_database_util.exe'),
# stack entries will not be available. This confirms that we have a mostly '--database=' + test_database,
# valid dump, but that the stack was omitted. '--show-pending-reports',
out.Check(r'self_destroying_program!crashpad::`anonymous namespace\'::' '--show-all-report-info',
r'FreeOwnStackAndBreak.*\nquit:', ])
'at correct location, no additional stack entries') for line in out.splitlines():
if line.strip().startswith('Path:'):
return line.partition(':')[2].strip()
finally:
if handler:
handler.kill()
# Dump memory pointed to be EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended, and
# then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj:
thread = match_obj.group(1)
out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi')
out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
'data pointed to by registers captured')
# Move up one stack frame after jumping to the exception, and examine memory. def GetDumpFromCrashyProgram(out_dir, pipe_name):
out = CdbRun(cdb_path, dump_path, return GetDumpFromProgram(out_dir, pipe_name, 'crashy_program.exe',
'.ecxr; .f+; dd /c100 poi(offset_pointer)-20') win32con.EXCEPTION_ACCESS_VIOLATION)
out.Check(r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured')
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and then
# examine the memory at which it points. Both should have been saved.
out = CdbRun(cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8')
out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured')
out = CdbRun(cdb_path, dump_path, '.dumpdebug') def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
out.Check(r'type \?\?\? \(333333\), size 00001000', return GetDumpFromProgram(out_dir, pipe_name, 'crash_other_program.exe', 0,
'first user stream') *args)
out.Check(r'type \?\?\? \(222222\), size 00000080',
'second user stream')
if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm') def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_signal.exe',
STATUS_FATAL_APP_EXIT, *args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, pipe_name, 'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
class CdbRun(object):
"""Run cdb.exe passing it a cdb command and capturing the output.
`Check()` searches for regex patterns in sequence allowing verification of
expected output.
"""
def __init__(self, cdb_path, dump_path, command):
# Run a command line that loads the dump, runs the specified cdb
# command, and then quits, and capturing stdout.
self.out = subprocess.check_output(
[cdb_path, '-z', dump_path, '-c', command + ';q'])
def Check(self, pattern, message, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
print('ok - %s' % message)
sys.stdout.flush()
else:
print('-' * 80, file=sys.stderr)
print('FAILED - %s' % message, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('did not match:\n %s' % pattern, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('remaining output was:\n %s' % self.out, file=sys.stderr)
print('-' * 80, file=sys.stderr)
sys.stderr.flush()
global g_had_failures
g_had_failures = True
def Find(self, pattern, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
return match_obj
return None
def RunTests(cdb_path, dump_path, start_handler_dump_path, destroyed_dump_path,
z7_dump_path, other_program_path, other_program_no_exception_path,
sigabrt_main_path, sigabrt_background_path, pipe_name):
"""Runs various tests in sequence. Runs a new cdb instance on the dump for
each block of tests to reduce the chances that output from one command is
confused for output from another.
"""
out = CdbRun(cdb_path, dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it', out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module') 'captured exception')
# Older versions of cdb display relative to exports for /Z7 modules, newer
# ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
'expected non-pdb symbol format')
out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~') # When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
out.Check('Unknown exception - code deadbea7', # "`anonymous namespace'" and instead gives the decorated form.
'other program dump exception code') out.Check(
out.Check('!Sleep', 'other program reasonable location') 'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
out.Check("hanging_program!`anonymous namespace'::Thread1", 'SomeCrashyFunction', 'exception at correct location')
'other program dump right thread')
count = 0 out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr')
while True: out.Check('This dump file has an exception of interest stored in it',
match_obj = out.Find(r'Id.*Suspend: (\d+) ') 'captured exception (using StartHandler())')
out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location (using StartHandler())')
out = CdbRun(cdb_path, dump_path, '!peb')
out.Check(r'PEB at', 'found the PEB')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module',
'module list present')
pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct')
out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured',
re.IGNORECASE)
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters')
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"',
'PEB->ProcessParameters.ImagePathName string captured')
out.Check(
' DesktopInfo *: '
'_UNICODE_STRING "(?!--- memory read error at address ).*"',
'PEB->ProcessParameters.DesktopInfo string captured')
out = CdbRun(cdb_path, dump_path, '!teb')
out.Check(r'TEB at', 'found the TEB')
out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data')
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle')
out.Check(
'LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
'file specified.', '!gle gets last error')
out.Check(
'LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
'file %hs does not exist.', '!gle gets last ntstatus')
if False:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList.
out = CdbRun(cdb_path, dump_path, '!locks')
out.Check(
r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
r'g_test_critical_section', 'lock was captured')
if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked',
'lock debug info was captured, and is locked')
out = CdbRun(cdb_path, dump_path, '!handle')
out.Check(r'\d+ Handles', 'captured handles')
out.Check(r'Event\s+\d+', 'capture some event handles')
out.Check(r'File\s+\d+', 'capture some file handles')
out = CdbRun(cdb_path, dump_path, 'lm')
out.Check(r'Unloaded modules:', 'captured some unloaded modules')
out.Check(r'lz32\.dll', 'found expected unloaded module lz32')
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror')
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE)
# Check that there is no stack trace in the self-destroyed process. Confirm
# that the top is where we expect it (that's based only on IP), but
# subsequent stack entries will not be available. This confirms that we have
# a mostly valid dump, but that the stack was omitted.
out.Check(
r'self_destroying_program!crashpad::`anonymous namespace\'::'
r'FreeOwnStackAndBreak.*\nquit:',
'at correct location, no additional stack entries')
# Dump memory pointed to be EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended,
# and then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj: if match_obj:
if match_obj.group(1) != '0': thread = match_obj.group(1)
out.Check(r'FAILED', 'all suspend counts should be 0') out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi')
else: out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
count += 1 'data pointed to by registers captured')
else:
break
assert count > 2
out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k') # Move up one stack frame after jumping to the exception, and examine
out.Check('Unknown exception - code 0cca11ed', # memory.
'other program with no exception given') out = CdbRun(cdb_path, dump_path,
out.Check('!RaiseException', 'other program in RaiseException()') '.ecxr; .f+; dd /c100 poi(offset_pointer)-20')
out.Check(
r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured')
out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr') # Attempt to retrieve the value of g_extra_memory_pointer (by name), and
out.Check('code 40000015', 'got sigabrt signal') # then examine the memory at which it points. Both should have been saved.
out.Check('::HandleAbortSignal', ' stack in expected location') out = CdbRun(
cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8')
out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured')
out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr') out = CdbRun(cdb_path, dump_path, '.dumpdebug')
out.Check('code 40000015', 'got sigabrt signal from background thread') out.Check(r'type \?\?\? \(333333\), size 00001000', 'first user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080', 'second user stream')
if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm')
out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module')
# Older versions of cdb display relative to exports for /Z7 modules,
# newer ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
'expected non-pdb symbol format')
out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~')
out.Check('Unknown exception - code deadbea7',
'other program dump exception code')
out.Check('!Sleep', 'other program reasonable location')
out.Check("hanging_program!`anonymous namespace'::Thread1",
'other program dump right thread')
count = 0
while True:
match_obj = out.Find(r'Id.*Suspend: (\d+) ')
if match_obj:
if match_obj.group(1) != '0':
out.Check(r'FAILED', 'all suspend counts should be 0')
else:
count += 1
else:
break
assert count > 2
out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k')
out.Check('Unknown exception - code 0cca11ed',
'other program with no exception given')
out.Check('!RaiseException', 'other program in RaiseException()')
out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal')
out.Check('::HandleAbortSignal', ' stack in expected location')
out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal from background thread')
def main(args): def main(args):
try: try:
if len(args) != 1: if len(args) != 1:
print('must supply binary dir', file=sys.stderr) print('must supply binary dir', file=sys.stderr)
return 1 return 1
cdb_path = GetCdbPath() cdb_path = GetCdbPath()
if not cdb_path: if not cdb_path:
print('could not find cdb', file=sys.stderr) print('could not find cdb', file=sys.stderr)
return 1 return 1
# Make sure we can download Windows symbols. # Make sure we can download Windows symbols.
if not os.environ.get('_NT_SYMBOL_PATH'): if not os.environ.get('_NT_SYMBOL_PATH'):
symbol_dir = MakeTempDir() symbol_dir = MakeTempDir()
protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http' protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http'
os.environ['_NT_SYMBOL_PATH'] = ( os.environ['_NT_SYMBOL_PATH'] = (
'SRV*' + symbol_dir + '*' + 'SRV*' + symbol_dir + '*' + protocol +
protocol + '://msdl.microsoft.com/download/symbols') '://msdl.microsoft.com/download/symbols')
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % ( pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (os.getpid(),
os.getpid(), str(random.getrandbits(64))) str(random.getrandbits(64)))
crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name) crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name)
if not crashy_dump_path: if not crashy_dump_path:
return 1 return 1
start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None) start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None)
if not start_handler_dump_path: if not start_handler_dump_path:
return 1 return 1
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(args[0], pipe_name) destroyed_dump_path = GetDumpFromSelfDestroyingProgram(
if not destroyed_dump_path: args[0], pipe_name)
return 1 if not destroyed_dump_path:
return 1
z7_dump_path = None z7_dump_path = None
if not args[0].endswith('_x64'): if not args[0].endswith('_x64'):
z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name) z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name)
if not z7_dump_path: if not z7_dump_path:
return 1 return 1
other_program_path = GetDumpFromOtherProgram(args[0], pipe_name) other_program_path = GetDumpFromOtherProgram(args[0], pipe_name)
if not other_program_path: if not other_program_path:
return 1 return 1
other_program_no_exception_path = GetDumpFromOtherProgram( other_program_no_exception_path = GetDumpFromOtherProgram(
args[0], pipe_name, 'noexception') args[0], pipe_name, 'noexception')
if not other_program_no_exception_path: if not other_program_no_exception_path:
return 1 return 1
sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main') sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main')
if not sigabrt_main_path: if not sigabrt_main_path:
return 1 return 1
sigabrt_background_path = GetDumpFromSignal( sigabrt_background_path = GetDumpFromSignal(args[0], pipe_name,
args[0], pipe_name, 'background') 'background')
if not sigabrt_background_path: if not sigabrt_background_path:
return 1 return 1
RunTests(cdb_path, RunTests(cdb_path, crashy_dump_path, start_handler_dump_path,
crashy_dump_path, destroyed_dump_path, z7_dump_path, other_program_path,
start_handler_dump_path, other_program_no_exception_path, sigabrt_main_path,
destroyed_dump_path, sigabrt_background_path, pipe_name)
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name)
return 1 if g_had_failures else 0 return 1 if g_had_failures else 0
finally: finally:
CleanUpTempDirs() CleanUpTempDirs()
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

3
third_party/fuchsia/runner.py vendored Normal file → Executable file
View File

@ -1,3 +1,5 @@
#!/usr/bin/env python
# Copyright 2018 The Crashpad Authors. All rights reserved. # Copyright 2018 The Crashpad Authors. All rights reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
@ -14,4 +16,5 @@
import os import os
import sys import sys
os.execv(sys.argv[1], sys.argv[1:]) os.execv(sys.argv[1], sys.argv[1:])

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved. # Copyright 2019 The Crashpad Authors. All rights reserved.
# #
@ -20,6 +19,7 @@ import sys
import mig_fix import mig_fix
import mig_gen import mig_gen
def main(args): def main(args):
parsed = mig_gen.parse_args(args) parsed = mig_gen.parse_args(args)
@ -30,5 +30,6 @@ def main(args):
parsed.migcom_path, parsed.arch) parsed.migcom_path, parsed.arch)
mig_fix.fix_interface(interface) mig_fix.fix_interface(interface)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -22,18 +22,18 @@ import sys
from mig_gen import MigInterface from mig_gen import MigInterface
def _fix_user_implementation(implementation, fixed_implementation, header, def _fix_user_implementation(implementation, fixed_implementation, header,
fixed_header): fixed_header):
"""Rewrites a MIG-generated user implementation (.c) file. """Rewrites a MIG-generated user implementation (.c) file.
Rewrites the file at |implementation| by adding Rewrites the file at |implementation| by adding __attribute__((unused)) to
__attribute__((unused)) to the definition of any structure typedefed the definition of any structure typedefed as __Reply by searching for the
as __Reply by searching for the pattern unique to those structure pattern unique to those structure definitions. These structures are in fact
definitions. These structures are in fact unused in the user unused in the user implementation file, and this will trigger a
implementation file, and this will trigger a -Wunused-local-typedefs -Wunused-local-typedefs warning in gcc unless removed or marked with the
warning in gcc unless removed or marked with the unused attribute. unused attribute. Also changes header references to point to the new
Also changes header references to point to the new header filename, if header filename, if changed.
changed.
If |fixed_implementation| is None, overwrites the original; otherwise, puts If |fixed_implementation| is None, overwrites the original; otherwise, puts
the result in the file at |fixed_implementation|. the result in the file at |fixed_implementation|.
@ -59,6 +59,7 @@ def _fix_user_implementation(implementation, fixed_implementation, header,
file.write(contents) file.write(contents)
file.close() file.close()
def _fix_server_implementation(implementation, fixed_implementation, header, def _fix_server_implementation(implementation, fixed_implementation, header,
fixed_header): fixed_header):
"""Rewrites a MIG-generated server implementation (.c) file. """Rewrites a MIG-generated server implementation (.c) file.
@ -79,24 +80,25 @@ def _fix_server_implementation(implementation, fixed_implementation, header,
contents = file.read() contents = file.read()
# Find interesting declarations. # Find interesting declarations.
declaration_pattern = \ declaration_pattern = re.compile(
re.compile('^mig_internal (kern_return_t __MIG_check__.*)$', '^mig_internal (kern_return_t __MIG_check__.*)$', re.MULTILINE)
re.MULTILINE)
declarations = declaration_pattern.findall(contents) declarations = declaration_pattern.findall(contents)
# Remove “__attribute__((__unused__))” from the declarations, and call them # Remove “__attribute__((__unused__))” from the declarations, and call them
# “mig_external” or “extern” depending on whether “mig_external” is defined. # “mig_external” or “extern” depending on whether “mig_external” is defined.
attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ') attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ')
declarations = ['''\ declarations = [
'''\
#ifdef mig_external #ifdef mig_external
mig_external mig_external
#else #else
extern extern
#endif #endif
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations] ''' + attribute_pattern.sub('', x) + ';\n' for x in declarations
]
# Rewrite the declarations in this file as “mig_external”. # Rewrite the declarations in this file as “mig_external”.
contents = declaration_pattern.sub(r'mig_external \1', contents); contents = declaration_pattern.sub(r'mig_external \1', contents)
# Crashpad never implements the mach_msg_server() MIG callouts. To avoid # Crashpad never implements the mach_msg_server() MIG callouts. To avoid
# needing to provide stub implementations, set KERN_FAILURE as the RetCode # needing to provide stub implementations, set KERN_FAILURE as the RetCode
@ -125,6 +127,7 @@ extern
file.close() file.close()
return declarations return declarations
def _fix_header(header, fixed_header, declarations=[]): def _fix_header(header, fixed_header, declarations=[]):
"""Rewrites a MIG-generated header (.h) file. """Rewrites a MIG-generated header (.h) file.
@ -161,6 +164,7 @@ extern "C" {
file.write(contents) file.write(contents)
file.close() file.close()
def fix_interface(interface, fixed_interface=None): def fix_interface(interface, fixed_interface=None):
if fixed_interface is None: if fixed_interface is None:
fixed_interface = MigInterface(None, None, None, None) fixed_interface = MigInterface(None, None, None, None)
@ -175,6 +179,7 @@ def fix_interface(interface, fixed_interface=None):
_fix_header(interface.server_h, fixed_interface.server_h, _fix_header(interface.server_h, fixed_interface.server_h,
server_declarations) server_declarations)
def main(args): def main(args):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('user_c') parser.add_argument('user_c')
@ -187,11 +192,12 @@ def main(args):
parser.add_argument('--fixed_server_h', default=None) parser.add_argument('--fixed_server_h', default=None)
parsed = parser.parse_args(args) parsed = parser.parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c, interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.user_h, parsed.server_h) parsed.server_h)
fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c, fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c,
parsed.fixed_user_h, parsed.fixed_server_h) parsed.fixed_user_h, parsed.fixed_server_h)
fix_interface(interface, fixed_interface) fix_interface(interface, fixed_interface)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -1,5 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved. # Copyright 2019 The Crashpad Authors. All rights reserved.
# #
@ -21,19 +20,30 @@ import os
import subprocess import subprocess
import sys import sys
MigInterface = collections.namedtuple('MigInterface', ['user_c', 'server_c', MigInterface = collections.namedtuple(
'user_h', 'server_h']) 'MigInterface', ['user_c', 'server_c', 'user_h', 'server_h'])
def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
mig_path=None, migcom_path=None, arch=None): def generate_interface(defs,
interface,
includes=[],
sdk=None,
clang_path=None,
mig_path=None,
migcom_path=None,
arch=None):
if mig_path is None: if mig_path is None:
mig_path = 'mig' mig_path = 'mig'
command = [mig_path,
'-user', interface.user_c, # yapf: disable
'-server', interface.server_c, command = [
'-header', interface.user_h, mig_path,
'-sheader', interface.server_h, '-user', interface.user_c,
] '-server', interface.server_c,
'-header', interface.user_h,
'-sheader', interface.server_h,
]
# yapf: enable
if clang_path is not None: if clang_path is not None:
os.environ['MIGCC'] = clang_path os.environ['MIGCC'] = clang_path
@ -48,6 +58,7 @@ def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
command.append(defs) command.append(defs)
subprocess.check_call(command) subprocess.check_call(command)
def parse_args(args): def parse_args(args):
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--clang-path', help='Path to Clang') parser.add_argument('--clang-path', help='Path to Clang')
@ -66,13 +77,15 @@ def parse_args(args):
parser.add_argument('server_h') parser.add_argument('server_h')
return parser.parse_args(args) return parser.parse_args(args)
def main(args): def main(args):
parsed = parse_args(args) parsed = parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c, interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.user_h, parsed.server_h) parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include, generate_interface(parsed.defs, interface, parsed.include, parsed.sdk,
parsed.sdk, parsed.clang_path, parsed.mig_path, parsed.clang_path, parsed.mig_path, parsed.migcom_path,
parsed.migcom_path, parsed.arch) parsed.arch)
if __name__ == '__main__': if __name__ == '__main__':
sys.exit(main(sys.argv[1:])) sys.exit(main(sys.argv[1:]))

View File

@ -23,25 +23,28 @@ key = os.path.join(testdata, 'crashpad_util_test_key.pem')
cert = os.path.join(testdata, 'crashpad_util_test_cert.pem') cert = os.path.join(testdata, 'crashpad_util_test_cert.pem')
with open(cert, 'w') as cert_file, open(key, 'w') as key_file: with open(cert, 'w') as cert_file, open(key, 'w') as key_file:
MESSAGE = 'DO NOT EDIT: This file was auto-generated by ' + __file__ + '\n\n' MESSAGE = ('DO NOT EDIT: This file was auto-generated by ' + __file__ +
cert_file.write(MESSAGE) '\n\n')
key_file.write(MESSAGE) cert_file.write(MESSAGE)
key_file.write(MESSAGE)
proc = subprocess.Popen( proc = subprocess.Popen([
['openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', 'openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', '-days',
'-days', '3650', '-newkey', 'rsa:2048', '-keyout', '-'], '3650', '-newkey', 'rsa:2048', '-keyout', '-'
stderr=open(os.devnull, 'w'), stdout=subprocess.PIPE) ],
stderr=open(os.devnull, 'w'),
stdout=subprocess.PIPE)
contents = proc.communicate()[0] contents = proc.communicate()[0]
dest = sys.stderr dest = sys.stderr
for line in contents.splitlines(True): for line in contents.splitlines(True):
if line.startswith("-----BEGIN PRIVATE KEY-----"): if line.startswith("-----BEGIN PRIVATE KEY-----"):
dest = key_file dest = key_file
elif line.startswith("-----BEGIN CERTIFICATE-----"): elif line.startswith("-----BEGIN CERTIFICATE-----"):
dest = cert_file dest = cert_file
elif line.startswith("-----END"): elif line.startswith("-----END"):
dest.write(line) dest.write(line)
dest = sys.stderr dest = sys.stderr
continue continue
dest.write(line) dest.write(line)