Add .style.yapf and reformat according to yapf, using “google” style

% yapf --in-place $(git ls-files **/*.py)
% yapf --version
yapf 0.30.0

Note that this is not using the “chromium” yapf style because Chromium
is moving to PEP-8.
https://groups.google.com/a/chromium.org/d/topic/chromium-dev/RcJgJdkNIdg
yapf 0.30.0 no longer recognizes “chromium” as a style option.
22ef70f3c4
Since this is a mass reformatting, it might as well move things all the
way into the future all at once.

This uses the “google” style, which is a superset of “pep8”.

Change-Id: Ifa37371079ea1859e4afe8e31d2eef2cfd7af384
Reviewed-on: https://chromium-review.googlesource.com/c/crashpad/crashpad/+/2165637
Commit-Queue: Mark Mentovai <mark@chromium.org>
Reviewed-by: Scott Graham <scottmg@chromium.org>
Author: Mark Mentovai <mark@chromium.org>, 2020-04-27 09:43:35 -04:00 (committed by Commit Bot)
Parent: 29b1688c11
Commit: a5a1c3b07f
16 changed files with 1687 additions and 1615 deletions
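
To make the effect of the reformat concrete, here is a condensed before/after sketch adapted from one of the diffs below. It is illustrative only and not itself part of the change.

# Before the reformat (2-space block indents, 4-space continuation indents):
#
#   parser.add_argument(
#       '--add-config', dest='configurations', default=[], action='append',
#       help='configuration to add to the Xcode project')
#
# After yapf with the "google" style (4-space block indents; when a call does
# not fit on one line, one argument per line aligned with the opening
# delimiter, or a 4-space hanging indent):

import argparse

parser = argparse.ArgumentParser(
    description='Convert GN Xcode projects for iOS.')
parser.add_argument('--add-config',
                    dest='configurations',
                    default=[],
                    action='append',
                    help='configuration to add to the Xcode project')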

.style.yapf (new file)

@@ -0,0 +1,16 @@
# Copyright 2020 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
[style]
based_on_style = google
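
As an aside (not part of this commit), the same configuration can also be exercised through yapf's Python API; this is a minimal sketch assuming yapf 0.30.0 is installed.

# Minimal sketch, assuming yapf 0.30.0. style_config accepts a style name
# ('google', 'pep8') or a path to a .style.yapf file like the one above.
from yapf.yapflib.yapf_api import FormatCode

source = 'def f( a,b ):\n  return a+b\n'
formatted, changed = FormatCode(source, style_config='google')
print(formatted)  # expected roughly: def f(a, b):\n    return a + b
print(changed)    # True: the input was not already in the target style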


@@ -19,83 +19,84 @@ import sys
def ChooseDependencyPath(local_path, external_path):
"""Chooses between a dependency located at local path and an external path.
"""Chooses between a dependency located at local path and an external path.
The local path, used in standalone builds, is preferred. If it is not present
but the external path is, the external path will be used. If neither path is
present, the local path will be used, so that error messages uniformly refer
to the local path.
The local path, used in standalone builds, is preferred. If it is not
present but the external path is, the external path will be used. If neither
path is present, the local path will be used, so that error messages
uniformly refer to the local path.
Args:
local_path: The preferred local path to use for a standalone build.
external_path: The external path to fall back to.
Args:
local_path: The preferred local path to use for a standalone build.
external_path: The external path to fall back to.
Returns:
A 2-tuple. The first element is None or 'external', depending on whether
local_path or external_path was chosen. The second element is the chosen
path.
"""
if os.path.exists(local_path) or not os.path.exists(external_path):
return (None, local_path)
return ('external', external_path)
Returns:
A 2-tuple. The first element is None or 'external', depending on whether
local_path or external_path was chosen. The second element is the chosen
path.
"""
if os.path.exists(local_path) or not os.path.exists(external_path):
return (None, local_path)
return ('external', external_path)
script_dir = os.path.dirname(__file__)
crashpad_dir = (os.path.dirname(script_dir) if script_dir not in ('', os.curdir)
else os.pardir)
crashpad_dir = (os.path.dirname(script_dir)
if script_dir not in ('', os.curdir) else os.pardir)
sys.path.insert(0,
ChooseDependencyPath(os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp',
'pylib'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp',
'pylib'))[1])
sys.path.insert(
0,
ChooseDependencyPath(
os.path.join(crashpad_dir, 'third_party', 'gyp', 'gyp', 'pylib'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'gyp', 'pylib'))[1])
import gyp
def main(args):
if 'GYP_GENERATORS' not in os.environ:
os.environ['GYP_GENERATORS'] = 'ninja'
if 'GYP_GENERATORS' not in os.environ:
os.environ['GYP_GENERATORS'] = 'ninja'
crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir
crashpad_dir_or_dot = crashpad_dir if crashpad_dir is not '' else os.curdir
(dependencies, mini_chromium_common_gypi) = (ChooseDependencyPath(
os.path.join(crashpad_dir, 'third_party', 'mini_chromium',
'mini_chromium', 'build', 'common.gypi'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium',
'mini_chromium', 'build', 'common.gypi')))
if dependencies is not None:
args.extend(['-D', 'crashpad_dependencies=%s' % dependencies])
args.extend(['--include', mini_chromium_common_gypi])
args.extend(['--depth', crashpad_dir_or_dot])
args.append(os.path.join(crashpad_dir, 'crashpad.gyp'))
(dependencies, mini_chromium_common_gypi) = (ChooseDependencyPath(
os.path.join(crashpad_dir, 'third_party', 'mini_chromium',
'mini_chromium', 'build', 'common.gypi'),
os.path.join(crashpad_dir, os.pardir, os.pardir, 'mini_chromium',
'mini_chromium', 'build', 'common.gypi')))
if dependencies is not None:
args.extend(['-D', 'crashpad_dependencies=%s' % dependencies])
args.extend(['--include', mini_chromium_common_gypi])
args.extend(['--depth', crashpad_dir_or_dot])
args.append(os.path.join(crashpad_dir, 'crashpad.gyp'))
result = gyp.main(args)
if result != 0:
return result
if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may be
# set during a cross build, such as a cross build for Android.
has_target_arch = False
for arg_index in range(0, len(args)):
arg = args[arg_index]
if (arg.startswith('-Dtarget_arch=') or
(arg == '-D' and arg_index + 1 < len(args) and
args[arg_index + 1].startswith('target_arch='))):
has_target_arch = True
break
if not has_target_arch:
# Also generate the x86 build.
result = gyp.main(args + ['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0:
result = gyp.main(args)
if result != 0:
return result
result = gyp.main(
args + ['-D', 'target_arch=ia32', '-G', 'config=Release'])
return result
if sys.platform == 'win32':
# Check to make sure that no target_arch was specified. target_arch may
# be set during a cross build, such as a cross build for Android.
has_target_arch = False
for arg_index in range(0, len(args)):
arg = args[arg_index]
if (arg.startswith('-Dtarget_arch=') or
(arg == '-D' and arg_index + 1 < len(args) and
args[arg_index + 1].startswith('target_arch='))):
has_target_arch = True
break
if not has_target_arch:
# Also generate the x86 build.
result = gyp.main(args +
['-D', 'target_arch=ia32', '-G', 'config=Debug'])
if result != 0:
return result
result = gyp.main(
args + ['-D', 'target_arch=ia32', '-G', 'config=Release'])
return result
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(main(sys.argv[1:]))


@@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved.
#
@@ -25,52 +24,46 @@ import sys
def main(args):
parser = argparse.ArgumentParser(
description='Set up an Android cross build',
epilog='Additional arguments will be passed to gyp_crashpad.py.')
parser.add_argument('--arch', required=True, help='Target architecture')
parser.add_argument('--api-level', required=True, help='Target API level')
parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain')
(parsed, extra_command_line_args) = parser.parse_known_args(args)
parser = argparse.ArgumentParser(
description='Set up an Android cross build',
epilog='Additional arguments will be passed to gyp_crashpad.py.')
parser.add_argument('--arch', required=True, help='Target architecture')
parser.add_argument('--api-level', required=True, help='Target API level')
parser.add_argument('--ndk', required=True, help='Standalone NDK toolchain')
(parsed, extra_command_line_args) = parser.parse_known_args(args)
ndk_bin_dir = os.path.join(parsed.ndk,
'toolchains',
'llvm',
'prebuilt',
'linux-x86_64',
'bin')
if not os.path.exists(ndk_bin_dir):
parser.error("missing toolchain")
ndk_bin_dir = os.path.join(parsed.ndk, 'toolchains', 'llvm', 'prebuilt',
'linux-x86_64', 'bin')
if not os.path.exists(ndk_bin_dir):
parser.error("missing toolchain")
ARCH_TO_ARCH_TRIPLET = {
'arm': 'armv7a-linux-androideabi',
'arm64': 'aarch64-linux-android',
'ia32': 'i686-linux-android',
'x64': 'x86_64-linux-android',
}
ARCH_TO_ARCH_TRIPLET = {
'arm': 'armv7a-linux-androideabi',
'arm64': 'aarch64-linux-android',
'ia32': 'i686-linux-android',
'x64': 'x86_64-linux-android',
}
clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level
os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang++')
clang_prefix = ARCH_TO_ARCH_TRIPLET[parsed.arch] + parsed.api_level
os.environ['CC_target'] = os.path.join(ndk_bin_dir, clang_prefix + '-clang')
os.environ['CXX_target'] = os.path.join(ndk_bin_dir,
clang_prefix + '-clang++')
extra_args = ['-D', 'android_api_level=' + parsed.api_level]
extra_args = ['-D', 'android_api_level=' + parsed.api_level]
# ARM only includes 'v7a' in the tool prefix for clang
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm'
else ARCH_TO_ARCH_TRIPLET[parsed.arch])
# ARM only includes 'v7a' in the tool prefix for clang
tool_prefix = ('arm-linux-androideabi' if parsed.arch == 'arm' else
ARCH_TO_ARCH_TRIPLET[parsed.arch])
for tool in ('ar', 'nm', 'readelf'):
os.environ['%s_target' % tool.upper()] = (
os.path.join(ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
for tool in ('ar', 'nm', 'readelf'):
os.environ['%s_target' % tool.upper()] = (os.path.join(
ndk_bin_dir, '%s-%s' % (tool_prefix, tool)))
return gyp_crashpad.main(
['-D', 'OS=android',
'-D', 'target_arch=%s' % parsed.arch,
'-D', 'clang=1',
'-f', 'ninja-android'] +
extra_args +
extra_command_line_args)
return gyp_crashpad.main([
'-D', 'OS=android', '-D',
'target_arch=%s' % parsed.arch, '-D', 'clang=1', '-f', 'ninja-android'
] + extra_args + extra_command_line_args)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(main(sys.argv[1:]))


@@ -23,7 +23,6 @@ import subprocess
import sys
import urllib2
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Sysroot revision from:
@@ -33,42 +32,43 @@ PATH = 'chrome-linux-sysroot/toolchain'
REVISION = '3c248ba4290a5ad07085b7af07e6785bf1ae5b66'
FILENAME = 'debian_stretch_amd64_sysroot.tar.xz'
def main():
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
url = '%s/%s/%s/%s' % (SERVER, PATH, REVISION, FILENAME)
sysroot = os.path.join(SCRIPT_DIR, os.pardir,
'third_party', 'linux', 'sysroot')
sysroot = os.path.join(SCRIPT_DIR, os.pardir, 'third_party', 'linux',
'sysroot')
stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp):
with open(stamp) as s:
if s.read() == url:
return
stamp = os.path.join(sysroot, '.stamp')
if os.path.exists(stamp):
with open(stamp) as s:
if s.read() == url:
return
print 'Installing Debian root image from %s' % url
print 'Installing Debian root image from %s' % url
if os.path.isdir(sysroot):
shutil.rmtree(sysroot)
os.mkdir(sysroot)
tarball = os.path.join(sysroot, FILENAME)
print 'Downloading %s' % url
if os.path.isdir(sysroot):
shutil.rmtree(sysroot)
os.mkdir(sysroot)
tarball = os.path.join(sysroot, FILENAME)
print 'Downloading %s' % url
for _ in range(3):
response = urllib2.urlopen(url)
with open(tarball, 'wb') as f:
f.write(response.read())
break
else:
raise Exception('Failed to download %s' % url)
for _ in range(3):
response = urllib2.urlopen(url)
with open(tarball, 'wb') as f:
f.write(response.read())
break
else:
raise Exception('Failed to download %s' % url)
subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
os.remove(tarball)
os.remove(tarball)
with open(stamp, 'w') as s:
s.write(url)
with open(stamp, 'w') as s:
s.write(url)
if __name__ == '__main__':
main()
sys.exit(0)
main()
sys.exit(0)


@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert GN Xcode projects to platform and configuration independent targets.
GN generates Xcode projects that build one configuration only. However, typical
@@ -40,234 +39,245 @@ import tempfile
class XcodeProject(object):
def __init__(self, objects, counter = 0):
self.objects = objects
self.counter = 0
def __init__(self, objects, counter=0):
self.objects = objects
self.counter = 0
def AddObject(self, parent_name, obj):
while True:
self.counter += 1
str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter)
new_id = hashlib.sha1(str_id).hexdigest()[:24].upper()
def AddObject(self, parent_name, obj):
while True:
self.counter += 1
str_id = "%s %s %d" % (parent_name, obj['isa'], self.counter)
new_id = hashlib.sha1(str_id).hexdigest()[:24].upper()
# Make sure ID is unique. It's possible there could be an id conflict
# since this is run after GN runs.
if new_id not in self.objects:
self.objects[new_id] = obj
return new_id
# Make sure ID is unique. It's possible there could be an id
# conflict since this is run after GN runs.
if new_id not in self.objects:
self.objects[new_id] = obj
return new_id
def CopyFileIfChanged(source_path, target_path):
"""Copy |source_path| to |target_path| is different."""
target_dir = os.path.dirname(target_path)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if not os.path.exists(target_path) or \
not filecmp.cmp(source_path, target_path):
shutil.copyfile(source_path, target_path)
"""Copy |source_path| to |target_path| is different."""
target_dir = os.path.dirname(target_path)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if (not os.path.exists(target_path) or
not filecmp.cmp(source_path, target_path)):
shutil.copyfile(source_path, target_path)
def LoadXcodeProjectAsJSON(path):
"""Return Xcode project at |path| as a JSON string."""
return subprocess.check_output([
'plutil', '-convert', 'json', '-o', '-', path])
"""Return Xcode project at |path| as a JSON string."""
return subprocess.check_output(
['plutil', '-convert', 'json', '-o', '-', path])
def WriteXcodeProject(output_path, json_string):
"""Save Xcode project to |output_path| as XML."""
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(json_string)
temp_file.flush()
subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name])
CopyFileIfChanged(temp_file.name, output_path)
"""Save Xcode project to |output_path| as XML."""
with tempfile.NamedTemporaryFile() as temp_file:
temp_file.write(json_string)
temp_file.flush()
subprocess.check_call(['plutil', '-convert', 'xml1', temp_file.name])
CopyFileIfChanged(temp_file.name, output_path)
def UpdateProductsProject(file_input, file_output, configurations, root_dir):
"""Update Xcode project to support multiple configurations.
"""Update Xcode project to support multiple configurations.
Args:
file_input: path to the input Xcode project
file_output: path to the output file
configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at
least one value.
"""
json_data = json.loads(LoadXcodeProjectAsJSON(file_input))
project = XcodeProject(json_data['objects'])
Args:
file_input: path to the input Xcode project
file_output: path to the output file
configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at
least one value.
"""
json_data = json.loads(LoadXcodeProjectAsJSON(file_input))
project = XcodeProject(json_data['objects'])
objects_to_remove = []
for value in project.objects.values():
isa = value['isa']
objects_to_remove = []
for value in project.objects.values():
isa = value['isa']
# Teach build shell script to look for the configuration and platform.
if isa == 'PBXShellScriptBuildPhase':
value['shellScript'] = value['shellScript'].replace(
'ninja -C .',
'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"')
# Teach build shell script to look for the configuration and platform.
if isa == 'PBXShellScriptBuildPhase':
value['shellScript'] = value['shellScript'].replace(
'ninja -C .',
'ninja -C "../${CONFIGURATION}${EFFECTIVE_PLATFORM_NAME}"')
# Add new configuration, using the first one as default.
if isa == 'XCConfigurationList':
value['defaultConfigurationName'] = configurations[0]
objects_to_remove.extend(value['buildConfigurations'])
# Add new configuration, using the first one as default.
if isa == 'XCConfigurationList':
value['defaultConfigurationName'] = configurations[0]
objects_to_remove.extend(value['buildConfigurations'])
build_config_template = project.objects[value['buildConfigurations'][0]]
build_config_template['buildSettings']['CONFIGURATION_BUILD_DIR'] = \
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)'
build_config_template['buildSettings']['CODE_SIGN_IDENTITY'] = ''
build_config_template = project.objects[value['buildConfigurations']
[0]]
build_settings = build_config_template['buildSettings']
build_settings['CONFIGURATION_BUILD_DIR'] = (
'$(PROJECT_DIR)/../$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)')
build_settings['CODE_SIGN_IDENTITY'] = ''
value['buildConfigurations'] = []
for configuration in configurations:
new_build_config = copy.copy(build_config_template)
new_build_config['name'] = configuration
value['buildConfigurations'].append(
project.AddObject('products', new_build_config))
value['buildConfigurations'] = []
for configuration in configurations:
new_build_config = copy.copy(build_config_template)
new_build_config['name'] = configuration
value['buildConfigurations'].append(
project.AddObject('products', new_build_config))
for object_id in objects_to_remove:
del project.objects[object_id]
for object_id in objects_to_remove:
del project.objects[object_id]
AddMarkdownToProject(project, root_dir, json_data['rootObject'])
AddMarkdownToProject(project, root_dir, json_data['rootObject'])
objects = collections.OrderedDict(sorted(project.objects.iteritems()))
WriteXcodeProject(file_output, json.dumps(json_data))
objects = collections.OrderedDict(sorted(project.objects.iteritems()))
WriteXcodeProject(file_output, json.dumps(json_data))
def AddMarkdownToProject(project, root_dir, root_object):
list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md']
paths = subprocess.check_output(list_files_cmd).splitlines()
ios_internal_dir = os.path.join(root_dir, 'ios_internal')
if os.path.exists(ios_internal_dir):
list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md']
ios_paths = subprocess.check_output(list_files_cmd).splitlines()
paths.extend(["ios_internal/" + path for path in ios_paths])
for path in paths:
new_markdown_entry = {
"fileEncoding": "4",
"isa": "PBXFileReference",
"lastKnownFileType": "net.daringfireball.markdown",
"name": os.path.basename(path),
"path": path,
"sourceTree": "<group>"
}
new_markdown_entry_id = project.AddObject('sources', new_markdown_entry)
folder = GetFolderForPath(project, root_object, os.path.dirname(path))
folder['children'].append(new_markdown_entry_id)
list_files_cmd = ['git', '-C', root_dir, 'ls-files', '*.md']
paths = subprocess.check_output(list_files_cmd).splitlines()
ios_internal_dir = os.path.join(root_dir, 'ios_internal')
if os.path.exists(ios_internal_dir):
list_files_cmd = ['git', '-C', ios_internal_dir, 'ls-files', '*.md']
ios_paths = subprocess.check_output(list_files_cmd).splitlines()
paths.extend(["ios_internal/" + path for path in ios_paths])
for path in paths:
new_markdown_entry = {
"fileEncoding": "4",
"isa": "PBXFileReference",
"lastKnownFileType": "net.daringfireball.markdown",
"name": os.path.basename(path),
"path": path,
"sourceTree": "<group>"
}
new_markdown_entry_id = project.AddObject('sources', new_markdown_entry)
folder = GetFolderForPath(project, root_object, os.path.dirname(path))
folder['children'].append(new_markdown_entry_id)
def GetFolderForPath(project, rootObject, path):
objects = project.objects
# 'Sources' is always the first child of
# project->rootObject->mainGroup->children.
root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]]
if not path:
objects = project.objects
# 'Sources' is always the first child of
# project->rootObject->mainGroup->children.
root = objects[objects[objects[rootObject]['mainGroup']]['children'][0]]
if not path:
return root
for folder in path.split('/'):
children = root['children']
new_root = None
for child in children:
if (objects[child]['isa'] == 'PBXGroup' and
objects[child]['name'] == folder):
new_root = objects[child]
break
if not new_root:
# If the folder isn't found we could just cram it into the leaf
# existing folder, but that leads to folders with tons of README.md
# inside.
new_group = {
"children": [],
"isa": "PBXGroup",
"name": folder,
"sourceTree": "<group>"
}
new_group_id = project.AddObject('sources', new_group)
children.append(new_group_id)
new_root = objects[new_group_id]
root = new_root
return root
for folder in path.split('/'):
children = root['children']
new_root = None
for child in children:
if objects[child]['isa'] == 'PBXGroup' and \
objects[child]['name'] == folder:
new_root = objects[child]
break
if not new_root:
# If the folder isn't found we could just cram it into the leaf existing
# folder, but that leads to folders with tons of README.md inside.
new_group = {
"children": [
],
"isa": "PBXGroup",
"name": folder,
"sourceTree": "<group>"
}
new_group_id = project.AddObject('sources', new_group)
children.append(new_group_id)
new_root = objects[new_group_id]
root = new_root
return root
def DisableNewBuildSystem(output_dir):
"""Disables the new build system due to crbug.com/852522 """
xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace',
'xcshareddata', 'WorkspaceSettings.xcsettings')
if os.path.isfile(xcwspacesharedsettings):
json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings))
else:
json_data = {}
json_data['BuildSystemType'] = 'Original'
WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data))
"""Disables the new build system due to crbug.com/852522 """
xcwspacesharedsettings = os.path.join(output_dir, 'all.xcworkspace',
'xcshareddata',
'WorkspaceSettings.xcsettings')
if os.path.isfile(xcwspacesharedsettings):
json_data = json.loads(LoadXcodeProjectAsJSON(xcwspacesharedsettings))
else:
json_data = {}
json_data['BuildSystemType'] = 'Original'
WriteXcodeProject(xcwspacesharedsettings, json.dumps(json_data))
def ConvertGnXcodeProject(root_dir, input_dir, output_dir, configurations):
'''Tweak the Xcode project generated by gn to support multiple configurations.
'''Tweak the Xcode project generated by gn to support multiple
configurations.
The Xcode projects generated by "gn gen --ide" only supports a single
platform and configuration (as the platform and configuration are set
per output directory). This method takes as input such projects and
add support for multiple configurations and platforms (to allow devs
to select them in Xcode).
The Xcode projects generated by "gn gen --ide" only supports a single
platform and configuration (as the platform and configuration are set per
output directory). This method takes as input such projects and add support
for multiple configurations and platforms (to allow devs to select them in
Xcode).
Args:
input_dir: directory containing the XCode projects created by "gn gen --ide"
output_dir: directory where the tweaked Xcode projects will be saved
configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at
least one value.
'''
# Update products project.
products = os.path.join('products.xcodeproj', 'project.pbxproj')
product_input = os.path.join(input_dir, products)
product_output = os.path.join(output_dir, products)
UpdateProductsProject(product_input, product_output, configurations, root_dir)
Args:
input_dir: directory containing the XCode projects created by "gn gen
--ide"
output_dir: directory where the tweaked Xcode projects will be saved
configurations: list of string corresponding to the configurations that
need to be supported by the tweaked Xcode projects, must contains at
least one value.
'''
# Update products project.
products = os.path.join('products.xcodeproj', 'project.pbxproj')
product_input = os.path.join(input_dir, products)
product_output = os.path.join(output_dir, products)
UpdateProductsProject(product_input, product_output, configurations,
root_dir)
# Copy all workspace.
xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata')
CopyFileIfChanged(os.path.join(input_dir, xcwspace),
os.path.join(output_dir, xcwspace))
# Copy all workspace.
xcwspace = os.path.join('all.xcworkspace', 'contents.xcworkspacedata')
CopyFileIfChanged(os.path.join(input_dir, xcwspace),
os.path.join(output_dir, xcwspace))
# TODO(crbug.com/852522): Disable new BuildSystemType.
DisableNewBuildSystem(output_dir)
# TODO(crbug.com/852522): Disable new BuildSystemType.
DisableNewBuildSystem(output_dir)
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once
# gn has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources))
# TODO(crbug.com/679110): gn has been modified to remove 'sources.xcodeproj'
# and keep 'all.xcworkspace' and 'products.xcodeproj'. The following code is
# here to support both old and new projects setup and will be removed once gn
# has rolled past it.
sources = os.path.join('sources.xcodeproj', 'project.pbxproj')
if os.path.isfile(os.path.join(input_dir, sources)):
CopyFileIfChanged(os.path.join(input_dir, sources),
os.path.join(output_dir, sources))
def Main(args):
parser = argparse.ArgumentParser(
description='Convert GN Xcode projects for iOS.')
parser.add_argument(
'input',
help='directory containing [product|all] Xcode projects.')
parser.add_argument(
'output',
help='directory where to generate the iOS configuration.')
parser.add_argument(
'--add-config', dest='configurations', default=[], action='append',
help='configuration to add to the Xcode project')
parser.add_argument(
'--root', type=os.path.abspath, required=True,
help='root directory of the project')
args = parser.parse_args(args)
parser = argparse.ArgumentParser(
description='Convert GN Xcode projects for iOS.')
parser.add_argument(
'input', help='directory containing [product|all] Xcode projects.')
parser.add_argument(
'output', help='directory where to generate the iOS configuration.')
parser.add_argument('--add-config',
dest='configurations',
default=[],
action='append',
help='configuration to add to the Xcode project')
parser.add_argument('--root',
type=os.path.abspath,
required=True,
help='root directory of the project')
args = parser.parse_args(args)
if not os.path.isdir(args.input):
sys.stderr.write('Input directory does not exists.\n')
return 1
if not os.path.isdir(args.input):
sys.stderr.write('Input directory does not exists.\n')
return 1
required = set(['products.xcodeproj', 'all.xcworkspace'])
if not required.issubset(os.listdir(args.input)):
sys.stderr.write(
'Input directory does not contain all necessary Xcode projects.\n')
return 1
required = set(['products.xcodeproj', 'all.xcworkspace'])
if not required.issubset(os.listdir(args.input)):
sys.stderr.write(
'Input directory does not contain all necessary Xcode projects.\n')
return 1
if not args.configurations:
sys.stderr.write('At least one configuration required, see --add-config.\n')
return 1
if not args.configurations:
sys.stderr.write(
'At least one configuration required, see --add-config.\n')
return 1
ConvertGnXcodeProject(args.root, args.input, args.output,
args.configurations)
ConvertGnXcodeProject(args.root, args.input, args.output, args.configurations)
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
sys.exit(Main(sys.argv[1:]))


@@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import convert_gn_xcodeproj
import errno
@@ -27,326 +26,325 @@ import tempfile
import ConfigParser
try:
import cStringIO as StringIO
import cStringIO as StringIO
except ImportError:
import StringIO
import StringIO
SUPPORTED_TARGETS = ('iphoneos', 'iphonesimulator')
SUPPORTED_CONFIGS = ('Debug', 'Release', 'Profile', 'Official', 'Coverage')
class ConfigParserWithStringInterpolation(ConfigParser.SafeConfigParser):
'''A .ini file parser that supports strings and environment variables.'''
'''A .ini file parser that supports strings and environment variables.'''
ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)')
ENV_VAR_PATTERN = re.compile(r'\$([A-Za-z0-9_]+)')
def values(self, section):
return map(lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
ConfigParser.SafeConfigParser.items(self, section))
def values(self, section):
return map(
lambda (k, v): self._UnquoteString(self._ExpandEnvVar(v)),
ConfigParser.SafeConfigParser.items(self, section))
def getstring(self, section, option):
return self._UnquoteString(self._ExpandEnvVar(self.get(section,
option)))
def getstring(self, section, option):
return self._UnquoteString(self._ExpandEnvVar(self.get(section, option)))
def _UnquoteString(self, string):
if not string or string[0] != '"' or string[-1] != '"':
return string
return string[1:-1]
def _UnquoteString(self, string):
if not string or string[0] != '"' or string[-1] != '"':
return string
return string[1:-1]
def _ExpandEnvVar(self, value):
match = self.ENV_VAR_PATTERN.search(value)
if not match:
return value
name, (begin, end) = match.group(1), match.span(0)
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix
def _ExpandEnvVar(self, value):
match = self.ENV_VAR_PATTERN.search(value)
if not match:
return value
name, (begin, end) = match.group(1), match.span(0)
prefix, suffix = value[:begin], self._ExpandEnvVar(value[end:])
return prefix + os.environ.get(name, '') + suffix
class GnGenerator(object):
'''Holds configuration for a build and method to generate gn default
files.'''
'''Holds configuration for a build and method to generate gn default files.'''
FAT_BUILD_DEFAULT_ARCH = '64-bit'
FAT_BUILD_DEFAULT_ARCH = '64-bit'
TARGET_CPU_VALUES = {
'iphoneos': {
'32-bit': '"arm"',
'64-bit': '"arm64"',
},
'iphonesimulator': {
'32-bit': '"x86"',
'64-bit': '"x64"',
TARGET_CPU_VALUES = {
'iphoneos': {
'32-bit': '"arm"',
'64-bit': '"arm64"',
},
'iphonesimulator': {
'32-bit': '"x86"',
'64-bit': '"x64"',
}
}
}
def __init__(self, settings, config, target):
assert target in SUPPORTED_TARGETS
assert config in SUPPORTED_CONFIGS
self._settings = settings
self._config = config
self._target = target
def __init__(self, settings, config, target):
assert target in SUPPORTED_TARGETS
assert config in SUPPORTED_CONFIGS
self._settings = settings
self._config = config
self._target = target
def _GetGnArgs(self):
"""Build the list of arguments to pass to gn.
def _GetGnArgs(self):
"""Build the list of arguments to pass to gn.
Returns:
A list of tuple containing gn variable names and variable values (it
is not a dictionary as the order needs to be preserved).
"""
args = []
Returns:
A list of tuple containing gn variable names and variable values (it
is not a dictionary as the order needs to be preserved).
"""
args = []
args.append(('is_debug', self._config in ('Debug', 'Coverage')))
args.append(('is_debug', self._config in ('Debug', 'Coverage')))
if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1':
args.append(('use_system_xcode', False))
if os.environ.get('FORCE_MAC_TOOLCHAIN', '0') == '1':
args.append(('use_system_xcode', False))
cpu_values = self.TARGET_CPU_VALUES[self._target]
build_arch = self._settings.getstring('build', 'arch')
if build_arch == 'fat':
target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH]
args.append(('target_cpu', target_cpu))
args.append(('additional_target_cpus',
[cpu for cpu in cpu_values.itervalues() if cpu != target_cpu]))
else:
args.append(('target_cpu', cpu_values[build_arch]))
# Add user overrides after the other configurations so that they can
# refer to them and override them.
args.extend(self._settings.items('gn_args'))
return args
def Generate(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
subprocess.check_call(
self.GetGnCommand(gn_path, root_path, out_path, True))
def CreateGnRules(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
buf = StringIO.StringIO()
gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
self.WriteBuildNinja(buf, gn_command)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja'),
buf.getvalue(),
overwrite=False)
buf = StringIO.StringIO()
self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged(
os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(),
overwrite=False)
def WriteArgsGn(self, stream):
stream.write('# This file was generated by setup-gn.py. Do not edit\n')
stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n')
stream.write('# to configure settings.\n')
stream.write('\n')
if self._settings.has_section('$imports$'):
for import_rule in self._settings.values('$imports$'):
stream.write('import("%s")\n' % import_rule)
stream.write('\n')
gn_args = self._GetGnArgs()
for name, value in gn_args:
if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list):
stream.write('%s = [%s' % (name, '\n' if len(value) > 1 else ''))
if len(value) == 1:
prefix = ' '
suffix = ' '
cpu_values = self.TARGET_CPU_VALUES[self._target]
build_arch = self._settings.getstring('build', 'arch')
if build_arch == 'fat':
target_cpu = cpu_values[self.FAT_BUILD_DEFAULT_ARCH]
args.append(('target_cpu', target_cpu))
args.append(
('additional_target_cpus',
[cpu for cpu in cpu_values.itervalues() if cpu != target_cpu]))
else:
prefix = ' '
suffix = ',\n'
for item in value:
if isinstance(item, bool):
stream.write('%s%s%s' % (prefix, str(item).lower(), suffix))
else:
stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n')
else:
stream.write('%s = %s\n' % (name, value))
args.append(('target_cpu', cpu_values[build_arch]))
def WriteBuildNinja(self, stream, gn_command):
stream.write('rule gn\n')
stream.write(' command = %s\n' % NinjaEscapeCommand(gn_command))
stream.write(' description = Regenerating ninja files\n')
stream.write('\n')
stream.write('build build.ninja: gn\n')
stream.write(' generator = 1\n')
stream.write(' depfile = build.ninja.d\n')
# Add user overrides after the other configurations so that they can
# refer to them and override them.
args.extend(self._settings.items('gn_args'))
return args
def WriteBuildNinjaDeps(self, stream):
stream.write('build.ninja: nonexistant_file.gn\n')
def Generate(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project):
gn_command = [ gn_path, '--root=%s' % os.path.realpath(src_path), '-q' ]
if generate_xcode_project:
gn_command.append('--ide=xcode')
gn_command.append('--root-target=gn_all')
if self._settings.getboolean('goma', 'enabled'):
ninja_jobs = self._settings.getint('xcode', 'jobs') or 200
gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs)
if self._settings.has_section('filters'):
target_filters = self._settings.values('filters')
if target_filters:
gn_command.append('--filters=%s' % ';'.join(target_filters))
# TODO(justincohen): --check is currently failing in crashpad.
# else:
# gn_command.append('--check')
gn_command.append('gen')
gn_command.append('//%s' %
os.path.relpath(os.path.abspath(out_path), os.path.abspath(src_path)))
return gn_command
subprocess.check_call(
self.GetGnCommand(gn_path, root_path, out_path, True))
def CreateGnRules(self, gn_path, root_path, out_path):
buf = StringIO.StringIO()
self.WriteArgsGn(buf)
WriteToFileIfChanged(os.path.join(out_path, 'args.gn'),
buf.getvalue(),
overwrite=True)
buf = StringIO.StringIO()
gn_command = self.GetGnCommand(gn_path, root_path, out_path, False)
self.WriteBuildNinja(buf, gn_command)
WriteToFileIfChanged(os.path.join(out_path, 'build.ninja'),
buf.getvalue(),
overwrite=False)
buf = StringIO.StringIO()
self.WriteBuildNinjaDeps(buf)
WriteToFileIfChanged(os.path.join(out_path, 'build.ninja.d'),
buf.getvalue(),
overwrite=False)
def WriteArgsGn(self, stream):
stream.write('# This file was generated by setup-gn.py. Do not edit\n')
stream.write('# but instead use ~/.setup-gn or $repo/.setup-gn files\n')
stream.write('# to configure settings.\n')
stream.write('\n')
if self._settings.has_section('$imports$'):
for import_rule in self._settings.values('$imports$'):
stream.write('import("%s")\n' % import_rule)
stream.write('\n')
gn_args = self._GetGnArgs()
for name, value in gn_args:
if isinstance(value, bool):
stream.write('%s = %s\n' % (name, str(value).lower()))
elif isinstance(value, list):
stream.write('%s = [%s' %
(name, '\n' if len(value) > 1 else ''))
if len(value) == 1:
prefix = ' '
suffix = ' '
else:
prefix = ' '
suffix = ',\n'
for item in value:
if isinstance(item, bool):
stream.write('%s%s%s' %
(prefix, str(item).lower(), suffix))
else:
stream.write('%s%s%s' % (prefix, item, suffix))
stream.write(']\n')
else:
stream.write('%s = %s\n' % (name, value))
def WriteBuildNinja(self, stream, gn_command):
stream.write('rule gn\n')
stream.write(' command = %s\n' % NinjaEscapeCommand(gn_command))
stream.write(' description = Regenerating ninja files\n')
stream.write('\n')
stream.write('build build.ninja: gn\n')
stream.write(' generator = 1\n')
stream.write(' depfile = build.ninja.d\n')
def WriteBuildNinjaDeps(self, stream):
stream.write('build.ninja: nonexistant_file.gn\n')
def GetGnCommand(self, gn_path, src_path, out_path, generate_xcode_project):
gn_command = [gn_path, '--root=%s' % os.path.realpath(src_path), '-q']
if generate_xcode_project:
gn_command.append('--ide=xcode')
gn_command.append('--root-target=gn_all')
if self._settings.getboolean('goma', 'enabled'):
ninja_jobs = self._settings.getint('xcode', 'jobs') or 200
gn_command.append('--ninja-extra-args=-j%s' % ninja_jobs)
if self._settings.has_section('filters'):
target_filters = self._settings.values('filters')
if target_filters:
gn_command.append('--filters=%s' % ';'.join(target_filters))
# TODO(justincohen): --check is currently failing in crashpad.
# else:
# gn_command.append('--check')
gn_command.append('gen')
gn_command.append('//%s' % os.path.relpath(os.path.abspath(out_path),
os.path.abspath(src_path)))
return gn_command
def WriteToFileIfChanged(filename, content, overwrite):
'''Write |content| to |filename| if different. If |overwrite| is False
and the file already exists it is left untouched.'''
if os.path.exists(filename):
if not overwrite:
return
with open(filename) as file:
if file.read() == content:
return
if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
with open(filename, 'w') as file:
file.write(content)
'''Write |content| to |filename| if different. If |overwrite| is False
and the file already exists it is left untouched.'''
if os.path.exists(filename):
if not overwrite:
return
with open(filename) as file:
if file.read() == content:
return
if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
with open(filename, 'w') as file:
file.write(content)
def NinjaNeedEscape(arg):
'''Returns True if |arg| needs to be escaped when written to .ninja file.'''
return ':' in arg or '*' in arg or ';' in arg
'''Returns True if |arg| needs to be escaped when written to .ninja file.'''
return ':' in arg or '*' in arg or ';' in arg
def NinjaEscapeCommand(command):
'''Escapes |command| in order to write it to .ninja file.'''
result = []
for arg in command:
if NinjaNeedEscape(arg):
arg = arg.replace(':', '$:')
arg = arg.replace(';', '\\;')
arg = arg.replace('*', '\\*')
else:
result.append(arg)
return ' '.join(result)
'''Escapes |command| in order to write it to .ninja file.'''
result = []
for arg in command:
if NinjaNeedEscape(arg):
arg = arg.replace(':', '$:')
arg = arg.replace(';', '\\;')
arg = arg.replace('*', '\\*')
else:
result.append(arg)
return ' '.join(result)
def FindGn():
'''Returns absolute path to gn binary looking at the PATH env variable.'''
for path in os.environ['PATH'].split(os.path.pathsep):
gn_path = os.path.join(path, 'gn')
if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK):
return gn_path
return None
'''Returns absolute path to gn binary looking at the PATH env variable.'''
for path in os.environ['PATH'].split(os.path.pathsep):
gn_path = os.path.join(path, 'gn')
if os.path.isfile(gn_path) and os.access(gn_path, os.X_OK):
return gn_path
return None
def GenerateXcodeProject(gn_path, root_dir, out_dir, settings):
'''Convert GN generated Xcode project into multi-configuration Xcode
project.'''
'''Convert GN generated Xcode project into multi-configuration Xcode
project.'''
temp_path = tempfile.mkdtemp(prefix=os.path.abspath(
os.path.join(out_dir, '_temp')))
try:
generator = GnGenerator(settings, 'Debug', 'iphonesimulator')
generator.Generate(gn_path, root_dir, temp_path)
convert_gn_xcodeproj.ConvertGnXcodeProject(
root_dir,
os.path.join(temp_path),
os.path.join(out_dir, 'build'),
SUPPORTED_CONFIGS)
finally:
if os.path.exists(temp_path):
shutil.rmtree(temp_path)
temp_path = tempfile.mkdtemp(
prefix=os.path.abspath(os.path.join(out_dir, '_temp')))
try:
generator = GnGenerator(settings, 'Debug', 'iphonesimulator')
generator.Generate(gn_path, root_dir, temp_path)
convert_gn_xcodeproj.ConvertGnXcodeProject(
root_dir, os.path.join(temp_path), os.path.join(out_dir, 'build'),
SUPPORTED_CONFIGS)
finally:
if os.path.exists(temp_path):
shutil.rmtree(temp_path)
def GenerateGnBuildRules(gn_path, root_dir, out_dir, settings):
'''Generates all template configurations for gn.'''
for config in SUPPORTED_CONFIGS:
for target in SUPPORTED_TARGETS:
build_dir = os.path.join(out_dir, '%s-%s' % (config, target))
generator = GnGenerator(settings, config, target)
generator.CreateGnRules(gn_path, root_dir, build_dir)
'''Generates all template configurations for gn.'''
for config in SUPPORTED_CONFIGS:
for target in SUPPORTED_TARGETS:
build_dir = os.path.join(out_dir, '%s-%s' % (config, target))
generator = GnGenerator(settings, config, target)
generator.CreateGnRules(gn_path, root_dir, build_dir)
def Main(args):
default_root = os.path.normpath(os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir))
default_root = os.path.normpath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
parser = argparse.ArgumentParser(
description='Generate build directories for use with gn.')
parser.add_argument(
'root', default=default_root, nargs='?',
help='root directory where to generate multiple out configurations')
parser.add_argument(
'--import', action='append', dest='import_rules', default=[],
help='path to file defining default gn variables')
args = parser.parse_args(args)
parser = argparse.ArgumentParser(
description='Generate build directories for use with gn.')
parser.add_argument(
'root',
default=default_root,
nargs='?',
help='root directory where to generate multiple out configurations')
parser.add_argument('--import',
action='append',
dest='import_rules',
default=[],
help='path to file defining default gn variables')
args = parser.parse_args(args)
# Load configuration (first global and then any user overrides).
settings = ConfigParserWithStringInterpolation()
settings.read([
os.path.splitext(__file__)[0] + '.config',
os.path.expanduser('~/.setup-gn'),
])
# Load configuration (first global and then any user overrides).
settings = ConfigParserWithStringInterpolation()
settings.read([
os.path.splitext(__file__)[0] + '.config',
os.path.expanduser('~/.setup-gn'),
])
# Add private sections corresponding to --import argument.
if args.import_rules:
settings.add_section('$imports$')
for i, import_rule in enumerate(args.import_rules):
if not import_rule.startswith('//'):
import_rule = '//%s' % os.path.relpath(
os.path.abspath(import_rule), os.path.abspath(args.root))
settings.set('$imports$', '$rule%d$' % i, import_rule)
# Add private sections corresponding to --import argument.
if args.import_rules:
settings.add_section('$imports$')
for i, import_rule in enumerate(args.import_rules):
if not import_rule.startswith('//'):
import_rule = '//%s' % os.path.relpath(
os.path.abspath(import_rule), os.path.abspath(args.root))
settings.set('$imports$', '$rule%d$' % i, import_rule)
# Validate settings.
if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'):
sys.stderr.write('ERROR: invalid value for build.arch: %s\n' %
settings.getstring('build', 'arch'))
sys.exit(1)
# Validate settings.
if settings.getstring('build', 'arch') not in ('64-bit', '32-bit', 'fat'):
sys.stderr.write('ERROR: invalid value for build.arch: %s\n' %
settings.getstring('build', 'arch'))
sys.exit(1)
if settings.getboolean('goma', 'enabled'):
if settings.getint('xcode', 'jobs') < 0:
sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' %
settings.get('xcode', 'jobs'))
sys.exit(1)
goma_install = os.path.expanduser(settings.getstring('goma', 'install'))
if not os.path.isdir(goma_install):
sys.stderr.write('WARNING: goma.install directory not found: %s\n' %
settings.get('goma', 'install'))
sys.stderr.write('WARNING: disabling goma\n')
settings.set('goma', 'enabled', 'false')
if settings.getboolean('goma', 'enabled'):
if settings.getint('xcode', 'jobs') < 0:
sys.stderr.write('ERROR: invalid value for xcode.jobs: %s\n' %
settings.get('xcode', 'jobs'))
sys.exit(1)
goma_install = os.path.expanduser(settings.getstring('goma', 'install'))
if not os.path.isdir(goma_install):
sys.stderr.write('WARNING: goma.install directory not found: %s\n' %
settings.get('goma', 'install'))
sys.stderr.write('WARNING: disabling goma\n')
settings.set('goma', 'enabled', 'false')
# Find gn binary in PATH.
gn_path = FindGn()
if gn_path is None:
sys.stderr.write('ERROR: cannot find gn in PATH\n')
sys.exit(1)
# Find gn binary in PATH.
gn_path = FindGn()
if gn_path is None:
sys.stderr.write('ERROR: cannot find gn in PATH\n')
sys.exit(1)
out_dir = os.path.join(args.root, 'out')
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
out_dir = os.path.join(args.root, 'out')
if not os.path.isdir(out_dir):
os.makedirs(out_dir)
GenerateXcodeProject(gn_path, args.root, out_dir, settings)
GenerateGnBuildRules(gn_path, args.root, out_dir, settings)
GenerateXcodeProject(gn_path, args.root, out_dir, settings)
GenerateGnBuildRules(gn_path, args.root, out_dir, settings)
if __name__ == '__main__':
sys.exit(Main(sys.argv[1:]))
sys.exit(Main(sys.argv[1:]))


@@ -13,7 +13,6 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper script to [re]start or stop a helper Fuchsia QEMU instance to be used
for running tests without a device.
"""
@@ -30,105 +29,117 @@ import tempfile
import time
try:
from subprocess import DEVNULL
from subprocess import DEVNULL
except ImportError:
DEVNULL = open(os.devnull, 'r+b')
DEVNULL = open(os.devnull, 'r+b')
CRASHPAD_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)),
os.pardir)
def _Stop(pid_file):
if os.path.isfile(pid_file):
with open(pid_file, 'rb') as f:
pid = int(f.read().strip())
try:
os.kill(pid, signal.SIGTERM)
except:
print('Unable to kill pid %d, continuing' % pid, file=sys.stderr)
os.unlink(pid_file)
if os.path.isfile(pid_file):
with open(pid_file, 'rb') as f:
pid = int(f.read().strip())
try:
os.kill(pid, signal.SIGTERM)
except:
print('Unable to kill pid %d, continuing' % pid, file=sys.stderr)
os.unlink(pid_file)
def _CheckForTun():
"""Check for networking. TODO(scottmg): Currently, this is Linux-specific.
"""
returncode = subprocess.call(
['tunctl', '-b', '-u', getpass.getuser(), '-t', 'qemu'],
stdout=DEVNULL, stderr=DEVNULL)
if returncode != 0:
print('To use QEMU with networking on Linux, configure TUN/TAP. See:',
file=sys.stderr)
print(' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
file=sys.stderr)
return 2
return 0
"""Check for networking. TODO(scottmg): Currently, this is Linux-specific.
"""
returncode = subprocess.call(
['tunctl', '-b', '-u',
getpass.getuser(), '-t', 'qemu'],
stdout=DEVNULL,
stderr=DEVNULL)
if returncode != 0:
print('To use QEMU with networking on Linux, configure TUN/TAP. See:',
file=sys.stderr)
print(
' https://fuchsia.googlesource.com/zircon/+/HEAD/docs/qemu.md#enabling-networking-under-qemu-x86_64-only',
file=sys.stderr)
return 2
return 0
def _Start(pid_file):
tun_result = _CheckForTun()
if tun_result != 0:
return tun_result
tun_result = _CheckForTun()
if tun_result != 0:
return tun_result
arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64'
fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia')
qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin',
'qemu-system-x86_64')
kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64')
kernel_path = os.path.join(kernel_data_dir, 'zircon.bin')
initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin')
arch = 'mac-amd64' if sys.platform == 'darwin' else 'linux-amd64'
fuchsia_dir = os.path.join(CRASHPAD_ROOT, 'third_party', 'fuchsia')
qemu_path = os.path.join(fuchsia_dir, 'qemu', arch, 'bin',
'qemu-system-x86_64')
kernel_data_dir = os.path.join(fuchsia_dir, 'sdk', arch, 'target', 'x86_64')
kernel_path = os.path.join(kernel_data_dir, 'zircon.bin')
initrd_path = os.path.join(kernel_data_dir, 'bootdata.bin')
mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3))
instance_name = 'crashpad_qemu_' + \
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8))
mac_tail = ':'.join('%02x' % random.randint(0, 255) for x in range(3))
instance_name = (
'crashpad_qemu_' +
''.join(chr(random.randint(ord('A'), ord('Z'))) for x in range(8)))
# These arguments are from the Fuchsia repo in zircon/scripts/run-zircon.
popen = subprocess.Popen([
qemu_path,
'-m', '2048',
'-nographic',
'-kernel', kernel_path,
'-initrd', initrd_path,
'-smp', '4',
'-serial', 'stdio',
'-monitor', 'none',
'-machine', 'q35',
'-cpu', 'host,migratable=no',
'-enable-kvm',
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name,
], stdin=DEVNULL, stdout=DEVNULL, stderr=DEVNULL)
# These arguments are from the Fuchsia repo in zircon/scripts/run-zircon.
with open(pid_file, 'wb') as f:
f.write('%d\n' % popen.pid)
# yapf: disable
popen = subprocess.Popen([
qemu_path,
'-m', '2048',
'-nographic',
'-kernel', kernel_path,
'-initrd', initrd_path,
'-smp', '4',
'-serial', 'stdio',
'-monitor', 'none',
'-machine', 'q35',
'-cpu', 'host,migratable=no',
'-enable-kvm',
'-netdev', 'type=tap,ifname=qemu,script=no,downscript=no,id=net0',
'-device', 'e1000,netdev=net0,mac=52:54:00:' + mac_tail,
'-append', 'TERM=dumb zircon.nodename=' + instance_name,
],
stdin=DEVNULL,
stdout=DEVNULL,
stderr=DEVNULL)
# yapf: enable
for i in range(10):
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools', 'netaddr')
if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull), stderr=open(os.devnull)) == 0:
break
time.sleep(.5)
else:
print('instance did not respond after start', file=sys.stderr)
return 1
with open(pid_file, 'wb') as f:
f.write('%d\n' % popen.pid)
return 0
for i in range(10):
netaddr_path = os.path.join(fuchsia_dir, 'sdk', arch, 'tools',
'netaddr')
if subprocess.call([netaddr_path, '--nowait', instance_name],
stdout=open(os.devnull),
stderr=open(os.devnull)) == 0:
break
time.sleep(.5)
else:
print('instance did not respond after start', file=sys.stderr)
return 1
return 0
def main(args):
if len(args) != 1 or args[0] not in ('start', 'stop'):
print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr)
return 1
if len(args) != 1 or args[0] not in ('start', 'stop'):
print('usage: run_fuchsia_qemu.py start|stop', file=sys.stderr)
return 1
command = args[0]
command = args[0]
pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid')
_Stop(pid_file)
if command == 'start':
return _Start(pid_file)
pid_file = os.path.join(tempfile.gettempdir(), 'crashpad_fuchsia_qemu_pid')
_Stop(pid_file)
if command == 'start':
return _Start(pid_file)
return 0
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(main(sys.argv[1:]))

File diff suppressed because it is too large.


@@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2017 The Crashpad Authors. All rights reserved.
#
@@ -24,26 +23,26 @@ import sys
def main(args):
script_dir = os.path.dirname(__file__)
crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir)
script_dir = os.path.dirname(__file__)
crashpad_dir = os.path.join(script_dir, os.pardir, os.pardir)
# Run from the Crashpad project root directory.
os.chdir(crashpad_dir)
# Run from the Crashpad project root directory.
os.chdir(crashpad_dir)
output_dir = os.path.join('out', 'doc', 'doxygen')
output_dir = os.path.join('out', 'doc', 'doxygen')
if os.path.isdir(output_dir) and not os.path.islink(output_dir):
shutil.rmtree(output_dir)
elif os.path.exists(output_dir):
os.unlink(output_dir)
if os.path.isdir(output_dir) and not os.path.islink(output_dir):
shutil.rmtree(output_dir)
elif os.path.exists(output_dir):
os.unlink(output_dir)
os.makedirs(output_dir, 0o755)
os.makedirs(output_dir, 0o755)
doxy_file = os.path.join('doc', 'support', 'crashpad.doxy')
subprocess.check_call(['doxygen', doxy_file])
doxy_file = os.path.join('doc', 'support', 'crashpad.doxy')
subprocess.check_call(['doxygen', doxy_file])
return 0
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(main(sys.argv[1:]))


@@ -12,8 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
def CheckChangeOnUpload(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api)
return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return input_api.canned_checks.CheckChangedLUCIConfigs(input_api, output_api)
return input_api.canned_checks.CheckChangedLUCIConfigs(
input_api, output_api)


@@ -29,462 +29,452 @@ import win32con
import win32pipe
import winerror
g_temp_dirs = []
g_had_failures = False
def MakeTempDir():
global g_temp_dirs
new_dir = tempfile.mkdtemp()
g_temp_dirs.append(new_dir)
return new_dir
global g_temp_dirs
new_dir = tempfile.mkdtemp()
g_temp_dirs.append(new_dir)
return new_dir
def CleanUpTempDirs():
global g_temp_dirs
for d in g_temp_dirs:
subprocess.call(['rmdir', '/s', '/q', d], shell=True)
global g_temp_dirs
for d in g_temp_dirs:
subprocess.call(['rmdir', '/s', '/q', d], shell=True)
def FindInstalledWindowsApplication(app_path):
search_paths = [os.getenv('PROGRAMFILES(X86)'),
os.getenv('PROGRAMFILES'),
os.getenv('PROGRAMW6432'),
os.getenv('LOCALAPPDATA')]
search_paths += os.getenv('PATH', '').split(os.pathsep)
search_paths = [
os.getenv('PROGRAMFILES(X86)'),
os.getenv('PROGRAMFILES'),
os.getenv('PROGRAMW6432'),
os.getenv('LOCALAPPDATA')
]
search_paths += os.getenv('PATH', '').split(os.pathsep)
for search_path in search_paths:
if not search_path:
continue
path = os.path.join(search_path, app_path)
if os.path.isfile(path):
return path
for search_path in search_paths:
if not search_path:
continue
path = os.path.join(search_path, app_path)
if os.path.isfile(path):
return path
return None
def GetCdbPath():
"""Search in some reasonable places to find cdb.exe. Searches x64 before x86
and newer versions before older versions.
"""
possible_paths = (
os.path.join('Windows Kits', '10', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '10', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
'Debugging Tools For Windows (x64)',
'Debugging Tools For Windows (x86)',
'Debugging Tools For Windows',)
for possible_path in possible_paths:
app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path)
if app_path:
return app_path
return None
def NamedPipeExistsAndReady(pipe_name):
"""Returns False if pipe_name does not exist. If pipe_name does exist, blocks
until the pipe is ready to service clients, and then returns True.
This is used as a drop-in replacement for os.path.exists() and os.access() to
test for the pipe's existence. Both of those calls tickle the pipe in a way
that appears to the server to be a client connecting, triggering error
messages when no data is received.
Although this function only needs to test pipe existence (waiting for
CreateNamedPipe()), it actually winds up testing pipe readiness
(waiting for ConnectNamedPipe()). This is unnecessary but harmless.
"""
try:
win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
except pywintypes.error as e:
if e[0] == winerror.ERROR_FILE_NOT_FOUND:
return False
raise
return True
def GetDumpFromProgram(
out_dir, pipe_name, executable_name, expect_exit_code, *args):
"""Initialize a crash database, and run |executable_name| connecting to a
crash handler. If pipe_name is set, crashpad_handler will be started first. If
pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by
crashpad_handler for further testing.
"""
test_database = MakeTempDir()
handler = None
try:
subprocess.check_call(
[os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database])
if pipe_name is not None:
handler = subprocess.Popen([
os.path.join(out_dir, 'crashpad_handler.com'),
'--pipe-name=' + pipe_name,
'--database=' + test_database
])
# Wait until the server is ready.
printed = False
while not NamedPipeExistsAndReady(pipe_name):
if not printed:
print('Waiting for crashpad_handler to be ready...')
printed = True
time.sleep(0.001)
command = [os.path.join(out_dir, executable_name), pipe_name] + list(args)
else:
command = ([os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'),
test_database] +
list(args))
print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command)
if exit_code != expect_exit_code:
raise subprocess.CalledProcessError(exit_code, executable_name)
out = subprocess.check_output([
os.path.join(out_dir, 'crashpad_database_util.exe'),
'--database=' + test_database,
'--show-pending-reports',
'--show-all-report-info',
])
for line in out.splitlines():
if line.strip().startswith('Path:'):
return line.partition(':')[2].strip()
finally:
if handler:
handler.kill()
def GetDumpFromCrashyProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_program.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
return GetDumpFromProgram(
out_dir, pipe_name, 'crash_other_program.exe', 0, *args)
def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_signal.exe',
STATUS_FATAL_APP_EXIT,
*args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir,
pipe_name,
'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
class CdbRun(object):
"""Run cdb.exe passing it a cdb command and capturing the output.
`Check()` searches for regex patterns in sequence allowing verification of
expected output.
"""
def __init__(self, cdb_path, dump_path, command):
# Run a command line that loads the dump, runs the specified cdb command,
# and then quits, capturing stdout.
self.out = subprocess.check_output([
cdb_path,
'-z', dump_path,
'-c', command + ';q'
])
def Check(self, pattern, message, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
print('ok - %s' % message)
sys.stdout.flush()
else:
print('-' * 80, file=sys.stderr)
print('FAILED - %s' % message, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('did not match:\n %s' % pattern, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('remaining output was:\n %s' % self.out, file=sys.stderr)
print('-' * 80, file=sys.stderr)
sys.stderr.flush()
global g_had_failures
g_had_failures = True
def Find(self, pattern, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
return match_obj
return None
def RunTests(cdb_path,
dump_path,
start_handler_dump_path,
destroyed_dump_path,
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name):
"""Runs various tests in sequence. Runs a new cdb instance on the dump for
each block of tests to reduce the chances that output from one command is
confused for output from another.
"""
out = CdbRun(cdb_path, dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it',
'captured exception')
def GetCdbPath():
"""Search in some reasonable places to find cdb.exe. Searches x64 before x86
and newer versions before older versions.
"""
possible_paths = (
os.path.join('Windows Kits', '10', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '10', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.1', 'Debuggers', 'x86'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x64'),
os.path.join('Windows Kits', '8.0', 'Debuggers', 'x86'),
'Debugging Tools For Windows (x64)',
'Debugging Tools For Windows (x86)',
'Debugging Tools For Windows',
)
for possible_path in possible_paths:
app_path = os.path.join(possible_path, 'cdb.exe')
app_path = FindInstalledWindowsApplication(app_path)
if app_path:
return app_path
return None
# When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
# "`anonymous namespace'" and instead gives the decorated form.
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location')
out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it',
'captured exception (using StartHandler())')
out.Check('crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location (using StartHandler())')
def NamedPipeExistsAndReady(pipe_name):
"""Returns False if pipe_name does not exist. If pipe_name does exist,
blocks until the pipe is ready to service clients, and then returns True.
out = CdbRun(cdb_path, dump_path, '!peb')
out.Check(r'PEB at', 'found the PEB')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module', 'module list present')
pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct')
out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured',
re.IGNORECASE)
This is used as a drop-in replacement for os.path.exists() and os.access()
to test for the pipe's existence. Both of those calls tickle the pipe in a
way that appears to the server to be a client connecting, triggering error
messages when no data is received.
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters')
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"',
'PEB->ProcessParameters.ImagePathName string captured')
out.Check(' DesktopInfo *: '
'_UNICODE_STRING "(?!--- memory read error at address ).*"',
'PEB->ProcessParameters.DesktopInfo string captured')
Although this function only needs to test pipe existence (waiting for
CreateNamedPipe()), it actually winds up testing pipe readiness (waiting for
ConnectNamedPipe()). This is unnecessary but harmless.
"""
try:
win32pipe.WaitNamedPipe(pipe_name, win32pipe.NMPWAIT_WAIT_FOREVER)
except pywintypes.error as e:
if e[0] == winerror.ERROR_FILE_NOT_FOUND:
return False
raise
return True
out = CdbRun(cdb_path, dump_path, '!teb')
out.Check(r'TEB at', 'found the TEB')
out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data')
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle')
out.Check('LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
'file specified.', '!gle gets last error')
out.Check('LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
'file %hs does not exist.', '!gle gets last ntstatus')
def GetDumpFromProgram(out_dir, pipe_name, executable_name, expect_exit_code,
*args):
"""Initialize a crash database, and run |executable_name| connecting to a
crash handler. If pipe_name is set, crashpad_handler will be started first.
If pipe_name is empty, the executable is responsible for starting
crashpad_handler. *args will be passed after other arguments to
executable_name. If the child process does not exit with |expect_exit_code|,
an exception will be raised. Returns the path to the minidump generated by
crashpad_handler for further testing.
"""
test_database = MakeTempDir()
handler = None
if False:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList.
out = CdbRun(cdb_path, dump_path, '!locks')
out.Check(r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
r'g_test_critical_section', 'lock was captured')
if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked', 'lock debug info was captured, and is locked')
try:
subprocess.check_call([
os.path.join(out_dir, 'crashpad_database_util.exe'), '--create',
'--database=' + test_database
])
out = CdbRun(cdb_path, dump_path, '!handle')
out.Check(r'\d+ Handles', 'captured handles')
out.Check(r'Event\s+\d+', 'capture some event handles')
out.Check(r'File\s+\d+', 'capture some file handles')
if pipe_name is not None:
handler = subprocess.Popen([
os.path.join(out_dir, 'crashpad_handler.com'),
'--pipe-name=' + pipe_name, '--database=' + test_database
])
out = CdbRun(cdb_path, dump_path, 'lm')
out.Check(r'Unloaded modules:', 'captured some unloaded modules')
out.Check(r'lz32\.dll', 'found expected unloaded module lz32')
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror')
# Wait until the server is ready.
printed = False
while not NamedPipeExistsAndReady(pipe_name):
if not printed:
print('Waiting for crashpad_handler to be ready...')
printed = True
time.sleep(0.001)
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+', 'PEB_LDR_DATA saved')
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE)
command = [os.path.join(out_dir, executable_name), pipe_name
] + list(args)
else:
command = ([
os.path.join(out_dir, executable_name),
os.path.join(out_dir, 'crashpad_handler.com'), test_database
] + list(args))
print('Running %s' % os.path.basename(command[0]))
exit_code = subprocess.call(command)
if exit_code != expect_exit_code:
raise subprocess.CalledProcessError(exit_code, executable_name)
# Check that there is no stack trace in the self-destroyed process. Confirm
# that the top is where we expect it (that's based only on IP), but subsequent
# stack entries will not be available. This confirms that we have a mostly
# valid dump, but that the stack was omitted.
out.Check(r'self_destroying_program!crashpad::`anonymous namespace\'::'
r'FreeOwnStackAndBreak.*\nquit:',
'at correct location, no additional stack entries')
out = subprocess.check_output([
os.path.join(out_dir, 'crashpad_database_util.exe'),
'--database=' + test_database,
'--show-pending-reports',
'--show-all-report-info',
])
for line in out.splitlines():
if line.strip().startswith('Path:'):
return line.partition(':')[2].strip()
finally:
if handler:
handler.kill()
# Dump memory pointed to by EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended, and
# then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj:
thread = match_obj.group(1)
out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi')
out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
'data pointed to by registers captured')
# Move up one stack frame after jumping to the exception, and examine memory.
out = CdbRun(cdb_path, dump_path,
'.ecxr; .f+; dd /c100 poi(offset_pointer)-20')
out.Check(r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured')
def GetDumpFromCrashyProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_program.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and then
# examine the memory at which it points. Both should have been saved.
out = CdbRun(cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8')
out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured')
out = CdbRun(cdb_path, dump_path, '.dumpdebug')
out.Check(r'type \?\?\? \(333333\), size 00001000',
'first user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080',
'second user stream')
def GetDumpFromOtherProgram(out_dir, pipe_name, *args):
return GetDumpFromProgram(out_dir, pipe_name, 'crash_other_program.exe', 0,
*args)
if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm')
def GetDumpFromSignal(out_dir, pipe_name, *args):
STATUS_FATAL_APP_EXIT = 0x40000015 # Not known by win32con.
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_signal.exe',
STATUS_FATAL_APP_EXIT, *args)
def GetDumpFromSelfDestroyingProgram(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, pipe_name, 'self_destroying_program.exe',
win32con.EXCEPTION_BREAKPOINT)
def GetDumpFromZ7Program(out_dir, pipe_name):
return GetDumpFromProgram(out_dir, pipe_name, 'crashy_z7_loader.exe',
win32con.EXCEPTION_ACCESS_VIOLATION)
class CdbRun(object):
"""Run cdb.exe passing it a cdb command and capturing the output.
`Check()` searches for regex patterns in sequence allowing verification of
expected output.
"""
def __init__(self, cdb_path, dump_path, command):
# Run a command line that loads the dump, runs the specified cdb
# command, and then quits, capturing stdout.
self.out = subprocess.check_output(
[cdb_path, '-z', dump_path, '-c', command + ';q'])
def Check(self, pattern, message, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
print('ok - %s' % message)
sys.stdout.flush()
else:
print('-' * 80, file=sys.stderr)
print('FAILED - %s' % message, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('did not match:\n %s' % pattern, file=sys.stderr)
print('-' * 80, file=sys.stderr)
print('remaining output was:\n %s' % self.out, file=sys.stderr)
print('-' * 80, file=sys.stderr)
sys.stderr.flush()
global g_had_failures
g_had_failures = True
def Find(self, pattern, re_flags=0):
match_obj = re.search(pattern, self.out, re_flags)
if match_obj:
# Matched. Consume up to end of match.
self.out = self.out[match_obj.end(0):]
return match_obj
return None
def RunTests(cdb_path, dump_path, start_handler_dump_path, destroyed_dump_path,
z7_dump_path, other_program_path, other_program_no_exception_path,
sigabrt_main_path, sigabrt_background_path, pipe_name):
"""Runs various tests in sequence. Runs a new cdb instance on the dump for
each block of tests to reduce the chances that output from one command is
confused for output from another.
"""
out = CdbRun(cdb_path, dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module')
# Older versions of cdb display relative to exports for /Z7 modules, newer
# ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
'expected non-pdb symbol format')
'captured exception')
out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~')
out.Check('Unknown exception - code deadbea7',
'other program dump exception code')
out.Check('!Sleep', 'other program reasonable location')
out.Check("hanging_program!`anonymous namespace'::Thread1",
'other program dump right thread')
count = 0
while True:
match_obj = out.Find(r'Id.*Suspend: (\d+) ')
# When SomeCrashyFunction is inlined, cdb doesn't demangle its namespace as
# "`anonymous namespace'" and instead gives the decorated form.
out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction', 'exception at correct location')
out = CdbRun(cdb_path, start_handler_dump_path, '.ecxr')
out.Check('This dump file has an exception of interest stored in it',
'captured exception (using StartHandler())')
out.Check(
'crashy_program!crashpad::(`anonymous namespace\'|\?A0x[0-9a-f]+)::'
'SomeCrashyFunction',
'exception at correct location (using StartHandler())')
out = CdbRun(cdb_path, dump_path, '!peb')
out.Check(r'PEB at', 'found the PEB')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'Base TimeStamp Module',
'module list present')
pipe_name_escaped = pipe_name.replace('\\', '\\\\')
out.Check(r'CommandLine: *\'.*crashy_program\.exe *' + pipe_name_escaped,
'some PEB data is correct')
out.Check(r'SystemRoot=C:\\Windows', 'some of environment captured',
re.IGNORECASE)
out = CdbRun(cdb_path, dump_path, '?? @$peb->ProcessParameters')
out.Check(r' ImagePathName *: _UNICODE_STRING ".*\\crashy_program\.exe"',
'PEB->ProcessParameters.ImagePathName string captured')
out.Check(
' DesktopInfo *: '
'_UNICODE_STRING "(?!--- memory read error at address ).*"',
'PEB->ProcessParameters.DesktopInfo string captured')
out = CdbRun(cdb_path, dump_path, '!teb')
out.Check(r'TEB at', 'found the TEB')
out.Check(r'ExceptionList:\s+[0-9a-fA-F]+', 'some valid teb data')
out.Check(r'LastErrorValue:\s+2', 'correct LastErrorValue')
out = CdbRun(cdb_path, dump_path, '!gle')
out.Check(
'LastErrorValue: \(Win32\) 0x2 \(2\) - The system cannot find the '
'file specified.', '!gle gets last error')
out.Check(
'LastStatusValue: \(NTSTATUS\) 0xc000000f - {File Not Found} The '
'file %hs does not exist.', '!gle gets last ntstatus')
if False:
# TODO(scottmg): Re-enable when we grab ntdll!RtlCriticalSectionList.
out = CdbRun(cdb_path, dump_path, '!locks')
out.Check(
r'CritSec crashy_program!crashpad::`anonymous namespace\'::'
r'g_test_critical_section', 'lock was captured')
if platform.win32_ver()[0] != '7':
# We can't allocate CRITICAL_SECTIONs with .DebugInfo on Win 7.
out.Check(r'\*\*\* Locked',
'lock debug info was captured, and is locked')
out = CdbRun(cdb_path, dump_path, '!handle')
out.Check(r'\d+ Handles', 'captured handles')
out.Check(r'Event\s+\d+', 'capture some event handles')
out.Check(r'File\s+\d+', 'capture some file handles')
out = CdbRun(cdb_path, dump_path, 'lm')
out.Check(r'Unloaded modules:', 'captured some unloaded modules')
out.Check(r'lz32\.dll', 'found expected unloaded module lz32')
out.Check(r'wmerror\.dll', 'found expected unloaded module wmerror')
out = CdbRun(cdb_path, destroyed_dump_path, '.ecxr;!peb;k 2')
out.Check(r'Ldr\.InMemoryOrderModuleList:.*\d+ \. \d+',
'PEB_LDR_DATA saved')
out.Check(r'ntdll\.dll', 'ntdll present', re.IGNORECASE)
# Check that there is no stack trace in the self-destroyed process. Confirm
# that the top is where we expect it (that's based only on IP), but
# subsequent stack entries will not be available. This confirms that we have
# a mostly valid dump, but that the stack was omitted.
out.Check(
r'self_destroying_program!crashpad::`anonymous namespace\'::'
r'FreeOwnStackAndBreak.*\nquit:',
'at correct location, no additional stack entries')
# Dump memory pointed to by EDI on the background suspended thread. We don't
# know the index of the thread because the system may have started other
# threads, so first do a run to extract the thread index that's suspended,
# and then another run to dump the data pointed to by EDI for that thread.
out = CdbRun(cdb_path, dump_path, '.ecxr;~')
match_obj = out.Find(r'(\d+)\s+Id: [0-9a-f.]+ Suspend: 1 Teb:')
if match_obj:
if match_obj.group(1) != '0':
out.Check(r'FAILED', 'all suspend counts should be 0')
else:
count += 1
else:
break
assert count > 2
thread = match_obj.group(1)
out = CdbRun(cdb_path, dump_path, '.ecxr;~' + thread + 's;db /c14 edi')
out.Check(r'63 62 61 60 5f 5e 5d 5c-5b 5a 59 58 57 56 55 54 53 52 51 50',
'data pointed to by registers captured')
out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k')
out.Check('Unknown exception - code 0cca11ed',
'other program with no exception given')
out.Check('!RaiseException', 'other program in RaiseException()')
# Move up one stack frame after jumping to the exception, and examine
# memory.
out = CdbRun(cdb_path, dump_path,
'.ecxr; .f+; dd /c100 poi(offset_pointer)-20')
out.Check(
r'80000078 00000079 8000007a 0000007b 8000007c 0000007d 8000007e '
r'0000007f 80000080 00000081 80000082 00000083 80000084 00000085 '
r'80000086 00000087 80000088 00000089 8000008a 0000008b 8000008c '
r'0000008d 8000008e 0000008f 80000090 00000091 80000092 00000093 '
r'80000094 00000095 80000096 00000097',
'data pointed to by stack captured')
out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal')
out.Check('::HandleAbortSignal', ' stack in expected location')
# Attempt to retrieve the value of g_extra_memory_pointer (by name), and
# then examine the memory at which it points. Both should have been saved.
out = CdbRun(
cdb_path, dump_path,
'dd poi(crashy_program!crashpad::g_extra_memory_pointer)+0x1f30 '
'L8')
out.Check(r'0000655e 0000656b 00006578 00006585',
'extra memory range captured')
out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal from background thread')
out = CdbRun(cdb_path, dump_path, '.dumpdebug')
out.Check(r'type \?\?\? \(333333\), size 00001000', 'first user stream')
out.Check(r'type \?\?\? \(222222\), size 00000080', 'second user stream')
if z7_dump_path:
out = CdbRun(cdb_path, z7_dump_path, '.ecxr;lm')
out.Check('This dump file has an exception of interest stored in it',
'captured exception in z7 module')
# Older versions of cdb display relative to exports for /Z7 modules,
# newer ones just display the offset.
out.Check(r'z7_test(!CrashMe\+0xe|\+0x100e):',
'exception in z7 at correct location')
out.Check(r'z7_test C \(codeview symbols\) z7_test\.dll',
'expected non-pdb symbol format')
out = CdbRun(cdb_path, other_program_path, '.ecxr;k;~')
out.Check('Unknown exception - code deadbea7',
'other program dump exception code')
out.Check('!Sleep', 'other program reasonable location')
out.Check("hanging_program!`anonymous namespace'::Thread1",
'other program dump right thread')
count = 0
while True:
match_obj = out.Find(r'Id.*Suspend: (\d+) ')
if match_obj:
if match_obj.group(1) != '0':
out.Check(r'FAILED', 'all suspend counts should be 0')
else:
count += 1
else:
break
assert count > 2
out = CdbRun(cdb_path, other_program_no_exception_path, '.ecxr;k')
out.Check('Unknown exception - code 0cca11ed',
'other program with no exception given')
out.Check('!RaiseException', 'other program in RaiseException()')
out = CdbRun(cdb_path, sigabrt_main_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal')
out.Check('::HandleAbortSignal', ' stack in expected location')
out = CdbRun(cdb_path, sigabrt_background_path, '.ecxr')
out.Check('code 40000015', 'got sigabrt signal from background thread')
def main(args):
try:
if len(args) != 1:
print('must supply binary dir', file=sys.stderr)
return 1
try:
if len(args) != 1:
print('must supply binary dir', file=sys.stderr)
return 1
cdb_path = GetCdbPath()
if not cdb_path:
print('could not find cdb', file=sys.stderr)
return 1
cdb_path = GetCdbPath()
if not cdb_path:
print('could not find cdb', file=sys.stderr)
return 1
# Make sure we can download Windows symbols.
if not os.environ.get('_NT_SYMBOL_PATH'):
symbol_dir = MakeTempDir()
protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http'
os.environ['_NT_SYMBOL_PATH'] = (
'SRV*' + symbol_dir + '*' +
protocol + '://msdl.microsoft.com/download/symbols')
# Make sure we can download Windows symbols.
if not os.environ.get('_NT_SYMBOL_PATH'):
symbol_dir = MakeTempDir()
protocol = 'https' if platform.win32_ver()[0] != 'XP' else 'http'
os.environ['_NT_SYMBOL_PATH'] = (
'SRV*' + symbol_dir + '*' + protocol +
'://msdl.microsoft.com/download/symbols')
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (
os.getpid(), str(random.getrandbits(64)))
pipe_name = r'\\.\pipe\end-to-end_%s_%s' % (os.getpid(),
str(random.getrandbits(64)))
crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name)
if not crashy_dump_path:
return 1
crashy_dump_path = GetDumpFromCrashyProgram(args[0], pipe_name)
if not crashy_dump_path:
return 1
start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None)
if not start_handler_dump_path:
return 1
start_handler_dump_path = GetDumpFromCrashyProgram(args[0], None)
if not start_handler_dump_path:
return 1
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(args[0], pipe_name)
if not destroyed_dump_path:
return 1
destroyed_dump_path = GetDumpFromSelfDestroyingProgram(
args[0], pipe_name)
if not destroyed_dump_path:
return 1
z7_dump_path = None
if not args[0].endswith('_x64'):
z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name)
if not z7_dump_path:
return 1
z7_dump_path = None
if not args[0].endswith('_x64'):
z7_dump_path = GetDumpFromZ7Program(args[0], pipe_name)
if not z7_dump_path:
return 1
other_program_path = GetDumpFromOtherProgram(args[0], pipe_name)
if not other_program_path:
return 1
other_program_path = GetDumpFromOtherProgram(args[0], pipe_name)
if not other_program_path:
return 1
other_program_no_exception_path = GetDumpFromOtherProgram(
args[0], pipe_name, 'noexception')
if not other_program_no_exception_path:
return 1
other_program_no_exception_path = GetDumpFromOtherProgram(
args[0], pipe_name, 'noexception')
if not other_program_no_exception_path:
return 1
sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main')
if not sigabrt_main_path:
return 1
sigabrt_main_path = GetDumpFromSignal(args[0], pipe_name, 'main')
if not sigabrt_main_path:
return 1
sigabrt_background_path = GetDumpFromSignal(
args[0], pipe_name, 'background')
if not sigabrt_background_path:
return 1
sigabrt_background_path = GetDumpFromSignal(args[0], pipe_name,
'background')
if not sigabrt_background_path:
return 1
RunTests(cdb_path,
crashy_dump_path,
start_handler_dump_path,
destroyed_dump_path,
z7_dump_path,
other_program_path,
other_program_no_exception_path,
sigabrt_main_path,
sigabrt_background_path,
pipe_name)
RunTests(cdb_path, crashy_dump_path, start_handler_dump_path,
destroyed_dump_path, z7_dump_path, other_program_path,
other_program_no_exception_path, sigabrt_main_path,
sigabrt_background_path, pipe_name)
return 1 if g_had_failures else 0
finally:
CleanUpTempDirs()
return 1 if g_had_failures else 0
finally:
CleanUpTempDirs()
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
sys.exit(main(sys.argv[1:]))
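
For orientation, the smallest end-to-end check this script performs composes the helpers above roughly as follows. This is a sketch, not part of the change: the dump path is a hypothetical placeholder, and cdb_path comes from GetCdbPath() as defined earlier.

# Sketch only: open a dump in cdb, jump to the stored exception context, and
# verify that cdb reports a captured exception. The dump path is hypothetical.
cdb_path = GetCdbPath()
run = CdbRun(cdb_path, r'C:\temp\example.dmp', '.ecxr')
run.Check('This dump file has an exception of interest stored in it',
          'captured exception')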

third_party/fuchsia/runner.py vendored Normal file → Executable file

@@ -1,3 +1,5 @@
#!/usr/bin/env python
# Copyright 2018 The Crashpad Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,4 +16,5 @@
import os
import sys
os.execv(sys.argv[1], sys.argv[1:])


@@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved.
#
@@ -20,6 +19,7 @@ import sys
import mig_fix
import mig_gen
def main(args):
parsed = mig_gen.parse_args(args)
@@ -30,5 +30,6 @@ def main(args):
parsed.migcom_path, parsed.arch)
mig_fix.fix_interface(interface)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
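
For orientation, the flow that main() above drives is roughly the following. This is a sketch, assuming the default mig/clang discovery in mig_gen; the .defs name and output file names are placeholders rather than values taken from this change.

from mig_gen import MigInterface, generate_interface
from mig_fix import fix_interface

# Hypothetical file names: mig emits the four generated sources, then
# fix_interface() patches them in place.
interface = MigInterface('exc_user.c', 'exc_server.c', 'exc_user.h',
                         'exc_server.h')
generate_interface('exc.defs', interface)
fix_interface(interface)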


@@ -22,18 +22,18 @@ import sys
from mig_gen import MigInterface
def _fix_user_implementation(implementation, fixed_implementation, header,
fixed_header):
"""Rewrites a MIG-generated user implementation (.c) file.
Rewrites the file at |implementation| by adding
__attribute__((unused)) to the definition of any structure typedefed
as __Reply by searching for the pattern unique to those structure
definitions. These structures are in fact unused in the user
implementation file, and this will trigger a -Wunused-local-typedefs
warning in gcc unless removed or marked with the unused attribute.
Also changes header references to point to the new header filename, if
changed.
Rewrites the file at |implementation| by adding __attribute__((unused)) to
the definition of any structure typedefed as __Reply by searching for the
pattern unique to those structure definitions. These structures are in fact
unused in the user implementation file, and this will trigger a
-Wunused-local-typedefs warning in gcc unless removed or marked with the
unused attribute. Also changes header references to point to the new
header filename, if changed.
If |fixed_implementation| is None, overwrites the original; otherwise, puts
the result in the file at |fixed_implementation|.
@@ -59,6 +59,7 @@ def _fix_user_implementation(implementation, fixed_implementation, header,
file.write(contents)
file.close()
def _fix_server_implementation(implementation, fixed_implementation, header,
fixed_header):
"""Rewrites a MIG-generated server implementation (.c) file.
@@ -79,24 +80,25 @@ def _fix_server_implementation(implementation, fixed_implementation, header,
contents = file.read()
# Find interesting declarations.
declaration_pattern = \
re.compile('^mig_internal (kern_return_t __MIG_check__.*)$',
re.MULTILINE)
declaration_pattern = re.compile(
'^mig_internal (kern_return_t __MIG_check__.*)$', re.MULTILINE)
declarations = declaration_pattern.findall(contents)
# Remove “__attribute__((__unused__))” from the declarations, and call them
# “mig_external” or “extern” depending on whether “mig_external” is defined.
attribute_pattern = re.compile(r'__attribute__\(\(__unused__\)\) ')
declarations = ['''\
declarations = [
'''\
#ifdef mig_external
mig_external
#else
extern
#endif
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations]
''' + attribute_pattern.sub('', x) + ';\n' for x in declarations
]
# Rewrite the declarations in this file as “mig_external”.
contents = declaration_pattern.sub(r'mig_external \1', contents);
contents = declaration_pattern.sub(r'mig_external \1', contents)
# Crashpad never implements the mach_msg_server() MIG callouts. To avoid
# needing to provide stub implementations, set KERN_FAILURE as the RetCode
@@ -125,6 +127,7 @@ extern
file.close()
return declarations
def _fix_header(header, fixed_header, declarations=[]):
"""Rewrites a MIG-generated header (.h) file.
@@ -161,6 +164,7 @@ extern "C" {
file.write(contents)
file.close()
def fix_interface(interface, fixed_interface=None):
if fixed_interface is None:
fixed_interface = MigInterface(None, None, None, None)
@@ -175,6 +179,7 @@ def fix_interface(interface, fixed_interface=None):
_fix_header(interface.server_h, fixed_interface.server_h,
server_declarations)
def main(args):
parser = argparse.ArgumentParser()
parser.add_argument('user_c')
@@ -187,11 +192,12 @@ def main(args):
parser.add_argument('--fixed_server_h', default=None)
parsed = parser.parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c,
parsed.user_h, parsed.server_h)
interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.server_h)
fixed_interface = MigInterface(parsed.fixed_user_c, parsed.fixed_server_c,
parsed.fixed_user_h, parsed.fixed_server_h)
fix_interface(interface, fixed_interface)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))


@@ -1,5 +1,4 @@
#!/usr/bin/env python
# coding: utf-8
# Copyright 2019 The Crashpad Authors. All rights reserved.
#
@@ -21,19 +20,30 @@ import os
import subprocess
import sys
MigInterface = collections.namedtuple('MigInterface', ['user_c', 'server_c',
'user_h', 'server_h'])
MigInterface = collections.namedtuple(
'MigInterface', ['user_c', 'server_c', 'user_h', 'server_h'])
def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
mig_path=None, migcom_path=None, arch=None):
def generate_interface(defs,
interface,
includes=[],
sdk=None,
clang_path=None,
mig_path=None,
migcom_path=None,
arch=None):
if mig_path is None:
mig_path = 'mig'
command = [mig_path,
'-user', interface.user_c,
'-server', interface.server_c,
'-header', interface.user_h,
'-sheader', interface.server_h,
]
mig_path = 'mig'
# yapf: disable
command = [
mig_path,
'-user', interface.user_c,
'-server', interface.server_c,
'-header', interface.user_h,
'-sheader', interface.server_h,
]
# yapf: enable
if clang_path is not None:
os.environ['MIGCC'] = clang_path
@@ -48,6 +58,7 @@ def generate_interface(defs, interface, includes=[], sdk=None, clang_path=None,
command.append(defs)
subprocess.check_call(command)
def parse_args(args):
parser = argparse.ArgumentParser()
parser.add_argument('--clang-path', help='Path to Clang')
@@ -66,13 +77,15 @@ def parse_args(args):
parser.add_argument('server_h')
return parser.parse_args(args)
def main(args):
parsed = parse_args(args)
interface = MigInterface(parsed.user_c, parsed.server_c,
parsed.user_h, parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include,
parsed.sdk, parsed.clang_path, parsed.mig_path,
parsed.migcom_path, parsed.arch)
interface = MigInterface(parsed.user_c, parsed.server_c, parsed.user_h,
parsed.server_h)
generate_interface(parsed.defs, interface, parsed.include, parsed.sdk,
parsed.clang_path, parsed.mig_path, parsed.migcom_path,
parsed.arch)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
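
The “# yapf: disable” / “# yapf: enable” pair in generate_interface() above is yapf's standard escape hatch for a region whose hand formatting should survive reformatting. A minimal sketch of the same idea, with purely illustrative contents:

# yapf: disable
MIG_FLAGS = [
    ('-user',    'user_c'),    # user-side .c output
    ('-server',  'server_c'),  # server-side .c output
    ('-header',  'user_h'),    # user-side .h output
    ('-sheader', 'server_h'),  # server-side .h output
]
# yapf: enable

Without the markers, yapf would collapse the hand-aligned columns to its normal continuation-indent rules.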


@@ -23,25 +23,28 @@ key = os.path.join(testdata, 'crashpad_util_test_key.pem')
cert = os.path.join(testdata, 'crashpad_util_test_cert.pem')
with open(cert, 'w') as cert_file, open(key, 'w') as key_file:
MESSAGE = 'DO NOT EDIT: This file was auto-generated by ' + __file__ + '\n\n'
cert_file.write(MESSAGE)
key_file.write(MESSAGE)
MESSAGE = ('DO NOT EDIT: This file was auto-generated by ' + __file__ +
'\n\n')
cert_file.write(MESSAGE)
key_file.write(MESSAGE)
proc = subprocess.Popen(
['openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost',
'-days', '3650', '-newkey', 'rsa:2048', '-keyout', '-'],
stderr=open(os.devnull, 'w'), stdout=subprocess.PIPE)
proc = subprocess.Popen([
'openssl', 'req', '-x509', '-nodes', '-subj', '/CN=localhost', '-days',
'3650', '-newkey', 'rsa:2048', '-keyout', '-'
],
stderr=open(os.devnull, 'w'),
stdout=subprocess.PIPE)
contents = proc.communicate()[0]
dest = sys.stderr
for line in contents.splitlines(True):
if line.startswith("-----BEGIN PRIVATE KEY-----"):
dest = key_file
elif line.startswith("-----BEGIN CERTIFICATE-----"):
dest = cert_file
elif line.startswith("-----END"):
dest.write(line)
dest = sys.stderr
continue
contents = proc.communicate()[0]
dest = sys.stderr
for line in contents.splitlines(True):
if line.startswith("-----BEGIN PRIVATE KEY-----"):
dest = key_file
elif line.startswith("-----BEGIN CERTIFICATE-----"):
dest = cert_file
elif line.startswith("-----END"):
dest.write(line)
dest = sys.stderr
continue
dest.write(line)
dest.write(line)