Fix mb.py presubmit issues.

* Add a config file for python formatting (.style.yapf).
* Change the default indentation from 4 spaces to 2 spaces.
* Run 'git cl format --python' on a few python files.

Bug: webrtc:13413
Change-Id: Ia71135131276c2c499b00032d57ad16ee5200a5c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/238982
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Christoffer Jansson <jansson@google.com>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Jeremy Leconte <jleconte@google.com>
Cr-Commit-Position: refs/heads/main@{#35500}
Jeremy Leconte, 2021-12-07 19:49:48 +01:00 (committed by WebRTC LUCI CQ)
Parent: 035f0446c8
Commit: f22c78b01a
6 changed files with 1758 additions and 1697 deletions

.style.yapf (new file, 4 additions)

@@ -0,0 +1,4 @@
[style]
based_on_style = pep8
indent_width = 2
column_limit = 80
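
For illustration only (not part of this change, and assuming the yapf package is installed and the snippet is run from the directory containing .style.yapf), the new style file can be exercised through yapf's Python API to confirm that 4-space bodies are rewritten with the 2-space indent configured above:

# Hypothetical snippet, not part of this CL: format a small function with the
# repository's .style.yapf and check that the 2-space indent takes effect.
from yapf.yapflib.yapf_api import FormatCode

source = "def f(x):\n    return x + 1\n"
formatted, changed = FormatCode(source, style_config='.style.yapf')
print(changed)    # True: the 4-space body does not match indent_width = 2
print(formatted)  # "def f(x):\n  return x + 1\n"

This is roughly the same formatting that 'git cl format --python' applies to the files touched by a CL.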

OWNERS (1 addition)

@@ -17,3 +17,4 @@ per-file pylintrc=mbonadei@webrtc.org
per-file WATCHLISTS=*
per-file native-api.md=mbonadei@webrtc.org
per-file ....lua=titovartem@webrtc.org
per-file .style.yapf=jleconte@webrtc.org

View file

@ -31,9 +31,9 @@ NO_TOOLS_ERROR_MESSAGE = (
'To fix this run:\n' 'To fix this run:\n'
' python %s %s\n' ' python %s %s\n'
'\n' '\n'
'Note that these tools are Google-internal due to licensing, so in order to ' 'Note that these tools are Google-internal due to licensing, so in order '
'use them you will have to get your own license and manually put them in the ' 'to use them you will have to get your own license and manually put them '
'right location.\n' 'in the right location.\n'
'See https://cs.chromium.org/chromium/src/third_party/webrtc/tools_webrtc/' 'See https://cs.chromium.org/chromium/src/third_party/webrtc/tools_webrtc/'
'download_tools.py?rcl=bbceb76f540159e2dba0701ac03c514f01624130&l=13') 'download_tools.py?rcl=bbceb76f540159e2dba0701ac03c514f01624130&l=13')
@ -44,8 +44,7 @@ def _LogCommand(command):
def _ParseArgs(): def _ParseArgs():
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(description='Run low-bandwidth audio tests.')
description='Run low-bandwidth audio tests.')
parser.add_argument('build_dir', parser.add_argument('build_dir',
help='Path to the build directory (e.g. out/Release).') help='Path to the build directory (e.g. out/Release).')
parser.add_argument('--remove', parser.add_argument('--remove',
@ -55,9 +54,7 @@ def _ParseArgs():
'--android', '--android',
action='store_true', action='store_true',
help='Perform the test on a connected Android device instead.') help='Perform the test on a connected Android device instead.')
parser.add_argument('--adb-path', parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
help='Path to adb binary.',
default='adb')
parser.add_argument('--num-retries', parser.add_argument('--num-retries',
default='0', default='0',
help='Number of times to retry the test on Android.') help='Number of times to retry the test on Android.')
@ -71,9 +68,7 @@ def _ParseArgs():
help='Extra args to path to the test binary.') help='Extra args to path to the test binary.')
# Ignore Chromium-specific flags # Ignore Chromium-specific flags
parser.add_argument('--test-launcher-summary-output', parser.add_argument('--test-launcher-summary-output', type=str, default=None)
type=str,
default=None)
args = parser.parse_args() args = parser.parse_args()
return args return args
@ -86,6 +81,7 @@ def _GetPlatform():
return 'mac' return 'mac'
elif sys.platform.startswith('linux'): elif sys.platform.startswith('linux'):
return 'linux' return 'linux'
raise AssertionError('Unknown platform %s' % sys.platform)
def _GetExtension(): def _GetExtension():
@ -109,8 +105,7 @@ def _GetPathToTools():
if (platform != 'mac' and not polqa_path) or not pesq_path: if (platform != 'mac' and not polqa_path) or not pesq_path:
logging.error(NO_TOOLS_ERROR_MESSAGE, toolchain_dir, logging.error(NO_TOOLS_ERROR_MESSAGE, toolchain_dir,
os.path.join(tools_dir, 'download_tools.py'), os.path.join(tools_dir, 'download_tools.py'), toolchain_dir)
toolchain_dir)
return pesq_path, polqa_path return pesq_path, polqa_path
@ -186,9 +181,7 @@ def _RunPesq(executable_path,
r'Prediction \(Raw MOS, MOS-LQO\):\s+=\s+([\d.]+)\s+([\d.]+)', out) r'Prediction \(Raw MOS, MOS-LQO\):\s+=\s+([\d.]+)\s+([\d.]+)', out)
if match: if match:
raw_mos, _ = match.groups() raw_mos, _ = match.groups()
return {'pesq_mos': (raw_mos, 'unitless')} return {'pesq_mos': (raw_mos, 'unitless')}
else:
logging.error('PESQ: %s', out.splitlines()[-1]) logging.error('PESQ: %s', out.splitlines()[-1])
return {} return {}
@ -239,31 +232,30 @@ Analyzer = collections.namedtuple(
def _ConfigurePythonPath(args): def _ConfigurePythonPath(args):
script_dir = os.path.dirname(os.path.realpath(__file__)) script_dir = os.path.dirname(os.path.realpath(__file__))
checkout_root = os.path.abspath( checkout_root = os.path.abspath(os.path.join(script_dir, os.pardir,
os.path.join(script_dir, os.pardir, os.pardir)) os.pardir))
# TODO(https://crbug.com/1029452): Use a copy rule and add these from the out # TODO(https://crbug.com/1029452): Use a copy rule and add these from the
# dir like for the third_party/protobuf code. # out dir like for the third_party/protobuf code.
sys.path.insert( sys.path.insert(
0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing')) 0, os.path.join(checkout_root, 'third_party', 'catapult', 'tracing'))
# The low_bandwidth_audio_perf_test gn rule will build the protobuf stub for # The low_bandwidth_audio_perf_test gn rule will build the protobuf stub
# python, so put it in the path for this script before we attempt to import # for python, so put it in the path for this script before we attempt to
# it. # import it.
histogram_proto_path = os.path.join(os.path.abspath(args.build_dir), histogram_proto_path = os.path.join(os.path.abspath(args.build_dir),
'pyproto', 'tracing', 'tracing', 'pyproto', 'tracing', 'tracing', 'proto')
'proto')
sys.path.insert(0, histogram_proto_path) sys.path.insert(0, histogram_proto_path)
proto_stub_path = os.path.join(os.path.abspath(args.build_dir), 'pyproto') proto_stub_path = os.path.join(os.path.abspath(args.build_dir), 'pyproto')
sys.path.insert(0, proto_stub_path) sys.path.insert(0, proto_stub_path)
# Fail early in case the proto hasn't been built. # Fail early in case the proto hasn't been built.
try: try:
#pylint: disable=unused-variable
import histogram_pb2 import histogram_pb2
except ImportError as e: except ImportError as e:
logging.exception(e) logging.exception(e)
raise ImportError( raise ImportError('Could not import histogram_pb2. You need to build the '
'Could not import histogram_pb2. You need to build the '
'low_bandwidth_audio_perf_test target before invoking ' 'low_bandwidth_audio_perf_test target before invoking '
'this script. Expected to find ' 'this script. Expected to find '
'histogram_pb2.py in %s.' % histogram_proto_path) 'histogram_pb2.py in %s.' % histogram_proto_path)
@ -290,14 +282,11 @@ def main():
out_dir = os.path.join(args.build_dir, '..') out_dir = os.path.join(args.build_dir, '..')
if args.android: if args.android:
test_command = [ test_command = [
os.path.join(args.build_dir, 'bin', os.path.join(args.build_dir, 'bin', 'run_low_bandwidth_audio_test'),
'run_low_bandwidth_audio_test'), '-v', '-v', '--num-retries', args.num_retries
'--num-retries', args.num_retries
] ]
else: else:
test_command = [ test_command = [os.path.join(args.build_dir, 'low_bandwidth_audio_test')]
os.path.join(args.build_dir, 'low_bandwidth_audio_test')
]
analyzers = [Analyzer('pesq', _RunPesq, pesq_path, 16000)] analyzers = [Analyzer('pesq', _RunPesq, pesq_path, 16000)]
# Check if POLQA can run at all, or skip the 48 kHz tests entirely. # Check if POLQA can run at all, or skip the 48 kHz tests entirely.
@ -336,16 +325,15 @@ def main():
android=args.android, android=args.android,
adb_prefix=adb_prefix) adb_prefix=adb_prefix)
analyzer_results = analyzer.func(analyzer.executable, analyzer_results = analyzer.func(analyzer.executable, reference_file,
reference_file, degraded_file) degraded_file)
for metric, (value, units) in analyzer_results.items(): for metric, (value, units) in analyzer_results.items():
hist = histograms.CreateHistogram(metric, units, [value]) hist = histograms.CreateHistogram(metric, units, [value])
user_story = generic_set.GenericSet([test_name]) user_story = generic_set.GenericSet([test_name])
hist.diagnostics[reserved_infos.STORIES.name] = user_story hist.diagnostics[reserved_infos.STORIES.name] = user_story
# Output human readable results. # Output human readable results.
print 'RESULT %s: %s= %s %s' % (metric, test_name, value, print 'RESULT %s: %s= %s %s' % (metric, test_name, value, units)
units)
if args.remove: if args.remove:
os.remove(reference_file) os.remove(reference_file)

View file

@@ -97,6 +97,9 @@ max-line-length=80
# Maximum number of lines in a module
max-module-lines=1000
# We use two spaces for indents, instead of the usual four spaces or tab.
indent-string='  '
[BASIC]

@@ -192,10 +195,6 @@ max-public-methods=20
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
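
Illustrative only (not part of the CL): with indent-string set to two spaces, pylint's bad-indentation check (W0311) is expected to flag bodies that are still indented with four spaces, for example:

# Hypothetical file checked against the pylintrc above; pylint should report
# something like "W0311: Bad indentation. Found 4 spaces, expected 2".
def add(a, b):
    return a + b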

View file

@ -33,8 +33,6 @@ try:
except ImportError: except ImportError:
from urllib.request import urlopen # for Python3 from urllib.request import urlopen # for Python3
from collections import OrderedDict
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR)) SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR))
sys.path = [os.path.join(SRC_DIR, 'build')] + sys.path sys.path = [os.path.join(SRC_DIR, 'build')] + sys.path
@ -51,8 +49,7 @@ class MetaBuildWrapper(object):
def __init__(self): def __init__(self):
self.src_dir = SRC_DIR self.src_dir = SRC_DIR
self.default_config = os.path.join(SCRIPT_DIR, 'mb_config.pyl') self.default_config = os.path.join(SCRIPT_DIR, 'mb_config.pyl')
self.default_isolate_map = os.path.join( self.default_isolate_map = os.path.join(SCRIPT_DIR, 'gn_isolate_map.pyl')
SCRIPT_DIR, 'gn_isolate_map.pyl')
self.executable = sys.executable self.executable = sys.executable
self.platform = sys.platform self.platform = sys.platform
self.sep = os.sep self.sep = os.sep
@ -82,37 +79,47 @@ class MetaBuildWrapper(object):
def ParseArgs(self, argv): def ParseArgs(self, argv):
def AddCommonOptions(subp): def AddCommonOptions(subp):
subp.add_argument('-b', '--builder', subp.add_argument('-b',
'--builder',
help='builder name to look up config from') help='builder name to look up config from')
subp.add_argument('-m', '--builder-group', subp.add_argument('-m',
'--builder-group',
help='builder group name to look up config from') help='builder group name to look up config from')
subp.add_argument('-c', '--config', subp.add_argument('-c', '--config', help='configuration to analyze')
help='configuration to analyze')
subp.add_argument('--phase', subp.add_argument('--phase',
help='optional phase name (used when builders ' help='optional phase name (used when builders '
'do multiple compiles with different ' 'do multiple compiles with different '
'arguments in a single build)') 'arguments in a single build)')
subp.add_argument('-f', '--config-file', metavar='PATH', subp.add_argument('-f',
'--config-file',
metavar='PATH',
default=self.default_config, default=self.default_config,
help='path to config file ' help='path to config file '
'(default is %(default)s)') '(default is %(default)s)')
subp.add_argument('-i', '--isolate-map-file', metavar='PATH', subp.add_argument('-i',
'--isolate-map-file',
metavar='PATH',
default=self.default_isolate_map, default=self.default_isolate_map,
help='path to isolate map file ' help='path to isolate map file '
'(default is %(default)s)') '(default is %(default)s)')
subp.add_argument('-r', '--realm', default='webrtc:try', subp.add_argument('-r',
'--realm',
default='webrtc:try',
help='optional LUCI realm to use (for example ' help='optional LUCI realm to use (for example '
'when triggering tasks on Swarming)') 'when triggering tasks on Swarming)')
subp.add_argument('-g', '--goma-dir', subp.add_argument('-g', '--goma-dir', help='path to goma directory')
help='path to goma directory')
subp.add_argument('--android-version-code', subp.add_argument('--android-version-code',
help='Sets GN arg android_default_version_code') help='Sets GN arg android_default_version_code')
subp.add_argument('--android-version-name', subp.add_argument('--android-version-name',
help='Sets GN arg android_default_version_name') help='Sets GN arg android_default_version_name')
subp.add_argument('-n', '--dryrun', action='store_true', subp.add_argument('-n',
'--dryrun',
action='store_true',
help='Do a dry run (i.e., do nothing, just ' help='Do a dry run (i.e., do nothing, just '
'print the commands that will run)') 'print the commands that will run)')
subp.add_argument('-v', '--verbose', action='store_true', subp.add_argument('-v',
'--verbose',
action='store_true',
help='verbose logging') help='verbose logging')
parser = argparse.ArgumentParser(prog='mb') parser = argparse.ArgumentParser(prog='mb')
@ -123,47 +130,45 @@ class MetaBuildWrapper(object):
'files will cause a set of binaries ' 'files will cause a set of binaries '
'to be rebuilt.') 'to be rebuilt.')
AddCommonOptions(subp) AddCommonOptions(subp)
subp.add_argument('path', nargs=1, subp.add_argument('path', nargs=1, help='path build was generated into.')
help='path build was generated into.') subp.add_argument('input_path',
subp.add_argument('input_path', nargs=1, nargs=1,
help='path to a file containing the input ' help='path to a file containing the input '
'arguments as a JSON object.') 'arguments as a JSON object.')
subp.add_argument('output_path', nargs=1, subp.add_argument('output_path',
nargs=1,
help='path to a file containing the output ' help='path to a file containing the output '
'arguments as a JSON object.') 'arguments as a JSON object.')
subp.add_argument('--json-output', subp.add_argument('--json-output', help='Write errors to json.output')
help='Write errors to json.output')
subp.set_defaults(func=self.CmdAnalyze) subp.set_defaults(func=self.CmdAnalyze)
subp = subps.add_parser('export', subp = subps.add_parser('export',
help='print out the expanded configuration for' help='print out the expanded configuration for'
'each builder as a JSON object') 'each builder as a JSON object')
subp.add_argument('-f', '--config-file', metavar='PATH', subp.add_argument('-f',
'--config-file',
metavar='PATH',
default=self.default_config, default=self.default_config,
help='path to config file (default is %(default)s)') help='path to config file (default is %(default)s)')
subp.add_argument('-g', '--goma-dir', subp.add_argument('-g', '--goma-dir', help='path to goma directory')
help='path to goma directory')
subp.set_defaults(func=self.CmdExport) subp.set_defaults(func=self.CmdExport)
subp = subps.add_parser('gen', subp = subps.add_parser('gen', help='generate a new set of build files')
help='generate a new set of build files')
AddCommonOptions(subp) AddCommonOptions(subp)
subp.add_argument('--swarming-targets-file', subp.add_argument('--swarming-targets-file',
help='save runtime dependencies for targets listed ' help='save runtime dependencies for targets listed '
'in file.') 'in file.')
subp.add_argument('--json-output', subp.add_argument('--json-output', help='Write errors to json.output')
help='Write errors to json.output') subp.add_argument('path', nargs=1, help='path to generate build into')
subp.add_argument('path', nargs=1,
help='path to generate build into')
subp.set_defaults(func=self.CmdGen) subp.set_defaults(func=self.CmdGen)
subp = subps.add_parser('isolate', subp = subps.add_parser('isolate',
help='generate the .isolate files for a given' help='generate the .isolate files for a given'
'binary') 'binary')
AddCommonOptions(subp) AddCommonOptions(subp)
subp.add_argument('path', nargs=1, subp.add_argument('path', nargs=1, help='path build was generated into')
help='path build was generated into') subp.add_argument('target',
subp.add_argument('target', nargs=1, nargs=1,
help='ninja target to generate the isolate for') help='ninja target to generate the isolate for')
subp.set_defaults(func=self.CmdIsolate) subp.set_defaults(func=self.CmdIsolate)
@ -171,7 +176,9 @@ class MetaBuildWrapper(object):
help='look up the command for a given config ' help='look up the command for a given config '
'or builder') 'or builder')
AddCommonOptions(subp) AddCommonOptions(subp)
subp.add_argument('--quiet', default=False, action='store_true', subp.add_argument('--quiet',
default=False,
action='store_true',
help='Print out just the arguments, do ' help='Print out just the arguments, do '
'not emulate the output of the gen subcommand.') 'not emulate the output of the gen subcommand.')
subp.set_defaults(func=self.CmdLookup) subp.set_defaults(func=self.CmdLookup)
@ -195,49 +202,62 @@ class MetaBuildWrapper(object):
'\n' '\n'
' % tools/mb/mb.py run out/Default content_browsertests -- \\\n' ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n'
' --test-launcher-retry-limit=0' ' --test-launcher-retry-limit=0'
'\n' '\n')
)
AddCommonOptions(subp) AddCommonOptions(subp)
subp.add_argument('-j', '--jobs', dest='jobs', type=int, subp.add_argument('-j',
'--jobs',
dest='jobs',
type=int,
help='Number of jobs to pass to ninja') help='Number of jobs to pass to ninja')
subp.add_argument('--no-build', dest='build', default=True, subp.add_argument('--no-build',
dest='build',
default=True,
action='store_false', action='store_false',
help='Do not build, just isolate and run') help='Do not build, just isolate and run')
subp.add_argument('path', nargs=1, subp.add_argument('path',
nargs=1,
help=('path to generate build into (or use).' help=('path to generate build into (or use).'
' This can be either a regular path or a ' ' This can be either a regular path or a '
'GN-style source-relative path like ' 'GN-style source-relative path like '
'//out/Default.')) '//out/Default.'))
subp.add_argument('-s', '--swarmed', action='store_true', subp.add_argument('-s',
'--swarmed',
action='store_true',
help='Run under swarming') help='Run under swarming')
subp.add_argument('-d', '--dimension', default=[], action='append', subp.add_argument('-d',
nargs=2, dest='dimensions', metavar='FOO bar', '--dimension',
default=[],
action='append',
nargs=2,
dest='dimensions',
metavar='FOO bar',
help='dimension to filter on') help='dimension to filter on')
subp.add_argument('target', nargs=1, subp.add_argument('target', nargs=1, help='ninja target to build and run')
help='ninja target to build and run') subp.add_argument('extra_args',
subp.add_argument('extra_args', nargs='*', nargs='*',
help=('extra args to pass to the isolate to run. ' help=('extra args to pass to the isolate to run. '
'Use "--" as the first arg if you need to ' 'Use "--" as the first arg if you need to '
'pass switches')) 'pass switches'))
subp.set_defaults(func=self.CmdRun) subp.set_defaults(func=self.CmdRun)
subp = subps.add_parser('validate', subp = subps.add_parser('validate', help='validate the config file')
help='validate the config file') subp.add_argument('-f',
subp.add_argument('-f', '--config-file', metavar='PATH', '--config-file',
metavar='PATH',
default=self.default_config, default=self.default_config,
help='path to config file (default is %(default)s)') help='path to config file (default is %(default)s)')
subp.set_defaults(func=self.CmdValidate) subp.set_defaults(func=self.CmdValidate)
subp = subps.add_parser('help', subp = subps.add_parser('help', help='Get help on a subcommand.')
help='Get help on a subcommand.') subp.add_argument(nargs='?',
subp.add_argument(nargs='?', action='store', dest='subcommand', action='store',
dest='subcommand',
help='The command to get help for.') help='The command to get help for.')
subp.set_defaults(func=self.CmdHelp) subp.set_defaults(func=self.CmdHelp)
self.args = parser.parse_args(argv) self.args = parser.parse_args(argv)
def DumpInputFiles(self): def DumpInputFiles(self):
def DumpContentsOfFilePassedTo(arg_name, path): def DumpContentsOfFilePassedTo(arg_name, path):
if path and self.Exists(path): if path and self.Exists(path):
self.Print("\n# To recreate the file passed to %s:" % arg_name) self.Print("\n# To recreate the file passed to %s:" % arg_name)
@ -247,11 +267,11 @@ class MetaBuildWrapper(object):
self.Print("EOF\n%\n") self.Print("EOF\n%\n")
if getattr(self.args, 'input_path', None): if getattr(self.args, 'input_path', None):
DumpContentsOfFilePassedTo( DumpContentsOfFilePassedTo('argv[0] (input_path)',
'argv[0] (input_path)', self.args.input_path[0]) self.args.input_path[0])
if getattr(self.args, 'swarming_targets_file', None): if getattr(self.args, 'swarming_targets_file', None):
DumpContentsOfFilePassedTo( DumpContentsOfFilePassedTo('--swarming-targets-file',
'--swarming-targets-file', self.args.swarming_targets_file) self.args.swarming_targets_file)
def CmdAnalyze(self): def CmdAnalyze(self):
vals = self.Lookup() vals = self.Lookup()
@ -268,8 +288,10 @@ class MetaBuildWrapper(object):
continue continue
if isinstance(config, dict): if isinstance(config, dict):
args = {k: self.FlattenConfig(v)['gn_args'] args = {
for k, v in config.items()} k: self.FlattenConfig(v)['gn_args']
for k, v in config.items()
}
elif config.startswith('//'): elif config.startswith('//'):
args = config args = config
else: else:
@ -280,8 +302,9 @@ class MetaBuildWrapper(object):
obj[builder_group][builder] = args obj[builder_group][builder] = args
# Dump object and trim trailing whitespace. # Dump object and trim trailing whitespace.
s = '\n'.join(l.rstrip() for l in s = '\n'.join(
json.dumps(obj, sort_keys=True, indent=2).splitlines()) l.rstrip()
for l in json.dumps(obj, sort_keys=True, indent=2).splitlines())
self.Print(s) self.Print(s)
return 0 return 0
@ -348,8 +371,8 @@ class MetaBuildWrapper(object):
for k, v in self.args.dimensions: for k, v in self.args.dimensions:
dimensions += ['-d', '%s=%s' % (k, v)] dimensions += ['-d', '%s=%s' % (k, v)]
archive_json_path = self.ToSrcRelPath( archive_json_path = self.ToSrcRelPath('%s/%s.archive.json' %
'%s/%s.archive.json' % (build_dir, target)) (build_dir, target))
cmd = [ cmd = [
self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe), self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe),
'archive', 'archive',
@ -378,15 +401,13 @@ class MetaBuildWrapper(object):
try: try:
archive_hashes = json.loads(self.ReadFile(archive_json_path)) archive_hashes = json.loads(self.ReadFile(archive_json_path))
except Exception: except Exception:
self.Print( self.Print('Failed to read JSON file "%s"' % archive_json_path,
'Failed to read JSON file "%s"' % file=sys.stderr)
archive_json_path, file=sys.stderr)
return 1 return 1
try: try:
cas_digest = archive_hashes[target] cas_digest = archive_hashes[target]
except Exception: except Exception:
self.Print( self.Print('Cannot find hash for "%s" in "%s", file content: %s' %
'Cannot find hash for "%s" in "%s", file content: %s' %
(target, archive_json_path, archive_hashes), (target, archive_json_path, archive_hashes),
file=sys.stderr) file=sys.stderr)
return 1 return 1
@ -469,8 +490,7 @@ class MetaBuildWrapper(object):
errs.append('Unknown args file "%s" referenced from "%s".' % errs.append('Unknown args file "%s" referenced from "%s".' %
(config, loc)) (config, loc))
elif not config in self.configs: elif not config in self.configs:
errs.append('Unknown config "%s" referenced from "%s".' % errs.append('Unknown config "%s" referenced from "%s".' % (config, loc))
(config, loc))
# Check that every actual config is actually referenced. # Check that every actual config is actually referenced.
for config in self.configs: for config in self.configs:
@ -483,15 +503,15 @@ class MetaBuildWrapper(object):
for config, mixins in self.configs.items(): for config, mixins in self.configs.items():
for mixin in mixins: for mixin in mixins:
if not mixin in self.mixins: if not mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by config "%s".' errs.append('Unknown mixin "%s" referenced by config "%s".' %
% (mixin, config)) (mixin, config))
referenced_mixins.add(mixin) referenced_mixins.add(mixin)
for mixin in self.mixins: for mixin in self.mixins:
for sub_mixin in self.mixins[mixin].get('mixins', []): for sub_mixin in self.mixins[mixin].get('mixins', []):
if not sub_mixin in self.mixins: if not sub_mixin in self.mixins:
errs.append('Unknown mixin "%s" referenced by mixin "%s".' errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
% (sub_mixin, mixin)) (sub_mixin, mixin))
referenced_mixins.add(sub_mixin) referenced_mixins.add(sub_mixin)
# Check that every mixin defined is actually referenced somewhere. # Check that every mixin defined is actually referenced somewhere.
@ -500,8 +520,8 @@ class MetaBuildWrapper(object):
errs.append('Unreferenced mixin "%s".' % mixin) errs.append('Unreferenced mixin "%s".' % mixin)
if errs: if errs:
raise MBErr(('mb config file %s has problems:' % raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
self.args.config_file) + '\n ' + '\n '.join(errs)) '\n ' + '\n '.join(errs))
if print_ok: if print_ok:
self.Print('mb config file %s looks ok.' % self.args.config_file) self.Print('mb config file %s looks ok.' % self.args.config_file)
@ -518,8 +538,7 @@ class MetaBuildWrapper(object):
self.RunGNGen(vals) self.RunGNGen(vals)
return vals return vals
toolchain_path = self.PathJoin(self.ToAbsPath(build_dir), toolchain_path = self.PathJoin(self.ToAbsPath(build_dir), 'toolchain.ninja')
'toolchain.ninja')
if not self.Exists(toolchain_path): if not self.Exists(toolchain_path):
self.Print('Must either specify a path to an existing GN build ' self.Print('Must either specify a path to an existing GN build '
'dir or pass in a -m/-b pair or a -c flag to specify ' 'dir or pass in a -m/-b pair or a -c flag to specify '
@ -579,8 +598,8 @@ class MetaBuildWrapper(object):
try: try:
return ast.literal_eval(self.ReadFile(isolate_map)) return ast.literal_eval(self.ReadFile(isolate_map))
except SyntaxError as e: except SyntaxError as e:
raise MBErr( raise MBErr('Failed to parse isolate map file "%s": %s' %
'Failed to parse isolate map file "%s": %s' % (isolate_map, e)) (isolate_map, e))
def ConfigFromArgs(self): def ConfigFromArgs(self):
if self.args.config: if self.args.config:
@ -598,16 +617,12 @@ class MetaBuildWrapper(object):
raise MBErr('Master name "%s" not found in "%s"' % raise MBErr('Master name "%s" not found in "%s"' %
(self.args.builder_group, self.args.config_file)) (self.args.builder_group, self.args.config_file))
if (not self.args.builder in if not self.args.builder in self.builder_groups[self.args.builder_group]:
self.builder_groups[self.args.builder_group]):
raise MBErr( raise MBErr(
'Builder name "%s" not found under builder_groups[%s] in "%s"' 'Builder name "%s" not found under builder_groups[%s] in "%s"' %
% (self.args.builder, self.args.builder_group, (self.args.builder, self.args.builder_group, self.args.config_file))
self.args.config_file))
config = ( config = (self.builder_groups[self.args.builder_group][self.args.builder])
self.builder_groups[self.args.builder_group][self.args.builder]
)
if isinstance(config, dict): if isinstance(config, dict):
if self.args.phase is None: if self.args.phase is None:
raise MBErr('Must specify a build --phase for %s on %s' % raise MBErr('Must specify a build --phase for %s on %s' %
@ -615,8 +630,7 @@ class MetaBuildWrapper(object):
phase = str(self.args.phase) phase = str(self.args.phase)
if phase not in config: if phase not in config:
raise MBErr('Phase %s doesn\'t exist for %s on %s' % raise MBErr('Phase %s doesn\'t exist for %s on %s' %
(phase, self.args.builder, (phase, self.args.builder, self.args.builder_group))
self.args.builder_group))
return config[phase] return config[phase]
if self.args.phase is not None: if self.args.phase is not None:
@ -632,7 +646,8 @@ class MetaBuildWrapper(object):
self.FlattenMixins(mixins, vals, visited) self.FlattenMixins(mixins, vals, visited)
return vals return vals
def DefaultVals(self): @staticmethod
def DefaultVals():
return { return {
'args_file': '', 'args_file': '',
'cros_passthrough': False, 'cros_passthrough': False,
@ -672,7 +687,7 @@ class MetaBuildWrapper(object):
gn_args_path = self.ToAbsPath(build_dir, 'args.gn') gn_args_path = self.ToAbsPath(build_dir, 'args.gn')
self.WriteFile(gn_args_path, gn_args, force_verbose=True) self.WriteFile(gn_args_path, gn_args, force_verbose=True)
swarming_targets = [] swarming_targets = set()
if getattr(self.args, 'swarming_targets_file', None): if getattr(self.args, 'swarming_targets_file', None):
# We need GN to generate the list of runtime dependencies for # We need GN to generate the list of runtime dependencies for
# the compile targets listed (one per line) in the file so # the compile targets listed (one per line) in the file so
@ -686,8 +701,7 @@ class MetaBuildWrapper(object):
swarming_targets = set(contents.splitlines()) swarming_targets = set(contents.splitlines())
isolate_map = self.ReadIsolateMap() isolate_map = self.ReadIsolateMap()
err, labels = self.MapTargetsToLabels( err, labels = self.MapTargetsToLabels(isolate_map, swarming_targets)
isolate_map, swarming_targets)
if err: if err:
raise MBErr(err) raise MBErr(err)
@ -716,7 +730,8 @@ class MetaBuildWrapper(object):
label = isolate_map[target]['label'] label = isolate_map[target]['label']
runtime_deps_targets = [ runtime_deps_targets = [
target + '.runtime_deps', target + '.runtime_deps',
'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')
]
elif isolate_map[target]['type'] == 'gpu_browser_test': elif isolate_map[target]['type'] == 'gpu_browser_test':
if self.platform == 'win32': if self.platform == 'win32':
runtime_deps_targets = ['browser_tests.exe.runtime_deps'] runtime_deps_targets = ['browser_tests.exe.runtime_deps']
@ -724,8 +739,7 @@ class MetaBuildWrapper(object):
runtime_deps_targets = ['browser_tests.runtime_deps'] runtime_deps_targets = ['browser_tests.runtime_deps']
elif isolate_map[target]['type'] == 'script': elif isolate_map[target]['type'] == 'script':
label = isolate_map[target]['label'].split(':')[1] label = isolate_map[target]['label'].split(':')[1]
runtime_deps_targets = [ runtime_deps_targets = ['%s.runtime_deps' % label]
'%s.runtime_deps' % label]
if self.platform == 'win32': if self.platform == 'win32':
runtime_deps_targets += [label + '.exe.runtime_deps'] runtime_deps_targets += [label + '.exe.runtime_deps']
else: else:
@ -777,9 +791,9 @@ class MetaBuildWrapper(object):
ret, _, _ = self.Run([ ret, _, _ = self.Run([
self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe), self.PathJoin(self.src_dir, 'tools', 'luci-go', self.isolate_exe),
'check', 'check', '-i',
'-i', self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target))
self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target))], ],
buffer_output=False) buffer_output=False)
return ret return ret
@ -787,7 +801,8 @@ class MetaBuildWrapper(object):
def WriteIsolateFiles(self, build_dir, command, target, runtime_deps, def WriteIsolateFiles(self, build_dir, command, target, runtime_deps,
extra_files): extra_files):
isolate_path = self.ToAbsPath(build_dir, target + '.isolate') isolate_path = self.ToAbsPath(build_dir, target + '.isolate')
self.WriteFile(isolate_path, self.WriteFile(
isolate_path,
pprint.pformat({ pprint.pformat({
'variables': { 'variables': {
'command': command, 'command': command,
@ -801,13 +816,16 @@ class MetaBuildWrapper(object):
'--isolate', '--isolate',
self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
], ],
'dir': self.src_dir, 'dir':
'version': 1, self.src_dir,
'version':
1,
}, },
isolate_path + 'd.gen.json', isolate_path + 'd.gen.json',
) )
def MapTargetsToLabels(self, isolate_map, targets): @staticmethod
def MapTargetsToLabels(isolate_map, targets):
labels = [] labels = []
err = '' err = ''
@ -831,8 +849,7 @@ class MetaBuildWrapper(object):
if isolate_map[stripped_target]['type'] == 'unknown': if isolate_map[stripped_target]['type'] == 'unknown':
err += ('test target "%s" type is unknown\n' % target) err += ('test target "%s" type is unknown\n' % target)
else: else:
labels.append( labels.append(isolate_map[stripped_target]['label'] + suffix)
isolate_map[stripped_target]['label'] + suffix)
else: else:
err += ('target "%s" not found in ' err += ('target "%s" not found in '
'//testing/buildbot/gn_isolate_map.pyl\n' % target) '//testing/buildbot/gn_isolate_map.pyl\n' % target)
@ -866,13 +883,11 @@ class MetaBuildWrapper(object):
android_version_code = self.args.android_version_code android_version_code = self.args.android_version_code
if android_version_code: if android_version_code:
gn_args += (' android_default_version_code="%s"' % gn_args += (' android_default_version_code="%s"' % android_version_code)
android_version_code)
android_version_name = self.args.android_version_name android_version_name = self.args.android_version_name
if android_version_name: if android_version_name:
gn_args += (' android_default_version_name="%s"' % gn_args += (' android_default_version_name="%s"' % android_version_name)
android_version_name)
# Canonicalize the arg string into a sorted, newline-separated list # Canonicalize the arg string into a sorted, newline-separated list
# of key-value pairs, and de-dup the keys if need be so that only # of key-value pairs, and de-dup the keys if need be so that only
@ -892,15 +907,15 @@ class MetaBuildWrapper(object):
is_linux = self.platform.startswith('linux') and not is_android is_linux = self.platform.startswith('linux') and not is_android
if test_type == 'nontest': if test_type == 'nontest':
self.WriteFailureAndRaise('We should not be isolating %s.' % self.WriteFailureAndRaise('We should not be isolating %s.' % target,
target, output_path=None) output_path=None)
if test_type not in ('console_test_launcher', 'windowed_test_launcher', if test_type not in ('console_test_launcher', 'windowed_test_launcher',
'non_parallel_console_test_launcher', 'raw', 'non_parallel_console_test_launcher', 'raw',
'additional_compile_target', 'junit_test', 'additional_compile_target', 'junit_test', 'script'):
'script'):
self.WriteFailureAndRaise('No command line for ' self.WriteFailureAndRaise('No command line for '
'%s found (test type %s).' '%s found (test type %s).' %
% (target, test_type), output_path=None) (target, test_type),
output_path=None)
cmdline = [] cmdline = []
extra_files = [ extra_files = [
@ -911,27 +926,27 @@ class MetaBuildWrapper(object):
must_retry = False must_retry = False
if test_type == 'script': if test_type == 'script':
cmdline += [vpython_exe, cmdline += [
'../../' + vpython_exe,
self.ToSrcRelPath(isolate_map[target]['script'])] '../../' + self.ToSrcRelPath(isolate_map[target]['script'])
]
elif is_android: elif is_android:
cmdline += [vpython_exe, cmdline += [
'../../build/android/test_wrapper/logdog_wrapper.py', vpython_exe, '../../build/android/test_wrapper/logdog_wrapper.py',
'--target', target, '--target', target, '--logdog-bin-cmd', '../../bin/logdog_butler',
'--logdog-bin-cmd', '../../bin/logdog_butler',
'--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats',
'--store-tombstones'] '--store-tombstones'
]
else: else:
if test_type == 'raw': if test_type == 'raw':
cmdline += [vpython_exe, cmdline += [vpython_exe, '../../tools_webrtc/flags_compatibility.py']
'../../tools_webrtc/flags_compatibility.py']
extra_files.append('../../tools_webrtc/flags_compatibility.py') extra_files.append('../../tools_webrtc/flags_compatibility.py')
if isolate_map[target].get('use_webcam', False): if isolate_map[target].get('use_webcam', False):
cmdline += [vpython_exe, cmdline += [
'../../tools_webrtc/ensure_webcam_is_running.py'] vpython_exe, '../../tools_webrtc/ensure_webcam_is_running.py'
extra_files.append( ]
'../../tools_webrtc/ensure_webcam_is_running.py') extra_files.append('../../tools_webrtc/ensure_webcam_is_running.py')
# is_linux uses use_ozone and x11 by default. # is_linux uses use_ozone and x11 by default.
use_x11 = is_linux use_x11 = is_linux
@ -998,9 +1013,7 @@ class MetaBuildWrapper(object):
return cmdline, extra_files return cmdline, extra_files
def ToAbsPath(self, build_path, *comps): def ToAbsPath(self, build_path, *comps):
return self.PathJoin(self.src_dir, return self.PathJoin(self.src_dir, self.ToSrcRelPath(build_path), *comps)
self.ToSrcRelPath(build_path),
*comps)
def ToSrcRelPath(self, path): def ToSrcRelPath(self, path):
"""Returns a relative path from the top of the repo.""" """Returns a relative path from the top of the repo."""
@ -1021,8 +1034,8 @@ class MetaBuildWrapper(object):
output_path = self.args.output_path[0] output_path = self.args.output_path[0]
gn_output_path = output_path + '.gn' gn_output_path = output_path + '.gn'
inp = self.ReadInputJSON(['files', 'test_targets', inp = self.ReadInputJSON(
'additional_compile_targets']) ['files', 'test_targets', 'additional_compile_targets'])
if self.args.verbose: if self.args.verbose:
self.Print() self.Print()
self.Print('analyze input:') self.Print('analyze input:')
@ -1033,9 +1046,9 @@ class MetaBuildWrapper(object):
# conditions, like a try job that gets scheduled before a patch # conditions, like a try job that gets scheduled before a patch
# lands but runs after the patch has landed. # lands but runs after the patch has landed.
if not inp['files']: if not inp['files']:
self.Print( self.Print('Warning: No files modified in patch, bailing out early.')
'Warning: No files modified in patch, bailing out early.') self.WriteJSON(
self.WriteJSON({ {
'status': 'No dependency', 'status': 'No dependency',
'compile_targets': [], 'compile_targets': [],
'test_targets': [], 'test_targets': [],
@ -1043,8 +1056,7 @@ class MetaBuildWrapper(object):
return 0 return 0
gn_inp = {} gn_inp = {}
gn_inp['files'] = ['//' + f for f in inp['files'] gn_inp['files'] = ['//' + f for f in inp['files'] if not f.startswith('//')]
if not f.startswith('//')]
isolate_map = self.ReadIsolateMap() isolate_map = self.ReadIsolateMap()
err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels( err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels(
@ -1062,8 +1074,7 @@ class MetaBuildWrapper(object):
try: try:
self.WriteJSON(gn_inp, gn_input_path) self.WriteJSON(gn_inp, gn_input_path)
cmd = self.GNCmd('analyze', build_path, cmd = self.GNCmd('analyze', build_path, gn_input_path, gn_output_path)
gn_input_path, gn_output_path)
ret, output, _ = self.Run(cmd, force_verbose=True) ret, output, _ = self.Run(cmd, force_verbose=True)
if ret: if ret:
if self.args.json_output: if self.args.json_output:
@ -1091,13 +1102,11 @@ class MetaBuildWrapper(object):
outp['compile_targets'] = ['all'] outp['compile_targets'] = ['all']
else: else:
outp['compile_targets'] = [ outp['compile_targets'] = [
label.replace('//', '') label.replace('//', '') for label in gn_outp['compile_targets']
for label in gn_outp['compile_targets']
] ]
if 'test_targets' in gn_outp: if 'test_targets' in gn_outp:
outp['test_targets'] = [ outp['test_targets'] = [
labels_to_targets[label] labels_to_targets[label] for label in gn_outp['test_targets']
for label in gn_outp['test_targets']
] ]
if self.args.verbose: if self.args.verbose:
@ -1120,19 +1129,18 @@ class MetaBuildWrapper(object):
path = self.args.input_path[0] path = self.args.input_path[0]
output_path = self.args.output_path[0] output_path = self.args.output_path[0]
if not self.Exists(path): if not self.Exists(path):
self.WriteFailureAndRaise( self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)
'"%s" does not exist' % path, output_path)
try: try:
inp = json.loads(self.ReadFile(path)) inp = json.loads(self.ReadFile(path))
except Exception as e: except Exception as e:
self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' self.WriteFailureAndRaise(
% (path, e), output_path) 'Failed to read JSON input from "%s": %s' % (path, e), output_path)
for k in required_keys: for k in required_keys:
if not k in inp: if not k in inp:
self.WriteFailureAndRaise('input file is missing a "%s" key' % self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
k, output_path) output_path)
return inp return inp
@ -1147,8 +1155,7 @@ class MetaBuildWrapper(object):
json.dumps(obj, indent=2, sort_keys=True) + '\n', json.dumps(obj, indent=2, sort_keys=True) + '\n',
force_verbose=force_verbose) force_verbose=force_verbose)
except Exception as e: except Exception as e:
raise MBErr('Error %s writing to the output path "%s"' % raise MBErr('Error %s writing to the output path "%s"' % (e, path))
(e, path))
def PrintCmd(self, cmd, env): def PrintCmd(self, cmd, env):
if self.platform == 'win32': if self.platform == 'win32':
@ -1160,12 +1167,10 @@ class MetaBuildWrapper(object):
env_quoter = pipes.quote env_quoter = pipes.quote
shell_quoter = pipes.quote shell_quoter = pipes.quote
def print_env(var): var = 'LLVM_FORCE_HEAD_REVISION'
if env and var in env: if env and var in env:
self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
print_env('LLVM_FORCE_HEAD_REVISION')
if cmd[0] == self.executable: if cmd[0] == self.executable:
cmd = ['python'] + cmd[1:] cmd = ['python'] + cmd[1:]
self.Print(*[shell_quoter(arg) for arg in cmd]) self.Print(*[shell_quoter(arg) for arg in cmd])
@ -1179,8 +1184,7 @@ class MetaBuildWrapper(object):
if self.args.jobs: if self.args.jobs:
ninja_cmd.extend(['-j', '%d' % self.args.jobs]) ninja_cmd.extend(['-j', '%d' % self.args.jobs])
ninja_cmd.append(target) ninja_cmd.append(target)
ret, _, _ = self.Run( ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False)
ninja_cmd, force_verbose=False, buffer_output=False)
return ret return ret
def Run(self, cmd, env=None, force_verbose=True, buffer_output=True): def Run(self, cmd, env=None, force_verbose=True, buffer_output=True):
@ -1202,60 +1206,70 @@ class MetaBuildWrapper(object):
def Call(self, cmd, env=None, buffer_output=True): def Call(self, cmd, env=None, buffer_output=True):
if buffer_output: if buffer_output:
p = subprocess.Popen(cmd, shell=False, cwd=self.src_dir, p = subprocess.Popen(cmd,
shell=False,
cwd=self.src_dir,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, stderr=subprocess.PIPE,
env=env) env=env)
out, err = p.communicate() out, err = p.communicate()
else: else:
p = subprocess.Popen(cmd, shell=False, cwd=self.src_dir, p = subprocess.Popen(cmd, shell=False, cwd=self.src_dir, env=env)
env=env)
p.wait() p.wait()
out = err = '' out = err = ''
return p.returncode, out, err return p.returncode, out, err
def ExpandUser(self, path): @staticmethod
def ExpandUser(path):
# This function largely exists so it can be overridden for testing. # This function largely exists so it can be overridden for testing.
return os.path.expanduser(path) return os.path.expanduser(path)
def Exists(self, path): @staticmethod
def Exists(path):
# This function largely exists so it can be overridden for testing. # This function largely exists so it can be overridden for testing.
return os.path.exists(path) return os.path.exists(path)
def Fetch(self, url): @staticmethod
def Fetch(url):
# This function largely exists so it can be overridden for testing. # This function largely exists so it can be overridden for testing.
f = urlopen(url) f = urlopen(url)
contents = f.read() contents = f.read()
f.close() f.close()
return contents return contents
def MaybeMakeDirectory(self, path): @staticmethod
def MaybeMakeDirectory(path):
try: try:
os.makedirs(path) os.makedirs(path)
except OSError as e: except OSError as e:
if e.errno != errno.EEXIST: if e.errno != errno.EEXIST:
raise raise
def PathJoin(self, *comps): @staticmethod
def PathJoin(*comps):
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
return os.path.join(*comps) return os.path.join(*comps)
def Print(self, *args, **kwargs): @staticmethod
def Print(*args, **kwargs):
# This function largely exists so it can be overridden for testing. # This function largely exists so it can be overridden for testing.
print(*args, **kwargs) print(*args, **kwargs)
if kwargs.get('stream', sys.stdout) == sys.stdout: if kwargs.get('stream', sys.stdout) == sys.stdout:
sys.stdout.flush() sys.stdout.flush()
def ReadFile(self, path): @staticmethod
def ReadFile(path):
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
with open(path) as fp: with open(path) as fp:
return fp.read() return fp.read()
def RelPath(self, path, start='.'): @staticmethod
def RelPath(path, start='.'):
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
return os.path.relpath(path, start) return os.path.relpath(path, start)
def RemoveFile(self, path): @staticmethod
def RemoveFile(path):
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
os.remove(path) os.remove(path)
@ -1270,11 +1284,13 @@ class MetaBuildWrapper(object):
else: else:
shutil.rmtree(abs_path, ignore_errors=True) shutil.rmtree(abs_path, ignore_errors=True)
def TempDir(self): @staticmethod
def TempDir():
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
return tempfile.mkdtemp(prefix='mb_') return tempfile.mkdtemp(prefix='mb_')
def TempFile(self, mode='w'): @staticmethod
def TempFile(mode='w'):
# This function largely exists so it can be overriden for testing. # This function largely exists so it can be overriden for testing.
return tempfile.NamedTemporaryFile(mode=mode, delete=False) return tempfile.NamedTemporaryFile(mode=mode, delete=False)

View file

@ -61,7 +61,7 @@ class FakeMBW(mb.MetaBuildWrapper):
def Exists(self, path): def Exists(self, path):
abs_path = self._AbsPath(path) abs_path = self._AbsPath(path)
return (self.files.get(abs_path) is not None or abs_path in self.dirs) return self.files.get(abs_path) is not None or abs_path in self.dirs
def MaybeMakeDirectory(self, path): def MaybeMakeDirectory(self, path):
abpath = self._AbsPath(path) abpath = self._AbsPath(path)
@ -186,8 +186,7 @@ class UnitTest(unittest.TestCase):
mbw = FakeMBW(win32=win32) mbw = FakeMBW(win32=win32)
mbw.files.setdefault(mbw.default_config, TEST_CONFIG) mbw.files.setdefault(mbw.default_config, TEST_CONFIG)
mbw.files.setdefault( mbw.files.setdefault(
mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'), mbw.ToAbsPath('//testing/buildbot/gn_isolate_map.pyl'), '''{
'''{
"foo_unittests": { "foo_unittests": {
"label": "//foo:foo_unittests", "label": "//foo:foo_unittests",
"type": "console_test_launcher", "type": "console_test_launcher",
@ -202,7 +201,13 @@ class UnitTest(unittest.TestCase):
mbw.files[path] = contents mbw.files[path] = contents
return mbw return mbw
def check(self, args, mbw=None, files=None, out=None, err=None, ret=None, def check(self,
args,
mbw=None,
files=None,
out=None,
err=None,
ret=None,
env=None): env=None):
if not mbw: if not mbw:
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
@ -223,24 +228,33 @@ class UnitTest(unittest.TestCase):
return mbw return mbw
def test_analyze(self): def test_analyze(self):
files = {'/tmp/in.json': '''{\ files = {
'/tmp/in.json':
'''{\
"files": ["foo/foo_unittest.cc"], "files": ["foo/foo_unittest.cc"],
"test_targets": ["foo_unittests"], "test_targets": ["foo_unittests"],
"additional_compile_targets": ["all"] "additional_compile_targets": ["all"]
}''', }''',
'/tmp/out.json.gn': '''{\ '/tmp/out.json.gn':
'''{\
"status": "Found dependency", "status": "Found dependency",
"compile_targets": ["//foo:foo_unittests"], "compile_targets": ["//foo:foo_unittests"],
"test_targets": ["//foo:foo_unittests"] "test_targets": ["//foo:foo_unittests"]
}'''} }'''
}
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '')
self.check(['analyze', '-c', 'debug_goma', '//out/Default', self.check([
'/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0) 'analyze', '-c', 'debug_goma', '//out/Default', '/tmp/in.json',
'/tmp/out.json'
],
mbw=mbw,
ret=0)
out = json.loads(mbw.files['/tmp/out.json']) out = json.loads(mbw.files['/tmp/out.json'])
self.assertEqual(out, { self.assertEqual(
out, {
'status': 'Found dependency', 'status': 'Found dependency',
'compile_targets': ['foo:foo_unittests'], 'compile_targets': ['foo:foo_unittests'],
'test_targets': ['foo_unittests'] 'test_targets': ['foo_unittests']
@ -249,7 +263,8 @@ class UnitTest(unittest.TestCase):
def test_gen(self): def test_gen(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['gen', '-c', 'debug_goma', '//out/Default', '-g', '/goma'], self.check(['gen', '-c', 'debug_goma', '//out/Default', '-g', '/goma'],
mbw=mbw, ret=0) mbw=mbw,
ret=0)
self.assertMultiLineEqual(mbw.files['/fake_src/out/Default/args.gn'], self.assertMultiLineEqual(mbw.files['/fake_src/out/Default/args.gn'],
('goma_dir = "/goma"\n' ('goma_dir = "/goma"\n'
'is_debug = true\n' 'is_debug = true\n'
@ -262,23 +277,25 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw(win32=True) mbw = self.fake_mbw(win32=True)
self.check(['gen', '-c', 'debug_goma', '-g', 'c:\\goma', '//out/Debug'], self.check(['gen', '-c', 'debug_goma', '-g', 'c:\\goma', '//out/Debug'],
mbw=mbw, ret=0) mbw=mbw,
ret=0)
self.assertMultiLineEqual(mbw.files['c:\\fake_src\\out\\Debug\\args.gn'], self.assertMultiLineEqual(mbw.files['c:\\fake_src\\out\\Debug\\args.gn'],
('goma_dir = "c:\\\\goma"\n' ('goma_dir = "c:\\\\goma"\n'
'is_debug = true\n' 'is_debug = true\n'
'use_goma = true\n')) 'use_goma = true\n'))
self.assertIn('c:\\fake_src\\buildtools\\win\\gn.exe gen //out/Debug ' self.assertIn(
'c:\\fake_src\\buildtools\\win\\gn.exe gen //out/Debug '
'--check\n', mbw.out) '--check\n', mbw.out)
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['gen', '-m', 'fake_group', '-b', 'fake_args_bot', self.check(
'//out/Debug'], ['gen', '-m', 'fake_group', '-b', 'fake_args_bot', '//out/Debug'],
mbw=mbw, ret=0) mbw=mbw,
ret=0)
self.assertEqual( self.assertEqual(
mbw.files['/fake_src/out/Debug/args.gn'], mbw.files['/fake_src/out/Debug/args.gn'],
'import("//build/args/bots/fake_group/fake_args_bot.gn")\n\n') 'import("//build/args/bots/fake_group/fake_args_bot.gn")\n\n')
def test_gen_fails(self): def test_gen_fails(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
@ -286,46 +303,47 @@ class UnitTest(unittest.TestCase):
def test_gen_swarming(self): def test_gen_swarming(self):
files = { files = {
'/tmp/swarming_targets': 'base_unittests\n', '/tmp/swarming_targets':
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( 'base_unittests\n',
"{'base_unittests': {" '/fake_src/testing/buildbot/gn_isolate_map.pyl':
("{'base_unittests': {"
" 'label': '//base:base_unittests'," " 'label': '//base:base_unittests',"
" 'type': 'raw'," " 'type': 'raw',"
" 'args': []," " 'args': [],"
"}}\n" "}}\n"),
), '/fake_src/out/Default/base_unittests.runtime_deps':
'/fake_src/out/Default/base_unittests.runtime_deps': ( ("base_unittests\n"),
"base_unittests\n"
),
} }
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
self.check(['gen', self.check([
'-c', 'debug_goma', 'gen', '-c', 'debug_goma', '--swarming-targets-file',
'--swarming-targets-file', '/tmp/swarming_targets', '/tmp/swarming_targets', '//out/Default'
'//out/Default'], mbw=mbw, ret=0) ],
self.assertIn('/fake_src/out/Default/base_unittests.isolate', mbw=mbw,
mbw.files) ret=0)
self.assertIn('/fake_src/out/Default/base_unittests.isolate', mbw.files)
self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json', self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json',
mbw.files) mbw.files)
def test_gen_swarming_android(self): def test_gen_swarming_android(self):
test_files = { test_files = {
'/tmp/swarming_targets': 'base_unittests\n', '/tmp/swarming_targets':
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( 'base_unittests\n',
"{'base_unittests': {" '/fake_src/testing/buildbot/gn_isolate_map.pyl':
("{'base_unittests': {"
" 'label': '//base:base_unittests'," " 'label': '//base:base_unittests',"
" 'type': 'additional_compile_target'," " 'type': 'additional_compile_target',"
"}}\n" "}}\n"),
), '/fake_src/out/Default/base_unittests.runtime_deps':
'/fake_src/out/Default/base_unittests.runtime_deps': ( ("base_unittests\n"),
"base_unittests\n"
),
} }
mbw = self.check(['gen', '-c', 'android_bot', '//out/Default', mbw = self.check([
'--swarming-targets-file', '/tmp/swarming_targets', 'gen', '-c', 'android_bot', '//out/Default', '--swarming-targets-file',
'--isolate-map-file', '/tmp/swarming_targets', '--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map.pyl'], '/fake_src/testing/buildbot/gn_isolate_map.pyl'
files=test_files, ret=0) ],
files=test_files,
ret=0)
isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate'] isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
isolate_file_contents = ast.literal_eval(isolate_file) isolate_file_contents = ast.literal_eval(isolate_file)
@ -337,30 +355,34 @@ class UnitTest(unittest.TestCase):
self.assertEqual(command, [ self.assertEqual(command, [
'vpython', 'vpython',
'../../build/android/test_wrapper/logdog_wrapper.py', '../../build/android/test_wrapper/logdog_wrapper.py',
'--target', 'base_unittests', '--target',
'--logdog-bin-cmd', '../../bin/logdog_butler', 'base_unittests',
'--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', '--logdog-bin-cmd',
'../../bin/logdog_butler',
'--logcat-output-file',
'${ISOLATED_OUTDIR}/logcats',
'--store-tombstones', '--store-tombstones',
]) ])
def test_gen_swarming_android_junit_test(self): def test_gen_swarming_android_junit_test(self):
test_files = { test_files = {
'/tmp/swarming_targets': 'base_unittests\n', '/tmp/swarming_targets':
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( 'base_unittests\n',
"{'base_unittests': {" '/fake_src/testing/buildbot/gn_isolate_map.pyl':
("{'base_unittests': {"
" 'label': '//base:base_unittests'," " 'label': '//base:base_unittests',"
" 'type': 'junit_test'," " 'type': 'junit_test',"
"}}\n" "}}\n"),
), '/fake_src/out/Default/base_unittests.runtime_deps':
'/fake_src/out/Default/base_unittests.runtime_deps': ( ("base_unittests\n"),
"base_unittests\n"
),
} }
mbw = self.check(['gen', '-c', 'android_bot', '//out/Default', mbw = self.check([
'--swarming-targets-file', '/tmp/swarming_targets', 'gen', '-c', 'android_bot', '//out/Default', '--swarming-targets-file',
'--isolate-map-file', '/tmp/swarming_targets', '--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map.pyl'], '/fake_src/testing/buildbot/gn_isolate_map.pyl'
files=test_files, ret=0) ],
files=test_files,
ret=0)
isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate'] isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
isolate_file_contents = ast.literal_eval(isolate_file) isolate_file_contents = ast.literal_eval(isolate_file)
@@ -372,31 +394,35 @@ class UnitTest(unittest.TestCase):
     self.assertEqual(command, [
         'vpython',
         '../../build/android/test_wrapper/logdog_wrapper.py',
-        '--target', 'base_unittests',
-        '--logdog-bin-cmd', '../../bin/logdog_butler',
-        '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats',
+        '--target',
+        'base_unittests',
+        '--logdog-bin-cmd',
+        '../../bin/logdog_butler',
+        '--logcat-output-file',
+        '${ISOLATED_OUTDIR}/logcats',
         '--store-tombstones',
     ])
 
   def test_gen_timeout(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'non_parallel_console_test_launcher',"
           " 'timeout': 500,"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -430,24 +456,25 @@ class UnitTest(unittest.TestCase):
   def test_gen_script(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests_script\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests_script': {"
+        '/tmp/swarming_targets':
+        'base_unittests_script\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests_script': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'script',"
           " 'script': '//base/base_unittests_script.py',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-          "base_unittests_script.py\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"
+         "base_unittests_script.py\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = (
         mbw.files['/fake_src/out/Default/base_unittests_script.isolate'])
@@ -466,22 +493,23 @@ class UnitTest(unittest.TestCase):
   def test_gen_raw(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
          " 'type': 'raw',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -508,22 +536,23 @@ class UnitTest(unittest.TestCase):
   def test_gen_non_parallel_console_test_launcher(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'non_parallel_console_test_launcher',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -557,23 +586,24 @@ class UnitTest(unittest.TestCase):
   def test_isolate_windowed_test_launcher_linux(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'windowed_test_launcher',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-          "some_resource_file\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"
+         "some_resource_file\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -608,26 +638,26 @@ class UnitTest(unittest.TestCase):
   def test_gen_windowed_test_launcher_win(self):
     files = {
-      'c:\\fake_src\\out\\Default\\tmp\\swarming_targets': 'unittests\n',
-      'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl': (
-          "{'unittests': {"
+        'c:\\fake_src\\out\\Default\\tmp\\swarming_targets':
+        'unittests\n',
+        'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl':
+        ("{'unittests': {"
           " 'label': '//somewhere:unittests',"
           " 'type': 'windowed_test_launcher',"
-          "}}\n"
-      ),
-      r'c:\fake_src\out\Default\unittests.exe.runtime_deps': (
-          "unittests.exe\n"
-          "some_dependency\n"
-      ),
+         "}}\n"),
+        r'c:\fake_src\out\Default\unittests.exe.runtime_deps':
+        ("unittests.exe\n"
+         "some_dependency\n"),
     }
     mbw = self.fake_mbw(files=files, win32=True)
-    self.check(['gen',
-                '-c', 'debug_goma',
-                '--swarming-targets-file',
-                'c:\\fake_src\\out\\Default\\tmp\\swarming_targets',
-                '--isolate-map-file',
-                'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl',
-                '//out/Default'], mbw=mbw, ret=0)
+    self.check([
+        'gen', '-c', 'debug_goma', '--swarming-targets-file',
+        'c:\\fake_src\\out\\Default\\tmp\\swarming_targets',
+        '--isolate-map-file',
+        'c:\\fake_src\\testing\\buildbot\\gn_isolate_map.pyl', '//out/Default'
+    ],
+               mbw=mbw,
+               ret=0)
     isolate_file = mbw.files['c:\\fake_src\\out\\Default\\unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
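As the Windows variant above makes explicit, every test drives mb against a fake wrapper whose "file system" is just a dict passed in as files= and read back through mbw.files, so nothing touches the real disk. A minimal, self-contained sketch of that idea (simplified and hypothetical; the real fake wrapper in this file overrides many more mb.py hooks):

class FakeFileSystem:
  """Stand-in file system: contents live in a dict keyed by path."""

  def __init__(self, files=None):
    self.files = dict(files or {})

  def ReadFile(self, path):
    return self.files[path]

  def WriteFile(self, path, contents):
    self.files[path] = contents


fs = FakeFileSystem({'/tmp/swarming_targets': 'base_unittests\n'})
fs.WriteFile('/fake_src/out/Default/base_unittests.isolate', "{}")
assert fs.ReadFile('/tmp/swarming_targets') == 'base_unittests\n'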
@@ -661,22 +691,23 @@ class UnitTest(unittest.TestCase):
   def test_gen_console_test_launcher(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'console_test_launcher',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -709,24 +740,25 @@ class UnitTest(unittest.TestCase):
   def test_isolate_test_launcher_with_webcam(self):
     test_files = {
-      '/tmp/swarming_targets': 'base_unittests\n',
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/tmp/swarming_targets':
+        'base_unittests\n',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'console_test_launcher',"
           " 'use_webcam': True,"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-          "some_resource_file\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"
+         "some_resource_file\n"),
     }
-    mbw = self.check(['gen', '-c', 'debug_goma', '//out/Default',
-                      '--swarming-targets-file', '/tmp/swarming_targets',
-                      '--isolate-map-file',
-                      '/fake_src/testing/buildbot/gn_isolate_map.pyl'],
-                     files=test_files, ret=0)
+    mbw = self.check([
+        'gen', '-c', 'debug_goma', '//out/Default', '--swarming-targets-file',
+        '/tmp/swarming_targets', '--isolate-map-file',
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl'
+    ],
+                     files=test_files,
+                     ret=0)
     isolate_file = mbw.files['/fake_src/out/Default/base_unittests.isolate']
     isolate_file_contents = ast.literal_eval(isolate_file)
@@ -763,42 +795,44 @@ class UnitTest(unittest.TestCase):
   def test_isolate(self):
     files = {
-      '/fake_src/out/Default/toolchain.ninja': "",
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/fake_src/out/Default/toolchain.ninja':
+        "",
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'non_parallel_console_test_launcher',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    self.check(['isolate', '-c', 'debug_goma', '//out/Default',
-                'base_unittests'], files=files, ret=0)
+    self.check(
+        ['isolate', '-c', 'debug_goma', '//out/Default', 'base_unittests'],
+        files=files,
+        ret=0)
 
     # test running isolate on an existing build_dir
     files['/fake_src/out/Default/args.gn'] = 'is_debug = True\n'
     self.check(['isolate', '//out/Default', 'base_unittests'],
-               files=files, ret=0)
+               files=files,
+               ret=0)
 
     files['/fake_src/out/Default/mb_type'] = 'gn\n'
     self.check(['isolate', '//out/Default', 'base_unittests'],
-               files=files, ret=0)
+               files=files,
+               ret=0)
 
   def test_run(self):
     files = {
-      '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
-          "{'base_unittests': {"
+        '/fake_src/testing/buildbot/gn_isolate_map.pyl':
+        ("{'base_unittests': {"
           " 'label': '//base:base_unittests',"
           " 'type': 'windowed_test_launcher',"
-          "}}\n"
-      ),
-      '/fake_src/out/Default/base_unittests.runtime_deps': (
-          "base_unittests\n"
-      ),
+         "}}\n"),
+        '/fake_src/out/Default/base_unittests.runtime_deps':
+        ("base_unittests\n"),
     }
-    self.check(['run', '-c', 'debug_goma', '//out/Default',
-                'base_unittests'], files=files, ret=0)
+    self.check(['run', '-c', 'debug_goma', '//out/Default', 'base_unittests'],
+               files=files,
+               ret=0)
 
   def test_run_swarmed(self):
     files = {
@@ -830,25 +864,33 @@ class UnitTest(unittest.TestCase):
     mbw.ToSrcRelPath = to_src_rel_path_stub
-    self.check(['run', '-s', '-c', 'debug_goma', '//out/Default',
-                'base_unittests'], mbw=mbw, ret=0)
+    self.check(
+        ['run', '-s', '-c', 'debug_goma', '//out/Default', 'base_unittests'],
+        mbw=mbw,
+        ret=0)
 
     mbw = self.fake_mbw(files=files)
     mbw.files[mbw.PathJoin(mbw.TempDir(), 'task.json')] = task_json
     mbw.files[mbw.PathJoin(mbw.TempDir(), 'collect_output.json')] = collect_json
     mbw.ToSrcRelPath = to_src_rel_path_stub
-    self.check(['run', '-s', '-c', 'debug_goma', '-d', 'os', 'Win7',
-                '//out/Default', 'base_unittests'], mbw=mbw, ret=0)
+    self.check([
+        'run', '-s', '-c', 'debug_goma', '-d', 'os', 'Win7', '//out/Default',
+        'base_unittests'
+    ],
+               mbw=mbw,
+               ret=0)
 
   def test_lookup(self):
     self.check(['lookup', '-c', 'debug_goma'], ret=0)
 
   def test_quiet_lookup(self):
-    self.check(['lookup', '-c', 'debug_goma', '--quiet'], ret=0,
+    self.check(['lookup', '-c', 'debug_goma', '--quiet'],
+               ret=0,
                out=('is_debug = true\n'
                     'use_goma = true\n'))
 
   def test_lookup_goma_dir_expansion(self):
-    self.check(['lookup', '-c', 'rel_bot', '-g', '/foo'], ret=0,
+    self.check(['lookup', '-c', 'rel_bot', '-g', '/foo'],
+               ret=0,
                out=('\n'
                     'Writing """\\\n'
                     'enable_doom_melon = true\n'
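One detail worth calling out in the expected-output checks above (and in the gn_isolate_map.pyl fixtures throughout the file): the multi-line values are adjacent string literals, which Python concatenates at parse time. A tiny self-contained example of that behaviour:

# Adjacent string literals are merged into a single string by the parser,
# so multi-line expected output stays readable without '+' operators.
expected = ('is_debug = true\n'
            'use_goma = true\n')
assert expected == 'is_debug = true\nuse_goma = true\n'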
@@ -875,22 +917,33 @@ class UnitTest(unittest.TestCase):
     self.assertIn('Must specify a build --phase', mbw.out)
 
     # Check that passing a --phase to a single-phase builder fails.
-    mbw = self.check(['lookup', '-m', 'fake_group', '-b', 'fake_builder',
-                      '--phase', 'phase_1'], ret=1)
+    mbw = self.check([
+        'lookup', '-m', 'fake_group', '-b', 'fake_builder', '--phase', 'phase_1'
+    ],
+                     ret=1)
     self.assertIn('Must not specify a build --phase', mbw.out)
 
     # Check that passing a wrong phase key to a multi-phase builder fails.
-    mbw = self.check(['lookup', '-m', 'fake_group', '-b', 'fake_multi_phase',
-                      '--phase', 'wrong_phase'], ret=1)
+    mbw = self.check([
+        'lookup', '-m', 'fake_group', '-b', 'fake_multi_phase', '--phase',
+        'wrong_phase'
+    ],
+                     ret=1)
     self.assertIn('Phase wrong_phase doesn\'t exist', mbw.out)
 
     # Check that passing a correct phase key to a multi-phase builder passes.
-    mbw = self.check(['lookup', '-m', 'fake_group', '-b', 'fake_multi_phase',
-                      '--phase', 'phase_1'], ret=0)
+    mbw = self.check([
+        'lookup', '-m', 'fake_group', '-b', 'fake_multi_phase', '--phase',
+        'phase_1'
+    ],
+                     ret=0)
     self.assertIn('phase = 1', mbw.out)
 
-    mbw = self.check(['lookup', '-m', 'fake_group', '-b', 'fake_multi_phase',
-                      '--phase', 'phase_2'], ret=0)
+    mbw = self.check([
+        'lookup', '-m', 'fake_group', '-b', 'fake_multi_phase', '--phase',
+        'phase_2'
+    ],
+                     ret=0)
     self.assertIn('phase = 2', mbw.out)
 
   def test_validate(self):