| 1 #!/usr/bin/env python | |
| 2 # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | |
| 3 # | |
| 4 # Use of this source code is governed by a BSD-style license | |
| 5 # that can be found in the LICENSE file in the root of the source | |
| 6 # tree. An additional intellectual property rights grant can be found | |
| 7 # in the file PATENTS. All contributing project authors may | |
| 8 # be found in the AUTHORS file in the root of the source tree. | |
| 9 | |
| 10 """MB - the Meta-Build wrapper around GYP and GN | |
| 11 | |
| 12 MB is a wrapper script for GYP and GN that can be used to generate build files | |
| 13 for sets of canned configurations and analyze them. | |
| 14 """ | |
| 15 | |
| 16 from __future__ import print_function | |
| 17 | |
| 18 import argparse | |
| 19 import ast | |
| 20 import errno | |
| 21 import json | |
| 22 import os | |
| 23 import pipes | |
| 24 import pprint | |
| 25 import re | |
| 26 import shutil | |
| 27 import sys | |
| 28 import subprocess | |
| 29 import tempfile | |
| 30 import traceback | |
| 31 import urllib2 | |
| 32 | |
| 33 from collections import OrderedDict | |
| 34 | |
| 35 CHROMIUM_SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname( | |
| 36 os.path.abspath(__file__)))) | |
| 37 sys.path = [os.path.join(CHROMIUM_SRC_DIR, 'build')] + sys.path | |
| 38 | |
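| # gn_helpers lives in the build/ directory that was prepended to sys.path above. | |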
| 39 import gn_helpers | |
| 40 | |
| 41 | |
| 42 def main(args): | |
| 43 mbw = MetaBuildWrapper() | |
| 44 return mbw.Main(args) | |
| 45 | |
| 46 | |
| 47 class MetaBuildWrapper(object): | |
| 48 def __init__(self): | |
| 49 self.chromium_src_dir = CHROMIUM_SRC_DIR | |
| 50 self.default_config = os.path.join(self.chromium_src_dir, 'tools', 'mb', | |
| 51 'mb_config.pyl') | |
| 52 self.default_isolate_map = os.path.join(self.chromium_src_dir, 'testing', | |
| 53 'buildbot', 'gn_isolate_map.pyl') | |
| 54 self.executable = sys.executable | |
| 55 self.platform = sys.platform | |
| 56 self.sep = os.sep | |
| 57 self.args = argparse.Namespace() | |
| 58 self.configs = {} | |
| 59 self.masters = {} | |
| 60 self.mixins = {} | |
| 61 | |
| 62 def Main(self, args): | |
| 63 self.ParseArgs(args) | |
| 64 try: | |
| 65 ret = self.args.func() | |
| 66 if ret: | |
| 67 self.DumpInputFiles() | |
| 68 return ret | |
| 69 except KeyboardInterrupt: | |
| 70 self.Print('interrupted, exiting') | |
| 71 return 130 | |
| 72 except Exception: | |
| 73 self.DumpInputFiles() | |
| 74 s = traceback.format_exc() | |
| 75 for l in s.splitlines(): | |
| 76 self.Print(l) | |
| 77 return 1 | |
| 78 | |
| 79 def ParseArgs(self, argv): | |
| 80 def AddCommonOptions(subp): | |
| 81 subp.add_argument('-b', '--builder', | |
| 82 help='builder name to look up config from') | |
| 83 subp.add_argument('-m', '--master', | |
| 84 help='master name to look up config from') | |
| 85 subp.add_argument('-c', '--config', | |
| 86 help='configuration to analyze') | |
| 87 subp.add_argument('--phase', | |
| 88 help='optional phase name (used when builders ' | |
| 89 'do multiple compiles with different ' | |
| 90 'arguments in a single build)') | |
| 91 subp.add_argument('-f', '--config-file', metavar='PATH', | |
| 92 default=self.default_config, | |
| 93 help='path to config file ' | |
| 94 '(default is %(default)s)') | |
| 95 subp.add_argument('-i', '--isolate-map-file', metavar='PATH', | |
| 96 default=self.default_isolate_map, | |
| 97 help='path to isolate map file ' | |
| 98 '(default is %(default)s)') | |
| 99 subp.add_argument('-g', '--goma-dir', | |
| 100 help='path to goma directory') | |
| 101 subp.add_argument('--gyp-script', metavar='PATH', | |
| 102 default=self.PathJoin('build', 'gyp_chromium'), | |
| 103 help='path to gyp script relative to project root ' | |
| 104 '(default is %(default)s)') | |
| 105 subp.add_argument('--android-version-code', | |
| 106 help='Sets GN arg android_default_version_code and ' | |
| 107 'GYP_DEFINE app_manifest_version_code') | |
| 108 subp.add_argument('--android-version-name', | |
| 109 help='Sets GN arg android_default_version_name and ' | |
| 110 'GYP_DEFINE app_manifest_version_name') | |
| 111 subp.add_argument('-n', '--dryrun', action='store_true', | |
| 112 help='Do a dry run (i.e., do nothing, just print ' | |
| 113 'the commands that will run)') | |
| 114 subp.add_argument('-v', '--verbose', action='store_true', | |
| 115 help='verbose logging') | |
| 116 | |
| 117 parser = argparse.ArgumentParser(prog='mb') | |
| 118 subps = parser.add_subparsers() | |
| 119 | |
| 120 subp = subps.add_parser('analyze', | |
| 121 help='analyze whether changes to a set of files ' | |
| 122 'will cause a set of binaries to be rebuilt.') | |
| 123 AddCommonOptions(subp) | |
| 124 subp.add_argument('path', nargs=1, | |
| 125 help='path build was generated into.') | |
| 126 subp.add_argument('input_path', nargs=1, | |
| 127 help='path to a file containing the input arguments ' | |
| 128 'as a JSON object.') | |
| 129 subp.add_argument('output_path', nargs=1, | |
| 130 help='path to a file containing the output arguments ' | |
| 131 'as a JSON object.') | |
| 132 subp.set_defaults(func=self.CmdAnalyze) | |
| 133 | |
| 134 subp = subps.add_parser('export', | |
| 135 help='print out the expanded configuration for ' | |
| 136 'each builder as a JSON object') | |
| 137 subp.add_argument('-f', '--config-file', metavar='PATH', | |
| 138 default=self.default_config, | |
| 139 help='path to config file (default is %(default)s)') | |
| 140 subp.add_argument('-g', '--goma-dir', | |
| 141 help='path to goma directory') | |
| 142 subp.set_defaults(func=self.CmdExport) | |
| 143 | |
| 144 subp = subps.add_parser('gen', | |
| 145 help='generate a new set of build files') | |
| 146 AddCommonOptions(subp) | |
| 147 subp.add_argument('--swarming-targets-file', | |
| 148 help='save runtime dependencies for targets listed ' | |
| 149 'in file.') | |
| 150 subp.add_argument('path', nargs=1, | |
| 151 help='path to generate build into') | |
| 152 subp.set_defaults(func=self.CmdGen) | |
| 153 | |
| 154 subp = subps.add_parser('isolate', | |
| 155 help='generate the .isolate files for a given ' | |
| 156 'binary') | |
| 157 AddCommonOptions(subp) | |
| 158 subp.add_argument('path', nargs=1, | |
| 159 help='path build was generated into') | |
| 160 subp.add_argument('target', nargs=1, | |
| 161 help='ninja target to generate the isolate for') | |
| 162 subp.set_defaults(func=self.CmdIsolate) | |
| 163 | |
| 164 subp = subps.add_parser('lookup', | |
| 165 help='look up the command for a given config or ' | |
| 166 'builder') | |
| 167 AddCommonOptions(subp) | |
| 168 subp.set_defaults(func=self.CmdLookup) | |
| 169 | |
| 170 subp = subps.add_parser( | |
| 171 'run', | |
| 172 help='build and run the isolated version of a ' | |
| 173 'binary', | |
| 174 formatter_class=argparse.RawDescriptionHelpFormatter) | |
| 175 subp.description = ( | |
| 176 'Build, isolate, and run the given binary with the command line\n' | |
| 177 'listed in the isolate. You may pass extra arguments after the\n' | |
| 178 'target; use "--" if the extra arguments need to include switches.\n' | |
| 179 '\n' | |
| 180 'Examples:\n' | |
| 181 '\n' | |
| 182 ' % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n' | |
| 183 ' //out/Default content_browsertests\n' | |
| 184 '\n' | |
| 185 ' % tools/mb/mb.py run out/Default content_browsertests\n' | |
| 186 '\n' | |
| 187 ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n' | |
| 188 ' --test-launcher-retry-limit=0' | |
| 189 '\n' | |
| 190 ) | |
| 191 | |
| 192 AddCommonOptions(subp) | |
| 193 subp.add_argument('-j', '--jobs', dest='jobs', type=int, | |
| 194 help='Number of jobs to pass to ninja') | |
| 195 subp.add_argument('--no-build', dest='build', default=True, | |
| 196 action='store_false', | |
| 197 help='Do not build, just isolate and run') | |
| 198 subp.add_argument('path', nargs=1, | |
| 199 help=('path to generate build into (or use).' | |
| 200 ' This can be either a regular path or a ' | |
| 201 'GN-style source-relative path like ' | |
| 202 '//out/Default.')) | |
| 203 subp.add_argument('target', nargs=1, | |
| 204 help='ninja target to build and run') | |
| 205 subp.add_argument('extra_args', nargs='*', | |
| 206 help=('extra args to pass to the isolate to run. Use ' | |
| 207 '"--" as the first arg if you need to pass ' | |
| 208 'switches')) | |
| 209 subp.set_defaults(func=self.CmdRun) | |
| 210 | |
| 211 subp = subps.add_parser('validate', | |
| 212 help='validate the config file') | |
| 213 subp.add_argument('-f', '--config-file', metavar='PATH', | |
| 214 default=self.default_config, | |
| 215 help='path to config file (default is %(default)s)') | |
| 216 subp.set_defaults(func=self.CmdValidate) | |
| 217 | |
| 218 subp = subps.add_parser('audit', | |
| 219 help='Audit the config file to track progress') | |
| 220 subp.add_argument('-f', '--config-file', metavar='PATH', | |
| 221 default=self.default_config, | |
| 222 help='path to config file (default is %(default)s)') | |
| 223 subp.add_argument('-i', '--internal', action='store_true', | |
| 224 help='check internal masters also') | |
| 225 subp.add_argument('-m', '--master', action='append', | |
| 226 help='master to audit (default is all non-internal ' | |
| 227 'masters in file)') | |
| 228 subp.add_argument('-u', '--url-template', action='store', | |
| 229 default='https://build.chromium.org/p/' | |
| 230 '{master}/json/builders', | |
| 231 help='URL scheme for JSON APIs to buildbot ' | |
| 232 '(default: %(default)s) ') | |
| 233 subp.add_argument('-c', '--check-compile', action='store_true', | |
| 234 help='check whether tbd and master-only bots actually' | |
| 235 ' do compiles') | |
| 236 subp.set_defaults(func=self.CmdAudit) | |
| 237 | |
| 238 subp = subps.add_parser('help', | |
| 239 help='Get help on a subcommand.') | |
| 240 subp.add_argument(nargs='?', action='store', dest='subcommand', | |
| 241 help='The command to get help for.') | |
| 242 subp.set_defaults(func=self.CmdHelp) | |
| 243 | |
| 244 self.args = parser.parse_args(argv) | |
| 245 | |
| 246 def DumpInputFiles(self): | |
| 247 | |
| 248 def DumpContentsOfFilePassedTo(arg_name, path): | |
| 249 if path and self.Exists(path): | |
| 250 self.Print("\n# To recreate the file passed to %s:" % arg_name) | |
| 251 self.Print("%% cat > %s <<EOF" % path) | |
| 252 contents = self.ReadFile(path) | |
| 253 self.Print(contents) | |
| 254 self.Print("EOF\n%\n") | |
| 255 | |
| 256 if getattr(self.args, 'input_path', None): | |
| 257 DumpContentsOfFilePassedTo( | |
| 258 'argv[0] (input_path)', self.args.input_path[0]) | |
| 259 if getattr(self.args, 'swarming_targets_file', None): | |
| 260 DumpContentsOfFilePassedTo( | |
| 261 '--swarming-targets-file', self.args.swarming_targets_file) | |
| 262 | |
| 263 def CmdAnalyze(self): | |
| 264 vals = self.Lookup() | |
| 265 self.ClobberIfNeeded(vals) | |
| 266 if vals['type'] == 'gn': | |
| 267 return self.RunGNAnalyze(vals) | |
| 268 else: | |
| 269 return self.RunGYPAnalyze(vals) | |
| 270 | |
| 271 def CmdExport(self): | |
| 272 self.ReadConfigFile() | |
| 273 obj = {} | |
| 274 for master, builders in self.masters.items(): | |
| 275 obj[master] = {} | |
| 276 for builder in builders: | |
| 277 config = self.masters[master][builder] | |
| 278 if not config: | |
| 279 continue | |
| 280 | |
| 281 if isinstance(config, dict): | |
| 282 args = {k: self.FlattenConfig(v)['gn_args'] | |
| 283 for k, v in config.items()} | |
| 284 elif config.startswith('//'): | |
| 285 args = config | |
| 286 else: | |
| 287 args = self.FlattenConfig(config)['gn_args'] | |
| 288 if 'error' in args: | |
| 289 continue | |
| 290 | |
| 291 obj[master][builder] = args | |
| 292 | |
| 293 # Dump object and trim trailing whitespace. | |
| 294 s = '\n'.join(l.rstrip() for l in | |
| 295 json.dumps(obj, sort_keys=True, indent=2).splitlines()) | |
| 296 self.Print(s) | |
| 297 return 0 | |
| 298 | |
| 299 def CmdGen(self): | |
| 300 vals = self.Lookup() | |
| 301 self.ClobberIfNeeded(vals) | |
| 302 if vals['type'] == 'gn': | |
| 303 return self.RunGNGen(vals) | |
| 304 else: | |
| 305 return self.RunGYPGen(vals) | |
| 306 | |
| 307 def CmdHelp(self): | |
| 308 if self.args.subcommand: | |
| 309 self.ParseArgs([self.args.subcommand, '--help']) | |
| 310 else: | |
| 311 self.ParseArgs(['--help']) | |
| 312 | |
| 313 def CmdIsolate(self): | |
| 314 vals = self.GetConfig() | |
| 315 if not vals: | |
| 316 return 1 | |
| 317 | |
| 318 if vals['type'] == 'gn': | |
| 319 return self.RunGNIsolate(vals) | |
| 320 else: | |
| 321 return self.Build('%s_run' % self.args.target[0]) | |
| 322 | |
| 323 def CmdLookup(self): | |
| 324 vals = self.Lookup() | |
| 325 if vals['type'] == 'gn': | |
| 326 cmd = self.GNCmd('gen', '_path_') | |
| 327 gn_args = self.GNArgs(vals) | |
| 328 self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args) | |
| 329 env = None | |
| 330 else: | |
| 331 cmd, env = self.GYPCmd('_path_', vals) | |
| 332 | |
| 333 self.PrintCmd(cmd, env) | |
| 334 return 0 | |
| 335 | |
| 336 def CmdRun(self): | |
| 337 vals = self.GetConfig() | |
| 338 if not vals: | |
| 339 return 1 | |
| 340 | |
| 341 build_dir = self.args.path[0] | |
| 342 target = self.args.target[0] | |
| 343 | |
| 344 if vals['type'] == 'gn': | |
| 345 if self.args.build: | |
| 346 ret = self.Build(target) | |
| 347 if ret: | |
| 348 return ret | |
| 349 ret = self.RunGNIsolate(vals) | |
| 350 if ret: | |
| 351 return ret | |
| 352 else: | |
| 353 ret = self.Build('%s_run' % target) | |
| 354 if ret: | |
| 355 return ret | |
| 356 | |
| 357 cmd = [ | |
| 358 self.executable, | |
| 359 self.PathJoin('tools', 'swarming_client', 'isolate.py'), | |
| 360 'run', | |
| 361 '-s', | |
| 362 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), | |
| 363 ] | |
| 364 if self.args.extra_args: | |
| 365 cmd += ['--'] + self.args.extra_args | |
| 366 | |
| 367 ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False) | |
| 368 | |
| 369 return ret | |
| 370 | |
| 371 def CmdValidate(self, print_ok=True): | |
| 372 errs = [] | |
| 373 | |
| 374 # Read the file to make sure it parses. | |
| 375 self.ReadConfigFile() | |
| 376 | |
| 377 # Build a list of all of the configs referenced by builders. | |
| 378 all_configs = {} | |
| 379 for master in self.masters: | |
| 380 for config in self.masters[master].values(): | |
| 381 if isinstance(config, dict): | |
| 382 for c in config.values(): | |
| 383 all_configs[c] = master | |
| 384 else: | |
| 385 all_configs[config] = master | |
| 386 | |
| 387 # Check that every referenced args file or config actually exists. | |
| 388 for config, loc in all_configs.items(): | |
| 389 if config.startswith('//'): | |
| 390 if not self.Exists(self.ToAbsPath(config)): | |
| 391 errs.append('Unknown args file "%s" referenced from "%s".' % | |
| 392 (config, loc)) | |
| 393 elif not config in self.configs: | |
| 394 errs.append('Unknown config "%s" referenced from "%s".' % | |
| 395 (config, loc)) | |
| 396 | |
| 397 # Check that every actual config is actually referenced. | |
| 398 for config in self.configs: | |
| 399 if not config in all_configs: | |
| 400 errs.append('Unused config "%s".' % config) | |
| 401 | |
| 402 # Figure out the whole list of mixins, and check that every mixin | |
| 403 # listed by a config or another mixin actually exists. | |
| 404 referenced_mixins = set() | |
| 405 for config, mixins in self.configs.items(): | |
| 406 for mixin in mixins: | |
| 407 if not mixin in self.mixins: | |
| 408 errs.append('Unknown mixin "%s" referenced by config "%s".' % | |
| 409 (mixin, config)) | |
| 410 referenced_mixins.add(mixin) | |
| 411 | |
| 412 for mixin in self.mixins: | |
| 413 for sub_mixin in self.mixins[mixin].get('mixins', []): | |
| 414 if not sub_mixin in self.mixins: | |
| 415 errs.append('Unknown mixin "%s" referenced by mixin "%s".' % | |
| 416 (sub_mixin, mixin)) | |
| 417 referenced_mixins.add(sub_mixin) | |
| 418 | |
| 419 # Check that every mixin defined is actually referenced somewhere. | |
| 420 for mixin in self.mixins: | |
| 421 if not mixin in referenced_mixins: | |
| 422 errs.append('Unreferenced mixin "%s".' % mixin) | |
| 423 | |
| 424 # If we're checking the Chromium config, check that the 'chromium' bots | |
| 425 # which build public artifacts do not include the chrome_with_codecs mixin. | |
| 426 if self.args.config_file == self.default_config: | |
| 427 if 'chromium' in self.masters: | |
| 428 for builder in self.masters['chromium']: | |
| 429 config = self.masters['chromium'][builder] | |
| 430 def RecurseMixins(current_mixin): | |
| 431 if current_mixin == 'chrome_with_codecs': | |
| 432 errs.append('Public artifact builder "%s" cannot contain the ' | |
| 433 '"chrome_with_codecs" mixin.' % builder) | |
| 434 return | |
| 435 if not 'mixins' in self.mixins[current_mixin]: | |
| 436 return | |
| 437 for mixin in self.mixins[current_mixin]['mixins']: | |
| 438 RecurseMixins(mixin) | |
| 439 | |
| 440 for mixin in self.configs[config]: | |
| 441 RecurseMixins(mixin) | |
| 442 else: | |
| 443 errs.append('Missing "chromium" master. Please update this ' | |
| 444 'proprietary codecs check with the name of the master ' | |
| 445 'responsible for public build artifacts.') | |
| 446 | |
| 447 if errs: | |
| 448 raise MBErr(('mb config file %s has problems:' % self.args.config_file) + | |
| 449 '\n ' + '\n '.join(errs)) | |
| 450 | |
| 451 if print_ok: | |
| 452 self.Print('mb config file %s looks ok.' % self.args.config_file) | |
| 453 return 0 | |
| 454 | |
| 455 def CmdAudit(self): | |
| 456 """Track the progress of the GYP->GN migration on the bots.""" | |
| 457 | |
| 458 # First, make sure the config file is okay, but don't print anything | |
| 459 # if it is (it will throw an error if it isn't). | |
| 460 self.CmdValidate(print_ok=False) | |
| 461 | |
| 462 stats = OrderedDict() | |
| 463 STAT_MASTER_ONLY = 'Master only' | |
| 464 STAT_CONFIG_ONLY = 'Config only' | |
| 465 STAT_TBD = 'Still TBD' | |
| 466 STAT_GYP = 'Still GYP' | |
| 467 STAT_DONE = 'Done (on GN)' | |
| 468 stats[STAT_MASTER_ONLY] = 0 | |
| 469 stats[STAT_CONFIG_ONLY] = 0 | |
| 470 stats[STAT_TBD] = 0 | |
| 471 stats[STAT_GYP] = 0 | |
| 472 stats[STAT_DONE] = 0 | |
| 473 | |
| 474 def PrintBuilders(heading, builders, notes): | |
| 475 stats.setdefault(heading, 0) | |
| 476 stats[heading] += len(builders) | |
| 477 if builders: | |
| 478 self.Print(' %s:' % heading) | |
| 479 for builder in sorted(builders): | |
| 480 self.Print(' %s%s' % (builder, notes[builder])) | |
| 481 | |
| 482 self.ReadConfigFile() | |
| 483 | |
| 484 masters = self.args.master or self.masters | |
| 485 for master in sorted(masters): | |
| 486 url = self.args.url_template.replace('{master}', master) | |
| 487 | |
| 488 self.Print('Auditing %s' % master) | |
| 489 | |
| 490 MASTERS_TO_SKIP = ( | |
| 491 'client.skia', | |
| 492 'client.v8.fyi', | |
| 493 'tryserver.v8', | |
| 494 ) | |
| 495 if master in MASTERS_TO_SKIP: | |
| 496 # Skip these bots because converting them is the responsibility of | |
| 497 # those teams and out of scope for the Chromium migration to GN. | |
| 498 self.Print(' Skipped (out of scope)') | |
| 499 self.Print('') | |
| 500 continue | |
| 501 | |
| 502 INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous', | |
| 503 'internal.client.kitchensync') | |
| 504 if master in INTERNAL_MASTERS and not self.args.internal: | |
| 505 # Skip these because the servers aren't accessible by default ... | |
| 506 self.Print(' Skipped (internal)') | |
| 507 self.Print('') | |
| 508 continue | |
| 509 | |
| 510 try: | |
| 511 # Fetch the /builders contents from the buildbot master. The | |
| 512 # keys of the dict are the builder names themselves. | |
| 513 json_contents = self.Fetch(url) | |
| 514 d = json.loads(json_contents) | |
| 515 except Exception as e: | |
| 516 self.Print(str(e)) | |
| 517 return 1 | |
| 518 | |
| 519 config_builders = set(self.masters[master]) | |
| 520 master_builders = set(d.keys()) | |
| 521 both = master_builders & config_builders | |
| 522 master_only = master_builders - config_builders | |
| 523 config_only = config_builders - master_builders | |
| 524 tbd = set() | |
| 525 gyp = set() | |
| 526 done = set() | |
| 527 notes = {builder: '' for builder in config_builders | master_builders} | |
| 528 | |
| 529 for builder in both: | |
| 530 config = self.masters[master][builder] | |
| 531 if config == 'tbd': | |
| 532 tbd.add(builder) | |
| 533 elif isinstance(config, dict): | |
| 534 vals = self.FlattenConfig(config.values()[0]) | |
| 535 if vals['type'] == 'gyp': | |
| 536 gyp.add(builder) | |
| 537 else: | |
| 538 done.add(builder) | |
| 539 elif config.startswith('//'): | |
| 540 done.add(builder) | |
| 541 else: | |
| 542 vals = self.FlattenConfig(config) | |
| 543 if vals['type'] == 'gyp': | |
| 544 gyp.add(builder) | |
| 545 else: | |
| 546 done.add(builder) | |
| 547 | |
| 548 if self.args.check_compile and (tbd or master_only): | |
| 549 either = tbd | master_only | |
| 550 for builder in either: | |
| 551 notes[builder] = ' (' + self.CheckCompile(master, builder) + ')' | |
| 552 | |
| 553 if master_only or config_only or tbd or gyp: | |
| 554 PrintBuilders(STAT_MASTER_ONLY, master_only, notes) | |
| 555 PrintBuilders(STAT_CONFIG_ONLY, config_only, notes) | |
| 556 PrintBuilders(STAT_TBD, tbd, notes) | |
| 557 PrintBuilders(STAT_GYP, gyp, notes) | |
| 558 else: | |
| 559 self.Print(' All GN!') | |
| 560 | |
| 561 stats[STAT_DONE] += len(done) | |
| 562 | |
| 563 self.Print('') | |
| 564 | |
| 565 fmt = '{:<27} {:>4}' | |
| 566 self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values())))) | |
| 567 self.Print(fmt.format('-' * 27, '----')) | |
| 568 for stat, count in stats.items(): | |
| 569 self.Print(fmt.format(stat, str(count))) | |
| 570 | |
| 571 return 0 | |
| 572 | |
| 573 def GetConfig(self): | |
| 574 build_dir = self.args.path[0] | |
| 575 | |
| 576 vals = self.DefaultVals() | |
| 577 if self.args.builder or self.args.master or self.args.config: | |
| 578 vals = self.Lookup() | |
| 579 if vals['type'] == 'gn': | |
| 580 # Re-run gn gen in order to ensure the config is consistent with the | |
| 581 # build dir. | |
| 582 self.RunGNGen(vals) | |
| 583 return vals | |
| 584 | |
| 585 mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type') | |
| 586 if not self.Exists(mb_type_path): | |
| 587 toolchain_path = self.PathJoin(self.ToAbsPath(build_dir), | |
| 588 'toolchain.ninja') | |
| 589 if not self.Exists(toolchain_path): | |
| 590 self.Print('Must either specify a path to an existing GN build dir ' | |
| 591 'or pass in a -m/-b pair or a -c flag to specify the ' | |
| 592 'configuration') | |
| 593 return {} | |
| 594 else: | |
| 595 mb_type = 'gn' | |
| 596 else: | |
| 597 mb_type = self.ReadFile(mb_type_path).strip() | |
| 598 | |
| 599 if mb_type == 'gn': | |
| 600 vals['gn_args'] = self.GNArgsFromDir(build_dir) | |
| 601 vals['type'] = mb_type | |
| 602 | |
| 603 return vals | |
| 604 | |
| 605 def GNArgsFromDir(self, build_dir): | |
| 606 args_contents = "" | |
| 607 gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn') | |
| 608 if self.Exists(gn_args_path): | |
| 609 args_contents = self.ReadFile(gn_args_path) | |
| 610 gn_args = [] | |
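| # Each args.gn line is assumed to have GN's canonical "name = value" form; | |
| # keep the name and everything after the "=" as the value. | |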
| 611 for l in args_contents.splitlines(): | |
| 612 fields = l.split(' ') | |
| 613 name = fields[0] | |
| 614 val = ' '.join(fields[2:]) | |
| 615 gn_args.append('%s=%s' % (name, val)) | |
| 616 | |
| 617 return ' '.join(gn_args) | |
| 618 | |
| 619 def Lookup(self): | |
| 620 vals = self.ReadIOSBotConfig() | |
| 621 if not vals: | |
| 622 self.ReadConfigFile() | |
| 623 config = self.ConfigFromArgs() | |
| 624 if config.startswith('//'): | |
| 625 if not self.Exists(self.ToAbsPath(config)): | |
| 626 raise MBErr('args file "%s" not found' % config) | |
| 627 vals = self.DefaultVals() | |
| 628 vals['args_file'] = config | |
| 629 else: | |
| 630 if not config in self.configs: | |
| 631 raise MBErr('Config "%s" not found in %s' % | |
| 632 (config, self.args.config_file)) | |
| 633 vals = self.FlattenConfig(config) | |
| 634 | |
| 635 # Do some basic sanity checking on the config so that we | |
| 636 # don't have to do this in every caller. | |
| 637 if 'type' not in vals: | |
| 638 vals['type'] = 'gn' | |
| 639 assert vals['type'] in ('gn', 'gyp'), ( | |
| 640 'Unknown meta-build type "%s"' % vals['type']) | |
| 641 | |
| 642 return vals | |
| 643 | |
| 644 def ReadIOSBotConfig(self): | |
| 645 if not self.args.master or not self.args.builder: | |
| 646 return {} | |
| 647 path = self.PathJoin(self.chromium_src_dir, 'ios', 'build', 'bots', | |
| 648 self.args.master, self.args.builder + '.json') | |
| 649 if not self.Exists(path): | |
| 650 return {} | |
| 651 | |
| 652 contents = json.loads(self.ReadFile(path)) | |
| 653 gyp_vals = contents.get('GYP_DEFINES', {}) | |
| 654 if isinstance(gyp_vals, dict): | |
| 655 gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items()) | |
| 656 else: | |
| 657 gyp_defines = ' '.join(gyp_vals) | |
| 658 gn_args = ' '.join(contents.get('gn_args', [])) | |
| 659 | |
| 660 vals = self.DefaultVals() | |
| 661 vals['gn_args'] = gn_args | |
| 662 vals['gyp_defines'] = gyp_defines | |
| 663 vals['type'] = contents.get('mb_type', 'gn') | |
| 664 return vals | |
| 665 | |
| 666 def ReadConfigFile(self): | |
| 667 if not self.Exists(self.args.config_file): | |
| 668 raise MBErr('config file not found at %s' % self.args.config_file) | |
| 669 | |
| 670 try: | |
| 671 contents = ast.literal_eval(self.ReadFile(self.args.config_file)) | |
| 672 except SyntaxError as e: | |
| 673 raise MBErr('Failed to parse config file "%s": %s' % | |
| 674 (self.args.config_file, e)) | |
| 675 | |
| 676 self.configs = contents['configs'] | |
| 677 self.masters = contents['masters'] | |
| 678 self.mixins = contents['mixins'] | |
| 679 | |
| 680 def ReadIsolateMap(self): | |
| 681 if not self.Exists(self.args.isolate_map_file): | |
| 682 raise MBErr('isolate map file not found at %s' % | |
| 683 self.args.isolate_map_file) | |
| 684 try: | |
| 685 return ast.literal_eval(self.ReadFile(self.args.isolate_map_file)) | |
| 686 except SyntaxError as e: | |
| 687 raise MBErr('Failed to parse isolate map file "%s": %s' % | |
| 688 (self.args.isolate_map_file, e)) | |
| 689 | |
| 690 def ConfigFromArgs(self): | |
| 691 if self.args.config: | |
| 692 if self.args.master or self.args.builder: | |
| 693 raise MBErr('Cannot specify both -c/--config and -m/--master or ' | |
| 694 '-b/--builder') | |
| 695 | |
| 696 return self.args.config | |
| 697 | |
| 698 if not self.args.master or not self.args.builder: | |
| 699 raise MBErr('Must specify either -c/--config or ' | |
| 700 '(-m/--master and -b/--builder)') | |
| 701 | |
| 702 if not self.args.master in self.masters: | |
| 703 raise MBErr('Master name "%s" not found in "%s"' % | |
| 704 (self.args.master, self.args.config_file)) | |
| 705 | |
| 706 if not self.args.builder in self.masters[self.args.master]: | |
| 707 raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' % | |
| 708 (self.args.builder, self.args.master, self.args.config_file)) | |
| 709 | |
| 710 config = self.masters[self.args.master][self.args.builder] | |
| 711 if isinstance(config, dict): | |
| 712 if self.args.phase is None: | |
| 713 raise MBErr('Must specify a build --phase for %s on %s' % | |
| 714 (self.args.builder, self.args.master)) | |
| 715 phase = str(self.args.phase) | |
| 716 if phase not in config: | |
| 717 raise MBErr('Phase %s doesn\'t exist for %s on %s' % | |
| 718 (phase, self.args.builder, self.args.master)) | |
| 719 return config[phase] | |
| 720 | |
| 721 if self.args.phase is not None: | |
| 722 raise MBErr('Must not specify a build --phase for %s on %s' % | |
| 723 (self.args.builder, self.args.master)) | |
| 724 return config | |
| 725 | |
| 726 def FlattenConfig(self, config): | |
| 727 mixins = self.configs[config] | |
| 728 vals = self.DefaultVals() | |
| 729 | |
| 730 visited = [] | |
| 731 self.FlattenMixins(mixins, vals, visited) | |
| 732 return vals | |
| 733 | |
| 734 def DefaultVals(self): | |
| 735 return { | |
| 736 'args_file': '', | |
| 737 'cros_passthrough': False, | |
| 738 'gn_args': '', | |
| 739 'gyp_defines': '', | |
| 740 'gyp_crosscompile': False, | |
| 741 'type': 'gn', | |
| 742 } | |
| 743 | |
| 744 def FlattenMixins(self, mixins, vals, visited): | |
| 745 for m in mixins: | |
| 746 if m not in self.mixins: | |
| 747 raise MBErr('Unknown mixin "%s"' % m) | |
| 748 | |
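| # 'visited' records the mixin expansion order; it is not used for cycle detection. | |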
| 749 visited.append(m) | |
| 750 | |
| 751 mixin_vals = self.mixins[m] | |
| 752 | |
| 753 if 'cros_passthrough' in mixin_vals: | |
| 754 vals['cros_passthrough'] = mixin_vals['cros_passthrough'] | |
| 755 if 'gn_args' in mixin_vals: | |
| 756 if vals['gn_args']: | |
| 757 vals['gn_args'] += ' ' + mixin_vals['gn_args'] | |
| 758 else: | |
| 759 vals['gn_args'] = mixin_vals['gn_args'] | |
| 760 if 'gyp_crosscompile' in mixin_vals: | |
| 761 vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile'] | |
| 762 if 'gyp_defines' in mixin_vals: | |
| 763 if vals['gyp_defines']: | |
| 764 vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines'] | |
| 765 else: | |
| 766 vals['gyp_defines'] = mixin_vals['gyp_defines'] | |
| 767 if 'type' in mixin_vals: | |
| 768 vals['type'] = mixin_vals['type'] | |
| 769 | |
| 770 if 'mixins' in mixin_vals: | |
| 771 self.FlattenMixins(mixin_vals['mixins'], vals, visited) | |
| 772 return vals | |
| 773 | |
| 774 def ClobberIfNeeded(self, vals): | |
| 775 path = self.args.path[0] | |
| 776 build_dir = self.ToAbsPath(path) | |
| 777 mb_type_path = self.PathJoin(build_dir, 'mb_type') | |
| 778 needs_clobber = False | |
| 779 new_mb_type = vals['type'] | |
| 780 if self.Exists(build_dir): | |
| 781 if self.Exists(mb_type_path): | |
| 782 old_mb_type = self.ReadFile(mb_type_path) | |
| 783 if old_mb_type != new_mb_type: | |
| 784 self.Print("Build type mismatch: was %s, will be %s, clobbering %s" % | |
| 785 (old_mb_type, new_mb_type, path)) | |
| 786 needs_clobber = True | |
| 787 else: | |
| 788 # There is no 'mb_type' file in the build directory, so this probably | |
| 789 # means that the prior build(s) were not done through mb, and we | |
| 790 # have no idea if this was a GYP build or a GN build. Clobber it | |
| 791 # to be safe. | |
| 792 self.Print("%s/mb_type missing, clobbering to be safe" % path) | |
| 793 needs_clobber = True | |
| 794 | |
| 795 if self.args.dryrun: | |
| 796 return | |
| 797 | |
| 798 if needs_clobber: | |
| 799 self.RemoveDirectory(build_dir) | |
| 800 | |
| 801 self.MaybeMakeDirectory(build_dir) | |
| 802 self.WriteFile(mb_type_path, new_mb_type) | |
| 803 | |
| 804 def RunGNGen(self, vals): | |
| 805 build_dir = self.args.path[0] | |
| 806 | |
| 807 cmd = self.GNCmd('gen', build_dir, '--check') | |
| 808 gn_args = self.GNArgs(vals) | |
| 809 | |
| 810 # Since GN hasn't run yet, the build directory may not even exist. | |
| 811 self.MaybeMakeDirectory(self.ToAbsPath(build_dir)) | |
| 812 | |
| 813 gn_args_path = self.ToAbsPath(build_dir, 'args.gn') | |
| 814 self.WriteFile(gn_args_path, gn_args, force_verbose=True) | |
| 815 | |
| 816 swarming_targets = [] | |
| 817 if getattr(self.args, 'swarming_targets_file', None): | |
| 818 # We need GN to generate the list of runtime dependencies for | |
| 819 # the compile targets listed (one per line) in the file so | |
| 820 # we can run them via swarming. We use gn_isolate_map.pyl to convert | |
| 821 # the compile targets to the matching GN labels. | |
| 822 path = self.args.swarming_targets_file | |
| 823 if not self.Exists(path): | |
| 824 self.WriteFailureAndRaise('"%s" does not exist' % path, | |
| 825 output_path=None) | |
| 826 contents = self.ReadFile(path) | |
| 827 swarming_targets = set(contents.splitlines()) | |
| 828 | |
| 829 isolate_map = self.ReadIsolateMap() | |
| 830 err, labels = self.MapTargetsToLabels(isolate_map, swarming_targets) | |
| 831 if err: | |
| 832 raise MBErr(err) | |
| 833 | |
| 834 gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps') | |
| 835 self.WriteFile(gn_runtime_deps_path, '\n'.join(labels) + '\n') | |
| 836 cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path) | |
| 837 | |
| 838 ret, _, _ = self.Run(cmd) | |
| 839 if ret: | |
| 840 # If `gn gen` failed, we should exit early rather than trying to | |
| 841 # generate isolates. Run() will have already logged any error output. | |
| 842 self.Print('GN gen failed: %d' % ret) | |
| 843 return ret | |
| 844 | |
| 845 android = 'target_os="android"' in vals['gn_args'] | |
| 846 for target in swarming_targets: | |
| 847 if android: | |
| 848 # Android targets may be either android_apk or executable. The former | |
| 849 # will result in runtime_deps associated with the stamp file, while the | |
| 850 # latter will result in runtime_deps associated with the executable. | |
| 851 label = isolate_map[target]['label'] | |
| 852 runtime_deps_targets = [ | |
| 853 target + '.runtime_deps', | |
| 854 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] | |
| 855 elif isolate_map[target]['type'] == 'gpu_browser_test': | |
| 856 if self.platform == 'win32': | |
| 857 runtime_deps_targets = ['browser_tests.exe.runtime_deps'] | |
| 858 else: | |
| 859 runtime_deps_targets = ['browser_tests.runtime_deps'] | |
| 860 elif (isolate_map[target]['type'] == 'script' or | |
| 861 isolate_map[target].get('label_type') == 'group'): | |
| 862 # For script targets, the build target is usually a group, | |
| 863 # for which gn generates the runtime_deps next to the stamp file | |
| 864 # for the label, which lives under the obj/ directory, but it may | |
| 865 # also be an executable. | |
| 866 label = isolate_map[target]['label'] | |
| 867 runtime_deps_targets = [ | |
| 868 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] | |
| 869 if self.platform == 'win32': | |
| 870 runtime_deps_targets += [ target + '.exe.runtime_deps' ] | |
| 871 else: | |
| 872 runtime_deps_targets += [ target + '.runtime_deps' ] | |
| 873 elif self.platform == 'win32': | |
| 874 runtime_deps_targets = [target + '.exe.runtime_deps'] | |
| 875 else: | |
| 876 runtime_deps_targets = [target + '.runtime_deps'] | |
| 877 | |
| 878 for r in runtime_deps_targets: | |
| 879 runtime_deps_path = self.ToAbsPath(build_dir, r) | |
| 880 if self.Exists(runtime_deps_path): | |
| 881 break | |
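| # for/else: the else clause below runs only if no runtime_deps file was found. | |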
| 882 else: | |
| 883 raise MBErr('did not generate any of %s' % | |
| 884 ', '.join(runtime_deps_targets)) | |
| 885 | |
| 886 command, extra_files = self.GetIsolateCommand(target, vals) | |
| 887 | |
| 888 runtime_deps = self.ReadFile(runtime_deps_path).splitlines() | |
| 889 | |
| 890 self.WriteIsolateFiles(build_dir, command, target, runtime_deps, | |
| 891 extra_files) | |
| 892 | |
| 893 return 0 | |
| 894 | |
| 895 def RunGNIsolate(self, vals): | |
| 896 target = self.args.target[0] | |
| 897 isolate_map = self.ReadIsolateMap() | |
| 898 err, labels = self.MapTargetsToLabels(isolate_map, [target]) | |
| 899 if err: | |
| 900 raise MBErr(err) | |
| 901 label = labels[0] | |
| 902 | |
| 903 build_dir = self.args.path[0] | |
| 904 command, extra_files = self.GetIsolateCommand(target, vals) | |
| 905 | |
| 906 cmd = self.GNCmd('desc', build_dir, label, 'runtime_deps') | |
| 907 ret, out, _ = self.Call(cmd) | |
| 908 if ret: | |
| 909 if out: | |
| 910 self.Print(out) | |
| 911 return ret | |
| 912 | |
| 913 runtime_deps = out.splitlines() | |
| 914 | |
| 915 self.WriteIsolateFiles(build_dir, command, target, runtime_deps, | |
| 916 extra_files) | |
| 917 | |
| 918 ret, _, _ = self.Run([ | |
| 919 self.executable, | |
| 920 self.PathJoin('tools', 'swarming_client', 'isolate.py'), | |
| 921 'check', | |
| 922 '-i', | |
| 923 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), | |
| 924 '-s', | |
| 925 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))], | |
| 926 buffer_output=False) | |
| 927 | |
| 928 return ret | |
| 929 | |
| 930 def WriteIsolateFiles(self, build_dir, command, target, runtime_deps, | |
| 931 extra_files): | |
| 932 isolate_path = self.ToAbsPath(build_dir, target + '.isolate') | |
| 933 self.WriteFile(isolate_path, | |
| 934 pprint.pformat({ | |
| 935 'variables': { | |
| 936 'command': command, | |
| 937 'files': sorted(runtime_deps + extra_files), | |
| 938 } | |
| 939 }) + '\n') | |
| 940 | |
| 941 self.WriteJSON( | |
| 942 { | |
| 943 'args': [ | |
| 944 '--isolated', | |
| 945 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), | |
| 946 '--isolate', | |
| 947 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), | |
| 948 ], | |
| 949 'dir': self.chromium_src_dir, | |
| 950 'version': 1, | |
| 951 }, | |
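| # This writes "<target>.isolated.gen.json" next to the .isolate file. | |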
| 952 isolate_path + 'd.gen.json', | |
| 953 ) | |
| 954 | |
| 955 def MapTargetsToLabels(self, isolate_map, targets): | |
| 956 labels = [] | |
| 957 err = '' | |
| 958 | |
| 959 def StripTestSuffixes(target): | |
| 960 for suffix in ('_apk_run', '_apk', '_run'): | |
| 961 if target.endswith(suffix): | |
| 962 return target[:-len(suffix)], suffix | |
| 963 return None, None | |
| 964 | |
| 965 for target in targets: | |
| 966 if target == 'all': | |
| 967 labels.append(target) | |
| 968 elif target.startswith('//'): | |
| 969 labels.append(target) | |
| 970 else: | |
| 971 if target in isolate_map: | |
| 972 stripped_target, suffix = target, '' | |
| 973 else: | |
| 974 stripped_target, suffix = StripTestSuffixes(target) | |
| 975 if stripped_target in isolate_map: | |
| 976 if isolate_map[stripped_target]['type'] == 'unknown': | |
| 977 err += ('test target "%s" type is unknown\n' % target) | |
| 978 else: | |
| 979 labels.append(isolate_map[stripped_target]['label'] + suffix) | |
| 980 else: | |
| 981 err += ('target "%s" not found in ' | |
| 982 '//testing/buildbot/gn_isolate_map.pyl\n' % target) | |
| 983 | |
| 984 return err, labels | |
| 985 | |
| 986 def GNCmd(self, subcommand, path, *args): | |
| 987 if self.platform == 'linux2': | |
| 988 subdir, exe = 'linux64', 'gn' | |
| 989 elif self.platform == 'darwin': | |
| 990 subdir, exe = 'mac', 'gn' | |
| 991 else: | |
| 992 subdir, exe = 'win', 'gn.exe' | |
| 993 | |
| 994 gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, exe) | |
| 995 return [gn_path, subcommand, path] + list(args) | |
| 996 | |
| 997 | |
| 998 def GNArgs(self, vals): | |
| 999 if vals['cros_passthrough']: | |
| 1000 if not 'GN_ARGS' in os.environ: | |
| 1001 raise MBErr('MB is expecting GN_ARGS to be in the environment') | |
| 1002 gn_args = os.environ['GN_ARGS'] | |
| 1003 if not re.search('target_os.*=.*"chromeos"', gn_args): | |
| 1004 raise MBErr('GN_ARGS is missing target_os = "chromeos": (GN_ARGS=%s)' % | |
| 1005 gn_args) | |
| 1006 else: | |
| 1007 gn_args = vals['gn_args'] | |
| 1008 | |
| 1009 if self.args.goma_dir: | |
| 1010 gn_args += ' goma_dir="%s"' % self.args.goma_dir | |
| 1011 | |
| 1012 android_version_code = self.args.android_version_code | |
| 1013 if android_version_code: | |
| 1014 gn_args += ' android_default_version_code="%s"' % android_version_code | |
| 1015 | |
| 1016 android_version_name = self.args.android_version_name | |
| 1017 if android_version_name: | |
| 1018 gn_args += ' android_default_version_name="%s"' % android_version_name | |
| 1019 | |
| 1020 # Canonicalize the arg string into a sorted, newline-separated list | |
| 1021 # of key-value pairs, and de-dup the keys if need be so that only | |
| 1022 # the last instance of each arg is listed. | |
| 1023 gn_args = gn_helpers.ToGNString(gn_helpers.FromGNArgs(gn_args)) | |
| 1024 | |
| 1025 args_file = vals.get('args_file', None) | |
| 1026 if args_file: | |
| 1027 gn_args = ('import("%s")\n' % vals['args_file']) + gn_args | |
| 1028 return gn_args | |
| 1029 | |
| 1030 def RunGYPGen(self, vals): | |
| 1031 path = self.args.path[0] | |
| 1032 | |
| 1033 output_dir = self.ParseGYPConfigPath(path) | |
| 1034 cmd, env = self.GYPCmd(output_dir, vals) | |
| 1035 ret, _, _ = self.Run(cmd, env=env) | |
| 1036 return ret | |
| 1037 | |
| 1038 def RunGYPAnalyze(self, vals): | |
| 1039 output_dir = self.ParseGYPConfigPath(self.args.path[0]) | |
| 1040 if self.args.verbose: | |
| 1041 inp = self.ReadInputJSON(['files', 'test_targets', | |
| 1042 'additional_compile_targets']) | |
| 1043 self.Print() | |
| 1044 self.Print('analyze input:') | |
| 1045 self.PrintJSON(inp) | |
| 1046 self.Print() | |
| 1047 | |
| 1048 cmd, env = self.GYPCmd(output_dir, vals) | |
| 1049 cmd.extend(['-f', 'analyzer', | |
| 1050 '-G', 'config_path=%s' % self.args.input_path[0], | |
| 1051 '-G', 'analyzer_output_path=%s' % self.args.output_path[0]]) | |
| 1052 ret, _, _ = self.Run(cmd, env=env) | |
| 1053 if not ret and self.args.verbose: | |
| 1054 outp = json.loads(self.ReadFile(self.args.output_path[0])) | |
| 1055 self.Print() | |
| 1056 self.Print('analyze output:') | |
| 1057 self.PrintJSON(outp) | |
| 1058 self.Print() | |
| 1059 | |
| 1060 return ret | |
| 1061 | |
| 1062 def GetIsolateCommand(self, target, vals): | |
| 1063 isolate_map = self.ReadIsolateMap() | |
| 1064 test_type = isolate_map[target]['type'] | |
| 1065 | |
| 1066 android = 'target_os="android"' in vals['gn_args'] | |
| 1067 is_linux = self.platform == 'linux2' and not android | |
| 1068 | |
| 1069 if test_type == 'nontest': | |
| 1070 self.WriteFailureAndRaise('We should not be isolating %s.' % target, | |
| 1071 output_path=None) | |
| 1072 if test_type not in ('console_test_launcher', 'windowed_test_launcher', | |
| 1073 'non_parallel_console_test_launcher', | |
| 1074 'additional_compile_target', 'junit_test'): | |
| 1075 self.WriteFailureAndRaise('No command line for %s found (test type %s).' | |
| 1076 % (target, test_type), output_path=None) | |
| 1077 | |
| 1078 cmdline = [] | |
| 1079 extra_files = [] | |
| 1080 | |
| 1081 if android: | |
| 1082 logdog_command = [ | |
| 1083 '--logdog-bin-cmd', './../../bin/logdog_butler', | |
| 1084 '--project', 'chromium', | |
| 1085 '--service-account-json', | |
| 1086 '/creds/service_accounts/service-account-luci-logdog-publisher.json', | |
| 1087 '--prefix', 'android/swarming/logcats/${SWARMING_TASK_ID}', | |
| 1088 '--source', '${ISOLATED_OUTDIR}/logcats', | |
| 1089 '--name', 'unified_logcats', | |
| 1090 ] | |
| 1091 test_cmdline = [ | |
| 1092 self.PathJoin('bin', 'run_%s' % target), | |
| 1093 '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats', | |
| 1094 ] | |
| 1095 if test_type != 'junit_test': | |
| 1096 test_cmdline += ['--target-devices-file', '${SWARMING_BOT_FILE}',] | |
| 1097 cmdline = (['./../../build/android/test_wrapper/logdog_wrapper.py'] | |
| 1098 + logdog_command + test_cmdline + ['-v']) | |
| 1099 else: | |
| 1100 extra_files = ['../../testing/test_env.py'] | |
| 1101 | |
| 1102 # This needs to mirror the settings in //build/config/ui.gni: | |
| 1103 # use_x11 = is_linux && !use_ozone. | |
| 1104 use_x11 = is_linux and not 'use_ozone=true' in vals['gn_args'] | |
| 1105 | |
| 1106 xvfb = use_x11 and test_type == 'windowed_test_launcher' | |
| 1107 if xvfb: | |
| 1108 extra_files += [ | |
| 1109 '../../testing/xvfb.py', | |
| 1110 ] | |
| 1111 | |
| 1112 # Memcheck is only supported on Linux; ignore it on other platforms. | |
| 1113 memcheck = is_linux and 'rtc_use_memcheck=true' in vals['gn_args'] | |
| 1114 memcheck_cmdline = [ | |
| 1115 'bash', | |
| 1116 '../../tools/valgrind-webrtc/webrtc_tests.sh', | |
| 1117 '--tool', | |
| 1118 'memcheck', | |
| 1119 '--target', | |
| 1120 'Release', | |
| 1121 '--build-dir', | |
| 1122 '..', | |
| 1123 '--test', | |
| 1124 ] | |
| 1125 | |
| 1126 gtest_parallel = (test_type != 'non_parallel_console_test_launcher' and | |
| 1127 not memcheck) | |
| 1128 gtest_parallel_wrapper = [ | |
| 1129 '../../third_party/gtest-parallel/gtest-parallel-wrapper.py' | |
| 1130 ] | |
| 1131 if gtest_parallel: | |
| 1132 extra_files += [ | |
| 1133 '../../third_party/gtest-parallel/gtest-parallel', | |
| 1134 '../../third_party/gtest-parallel/gtest-parallel-wrapper.py', | |
| 1135 ] | |
| 1136 | |
| 1137 asan = 'is_asan=true' in vals['gn_args'] | |
| 1138 msan = 'is_msan=true' in vals['gn_args'] | |
| 1139 tsan = 'is_tsan=true' in vals['gn_args'] | |
| 1140 | |
| 1141 executable_prefix = '.\\' if self.platform == 'win32' else './' | |
| 1142 executable_suffix = '.exe' if self.platform == 'win32' else '' | |
| 1143 executable = executable_prefix + target + executable_suffix | |
| 1144 | |
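| # For non-Android targets the command is test_env.py (or xvfb.py), optionally a | |
| # memcheck or gtest-parallel wrapper, then the executable, '--', and the | |
| # sanitizer flags below. | |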
| 1145 cmdline = (['../../testing/xvfb.py'] if xvfb else | |
| 1146 ['../../testing/test_env.py']) | |
| 1147 if memcheck: | |
| 1148 cmdline += memcheck_cmdline | |
| 1149 elif gtest_parallel: | |
| 1150 cmdline += gtest_parallel_wrapper | |
| 1151 cmdline += [ | |
| 1152 executable, | |
| 1153 '--', | |
| 1154 '--asan=%d' % asan, | |
| 1155 '--msan=%d' % msan, | |
| 1156 '--tsan=%d' % tsan, | |
| 1157 ] | |
| 1158 | |
| 1159 return cmdline, extra_files | |
| 1160 | |
| 1161 def ToAbsPath(self, build_path, *comps): | |
| 1162 return self.PathJoin(self.chromium_src_dir, | |
| 1163 self.ToSrcRelPath(build_path), | |
| 1164 *comps) | |
| 1165 | |
| 1166 def ToSrcRelPath(self, path): | |
| 1167 """Returns a relative path from the top of the repo.""" | |
| 1168 if path.startswith('//'): | |
| 1169 return path[2:].replace('/', self.sep) | |
| 1170 return self.RelPath(path, self.chromium_src_dir) | |
| 1171 | |
| 1172 def ParseGYPConfigPath(self, path): | |
| 1173 rpath = self.ToSrcRelPath(path) | |
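| # GYP's output_dir is the parent of the build dir (e.g. 'out' for 'out/Release'). | |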
| 1174 output_dir, _, _ = rpath.rpartition(self.sep) | |
| 1175 return output_dir | |
| 1176 | |
| 1177 def GYPCmd(self, output_dir, vals): | |
| 1178 if vals['cros_passthrough']: | |
| 1179 if not 'GYP_DEFINES' in os.environ: | |
| 1180 raise MBErr('MB is expecting GYP_DEFINES to be in the environment') | |
| 1181 gyp_defines = os.environ['GYP_DEFINES'] | |
| 1182 if not 'chromeos=1' in gyp_defines: | |
| 1183 raise MBErr('GYP_DEFINES is missing chromeos=1: (GYP_DEFINES=%s)' % | |
| 1184 gyp_defines) | |
| 1185 else: | |
| 1186 gyp_defines = vals['gyp_defines'] | |
| 1187 | |
| 1188 goma_dir = self.args.goma_dir | |
| 1189 | |
| 1190 # GYP uses shlex.split() to split the gyp defines into separate arguments, | |
| 1191 # so we can support backslashes and spaces in arguments by quoting | |
| 1192 # them, even on Windows, where this normally wouldn't work. | |
| 1193 if goma_dir and ('\\' in goma_dir or ' ' in goma_dir): | |
| 1194 goma_dir = "'%s'" % goma_dir | |
| 1195 | |
| 1196 if goma_dir: | |
| 1197 gyp_defines += ' gomadir=%s' % goma_dir | |
| 1198 | |
| 1199 android_version_code = self.args.android_version_code | |
| 1200 if android_version_code: | |
| 1201 gyp_defines += ' app_manifest_version_code=%s' % android_version_code | |
| 1202 | |
| 1203 android_version_name = self.args.android_version_name | |
| 1204 if android_version_name: | |
| 1205 gyp_defines += ' app_manifest_version_name=%s' % android_version_name | |
| 1206 | |
| 1207 cmd = [ | |
| 1208 self.executable, | |
| 1209 self.args.gyp_script, | |
| 1210 '-G', | |
| 1211 'output_dir=' + output_dir, | |
| 1212 ] | |
| 1213 | |
| 1214 # Ensure that we have an environment that only contains | |
| 1215 # the exact values of the GYP variables we need. | |
| 1216 env = os.environ.copy() | |
| 1217 | |
| 1218 # This is a terrible hack to work around the fact that | |
| 1219 # //tools/clang/scripts/update.py is invoked by GYP and GN but | |
| 1220 # currently relies on an environment variable to figure out | |
| 1221 # what revision to embed in the command line #defines. | |
| 1222 # For GN, we've made this work via a gn arg that will cause update.py | |
| 1223 # to get an additional command line arg, but getting that to work | |
| 1224 # via GYP_DEFINES has proven difficult, so we rewrite the GYP_DEFINES | |
| 1225 # to get rid of the arg and add the old var in, instead. | |
| 1226 # See crbug.com/582737 for more on this. This can hopefully all | |
| 1227 # go away with GYP. | |
| 1228 m = re.search(r'llvm_force_head_revision=1\s*', gyp_defines) | |
| 1229 if m: | |
| 1230 env['LLVM_FORCE_HEAD_REVISION'] = '1' | |
| 1231 gyp_defines = gyp_defines.replace(m.group(0), '') | |
| 1232 | |
| 1233 # This is another terrible hack to work around the fact that | |
| 1234 # GYP sets the link concurrency to use via the GYP_LINK_CONCURRENCY | |
| 1235 # environment variable, and not via a proper GYP_DEFINE. See | |
| 1236 # crbug.com/611491 for more on this. | |
| 1237 m = re.search(r'gyp_link_concurrency=(\d+)(\s*)', gyp_defines) | |
| 1238 if m: | |
| 1239 env['GYP_LINK_CONCURRENCY'] = m.group(1) | |
| 1240 gyp_defines = gyp_defines.replace(m.group(0), '') | |
| 1241 | |
| 1242 env['GYP_GENERATORS'] = 'ninja' | |
| 1243 if 'GYP_CHROMIUM_NO_ACTION' in env: | |
| 1244 del env['GYP_CHROMIUM_NO_ACTION'] | |
| 1245 if 'GYP_CROSSCOMPILE' in env: | |
| 1246 del env['GYP_CROSSCOMPILE'] | |
| 1247 env['GYP_DEFINES'] = gyp_defines | |
| 1248 if vals['gyp_crosscompile']: | |
| 1249 env['GYP_CROSSCOMPILE'] = '1' | |
| 1250 return cmd, env | |
| 1251 | |
| 1252 def RunGNAnalyze(self, vals): | |
| 1253 # Analyze runs before 'gn gen' now, so we need to run gn gen | |
| 1254 # in order to ensure that we have a build directory. | |
| 1255 ret = self.RunGNGen(vals) | |
| 1256 if ret: | |
| 1257 return ret | |
| 1258 | |
| 1259 build_path = self.args.path[0] | |
| 1260 input_path = self.args.input_path[0] | |
| 1261 gn_input_path = input_path + '.gn' | |
| 1262 output_path = self.args.output_path[0] | |
| 1263 gn_output_path = output_path + '.gn' | |
| 1264 | |
| 1265 inp = self.ReadInputJSON(['files', 'test_targets', | |
| 1266 'additional_compile_targets']) | |
| 1267 if self.args.verbose: | |
| 1268 self.Print() | |
| 1269 self.Print('analyze input:') | |
| 1270 self.PrintJSON(inp) | |
| 1271 self.Print() | |
| 1272 | |
| 1273 | |
| 1274 # This shouldn't normally happen, but could due to unusual race conditions, | |
| 1275 # like a try job that gets scheduled before a patch lands but runs after | |
| 1276 # the patch has landed. | |
| 1277 if not inp['files']: | |
| 1278 self.Print('Warning: No files modified in patch, bailing out early.') | |
| 1279 self.WriteJSON({ | |
| 1280 'status': 'No dependency', | |
| 1281 'compile_targets': [], | |
| 1282 'test_targets': [], | |
| 1283 }, output_path) | |
| 1284 return 0 | |
| 1285 | |
| 1286 gn_inp = {} | |
| 1287 gn_inp['files'] = ['//' + f for f in inp['files'] if not f.startswith('//')] | |
| 1288 | |
| 1289 isolate_map = self.ReadIsolateMap() | |
| 1290 err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels( | |
| 1291 isolate_map, inp['additional_compile_targets']) | |
| 1292 if err: | |
| 1293 raise MBErr(err) | |
| 1294 | |
| 1295 err, gn_inp['test_targets'] = self.MapTargetsToLabels( | |
| 1296 isolate_map, inp['test_targets']) | |
| 1297 if err: | |
| 1298 raise MBErr(err) | |
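| # Remember which ninja target each GN label came from so the output below can | |
| # report the original target names. | |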
| 1299 labels_to_targets = {} | |
| 1300 for i, label in enumerate(gn_inp['test_targets']): | |
| 1301 labels_to_targets[label] = inp['test_targets'][i] | |
| 1302 | |
| 1303 try: | |
| 1304 self.WriteJSON(gn_inp, gn_input_path) | |
| 1305 cmd = self.GNCmd('analyze', build_path, gn_input_path, gn_output_path) | |
| 1306 ret, _, _ = self.Run(cmd, force_verbose=True) | |
| 1307 if ret: | |
| 1308 return ret | |
| 1309 | |
| 1310 gn_outp_str = self.ReadFile(gn_output_path) | |
| 1311 try: | |
| 1312 gn_outp = json.loads(gn_outp_str) | |
| 1313 except Exception as e: | |
| 1314 self.Print("Failed to parse the JSON string GN returned: %s\n%s" | |
| 1315 % (repr(gn_outp_str), str(e))) | |
| 1316 raise | |
| 1317 | |
| 1318 outp = {} | |
| 1319 if 'status' in gn_outp: | |
| 1320 outp['status'] = gn_outp['status'] | |
| 1321 if 'error' in gn_outp: | |
| 1322 outp['error'] = gn_outp['error'] | |
| 1323 if 'invalid_targets' in gn_outp: | |
| 1324 outp['invalid_targets'] = gn_outp['invalid_targets'] | |
| 1325 if 'compile_targets' in gn_outp: | |
| 1326 if 'all' in gn_outp['compile_targets']: | |
| 1327 outp['compile_targets'] = ['all'] | |
| 1328 else: | |
| 1329 outp['compile_targets'] = [ | |
| 1330 label.replace('//', '') for label in gn_outp['compile_targets']] | |
| 1331 if 'test_targets' in gn_outp: | |
| 1332 outp['test_targets'] = [ | |
| 1333 labels_to_targets[label] for label in gn_outp['test_targets']] | |
| 1334 | |
| 1335 if self.args.verbose: | |
| 1336 self.Print() | |
| 1337 self.Print('analyze output:') | |
| 1338 self.PrintJSON(outp) | |
| 1339 self.Print() | |
| 1340 | |
| 1341 self.WriteJSON(outp, output_path) | |
| 1342 | |
| 1343 finally: | |
| 1344 if self.Exists(gn_input_path): | |
| 1345 self.RemoveFile(gn_input_path) | |
| 1346 if self.Exists(gn_output_path): | |
| 1347 self.RemoveFile(gn_output_path) | |
| 1348 | |
| 1349 return 0 | |
| 1350 | |
| 1351 def ReadInputJSON(self, required_keys): | |
| 1352 path = self.args.input_path[0] | |
| 1353 output_path = self.args.output_path[0] | |
| 1354 if not self.Exists(path): | |
| 1355 self.WriteFailureAndRaise('"%s" does not exist' % path, output_path) | |
| 1356 | |
| 1357 try: | |
| 1358 inp = json.loads(self.ReadFile(path)) | |
| 1359 except Exception as e: | |
| 1360 self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' % | |
| 1361 (path, e), output_path) | |
| 1362 | |
| 1363 for k in required_keys: | |
| 1364 if not k in inp: | |
| 1365 self.WriteFailureAndRaise('input file is missing a "%s" key' % k, | |
| 1366 output_path) | |
| 1367 | |
| 1368 return inp | |
| 1369 | |
| 1370 def WriteFailureAndRaise(self, msg, output_path): | |
| 1371 if output_path: | |
| 1372 self.WriteJSON({'error': msg}, output_path, force_verbose=True) | |
| 1373 raise MBErr(msg) | |
| 1374 | |
| 1375 def WriteJSON(self, obj, path, force_verbose=False): | |
| 1376 try: | |
| 1377 self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n', | |
| 1378 force_verbose=force_verbose) | |
| 1379 except Exception as e: | |
| 1380 raise MBErr('Error %s writing to the output path "%s"' % | |
| 1381 (e, path)) | |
| 1382 | |
| 1383 def CheckCompile(self, master, builder): | |
| 1384 url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1' | |
| 1385 url = urllib2.quote(url_template.format(master=master, builder=builder), | |
| 1386 safe=':/()?=') | |
| 1387 try: | |
| 1388 builds = json.loads(self.Fetch(url)) | |
| 1389 except Exception as e: | |
| 1390 return str(e) | |
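| # A build counts as successful if its status text starts with | |
| # ["build", "successful"]. | |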
| 1391 successes = sorted( | |
| 1392 [int(x) for x in builds.keys() if "text" in builds[x] and | |
| 1393 cmp(builds[x]["text"][:2], ["build", "successful"]) == 0], | |
| 1394 reverse=True) | |
| 1395 if not successes: | |
| 1396 return "no successful builds" | |
| 1397 build = builds[str(successes[0])] | |
| 1398 step_names = set([step["name"] for step in build["steps"]]) | |
| 1399 compile_indicators = set(["compile", "compile (with patch)", "analyze"]) | |
| 1400 if compile_indicators & step_names: | |
| 1401 return "compiles" | |
| 1402 return "does not compile" | |
| 1403 | |
| 1404 def PrintCmd(self, cmd, env): | |
| 1405 if self.platform == 'win32': | |
| 1406 env_prefix = 'set ' | |
| 1407 env_quoter = QuoteForSet | |
| 1408 shell_quoter = QuoteForCmd | |
| 1409 else: | |
| 1410 env_prefix = '' | |
| 1411 env_quoter = pipes.quote | |
| 1412 shell_quoter = pipes.quote | |
| 1413 | |
| 1414 def print_env(var): | |
| 1415 if env and var in env: | |
| 1416 self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) | |
| 1417 | |
| 1418 print_env('GYP_CROSSCOMPILE') | |
| 1419 print_env('GYP_DEFINES') | |
| 1420 print_env('GYP_LINK_CONCURRENCY') | |
| 1421 print_env('LLVM_FORCE_HEAD_REVISION') | |
| 1422 | |
| 1423 if cmd[0] == self.executable: | |
| 1424 cmd = ['python'] + cmd[1:] | |
| 1425 self.Print(*[shell_quoter(arg) for arg in cmd]) | |
| 1426 | |
| 1427 def PrintJSON(self, obj): | |
| 1428 self.Print(json.dumps(obj, indent=2, sort_keys=True)) | |
| 1429 | |
| 1430 def Build(self, target): | |
| 1431 build_dir = self.ToSrcRelPath(self.args.path[0]) | |
| 1432 ninja_cmd = ['ninja', '-C', build_dir] | |
| 1433 if self.args.jobs: | |
| 1434 ninja_cmd.extend(['-j', '%d' % self.args.jobs]) | |
| 1435 ninja_cmd.append(target) | |
| 1436 ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False) | |
| 1437 return ret | |
| 1438 | |
| 1439 def Run(self, cmd, env=None, force_verbose=True, buffer_output=True): | |
| 1440 # This function largely exists so it can be overridden for testing. | |
| 1441 if self.args.dryrun or self.args.verbose or force_verbose: | |
| 1442 self.PrintCmd(cmd, env) | |
| 1443 if self.args.dryrun: | |
| 1444 return 0, '', '' | |
| 1445 | |
| 1446 ret, out, err = self.Call(cmd, env=env, buffer_output=buffer_output) | |
| 1447 if self.args.verbose or force_verbose: | |
| 1448 if ret: | |
| 1449 self.Print(' -> returned %d' % ret) | |
| 1450 if out: | |
| 1451 self.Print(out, end='') | |
| 1452 if err: | |
| 1453 self.Print(err, end='', file=sys.stderr) | |
| 1454 return ret, out, err | |
| 1455 | |
| 1456 def Call(self, cmd, env=None, buffer_output=True): | |
| 1457 if buffer_output: | |
| 1458 p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir, | |
| 1459 stdout=subprocess.PIPE, stderr=subprocess.PIPE, | |
| 1460 env=env) | |
| 1461 out, err = p.communicate() | |
| 1462 else: | |
| 1463 p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir, | |
| 1464 env=env) | |
| 1465 p.wait() | |
| 1466 out = err = '' | |
| 1467 return p.returncode, out, err | |
| 1468 | |
| 1469 def ExpandUser(self, path): | |
| 1470 # This function largely exists so it can be overridden for testing. | |
| 1471 return os.path.expanduser(path) | |
| 1472 | |
| 1473 def Exists(self, path): | |
| 1474 # This function largely exists so it can be overridden for testing. | |
| 1475 return os.path.exists(path) | |
| 1476 | |
| 1477 def Fetch(self, url): | |
| 1478 # This function largely exists so it can be overridden for testing. | |
| 1479 f = urllib2.urlopen(url) | |
| 1480 contents = f.read() | |
| 1481 f.close() | |
| 1482 return contents | |
| 1483 | |
| 1484 def MaybeMakeDirectory(self, path): | |
| 1485 try: | |
| 1486 os.makedirs(path) | |
| 1487 except OSError as e: | |
| 1488 if e.errno != errno.EEXIST: | |
| 1489 raise | |
| 1490 | |
| 1491 def PathJoin(self, *comps): | |
| 1492 # This function largely exists so it can be overridden for testing. | |
| 1493 return os.path.join(*comps) | |
| 1494 | |
| 1495 def Print(self, *args, **kwargs): | |
| 1496 # This function largely exists so it can be overridden for testing. | |
| 1497 print(*args, **kwargs) | |
| 1498 if kwargs.get('stream', sys.stdout) == sys.stdout: | |
| 1499 sys.stdout.flush() | |
| 1500 | |
| 1501 def ReadFile(self, path): | |
| 1502 # This function largely exists so it can be overridden for testing. | |
| 1503 with open(path) as fp: | |
| 1504 return fp.read() | |
| 1505 | |
| 1506 def RelPath(self, path, start='.'): | |
| 1507 # This function largely exists so it can be overridden for testing. | |
| 1508 return os.path.relpath(path, start) | |
| 1509 | |
| 1510 def RemoveFile(self, path): | |
| 1511 # This function largely exists so it can be overridden for testing. | |
| 1512 os.remove(path) | |
| 1513 | |
| 1514 def RemoveDirectory(self, abs_path): | |
| 1515 if self.platform == 'win32': | |
| 1516 # In other places in chromium, we often have to retry this command | |
| 1517 # because we're worried about other processes still holding on to | |
| 1518 # file handles, but when MB is invoked, it will be early enough in the | |
| 1519 # build that there should be no other processes to interfere. We | |
| 1520 # can change this if need be. | |
| 1521 self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path]) | |
| 1522 else: | |
| 1523 shutil.rmtree(abs_path, ignore_errors=True) | |
| 1524 | |
| 1525 def TempFile(self, mode='w'): | |
| 1526 # This function largely exists so it can be overridden for testing. | |
| 1527 return tempfile.NamedTemporaryFile(mode=mode, delete=False) | |
| 1528 | |
| 1529 def WriteFile(self, path, contents, force_verbose=False): | |
| 1530 # This function largely exists so it can be overridden for testing. | |
| 1531 if self.args.dryrun or self.args.verbose or force_verbose: | |
| 1532 self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path)) | |
| 1533 with open(path, 'w') as fp: | |
| 1534 return fp.write(contents) | |
| 1535 | |
| 1536 | |
| 1537 class MBErr(Exception): | |
| 1538 pass | |
| 1539 | |
| 1540 | |
| 1541 # See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful | |
| 1542 # details of this next section, which handles escaping command lines | |
| 1543 # so that they can be copied and pasted into a cmd window. | |
| 1544 UNSAFE_FOR_SET = set('^<>&|') | |
| 1545 UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%')) | |
| 1546 ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"')) | |
| 1547 | |
| 1548 | |
| 1549 def QuoteForSet(arg): | |
| 1550 if any(a in UNSAFE_FOR_SET for a in arg): | |
| 1551 arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg) | |
| 1552 return arg | |
| 1553 | |
| 1554 | |
| 1555 def QuoteForCmd(arg): | |
| 1556 # First, escape the arg so that CommandLineToArgvW will parse it properly. | |
| 1557 # From //tools/gyp/pylib/gyp/msvs_emulation.py:23. | |
| 1558 if arg == '' or ' ' in arg or '"' in arg: | |
| 1559 quote_re = re.compile(r'(\\*)"') | |
| 1560 arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)) | |
| 1561 | |
| 1562 # Then check to see if the arg contains any metacharacters other than | |
| 1563 # double quotes; if it does, quote everything (including the double | |
| 1564 # quotes) for safety. | |
| 1565 if any(a in UNSAFE_FOR_CMD for a in arg): | |
| 1566 arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg) | |
| 1567 return arg | |
| 1568 | |
| 1569 | |
| 1570 if __name__ == '__main__': | |
| 1571 sys.exit(main(sys.argv[1:])) | |