OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. | |
3 # | |
4 # Use of this source code is governed by a BSD-style license | |
5 # that can be found in the LICENSE file in the root of the source | |
6 # tree. An additional intellectual property rights grant can be found | |
7 # in the file PATENTS. All contributing project authors may | |
8 # be found in the AUTHORS file in the root of the source tree. | |
9 | |
10 """MB - the Meta-Build wrapper around GYP and GN | |
11 | |
12 MB is a wrapper script for GYP and GN that can be used to generate build files | |
13 for sets of canned configurations and analyze them. | |
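Example (generating build files for a builder defined in mb_config.pyl): | |
  % tools/mb/mb.py gen -m chromium.linux -b "Linux Builder" //out/Default | |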
14 """ | |
15 | |
16 from __future__ import print_function | |
17 | |
18 import argparse | |
19 import ast | |
20 import errno | |
21 import json | |
22 import os | |
23 import pipes | |
24 import pprint | |
25 import re | |
26 import shutil | |
27 import sys | |
28 import subprocess | |
29 import tempfile | |
30 import traceback | |
31 import urllib2 | |
32 | |
33 from collections import OrderedDict | |
34 | |
35 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) | |
36 SRC_DIR = os.path.dirname(os.path.dirname(SCRIPT_DIR)) | |
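# Make //build importable so the gn_helpers import below resolves against the source tree. | |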
37 sys.path = [os.path.join(SRC_DIR, 'build')] + sys.path | |
38 | |
39 import gn_helpers | |
40 | |
41 | |
42 def main(args): | |
43 mbw = MetaBuildWrapper() | |
44 return mbw.Main(args) | |
45 | |
46 | |
47 class MetaBuildWrapper(object): | |
48 def __init__(self): | |
49 self.src_dir = SRC_DIR | |
50 self.default_config = os.path.join(SCRIPT_DIR, 'mb_config.pyl') | |
51 self.default_isolate_map = os.path.join(SCRIPT_DIR, 'gn_isolate_map.pyl') | |
52 self.executable = sys.executable | |
53 self.platform = sys.platform | |
54 self.sep = os.sep | |
55 self.args = argparse.Namespace() | |
56 self.configs = {} | |
57 self.masters = {} | |
58 self.mixins = {} | |
59 | |
60 def Main(self, args): | |
61 self.ParseArgs(args) | |
62 try: | |
63 ret = self.args.func() | |
64 if ret: | |
65 self.DumpInputFiles() | |
66 return ret | |
67 except KeyboardInterrupt: | |
68 self.Print('interrupted, exiting') | |
69 return 130 | |
70 except Exception: | |
71 self.DumpInputFiles() | |
72 s = traceback.format_exc() | |
73 for l in s.splitlines(): | |
74 self.Print(l) | |
75 return 1 | |
76 | |
77 def ParseArgs(self, argv): | |
78 def AddCommonOptions(subp): | |
79 subp.add_argument('-b', '--builder', | |
80 help='builder name to look up config from') | |
81 subp.add_argument('-m', '--master', | |
82 help='master name to look up config from') | |
83 subp.add_argument('-c', '--config', | |
84 help='configuration to analyze') | |
85 subp.add_argument('--phase', | |
86 help='optional phase name (used when builders ' | |
87 'do multiple compiles with different ' | |
88 'arguments in a single build)') | |
89 subp.add_argument('-f', '--config-file', metavar='PATH', | |
90 default=self.default_config, | |
91 help='path to config file ' | |
92 '(default is %(default)s)') | |
93 subp.add_argument('-i', '--isolate-map-file', metavar='PATH', | |
94 default=self.default_isolate_map, | |
95 help='path to isolate map file ' | |
96 '(default is %(default)s)') | |
97 subp.add_argument('-g', '--goma-dir', | |
98 help='path to goma directory') | |
99 subp.add_argument('--gyp-script', metavar='PATH', | |
100 default=self.PathJoin('build', 'gyp_chromium'), | |
101 help='path to gyp script relative to project root ' | |
102 '(default is %(default)s)') | |
103 subp.add_argument('--android-version-code', | |
104 help='Sets GN arg android_default_version_code and ' | |
105 'GYP_DEFINE app_manifest_version_code') | |
106 subp.add_argument('--android-version-name', | |
107 help='Sets GN arg android_default_version_name and ' | |
108 'GYP_DEFINE app_manifest_version_name') | |
109 subp.add_argument('-n', '--dryrun', action='store_true', | |
110 help='Do a dry run (i.e., do nothing, just print ' | |
111 'the commands that will run)') | |
112 subp.add_argument('-v', '--verbose', action='store_true', | |
113 help='verbose logging') | |
114 | |
115 parser = argparse.ArgumentParser(prog='mb') | |
116 subps = parser.add_subparsers() | |
117 | |
118 subp = subps.add_parser('analyze', | |
119 help='analyze whether changes to a set of files ' | |
120 'will cause a set of binaries to be rebuilt.') | |
121 AddCommonOptions(subp) | |
122 subp.add_argument('path', nargs=1, | |
123 help='path build was generated into.') | |
124 subp.add_argument('input_path', nargs=1, | |
125 help='path to a file containing the input arguments ' | |
126 'as a JSON object.') | |
127 subp.add_argument('output_path', nargs=1, | |
128 help='path to a file containing the output arguments ' | |
129 'as a JSON object.') | |
130 subp.set_defaults(func=self.CmdAnalyze) | |
131 | |
132 subp = subps.add_parser('export', | |
133                             help='print out the expanded configuration for ' | |
134 'each builder as a JSON object') | |
135 subp.add_argument('-f', '--config-file', metavar='PATH', | |
136 default=self.default_config, | |
137 help='path to config file (default is %(default)s)') | |
138 subp.add_argument('-g', '--goma-dir', | |
139 help='path to goma directory') | |
140 subp.set_defaults(func=self.CmdExport) | |
141 | |
142 subp = subps.add_parser('gen', | |
143 help='generate a new set of build files') | |
144 AddCommonOptions(subp) | |
145 subp.add_argument('--swarming-targets-file', | |
146 help='save runtime dependencies for targets listed ' | |
147 'in file.') | |
148 subp.add_argument('path', nargs=1, | |
149 help='path to generate build into') | |
150 subp.set_defaults(func=self.CmdGen) | |
151 | |
152 subp = subps.add_parser('isolate', | |
153                             help='generate the .isolate files for a given ' | |
154 'binary') | |
155 AddCommonOptions(subp) | |
156 subp.add_argument('path', nargs=1, | |
157 help='path build was generated into') | |
158 subp.add_argument('target', nargs=1, | |
159 help='ninja target to generate the isolate for') | |
160 subp.set_defaults(func=self.CmdIsolate) | |
161 | |
162 subp = subps.add_parser('lookup', | |
163 help='look up the command for a given config or ' | |
164 'builder') | |
165 AddCommonOptions(subp) | |
166 subp.set_defaults(func=self.CmdLookup) | |
167 | |
168 subp = subps.add_parser( | |
169 'run', | |
170 help='build and run the isolated version of a ' | |
171 'binary', | |
172 formatter_class=argparse.RawDescriptionHelpFormatter) | |
173 subp.description = ( | |
174 'Build, isolate, and run the given binary with the command line\n' | |
175 'listed in the isolate. You may pass extra arguments after the\n' | |
176 'target; use "--" if the extra arguments need to include switches.\n' | |
177 '\n' | |
178 'Examples:\n' | |
179 '\n' | |
180 ' % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n' | |
181 ' //out/Default content_browsertests\n' | |
182 '\n' | |
183 ' % tools/mb/mb.py run out/Default content_browsertests\n' | |
184 '\n' | |
185 ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n' | |
186 ' --test-launcher-retry-limit=0' | |
187 '\n' | |
188 ) | |
189 | |
190 AddCommonOptions(subp) | |
191 subp.add_argument('-j', '--jobs', dest='jobs', type=int, | |
192 help='Number of jobs to pass to ninja') | |
193 subp.add_argument('--no-build', dest='build', default=True, | |
194 action='store_false', | |
195 help='Do not build, just isolate and run') | |
196 subp.add_argument('path', nargs=1, | |
197 help=('path to generate build into (or use).' | |
198 ' This can be either a regular path or a ' | |
199 'GN-style source-relative path like ' | |
200 '//out/Default.')) | |
201 subp.add_argument('target', nargs=1, | |
202 help='ninja target to build and run') | |
203 subp.add_argument('extra_args', nargs='*', | |
204 help=('extra args to pass to the isolate to run. Use ' | |
205 '"--" as the first arg if you need to pass ' | |
206 'switches')) | |
207 subp.set_defaults(func=self.CmdRun) | |
208 | |
209 subp = subps.add_parser('validate', | |
210 help='validate the config file') | |
211 subp.add_argument('-f', '--config-file', metavar='PATH', | |
212 default=self.default_config, | |
213 help='path to config file (default is %(default)s)') | |
214 subp.set_defaults(func=self.CmdValidate) | |
215 | |
216 subp = subps.add_parser('audit', | |
217 help='Audit the config file to track progress') | |
218 subp.add_argument('-f', '--config-file', metavar='PATH', | |
219 default=self.default_config, | |
220 help='path to config file (default is %(default)s)') | |
221 subp.add_argument('-i', '--internal', action='store_true', | |
222 help='check internal masters also') | |
223 subp.add_argument('-m', '--master', action='append', | |
224 help='master to audit (default is all non-internal ' | |
225 'masters in file)') | |
226 subp.add_argument('-u', '--url-template', action='store', | |
227 default='https://build.chromium.org/p/' | |
228 '{master}/json/builders', | |
229 help='URL scheme for JSON APIs to buildbot ' | |
230 '(default: %(default)s) ') | |
231 subp.add_argument('-c', '--check-compile', action='store_true', | |
232 help='check whether tbd and master-only bots actually' | |
233 ' do compiles') | |
234 subp.set_defaults(func=self.CmdAudit) | |
235 | |
236 subp = subps.add_parser('help', | |
237 help='Get help on a subcommand.') | |
238 subp.add_argument(nargs='?', action='store', dest='subcommand', | |
239 help='The command to get help for.') | |
240 subp.set_defaults(func=self.CmdHelp) | |
241 | |
242 self.args = parser.parse_args(argv) | |
243 | |
244 def DumpInputFiles(self): | |
245 | |
246 def DumpContentsOfFilePassedTo(arg_name, path): | |
247 if path and self.Exists(path): | |
248 self.Print("\n# To recreate the file passed to %s:" % arg_name) | |
249 self.Print("%% cat > %s <<EOF" % path) | |
250 contents = self.ReadFile(path) | |
251 self.Print(contents) | |
252 self.Print("EOF\n%\n") | |
253 | |
254 if getattr(self.args, 'input_path', None): | |
255 DumpContentsOfFilePassedTo( | |
256 'argv[0] (input_path)', self.args.input_path[0]) | |
257 if getattr(self.args, 'swarming_targets_file', None): | |
258 DumpContentsOfFilePassedTo( | |
259 '--swarming-targets-file', self.args.swarming_targets_file) | |
260 | |
261 def CmdAnalyze(self): | |
262 vals = self.Lookup() | |
263 self.ClobberIfNeeded(vals) | |
264 if vals['type'] == 'gn': | |
265 return self.RunGNAnalyze(vals) | |
266 else: | |
267 return self.RunGYPAnalyze(vals) | |
268 | |
269 def CmdExport(self): | |
270 self.ReadConfigFile() | |
271 obj = {} | |
272 for master, builders in self.masters.items(): | |
273 obj[master] = {} | |
274 for builder in builders: | |
275 config = self.masters[master][builder] | |
276 if not config: | |
277 continue | |
278 | |
279 if isinstance(config, dict): | |
280 args = {k: self.FlattenConfig(v)['gn_args'] | |
281 for k, v in config.items()} | |
282 elif config.startswith('//'): | |
283 args = config | |
284 else: | |
285 args = self.FlattenConfig(config)['gn_args'] | |
286 if 'error' in args: | |
287 continue | |
288 | |
289 obj[master][builder] = args | |
290 | |
291 # Dump object and trim trailing whitespace. | |
292 s = '\n'.join(l.rstrip() for l in | |
293 json.dumps(obj, sort_keys=True, indent=2).splitlines()) | |
294 self.Print(s) | |
295 return 0 | |
296 | |
297 def CmdGen(self): | |
298 vals = self.Lookup() | |
299 self.ClobberIfNeeded(vals) | |
300 if vals['type'] == 'gn': | |
301 return self.RunGNGen(vals) | |
302 else: | |
303 return self.RunGYPGen(vals) | |
304 | |
305 def CmdHelp(self): | |
306 if self.args.subcommand: | |
307 self.ParseArgs([self.args.subcommand, '--help']) | |
308 else: | |
309 self.ParseArgs(['--help']) | |
310 | |
311 def CmdIsolate(self): | |
312 vals = self.GetConfig() | |
313 if not vals: | |
314 return 1 | |
315 | |
316 if vals['type'] == 'gn': | |
317 return self.RunGNIsolate(vals) | |
318 else: | |
319 return self.Build('%s_run' % self.args.target[0]) | |
320 | |
321 def CmdLookup(self): | |
322 vals = self.Lookup() | |
323 if vals['type'] == 'gn': | |
324 cmd = self.GNCmd('gen', '_path_') | |
325 gn_args = self.GNArgs(vals) | |
326 self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args) | |
327 env = None | |
328 else: | |
329 cmd, env = self.GYPCmd('_path_', vals) | |
330 | |
331 self.PrintCmd(cmd, env) | |
332 return 0 | |
333 | |
334 def CmdRun(self): | |
335 vals = self.GetConfig() | |
336 if not vals: | |
337 return 1 | |
338 | |
339 build_dir = self.args.path[0] | |
340 target = self.args.target[0] | |
341 | |
342 if vals['type'] == 'gn': | |
343 if self.args.build: | |
344 ret = self.Build(target) | |
345 if ret: | |
346 return ret | |
347 ret = self.RunGNIsolate(vals) | |
348 if ret: | |
349 return ret | |
350 else: | |
351 ret = self.Build('%s_run' % target) | |
352 if ret: | |
353 return ret | |
354 | |
355 cmd = [ | |
356 self.executable, | |
357 self.PathJoin('tools', 'swarming_client', 'isolate.py'), | |
358 'run', | |
359 '-s', | |
360 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), | |
361 ] | |
362 if self.args.extra_args: | |
363 cmd += ['--'] + self.args.extra_args | |
364 | |
365 ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False) | |
366 | |
367 return ret | |
368 | |
369 def CmdValidate(self, print_ok=True): | |
370 errs = [] | |
371 | |
372 # Read the file to make sure it parses. | |
373 self.ReadConfigFile() | |
374 | |
375 # Build a list of all of the configs referenced by builders. | |
376 all_configs = {} | |
377 for master in self.masters: | |
378 for config in self.masters[master].values(): | |
379 if isinstance(config, dict): | |
380 for c in config.values(): | |
381 all_configs[c] = master | |
382 else: | |
383 all_configs[config] = master | |
384 | |
385 # Check that every referenced args file or config actually exists. | |
386 for config, loc in all_configs.items(): | |
387 if config.startswith('//'): | |
388 if not self.Exists(self.ToAbsPath(config)): | |
389 errs.append('Unknown args file "%s" referenced from "%s".' % | |
390 (config, loc)) | |
391 elif not config in self.configs: | |
392 errs.append('Unknown config "%s" referenced from "%s".' % | |
393 (config, loc)) | |
394 | |
395 # Check that every actual config is actually referenced. | |
396 for config in self.configs: | |
397 if not config in all_configs: | |
398 errs.append('Unused config "%s".' % config) | |
399 | |
400 # Figure out the whole list of mixins, and check that every mixin | |
401 # listed by a config or another mixin actually exists. | |
402 referenced_mixins = set() | |
403 for config, mixins in self.configs.items(): | |
404 for mixin in mixins: | |
405 if not mixin in self.mixins: | |
406 errs.append('Unknown mixin "%s" referenced by config "%s".' % | |
407 (mixin, config)) | |
408 referenced_mixins.add(mixin) | |
409 | |
410 for mixin in self.mixins: | |
411 for sub_mixin in self.mixins[mixin].get('mixins', []): | |
412 if not sub_mixin in self.mixins: | |
413 errs.append('Unknown mixin "%s" referenced by mixin "%s".' % | |
414 (sub_mixin, mixin)) | |
415 referenced_mixins.add(sub_mixin) | |
416 | |
417 # Check that every mixin defined is actually referenced somewhere. | |
418 for mixin in self.mixins: | |
419 if not mixin in referenced_mixins: | |
420 errs.append('Unreferenced mixin "%s".' % mixin) | |
421 | |
422 if errs: | |
423 raise MBErr(('mb config file %s has problems:' % self.args.config_file) + | |
424 '\n ' + '\n '.join(errs)) | |
425 | |
426 if print_ok: | |
427 self.Print('mb config file %s looks ok.' % self.args.config_file) | |
428 return 0 | |
429 | |
430 def CmdAudit(self): | |
431 """Track the progress of the GYP->GN migration on the bots.""" | |
432 | |
433 # First, make sure the config file is okay, but don't print anything | |
434 # if it is (it will throw an error if it isn't). | |
435 self.CmdValidate(print_ok=False) | |
436 | |
437 stats = OrderedDict() | |
438 STAT_MASTER_ONLY = 'Master only' | |
439 STAT_CONFIG_ONLY = 'Config only' | |
440 STAT_TBD = 'Still TBD' | |
441 STAT_GYP = 'Still GYP' | |
442 STAT_DONE = 'Done (on GN)' | |
443 stats[STAT_MASTER_ONLY] = 0 | |
444 stats[STAT_CONFIG_ONLY] = 0 | |
445 stats[STAT_TBD] = 0 | |
446 stats[STAT_GYP] = 0 | |
447 stats[STAT_DONE] = 0 | |
448 | |
449 def PrintBuilders(heading, builders, notes): | |
450 stats.setdefault(heading, 0) | |
451 stats[heading] += len(builders) | |
452 if builders: | |
453 self.Print(' %s:' % heading) | |
454 for builder in sorted(builders): | |
455 self.Print(' %s%s' % (builder, notes[builder])) | |
456 | |
457 self.ReadConfigFile() | |
458 | |
459 masters = self.args.master or self.masters | |
460 for master in sorted(masters): | |
461 url = self.args.url_template.replace('{master}', master) | |
462 | |
463 self.Print('Auditing %s' % master) | |
464 | |
465 MASTERS_TO_SKIP = ( | |
466 'client.skia', | |
467 'client.v8.fyi', | |
468 'tryserver.v8', | |
469 ) | |
470 if master in MASTERS_TO_SKIP: | |
471 # Skip these bots because converting them is the responsibility of | |
472 # those teams and out of scope for the Chromium migration to GN. | |
473 self.Print(' Skipped (out of scope)') | |
474 self.Print('') | |
475 continue | |
476 | |
477 INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous', | |
478 'internal.client.kitchensync') | |
479 if master in INTERNAL_MASTERS and not self.args.internal: | |
480 # Skip these because the servers aren't accessible by default ... | |
481 self.Print(' Skipped (internal)') | |
482 self.Print('') | |
483 continue | |
484 | |
485 try: | |
486 # Fetch the /builders contents from the buildbot master. The | |
487 # keys of the dict are the builder names themselves. | |
488 json_contents = self.Fetch(url) | |
489 d = json.loads(json_contents) | |
490 except Exception as e: | |
491 self.Print(str(e)) | |
492 return 1 | |
493 | |
494 config_builders = set(self.masters[master]) | |
495 master_builders = set(d.keys()) | |
496 both = master_builders & config_builders | |
497 master_only = master_builders - config_builders | |
498 config_only = config_builders - master_builders | |
499 tbd = set() | |
500 gyp = set() | |
501 done = set() | |
502 notes = {builder: '' for builder in config_builders | master_builders} | |
503 | |
504 for builder in both: | |
505 config = self.masters[master][builder] | |
506 if config == 'tbd': | |
507 tbd.add(builder) | |
508 elif isinstance(config, dict): | |
509 vals = self.FlattenConfig(config.values()[0]) | |
510 if vals['type'] == 'gyp': | |
511 gyp.add(builder) | |
512 else: | |
513 done.add(builder) | |
514 elif config.startswith('//'): | |
515 done.add(builder) | |
516 else: | |
517 vals = self.FlattenConfig(config) | |
518 if vals['type'] == 'gyp': | |
519 gyp.add(builder) | |
520 else: | |
521 done.add(builder) | |
522 | |
523 if self.args.check_compile and (tbd or master_only): | |
524 either = tbd | master_only | |
525 for builder in either: | |
526           notes[builder] = ' (' + self.CheckCompile(master, builder) + ')' | |
527 | |
528 if master_only or config_only or tbd or gyp: | |
529 PrintBuilders(STAT_MASTER_ONLY, master_only, notes) | |
530 PrintBuilders(STAT_CONFIG_ONLY, config_only, notes) | |
531 PrintBuilders(STAT_TBD, tbd, notes) | |
532 PrintBuilders(STAT_GYP, gyp, notes) | |
533 else: | |
534 self.Print(' All GN!') | |
535 | |
536 stats[STAT_DONE] += len(done) | |
537 | |
538 self.Print('') | |
539 | |
540 fmt = '{:<27} {:>4}' | |
541 self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values())))) | |
542 self.Print(fmt.format('-' * 27, '----')) | |
543 for stat, count in stats.items(): | |
544 self.Print(fmt.format(stat, str(count))) | |
545 | |
546 return 0 | |
547 | |
548 def GetConfig(self): | |
549 build_dir = self.args.path[0] | |
550 | |
551 vals = self.DefaultVals() | |
552 if self.args.builder or self.args.master or self.args.config: | |
553 vals = self.Lookup() | |
554 if vals['type'] == 'gn': | |
555 # Re-run gn gen in order to ensure the config is consistent with the | |
556 # build dir. | |
557 self.RunGNGen(vals) | |
558 return vals | |
559 | |
560 mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type') | |
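    # No mb_type record in the build dir: if it at least contains a toolchain.ninja, | |
    # assume it is a GN build. | |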
561 if not self.Exists(mb_type_path): | |
562 toolchain_path = self.PathJoin(self.ToAbsPath(build_dir), | |
563 'toolchain.ninja') | |
564 if not self.Exists(toolchain_path): | |
565 self.Print('Must either specify a path to an existing GN build dir ' | |
566 'or pass in a -m/-b pair or a -c flag to specify the ' | |
567 'configuration') | |
568 return {} | |
569 else: | |
570 mb_type = 'gn' | |
571 else: | |
572 mb_type = self.ReadFile(mb_type_path).strip() | |
573 | |
574 if mb_type == 'gn': | |
575 vals['gn_args'] = self.GNArgsFromDir(build_dir) | |
576 vals['type'] = mb_type | |
577 | |
578 return vals | |
579 | |
580 def GNArgsFromDir(self, build_dir): | |
581 args_contents = "" | |
582 gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn') | |
583 if self.Exists(gn_args_path): | |
584 args_contents = self.ReadFile(gn_args_path) | |
585 gn_args = [] | |
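    # Each line of args.gn has the form "name = value"; re-join it as a single "name=value" token. | |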
586 for l in args_contents.splitlines(): | |
587 fields = l.split(' ') | |
588 name = fields[0] | |
589 val = ' '.join(fields[2:]) | |
590 gn_args.append('%s=%s' % (name, val)) | |
591 | |
592 return ' '.join(gn_args) | |
593 | |
594 def Lookup(self): | |
595 vals = self.ReadIOSBotConfig() | |
596 if not vals: | |
597 self.ReadConfigFile() | |
598 config = self.ConfigFromArgs() | |
599 if config.startswith('//'): | |
600 if not self.Exists(self.ToAbsPath(config)): | |
601 raise MBErr('args file "%s" not found' % config) | |
602 vals = self.DefaultVals() | |
603 vals['args_file'] = config | |
604 else: | |
605 if not config in self.configs: | |
606 raise MBErr('Config "%s" not found in %s' % | |
607 (config, self.args.config_file)) | |
608 vals = self.FlattenConfig(config) | |
609 | |
610 # Do some basic sanity checking on the config so that we | |
611 # don't have to do this in every caller. | |
612 if 'type' not in vals: | |
613 vals['type'] = 'gn' | |
614 assert vals['type'] in ('gn', 'gyp'), ( | |
615         'Unknown meta-build type "%s"' % vals['type']) | |
616 | |
617 return vals | |
618 | |
619 def ReadIOSBotConfig(self): | |
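    # iOS bots are configured via per-builder JSON files under //tools-webrtc/ios/ rather than | |
    # mb_config.pyl; return {} so the caller falls back to the config file when no such file exists. | |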
620 if not self.args.master or not self.args.builder: | |
621 return {} | |
622 path = self.PathJoin(self.src_dir, 'tools-webrtc', 'ios', | |
623 self.args.master, | |
624 self.args.builder.replace(' ', '_') + '.json') | |
625 if not self.Exists(path): | |
626 return {} | |
627 | |
628 contents = json.loads(self.ReadFile(path)) | |
629 gyp_vals = contents.get('GYP_DEFINES', {}) | |
630 if isinstance(gyp_vals, dict): | |
631 gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items()) | |
632 else: | |
633 gyp_defines = ' '.join(gyp_vals) | |
634 gn_args = ' '.join(contents.get('gn_args', [])) | |
635 | |
636 vals = self.DefaultVals() | |
637 vals['gn_args'] = gn_args | |
638 vals['gyp_defines'] = gyp_defines | |
639 vals['type'] = contents.get('mb_type', 'gn') | |
640 return vals | |
641 | |
642 def ReadConfigFile(self): | |
643 if not self.Exists(self.args.config_file): | |
644 raise MBErr('config file not found at %s' % self.args.config_file) | |
645 | |
646 try: | |
647 contents = ast.literal_eval(self.ReadFile(self.args.config_file)) | |
648 except SyntaxError as e: | |
649 raise MBErr('Failed to parse config file "%s": %s' % | |
650 (self.args.config_file, e)) | |
651 | |
652 self.configs = contents['configs'] | |
653 self.masters = contents['masters'] | |
654 self.mixins = contents['mixins'] | |
655 | |
656 def ReadIsolateMap(self): | |
657 if not self.Exists(self.args.isolate_map_file): | |
658 raise MBErr('isolate map file not found at %s' % | |
659 self.args.isolate_map_file) | |
660 try: | |
661 return ast.literal_eval(self.ReadFile(self.args.isolate_map_file)) | |
662 except SyntaxError as e: | |
663 raise MBErr('Failed to parse isolate map file "%s": %s' % | |
664 (self.args.isolate_map_file, e)) | |
665 | |
666 def ConfigFromArgs(self): | |
667 if self.args.config: | |
668 if self.args.master or self.args.builder: | |
669         raise MBErr('Cannot specify both -c/--config and -m/--master or ' | |
670 '-b/--builder') | |
671 | |
672 return self.args.config | |
673 | |
674 if not self.args.master or not self.args.builder: | |
675 raise MBErr('Must specify either -c/--config or ' | |
676 '(-m/--master and -b/--builder)') | |
677 | |
678 if not self.args.master in self.masters: | |
679 raise MBErr('Master name "%s" not found in "%s"' % | |
680 (self.args.master, self.args.config_file)) | |
681 | |
682 if not self.args.builder in self.masters[self.args.master]: | |
683 raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' % | |
684 (self.args.builder, self.args.master, self.args.config_file)) | |
685 | |
686 config = self.masters[self.args.master][self.args.builder] | |
687 if isinstance(config, dict): | |
688 if self.args.phase is None: | |
689 raise MBErr('Must specify a build --phase for %s on %s' % | |
690 (self.args.builder, self.args.master)) | |
691 phase = str(self.args.phase) | |
692 if phase not in config: | |
693 raise MBErr('Phase %s doesn\'t exist for %s on %s' % | |
694 (phase, self.args.builder, self.args.master)) | |
695 return config[phase] | |
696 | |
697 if self.args.phase is not None: | |
698 raise MBErr('Must not specify a build --phase for %s on %s' % | |
699 (self.args.builder, self.args.master)) | |
700 return config | |
701 | |
702 def FlattenConfig(self, config): | |
703 mixins = self.configs[config] | |
704 vals = self.DefaultVals() | |
705 | |
706 visited = [] | |
707 self.FlattenMixins(mixins, vals, visited) | |
708 return vals | |
709 | |
710 def DefaultVals(self): | |
711 return { | |
712 'args_file': '', | |
713 'cros_passthrough': False, | |
714 'gn_args': '', | |
715 'gyp_defines': '', | |
716 'gyp_crosscompile': False, | |
717 'type': 'gn', | |
718 } | |
719 | |
720 def FlattenMixins(self, mixins, vals, visited): | |
721 for m in mixins: | |
722 if m not in self.mixins: | |
723 raise MBErr('Unknown mixin "%s"' % m) | |
724 | |
725 visited.append(m) | |
726 | |
727 mixin_vals = self.mixins[m] | |
728 | |
729 if 'cros_passthrough' in mixin_vals: | |
730 vals['cros_passthrough'] = mixin_vals['cros_passthrough'] | |
731 if 'gn_args' in mixin_vals: | |
732 if vals['gn_args']: | |
733 vals['gn_args'] += ' ' + mixin_vals['gn_args'] | |
734 else: | |
735 vals['gn_args'] = mixin_vals['gn_args'] | |
736 if 'gyp_crosscompile' in mixin_vals: | |
737 vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile'] | |
738 if 'gyp_defines' in mixin_vals: | |
739 if vals['gyp_defines']: | |
740 vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines'] | |
741 else: | |
742 vals['gyp_defines'] = mixin_vals['gyp_defines'] | |
743 if 'type' in mixin_vals: | |
744 vals['type'] = mixin_vals['type'] | |
745 | |
746 if 'mixins' in mixin_vals: | |
747 self.FlattenMixins(mixin_vals['mixins'], vals, visited) | |
748 return vals | |
749 | |
750 def ClobberIfNeeded(self, vals): | |
751 path = self.args.path[0] | |
752 build_dir = self.ToAbsPath(path) | |
753 mb_type_path = self.PathJoin(build_dir, 'mb_type') | |
754 needs_clobber = False | |
755 new_mb_type = vals['type'] | |
756 if self.Exists(build_dir): | |
757 if self.Exists(mb_type_path): | |
758 old_mb_type = self.ReadFile(mb_type_path) | |
759 if old_mb_type != new_mb_type: | |
760 self.Print("Build type mismatch: was %s, will be %s, clobbering %s" % | |
761 (old_mb_type, new_mb_type, path)) | |
762 needs_clobber = True | |
763 else: | |
764 # There is no 'mb_type' file in the build directory, so this probably | |
765 # means that the prior build(s) were not done through mb, and we | |
766 # have no idea if this was a GYP build or a GN build. Clobber it | |
767 # to be safe. | |
768 self.Print("%s/mb_type missing, clobbering to be safe" % path) | |
769 needs_clobber = True | |
770 | |
771 if self.args.dryrun: | |
772 return | |
773 | |
774 if needs_clobber: | |
775 self.RemoveDirectory(build_dir) | |
776 | |
777 self.MaybeMakeDirectory(build_dir) | |
778 self.WriteFile(mb_type_path, new_mb_type) | |
779 | |
780 def RunGNGen(self, vals): | |
781 build_dir = self.args.path[0] | |
782 | |
783 cmd = self.GNCmd('gen', build_dir, '--check') | |
784 gn_args = self.GNArgs(vals) | |
785 | |
786 # Since GN hasn't run yet, the build directory may not even exist. | |
787 self.MaybeMakeDirectory(self.ToAbsPath(build_dir)) | |
788 | |
789 gn_args_path = self.ToAbsPath(build_dir, 'args.gn') | |
790 self.WriteFile(gn_args_path, gn_args, force_verbose=True) | |
791 | |
792 swarming_targets = [] | |
793 if getattr(self.args, 'swarming_targets_file', None): | |
794 # We need GN to generate the list of runtime dependencies for | |
795 # the compile targets listed (one per line) in the file so | |
796 # we can run them via swarming. We use gn_isolate_map.pyl to convert | |
797 # the compile targets to the matching GN labels. | |
798 path = self.args.swarming_targets_file | |
799 if not self.Exists(path): | |
800 self.WriteFailureAndRaise('"%s" does not exist' % path, | |
801 output_path=None) | |
802 contents = self.ReadFile(path) | |
803 swarming_targets = set(contents.splitlines()) | |
804 | |
805 isolate_map = self.ReadIsolateMap() | |
806 err, labels = self.MapTargetsToLabels(isolate_map, swarming_targets) | |
807 if err: | |
808 raise MBErr(err) | |
809 | |
810 gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps') | |
811 self.WriteFile(gn_runtime_deps_path, '\n'.join(labels) + '\n') | |
812 cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path) | |
813 | |
814 ret, _, _ = self.Run(cmd) | |
815 if ret: | |
816 # If `gn gen` failed, we should exit early rather than trying to | |
817 # generate isolates. Run() will have already logged any error output. | |
818 self.Print('GN gen failed: %d' % ret) | |
819 return ret | |
820 | |
821 android = 'target_os="android"' in vals['gn_args'] | |
822 for target in swarming_targets: | |
823 if android: | |
824 # Android targets may be either android_apk or executable. The former | |
825 # will result in runtime_deps associated with the stamp file, while the | |
826 # latter will result in runtime_deps associated with the executable. | |
827 label = isolate_map[target]['label'] | |
828 runtime_deps_targets = [ | |
829 target + '.runtime_deps', | |
830 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] | |
831 elif isolate_map[target]['type'] == 'gpu_browser_test': | |
832 if self.platform == 'win32': | |
833 runtime_deps_targets = ['browser_tests.exe.runtime_deps'] | |
834 else: | |
835 runtime_deps_targets = ['browser_tests.runtime_deps'] | |
836 elif (isolate_map[target]['type'] == 'script' or | |
837 isolate_map[target].get('label_type') == 'group'): | |
838 # For script targets, the build target is usually a group, | |
839 # for which gn generates the runtime_deps next to the stamp file | |
840 # for the label, which lives under the obj/ directory, but it may | |
841 # also be an executable. | |
842 label = isolate_map[target]['label'] | |
843 runtime_deps_targets = [ | |
844 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] | |
845 if self.platform == 'win32': | |
846 runtime_deps_targets += [ target + '.exe.runtime_deps' ] | |
847 else: | |
848 runtime_deps_targets += [ target + '.runtime_deps' ] | |
849 elif self.platform == 'win32': | |
850 runtime_deps_targets = [target + '.exe.runtime_deps'] | |
851 else: | |
852 runtime_deps_targets = [target + '.runtime_deps'] | |
853 | |
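      # Use the first runtime_deps file GN actually produced; the for/else raises if none exist. | |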
854 for r in runtime_deps_targets: | |
855 runtime_deps_path = self.ToAbsPath(build_dir, r) | |
856 if self.Exists(runtime_deps_path): | |
857 break | |
858 else: | |
859 raise MBErr('did not generate any of %s' % | |
860 ', '.join(runtime_deps_targets)) | |
861 | |
862 command, extra_files = self.GetIsolateCommand(target, vals) | |
863 | |
864 runtime_deps = self.ReadFile(runtime_deps_path).splitlines() | |
865 | |
866 self.WriteIsolateFiles(build_dir, command, target, runtime_deps, | |
867 extra_files) | |
868 | |
869 return 0 | |
870 | |
871 def RunGNIsolate(self, vals): | |
872 target = self.args.target[0] | |
873 isolate_map = self.ReadIsolateMap() | |
874 err, labels = self.MapTargetsToLabels(isolate_map, [target]) | |
875 if err: | |
876 raise MBErr(err) | |
877 label = labels[0] | |
878 | |
879 build_dir = self.args.path[0] | |
880 command, extra_files = self.GetIsolateCommand(target, vals) | |
881 | |
882 cmd = self.GNCmd('desc', build_dir, label, 'runtime_deps') | |
883 ret, out, _ = self.Call(cmd) | |
884 if ret: | |
885 if out: | |
886 self.Print(out) | |
887 return ret | |
888 | |
889 runtime_deps = out.splitlines() | |
890 | |
891 self.WriteIsolateFiles(build_dir, command, target, runtime_deps, | |
892 extra_files) | |
893 | |
894 ret, _, _ = self.Run([ | |
895 self.executable, | |
896 self.PathJoin('tools', 'swarming_client', 'isolate.py'), | |
897 'check', | |
898 '-i', | |
899 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), | |
900 '-s', | |
901 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))], | |
902 buffer_output=False) | |
903 | |
904 return ret | |
905 | |
906 def WriteIsolateFiles(self, build_dir, command, target, runtime_deps, | |
907 extra_files): | |
908 isolate_path = self.ToAbsPath(build_dir, target + '.isolate') | |
909 self.WriteFile(isolate_path, | |
910 pprint.pformat({ | |
911 'variables': { | |
912 'command': command, | |
913 'files': sorted(runtime_deps + extra_files), | |
914 } | |
915 }) + '\n') | |
916 | |
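    # Also write the <target>.isolated.gen.json file recording where the .isolate and | |
    # .isolated files for this target live. | |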
917 self.WriteJSON( | |
918 { | |
919 'args': [ | |
920 '--isolated', | |
921 self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)), | |
922 '--isolate', | |
923 self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)), | |
924 ], | |
925 'dir': self.src_dir, | |
926 'version': 1, | |
927 }, | |
928 isolate_path + 'd.gen.json', | |
929 ) | |
930 | |
931 def MapTargetsToLabels(self, isolate_map, targets): | |
932 labels = [] | |
933 err = '' | |
934 | |
935 def StripTestSuffixes(target): | |
936 for suffix in ('_apk_run', '_apk', '_run'): | |
937 if target.endswith(suffix): | |
938 return target[:-len(suffix)], suffix | |
939 return None, None | |
940 | |
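    # Ninja targets may carry _apk/_apk_run/_run suffixes; look the stripped name up in the | |
    # isolate map and re-append the suffix to the resulting GN label. | |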
941 for target in targets: | |
942 if target == 'all': | |
943 labels.append(target) | |
944 elif target.startswith('//'): | |
945 labels.append(target) | |
946 else: | |
947 if target in isolate_map: | |
948 stripped_target, suffix = target, '' | |
949 else: | |
950 stripped_target, suffix = StripTestSuffixes(target) | |
951 if stripped_target in isolate_map: | |
952 if isolate_map[stripped_target]['type'] == 'unknown': | |
953 err += ('test target "%s" type is unknown\n' % target) | |
954 else: | |
955 labels.append(isolate_map[stripped_target]['label'] + suffix) | |
956 else: | |
957           err += ('target "%s" not found in %s\n' % | |
958                   (target, self.args.isolate_map_file)) | |
959 | |
960 return err, labels | |
961 | |
962 def GNCmd(self, subcommand, path, *args): | |
963 if self.platform == 'linux2': | |
964 subdir, exe = 'linux64', 'gn' | |
965 elif self.platform == 'darwin': | |
966 subdir, exe = 'mac', 'gn' | |
967 else: | |
968 subdir, exe = 'win', 'gn.exe' | |
969 | |
970 gn_path = self.PathJoin(self.src_dir, 'buildtools', subdir, exe) | |
971 return [gn_path, subcommand, path] + list(args) | |
972 | |
973 | |
974 def GNArgs(self, vals): | |
975 if vals['cros_passthrough']: | |
976 if not 'GN_ARGS' in os.environ: | |
977 raise MBErr('MB is expecting GN_ARGS to be in the environment') | |
978 gn_args = os.environ['GN_ARGS'] | |
979 if not re.search('target_os.*=.*"chromeos"', gn_args): | |
980 raise MBErr('GN_ARGS is missing target_os = "chromeos": (GN_ARGS=%s)' % | |
981 gn_args) | |
982 else: | |
983 gn_args = vals['gn_args'] | |
984 | |
985 if self.args.goma_dir: | |
986 gn_args += ' goma_dir="%s"' % self.args.goma_dir | |
987 | |
988 android_version_code = self.args.android_version_code | |
989 if android_version_code: | |
990 gn_args += ' android_default_version_code="%s"' % android_version_code | |
991 | |
992 android_version_name = self.args.android_version_name | |
993 if android_version_name: | |
994 gn_args += ' android_default_version_name="%s"' % android_version_name | |
995 | |
996 # Canonicalize the arg string into a sorted, newline-separated list | |
997 # of key-value pairs, and de-dup the keys if need be so that only | |
998 # the last instance of each arg is listed. | |
999 gn_args = gn_helpers.ToGNString(gn_helpers.FromGNArgs(gn_args)) | |
1000 | |
1001 args_file = vals.get('args_file', None) | |
1002 if args_file: | |
1003 gn_args = ('import("%s")\n' % vals['args_file']) + gn_args | |
1004 return gn_args | |
1005 | |
1006 def RunGYPGen(self, vals): | |
1007 path = self.args.path[0] | |
1008 | |
1009 output_dir = self.ParseGYPConfigPath(path) | |
1010 cmd, env = self.GYPCmd(output_dir, vals) | |
1011 ret, _, _ = self.Run(cmd, env=env) | |
1012 return ret | |
1013 | |
1014 def RunGYPAnalyze(self, vals): | |
1015 output_dir = self.ParseGYPConfigPath(self.args.path[0]) | |
1016 if self.args.verbose: | |
1017 inp = self.ReadInputJSON(['files', 'test_targets', | |
1018 'additional_compile_targets']) | |
1019 self.Print() | |
1020 self.Print('analyze input:') | |
1021 self.PrintJSON(inp) | |
1022 self.Print() | |
1023 | |
1024 cmd, env = self.GYPCmd(output_dir, vals) | |
1025 cmd.extend(['-f', 'analyzer', | |
1026 '-G', 'config_path=%s' % self.args.input_path[0], | |
1027 '-G', 'analyzer_output_path=%s' % self.args.output_path[0]]) | |
1028 ret, _, _ = self.Run(cmd, env=env) | |
1029 if not ret and self.args.verbose: | |
1030 outp = json.loads(self.ReadFile(self.args.output_path[0])) | |
1031 self.Print() | |
1032 self.Print('analyze output:') | |
1033 self.PrintJSON(outp) | |
1034 self.Print() | |
1035 | |
1036 return ret | |
1037 | |
1038 def GetIsolateCommand(self, target, vals): | |
1039 isolate_map = self.ReadIsolateMap() | |
1040 test_type = isolate_map[target]['type'] | |
1041 | |
1042 android = 'target_os="android"' in vals['gn_args'] | |
1043 is_linux = self.platform == 'linux2' and not android | |
1044 | |
1045 if test_type == 'nontest': | |
1046 self.WriteFailureAndRaise('We should not be isolating %s.' % target, | |
1047 output_path=None) | |
1048 if test_type not in ('console_test_launcher', 'windowed_test_launcher', | |
1049 'non_parallel_console_test_launcher', | |
1050 'additional_compile_target', 'junit_test'): | |
1051 self.WriteFailureAndRaise('No command line for %s found (test type %s).' | |
1052 % (target, test_type), output_path=None) | |
1053 | |
1054 cmdline = [] | |
1055 extra_files = [] | |
1056 | |
1057 if android: | |
1058 cmdline = ['../../build/android/test_wrapper/logdog_wrapper.py', | |
1059 '--target', target, | |
1060 '--logdog-bin-cmd', '../../bin/logdog_butler'] | |
1061 if test_type != 'junit_test': | |
1062 cmdline += ['--target-devices-file', '${SWARMING_BOT_FILE}'] | |
1063 else: | |
1064 extra_files = ['../../testing/test_env.py'] | |
1065 | |
1066 # This needs to mirror the settings in //build/config/ui.gni: | |
1067 # use_x11 = is_linux && !use_ozone. | |
1068 use_x11 = is_linux and not 'use_ozone=true' in vals['gn_args'] | |
1069 | |
1070 xvfb = use_x11 and test_type == 'windowed_test_launcher' | |
1071 if xvfb: | |
1072 extra_files += [ | |
1073 '../../testing/xvfb.py', | |
1074 ] | |
1075 | |
1076 # Memcheck is only supported for linux. Ignore in other platforms. | |
1077 memcheck = is_linux and 'rtc_use_memcheck=true' in vals['gn_args'] | |
1078 memcheck_cmdline = [ | |
1079 'bash', | |
1080 '../../tools-webrtc/valgrind/webrtc_tests.sh', | |
1081 '--tool', | |
1082 'memcheck', | |
1083 '--target', | |
1084 'Release', | |
1085 '--build-dir', | |
1086 '..', | |
1087 '--test', | |
1088 ] | |
1089 | |
1090 if not memcheck: | |
1091 extra_files += [ | |
1092 '../../third_party/gtest-parallel/gtest-parallel', | |
1093 '../../tools-webrtc/gtest-parallel-wrapper.py', | |
1094 ] | |
1095 sep = '\\' if self.platform == 'win32' else '/' | |
1096 output_dir = '${ISOLATED_OUTDIR}' + sep + 'test_logs' | |
1097 gtest_parallel_wrapper = [ | |
1098 '../../tools-webrtc/gtest-parallel-wrapper.py', | |
1099 '--output_dir=%s' % output_dir, | |
1100 '--gtest_color=no', | |
1101 # We tell gtest-parallel to interrupt the test after 900 seconds, | |
1102 # so it can exit cleanly and report results, instead of being | |
1103 # interrupted by swarming and not reporting anything. | |
1104 '--timeout=900', | |
1105 '--retry_failed=3', | |
1106 ] | |
1107 | |
1108 asan = 'is_asan=true' in vals['gn_args'] | |
1109 lsan = 'is_lsan=true' in vals['gn_args'] | |
1110 msan = 'is_msan=true' in vals['gn_args'] | |
1111 tsan = 'is_tsan=true' in vals['gn_args'] | |
1112 | |
1113 executable_prefix = '.\\' if self.platform == 'win32' else './' | |
1114 executable_suffix = '.exe' if self.platform == 'win32' else '' | |
1115 executable = executable_prefix + target + executable_suffix | |
1116 | |
1117 cmdline = (['../../testing/xvfb.py'] if xvfb else | |
1118 ['../../testing/test_env.py']) | |
1119 cmdline += memcheck_cmdline if memcheck else gtest_parallel_wrapper | |
1120 cmdline.append(executable) | |
1121 if test_type == 'non_parallel_console_test_launcher' and not memcheck: | |
1122 # Still use the gtest-parallel-wrapper.py script since we need it to | |
1123 # run tests on swarming, but don't execute tests in parallel. | |
1124 cmdline.append('--workers=1') | |
1125 | |
1126 cmdline.extend([ | |
1127 '--', | |
1128 '--asan=%d' % asan, | |
1129 '--lsan=%d' % lsan, | |
1130 '--msan=%d' % msan, | |
1131 '--tsan=%d' % tsan, | |
1132 ]) | |
1133 | |
1134 cmdline += isolate_map[target].get('args', []) | |
1135 | |
1136 return cmdline, extra_files | |
1137 | |
1138 def ToAbsPath(self, build_path, *comps): | |
1139 return self.PathJoin(self.src_dir, | |
1140 self.ToSrcRelPath(build_path), | |
1141 *comps) | |
1142 | |
1143 def ToSrcRelPath(self, path): | |
1144 """Returns a relative path from the top of the repo.""" | |
1145 if path.startswith('//'): | |
1146 return path[2:].replace('/', self.sep) | |
1147 return self.RelPath(path, self.src_dir) | |
1148 | |
1149 def ParseGYPConfigPath(self, path): | |
1150 rpath = self.ToSrcRelPath(path) | |
1151 output_dir, _, _ = rpath.rpartition(self.sep) | |
1152 return output_dir | |
1153 | |
1154 def GYPCmd(self, output_dir, vals): | |
1155 if vals['cros_passthrough']: | |
1156 if not 'GYP_DEFINES' in os.environ: | |
1157 raise MBErr('MB is expecting GYP_DEFINES to be in the environment') | |
1158 gyp_defines = os.environ['GYP_DEFINES'] | |
1159 if not 'chromeos=1' in gyp_defines: | |
1160 raise MBErr('GYP_DEFINES is missing chromeos=1: (GYP_DEFINES=%s)' % | |
1161 gyp_defines) | |
1162 else: | |
1163 gyp_defines = vals['gyp_defines'] | |
1164 | |
1165 goma_dir = self.args.goma_dir | |
1166 | |
1167 # GYP uses shlex.split() to split the gyp defines into separate arguments, | |
1168     # so we can support backslashes and spaces in arguments by quoting | |
1169 # them, even on Windows, where this normally wouldn't work. | |
1170 if goma_dir and ('\\' in goma_dir or ' ' in goma_dir): | |
1171 goma_dir = "'%s'" % goma_dir | |
1172 | |
1173 if goma_dir: | |
1174 gyp_defines += ' gomadir=%s' % goma_dir | |
1175 | |
1176 android_version_code = self.args.android_version_code | |
1177 if android_version_code: | |
1178 gyp_defines += ' app_manifest_version_code=%s' % android_version_code | |
1179 | |
1180 android_version_name = self.args.android_version_name | |
1181 if android_version_name: | |
1182 gyp_defines += ' app_manifest_version_name=%s' % android_version_name | |
1183 | |
1184 cmd = [ | |
1185 self.executable, | |
1186 self.args.gyp_script, | |
1187 '-G', | |
1188 'output_dir=' + output_dir, | |
1189 ] | |
1190 | |
1191 # Ensure that we have an environment that only contains | |
1192 # the exact values of the GYP variables we need. | |
1193 env = os.environ.copy() | |
1194 | |
1195 # This is a terrible hack to work around the fact that | |
1196 # //tools/clang/scripts/update.py is invoked by GYP and GN but | |
1197 # currently relies on an environment variable to figure out | |
1198 # what revision to embed in the command line #defines. | |
1199 # For GN, we've made this work via a gn arg that will cause update.py | |
1200 # to get an additional command line arg, but getting that to work | |
1201 # via GYP_DEFINES has proven difficult, so we rewrite the GYP_DEFINES | |
1202 # to get rid of the arg and add the old var in, instead. | |
1203 # See crbug.com/582737 for more on this. This can hopefully all | |
1204 # go away with GYP. | |
1205     m = re.search(r'llvm_force_head_revision=1\s*', gyp_defines) | |
1206 if m: | |
1207 env['LLVM_FORCE_HEAD_REVISION'] = '1' | |
1208 gyp_defines = gyp_defines.replace(m.group(0), '') | |
1209 | |
1210 # This is another terrible hack to work around the fact that | |
1211 # GYP sets the link concurrency to use via the GYP_LINK_CONCURRENCY | |
1212 # environment variable, and not via a proper GYP_DEFINE. See | |
1213 # crbug.com/611491 for more on this. | |
1214     m = re.search(r'gyp_link_concurrency=(\d+)(\s*)', gyp_defines) | |
1215 if m: | |
1216 env['GYP_LINK_CONCURRENCY'] = m.group(1) | |
1217 gyp_defines = gyp_defines.replace(m.group(0), '') | |
1218 | |
1219 env['GYP_GENERATORS'] = 'ninja' | |
1220 if 'GYP_CHROMIUM_NO_ACTION' in env: | |
1221 del env['GYP_CHROMIUM_NO_ACTION'] | |
1222 if 'GYP_CROSSCOMPILE' in env: | |
1223 del env['GYP_CROSSCOMPILE'] | |
1224 env['GYP_DEFINES'] = gyp_defines | |
1225 if vals['gyp_crosscompile']: | |
1226 env['GYP_CROSSCOMPILE'] = '1' | |
1227 return cmd, env | |
1228 | |
1229 def RunGNAnalyze(self, vals): | |
1230 # Analyze runs before 'gn gen' now, so we need to run gn gen | |
1231 # in order to ensure that we have a build directory. | |
1232 ret = self.RunGNGen(vals) | |
1233 if ret: | |
1234 return ret | |
1235 | |
1236 build_path = self.args.path[0] | |
1237 input_path = self.args.input_path[0] | |
1238 gn_input_path = input_path + '.gn' | |
1239 output_path = self.args.output_path[0] | |
1240 gn_output_path = output_path + '.gn' | |
1241 | |
1242 inp = self.ReadInputJSON(['files', 'test_targets', | |
1243 'additional_compile_targets']) | |
1244 if self.args.verbose: | |
1245 self.Print() | |
1246 self.Print('analyze input:') | |
1247 self.PrintJSON(inp) | |
1248 self.Print() | |
1249 | |
1250 | |
1251 # This shouldn't normally happen, but could due to unusual race conditions, | |
1252 # like a try job that gets scheduled before a patch lands but runs after | |
1253 # the patch has landed. | |
1254 if not inp['files']: | |
1255 self.Print('Warning: No files modified in patch, bailing out early.') | |
1256 self.WriteJSON({ | |
1257 'status': 'No dependency', | |
1258 'compile_targets': [], | |
1259 'test_targets': [], | |
1260 }, output_path) | |
1261 return 0 | |
1262 | |
1263 gn_inp = {} | |
1264 gn_inp['files'] = ['//' + f for f in inp['files'] if not f.startswith('//')] | |
1265 | |
1266 isolate_map = self.ReadIsolateMap() | |
1267 err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels( | |
1268 isolate_map, inp['additional_compile_targets']) | |
1269 if err: | |
1270 raise MBErr(err) | |
1271 | |
1272 err, gn_inp['test_targets'] = self.MapTargetsToLabels( | |
1273 isolate_map, inp['test_targets']) | |
1274 if err: | |
1275 raise MBErr(err) | |
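    # Remember which GN label came from which original target name so the 'test_targets' | |
    # output can be translated back below. | |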
1276 labels_to_targets = {} | |
1277 for i, label in enumerate(gn_inp['test_targets']): | |
1278 labels_to_targets[label] = inp['test_targets'][i] | |
1279 | |
1280 try: | |
1281 self.WriteJSON(gn_inp, gn_input_path) | |
1282 cmd = self.GNCmd('analyze', build_path, gn_input_path, gn_output_path) | |
1283 ret, _, _ = self.Run(cmd, force_verbose=True) | |
1284 if ret: | |
1285 return ret | |
1286 | |
1287 gn_outp_str = self.ReadFile(gn_output_path) | |
1288 try: | |
1289 gn_outp = json.loads(gn_outp_str) | |
1290 except Exception as e: | |
1291 self.Print("Failed to parse the JSON string GN returned: %s\n%s" | |
1292 % (repr(gn_outp_str), str(e))) | |
1293 raise | |
1294 | |
1295 outp = {} | |
1296 if 'status' in gn_outp: | |
1297 outp['status'] = gn_outp['status'] | |
1298 if 'error' in gn_outp: | |
1299 outp['error'] = gn_outp['error'] | |
1300 if 'invalid_targets' in gn_outp: | |
1301 outp['invalid_targets'] = gn_outp['invalid_targets'] | |
1302 if 'compile_targets' in gn_outp: | |
1303 if 'all' in gn_outp['compile_targets']: | |
1304 outp['compile_targets'] = ['all'] | |
1305 else: | |
1306 outp['compile_targets'] = [ | |
1307 label.replace('//', '') for label in gn_outp['compile_targets']] | |
1308 if 'test_targets' in gn_outp: | |
1309 outp['test_targets'] = [ | |
1310 labels_to_targets[label] for label in gn_outp['test_targets']] | |
1311 | |
1312 if self.args.verbose: | |
1313 self.Print() | |
1314 self.Print('analyze output:') | |
1315 self.PrintJSON(outp) | |
1316 self.Print() | |
1317 | |
1318 self.WriteJSON(outp, output_path) | |
1319 | |
1320 finally: | |
1321 if self.Exists(gn_input_path): | |
1322 self.RemoveFile(gn_input_path) | |
1323 if self.Exists(gn_output_path): | |
1324 self.RemoveFile(gn_output_path) | |
1325 | |
1326 return 0 | |
1327 | |
1328 def ReadInputJSON(self, required_keys): | |
1329 path = self.args.input_path[0] | |
1330 output_path = self.args.output_path[0] | |
1331 if not self.Exists(path): | |
1332 self.WriteFailureAndRaise('"%s" does not exist' % path, output_path) | |
1333 | |
1334 try: | |
1335 inp = json.loads(self.ReadFile(path)) | |
1336 except Exception as e: | |
1337 self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' % | |
1338 (path, e), output_path) | |
1339 | |
1340 for k in required_keys: | |
1341 if not k in inp: | |
1342 self.WriteFailureAndRaise('input file is missing a "%s" key' % k, | |
1343 output_path) | |
1344 | |
1345 return inp | |
1346 | |
1347 def WriteFailureAndRaise(self, msg, output_path): | |
1348 if output_path: | |
1349 self.WriteJSON({'error': msg}, output_path, force_verbose=True) | |
1350 raise MBErr(msg) | |
1351 | |
1352 def WriteJSON(self, obj, path, force_verbose=False): | |
1353 try: | |
1354 self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n', | |
1355 force_verbose=force_verbose) | |
1356 except Exception as e: | |
1357 raise MBErr('Error %s writing to the output path "%s"' % | |
1358 (e, path)) | |
1359 | |
1360 def CheckCompile(self, master, builder): | |
1361 url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1' | |
1362 url = urllib2.quote(url_template.format(master=master, builder=builder), | |
1363 safe=':/()?=') | |
1364 try: | |
1365 builds = json.loads(self.Fetch(url)) | |
1366 except Exception as e: | |
1367 return str(e) | |
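    # Inspect the most recent build whose status text starts with "build successful" and see | |
    # whether it ran any compile or analyze steps. | |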
1368 successes = sorted( | |
1369 [int(x) for x in builds.keys() if "text" in builds[x] and | |
1370 cmp(builds[x]["text"][:2], ["build", "successful"]) == 0], | |
1371 reverse=True) | |
1372 if not successes: | |
1373 return "no successful builds" | |
1374 build = builds[str(successes[0])] | |
1375 step_names = set([step["name"] for step in build["steps"]]) | |
1376 compile_indicators = set(["compile", "compile (with patch)", "analyze"]) | |
1377 if compile_indicators & step_names: | |
1378 return "compiles" | |
1379 return "does not compile" | |
1380 | |
1381 def PrintCmd(self, cmd, env): | |
1382 if self.platform == 'win32': | |
1383 env_prefix = 'set ' | |
1384 env_quoter = QuoteForSet | |
1385 shell_quoter = QuoteForCmd | |
1386 else: | |
1387 env_prefix = '' | |
1388 env_quoter = pipes.quote | |
1389 shell_quoter = pipes.quote | |
1390 | |
1391 def print_env(var): | |
1392 if env and var in env: | |
1393 self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) | |
1394 | |
1395 print_env('GYP_CROSSCOMPILE') | |
1396 print_env('GYP_DEFINES') | |
1397 print_env('GYP_LINK_CONCURRENCY') | |
1398 print_env('LLVM_FORCE_HEAD_REVISION') | |
1399 | |
1400 if cmd[0] == self.executable: | |
1401 cmd = ['python'] + cmd[1:] | |
1402 self.Print(*[shell_quoter(arg) for arg in cmd]) | |
1403 | |
1404 def PrintJSON(self, obj): | |
1405 self.Print(json.dumps(obj, indent=2, sort_keys=True)) | |
1406 | |
1407 def Build(self, target): | |
1408 build_dir = self.ToSrcRelPath(self.args.path[0]) | |
1409 ninja_cmd = ['ninja', '-C', build_dir] | |
1410 if self.args.jobs: | |
1411 ninja_cmd.extend(['-j', '%d' % self.args.jobs]) | |
1412 ninja_cmd.append(target) | |
1413 ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False) | |
1414 return ret | |
1415 | |
1416 def Run(self, cmd, env=None, force_verbose=True, buffer_output=True): | |
1417 # This function largely exists so it can be overridden for testing. | |
1418 if self.args.dryrun or self.args.verbose or force_verbose: | |
1419 self.PrintCmd(cmd, env) | |
1420 if self.args.dryrun: | |
1421 return 0, '', '' | |
1422 | |
1423 ret, out, err = self.Call(cmd, env=env, buffer_output=buffer_output) | |
1424 if self.args.verbose or force_verbose: | |
1425 if ret: | |
1426 self.Print(' -> returned %d' % ret) | |
1427 if out: | |
1428 self.Print(out, end='') | |
1429 if err: | |
1430 self.Print(err, end='', file=sys.stderr) | |
1431 return ret, out, err | |
1432 | |
1433 def Call(self, cmd, env=None, buffer_output=True): | |
1434 if buffer_output: | |
1435 p = subprocess.Popen(cmd, shell=False, cwd=self.src_dir, | |
1436 stdout=subprocess.PIPE, stderr=subprocess.PIPE, | |
1437 env=env) | |
1438 out, err = p.communicate() | |
1439 else: | |
1440 p = subprocess.Popen(cmd, shell=False, cwd=self.src_dir, | |
1441 env=env) | |
1442 p.wait() | |
1443 out = err = '' | |
1444 return p.returncode, out, err | |
1445 | |
1446 def ExpandUser(self, path): | |
1447 # This function largely exists so it can be overridden for testing. | |
1448 return os.path.expanduser(path) | |
1449 | |
1450 def Exists(self, path): | |
1451 # This function largely exists so it can be overridden for testing. | |
1452 return os.path.exists(path) | |
1453 | |
1454 def Fetch(self, url): | |
1455 # This function largely exists so it can be overridden for testing. | |
1456 f = urllib2.urlopen(url) | |
1457 contents = f.read() | |
1458 f.close() | |
1459 return contents | |
1460 | |
1461 def MaybeMakeDirectory(self, path): | |
1462 try: | |
1463 os.makedirs(path) | |
1464     except OSError as e: | |
1465 if e.errno != errno.EEXIST: | |
1466 raise | |
1467 | |
1468 def PathJoin(self, *comps): | |
1469     # This function largely exists so it can be overridden for testing. | |
1470 return os.path.join(*comps) | |
1471 | |
1472 def Print(self, *args, **kwargs): | |
1473 # This function largely exists so it can be overridden for testing. | |
1474 print(*args, **kwargs) | |
1475     if kwargs.get('file', sys.stdout) == sys.stdout: | |
1476 sys.stdout.flush() | |
1477 | |
1478 def ReadFile(self, path): | |
1479     # This function largely exists so it can be overridden for testing. | |
1480 with open(path) as fp: | |
1481 return fp.read() | |
1482 | |
1483 def RelPath(self, path, start='.'): | |
1484     # This function largely exists so it can be overridden for testing. | |
1485 return os.path.relpath(path, start) | |
1486 | |
1487 def RemoveFile(self, path): | |
1488     # This function largely exists so it can be overridden for testing. | |
1489 os.remove(path) | |
1490 | |
1491 def RemoveDirectory(self, abs_path): | |
1492 if self.platform == 'win32': | |
1493 # In other places in chromium, we often have to retry this command | |
1494 # because we're worried about other processes still holding on to | |
1495 # file handles, but when MB is invoked, it will be early enough in the | |
1496       # build that there should be no other processes to interfere. We | |
1497 # can change this if need be. | |
1498 self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path]) | |
1499 else: | |
1500 shutil.rmtree(abs_path, ignore_errors=True) | |
1501 | |
1502 def TempFile(self, mode='w'): | |
1503     # This function largely exists so it can be overridden for testing. | |
1504 return tempfile.NamedTemporaryFile(mode=mode, delete=False) | |
1505 | |
1506 def WriteFile(self, path, contents, force_verbose=False): | |
1507     # This function largely exists so it can be overridden for testing. | |
1508 if self.args.dryrun or self.args.verbose or force_verbose: | |
1509 self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path)) | |
1510 with open(path, 'w') as fp: | |
1511 return fp.write(contents) | |
1512 | |
1513 | |
1514 class MBErr(Exception): | |
1515 pass | |
1516 | |
1517 | |
1518 # See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful | |
1519 # details of this next section, which handles escaping command lines | |
1520 # so that they can be copied and pasted into a cmd window. | |
1521 UNSAFE_FOR_SET = set('^<>&|') | |
1522 UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%')) | |
1523 ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"')) | |
1524 | |
1525 | |
1526 def QuoteForSet(arg): | |
1527 if any(a in UNSAFE_FOR_SET for a in arg): | |
1528 arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg) | |
1529 return arg | |
1530 | |
1531 | |
1532 def QuoteForCmd(arg): | |
1533 # First, escape the arg so that CommandLineToArgvW will parse it properly. | |
1534 # From //tools/gyp/pylib/gyp/msvs_emulation.py:23. | |
1535 if arg == '' or ' ' in arg or '"' in arg: | |
1536 quote_re = re.compile(r'(\\*)"') | |
1537 arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)) | |
1538 | |
1539 # Then check to see if the arg contains any metacharacters other than | |
1540 # double quotes; if it does, quote everything (including the double | |
1541 # quotes) for safety. | |
1542 if any(a in UNSAFE_FOR_CMD for a in arg): | |
1543 arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg) | |
1544 return arg | |
1545 | |
1546 | |
1547 if __name__ == '__main__': | |
1548 sys.exit(main(sys.argv[1:])) | |