#!/usr/bin/env python
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""MB - the Meta-Build wrapper around GYP and GN

MB is a wrapper script for GYP and GN that can be used to generate build files
for sets of canned configurations and analyze them.
"""
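
# Typical invocations look like the following (the master/builder names are
# only illustrative; the real ones live in tools/mb/mb_config.pyl):
#
#   % tools/mb/mb.py gen -m chromium.linux -b "Linux Builder" //out/Default
#   % tools/mb/mb.py lookup -m chromium.linux -b "Linux Builder"
#   % tools/mb/mb.py validate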

from __future__ import print_function

import argparse
import ast
import errno
import json
import os
import pipes
import pprint
import re
import shutil
import sys
import subprocess
import tempfile
import traceback
import urllib2

from collections import OrderedDict

CHROMIUM_SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))))
sys.path = [os.path.join(CHROMIUM_SRC_DIR, 'build')] + sys.path

import gn_helpers


def main(args):
  mbw = MetaBuildWrapper()
  return mbw.Main(args)


class MetaBuildWrapper(object):
  def __init__(self):
    self.chromium_src_dir = CHROMIUM_SRC_DIR
    self.default_config = os.path.join(self.chromium_src_dir, 'tools', 'mb',
                                       'mb_config.pyl')
    self.default_isolate_map = os.path.join(self.chromium_src_dir, 'testing',
                                            'buildbot', 'gn_isolate_map.pyl')
    self.executable = sys.executable
    self.platform = sys.platform
    self.sep = os.sep
    self.args = argparse.Namespace()
    self.configs = {}
    self.masters = {}
    self.mixins = {}

  def Main(self, args):
    self.ParseArgs(args)
    try:
      ret = self.args.func()
      if ret:
        self.DumpInputFiles()
      return ret
    except KeyboardInterrupt:
      self.Print('interrupted, exiting')
      return 130
    except Exception:
      self.DumpInputFiles()
      s = traceback.format_exc()
      for l in s.splitlines():
        self.Print(l)
      return 1

  def ParseArgs(self, argv):
    def AddCommonOptions(subp):
      subp.add_argument('-b', '--builder',
                        help='builder name to look up config from')
      subp.add_argument('-m', '--master',
                        help='master name to look up config from')
      subp.add_argument('-c', '--config',
                        help='configuration to analyze')
      subp.add_argument('--phase',
                        help='optional phase name (used when builders '
                             'do multiple compiles with different '
                             'arguments in a single build)')
      subp.add_argument('-f', '--config-file', metavar='PATH',
                        default=self.default_config,
                        help='path to config file '
                             '(default is %(default)s)')
      subp.add_argument('-i', '--isolate-map-file', metavar='PATH',
                        default=self.default_isolate_map,
                        help='path to isolate map file '
                             '(default is %(default)s)')
      subp.add_argument('-g', '--goma-dir',
                        help='path to goma directory')
      subp.add_argument('--gyp-script', metavar='PATH',
                        default=self.PathJoin('build', 'gyp_chromium'),
                        help='path to gyp script relative to project root '
                             '(default is %(default)s)')
      subp.add_argument('--android-version-code',
                        help='Sets GN arg android_default_version_code and '
                             'GYP_DEFINE app_manifest_version_code')
      subp.add_argument('--android-version-name',
                        help='Sets GN arg android_default_version_name and '
                             'GYP_DEFINE app_manifest_version_name')
      subp.add_argument('-n', '--dryrun', action='store_true',
                        help='Do a dry run (i.e., do nothing, just print '
                             'the commands that will run)')
      subp.add_argument('-v', '--verbose', action='store_true',
                        help='verbose logging')

    parser = argparse.ArgumentParser(prog='mb')
    subps = parser.add_subparsers()

    subp = subps.add_parser('analyze',
                            help='analyze whether changes to a set of files '
                                 'will cause a set of binaries to be rebuilt.')
    AddCommonOptions(subp)
    subp.add_argument('path', nargs=1,
                      help='path build was generated into.')
    subp.add_argument('input_path', nargs=1,
                      help='path to a file containing the input arguments '
                           'as a JSON object.')
    subp.add_argument('output_path', nargs=1,
                      help='path to a file containing the output arguments '
                           'as a JSON object.')
    subp.set_defaults(func=self.CmdAnalyze)

    subp = subps.add_parser('export',
                            help='print out the expanded configuration for '
                                 'each builder as a JSON object')
    subp.add_argument('-f', '--config-file', metavar='PATH',
                      default=self.default_config,
                      help='path to config file (default is %(default)s)')
    subp.add_argument('-g', '--goma-dir',
                      help='path to goma directory')
    subp.set_defaults(func=self.CmdExport)

    subp = subps.add_parser('gen',
                            help='generate a new set of build files')
    AddCommonOptions(subp)
    subp.add_argument('--swarming-targets-file',
                      help='save runtime dependencies for targets listed '
                           'in file.')
    subp.add_argument('path', nargs=1,
                      help='path to generate build into')
    subp.set_defaults(func=self.CmdGen)

    subp = subps.add_parser('isolate',
                            help='generate the .isolate files for a given '
                                 'binary')
    AddCommonOptions(subp)
    subp.add_argument('path', nargs=1,
                      help='path build was generated into')
    subp.add_argument('target', nargs=1,
                      help='ninja target to generate the isolate for')
    subp.set_defaults(func=self.CmdIsolate)

    subp = subps.add_parser('lookup',
                            help='look up the command for a given config or '
                                 'builder')
    AddCommonOptions(subp)
    subp.set_defaults(func=self.CmdLookup)

    subp = subps.add_parser(
        'run',
        help='build and run the isolated version of a '
             'binary',
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subp.description = (
        'Build, isolate, and run the given binary with the command line\n'
        'listed in the isolate. You may pass extra arguments after the\n'
        'target; use "--" if the extra arguments need to include switches.\n'
        '\n'
        'Examples:\n'
        '\n'
        ' % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n'
        ' //out/Default content_browsertests\n'
        '\n'
        ' % tools/mb/mb.py run out/Default content_browsertests\n'
        '\n'
        ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n'
        ' --test-launcher-retry-limit=0'
        '\n'
    )

    AddCommonOptions(subp)
    subp.add_argument('-j', '--jobs', dest='jobs', type=int,
                      help='Number of jobs to pass to ninja')
    subp.add_argument('--no-build', dest='build', default=True,
                      action='store_false',
                      help='Do not build, just isolate and run')
    subp.add_argument('path', nargs=1,
                      help=('path to generate build into (or use).'
                            ' This can be either a regular path or a '
                            'GN-style source-relative path like '
                            '//out/Default.'))
    subp.add_argument('target', nargs=1,
                      help='ninja target to build and run')
    subp.add_argument('extra_args', nargs='*',
                      help=('extra args to pass to the isolate to run. Use '
                            '"--" as the first arg if you need to pass '
                            'switches'))
    subp.set_defaults(func=self.CmdRun)

    subp = subps.add_parser('validate',
                            help='validate the config file')
    subp.add_argument('-f', '--config-file', metavar='PATH',
                      default=self.default_config,
                      help='path to config file (default is %(default)s)')
    subp.set_defaults(func=self.CmdValidate)

    subp = subps.add_parser('audit',
                            help='Audit the config file to track progress')
    subp.add_argument('-f', '--config-file', metavar='PATH',
                      default=self.default_config,
                      help='path to config file (default is %(default)s)')
    subp.add_argument('-i', '--internal', action='store_true',
                      help='check internal masters also')
    subp.add_argument('-m', '--master', action='append',
                      help='master to audit (default is all non-internal '
                           'masters in file)')
    subp.add_argument('-u', '--url-template', action='store',
                      default='https://build.chromium.org/p/'
                              '{master}/json/builders',
                      help='URL scheme for JSON APIs to buildbot '
                           '(default: %(default)s) ')
    subp.add_argument('-c', '--check-compile', action='store_true',
                      help='check whether tbd and master-only bots actually'
                           ' do compiles')
    subp.set_defaults(func=self.CmdAudit)

    subp = subps.add_parser('help',
                            help='Get help on a subcommand.')
    subp.add_argument(nargs='?', action='store', dest='subcommand',
                      help='The command to get help for.')
    subp.set_defaults(func=self.CmdHelp)

    self.args = parser.parse_args(argv)

  def DumpInputFiles(self):

    def DumpContentsOfFilePassedTo(arg_name, path):
      if path and self.Exists(path):
        self.Print("\n# To recreate the file passed to %s:" % arg_name)
        self.Print("%% cat > %s <<EOF" % path)
        contents = self.ReadFile(path)
        self.Print(contents)
        self.Print("EOF\n%\n")

    if getattr(self.args, 'input_path', None):
      DumpContentsOfFilePassedTo(
          'argv[0] (input_path)', self.args.input_path[0])
    if getattr(self.args, 'swarming_targets_file', None):
      DumpContentsOfFilePassedTo(
          '--swarming-targets-file', self.args.swarming_targets_file)

  def CmdAnalyze(self):
    vals = self.Lookup()
    self.ClobberIfNeeded(vals)
    if vals['type'] == 'gn':
      return self.RunGNAnalyze(vals)
    else:
      return self.RunGYPAnalyze(vals)

  def CmdExport(self):
    self.ReadConfigFile()
    obj = {}
    for master, builders in self.masters.items():
      obj[master] = {}
      for builder in builders:
        config = self.masters[master][builder]
        if not config:
          continue

        if isinstance(config, dict):
          args = {k: self.FlattenConfig(v)['gn_args']
                  for k, v in config.items()}
        elif config.startswith('//'):
          args = config
        else:
          args = self.FlattenConfig(config)['gn_args']
          if 'error' in args:
            continue

        obj[master][builder] = args

    # Dump object and trim trailing whitespace.
    s = '\n'.join(l.rstrip() for l in
                  json.dumps(obj, sort_keys=True, indent=2).splitlines())
    self.Print(s)
    return 0

  def CmdGen(self):
    vals = self.Lookup()
    self.ClobberIfNeeded(vals)
    if vals['type'] == 'gn':
      return self.RunGNGen(vals)
    else:
      return self.RunGYPGen(vals)

  def CmdHelp(self):
    if self.args.subcommand:
      self.ParseArgs([self.args.subcommand, '--help'])
    else:
      self.ParseArgs(['--help'])

  def CmdIsolate(self):
    vals = self.GetConfig()
    if not vals:
      return 1

    if vals['type'] == 'gn':
      return self.RunGNIsolate(vals)
    else:
      return self.Build('%s_run' % self.args.target[0])

  def CmdLookup(self):
    vals = self.Lookup()
    if vals['type'] == 'gn':
      cmd = self.GNCmd('gen', '_path_')
      gn_args = self.GNArgs(vals)
      self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args)
      env = None
    else:
      cmd, env = self.GYPCmd('_path_', vals)

    self.PrintCmd(cmd, env)
    return 0

  def CmdRun(self):
    vals = self.GetConfig()
    if not vals:
      return 1

    build_dir = self.args.path[0]
    target = self.args.target[0]

    if vals['type'] == 'gn':
      if self.args.build:
        ret = self.Build(target)
        if ret:
          return ret
      ret = self.RunGNIsolate(vals)
      if ret:
        return ret
    else:
      ret = self.Build('%s_run' % target)
      if ret:
        return ret

    cmd = [
        self.executable,
        self.PathJoin('tools', 'swarming_client', 'isolate.py'),
        'run',
        '-s',
        self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
    ]
    if self.args.extra_args:
      cmd += ['--'] + self.args.extra_args

    ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False)

    return ret

  def CmdValidate(self, print_ok=True):
    errs = []

    # Read the file to make sure it parses.
    self.ReadConfigFile()

    # Build a list of all of the configs referenced by builders.
    all_configs = {}
    for master in self.masters:
      for config in self.masters[master].values():
        if isinstance(config, dict):
          for c in config.values():
            all_configs[c] = master
        else:
          all_configs[config] = master

    # Check that every referenced args file or config actually exists.
    for config, loc in all_configs.items():
      if config.startswith('//'):
        if not self.Exists(self.ToAbsPath(config)):
          errs.append('Unknown args file "%s" referenced from "%s".' %
                      (config, loc))
      elif not config in self.configs:
        errs.append('Unknown config "%s" referenced from "%s".' %
                    (config, loc))

    # Check that every actual config is actually referenced.
    for config in self.configs:
      if not config in all_configs:
        errs.append('Unused config "%s".' % config)

    # Figure out the whole list of mixins, and check that every mixin
    # listed by a config or another mixin actually exists.
    referenced_mixins = set()
    for config, mixins in self.configs.items():
      for mixin in mixins:
        if not mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by config "%s".' %
                      (mixin, config))
        referenced_mixins.add(mixin)

    for mixin in self.mixins:
      for sub_mixin in self.mixins[mixin].get('mixins', []):
        if not sub_mixin in self.mixins:
          errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
                      (sub_mixin, mixin))
        referenced_mixins.add(sub_mixin)

    # Check that every mixin defined is actually referenced somewhere.
    for mixin in self.mixins:
      if not mixin in referenced_mixins:
        errs.append('Unreferenced mixin "%s".' % mixin)

    # If we're checking the Chromium config, check that the 'chromium' bots
    # which build public artifacts do not include the chrome_with_codecs mixin.
    if self.args.config_file == self.default_config:
      if 'chromium' in self.masters:
        for builder in self.masters['chromium']:
          config = self.masters['chromium'][builder]
          def RecurseMixins(current_mixin):
            if current_mixin == 'chrome_with_codecs':
              errs.append('Public artifact builder "%s" can not contain the '
                          '"chrome_with_codecs" mixin.' % builder)
              return
            if not 'mixins' in self.mixins[current_mixin]:
              return
            for mixin in self.mixins[current_mixin]['mixins']:
              RecurseMixins(mixin)

          for mixin in self.configs[config]:
            RecurseMixins(mixin)
      else:
        errs.append('Missing "chromium" master. Please update this '
                    'proprietary codecs check with the name of the master '
                    'responsible for public build artifacts.')

    if errs:
      raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
                  '\n ' + '\n '.join(errs))

    if print_ok:
      self.Print('mb config file %s looks ok.' % self.args.config_file)
    return 0

  def CmdAudit(self):
    """Track the progress of the GYP->GN migration on the bots."""

    # First, make sure the config file is okay, but don't print anything
    # if it is (it will throw an error if it isn't).
    self.CmdValidate(print_ok=False)

    stats = OrderedDict()
    STAT_MASTER_ONLY = 'Master only'
    STAT_CONFIG_ONLY = 'Config only'
    STAT_TBD = 'Still TBD'
    STAT_GYP = 'Still GYP'
    STAT_DONE = 'Done (on GN)'
    stats[STAT_MASTER_ONLY] = 0
    stats[STAT_CONFIG_ONLY] = 0
    stats[STAT_TBD] = 0
    stats[STAT_GYP] = 0
    stats[STAT_DONE] = 0

    def PrintBuilders(heading, builders, notes):
      stats.setdefault(heading, 0)
      stats[heading] += len(builders)
      if builders:
        self.Print(' %s:' % heading)
        for builder in sorted(builders):
          self.Print(' %s%s' % (builder, notes[builder]))

    self.ReadConfigFile()

    masters = self.args.master or self.masters
    for master in sorted(masters):
      url = self.args.url_template.replace('{master}', master)

      self.Print('Auditing %s' % master)

      MASTERS_TO_SKIP = (
          'client.skia',
          'client.v8.fyi',
          'tryserver.v8',
      )
      if master in MASTERS_TO_SKIP:
        # Skip these bots because converting them is the responsibility of
        # those teams and out of scope for the Chromium migration to GN.
        self.Print(' Skipped (out of scope)')
        self.Print('')
        continue

      INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous',
                          'internal.client.kitchensync')
      if master in INTERNAL_MASTERS and not self.args.internal:
        # Skip these because the servers aren't accessible by default ...
        self.Print(' Skipped (internal)')
        self.Print('')
        continue

      try:
        # Fetch the /builders contents from the buildbot master. The
        # keys of the dict are the builder names themselves.
        json_contents = self.Fetch(url)
        d = json.loads(json_contents)
      except Exception as e:
        self.Print(str(e))
        return 1

      config_builders = set(self.masters[master])
      master_builders = set(d.keys())
      both = master_builders & config_builders
      master_only = master_builders - config_builders
      config_only = config_builders - master_builders
      tbd = set()
      gyp = set()
      done = set()
      notes = {builder: '' for builder in config_builders | master_builders}

      for builder in both:
        config = self.masters[master][builder]
        if config == 'tbd':
          tbd.add(builder)
        elif isinstance(config, dict):
          vals = self.FlattenConfig(config.values()[0])
          if vals['type'] == 'gyp':
            gyp.add(builder)
          else:
            done.add(builder)
        elif config.startswith('//'):
          done.add(builder)
        else:
          vals = self.FlattenConfig(config)
          if vals['type'] == 'gyp':
            gyp.add(builder)
          else:
            done.add(builder)

      if self.args.check_compile and (tbd or master_only):
        either = tbd | master_only
        for builder in either:
          notes[builder] = ' (' + self.CheckCompile(master, builder) + ')'

      if master_only or config_only or tbd or gyp:
        PrintBuilders(STAT_MASTER_ONLY, master_only, notes)
        PrintBuilders(STAT_CONFIG_ONLY, config_only, notes)
        PrintBuilders(STAT_TBD, tbd, notes)
        PrintBuilders(STAT_GYP, gyp, notes)
      else:
        self.Print(' All GN!')

      stats[STAT_DONE] += len(done)

      self.Print('')

    fmt = '{:<27} {:>4}'
    self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values()))))
    self.Print(fmt.format('-' * 27, '----'))
    for stat, count in stats.items():
      self.Print(fmt.format(stat, str(count)))

    return 0

  def GetConfig(self):
    build_dir = self.args.path[0]

    vals = self.DefaultVals()
    if self.args.builder or self.args.master or self.args.config:
      vals = self.Lookup()
      if vals['type'] == 'gn':
        # Re-run gn gen in order to ensure the config is consistent with the
        # build dir.
        self.RunGNGen(vals)
      return vals

    mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type')
    if not self.Exists(mb_type_path):
      toolchain_path = self.PathJoin(self.ToAbsPath(build_dir),
                                     'toolchain.ninja')
      if not self.Exists(toolchain_path):
        self.Print('Must either specify a path to an existing GN build dir '
                   'or pass in a -m/-b pair or a -c flag to specify the '
                   'configuration')
        return {}
      else:
        mb_type = 'gn'
    else:
      mb_type = self.ReadFile(mb_type_path).strip()

    if mb_type == 'gn':
      vals['gn_args'] = self.GNArgsFromDir(build_dir)
    vals['type'] = mb_type

    return vals

  def GNArgsFromDir(self, build_dir):
    args_contents = ""
    gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn')
    if self.Exists(gn_args_path):
      args_contents = self.ReadFile(gn_args_path)
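    # args.gn holds one "name = value" assignment per line; the loop below
    # squashes each assignment to "name=value" (e.g. "is_debug = false"
    # becomes "is_debug=false") so the result looks like the gn_args strings
    # MB uses elsewhere.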
    gn_args = []
    for l in args_contents.splitlines():
      fields = l.split(' ')
      name = fields[0]
      val = ' '.join(fields[2:])
      gn_args.append('%s=%s' % (name, val))

    return ' '.join(gn_args)

  def Lookup(self):
    vals = self.ReadIOSBotConfig()
    if not vals:
      self.ReadConfigFile()
      config = self.ConfigFromArgs()
      if config.startswith('//'):
        if not self.Exists(self.ToAbsPath(config)):
          raise MBErr('args file "%s" not found' % config)
        vals = self.DefaultVals()
        vals['args_file'] = config
      else:
        if not config in self.configs:
          raise MBErr('Config "%s" not found in %s' %
                      (config, self.args.config_file))
        vals = self.FlattenConfig(config)

    # Do some basic sanity checking on the config so that we
    # don't have to do this in every caller.
    if 'type' not in vals:
      vals['type'] = 'gn'
    assert vals['type'] in ('gn', 'gyp'), (
        'Unknown meta-build type "%s"' % vals['type'])

    return vals

  def ReadIOSBotConfig(self):
    if not self.args.master or not self.args.builder:
      return {}
    path = self.PathJoin(self.chromium_src_dir, 'ios', 'build', 'bots',
                         self.args.master, self.args.builder + '.json')
    if not self.Exists(path):
      return {}

    contents = json.loads(self.ReadFile(path))
    gyp_vals = contents.get('GYP_DEFINES', {})
    if isinstance(gyp_vals, dict):
      gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items())
    else:
      gyp_defines = ' '.join(gyp_vals)
    gn_args = ' '.join(contents.get('gn_args', []))

    vals = self.DefaultVals()
    vals['gn_args'] = gn_args
    vals['gyp_defines'] = gyp_defines
    vals['type'] = contents.get('mb_type', 'gn')
    return vals

  def ReadConfigFile(self):
    if not self.Exists(self.args.config_file):
      raise MBErr('config file not found at %s' % self.args.config_file)

    try:
      contents = ast.literal_eval(self.ReadFile(self.args.config_file))
    except SyntaxError as e:
      raise MBErr('Failed to parse config file "%s": %s' %
                  (self.args.config_file, e))

    self.configs = contents['configs']
    self.masters = contents['masters']
    self.mixins = contents['mixins']
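    # As a rough sketch (not the real contents), the config file is a Python
    # literal of the form:
    #
    #   {
    #     'masters': {'master.name': {'Builder Name': 'config_name', ...}},
    #     'configs': {'config_name': ['mixin_a', 'mixin_b'], ...},
    #     'mixins': {'mixin_a': {'gn_args': '...', 'mixins': [...]}, ...},
    #   }
    #
    # where a builder entry may also be a phase dict or a '//path/to/args.gn'
    # args file reference.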

  def ReadIsolateMap(self):
    if not self.Exists(self.args.isolate_map_file):
      raise MBErr('isolate map file not found at %s' %
                  self.args.isolate_map_file)
    try:
      return ast.literal_eval(self.ReadFile(self.args.isolate_map_file))
    except SyntaxError as e:
      raise MBErr('Failed to parse isolate map file "%s": %s' %
                  (self.args.isolate_map_file, e))

  def ConfigFromArgs(self):
    if self.args.config:
      if self.args.master or self.args.builder:
        raise MBErr('Can not specify both -c/--config and -m/--master or '
                    '-b/--builder')

      return self.args.config

    if not self.args.master or not self.args.builder:
      raise MBErr('Must specify either -c/--config or '
                  '(-m/--master and -b/--builder)')

    if not self.args.master in self.masters:
      raise MBErr('Master name "%s" not found in "%s"' %
                  (self.args.master, self.args.config_file))

    if not self.args.builder in self.masters[self.args.master]:
      raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
                  (self.args.builder, self.args.master, self.args.config_file))

    config = self.masters[self.args.master][self.args.builder]
    if isinstance(config, dict):
      if self.args.phase is None:
        raise MBErr('Must specify a build --phase for %s on %s' %
                    (self.args.builder, self.args.master))
      phase = str(self.args.phase)
      if phase not in config:
        raise MBErr('Phase %s doesn\'t exist for %s on %s' %
                    (phase, self.args.builder, self.args.master))
      return config[phase]

    if self.args.phase is not None:
      raise MBErr('Must not specify a build --phase for %s on %s' %
                  (self.args.builder, self.args.master))
    return config

  def FlattenConfig(self, config):
    mixins = self.configs[config]
    vals = self.DefaultVals()

    visited = []
    self.FlattenMixins(mixins, vals, visited)
    return vals

  def DefaultVals(self):
    return {
      'args_file': '',
      'cros_passthrough': False,
      'gn_args': '',
      'gyp_defines': '',
      'gyp_crosscompile': False,
      'type': 'gn',
    }

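  # FlattenMixins folds the named mixins into `vals`, depth-first. As a rough
  # example (mixin names hypothetical): flattening ['gn', 'debug_bot'], where
  # 'debug_bot' carries gn_args 'is_debug=true', leaves vals['type'] == 'gn'
  # and vals['gn_args'] == 'is_debug=true'; gn_args/gyp_defines contributed by
  # later mixins are appended with a space.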
  def FlattenMixins(self, mixins, vals, visited):
    for m in mixins:
      if m not in self.mixins:
        raise MBErr('Unknown mixin "%s"' % m)

      visited.append(m)

      mixin_vals = self.mixins[m]

      if 'cros_passthrough' in mixin_vals:
        vals['cros_passthrough'] = mixin_vals['cros_passthrough']
      if 'gn_args' in mixin_vals:
        if vals['gn_args']:
          vals['gn_args'] += ' ' + mixin_vals['gn_args']
        else:
          vals['gn_args'] = mixin_vals['gn_args']
      if 'gyp_crosscompile' in mixin_vals:
        vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
      if 'gyp_defines' in mixin_vals:
        if vals['gyp_defines']:
          vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
        else:
          vals['gyp_defines'] = mixin_vals['gyp_defines']
      if 'type' in mixin_vals:
        vals['type'] = mixin_vals['type']

      if 'mixins' in mixin_vals:
        self.FlattenMixins(mixin_vals['mixins'], vals, visited)
    return vals

  def ClobberIfNeeded(self, vals):
    path = self.args.path[0]
    build_dir = self.ToAbsPath(path)
    mb_type_path = self.PathJoin(build_dir, 'mb_type')
    needs_clobber = False
    new_mb_type = vals['type']
    if self.Exists(build_dir):
      if self.Exists(mb_type_path):
        old_mb_type = self.ReadFile(mb_type_path)
        if old_mb_type != new_mb_type:
          self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
                     (old_mb_type, new_mb_type, path))
          needs_clobber = True
      else:
        # There is no 'mb_type' file in the build directory, so this probably
        # means that the prior build(s) were not done through mb, and we
        # have no idea if this was a GYP build or a GN build. Clobber it
        # to be safe.
        self.Print("%s/mb_type missing, clobbering to be safe" % path)
        needs_clobber = True

    if self.args.dryrun:
      return

    if needs_clobber:
      self.RemoveDirectory(build_dir)

    self.MaybeMakeDirectory(build_dir)
    self.WriteFile(mb_type_path, new_mb_type)

  def RunGNGen(self, vals):
    build_dir = self.args.path[0]

    cmd = self.GNCmd('gen', build_dir, '--check')
    gn_args = self.GNArgs(vals)

    # Since GN hasn't run yet, the build directory may not even exist.
    self.MaybeMakeDirectory(self.ToAbsPath(build_dir))

    gn_args_path = self.ToAbsPath(build_dir, 'args.gn')
    self.WriteFile(gn_args_path, gn_args, force_verbose=True)

    swarming_targets = []
    if getattr(self.args, 'swarming_targets_file', None):
      # We need GN to generate the list of runtime dependencies for
      # the compile targets listed (one per line) in the file so
      # we can run them via swarming. We use gn_isolate_map.pyl to convert
      # the compile targets to the matching GN labels.
      path = self.args.swarming_targets_file
      if not self.Exists(path):
        self.WriteFailureAndRaise('"%s" does not exist' % path,
                                  output_path=None)
      contents = self.ReadFile(path)
      swarming_targets = set(contents.splitlines())

      isolate_map = self.ReadIsolateMap()
      err, labels = self.MapTargetsToLabels(isolate_map, swarming_targets)
      if err:
        raise MBErr(err)

      gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps')
      self.WriteFile(gn_runtime_deps_path, '\n'.join(labels) + '\n')
      cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)

    ret, _, _ = self.Run(cmd)
    if ret:
      # If `gn gen` failed, we should exit early rather than trying to
      # generate isolates. Run() will have already logged any error output.
      self.Print('GN gen failed: %d' % ret)
      return ret

    android = 'target_os="android"' in vals['gn_args']
    for target in swarming_targets:
      if android:
        # Android targets may be either android_apk or executable. The former
        # will result in runtime_deps associated with the stamp file, while the
        # latter will result in runtime_deps associated with the executable.
        label = isolate_map[target]['label']
        runtime_deps_targets = [
            target + '.runtime_deps',
            'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
      elif isolate_map[target]['type'] == 'gpu_browser_test':
        if self.platform == 'win32':
          runtime_deps_targets = ['browser_tests.exe.runtime_deps']
        else:
          runtime_deps_targets = ['browser_tests.runtime_deps']
      elif (isolate_map[target]['type'] == 'script' or
            isolate_map[target].get('label_type') == 'group'):
        # For script targets, the build target is usually a group,
        # for which gn generates the runtime_deps next to the stamp file
        # for the label, which lives under the obj/ directory, but it may
        # also be an executable.
        label = isolate_map[target]['label']
        runtime_deps_targets = [
            'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
        if self.platform == 'win32':
          runtime_deps_targets += [ target + '.exe.runtime_deps' ]
        else:
          runtime_deps_targets += [ target + '.runtime_deps' ]
      elif self.platform == 'win32':
        runtime_deps_targets = [target + '.exe.runtime_deps']
      else:
        runtime_deps_targets = [target + '.runtime_deps']
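      # For example, a plain executable target (say 'foo_unittests', name
      # hypothetical) resolves to 'foo_unittests.runtime_deps' here, or to
      # 'foo_unittests.exe.runtime_deps' on Windows; the first candidate that
      # GN actually wrote is the one used below.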

      for r in runtime_deps_targets:
        runtime_deps_path = self.ToAbsPath(build_dir, r)
        if self.Exists(runtime_deps_path):
          break
      else:
        raise MBErr('did not generate any of %s' %
                    ', '.join(runtime_deps_targets))

      command, extra_files = self.GetIsolateCommand(target, vals)

      runtime_deps = self.ReadFile(runtime_deps_path).splitlines()

      self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
                             extra_files)

    return 0

  def RunGNIsolate(self, vals):
    target = self.args.target[0]
    isolate_map = self.ReadIsolateMap()
    err, labels = self.MapTargetsToLabels(isolate_map, [target])
    if err:
      raise MBErr(err)
    label = labels[0]

    build_dir = self.args.path[0]
    command, extra_files = self.GetIsolateCommand(target, vals)

    cmd = self.GNCmd('desc', build_dir, label, 'runtime_deps')
    ret, out, _ = self.Call(cmd)
    if ret:
      if out:
        self.Print(out)
      return ret

    runtime_deps = out.splitlines()

    self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
                           extra_files)

    ret, _, _ = self.Run([
        self.executable,
        self.PathJoin('tools', 'swarming_client', 'isolate.py'),
        'check',
        '-i',
        self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
        '-s',
        self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))],
        buffer_output=False)

    return ret

  def WriteIsolateFiles(self, build_dir, command, target, runtime_deps,
                        extra_files):
    isolate_path = self.ToAbsPath(build_dir, target + '.isolate')
    self.WriteFile(isolate_path,
                   pprint.pformat({
                     'variables': {
                       'command': command,
                       'files': sorted(runtime_deps + extra_files),
                     }
                   }) + '\n')

    self.WriteJSON(
        {
          'args': [
            '--isolated',
            self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
            '--isolate',
            self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
          ],
          'dir': self.chromium_src_dir,
          'version': 1,
        },
        isolate_path + 'd.gen.json',
    )

  def MapTargetsToLabels(self, isolate_map, targets):
    labels = []
    err = ''

    def StripTestSuffixes(target):
      for suffix in ('_apk_run', '_apk', '_run'):
        if target.endswith(suffix):
          return target[:-len(suffix)], suffix
      return None, None
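    # e.g. StripTestSuffixes('foo_tests_run') -> ('foo_tests', '_run'), and
    # StripTestSuffixes('foo_tests') -> (None, None).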

    for target in targets:
      if target == 'all':
        labels.append(target)
      elif target.startswith('//'):
        labels.append(target)
      else:
        if target in isolate_map:
          stripped_target, suffix = target, ''
        else:
          stripped_target, suffix = StripTestSuffixes(target)
        if stripped_target in isolate_map:
          if isolate_map[stripped_target]['type'] == 'unknown':
            err += ('test target "%s" type is unknown\n' % target)
          else:
            labels.append(isolate_map[stripped_target]['label'] + suffix)
        else:
          err += ('target "%s" not found in '
                  '//testing/buildbot/gn_isolate_map.pyl\n' % target)

    return err, labels

  def GNCmd(self, subcommand, path, *args):
    if self.platform == 'linux2':
      subdir, exe = 'linux64', 'gn'
    elif self.platform == 'darwin':
      subdir, exe = 'mac', 'gn'
    else:
      subdir, exe = 'win', 'gn.exe'

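    # The resulting command looks like, e.g. on Linux:
    #   ['<chromium_src_dir>/buildtools/linux64/gn', 'gen', '//out/Default',
    #    '--check']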
    gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, exe)
    return [gn_path, subcommand, path] + list(args)


  def GNArgs(self, vals):
    if vals['cros_passthrough']:
      if not 'GN_ARGS' in os.environ:
        raise MBErr('MB is expecting GN_ARGS to be in the environment')
      gn_args = os.environ['GN_ARGS']
      if not re.search('target_os.*=.*"chromeos"', gn_args):
        raise MBErr('GN_ARGS is missing target_os = "chromeos": (GN_ARGS=%s)' %
                    gn_args)
    else:
      gn_args = vals['gn_args']

    if self.args.goma_dir:
      gn_args += ' goma_dir="%s"' % self.args.goma_dir

    android_version_code = self.args.android_version_code
    if android_version_code:
      gn_args += ' android_default_version_code="%s"' % android_version_code

    android_version_name = self.args.android_version_name
    if android_version_name:
      gn_args += ' android_default_version_name="%s"' % android_version_name

    # Canonicalize the arg string into a sorted, newline-separated list
    # of key-value pairs, and de-dup the keys if need be so that only
    # the last instance of each arg is listed.
    gn_args = gn_helpers.ToGNString(gn_helpers.FromGNArgs(gn_args))
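    # Roughly (sketch): 'use_goma=true is_debug=true is_debug=false' comes
    # back as the canonical text 'is_debug = false\nuse_goma = true\n'.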

    args_file = vals.get('args_file', None)
    if args_file:
      gn_args = ('import("%s")\n' % vals['args_file']) + gn_args
    return gn_args

  def RunGYPGen(self, vals):
    path = self.args.path[0]

    output_dir = self.ParseGYPConfigPath(path)
    cmd, env = self.GYPCmd(output_dir, vals)
    ret, _, _ = self.Run(cmd, env=env)
    return ret

  def RunGYPAnalyze(self, vals):
    output_dir = self.ParseGYPConfigPath(self.args.path[0])
    if self.args.verbose:
      inp = self.ReadInputJSON(['files', 'test_targets',
                                'additional_compile_targets'])
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()

    cmd, env = self.GYPCmd(output_dir, vals)
    cmd.extend(['-f', 'analyzer',
                '-G', 'config_path=%s' % self.args.input_path[0],
                '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
    ret, _, _ = self.Run(cmd, env=env)
    if not ret and self.args.verbose:
      outp = json.loads(self.ReadFile(self.args.output_path[0]))
      self.Print()
      self.Print('analyze output:')
      self.PrintJSON(outp)
      self.Print()

    return ret

  def GetIsolateCommand(self, target, vals):
    android = 'target_os="android"' in vals['gn_args']

    # This needs to mirror the settings in //build/config/ui.gni:
    # use_x11 = is_linux && !use_ozone.
    use_x11 = (self.platform == 'linux2' and
               not android and
               not 'use_ozone=true' in vals['gn_args'])

    asan = 'is_asan=true' in vals['gn_args']
    msan = 'is_msan=true' in vals['gn_args']
    tsan = 'is_tsan=true' in vals['gn_args']

    isolate_map = self.ReadIsolateMap()
    test_type = isolate_map[target]['type']

    executable = isolate_map[target].get('executable', target)
    executable_suffix = '.exe' if self.platform == 'win32' else ''
    executable_prefix = '.\\' if self.platform == 'win32' else './'

    cmdline = []
    extra_files = []
    common_cmdline = [
        executable_prefix + str(executable) + executable_suffix,
        '--',
        '--asan=%d' % asan,
        '--msan=%d' % msan,
        '--tsan=%d' % tsan,
    ]
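    # With these defaults, a console test (say target 'foo_tests', name
    # hypothetical) on Linux ends up with roughly:
    #   ['../../testing/test_env.py', 'python',
    #    '../../third_party/gtest-parallel/gtest-parallel',
    #    './foo_tests', '--', '--asan=0', '--msan=0', '--tsan=0']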

    if test_type == 'nontest':
      self.WriteFailureAndRaise('We should not be isolating %s.' % target,
                                output_path=None)

    if android and test_type != "script":
      logdog_command = [
          '--logdog-bin-cmd', './../../bin/logdog_butler',
          '--project', 'chromium',
          '--service-account-json',
          '/creds/service_accounts/service-account-luci-logdog-publisher.json',
          '--prefix', 'android/swarming/logcats/${SWARMING_TASK_ID}',
          '--source', '${ISOLATED_OUTDIR}/logcats',
          '--name', 'unified_logcats',
      ]
      test_cmdline = [
          self.PathJoin('bin', 'run_%s' % target),
          '--logcat-output-file', '${ISOLATED_OUTDIR}/logcats',
          '--target-devices-file', '${SWARMING_BOT_FILE}',
          '-v'
      ]
      cmdline = (['./../../build/android/test_wrapper/logdog_wrapper.py']
                 + logdog_command + test_cmdline)
    elif use_x11 and test_type == 'windowed_test_launcher':
      extra_files = [
          'xdisplaycheck',
          '../../testing/test_env.py',
          '../../testing/xvfb.py',
          '../../third_party/gtest-parallel/gtest-parallel',
      ]
      cmdline = [
          '../../testing/xvfb.py',
          '.',
          'python',
          '../../third_party/gtest-parallel/gtest-parallel',
      ] + common_cmdline
    elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
      extra_files = [
          '../../testing/test_env.py',
          '../../third_party/gtest-parallel/gtest-parallel',
      ]
      cmdline = [
          '../../testing/test_env.py',
          'python',
          '../../third_party/gtest-parallel/gtest-parallel',
      ] + common_cmdline
    elif test_type == 'non_parallel_console_test_launcher':
      extra_files = [
          '../../testing/test_env.py',
      ]
      cmdline = [
          '../../testing/test_env.py',
      ] + common_cmdline
    else:
      self.WriteFailureAndRaise('No command line for %s found (test type %s).'
                                % (target, test_type), output_path=None)

    cmdline += isolate_map[target].get('args', [])

    return cmdline, extra_files

  def ToAbsPath(self, build_path, *comps):
    return self.PathJoin(self.chromium_src_dir,
                         self.ToSrcRelPath(build_path),
                         *comps)

  def ToSrcRelPath(self, path):
    """Returns a relative path from the top of the repo."""
    if path.startswith('//'):
      return path[2:].replace('/', self.sep)
    return self.RelPath(path, self.chromium_src_dir)

  def ParseGYPConfigPath(self, path):
    rpath = self.ToSrcRelPath(path)
    output_dir, _, _ = rpath.rpartition(self.sep)
    return output_dir

  def GYPCmd(self, output_dir, vals):
    if vals['cros_passthrough']:
      if not 'GYP_DEFINES' in os.environ:
        raise MBErr('MB is expecting GYP_DEFINES to be in the environment')
      gyp_defines = os.environ['GYP_DEFINES']
      if not 'chromeos=1' in gyp_defines:
        raise MBErr('GYP_DEFINES is missing chromeos=1: (GYP_DEFINES=%s)' %
                    gyp_defines)
    else:
      gyp_defines = vals['gyp_defines']

    goma_dir = self.args.goma_dir

    # GYP uses shlex.split() to split the gyp defines into separate arguments,
    # so we can support backslashes and spaces in arguments by quoting
    # them, even on Windows, where this normally wouldn't work.
    if goma_dir and ('\\' in goma_dir or ' ' in goma_dir):
      goma_dir = "'%s'" % goma_dir
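    # e.g. (sketch) a goma dir of C:\goma dir is passed on as
    # gomadir='C:\goma dir', which shlex.split() keeps as a single define.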

    if goma_dir:
      gyp_defines += ' gomadir=%s' % goma_dir

    android_version_code = self.args.android_version_code
    if android_version_code:
      gyp_defines += ' app_manifest_version_code=%s' % android_version_code

    android_version_name = self.args.android_version_name
    if android_version_name:
      gyp_defines += ' app_manifest_version_name=%s' % android_version_name

    cmd = [
        self.executable,
        self.args.gyp_script,
        '-G',
        'output_dir=' + output_dir,
    ]

    # Ensure that we have an environment that only contains
    # the exact values of the GYP variables we need.
    env = os.environ.copy()

    # This is a terrible hack to work around the fact that
    # //tools/clang/scripts/update.py is invoked by GYP and GN but
    # currently relies on an environment variable to figure out
    # what revision to embed in the command line #defines.
    # For GN, we've made this work via a gn arg that will cause update.py
    # to get an additional command line arg, but getting that to work
    # via GYP_DEFINES has proven difficult, so we rewrite the GYP_DEFINES
    # to get rid of the arg and add the old var in, instead.
    # See crbug.com/582737 for more on this. This can hopefully all
    # go away with GYP.
    m = re.search('llvm_force_head_revision=1\s*', gyp_defines)
    if m:
      env['LLVM_FORCE_HEAD_REVISION'] = '1'
      gyp_defines = gyp_defines.replace(m.group(0), '')

    # This is another terrible hack to work around the fact that
    # GYP sets the link concurrency to use via the GYP_LINK_CONCURRENCY
    # environment variable, and not via a proper GYP_DEFINE. See
    # crbug.com/611491 for more on this.
    m = re.search('gyp_link_concurrency=(\d+)(\s*)', gyp_defines)
    if m:
      env['GYP_LINK_CONCURRENCY'] = m.group(1)
      gyp_defines = gyp_defines.replace(m.group(0), '')

    env['GYP_GENERATORS'] = 'ninja'
    if 'GYP_CHROMIUM_NO_ACTION' in env:
      del env['GYP_CHROMIUM_NO_ACTION']
    if 'GYP_CROSSCOMPILE' in env:
      del env['GYP_CROSSCOMPILE']
    env['GYP_DEFINES'] = gyp_defines
    if vals['gyp_crosscompile']:
      env['GYP_CROSSCOMPILE'] = '1'
    return cmd, env

  def RunGNAnalyze(self, vals):
    # Analyze runs before 'gn gen' now, so we need to run gn gen
    # in order to ensure that we have a build directory.
    ret = self.RunGNGen(vals)
    if ret:
      return ret

    build_path = self.args.path[0]
    input_path = self.args.input_path[0]
    gn_input_path = input_path + '.gn'
    output_path = self.args.output_path[0]
    gn_output_path = output_path + '.gn'

    inp = self.ReadInputJSON(['files', 'test_targets',
                              'additional_compile_targets'])
    if self.args.verbose:
      self.Print()
      self.Print('analyze input:')
      self.PrintJSON(inp)
      self.Print()


    # This shouldn't normally happen, but could due to unusual race conditions,
    # like a try job that gets scheduled before a patch lands but runs after
    # the patch has landed.
    if not inp['files']:
      self.Print('Warning: No files modified in patch, bailing out early.')
      self.WriteJSON({
          'status': 'No dependency',
          'compile_targets': [],
          'test_targets': [],
      }, output_path)
      return 0

    gn_inp = {}
    gn_inp['files'] = ['//' + f for f in inp['files'] if not f.startswith('//')]

    isolate_map = self.ReadIsolateMap()
    err, gn_inp['additional_compile_targets'] = self.MapTargetsToLabels(
        isolate_map, inp['additional_compile_targets'])
    if err:
      raise MBErr(err)

    err, gn_inp['test_targets'] = self.MapTargetsToLabels(
        isolate_map, inp['test_targets'])
    if err:
      raise MBErr(err)
    labels_to_targets = {}
    for i, label in enumerate(gn_inp['test_targets']):
      labels_to_targets[label] = inp['test_targets'][i]

    try:
      self.WriteJSON(gn_inp, gn_input_path)
      cmd = self.GNCmd('analyze', build_path, gn_input_path, gn_output_path)
      ret, _, _ = self.Run(cmd, force_verbose=True)
      if ret:
        return ret

      gn_outp_str = self.ReadFile(gn_output_path)
      try:
        gn_outp = json.loads(gn_outp_str)
      except Exception as e:
        self.Print("Failed to parse the JSON string GN returned: %s\n%s"
                   % (repr(gn_outp_str), str(e)))
        raise

      outp = {}
      if 'status' in gn_outp:
        outp['status'] = gn_outp['status']
      if 'error' in gn_outp:
        outp['error'] = gn_outp['error']
      if 'invalid_targets' in gn_outp:
        outp['invalid_targets'] = gn_outp['invalid_targets']
      if 'compile_targets' in gn_outp:
        if 'all' in gn_outp['compile_targets']:
          outp['compile_targets'] = ['all']
        else:
          outp['compile_targets'] = [
              label.replace('//', '') for label in gn_outp['compile_targets']]
      if 'test_targets' in gn_outp:
        outp['test_targets'] = [
            labels_to_targets[label] for label in gn_outp['test_targets']]

      if self.args.verbose:
        self.Print()
        self.Print('analyze output:')
        self.PrintJSON(outp)
        self.Print()

      self.WriteJSON(outp, output_path)

    finally:
      if self.Exists(gn_input_path):
        self.RemoveFile(gn_input_path)
      if self.Exists(gn_output_path):
        self.RemoveFile(gn_output_path)

    return 0

  def ReadInputJSON(self, required_keys):
    path = self.args.input_path[0]
    output_path = self.args.output_path[0]
    if not self.Exists(path):
      self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)

    try:
      inp = json.loads(self.ReadFile(path))
    except Exception as e:
      self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
                                (path, e), output_path)

    for k in required_keys:
      if not k in inp:
        self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
                                  output_path)

    return inp

  def WriteFailureAndRaise(self, msg, output_path):
    if output_path:
      self.WriteJSON({'error': msg}, output_path, force_verbose=True)
    raise MBErr(msg)

  def WriteJSON(self, obj, path, force_verbose=False):
    try:
      self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n',
                     force_verbose=force_verbose)
    except Exception as e:
      raise MBErr('Error %s writing to the output path "%s"' %
                  (e, path))

  def CheckCompile(self, master, builder):
    url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1'
    url = urllib2.quote(url_template.format(master=master, builder=builder),
                        safe=':/()?=')
    try:
      builds = json.loads(self.Fetch(url))
    except Exception as e:
      return str(e)
    successes = sorted(
        [int(x) for x in builds.keys() if "text" in builds[x] and
         cmp(builds[x]["text"][:2], ["build", "successful"]) == 0],
        reverse=True)
    if not successes:
      return "no successful builds"
    build = builds[str(successes[0])]
    step_names = set([step["name"] for step in build["steps"]])
    compile_indicators = set(["compile", "compile (with patch)", "analyze"])
    if compile_indicators & step_names:
      return "compiles"
    return "does not compile"

  def PrintCmd(self, cmd, env):
    if self.platform == 'win32':
      env_prefix = 'set '
      env_quoter = QuoteForSet
      shell_quoter = QuoteForCmd
    else:
      env_prefix = ''
      env_quoter = pipes.quote
      shell_quoter = pipes.quote

    def print_env(var):
      if env and var in env:
        self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))

    print_env('GYP_CROSSCOMPILE')
    print_env('GYP_DEFINES')
    print_env('GYP_LINK_CONCURRENCY')
    print_env('LLVM_FORCE_HEAD_REVISION')

    if cmd[0] == self.executable:
      cmd = ['python'] + cmd[1:]
    self.Print(*[shell_quoter(arg) for arg in cmd])

  def PrintJSON(self, obj):
    self.Print(json.dumps(obj, indent=2, sort_keys=True))

  def Build(self, target):
    build_dir = self.ToSrcRelPath(self.args.path[0])
    ninja_cmd = ['ninja', '-C', build_dir]
    if self.args.jobs:
      ninja_cmd.extend(['-j', '%d' % self.args.jobs])
    ninja_cmd.append(target)
    ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False)
    return ret

  def Run(self, cmd, env=None, force_verbose=True, buffer_output=True):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.PrintCmd(cmd, env)
    if self.args.dryrun:
      return 0, '', ''

    ret, out, err = self.Call(cmd, env=env, buffer_output=buffer_output)
    if self.args.verbose or force_verbose:
      if ret:
        self.Print(' -> returned %d' % ret)
      if out:
        self.Print(out, end='')
      if err:
        self.Print(err, end='', file=sys.stderr)
    return ret, out, err

  def Call(self, cmd, env=None, buffer_output=True):
    if buffer_output:
      p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
                           stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           env=env)
      out, err = p.communicate()
    else:
      p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
                           env=env)
      p.wait()
      out = err = ''
    return p.returncode, out, err

  def ExpandUser(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.expanduser(path)

  def Exists(self, path):
    # This function largely exists so it can be overridden for testing.
    return os.path.exists(path)

  def Fetch(self, url):
    # This function largely exists so it can be overridden for testing.
    f = urllib2.urlopen(url)
    contents = f.read()
    f.close()
    return contents

  def MaybeMakeDirectory(self, path):
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise

  def PathJoin(self, *comps):
    # This function largely exists so it can be overridden for testing.
    return os.path.join(*comps)

  def Print(self, *args, **kwargs):
    # This function largely exists so it can be overridden for testing.
    print(*args, **kwargs)
    if kwargs.get('file', sys.stdout) == sys.stdout:
      sys.stdout.flush()

  def ReadFile(self, path):
    # This function largely exists so it can be overridden for testing.
    with open(path) as fp:
      return fp.read()

  def RelPath(self, path, start='.'):
    # This function largely exists so it can be overridden for testing.
    return os.path.relpath(path, start)

  def RemoveFile(self, path):
    # This function largely exists so it can be overridden for testing.
    os.remove(path)

  def RemoveDirectory(self, abs_path):
    if self.platform == 'win32':
      # In other places in chromium, we often have to retry this command
      # because we're worried about other processes still holding on to
      # file handles, but when MB is invoked, it will be early enough in the
      # build that there should be no other processes to interfere. We
      # can change this if need be.
      self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
    else:
      shutil.rmtree(abs_path, ignore_errors=True)

  def TempFile(self, mode='w'):
    # This function largely exists so it can be overridden for testing.
    return tempfile.NamedTemporaryFile(mode=mode, delete=False)

  def WriteFile(self, path, contents, force_verbose=False):
    # This function largely exists so it can be overridden for testing.
    if self.args.dryrun or self.args.verbose or force_verbose:
      self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
    with open(path, 'w') as fp:
      return fp.write(contents)


class MBErr(Exception):
  pass


# See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful
# details of this next section, which handles escaping command lines
# so that they can be copied and pasted into a cmd window.
UNSAFE_FOR_SET = set('^<>&|')
UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%'))
ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"'))
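# For example: QuoteForSet('a&b') gives 'a^&b', QuoteForCmd('a b') gives
# '"a b"', and QuoteForCmd('100%') gives '100^%'.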


def QuoteForSet(arg):
  if any(a in UNSAFE_FOR_SET for a in arg):
    arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg)
  return arg


def QuoteForCmd(arg):
  # First, escape the arg so that CommandLineToArgvW will parse it properly.
  # From //tools/gyp/pylib/gyp/msvs_emulation.py:23.
  if arg == '' or ' ' in arg or '"' in arg:
    quote_re = re.compile(r'(\\*)"')
    arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg))

  # Then check to see if the arg contains any metacharacters other than
  # double quotes; if it does, quote everything (including the double
  # quotes) for safety.
  if any(a in UNSAFE_FOR_CMD for a in arg):
    arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg)
  return arg


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))