#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Client tool to trigger tasks or retrieve results from a Swarming server."""

__version__ = '0.14'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time
import urllib

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils


class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (
        options.user,
        '_'.join('%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name
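
# Example (illustrative only, not part of the original file): with
# options.user='joe', options.dimensions=[('pool', 'Chrome')] and
# options.isolated='deadbeef', the generated name is u'joe/pool=Chrome/deadbeef'.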


### Triggering.


# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage',
    [
      'package_name',
      'path',
      'version',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput',
    [
      'client_package',
      'packages',
      'server',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef',
    [
      'isolated',
      'isolatedserver',
      'namespace',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
    'StringListPair',
    [
      'key',
      'value',  # repeated string
    ])


# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple(
    'Containment',
    [
      'lower_priority',
      'containment_type',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    [
      'caches',
      'cipd_input',
      'command',
      'containment',
      'relative_cwd',
      'dimensions',
      'env',
      'env_prefixes',
      'execution_timeout_secs',
      'extra_args',
      'grace_period_secs',
      'idempotent',
      'inputs_ref',
      'io_timeout_secs',
      'outputs',
      'secret_bytes',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
      'expiration_secs',
      'properties',
      'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
      'name',
      'parent_task_id',
      'priority',
      'task_slices',
      'service_account',
      'tags',
      'user',
      'pool_task_template',
    ])


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.iteritems()}
  return value
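
# Illustrative note (not in the original source): this is what turns the
# request namedtuples defined above into plain dicts and lists, e.g. a
# TaskSlice instance becomes {'expiration_secs': ..., 'properties': {...},
# 'wait_for_capacity': ...}, which json.dumps() can serialize directly.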


def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [
      {'key': k, 'value': v}
      for k, v in task_slice['properties']['env'].iteritems()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  return out


def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env
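
# Illustrative example (not in the original source): setup_googletest([], 4, 1)
# returns [{'key': 'GTEST_SHARD_INDEX', 'value': '1'},
#          {'key': 'GTEST_TOTAL_SHARDS', 'value': '4'}].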


def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the
        index of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If we
      # see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in xrange(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print >> sys.stderr, (
          'Priority was reset to %s' % task['request']['priority'])
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print >> sys.stderr, 'Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests))
    for task_dict in tasks.itervalues():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks


### Collection.


# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]
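
# Illustrative example (not in the original source):
#   TaskState.from_enum('COMPLETED') == TaskState.COMPLETED == 0x70
# while an unknown string such as 'SLEEPING' raises ValueError.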


class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      shard_count: expected number of task shards.
    """
    self.task_output_dir = (
        unicode(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.urlencode(
              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shuts down underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in xrange(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if it was not requested.
      if 'json' not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if 'output' in shard_json:
            del shard_json['output']
          if 'outputs' in shard_json:
            del shard_json['outputs']

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files."""
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
    return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)
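
# Illustrative example (not in the original source): both
# parse_time(u'2010-01-02T03:04:05.123456') and parse_time(u'2010-01-02T03:04:05')
# return datetime.datetime objects; any other layout raises ValueError.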


def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
                    base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
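
# Illustrative numbers (not in the original source): the backoff above starts
# at 1 second and grows by 1 second for every 30 seconds of waiting, so after
# 5 minutes of polling the delay is min(15, 1 + 300 / 30.0) = 11.0 seconds,
# and it is capped at 15 seconds thereafter.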


def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's
  normally not worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()


def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
        ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
        ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as strings to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
    ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
  ])


def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            'console' in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if 'console' in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join(' %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print >> sys.stderr, ('Results from some shards are missing: %s' %
        ', '.join(map(str, missing_shards)))
    return 1

  return exit_code if exit_code is not None else 1


### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Try to get automatically if cursors are
    # needed by looking at the 'cursor' items.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder
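
# Illustrative usage (not in the original source; the endpoint is shown only
# as an example): the first page is returned eagerly and the generator pages
# through the rest:
#   data, yielder = get_yielder(host + '/_ah/api/swarming/v1/bots/list', 0)
#   items = list(data.get('items') or [])
#   for batch in yielder():
#     items.extend(batch)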


### Commands.


def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.


def add_filter_options(parser):
  parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
  parser.filter_group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices ')
  parser.add_option_group(parser.filter_group)


def _validate_filter_option(parser, key, value, expiration, argname):
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is not None:
    try:
      expiration = int(expiration)
    except ValueError:
      parser.error('%s expiration is not an integer' % argname)
    if expiration <= 0:
      parser.error('%s expiration should be positive' % argname)
    if expiration % 60 != 0:
      parser.error('%s expiration is not divisible by 60' % argname)


def process_filter_options(parser, options):
  for key, value in options.dimensions:
    _validate_filter_option(parser, key, value, None, 'dimension')
  for key, value, exp in options.optional_dimensions:
    _validate_filter_option(parser, key, value, exp, 'optional-dimension')
  options.dimensions.sort()


def add_sharding_options(parser):
  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
  parser.sharding_group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(parser.sharding_group)


def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameter and return this result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  parser.add_option(
      '--lower-priority', action='store_true',
      help='Lowers the child process priority')
  containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
  parser.add_option(
      '--containment-type', default='NONE', metavar='NONE',
      choices=containment_choices,
      help='Containment to use; one of: %s' % ', '.join(containment_choices))
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according to the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by '
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided, if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001057
1058
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001059def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001060 """Processes trigger options and does preparatory steps.
1061
1062 Returns:
1063 NewTaskRequest instance.
1064 """
maruelaf6b06c2017-06-08 06:26:53 -07001065 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001066 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001067 if args and args[0] == '--':
1068 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001069
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001070 if not options.dimensions:
1071 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001072 if not any(k == 'pool' for k, _v in options.dimensions):
1073 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001074 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1075 parser.error('--tags must be in the format key:value')
1076 if options.raw_cmd and not args:
1077 parser.error(
1078 'Arguments with --raw-cmd should be passed after -- as command '
1079 'delimiter.')
1080 if options.isolate_server and not options.namespace:
1081 parser.error(
1082 '--namespace must be a valid value when --isolate-server is used')
1083 if not options.isolated and not options.raw_cmd:
1084 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1085
1086 # Isolated
1087 # --isolated is required only if --raw-cmd wasn't provided.
1088 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1089 # preferred server.
1090 isolateserver.process_isolate_server_options(
1091 parser, options, False, not options.raw_cmd)
1092 inputs_ref = None
1093 if options.isolate_server:
1094 inputs_ref = FilesRef(
1095 isolated=options.isolated,
1096 isolatedserver=options.isolate_server,
1097 namespace=options.namespace)
1098
1099 # Command
1100 command = None
1101 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001102 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001103 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001104 if options.relative_cwd:
1105 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1106 if not a.startswith(os.getcwd()):
1107 parser.error(
1108 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001109 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001110 if options.relative_cwd:
1111 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001112 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001113
maruel0a25f6c2017-05-10 10:43:23 -07001114 # CIPD
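  # Illustrative only (hypothetical path, package and version): a
  # --cipd-package value of 'tools:example/pkg:some-version' would install
  # example/pkg at 'some-version' under the 'tools' subdirectory of the task
  # root, per the path:package:version split below.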
borenet02f772b2016-06-22 12:42:19 -07001115 cipd_packages = []
1116 for p in options.cipd_package:
1117 split = p.split(':', 2)
1118 if len(split) != 3:
1119 parser.error('CIPD packages must take the form: path:package:version')
1120 cipd_packages.append(CipdPackage(
1121 package_name=split[1],
1122 path=split[0],
1123 version=split[2]))
1124 cipd_input = None
1125 if cipd_packages:
1126 cipd_input = CipdInput(
1127 client_package=None,
1128 packages=cipd_packages,
1129 server=None)
1130
maruel0a25f6c2017-05-10 10:43:23 -07001131 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001132 secret_bytes = None
1133 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001134 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001135 secret_bytes = f.read().encode('base64')
1136
maruel0a25f6c2017-05-10 10:43:23 -07001137 # Named caches
maruel681d6802017-01-17 16:56:03 -08001138 caches = [
1139 {u'name': unicode(i[0]), u'path': unicode(i[1])}
1140 for i in options.named_cache
1141 ]
maruel0a25f6c2017-05-10 10:43:23 -07001142
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001143 env_prefixes = {}
1144 for k, v in options.env_prefix:
1145 env_prefixes.setdefault(k, []).append(v)
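  # For illustration: options.env_prefix of [('PATH', 'bin'), ('PATH', 'tools')]
  # collapses to {'PATH': ['bin', 'tools']}, which becomes a single
  # StringListPair('PATH', ['bin', 'tools']) in the properties below.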
1146
Brad Hallf78187a2018-10-19 17:08:55 +00001147 # Get dimensions into the key/value format we can manipulate later.
1148 orig_dims = [
1149 {'key': key, 'value': value} for key, value in options.dimensions]
1150 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1151
1152 # Construct base properties that we will use for all the slices, adding in
1153 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001154 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001155 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001156 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001157 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001158 containment=Containment(
1159 lower_priority=bool(options.lower_priority),
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001160 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001161 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001162 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001163 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001164 env=options.env,
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001165 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.iteritems()],
maruel77f720b2015-09-15 12:35:22 -07001166 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001167 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001168 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001169 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001170 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001171 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001172 outputs=options.output,
1173 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001174
1175 slices = []
1176
1177 # Group the optional dimensions by expiration.
1178 dims_by_exp = {}
1179 for key, value, exp_secs in options.optional_dimensions:
1180 dims_by_exp.setdefault(int(exp_secs), []).append(
1181 {'key': key, 'value': value})
1182
1183 # Create the optional slices with expiration deltas; we fix up the properties
1184 # below.
1185 last_exp = 0
1186 for expiration_secs in sorted(dims_by_exp):
1187 t = TaskSlice(
1188 expiration_secs=expiration_secs - last_exp,
1189 properties=properties,
1190 wait_for_capacity=False)
1191 slices.append(t)
1192 last_exp = expiration_secs
1193
1194 # Add back in the default slice (the last one).
1195 exp = max(int(options.expiration) - last_exp, 60)
1196 base_task_slice = TaskSlice(
1197 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001198 properties=properties,
1199 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001200 slices.append(base_task_slice)
1201
Brad Hall7f463e62018-11-16 16:13:30 +00001202 # Add optional dimensions to the task slices, replacing a dimension that
1203 # has the same key if it is a dimension where repeating isn't valid (otherwise
1204 # we append it). Currently the only dimension we can repeat is "caches"; the
1205 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001206 extra_dims = []
Brad Hall7f463e62018-11-16 16:13:30 +00001207 for i, (_, kvs) in enumerate(sorted(dims_by_exp.iteritems(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001208 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001209 # Replace or append the key/value pairs for this expiration in extra_dims;
1210 # we keep extra_dims around because we are iterating backwards and filling
1211 # in slices with shorter expirations. Dimensions expire as time goes on so
1212 # the slices that expire earlier will generally have more dimensions.
1213 for kv in kvs:
1214 if kv['key'] == 'caches':
1215 extra_dims.append(kv)
1216 else:
1217 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1218 # Then, add all the optional dimensions to the original dimension set, again
1219 # replacing if needed.
1220 for kv in extra_dims:
1221 if kv['key'] == 'caches':
1222 dims.append(kv)
1223 else:
1224 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001225 dims.sort(key=lambda x: (x['key'], x['value']))
1226 slice_properties = properties._replace(dimensions=dims)
1227 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
1228
maruel77f720b2015-09-15 12:35:22 -07001229 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001230 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001231 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001232 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001233 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001234 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001235 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001236 user=options.user,
1237 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001238
1239
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001240class TaskOutputStdoutOption(optparse.Option):
1241 """Where to output the each task's console output (stderr/stdout).
1242
1243 The output will be;
1244 none - not be downloaded.
1245 json - stored in summary.json file *only*.
1246 console - shown on stdout *only*.
1247 all - stored in summary.json and shown on stdout.
1248 """
1249
1250 choices = ['all', 'json', 'console', 'none']
1251
1252 def __init__(self, *args, **kw):
1253 optparse.Option.__init__(
1254 self,
1255 *args,
1256 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001257 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001258 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001259 **kw)
1260
1261 def convert_value(self, opt, value):
1262 if value not in self.choices:
1263 raise optparse.OptionValueError("%s must be one of %s, not %r" % (
1264 self.get_opt_string(), self.choices, value))
1265 stdout_to = []
1266 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001267 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001268 elif value != 'none':
1269 stdout_to = [value]
1270 return stdout_to
1271
1272
maruel@chromium.org0437a732013-08-27 16:05:52 +00001273def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001274 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001275 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001276 help='Timeout to wait for result, set to -1 for no timeout and get '
1277 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001278 parser.group_logging.add_option(
1279 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001280 parser.group_logging.add_option(
1281 '--print-status-updates', action='store_true',
1282 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001283 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001284 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001285 '--task-summary-json',
1286 metavar='FILE',
1287 help='Dump a summary of task results to this file as json. It contains '
1288 'only shard statuses as known to the server directly. Any output files '
1289 'emitted by the task can be collected by using --task-output-dir')
1290 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001291 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001292 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001293 help='Directory to put task results into. When the task finishes, this '
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001294 'directory contains a per-shard directory with the output files produced '
1295 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001296 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001297 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001298 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001299 '--filepath-filter',
1300 help='Regexp filter used to select which file paths to download when '
1301 'collecting isolated output.')
1302 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001303 '--perf', action='store_true', default=False,
1304 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001305 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001306
1307
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001308def process_collect_options(parser, options):
1309 # Only negative -1 is allowed, disallow other negative values.
1310 if options.timeout != -1 and options.timeout < 0:
1311 parser.error('Invalid --timeout value')
1312
1313
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001314@subcommand.usage('bots...')
1315def CMDbot_delete(parser, args):
1316 """Forcibly deletes bots from the Swarming server."""
1317 parser.add_option(
1318 '-f', '--force', action='store_true',
1319 help='Do not prompt for confirmation')
1320 options, args = parser.parse_args(args)
1321 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001322 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001323
1324 bots = sorted(args)
1325 if not options.force:
1326 print('Delete the following bots?')
1327 for bot in bots:
1328 print(' %s' % bot)
1329 if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
1330 print('Goodbye.')
1331 return 1
1332
1333 result = 0
1334 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001335 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001336 if net.url_read_json(url, data={}, method='POST') is None:
1337 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001338 result = 1
1339 return result
1340
1341
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001342def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001343 """Returns information about the bots connected to the Swarming server."""
1344 add_filter_options(parser)
1345 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001346 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001347 help='Keep only dead bots; useful to reap them and reimage broken bots')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001348 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001349 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001350 help='Keep both dead and alive bots')
1351 parser.filter_group.add_option(
1352 '--busy', action='store_true', help='Keep only busy bots')
1353 parser.filter_group.add_option(
1354 '--idle', action='store_true', help='Keep only idle bots')
1355 parser.filter_group.add_option(
1356 '--mp', action='store_true',
1357 help='Keep only Machine Provider managed bots')
1358 parser.filter_group.add_option(
1359 '--non-mp', action='store_true',
1360 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001361 parser.filter_group.add_option(
1362 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001363 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001364 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001365 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001366
1367 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001368 parser.error('Use only one of --keep-dead or --dead-only')
1369 if options.busy and options.idle:
1370 parser.error('Use only one of --busy or --idle')
1371 if options.mp and options.non_mp:
1372 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001373
smut281c3902018-05-30 17:50:05 -07001374 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001375 values = []
1376 if options.dead_only:
1377 values.append(('is_dead', 'TRUE'))
1378 elif options.keep_dead:
1379 values.append(('is_dead', 'NONE'))
1380 else:
1381 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001382
maruelaf6b06c2017-06-08 06:26:53 -07001383 if options.busy:
1384 values.append(('is_busy', 'TRUE'))
1385 elif options.idle:
1386 values.append(('is_busy', 'FALSE'))
1387 else:
1388 values.append(('is_busy', 'NONE'))
1389
1390 if options.mp:
1391 values.append(('is_mp', 'TRUE'))
1392 elif options.non_mp:
1393 values.append(('is_mp', 'FALSE'))
1394 else:
1395 values.append(('is_mp', 'NONE'))
1396
1397 for key, value in options.dimensions:
1398 values.append(('dimensions', '%s:%s' % (key, value)))
1399 url += urllib.urlencode(values)
1400 try:
1401 data, yielder = get_yielder(url, 0)
1402 bots = data.get('items') or []
1403 for items in yielder():
1404 if items:
1405 bots.extend(items)
1406 except Failure as e:
1407 sys.stderr.write('\n%s\n' % e)
1408 return 1
maruel77f720b2015-09-15 12:35:22 -07001409 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
maruelaf6b06c2017-06-08 06:26:53 -07001410 print bot['bot_id']
1411 if not options.bare:
1412 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
1413 print ' %s' % json.dumps(dimensions, sort_keys=True)
1414 if bot.get('task_id'):
1415 print ' task: %s' % bot['task_id']
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001416 return 0
1417
1418
maruelfd0a90c2016-06-10 11:51:10 -07001419@subcommand.usage('task_id')
1420def CMDcancel(parser, args):
1421 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001422 parser.add_option(
1423 '-k', '--kill-running', action='store_true', default=False,
1424 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001425 options, args = parser.parse_args(args)
1426 if not args:
1427 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001428 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001429 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001430 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001431 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001432 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001433 print('Canceling %s failed. Probably already gone' % task_id)
1434 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001435 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001436 return 0
1437
1438
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001439@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001440def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001441 """Retrieves results of one or multiple Swarming task by its ID.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001442
1443 The result can be in multiple part if the execution was sharded. It can
1444 potentially have retries.
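
  Examples (illustrative server and task id values):
    swarming.py collect -S server-url.com 123456
    swarming.py collect -S server-url.com --json tasks.json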
1445 """
1446 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001447 parser.add_option(
1448 '-j', '--json',
1449 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001450 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001451 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001452 if not args and not options.json:
1453 parser.error('Must specify at least one task id or --json.')
1454 if args and options.json:
1455 parser.error('Only use one of task id or --json.')
1456
1457 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001458 options.json = unicode(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001459 try:
maruel1ceb3872015-10-14 06:10:44 -07001460 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001461 data = json.load(f)
1462 except (IOError, ValueError):
1463 parser.error('Failed to open %s' % options.json)
1464 try:
1465 tasks = sorted(
1466 data['tasks'].itervalues(), key=lambda x: x['shard_index'])
1467 args = [t['task_id'] for t in tasks]
1468 except (KeyError, TypeError):
1469 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001470 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001471 # Take in account all the task slices.
1472 offset = 0
1473 for s in data['request']['task_slices']:
1474 m = (offset + s['properties']['execution_timeout_secs'] +
1475 s['expiration_secs'])
1476 if m > options.timeout:
1477 options.timeout = m
1478 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001479 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001480 else:
1481 valid = frozenset('0123456789abcdef')
1482 if any(not valid.issuperset(task_id) for task_id in args):
1483 parser.error('Task ids are 0-9a-f.')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001484
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001485 try:
1486 return collect(
1487 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001488 args,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001489 options.timeout,
1490 options.decorate,
1491 options.print_status_updates,
1492 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001493 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001494 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001495 options.perf,
1496 options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001497 except Failure:
1498 on_error.report(None)
1499 return 1
1500
1501
maruel77f720b2015-09-15 12:35:22 -07001502@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001503def CMDpost(parser, args):
1504 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1505
1506 Input data must be sent to stdin, result is printed to stdout.
1507
1508 If HTTP response code >= 400, returns non-zero.
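
  Example (illustrative values; the endpoint and payload mirror what the
  'cancel' subcommand sends):
    echo '{"kill_running": false}' | \\
        swarming.py post -S server-url.com task/123456/cancel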
1509 """
1510 options, args = parser.parse_args(args)
1511 if len(args) != 1:
1512 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001513 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001514 data = sys.stdin.read()
1515 try:
1516 resp = net.url_read(url, data=data, method='POST')
1517 except net.TimeoutError:
1518 sys.stderr.write('Timeout!\n')
1519 return 1
1520 if not resp:
1521 sys.stderr.write('No response!\n')
1522 return 1
1523 sys.stdout.write(resp)
1524 return 0
1525
1526
1527@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001528def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001529 """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1530 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001531
1532 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001533 Raw task request and results:
1534 swarming.py query -S server-url.com task/123456/request
1535 swarming.py query -S server-url.com task/123456/result
1536
maruel77f720b2015-09-15 12:35:22 -07001537 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001538 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001539
maruelaf6b06c2017-06-08 06:26:53 -07001540 Listing last 10 tasks on a specific bot named 'bot1':
1541 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001542
maruelaf6b06c2017-06-08 06:26:53 -07001543 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001544 quoting is important!:
1545 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001546 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001547 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001548 parser.add_option(
1549 '-L', '--limit', type='int', default=200,
1550 help='Limit to enforce on limitless items (like number of tasks); '
1551 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001552 parser.add_option(
1553 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001554 parser.add_option(
1555 '--progress', action='store_true',
1556 help='Prints a dot at each request to show progress')
1557 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001558 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001559 parser.error(
1560 'Must specify only method name and optionally query args properly '
1561 'escaped.')
smut281c3902018-05-30 17:50:05 -07001562 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001563 try:
1564 data, yielder = get_yielder(base_url, options.limit)
1565 for items in yielder():
1566 if items:
1567 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001568 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001569 sys.stderr.write('.')
1570 sys.stderr.flush()
1571 except Failure as e:
1572 sys.stderr.write('\n%s\n' % e)
1573 return 1
maruel77f720b2015-09-15 12:35:22 -07001574 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001575 sys.stderr.write('\n')
1576 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001577 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001578 options.json = unicode(os.path.abspath(options.json))
1579 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001580 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001581 try:
maruel77f720b2015-09-15 12:35:22 -07001582 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001583 sys.stdout.write('\n')
1584 except IOError:
1585 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001586 return 0
1587
1588
maruel77f720b2015-09-15 12:35:22 -07001589def CMDquery_list(parser, args):
1590 """Returns list of all the Swarming APIs that can be used with command
1591 'query'.
1592 """
1593 parser.add_option(
1594 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1595 options, args = parser.parse_args(args)
1596 if args:
1597 parser.error('No argument allowed.')
1598
1599 try:
1600 apis = endpoints_api_discovery_apis(options.swarming)
1601 except APIError as e:
1602 parser.error(str(e))
1603 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001604 options.json = unicode(os.path.abspath(options.json))
1605 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001606 json.dump(apis, f)
1607 else:
1608 help_url = (
1609 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1610 options.swarming)
maruel11e31af2017-02-15 07:30:50 -08001611 for i, (api_id, api) in enumerate(sorted(apis.iteritems())):
1612 if i:
1613 print('')
maruel77f720b2015-09-15 12:35:22 -07001614 print api_id
maruel11e31af2017-02-15 07:30:50 -08001615 print ' ' + api['description'].strip()
1616 if 'resources' in api:
1617 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001618 # TODO(maruel): Remove.
1619 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001620 for j, (resource_name, resource) in enumerate(
1621 sorted(api['resources'].iteritems())):
1622 if j:
1623 print('')
1624 for method_name, method in sorted(resource['methods'].iteritems()):
1625 # Only list the GET ones.
1626 if method['httpMethod'] != 'GET':
1627 continue
1628 print '- %s.%s: %s' % (
1629 resource_name, method_name, method['path'])
1630 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001631 ' ' + l for l in textwrap.wrap(
1632 method.get('description', 'No description'), 78)))
maruel11e31af2017-02-15 07:30:50 -08001633 print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
1634 else:
1635 # New.
1636 for method_name, method in sorted(api['methods'].iteritems()):
maruel77f720b2015-09-15 12:35:22 -07001637 # Only list the GET ones.
1638 if method['httpMethod'] != 'GET':
1639 continue
maruel11e31af2017-02-15 07:30:50 -08001640 print '- %s: %s' % (method['id'], method['path'])
1641 print('\n'.join(
1642 ' ' + l for l in textwrap.wrap(method['description'], 78)))
maruel77f720b2015-09-15 12:35:22 -07001643 print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
1644 return 0
1645
1646
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001647@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001648def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001649 """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001650
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001651 Basically, does everything to run a command remotely.
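
  Example (illustrative values; <isolated hash> is a placeholder):
    swarming.py run -S server-url.com --dimension pool default \\
        <isolated hash> -- --some-flag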
maruel@chromium.org0437a732013-08-27 16:05:52 +00001652 """
1653 add_trigger_options(parser)
1654 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001655 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001656 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001657 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001658 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001659 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001660 tasks = trigger_task_shards(
1661 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001662 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001663 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001664 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001665 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001666 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001667 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001668 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001669 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001670 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001671 task_ids = [
1672 t['task_id']
1673 for t in sorted(tasks.itervalues(), key=lambda x: x['shard_index'])
1674 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001675 for task_id in task_ids:
1676 print('Task: {server}/task?id={task}'.format(
1677 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001678 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001679 offset = 0
1680 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001681 m = (offset + s.properties.execution_timeout_secs +
1682 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001683 if m > options.timeout:
1684 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001685 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001686 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001687 try:
1688 return collect(
1689 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001690 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001691 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001692 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001693 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001694 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001695 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001696 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001697 options.perf,
1698 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001699 except Failure:
1700 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001701 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001702
1703
maruel18122c62015-10-23 06:31:23 -07001704@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001705def CMDreproduce(parser, args):
1706 """Runs a task locally that was triggered on the server.
1707
1708 This runs locally the same commands that were run on the bot. The data
1709 downloaded will be in a subdirectory named 'work' of the current working
1710 directory.
maruel18122c62015-10-23 06:31:23 -07001711
1712 You can pass further additional arguments to the target command by passing
1713 them after --.
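
  Example (illustrative task id):
    swarming.py reproduce -S server-url.com 123456 -- --some-extra-flag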
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001714 """
maruelc070e672016-02-22 17:32:57 -08001715 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001716 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001717 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001718 parser.add_option(
1719 '--work', metavar='DIR', default='work',
1720 help='Directory to map the task input files into')
1721 parser.add_option(
1722 '--cache', metavar='DIR', default='cache',
1723 help='Directory that contains the input cache')
1724 parser.add_option(
1725 '--leak', action='store_true',
1726 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001727 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001728 extra_args = []
1729 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001730 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001731 if len(args) > 1:
1732 if args[1] == '--':
1733 if len(args) > 2:
1734 extra_args = args[2:]
1735 else:
1736 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001737
smut281c3902018-05-30 17:50:05 -07001738 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001739 request = net.url_read_json(url)
1740 if not request:
1741 print >> sys.stderr, 'Failed to retrieve request data for the task'
1742 return 1
1743
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001744 workdir = unicode(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001745 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001746 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001747 fs.mkdir(workdir)
iannucci31ab9192017-05-02 19:11:56 -07001748 cachedir = unicode(os.path.abspath('cipd_cache'))
1749 if not fs.exists(cachedir):
1750 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001751
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001752 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001753 env = os.environ.copy()
1754 env['SWARMING_BOT_ID'] = 'reproduce'
1755 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001756 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001757 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001758 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001759 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001760 if not i['value']:
1761 env.pop(key, None)
1762 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001763 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001764
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001765 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001766 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001767 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001768 for i in env_prefixes:
1769 key = i['key']
1770 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001771 cur = env.get(key)
1772 if cur:
1773 paths.append(cur)
1774 env[key] = os.path.pathsep.join(paths)
1775
iannucci31ab9192017-05-02 19:11:56 -07001776 command = []
nodir152cba62016-05-12 16:08:56 -07001777 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001778 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001779 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001780 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001781 properties['inputs_ref']['namespace'])
1782 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001783 # Do not use MemoryContentAddressedCache here, as on 32-bit Python,
1784 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1785 # leak.
1786 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001787 cache = local_caching.DiskContentAddressedCache(
Marc-Antoine Ruel79d42192019-02-06 19:24:16 +00001788 unicode(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001789 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001790 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001791 command = bundle.command
1792 if bundle.relative_cwd:
1793 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001794 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001795
1796 if properties.get('command'):
1797 command.extend(properties['command'])
1798
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001799 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Robert Iannucci24ae76a2018-02-26 12:51:18 -08001800 command = tools.fix_python_cmd(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001801 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001802 new_command = run_isolated.process_command(command, 'invalid', None)
1803 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001804 parser.error('The task has outputs, you must use --output-dir')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001805 else:
1806 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001807 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001808 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001809 command, options.output, None)
1810 if not os.path.isdir(options.output):
1811 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001812 command = new_command
1813 file_path.ensure_command_has_abs_path(command, workdir)
1814
1815 if properties.get('cipd_input'):
1816 ci = properties['cipd_input']
1817 cp = ci['client_package']
1818 client_manager = cipd.get_client(
1819 ci['server'], cp['package_name'], cp['version'], cachedir)
1820
1821 with client_manager as client:
1822 by_path = collections.defaultdict(list)
1823 for pkg in ci['packages']:
1824 path = pkg['path']
1825 # cipd deals with 'root' as ''
1826 if path == '.':
1827 path = ''
1828 by_path[path].append((pkg['package_name'], pkg['version']))
1829 client.ensure(workdir, by_path, cache_dir=cachedir)
1830
maruel77f720b2015-09-15 12:35:22 -07001831 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001832 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001833 except OSError as e:
maruel29ab2fd2015-10-16 11:44:01 -07001834 print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
maruel77f720b2015-09-15 12:35:22 -07001835 print >> sys.stderr, str(e)
1836 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001837 finally:
1838 # Do not delete options.cache.
1839 if not options.leak:
1840 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001841
1842
maruel0eb1d1b2015-10-02 14:48:21 -07001843@subcommand.usage('bot_id')
1844def CMDterminate(parser, args):
1845 """Tells a bot to gracefully shut itself down as soon as it can.
1846
1847 This is done by completing whatever current task there is and then exiting the bot
1848 process.
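
  Example (illustrative bot id):
    swarming.py terminate -S server-url.com --wait bot1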
1849 """
1850 parser.add_option(
1851 '--wait', action='store_true', help='Wait for the bot to terminate')
1852 options, args = parser.parse_args(args)
1853 if len(args) != 1:
1854 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001855 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001856 request = net.url_read_json(url, data={})
1857 if not request:
1858 print >> sys.stderr, 'Failed to ask for termination'
1859 return 1
1860 if options.wait:
1861 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001862 options.swarming,
1863 [request['task_id']],
1864 0.,
1865 False,
1866 False,
1867 None,
1868 None,
1869 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001870 False,
1871 None)
maruelbfc5f872017-06-10 16:43:17 -07001872 else:
1873 print request['task_id']
maruel0eb1d1b2015-10-02 14:48:21 -07001874 return 0
1875
1876
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001877@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001878def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001879 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001880
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001881 Passes all extra arguments provided after '--' as additional command line
1882 arguments for an isolated command specified in a *.isolate file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001883 """
1884 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001885 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001886 parser.add_option(
1887 '--dump-json',
1888 metavar='FILE',
1889 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001890 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001891 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001892 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001893 tasks = trigger_task_shards(
1894 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001895 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001896 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001897 tasks_sorted = sorted(
1898 tasks.itervalues(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001899 if options.dump_json:
1900 data = {
maruel0a25f6c2017-05-10 10:43:23 -07001901 'base_task_name': task_request.name,
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001902 'tasks': tasks,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001903 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001904 }
maruel46b015f2015-10-13 18:40:35 -07001905 tools.write_json(unicode(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001906 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001907 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001908 (options.swarming, options.dump_json))
1909 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001910 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001911 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001912 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1913 print('Or visit:')
1914 for t in tasks_sorted:
1915 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001916 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001917 except Failure:
1918 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001919 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001920
1921
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001922class OptionParserSwarming(logging_utils.OptionParserWithLogging):
maruel@chromium.org0437a732013-08-27 16:05:52 +00001923 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001924 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001925 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001926 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001927 self.server_group.add_option(
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001928 '-S', '--swarming',
Kevin Graney5346c162014-01-24 12:20:01 -05001929 metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001930 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001931 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001932 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001933
1934 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001935 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001936 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001937 auth.process_auth_options(self, options)
1938 user = self._process_swarming(options)
1939 if hasattr(options, 'user') and not options.user:
1940 options.user = user
1941 return options, args
1942
1943 def _process_swarming(self, options):
1944 """Processes the --swarming option and aborts if not specified.
1945
1946 Returns the identity as determined by the server.
1947 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001948 if not options.swarming:
1949 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001950 try:
1951 options.swarming = net.fix_url(options.swarming)
1952 except ValueError as e:
1953 self.error('--swarming %s' % e)
1954 on_error.report_on_exception_exit(options.swarming)
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001955 try:
1956 user = auth.ensure_logged_in(options.swarming)
1957 except ValueError as e:
1958 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001959 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00001960
1961
1962def main(args):
1963 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001964 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001965
1966
1967if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07001968 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00001969 fix_encoding.fix_encoding()
1970 tools.disable_buffering()
1971 colorama.init()
1972 sys.exit(main(sys.argv[1:]))