#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Client tool to trigger tasks or retrieve results from a Swarming server."""

from __future__ import print_function

__version__ = '1.0'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time
import urllib

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils

class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (
        options.user,
        '_'.join('%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name
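# Illustrative example (hypothetical values): with options.user == 'jdoe',
# options.dimensions == [('os', 'Ubuntu'), ('pool', 'Chrome')] and no explicit
# --task-name, the generated name is u'jdoe/os=Ubuntu_pool=Chrome'; when
# options.isolated is set, its hash is appended as one more '/' component.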


### Triggering.


# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage',
    [
      'package_name',
      'path',
      'version',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput',
    [
      'client_package',
      'packages',
      'server',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef',
    [
      'isolated',
      'isolatedserver',
      'namespace',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
    'StringListPair', [
      'key',
      'value',  # repeated string
    ]
)

# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple(
    'Containment',
    [
      'lower_priority',
      'containment_type',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    [
      'caches',
      'cipd_input',
      'command',
      'containment',
      'relative_cwd',
      'dimensions',
      'env',
      'env_prefixes',
      'execution_timeout_secs',
      'extra_args',
      'grace_period_secs',
      'idempotent',
      'inputs_ref',
      'io_timeout_secs',
      'outputs',
      'secret_bytes',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
      'expiration_secs',
      'properties',
      'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
      'name',
      'parent_task_id',
      'priority',
      'task_slices',
      'service_account',
      'tags',
      'user',
      'pool_task_template',
    ])
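# Note (illustrative values, not part of the RPC definition): these tuples nest
# the same way the server messages do; a request is roughly
#   NewTaskRequest(
#       name=..., parent_task_id=None, priority=200,
#       task_slices=[
#         TaskSlice(
#             expiration_secs=3600,
#             properties=TaskProperties(...),
#             wait_for_capacity=False),
#       ],
#       service_account=None, tags=[], user='', pool_task_template='AUTO')
# before being converted to a raw dict by task_request_to_raw_request() below.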


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  return value


def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for a new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None, to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [
      {'key': k, 'value': v}
      for k, v in task_slice['properties']['env'].items()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  return out


def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env
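# Illustrative example: with shards=3 and index=1, an env list of
#   [{'key': 'FOO', 'value': 'bar'}]
# comes back as
#   [{'key': 'FOO', 'value': 'bar'},
#    {'key': 'GTEST_SHARD_INDEX', 'value': '1'},
#    {'key': 'GTEST_TOTAL_SHARDS', 'value': '3'}]
# The input list itself is not modified in place.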


def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the
          index of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If we
      # see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print('Priority was reset to %s' % task['request']['priority'],
            file=sys.stderr)
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print('Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests)), file=sys.stderr)
      for task_dict in tasks.values():
        abort_task(swarming, task_dict['task_id'])
    return None

  return tasks


### Collection.


# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]
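# For example, TaskState.from_enum('COMPLETED') == TaskState.COMPLETED == 0x70;
# any state not listed in TaskState.STATES_RUNNING ('PENDING', 'RUNNING') is
# treated as final by the polling code below.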


class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running the 'retrieve_results'
  function; in particular, they call the 'process_shard_result' method in
  parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      shard_count: expected number of task shards.
    """
    self.task_output_dir = (
        unicode(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.urlencode(
              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shuts down underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in range(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if not requested to.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files."""
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)
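# For example, both u'2010-01-02T03:04:05.123456' and u'2010-01-02T03:04:05'
# parse successfully; any other format raises ValueError.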


def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
                    base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result


def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's not
  normally worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()


def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
    ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
    ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as string to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
        ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
      ])
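# The returned text is an ASCII box: a 'Shard N <task url>' header, the task
# stdout (only when include_stdout is True), then an 'End of shard N' footer
# with the Pending/Duration/Bot/Exit summary line, all framed by '+---+' rules
# padded to the longest tag.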


def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            "console" in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join('  %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print('Results from some shards are missing: %s' %
          ', '.join(map(str, missing_shards)), file=sys.stderr)
    return 1

  return exit_code if exit_code is not None else 1


### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Automatically follow the cursor when one is
    # returned, by looking at the 'cursor' item.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder
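# Minimal usage sketch (the endpoint URL is illustrative): the first page is
# returned eagerly and the remaining pages are fetched lazily by the yielder:
#   data, yielder = get_yielder(swarming + '/_ah/api/swarming/v1/bots/list', 0)
#   items = data.get('items') or []
#   for batch in yielder():
#     items.extend(batch or [])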


### Commands.


def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.

def add_filter_options(parser):
  parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
  parser.filter_group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices ')
  parser.add_option_group(parser.filter_group)


def _validate_filter_option(parser, key, value, expiration, argname):
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is not None:
    try:
      expiration = int(expiration)
    except ValueError:
      parser.error('%s expiration is not an integer' % argname)
    if expiration <= 0:
      parser.error('%s expiration should be positive' % argname)
    if expiration % 60 != 0:
      parser.error('%s expiration is not divisible by 60' % argname)

def process_filter_options(parser, options):
  for key, value in options.dimensions:
    _validate_filter_option(parser, key, value, None, 'dimension')
  for key, value, exp in options.optional_dimensions:
    _validate_filter_option(parser, key, value, exp, 'optional-dimension')
  options.dimensions.sort()


def add_sharding_options(parser):
  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
  parser.sharding_group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(parser.sharding_group)

def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameters and return its result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  parser.add_option(
      '--lower-priority', action='store_true',
      help='Lowers the child process priority')
  containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
  parser.add_option(
      '--containment-type', default='NONE', metavar='NONE',
      choices=containment_choices,
      help='Containment to use; one of: %s' % ', '.join(containment_choices))
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according to the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by '
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower the value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided; if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)

Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001061def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001062 """Processes trigger options and does preparatory steps.
1063
1064 Returns:
1065 NewTaskRequest instance.
1066 """
maruelaf6b06c2017-06-08 06:26:53 -07001067 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001068 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001069 if args and args[0] == '--':
1070 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001071
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001072 if not options.dimensions:
1073 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001074 if not any(k == 'pool' for k, _v in options.dimensions):
1075 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001076 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1077 parser.error('--tags must be in the format key:value')
1078 if options.raw_cmd and not args:
1079 parser.error(
1080 'Arguments with --raw-cmd should be passed after -- as command '
1081 'delimiter.')
1082 if options.isolate_server and not options.namespace:
1083 parser.error(
1084 '--namespace must be a valid value when --isolate-server is used')
1085 if not options.isolated and not options.raw_cmd:
1086 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1087
1088 # Isolated
1089 # --isolated is required only if --raw-cmd wasn't provided.
1090 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1091 # preferred server.
1092 isolateserver.process_isolate_server_options(
1093 parser, options, False, not options.raw_cmd)
1094 inputs_ref = None
1095 if options.isolate_server:
1096 inputs_ref = FilesRef(
1097 isolated=options.isolated,
1098 isolatedserver=options.isolate_server,
1099 namespace=options.namespace)
1100
1101 # Command
1102 command = None
1103 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001104 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001105 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001106 if options.relative_cwd:
1107 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1108 if not a.startswith(os.getcwd()):
1109 parser.error(
1110 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001111 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001112 if options.relative_cwd:
1113 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001114 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001115
maruel0a25f6c2017-05-10 10:43:23 -07001116 # CIPD
borenet02f772b2016-06-22 12:42:19 -07001117 cipd_packages = []
1118 for p in options.cipd_package:
1119 split = p.split(':', 2)
1120 if len(split) != 3:
1121 parser.error('CIPD packages must take the form: path:package:version')
1122 cipd_packages.append(CipdPackage(
1123 package_name=split[1],
1124 path=split[0],
1125 version=split[2]))
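  # Illustrative example (package name and version are placeholders): the value
  #   tools:some/cipd/pkg/${platform}:latest
  # is parsed as CipdPackage(package_name='some/cipd/pkg/${platform}',
  # path='tools', version='latest').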
1126 cipd_input = None
1127 if cipd_packages:
1128 cipd_input = CipdInput(
1129 client_package=None,
1130 packages=cipd_packages,
1131 server=None)
1132
maruel0a25f6c2017-05-10 10:43:23 -07001133 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001134 secret_bytes = None
1135 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001136 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001137 secret_bytes = f.read().encode('base64')
1138
maruel0a25f6c2017-05-10 10:43:23 -07001139 # Named caches
maruel681d6802017-01-17 16:56:03 -08001140 caches = [
1141 {u'name': unicode(i[0]), u'path': unicode(i[1])}
1142 for i in options.named_cache
1143 ]
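  # Illustrative example (both values are placeholders): a --named-cache pair
  # ('git_cache', '.cache/git') becomes
  # {u'name': u'git_cache', u'path': u'.cache/git'}.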
maruel0a25f6c2017-05-10 10:43:23 -07001144
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001145 env_prefixes = {}
1146 for k, v in options.env_prefix:
1147 env_prefixes.setdefault(k, []).append(v)
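  # Illustrative example (values are placeholders): two --env-prefix entries for
  # the same key, ('PATH', 'local/bin') and ('PATH', 'local/tools'), accumulate
  # to {'PATH': ['local/bin', 'local/tools']} and are sent as StringListPair
  # values below.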
1148
Brad Hallf78187a2018-10-19 17:08:55 +00001149 # Get dimensions into the key/value format we can manipulate later.
1150 orig_dims = [
1151 {'key': key, 'value': value} for key, value in options.dimensions]
1152 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1153
1154 # Construct base properties that we will use for all the slices, adding in
1155 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001156 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001157 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001158 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001159 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001160 containment=Containment(
1161 lower_priority=bool(options.lower_priority),
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001162 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001163 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001164 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001165 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001166 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001167 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001168 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001169 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001170 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001171 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001172 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001173 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001174 outputs=options.output,
1175 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001176
1177 slices = []
1178
1179 # Group the optional dimensions by expiration.
1180 dims_by_exp = {}
1181 for key, value, exp_secs in options.optional_dimensions:
1182 dims_by_exp.setdefault(int(exp_secs), []).append(
1183 {'key': key, 'value': value})
1184
1185   # Create the optional slices with expiration deltas; we fix up the properties
1186 # below.
1187 last_exp = 0
1188 for expiration_secs in sorted(dims_by_exp):
1189 t = TaskSlice(
1190 expiration_secs=expiration_secs - last_exp,
1191 properties=properties,
1192 wait_for_capacity=False)
1193 slices.append(t)
1194 last_exp = expiration_secs
1195
1196 # Add back in the default slice (the last one).
1197 exp = max(int(options.expiration) - last_exp, 60)
1198 base_task_slice = TaskSlice(
1199 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001200 properties=properties,
1201 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001202 slices.append(base_task_slice)
1203
Brad Hall7f463e62018-11-16 16:13:30 +00001204 # Add optional dimensions to the task slices, replacing a dimension that
1205 # has the same key if it is a dimension where repeating isn't valid (otherwise
1206 # we append it). Currently the only dimension we can repeat is "caches"; the
1207 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001208 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001209 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001210 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001211 # Replace or append the key/value pairs for this expiration in extra_dims;
1212 # we keep extra_dims around because we are iterating backwards and filling
1213 # in slices with shorter expirations. Dimensions expire as time goes on so
1214 # the slices that expire earlier will generally have more dimensions.
1215 for kv in kvs:
1216 if kv['key'] == 'caches':
1217 extra_dims.append(kv)
1218 else:
1219 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1220 # Then, add all the optional dimensions to the original dimension set, again
1221 # replacing if needed.
1222 for kv in extra_dims:
1223 if kv['key'] == 'caches':
1224 dims.append(kv)
1225 else:
1226 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001227 dims.sort(key=lambda x: (x['key'], x['value']))
1228 slice_properties = properties._replace(dimensions=dims)
1229 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
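  # Illustrative walk-through (all values are placeholders): with
  # options.dimensions == [('pool', 'Chrome')], a single optional dimension
  # ('gpu', 'nvidia', 600) and --expiration 3600, two slices are produced: the
  # first expires after 600s and requests both 'pool:Chrome' and 'gpu:nvidia';
  # the base slice covers the remaining 3000s and only requests 'pool:Chrome'.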
1230
maruel77f720b2015-09-15 12:35:22 -07001231 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001232 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001233 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001234 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001235 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001236 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001237 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001238 user=options.user,
1239 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001240
1241
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001242class TaskOutputStdoutOption(optparse.Option):
1243 """Where to output the each task's console output (stderr/stdout).
1244
1245   The output will be:
1246     none - not downloaded.
1247 json - stored in summary.json file *only*.
1248 console - shown on stdout *only*.
1249 all - stored in summary.json and shown on stdout.
1250 """
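  # For example, '--task-output-stdout all' converts to ['console', 'json'],
  # 'json' to ['json'] and 'none' to []; the default keeps both 'console' and
  # 'json'.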
1251
1252 choices = ['all', 'json', 'console', 'none']
1253
1254 def __init__(self, *args, **kw):
1255 optparse.Option.__init__(
1256 self,
1257 *args,
1258 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001259 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001260 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001261 **kw)
1262
1263 def convert_value(self, opt, value):
1264 if value not in self.choices:
1265 raise optparse.OptionValueError("%s must be one of %s not %r" % (
1266 self.get_opt_string(), self.choices, value))
1267 stdout_to = []
1268 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001269 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001270 elif value != 'none':
1271 stdout_to = [value]
1272 return stdout_to
1273
1274
maruel@chromium.org0437a732013-08-27 16:05:52 +00001275def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001276 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001277 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001278 help='Timeout to wait for result, set to -1 for no timeout and get '
1279 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001280 parser.group_logging.add_option(
1281 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001282 parser.group_logging.add_option(
1283 '--print-status-updates', action='store_true',
1284 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001285 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001286 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001287 '--task-summary-json',
1288 metavar='FILE',
1289 help='Dump a summary of task results to this file as json. It contains '
1290            'only shard statuses as known to the server directly. Any output files '
1291 'emitted by the task can be collected by using --task-output-dir')
1292 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001293 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001294 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001295 help='Directory to put task results into. When the task finishes, this '
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001296            'directory contains per-shard directories with output files produced '
1297 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001298 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001299 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001300 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001301 '--filepath-filter',
1302       help='Regexp filter used to select which file paths to download when '
1303            'collecting isolated output.')
1304 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001305 '--perf', action='store_true', default=False,
1306 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001307 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001308
1309
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001310def process_collect_options(parser, options):
1311 # Only negative -1 is allowed, disallow other negative values.
1312 if options.timeout != -1 and options.timeout < 0:
1313 parser.error('Invalid --timeout value')
1314
1315
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001316@subcommand.usage('bots...')
1317def CMDbot_delete(parser, args):
1318 """Forcibly deletes bots from the Swarming server."""
1319 parser.add_option(
1320 '-f', '--force', action='store_true',
1321 help='Do not prompt for confirmation')
1322 options, args = parser.parse_args(args)
1323 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001324 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001325
1326 bots = sorted(args)
1327 if not options.force:
1328 print('Delete the following bots?')
1329 for bot in bots:
1330 print(' %s' % bot)
1331 if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
1332 print('Goodbye.')
1333 return 1
1334
1335 result = 0
1336 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001337 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001338 if net.url_read_json(url, data={}, method='POST') is None:
1339 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001340 result = 1
1341 return result
1342
1343
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001344def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001345 """Returns information about the bots connected to the Swarming server."""
1346 add_filter_options(parser)
1347 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001348 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001349       help='Filter out alive bots; useful to reap dead bots and reimage broken ones')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001350 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001351 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001352 help='Keep both dead and alive bots')
1353 parser.filter_group.add_option(
1354 '--busy', action='store_true', help='Keep only busy bots')
1355 parser.filter_group.add_option(
1356 '--idle', action='store_true', help='Keep only idle bots')
1357 parser.filter_group.add_option(
1358 '--mp', action='store_true',
1359 help='Keep only Machine Provider managed bots')
1360 parser.filter_group.add_option(
1361 '--non-mp', action='store_true',
1362 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001363 parser.filter_group.add_option(
1364 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001365 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001366 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001367 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001368
1369 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001370 parser.error('Use only one of --keep-dead or --dead-only')
1371 if options.busy and options.idle:
1372 parser.error('Use only one of --busy or --idle')
1373 if options.mp and options.non_mp:
1374 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001375
smut281c3902018-05-30 17:50:05 -07001376 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001377 values = []
1378 if options.dead_only:
1379 values.append(('is_dead', 'TRUE'))
1380 elif options.keep_dead:
1381 values.append(('is_dead', 'NONE'))
1382 else:
1383 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001384
maruelaf6b06c2017-06-08 06:26:53 -07001385 if options.busy:
1386 values.append(('is_busy', 'TRUE'))
1387 elif options.idle:
1388 values.append(('is_busy', 'FALSE'))
1389 else:
1390 values.append(('is_busy', 'NONE'))
1391
1392 if options.mp:
1393 values.append(('is_mp', 'TRUE'))
1394 elif options.non_mp:
1395 values.append(('is_mp', 'FALSE'))
1396 else:
1397 values.append(('is_mp', 'NONE'))
1398
1399 for key, value in options.dimensions:
1400 values.append(('dimensions', '%s:%s' % (key, value)))
1401 url += urllib.urlencode(values)
1402 try:
1403 data, yielder = get_yielder(url, 0)
1404 bots = data.get('items') or []
1405 for items in yielder():
1406 if items:
1407 bots.extend(items)
1408 except Failure as e:
1409 sys.stderr.write('\n%s\n' % e)
1410 return 1
maruel77f720b2015-09-15 12:35:22 -07001411 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
Lei Leife202df2019-06-11 17:33:34 +00001412 print(bot['bot_id'])
maruelaf6b06c2017-06-08 06:26:53 -07001413 if not options.bare:
1414 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
Lei Leife202df2019-06-11 17:33:34 +00001415 print(' %s' % json.dumps(dimensions, sort_keys=True))
maruelaf6b06c2017-06-08 06:26:53 -07001416 if bot.get('task_id'):
Lei Leife202df2019-06-11 17:33:34 +00001417 print(' task: %s' % bot['task_id'])
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001418 return 0
1419
1420
maruelfd0a90c2016-06-10 11:51:10 -07001421@subcommand.usage('task_id')
1422def CMDcancel(parser, args):
1423 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001424 parser.add_option(
1425 '-k', '--kill-running', action='store_true', default=False,
1426 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001427 options, args = parser.parse_args(args)
1428 if not args:
1429 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001430 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001431 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001432 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001433 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001434 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001435       print('Cancelling %s failed. Probably already gone' % task_id)
1436 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001437 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001438 return 0
1439
1440
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001441@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001442def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001443  """Retrieves results of one or multiple Swarming tasks by their IDs.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001444
1445 The result can be in multiple part if the execution was sharded. It can
1446 potentially have retries.
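
  Examples (the server URL, file name and task id are illustrative):
    swarming.py collect -S https://example.appspot.com --json tasks.json
    swarming.py collect -S https://example.appspot.com 3f1d3c3f3e3a3b10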
1447 """
1448 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001449 parser.add_option(
1450 '-j', '--json',
1451 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001452 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001453 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001454 if not args and not options.json:
1455 parser.error('Must specify at least one task id or --json.')
1456 if args and options.json:
1457 parser.error('Only use one of task id or --json.')
1458
1459 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001460 options.json = unicode(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001461 try:
maruel1ceb3872015-10-14 06:10:44 -07001462 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001463 data = json.load(f)
1464 except (IOError, ValueError):
1465 parser.error('Failed to open %s' % options.json)
1466 try:
1467 tasks = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001468 data['tasks'].values(), key=lambda x: x['shard_index'])
maruel71c61c82016-02-22 06:52:05 -08001469 args = [t['task_id'] for t in tasks]
1470 except (KeyError, TypeError):
1471 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001472 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001473 # Take in account all the task slices.
1474 offset = 0
1475 for s in data['request']['task_slices']:
1476 m = (offset + s['properties']['execution_timeout_secs'] +
1477 s['expiration_secs'])
1478 if m > options.timeout:
1479 options.timeout = m
1480 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001481 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001482 else:
1483 valid = frozenset('0123456789abcdef')
1484 if any(not valid.issuperset(task_id) for task_id in args):
1485 parser.error('Task ids are 0-9a-f.')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001486
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001487 try:
1488 return collect(
1489 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001490 args,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001491 options.timeout,
1492 options.decorate,
1493 options.print_status_updates,
1494 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001495 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001496 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001497 options.perf,
1498 options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001499 except Failure:
1500 on_error.report(None)
1501 return 1
1502
1503
maruel77f720b2015-09-15 12:35:22 -07001504@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001505def CMDpost(parser, args):
1506 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1507
1508   Input data must be sent to stdin; the result is printed to stdout.
1509
1510 If HTTP response code >= 400, returns non-zero.
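
  Example (mirrors the call made by CMDcancel; the server URL and task id are
  illustrative):
    echo '{"kill_running": false}' | \
        swarming.py post -S https://example.appspot.com task/3f1d3c3f3e3a3b10/cancel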
1511 """
1512 options, args = parser.parse_args(args)
1513 if len(args) != 1:
1514 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001515 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001516 data = sys.stdin.read()
1517 try:
1518 resp = net.url_read(url, data=data, method='POST')
1519 except net.TimeoutError:
1520 sys.stderr.write('Timeout!\n')
1521 return 1
1522 if not resp:
1523 sys.stderr.write('No response!\n')
1524 return 1
1525 sys.stdout.write(resp)
1526 return 0
1527
1528
1529@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001530def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001531  """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1532 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001533
1534 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001535 Raw task request and results:
1536 swarming.py query -S server-url.com task/123456/request
1537 swarming.py query -S server-url.com task/123456/result
1538
maruel77f720b2015-09-15 12:35:22 -07001539 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001540 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001541
maruelaf6b06c2017-06-08 06:26:53 -07001542 Listing last 10 tasks on a specific bot named 'bot1':
1543 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001544
maruelaf6b06c2017-06-08 06:26:53 -07001545 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001546 quoting is important!:
1547 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001548 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001549 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001550 parser.add_option(
1551 '-L', '--limit', type='int', default=200,
1552 help='Limit to enforce on limitless items (like number of tasks); '
1553 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001554 parser.add_option(
1555 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001556 parser.add_option(
1557 '--progress', action='store_true',
1558 help='Prints a dot at each request to show progress')
1559 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001560 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001561 parser.error(
1562 'Must specify only method name and optionally query args properly '
1563 'escaped.')
smut281c3902018-05-30 17:50:05 -07001564 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001565 try:
1566 data, yielder = get_yielder(base_url, options.limit)
1567 for items in yielder():
1568 if items:
1569 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001570 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001571 sys.stderr.write('.')
1572 sys.stderr.flush()
1573 except Failure as e:
1574 sys.stderr.write('\n%s\n' % e)
1575 return 1
maruel77f720b2015-09-15 12:35:22 -07001576 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001577 sys.stderr.write('\n')
1578 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001579 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001580 options.json = unicode(os.path.abspath(options.json))
1581 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001582 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001583 try:
maruel77f720b2015-09-15 12:35:22 -07001584 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001585 sys.stdout.write('\n')
1586 except IOError:
1587 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001588 return 0
1589
1590
maruel77f720b2015-09-15 12:35:22 -07001591def CMDquery_list(parser, args):
1592   """Returns the list of all the Swarming APIs that can be used with the
1593   'query' command.
1594 """
1595 parser.add_option(
1596 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1597 options, args = parser.parse_args(args)
1598 if args:
1599 parser.error('No argument allowed.')
1600
1601 try:
1602 apis = endpoints_api_discovery_apis(options.swarming)
1603 except APIError as e:
1604 parser.error(str(e))
1605 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001606 options.json = unicode(os.path.abspath(options.json))
1607 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001608 json.dump(apis, f)
1609 else:
1610 help_url = (
1611 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1612 options.swarming)
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001613 for i, (api_id, api) in enumerate(sorted(apis.items())):
maruel11e31af2017-02-15 07:30:50 -08001614 if i:
1615 print('')
Lei Leife202df2019-06-11 17:33:34 +00001616 print(api_id)
1617 print(' ' + api['description'].strip())
maruel11e31af2017-02-15 07:30:50 -08001618 if 'resources' in api:
1619 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001620 # TODO(maruel): Remove.
1621 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001622 for j, (resource_name, resource) in enumerate(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001623 sorted(api['resources'].items())):
maruel11e31af2017-02-15 07:30:50 -08001624 if j:
1625 print('')
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001626 for method_name, method in sorted(resource['methods'].items()):
maruel11e31af2017-02-15 07:30:50 -08001627 # Only list the GET ones.
1628 if method['httpMethod'] != 'GET':
1629 continue
Lei Leife202df2019-06-11 17:33:34 +00001630 print('- %s.%s: %s' % (
1631 resource_name, method_name, method['path']))
maruel11e31af2017-02-15 07:30:50 -08001632 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001633 ' ' + l for l in textwrap.wrap(
1634 method.get('description', 'No description'), 78)))
Lei Leife202df2019-06-11 17:33:34 +00001635 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel11e31af2017-02-15 07:30:50 -08001636 else:
1637 # New.
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001638 for method_name, method in sorted(api['methods'].items()):
maruel77f720b2015-09-15 12:35:22 -07001639 # Only list the GET ones.
1640 if method['httpMethod'] != 'GET':
1641 continue
Lei Leife202df2019-06-11 17:33:34 +00001642 print('- %s: %s' % (method['id'], method['path']))
maruel11e31af2017-02-15 07:30:50 -08001643 print('\n'.join(
1644 ' ' + l for l in textwrap.wrap(method['description'], 78)))
Lei Leife202df2019-06-11 17:33:34 +00001645 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel77f720b2015-09-15 12:35:22 -07001646 return 0
1647
1648
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001649@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001650def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001651  """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001652
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001653 Basically, does everything to run a command remotely.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001654 """
1655 add_trigger_options(parser)
1656 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001657 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001658 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001659 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001660 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001661 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001662 tasks = trigger_task_shards(
1663 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001664 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001665 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001666 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001667 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001668 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001669 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001670 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001671 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001672 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001673 task_ids = [
1674 t['task_id']
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001675 for t in sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001676 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001677 for task_id in task_ids:
1678 print('Task: {server}/task?id={task}'.format(
1679 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001680 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001681 offset = 0
1682 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001683 m = (offset + s.properties.execution_timeout_secs +
1684 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001685 if m > options.timeout:
1686 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001687 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001688 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001689 try:
1690 return collect(
1691 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001692 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001693 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001694 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001695 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001696 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001697 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001698 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001699 options.perf,
1700 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001701 except Failure:
1702 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001703 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001704
1705
maruel18122c62015-10-23 06:31:23 -07001706@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001707def CMDreproduce(parser, args):
1708 """Runs a task locally that was triggered on the server.
1709
1710   This runs locally the same commands that were run on the bot. The data
1711 downloaded will be in a subdirectory named 'work' of the current working
1712 directory.
maruel18122c62015-10-23 06:31:23 -07001713
1714 You can pass further additional arguments to the target command by passing
1715 them after --.
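
  Example (the server URL and task id are illustrative; anything after -- is
  passed to the reproduced command):
    swarming.py reproduce -S https://example.appspot.com 3f1d3c3f3e3a3b10 -- --verbose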
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001716 """
maruelc070e672016-02-22 17:32:57 -08001717 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001718 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001719 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001720 parser.add_option(
1721 '--work', metavar='DIR', default='work',
1722 help='Directory to map the task input files into')
1723 parser.add_option(
1724 '--cache', metavar='DIR', default='cache',
1725 help='Directory that contains the input cache')
1726 parser.add_option(
1727 '--leak', action='store_true',
1728 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001729 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001730 extra_args = []
1731 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001732 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001733 if len(args) > 1:
1734 if args[1] == '--':
1735 if len(args) > 2:
1736 extra_args = args[2:]
1737 else:
1738 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001739
smut281c3902018-05-30 17:50:05 -07001740 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001741 request = net.url_read_json(url)
1742 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001743 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001744 return 1
1745
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001746 workdir = unicode(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001747 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001748 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001749 fs.mkdir(workdir)
iannucci31ab9192017-05-02 19:11:56 -07001750 cachedir = unicode(os.path.abspath('cipd_cache'))
1751 if not fs.exists(cachedir):
1752 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001753
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001754 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001755 env = os.environ.copy()
1756 env['SWARMING_BOT_ID'] = 'reproduce'
1757 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001758 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001759 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001760 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001761 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001762 if not i['value']:
1763 env.pop(key, None)
1764 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001765 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001766
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001767 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001768 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001769 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001770 for i in env_prefixes:
1771 key = i['key']
1772 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001773 cur = env.get(key)
1774 if cur:
1775 paths.append(cur)
1776 env[key] = os.path.pathsep.join(paths)
1777
iannucci31ab9192017-05-02 19:11:56 -07001778 command = []
nodir152cba62016-05-12 16:08:56 -07001779 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001780 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001781 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001782 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001783 properties['inputs_ref']['namespace'])
1784 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001785 # Do not use MemoryContentAddressedCache here, as on 32-bits python,
1786 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1787 # leak.
1788 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001789 cache = local_caching.DiskContentAddressedCache(
Marc-Antoine Ruel79d42192019-02-06 19:24:16 +00001790 unicode(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001791 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001792 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001793 command = bundle.command
1794 if bundle.relative_cwd:
1795 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001796 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001797
1798 if properties.get('command'):
1799 command.extend(properties['command'])
1800
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001801 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001802 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001803 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001804 new_command = run_isolated.process_command(command, 'invalid', None)
1805 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001806 parser.error('The task has outputs, you must use --output-dir')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001807 else:
1808 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001809 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001810 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001811 command, options.output, None)
1812 if not os.path.isdir(options.output):
1813 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001814 command = new_command
1815 file_path.ensure_command_has_abs_path(command, workdir)
1816
1817 if properties.get('cipd_input'):
1818 ci = properties['cipd_input']
1819 cp = ci['client_package']
1820 client_manager = cipd.get_client(
1821 ci['server'], cp['package_name'], cp['version'], cachedir)
1822
1823 with client_manager as client:
1824 by_path = collections.defaultdict(list)
1825 for pkg in ci['packages']:
1826 path = pkg['path']
1827 # cipd deals with 'root' as ''
1828 if path == '.':
1829 path = ''
1830 by_path[path].append((pkg['package_name'], pkg['version']))
1831 client.ensure(workdir, by_path, cache_dir=cachedir)
1832
maruel77f720b2015-09-15 12:35:22 -07001833 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001834 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001835 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001836 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1837 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001838 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001839 finally:
1840 # Do not delete options.cache.
1841 if not options.leak:
1842 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001843
1844
maruel0eb1d1b2015-10-02 14:48:21 -07001845@subcommand.usage('bot_id')
1846def CMDterminate(parser, args):
1847 """Tells a bot to gracefully shut itself down as soon as it can.
1848
1849   This is done by completing whatever task is currently running and then
1850   exiting the bot process.
1851 """
1852 parser.add_option(
1853 '--wait', action='store_true', help='Wait for the bot to terminate')
1854 options, args = parser.parse_args(args)
1855 if len(args) != 1:
1856 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001857 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001858 request = net.url_read_json(url, data={})
1859 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001860 print('Failed to ask for termination', file=sys.stderr)
maruel0eb1d1b2015-10-02 14:48:21 -07001861 return 1
1862 if options.wait:
1863 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001864 options.swarming,
1865 [request['task_id']],
1866 0.,
1867 False,
1868 False,
1869 None,
1870 None,
1871 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001872 False,
1873 None)
maruelbfc5f872017-06-10 16:43:17 -07001874 else:
Lei Leife202df2019-06-11 17:33:34 +00001875 print(request['task_id'])
maruel0eb1d1b2015-10-02 14:48:21 -07001876 return 0
1877
1878
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001879@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001880def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001881 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001882
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001883 Passes all extra arguments provided after '--' as additional command line
1884   arguments for an isolated command specified in a *.isolate file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001885 """
1886 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001887 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001888 parser.add_option(
1889 '--dump-json',
1890 metavar='FILE',
1891 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001892 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001893 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001894 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001895 tasks = trigger_task_shards(
1896 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001897 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001898 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001899 tasks_sorted = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001900 tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001901 if options.dump_json:
1902 data = {
maruel0a25f6c2017-05-10 10:43:23 -07001903 'base_task_name': task_request.name,
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001904 'tasks': tasks,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001905 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001906 }
maruel46b015f2015-10-13 18:40:35 -07001907 tools.write_json(unicode(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001908 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001909 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001910 (options.swarming, options.dump_json))
1911 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001912 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001913 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001914 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1915 print('Or visit:')
1916 for t in tasks_sorted:
1917 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001918 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001919 except Failure:
1920 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001921 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001922
1923
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001924class OptionParserSwarming(logging_utils.OptionParserWithLogging):
maruel@chromium.org0437a732013-08-27 16:05:52 +00001925 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001926 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001927 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001928 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001929 self.server_group.add_option(
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001930 '-S', '--swarming',
Kevin Graney5346c162014-01-24 12:20:01 -05001931 metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001932 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001933 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001934 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001935
1936 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001937 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001938 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001939 auth.process_auth_options(self, options)
1940 user = self._process_swarming(options)
1941 if hasattr(options, 'user') and not options.user:
1942 options.user = user
1943 return options, args
1944
1945 def _process_swarming(self, options):
1946 """Processes the --swarming option and aborts if not specified.
1947
1948 Returns the identity as determined by the server.
1949 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001950 if not options.swarming:
1951 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001952 try:
1953 options.swarming = net.fix_url(options.swarming)
1954 except ValueError as e:
1955 self.error('--swarming %s' % e)
1956 on_error.report_on_exception_exit(options.swarming)
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001957 try:
1958 user = auth.ensure_logged_in(options.swarming)
1959 except ValueError as e:
1960 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001961 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00001962
1963
1964def main(args):
1965 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001966 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001967
1968
1969if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07001970 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00001971 fix_encoding.fix_encoding()
1972 tools.disable_buffering()
1973 colorama.init()
1974 sys.exit(main(sys.argv[1:]))