#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Client tool to trigger tasks or retrieve results from a Swarming server."""

from __future__ import print_function

__version__ = '1.0'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand
import six
from six.moves import urllib

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils


class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (
        options.user,
        '_'.join('%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name


### Triggering.


# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage',
    [
      'package_name',
      'path',
      'version',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput',
    [
      'client_package',
      'packages',
      'server',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef',
    [
      'isolated',
      'isolatedserver',
      'namespace',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
    'StringListPair', [
      'key',
      'value',  # repeated string
    ]
)

# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple(
    'Containment',
    [
      'lower_priority',
      'containment_type',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    [
      'caches',
      'cipd_input',
      'command',
      'containment',
      'relative_cwd',
      'dimensions',
      'env',
      'env_prefixes',
      'execution_timeout_secs',
      'extra_args',
      'grace_period_secs',
      'idempotent',
      'inputs_ref',
      'io_timeout_secs',
      'outputs',
      'secret_bytes',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
      'expiration_secs',
      'properties',
      'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
      'name',
      'parent_task_id',
      'priority',
      'task_slices',
      'service_account',
      'tags',
      'user',
      'pool_task_template',
    ])


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  return value


def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for a new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None, to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
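  # The RPC expects 'env' as a list of {'key': ..., 'value': ...} pairs sorted
  # by key, while the namedtuple carries it as a dict; convert it here.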
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [
      {'key': k, 'value': v}
      for k, v in task_slice['properties']['env'].items()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  return out


def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
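    # Example (illustrative): with shards=3 and index=1 the returned env gains
    # {'key': 'GTEST_SHARD_INDEX', 'value': '1'} and
    # {'key': 'GTEST_TOTAL_SHARDS', 'value': '3'}.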
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env


def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
        of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be functionally
      # equivalent. We cannot guarantee that they are, though. If we see the
      # GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print('Priority was reset to %s' % task['request']['priority'],
            file=sys.stderr)
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print('Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests)), file=sys.stderr)
    for task_dict in tasks.values():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks


### Collection.


# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum, which
  is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

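  # Non-final states; a task in any other state will not change anymore.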
  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]


class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      task_output_stdout: collection controlling where the task's stdout goes;
          may contain 'console' and/or 'json'.
      shard_count: expected number of task shards.
      filter_cb: (optional) predicate applied to output file paths to decide
          which files to fetch.
    """
    self.task_output_dir = (
        six.text_type(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.parse.urlencode([('namespace', ref['namespace']),
                                  ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shuts down underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in range(self.shard_count)
        ],
      }

      # Don't store stdout in the summary unless it was explicitly requested.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files."""
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
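  # Example values: u'2010-01-02T03:04:05.123456' and u'2010-01-02T03:04:05'.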
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)


def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

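  # Interpretation of |timeout| (as implemented below): a value > 0 sets an
  # absolute deadline, -1 means a single fetch attempt (with 50x retries
  # delegated to net.url_read_json), anything else polls until the task
  # completes or |should_stop| is set.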
  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result


def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's
  normally not worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()


def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
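  # With include_stdout, the returned text looks roughly like (illustrative):
  #   +---------------------------------------------------+
  #   | Shard 0  https://host/user/task/1234567890abcdef0  |
  #   +---------------------------------------------------+
  #   ...task stdout...
  #   +---------------------------------------------------+
  #   | End of shard 0                                     |
  #   |  Pending: 1.2s  Duration: 3.4s  Bot: bot1  Exit: 0 |
  #   +---------------------------------------------------+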
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
        ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
        ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as string to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
        ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
      ])


def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
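      # Keep the latest non-zero shard exit code; only fall back to a zero or
      # None code when nothing has been recorded yet, so a fully green run
      # exits 0.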
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            "console" in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join(' %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print('Results from some shards are missing: %s' %
          ', '.join(map(str, missing_shards)), file=sys.stderr)
    return 1

  return exit_code if exit_code is not None else 1


### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250
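  # Illustrative usage:
  #   data, yielder = get_yielder(url, limit)
  #   items = list(data.get('items') or [])
  #   for batch in yielder():
  #     items.extend(batch)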

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some queries are paginated; follow the 'cursor' returned by each page
    # until there is no more data or 'limit' items have been fetched.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.parse.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder


### Commands.


def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.


def add_filter_options(parser):
  parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
  parser.filter_group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices ')
  parser.add_option_group(parser.filter_group)


def _validate_filter_option(parser, key, value, expiration, argname):
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is not None:
    try:
      expiration = int(expiration)
    except ValueError:
      parser.error('%s expiration is not an integer' % argname)
    if expiration <= 0:
      parser.error('%s expiration should be positive' % argname)
    if expiration % 60 != 0:
      parser.error('%s expiration is not divisible by 60' % argname)


def process_filter_options(parser, options):
  for key, value in options.dimensions:
    _validate_filter_option(parser, key, value, None, 'dimension')
  for key, value, exp in options.optional_dimensions:
    _validate_filter_option(parser, key, value, exp, 'optional-dimension')
  options.dimensions.sort()


def add_sharding_options(parser):
  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
  parser.sharding_group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(parser.sharding_group)


def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameters and return its result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  parser.add_option(
      '--lower-priority', action='store_true',
      help='Lowers the child process priority')
  containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
  parser.add_option(
      '--containment-type', default='NONE', metavar='NONE',
      choices=containment_choices,
      help='Containment to use; one of: %s' % ', '.join(containment_choices))
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according to the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by '
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower the value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided, if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)


Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001062def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001063 """Processes trigger options and does preparatory steps.
1064
1065 Returns:
1066 NewTaskRequest instance.
1067 """
maruelaf6b06c2017-06-08 06:26:53 -07001068 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001069 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001070 if args and args[0] == '--':
1071 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001072
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001073 if not options.dimensions:
1074 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001075 if not any(k == 'pool' for k, _v in options.dimensions):
1076 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001077 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1078 parser.error('--tags must be in the format key:value')
1079 if options.raw_cmd and not args:
1080 parser.error(
1081 'Arguments with --raw-cmd should be passed after -- as command '
1082 'delimiter.')
1083 if options.isolate_server and not options.namespace:
1084 parser.error(
1085 '--namespace must be a valid value when --isolate-server is used')
1086 if not options.isolated and not options.raw_cmd:
1087 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1088
1089 # Isolated
1090 # --isolated is required only if --raw-cmd wasn't provided.
1091 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1092 # preferred server.
1093 isolateserver.process_isolate_server_options(
1094 parser, options, False, not options.raw_cmd)
1095 inputs_ref = None
1096 if options.isolate_server:
1097 inputs_ref = FilesRef(
1098 isolated=options.isolated,
1099 isolatedserver=options.isolate_server,
1100 namespace=options.namespace)
1101
1102 # Command
1103 command = None
1104 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001105 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001106 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001107 if options.relative_cwd:
1108 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1109 if not a.startswith(os.getcwd()):
1110 parser.error(
1111 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001112 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001113 if options.relative_cwd:
1114 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001115 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001116
maruel0a25f6c2017-05-10 10:43:23 -07001117 # CIPD
borenet02f772b2016-06-22 12:42:19 -07001118 cipd_packages = []
1119 for p in options.cipd_package:
1120 split = p.split(':', 2)
1121 if len(split) != 3:
1122 parser.error('CIPD packages must take the form: path:package:version')
1123 cipd_packages.append(CipdPackage(
1124 package_name=split[1],
1125 path=split[0],
1126 version=split[2]))
1127 cipd_input = None
1128 if cipd_packages:
1129 cipd_input = CipdInput(
1130 client_package=None,
1131 packages=cipd_packages,
1132 server=None)
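  # Illustrative sketch (hypothetical values, not from this file): an entry in
  # options.cipd_package such as 'tools:some/package/name:latest' is split on
  # ':' into ('tools', 'some/package/name', 'latest') and ends up as
  # CipdPackage(path='tools', package_name='some/package/name',
  # version='latest') inside the CipdInput above.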
1133
maruel0a25f6c2017-05-10 10:43:23 -07001134 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001135 secret_bytes = None
1136 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001137 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001138 secret_bytes = f.read().encode('base64')
1139
maruel0a25f6c2017-05-10 10:43:23 -07001140 # Named caches
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001141 caches = [{
1142 u'name': six.text_type(i[0]),
1143 u'path': six.text_type(i[1])
1144 } for i in options.named_cache]
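  # Sketch, assuming hypothetical values: a (name, path) pair in
  # options.named_cache such as ('git_cache', '.git_cache') becomes
  # {u'name': u'git_cache', u'path': u'.git_cache'}.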
maruel0a25f6c2017-05-10 10:43:23 -07001145
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001146 env_prefixes = {}
1147 for k, v in options.env_prefix:
1148 env_prefixes.setdefault(k, []).append(v)
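  # Example of the accumulation above (hypothetical values): repeated
  # ('PATH', 'bin') and ('PATH', 'tools/bin') pairs in options.env_prefix
  # collapse into {'PATH': ['bin', 'tools/bin']}, later wrapped in
  # StringListPair entries for the TaskProperties below.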
1149
Brad Hallf78187a2018-10-19 17:08:55 +00001150 # Get dimensions into the key/value format we can manipulate later.
1151 orig_dims = [
1152 {'key': key, 'value': value} for key, value in options.dimensions]
1153 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1154
1155 # Construct base properties that we will use for all the slices, adding in
1156 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001157 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001158 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001159 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001160 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001161 containment=Containment(
1162 lower_priority=bool(options.lower_priority),
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001163 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001164 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001165 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001166 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001167 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001168 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001169 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001170 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001171 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001172 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001173 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001174 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001175 outputs=options.output,
1176 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001177
1178 slices = []
1179
1180 # Group the optional dimensions by expiration.
1181 dims_by_exp = {}
1182 for key, value, exp_secs in options.optional_dimensions:
1183 dims_by_exp.setdefault(int(exp_secs), []).append(
1184 {'key': key, 'value': value})
1185
1186  # Create the optional slices with expiration deltas; we fix up the properties
1187 # below.
1188 last_exp = 0
1189 for expiration_secs in sorted(dims_by_exp):
1190 t = TaskSlice(
1191 expiration_secs=expiration_secs - last_exp,
1192 properties=properties,
1193 wait_for_capacity=False)
1194 slices.append(t)
1195 last_exp = expiration_secs
1196
1197 # Add back in the default slice (the last one).
1198 exp = max(int(options.expiration) - last_exp, 60)
1199 base_task_slice = TaskSlice(
1200 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001201 properties=properties,
1202 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001203 slices.append(base_task_slice)
1204
Brad Hall7f463e62018-11-16 16:13:30 +00001205 # Add optional dimensions to the task slices, replacing a dimension that
1206 # has the same key if it is a dimension where repeating isn't valid (otherwise
1207 # we append it). Currently the only dimension we can repeat is "caches"; the
1208 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001209 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001210 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001211 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001212 # Replace or append the key/value pairs for this expiration in extra_dims;
1213 # we keep extra_dims around because we are iterating backwards and filling
1214 # in slices with shorter expirations. Dimensions expire as time goes on so
1215 # the slices that expire earlier will generally have more dimensions.
1216 for kv in kvs:
1217 if kv['key'] == 'caches':
1218 extra_dims.append(kv)
1219 else:
1220 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1221 # Then, add all the optional dimensions to the original dimension set, again
1222 # replacing if needed.
1223 for kv in extra_dims:
1224 if kv['key'] == 'caches':
1225 dims.append(kv)
1226 else:
1227 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001228 dims.sort(key=lambda x: (x['key'], x['value']))
1229 slice_properties = properties._replace(dimensions=dims)
1230 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
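  # Worked example (hypothetical values): with --expiration 600 and a single
  # optional dimension gpu=nvidia expiring at 300s, two slices are produced.
  # Slice 0 lasts 300s and carries the original dimensions plus gpu=nvidia;
  # slice 1 covers the remaining 300s with only the original dimensions.
  # Because the loop above iterates expirations from longest to shortest while
  # carrying extra_dims over, slices that expire earlier accumulate the
  # optional dimensions of every longer-lived expiration as well.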
1231
maruel77f720b2015-09-15 12:35:22 -07001232 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001233 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001234 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001235 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001236 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001237 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001238 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001239 user=options.user,
1240 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001241
1242
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001243class TaskOutputStdoutOption(optparse.Option):
1244 """Where to output the each task's console output (stderr/stdout).
1245
1246 The output will be;
1247 none - not be downloaded.
1248 json - stored in summary.json file *only*.
1249 console - shown on stdout *only*.
1250 all - stored in summary.json and shown on stdout.
1251 """
1252
1253 choices = ['all', 'json', 'console', 'none']
1254
1255 def __init__(self, *args, **kw):
1256 optparse.Option.__init__(
1257 self,
1258 *args,
1259 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001260 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001261 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001262 **kw)
1263
1264 def convert_value(self, opt, value):
1265 if value not in self.choices:
1266 raise optparse.OptionValueError("%s must be one of %s not %r" % (
1267 self.get_opt_string(), self.choices, value))
1268 stdout_to = []
1269 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001270 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001271 elif value != 'none':
1272 stdout_to = [value]
1273 return stdout_to
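  # Resulting values, derived from convert_value above:
  #   'all'     -> ['console', 'json']
  #   'json'    -> ['json']
  #   'console' -> ['console']
  #   'none'    -> []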
1274
1275
maruel@chromium.org0437a732013-08-27 16:05:52 +00001276def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001277 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001278 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001279 help='Timeout to wait for result, set to -1 for no timeout and get '
1280 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001281 parser.group_logging.add_option(
1282 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001283 parser.group_logging.add_option(
1284 '--print-status-updates', action='store_true',
1285 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001286 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001287 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001288 '--task-summary-json',
1289 metavar='FILE',
1290 help='Dump a summary of task results to this file as json. It contains '
1291           'only the shard statuses as known to the server. Any output files '
1292 'emitted by the task can be collected by using --task-output-dir')
1293 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001294 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001295 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001296 help='Directory to put task results into. When the task finishes, this '
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001297           'directory contains a per-shard directory with the output files '
1298           'produced by each shard: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001299 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001300 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001301 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001302 '--filepath-filter',
1303      help='Regexp filter applied to file paths to select which files to '
1304           'download when collecting isolated output.')
1305 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001306 '--perf', action='store_true', default=False,
1307 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001308 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001309
1310
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001311def process_collect_options(parser, options):
1312 # Only negative -1 is allowed, disallow other negative values.
1313 if options.timeout != -1 and options.timeout < 0:
1314 parser.error('Invalid --timeout value')
1315
1316
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001317@subcommand.usage('bots...')
1318def CMDbot_delete(parser, args):
1319 """Forcibly deletes bots from the Swarming server."""
1320 parser.add_option(
1321 '-f', '--force', action='store_true',
1322 help='Do not prompt for confirmation')
1323 options, args = parser.parse_args(args)
1324 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001325 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001326
1327 bots = sorted(args)
1328 if not options.force:
1329 print('Delete the following bots?')
1330 for bot in bots:
1331 print(' %s' % bot)
1332    if six.moves.input('Continue? [y/N] ') not in ('y', 'Y'):
1333 print('Goodbye.')
1334 return 1
1335
1336 result = 0
1337 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001338 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001339 if net.url_read_json(url, data={}, method='POST') is None:
1340 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001341 result = 1
1342 return result
1343
1344
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001345def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001346 """Returns information about the bots connected to the Swarming server."""
1347 add_filter_options(parser)
1348 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001349 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001350      help='Keep only dead bots; useful to reap them and reimage broken bots')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001351 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001352 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001353 help='Keep both dead and alive bots')
1354 parser.filter_group.add_option(
1355 '--busy', action='store_true', help='Keep only busy bots')
1356 parser.filter_group.add_option(
1357 '--idle', action='store_true', help='Keep only idle bots')
1358 parser.filter_group.add_option(
1359 '--mp', action='store_true',
1360 help='Keep only Machine Provider managed bots')
1361 parser.filter_group.add_option(
1362 '--non-mp', action='store_true',
1363 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001364 parser.filter_group.add_option(
1365 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001366 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001367 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001368 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001369
1370 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001371 parser.error('Use only one of --keep-dead or --dead-only')
1372 if options.busy and options.idle:
1373 parser.error('Use only one of --busy or --idle')
1374 if options.mp and options.non_mp:
1375 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001376
smut281c3902018-05-30 17:50:05 -07001377 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001378 values = []
1379 if options.dead_only:
1380 values.append(('is_dead', 'TRUE'))
1381 elif options.keep_dead:
1382 values.append(('is_dead', 'NONE'))
1383 else:
1384 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001385
maruelaf6b06c2017-06-08 06:26:53 -07001386 if options.busy:
1387 values.append(('is_busy', 'TRUE'))
1388 elif options.idle:
1389 values.append(('is_busy', 'FALSE'))
1390 else:
1391 values.append(('is_busy', 'NONE'))
1392
1393 if options.mp:
1394 values.append(('is_mp', 'TRUE'))
1395 elif options.non_mp:
1396 values.append(('is_mp', 'FALSE'))
1397 else:
1398 values.append(('is_mp', 'NONE'))
1399
1400 for key, value in options.dimensions:
1401 values.append(('dimensions', '%s:%s' % (key, value)))
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +00001402 url += urllib.parse.urlencode(values)
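  # As used here and in CMDquery below, get_yielder returns an initial data
  # dict plus a generator function that yields the remaining pages, so 'bots'
  # accumulates every page of results.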
maruelaf6b06c2017-06-08 06:26:53 -07001403 try:
1404 data, yielder = get_yielder(url, 0)
1405 bots = data.get('items') or []
1406 for items in yielder():
1407 if items:
1408 bots.extend(items)
1409 except Failure as e:
1410 sys.stderr.write('\n%s\n' % e)
1411 return 1
maruel77f720b2015-09-15 12:35:22 -07001412 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
Lei Leife202df2019-06-11 17:33:34 +00001413 print(bot['bot_id'])
maruelaf6b06c2017-06-08 06:26:53 -07001414 if not options.bare:
1415 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
Lei Leife202df2019-06-11 17:33:34 +00001416 print(' %s' % json.dumps(dimensions, sort_keys=True))
maruelaf6b06c2017-06-08 06:26:53 -07001417 if bot.get('task_id'):
Lei Leife202df2019-06-11 17:33:34 +00001418 print(' task: %s' % bot['task_id'])
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001419 return 0
1420
1421
maruelfd0a90c2016-06-10 11:51:10 -07001422@subcommand.usage('task_id')
1423def CMDcancel(parser, args):
1424 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001425 parser.add_option(
1426 '-k', '--kill-running', action='store_true', default=False,
1427 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001428 options, args = parser.parse_args(args)
1429 if not args:
1430 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001431 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001432 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001433 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001434 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001435 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001436      print('Canceling %s failed. Probably already gone' % task_id)
1437 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001438 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001439 return 0
1440
1441
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001442@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001443def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001444  """Retrieves results of one or multiple Swarming tasks by their IDs.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001445
1446  The result can be in multiple parts if the execution was sharded. It can
1447 potentially have retries.
1448 """
1449 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001450 parser.add_option(
1451 '-j', '--json',
1452 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001453 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001454 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001455 if not args and not options.json:
1456 parser.error('Must specify at least one task id or --json.')
1457 if args and options.json:
1458 parser.error('Only use one of task id or --json.')
1459
1460 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001461 options.json = six.text_type(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001462 try:
maruel1ceb3872015-10-14 06:10:44 -07001463 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001464 data = json.load(f)
1465 except (IOError, ValueError):
1466 parser.error('Failed to open %s' % options.json)
1467 try:
1468 tasks = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001469 data['tasks'].values(), key=lambda x: x['shard_index'])
maruel71c61c82016-02-22 06:52:05 -08001470 args = [t['task_id'] for t in tasks]
1471 except (KeyError, TypeError):
1472 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001473 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001474 # Take in account all the task slices.
1475 offset = 0
1476 for s in data['request']['task_slices']:
1477 m = (offset + s['properties']['execution_timeout_secs'] +
1478 s['expiration_secs'])
1479 if m > options.timeout:
1480 options.timeout = m
1481 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001482 options.timeout += 10.
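      # Illustrative example (hypothetical numbers): two slices, each with
      # execution_timeout_secs=3600 and expiration_secs=600, give candidate
      # timeouts of 4200 and then 4800 in the loop above; the largest wins,
      # and the 10s margin added above makes the final timeout 4810 seconds.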
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001483 else:
1484 valid = frozenset('0123456789abcdef')
1485 if any(not valid.issuperset(task_id) for task_id in args):
1486 parser.error('Task ids are 0-9a-f.')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001487
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001488 try:
1489 return collect(
1490 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001491 args,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001492 options.timeout,
1493 options.decorate,
1494 options.print_status_updates,
1495 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001496 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001497 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001498 options.perf,
1499 options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001500 except Failure:
1501 on_error.report(None)
1502 return 1
1503
1504
maruel77f720b2015-09-15 12:35:22 -07001505@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001506def CMDpost(parser, args):
1507 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1508
1509  Input data must be sent to stdin; the result is printed to stdout.
1510
1511 If HTTP response code >= 400, returns non-zero.
1512 """
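  # Example invocation (hypothetical server URL and method name):
  #   echo '{}' | swarming.py post -S https://example.com tasks/cancel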
1513 options, args = parser.parse_args(args)
1514 if len(args) != 1:
1515 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001516 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001517 data = sys.stdin.read()
1518 try:
1519 resp = net.url_read(url, data=data, method='POST')
1520 except net.TimeoutError:
1521 sys.stderr.write('Timeout!\n')
1522 return 1
1523 if not resp:
1524 sys.stderr.write('No response!\n')
1525 return 1
1526 sys.stdout.write(resp)
1527 return 0
1528
1529
1530@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001531def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001532  """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1533 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001534
1535 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001536 Raw task request and results:
1537 swarming.py query -S server-url.com task/123456/request
1538 swarming.py query -S server-url.com task/123456/result
1539
maruel77f720b2015-09-15 12:35:22 -07001540 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001541 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001542
maruelaf6b06c2017-06-08 06:26:53 -07001543 Listing last 10 tasks on a specific bot named 'bot1':
1544 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001545
maruelaf6b06c2017-06-08 06:26:53 -07001546 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001547 quoting is important!:
1548 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001549 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001550 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001551 parser.add_option(
1552 '-L', '--limit', type='int', default=200,
1553 help='Limit to enforce on limitless items (like number of tasks); '
1554 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001555 parser.add_option(
1556 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001557 parser.add_option(
1558 '--progress', action='store_true',
1559 help='Prints a dot at each request to show progress')
1560 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001561 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001562 parser.error(
1563 'Must specify only method name and optionally query args properly '
1564 'escaped.')
smut281c3902018-05-30 17:50:05 -07001565 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001566 try:
1567 data, yielder = get_yielder(base_url, options.limit)
1568 for items in yielder():
1569 if items:
1570 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001571 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001572 sys.stderr.write('.')
1573 sys.stderr.flush()
1574 except Failure as e:
1575 sys.stderr.write('\n%s\n' % e)
1576 return 1
maruel77f720b2015-09-15 12:35:22 -07001577 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001578 sys.stderr.write('\n')
1579 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001580 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001581 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001582 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001583 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001584 try:
maruel77f720b2015-09-15 12:35:22 -07001585 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001586 sys.stdout.write('\n')
1587 except IOError:
1588 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001589 return 0
1590
1591
maruel77f720b2015-09-15 12:35:22 -07001592def CMDquery_list(parser, args):
1593  """Returns the list of all the Swarming APIs that can be used with command
1594 'query'.
1595 """
1596 parser.add_option(
1597 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1598 options, args = parser.parse_args(args)
1599 if args:
1600 parser.error('No argument allowed.')
1601
1602 try:
1603 apis = endpoints_api_discovery_apis(options.swarming)
1604 except APIError as e:
1605 parser.error(str(e))
1606 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001607 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001608 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001609 json.dump(apis, f)
1610 else:
1611 help_url = (
1612 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1613 options.swarming)
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001614 for i, (api_id, api) in enumerate(sorted(apis.items())):
maruel11e31af2017-02-15 07:30:50 -08001615 if i:
1616 print('')
Lei Leife202df2019-06-11 17:33:34 +00001617 print(api_id)
1618 print(' ' + api['description'].strip())
maruel11e31af2017-02-15 07:30:50 -08001619 if 'resources' in api:
1620 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001621 # TODO(maruel): Remove.
1622 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001623 for j, (resource_name, resource) in enumerate(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001624 sorted(api['resources'].items())):
maruel11e31af2017-02-15 07:30:50 -08001625 if j:
1626 print('')
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001627 for method_name, method in sorted(resource['methods'].items()):
maruel11e31af2017-02-15 07:30:50 -08001628 # Only list the GET ones.
1629 if method['httpMethod'] != 'GET':
1630 continue
Lei Leife202df2019-06-11 17:33:34 +00001631 print('- %s.%s: %s' % (
1632 resource_name, method_name, method['path']))
maruel11e31af2017-02-15 07:30:50 -08001633 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001634 ' ' + l for l in textwrap.wrap(
1635 method.get('description', 'No description'), 78)))
Lei Leife202df2019-06-11 17:33:34 +00001636 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel11e31af2017-02-15 07:30:50 -08001637 else:
1638 # New.
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001639 for method_name, method in sorted(api['methods'].items()):
maruel77f720b2015-09-15 12:35:22 -07001640 # Only list the GET ones.
1641 if method['httpMethod'] != 'GET':
1642 continue
Lei Leife202df2019-06-11 17:33:34 +00001643 print('- %s: %s' % (method['id'], method['path']))
maruel11e31af2017-02-15 07:30:50 -08001644 print('\n'.join(
1645 ' ' + l for l in textwrap.wrap(method['description'], 78)))
Lei Leife202df2019-06-11 17:33:34 +00001646 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel77f720b2015-09-15 12:35:22 -07001647 return 0
1648
1649
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001650@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001651def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001652  """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001653
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001654 Basically, does everything to run a command remotely.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001655 """
1656 add_trigger_options(parser)
1657 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001658 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001659 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001660 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001661 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001662 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001663 tasks = trigger_task_shards(
1664 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001665 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001666 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001667 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001668 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001669 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001670 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001671 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001672 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001673 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001674 task_ids = [
1675 t['task_id']
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001676 for t in sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001677 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001678 for task_id in task_ids:
1679 print('Task: {server}/task?id={task}'.format(
1680 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001681 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001682 offset = 0
1683 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001684 m = (offset + s.properties.execution_timeout_secs +
1685 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001686 if m > options.timeout:
1687 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001688 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001689 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001690 try:
1691 return collect(
1692 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001693 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001694 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001695 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001696 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001697 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001698 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001699 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001700 options.perf,
1701 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001702 except Failure:
1703 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001704 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001705
1706
maruel18122c62015-10-23 06:31:23 -07001707@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001708def CMDreproduce(parser, args):
1709 """Runs a task locally that was triggered on the server.
1710
1711  This runs locally the same commands that were run on the bot. The downloaded
1712  data will be in a subdirectory named 'work' of the current working
1713  directory.
maruel18122c62015-10-23 06:31:23 -07001714
1715 You can pass further additional arguments to the target command by passing
1716 them after --.
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001717 """
maruelc070e672016-02-22 17:32:57 -08001718 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001719 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001720 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001721 parser.add_option(
1722 '--work', metavar='DIR', default='work',
1723 help='Directory to map the task input files into')
1724 parser.add_option(
1725 '--cache', metavar='DIR', default='cache',
1726 help='Directory that contains the input cache')
1727 parser.add_option(
1728 '--leak', action='store_true',
1729 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001730 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001731 extra_args = []
1732 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001733 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001734 if len(args) > 1:
1735 if args[1] == '--':
1736 if len(args) > 2:
1737 extra_args = args[2:]
1738 else:
1739 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001740
smut281c3902018-05-30 17:50:05 -07001741 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001742 request = net.url_read_json(url)
1743 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001744 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001745 return 1
1746
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001747 workdir = six.text_type(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001748 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001749 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001750 fs.mkdir(workdir)
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001751 cachedir = six.text_type(os.path.abspath('cipd_cache'))
iannucci31ab9192017-05-02 19:11:56 -07001752 if not fs.exists(cachedir):
1753 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001754
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001755 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001756 env = os.environ.copy()
1757 env['SWARMING_BOT_ID'] = 'reproduce'
1758 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001759 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001760 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001761 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001762 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001763 if not i['value']:
1764 env.pop(key, None)
1765 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001766 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001767
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001768 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001769 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001770 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001771 for i in env_prefixes:
1772 key = i['key']
1773 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001774 cur = env.get(key)
1775 if cur:
1776 paths.append(cur)
1777 env[key] = os.path.pathsep.join(paths)
1778
iannucci31ab9192017-05-02 19:11:56 -07001779 command = []
nodir152cba62016-05-12 16:08:56 -07001780 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001781 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001782 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001783 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001784 properties['inputs_ref']['namespace'])
1785 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001786      # Do not use MemoryContentAddressedCache here, as on 32-bit Python,
1787 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1788 # leak.
1789 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001790 cache = local_caching.DiskContentAddressedCache(
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001791 six.text_type(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001792 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001793 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001794 command = bundle.command
1795 if bundle.relative_cwd:
1796 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001797 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001798
1799 if properties.get('command'):
1800 command.extend(properties['command'])
1801
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001802 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001803 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001804 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001805 new_command = run_isolated.process_command(command, 'invalid', None)
1806 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001807      parser.error('The task has outputs, you must use --output')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001808 else:
1809 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001810 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001811 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001812 command, options.output, None)
1813 if not os.path.isdir(options.output):
1814 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001815 command = new_command
1816 file_path.ensure_command_has_abs_path(command, workdir)
1817
1818 if properties.get('cipd_input'):
1819 ci = properties['cipd_input']
1820 cp = ci['client_package']
1821 client_manager = cipd.get_client(
1822 ci['server'], cp['package_name'], cp['version'], cachedir)
1823
1824 with client_manager as client:
1825 by_path = collections.defaultdict(list)
1826 for pkg in ci['packages']:
1827 path = pkg['path']
1828 # cipd deals with 'root' as ''
1829 if path == '.':
1830 path = ''
1831 by_path[path].append((pkg['package_name'], pkg['version']))
1832 client.ensure(workdir, by_path, cache_dir=cachedir)
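      # Sketch of by_path built above (hypothetical packages): entries with
      # path '.' are keyed under '', e.g.
      # {'': [('some/pkg', 'latest')], 'bin': [('other/pkg', 'stable')]},
      # and cipd installs each list of (package, version) into that directory.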
1833
maruel77f720b2015-09-15 12:35:22 -07001834 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001835 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001836 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001837 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1838 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001839 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001840 finally:
1841 # Do not delete options.cache.
1842 if not options.leak:
1843 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001844
1845
maruel0eb1d1b2015-10-02 14:48:21 -07001846@subcommand.usage('bot_id')
1847def CMDterminate(parser, args):
1848 """Tells a bot to gracefully shut itself down as soon as it can.
1849
1850  This is done by completing whatever task is currently running, then exiting the bot
1851 process.
1852 """
1853 parser.add_option(
1854 '--wait', action='store_true', help='Wait for the bot to terminate')
1855 options, args = parser.parse_args(args)
1856 if len(args) != 1:
1857 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001858 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001859 request = net.url_read_json(url, data={})
1860 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001861 print('Failed to ask for termination', file=sys.stderr)
maruel0eb1d1b2015-10-02 14:48:21 -07001862 return 1
1863 if options.wait:
1864 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001865 options.swarming,
1866 [request['task_id']],
1867 0.,
1868 False,
1869 False,
1870 None,
1871 None,
1872 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001873 False,
1874 None)
maruelbfc5f872017-06-10 16:43:17 -07001875 else:
Lei Leife202df2019-06-11 17:33:34 +00001876 print(request['task_id'])
maruel0eb1d1b2015-10-02 14:48:21 -07001877 return 0
1878
1879
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001880@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001881def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001882 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001883
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001884 Passes all extra arguments provided after '--' as additional command line
1885  arguments for an isolated command specified in a *.isolate file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001886 """
1887 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001888 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001889 parser.add_option(
1890 '--dump-json',
1891 metavar='FILE',
1892 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001893 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001894 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001895 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001896 tasks = trigger_task_shards(
1897 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001898 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001899 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001900 tasks_sorted = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001901 tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001902 if options.dump_json:
1903 data = {
maruel0a25f6c2017-05-10 10:43:23 -07001904 'base_task_name': task_request.name,
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001905 'tasks': tasks,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001906 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001907 }
maruel46b015f2015-10-13 18:40:35 -07001908        tools.write_json(six.text_type(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001909 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001910 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001911 (options.swarming, options.dump_json))
1912 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001913 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001914 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001915 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1916 print('Or visit:')
1917 for t in tasks_sorted:
1918 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001919 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001920 except Failure:
1921 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001922 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001923
1924
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001925class OptionParserSwarming(logging_utils.OptionParserWithLogging):
maruel@chromium.org0437a732013-08-27 16:05:52 +00001926 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001927 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001928 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001929 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001930 self.server_group.add_option(
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001931 '-S', '--swarming',
Kevin Graney5346c162014-01-24 12:20:01 -05001932 metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001933 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001934 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001935 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001936
1937 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001938 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001939 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001940 auth.process_auth_options(self, options)
1941 user = self._process_swarming(options)
1942 if hasattr(options, 'user') and not options.user:
1943 options.user = user
1944 return options, args
1945
1946 def _process_swarming(self, options):
1947 """Processes the --swarming option and aborts if not specified.
1948
1949 Returns the identity as determined by the server.
1950 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001951 if not options.swarming:
1952 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001953 try:
1954 options.swarming = net.fix_url(options.swarming)
1955 except ValueError as e:
1956 self.error('--swarming %s' % e)
1957 on_error.report_on_exception_exit(options.swarming)
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001958 try:
1959 user = auth.ensure_logged_in(options.swarming)
1960 except ValueError as e:
1961 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001962 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00001963
1964
1965def main(args):
1966 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001967 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001968
1969
1970if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07001971 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00001972 fix_encoding.fix_encoding()
1973 tools.disable_buffering()
1974 colorama.init()
1975 sys.exit(main(sys.argv[1:]))