#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
"""Client tool to trigger tasks or retrieve results from a Swarming server."""

from __future__ import print_function

__version__ = '1.0'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time
import uuid

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand
import six
from six.moves import urllib

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils


class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (options.user, '_'.join(
        '%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name

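# Illustrative sketch (not part of the original file; values are hypothetical):
# with options.user='joe', options.dimensions=[('os', 'Linux'),
# ('pool', 'default')] and options.isolated='deadbeef',
# default_task_name(options) would return
#   u'joe/os=Linux_pool=default/deadbeef'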

### Triggering.

# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple('CipdPackage', [
    'package_name',
    'path',
    'version',
])

# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple('CipdInput', [
    'client_package',
    'packages',
    'server',
])

# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple('FilesRef', [
    'isolated',
    'isolatedserver',
    'namespace',
])

# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
    'StringListPair',
    [
        'key',
        'value',  # repeated string
    ])

# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple('Containment', [
    'lower_priority',
    'containment_type',
])

# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple('TaskProperties', [
    'caches',
    'cipd_input',
    'command',
    'containment',
    'relative_cwd',
    'dimensions',
    'env',
    'env_prefixes',
    'execution_timeout_secs',
    'extra_args',
    'grace_period_secs',
    'idempotent',
    'inputs_ref',
    'io_timeout_secs',
    'outputs',
    'secret_bytes',
])

# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple('TaskSlice', [
    'expiration_secs',
    'properties',
    'wait_for_capacity',
])

# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple('NewTaskRequest', [
    'name',
    'parent_task_id',
    'priority',
    'realm',
    'task_slices',
    'service_account',
    'tags',
    'user',
    'pool_task_template',
])


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  # json.dumps in Python3 doesn't support bytes.
  if isinstance(value, bytes):
    return six.ensure_str(value)
  return value

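# Illustrative sketch (not part of the original file; the package is
# hypothetical): given
#   pkg = CipdPackage(package_name='infra/tools/foo', path='bin',
#                     version='latest')
# namedtuple_to_dict(pkg) returns
#   {'package_name': 'infra/tools/foo', 'path': 'bin', 'version': 'latest'}
# and nested namedtuples, lists and dicts are converted recursively.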

def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for a new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None, to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [{
        'key': k,
        'value': v
    } for k, v in task_slice['properties']['env'].items()]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  out['request_uuid'] = str(uuid.uuid4())
  return out

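# Illustrative sketch (not part of the original file; values are hypothetical):
# a slice whose properties carry env={'FOO': '1', 'BAR': '2'} is serialized by
# task_request_to_raw_request() as the key-sorted list
#   [{'key': 'BAR', 'value': '2'}, {'key': 'FOO', 'value': '1'}]
# which is the list-of-pairs form the server-side API expects.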

def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env

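# Illustrative sketch (not part of the original file): with
#   env = [{'key': 'FOO', 'value': '1'}]
# setup_googletest(env, 3, 1) returns a copy of the list extended with
#   {'key': 'GTEST_SHARD_INDEX', 'value': '1'} and
#   {'key': 'GTEST_TOTAL_SHARDS', 'value': '3'};
# the caller's list is left untouched since env = env[:] copies it first.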

def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """

  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the
        index of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = 1
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task. These should be functionally
      # equivalent but we cannot guarantee that. If we see the
      # GTEST_SHARD_INDEX env var, we assume that it applies to all slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)
      if shard_index and total_shards:
        req['tags'] += [
            'shard_index:%d' % shard_index,
            'total_shards:%d' % total_shards,
        ]

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print(
          'Priority was reset to %s' % task['request']['priority'],
          file=sys.stderr)
    tasks[request['name']] = {
        'shard_index': shard_index,
        'task_id': task['task_id'],
        'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print(
          'Only %d shard(s) out of %d were triggered' %
          (len(tasks), len(requests)),
          file=sys.stderr)
    for task_dict in tasks.values():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks

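# Illustrative sketch (not part of the original file; host and ids are
# hypothetical): on success trigger_task_shards() returns a dict keyed by the
# per-shard request name, e.g. for shards=2 and a request named 'mytask':
#   {
#     'mytask:0:2': {'shard_index': 0, 'task_id': '4f1a0', 'view_url': ...},
#     'mytask:1:2': {'shard_index': 1, 'task_id': '4f1a1', 'view_url': ...},
#   }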

### Collection.

# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
      'RUNNING': RUNNING,
      'PENDING': PENDING,
      'EXPIRED': EXPIRED,
      'TIMED_OUT': TIMED_OUT,
      'BOT_DIED': BOT_DIED,
      'CANCELED': CANCELED,
      'COMPLETED': COMPLETED,
      'KILLED': KILLED,
      'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]

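# Illustrative sketch (not part of the original file):
#   TaskState.from_enum('COMPLETED') == 0x70
#   TaskState.from_enum('SLEEPING')  # raises ValueError('Invalid state ...')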

class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      shard_count: expected number of task shards.
    """
    self.task_output_dir = (
        six.text_type(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if needed.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning('Shard index %d is outside of expected range: [0; %d]',
                      shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.parse.urlencode([('namespace', ref['namespace']),
                                  ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'], storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
            os.path.join(self.task_output_dir, str(shard_index)), False,
            self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shuts down the Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
          'shards': [
              self._per_shard_results.get(i) for i in range(self.shard_count)
          ],
      }

      # Don't store stdout in the summary if it was not requested.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'), summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files."""
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error('Task shards are using multiple namespaces: %s and %s',
                        self._storage.server_ref.namespace,
                        server_ref.namespace)
          return None
      return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)

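# Illustrative sketch (not part of the original file): both serialized forms
# are accepted:
#   parse_time('2010-01-02T03:04:05.123456')
#       -> datetime.datetime(2010, 1, 2, 3, 4, 5, 123456)
#   parse_time('2010-01-02T03:04:05')
#       -> datetime.datetime(2010, 1, 2, 3, 4, 5)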

def retrieve_results(base_url, shard_index, task_id, timeout, should_stop,
                     output_collector, include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d', base_url,
                    attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning('Error while reading task: %s; %s',
                          err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning('Error while reading task: %s',
                        result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result


def yield_results(swarm_base_url, task_ids, timeout, max_threads,
                  print_status_updates, output_collector, include_perf,
                  fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's not
  normally worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(0, results_channel.wrap_task(task_fn), swarm_base_url,
                      shard_index, task_id, timeout, should_stop,
                      output_collector, include_perf, fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      # Convert to list, since range in Python3 doesn't have remove.
      shards_remaining = list(range(len(task_ids)))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print('%s '
                  'Waiting for results from the following shards: %s' %
                  (time_now, ', '.join(map(str, shards_remaining))))
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()

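# Illustrative sketch (not part of the original file; host and ids are
# hypothetical): a caller drives this generator roughly like
#   for index, result in yield_results(
#       'https://swarming.example.com', ['task_id0', 'task_id1'], 3600.,
#       None, False, None, False, True):
#     print(index, result['state'])
# where each 'result' is the parsed task result JSON for that shard.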

def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (parse_time(metadata['started_ts']) -
                         parse_time(metadata['created_ts'])).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (parse_time(metadata['abandoned_ts']) -
                         parse_time(metadata['created_ts'])).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as strings to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
    ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
  ])


def collect(swarming, task_ids, timeout, decorate, print_status_updates,
            task_summary_json, task_output_dir, task_output_stdout,
            include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(task_output_dir, task_output_stdout,
                                         len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming,
        task_ids,
        timeout,
        None,
        print_status_updates,
        output_collector,
        include_perf,
        (len(task_output_stdout) > 0),
    ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        # s is bytes in Python3; print would not render it with a nice format,
        # so decode s to str.
        s = six.ensure_str(
            decorate_shard_output(swarming, index, metadata,
                                  "console" in task_output_stdout).encode(
                                      'utf-8', 'replace'))
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (metadata.get(
            'bot_id', 'N/A'), metadata['task_id'], shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join(' %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print(
        'Results from some shards are missing: %s' %
        ', '.join(map(str, missing_shards)),
        file=sys.stderr)
    return 1

  return exit_code if exit_code is not None else 1

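# Illustrative sketch (not part of the original file; host and values are
# hypothetical): a caller would invoke collect() roughly like
#   exit_code = collect(
#       'https://swarming.example.com', ['task_id0'], 3600., True, False,
#       None, None, ['console', 'json'], False, None)
# i.e. decorate the output, keep both console and JSON stdout, and skip perf
# stats and filepath filtering.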

### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Try to fetch the following pages
    # automatically, if cursors are needed, by looking at the 'cursor' item.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.parse.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder

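# Illustrative sketch (not part of the original file; the host and endpoint
# are assumptions): callers combine the first page with the lazily fetched
# ones roughly like
#   data, yielder = get_yielder(
#       'https://swarming.example.com/_ah/api/swarming/v1/tasks/list', 0)
#   items = data.get('items') or []
#   for batch in yielder():
#     if batch:
#       items.extend(batch)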
877
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500878### Commands.
879
880
881def abort_task(_swarming, _manifest):
882 """Given a task manifest that was triggered, aborts its execution."""
883 # TODO(vadimsh): No supported by the server yet.
884
885
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400886def add_filter_options(parser):
maruel681d6802017-01-17 16:56:03 -0800887 parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500888 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +0000889 '-d',
890 '--dimension',
891 default=[],
892 action='append',
893 nargs=2,
894 dest='dimensions',
895 metavar='FOO bar',
Marc-Antoine Ruel92f32422013-11-06 18:12:13 -0500896 help='dimension to filter on')
Brad Hallf78187a2018-10-19 17:08:55 +0000897 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +0000898 '--optional-dimension',
899 default=[],
900 action='append',
901 nargs=3,
902 dest='optional_dimensions',
903 metavar='key value expiration',
Brad Hallf78187a2018-10-19 17:08:55 +0000904 help='optional dimensions which will result in additional task slices ')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500905 parser.add_option_group(parser.filter_group)
906
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400907
Brad Hallf78187a2018-10-19 17:08:55 +0000908def _validate_filter_option(parser, key, value, expiration, argname):
909 if ':' in key:
910 parser.error('%s key cannot contain ":"' % argname)
911 if key.strip() != key:
912 parser.error('%s key has whitespace' % argname)
913 if not key:
914 parser.error('%s key is empty' % argname)
915
916 if value.strip() != value:
917 parser.error('%s value has whitespace' % argname)
918 if not value:
919 parser.error('%s value is empty' % argname)
920
921 if expiration is not None:
922 try:
923 expiration = int(expiration)
924 except ValueError:
925 parser.error('%s expiration is not an integer' % argname)
926 if expiration <= 0:
927 parser.error('%s expiration should be positive' % argname)
928 if expiration % 60 != 0:
929 parser.error('%s expiration is not divisible by 60' % argname)
930
931
maruelaf6b06c2017-06-08 06:26:53 -0700932def process_filter_options(parser, options):
933 for key, value in options.dimensions:
Brad Hallf78187a2018-10-19 17:08:55 +0000934 _validate_filter_option(parser, key, value, None, 'dimension')
935 for key, value, exp in options.optional_dimensions:
936 _validate_filter_option(parser, key, value, exp, 'optional-dimension')
maruelaf6b06c2017-06-08 06:26:53 -0700937 options.dimensions.sort()
938
939
Vadim Shtayurab450c602014-05-12 19:23:25 -0700940def add_sharding_options(parser):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -0400941 parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
Vadim Shtayurab450c602014-05-12 19:23:25 -0700942 parser.sharding_group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000943 '--shards',
944 type='int',
945 default=1,
946 metavar='NUMBER',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700947 help='Number of shards to trigger and collect.')
948 parser.add_option_group(parser.sharding_group)
949
950
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400951def add_trigger_options(parser):
952 """Adds all options to trigger a task on Swarming."""
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -0500953 isolateserver.add_isolate_server_options(parser)
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400954 add_filter_options(parser)
955
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -0400956 group = optparse.OptionGroup(parser, 'TaskSlice properties')
maruel681d6802017-01-17 16:56:03 -0800957 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000958 '-s',
959 '--isolated',
960 metavar='HASH',
Marc-Antoine Ruel185ded42015-01-28 20:49:18 -0500961 help='Hash of the .isolated to grab from the isolate server')
maruel681d6802017-01-17 16:56:03 -0800962 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000963 '-e',
964 '--env',
965 default=[],
966 action='append',
967 nargs=2,
968 metavar='FOO bar',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700969 help='Environment variables to set')
maruel681d6802017-01-17 16:56:03 -0800970 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000971 '--env-prefix',
972 default=[],
973 action='append',
974 nargs=2,
Robert Iannuccibf5f84c2017-11-22 12:56:50 -0800975 metavar='VAR local/path',
976 help='Prepend task-relative `local/path` to the task\'s VAR environment '
Junji Watanabe38b28b02020-04-23 10:23:30 +0000977 'variable using os-appropriate pathsep character. Can be specified '
978 'multiple times for the same VAR to add multiple paths.')
Robert Iannuccibf5f84c2017-11-22 12:56:50 -0800979 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000980 '--idempotent',
981 action='store_true',
982 default=False,
Marc-Antoine Ruel02196392014-10-17 16:29:43 -0400983 help='When set, the server will actively try to find a previous task '
Junji Watanabe38b28b02020-04-23 10:23:30 +0000984 'with the same parameter and return this result instead if possible')
maruel681d6802017-01-17 16:56:03 -0800985 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000986 '--secret-bytes-path',
987 metavar='FILE',
Stephen Martinisf391c772019-02-01 01:22:12 +0000988 help='The optional path to a file containing the secret_bytes to use '
Junji Watanabe38b28b02020-04-23 10:23:30 +0000989 'with this task.')
maruel681d6802017-01-17 16:56:03 -0800990 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000991 '--hard-timeout',
992 type='int',
993 default=60 * 60,
994 metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400995 help='Seconds to allow the task to complete.')
maruel681d6802017-01-17 16:56:03 -0800996 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +0000997 '--io-timeout',
998 type='int',
999 default=20 * 60,
1000 metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001001 help='Seconds to allow the task to be silent.')
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001002 parser.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001003 '--lower-priority',
1004 action='store_true',
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001005 help='Lowers the child process priority')
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001006 containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
1007 parser.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001008 '--containment-type',
1009 default='NONE',
1010 metavar='NONE',
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001011 choices=containment_choices,
1012 help='Containment to use; one of: %s' % ', '.join(containment_choices))
maruel681d6802017-01-17 16:56:03 -08001013 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001014 '--raw-cmd',
1015 action='store_true',
1016 default=False,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001017 help='When set, the command after -- is used as-is without run_isolated. '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001018 'In this case, the .isolated file is expected to not have a command')
maruel681d6802017-01-17 16:56:03 -08001019 group.add_option(
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001020 '--relative-cwd',
1021 help='Ignore the isolated \'relative_cwd\' and use this one instead; '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001022 'requires --raw-cmd')
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001023 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001024 '--cipd-package',
1025 action='append',
1026 default=[],
1027 metavar='PKG',
maruel5475ba62017-05-31 15:35:47 -07001028 help='CIPD packages to install on the Swarming bot. Uses the format: '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001029 'path:package_name:version')
maruel681d6802017-01-17 16:56:03 -08001030 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001031 '--named-cache',
1032 action='append',
1033 nargs=2,
1034 default=[],
maruel5475ba62017-05-31 15:35:47 -07001035 metavar='NAME RELPATH',
maruel681d6802017-01-17 16:56:03 -08001036 help='"<name> <relpath>" items to keep a persistent bot managed cache')
1037 group.add_option(
vadimsh93d167c2016-09-13 11:31:51 -07001038 '--service-account',
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001039 help='Email of a service account to run the task as, or literal "bot" '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001040 'string to indicate that the task should use the same account the '
1041 'bot itself is using to authenticate to Swarming. Don\'t use task '
1042 'service accounts if not given (default).')
maruel681d6802017-01-17 16:56:03 -08001043 group.add_option(
Robert Iannuccifafa7352018-06-13 17:08:17 +00001044 '--pool-task-template',
1045 choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
1046 default='AUTO',
1047 help='Set how you want swarming to apply the pool\'s TaskTemplate. '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001048 'By default, the pool\'s TaskTemplate is automatically selected, '
1049 'according the pool configuration on the server. Choices are: '
1050 'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
Robert Iannuccifafa7352018-06-13 17:08:17 +00001051 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001052 '-o',
1053 '--output',
1054 action='append',
1055 default=[],
1056 metavar='PATH',
maruel5475ba62017-05-31 15:35:47 -07001057 help='A list of files to return in addition to those written to '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001058 '${ISOLATED_OUTDIR}. An error will occur if a file specified by'
1059 'this option is also written directly to ${ISOLATED_OUTDIR}.')
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001060 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001061 '--wait-for-capacity',
1062 action='store_true',
1063 default=False,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001064 help='Instructs to leave the task PENDING even if there\'s no known bot '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001065 'that could run this task, otherwise the task will be denied with '
1066 'NO_RESOURCE')
maruel681d6802017-01-17 16:56:03 -08001067 parser.add_option_group(group)
1068
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001069 group = optparse.OptionGroup(parser, 'TaskRequest details')
maruel681d6802017-01-17 16:56:03 -08001070 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001071 '--priority',
1072 type='int',
1073 default=200,
maruel681d6802017-01-17 16:56:03 -08001074 help='The lower value, the more important the task is')
1075 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001076 '-T',
1077 '--task-name',
1078 metavar='NAME',
maruel681d6802017-01-17 16:56:03 -08001079 help='Display name of the task. Defaults to '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001080 '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
1081 'isolated file is provided, if a hash is provided, it defaults to '
1082 '<user>/<dimensions>/<isolated hash>/<timestamp>')
maruel681d6802017-01-17 16:56:03 -08001083 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001084 '--tags',
1085 action='append',
1086 default=[],
1087 metavar='FOO:BAR',
maruel681d6802017-01-17 16:56:03 -08001088 help='Tags to assign to the task.')
1089 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001090 '--user',
1091 default='',
maruel681d6802017-01-17 16:56:03 -08001092 help='User associated with the task. Defaults to authenticated user on '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001093 'the server.')
maruel681d6802017-01-17 16:56:03 -08001094 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001095 '--expiration',
1096 type='int',
1097 default=6 * 60 * 60,
1098 metavar='SECS',
maruel681d6802017-01-17 16:56:03 -08001099 help='Seconds to allow the task to be pending for a bot to run before '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001100 'this task request expires.')
maruel681d6802017-01-17 16:56:03 -08001101 group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001102 '--deadline', type='int', dest='expiration', help=optparse.SUPPRESS_HELP)
Junji Watanabe71bbaef2020-07-21 08:55:37 +00001103 group.add_option(
1104 '--realm',
1105 dest='realm',
1106 metavar='REALM',
1107 help='Realm associated with the task.')
maruel681d6802017-01-17 16:56:03 -08001108 parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001109
1110
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001111def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001112 """Processes trigger options and does preparatory steps.
1113
1114 Returns:
1115 NewTaskRequest instance.
1116 """
maruelaf6b06c2017-06-08 06:26:53 -07001117 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001118 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001119 if args and args[0] == '--':
1120 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001121
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001122 if not options.dimensions:
1123 parser.error('Please specify at least one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001124 if not any(k == 'pool' for k, _v in options.dimensions):
1125 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001126 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1127 parser.error('--tags must be in the format key:value')
1128 if options.raw_cmd and not args:
1129 parser.error(
1130 'Arguments with --raw-cmd should be passed after -- as command '
1131 'delimiter.')
1132 if options.isolate_server and not options.namespace:
1133 parser.error(
1134 '--namespace must be a valid value when --isolate-server is used')
1135 if not options.isolated and not options.raw_cmd:
1136 parser.error('Specify at least one of --raw-cmd or --isolated (or both)')
1137
1138 # Isolated
1139 # --isolated is required only if --raw-cmd wasn't provided.
1140 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1141 # preferred server.
Takuto Ikutaae767b32020-05-11 01:22:19 +00001142 isolateserver.process_isolate_server_options(parser, options,
1143 not options.raw_cmd)
maruel0a25f6c2017-05-10 10:43:23 -07001144 inputs_ref = None
1145 if options.isolate_server:
1146 inputs_ref = FilesRef(
1147 isolated=options.isolated,
1148 isolatedserver=options.isolate_server,
1149 namespace=options.namespace)
1150
1151 # Command
1152 command = None
1153 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001154 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001155 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001156 if options.relative_cwd:
1157 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1158 if not a.startswith(os.getcwd()):
1159 parser.error(
1160 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001161 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001162 if options.relative_cwd:
1163 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001164 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001165
maruel0a25f6c2017-05-10 10:43:23 -07001166 # CIPD
borenet02f772b2016-06-22 12:42:19 -07001167 cipd_packages = []
1168 for p in options.cipd_package:
1169 split = p.split(':', 2)
1170 if len(split) != 3:
1171 parser.error('CIPD packages must take the form: path:package:version')
Junji Watanabe38b28b02020-04-23 10:23:30 +00001172 cipd_packages.append(
1173 CipdPackage(package_name=split[1], path=split[0], version=split[2]))
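  # For example (hypothetical package name), '--cipd-package .:some/pkg:latest'
  # would install 'some/pkg' at version 'latest' into the task root directory.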
borenet02f772b2016-06-22 12:42:19 -07001174 cipd_input = None
1175 if cipd_packages:
1176 cipd_input = CipdInput(
Junji Watanabecb054042020-07-21 08:43:26 +00001177 client_package=None, packages=cipd_packages, server=None)
borenet02f772b2016-06-22 12:42:19 -07001178
maruel0a25f6c2017-05-10 10:43:23 -07001179 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001180 secret_bytes = None
1181 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001182 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001183 secret_bytes = f.read().encode('base64')
1184
maruel0a25f6c2017-05-10 10:43:23 -07001185 # Named caches
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001186 caches = [{
1187 u'name': six.text_type(i[0]),
1188 u'path': six.text_type(i[1])
1189 } for i in options.named_cache]
maruel0a25f6c2017-05-10 10:43:23 -07001190
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001191 env_prefixes = {}
1192 for k, v in options.env_prefix:
1193 env_prefixes.setdefault(k, []).append(v)
1194
Brad Hallf78187a2018-10-19 17:08:55 +00001195 # Get dimensions into the key/value format we can manipulate later.
Junji Watanabecb054042020-07-21 08:43:26 +00001196 orig_dims = [{
1197 'key': key,
1198 'value': value
1199 } for key, value in options.dimensions]
Brad Hallf78187a2018-10-19 17:08:55 +00001200 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1201
1202 # Construct base properties that we will use for all the slices, adding in
1203 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001204 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001205 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001206 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001207 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001208 containment=Containment(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001209 lower_priority=bool(options.lower_priority),
1210 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001211 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001212 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001213 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001214 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001215 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001216 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001217 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001218 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001219 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001220 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001221 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001222 outputs=options.output,
1223 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001224
1225 slices = []
1226
1227 # Group the optional dimensions by expiration.
1228 dims_by_exp = {}
1229 for key, value, exp_secs in options.optional_dimensions:
Junji Watanabecb054042020-07-21 08:43:26 +00001230 dims_by_exp.setdefault(int(exp_secs), []).append({
1231 'key': key,
1232 'value': value
1233 })
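  # Illustrative example (hypothetical values): two optional dimensions
  # ('gpu', 'nvidia', 600) and ('gpu', 'any', 1200) yield
  # dims_by_exp == {600: [{'key': 'gpu', 'value': 'nvidia'}],
  #                 1200: [{'key': 'gpu', 'value': 'any'}]}.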
Brad Hallf78187a2018-10-19 17:08:55 +00001234
1235 # Create the optional slices with expiration deltas, we fix up the properties
1236 # below.
1237 last_exp = 0
1238 for expiration_secs in sorted(dims_by_exp):
1239 t = TaskSlice(
1240 expiration_secs=expiration_secs - last_exp,
1241 properties=properties,
1242 wait_for_capacity=False)
1243 slices.append(t)
1244 last_exp = expiration_secs
1245
1246 # Add back in the default slice (the last one).
1247 exp = max(int(options.expiration) - last_exp, 60)
1248 base_task_slice = TaskSlice(
1249 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001250 properties=properties,
1251 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001252 slices.append(base_task_slice)
1253
Brad Hall7f463e62018-11-16 16:13:30 +00001254 # Add optional dimensions to the task slices, replacing a dimension that
1255 # has the same key if it is a dimension where repeating isn't valid (otherwise
1256 # we append it). Currently the only dimension we can repeat is "caches"; the
1257 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001258 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001259 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001260 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001261 # Replace or append the key/value pairs for this expiration in extra_dims;
1262 # we keep extra_dims around because we are iterating backwards and filling
1263 # in slices with shorter expirations. Dimensions expire as time goes on so
1264 # the slices that expire earlier will generally have more dimensions.
1265 for kv in kvs:
1266 if kv['key'] == 'caches':
1267 extra_dims.append(kv)
1268 else:
1269 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1270 # Then, add all the optional dimensions to the original dimension set, again
1271 # replacing if needed.
1272 for kv in extra_dims:
1273 if kv['key'] == 'caches':
1274 dims.append(kv)
1275 else:
1276 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001277 dims.sort(key=lambda x: (x['key'], x['value']))
1278 slice_properties = properties._replace(dimensions=dims)
1279 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
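  # Illustrative outcome (hypothetical values): continuing the example above
  # with a required dimension pool:Chrome and --expiration 3600, three slices
  # are produced:
  #   0-600s      pool:Chrome, gpu:nvidia
  #   600-1200s   pool:Chrome, gpu:any
  #   1200-3600s  pool:Chrome only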
1280
maruel77f720b2015-09-15 12:35:22 -07001281 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001282 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001283 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001284 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001285 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001286 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001287 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001288 user=options.user,
Junji Watanabe71bbaef2020-07-21 08:55:37 +00001289 pool_task_template=options.pool_task_template,
1290 realm=options.realm)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001291
1292
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001293class TaskOutputStdoutOption(optparse.Option):
1294 """Where to output the each task's console output (stderr/stdout).
1295
1296 The output will be:
1297 none - not downloaded.
1298 json - stored in summary.json file *only*.
1299 console - shown on stdout *only*.
1300 all - stored in summary.json and shown on stdout.
1301 """
1302
1303 choices = ['all', 'json', 'console', 'none']
1304
1305 def __init__(self, *args, **kw):
1306 optparse.Option.__init__(
1307 self,
1308 *args,
1309 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001310 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001311 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001312 **kw)
1313
1314 def convert_value(self, opt, value):
1315 if value not in self.choices:
Junji Watanabecb054042020-07-21 08:43:26 +00001316 raise optparse.OptionValueError(
1317 "%s must be one of %s not %r" %
1318 (self.get_opt_string(), self.choices, value))
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001319 stdout_to = []
1320 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001321 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001322 elif value != 'none':
1323 stdout_to = [value]
1324 return stdout_to
1325
1326
maruel@chromium.org0437a732013-08-27 16:05:52 +00001327def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001328 parser.server_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001329 '-t',
1330 '--timeout',
1331 type='float',
1332 default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001333 help='Timeout to wait for result, set to -1 for no timeout and get '
Junji Watanabecb054042020-07-21 08:43:26 +00001334 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001335 parser.group_logging.add_option(
1336 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001337 parser.group_logging.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001338 '--print-status-updates',
1339 action='store_true',
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001340 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001341 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001342 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001343 '--task-summary-json',
1344 metavar='FILE',
1345 help='Dump a summary of task results to this file as json. It contains '
Junji Watanabecb054042020-07-21 08:43:26 +00001346 'only shard statuses as known to the server directly. Any output files '
1347 'emitted by the task can be collected by using --task-output-dir')
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001348 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001349 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001350 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001351 help='Directory to put task results into. When the task finishes, this '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001352 'directory contains a per-shard directory with output files produced '
1353 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
maruel9531ce02016-04-13 06:11:23 -07001354 parser.task_output_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001355 TaskOutputStdoutOption('--task-output-stdout'))
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001356 parser.task_output_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001357 '--filepath-filter',
1358 help='A regexp filter used to specify which file paths to download when '
1359 'collecting isolated output.')
1360 parser.task_output_group.add_option(
1361 '--perf',
1362 action='store_true',
1363 default=False,
maruel9531ce02016-04-13 06:11:23 -07001364 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001365 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001366
1367
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001368def process_collect_options(parser, options):
1369 # Only negative -1 is allowed, disallow other negative values.
1370 if options.timeout != -1 and options.timeout < 0:
1371 parser.error('Invalid --timeout value')
1372
1373
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001374@subcommand.usage('bots...')
1375def CMDbot_delete(parser, args):
1376 """Forcibly deletes bots from the Swarming server."""
1377 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001378 '-f',
1379 '--force',
1380 action='store_true',
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001381 help='Do not prompt for confirmation')
1382 options, args = parser.parse_args(args)
1383 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001384 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001385
1386 bots = sorted(args)
1387 if not options.force:
1388 print('Delete the following bots?')
1389 for bot in bots:
1390 print(' %s' % bot)
1391 if six.moves.input('Continue? [y/N] ') not in ('y', 'Y'):
1392 print('Goodbye.')
1393 return 1
1394
1395 result = 0
1396 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001397 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001398 if net.url_read_json(url, data={}, method='POST') is None:
1399 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001400 result = 1
1401 return result
1402
1403
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001404def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001405 """Returns information about the bots connected to the Swarming server."""
1406 add_filter_options(parser)
1407 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001408 '--dead-only',
1409 action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001410 help='Filter out alive bots; useful to reap dead bots and reimage broken ones')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001411 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001412 '-k',
1413 '--keep-dead',
1414 action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001415 help='Keep both dead and alive bots')
1416 parser.filter_group.add_option(
1417 '--busy', action='store_true', help='Keep only busy bots')
1418 parser.filter_group.add_option(
1419 '--idle', action='store_true', help='Keep only idle bots')
1420 parser.filter_group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001421 '--mp',
1422 action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001423 help='Keep only Machine Provider managed bots')
1424 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001425 '--non-mp',
1426 action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001427 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001428 parser.filter_group.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001429 '-b', '--bare', action='store_true', help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001430 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001431 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001432
1433 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001434 parser.error('Use only one of --keep-dead or --dead-only')
1435 if options.busy and options.idle:
1436 parser.error('Use only one of --busy or --idle')
1437 if options.mp and options.non_mp:
1438 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001439
smut281c3902018-05-30 17:50:05 -07001440 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001441 values = []
1442 if options.dead_only:
1443 values.append(('is_dead', 'TRUE'))
1444 elif options.keep_dead:
1445 values.append(('is_dead', 'NONE'))
1446 else:
1447 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001448
maruelaf6b06c2017-06-08 06:26:53 -07001449 if options.busy:
1450 values.append(('is_busy', 'TRUE'))
1451 elif options.idle:
1452 values.append(('is_busy', 'FALSE'))
1453 else:
1454 values.append(('is_busy', 'NONE'))
1455
1456 if options.mp:
1457 values.append(('is_mp', 'TRUE'))
1458 elif options.non_mp:
1459 values.append(('is_mp', 'FALSE'))
1460 else:
1461 values.append(('is_mp', 'NONE'))
1462
1463 for key, value in options.dimensions:
1464 values.append(('dimensions', '%s:%s' % (key, value)))
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +00001465 url += urllib.parse.urlencode(values)
maruelaf6b06c2017-06-08 06:26:53 -07001466 try:
1467 data, yielder = get_yielder(url, 0)
1468 bots = data.get('items') or []
1469 for items in yielder():
1470 if items:
1471 bots.extend(items)
1472 except Failure as e:
1473 sys.stderr.write('\n%s\n' % e)
1474 return 1
maruel77f720b2015-09-15 12:35:22 -07001475 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
Lei Leife202df2019-06-11 17:33:34 +00001476 print(bot['bot_id'])
maruelaf6b06c2017-06-08 06:26:53 -07001477 if not options.bare:
1478 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
Lei Leife202df2019-06-11 17:33:34 +00001479 print(' %s' % json.dumps(dimensions, sort_keys=True))
maruelaf6b06c2017-06-08 06:26:53 -07001480 if bot.get('task_id'):
Lei Leife202df2019-06-11 17:33:34 +00001481 print(' task: %s' % bot['task_id'])
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001482 return 0
1483
1484
maruelfd0a90c2016-06-10 11:51:10 -07001485@subcommand.usage('task_id')
1486def CMDcancel(parser, args):
1487 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001488 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001489 '-k',
1490 '--kill-running',
1491 action='store_true',
1492 default=False,
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001493 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001494 options, args = parser.parse_args(args)
1495 if not args:
1496 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001497 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001498 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001499 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001500 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001501 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001502 print('Canceling %s failed. Probably already gone' % task_id)
1503 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001504 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001505 return 0
1506
1507
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001508@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001509def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001510 """Retrieves results of one or multiple Swarming tasks by their IDs.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001511
1512 The result can be in multiple parts if the execution was sharded. It can
1513 potentially have retries.
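
  Example (placeholder server URL and file name, as written by
  'trigger --dump-json'):
    swarming.py collect -S https://example.appspot.com --json tasks.json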
1514 """
1515 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001516 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001517 '-j',
1518 '--json',
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001519 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001520 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001521 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001522 if not args and not options.json:
1523 parser.error('Must specify at least one task id or --json.')
1524 if args and options.json:
1525 parser.error('Only use one of task id or --json.')
1526
1527 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001528 options.json = six.text_type(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001529 try:
maruel1ceb3872015-10-14 06:10:44 -07001530 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001531 data = json.load(f)
1532 except (IOError, ValueError):
1533 parser.error('Failed to open %s' % options.json)
1534 try:
Junji Watanabe38b28b02020-04-23 10:23:30 +00001535 tasks = sorted(data['tasks'].values(), key=lambda x: x['shard_index'])
maruel71c61c82016-02-22 06:52:05 -08001536 args = [t['task_id'] for t in tasks]
1537 except (KeyError, TypeError):
1538 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001539 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001540 # Take into account all the task slices.
1541 offset = 0
1542 for s in data['request']['task_slices']:
Junji Watanabecb054042020-07-21 08:43:26 +00001543 m = (
1544 offset + s['properties']['execution_timeout_secs'] +
1545 s['expiration_secs'])
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001546 if m > options.timeout:
1547 options.timeout = m
1548 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001549 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001550 else:
1551 valid = frozenset('0123456789abcdef')
1552 if any(not valid.issuperset(task_id) for task_id in args):
1553 parser.error('Task ids must be hexadecimal ([0-9a-f]).')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001554
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001555 try:
Junji Watanabe38b28b02020-04-23 10:23:30 +00001556 return collect(options.swarming, args, options.timeout, options.decorate,
1557 options.print_status_updates, options.task_summary_json,
1558 options.task_output_dir, options.task_output_stdout,
1559 options.perf, options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001560 except Failure:
1561 on_error.report(None)
1562 return 1
1563
1564
maruel77f720b2015-09-15 12:35:22 -07001565@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001566def CMDpost(parser, args):
1567 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1568
1569 Input data must be sent to stdin, result is printed to stdout.
1570
1571 If HTTP response code >= 400, returns non-zero.
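
  Example (placeholder server URL and task id), cancelling a task through the
  raw API:
    echo '{"kill_running": false}' | \
        swarming.py post -S https://example.appspot.com task/3f4e5a10/cancel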
1572 """
1573 options, args = parser.parse_args(args)
1574 if len(args) != 1:
1575 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001576 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001577 data = sys.stdin.read()
1578 try:
1579 resp = net.url_read(url, data=data, method='POST')
1580 except net.TimeoutError:
1581 sys.stderr.write('Timeout!\n')
1582 return 1
1583 if not resp:
1584 sys.stderr.write('No response!\n')
1585 return 1
1586 sys.stdout.write(resp)
1587 return 0
1588
1589
1590@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001591def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001592 """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1593 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001594
1595 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001596 Raw task request and results:
1597 swarming.py query -S server-url.com task/123456/request
1598 swarming.py query -S server-url.com task/123456/result
1599
maruel77f720b2015-09-15 12:35:22 -07001600 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001601 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001602
maruelaf6b06c2017-06-08 06:26:53 -07001603 Listing last 10 tasks on a specific bot named 'bot1':
1604 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001605
maruelaf6b06c2017-06-08 06:26:53 -07001606 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001607 quoting is important!:
1608 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001609 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001610 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001611 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001612 '-L',
1613 '--limit',
1614 type='int',
1615 default=200,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001616 help='Limit to enforce on limitless items (like number of tasks); '
Junji Watanabecb054042020-07-21 08:43:26 +00001617 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001618 parser.add_option(
1619 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001620 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001621 '--progress',
1622 action='store_true',
maruel77f720b2015-09-15 12:35:22 -07001623 help='Prints a dot at each request to show progress')
1624 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001625 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001626 parser.error(
1627 'Must specify only method name and optionally query args properly '
1628 'escaped.')
smut281c3902018-05-30 17:50:05 -07001629 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001630 try:
1631 data, yielder = get_yielder(base_url, options.limit)
1632 for items in yielder():
1633 if items:
1634 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001635 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001636 sys.stderr.write('.')
1637 sys.stderr.flush()
1638 except Failure as e:
1639 sys.stderr.write('\n%s\n' % e)
1640 return 1
maruel77f720b2015-09-15 12:35:22 -07001641 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001642 sys.stderr.write('\n')
1643 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001644 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001645 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001646 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001647 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001648 try:
maruel77f720b2015-09-15 12:35:22 -07001649 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001650 sys.stdout.write('\n')
1651 except IOError:
1652 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001653 return 0
1654
1655
maruel77f720b2015-09-15 12:35:22 -07001656def CMDquery_list(parser, args):
1657 """Returns list of all the Swarming APIs that can be used with command
1658 'query'.
1659 """
1660 parser.add_option(
1661 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1662 options, args = parser.parse_args(args)
1663 if args:
1664 parser.error('No argument allowed.')
1665
1666 try:
1667 apis = endpoints_api_discovery_apis(options.swarming)
1668 except APIError as e:
1669 parser.error(str(e))
1670 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001671 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001672 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001673 json.dump(apis, f)
1674 else:
1675 help_url = (
Junji Watanabecb054042020-07-21 08:43:26 +00001676 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1677 options.swarming)
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001678 for i, (api_id, api) in enumerate(sorted(apis.items())):
maruel11e31af2017-02-15 07:30:50 -08001679 if i:
1680 print('')
Lei Leife202df2019-06-11 17:33:34 +00001681 print(api_id)
1682 print(' ' + api['description'].strip())
maruel11e31af2017-02-15 07:30:50 -08001683 if 'resources' in api:
1684 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001685 # TODO(maruel): Remove.
1686 # pylint: disable=too-many-nested-blocks
Junji Watanabecb054042020-07-21 08:43:26 +00001687 for j, (resource_name,
1688 resource) in enumerate(sorted(api['resources'].items())):
maruel11e31af2017-02-15 07:30:50 -08001689 if j:
1690 print('')
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001691 for method_name, method in sorted(resource['methods'].items()):
maruel11e31af2017-02-15 07:30:50 -08001692 # Only list the GET ones.
1693 if method['httpMethod'] != 'GET':
1694 continue
Junji Watanabecb054042020-07-21 08:43:26 +00001695 print('- %s.%s: %s' % (resource_name, method_name, method['path']))
1696 print('\n'.join(' ' + l for l in textwrap.wrap(
1697 method.get('description', 'No description'), 78)))
Lei Leife202df2019-06-11 17:33:34 +00001698 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel11e31af2017-02-15 07:30:50 -08001699 else:
1700 # New.
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001701 for method_name, method in sorted(api['methods'].items()):
maruel77f720b2015-09-15 12:35:22 -07001702 # Only list the GET ones.
1703 if method['httpMethod'] != 'GET':
1704 continue
Lei Leife202df2019-06-11 17:33:34 +00001705 print('- %s: %s' % (method['id'], method['path']))
maruel11e31af2017-02-15 07:30:50 -08001706 print('\n'.join(
1707 ' ' + l for l in textwrap.wrap(method['description'], 78)))
Lei Leife202df2019-06-11 17:33:34 +00001708 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel77f720b2015-09-15 12:35:22 -07001709 return 0
1710
1711
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001712@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001713def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001714 """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001715
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001716 Basically, does everything to run a command remotely.
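
  Example (placeholder server URL and dimension values), running a raw command:
    swarming.py run -S https://example.appspot.com \
        --dimension pool Chrome --raw-cmd -- python -c "print('hi')"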
maruel@chromium.org0437a732013-08-27 16:05:52 +00001717 """
1718 add_trigger_options(parser)
1719 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001720 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001721 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001722 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001723 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001724 try:
Junji Watanabecb054042020-07-21 08:43:26 +00001725 tasks = trigger_task_shards(options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001726 except Failure as e:
Junji Watanabecb054042020-07-21 08:43:26 +00001727 on_error.report('Failed to trigger %s(%s): %s' %
1728 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001729 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001730 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001731 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001732 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001733 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001734 task_ids = [
Junji Watanabe38b28b02020-04-23 10:23:30 +00001735 t['task_id']
1736 for t in sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001737 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001738 for task_id in task_ids:
1739 print('Task: {server}/task?id={task}'.format(
1740 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001741 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001742 offset = 0
1743 for s in task_request.task_slices:
Junji Watanabecb054042020-07-21 08:43:26 +00001744 m = (offset + s.properties.execution_timeout_secs + s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001745 if m > options.timeout:
1746 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001747 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001748 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001749 try:
Junji Watanabecb054042020-07-21 08:43:26 +00001750 return collect(options.swarming, task_ids, options.timeout,
1751 options.decorate, options.print_status_updates,
1752 options.task_summary_json, options.task_output_dir,
1753 options.task_output_stdout, options.perf,
1754 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001755 except Failure:
1756 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001757 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001758
1759
maruel18122c62015-10-23 06:31:23 -07001760@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001761def CMDreproduce(parser, args):
1762 """Runs a task locally that was triggered on the server.
1763
1764 This runs locally the same commands that have been run on the bot. The data
1765 downloaded will be in a subdirectory named 'work' of the current working
1766 directory.
maruel18122c62015-10-23 06:31:23 -07001767
1768 You can pass further additional arguments to the target command by passing
1769 them after --.
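
  Example (placeholder server URL and task id):
    swarming.py reproduce -S https://example.appspot.com 3f4e5a10 -- --verbose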
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001770 """
maruelc070e672016-02-22 17:32:57 -08001771 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001772 '--output',
1773 metavar='DIR',
1774 default='out',
maruelc070e672016-02-22 17:32:57 -08001775 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001776 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001777 '--work',
1778 metavar='DIR',
1779 default='work',
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001780 help='Directory to map the task input files into')
1781 parser.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001782 '--cache',
1783 metavar='DIR',
1784 default='cache',
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001785 help='Directory that contains the input cache')
1786 parser.add_option(
Junji Watanabecb054042020-07-21 08:43:26 +00001787 '--leak',
1788 action='store_true',
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001789 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001790 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001791 extra_args = []
1792 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001793 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001794 if len(args) > 1:
1795 if args[1] == '--':
1796 if len(args) > 2:
1797 extra_args = args[2:]
1798 else:
1799 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001800
smut281c3902018-05-30 17:50:05 -07001801 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001802 request = net.url_read_json(url)
1803 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001804 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001805 return 1
1806
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001807 workdir = six.text_type(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001808 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001809 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001810 fs.mkdir(workdir)
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001811 cachedir = six.text_type(os.path.abspath('cipd_cache'))
iannucci31ab9192017-05-02 19:11:56 -07001812 if not fs.exists(cachedir):
1813 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001814
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001815 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001816 env = os.environ.copy()
1817 env['SWARMING_BOT_ID'] = 'reproduce'
1818 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001819 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001820 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001821 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001822 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001823 if not i['value']:
1824 env.pop(key, None)
1825 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001826 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001827
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001828 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001829 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001830 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001831 for i in env_prefixes:
1832 key = i['key']
1833 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001834 cur = env.get(key)
1835 if cur:
1836 paths.append(cur)
1837 env[key] = os.path.pathsep.join(paths)
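    # For example (hypothetical entry), {'key': 'PATH', 'value': ['cipd_bin']}
    # prepends <workdir>/cipd_bin to the PATH inherited by the task process.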
1838
iannucci31ab9192017-05-02 19:11:56 -07001839 command = []
nodir152cba62016-05-12 16:08:56 -07001840 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001841 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001842 server_ref = isolate_storage.ServerRef(
Junji Watanabecb054042020-07-21 08:43:26 +00001843 properties['inputs_ref']['isolatedserver'],
1844 properties['inputs_ref']['namespace'])
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001845 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001846 # Do not use MemoryContentAddressedCache here, as on 32-bit Python,
1847 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1848 # leak.
1849 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001850 cache = local_caching.DiskContentAddressedCache(
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001851 six.text_type(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001852 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001853 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001854 command = bundle.command
1855 if bundle.relative_cwd:
1856 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001857 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001858
1859 if properties.get('command'):
1860 command.extend(properties['command'])
1861
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001862 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001863 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001864 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001865 new_command = run_isolated.process_command(command, 'invalid', None)
1866 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001867 parser.error('The task has outputs, you must use --output')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001868 else:
1869 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001870 options.output = os.path.abspath(options.output)
Junji Watanabecb054042020-07-21 08:43:26 +00001871 new_command = run_isolated.process_command(command, options.output, None)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001872 if not os.path.isdir(options.output):
1873 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001874 command = new_command
1875 file_path.ensure_command_has_abs_path(command, workdir)
1876
1877 if properties.get('cipd_input'):
1878 ci = properties['cipd_input']
1879 cp = ci['client_package']
Junji Watanabecb054042020-07-21 08:43:26 +00001880 client_manager = cipd.get_client(ci['server'], cp['package_name'],
1881 cp['version'], cachedir)
iannucci31ab9192017-05-02 19:11:56 -07001882
1883 with client_manager as client:
1884 by_path = collections.defaultdict(list)
1885 for pkg in ci['packages']:
1886 path = pkg['path']
1887 # cipd deals with 'root' as ''
1888 if path == '.':
1889 path = ''
1890 by_path[path].append((pkg['package_name'], pkg['version']))
1891 client.ensure(workdir, by_path, cache_dir=cachedir)
1892
maruel77f720b2015-09-15 12:35:22 -07001893 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001894 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001895 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001896 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1897 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001898 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001899 finally:
1900 # Do not delete options.cache.
1901 if not options.leak:
1902 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001903
1904
maruel0eb1d1b2015-10-02 14:48:21 -07001905@subcommand.usage('bot_id')
1906def CMDterminate(parser, args):
1907 """Tells a bot to gracefully shut itself down as soon as it can.
1908
1909 This is done by completing whatever task is currently running and then exiting
1910 the bot process.
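
  Example (placeholder server URL and bot id):
    swarming.py terminate -S https://example.appspot.com bot42 --wait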
1911 """
1912 parser.add_option(
1913 '--wait', action='store_true', help='Wait for the bot to terminate')
1914 options, args = parser.parse_args(args)
1915 if len(args) != 1:
1916 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001917 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001918 request = net.url_read_json(url, data={})
1919 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001920 print('Failed to ask for termination', file=sys.stderr)
maruel0eb1d1b2015-10-02 14:48:21 -07001921 return 1
1922 if options.wait:
Junji Watanabecb054042020-07-21 08:43:26 +00001923 return collect(options.swarming, [request['task_id']], 0., False, False,
1924 None, None, [], False, None)
maruelbfc5f872017-06-10 16:43:17 -07001925 else:
Lei Leife202df2019-06-11 17:33:34 +00001926 print(request['task_id'])
maruel0eb1d1b2015-10-02 14:48:21 -07001927 return 0
1928
1929
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001930@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001931def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001932 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001933
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001934 Passes all extra arguments provided after '--' as additional command line
1935 arguments for an isolated command specified in a *.isolate file.
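
  Example (placeholder server URLs, dimension values and isolated hash):
    swarming.py trigger -S https://example.appspot.com \
        --isolate-server https://isolate.example.appspot.com \
        --dimension pool Chrome --dimension os Ubuntu-16.04 \
        --isolated deadbeef --dump-json tasks.json -- --some-task-arg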
maruel@chromium.org0437a732013-08-27 16:05:52 +00001936 """
1937 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001938 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001939 parser.add_option(
1940 '--dump-json',
1941 metavar='FILE',
1942 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001943 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001944 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001945 try:
Junji Watanabecb054042020-07-21 08:43:26 +00001946 tasks = trigger_task_shards(options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001947 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001948 print('Triggered task: %s' % task_request.name)
Junji Watanabecb054042020-07-21 08:43:26 +00001949 tasks_sorted = sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001950 if options.dump_json:
1951 data = {
Junji Watanabe38b28b02020-04-23 10:23:30 +00001952 'base_task_name': task_request.name,
1953 'tasks': tasks,
1954 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001955 }
maruel46b015f2015-10-13 18:40:35 -07001956 tools.write_json(six.text_type(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001957 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001958 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Junji Watanabecb054042020-07-21 08:43:26 +00001959 (options.swarming, options.dump_json))
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001960 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001961 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001962 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Junji Watanabecb054042020-07-21 08:43:26 +00001963 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001964 print('Or visit:')
1965 for t in tasks_sorted:
1966 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001967 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001968 except Failure:
1969 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001970 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001971
1972
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001973class OptionParserSwarming(logging_utils.OptionParserWithLogging):
Junji Watanabe38b28b02020-04-23 10:23:30 +00001974
maruel@chromium.org0437a732013-08-27 16:05:52 +00001975 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001976 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001977 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001978 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001979 self.server_group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001980 '-S',
1981 '--swarming',
1982 metavar='URL',
1983 default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001984 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001985 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001986 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001987
1988 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001989 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001990 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001991 auth.process_auth_options(self, options)
1992 user = self._process_swarming(options)
1993 if hasattr(options, 'user') and not options.user:
1994 options.user = user
1995 return options, args
1996
1997 def _process_swarming(self, options):
1998 """Processes the --swarming option and aborts if not specified.
1999
2000 Returns the identity as determined by the server.
2001 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00002002 if not options.swarming:
2003 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05002004 try:
2005 options.swarming = net.fix_url(options.swarming)
2006 except ValueError as e:
2007 self.error('--swarming %s' % e)
Takuto Ikutaae767b32020-05-11 01:22:19 +00002008
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05002009 try:
2010 user = auth.ensure_logged_in(options.swarming)
2011 except ValueError as e:
2012 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05002013 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00002014
2015
2016def main(args):
2017 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04002018 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00002019
2020
2021if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07002022 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00002023 fix_encoding.fix_encoding()
2024 tools.disable_buffering()
2025 colorama.init()
Takuto Ikuta7c843c82020-04-15 05:42:54 +00002026 net.set_user_agent('swarming.py/' + __version__)
maruel@chromium.org0437a732013-08-27 16:05:52 +00002027 sys.exit(main(sys.argv[1:]))