#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Client tool to trigger tasks or retrieve results from a Swarming server."""

from __future__ import print_function

__version__ = '1.0'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time
import uuid

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand
import six
from six.moves import urllib

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils


class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (options.user, '_'.join(
        '%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name
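
# Example (illustrative, not executed): with options.user='joe',
# options.dimensions=[('os', 'Linux'), ('pool', 'Chrome')] and
# options.isolated='deadbeef', default_task_name() returns
# u'joe/os=Linux_pool=Chrome/deadbeef'.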


### Triggering.


# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple('CipdPackage', [
    'package_name',
    'path',
    'version',
])


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple('CipdInput', [
    'client_package',
    'packages',
    'server',
])

# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple('FilesRef', [
    'isolated',
    'isolatedserver',
    'namespace',
])

# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
    'StringListPair',
    [
        'key',
        'value',  # repeated string
    ])

# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple('Containment', [
    'lower_priority',
    'containment_type',
])

# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple('TaskProperties', [
    'caches',
    'cipd_input',
    'command',
    'containment',
    'relative_cwd',
    'dimensions',
    'env',
    'env_prefixes',
    'execution_timeout_secs',
    'extra_args',
    'grace_period_secs',
    'idempotent',
    'inputs_ref',
    'io_timeout_secs',
    'outputs',
    'secret_bytes',
])


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
        'expiration_secs',
        'properties',
        'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
        'name',
        'parent_task_id',
        'priority',
        'task_slices',
        'service_account',
        'tags',
        'user',
        'pool_task_template',
    ])


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  # json.dumps in Python3 doesn't support bytes.
  if isinstance(value, bytes):
    return six.ensure_str(value)
  return value
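
# Example (illustrative, not executed):
#   namedtuple_to_dict(CipdPackage('infra/tools/foo', 'bin', 'latest'))
#   -> {'package_name': 'infra/tools/foo', 'path': 'bin', 'version': 'latest'}
# Nested namedtuples, lists and dicts are converted the same way, which is
# what makes the result JSON-serializable.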


def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for a new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [
        {'key': k, 'value': v}
        for k, v in task_slice['properties']['env'].items()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  out['request_uuid'] = str(uuid.uuid4())
  return out
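
# Note (descriptive): each TaskSlice's 'env' is a dict in the NewTaskRequest
# namedtuple, but the RPC expects a sorted list of {'key': ..., 'value': ...}
# pairs, e.g. {'FOO': 'bar'} becomes [{'key': 'FOO', 'value': 'bar'}]; the
# conversion above takes care of that.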


def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env
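
# Example (illustrative, not executed):
#   setup_googletest([], 3, 1)
#   -> [{'key': 'GTEST_SHARD_INDEX', 'value': '1'},
#       {'key': 'GTEST_TOTAL_SHARDS', 'value': '3'}]
# With shards == 1 the env list is returned unchanged.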


def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different from the
        index of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = 1
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If we
      # see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print('Priority was reset to %s' % task['request']['priority'],
            file=sys.stderr)
    tasks[request['name']] = {
        'shard_index': shard_index,
        'task_id': task['task_id'],
        'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print('Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests)), file=sys.stderr)
      for task_dict in tasks.values():
        abort_task(swarming, task_dict['task_id'])
    return None

  return tasks
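
# Note (descriptive): when shards > 1, each triggered request gets a
# ':<index>:<shards>' suffix appended to its name, and the returned dict maps
# those names to {'shard_index', 'task_id', 'view_url'} entries that end up in
# the --dump-json output.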


### Collection.


# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
      'RUNNING': RUNNING,
      'PENDING': PENDING,
      'EXPIRED': EXPIRED,
      'TIMED_OUT': TIMED_OUT,
      'BOT_DIED': BOT_DIED,
      'CANCELED': CANCELED,
      'COMPLETED': COMPLETED,
      'KILLED': KILLED,
      'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]
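
# Example (illustrative, not executed): TaskState.from_enum('COMPLETED') == 0x70.
# STATES_RUNNING is what retrieve_results() below checks to decide whether the
# task is still pending or running and polling should continue.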


class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      shard_count: expected number of task shards.
    """
    self.task_output_dir = (
        six.text_type(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check that the index is in the expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning('Shard index %d is outside of expected range: [0; %d]',
                      shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.parse.urlencode([('namespace', ref['namespace']),
                                  ('hash', ref['isolated'])]))

    # Store the result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that the calling script can delete
        # them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shuts down underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
          'shards': [
              self._per_shard_results.get(i) for i in range(self.shard_count)
          ],
      }

      # Don't store stdout in the summary if it was not requested.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'), summary, False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files."""
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use the exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error('Task shards are using multiple namespaces: %s and %s',
                        self._storage.server_ref.namespace,
                        server_ref.namespace)
          return None
      return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)
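
# Example (illustrative, not executed): both of these parse successfully:
#   parse_time('2010-01-02T03:04:05.123456')
#   parse_time('2010-01-02T03:04:05')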


def retrieve_results(base_url, shard_index, task_id, timeout, should_stop,
                     output_collector, include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
                    base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning('Error while reading task: %s; %s',
                          err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
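
# Note (descriptive): 'timeout' is interpreted three ways above: a positive
# value sets a polling deadline after which None is returned; -1 means a
# single fetch (50x retries are delegated to net.url_read_json); any other
# value polls with no deadline until the task leaves the PENDING/RUNNING
# states.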


def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare the number of
  yielded shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's
  normally not worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with the shard_index that produced them (as a
      # tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      # Convert to a list, since range in Python3 doesn't have remove.
      shards_remaining = list(range(len(task_ids)))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print('%s '
                  'Waiting for results from the following shards: %s' %
                  (time_now, ', '.join(map(str, shards_remaining))))
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()


def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for a swarming task shard."""
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
    ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
    ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as strings to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
    ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
  ])
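
# Illustrative output shape with include_stdout=True (values are made up, and
# the box width adjusts to the longest line):
#   +-----------------------------------------------------+
#   | Shard 0 https://swarming.example.com/user/task/12300 |
#   +-----------------------------------------------------+
#   ...task stdout...
#   +-----------------------------------------------------+
#   | End of shard 0                                       |
#   |  Pending: 1.2s Duration: 3.4s Bot: bot-1 Exit: 0     |
#   +-----------------------------------------------------+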


def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming,
        task_ids,
        timeout,
        None,
        print_status_updates,
        output_collector,
        include_perf,
        (len(task_output_stdout) > 0),
    ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        # The decorated output is bytes in Python 3, which print() would not
        # format nicely, so decode it back to str.
        s = six.ensure_str(
            decorate_shard_output(swarming, index, metadata,
                                  "console" in task_output_stdout).encode(
                                      'utf-8', 'replace'))
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join('  %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print('Results from some shards are missing: %s' %
          ', '.join(map(str, missing_shards)), file=sys.stderr)
    return 1

  return exit_code if exit_code is not None else 1


### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some resources are cursor-based; follow the cursor automatically by
    # looking at the 'cursor' item in each reply.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.parse.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder
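
# Usage sketch (illustrative, not executed):
#   data, yielder = get_yielder(base_url, limit)
#   items = list(data.get('items') or [])
#   for new_items in yielder():
#     items.extend(new_items or [])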


### Commands.


def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.


def add_filter_options(parser):
  parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
  parser.filter_group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices')
  parser.add_option_group(parser.filter_group)


def _validate_filter_option(parser, key, value, expiration, argname):
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is not None:
    try:
      expiration = int(expiration)
    except ValueError:
      parser.error('%s expiration is not an integer' % argname)
    if expiration <= 0:
      parser.error('%s expiration should be positive' % argname)
    if expiration % 60 != 0:
      parser.error('%s expiration is not divisible by 60' % argname)


def process_filter_options(parser, options):
  for key, value in options.dimensions:
    _validate_filter_option(parser, key, value, None, 'dimension')
  for key, value, exp in options.optional_dimensions:
    _validate_filter_option(parser, key, value, exp, 'optional-dimension')
  options.dimensions.sort()
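
# Example (illustrative, not executed): '-d pool Chrome -d os Ubuntu' yields
# options.dimensions == [('os', 'Ubuntu'), ('pool', 'Chrome')] after the sort
# above; each pair is validated by _validate_filter_option() first.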


def add_sharding_options(parser):
  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
  parser.sharding_group.add_option(
      '--shards',
      type='int',
      default=1,
      metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(parser.sharding_group)


def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s',
      '--isolated',
      metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e',
      '--env',
      default=[],
      action='append',
      nargs=2,
      metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix',
      default=[],
      action='append',
      nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
      'variable using os-appropriate pathsep character. Can be specified '
      'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent',
      action='store_true',
      default=False,
      help='When set, the server will actively try to find a previous task '
      'with the same parameters and return its result instead if possible')
  group.add_option(
      '--secret-bytes-path',
      metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
      'with this task.')
  group.add_option(
      '--hard-timeout',
      type='int',
      default=60 * 60,
      metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout',
      type='int',
      default=20 * 60,
      metavar='SECS',
      help='Seconds to allow the task to be silent.')
  parser.add_option(
      '--lower-priority',
      action='store_true',
      help='Lowers the child process priority')
  containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
  parser.add_option(
      '--containment-type',
      default='NONE',
      metavar='NONE',
      choices=containment_choices,
      help='Containment to use; one of: %s' % ', '.join(containment_choices))
  group.add_option(
      '--raw-cmd',
      action='store_true',
      default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
      'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
      'requires --raw-cmd')
  group.add_option(
      '--cipd-package',
      action='append',
      default=[],
      metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
      'path:package_name:version')
  group.add_option(
      '--named-cache',
      action='append',
      nargs=2,
      default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
      'string to indicate that the task should use the same account the '
      'bot itself is using to authenticate to Swarming. Don\'t use task '
      'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
      'By default, the pool\'s TaskTemplate is automatically selected, '
      'according to the pool configuration on the server. Choices are: '
      'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o',
      '--output',
      action='append',
      default=[],
      metavar='PATH',
      help='A list of files to return in addition to those written to '
      '${ISOLATED_OUTDIR}. An error will occur if a file specified by '
      'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity',
      action='store_true',
      default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
      'that could run this task, otherwise the task will be denied with '
      'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority',
      type='int',
      default=200,
      help='The lower the value, the more important the task is')
  group.add_option(
      '-T',
      '--task-name',
      metavar='NAME',
      help='Display name of the task. Defaults to '
      '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
      'isolated file is provided; if a hash is provided, it defaults to '
      '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags',
      action='append',
      default=[],
      metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user',
      default='',
      help='User associated with the task. Defaults to authenticated user on '
      'the server.')
  group.add_option(
      '--expiration',
      type='int',
      default=6 * 60 * 60,
      metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
      'this task request expires.')
  group.add_option(
      '--deadline', type='int', dest='expiration', help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001101
1102
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001103def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001104 """Processes trigger options and does preparatory steps.
1105
1106 Returns:
1107 NewTaskRequest instance.
1108 """
maruelaf6b06c2017-06-08 06:26:53 -07001109 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001110 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001111 if args and args[0] == '--':
1112 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001113
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001114 if not options.dimensions:
1115 parser.error('Please specify at least one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001116 if not any(k == 'pool' for k, _v in options.dimensions):
1117 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001118 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1119 parser.error('--tags must be in the format key:value')
1120 if options.raw_cmd and not args:
1121 parser.error(
1122 'Arguments with --raw-cmd should be passed after -- as command '
1123 'delimiter.')
1124 if options.isolate_server and not options.namespace:
1125 parser.error(
1126 '--namespace must be a valid value when --isolate-server is used')
1127 if not options.isolated and not options.raw_cmd:
1128 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1129
1130 # Isolated
1131 # --isolated is required only if --raw-cmd wasn't provided.
1132 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1133 # preferred server.
Takuto Ikutaae767b32020-05-11 01:22:19 +00001134 isolateserver.process_isolate_server_options(parser, options,
1135 not options.raw_cmd)
maruel0a25f6c2017-05-10 10:43:23 -07001136 inputs_ref = None
1137 if options.isolate_server:
1138 inputs_ref = FilesRef(
1139 isolated=options.isolated,
1140 isolatedserver=options.isolate_server,
1141 namespace=options.namespace)
1142
1143 # Command
1144 command = None
1145 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001146 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001147 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001148 if options.relative_cwd:
1149 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1150 if not a.startswith(os.getcwd()):
1151 parser.error(
1152 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001153 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001154 if options.relative_cwd:
1155 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001156 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001157
maruel0a25f6c2017-05-10 10:43:23 -07001158 # CIPD
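  # Each --cipd-package value has the form 'path:package:version'. For example
  # (illustrative names), 'bin:example/cipd/package:latest' installs
  # 'example/cipd/package' at version 'latest' under the 'bin' directory of
  # the task.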
borenet02f772b2016-06-22 12:42:19 -07001159 cipd_packages = []
1160 for p in options.cipd_package:
1161 split = p.split(':', 2)
1162 if len(split) != 3:
1163 parser.error('CIPD packages must take the form: path:package:version')
Junji Watanabe38b28b02020-04-23 10:23:30 +00001164 cipd_packages.append(
1165 CipdPackage(package_name=split[1], path=split[0], version=split[2]))
borenet02f772b2016-06-22 12:42:19 -07001166 cipd_input = None
1167 if cipd_packages:
1168 cipd_input = CipdInput(
1169 client_package=None,
1170 packages=cipd_packages,
1171 server=None)
1172
maruel0a25f6c2017-05-10 10:43:23 -07001173 # Secrets
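  # The file content is base64-encoded so it can be embedded in the task
  # request.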
iannuccidc80dfb2016-10-28 12:50:20 -07001174 secret_bytes = None
1175 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001176 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001177 secret_bytes = f.read().encode('base64')
1178
maruel0a25f6c2017-05-10 10:43:23 -07001179 # Named caches
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001180 caches = [{
1181 u'name': six.text_type(i[0]),
1182 u'path': six.text_type(i[1])
1183 } for i in options.named_cache]
maruel0a25f6c2017-05-10 10:43:23 -07001184
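  # Collapse repeated --env-prefix flags into a key -> [values] mapping, e.g.
  # (illustrative) two 'PATH' entries become one 'PATH' entry with both paths.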
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001185 env_prefixes = {}
1186 for k, v in options.env_prefix:
1187 env_prefixes.setdefault(k, []).append(v)
1188
Brad Hallf78187a2018-10-19 17:08:55 +00001189 # Get dimensions into the key/value format we can manipulate later.
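  # e.g. (illustrative) ('pool', 'Chrome') becomes
  # {'key': 'pool', 'value': 'Chrome'}.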
1190 orig_dims = [
1191 {'key': key, 'value': value} for key, value in options.dimensions]
1192 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1193
1194 # Construct base properties that we will use for all the slices, adding in
1195 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001196 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001197 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001198 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001199 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001200 containment=Containment(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001201 lower_priority=bool(options.lower_priority),
1202 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001203 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001204 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001205 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001206 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001207 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001208 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001209 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001210 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001211 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001212 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001213 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001214 outputs=options.output,
1215 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001216
1217 slices = []
1218
1219 # Group the optional dimensions by expiration.
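  # e.g. (illustrative) an optional dimension ('os', 'Ubuntu-14.04', 60)
  # yields dims_by_exp = {60: [{'key': 'os', 'value': 'Ubuntu-14.04'}]}.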
1220 dims_by_exp = {}
1221 for key, value, exp_secs in options.optional_dimensions:
1222 dims_by_exp.setdefault(int(exp_secs), []).append(
1223 {'key': key, 'value': value})
1224
1225 # Create the optional slices with expiration deltas, we fix up the properties
1226 # below.
1227 last_exp = 0
1228 for expiration_secs in sorted(dims_by_exp):
1229 t = TaskSlice(
1230 expiration_secs=expiration_secs - last_exp,
1231 properties=properties,
1232 wait_for_capacity=False)
1233 slices.append(t)
1234 last_exp = expiration_secs
1235
1236 # Add back in the default slice (the last one).
1237 exp = max(int(options.expiration) - last_exp, 60)
1238 base_task_slice = TaskSlice(
1239 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001240 properties=properties,
1241 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001242 slices.append(base_task_slice)
1243
Brad Hall7f463e62018-11-16 16:13:30 +00001244 # Add optional dimensions to the task slices, replacing a dimension that
1245 # has the same key if it is a dimension where repeating isn't valid (otherwise
1246 # we append it). Currently the only dimension we can repeat is "caches"; the
1247 # rest (os, cpu, etc) shouldn't be repeated.
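  # e.g. (illustrative) an optional ('os', 'Ubuntu-14.04') entry replaces the
  # base 'os' dimension in earlier slices, while an optional 'caches' entry is
  # appended next to any existing 'caches' dimensions.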
Brad Hallf78187a2018-10-19 17:08:55 +00001248 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001249 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001250 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001251 # Replace or append the key/value pairs for this expiration in extra_dims;
1252 # we keep extra_dims around because we are iterating backwards and filling
1253 # in slices with shorter expirations. Dimensions expire as time goes on so
1254 # the slices that expire earlier will generally have more dimensions.
1255 for kv in kvs:
1256 if kv['key'] == 'caches':
1257 extra_dims.append(kv)
1258 else:
1259 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1260 # Then, add all the optional dimensions to the original dimension set, again
1261 # replacing if needed.
1262 for kv in extra_dims:
1263 if kv['key'] == 'caches':
1264 dims.append(kv)
1265 else:
1266 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001267 dims.sort(key=lambda x: (x['key'], x['value']))
1268 slice_properties = properties._replace(dimensions=dims)
1269 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
1270
maruel77f720b2015-09-15 12:35:22 -07001271 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001272 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001273 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001274 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001275 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001276 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001277 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001278 user=options.user,
1279 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001280
1281
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001282class TaskOutputStdoutOption(optparse.Option):
1283 """Where to output the each task's console output (stderr/stdout).
1284
1285 The output will be;
1286 none - not be downloaded.
1287 json - stored in summary.json file *only*.
1288 console - shown on stdout *only*.
1289 all - stored in summary.json and shown on stdout.
1290 """
1291
1292 choices = ['all', 'json', 'console', 'none']
1293
1294 def __init__(self, *args, **kw):
1295 optparse.Option.__init__(
1296 self,
1297 *args,
1298 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001299 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001300 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001301 **kw)
1302
1303 def convert_value(self, opt, value):
1304 if value not in self.choices:
1305 raise optparse.OptionValueError("%s must be one of %s, not %r" % (
1306 self.get_opt_string(), self.choices, value))
1307 stdout_to = []
1308 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001309 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001310 elif value != 'none':
1311 stdout_to = [value]
1312 return stdout_to
1313
1314
maruel@chromium.org0437a732013-08-27 16:05:52 +00001315def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001316 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001317 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001318 help='Timeout to wait for result, set to -1 for no timeout and get '
1319 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001320 parser.group_logging.add_option(
1321 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001322 parser.group_logging.add_option(
1323 '--print-status-updates', action='store_true',
1324 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001325 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001326 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001327 '--task-summary-json',
1328 metavar='FILE',
1329 help='Dump a summary of task results to this file as json. It contains '
1330 'only shard statuses as known to the server directly. Any output files '
1331 'emitted by the task can be collected by using --task-output-dir')
1332 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001333 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001334 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001335 help='Directory to put task results into. When the task finishes, this '
Junji Watanabe38b28b02020-04-23 10:23:30 +00001336 'directory contains a per-shard directory with the output files produced '
1337 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001338 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001339 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001340 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001341 '--filepath-filter',
1342 help='A regexp filter used to select which file paths to download when '
1343 'collecting isolated output.')
1344 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001345 '--perf', action='store_true', default=False,
1346 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001347 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001348
1349
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001350def process_collect_options(parser, options):
1351 # Only -1 is allowed as a negative value; disallow any other negative value.
1352 if options.timeout != -1 and options.timeout < 0:
1353 parser.error('Invalid --timeout value')
1354
1355
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001356@subcommand.usage('bots...')
1357def CMDbot_delete(parser, args):
1358 """Forcibly deletes bots from the Swarming server."""
1359 parser.add_option(
1360 '-f', '--force', action='store_true',
1361 help='Do not prompt for confirmation')
1362 options, args = parser.parse_args(args)
1363 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001364 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001365
1366 bots = sorted(args)
1367 if not options.force:
1368 print('Delete the following bots?')
1369 for bot in bots:
1370 print(' %s' % bot)
1371 if six.moves.input('Continue? [y/N] ') not in ('y', 'Y'):
1372 print('Goodbye.')
1373 return 1
1374
1375 result = 0
1376 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001377 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001378 if net.url_read_json(url, data={}, method='POST') is None:
1379 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001380 result = 1
1381 return result
1382
1383
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001384def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001385 """Returns information about the bots connected to the Swarming server."""
1386 add_filter_options(parser)
1387 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001388 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001389 help='Keep only dead bots, useful to reap them and reimage broken bots')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001390 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001391 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001392 help='Keep both dead and alive bots')
1393 parser.filter_group.add_option(
1394 '--busy', action='store_true', help='Keep only busy bots')
1395 parser.filter_group.add_option(
1396 '--idle', action='store_true', help='Keep only idle bots')
1397 parser.filter_group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001398 '--mp',
1399 action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001400 help='Keep only Machine Provider managed bots')
1401 parser.filter_group.add_option(
1402 '--non-mp', action='store_true',
1403 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001404 parser.filter_group.add_option(
1405 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001406 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001407 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001408 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001409
1410 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001411 parser.error('Use only one of --keep-dead or --dead-only')
1412 if options.busy and options.idle:
1413 parser.error('Use only one of --busy or --idle')
1414 if options.mp and options.non_mp:
1415 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001416
smut281c3902018-05-30 17:50:05 -07001417 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001418 values = []
1419 if options.dead_only:
1420 values.append(('is_dead', 'TRUE'))
1421 elif options.keep_dead:
1422 values.append(('is_dead', 'NONE'))
1423 else:
1424 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001425
maruelaf6b06c2017-06-08 06:26:53 -07001426 if options.busy:
1427 values.append(('is_busy', 'TRUE'))
1428 elif options.idle:
1429 values.append(('is_busy', 'FALSE'))
1430 else:
1431 values.append(('is_busy', 'NONE'))
1432
1433 if options.mp:
1434 values.append(('is_mp', 'TRUE'))
1435 elif options.non_mp:
1436 values.append(('is_mp', 'FALSE'))
1437 else:
1438 values.append(('is_mp', 'NONE'))
1439
1440 for key, value in options.dimensions:
1441 values.append(('dimensions', '%s:%s' % (key, value)))
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +00001442 url += urllib.parse.urlencode(values)
maruelaf6b06c2017-06-08 06:26:53 -07001443 try:
1444 data, yielder = get_yielder(url, 0)
1445 bots = data.get('items') or []
1446 for items in yielder():
1447 if items:
1448 bots.extend(items)
1449 except Failure as e:
1450 sys.stderr.write('\n%s\n' % e)
1451 return 1
maruel77f720b2015-09-15 12:35:22 -07001452 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
Lei Leife202df2019-06-11 17:33:34 +00001453 print(bot['bot_id'])
maruelaf6b06c2017-06-08 06:26:53 -07001454 if not options.bare:
1455 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
Lei Leife202df2019-06-11 17:33:34 +00001456 print(' %s' % json.dumps(dimensions, sort_keys=True))
maruelaf6b06c2017-06-08 06:26:53 -07001457 if bot.get('task_id'):
Lei Leife202df2019-06-11 17:33:34 +00001458 print(' task: %s' % bot['task_id'])
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001459 return 0
1460
1461
maruelfd0a90c2016-06-10 11:51:10 -07001462@subcommand.usage('task_id')
1463def CMDcancel(parser, args):
1464 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001465 parser.add_option(
1466 '-k', '--kill-running', action='store_true', default=False,
1467 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001468 options, args = parser.parse_args(args)
1469 if not args:
1470 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001471 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001472 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001473 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001474 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001475 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001476 print('Canceling %s failed. Probably already gone' % task_id)
1477 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001478 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001479 return 0
1480
1481
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001482@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001483def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001484 """Retrieves results of one or multiple Swarming tasks by their IDs.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001485
1486 The result can be in multiple parts if the execution was sharded. It can
1487 potentially have retries.
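
  Example (the .json file name is illustrative):
    swarming.py collect -S server-url.com --json tasks.json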
1488 """
1489 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001490 parser.add_option(
1491 '-j', '--json',
1492 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001493 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001494 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001495 if not args and not options.json:
1496 parser.error('Must specify at least one task id or --json.')
1497 if args and options.json:
1498 parser.error('Only use one of task id or --json.')
1499
1500 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001501 options.json = six.text_type(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001502 try:
maruel1ceb3872015-10-14 06:10:44 -07001503 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001504 data = json.load(f)
1505 except (IOError, ValueError):
1506 parser.error('Failed to open %s' % options.json)
1507 try:
Junji Watanabe38b28b02020-04-23 10:23:30 +00001508 tasks = sorted(data['tasks'].values(), key=lambda x: x['shard_index'])
maruel71c61c82016-02-22 06:52:05 -08001509 args = [t['task_id'] for t in tasks]
1510 except (KeyError, TypeError):
1511 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001512 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001513 # Take into account all the task slices.
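    # The default is the maximum, across slices, of the cumulative expirations
    # up to and including a slice plus that slice's execution timeout; a 10s
    # margin is added below.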
1514 offset = 0
1515 for s in data['request']['task_slices']:
1516 m = (offset + s['properties']['execution_timeout_secs'] +
1517 s['expiration_secs'])
1518 if m > options.timeout:
1519 options.timeout = m
1520 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001521 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001522 else:
1523 valid = frozenset('0123456789abcdef')
1524 if any(not valid.issuperset(task_id) for task_id in args):
1525 parser.error('Task ids are 0-9a-f.')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001526
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001527 try:
Junji Watanabe38b28b02020-04-23 10:23:30 +00001528 return collect(options.swarming, args, options.timeout, options.decorate,
1529 options.print_status_updates, options.task_summary_json,
1530 options.task_output_dir, options.task_output_stdout,
1531 options.perf, options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001532 except Failure:
1533 on_error.report(None)
1534 return 1
1535
1536
maruel77f720b2015-09-15 12:35:22 -07001537@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001538def CMDpost(parser, args):
1539 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1540
1541 Input data must be sent to stdin, result is printed to stdout.
1542
1543 If HTTP response code >= 400, returns non-zero.
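
  Example (the bot id is illustrative; the endpoint is the one used by
  'terminate'):
    echo '{}' | swarming.py post -S server-url.com bot/bot1/terminate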
1544 """
1545 options, args = parser.parse_args(args)
1546 if len(args) != 1:
1547 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001548 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001549 data = sys.stdin.read()
1550 try:
1551 resp = net.url_read(url, data=data, method='POST')
1552 except net.TimeoutError:
1553 sys.stderr.write('Timeout!\n')
1554 return 1
1555 if not resp:
1556 sys.stderr.write('No response!\n')
1557 return 1
1558 sys.stdout.write(resp)
1559 return 0
1560
1561
1562@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001563def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001564 """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1565 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001566
1567 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001568 Raw task request and results:
1569 swarming.py query -S server-url.com task/123456/request
1570 swarming.py query -S server-url.com task/123456/result
1571
maruel77f720b2015-09-15 12:35:22 -07001572 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001573 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001574
maruelaf6b06c2017-06-08 06:26:53 -07001575 Listing last 10 tasks on a specific bot named 'bot1':
1576 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001577
maruelaf6b06c2017-06-08 06:26:53 -07001578 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001579 quoting is important!:
1580 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001581 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001582 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001583 parser.add_option(
1584 '-L', '--limit', type='int', default=200,
1585 help='Limit to enforce on limitless items (like number of tasks); '
1586 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001587 parser.add_option(
1588 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001589 parser.add_option(
1590 '--progress', action='store_true',
1591 help='Prints a dot at each request to show progress')
1592 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001593 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001594 parser.error(
1595 'Must specify only method name and optionally query args properly '
1596 'escaped.')
smut281c3902018-05-30 17:50:05 -07001597 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001598 try:
1599 data, yielder = get_yielder(base_url, options.limit)
1600 for items in yielder():
1601 if items:
1602 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001603 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001604 sys.stderr.write('.')
1605 sys.stderr.flush()
1606 except Failure as e:
1607 sys.stderr.write('\n%s\n' % e)
1608 return 1
maruel77f720b2015-09-15 12:35:22 -07001609 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001610 sys.stderr.write('\n')
1611 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001612 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001613 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001614 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001615 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001616 try:
maruel77f720b2015-09-15 12:35:22 -07001617 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001618 sys.stdout.write('\n')
1619 except IOError:
1620 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001621 return 0
1622
1623
maruel77f720b2015-09-15 12:35:22 -07001624def CMDquery_list(parser, args):
1625 """Returns the list of all the Swarming APIs that can be used with command
1626 'query'.
1627 """
1628 parser.add_option(
1629 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1630 options, args = parser.parse_args(args)
1631 if args:
1632 parser.error('No argument allowed.')
1633
1634 try:
1635 apis = endpoints_api_discovery_apis(options.swarming)
1636 except APIError as e:
1637 parser.error(str(e))
1638 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001639 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001640 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001641 json.dump(apis, f)
1642 else:
1643 help_url = (
1644 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1645 options.swarming)
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001646 for i, (api_id, api) in enumerate(sorted(apis.items())):
maruel11e31af2017-02-15 07:30:50 -08001647 if i:
1648 print('')
Lei Leife202df2019-06-11 17:33:34 +00001649 print(api_id)
1650 print(' ' + api['description'].strip())
maruel11e31af2017-02-15 07:30:50 -08001651 if 'resources' in api:
1652 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001653 # TODO(maruel): Remove.
1654 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001655 for j, (resource_name, resource) in enumerate(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001656 sorted(api['resources'].items())):
maruel11e31af2017-02-15 07:30:50 -08001657 if j:
1658 print('')
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001659 for method_name, method in sorted(resource['methods'].items()):
maruel11e31af2017-02-15 07:30:50 -08001660 # Only list the GET ones.
1661 if method['httpMethod'] != 'GET':
1662 continue
Lei Leife202df2019-06-11 17:33:34 +00001663 print('- %s.%s: %s' % (
1664 resource_name, method_name, method['path']))
maruel11e31af2017-02-15 07:30:50 -08001665 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001666 ' ' + l for l in textwrap.wrap(
1667 method.get('description', 'No description'), 78)))
Lei Leife202df2019-06-11 17:33:34 +00001668 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel11e31af2017-02-15 07:30:50 -08001669 else:
1670 # New.
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001671 for method_name, method in sorted(api['methods'].items()):
maruel77f720b2015-09-15 12:35:22 -07001672 # Only list the GET ones.
1673 if method['httpMethod'] != 'GET':
1674 continue
Lei Leife202df2019-06-11 17:33:34 +00001675 print('- %s: %s' % (method['id'], method['path']))
maruel11e31af2017-02-15 07:30:50 -08001676 print('\n'.join(
1677 ' ' + l for l in textwrap.wrap(method['description'], 78)))
Lei Leife202df2019-06-11 17:33:34 +00001678 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel77f720b2015-09-15 12:35:22 -07001679 return 0
1680
1681
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001682@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001683def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001684 """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001685
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001686 Basically, does everything to run a command remotely.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001687 """
1688 add_trigger_options(parser)
1689 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001690 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001691 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001692 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001693 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001694 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001695 tasks = trigger_task_shards(
1696 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001697 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001698 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001699 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001700 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001701 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001702 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001703 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001704 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001705 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001706 task_ids = [
Junji Watanabe38b28b02020-04-23 10:23:30 +00001707 t['task_id']
1708 for t in sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001709 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001710 for task_id in task_ids:
1711 print('Task: {server}/task?id={task}'.format(
1712 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001713 if not options.timeout:
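    # No --timeout was given; derive one from the task slices the same way
    # CMDcollect does: cumulative slice expirations plus the execution timeout
    # of the slice that runs, plus a 10s margin.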
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001714 offset = 0
1715 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001716 m = (offset + s.properties.execution_timeout_secs +
1717 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001718 if m > options.timeout:
1719 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001720 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001721 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001722 try:
1723 return collect(
1724 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001725 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001726 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001727 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001728 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001729 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001730 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001731 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001732 options.perf,
1733 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001734 except Failure:
1735 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001736 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001737
1738
maruel18122c62015-10-23 06:31:23 -07001739@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001740def CMDreproduce(parser, args):
1741 """Runs a task locally that was triggered on the server.
1742
1743 This runs locally the same commands that were run on the bot. The data
1744 downloaded will be in a subdirectory named 'work' of the current working
1745 directory.
maruel18122c62015-10-23 06:31:23 -07001746
1747 You can pass additional arguments to the target command by passing
1748 them after --.
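
  Example (the task id and the flag after -- are illustrative):
    swarming.py reproduce -S server-url.com 123456 -- --verbose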
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001749 """
maruelc070e672016-02-22 17:32:57 -08001750 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001751 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001752 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001753 parser.add_option(
1754 '--work', metavar='DIR', default='work',
1755 help='Directory to map the task input files into')
1756 parser.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001757 '--cache',
1758 metavar='DIR',
1759 default='cache',
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001760 help='Directory that contains the input cache')
1761 parser.add_option(
1762 '--leak', action='store_true',
1763 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001764 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001765 extra_args = []
1766 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001767 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001768 if len(args) > 1:
1769 if args[1] == '--':
1770 if len(args) > 2:
1771 extra_args = args[2:]
1772 else:
1773 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001774
smut281c3902018-05-30 17:50:05 -07001775 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001776 request = net.url_read_json(url)
1777 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001778 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001779 return 1
1780
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001781 workdir = six.text_type(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001782 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001783 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001784 fs.mkdir(workdir)
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001785 cachedir = six.text_type(os.path.abspath('cipd_cache'))
iannucci31ab9192017-05-02 19:11:56 -07001786 if not fs.exists(cachedir):
1787 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001788
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001789 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001790 env = os.environ.copy()
1791 env['SWARMING_BOT_ID'] = 'reproduce'
1792 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001793 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001794 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001795 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001796 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001797 if not i['value']:
1798 env.pop(key, None)
1799 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001800 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001801
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001802 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001803 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001804 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001805 for i in env_prefixes:
1806 key = i['key']
1807 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001808 cur = env.get(key)
1809 if cur:
1810 paths.append(cur)
1811 env[key] = os.path.pathsep.join(paths)
1812
iannucci31ab9192017-05-02 19:11:56 -07001813 command = []
nodir152cba62016-05-12 16:08:56 -07001814 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001815 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001816 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001817 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001818 properties['inputs_ref']['namespace'])
1819 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001820 # Do not use MemoryContentAddressedCache here, as on 32-bit Python,
1821 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1822 # leak.
1823 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001824 cache = local_caching.DiskContentAddressedCache(
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001825 six.text_type(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001826 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001827 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001828 command = bundle.command
1829 if bundle.relative_cwd:
1830 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001831 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001832
1833 if properties.get('command'):
1834 command.extend(properties['command'])
1835
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001836 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001837 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001838 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001839 new_command = run_isolated.process_command(command, 'invalid', None)
1840 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001841 parser.error('The task has outputs, you must use --output-dir')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001842 else:
1843 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001844 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001845 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001846 command, options.output, None)
1847 if not os.path.isdir(options.output):
1848 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001849 command = new_command
1850 file_path.ensure_command_has_abs_path(command, workdir)
1851
1852 if properties.get('cipd_input'):
1853 ci = properties['cipd_input']
1854 cp = ci['client_package']
1855 client_manager = cipd.get_client(
1856 ci['server'], cp['package_name'], cp['version'], cachedir)
1857
1858 with client_manager as client:
1859 by_path = collections.defaultdict(list)
1860 for pkg in ci['packages']:
1861 path = pkg['path']
1862 # cipd deals with 'root' as ''
1863 if path == '.':
1864 path = ''
1865 by_path[path].append((pkg['package_name'], pkg['version']))
1866 client.ensure(workdir, by_path, cache_dir=cachedir)
1867
maruel77f720b2015-09-15 12:35:22 -07001868 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001869 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001870 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001871 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1872 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001873 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001874 finally:
1875 # Do not delete options.cache.
1876 if not options.leak:
1877 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001878
1879
maruel0eb1d1b2015-10-02 14:48:21 -07001880@subcommand.usage('bot_id')
1881def CMDterminate(parser, args):
1882 """Tells a bot to gracefully shut itself down as soon as it can.
1883
1884 This is done by completing whatever task is currently running, then exiting the bot
1885 process.
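
  Example (the bot id is illustrative):
    swarming.py terminate -S server-url.com --wait bot1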
1886 """
1887 parser.add_option(
1888 '--wait', action='store_true', help='Wait for the bot to terminate')
1889 options, args = parser.parse_args(args)
1890 if len(args) != 1:
1891 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001892 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001893 request = net.url_read_json(url, data={})
1894 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001895 print('Failed to ask for termination', file=sys.stderr)
maruel0eb1d1b2015-10-02 14:48:21 -07001896 return 1
1897 if options.wait:
1898 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001899 options.swarming,
1900 [request['task_id']],
1901 0.,
1902 False,
1903 False,
1904 None,
1905 None,
1906 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001907 False,
1908 None)
maruelbfc5f872017-06-10 16:43:17 -07001909 else:
Lei Leife202df2019-06-11 17:33:34 +00001910 print(request['task_id'])
maruel0eb1d1b2015-10-02 14:48:21 -07001911 return 0
1912
1913
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001914@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001915def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001916 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001917
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001918 Passes all extra arguments provided after '--' as additional command line
1919 arguments for an isolated command specified in a *.isolate file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001920 """
1921 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001922 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001923 parser.add_option(
1924 '--dump-json',
1925 metavar='FILE',
1926 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001927 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001928 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001929 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001930 tasks = trigger_task_shards(
1931 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001932 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001933 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001934 tasks_sorted = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001935 tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001936 if options.dump_json:
1937 data = {
Junji Watanabe38b28b02020-04-23 10:23:30 +00001938 'base_task_name': task_request.name,
1939 'tasks': tasks,
1940 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001941 }
maruel46b015f2015-10-13 18:40:35 -07001942 tools.write_json(six.text_type(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001943 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001944 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001945 (options.swarming, options.dump_json))
1946 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001947 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001948 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001949 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1950 print('Or visit:')
1951 for t in tasks_sorted:
1952 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001953 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001954 except Failure:
1955 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001956 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001957
1958
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001959class OptionParserSwarming(logging_utils.OptionParserWithLogging):
Junji Watanabe38b28b02020-04-23 10:23:30 +00001960
maruel@chromium.org0437a732013-08-27 16:05:52 +00001961 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001962 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001963 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001964 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001965 self.server_group.add_option(
Junji Watanabe38b28b02020-04-23 10:23:30 +00001966 '-S',
1967 '--swarming',
1968 metavar='URL',
1969 default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001970 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001971 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001972 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001973
1974 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001975 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001976 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001977 auth.process_auth_options(self, options)
1978 user = self._process_swarming(options)
1979 if hasattr(options, 'user') and not options.user:
1980 options.user = user
1981 return options, args
1982
1983 def _process_swarming(self, options):
1984 """Processes the --swarming option and aborts if not specified.
1985
1986 Returns the identity as determined by the server.
1987 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001988 if not options.swarming:
1989 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001990 try:
1991 options.swarming = net.fix_url(options.swarming)
1992 except ValueError as e:
1993 self.error('--swarming %s' % e)
Takuto Ikutaae767b32020-05-11 01:22:19 +00001994
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001995 try:
1996 user = auth.ensure_logged_in(options.swarming)
1997 except ValueError as e:
1998 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001999 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00002000
2001
2002def main(args):
2003 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04002004 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00002005
2006
2007if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07002008 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00002009 fix_encoding.fix_encoding()
2010 tools.disable_buffering()
2011 colorama.init()
Takuto Ikuta7c843c82020-04-15 05:42:54 +00002012 net.set_user_agent('swarming.py/' + __version__)
maruel@chromium.org0437a732013-08-27 16:05:52 +00002013 sys.exit(main(sys.argv[1:]))