blob: c3de2e7810ac5dcb86e9ce989cef21049cf889c3 [file] [log] [blame]
maruel@chromium.org0437a732013-08-27 16:05:52 +00001#!/usr/bin/env python
maruelea586f32016-04-05 11:11:33 -07002# Copyright 2013 The LUCI Authors. All rights reserved.
maruelf1f5e2a2016-05-25 17:10:39 -07003# Use of this source code is governed under the Apache License, Version 2.0
4# that can be found in the LICENSE file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00005
6"""Client tool to trigger tasks or retrieve results from a Swarming server."""
7
Takuto Ikuta0e3e1c42018-11-29 14:21:06 +00008__version__ = '0.14'
maruel@chromium.org0437a732013-08-27 16:05:52 +00009
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050010import collections
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -040011import datetime
maruel@chromium.org0437a732013-08-27 16:05:52 +000012import json
13import logging
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -040014import optparse
maruel@chromium.org0437a732013-08-27 16:05:52 +000015import os
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +100016import re
maruel@chromium.org0437a732013-08-27 16:05:52 +000017import sys
maruel11e31af2017-02-15 07:30:50 -080018import textwrap
Vadim Shtayurab19319e2014-04-27 08:50:06 -070019import threading
maruel@chromium.org0437a732013-08-27 16:05:52 +000020import time
21import urllib
maruel@chromium.org0437a732013-08-27 16:05:52 +000022
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000023from utils import tools
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000024tools.force_local_third_party()
maruel@chromium.org0437a732013-08-27 16:05:52 +000025
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000026# third_party/
27import colorama
28from chromium import natsort
29from depot_tools import fix_encoding
30from depot_tools import subcommand
31
32# pylint: disable=ungrouped-imports
Vadim Shtayurae34e13a2014-02-02 11:23:26 -080033import auth
iannucci31ab9192017-05-02 19:11:56 -070034import cipd
maruel@chromium.org7b844a62013-09-17 13:04:59 +000035import isolateserver
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +000036import isolate_storage
Marc-Antoine Ruel2666d9c2018-05-18 13:52:02 -040037import local_caching
maruelc070e672016-02-22 17:32:57 -080038import run_isolated
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000039from utils import file_path
40from utils import fs
41from utils import logging_utils
42from utils import net
43from utils import on_error
44from utils import subprocess42
45from utils import threading_utils
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050046
47
class Failure(Exception):
  """Generic failure raised by helpers in this module."""
51
52
def default_task_name(options):
  """Returns the task name, synthesizing a default one when none was given.

  The generated name is '<user>/<dim1=val1_dim2=val2...>' with the isolated
  hash appended as a third path component when present.
  """
  if options.task_name:
    return options.task_name
  dims = '_'.join('%s=%s' % pair for pair in options.dimensions)
  name = u'%s/%s' % (options.user, dims)
  if options.isolated:
    name += u'/' + options.isolated
  return name
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050063
64
65### Triggering.
66
67
# Tuple mirrors of the request structures defined in
# ../appengine/swarming/swarming_rpcs.py. Field order matters: it is the
# positional-argument order of each tuple.


# A single CIPD package to install for the task.
CipdPackage = collections.namedtuple(
    'CipdPackage', ['package_name', 'path', 'version'])


# CIPD configuration: the client itself plus the packages to install.
CipdInput = collections.namedtuple(
    'CipdInput', ['client_package', 'packages', 'server'])


# Reference to the isolated inputs of a task.
FilesRef = collections.namedtuple(
    'FilesRef', ['isolated', 'isolatedserver', 'namespace'])


# A key mapped to a repeated string value.
StringListPair = collections.namedtuple('StringListPair', ['key', 'value'])


# All the properties of a single task execution.
TaskProperties = collections.namedtuple('TaskProperties', [
    'caches',
    'cipd_input',
    'command',
    'relative_cwd',
    'dimensions',
    'env',
    'env_prefixes',
    'execution_timeout_secs',
    'extra_args',
    'grace_period_secs',
    'idempotent',
    'inputs_ref',
    'io_timeout_secs',
    'outputs',
    'secret_bytes',
])


# One task slice: a set of properties plus its scheduling constraints.
TaskSlice = collections.namedtuple(
    'TaskSlice', ['expiration_secs', 'properties', 'wait_for_capacity'])


# Everything needed to trigger a new task on the server.
NewTaskRequest = collections.namedtuple('NewTaskRequest', [
    'name',
    'parent_task_id',
    'priority',
    'task_slices',
    'service_account',
    'tags',
    'user',
    'pool_task_template',
])
152
153
def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict.

  Lists and tuples are converted to lists, dicts are rebuilt with converted
  values, and any object exposing _asdict() (namedtuples) becomes a dict.
  Scalars are returned unchanged.
  """
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    # .items() instead of .iteritems(): identical behavior on Python 2 and
    # keeps the function working on Python 3.
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  return value
maruel77f720b2015-09-15 12:35:22 -0700163
164
def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # version of the server that doesn't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  # The server expects 'env' as a list of {'key', 'value'} dicts sorted by
  # key, not as a mapping.
  for task_slice in out['task_slices']:
    # .items() instead of .iteritems(): identical behavior on Python 2 and
    # keeps the function working on Python 3.
    env = [
        {'key': k, 'value': v}
        for k, v in task_slice['properties']['env'].items()
    ]
    env.sort(key=lambda x: x['key'])
    task_slice['properties']['env'] = env
  return out
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500183
184
def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Args:
    swarming: base URL of the Swarming server, used as prefix for the
        '/_ah/api/swarming/v1/tasks/new' endpoint.
    raw_request: json-compatible dict in the shape produced by
        task_request_to_raw_request().

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    # No (or empty) reply from the server: report and give up.
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error. Fold every message and debugInfo the server
    # returned into a single report.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result
221
222
def setup_googletest(env, shards, index):
  """Returns |env| extended with googletest sharding environment variables.

  |env| is a list of {'key', 'value'} dicts. When there is a single shard the
  list is returned untouched; otherwise a copy with GTEST_SHARD_INDEX and
  GTEST_TOTAL_SHARDS appended is returned.
  """
  if shards <= 1:
    return env
  existing = [item['key'] for item in env]
  assert 'GTEST_SHARD_INDEX' not in existing, env
  assert 'GTEST_TOTAL_SHARDS' not in existing, env
  return env + [
      {'key': 'GTEST_SHARD_INDEX', 'value': str(index)},
      {'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)},
  ]
232
233
def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Args:
    swarming: base URL of the Swarming server.
    task_request: NewTaskRequest namedtuple describing the task.
    shards: number of shards to trigger.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """Converts the task request into a raw request for shard |index|.

    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
        of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      # Explicit sharding: inject GTEST_* variables into every slice and
      # suffix the task name with ':<index>:<shards>'.
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple tasks slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If
      # we see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  # NOTE(review): xrange / print >> / itervalues below are Python 2 only.
  requests = [convert(index) for index in xrange(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      # Triggering failed; stop and clean up whatever was already triggered.
      break
    logging.info('Request result: %s', task)
    # The server may lower the requested priority; warn once if it did.
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print >> sys.stderr, (
          'Priority was reset to %s' % task['request']['priority'])
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print >> sys.stderr, 'Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests))
    for task_dict in tasks.itervalues():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks
307
308
309### Collection.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000310
311
# How often to print status updates to stdout in 'collect', in seconds.
STATUS_UPDATE_INTERVAL = 5 * 60.
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700314
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400315
class TaskState(object):
  r"""Enum of the possible task states.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  # States in which the task has not completed yet.
  STATES_RUNNING = ('PENDING', 'RUNNING')

  # Maps the server-side state name to its numeric value.
  _ENUMS = {
      'RUNNING': RUNNING,
      'PENDING': PENDING,
      'EXPIRED': EXPIRED,
      'TIMED_OUT': TIMED_OUT,
      'BOT_DIED': BOT_DIED,
      'CANCELED': CANCELED,
      'COMPLETED': COMPLETED,
      'KILLED': KILLED,
      'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns the numeric value for a state name string."""
    try:
      return cls._ENUMS[state]
    except KeyError:
      raise ValueError('Invalid state %s' % state)
355
maruel@chromium.org0437a732013-08-27 16:05:52 +0000356
class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      task_output_stdout: container of output destinations; when it contains
          "json", shard stdout is kept in the final summary (see finalize()).
      shard_count: expected number of task shards.
      filter_cb: callback forwarded to isolateserver.fetch_isolated to filter
          which files are fetched -- presumably takes a file path; confirm
          against fetch_isolated's signature.
    """
    # NOTE(review): unicode() below is Python 2 only.
    self.task_output_dir = (
        unicode(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    # Protects _per_shard_results and _storage against concurrent
    # process_shard_result() calls.
    self._lock = threading.Lock()
    # Maps 0-based shard index -> result dict for that shard.
    self._per_shard_results = {}
    # Lazily created isolateserver.Storage shared by all shards.
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    # Add a human-clickable 'view_url' pointing at the isolate server browse
    # page for this shard's outputs.
    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.urlencode(
              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        # NOTE(review): 0700 is a Python 2 octal literal.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shutdowns underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in xrange(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if not requested too.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files.

    Returns None (and logs an error) if |server_ref| disagrees with the
    server/namespace already in use by earlier shards.
    """
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage
487
488
def now():
  """Returns the current time; kept as an indirection so tests can mock it."""
  return time.time()
492
493
def parse_time(value):
  """Converts serialized time from the API to datetime.datetime.

  When microseconds are 0, the '.123456' suffix is elided, so the serialized
  format is not consistent and both layouts must be attempted.

  Raises:
    ValueError: if |value| matches neither layout.
  """
  for layout in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, layout)
    except ValueError:
      continue
  raise ValueError('Failed to parse %s' % value)
504
505
def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Polls the server until the task leaves the PENDING/RUNNING states, the
  deadline passes, or |should_stop| is set.

  Args:
    base_url: Swarming server URL.
    shard_index: 0-based shard index, forwarded to |output_collector|.
    task_id: id of the task to poll.
    timeout: float seconds before giving up; None or 0 means wait forever;
        -1 means a single attempt (500s are then retried inside
        net.url_read_json instead).
    should_stop: threading.Event used to abort the wait early.
    output_collector: optional TaskOutputCollector receiving the result.
    include_perf: if True, asks the server for performance stats.
    fetch_stdout: if True, also fetches the task's stdout.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  # No deadline when timeout is None, 0 or -1.
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
maruel@chromium.org0437a732013-08-27 16:05:52 +0000587
588
def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's not
  worth normally to limit the number threads. Mostly used for testing purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      # NOTE(review): relies on Python 2 where range() returns a list that
      # supports .remove(); would break on Python 3.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          # No shard finished within the interval; optionally print a status
          # line and keep waiting without consuming a task.
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          # Falls through with result=None so the shard is counted as done
          # below without being yielded.
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()
666
667
def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard.

  Arguments:
    swarming: base URL of the Swarming server; used to build the task URL.
    shard_index: 0-based index of this shard within the task.
    metadata: dict describing one task result as returned by the server.
    include_stdout: if True, the task's console output is embedded between
        the header and the footer.

  Returns:
    A single string: the shard's status (and optionally its output) framed
    in an ASCII box.
  """
  # Pending time is only meaningful when the task actually started on a bot
  # and was not deduplicated from a previous run.
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
    ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    # The task never completed normally; measure up to the moment the server
    # abandoned it.
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
    ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as string to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  # The footer summarizes the final state; terminal error states get the
  # state name appended so failures are visible at a glance.
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  # Frame every line in a box as wide as the longest tag line.
  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
    ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
  ])
maruel@chromium.org0437a732013-08-27 16:05:52 +0000733
734
def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Arguments:
    swarming: base URL of the Swarming server.
    task_ids: list of task ids to wait for, one per shard.
    timeout: seconds to wait for completion; see yield_results().
    decorate: if True, each shard's output is pretty-printed in an ASCII box.
    print_status_updates: if True, periodically prints the list of shards
        still pending.
    task_summary_json: path to dump the summary json to, or None.
    task_output_dir: directory to store isolated outputs in, or None.
    task_output_stdout: collection of destinations for the tasks' console
        output; any of 'console' and 'json'.
    include_perf: if True, asks the server for performance statistics.
    filepath_filter: regexp string; only matching output files are fetched.

  Returns:
    process exit code that should be returned to the user.
  """
  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      # 'duration' can be present with an explicit None value (the shard never
      # ran); .get('duration', 0) would return that None and break the sum.
      total_duration += metadata.get('duration') or 0

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            "console" in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join(' %s\n' % l for l in output.splitlines()))
  finally:
    # Always finalize the collector so a partial summary is still written.
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    # sys.stderr.write instead of the py2-only 'print >>' statement; the
    # output is identical and the module stays parseable by both pythons.
    sys.stderr.write(
        'Results from some shards are missing: %s\n' %
        ', '.join(map(str, missing_shards)))
    return 1

  return exit_code if exit_code is not None else 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000805
806
maruel77f720b2015-09-15 12:35:22 -0700807### API management.
808
809
class APIError(Exception):
  """Raised when a Cloud Endpoints API call or discovery request fails."""
812
813
def endpoints_api_discovery_apis(host):
  """Returns metadata about all the APIs exposed by a host, using Cloud
  Endpoints' API Discovery Service.

  https://developers.google.com/discovery/v1/reference/apis/list

  Raises:
    APIError: if the listing or any per-API discovery document can't be read.
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  listing = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if listing is None:
    raise APIError('Failed to discover APIs on %s' % host)
  apis = {}
  for entry in listing['items']:
    # The discovery service describes itself too; skip that entry.
    if entry['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    details = net.url_read_json(entry['discoveryRestUrl'])
    if details is None:
      raise APIError('Failed to discover %s on %s' % (entry['id'], host))
    apis[entry['id']] = details
  return apis
837
838
def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items.

  Arguments:
    base_url: URL of a REST endpoint; 'limit' and 'cursor' query parameters
        are appended to it as needed.
    limit: maximum total number of items to fetch; falsy means no limit.

  Returns:
    Tuple (data, yielder):
      data: decoded json dict of the first request, with 'cursor' popped out.
      yielder: generator function that yields the 'items' list of each
          follow-up page, walking the server-provided cursor until it is
          exhausted or 'limit' items were fetched in total.

  Raises:
    Failure: when any request fails or returns unparseable json.
  """
  # Page size requested from the server for each request.
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    # Closure over the cursor/total from the first page; each iteration
    # fetches the next page until the server stops returning a cursor.
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Try to get automatically if cursors are needed
    # by looking at the 'cursor' items.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder
878
879
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500880### Commands.
881
882
def abort_task(_swarming, _manifest):
  """Aborts the execution of a task manifest that was already triggered."""
  # TODO(vadimsh): Not supported by the server yet.
886
887
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400888def add_filter_options(parser):
maruel681d6802017-01-17 16:56:03 -0800889 parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500890 parser.filter_group.add_option(
Marc-Antoine Ruelb39e8cf2014-01-20 10:39:31 -0500891 '-d', '--dimension', default=[], action='append', nargs=2,
Marc-Antoine Ruel92f32422013-11-06 18:12:13 -0500892 dest='dimensions', metavar='FOO bar',
893 help='dimension to filter on')
Brad Hallf78187a2018-10-19 17:08:55 +0000894 parser.filter_group.add_option(
895 '--optional-dimension', default=[], action='append', nargs=3,
896 dest='optional_dimensions', metavar='key value expiration',
897 help='optional dimensions which will result in additional task slices ')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500898 parser.add_option_group(parser.filter_group)
899
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400900
Brad Hallf78187a2018-10-19 17:08:55 +0000901def _validate_filter_option(parser, key, value, expiration, argname):
902 if ':' in key:
903 parser.error('%s key cannot contain ":"' % argname)
904 if key.strip() != key:
905 parser.error('%s key has whitespace' % argname)
906 if not key:
907 parser.error('%s key is empty' % argname)
908
909 if value.strip() != value:
910 parser.error('%s value has whitespace' % argname)
911 if not value:
912 parser.error('%s value is empty' % argname)
913
914 if expiration is not None:
915 try:
916 expiration = int(expiration)
917 except ValueError:
918 parser.error('%s expiration is not an integer' % argname)
919 if expiration <= 0:
920 parser.error('%s expiration should be positive' % argname)
921 if expiration % 60 != 0:
922 parser.error('%s expiration is not divisible by 60' % argname)
923
924
def process_filter_options(parser, options):
  """Validates --dimension/--optional-dimension values and sorts them."""
  for dim_key, dim_value in options.dimensions:
    _validate_filter_option(parser, dim_key, dim_value, None, 'dimension')
  for dim_key, dim_value, expiration in options.optional_dimensions:
    _validate_filter_option(
        parser, dim_key, dim_value, expiration, 'optional-dimension')
  options.dimensions.sort()
931
932
def add_sharding_options(parser):
  """Registers the sharding related options on |parser|."""
  group = optparse.OptionGroup(parser, 'Sharding options')
  group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(group)
  # Exposed as an attribute so callers can extend the same group.
  parser.sharding_group = group
939
940
def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  # Options describing the properties of a single TaskSlice.
  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameter and return this result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by'
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  # Options describing the overall TaskRequest (applies to all slices).
  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided, if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  # --deadline is a hidden deprecated alias for --expiration.
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001040
1041
def process_trigger_options(parser, options, args):
  """Processes trigger options and does preparatory steps.

  Validates the command line options registered by add_trigger_options(),
  then builds the task request out of them: inputs reference, command, CIPD
  packages, secret bytes, named caches and one TaskSlice per optional
  dimension expiration plus the final default slice.

  Arguments:
    parser: OptionParser used only to report errors via parser.error().
    options: parsed option values from add_trigger_options().
    args: positional arguments; the raw command when --raw-cmd is used.

  Returns:
    NewTaskRequest instance.
  """
  process_filter_options(parser, options)
  options.env = dict(options.env)
  # Strip a leading '--' separator if present.
  if args and args[0] == '--':
    args = args[1:]

  if not options.dimensions:
    parser.error('Please at least specify one --dimension')
  if not any(k == 'pool' for k, _v in options.dimensions):
    parser.error('You must specify --dimension pool <value>')
  if not all(len(t.split(':', 1)) == 2 for t in options.tags):
    parser.error('--tags must be in the format key:value')
  if options.raw_cmd and not args:
    parser.error(
        'Arguments with --raw-cmd should be passed after -- as command '
        'delimiter.')
  if options.isolate_server and not options.namespace:
    parser.error(
        '--namespace must be a valid value when --isolate-server is used')
  if not options.isolated and not options.raw_cmd:
    parser.error('Specify at least one of --raw-cmd or --isolated or both')

  # Isolated
  # --isolated is required only if --raw-cmd wasn't provided.
  # TODO(maruel): --isolate-server may be optional as Swarming may have its own
  # preferred server.
  isolateserver.process_isolate_server_options(
      parser, options, False, not options.raw_cmd)
  inputs_ref = None
  if options.isolate_server:
    inputs_ref = FilesRef(
        isolated=options.isolated,
        isolatedserver=options.isolate_server,
        namespace=options.namespace)

  # Command
  command = None
  extra_args = None
  if options.raw_cmd:
    command = args
    if options.relative_cwd:
      # Reject a relative cwd that would escape the task's working directory.
      a = os.path.normpath(os.path.abspath(options.relative_cwd))
      if not a.startswith(os.getcwd()):
        parser.error(
            '--relative-cwd must not try to escape the working directory')
  else:
    if options.relative_cwd:
      parser.error('--relative-cwd requires --raw-cmd')
    extra_args = args

  # CIPD
  cipd_packages = []
  for p in options.cipd_package:
    # Format is path:package_name:version; package names may contain ':'? No:
    # only the first two ':' are separators, hence split(':', 2).
    split = p.split(':', 2)
    if len(split) != 3:
      parser.error('CIPD packages must take the form: path:package:version')
    cipd_packages.append(CipdPackage(
        package_name=split[1],
        path=split[0],
        version=split[2]))
  cipd_input = None
  if cipd_packages:
    cipd_input = CipdInput(
        client_package=None,
        packages=cipd_packages,
        server=None)

  # Secrets
  secret_bytes = None
  if options.secret_bytes_path:
    with open(options.secret_bytes_path, 'rb') as f:
      secret_bytes = f.read().encode('base64')

  # Named caches
  caches = [
    {u'name': unicode(i[0]), u'path': unicode(i[1])}
    for i in options.named_cache
  ]

  # Group --env-prefix values by variable name, preserving insertion order of
  # the paths for each variable.
  env_prefixes = {}
  for k, v in options.env_prefix:
    env_prefixes.setdefault(k, []).append(v)

  # Get dimensions into the key/value format we can manipulate later.
  orig_dims = [
      {'key': key, 'value': value} for key, value in options.dimensions]
  orig_dims.sort(key=lambda x: (x['key'], x['value']))

  # Construct base properties that we will use for all the slices, adding in
  # optional dimensions for the fallback slices.
  properties = TaskProperties(
      caches=caches,
      cipd_input=cipd_input,
      command=command,
      relative_cwd=options.relative_cwd,
      dimensions=orig_dims,
      env=options.env,
      env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.iteritems()],
      execution_timeout_secs=options.hard_timeout,
      extra_args=extra_args,
      grace_period_secs=30,
      idempotent=options.idempotent,
      inputs_ref=inputs_ref,
      io_timeout_secs=options.io_timeout,
      outputs=options.output,
      secret_bytes=secret_bytes)

  slices = []

  # Group the optional dimensions by expiration.
  dims_by_exp = {}
  for key, value, exp_secs in options.optional_dimensions:
    dims_by_exp.setdefault(int(exp_secs), []).append(
        {'key': key, 'value': value})

  # Create the optional slices with expiration deltas, we fix up the properties
  # below.
  last_exp = 0
  for expiration_secs in sorted(dims_by_exp):
    t = TaskSlice(
        expiration_secs=expiration_secs - last_exp,
        properties=properties,
        wait_for_capacity=False)
    slices.append(t)
    last_exp = expiration_secs

  # Add back in the default slice (the last one).
  exp = max(int(options.expiration) - last_exp, 60)
  base_task_slice = TaskSlice(
      expiration_secs=exp,
      properties=properties,
      wait_for_capacity=options.wait_for_capacity)
  slices.append(base_task_slice)

  # Add optional dimensions to the task slices, replacing a dimension that
  # has the same key if it is a dimension where repeating isn't valid (otherwise
  # we append it). Currently the only dimension we can repeat is "caches"; the
  # rest (os, cpu, etc) shouldn't be repeated.
  extra_dims = []
  for i, (_, kvs) in enumerate(sorted(dims_by_exp.iteritems(), reverse=True)):
    dims = list(orig_dims)
    # Replace or append the key/value pairs for this expiration in extra_dims;
    # we keep extra_dims around because we are iterating backwards and filling
    # in slices with shorter expirations. Dimensions expire as time goes on so
    # the slices that expire earlier will generally have more dimensions.
    for kv in kvs:
      if kv['key'] == 'caches':
        extra_dims.append(kv)
      else:
        extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
    # Then, add all the optional dimensions to the original dimension set, again
    # replacing if needed.
    for kv in extra_dims:
      if kv['key'] == 'caches':
        dims.append(kv)
      else:
        dims = [x for x in dims if x['key'] != kv['key']] + [kv]
    dims.sort(key=lambda x: (x['key'], x['value']))
    slice_properties = properties._replace(dimensions=dims)
    # -2 - i walks backwards from the slice just before the default one.
    slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)

  return NewTaskRequest(
      name=default_task_name(options),
      parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
      priority=options.priority,
      task_slices=slices,
      service_account=options.service_account,
      tags=options.tags,
      user=options.user,
      pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001217
1218
class TaskOutputStdoutOption(optparse.Option):
  """Where to output the each task's console output (stderr/stdout).

  The output will be;
  none - not be downloaded.
  json - stored in summary.json file *only*.
  console - shown on stdout *only*.
  all - stored in summary.json and shown on stdout.
  """

  choices = ['all', 'json', 'console', 'none']

  def __init__(self, *args, **kw):
    # NOTE: the class docstring above doubles as the --help text (whitespace
    # collapsed below), so it must not be reworded casually.
    optparse.Option.__init__(
        self,
        *args,
        choices=self.choices,
        default=['console', 'json'],
        help=re.sub(r'\s\s*', ' ', self.__doc__),
        **kw)

  def convert_value(self, opt, value):
    # Reject anything that is not one of the declared choices.
    if value not in self.choices:
      raise optparse.OptionValueError("%s must be one of %s not %r" % (
          self.get_opt_string(), self.choices, value))
    # Map the single keyword onto the list of output destinations.
    if value == 'all':
      return ['console', 'json']
    if value == 'none':
      return []
    return [value]
1250
1251
def add_collect_options(parser):
  """Registers the command line options shared by commands that wait for and
  fetch task results (collect, run, etc.).

  Adds --timeout and logging options to the parser's existing groups, and
  creates a new 'Task output' group controlling where results are written.
  """
  parser.server_group.add_option(
      '-t', '--timeout', type='float', default=0.,
      help='Timeout to wait for result, set to -1 for no timeout and get '
           'current state; defaults to waiting until the task completes')
  parser.group_logging.add_option(
      '--decorate', action='store_true', help='Decorate output')
  parser.group_logging.add_option(
      '--print-status-updates', action='store_true',
      help='Print periodic status updates')
  parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
  parser.task_output_group.add_option(
      '--task-summary-json',
      metavar='FILE',
      # Fixed typo: 'as know to server' -> 'as known to server'.
      help='Dump a summary of task results to this file as json. It contains '
           'only shards statuses as known to server directly. Any output files '
           'emitted by the task can be collected by using --task-output-dir')
  parser.task_output_group.add_option(
      '--task-output-dir',
      metavar='DIR',
      help='Directory to put task results into. When the task finishes, this '
           'directory contains per-shard directory with output files produced '
           'by shards: <task-output-dir>/<zero-based-shard-index>/.')
  # Custom option type: expands 'all'/'none' into a list of destinations.
  parser.task_output_group.add_option(TaskOutputStdoutOption(
      '--task-output-stdout'))
  parser.task_output_group.add_option(
      '--filepath-filter',
      help='This is regexp filter used to specify downloaded filepath when '
           'collecting isolated output.')
  parser.task_output_group.add_option(
      '--perf', action='store_true', default=False,
      help='Includes performance statistics')
  parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001285
1286
def process_collect_options(parser, options):
  """Validates the options registered by add_collect_options().

  A timeout of exactly -1 means "do not wait, report current state"; every
  other negative value is rejected.
  """
  timeout = options.timeout
  if timeout < 0 and timeout != -1:
    parser.error('Invalid --timeout value')
1291
1292
@subcommand.usage('bots...')
def CMDbot_delete(parser, args):
  """Forcibly deletes bots from the Swarming server."""
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Do not prompt for confirmation')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify bots to delete')

  to_delete = sorted(args)
  if not options.force:
    # Show the exact list and ask the user to confirm before touching anything.
    print('Delete the following bots?')
    for bot_id in to_delete:
      print('  %s' % bot_id)
    if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
      print('Goodbye.')
      return 1

  # Best effort: attempt every bot even if an earlier deletion fails, and
  # report overall failure at the end.
  exit_code = 0
  for bot_id in to_delete:
    url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot_id)
    if net.url_read_json(url, data={}, method='POST') is None:
      print('Deleting %s failed. Probably already gone' % bot_id)
      exit_code = 1
  return exit_code
1319
1320
def CMDbots(parser, args):
  """Returns information about the bots connected to the Swarming server."""
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Filter out bots alive, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Keep both dead and alive bots')
  parser.filter_group.add_option(
      '--busy', action='store_true', help='Keep only busy bots')
  parser.filter_group.add_option(
      '--idle', action='store_true', help='Keep only idle bots')
  parser.filter_group.add_option(
      '--mp', action='store_true',
      help='Keep only Machine Provider managed bots')
  parser.filter_group.add_option(
      '--non-mp', action='store_true',
      help='Keep only non Machine Provider managed bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)
  process_filter_options(parser, options)

  # The paired flags below are mutually exclusive; reject contradictions early.
  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead or --dead-only')
  if options.busy and options.idle:
    parser.error('Use only one of --busy or --idle')
  if options.mp and options.non_mp:
    parser.error('Use only one of --mp or --non-mp')

  # Each filter is sent to the server as a tri-state query parameter:
  # TRUE / FALSE filter on that state, NONE appears to mean "no filtering"
  # (server-side semantics; confirm against the bots/list API).
  url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
  values = []
  if options.dead_only:
    values.append(('is_dead', 'TRUE'))
  elif options.keep_dead:
    values.append(('is_dead', 'NONE'))
  else:
    values.append(('is_dead', 'FALSE'))

  if options.busy:
    values.append(('is_busy', 'TRUE'))
  elif options.idle:
    values.append(('is_busy', 'FALSE'))
  else:
    values.append(('is_busy', 'NONE'))

  if options.mp:
    values.append(('is_mp', 'TRUE'))
  elif options.non_mp:
    values.append(('is_mp', 'FALSE'))
  else:
    values.append(('is_mp', 'NONE'))

  # Dimensions are encoded as repeated 'dimensions=key:value' parameters.
  for key, value in options.dimensions:
    values.append(('dimensions', '%s:%s' % (key, value)))
  url += urllib.urlencode(values)
  try:
    # get_yielder returns the first page plus a generator for the remaining
    # pages; 0 means no client-side limit. Accumulate all pages into 'bots'.
    data, yielder = get_yielder(url, 0)
    bots = data.get('items') or []
    for items in yielder():
      if items:
        bots.extend(items)
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  # Natural sort so e.g. bot2 < bot10.
  for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
    print bot['bot_id']
    if not options.bare:
      # Bot dimensions arrive as a list of {'key': ..., 'value': ...} dicts.
      dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
      print '  %s' % json.dumps(dimensions, sort_keys=True)
      if bot.get('task_id'):
        print '  task: %s' % bot['task_id']
  return 0
1396
1397
@subcommand.usage('task_id')
def CMDcancel(parser, args):
  """Cancels a task."""
  parser.add_option(
      '-k', '--kill-running', action='store_true', default=False,
      help='Kill the task even if it was running')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify the task to cancel')
  data = {'kill_running': options.kill_running}
  for task_id in args:
    url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
    resp = net.url_read_json(url, data=data, method='POST')
    if resp is None:
      # Fixed copy-paste from CMDbot_delete: this command cancels, it does not
      # delete. Stop at the first failure, consistent with the original.
      print('Cancelling %s failed. Probably already gone' % task_id)
      return 1
    logging.info('%s', resp)
  return 0
1416
1417
@subcommand.usage('--json file | task_id...')
def CMDcollect(parser, args):
  """Retrieves results of one or multiple Swarming task by its ID.

  The result can be in multiple part if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  parser.add_option(
      '-j', '--json',
      help='Load the task ids from .json as saved by trigger --dump-json')
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  # Task ids come either from the command line or from a --json file saved by
  # 'trigger --dump-json', but not both.
  if not args and not options.json:
    parser.error('Must specify at least one task id or --json.')
  if args and options.json:
    parser.error('Only use one of task id or --json.')

  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    try:
      with fs.open(options.json, 'rb') as f:
        data = json.load(f)
    except (IOError, ValueError):
      parser.error('Failed to open %s' % options.json)
    try:
      # Collect shards in shard_index order.
      tasks = sorted(
          data['tasks'].itervalues(), key=lambda x: x['shard_index'])
      args = [t['task_id'] for t in tasks]
    except (KeyError, TypeError):
      parser.error('Failed to process %s' % options.json)
    if not options.timeout:
      # Take in account all the task slices.
      # No explicit --timeout: derive an upper bound from the trigger request.
      # Each slice may wait up to expiration_secs before starting and then run
      # for execution_timeout_secs; track the worst case across slices and add
      # a 10s slack for server/network overhead.
      offset = 0
      for s in data['request']['task_slices']:
        m = (offset + s['properties']['execution_timeout_secs'] +
             s['expiration_secs'])
        if m > options.timeout:
          options.timeout = m
        offset += s['expiration_secs']
      options.timeout += 10.
  else:
    # Task ids are lowercase hex strings; reject anything else early.
    valid = frozenset('0123456789abcdef')
    if any(not valid.issuperset(task_id) for task_id in args):
      parser.error('Task ids are 0-9a-f.')

  try:
    return collect(
        options.swarming,
        args,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
1479
1480
@subcommand.usage('[method name]')
def CMDpost(parser, args):
  """Sends a JSON RPC POST to one API endpoint and prints out the raw result.

  Input data must be sent to stdin, result is printed to stdout.

  If HTTP response code >= 400, returns non-zero.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify only API name')
  endpoint = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  # The request body is read verbatim from stdin and forwarded untouched.
  payload = sys.stdin.read()
  try:
    reply = net.url_read(endpoint, data=payload, method='POST')
  except net.TimeoutError:
    sys.stderr.write('Timeout!\n')
    return 1
  if not reply:
    sys.stderr.write('No response!\n')
    return 1
  sys.stdout.write(reply)
  return 0
1504
1505
@subcommand.usage('[method name]')
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Raw task request and results:
      swarming.py query -S server-url.com task/123456/request
      swarming.py query -S server-url.com task/123456/result

    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'bot1':
      swarming.py query -S server-url.com --limit 10 bot/bot1/tasks

    Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
  """
  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
           'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')
  # args[0] is the API method path, optionally with pre-escaped query args.
  base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  try:
    # get_yielder returns the first page of results plus a generator for the
    # remaining pages; accumulate everything into data['items'].
    data, yielder = get_yielder(base_url, options.limit)
    for items in yielder():
      if items:
        data['items'].extend(items)
      if options.progress:
        # One dot per fetched page, on stderr so stdout stays pure JSON.
        sys.stderr.write('.')
        sys.stderr.flush()
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  if options.progress:
    sys.stderr.write('\n')
    sys.stderr.flush()
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    tools.write_json(options.json, data, True)
  else:
    try:
      tools.write_json(sys.stdout, data, False)
      sys.stdout.write('\n')
    except IOError:
      # E.g. stdout closed by a downstream pipe; not an error for this command.
      pass
  return 0
1566
1567
def CMDquery_list(parser, args):
  """Returns list of all the Swarming APIs that can be used with command
  'query'.
  """
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  options, args = parser.parse_args(args)
  if args:
    parser.error('No argument allowed.')

  try:
    # Fetch the Cloud Endpoints discovery documents from the server.
    apis = endpoints_api_discovery_apis(options.swarming)
  except APIError as e:
    parser.error(str(e))
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    with fs.open(options.json, 'wb') as f:
      json.dump(apis, f)
  else:
    help_url = (
      'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
      options.swarming)
    for i, (api_id, api) in enumerate(sorted(apis.iteritems())):
      if i:
        print('')
      print api_id
      print '  ' + api['description'].strip()
      # Two discovery document shapes are handled: the legacy one nests
      # methods under 'resources', the newer one lists 'methods' directly.
      if 'resources' in api:
        # Old.
        # TODO(maruel): Remove.
        # pylint: disable=too-many-nested-blocks
        for j, (resource_name, resource) in enumerate(
            sorted(api['resources'].iteritems())):
          if j:
            print('')
          for method_name, method in sorted(resource['methods'].iteritems()):
            # Only list the GET ones.
            if method['httpMethod'] != 'GET':
              continue
            print '- %s.%s: %s' % (
                resource_name, method_name, method['path'])
            # Wrap the description at 78 columns with a 2-space indent.
            print('\n'.join(
                '  ' + l for l in textwrap.wrap(
                    method.get('description', 'No description'), 78)))
            print '  %s%s%s' % (help_url, api['servicePath'], method['id'])
      else:
        # New.
        for method_name, method in sorted(api['methods'].iteritems()):
          # Only list the GET ones.
          if method['httpMethod'] != 'GET':
            continue
          print '- %s: %s' % (method['id'], method['path'])
          print('\n'.join(
              '  ' + l for l in textwrap.wrap(method['description'], 78)))
          print '  %s%s%s' % (help_url, api['servicePath'], method['id'])
  return 0
1624
1625
@subcommand.usage('(hash|isolated) [-- extra_args]')
def CMDrun(parser, args):
  """Triggers a task and wait for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (task_request.name, args[0], e.args[0]))
    return 1
  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  print('Triggered task: %s' % task_request.name)
  # Collect shards in shard_index order so output is deterministic.
  task_ids = [
    t['task_id']
    for t in sorted(tasks.itervalues(), key=lambda x: x['shard_index'])
  ]
  if not options.timeout:
    # No explicit --timeout: derive an upper bound from the task slices, same
    # arithmetic as in CMDcollect. Each slice may wait up to expiration_secs
    # then run for execution_timeout_secs; keep the worst case plus 10s slack.
    offset = 0
    for s in task_request.task_slices:
      m = (offset + s.properties.execution_timeout_secs +
           s.expiration_secs)
      if m > options.timeout:
        options.timeout = m
      offset += s.expiration_secs
    options.timeout += 10.
  try:
    return collect(
        options.swarming,
        task_ids,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001678
1679
@subcommand.usage('task_id -- <extra_args>')
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  You can pass further additional arguments to the target command by passing
  them after --.
  """
  parser.add_option(
      '--output', metavar='DIR', default='out',
      help='Directory that will have results stored into')
  parser.add_option(
      '--work', metavar='DIR', default='work',
      help='Directory to map the task input files into')
  parser.add_option(
      '--cache', metavar='DIR', default='cache',
      help='Directory that contains the input cache')
  parser.add_option(
      '--leak', action='store_true',
      help='Do not delete the working directory after execution')
  options, args = parser.parse_args(args)
  # Everything after an optional '--' separator is appended to the task's
  # command line.
  extra_args = []
  if not args:
    parser.error('Must specify exactly one task id.')
  if len(args) > 1:
    if args[1] == '--':
      if len(args) > 2:
        extra_args = args[2:]
    else:
      extra_args = args[1:]

  # Fetch the original trigger request so the same properties can be replayed.
  url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1

  # Refuse to reuse a stale work directory; the caller must clean it up.
  workdir = unicode(os.path.abspath(options.work))
  if fs.isdir(workdir):
    parser.error('Please delete the directory %r first' % options.work)
  fs.mkdir(workdir)
  # Separate persistent cache for the CIPD client and packages.
  cachedir = unicode(os.path.abspath('cipd_cache'))
  if not fs.exists(cachedir):
    fs.mkdir(cachedir)

  properties = request['properties']
  # Start from the current environment and mimic bot-provided variables.
  env = os.environ.copy()
  env['SWARMING_BOT_ID'] = 'reproduce'
  env['SWARMING_TASK_ID'] = 'reproduce'
  if properties.get('env'):
    logging.info('env: %r', properties['env'])
    # Entries are {'key': ..., 'value': ...}; an empty value unsets the var.
    for i in properties['env']:
      key = i['key']
      if not i['value']:
        env.pop(key, None)
      else:
        env[key] = i['value']

  if properties.get('env_prefixes'):
    env_prefixes = properties['env_prefixes']
    logging.info('env_prefixes: %r', env_prefixes)
    # Prepend workdir-relative paths to path-list variables (e.g. PATH),
    # keeping any pre-existing value at the end.
    for i in env_prefixes:
      key = i['key']
      paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
      cur = env.get(key)
      if cur:
        paths.append(cur)
      env[key] = os.path.pathsep.join(paths)

  command = []
  if (properties.get('inputs_ref') or {}).get('isolated'):
    # Create the tree.
    server_ref = isolate_storage.ServerRef(
        properties['inputs_ref']['isolatedserver'],
        properties['inputs_ref']['namespace'])
    with isolateserver.get_storage(server_ref) as storage:
      # Do not use MemoryContentAddressedCache here, as on 32-bits python,
      # inputs larger than ~1GiB will not fit in memory. This is effectively a
      # leak.
      policies = local_caching.CachePolicies(0, 0, 0, 0)
      cache = local_caching.DiskContentAddressedCache(
          unicode(os.path.abspath(options.cache)), policies, False)
      bundle = isolateserver.fetch_isolated(
          properties['inputs_ref']['isolated'], storage, cache, workdir, False)
      command = bundle.command
      if bundle.relative_cwd:
        workdir = os.path.join(workdir, bundle.relative_cwd)
      command.extend(properties.get('extra_args') or [])

  if properties.get('command'):
    command.extend(properties['command'])

  # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
  command = tools.fix_python_cmd(command, env)
  if not options.output:
    # Probe with a placeholder: if process_command would rewrite the command,
    # the task declares outputs and an output directory is mandatory.
    new_command = run_isolated.process_command(command, 'invalid', None)
    if new_command != command:
      parser.error('The task has outputs, you must use --output-dir')
  else:
    # Make the path absolute, as the process will run from a subdirectory.
    options.output = os.path.abspath(options.output)
    new_command = run_isolated.process_command(
        command, options.output, None)
    if not os.path.isdir(options.output):
      os.makedirs(options.output)
  command = new_command
  file_path.ensure_command_has_abs_path(command, workdir)

  # Install the CIPD packages the task requested, using the same client.
  if properties.get('cipd_input'):
    ci = properties['cipd_input']
    cp = ci['client_package']
    client_manager = cipd.get_client(
        ci['server'], cp['package_name'], cp['version'], cachedir)

    with client_manager as client:
      by_path = collections.defaultdict(list)
      for pkg in ci['packages']:
        path = pkg['path']
        # cipd deals with 'root' as ''
        if path == '.':
          path = ''
        by_path[path].append((pkg['package_name'], pkg['version']))
      client.ensure(workdir, by_path, cache_dir=cachedir)

  try:
    # Finally run the reconstructed command in the prepared tree.
    return subprocess42.call(command + extra_args, env=env, cwd=workdir)
  except OSError as e:
    print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
    print >> sys.stderr, str(e)
    return 1
  finally:
    # Do not delete options.cache.
    if not options.leak:
      file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001817
1818
maruel0eb1d1b2015-10-02 14:48:21 -07001819@subcommand.usage('bot_id')
1820def CMDterminate(parser, args):
1821 """Tells a bot to gracefully shut itself down as soon as it can.
1822
1823 This is done by completing whatever current task there is then exiting the bot
1824 process.
1825 """
1826 parser.add_option(
1827 '--wait', action='store_true', help='Wait for the bot to terminate')
1828 options, args = parser.parse_args(args)
1829 if len(args) != 1:
1830 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001831 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001832 request = net.url_read_json(url, data={})
1833 if not request:
1834 print >> sys.stderr, 'Failed to ask for termination'
1835 return 1
1836 if options.wait:
1837 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001838 options.swarming,
1839 [request['task_id']],
1840 0.,
1841 False,
1842 False,
1843 None,
1844 None,
1845 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001846 False,
1847 None)
maruelbfc5f872017-06-10 16:43:17 -07001848 else:
1849 print request['task_id']
maruel0eb1d1b2015-10-02 14:48:21 -07001850 return 0
1851
1852
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001853@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001854def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001855 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001856
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001857 Passes all extra arguments provided after '--' as additional command line
1858 arguments for an isolated command specified in *.isolate file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001859 """
1860 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001861 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001862 parser.add_option(
1863 '--dump-json',
1864 metavar='FILE',
1865 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001866 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001867 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001868 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001869 tasks = trigger_task_shards(
1870 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001871 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001872 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001873 tasks_sorted = sorted(
1874 tasks.itervalues(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001875 if options.dump_json:
1876 data = {
maruel0a25f6c2017-05-10 10:43:23 -07001877 'base_task_name': task_request.name,
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001878 'tasks': tasks,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001879 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001880 }
maruel46b015f2015-10-13 18:40:35 -07001881 tools.write_json(unicode(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001882 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001883 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001884 (options.swarming, options.dump_json))
1885 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001886 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001887 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001888 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1889 print('Or visit:')
1890 for t in tasks_sorted:
1891 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001892 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001893 except Failure:
1894 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001895 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001896
1897
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001898class OptionParserSwarming(logging_utils.OptionParserWithLogging):
maruel@chromium.org0437a732013-08-27 16:05:52 +00001899 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001900 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001901 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001902 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001903 self.server_group.add_option(
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001904 '-S', '--swarming',
Kevin Graney5346c162014-01-24 12:20:01 -05001905 metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001906 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001907 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001908 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001909
1910 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001911 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001912 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001913 auth.process_auth_options(self, options)
1914 user = self._process_swarming(options)
1915 if hasattr(options, 'user') and not options.user:
1916 options.user = user
1917 return options, args
1918
1919 def _process_swarming(self, options):
1920 """Processes the --swarming option and aborts if not specified.
1921
1922 Returns the identity as determined by the server.
1923 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001924 if not options.swarming:
1925 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001926 try:
1927 options.swarming = net.fix_url(options.swarming)
1928 except ValueError as e:
1929 self.error('--swarming %s' % e)
1930 on_error.report_on_exception_exit(options.swarming)
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001931 try:
1932 user = auth.ensure_logged_in(options.swarming)
1933 except ValueError as e:
1934 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001935 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00001936
1937
def main(args):
  """Dispatches the command line to the matching CMD* subcommand handler."""
  parser = OptionParserSwarming(version=__version__)
  return subcommand.CommandDispatcher(__name__).execute(parser, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001941
1942
if __name__ == '__main__':
  # Presumably suppresses interactive OS crash dialogs for child processes;
  # see subprocess42 for the exact mechanism.
  subprocess42.inhibit_os_error_reporting()
  # Normalize stdout/stderr encoding before any output is produced.
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  # Enable ANSI color escape handling (a no-op where already supported).
  colorama.init()
  sys.exit(main(sys.argv[1:]))