blob: 3df926fbcc54d833a9f3cdc3f1594b70178a201c [file] [log] [blame]
maruel@chromium.org0437a732013-08-27 16:05:52 +00001#!/usr/bin/env python
maruelea586f32016-04-05 11:11:33 -07002# Copyright 2013 The LUCI Authors. All rights reserved.
maruelf1f5e2a2016-05-25 17:10:39 -07003# Use of this source code is governed under the Apache License, Version 2.0
4# that can be found in the LICENSE file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00005
6"""Client tool to trigger tasks or retrieve results from a Swarming server."""
7
Takuto Ikuta0e3e1c42018-11-29 14:21:06 +00008__version__ = '0.14'
maruel@chromium.org0437a732013-08-27 16:05:52 +00009
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050010import collections
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -040011import datetime
maruel@chromium.org0437a732013-08-27 16:05:52 +000012import json
13import logging
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -040014import optparse
maruel@chromium.org0437a732013-08-27 16:05:52 +000015import os
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +100016import re
maruel@chromium.org0437a732013-08-27 16:05:52 +000017import sys
maruel11e31af2017-02-15 07:30:50 -080018import textwrap
Vadim Shtayurab19319e2014-04-27 08:50:06 -070019import threading
maruel@chromium.org0437a732013-08-27 16:05:52 +000020import time
21import urllib
maruel@chromium.org0437a732013-08-27 16:05:52 +000022
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000023from utils import tools
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000024tools.force_local_third_party()
maruel@chromium.org0437a732013-08-27 16:05:52 +000025
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000026# third_party/
27import colorama
28from chromium import natsort
29from depot_tools import fix_encoding
30from depot_tools import subcommand
31
32# pylint: disable=ungrouped-imports
Vadim Shtayurae34e13a2014-02-02 11:23:26 -080033import auth
iannucci31ab9192017-05-02 19:11:56 -070034import cipd
maruel@chromium.org7b844a62013-09-17 13:04:59 +000035import isolateserver
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +000036import isolate_storage
Marc-Antoine Ruel2666d9c2018-05-18 13:52:02 -040037import local_caching
maruelc070e672016-02-22 17:32:57 -080038import run_isolated
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000039from utils import file_path
40from utils import fs
41from utils import logging_utils
42from utils import net
43from utils import on_error
44from utils import subprocess42
45from utils import threading_utils
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050046
47
class Failure(Exception):
  """Generic failure raised by swarming client operations."""
51
52
def default_task_name(options):
  """Returns options.task_name, or builds a default name when it is unset.

  The generated name is '<user>/<dim1=val1_dim2=val2...>[/<isolated>]'.
  """
  if options.task_name:
    return options.task_name
  dims = '_'.join('%s=%s' % (k, v) for k, v in options.dimensions)
  name = u'%s/%s' % (options.user, dims)
  if options.isolated:
    name += u'/' + options.isolated
  return name
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050063
64
65### Triggering.
66
67
# A single CIPD package to install; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage', ['package_name', 'path', 'version'])
76
77
# CIPD configuration for a task; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput', ['client_package', 'packages', 'server'])
86
87
# Reference to an isolated tree; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef', ['isolated', 'isolatedserver', 'namespace'])
96
97
# A key mapped to a repeated string value; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple('StringListPair', ['key', 'value'])
105
106
# Properties of a single task execution; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple('TaskProperties', [
    'caches',
    'cipd_input',
    'command',
    'relative_cwd',
    'dimensions',
    'env',
    'env_prefixes',
    'execution_timeout_secs',
    'extra_args',
    'grace_period_secs',
    'idempotent',
    'inputs_ref',
    'io_timeout_secs',
    'outputs',
    'secret_bytes',
])
127
128
# One fallback alternative for running a task; mirrors the server-side
# structure. See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice', ['expiration_secs', 'properties', 'wait_for_capacity'])
137
138
# Full new-task request payload; mirrors the server-side structure.
# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple('NewTaskRequest', [
    'name',
    'parent_task_id',
    'priority',
    'task_slices',
    'service_account',
    'tags',
    'user',
    'pool_task_template',
])
152
153
def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict.

  Lists and tuples are converted element-wise; dict values are converted
  in place; everything else is returned unchanged.
  """
  # Namedtuples are tuples too, so the _asdict probe must run before the
  # generic list/tuple check.
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, dict):
    return {key: namedtuple_to_dict(val) for key, val in value.iteritems()}
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(item) for item in value]
  return value
maruel77f720b2015-09-15 12:35:22 -0700163
164
def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Omit 'service_account' when unset so that older servers, which do not
  # know about the field, are not confused by it.
  if not out['service_account']:
    out.pop('service_account')
  # The server wants each slice's 'env' as a key-sorted list of
  # {'key': ..., 'value': ...} pairs rather than a mapping.
  for task_slice in out['task_slices']:
    env_mapping = task_slice['properties']['env']
    env_pairs = [
        {'key': name, 'value': val}
        for name, val in env_mapping.iteritems()
    ]
    task_slice['properties']['env'] = sorted(
        env_pairs, key=lambda pair: pair['key'])
  return out
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500183
184
def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Args:
    swarming: base URL of the Swarming server.
    raw_request: json-compatible dict, as produced by
        task_request_to_raw_request(); must contain at least 'name'.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
    None on failure (the error is reported via on_error.report()).
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error. Collect every message/debugInfo pair into a
    # single report so nothing is lost.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result
221
222
def setup_googletest(env, shards, index):
  """Returns |env| extended with googletest sharding environment variables.

  When |shards| <= 1, |env| is returned untouched. Otherwise a new list is
  returned; the input list is never mutated. Asserts if the sharding
  variables are already present.
  """
  if shards <= 1:
    return env
  existing_keys = frozenset(item['key'] for item in env)
  assert 'GTEST_SHARD_INDEX' not in existing_keys, env
  assert 'GTEST_TOTAL_SHARDS' not in existing_keys, env
  return env + [
      {'key': 'GTEST_SHARD_INDEX', 'value': str(index)},
      {'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)},
  ]
232
233
def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Args:
    swarming: base URL of the Swarming server.
    task_request: NewTaskRequest instance to trigger.
    shards: number of shards to trigger; each gets GTEST_SHARD_INDEX /
        GTEST_TOTAL_SHARDS injected into its env when > 1.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """Converts the request into a raw request dict for one shard.

    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
      of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple tasks slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If
      # we see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      # total_shards may still be None; in Python 2 'None > 1' is False, so
      # the name is left unchanged in that case.
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in xrange(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    # The server may lower the priority of the request; warn once.
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print >> sys.stderr, (
          'Priority was reset to %s' % task['request']['priority'])
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print >> sys.stderr, 'Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests))
    for task_dict in tasks.itervalues():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks
307
308
309### Collection.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000310
311
# How often to print status updates to stdout in 'collect'.
# 5 minutes, expressed in seconds (float).
STATUS_UPDATE_INTERVAL = 5 * 60.
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700314
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400315
class TaskState(object):
  """Enum of possible task states, mirrored from the server.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  # State names in which the task has not reached a final outcome yet.
  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
      'RUNNING': RUNNING,
      'PENDING': PENDING,
      'EXPIRED': EXPIRED,
      'TIMED_OUT': TIMED_OUT,
      'BOT_DIED': BOT_DIED,
      'CANCELED': CANCELED,
      'COMPLETED': COMPLETED,
      'KILLED': KILLED,
      'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Maps a state name string to its numeric value.

    Raises:
      ValueError: if |state| is not a known state name.
    """
    try:
      return cls._ENUMS[state]
    except KeyError:
      raise ValueError('Invalid state %s' % state)
355
maruel@chromium.org0437a732013-08-27 16:05:52 +0000356
class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      task_output_stdout: the membership test for "json" in finalize() suggests
          a collection of output destination names — TODO confirm with callers.
      shard_count: expected number of task shards.
      filter_cb: forwarded to isolateserver.fetch_isolated() to filter the
          fetched files — presumably a predicate on file paths; verify against
          callers.
    """
    self.task_output_dir = (
        unicode(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    # Guards _per_shard_results and _storage; process_shard_result() is called
    # concurrently from multiple threads.
    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    # Decorate the result with a browsable URL for its isolated outputs.
    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.urlencode(
              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary. Done outside the lock; only
    # _get_storage() synchronizes internally.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shutdowns underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in xrange(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if not requested too.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files.

    Lazily creates the Storage on first call; returns None (and logs) if a
    later shard points at a different server or namespace.
    """
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage
487
488
def now():
  """Returns time.time(); wrapped in a function so tests can mock the clock."""
  return time.time()
492
493
def parse_time(value):
  """Converts serialized time from the API to datetime.datetime.

  The serialized format elides the '.123456' suffix when microseconds are 0,
  so both variants must be attempted.
  """
  with_us = '%Y-%m-%dT%H:%M:%S.%f'
  without_us = '%Y-%m-%dT%H:%M:%S'
  try:
    return datetime.datetime.strptime(value, with_us)
  except ValueError:
    pass
  try:
    return datetime.datetime.strptime(value, without_us)
  except ValueError:
    raise ValueError('Failed to parse %s' % value)
504
505
def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Polls the result endpoint until the task leaves a PENDING/RUNNING state,
  the deadline expires or |should_stop| is set.

  Args:
    base_url: Swarming server URL.
    shard_index: 0-based shard index, forwarded to |output_collector|.
    task_id: id of the task to poll.
    timeout: None or a float. A positive value sets a polling deadline;
        -1 means a single fetch attempt (with internal 50x retries); other
        values mean no deadline.
    should_stop: threading.Event used to abort the polling loop.
    output_collector: optional TaskOutputCollector that receives the result.
    include_perf: if True, requests performance stats with the result.
    fetch_stdout: if True, also fetches the task's stdout.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
maruel@chromium.org0437a732013-08-27 16:05:52 +0000587
588
def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's not
  worth normally to limit the number threads. Mostly used for testing purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          # No shard finished within the interval; optionally report progress
          # and keep waiting.
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          # NOTE: execution falls through here, so an unexpected exception
          # counts the shard as done without yielding it.
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()
666
667
def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
  # Pending time: creation to start for tasks that ran, creation to
  # abandonment for tasks that never did.
  state = metadata.get('state')
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    delta = (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts']))
    pending = '%.1fs' % delta.total_seconds()
  elif (state in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    delta = (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts']))
    pending = '%.1fs' % delta.total_seconds()
  else:
    pending = 'N/A'

  duration = (
      '%.1fs' % metadata['duration']
      if metadata.get('duration') is not None else 'N/A')
  # Exit codes come back as strings from the server so precision isn't lost.
  exit_code = (
      '%s' % metadata['exit_code']
      if metadata.get('exit_code') is not None else 'N/A')
  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  header = 'Shard %d %s' % (shard_index, url)
  footer1 = 'End of shard %d' % (shard_index)
  if state == 'CANCELED':
    footer2 = ' Pending: %s CANCELED' % pending
  elif state == 'EXPIRED':
    footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif state in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  # Box everything in a frame wide enough for the longest line.
  width = max(len(line) for line in (header, footer1, footer2))
  pad = '+-%s-+' % ('-' * width)
  boxed = lambda line: '| %s |' % line.ljust(width)

  if include_stdout:
    return '\n'.join([
        pad,
        boxed(header),
        pad,
        (metadata.get('output') or '').rstrip(),
        pad,
        boxed(footer1),
        boxed(footer2),
        pad,
    ])
  return '\n'.join([
      pad,
      boxed(header),
      boxed(footer2),
      pad,
  ])
maruel@chromium.org0437a732013-08-27 16:05:52 +0000734
735
def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Arguments:
    swarming: base URL of the Swarming server.
    task_ids: list of task ids to wait for, one per shard.
    timeout: seconds to wait for results; passed through to yield_results().
    decorate: if True, prints each shard's output in a decorated frame.
    print_status_updates: if True, prints periodic status while waiting.
    task_summary_json: path to write the summary JSON to, or None.
    task_output_dir: directory to store shard output files into, or None.
    task_output_stdout: destinations for the tasks' console output; 'console'
        prints it, 'json' keeps it in the summary (see TaskOutputStdoutOption).
    include_perf: if True, requests performance statistics from the server.
    filepath_filter: optional regexp; only matching output files are fetched.

  Returns:
    process exit code that should be returned to the user.
  """
  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      # Keep the first shard's exit code seen, or any non-zero one.
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      # 'duration' may be present but None for shards that never ran; the
      # previous .get('duration', 0) would then add None and raise TypeError.
      total_duration += metadata.get('duration') or 0

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            "console" in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join('  %s\n' % l for l in output.splitlines()))
  finally:
    # Always finalize, even on Ctrl+C or error, so partial results and the
    # summary file are not lost.
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    # sys.stderr.write() instead of the Python 2-only 'print >>' statement;
    # the rest of this file already uses function-call style print().
    sys.stderr.write('Results from some shards are missing: %s\n' %
                     ', '.join(map(str, missing_shards)))
    return 1

  return exit_code if exit_code is not None else 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000806
807
maruel77f720b2015-09-15 12:35:22 -0700808### API management.
809
810
class APIError(Exception):
  """Raised when a Cloud Endpoints API request fails or returns no data."""
  pass
813
814
def endpoints_api_discovery_apis(host):
  """Returns metadata about all the APIs exposed by a host, as reported by
  Cloud Endpoints' API Discovery Service.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  apis = {}
  for api in data['items']:
    api_id = api['id']
    # The discovery service lists itself too; skip that entry.
    if api_id == 'discovery:v1':
      continue
    # 'discoveryRestUrl' is of the following form:
    # host + '/_ah/api/discovery/v1/apis/%s/%s/rest' % (id, version)
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api_id, host))
    apis[api_id] = api_data
  return apis
838
839
def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  # First page: cap the requested page size by both CHUNK_SIZE and the
  # caller's limit, if any.
  first_url = base_url
  if limit:
    sep = '&' if '?' in first_url else '?'
    first_url += '%slimit=%d' % (sep, min(CHUNK_SIZE, limit))
  data = net.url_read_json(first_url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % first_url)
  first_cursor = data.pop('cursor', None)
  first_count = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, first_count)
  if not first_cursor or not first_count:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    # Follow the cursor until it runs out or until 'limit' items were fetched.
    cursor = first_cursor
    fetched = first_count
    while cursor and (not limit or fetched < limit):
      sep = '&' if '?' in base_url else '?'
      page_url = base_url + '%scursor=%s' % (sep, urllib.quote(cursor))
      if limit:
        page_url += '&limit=%d' % min(CHUNK_SIZE, limit - fetched)
      page = net.url_read_json(page_url)
      if page is None:
        raise Failure('Failed to access %s' % page_url)
      cursor = page.get('cursor')
      page_items = page.get('items')
      fetched += len(page_items or [])
      logging.info(
          'get_yielder(%s) yielding %d items', base_url,
          len(page_items or []))
      yield page_items

  return data, yielder
879
880
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500881### Commands.
882
883
def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.
887
888
def add_filter_options(parser):
  """Adds the 'Bot selection' option group to |parser|."""
  group = optparse.OptionGroup(parser, 'Bot selection')
  group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices ')
  # Keep a reference on the parser so callers can tweak the group later.
  parser.filter_group = group
  parser.add_option_group(group)
900
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400901
def _validate_filter_option(parser, key, value, expiration, argname):
  """Sanity checks one dimension triple coming from the command line.

  Reports the first problem found through parser.error(), which exits.
  """
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is None:
    # Plain dimensions carry no expiration; nothing left to validate.
    return
  try:
    expiration = int(expiration)
  except ValueError:
    parser.error('%s expiration is not an integer' % argname)
  if expiration <= 0:
    parser.error('%s expiration should be positive' % argname)
  if expiration % 60 != 0:
    parser.error('%s expiration is not divisible by 60' % argname)
924
925
def process_filter_options(parser, options):
  """Validates the bot selection options and normalizes them in-place."""
  # Plain dimensions never expire, hence the None expiration.
  for dim_key, dim_value in options.dimensions:
    _validate_filter_option(parser, dim_key, dim_value, None, 'dimension')
  for dim_key, dim_value, expiration in options.optional_dimensions:
    _validate_filter_option(
        parser, dim_key, dim_value, expiration, 'optional-dimension')
  options.dimensions.sort()
932
933
def add_sharding_options(parser):
  """Adds the 'Sharding options' group to |parser|."""
  group = optparse.OptionGroup(parser, 'Sharding options')
  group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  # Keep a reference on the parser so callers can tweak the group later.
  parser.sharding_group = group
  parser.add_option_group(group)
940
941
def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming.

  The options are organized in two groups mirroring the server-side task
  structure: 'TaskSlice properties' (what runs, where, and for how long) and
  'TaskRequest details' (metadata about the request itself).
  """
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameter and return this result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by'
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided, if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  # Hidden alias writing into the same 'expiration' destination; kept out of
  # --help output on purpose (SUPPRESS_HELP).
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001041
1042
def process_trigger_options(parser, options, args):
  """Processes trigger options and does preparatory steps.

  Validates the command line options (any problem is reported through
  parser.error(), which exits), then builds the request: one TaskSlice per
  distinct optional-dimension expiration plus the final default slice.

  Returns:
    NewTaskRequest instance.
  """
  process_filter_options(parser, options)
  options.env = dict(options.env)
  # Strip the '--' separator optparse leaves in front of the raw command.
  if args and args[0] == '--':
    args = args[1:]

  if not options.dimensions:
    parser.error('Please at least specify one --dimension')
  if not any(k == 'pool' for k, _v in options.dimensions):
    parser.error('You must specify --dimension pool <value>')
  if not all(len(t.split(':', 1)) == 2 for t in options.tags):
    parser.error('--tags must be in the format key:value')
  if options.raw_cmd and not args:
    parser.error(
        'Arguments with --raw-cmd should be passed after -- as command '
        'delimiter.')
  if options.isolate_server and not options.namespace:
    parser.error(
        '--namespace must be a valid value when --isolate-server is used')
  if not options.isolated and not options.raw_cmd:
    parser.error('Specify at least one of --raw-cmd or --isolated or both')

  # Isolated
  # --isolated is required only if --raw-cmd wasn't provided.
  # TODO(maruel): --isolate-server may be optional as Swarming may have its own
  # preferred server.
  isolateserver.process_isolate_server_options(
      parser, options, False, not options.raw_cmd)
  inputs_ref = None
  if options.isolate_server:
    inputs_ref = FilesRef(
        isolated=options.isolated,
        isolatedserver=options.isolate_server,
        namespace=options.namespace)

  # Command
  # With --raw-cmd the arguments are the command itself; otherwise they are
  # extra arguments appended to the isolated task's command.
  command = None
  extra_args = None
  if options.raw_cmd:
    command = args
    if options.relative_cwd:
      a = os.path.normpath(os.path.abspath(options.relative_cwd))
      if not a.startswith(os.getcwd()):
        parser.error(
            '--relative-cwd must not try to escape the working directory')
  else:
    if options.relative_cwd:
      parser.error('--relative-cwd requires --raw-cmd')
    extra_args = args

  # CIPD
  # Each --cipd-package is 'path:package_name:version'; only the first two
  # colons split, so the version may itself contain colons.
  cipd_packages = []
  for p in options.cipd_package:
    split = p.split(':', 2)
    if len(split) != 3:
      parser.error('CIPD packages must take the form: path:package:version')
    cipd_packages.append(CipdPackage(
        package_name=split[1],
        path=split[0],
        version=split[2]))
  cipd_input = None
  if cipd_packages:
    cipd_input = CipdInput(
        client_package=None,
        packages=cipd_packages,
        server=None)

  # Secrets
  # Uses Python 2's 'base64' string codec to encode the file content.
  secret_bytes = None
  if options.secret_bytes_path:
    with open(options.secret_bytes_path, 'rb') as f:
      secret_bytes = f.read().encode('base64')

  # Named caches
  caches = [
    {u'name': unicode(i[0]), u'path': unicode(i[1])}
    for i in options.named_cache
  ]

  # Multiple --env-prefix entries for the same variable accumulate paths.
  env_prefixes = {}
  for k, v in options.env_prefix:
    env_prefixes.setdefault(k, []).append(v)

  # Get dimensions into the key/value format we can manipulate later.
  orig_dims = [
      {'key': key, 'value': value} for key, value in options.dimensions]
  orig_dims.sort(key=lambda x: (x['key'], x['value']))

  # Construct base properties that we will use for all the slices, adding in
  # optional dimensions for the fallback slices.
  properties = TaskProperties(
      caches=caches,
      cipd_input=cipd_input,
      command=command,
      relative_cwd=options.relative_cwd,
      dimensions=orig_dims,
      env=options.env,
      env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.iteritems()],
      execution_timeout_secs=options.hard_timeout,
      extra_args=extra_args,
      grace_period_secs=30,
      idempotent=options.idempotent,
      inputs_ref=inputs_ref,
      io_timeout_secs=options.io_timeout,
      outputs=options.output,
      secret_bytes=secret_bytes)

  slices = []

  # Group the optional dimensions by expiration.
  dims_by_exp = {}
  for key, value, exp_secs in options.optional_dimensions:
    dims_by_exp.setdefault(int(exp_secs), []).append(
        {'key': key, 'value': value})

  # Create the optional slices with expiration deltas, we fix up the properties
  # below. Each slice's expiration_secs is relative to the previous slice.
  last_exp = 0
  for expiration_secs in sorted(dims_by_exp):
    t = TaskSlice(
        expiration_secs=expiration_secs - last_exp,
        properties=properties,
        wait_for_capacity=False)
    slices.append(t)
    last_exp = expiration_secs

  # Add back in the default slice (the last one). Its expiration is the
  # remaining time, floored at one minute.
  exp = max(int(options.expiration) - last_exp, 60)
  base_task_slice = TaskSlice(
      expiration_secs=exp,
      properties=properties,
      wait_for_capacity=options.wait_for_capacity)
  slices.append(base_task_slice)

  # Add optional dimensions to the task slices, replacing a dimension that
  # has the same key if it is a dimension where repeating isn't valid (otherwise
  # we append it). Currently the only dimension we can repeat is "caches"; the
  # rest (os, cpu, etc) shouldn't be repeated.
  extra_dims = []
  for i, (_, kvs) in enumerate(sorted(dims_by_exp.iteritems(), reverse=True)):
    dims = list(orig_dims)
    # Replace or append the key/value pairs for this expiration in extra_dims;
    # we keep extra_dims around because we are iterating backwards and filling
    # in slices with shorter expirations. Dimensions expire as time goes on so
    # the slices that expire earlier will generally have more dimensions.
    for kv in kvs:
      if kv['key'] == 'caches':
        extra_dims.append(kv)
      else:
        extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
    # Then, add all the optional dimensions to the original dimension set, again
    # replacing if needed.
    for kv in extra_dims:
      if kv['key'] == 'caches':
        dims.append(kv)
      else:
        dims = [x for x in dims if x['key'] != kv['key']] + [kv]
    dims.sort(key=lambda x: (x['key'], x['value']))
    slice_properties = properties._replace(dimensions=dims)
    # slices[-1] is the base slice; -2 - i walks the optional slices from the
    # longest expiration (closest to the base slice) backwards.
    slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)

  return NewTaskRequest(
      name=default_task_name(options),
      parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
      priority=options.priority,
      task_slices=slices,
      service_account=options.service_account,
      tags=options.tags,
      user=options.user,
      pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001218
1219
class TaskOutputStdoutOption(optparse.Option):
  """Where to output each task's console output (stderr/stdout).

  The output will be:
  none - not be downloaded.
  json - stored in summary.json file *only*.
  console - shown on stdout *only*.
  all - stored in summary.json and shown on stdout.
  """

  choices = ['all', 'json', 'console', 'none']

  def __init__(self, *args, **kw):
    optparse.Option.__init__(
        self,
        *args,
        choices=self.choices,
        default=['console', 'json'],
        # The class docstring doubles as the --help text; collapse whitespace
        # so it renders as one paragraph. Raw string: '\s' in a plain literal
        # is an invalid escape sequence (a warning on Python 3).
        help=re.sub(r'\s\s*', ' ', self.__doc__),
        **kw)

  def convert_value(self, opt, value):
    """Maps a raw choice string to the list of destinations it enables."""
    if value not in self.choices:
      raise optparse.OptionValueError("%s must be one of %s not %r" % (
          self.get_opt_string(), self.choices, value))
    stdout_to = []
    if value == 'all':
      stdout_to = ['console', 'json']
    elif value != 'none':
      stdout_to = [value]
    return stdout_to
1251
1252
def add_collect_options(parser):
  """Adds the command line options shared by commands that wait for results.

  Registers --timeout, --decorate and --print-status-updates on the parser's
  existing groups, and creates a new 'Task output' group holding the options
  that control where and how task output is stored.
  """
  parser.server_group.add_option(
      '-t', '--timeout', type='float', default=0.,
      help='Timeout to wait for result, set to -1 for no timeout and get '
           'current state; defaults to waiting until the task completes')
  parser.group_logging.add_option(
      '--decorate', action='store_true', help='Decorate output')
  parser.group_logging.add_option(
      '--print-status-updates', action='store_true',
      help='Print periodic status updates')
  # Group all output-related options together in --help output.
  parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
  parser.task_output_group.add_option(
      '--task-summary-json',
      metavar='FILE',
      help='Dump a summary of task results to this file as json. It contains '
           'only shards statuses as know to server directly. Any output files '
           'emitted by the task can be collected by using --task-output-dir')
  parser.task_output_group.add_option(
      '--task-output-dir',
      metavar='DIR',
      help='Directory to put task results into. When the task finishes, this '
           'directory contains per-shard directory with output files produced '
           'by shards: <task-output-dir>/<zero-based-shard-index>/.')
  # Custom Option subclass: converts 'all'/'json'/'console'/'none' into a list
  # of output sinks; default is ['console', 'json'].
  parser.task_output_group.add_option(TaskOutputStdoutOption(
      '--task-output-stdout'))
  parser.task_output_group.add_option(
      '--filepath-filter',
      help='This is regexp filter used to specify downloaded filepath when '
           'collecting isolated output.')
  parser.task_output_group.add_option(
      '--perf', action='store_true', default=False,
      help='Includes performance statistics')
  parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001286
1287
def process_collect_options(parser, options):
  """Validates the options registered by add_collect_options().

  The only negative timeout accepted is the sentinel -1, which means "do not
  wait, just report the current state"; every other negative value is an
  error.
  """
  timeout = options.timeout
  if timeout < 0 and timeout != -1:
    parser.error('Invalid --timeout value')
1292
1293
@subcommand.usage('bots...')
def CMDbot_delete(parser, args):
  """Forcibly deletes bots from the Swarming server."""
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Do not prompt for confirmation')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify bots to delete')

  bots = sorted(args)
  if not options.force:
    # Interactive confirmation; anything but 'y'/'Y' aborts without deleting.
    print('Delete the following bots?')
    for bot in bots:
      print(' %s' % bot)
    if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
      print('Goodbye.')
      return 1

  result = 0
  for bot in bots:
    url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
    # An empty POST body is required; None means the request failed
    # (presumably because the bot was already deleted). Keep going so every
    # requested bot is attempted, but remember the failure for the exit code.
    if net.url_read_json(url, data={}, method='POST') is None:
      print('Deleting %s failed. Probably already gone' % bot)
      result = 1
  return result
1320
1321
def CMDbots(parser, args):
  """Returns information about the bots connected to the Swarming server."""
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Filter out bots alive, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Keep both dead and alive bots')
  parser.filter_group.add_option(
      '--busy', action='store_true', help='Keep only busy bots')
  parser.filter_group.add_option(
      '--idle', action='store_true', help='Keep only idle bots')
  parser.filter_group.add_option(
      '--mp', action='store_true',
      help='Keep only Machine Provider managed bots')
  parser.filter_group.add_option(
      '--non-mp', action='store_true',
      help='Keep only non Machine Provider managed bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)
  process_filter_options(parser, options)

  # The three filter pairs are mutually exclusive.
  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead or --dead-only')
  if options.busy and options.idle:
    parser.error('Use only one of --busy or --idle')
  if options.mp and options.non_mp:
    parser.error('Use only one of --mp or --non-mp')

  url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
  # Build the query string. Each tri-state filter sends TRUE, FALSE or NONE;
  # NONE presumably tells the server not to filter on that field - verify
  # against the server API.
  values = []
  if options.dead_only:
    values.append(('is_dead', 'TRUE'))
  elif options.keep_dead:
    values.append(('is_dead', 'NONE'))
  else:
    # Default: only list bots that are alive.
    values.append(('is_dead', 'FALSE'))

  if options.busy:
    values.append(('is_busy', 'TRUE'))
  elif options.idle:
    values.append(('is_busy', 'FALSE'))
  else:
    values.append(('is_busy', 'NONE'))

  if options.mp:
    values.append(('is_mp', 'TRUE'))
  elif options.non_mp:
    values.append(('is_mp', 'FALSE'))
  else:
    values.append(('is_mp', 'NONE'))

  for key, value in options.dimensions:
    values.append(('dimensions', '%s:%s' % (key, value)))
  url += urllib.urlencode(values)
  try:
    # get_yielder pages through the results; the first page is in |data|,
    # subsequent pages come from the yielder generator.
    data, yielder = get_yielder(url, 0)
    bots = data.get('items') or []
    for items in yielder():
      if items:
        bots.extend(items)
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  # Natural sort so e.g. bot2 comes before bot10.
  for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
    print bot['bot_id']
    if not options.bare:
      # Dimensions arrive as a list of {'key': ..., 'value': ...} dicts;
      # flatten to a plain dict for display.
      dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
      print ' %s' % json.dumps(dimensions, sort_keys=True)
    if bot.get('task_id'):
      print ' task: %s' % bot['task_id']
  return 0
1397
1398
@subcommand.usage('task_id')
def CMDcancel(parser, args):
  """Cancels one or multiple tasks given by their task ids.

  Returns 1 as soon as one cancellation request fails, 0 if all succeed.
  """
  parser.add_option(
      '-k', '--kill-running', action='store_true', default=False,
      help='Kill the task even if it was running')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify the task to cancel')
  data = {'kill_running': options.kill_running}
  for task_id in args:
    url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
    resp = net.url_read_json(url, data=data, method='POST')
    if resp is None:
      # Bug fix: the message used to say 'Deleting %s failed', a copy-paste
      # from CMDbot_delete; this command cancels tasks, it deletes nothing.
      print('Canceling %s failed. Probably already gone' % task_id)
      return 1
    logging.info('%s', resp)
  return 0
1417
1418
@subcommand.usage('--json file | task_id...')
def CMDcollect(parser, args):
  """Retrieves results of one or multiple Swarming task by its ID.

  The result can be in multiple part if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  parser.add_option(
      '-j', '--json',
      help='Load the task ids from .json as saved by trigger --dump-json')
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  # Task ids come either from the command line or from a --json file written
  # by 'trigger --dump-json', but not both.
  if not args and not options.json:
    parser.error('Must specify at least one task id or --json.')
  if args and options.json:
    parser.error('Only use one of task id or --json.')

  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    try:
      with fs.open(options.json, 'rb') as f:
        data = json.load(f)
    except (IOError, ValueError):
      parser.error('Failed to open %s' % options.json)
    try:
      # Order the task ids by shard index so collect() reports shard 0 first.
      tasks = sorted(
          data['tasks'].itervalues(), key=lambda x: x['shard_index'])
      args = [t['task_id'] for t in tasks]
    except (KeyError, TypeError):
      parser.error('Failed to process %s' % options.json)
    if not options.timeout:
      # Take in account all the task slices.
      # No explicit --timeout: derive the worst-case completion time from the
      # request itself. Each slice may start as late as the sum of the
      # previous slices' expirations (|offset|), then run for its execution
      # timeout; keep the maximum over all slices.
      offset = 0
      for s in data['request']['task_slices']:
        m = (offset + s['properties']['execution_timeout_secs'] +
             s['expiration_secs'])
        if m > options.timeout:
          options.timeout = m
        offset += s['expiration_secs']
      # Small grace period on top of the theoretical maximum.
      options.timeout += 10.
  else:
    # Task ids are lowercase hex strings; reject anything else early.
    valid = frozenset('0123456789abcdef')
    if any(not valid.issuperset(task_id) for task_id in args):
      parser.error('Task ids are 0-9a-f.')

  try:
    return collect(
        options.swarming,
        args,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
1480
1481
@subcommand.usage('[method name]')
def CMDpost(parser, args):
  """Sends a JSON RPC POST to one API endpoint and prints out the raw result.

  Input data must be sent to stdin, result is printed to stdout.

  If HTTP response code >= 400, returns non-zero.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify only API name')
  # The single positional argument is the API method path; the request body
  # is read verbatim from stdin.
  endpoint = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  payload = sys.stdin.read()
  try:
    response = net.url_read(endpoint, data=payload, method='POST')
  except net.TimeoutError:
    sys.stderr.write('Timeout!\n')
    return 1
  if response:
    sys.stdout.write(response)
    return 0
  sys.stderr.write('No response!\n')
  return 1
1505
1506
@subcommand.usage('[method name]')
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Raw task request and results:
      swarming.py query -S server-url.com task/123456/request
      swarming.py query -S server-url.com task/123456/result

    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'bot1':
      swarming.py query -S server-url.com --limit 10 bot/bot1/tasks

    Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
  """
  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
           'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')
  base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  try:
    # get_yielder handles cursor-based paging up to options.limit items; the
    # first page is returned directly, further pages via the generator.
    data, yielder = get_yielder(base_url, options.limit)
    for items in yielder():
      if items:
        data['items'].extend(items)
      if options.progress:
        # One dot per fetched page, on stderr so stdout stays valid JSON.
        sys.stderr.write('.')
        sys.stderr.flush()
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  if options.progress:
    sys.stderr.write('\n')
    sys.stderr.flush()
  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    tools.write_json(options.json, data, True)
  else:
    try:
      tools.write_json(sys.stdout, data, False)
      sys.stdout.write('\n')
    except IOError:
      # Ignore a broken pipe, e.g. output piped into 'head'.
      pass
  return 0
1567
1568
def CMDquery_list(parser, args):
  """Returns list of all the Swarming APIs that can be used with command
  'query'.
  """
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  options, args = parser.parse_args(args)
  if args:
    parser.error('No argument allowed.')

  try:
    apis = endpoints_api_discovery_apis(options.swarming)
  except APIError as e:
    parser.error(str(e))
  if options.json:
    # Machine-readable output: dump the raw discovery document.
    options.json = unicode(os.path.abspath(options.json))
    with fs.open(options.json, 'wb') as f:
      json.dump(apis, f)
  else:
    # Human-readable output: list GET methods per API with a link into the
    # APIs explorer.
    help_url = (
      'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
      options.swarming)
    for i, (api_id, api) in enumerate(sorted(apis.iteritems())):
      if i:
        print('')
      print api_id
      print ' ' + api['description'].strip()
      if 'resources' in api:
        # Old.
        # Older discovery format: methods are nested under 'resources'.
        for j, (resource_name, resource) in enumerate(
            sorted(api['resources'].iteritems())):
          if j:
            print('')
          for method_name, method in sorted(resource['methods'].iteritems()):
            # Only list the GET ones.
            if method['httpMethod'] != 'GET':
              continue
            print '- %s.%s: %s' % (
                resource_name, method_name, method['path'])
            print('\n'.join(
                ' ' + l for l in textwrap.wrap(
                    method.get('description', 'No description'), 78)))
            print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
      else:
        # New.
        # Newer discovery format: methods are listed directly on the API.
        for method_name, method in sorted(api['methods'].iteritems()):
          # Only list the GET ones.
          if method['httpMethod'] != 'GET':
            continue
          print '- %s: %s' % (method['id'], method['path'])
          print('\n'.join(
              ' ' + l for l in textwrap.wrap(method['description'], 78)))
          print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
  return 0
1623
1624
@subcommand.usage('(hash|isolated) [-- extra_args]')
def CMDrun(parser, args):
  """Triggers a task and wait for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (task_request.name, args[0], e.args[0]))
    return 1
  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  print('Triggered task: %s' % task_request.name)
  # Collect shard results in shard-index order.
  task_ids = [
    t['task_id']
    for t in sorted(tasks.itervalues(), key=lambda x: x['shard_index'])
  ]
  if not options.timeout:
    # No explicit --timeout: derive the worst-case completion time from the
    # request's task slices, same logic as in CMDcollect. Each slice may start
    # as late as the sum of the previous slices' expirations (|offset|), then
    # run for its execution timeout; keep the maximum, plus a grace period.
    offset = 0
    for s in task_request.task_slices:
      m = (offset + s.properties.execution_timeout_secs +
           s.expiration_secs)
      if m > options.timeout:
        options.timeout = m
      offset += s.expiration_secs
    options.timeout += 10.
  try:
    return collect(
        options.swarming,
        task_ids,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001677
1678
@subcommand.usage('task_id -- <extra_args>')
def CMDreproduce(parser, args):
  """Runs a task locally that was triggered on the server.

  This running locally the same commands that have been run on the bot. The data
  downloaded will be in a subdirectory named 'work' of the current working
  directory.

  You can pass further additional arguments to the target command by passing
  them after --.
  """
  parser.add_option(
      '--output', metavar='DIR', default='out',
      help='Directory that will have results stored into')
  parser.add_option(
      '--work', metavar='DIR', default='work',
      help='Directory to map the task input files into')
  parser.add_option(
      '--cache', metavar='DIR', default='cache',
      help='Directory that contains the input cache')
  parser.add_option(
      '--leak', action='store_true',
      help='Do not delete the working directory after execution')
  options, args = parser.parse_args(args)
  extra_args = []
  if not args:
    parser.error('Must specify exactly one task id.')
  # Everything after the task id (optionally separated by '--') is appended to
  # the task's command line.
  if len(args) > 1:
    if args[1] == '--':
      if len(args) > 2:
        extra_args = args[2:]
    else:
      extra_args = args[1:]

  # Fetch the original task request from the server.
  url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
  request = net.url_read_json(url)
  if not request:
    print >> sys.stderr, 'Failed to retrieve request data for the task'
    return 1

  # Refuse to reuse an existing work directory to avoid mixing task inputs.
  workdir = unicode(os.path.abspath(options.work))
  if fs.isdir(workdir):
    parser.error('Please delete the directory %r first' % options.work)
  fs.mkdir(workdir)
  # CIPD client cache, kept across invocations.
  cachedir = unicode(os.path.abspath('cipd_cache'))
  if not fs.exists(cachedir):
    fs.mkdir(cachedir)

  properties = request['properties']
  # Reproduce the bot's environment on top of the local one.
  env = os.environ.copy()
  env['SWARMING_BOT_ID'] = 'reproduce'
  env['SWARMING_TASK_ID'] = 'reproduce'
  if properties.get('env'):
    logging.info('env: %r', properties['env'])
    for i in properties['env']:
      key = i['key']
      # An empty value means the variable must be removed.
      if not i['value']:
        env.pop(key, None)
      else:
        env[key] = i['value']

  if properties.get('env_prefixes'):
    # Prepend task-relative paths to the named environment variables (e.g.
    # PATH), keeping any pre-existing value at the end.
    env_prefixes = properties['env_prefixes']
    logging.info('env_prefixes: %r', env_prefixes)
    for i in env_prefixes:
      key = i['key']
      paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
      cur = env.get(key)
      if cur:
        paths.append(cur)
      env[key] = os.path.pathsep.join(paths)

  command = []
  if (properties.get('inputs_ref') or {}).get('isolated'):
    # Create the tree.
    server_ref = isolate_storage.ServerRef(
        properties['inputs_ref']['isolatedserver'],
        properties['inputs_ref']['namespace'])
    with isolateserver.get_storage(server_ref) as storage:
      # Do not use MemoryContentAddressedCache here, as on 32-bits python,
      # inputs larger than ~1GiB will not fit in memory. This is effectively a
      # leak.
      policies = local_caching.CachePolicies(0, 0, 0, 0)
      cache = local_caching.DiskContentAddressedCache(
          unicode(os.path.abspath(options.cache)), policies, False)
      bundle = isolateserver.fetch_isolated(
          properties['inputs_ref']['isolated'], storage, cache, workdir, False)
      command = bundle.command
      if bundle.relative_cwd:
        # The task runs from a subdirectory of the mapped tree.
        workdir = os.path.join(workdir, bundle.relative_cwd)
      command.extend(properties.get('extra_args') or [])

  if properties.get('command'):
    command.extend(properties['command'])

  # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
  command = tools.fix_python_cmd(command, env)
  if not options.output:
    # process_command() rewrites output-related magic values; if it changed
    # anything, the task produces outputs and an output directory is required.
    new_command = run_isolated.process_command(command, 'invalid', None)
    if new_command != command:
      parser.error('The task has outputs, you must use --output-dir')
  else:
    # Make the path absolute, as the process will run from a subdirectory.
    options.output = os.path.abspath(options.output)
    new_command = run_isolated.process_command(
        command, options.output, None)
    if not os.path.isdir(options.output):
      os.makedirs(options.output)
  command = new_command
  file_path.ensure_command_has_abs_path(command, workdir)

  if properties.get('cipd_input'):
    # Install the CIPD packages the task requested, grouped by install path.
    ci = properties['cipd_input']
    cp = ci['client_package']
    client_manager = cipd.get_client(
        ci['server'], cp['package_name'], cp['version'], cachedir)

    with client_manager as client:
      by_path = collections.defaultdict(list)
      for pkg in ci['packages']:
        path = pkg['path']
        # cipd deals with 'root' as ''
        if path == '.':
          path = ''
        by_path[path].append((pkg['package_name'], pkg['version']))
      client.ensure(workdir, by_path, cache_dir=cachedir)

  try:
    # Run the task's command locally, returning its exit code.
    return subprocess42.call(command + extra_args, env=env, cwd=workdir)
  except OSError as e:
    print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
    print >> sys.stderr, str(e)
    return 1
  finally:
    # Do not delete options.cache.
    if not options.leak:
      file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001816
1817
@subcommand.usage('bot_id')
def CMDterminate(parser, args):
  """Tells a bot to gracefully shut itself down as soon as it can.

  This is done by completing whatever current task there is then exiting the bot
  process.
  """
  parser.add_option(
      '--wait', action='store_true', help='Wait for the bot to terminate')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Please provide the bot id')
  url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
  request = net.url_read_json(url, data={})
  if not request:
    print >> sys.stderr, 'Failed to ask for termination'
    return 1
  if options.wait:
    # Termination is implemented as a special task; wait for it like any
    # other task. Positional arguments mirror the collect() call in
    # CMDcollect: timeout=0., decorate=False, print_status_updates=False,
    # task_summary_json=None, task_output_dir=None, task_output_stdout=[],
    # perf=False, filepath_filter=None.
    return collect(
        options.swarming,
        [request['task_id']],
        0.,
        False,
        False,
        None,
        None,
        [],
        False,
        None)
  else:
    print request['task_id']
  return 0
1850
1851
@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
    if not tasks:
      # Nothing was triggered; signal failure to the caller.
      return 1
    print('Triggered task: %s' % task_request.name)
    # Present shards in shard-index order for stable, readable output.
    by_shard = sorted(tasks.itervalues(), key=lambda t: t['shard_index'])
    if options.dump_json:
      payload = {
        'base_task_name': task_request.name,
        'tasks': tasks,
        'request': task_request_to_raw_request(task_request),
      }
      tools.write_json(unicode(options.dump_json), payload, True)
      print('To collect results, use:')
      print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
          (options.swarming, options.dump_json))
    else:
      print('To collect results, use:')
      print(' tools/swarming_client/swarming.py collect -S %s %s' %
          (options.swarming, ' '.join(t['task_id'] for t in by_shard)))
    print('Or visit:')
    for shard in by_shard:
      print(' ' + shard['view_url'])
    return 0
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001895
1896
class OptionParserSwarming(logging_utils.OptionParserWithLogging):
  """Option parser shared by all swarming.py subcommands.

  Adds the Swarming server option group and authentication options on top of
  the standard logging options.
  """

  def __init__(self, **kwargs):
    logging_utils.OptionParserWithLogging.__init__(
        self, prog='swarming.py', **kwargs)
    self.server_group = optparse.OptionGroup(self, 'Server')
    self.server_group.add_option(
        '-S', '--swarming',
        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
        help='Swarming server to use')
    self.add_option_group(self.server_group)
    auth.add_auth_options(self)

  def parse_args(self, *args, **kwargs):
    opts, positional = logging_utils.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    auth.process_auth_options(self, opts)
    identity = self._process_swarming(opts)
    # Default --user (when the subcommand defines it) to the authenticated
    # identity reported by the server.
    if hasattr(opts, 'user') and not opts.user:
      opts.user = identity
    return opts, positional

  def _process_swarming(self, options):
    """Processes the --swarming option and aborts if not specified.

    Returns the identity as determined by the server.
    """
    if not options.swarming:
      self.error('--swarming is required.')
    try:
      options.swarming = net.fix_url(options.swarming)
    except ValueError as e:
      self.error('--swarming %s' % e)
    # From here on, crashes get reported to the (now known) server.
    on_error.report_on_exception_exit(options.swarming)
    try:
      return auth.ensure_logged_in(options.swarming)
    except ValueError as e:
      self.error(str(e))
maruel@chromium.org0437a732013-08-27 16:05:52 +00001935
1936
def main(args):
  # Dispatch to the CMD* function named by the first argument.
  return subcommand.CommandDispatcher(__name__).execute(
      OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001940
1941
if __name__ == '__main__':
  # NOTE(review): presumably suppresses OS crash dialogs so failing child
  # processes do not hang automated runs -- confirm against subprocess42.
  subprocess42.inhibit_os_error_reporting()
  # Normalize stdout/stderr encoding across platforms (see depot_tools
  # fix_encoding) before any output is produced.
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  # Enable ANSI color escape handling (notably on Windows consoles).
  colorama.init()
  sys.exit(main(sys.argv[1:]))