#!/usr/bin/env python
# Copyright 2013 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Client tool to trigger tasks or retrieve results from a Swarming server."""

from __future__ import print_function

__version__ = '1.0'

import collections
import datetime
import json
import logging
import optparse
import os
import re
import sys
import textwrap
import threading
import time
import uuid

from utils import tools
tools.force_local_third_party()

# third_party/
import colorama
from chromium import natsort
from depot_tools import fix_encoding
from depot_tools import subcommand
import six
from six.moves import urllib

# pylint: disable=ungrouped-imports
import auth
import cipd
import isolateserver
import isolate_storage
import local_caching
import run_isolated
from utils import file_path
from utils import fs
from utils import logging_utils
from utils import net
from utils import on_error
from utils import subprocess42
from utils import threading_utils


class Failure(Exception):
  """Generic failure."""
  pass


def default_task_name(options):
  """Returns a default task name if not specified."""
  if not options.task_name:
    task_name = u'%s/%s' % (
        options.user,
        '_'.join('%s=%s' % (k, v) for k, v in options.dimensions))
    if options.isolated:
      task_name += u'/' + options.isolated
    return task_name
  return options.task_name
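# Illustrative example (not part of the original module, hypothetical values):
# with options.user='joe', options.dimensions=[('os', 'Linux'), ('pool',
# 'default')] and options.isolated='deadbeef', the generated default name
# would look like u'joe/os=Linux_pool=default/deadbeef'.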


### Triggering.


# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage',
    [
      'package_name',
      'path',
      'version',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput',
    [
      'client_package',
      'packages',
      'server',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef',
    [
      'isolated',
      'isolatedserver',
      'namespace',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
StringListPair = collections.namedtuple(
  'StringListPair', [
    'key',
    'value', # repeated string
  ]
)

# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple(
    'Containment',
    [
      'lower_priority',
      'containment_type',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    [
      'caches',
      'cipd_input',
      'command',
      'containment',
      'relative_cwd',
      'dimensions',
      'env',
      'env_prefixes',
      'execution_timeout_secs',
      'extra_args',
      'grace_period_secs',
      'idempotent',
      'inputs_ref',
      'io_timeout_secs',
      'outputs',
      'secret_bytes',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
      'expiration_secs',
      'properties',
      'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
      'name',
      'parent_task_id',
      'priority',
      'task_slices',
      'service_account',
      'tags',
      'user',
      'pool_task_template',
    ])
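# How these tuples nest, roughly (a sketch based on the field names above, not
# authoritative server documentation): a NewTaskRequest holds a list of
# TaskSlice entries in 'task_slices'; each TaskSlice holds a TaskProperties in
# 'properties'; TaskProperties references the isolated inputs via a FilesRef
# in 'inputs_ref', optional CIPD packages via a CipdInput in 'cipd_input', and
# an optional Containment in 'containment'.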


def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict."""
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  return value
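# Illustrative example (not part of the original file):
#   >>> P = collections.namedtuple('P', ['key', 'value'])
#   >>> namedtuple_to_dict([P(key='os', value=['Linux'])])
#   [{'key': 'os', 'value': ['Linux']}]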


def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for a new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # versions of the server that don't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    task_slice['properties']['env'] = [
      {'key': k, 'value': v}
      for k, v in task_slice['properties']['env'].items()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  out['request_uuid'] = str(uuid.uuid4())
  return out
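# Illustrative sketch of the env conversion above (hypothetical values): an
# env dict like {'FOO': '1', 'BAR': '2'} in a task slice becomes the sorted
# list [{'key': 'BAR', 'value': '2'}, {'key': 'FOO', 'value': '1'}], which is
# the key/value-pair shape sent to the RPC endpoint.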


def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result


def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables."""
  if shards > 1:
    assert not any(i['key'] == 'GTEST_SHARD_INDEX' for i in env), env
    assert not any(i['key'] == 'GTEST_TOTAL_SHARDS' for i in env), env
    env = env[:]
    env.append({'key': 'GTEST_SHARD_INDEX', 'value': str(index)})
    env.append({'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)})
  return env
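# Illustrative example (hypothetical values): setup_googletest([], 4, 2)
# returns
#   [{'key': 'GTEST_SHARD_INDEX', 'value': '2'},
#    {'key': 'GTEST_TOTAL_SHARDS', 'value': '4'}].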


def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
        of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple task slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we cannot guarantee that. If we
      # see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print('Priority was reset to %s' % task['request']['priority'],
            file=sys.stderr)
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print('Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests)), file=sys.stderr)
      for task_dict in tasks.values():
        abort_task(swarming, task_dict['task_id'])
    return None

  return tasks


### Collection.


# How often to print status updates to stdout in 'collect'.
STATUS_UPDATE_INTERVAL = 5 * 60.


class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum, which
  is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  STATES_RUNNING = ('PENDING', 'RUNNING')

  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    if state not in cls._ENUMS:
      raise ValueError('Invalid state %s' % state)
    return cls._ENUMS[state]
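# Illustrative usage (not in the original file): TaskState.from_enum('KILLED')
# returns 0x80, and a task is still in flight while its 'state' string is in
# TaskState.STATES_RUNNING, i.e. 'PENDING' or 'RUNNING'.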
369
maruel@chromium.org0437a732013-08-27 16:05:52 +0000370
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700371class TaskOutputCollector(object):
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700372 """Assembles task execution summary (for --task-summary-json output).
373
374 Optionally fetches task outputs from isolate server to local disk (used when
375 --task-output-dir is passed).
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700376
377 This object is shared among multiple threads running 'retrieve_results'
378 function, in particular they call 'process_shard_result' method in parallel.
379 """
380
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000381 def __init__(self, task_output_dir, task_output_stdout, shard_count,
382 filter_cb):
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700383 """Initializes TaskOutputCollector, ensures |task_output_dir| exists.
384
385 Args:
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700386 task_output_dir: (optional) local directory to put fetched files to.
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700387 shard_count: expected number of task shards.
388 """
maruel12e30012015-10-09 11:55:35 -0700389 self.task_output_dir = (
Takuto Ikuta6e2ff962019-10-29 12:35:27 +0000390 six.text_type(os.path.abspath(task_output_dir))
maruel12e30012015-10-09 11:55:35 -0700391 if task_output_dir else task_output_dir)
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000392 self.task_output_stdout = task_output_stdout
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700393 self.shard_count = shard_count
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000394 self.filter_cb = filter_cb
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700395
396 self._lock = threading.Lock()
397 self._per_shard_results = {}
398 self._storage = None
399
nodire5028a92016-04-29 14:38:21 -0700400 if self.task_output_dir:
401 file_path.ensure_tree(self.task_output_dir)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700402
Vadim Shtayurab450c602014-05-12 19:23:25 -0700403 def process_shard_result(self, shard_index, result):
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700404 """Stores results of a single task shard, fetches output files if necessary.
405
Marc-Antoine Ruele4dcbb82014-10-01 09:30:56 -0400406 Modifies |result| in place.
407
maruel77f720b2015-09-15 12:35:22 -0700408 shard_index is 0-based.
409
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700410 Called concurrently from multiple threads.
411 """
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700412 # Sanity check index is in expected range.
Vadim Shtayurab450c602014-05-12 19:23:25 -0700413 assert isinstance(shard_index, int)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700414 if shard_index < 0 or shard_index >= self.shard_count:
415 logging.warning(
416 'Shard index %d is outside of expected range: [0; %d]',
417 shard_index, self.shard_count - 1)
418 return
419
maruel77f720b2015-09-15 12:35:22 -0700420 if result.get('outputs_ref'):
421 ref = result['outputs_ref']
422 result['outputs_ref']['view_url'] = '%s/browse?%s' % (
423 ref['isolatedserver'],
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +0000424 urllib.parse.urlencode([('namespace', ref['namespace']),
425 ('hash', ref['isolated'])]))
Kevin Graneyc2c3b9e2014-08-26 09:04:17 -0400426
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700427 # Store result dict of that shard, ignore results we've already seen.
428 with self._lock:
429 if shard_index in self._per_shard_results:
430 logging.warning('Ignoring duplicate shard index %d', shard_index)
431 return
432 self._per_shard_results[shard_index] = result
433
434 # Fetch output files if necessary.
maruel77f720b2015-09-15 12:35:22 -0700435 if self.task_output_dir and result.get('outputs_ref'):
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000436 server_ref = isolate_storage.ServerRef(
437 result['outputs_ref']['isolatedserver'],
438 result['outputs_ref']['namespace'])
439 storage = self._get_storage(server_ref)
Marc-Antoine Ruele4dcbb82014-10-01 09:30:56 -0400440 if storage:
441 # Output files are supposed to be small and they are not reused across
Marc-Antoine Ruel2666d9c2018-05-18 13:52:02 -0400442 # tasks. So use MemoryContentAddressedCache for them instead of on-disk
443 # cache. Make files writable, so that calling script can delete them.
Marc-Antoine Ruele4dcbb82014-10-01 09:30:56 -0400444 isolateserver.fetch_isolated(
maruel77f720b2015-09-15 12:35:22 -0700445 result['outputs_ref']['isolated'],
Marc-Antoine Ruele4dcbb82014-10-01 09:30:56 -0400446 storage,
Lei Leife202df2019-06-11 17:33:34 +0000447 local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
maruel4409e302016-07-19 14:25:51 -0700448 os.path.join(self.task_output_dir, str(shard_index)),
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000449 False, self.filter_cb)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700450
451 def finalize(self):
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700452 """Assembles and returns task summary JSON, shutdowns underlying Storage."""
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700453 with self._lock:
454 # Write an array of shard results with None for missing shards.
455 summary = {
Marc-Antoine Ruel0fdee222019-10-10 14:42:40 +0000456 'shards': [
457 self._per_shard_results.get(i) for i in range(self.shard_count)
458 ],
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700459 }
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000460
461 # Don't store stdout in the summary if not requested too.
462 if "json" not in self.task_output_stdout:
463 for shard_json in summary['shards']:
464 if not shard_json:
465 continue
466 if "output" in shard_json:
467 del shard_json["output"]
468 if "outputs" in shard_json:
469 del shard_json["outputs"]
470
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700471 # Write summary.json to task_output_dir as well.
472 if self.task_output_dir:
473 tools.write_json(
maruel12e30012015-10-09 11:55:35 -0700474 os.path.join(self.task_output_dir, u'summary.json'),
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700475 summary,
476 False)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700477 if self._storage:
478 self._storage.close()
479 self._storage = None
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700480 return summary
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700481
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000482 def _get_storage(self, server_ref):
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700483 """Returns isolateserver.Storage to use to fetch files."""
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700484 assert self.task_output_dir
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700485 with self._lock:
486 if not self._storage:
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000487 self._storage = isolateserver.get_storage(server_ref)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700488 else:
489 # Shards must all use exact same isolate server and namespace.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000490 if self._storage.server_ref.url != server_ref.url:
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700491 logging.error(
492 'Task shards are using multiple isolate servers: %s and %s',
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000493 self._storage.server_ref.url, server_ref.url)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700494 return None
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000495 if self._storage.server_ref.namespace != server_ref.namespace:
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700496 logging.error(
497 'Task shards are using multiple namespaces: %s and %s',
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +0000498 self._storage.server_ref.namespace, server_ref.namespace)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700499 return None
500 return self._storage


def now():
  """Exists so it can be mocked easily."""
  return time.time()


def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # When microseconds are 0, the '.123456' suffix is elided. This means the
  # serialized format is not consistent, which confuses the hell out of python.
  for fmt in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, fmt)
    except ValueError:
      pass
  raise ValueError('Failed to parse %s' % value)


def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result


def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's
  normally not worth limiting the number of threads. Mostly used for testing
  purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
                '%s '
                'Waiting for results from the following shards: %s' %
                (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()


def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
  """Returns wrapped output for swarming task shard."""
  if metadata.get('started_ts') and not metadata.get('deduped_from'):
    pending = '%.1fs' % (
        parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
        ).total_seconds()
  elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
        metadata.get('abandoned_ts')):
    pending = '%.1fs' % (
        parse_time(metadata['abandoned_ts']) -
        parse_time(metadata['created_ts'])
        ).total_seconds()
  else:
    pending = 'N/A'

  if metadata.get('duration') is not None:
    duration = '%.1fs' % metadata['duration']
  else:
    duration = 'N/A'

  if metadata.get('exit_code') is not None:
    # Integers are encoded as string to not lose precision.
    exit_code = '%s' % metadata['exit_code']
  else:
    exit_code = 'N/A'

  bot_id = metadata.get('bot_id') or 'N/A'

  url = '%s/user/task/%s' % (swarming, metadata['task_id'])
  tag_header = 'Shard %d %s' % (shard_index, url)
  tag_footer1 = 'End of shard %d' % (shard_index)
  if metadata.get('state') == 'CANCELED':
    tag_footer2 = ' Pending: %s CANCELED' % pending
  elif metadata.get('state') == 'EXPIRED':
    tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
  elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
        pending, duration, bot_id, exit_code, metadata['state'])
  else:
    tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
        pending, duration, bot_id, exit_code)

  tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
  dash_pad = '+-%s-+' % ('-' * tag_len)
  tag_header = '| %s |' % tag_header.ljust(tag_len)
  tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
  tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)

  if include_stdout:
    return '\n'.join([
        dash_pad,
        tag_header,
        dash_pad,
        (metadata.get('output') or '').rstrip(),
        dash_pad,
        tag_footer1,
        tag_footer2,
        dash_pad,
    ])
  return '\n'.join([
      dash_pad,
      tag_header,
      tag_footer2,
      dash_pad,
  ])


def collect(
    swarming, task_ids, timeout, decorate, print_status_updates,
    task_summary_json, task_output_dir, task_output_stdout,
    include_perf, filepath_filter):
  """Retrieves results of a Swarming task.

  Returns:
    process exit code that should be returned to the user.
  """

  filter_cb = None
  if filepath_filter:
    filter_cb = re.compile(filepath_filter).match

  # Collect summary JSON and output files (if task_output_dir is not None).
  output_collector = TaskOutputCollector(
      task_output_dir, task_output_stdout, len(task_ids), filter_cb)

  seen_shards = set()
  exit_code = None
  total_duration = 0
  try:
    for index, metadata in yield_results(
        swarming, task_ids, timeout, None, print_status_updates,
        output_collector, include_perf,
        (len(task_output_stdout) > 0),
        ):
      seen_shards.add(index)

      # Default to failure if there was no process that even started.
      shard_exit_code = metadata.get('exit_code')
      if shard_exit_code:
        # It's encoded as a string, so bool('0') is True.
        shard_exit_code = int(shard_exit_code)
      if shard_exit_code or exit_code is None:
        exit_code = shard_exit_code
      total_duration += metadata.get('duration', 0)

      if decorate:
        s = decorate_shard_output(
            swarming, index, metadata,
            "console" in task_output_stdout).encode(
                'utf-8', 'replace')
        print(s)
        if len(seen_shards) < len(task_ids):
          print('')
      else:
        print('%s: %s %s' % (
            metadata.get('bot_id', 'N/A'),
            metadata['task_id'],
            shard_exit_code))
        if "console" in task_output_stdout and metadata['output']:
          output = metadata['output'].rstrip()
          if output:
            print(''.join('  %s\n' % l for l in output.splitlines()))
  finally:
    summary = output_collector.finalize()
    if task_summary_json:
      tools.write_json(task_summary_json, summary, False)

  if decorate and total_duration:
    print('Total duration: %.1fs' % total_duration)

  if len(seen_shards) != len(task_ids):
    missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
    print('Results from some shards are missing: %s' %
        ', '.join(map(str, missing_shards)), file=sys.stderr)
    return 1

  return exit_code if exit_code is not None else 1


### API management.


class APIError(Exception):
  pass


def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to return metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  data = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if data is None:
    raise APIError('Failed to discover APIs on %s' % host)
  out = {}
  for api in data['items']:
    if api['id'] == 'discovery:v1':
      continue
    # URL is of the following form:
    # url = host + (
    #   '/_ah/api/discovery/v1/apis/%s/%s/rest' % (api['id'], api['version'])
    api_data = net.url_read_json(api['discoveryRestUrl'])
    if api_data is None:
      raise APIError('Failed to discover %s on %s' % (api['id'], host))
    out[api['id']] = api_data
  return out


def get_yielder(base_url, limit):
  """Returns the first query and a function that yields following items."""
  CHUNK_SIZE = 250

  url = base_url
  if limit:
    url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
  data = net.url_read_json(url)
  if data is None:
    # TODO(maruel): Do basic diagnostic.
    raise Failure('Failed to access %s' % url)
  org_cursor = data.pop('cursor', None)
  org_total = len(data.get('items') or [])
  logging.info('get_yielder(%s) returning %d items', base_url, org_total)
  if not org_cursor or not org_total:
    # This is not an iterable resource.
    return data, lambda: []

  def yielder():
    cursor = org_cursor
    total = org_total
    # Some items support cursors. Try to get automatically if cursors are needed
    # by looking at the 'cursor' items.
    while cursor and (not limit or total < limit):
      merge_char = '&' if '?' in base_url else '?'
      url = base_url + '%scursor=%s' % (merge_char, urllib.parse.quote(cursor))
      if limit:
        url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
      new = net.url_read_json(url)
      if new is None:
        raise Failure('Failed to access %s' % url)
      cursor = new.get('cursor')
      new_items = new.get('items')
      nb_items = len(new_items or [])
      total += nb_items
      logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
      yield new_items

  return data, yielder
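# Illustrative usage sketch (hypothetical URL, not from the original file):
#   data, yielder = get_yielder(base_url + '/_ah/api/swarming/v1/bots/list', 0)
#   items = data.get('items', [])
#   for page in yielder():
#     items.extend(page or [])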


### Commands.


def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.


def add_filter_options(parser):
  parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
  parser.filter_group.add_option(
      '-d', '--dimension', default=[], action='append', nargs=2,
      dest='dimensions', metavar='FOO bar',
      help='dimension to filter on')
  parser.filter_group.add_option(
      '--optional-dimension', default=[], action='append', nargs=3,
      dest='optional_dimensions', metavar='key value expiration',
      help='optional dimensions which will result in additional task slices ')
  parser.add_option_group(parser.filter_group)


def _validate_filter_option(parser, key, value, expiration, argname):
  if ':' in key:
    parser.error('%s key cannot contain ":"' % argname)
  if key.strip() != key:
    parser.error('%s key has whitespace' % argname)
  if not key:
    parser.error('%s key is empty' % argname)

  if value.strip() != value:
    parser.error('%s value has whitespace' % argname)
  if not value:
    parser.error('%s value is empty' % argname)

  if expiration is not None:
    try:
      expiration = int(expiration)
    except ValueError:
      parser.error('%s expiration is not an integer' % argname)
    if expiration <= 0:
      parser.error('%s expiration should be positive' % argname)
    if expiration % 60 != 0:
      parser.error('%s expiration is not divisible by 60' % argname)


def process_filter_options(parser, options):
  for key, value in options.dimensions:
    _validate_filter_option(parser, key, value, None, 'dimension')
  for key, value, exp in options.optional_dimensions:
    _validate_filter_option(parser, key, value, exp, 'optional-dimension')
  options.dimensions.sort()


def add_sharding_options(parser):
  parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
  parser.sharding_group.add_option(
      '--shards', type='int', default=1, metavar='NUMBER',
      help='Number of shards to trigger and collect.')
  parser.add_option_group(parser.sharding_group)


def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  isolateserver.add_isolate_server_options(parser)
  add_filter_options(parser)

  group = optparse.OptionGroup(parser, 'TaskSlice properties')
  group.add_option(
      '-s', '--isolated', metavar='HASH',
      help='Hash of the .isolated to grab from the isolate server')
  group.add_option(
      '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
      help='Environment variables to set')
  group.add_option(
      '--env-prefix', default=[], action='append', nargs=2,
      metavar='VAR local/path',
      help='Prepend task-relative `local/path` to the task\'s VAR environment '
           'variable using os-appropriate pathsep character. Can be specified '
           'multiple times for the same VAR to add multiple paths.')
  group.add_option(
      '--idempotent', action='store_true', default=False,
      help='When set, the server will actively try to find a previous task '
           'with the same parameter and return this result instead if possible')
  group.add_option(
      '--secret-bytes-path', metavar='FILE',
      help='The optional path to a file containing the secret_bytes to use '
           'with this task.')
  group.add_option(
      '--hard-timeout', type='int', default=60*60, metavar='SECS',
      help='Seconds to allow the task to complete.')
  group.add_option(
      '--io-timeout', type='int', default=20*60, metavar='SECS',
      help='Seconds to allow the task to be silent.')
  parser.add_option(
      '--lower-priority', action='store_true',
      help='Lowers the child process priority')
  containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
  parser.add_option(
      '--containment-type', default='NONE', metavar='NONE',
      choices=containment_choices,
      help='Containment to use; one of: %s' % ', '.join(containment_choices))
  group.add_option(
      '--raw-cmd', action='store_true', default=False,
      help='When set, the command after -- is used as-is without run_isolated. '
           'In this case, the .isolated file is expected to not have a command')
  group.add_option(
      '--relative-cwd',
      help='Ignore the isolated \'relative_cwd\' and use this one instead; '
           'requires --raw-cmd')
  group.add_option(
      '--cipd-package', action='append', default=[], metavar='PKG',
      help='CIPD packages to install on the Swarming bot. Uses the format: '
           'path:package_name:version')
  group.add_option(
      '--named-cache', action='append', nargs=2, default=[],
      metavar='NAME RELPATH',
      help='"<name> <relpath>" items to keep a persistent bot managed cache')
  group.add_option(
      '--service-account',
      help='Email of a service account to run the task as, or literal "bot" '
           'string to indicate that the task should use the same account the '
           'bot itself is using to authenticate to Swarming. Don\'t use task '
           'service accounts if not given (default).')
  group.add_option(
      '--pool-task-template',
      choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
      default='AUTO',
      help='Set how you want swarming to apply the pool\'s TaskTemplate. '
           'By default, the pool\'s TaskTemplate is automatically selected, '
           'according to the pool configuration on the server. Choices are: '
           'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
  group.add_option(
      '-o', '--output', action='append', default=[], metavar='PATH',
      help='A list of files to return in addition to those written to '
           '${ISOLATED_OUTDIR}. An error will occur if a file specified by '
           'this option is also written directly to ${ISOLATED_OUTDIR}.')
  group.add_option(
      '--wait-for-capacity', action='store_true', default=False,
      help='Instructs to leave the task PENDING even if there\'s no known bot '
           'that could run this task, otherwise the task will be denied with '
           'NO_RESOURCE')
  parser.add_option_group(group)

  group = optparse.OptionGroup(parser, 'TaskRequest details')
  group.add_option(
      '--priority', type='int', default=200,
      help='The lower value, the more important the task is')
  group.add_option(
      '-T', '--task-name', metavar='NAME',
      help='Display name of the task. Defaults to '
           '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
           'isolated file is provided; if a hash is provided, it defaults to '
           '<user>/<dimensions>/<isolated hash>/<timestamp>')
  group.add_option(
      '--tags', action='append', default=[], metavar='FOO:BAR',
      help='Tags to assign to the task.')
  group.add_option(
      '--user', default='',
      help='User associated with the task. Defaults to authenticated user on '
           'the server.')
  group.add_option(
      '--expiration', type='int', default=6*60*60, metavar='SECS',
      help='Seconds to allow the task to be pending for a bot to run before '
           'this task request expires.')
  group.add_option(
      '--deadline', type='int', dest='expiration',
      help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001062
1063
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001064def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001065 """Processes trigger options and does preparatory steps.
1066
1067 Returns:
1068 NewTaskRequest instance.
1069 """
maruelaf6b06c2017-06-08 06:26:53 -07001070 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001071 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001072 if args and args[0] == '--':
1073 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001074
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001075 if not options.dimensions:
1076 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001077 if not any(k == 'pool' for k, _v in options.dimensions):
1078 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001079 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1080 parser.error('--tags must be in the format key:value')
1081 if options.raw_cmd and not args:
1082 parser.error(
1083 'Arguments with --raw-cmd should be passed after -- as command '
1084 'delimiter.')
1085 if options.isolate_server and not options.namespace:
1086 parser.error(
1087 '--namespace must be a valid value when --isolate-server is used')
1088 if not options.isolated and not options.raw_cmd:
1089     parser.error('Specify at least one of --raw-cmd or --isolated')
1090
1091 # Isolated
1092 # --isolated is required only if --raw-cmd wasn't provided.
1093 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1094 # preferred server.
1095 isolateserver.process_isolate_server_options(
1096 parser, options, False, not options.raw_cmd)
1097 inputs_ref = None
1098 if options.isolate_server:
1099 inputs_ref = FilesRef(
1100 isolated=options.isolated,
1101 isolatedserver=options.isolate_server,
1102 namespace=options.namespace)
1103
1104 # Command
1105 command = None
1106 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001107 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001108 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001109 if options.relative_cwd:
1110 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1111 if not a.startswith(os.getcwd()):
1112 parser.error(
1113 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001114 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001115 if options.relative_cwd:
1116 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001117 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001118
maruel0a25f6c2017-05-10 10:43:23 -07001119 # CIPD
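  # A hedged example of the expected --cipd-package syntax (the package name
  # and version below are placeholders, not a recommendation):
  #   --cipd-package bin:infra/tools/luci/vpython/${platform}:latest
  # is parsed by the loop below into CipdPackage(path='bin',
  # package_name='infra/tools/luci/vpython/${platform}', version='latest').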
borenet02f772b2016-06-22 12:42:19 -07001120 cipd_packages = []
1121 for p in options.cipd_package:
1122 split = p.split(':', 2)
1123 if len(split) != 3:
1124 parser.error('CIPD packages must take the form: path:package:version')
1125 cipd_packages.append(CipdPackage(
1126 package_name=split[1],
1127 path=split[0],
1128 version=split[2]))
1129 cipd_input = None
1130 if cipd_packages:
1131 cipd_input = CipdInput(
1132 client_package=None,
1133 packages=cipd_packages,
1134 server=None)
1135
maruel0a25f6c2017-05-10 10:43:23 -07001136 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001137 secret_bytes = None
1138 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001139 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001140 secret_bytes = f.read().encode('base64')
1141
maruel0a25f6c2017-05-10 10:43:23 -07001142 # Named caches
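  # For example, a hypothetical '--named-cache git_cache .cache/git' argument
  # pair becomes {u'name': u'git_cache', u'path': u'.cache/git'} below.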
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001143 caches = [{
1144 u'name': six.text_type(i[0]),
1145 u'path': six.text_type(i[1])
1146 } for i in options.named_cache]
maruel0a25f6c2017-05-10 10:43:23 -07001147
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001148 env_prefixes = {}
1149 for k, v in options.env_prefix:
1150 env_prefixes.setdefault(k, []).append(v)
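  # As an illustration (flag values are hypothetical), repeating
  # '--env-prefix PATH relative/bin --env-prefix PATH other/bin' collapses to
  # {'PATH': ['relative/bin', 'other/bin']} here and is converted to
  # StringListPair entries further down.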
1151
Brad Hallf78187a2018-10-19 17:08:55 +00001152 # Get dimensions into the key/value format we can manipulate later.
1153 orig_dims = [
1154 {'key': key, 'value': value} for key, value in options.dimensions]
1155 orig_dims.sort(key=lambda x: (x['key'], x['value']))
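  # e.g. hypothetical '--dimension pool Chrome --dimension os Ubuntu' arguments
  # end up as [{'key': 'os', 'value': 'Ubuntu'},
  # {'key': 'pool', 'value': 'Chrome'}] once sorted.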
1156
1157 # Construct base properties that we will use for all the slices, adding in
1158 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001159 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001160 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001161 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001162 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001163 containment=Containment(
1164 lower_priority=bool(options.lower_priority),
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001165 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001166 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001167 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001168 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001169 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001170 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001171 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001172 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001173 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001174 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001175 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001176 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001177 outputs=options.output,
1178 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001179
1180 slices = []
1181
1182 # Group the optional dimensions by expiration.
1183 dims_by_exp = {}
1184 for key, value, exp_secs in options.optional_dimensions:
1185 dims_by_exp.setdefault(int(exp_secs), []).append(
1186 {'key': key, 'value': value})
1187
1188 # Create the optional slices with expiration deltas, we fix up the properties
1189 # below.
1190 last_exp = 0
1191 for expiration_secs in sorted(dims_by_exp):
1192 t = TaskSlice(
1193 expiration_secs=expiration_secs - last_exp,
1194 properties=properties,
1195 wait_for_capacity=False)
1196 slices.append(t)
1197 last_exp = expiration_secs
1198
1199 # Add back in the default slice (the last one).
1200 exp = max(int(options.expiration) - last_exp, 60)
1201 base_task_slice = TaskSlice(
1202 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001203 properties=properties,
1204 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001205 slices.append(base_task_slice)
1206
Brad Hall7f463e62018-11-16 16:13:30 +00001207 # Add optional dimensions to the task slices, replacing a dimension that
1208 # has the same key if it is a dimension where repeating isn't valid (otherwise
1209 # we append it). Currently the only dimension we can repeat is "caches"; the
1210 # rest (os, cpu, etc) shouldn't be repeated.
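  # Illustrative sketch (values are made up): with the base dimensions above
  # plus optional dimensions ('caches', 'vpython', 60) and ('gpu', 'nvidia',
  # 60), two slices are produced. The first slice expires after 60s and carries
  # the base dimensions plus both optional ones; the final fallback slice keeps
  # only the base dimensions and uses the remaining expiration budget (at least
  # 60s).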
Brad Hallf78187a2018-10-19 17:08:55 +00001211 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001212 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001213 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001214 # Replace or append the key/value pairs for this expiration in extra_dims;
1215 # we keep extra_dims around because we are iterating backwards and filling
1216 # in slices with shorter expirations. Dimensions expire as time goes on so
1217 # the slices that expire earlier will generally have more dimensions.
1218 for kv in kvs:
1219 if kv['key'] == 'caches':
1220 extra_dims.append(kv)
1221 else:
1222 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1223 # Then, add all the optional dimensions to the original dimension set, again
1224 # replacing if needed.
1225 for kv in extra_dims:
1226 if kv['key'] == 'caches':
1227 dims.append(kv)
1228 else:
1229 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001230 dims.sort(key=lambda x: (x['key'], x['value']))
1231 slice_properties = properties._replace(dimensions=dims)
1232 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
1233
maruel77f720b2015-09-15 12:35:22 -07001234 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001235 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001236 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001237 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001238 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001239 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001240 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001241 user=options.user,
1242 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001243
1244
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001245class TaskOutputStdoutOption(optparse.Option):
1246   """Where to output each task's console output (stdout/stderr).
1247
1248   The output will be:
1249    none - not downloaded.
1250 json - stored in summary.json file *only*.
1251 console - shown on stdout *only*.
1252 all - stored in summary.json and shown on stdout.
1253 """
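  # For instance, '--task-output-stdout all' is converted by convert_value()
  # below into ['console', 'json'], while 'none' yields an empty list.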
1254
1255 choices = ['all', 'json', 'console', 'none']
1256
1257 def __init__(self, *args, **kw):
1258 optparse.Option.__init__(
1259 self,
1260 *args,
1261 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001262 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001263 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001264 **kw)
1265
1266 def convert_value(self, opt, value):
1267 if value not in self.choices:
1268 raise optparse.OptionValueError("%s must be one of %s not %r" % (
1269 self.get_opt_string(), self.choices, value))
1270 stdout_to = []
1271 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001272 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001273 elif value != 'none':
1274 stdout_to = [value]
1275 return stdout_to
1276
1277
maruel@chromium.org0437a732013-08-27 16:05:52 +00001278def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001279 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001280 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001281 help='Timeout to wait for result, set to -1 for no timeout and get '
1282 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001283 parser.group_logging.add_option(
1284 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001285 parser.group_logging.add_option(
1286 '--print-status-updates', action='store_true',
1287 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001288 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001289 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001290 '--task-summary-json',
1291 metavar='FILE',
1292 help='Dump a summary of task results to this file as json. It contains '
1293 'only shards statuses as know to server directly. Any output files '
1294          'only shard statuses as known to the server. Any output files '
1295 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001296 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001297 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001298 help='Directory to put task results into. When the task finishes, this '
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001299          'directory contains a per-shard directory with output files produced '
1300 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001301 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001302 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001303 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001304 '--filepath-filter',
1305     help='Regexp filter used to select which file paths to download when '
1306 'collecting isolated output.')
1307 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001308 '--perf', action='store_true', default=False,
1309 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001310 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001311
1312
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001313def process_collect_options(parser, options):
1314   # Only -1 is allowed as a negative value; reject any other negative value.
1315 if options.timeout != -1 and options.timeout < 0:
1316 parser.error('Invalid --timeout value')
1317
1318
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001319@subcommand.usage('bots...')
1320def CMDbot_delete(parser, args):
1321 """Forcibly deletes bots from the Swarming server."""
1322 parser.add_option(
1323 '-f', '--force', action='store_true',
1324 help='Do not prompt for confirmation')
1325 options, args = parser.parse_args(args)
1326 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001327 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001328
1329 bots = sorted(args)
1330 if not options.force:
1331 print('Delete the following bots?')
1332 for bot in bots:
1333 print(' %s' % bot)
1334     if six.moves.input('Continue? [y/N] ') not in ('y', 'Y'):
1335 print('Goodbye.')
1336 return 1
1337
1338 result = 0
1339 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001340 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001341 if net.url_read_json(url, data={}, method='POST') is None:
1342 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001343 result = 1
1344 return result
1345
1346
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001347def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001348 """Returns information about the bots connected to the Swarming server."""
1349 add_filter_options(parser)
1350 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001351 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001352     help='Keep only dead bots; useful to reap them and reimage broken bots')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001353 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001354 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001355 help='Keep both dead and alive bots')
1356 parser.filter_group.add_option(
1357 '--busy', action='store_true', help='Keep only busy bots')
1358 parser.filter_group.add_option(
1359 '--idle', action='store_true', help='Keep only idle bots')
1360 parser.filter_group.add_option(
1361 '--mp', action='store_true',
1362 help='Keep only Machine Provider managed bots')
1363 parser.filter_group.add_option(
1364 '--non-mp', action='store_true',
1365 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001366 parser.filter_group.add_option(
1367 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001368 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001369 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001370 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001371
1372 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001373 parser.error('Use only one of --keep-dead or --dead-only')
1374 if options.busy and options.idle:
1375 parser.error('Use only one of --busy or --idle')
1376 if options.mp and options.non_mp:
1377 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001378
smut281c3902018-05-30 17:50:05 -07001379 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001380 values = []
1381 if options.dead_only:
1382 values.append(('is_dead', 'TRUE'))
1383 elif options.keep_dead:
1384 values.append(('is_dead', 'NONE'))
1385 else:
1386 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001387
maruelaf6b06c2017-06-08 06:26:53 -07001388 if options.busy:
1389 values.append(('is_busy', 'TRUE'))
1390 elif options.idle:
1391 values.append(('is_busy', 'FALSE'))
1392 else:
1393 values.append(('is_busy', 'NONE'))
1394
1395 if options.mp:
1396 values.append(('is_mp', 'TRUE'))
1397 elif options.non_mp:
1398 values.append(('is_mp', 'FALSE'))
1399 else:
1400 values.append(('is_mp', 'NONE'))
1401
1402 for key, value in options.dimensions:
1403 values.append(('dimensions', '%s:%s' % (key, value)))
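  # For instance (hypothetical filter), '--dimension os Linux' with the default
  # flags yields a query string like
  # 'is_dead=FALSE&is_busy=NONE&is_mp=NONE&dimensions=os%3ALinux'.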
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +00001404 url += urllib.parse.urlencode(values)
maruelaf6b06c2017-06-08 06:26:53 -07001405 try:
1406 data, yielder = get_yielder(url, 0)
1407 bots = data.get('items') or []
1408 for items in yielder():
1409 if items:
1410 bots.extend(items)
1411 except Failure as e:
1412 sys.stderr.write('\n%s\n' % e)
1413 return 1
maruel77f720b2015-09-15 12:35:22 -07001414 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
Lei Leife202df2019-06-11 17:33:34 +00001415 print(bot['bot_id'])
maruelaf6b06c2017-06-08 06:26:53 -07001416 if not options.bare:
1417 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
Lei Leife202df2019-06-11 17:33:34 +00001418 print(' %s' % json.dumps(dimensions, sort_keys=True))
maruelaf6b06c2017-06-08 06:26:53 -07001419 if bot.get('task_id'):
Lei Leife202df2019-06-11 17:33:34 +00001420 print(' task: %s' % bot['task_id'])
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001421 return 0
1422
1423
maruelfd0a90c2016-06-10 11:51:10 -07001424@subcommand.usage('task_id')
1425def CMDcancel(parser, args):
1426 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001427 parser.add_option(
1428 '-k', '--kill-running', action='store_true', default=False,
1429 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001430 options, args = parser.parse_args(args)
1431 if not args:
1432 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001433 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001434 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001435 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001436 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001437 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001438       print('Canceling %s failed. Probably already gone' % task_id)
1439 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001440 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001441 return 0
1442
1443
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001444@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001445def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001446   """Retrieves results of one or multiple Swarming tasks by their IDs.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001447
1448   The result can be in multiple parts if the execution was sharded. It can
1449 potentially have retries.
1450 """
1451 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001452 parser.add_option(
1453 '-j', '--json',
1454 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001455 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001456 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001457 if not args and not options.json:
1458 parser.error('Must specify at least one task id or --json.')
1459 if args and options.json:
1460 parser.error('Only use one of task id or --json.')
1461
1462 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001463 options.json = six.text_type(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001464 try:
maruel1ceb3872015-10-14 06:10:44 -07001465 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001466 data = json.load(f)
1467 except (IOError, ValueError):
1468 parser.error('Failed to open %s' % options.json)
1469 try:
1470 tasks = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001471 data['tasks'].values(), key=lambda x: x['shard_index'])
maruel71c61c82016-02-22 06:52:05 -08001472 args = [t['task_id'] for t in tasks]
1473 except (KeyError, TypeError):
1474 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001475 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001476       # Take into account all the task slices.
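      # Worked example with made-up numbers: for two slices whose
      # (execution_timeout_secs, expiration_secs) are (3600, 300) and
      # (3600, 600), the loop below computes max(0+3600+300, 300+3600+600) =
      # 4500, and the trailing +10. margin makes options.timeout 4510.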
1477 offset = 0
1478 for s in data['request']['task_slices']:
1479 m = (offset + s['properties']['execution_timeout_secs'] +
1480 s['expiration_secs'])
1481 if m > options.timeout:
1482 options.timeout = m
1483 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001484 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001485 else:
1486 valid = frozenset('0123456789abcdef')
1487 if any(not valid.issuperset(task_id) for task_id in args):
1488       parser.error('Task ids must be hexadecimal ([0-9a-f]).')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001489
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001490 try:
1491 return collect(
1492 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001493 args,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001494 options.timeout,
1495 options.decorate,
1496 options.print_status_updates,
1497 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001498 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001499 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001500 options.perf,
1501 options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001502 except Failure:
1503 on_error.report(None)
1504 return 1
1505
1506
maruel77f720b2015-09-15 12:35:22 -07001507@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001508def CMDpost(parser, args):
1509 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1510
1511 Input data must be sent to stdin, result is printed to stdout.
1512
1513 If HTTP response code >= 400, returns non-zero.
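
  Example (illustrative only; the server URL, method name and payload file are
  placeholders):
    swarming.py post -S https://example-swarming.appspot.com tasks/new < request.json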
1514 """
1515 options, args = parser.parse_args(args)
1516 if len(args) != 1:
1517     parser.error('Must specify only the API method name')
smut281c3902018-05-30 17:50:05 -07001518 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001519 data = sys.stdin.read()
1520 try:
1521 resp = net.url_read(url, data=data, method='POST')
1522 except net.TimeoutError:
1523 sys.stderr.write('Timeout!\n')
1524 return 1
1525 if not resp:
1526 sys.stderr.write('No response!\n')
1527 return 1
1528 sys.stdout.write(resp)
1529 return 0
1530
1531
1532@subcommand.usage('[method name]')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001533def CMDquery(parser, args):
maruel77f720b2015-09-15 12:35:22 -07001534   """Returns raw JSON information via a URL endpoint. Use 'query-list' to
1535 gather the list of API methods from the server.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001536
1537 Examples:
maruelaf6b06c2017-06-08 06:26:53 -07001538 Raw task request and results:
1539 swarming.py query -S server-url.com task/123456/request
1540 swarming.py query -S server-url.com task/123456/result
1541
maruel77f720b2015-09-15 12:35:22 -07001542 Listing all bots:
maruel84e77aa2015-10-21 06:37:24 -07001543 swarming.py query -S server-url.com bots/list
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001544
maruelaf6b06c2017-06-08 06:26:53 -07001545 Listing last 10 tasks on a specific bot named 'bot1':
1546 swarming.py query -S server-url.com --limit 10 bot/bot1/tasks
maruel84e77aa2015-10-21 06:37:24 -07001547
maruelaf6b06c2017-06-08 06:26:53 -07001548 Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
maruel84e77aa2015-10-21 06:37:24 -07001549 quoting is important!:
1550 swarming.py query -S server-url.com --limit 10 \\
maruelaf6b06c2017-06-08 06:26:53 -07001551 'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001552 """
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001553 parser.add_option(
1554 '-L', '--limit', type='int', default=200,
1555 help='Limit to enforce on limitless items (like number of tasks); '
1556 'default=%default')
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001557 parser.add_option(
1558 '--json', help='Path to JSON output file (otherwise prints to stdout)')
maruel77f720b2015-09-15 12:35:22 -07001559 parser.add_option(
1560 '--progress', action='store_true',
1561 help='Prints a dot at each request to show progress')
1562 options, args = parser.parse_args(args)
marueld8aba222015-09-03 12:21:19 -07001563 if len(args) != 1:
maruel77f720b2015-09-15 12:35:22 -07001564 parser.error(
1565         'Must specify only the method name and, optionally, properly escaped '
1566         'query args.')
smut281c3902018-05-30 17:50:05 -07001567 base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
maruelaf6b06c2017-06-08 06:26:53 -07001568 try:
1569 data, yielder = get_yielder(base_url, options.limit)
1570 for items in yielder():
1571 if items:
1572 data['items'].extend(items)
maruel77f720b2015-09-15 12:35:22 -07001573 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001574 sys.stderr.write('.')
1575 sys.stderr.flush()
1576 except Failure as e:
1577 sys.stderr.write('\n%s\n' % e)
1578 return 1
maruel77f720b2015-09-15 12:35:22 -07001579 if options.progress:
maruelaf6b06c2017-06-08 06:26:53 -07001580 sys.stderr.write('\n')
1581 sys.stderr.flush()
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001582 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001583 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001584 tools.write_json(options.json, data, True)
Paweł Hajdan, Jr53ef0132015-03-20 17:49:18 +01001585 else:
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001586 try:
maruel77f720b2015-09-15 12:35:22 -07001587 tools.write_json(sys.stdout, data, False)
Marc-Antoine Ruelcda90ee2015-03-23 15:13:20 -04001588 sys.stdout.write('\n')
1589 except IOError:
1590 pass
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001591 return 0
1592
1593
maruel77f720b2015-09-15 12:35:22 -07001594def CMDquery_list(parser, args):
1595 """Returns list of all the Swarming APIs that can be used with command
1596 'query'.
1597 """
1598 parser.add_option(
1599 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1600 options, args = parser.parse_args(args)
1601 if args:
1602 parser.error('No argument allowed.')
1603
1604 try:
1605 apis = endpoints_api_discovery_apis(options.swarming)
1606 except APIError as e:
1607 parser.error(str(e))
1608 if options.json:
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001609 options.json = six.text_type(os.path.abspath(options.json))
maruel1ceb3872015-10-14 06:10:44 -07001610 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001611 json.dump(apis, f)
1612 else:
1613 help_url = (
1614 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1615 options.swarming)
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001616 for i, (api_id, api) in enumerate(sorted(apis.items())):
maruel11e31af2017-02-15 07:30:50 -08001617 if i:
1618 print('')
Lei Leife202df2019-06-11 17:33:34 +00001619 print(api_id)
1620 print(' ' + api['description'].strip())
maruel11e31af2017-02-15 07:30:50 -08001621 if 'resources' in api:
1622 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001623 # TODO(maruel): Remove.
1624 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001625 for j, (resource_name, resource) in enumerate(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001626 sorted(api['resources'].items())):
maruel11e31af2017-02-15 07:30:50 -08001627 if j:
1628 print('')
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001629 for method_name, method in sorted(resource['methods'].items()):
maruel11e31af2017-02-15 07:30:50 -08001630 # Only list the GET ones.
1631 if method['httpMethod'] != 'GET':
1632 continue
Lei Leife202df2019-06-11 17:33:34 +00001633 print('- %s.%s: %s' % (
1634 resource_name, method_name, method['path']))
maruel11e31af2017-02-15 07:30:50 -08001635 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001636 ' ' + l for l in textwrap.wrap(
1637 method.get('description', 'No description'), 78)))
Lei Leife202df2019-06-11 17:33:34 +00001638 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel11e31af2017-02-15 07:30:50 -08001639 else:
1640 # New.
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001641 for method_name, method in sorted(api['methods'].items()):
maruel77f720b2015-09-15 12:35:22 -07001642 # Only list the GET ones.
1643 if method['httpMethod'] != 'GET':
1644 continue
Lei Leife202df2019-06-11 17:33:34 +00001645 print('- %s: %s' % (method['id'], method['path']))
maruel11e31af2017-02-15 07:30:50 -08001646 print('\n'.join(
1647 ' ' + l for l in textwrap.wrap(method['description'], 78)))
Lei Leife202df2019-06-11 17:33:34 +00001648 print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
maruel77f720b2015-09-15 12:35:22 -07001649 return 0
1650
1651
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001652@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001653def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001654   """Triggers a task and waits for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001655
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001656 Basically, does everything to run a command remotely.
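
  Example (illustrative; the server URL, pool name and isolated hash are
  placeholders):
    swarming.py run -S https://example-swarming.appspot.com \\
        --dimension pool Chrome --isolated <isolated_hash>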
maruel@chromium.org0437a732013-08-27 16:05:52 +00001657 """
1658 add_trigger_options(parser)
1659 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001660 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001661 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001662 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001663 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001664 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001665 tasks = trigger_task_shards(
1666 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001667 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001668 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001669 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001670 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001671 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001672 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001673 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001674 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001675 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001676 task_ids = [
1677 t['task_id']
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001678 for t in sorted(tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001679 ]
Caleb Rouleau779c4f02019-05-22 21:18:49 +00001680 for task_id in task_ids:
1681 print('Task: {server}/task?id={task}'.format(
1682 server=options.swarming, task=task_id))
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001683 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001684 offset = 0
1685 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001686 m = (offset + s.properties.execution_timeout_secs +
1687 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001688 if m > options.timeout:
1689 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001690 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001691 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001692 try:
1693 return collect(
1694 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001695 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001696 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001697 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001698 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001699 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001700 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001701 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001702 options.perf,
1703 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001704 except Failure:
1705 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001706 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001707
1708
maruel18122c62015-10-23 06:31:23 -07001709@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001710def CMDreproduce(parser, args):
1711 """Runs a task locally that was triggered on the server.
1712
1713   This runs locally the same commands that were run on the bot. The data
1714 downloaded will be in a subdirectory named 'work' of the current working
1715 directory.
maruel18122c62015-10-23 06:31:23 -07001716
1717   You can pass additional arguments to the target command by passing
1718 them after --.
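
  Example (illustrative; the server URL and task id are placeholders):
    swarming.py reproduce -S https://example-swarming.appspot.com <task_id> -- --verbose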
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001719 """
maruelc070e672016-02-22 17:32:57 -08001720 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001721 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001722 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001723 parser.add_option(
1724 '--work', metavar='DIR', default='work',
1725 help='Directory to map the task input files into')
1726 parser.add_option(
1727 '--cache', metavar='DIR', default='cache',
1728 help='Directory that contains the input cache')
1729 parser.add_option(
1730 '--leak', action='store_true',
1731 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001732 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001733 extra_args = []
1734 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001735 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001736 if len(args) > 1:
1737 if args[1] == '--':
1738 if len(args) > 2:
1739 extra_args = args[2:]
1740 else:
1741 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001742
smut281c3902018-05-30 17:50:05 -07001743 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001744 request = net.url_read_json(url)
1745 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001746 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001747 return 1
1748
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001749 workdir = six.text_type(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001750 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001751 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001752 fs.mkdir(workdir)
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001753 cachedir = six.text_type(os.path.abspath('cipd_cache'))
iannucci31ab9192017-05-02 19:11:56 -07001754 if not fs.exists(cachedir):
1755 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001756
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001757 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001758 env = os.environ.copy()
1759 env['SWARMING_BOT_ID'] = 'reproduce'
1760 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001761 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001762 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001763 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001764 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001765 if not i['value']:
1766 env.pop(key, None)
1767 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001768 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001769
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001770 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001771 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001772 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001773 for i in env_prefixes:
1774 key = i['key']
1775 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001776 cur = env.get(key)
1777 if cur:
1778 paths.append(cur)
1779 env[key] = os.path.pathsep.join(paths)
1780
iannucci31ab9192017-05-02 19:11:56 -07001781 command = []
nodir152cba62016-05-12 16:08:56 -07001782 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001783 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001784 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001785 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001786 properties['inputs_ref']['namespace'])
1787 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001788 # Do not use MemoryContentAddressedCache here, as on 32-bits python,
1789 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1790 # leak.
1791 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001792 cache = local_caching.DiskContentAddressedCache(
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001793 six.text_type(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001794 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001795 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001796 command = bundle.command
1797 if bundle.relative_cwd:
1798 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001799 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001800
1801 if properties.get('command'):
1802 command.extend(properties['command'])
1803
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001804 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001805 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001806 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001807 new_command = run_isolated.process_command(command, 'invalid', None)
1808 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001809       parser.error('The task has outputs, you must use --output')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001810 else:
1811 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001812 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001813 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001814 command, options.output, None)
1815 if not os.path.isdir(options.output):
1816 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001817 command = new_command
1818 file_path.ensure_command_has_abs_path(command, workdir)
1819
1820 if properties.get('cipd_input'):
1821 ci = properties['cipd_input']
1822 cp = ci['client_package']
1823 client_manager = cipd.get_client(
1824 ci['server'], cp['package_name'], cp['version'], cachedir)
1825
1826 with client_manager as client:
1827 by_path = collections.defaultdict(list)
1828 for pkg in ci['packages']:
1829 path = pkg['path']
1830 # cipd deals with 'root' as ''
1831 if path == '.':
1832 path = ''
1833 by_path[path].append((pkg['package_name'], pkg['version']))
1834 client.ensure(workdir, by_path, cache_dir=cachedir)
1835
maruel77f720b2015-09-15 12:35:22 -07001836 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001837 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001838 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001839 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1840 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001841 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001842 finally:
1843 # Do not delete options.cache.
1844 if not options.leak:
1845 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001846
1847
maruel0eb1d1b2015-10-02 14:48:21 -07001848@subcommand.usage('bot_id')
1849def CMDterminate(parser, args):
1850 """Tells a bot to gracefully shut itself down as soon as it can.
1851
1852   This is done by completing whatever task is currently running, then exiting the bot
1853 process.
1854 """
1855 parser.add_option(
1856 '--wait', action='store_true', help='Wait for the bot to terminate')
1857 options, args = parser.parse_args(args)
1858 if len(args) != 1:
1859 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001860 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001861 request = net.url_read_json(url, data={})
1862 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001863 print('Failed to ask for termination', file=sys.stderr)
maruel0eb1d1b2015-10-02 14:48:21 -07001864 return 1
1865 if options.wait:
1866 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001867 options.swarming,
1868 [request['task_id']],
1869 0.,
1870 False,
1871 False,
1872 None,
1873 None,
1874 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001875 False,
1876 None)
maruelbfc5f872017-06-10 16:43:17 -07001877 else:
Lei Leife202df2019-06-11 17:33:34 +00001878 print(request['task_id'])
maruel0eb1d1b2015-10-02 14:48:21 -07001879 return 0
1880
1881
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001882@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
maruel@chromium.org0437a732013-08-27 16:05:52 +00001883def CMDtrigger(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001884 """Triggers a Swarming task.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001885
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001886 Passes all extra arguments provided after '--' as additional command line
1887 arguments for an isolated command specified in *.isolate file.
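
  Example (illustrative; the server URL, dimension values and isolated hash are
  placeholders):
    swarming.py trigger -S https://example-swarming.appspot.com \\
        --dimension pool Chrome --dimension os Ubuntu-16.04 \\
        --isolated <isolated_hash> -- --verbose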
maruel@chromium.org0437a732013-08-27 16:05:52 +00001888 """
1889 add_trigger_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001890 add_sharding_options(parser)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001891 parser.add_option(
1892 '--dump-json',
1893 metavar='FILE',
1894 help='Dump details about the triggered task(s) to this file as json')
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001895 options, args = parser.parse_args(args)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001896 task_request = process_trigger_options(parser, options, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001897 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001898 tasks = trigger_task_shards(
1899 options.swarming, task_request, options.shards)
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001900 if tasks:
maruel0a25f6c2017-05-10 10:43:23 -07001901 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001902 tasks_sorted = sorted(
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001903 tasks.values(), key=lambda x: x['shard_index'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001904 if options.dump_json:
1905 data = {
maruel0a25f6c2017-05-10 10:43:23 -07001906 'base_task_name': task_request.name,
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001907 'tasks': tasks,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001908 'request': task_request_to_raw_request(task_request),
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001909 }
maruel46b015f2015-10-13 18:40:35 -07001910         tools.write_json(six.text_type(options.dump_json), data, True)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001911 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001912 print(' tools/swarming_client/swarming.py collect -S %s --json %s' %
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001913 (options.swarming, options.dump_json))
1914 else:
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001915 print('To collect results, use:')
Bruce Dawsonf0a5ae42018-09-04 20:06:46 +00001916 print(' tools/swarming_client/swarming.py collect -S %s %s' %
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -04001917 (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
1918 print('Or visit:')
1919 for t in tasks_sorted:
1920 print(' ' + t['view_url'])
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001921 return int(not tasks)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001922 except Failure:
1923 on_error.report(None)
vadimsh@chromium.orgd908a542013-10-30 01:36:17 +00001924 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001925
1926
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001927class OptionParserSwarming(logging_utils.OptionParserWithLogging):
maruel@chromium.org0437a732013-08-27 16:05:52 +00001928 def __init__(self, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001929 logging_utils.OptionParserWithLogging.__init__(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001930 self, prog='swarming.py', **kwargs)
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001931 self.server_group = optparse.OptionGroup(self, 'Server')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001932 self.server_group.add_option(
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001933 '-S', '--swarming',
Kevin Graney5346c162014-01-24 12:20:01 -05001934 metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
maruel@chromium.orge9403ab2013-09-20 18:03:49 +00001935 help='Swarming server to use')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001936 self.add_option_group(self.server_group)
Vadim Shtayurae34e13a2014-02-02 11:23:26 -08001937 auth.add_auth_options(self)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001938
1939 def parse_args(self, *args, **kwargs):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001940 options, args = logging_utils.OptionParserWithLogging.parse_args(
maruel@chromium.org0437a732013-08-27 16:05:52 +00001941 self, *args, **kwargs)
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001942 auth.process_auth_options(self, options)
1943 user = self._process_swarming(options)
1944 if hasattr(options, 'user') and not options.user:
1945 options.user = user
1946 return options, args
1947
1948 def _process_swarming(self, options):
1949 """Processes the --swarming option and aborts if not specified.
1950
1951 Returns the identity as determined by the server.
1952 """
maruel@chromium.org0437a732013-08-27 16:05:52 +00001953 if not options.swarming:
1954 self.error('--swarming is required.')
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001955 try:
1956 options.swarming = net.fix_url(options.swarming)
1957 except ValueError as e:
1958 self.error('--swarming %s' % e)
1959 on_error.report_on_exception_exit(options.swarming)
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -05001960 try:
1961 user = auth.ensure_logged_in(options.swarming)
1962 except ValueError as e:
1963 self.error(str(e))
Marc-Antoine Ruel012067b2014-12-10 15:45:42 -05001964 return user
maruel@chromium.org0437a732013-08-27 16:05:52 +00001965
1966
1967def main(args):
1968 dispatcher = subcommand.CommandDispatcher(__name__)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001969 return dispatcher.execute(OptionParserSwarming(version=__version__), args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001970
1971
1972if __name__ == '__main__':
maruel8e4e40c2016-05-30 06:21:07 -07001973 subprocess42.inhibit_os_error_reporting()
maruel@chromium.org0437a732013-08-27 16:05:52 +00001974 fix_encoding.fix_encoding()
1975 tools.disable_buffering()
1976 colorama.init()
1977 sys.exit(main(sys.argv[1:]))