blob: 4aa4ca8daf5961ae7c7e938505781eb8c3853e5b [file] [log] [blame]
maruel@chromium.org0437a732013-08-27 16:05:52 +00001#!/usr/bin/env python
maruelea586f32016-04-05 11:11:33 -07002# Copyright 2013 The LUCI Authors. All rights reserved.
maruelf1f5e2a2016-05-25 17:10:39 -07003# Use of this source code is governed under the Apache License, Version 2.0
4# that can be found in the LICENSE file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00005
6"""Client tool to trigger tasks or retrieve results from a Swarming server."""
7
Lei Leife202df2019-06-11 17:33:34 +00008from __future__ import print_function
9
10__version__ = '1.0'
maruel@chromium.org0437a732013-08-27 16:05:52 +000011
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050012import collections
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -040013import datetime
maruel@chromium.org0437a732013-08-27 16:05:52 +000014import json
15import logging
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -040016import optparse
maruel@chromium.org0437a732013-08-27 16:05:52 +000017import os
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +100018import re
maruel@chromium.org0437a732013-08-27 16:05:52 +000019import sys
maruel11e31af2017-02-15 07:30:50 -080020import textwrap
Vadim Shtayurab19319e2014-04-27 08:50:06 -070021import threading
maruel@chromium.org0437a732013-08-27 16:05:52 +000022import time
Takuto Ikuta35250172020-01-31 09:33:46 +000023import uuid
maruel@chromium.org0437a732013-08-27 16:05:52 +000024
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000025from utils import tools
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000026tools.force_local_third_party()
maruel@chromium.org0437a732013-08-27 16:05:52 +000027
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000028# third_party/
29import colorama
30from chromium import natsort
31from depot_tools import fix_encoding
32from depot_tools import subcommand
Takuto Ikuta6e2ff962019-10-29 12:35:27 +000033import six
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +000034from six.moves import urllib
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000035
36# pylint: disable=ungrouped-imports
Vadim Shtayurae34e13a2014-02-02 11:23:26 -080037import auth
iannucci31ab9192017-05-02 19:11:56 -070038import cipd
maruel@chromium.org7b844a62013-09-17 13:04:59 +000039import isolateserver
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +000040import isolate_storage
Marc-Antoine Ruel2666d9c2018-05-18 13:52:02 -040041import local_caching
maruelc070e672016-02-22 17:32:57 -080042import run_isolated
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000043from utils import file_path
44from utils import fs
45from utils import logging_utils
46from utils import net
47from utils import on_error
48from utils import subprocess42
49from utils import threading_utils
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050050
51
class Failure(Exception):
  """Generic failure raised by this tool when an operation cannot proceed."""
55
56
def default_task_name(options):
  """Returns the task name, synthesizing a default when none was specified.

  The default is '<user>/<dim1=val1_dim2=val2...>[/<isolated hash>]'.
  """
  # An explicitly provided name always wins.
  if options.task_name:
    return options.task_name
  dims = '_'.join('%s=%s' % (key, val) for key, val in options.dimensions)
  name = u'%s/%s' % (options.user, dims)
  if options.isolated:
    name = name + u'/' + options.isolated
  return name
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050067
68
69### Triggering.
70
71
# The namedtuples below mirror the request messages declared in
# ../appengine/swarming/swarming_rpcs.py. Field names and their order are part
# of the wire format built by task_request_to_raw_request(); do not reorder.

# See ../appengine/swarming/swarming_rpcs.py.
# One CIPD package to make available to the task.
CipdPackage = collections.namedtuple(
    'CipdPackage',
    [
      'package_name',
      'path',
      'version',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# CIPD configuration for a task: the client itself plus the packages to fetch.
CipdInput = collections.namedtuple(
    'CipdInput',
    [
      'client_package',
      'packages',
      'server',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# Reference to an isolated tree on an isolate server.
FilesRef = collections.namedtuple(
    'FilesRef',
    [
      'isolated',
      'isolatedserver',
      'namespace',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# A key mapped to a list of string values (e.g. for env_prefixes).
StringListPair = collections.namedtuple(
    'StringListPair', [
      'key',
      'value', # repeated string
    ]
)

# See ../appengine/swarming/swarming_rpcs.py.
# Containment settings for the task process.
Containment = collections.namedtuple(
    'Containment',
    [
      'lower_priority',
      'containment_type',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# Everything needed to actually run the task on a bot.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    [
      'caches',
      'cipd_input',
      'command',
      'containment',
      'relative_cwd',
      'dimensions',
      'env',
      'env_prefixes',
      'execution_timeout_secs',
      'extra_args',
      'grace_period_secs',
      'idempotent',
      'inputs_ref',
      'io_timeout_secs',
      'outputs',
      'secret_bytes',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# One way of running the task; a request may carry several fallback slices.
TaskSlice = collections.namedtuple(
    'TaskSlice',
    [
      'expiration_secs',
      'properties',
      'wait_for_capacity',
    ])


# See ../appengine/swarming/swarming_rpcs.py.
# The top-level message sent to /tasks/new.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    [
      'name',
      'parent_task_id',
      'priority',
      'task_slices',
      'service_account',
      'tags',
      'user',
      'pool_task_template',
    ])
165
166
def namedtuple_to_dict(value):
  """Recursively converts a namedtuple (and nested containers) into plain
  json-serializable structures: dicts, lists and strings.
  """
  # Namedtuples are detected via their _asdict() method, then recursed into.
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, dict):
    return {key: namedtuple_to_dict(val) for key, val in value.items()}
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(item) for item in value]
  # json.dumps in Python3 doesn't support bytes.
  if isinstance(value, bytes):
    return six.ensure_str(value)
  return value
maruel77f720b2015-09-15 12:35:22 -0700179
180
def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for new request.

  This is for the v1 client Swarming API.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # version of the server that doesn't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  # The env dict of every slice is serialized as a list of {'key', 'value'}
  # pairs, sorted by key.
  for task_slice in out['task_slices']:
    props = task_slice['properties']
    pairs = [{'key': k, 'value': v} for k, v in props['env'].items()]
    props['env'] = sorted(pairs, key=lambda pair: pair['key'])
  # A unique id so the server can dedupe duplicate (retried) submissions.
  out['request_uuid'] = str(uuid.uuid4())
  return out
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500200
201
def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Args:
    swarming: URL of the Swarming server.
    raw_request: json-compatible dict, as produced by
        task_request_to_raw_request().

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
    None on failure; the error is reported via on_error.report().
  """
  logging.info('Triggering: %s', raw_request['name'])

  result = net.url_read_json(
      swarming + '/_ah/api/swarming/v1/tasks/new', data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None
  if result.get('error'):
    # The reply is an error. Collect every message/debugInfo pair into a
    # single report.
    msg = 'Failed to trigger task %s' % raw_request['name']
    if result['error'].get('errors'):
      for err in result['error']['errors']:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif result['error'].get('message'):
      msg += '\nMessage: %s' % result['error']['message']

    on_error.report(msg)
    return None
  return result
238
239
def setup_googletest(env, shards, index):
  """Returns |env| extended with the googletest sharding variables.

  |env| is a list of {'key', 'value'} dicts. It is returned unmodified when
  there is a single shard; otherwise a new list is returned.
  """
  if shards <= 1:
    return env
  # The caller must not have set the sharding variables already.
  keys = set(item['key'] for item in env)
  assert 'GTEST_SHARD_INDEX' not in keys, env
  assert 'GTEST_TOTAL_SHARDS' not in keys, env
  return env + [
      {'key': 'GTEST_SHARD_INDEX', 'value': str(index)},
      {'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)},
  ]
249
250
def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Args:
    swarming: URL of the Swarming server.
    task_request: NewTaskRequest namedtuple describing the task.
    shards: number of shards to trigger.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
        of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      # Explicit sharding: inject GTEST_SHARD_INDEX/GTEST_TOTAL_SHARDS into
      # every slice and suffix the task name with ':<index>:<shards>'.
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = 1
      # Multiple tasks slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we have cannot guarantee that. If
      # we see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in range(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      break
    logging.info('Request result: %s', task)
    # The server may lower the priority; warn the user once.
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print('Priority was reset to %s' % task['request']['priority'],
            file=sys.stderr)
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print('Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests)), file=sys.stderr)
    for task_dict in tasks.values():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks
324
325
326### Collection.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000327
328
# How often to print status updates to stdout in 'collect', in seconds.
STATUS_UPDATE_INTERVAL = 5 * 60.
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700331
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400332
class TaskState(object):
  """Enum of the possible task states.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  # The states in which a task is still progressing (not yet settled).
  STATES_RUNNING = ('PENDING', 'RUNNING')

  # Maps the server's string representation to the numeric value above.
  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns int value based on the string."""
    try:
      return cls._ENUMS[state]
    except KeyError:
      raise ValueError('Invalid state %s' % state)
maruel@chromium.org0437a732013-08-27 16:05:52 +0000373
class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
               filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      task_output_stdout: where to put the shards' stdout; the summary keeps it
          only when this contains "json" (see finalize()).
      shard_count: expected number of task shards.
      filter_cb: passed through to isolateserver.fetch_isolated() when fetching
          output files.
    """
    # Absolute unicode path, or the falsy value that was passed in.
    self.task_output_dir = (
        six.text_type(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    # Guards _per_shard_results and _storage; methods run on multiple threads.
    self._lock = threading.Lock()
    # Maps shard index -> result dict, filled by process_shard_result().
    self._per_shard_results = {}
    # Lazily created isolateserver.Storage, shared by all shards.
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    # Add a browser-friendly URL pointing at the shard's isolated outputs.
    if result.get('outputs_ref'):
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.parse.urlencode([('namespace', ref['namespace']),
                                  ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0o700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shutdowns underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in range(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if not requested too.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files.

    Returns None (after logging) when |server_ref| disagrees with the server
    or namespace already in use by earlier shards.
    """
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage
504
505
def now():
  """Returns the current time; a trivial wrapper so tests can mock it."""
  current = time.time()
  return current
509
510
def parse_time(value):
  """Converts serialized time from the API to datetime.datetime.

  When microseconds are 0, the '.123456' suffix is elided, so the serialized
  format is not consistent and both variants must be attempted.
  """
  formats = ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S')
  for fmt in formats:
    try:
      parsed = datetime.datetime.strptime(value, fmt)
    except ValueError:
      continue
    return parsed
  raise ValueError('Failed to parse %s' % value)
521
522
def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Args:
    base_url: URL of the Swarming server.
    shard_index: 0-based shard index, forwarded to |output_collector|.
    task_id: ID of the task to poll.
    timeout: seconds to wait for completion; None means no deadline, -1 means
        a single attempt (500s are then retried inside net.url_read_json).
    should_stop: threading.Event signaling that polling must abort.
    output_collector: optional TaskOutputCollector storing the result.
    include_perf: if True, asks the server for performance stats.
    fetch_stdout: if True, fetches the task stdout once it completed.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  # The explicit 'timeout is not None' check is required: on Python 3,
  # 'None > 0' raises TypeError (on Python 2 it was silently False).
  deadline = (
      started + timeout if timeout is not None and timeout > 0 else None)
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
                    base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
maruel@chromium.org0437a732013-08-27 16:05:52 +0000604
605
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700606def yield_results(
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400607 swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000608 output_collector, include_perf, fetch_stdout):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -0500609 """Yields swarming task results from the swarming server as (index, result).
maruel@chromium.org0437a732013-08-27 16:05:52 +0000610
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700611 Duplicate shards are ignored. Shards are yielded in order of completion.
612 Timed out shards are NOT yielded at all. Caller can compare number of yielded
613 shards with len(task_keys) to verify all shards completed.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000614
615 max_threads is optional and is used to limit the number of parallel fetches
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -0500616 done. Since in general the number of task_keys is in the range <=10, it's not
maruel@chromium.org0437a732013-08-27 16:05:52 +0000617 worth normally to limit the number threads. Mostly used for testing purposes.
Marc-Antoine Ruel5c720342014-02-21 14:46:14 -0500618
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700619 output_collector is an optional instance of TaskOutputCollector that will be
620 used to fetch files produced by a task from isolate server to the local disk.
621
Marc-Antoine Ruel5c720342014-02-21 14:46:14 -0500622 Yields:
623 (index, result). In particular, 'result' is defined as the
624 GetRunnerResults() function in services/swarming/server/test_runner.py.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000625 """
maruel@chromium.org0437a732013-08-27 16:05:52 +0000626 number_threads = (
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400627 min(max_threads, len(task_ids)) if max_threads else len(task_ids))
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700628 should_stop = threading.Event()
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700629 results_channel = threading_utils.TaskChannel()
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700630
maruel@chromium.org0437a732013-08-27 16:05:52 +0000631 with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
632 try:
Vadim Shtayurab450c602014-05-12 19:23:25 -0700633 # Adds a task to the thread pool to call 'retrieve_results' and return
634 # the results together with shard_index that produced them (as a tuple).
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400635 def enqueue_retrieve_results(shard_index, task_id):
Marc-Antoine Ruel486c9b52018-07-23 19:30:47 +0000636 # pylint: disable=no-value-for-parameter
Vadim Shtayurab450c602014-05-12 19:23:25 -0700637 task_fn = lambda *args: (shard_index, retrieve_results(*args))
maruel@chromium.org0437a732013-08-27 16:05:52 +0000638 pool.add_task(
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400639 0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000640 task_id, timeout, should_stop, output_collector, include_perf,
641 fetch_stdout)
Vadim Shtayurab450c602014-05-12 19:23:25 -0700642
643 # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400644 for shard_index, task_id in enumerate(task_ids):
645 enqueue_retrieve_results(shard_index, task_id)
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700646
647 # Wait for all of them to finish.
Lei Lei73a5f732020-03-23 20:36:14 +0000648 # Convert to list, since range in Python3 doesn't have remove.
649 shards_remaining = list(range(len(task_ids)))
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400650 active_task_count = len(task_ids)
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700651 while active_task_count:
Vadim Shtayurab450c602014-05-12 19:23:25 -0700652 shard_index, result = None, None
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700653 try:
Marc-Antoine Ruel4494b6c2018-11-28 21:00:41 +0000654 shard_index, result = results_channel.next(
Vadim Shtayurab450c602014-05-12 19:23:25 -0700655 timeout=STATUS_UPDATE_INTERVAL)
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700656 except threading_utils.TaskChannel.Timeout:
657 if print_status_updates:
Jao-ke Chin-Leeba184e62018-11-19 17:04:41 +0000658 time_now = str(datetime.datetime.now())
659 _, time_now = time_now.split(' ')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700660 print(
Jao-ke Chin-Leeba184e62018-11-19 17:04:41 +0000661 '%s '
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700662 'Waiting for results from the following shards: %s' %
Jao-ke Chin-Leeba184e62018-11-19 17:04:41 +0000663 (time_now, ', '.join(map(str, shards_remaining)))
664 )
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700665 sys.stdout.flush()
666 continue
667 except Exception:
668 logging.exception('Unexpected exception in retrieve_results')
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700669
670 # A call to 'retrieve_results' finished (successfully or not).
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700671 active_task_count -= 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000672 if not result:
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -0500673 logging.error('Failed to retrieve the results for a swarming key')
maruel@chromium.org0437a732013-08-27 16:05:52 +0000674 continue
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700675
Vadim Shtayurab450c602014-05-12 19:23:25 -0700676 # Yield back results to the caller.
677 assert shard_index in shards_remaining
678 shards_remaining.remove(shard_index)
679 yield shard_index, result
Vadim Shtayurab19319e2014-04-27 08:50:06 -0700680
maruel@chromium.org0437a732013-08-27 16:05:52 +0000681 finally:
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700682 # Done or aborted with Ctrl+C, kill the remaining threads.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000683 should_stop.set()
684
685
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000686def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
maruel@chromium.org0437a732013-08-27 16:05:52 +0000687 """Returns wrapped output for swarming task shard."""
maruel77f720b2015-09-15 12:35:22 -0700688 if metadata.get('started_ts') and not metadata.get('deduped_from'):
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400689 pending = '%.1fs' % (
maruel77f720b2015-09-15 12:35:22 -0700690 parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
691 ).total_seconds()
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400692 elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
693 metadata.get('abandoned_ts')):
694 pending = '%.1fs' % (
695 parse_time(metadata['abandoned_ts']) -
696 parse_time(metadata['created_ts'])
697 ).total_seconds()
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400698 else:
699 pending = 'N/A'
700
maruel77f720b2015-09-15 12:35:22 -0700701 if metadata.get('duration') is not None:
702 duration = '%.1fs' % metadata['duration']
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400703 else:
704 duration = 'N/A'
705
maruel77f720b2015-09-15 12:35:22 -0700706 if metadata.get('exit_code') is not None:
707 # Integers are encoded as string to not loose precision.
708 exit_code = '%s' % metadata['exit_code']
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400709 else:
710 exit_code = 'N/A'
711
712 bot_id = metadata.get('bot_id') or 'N/A'
713
maruel77f720b2015-09-15 12:35:22 -0700714 url = '%s/user/task/%s' % (swarming, metadata['task_id'])
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400715 tag_header = 'Shard %d %s' % (shard_index, url)
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000716 tag_footer1 = 'End of shard %d' % (shard_index)
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400717 if metadata.get('state') == 'CANCELED':
718 tag_footer2 = ' Pending: %s CANCELED' % pending
719 elif metadata.get('state') == 'EXPIRED':
720 tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -0400721 elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400722 tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
723 pending, duration, bot_id, exit_code, metadata['state'])
724 else:
725 tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
726 pending, duration, bot_id, exit_code)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400727
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000728 tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
729 dash_pad = '+-%s-+' % ('-' * tag_len)
730 tag_header = '| %s |' % tag_header.ljust(tag_len)
731 tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
732 tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400733
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000734 if include_stdout:
735 return '\n'.join([
736 dash_pad,
737 tag_header,
738 dash_pad,
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400739 (metadata.get('output') or '').rstrip(),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000740 dash_pad,
741 tag_footer1,
742 tag_footer2,
743 dash_pad,
744 ])
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +0000745 return '\n'.join([
746 dash_pad,
747 tag_header,
748 tag_footer2,
749 dash_pad,
750 ])
maruel@chromium.org0437a732013-08-27 16:05:52 +0000751
752
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700753def collect(
maruel0eb1d1b2015-10-02 14:48:21 -0700754 swarming, task_ids, timeout, decorate, print_status_updates,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000755 task_summary_json, task_output_dir, task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000756 include_perf, filepath_filter):
maruela5490782015-09-30 10:56:59 -0700757 """Retrieves results of a Swarming task.
758
759 Returns:
760 process exit code that should be returned to the user.
761 """
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000762
763 filter_cb = None
764 if filepath_filter:
765 filter_cb = re.compile(filepath_filter).match
766
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700767 # Collect summary JSON and output files (if task_output_dir is not None).
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000768 output_collector = TaskOutputCollector(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000769 task_output_dir, task_output_stdout, len(task_ids), filter_cb)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700770
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700771 seen_shards = set()
maruela5490782015-09-30 10:56:59 -0700772 exit_code = None
Marc-Antoine Rueld59e8072014-10-21 18:54:45 -0400773 total_duration = 0
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700774 try:
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400775 for index, metadata in yield_results(
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400776 swarming, task_ids, timeout, None, print_status_updates,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000777 output_collector, include_perf,
778 (len(task_output_stdout) > 0),
779 ):
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700780 seen_shards.add(index)
Vadim Shtayura473455a2014-05-14 15:22:35 -0700781
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400782 # Default to failure if there was no process that even started.
maruel77f720b2015-09-15 12:35:22 -0700783 shard_exit_code = metadata.get('exit_code')
784 if shard_exit_code:
maruela5490782015-09-30 10:56:59 -0700785 # It's encoded as a string, so bool('0') is True.
maruel77f720b2015-09-15 12:35:22 -0700786 shard_exit_code = int(shard_exit_code)
maruela5490782015-09-30 10:56:59 -0700787 if shard_exit_code or exit_code is None:
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400788 exit_code = shard_exit_code
maruel77f720b2015-09-15 12:35:22 -0700789 total_duration += metadata.get('duration', 0)
Vadim Shtayura473455a2014-05-14 15:22:35 -0700790
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700791 if decorate:
Lei Lei73a5f732020-03-23 20:36:14 +0000792 # s is bytes in Python3, print could not print
793 # s with nice format, so decode s to str.
794 s = six.ensure_str(
795 decorate_shard_output(swarming, index, metadata,
796 "console" in task_output_stdout).encode(
797 'utf-8', 'replace'))
leileied181762016-10-13 14:24:59 -0700798 print(s)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400799 if len(seen_shards) < len(task_ids):
800 print('')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700801 else:
maruel77f720b2015-09-15 12:35:22 -0700802 print('%s: %s %s' % (
803 metadata.get('bot_id', 'N/A'),
804 metadata['task_id'],
805 shard_exit_code))
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000806 if "console" in task_output_stdout and metadata['output']:
maruel77f720b2015-09-15 12:35:22 -0700807 output = metadata['output'].rstrip()
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400808 if output:
809 print(''.join(' %s\n' % l for l in output.splitlines()))
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700810 finally:
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700811 summary = output_collector.finalize()
812 if task_summary_json:
813 tools.write_json(task_summary_json, summary, False)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700814
Marc-Antoine Rueld59e8072014-10-21 18:54:45 -0400815 if decorate and total_duration:
816 print('Total duration: %.1fs' % total_duration)
817
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400818 if len(seen_shards) != len(task_ids):
819 missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
Lei Leife202df2019-06-11 17:33:34 +0000820 print('Results from some shards are missing: %s' %
821 ', '.join(map(str, missing_shards)), file=sys.stderr)
Vadim Shtayurac524f512014-05-15 09:54:56 -0700822 return 1
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700823
maruela5490782015-09-30 10:56:59 -0700824 return exit_code if exit_code is not None else 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000825
826
maruel77f720b2015-09-15 12:35:22 -0700827### API management.
828
829
class APIError(Exception):
  """Raised when a Cloud Endpoints API discovery request fails."""
  pass
832
833
def endpoints_api_discovery_apis(host):
  """Uses Cloud Endpoints' API Discovery Service to returns metadata about all
  the APIs exposed by a host.

  https://developers.google.com/discovery/v1/reference/apis/list

  Raises:
    APIError: if the discovery index or any per-API document can't be read.
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  index = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if index is None:
    raise APIError('Failed to discover APIs on %s' % host)

  apis = {}
  for entry in index['items']:
    # Skip the discovery service's own entry.
    if entry['id'] == 'discovery:v1':
      continue
    # Each entry carries the URL of its full discovery document, of the form
    # host + '/_ah/api/discovery/v1/apis/<id>/<version>/rest'.
    details = net.url_read_json(entry['discoveryRestUrl'])
    if details is None:
      raise APIError('Failed to discover %s on %s' % (entry['id'], host))
    apis[entry['id']] = details
  return apis
857
858
maruelaf6b06c2017-06-08 06:26:53 -0700859def get_yielder(base_url, limit):
860 """Returns the first query and a function that yields following items."""
861 CHUNK_SIZE = 250
862
863 url = base_url
864 if limit:
865 url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
866 data = net.url_read_json(url)
867 if data is None:
868 # TODO(maruel): Do basic diagnostic.
869 raise Failure('Failed to access %s' % url)
870 org_cursor = data.pop('cursor', None)
871 org_total = len(data.get('items') or [])
872 logging.info('get_yielder(%s) returning %d items', base_url, org_total)
873 if not org_cursor or not org_total:
874 # This is not an iterable resource.
875 return data, lambda: []
876
877 def yielder():
878 cursor = org_cursor
879 total = org_total
880 # Some items support cursors. Try to get automatically if cursors are needed
881 # by looking at the 'cursor' items.
882 while cursor and (not limit or total < limit):
883 merge_char = '&' if '?' in base_url else '?'
Marc-Antoine Ruelad8cabe2019-10-10 23:24:26 +0000884 url = base_url + '%scursor=%s' % (merge_char, urllib.parse.quote(cursor))
maruelaf6b06c2017-06-08 06:26:53 -0700885 if limit:
886 url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
887 new = net.url_read_json(url)
888 if new is None:
889 raise Failure('Failed to access %s' % url)
890 cursor = new.get('cursor')
891 new_items = new.get('items')
892 nb_items = len(new_items or [])
893 total += nb_items
894 logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
895 yield new_items
896
897 return data, yielder
898
899
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500900### Commands.
901
902
def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet.
906
907
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400908def add_filter_options(parser):
maruel681d6802017-01-17 16:56:03 -0800909 parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500910 parser.filter_group.add_option(
Marc-Antoine Ruelb39e8cf2014-01-20 10:39:31 -0500911 '-d', '--dimension', default=[], action='append', nargs=2,
Marc-Antoine Ruel92f32422013-11-06 18:12:13 -0500912 dest='dimensions', metavar='FOO bar',
913 help='dimension to filter on')
Brad Hallf78187a2018-10-19 17:08:55 +0000914 parser.filter_group.add_option(
915 '--optional-dimension', default=[], action='append', nargs=3,
916 dest='optional_dimensions', metavar='key value expiration',
917 help='optional dimensions which will result in additional task slices ')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500918 parser.add_option_group(parser.filter_group)
919
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400920
Brad Hallf78187a2018-10-19 17:08:55 +0000921def _validate_filter_option(parser, key, value, expiration, argname):
922 if ':' in key:
923 parser.error('%s key cannot contain ":"' % argname)
924 if key.strip() != key:
925 parser.error('%s key has whitespace' % argname)
926 if not key:
927 parser.error('%s key is empty' % argname)
928
929 if value.strip() != value:
930 parser.error('%s value has whitespace' % argname)
931 if not value:
932 parser.error('%s value is empty' % argname)
933
934 if expiration is not None:
935 try:
936 expiration = int(expiration)
937 except ValueError:
938 parser.error('%s expiration is not an integer' % argname)
939 if expiration <= 0:
940 parser.error('%s expiration should be positive' % argname)
941 if expiration % 60 != 0:
942 parser.error('%s expiration is not divisible by 60' % argname)
943
944
maruelaf6b06c2017-06-08 06:26:53 -0700945def process_filter_options(parser, options):
946 for key, value in options.dimensions:
Brad Hallf78187a2018-10-19 17:08:55 +0000947 _validate_filter_option(parser, key, value, None, 'dimension')
948 for key, value, exp in options.optional_dimensions:
949 _validate_filter_option(parser, key, value, exp, 'optional-dimension')
maruelaf6b06c2017-06-08 06:26:53 -0700950 options.dimensions.sort()
951
952
Vadim Shtayurab450c602014-05-12 19:23:25 -0700953def add_sharding_options(parser):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -0400954 parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
Vadim Shtayurab450c602014-05-12 19:23:25 -0700955 parser.sharding_group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700956 '--shards', type='int', default=1, metavar='NUMBER',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700957 help='Number of shards to trigger and collect.')
958 parser.add_option_group(parser.sharding_group)
959
960
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400961def add_trigger_options(parser):
962 """Adds all options to trigger a task on Swarming."""
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -0500963 isolateserver.add_isolate_server_options(parser)
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400964 add_filter_options(parser)
965
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -0400966 group = optparse.OptionGroup(parser, 'TaskSlice properties')
maruel681d6802017-01-17 16:56:03 -0800967 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700968 '-s', '--isolated', metavar='HASH',
Marc-Antoine Ruel185ded42015-01-28 20:49:18 -0500969 help='Hash of the .isolated to grab from the isolate server')
maruel681d6802017-01-17 16:56:03 -0800970 group.add_option(
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500971 '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700972 help='Environment variables to set')
maruel681d6802017-01-17 16:56:03 -0800973 group.add_option(
Robert Iannuccibf5f84c2017-11-22 12:56:50 -0800974 '--env-prefix', default=[], action='append', nargs=2,
975 metavar='VAR local/path',
976 help='Prepend task-relative `local/path` to the task\'s VAR environment '
977 'variable using os-appropriate pathsep character. Can be specified '
978 'multiple times for the same VAR to add multiple paths.')
979 group.add_option(
Marc-Antoine Ruel02196392014-10-17 16:29:43 -0400980 '--idempotent', action='store_true', default=False,
981 help='When set, the server will actively try to find a previous task '
982 'with the same parameter and return this result instead if possible')
maruel681d6802017-01-17 16:56:03 -0800983 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700984 '--secret-bytes-path', metavar='FILE',
Stephen Martinisf391c772019-02-01 01:22:12 +0000985 help='The optional path to a file containing the secret_bytes to use '
986 'with this task.')
maruel681d6802017-01-17 16:56:03 -0800987 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700988 '--hard-timeout', type='int', default=60*60, metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400989 help='Seconds to allow the task to complete.')
maruel681d6802017-01-17 16:56:03 -0800990 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700991 '--io-timeout', type='int', default=20*60, metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400992 help='Seconds to allow the task to be silent.')
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +0000993 parser.add_option(
994 '--lower-priority', action='store_true',
995 help='Lowers the child process priority')
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +0000996 containment_choices = ('NONE', 'AUTO', 'JOB_OBJECT')
997 parser.add_option(
998 '--containment-type', default='NONE', metavar='NONE',
999 choices=containment_choices,
1000 help='Containment to use; one of: %s' % ', '.join(containment_choices))
maruel681d6802017-01-17 16:56:03 -08001001 group.add_option(
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001002 '--raw-cmd', action='store_true', default=False,
1003 help='When set, the command after -- is used as-is without run_isolated. '
maruel0a25f6c2017-05-10 10:43:23 -07001004 'In this case, the .isolated file is expected to not have a command')
maruel681d6802017-01-17 16:56:03 -08001005 group.add_option(
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001006 '--relative-cwd',
1007 help='Ignore the isolated \'relative_cwd\' and use this one instead; '
1008 'requires --raw-cmd')
1009 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001010 '--cipd-package', action='append', default=[], metavar='PKG',
1011 help='CIPD packages to install on the Swarming bot. Uses the format: '
borenet02f772b2016-06-22 12:42:19 -07001012 'path:package_name:version')
maruel681d6802017-01-17 16:56:03 -08001013 group.add_option(
1014 '--named-cache', action='append', nargs=2, default=[],
maruel5475ba62017-05-31 15:35:47 -07001015 metavar='NAME RELPATH',
maruel681d6802017-01-17 16:56:03 -08001016 help='"<name> <relpath>" items to keep a persistent bot managed cache')
1017 group.add_option(
vadimsh93d167c2016-09-13 11:31:51 -07001018 '--service-account',
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001019 help='Email of a service account to run the task as, or literal "bot" '
1020 'string to indicate that the task should use the same account the '
1021 'bot itself is using to authenticate to Swarming. Don\'t use task '
1022 'service accounts if not given (default).')
maruel681d6802017-01-17 16:56:03 -08001023 group.add_option(
Robert Iannuccifafa7352018-06-13 17:08:17 +00001024 '--pool-task-template',
1025 choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
1026 default='AUTO',
1027 help='Set how you want swarming to apply the pool\'s TaskTemplate. '
1028 'By default, the pool\'s TaskTemplate is automatically selected, '
1029 'according the pool configuration on the server. Choices are: '
1030 'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
1031 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001032 '-o', '--output', action='append', default=[], metavar='PATH',
1033 help='A list of files to return in addition to those written to '
1034 '${ISOLATED_OUTDIR}. An error will occur if a file specified by'
1035 'this option is also written directly to ${ISOLATED_OUTDIR}.')
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001036 group.add_option(
1037 '--wait-for-capacity', action='store_true', default=False,
1038 help='Instructs to leave the task PENDING even if there\'s no known bot '
1039 'that could run this task, otherwise the task will be denied with '
1040 'NO_RESOURCE')
maruel681d6802017-01-17 16:56:03 -08001041 parser.add_option_group(group)
1042
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001043 group = optparse.OptionGroup(parser, 'TaskRequest details')
maruel681d6802017-01-17 16:56:03 -08001044 group.add_option(
Marc-Antoine Ruel486c9b52018-07-23 19:30:47 +00001045 '--priority', type='int', default=200,
maruel681d6802017-01-17 16:56:03 -08001046 help='The lower value, the more important the task is')
1047 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001048 '-T', '--task-name', metavar='NAME',
maruel681d6802017-01-17 16:56:03 -08001049 help='Display name of the task. Defaults to '
1050 '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
1051 'isolated file is provided, if a hash is provided, it defaults to '
1052 '<user>/<dimensions>/<isolated hash>/<timestamp>')
1053 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001054 '--tags', action='append', default=[], metavar='FOO:BAR',
maruel681d6802017-01-17 16:56:03 -08001055 help='Tags to assign to the task.')
1056 group.add_option(
1057 '--user', default='',
1058 help='User associated with the task. Defaults to authenticated user on '
1059 'the server.')
1060 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001061 '--expiration', type='int', default=6*60*60, metavar='SECS',
maruel681d6802017-01-17 16:56:03 -08001062 help='Seconds to allow the task to be pending for a bot to run before '
1063 'this task request expires.')
1064 group.add_option(
1065 '--deadline', type='int', dest='expiration',
1066 help=optparse.SUPPRESS_HELP)
1067 parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001068
1069
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001070def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001071 """Processes trigger options and does preparatory steps.
1072
1073 Returns:
1074 NewTaskRequest instance.
1075 """
maruelaf6b06c2017-06-08 06:26:53 -07001076 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001077 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001078 if args and args[0] == '--':
1079 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001080
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001081 if not options.dimensions:
1082 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001083 if not any(k == 'pool' for k, _v in options.dimensions):
1084 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001085 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1086 parser.error('--tags must be in the format key:value')
1087 if options.raw_cmd and not args:
1088 parser.error(
1089 'Arguments with --raw-cmd should be passed after -- as command '
1090 'delimiter.')
1091 if options.isolate_server and not options.namespace:
1092 parser.error(
1093 '--namespace must be a valid value when --isolate-server is used')
1094 if not options.isolated and not options.raw_cmd:
1095 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1096
1097 # Isolated
1098 # --isolated is required only if --raw-cmd wasn't provided.
1099 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1100 # preferred server.
1101 isolateserver.process_isolate_server_options(
1102 parser, options, False, not options.raw_cmd)
1103 inputs_ref = None
1104 if options.isolate_server:
1105 inputs_ref = FilesRef(
1106 isolated=options.isolated,
1107 isolatedserver=options.isolate_server,
1108 namespace=options.namespace)
1109
1110 # Command
1111 command = None
1112 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001113 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001114 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001115 if options.relative_cwd:
1116 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1117 if not a.startswith(os.getcwd()):
1118 parser.error(
1119 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001120 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001121 if options.relative_cwd:
1122 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001123 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001124
maruel0a25f6c2017-05-10 10:43:23 -07001125 # CIPD
borenet02f772b2016-06-22 12:42:19 -07001126 cipd_packages = []
1127 for p in options.cipd_package:
1128 split = p.split(':', 2)
1129 if len(split) != 3:
1130 parser.error('CIPD packages must take the form: path:package:version')
1131 cipd_packages.append(CipdPackage(
1132 package_name=split[1],
1133 path=split[0],
1134 version=split[2]))
1135 cipd_input = None
1136 if cipd_packages:
1137 cipd_input = CipdInput(
1138 client_package=None,
1139 packages=cipd_packages,
1140 server=None)
1141
maruel0a25f6c2017-05-10 10:43:23 -07001142 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001143 secret_bytes = None
1144 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001145 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001146 secret_bytes = f.read().encode('base64')
1147
maruel0a25f6c2017-05-10 10:43:23 -07001148 # Named caches
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001149 caches = [{
1150 u'name': six.text_type(i[0]),
1151 u'path': six.text_type(i[1])
1152 } for i in options.named_cache]
maruel0a25f6c2017-05-10 10:43:23 -07001153
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001154 env_prefixes = {}
1155 for k, v in options.env_prefix:
1156 env_prefixes.setdefault(k, []).append(v)
1157
Brad Hallf78187a2018-10-19 17:08:55 +00001158 # Get dimensions into the key/value format we can manipulate later.
1159 orig_dims = [
1160 {'key': key, 'value': value} for key, value in options.dimensions]
1161 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1162
1163 # Construct base properties that we will use for all the slices, adding in
1164 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001165 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001166 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001167 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001168 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001169 containment=Containment(
1170 lower_priority=bool(options.lower_priority),
Marc-Antoine Ruel7f61a4d2019-05-22 20:10:07 +00001171 containment_type=options.containment_type,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001172 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001173 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001174 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001175 env=options.env,
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001176 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.items()],
maruel77f720b2015-09-15 12:35:22 -07001177 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001178 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001179 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001180 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001181 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001182 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001183 outputs=options.output,
1184 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001185
1186 slices = []
1187
1188 # Group the optional dimensions by expiration.
1189 dims_by_exp = {}
1190 for key, value, exp_secs in options.optional_dimensions:
1191 dims_by_exp.setdefault(int(exp_secs), []).append(
1192 {'key': key, 'value': value})
1193
1194 # Create the optional slices with expiration deltas, we fix up the properties
1195 # below.
1196 last_exp = 0
1197 for expiration_secs in sorted(dims_by_exp):
1198 t = TaskSlice(
1199 expiration_secs=expiration_secs - last_exp,
1200 properties=properties,
1201 wait_for_capacity=False)
1202 slices.append(t)
1203 last_exp = expiration_secs
1204
1205 # Add back in the default slice (the last one).
1206 exp = max(int(options.expiration) - last_exp, 60)
1207 base_task_slice = TaskSlice(
1208 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001209 properties=properties,
1210 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001211 slices.append(base_task_slice)
1212
Brad Hall7f463e62018-11-16 16:13:30 +00001213 # Add optional dimensions to the task slices, replacing a dimension that
1214 # has the same key if it is a dimension where repeating isn't valid (otherwise
1215 # we append it). Currently the only dimension we can repeat is "caches"; the
1216 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001217 extra_dims = []
Marc-Antoine Ruel04903a32019-10-09 21:09:25 +00001218 for i, (_, kvs) in enumerate(sorted(dims_by_exp.items(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001219 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001220 # Replace or append the key/value pairs for this expiration in extra_dims;
1221 # we keep extra_dims around because we are iterating backwards and filling
1222 # in slices with shorter expirations. Dimensions expire as time goes on so
1223 # the slices that expire earlier will generally have more dimensions.
1224 for kv in kvs:
1225 if kv['key'] == 'caches':
1226 extra_dims.append(kv)
1227 else:
1228 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1229 # Then, add all the optional dimensions to the original dimension set, again
1230 # replacing if needed.
1231 for kv in extra_dims:
1232 if kv['key'] == 'caches':
1233 dims.append(kv)
1234 else:
1235 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001236 dims.sort(key=lambda x: (x['key'], x['value']))
1237 slice_properties = properties._replace(dimensions=dims)
1238 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
1239
maruel77f720b2015-09-15 12:35:22 -07001240 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001241 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001242 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001243 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001244 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001245 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001246 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001247 user=options.user,
1248 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001249
1250
class TaskOutputStdoutOption(optparse.Option):
  """Where to output each task's console output (stderr/stdout).

  The output will be:
   none    - not be downloaded.
   json    - stored in summary.json file *only*.
   console - shown on stdout *only*.
   all     - stored in summary.json and shown on stdout.
  """

  choices = ['all', 'json', 'console', 'none']

  def __init__(self, *args, **kw):
    optparse.Option.__init__(
        self,
        *args,
        choices=self.choices,
        default=['console', 'json'],
        # The class docstring doubles as the --help text; collapse runs of
        # whitespace so it renders as a single paragraph.
        help=re.sub(r'\s\s*', ' ', self.__doc__),
        **kw)

  def convert_value(self, opt, value):
    """Maps the flag value to the list of output destinations.

    Returns a (possibly empty) subset of ['console', 'json'].

    Raises:
      optparse.OptionValueError if value is not one of the valid choices.
    """
    if value not in self.choices:
      raise optparse.OptionValueError("%s must be one of %s not %r" % (
          self.get_opt_string(), self.choices, value))
    # 'none' maps to no destination; 'all' maps to both destinations.
    stdout_to = []
    if value == 'all':
      stdout_to = ['console', 'json']
    elif value != 'none':
      stdout_to = [value]
    return stdout_to
1282
1283
def add_collect_options(parser):
  """Registers the options shared by the commands that wait on task results."""
  parser.server_group.add_option(
      '-t', '--timeout',
      type='float',
      default=0.,
      help='Timeout to wait for result, set to -1 for no timeout and get '
           'current state; defaults to waiting until the task completes')
  parser.group_logging.add_option(
      '--decorate', action='store_true', help='Decorate output')
  parser.group_logging.add_option(
      '--print-status-updates',
      action='store_true',
      help='Print periodic status updates')
  # All the options controlling where task output ends up live in their own
  # group.
  output_group = optparse.OptionGroup(parser, 'Task output')
  output_group.add_option(
      '--task-summary-json',
      metavar='FILE',
      help='Dump a summary of task results to this file as json. It contains '
           'only shards statuses as know to server directly. Any output files '
           'emitted by the task can be collected by using --task-output-dir')
  output_group.add_option(
      '--task-output-dir',
      metavar='DIR',
      help='Directory to put task results into. When the task finishes, this '
           'directory contains per-shard directory with output files produced '
           'by shards: <task-output-dir>/<zero-based-shard-index>/.')
  output_group.add_option(TaskOutputStdoutOption('--task-output-stdout'))
  output_group.add_option(
      '--filepath-filter',
      help='This is regexp filter used to specify downloaded filepath when '
           'collecting isolated output.')
  output_group.add_option(
      '--perf',
      action='store_true',
      default=False,
      help='Includes performance statistics')
  parser.task_output_group = output_group
  parser.add_option_group(output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001317
1318
def process_collect_options(parser, options):
  """Validates the options registered by add_collect_options()."""
  # -1 is the magic value for "no timeout, report current state"; every other
  # negative value is rejected.
  if options.timeout < 0 and options.timeout != -1:
    parser.error('Invalid --timeout value')
1323
1324
@subcommand.usage('bots...')
def CMDbot_delete(parser, args):
  """Forcibly deletes bots from the Swarming server."""
  parser.add_option(
      '-f', '--force', action='store_true',
      help='Do not prompt for confirmation')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify bots to delete')

  bots = sorted(args)
  if not options.force:
    print('Delete the following bots?')
    for bot in bots:
      print(' %s' % bot)
    # raw_input() does not exist on Python 3; six.moves.input is the portable
    # spelling for this py2/py3 file.
    if six.moves.input('Continue? [y/N] ') not in ('y', 'Y'):
      print('Goodbye.')
      return 1

  # Keep going even if deleting one bot fails; report failure via the exit
  # code.
  result = 0
  for bot in bots:
    url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
    if net.url_read_json(url, data={}, method='POST') is None:
      print('Deleting %s failed. Probably already gone' % bot)
      result = 1
  return result
1351
1352
def CMDbots(parser, args):
  """Returns information about the bots connected to the Swarming server."""
  add_filter_options(parser)
  parser.filter_group.add_option(
      '--dead-only', action='store_true',
      help='Filter out bots alive, useful to reap them and reimage broken bots')
  parser.filter_group.add_option(
      '-k', '--keep-dead', action='store_true',
      help='Keep both dead and alive bots')
  parser.filter_group.add_option(
      '--busy', action='store_true', help='Keep only busy bots')
  parser.filter_group.add_option(
      '--idle', action='store_true', help='Keep only idle bots')
  parser.filter_group.add_option(
      '--mp', action='store_true',
      help='Keep only Machine Provider managed bots')
  parser.filter_group.add_option(
      '--non-mp', action='store_true',
      help='Keep only non Machine Provider managed bots')
  parser.filter_group.add_option(
      '-b', '--bare', action='store_true',
      help='Do not print out dimensions')
  options, args = parser.parse_args(args)
  process_filter_options(parser, options)

  # Each pair of flags is mutually exclusive.
  if options.keep_dead and options.dead_only:
    parser.error('Use only one of --keep-dead or --dead-only')
  if options.busy and options.idle:
    parser.error('Use only one of --busy or --idle')
  if options.mp and options.non_mp:
    parser.error('Use only one of --mp or --non-mp')

  # Fold each tri-state flag pair into its server-side query parameter, then
  # append one 'dimensions' parameter per requested dimension.
  params = [
      ('is_dead',
       'TRUE' if options.dead_only else
       ('NONE' if options.keep_dead else 'FALSE')),
      ('is_busy',
       'TRUE' if options.busy else ('FALSE' if options.idle else 'NONE')),
      ('is_mp',
       'TRUE' if options.mp else ('FALSE' if options.non_mp else 'NONE')),
  ]
  params.extend(
      ('dimensions', '%s:%s' % (key, value))
      for key, value in options.dimensions)
  url = (options.swarming + '/_ah/api/swarming/v1/bots/list?' +
         urllib.parse.urlencode(params))
  try:
    data, yielder = get_yielder(url, 0)
    bots = data.get('items') or []
    for page in yielder():
      if page:
        bots.extend(page)
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  # Print in natural sort order so e.g. bot2 comes before bot10.
  for bot in natsort.natsorted(bots, key=lambda b: b['bot_id']):
    print(bot['bot_id'])
    if not options.bare:
      dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
      print(' %s' % json.dumps(dimensions, sort_keys=True))
      if bot.get('task_id'):
        print(' task: %s' % bot['task_id'])
  return 0
1428
1429
@subcommand.usage('task_id')
def CMDcancel(parser, args):
  """Cancels one or multiple tasks given their task ids."""
  parser.add_option(
      '-k', '--kill-running', action='store_true', default=False,
      help='Kill the task even if it was running')
  options, args = parser.parse_args(args)
  if not args:
    parser.error('Please specify the task to cancel')
  data = {'kill_running': options.kill_running}
  for task_id in args:
    url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
    resp = net.url_read_json(url, data=data, method='POST')
    if resp is None:
      # The previous message said "Deleting", copy-pasted from CMDbot_delete;
      # this command cancels.
      print('Canceling %s failed. Probably already gone' % task_id)
      return 1
    logging.info('%s', resp)
  return 0
1448
1449
@subcommand.usage('--json file | task_id...')
def CMDcollect(parser, args):
  """Retrieves results of one or multiple Swarming task by its ID.

  The result can be in multiple part if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  parser.add_option(
      '-j', '--json',
      help='Load the task ids from .json as saved by trigger --dump-json')
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  # Task ids come either from the command line or from --json, never both.
  if not args and not options.json:
    parser.error('Must specify at least one task id or --json.')
  if args and options.json:
    parser.error('Only use one of task id or --json.')

  if options.json:
    options.json = six.text_type(os.path.abspath(options.json))
    try:
      with fs.open(options.json, 'rb') as f:
        data = json.load(f)
    except (IOError, ValueError):
      parser.error('Failed to open %s' % options.json)
    try:
      tasks = sorted(data['tasks'].values(), key=lambda t: t['shard_index'])
      args = [task['task_id'] for task in tasks]
    except (KeyError, TypeError):
      parser.error('Failed to process %s' % options.json)
    if not options.timeout:
      # No explicit timeout: derive one from the task slices, as the worst
      # case completion time of any slice, plus a bit of slack.
      offset = 0
      for task_slice in data['request']['task_slices']:
        candidate = (
            offset + task_slice['properties']['execution_timeout_secs'] +
            task_slice['expiration_secs'])
        if candidate > options.timeout:
          options.timeout = candidate
        offset += task_slice['expiration_secs']
      options.timeout += 10.
  else:
    # Task ids are lowercase hexadecimal strings.
    hex_digits = frozenset('0123456789abcdef')
    if not all(hex_digits.issuperset(task_id) for task_id in args):
      parser.error('Task ids are 0-9a-f.')

  try:
    return collect(
        options.swarming,
        args,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
1511
1512
@subcommand.usage('[method name]')
def CMDpost(parser, args):
  """Sends a JSON RPC POST to one API endpoint and prints out the raw result.

  Input data must be sent to stdin, result is printed to stdout.

  If HTTP response code >= 400, returns non-zero.
  """
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Must specify only API name')
  url = '%s/_ah/api/swarming/v1/%s' % (options.swarming, args[0])
  # The request body is whatever was piped in.
  payload = sys.stdin.read()
  try:
    response = net.url_read(url, data=payload, method='POST')
  except net.TimeoutError:
    sys.stderr.write('Timeout!\n')
    return 1
  if not response:
    sys.stderr.write('No response!\n')
    return 1
  sys.stdout.write(response)
  return 0
1536
1537
@subcommand.usage('[method name]')
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Raw task request and results:
      swarming.py query -S server-url.com task/123456/request
      swarming.py query -S server-url.com task/123456/result

    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'bot1':
      swarming.py query -S server-url.com --limit 10 bot/bot1/tasks

    Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
  """
  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
           'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')
  base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  try:
    # Fetch the first page, then drain the pager, accumulating every page into
    # the first response's 'items'.
    result, yielder = get_yielder(base_url, options.limit)
    for page in yielder():
      if page:
        result['items'].extend(page)
      if options.progress:
        sys.stderr.write('.')
        sys.stderr.flush()
  except Failure as e:
    sys.stderr.write('\n%s\n' % e)
    return 1
  if options.progress:
    sys.stderr.write('\n')
    sys.stderr.flush()
  if options.json:
    options.json = six.text_type(os.path.abspath(options.json))
    tools.write_json(options.json, result, True)
  else:
    try:
      tools.write_json(sys.stdout, result, False)
      sys.stdout.write('\n')
    except IOError:
      pass
  return 0
1598
1599
def CMDquery_list(parser, args):
  """Returns list of all the Swarming APIs that can be used with command
  'query'.
  """
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  options, args = parser.parse_args(args)
  if args:
    parser.error('No argument allowed.')

  try:
    apis = endpoints_api_discovery_apis(options.swarming)
  except APIError as e:
    parser.error(str(e))
  if options.json:
    # Dump the raw discovery document as-is to the requested file.
    options.json = six.text_type(os.path.abspath(options.json))
    with fs.open(options.json, 'wb') as f:
      json.dump(apis, f)
  else:
    # Pretty print a human readable listing of the GET endpoints, each with a
    # link into the hosted API explorer.
    help_url = (
      'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
      options.swarming)
    for i, (api_id, api) in enumerate(sorted(apis.items())):
      if i:
        print('')
      print(api_id)
      print(' ' + api['description'].strip())
      if 'resources' in api:
        # Old discovery format: methods nested under named resources.
        # TODO(maruel): Remove.
        # pylint: disable=too-many-nested-blocks
        for j, (resource_name, resource) in enumerate(
            sorted(api['resources'].items())):
          if j:
            print('')
          for method_name, method in sorted(resource['methods'].items()):
            # Only list the GET ones.
            if method['httpMethod'] != 'GET':
              continue
            print('- %s.%s: %s' % (
                resource_name, method_name, method['path']))
            # Wrap the description to 78 columns, indented under the method.
            print('\n'.join(
                ' ' + l for l in textwrap.wrap(
                    method.get('description', 'No description'), 78)))
            print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
      else:
        # New discovery format: methods listed flat on the API.
        for method_name, method in sorted(api['methods'].items()):
          # Only list the GET ones.
          if method['httpMethod'] != 'GET':
            continue
          print('- %s: %s' % (method['id'], method['path']))
          print('\n'.join(
              ' ' + l for l in textwrap.wrap(method['description'], 78)))
          print(' %s%s%s' % (help_url, api['servicePath'], method['id']))
  return 0
1656
1657
@subcommand.usage('(hash|isolated) [-- extra_args]')
def CMDrun(parser, args):
  """Triggers a task and wait for the results.

  Basically, does everything to run a command remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  add_sharding_options(parser)
  options, args = parser.parse_args(args)
  process_collect_options(parser, options)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
  except Failure as e:
    on_error.report(
        'Failed to trigger %s(%s): %s' %
        (task_request.name, args[0], e.args[0]))
    return 1
  if not tasks:
    on_error.report('Failed to trigger the task.')
    return 1
  print('Triggered task: %s' % task_request.name)
  # Collect in shard order.
  shards = sorted(tasks.values(), key=lambda shard: shard['shard_index'])
  task_ids = [shard['task_id'] for shard in shards]
  for task_id in task_ids:
    print('Task: {server}/task?id={task}'.format(
        server=options.swarming, task=task_id))
  if not options.timeout:
    # No explicit timeout: derive one from the task slices, as the worst case
    # completion time of any slice, plus a bit of slack.
    offset = 0
    for task_slice in task_request.task_slices:
      candidate = (offset + task_slice.properties.execution_timeout_secs +
                   task_slice.expiration_secs)
      if candidate > options.timeout:
        options.timeout = candidate
      offset += task_slice.expiration_secs
    options.timeout += 10.
  try:
    return collect(
        options.swarming,
        task_ids,
        options.timeout,
        options.decorate,
        options.print_status_updates,
        options.task_summary_json,
        options.task_output_dir,
        options.task_output_stdout,
        options.perf,
        options.filepath_filter)
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001713
1714
maruel18122c62015-10-23 06:31:23 -07001715@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001716def CMDreproduce(parser, args):
1717 """Runs a task locally that was triggered on the server.
1718
1719 This running locally the same commands that have been run on the bot. The data
1720 downloaded will be in a subdirectory named 'work' of the current working
1721 directory.
maruel18122c62015-10-23 06:31:23 -07001722
1723 You can pass further additional arguments to the target command by passing
1724 them after --.
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001725 """
maruelc070e672016-02-22 17:32:57 -08001726 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001727 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001728 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001729 parser.add_option(
1730 '--work', metavar='DIR', default='work',
1731 help='Directory to map the task input files into')
1732 parser.add_option(
1733 '--cache', metavar='DIR', default='cache',
1734 help='Directory that contains the input cache')
1735 parser.add_option(
1736 '--leak', action='store_true',
1737 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001738 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001739 extra_args = []
1740 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001741 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001742 if len(args) > 1:
1743 if args[1] == '--':
1744 if len(args) > 2:
1745 extra_args = args[2:]
1746 else:
1747 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001748
smut281c3902018-05-30 17:50:05 -07001749 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001750 request = net.url_read_json(url)
1751 if not request:
Lei Leife202df2019-06-11 17:33:34 +00001752 print('Failed to retrieve request data for the task', file=sys.stderr)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001753 return 1
1754
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001755 workdir = six.text_type(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001756 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001757 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001758 fs.mkdir(workdir)
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001759 cachedir = six.text_type(os.path.abspath('cipd_cache'))
iannucci31ab9192017-05-02 19:11:56 -07001760 if not fs.exists(cachedir):
1761 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001762
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001763 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001764 env = os.environ.copy()
1765 env['SWARMING_BOT_ID'] = 'reproduce'
1766 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001767 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001768 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001769 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001770 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001771 if not i['value']:
1772 env.pop(key, None)
1773 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001774 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001775
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001776 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001777 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001778 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001779 for i in env_prefixes:
1780 key = i['key']
1781 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001782 cur = env.get(key)
1783 if cur:
1784 paths.append(cur)
1785 env[key] = os.path.pathsep.join(paths)
1786
iannucci31ab9192017-05-02 19:11:56 -07001787 command = []
nodir152cba62016-05-12 16:08:56 -07001788 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001789 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001790 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001791 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001792 properties['inputs_ref']['namespace'])
1793 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001794 # Do not use MemoryContentAddressedCache here, as on 32-bits python,
1795 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1796 # leak.
1797 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001798 cache = local_caching.DiskContentAddressedCache(
Takuto Ikuta6e2ff962019-10-29 12:35:27 +00001799 six.text_type(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001800 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001801 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001802 command = bundle.command
1803 if bundle.relative_cwd:
1804 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001805 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001806
1807 if properties.get('command'):
1808 command.extend(properties['command'])
1809
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001810 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Brian Sheedy7a761172019-08-30 22:55:14 +00001811 command = tools.find_executable(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001812 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001813 new_command = run_isolated.process_command(command, 'invalid', None)
1814 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001815 parser.error('The task has outputs, you must use --output-dir')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001816 else:
1817 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001818 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001819 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001820 command, options.output, None)
1821 if not os.path.isdir(options.output):
1822 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001823 command = new_command
1824 file_path.ensure_command_has_abs_path(command, workdir)
1825
1826 if properties.get('cipd_input'):
1827 ci = properties['cipd_input']
1828 cp = ci['client_package']
1829 client_manager = cipd.get_client(
1830 ci['server'], cp['package_name'], cp['version'], cachedir)
1831
1832 with client_manager as client:
1833 by_path = collections.defaultdict(list)
1834 for pkg in ci['packages']:
1835 path = pkg['path']
1836 # cipd deals with 'root' as ''
1837 if path == '.':
1838 path = ''
1839 by_path[path].append((pkg['package_name'], pkg['version']))
1840 client.ensure(workdir, by_path, cache_dir=cachedir)
1841
maruel77f720b2015-09-15 12:35:22 -07001842 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001843 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001844 except OSError as e:
Lei Leife202df2019-06-11 17:33:34 +00001845 print('Failed to run: %s' % ' '.join(command), file=sys.stderr)
1846 print(str(e), file=sys.stderr)
maruel77f720b2015-09-15 12:35:22 -07001847 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001848 finally:
1849 # Do not delete options.cache.
1850 if not options.leak:
1851 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001852
1853
@subcommand.usage('bot_id')
def CMDterminate(parser, args):
  """Tells a bot to gracefully shut itself down as soon as it can.

  This is done by completing whatever current task there is then exiting the bot
  process.
  """
  parser.add_option(
      '--wait', action='store_true', help='Wait for the bot to terminate')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error('Please provide the bot id')
  bot_id = args[0]
  # Asking for termination enqueues a special task on the bot; the server
  # replies with the id of that task.
  url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % bot_id
  resp = net.url_read_json(url, data={})
  if not resp:
    print('Failed to ask for termination', file=sys.stderr)
    return 1
  if not options.wait:
    print(resp['task_id'])
    return 0
  # Block until the termination task completes, i.e. the bot has shut down.
  return collect(
      options.swarming,
      [resp['task_id']],
      0.,
      False,
      False,
      None,
      None,
      [],
      False,
      None)
1886
1887
@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
    if tasks:
      print('Triggered task: %s' % task_request.name)
      tasks_sorted = sorted(
          tasks.values(), key=lambda x: x['shard_index'])
      if options.dump_json:
        data = {
          'base_task_name': task_request.name,
          'tasks': tasks,
          'request': task_request_to_raw_request(task_request),
        }
        # BUG FIX: was unicode(options.dump_json), which is Python 2-only and
        # raises NameError under Python 3. The file already uses
        # six.text_type for the same purpose elsewhere.
        tools.write_json(six.text_type(options.dump_json), data, True)
        print('To collect results, use:')
        print('  tools/swarming_client/swarming.py collect -S %s --json %s' %
              (options.swarming, options.dump_json))
      else:
        print('To collect results, use:')
        print('  tools/swarming_client/swarming.py collect -S %s %s' %
              (options.swarming, ' '.join(t['task_id'] for t in tasks_sorted)))
      print('Or visit:')
      for t in tasks_sorted:
        print('  ' + t['view_url'])
    # 0 when at least one shard was triggered, 1 otherwise.
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001931
1932
class OptionParserSwarming(logging_utils.OptionParserWithLogging):
  """Option parser shared by all swarming.py subcommands.

  Adds the 'Server' option group (-S/--swarming) and the standard auth
  options on top of the logging options from the base class.
  """

  def __init__(self, **kwargs):
    logging_utils.OptionParserWithLogging.__init__(
        self, prog='swarming.py', **kwargs)
    self.server_group = optparse.OptionGroup(self, 'Server')
    self.server_group.add_option(
        '-S', '--swarming',
        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
        help='Swarming server to use')
    self.add_option_group(self.server_group)
    auth.add_auth_options(self)

  def parse_args(self, *args, **kwargs):
    options, args = logging_utils.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    auth.process_auth_options(self, options)
    identity = self._process_swarming(options)
    # Subcommands that declare a --user option default it to the
    # authenticated identity.
    if hasattr(options, 'user') and not options.user:
      options.user = identity
    return options, args

  def _process_swarming(self, options):
    """Validates and normalizes --swarming, aborting if not specified.

    Returns the identity as determined by the server.
    """
    if not options.swarming:
      self.error('--swarming is required.')
    try:
      options.swarming = net.fix_url(options.swarming)
    except ValueError as e:
      self.error('--swarming %s' % e)
    on_error.report_on_exception_exit(options.swarming)
    try:
      identity = auth.ensure_logged_in(options.swarming)
    except ValueError as e:
      self.error(str(e))
    return identity
maruel@chromium.org0437a732013-08-27 16:05:52 +00001971
1972
def main(args):
  """Dispatches args to the matching CMD* function in this module."""
  parser = OptionParserSwarming(version=__version__)
  return subcommand.CommandDispatcher(__name__).execute(parser, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001976
1977
if __name__ == '__main__':
  # Process-wide setup; the order follows the file's convention: OS error
  # dialogs are suppressed and stdio encoding is fixed before anything prints.
  subprocess42.inhibit_os_error_reporting()
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  # Identify this client version in HTTP requests to the Swarming server.
  net.set_user_agent('swarming.py/' + __version__)
  sys.exit(main(sys.argv[1:]))