blob: 8d07426c8a389e9468912181c6423e66c2f253d5 [file] [log] [blame]
maruel@chromium.org0437a732013-08-27 16:05:52 +00001#!/usr/bin/env python
maruelea586f32016-04-05 11:11:33 -07002# Copyright 2013 The LUCI Authors. All rights reserved.
maruelf1f5e2a2016-05-25 17:10:39 -07003# Use of this source code is governed under the Apache License, Version 2.0
4# that can be found in the LICENSE file.
maruel@chromium.org0437a732013-08-27 16:05:52 +00005
6"""Client tool to trigger tasks or retrieve results from a Swarming server."""
7
Takuto Ikuta0e3e1c42018-11-29 14:21:06 +00008__version__ = '0.14'
maruel@chromium.org0437a732013-08-27 16:05:52 +00009
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050010import collections
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -040011import datetime
maruel@chromium.org0437a732013-08-27 16:05:52 +000012import json
13import logging
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -040014import optparse
maruel@chromium.org0437a732013-08-27 16:05:52 +000015import os
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +100016import re
maruel@chromium.org0437a732013-08-27 16:05:52 +000017import sys
maruel11e31af2017-02-15 07:30:50 -080018import textwrap
Vadim Shtayurab19319e2014-04-27 08:50:06 -070019import threading
maruel@chromium.org0437a732013-08-27 16:05:52 +000020import time
21import urllib
maruel@chromium.org0437a732013-08-27 16:05:52 +000022
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000023from utils import tools
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000024tools.force_local_third_party()
maruel@chromium.org0437a732013-08-27 16:05:52 +000025
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000026# third_party/
27import colorama
28from chromium import natsort
29from depot_tools import fix_encoding
30from depot_tools import subcommand
31
32# pylint: disable=ungrouped-imports
Vadim Shtayurae34e13a2014-02-02 11:23:26 -080033import auth
iannucci31ab9192017-05-02 19:11:56 -070034import cipd
maruel@chromium.org7b844a62013-09-17 13:04:59 +000035import isolateserver
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +000036import isolate_storage
Marc-Antoine Ruel2666d9c2018-05-18 13:52:02 -040037import local_caching
maruelc070e672016-02-22 17:32:57 -080038import run_isolated
Marc-Antoine Ruel016c7602019-04-02 18:31:13 +000039from utils import file_path
40from utils import fs
41from utils import logging_utils
42from utils import net
43from utils import on_error
44from utils import subprocess42
45from utils import threading_utils
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050046
47
class Failure(Exception):
  """Raised for generic, non-specific failures in this module."""
51
52
def default_task_name(options):
  """Returns the task name to use, deriving one when none was given.

  The derived name is "<user>/<dim1=val1_dim2=val2...>[/<isolated hash>]".
  """
  if options.task_name:
    return options.task_name
  dims = '_'.join('%s=%s' % (k, v) for k, v in options.dimensions)
  name = u'%s/%s' % (options.user, dims)
  if options.isolated:
    name += u'/' + options.isolated
  return name
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -050063
64
65### Triggering.
66
67
# Client-side mirrors of the server RPC message schemas. Field order matters:
# these are plain namedtuples serialized positionally by their constructors.

# See ../appengine/swarming/swarming_rpcs.py.
CipdPackage = collections.namedtuple(
    'CipdPackage', 'package_name path version')


# See ../appengine/swarming/swarming_rpcs.py.
CipdInput = collections.namedtuple(
    'CipdInput', 'client_package packages server')


# See ../appengine/swarming/swarming_rpcs.py.
FilesRef = collections.namedtuple(
    'FilesRef', 'isolated isolatedserver namespace')


# See ../appengine/swarming/swarming_rpcs.py.
# 'value' is a repeated string.
StringListPair = collections.namedtuple('StringListPair', 'key value')


# See ../appengine/swarming/swarming_rpcs.py.
Containment = collections.namedtuple('Containment', 'lower_priority')


# See ../appengine/swarming/swarming_rpcs.py.
TaskProperties = collections.namedtuple(
    'TaskProperties',
    'caches cipd_input command containment relative_cwd dimensions env '
    'env_prefixes execution_timeout_secs extra_args grace_period_secs '
    'idempotent inputs_ref io_timeout_secs outputs secret_bytes')


# See ../appengine/swarming/swarming_rpcs.py.
TaskSlice = collections.namedtuple(
    'TaskSlice', 'expiration_secs properties wait_for_capacity')


# See ../appengine/swarming/swarming_rpcs.py.
NewTaskRequest = collections.namedtuple(
    'NewTaskRequest',
    'name parent_task_id priority task_slices service_account tags user '
    'pool_task_template')
160
161
def namedtuple_to_dict(value):
  """Recursively converts a namedtuple to a dict.

  Args:
    value: a namedtuple, a list/tuple, a dict, or a scalar; containers are
        traversed recursively.

  Returns:
    The same structure with every namedtuple replaced by a plain dict and
    every tuple replaced by a list.
  """
  if hasattr(value, '_asdict'):
    return namedtuple_to_dict(value._asdict())
  if isinstance(value, (list, tuple)):
    return [namedtuple_to_dict(v) for v in value]
  if isinstance(value, dict):
    # .items() (not the py2-only .iteritems()) keeps this working on both
    # Python 2 and Python 3.
    return {k: namedtuple_to_dict(v) for k, v in value.items()}
  return value
maruel77f720b2015-09-15 12:35:22 -0700171
172
def task_request_to_raw_request(task_request):
  """Returns the json-compatible dict expected by the server for new request.

  This is for the v1 client Swarming API.

  Args:
    task_request: a NewTaskRequest namedtuple.
  """
  out = namedtuple_to_dict(task_request)
  # Don't send 'service_account' if it is None to avoid confusing older
  # version of the server that doesn't know about 'service_account' and don't
  # use it at all.
  if not out['service_account']:
    out.pop('service_account')
  for task_slice in out['task_slices']:
    # The RPC expects env as a list of {'key': ..., 'value': ...} pairs, not
    # a dict; sort for a deterministic request. .items() (not the py2-only
    # .iteritems()) keeps this working on both Python 2 and Python 3.
    task_slice['properties']['env'] = [
      {'key': k, 'value': v}
      for k, v in task_slice['properties']['env'].items()
    ]
    task_slice['properties']['env'].sort(key=lambda x: x['key'])
  return out
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500191
192
def swarming_trigger(swarming, raw_request):
  """Triggers a request on the Swarming server and returns the json data.

  It's the low-level function.

  Returns:
    {
      'request': {
        'created_ts': u'2010-01-02 03:04:05',
        'name': ..
      },
      'task_id': '12300',
    }
  """
  logging.info('Triggering: %s', raw_request['name'])

  url = swarming + '/_ah/api/swarming/v1/tasks/new'
  result = net.url_read_json(url, data=raw_request)
  if not result:
    on_error.report('Failed to trigger task %s' % raw_request['name'])
    return None

  error = result.get('error')
  if error:
    # The reply is an error; assemble a single report out of every detail the
    # server included.
    msg = 'Failed to trigger task %s' % raw_request['name']
    errors = error.get('errors')
    if errors:
      for err in errors:
        if err.get('message'):
          msg += '\nMessage: %s' % err['message']
        if err.get('debugInfo'):
          msg += '\nDebug info:\n%s' % err['debugInfo']
    elif error.get('message'):
      msg += '\nMessage: %s' % error['message']

    on_error.report(msg)
    return None

  return result
229
230
def setup_googletest(env, shards, index):
  """Sets googletest specific environment variables.

  Returns |env| untouched for a single shard; otherwise returns a copy of
  |env| extended with GTEST_SHARD_INDEX and GTEST_TOTAL_SHARDS entries.
  """
  if shards <= 1:
    return env
  existing = frozenset(item['key'] for item in env)
  assert 'GTEST_SHARD_INDEX' not in existing, env
  assert 'GTEST_TOTAL_SHARDS' not in existing, env
  return env + [
    {'key': 'GTEST_SHARD_INDEX', 'value': str(index)},
    {'key': 'GTEST_TOTAL_SHARDS', 'value': str(shards)},
  ]
240
241
def trigger_task_shards(swarming, task_request, shards):
  """Triggers one or many subtasks of a sharded task.

  Args:
    swarming: Swarming server URL.
    task_request: a NewTaskRequest namedtuple describing the task.
    shards: number of client-side shards to trigger; 1 means no sharding.

  Returns:
    Dict with task details, returned to caller as part of --dump-json output.
    None in case of failure.
  """
  def convert(index):
    """
    Args:
      index: The index of the task request.

    Returns:
      raw_request: A swarming compatible JSON dictionary of the request.
      shard_index: The index of the shard, which may be different than the index
      of the task request.
    """
    req = task_request_to_raw_request(task_request)
    shard_index = index
    if shards > 1:
      # Client-side sharding: inject GTEST_* variables into every slice and
      # tag the task name with "<index>:<shards>".
      for task_slice in req['task_slices']:
        task_slice['properties']['env'] = setup_googletest(
            task_slice['properties']['env'], shards, index)
      req['name'] += ':%s:%s' % (index, shards)
    else:
      task_slices = req['task_slices']

      total_shards = None
      # Multiple tasks slices might exist if there are optional "slices", e.g.
      # multiple ways of dispatching the task that should be equivalent. These
      # should be functionally equivalent but we have cannot guarantee that. If
      # we see the GTEST_SHARD_INDEX env var, we assume that it applies to all
      # slices.
      for task_slice in task_slices:
        for env_var in task_slice['properties']['env']:
          if env_var['key'] == 'GTEST_SHARD_INDEX':
            shard_index = int(env_var['value'])
          if env_var['key'] == 'GTEST_TOTAL_SHARDS':
            total_shards = int(env_var['value'])
      if total_shards > 1:
        req['name'] += ':%s:%s' % (shard_index, total_shards)

    return req, shard_index

  requests = [convert(index) for index in xrange(shards)]
  tasks = {}
  priority_warning = False
  for request, shard_index in requests:
    task = swarming_trigger(swarming, request)
    if not task:
      # Triggering failed; fall through to the all-or-nothing check below.
      break
    logging.info('Request result: %s', task)
    # The server may lower the requested priority; warn once so the caller
    # knows the effective priority differs from what was asked.
    if (not priority_warning and
        int(task['request']['priority']) != task_request.priority):
      priority_warning = True
      print >> sys.stderr, (
          'Priority was reset to %s' % task['request']['priority'])
    tasks[request['name']] = {
      'shard_index': shard_index,
      'task_id': task['task_id'],
      'view_url': '%s/user/task/%s' % (swarming, task['task_id']),
    }

  # Some shards weren't triggered. Abort everything.
  if len(tasks) != len(requests):
    if tasks:
      print >> sys.stderr, 'Only %d shard(s) out of %d were triggered' % (
          len(tasks), len(requests))
    for task_dict in tasks.itervalues():
      abort_task(swarming, task_dict['task_id'])
    return None

  return tasks
315
316
317### Collection.
maruel@chromium.org0437a732013-08-27 16:05:52 +0000318
319
# How often to print status updates to stdout in 'collect', in seconds.
STATUS_UPDATE_INTERVAL = 5 * 60.
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700322
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400323
class TaskState(object):
  """Represents the current task state.

  For documentation, see the comments in the swarming_rpcs.TaskState enum,
  which is the source of truth for these values:
  https://cs.chromium.org/chromium/infra/luci/appengine/swarming/swarming_rpcs.py?q=TaskState\(

  It's in fact an enum.
  """
  RUNNING = 0x10
  PENDING = 0x20
  EXPIRED = 0x30
  TIMED_OUT = 0x40
  BOT_DIED = 0x50
  CANCELED = 0x60
  COMPLETED = 0x70
  KILLED = 0x80
  NO_RESOURCE = 0x100

  # State names of tasks that have not completed yet.
  STATES_RUNNING = ('PENDING', 'RUNNING')

  # Maps the server's string representation to the int value above.
  _ENUMS = {
    'RUNNING': RUNNING,
    'PENDING': PENDING,
    'EXPIRED': EXPIRED,
    'TIMED_OUT': TIMED_OUT,
    'BOT_DIED': BOT_DIED,
    'CANCELED': CANCELED,
    'COMPLETED': COMPLETED,
    'KILLED': KILLED,
    'NO_RESOURCE': NO_RESOURCE,
  }

  @classmethod
  def from_enum(cls, state):
    """Returns the int value matching the state name.

    Raises:
      ValueError if |state| is not a known state name.
    """
    try:
      return cls._ENUMS[state]
    except KeyError:
      raise ValueError('Invalid state %s' % state)
363
maruel@chromium.org0437a732013-08-27 16:05:52 +0000364
class TaskOutputCollector(object):
  """Assembles task execution summary (for --task-summary-json output).

  Optionally fetches task outputs from isolate server to local disk (used when
  --task-output-dir is passed).

  This object is shared among multiple threads running 'retrieve_results'
  function, in particular they call 'process_shard_result' method in parallel.
  """

  def __init__(self, task_output_dir, task_output_stdout, shard_count,
      filter_cb):
    """Initializes TaskOutputCollector, ensures |task_output_dir| exists.

    Args:
      task_output_dir: (optional) local directory to put fetched files to.
      task_output_stdout: container of destinations for the task stdout; the
          summary keeps shard 'output'/'outputs' fields only when it contains
          "json" (see finalize()).
      shard_count: expected number of task shards.
      filter_cb: callback forwarded to isolateserver.fetch_isolated to filter
          which output files are fetched.
    """
    self.task_output_dir = (
        unicode(os.path.abspath(task_output_dir))
        if task_output_dir else task_output_dir)
    self.task_output_stdout = task_output_stdout
    self.shard_count = shard_count
    self.filter_cb = filter_cb

    # Guards _per_shard_results and _storage; process_shard_result() runs
    # concurrently on multiple threads.
    self._lock = threading.Lock()
    self._per_shard_results = {}
    self._storage = None

    if self.task_output_dir:
      file_path.ensure_tree(self.task_output_dir)

  def process_shard_result(self, shard_index, result):
    """Stores results of a single task shard, fetches output files if necessary.

    Modifies |result| in place.

    shard_index is 0-based.

    Called concurrently from multiple threads.
    """
    # Sanity check index is in expected range.
    assert isinstance(shard_index, int)
    if shard_index < 0 or shard_index >= self.shard_count:
      logging.warning(
          'Shard index %d is outside of expected range: [0; %d]',
          shard_index, self.shard_count - 1)
      return

    if result.get('outputs_ref'):
      # Decorate the isolated output reference with a browsable URL.
      ref = result['outputs_ref']
      result['outputs_ref']['view_url'] = '%s/browse?%s' % (
          ref['isolatedserver'],
          urllib.urlencode(
              [('namespace', ref['namespace']), ('hash', ref['isolated'])]))

    # Store result dict of that shard, ignore results we've already seen.
    with self._lock:
      if shard_index in self._per_shard_results:
        logging.warning('Ignoring duplicate shard index %d', shard_index)
        return
      self._per_shard_results[shard_index] = result

    # Fetch output files if necessary.
    if self.task_output_dir and result.get('outputs_ref'):
      server_ref = isolate_storage.ServerRef(
          result['outputs_ref']['isolatedserver'],
          result['outputs_ref']['namespace'])
      storage = self._get_storage(server_ref)
      if storage:
        # Output files are supposed to be small and they are not reused across
        # tasks. So use MemoryContentAddressedCache for them instead of on-disk
        # cache. Make files writable, so that calling script can delete them.
        isolateserver.fetch_isolated(
            result['outputs_ref']['isolated'],
            storage,
            local_caching.MemoryContentAddressedCache(file_mode_mask=0700),
            os.path.join(self.task_output_dir, str(shard_index)),
            False, self.filter_cb)

  def finalize(self):
    """Assembles and returns task summary JSON, shutdowns underlying Storage."""
    with self._lock:
      # Write an array of shard results with None for missing shards.
      summary = {
        'shards': [
          self._per_shard_results.get(i) for i in xrange(self.shard_count)
        ],
      }

      # Don't store stdout in the summary if not requested too.
      if "json" not in self.task_output_stdout:
        for shard_json in summary['shards']:
          if not shard_json:
            continue
          if "output" in shard_json:
            del shard_json["output"]
          if "outputs" in shard_json:
            del shard_json["outputs"]

      # Write summary.json to task_output_dir as well.
      if self.task_output_dir:
        tools.write_json(
            os.path.join(self.task_output_dir, u'summary.json'),
            summary,
            False)
      if self._storage:
        self._storage.close()
        self._storage = None
      return summary

  def _get_storage(self, server_ref):
    """Returns isolateserver.Storage to use to fetch files.

    Lazily creates the Storage on first use; returns None if |server_ref|
    disagrees with the server/namespace already in use.
    """
    assert self.task_output_dir
    with self._lock:
      if not self._storage:
        self._storage = isolateserver.get_storage(server_ref)
      else:
        # Shards must all use exact same isolate server and namespace.
        if self._storage.server_ref.url != server_ref.url:
          logging.error(
              'Task shards are using multiple isolate servers: %s and %s',
              self._storage.server_ref.url, server_ref.url)
          return None
        if self._storage.server_ref.namespace != server_ref.namespace:
          logging.error(
              'Task shards are using multiple namespaces: %s and %s',
              self._storage.server_ref.namespace, server_ref.namespace)
          return None
      return self._storage
495
496
def now():
  """Returns the current time as time.time() does.

  Exists as a standalone function so it can be mocked easily in tests.
  """
  return time.time()
500
501
def parse_time(value):
  """Converts serialized time from the API to datetime.datetime."""
  # The API elides the '.123456' suffix when microseconds are 0, so the
  # serialized format is not consistent; try both variants before giving up.
  for pattern in ('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S'):
    try:
      return datetime.datetime.strptime(value, pattern)
    except ValueError:
      continue
  raise ValueError('Failed to parse %s' % value)
512
513
def retrieve_results(
    base_url, shard_index, task_id, timeout, should_stop, output_collector,
    include_perf, fetch_stdout):
  """Retrieves results for a single task ID.

  Args:
    base_url: Swarming server URL.
    shard_index: 0-based shard index, forwarded to |output_collector|.
    task_id: id of the task to poll.
    timeout: polling deadline in seconds; None or 0 means poll forever, -1
        means make a single attempt (50x retries are then delegated to the
        net layer).
    should_stop: Event-like object (is_set()/wait()) aborting the poll loop.
    output_collector: optional TaskOutputCollector receiving the result.
    include_perf: if True, asks the server for performance stats too.
    fetch_stdout: if True, also fetches the task's stdout.

  Returns:
    <result dict> on success.
    None on failure.
  """
  assert timeout is None or isinstance(timeout, float), timeout
  result_url = '%s/_ah/api/swarming/v1/task/%s/result' % (base_url, task_id)
  if include_perf:
    result_url += '?include_performance_stats=true'
  output_url = '%s/_ah/api/swarming/v1/task/%s/stdout' % (base_url, task_id)
  started = now()
  # Note: timeout of None or 0 (and -1) all leave deadline unset.
  deadline = started + timeout if timeout > 0 else None
  attempt = 0

  while not should_stop.is_set():
    attempt += 1

    # Waiting for too long -> give up.
    current_time = now()
    if deadline and current_time >= deadline:
      logging.error('retrieve_results(%s) timed out on attempt %d',
          base_url, attempt)
      return None

    # Do not spin too fast. Spin faster at the beginning though.
    # Start with 1 sec delay and for each 30 sec of waiting add another second
    # of delay, until hitting 15 sec ceiling.
    if attempt > 1:
      max_delay = min(15, 1 + (current_time - started) / 30.0)
      delay = min(max_delay, deadline - current_time) if deadline else max_delay
      if delay > 0:
        logging.debug('Waiting %.1f sec before retrying', delay)
        should_stop.wait(delay)
        if should_stop.is_set():
          return None

    # Disable internal retries in net.url_read_json, since we are doing retries
    # ourselves.
    # TODO(maruel): We'd need to know if it's a 404 and not retry at all.
    # TODO(maruel): Sadly, we currently have to poll here. Use hanging HTTP
    # request on GAE v2.
    # Retry on 500s only if no timeout is specified.
    result = net.url_read_json(result_url, retry_50x=bool(timeout == -1))
    if not result:
      if timeout == -1:
        return None
      continue

    if result.get('error'):
      # An error occurred.
      if result['error'].get('errors'):
        for err in result['error']['errors']:
          logging.warning(
              'Error while reading task: %s; %s',
              err.get('message'), err.get('debugInfo'))
      elif result['error'].get('message'):
        logging.warning(
            'Error while reading task: %s', result['error']['message'])
      if timeout == -1:
        return result
      continue

    # When timeout == -1, always return on first attempt. 500s are already
    # retried in this case.
    if result['state'] not in TaskState.STATES_RUNNING or timeout == -1:
      if fetch_stdout:
        out = net.url_read_json(output_url)
        result['output'] = out.get('output', '') if out else ''
      # Record the result, try to fetch attached output files (if any).
      if output_collector:
        # TODO(vadimsh): Respect |should_stop| and |deadline| when fetching.
        output_collector.process_shard_result(shard_index, result)
      if result.get('internal_failure'):
        logging.error('Internal error!')
      elif result['state'] == 'BOT_DIED':
        logging.error('Bot died!')
      return result
maruel@chromium.org0437a732013-08-27 16:05:52 +0000595
596
def yield_results(
    swarm_base_url, task_ids, timeout, max_threads, print_status_updates,
    output_collector, include_perf, fetch_stdout):
  """Yields swarming task results from the swarming server as (index, result).

  Duplicate shards are ignored. Shards are yielded in order of completion.
  Timed out shards are NOT yielded at all. Caller can compare number of yielded
  shards with len(task_keys) to verify all shards completed.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of task_keys is in the range <=10, it's not
  worth normally to limit the number threads. Mostly used for testing purposes.

  output_collector is an optional instance of TaskOutputCollector that will be
  used to fetch files produced by a task from isolate server to the local disk.

  Yields:
    (index, result). In particular, 'result' is defined as the
    GetRunnerResults() function in services/swarming/server/test_runner.py.
  """
  number_threads = (
      min(max_threads, len(task_ids)) if max_threads else len(task_ids))
  should_stop = threading.Event()
  results_channel = threading_utils.TaskChannel()

  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      # Adds a task to the thread pool to call 'retrieve_results' and return
      # the results together with shard_index that produced them (as a tuple).
      def enqueue_retrieve_results(shard_index, task_id):
        # pylint: disable=no-value-for-parameter
        task_fn = lambda *args: (shard_index, retrieve_results(*args))
        pool.add_task(
            0, results_channel.wrap_task(task_fn), swarm_base_url, shard_index,
            task_id, timeout, should_stop, output_collector, include_perf,
            fetch_stdout)

      # Enqueue 'retrieve_results' calls for each shard key to run in parallel.
      for shard_index, task_id in enumerate(task_ids):
        enqueue_retrieve_results(shard_index, task_id)

      # Wait for all of them to finish.
      # NOTE: relies on Python 2 range() returning a list (list.remove below).
      shards_remaining = range(len(task_ids))
      active_task_count = len(task_ids)
      while active_task_count:
        shard_index, result = None, None
        try:
          shard_index, result = results_channel.next(
              timeout=STATUS_UPDATE_INTERVAL)
        except threading_utils.TaskChannel.Timeout:
          # No shard completed within the interval; optionally log progress
          # and keep waiting.
          if print_status_updates:
            time_now = str(datetime.datetime.now())
            _, time_now = time_now.split(' ')
            print(
              '%s '
              'Waiting for results from the following shards: %s' %
              (time_now, ', '.join(map(str, shards_remaining)))
            )
            sys.stdout.flush()
          continue
        except Exception:
          logging.exception('Unexpected exception in retrieve_results')

        # A call to 'retrieve_results' finished (successfully or not).
        active_task_count -= 1
        if not result:
          logging.error('Failed to retrieve the results for a swarming key')
          continue

        # Yield back results to the caller.
        assert shard_index in shards_remaining
        shards_remaining.remove(shard_index)
        yield shard_index, result

    finally:
      # Done or aborted with Ctrl+C, kill the remaining threads.
      should_stop.set()
674
675
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000676def decorate_shard_output(swarming, shard_index, metadata, include_stdout):
maruel@chromium.org0437a732013-08-27 16:05:52 +0000677 """Returns wrapped output for swarming task shard."""
maruel77f720b2015-09-15 12:35:22 -0700678 if metadata.get('started_ts') and not metadata.get('deduped_from'):
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400679 pending = '%.1fs' % (
maruel77f720b2015-09-15 12:35:22 -0700680 parse_time(metadata['started_ts']) - parse_time(metadata['created_ts'])
681 ).total_seconds()
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400682 elif (metadata.get('state') in ('BOT_DIED', 'CANCELED', 'EXPIRED') and
683 metadata.get('abandoned_ts')):
684 pending = '%.1fs' % (
685 parse_time(metadata['abandoned_ts']) -
686 parse_time(metadata['created_ts'])
687 ).total_seconds()
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400688 else:
689 pending = 'N/A'
690
maruel77f720b2015-09-15 12:35:22 -0700691 if metadata.get('duration') is not None:
692 duration = '%.1fs' % metadata['duration']
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400693 else:
694 duration = 'N/A'
695
maruel77f720b2015-09-15 12:35:22 -0700696 if metadata.get('exit_code') is not None:
697 # Integers are encoded as string to not loose precision.
698 exit_code = '%s' % metadata['exit_code']
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400699 else:
700 exit_code = 'N/A'
701
702 bot_id = metadata.get('bot_id') or 'N/A'
703
maruel77f720b2015-09-15 12:35:22 -0700704 url = '%s/user/task/%s' % (swarming, metadata['task_id'])
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400705 tag_header = 'Shard %d %s' % (shard_index, url)
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000706 tag_footer1 = 'End of shard %d' % (shard_index)
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400707 if metadata.get('state') == 'CANCELED':
708 tag_footer2 = ' Pending: %s CANCELED' % pending
709 elif metadata.get('state') == 'EXPIRED':
710 tag_footer2 = ' Pending: %s EXPIRED (lack of capacity)' % pending
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -0400711 elif metadata.get('state') in ('BOT_DIED', 'TIMED_OUT', 'KILLED'):
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400712 tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s %s' % (
713 pending, duration, bot_id, exit_code, metadata['state'])
714 else:
715 tag_footer2 = ' Pending: %s Duration: %s Bot: %s Exit: %s' % (
716 pending, duration, bot_id, exit_code)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400717
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000718 tag_len = max(len(x) for x in [tag_header, tag_footer1, tag_footer2])
719 dash_pad = '+-%s-+' % ('-' * tag_len)
720 tag_header = '| %s |' % tag_header.ljust(tag_len)
721 tag_footer1 = '| %s |' % tag_footer1.ljust(tag_len)
722 tag_footer2 = '| %s |' % tag_footer2.ljust(tag_len)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400723
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000724 if include_stdout:
725 return '\n'.join([
726 dash_pad,
727 tag_header,
728 dash_pad,
Marc-Antoine Ruel3f9931a2017-11-03 14:34:49 -0400729 (metadata.get('output') or '').rstrip(),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000730 dash_pad,
731 tag_footer1,
732 tag_footer2,
733 dash_pad,
734 ])
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +0000735 return '\n'.join([
736 dash_pad,
737 tag_header,
738 tag_footer2,
739 dash_pad,
740 ])
maruel@chromium.org0437a732013-08-27 16:05:52 +0000741
742
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700743def collect(
maruel0eb1d1b2015-10-02 14:48:21 -0700744 swarming, task_ids, timeout, decorate, print_status_updates,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000745 task_summary_json, task_output_dir, task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000746 include_perf, filepath_filter):
maruela5490782015-09-30 10:56:59 -0700747 """Retrieves results of a Swarming task.
748
749 Returns:
750 process exit code that should be returned to the user.
751 """
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000752
753 filter_cb = None
754 if filepath_filter:
755 filter_cb = re.compile(filepath_filter).match
756
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700757 # Collect summary JSON and output files (if task_output_dir is not None).
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000758 output_collector = TaskOutputCollector(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +0000759 task_output_dir, task_output_stdout, len(task_ids), filter_cb)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700760
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700761 seen_shards = set()
maruela5490782015-09-30 10:56:59 -0700762 exit_code = None
Marc-Antoine Rueld59e8072014-10-21 18:54:45 -0400763 total_duration = 0
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700764 try:
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400765 for index, metadata in yield_results(
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400766 swarming, task_ids, timeout, None, print_status_updates,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000767 output_collector, include_perf,
768 (len(task_output_stdout) > 0),
769 ):
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700770 seen_shards.add(index)
Vadim Shtayura473455a2014-05-14 15:22:35 -0700771
Marc-Antoine Ruel5e6ccdb2015-04-02 15:55:13 -0400772 # Default to failure if there was no process that even started.
maruel77f720b2015-09-15 12:35:22 -0700773 shard_exit_code = metadata.get('exit_code')
774 if shard_exit_code:
maruela5490782015-09-30 10:56:59 -0700775 # It's encoded as a string, so bool('0') is True.
maruel77f720b2015-09-15 12:35:22 -0700776 shard_exit_code = int(shard_exit_code)
maruela5490782015-09-30 10:56:59 -0700777 if shard_exit_code or exit_code is None:
Marc-Antoine Ruel4e6b73d2014-10-03 18:00:05 -0400778 exit_code = shard_exit_code
maruel77f720b2015-09-15 12:35:22 -0700779 total_duration += metadata.get('duration', 0)
Vadim Shtayura473455a2014-05-14 15:22:35 -0700780
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700781 if decorate:
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000782 s = decorate_shard_output(
783 swarming, index, metadata,
784 "console" in task_output_stdout).encode(
785 'utf-8', 'replace')
leileied181762016-10-13 14:24:59 -0700786 print(s)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400787 if len(seen_shards) < len(task_ids):
788 print('')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700789 else:
maruel77f720b2015-09-15 12:35:22 -0700790 print('%s: %s %s' % (
791 metadata.get('bot_id', 'N/A'),
792 metadata['task_id'],
793 shard_exit_code))
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +1000794 if "console" in task_output_stdout and metadata['output']:
maruel77f720b2015-09-15 12:35:22 -0700795 output = metadata['output'].rstrip()
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400796 if output:
797 print(''.join(' %s\n' % l for l in output.splitlines()))
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700798 finally:
Vadim Shtayurac8437bf2014-07-09 19:45:36 -0700799 summary = output_collector.finalize()
800 if task_summary_json:
801 tools.write_json(task_summary_json, summary, False)
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700802
Marc-Antoine Rueld59e8072014-10-21 18:54:45 -0400803 if decorate and total_duration:
804 print('Total duration: %.1fs' % total_duration)
805
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -0400806 if len(seen_shards) != len(task_ids):
807 missing_shards = [x for x in range(len(task_ids)) if x not in seen_shards]
Vadim Shtayura86a2cef2014-04-18 11:13:39 -0700808 print >> sys.stderr, ('Results from some shards are missing: %s' %
809 ', '.join(map(str, missing_shards)))
Vadim Shtayurac524f512014-05-15 09:54:56 -0700810 return 1
Vadim Shtayurae3fbd102014-04-29 17:05:21 -0700811
maruela5490782015-09-30 10:56:59 -0700812 return exit_code if exit_code is not None else 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000813
814
maruel77f720b2015-09-15 12:35:22 -0700815### API management.
816
817
class APIError(Exception):
  """Raised when a Cloud Endpoints API discovery request returns no data."""
  pass
820
821
def endpoints_api_discovery_apis(host):
  """Queries Cloud Endpoints' API Discovery Service and returns the metadata of
  every API exposed by a host, keyed by API id.

  https://developers.google.com/discovery/v1/reference/apis/list
  """
  # Uses the real Cloud Endpoints. This needs to be fixed once the Cloud
  # Endpoints version is turned down.
  listing = net.url_read_json(host + '/_ah/api/discovery/v1/apis')
  if listing is None:
    raise APIError('Failed to discover APIs on %s' % host)
  apis = {}
  for entry in listing['items']:
    api_id = entry['id']
    # The discovery service lists itself; skip it.
    if api_id == 'discovery:v1':
      continue
    # 'discoveryRestUrl' is of the following form:
    # host + '/_ah/api/discovery/v1/apis/%s/%s/rest' % (id, version)
    details = net.url_read_json(entry['discoveryRestUrl'])
    if details is None:
      raise APIError('Failed to discover %s on %s' % (api_id, host))
    apis[api_id] = details
  return apis
845
846
maruelaf6b06c2017-06-08 06:26:53 -0700847def get_yielder(base_url, limit):
848 """Returns the first query and a function that yields following items."""
849 CHUNK_SIZE = 250
850
851 url = base_url
852 if limit:
853 url += '%slimit=%d' % ('&' if '?' in url else '?', min(CHUNK_SIZE, limit))
854 data = net.url_read_json(url)
855 if data is None:
856 # TODO(maruel): Do basic diagnostic.
857 raise Failure('Failed to access %s' % url)
858 org_cursor = data.pop('cursor', None)
859 org_total = len(data.get('items') or [])
860 logging.info('get_yielder(%s) returning %d items', base_url, org_total)
861 if not org_cursor or not org_total:
862 # This is not an iterable resource.
863 return data, lambda: []
864
865 def yielder():
866 cursor = org_cursor
867 total = org_total
868 # Some items support cursors. Try to get automatically if cursors are needed
869 # by looking at the 'cursor' items.
870 while cursor and (not limit or total < limit):
871 merge_char = '&' if '?' in base_url else '?'
872 url = base_url + '%scursor=%s' % (merge_char, urllib.quote(cursor))
873 if limit:
874 url += '&limit=%d' % min(CHUNK_SIZE, limit - total)
875 new = net.url_read_json(url)
876 if new is None:
877 raise Failure('Failed to access %s' % url)
878 cursor = new.get('cursor')
879 new_items = new.get('items')
880 nb_items = len(new_items or [])
881 total += nb_items
882 logging.info('get_yielder(%s) yielding %d items', base_url, nb_items)
883 yield new_items
884
885 return data, yielder
886
887
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500888### Commands.
889
890
def abort_task(_swarming, _manifest):
  """Given a task manifest that was triggered, aborts its execution."""
  # TODO(vadimsh): Not supported by the server yet; intentionally a no-op.
894
895
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400896def add_filter_options(parser):
maruel681d6802017-01-17 16:56:03 -0800897 parser.filter_group = optparse.OptionGroup(parser, 'Bot selection')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500898 parser.filter_group.add_option(
Marc-Antoine Ruelb39e8cf2014-01-20 10:39:31 -0500899 '-d', '--dimension', default=[], action='append', nargs=2,
Marc-Antoine Ruel92f32422013-11-06 18:12:13 -0500900 dest='dimensions', metavar='FOO bar',
901 help='dimension to filter on')
Brad Hallf78187a2018-10-19 17:08:55 +0000902 parser.filter_group.add_option(
903 '--optional-dimension', default=[], action='append', nargs=3,
904 dest='optional_dimensions', metavar='key value expiration',
905 help='optional dimensions which will result in additional task slices ')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500906 parser.add_option_group(parser.filter_group)
907
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400908
Brad Hallf78187a2018-10-19 17:08:55 +0000909def _validate_filter_option(parser, key, value, expiration, argname):
910 if ':' in key:
911 parser.error('%s key cannot contain ":"' % argname)
912 if key.strip() != key:
913 parser.error('%s key has whitespace' % argname)
914 if not key:
915 parser.error('%s key is empty' % argname)
916
917 if value.strip() != value:
918 parser.error('%s value has whitespace' % argname)
919 if not value:
920 parser.error('%s value is empty' % argname)
921
922 if expiration is not None:
923 try:
924 expiration = int(expiration)
925 except ValueError:
926 parser.error('%s expiration is not an integer' % argname)
927 if expiration <= 0:
928 parser.error('%s expiration should be positive' % argname)
929 if expiration % 60 != 0:
930 parser.error('%s expiration is not divisible by 60' % argname)
931
932
maruelaf6b06c2017-06-08 06:26:53 -0700933def process_filter_options(parser, options):
934 for key, value in options.dimensions:
Brad Hallf78187a2018-10-19 17:08:55 +0000935 _validate_filter_option(parser, key, value, None, 'dimension')
936 for key, value, exp in options.optional_dimensions:
937 _validate_filter_option(parser, key, value, exp, 'optional-dimension')
maruelaf6b06c2017-06-08 06:26:53 -0700938 options.dimensions.sort()
939
940
Vadim Shtayurab450c602014-05-12 19:23:25 -0700941def add_sharding_options(parser):
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -0400942 parser.sharding_group = optparse.OptionGroup(parser, 'Sharding options')
Vadim Shtayurab450c602014-05-12 19:23:25 -0700943 parser.sharding_group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700944 '--shards', type='int', default=1, metavar='NUMBER',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700945 help='Number of shards to trigger and collect.')
946 parser.add_option_group(parser.sharding_group)
947
948
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400949def add_trigger_options(parser):
950 """Adds all options to trigger a task on Swarming."""
Marc-Antoine Ruelf7d737d2014-12-10 15:36:29 -0500951 isolateserver.add_isolate_server_options(parser)
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -0400952 add_filter_options(parser)
953
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -0400954 group = optparse.OptionGroup(parser, 'TaskSlice properties')
maruel681d6802017-01-17 16:56:03 -0800955 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700956 '-s', '--isolated', metavar='HASH',
Marc-Antoine Ruel185ded42015-01-28 20:49:18 -0500957 help='Hash of the .isolated to grab from the isolate server')
maruel681d6802017-01-17 16:56:03 -0800958 group.add_option(
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -0500959 '-e', '--env', default=[], action='append', nargs=2, metavar='FOO bar',
Vadim Shtayurab450c602014-05-12 19:23:25 -0700960 help='Environment variables to set')
maruel681d6802017-01-17 16:56:03 -0800961 group.add_option(
Robert Iannuccibf5f84c2017-11-22 12:56:50 -0800962 '--env-prefix', default=[], action='append', nargs=2,
963 metavar='VAR local/path',
964 help='Prepend task-relative `local/path` to the task\'s VAR environment '
965 'variable using os-appropriate pathsep character. Can be specified '
966 'multiple times for the same VAR to add multiple paths.')
967 group.add_option(
Marc-Antoine Ruel02196392014-10-17 16:29:43 -0400968 '--idempotent', action='store_true', default=False,
969 help='When set, the server will actively try to find a previous task '
970 'with the same parameter and return this result instead if possible')
maruel681d6802017-01-17 16:56:03 -0800971 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700972 '--secret-bytes-path', metavar='FILE',
Stephen Martinisf391c772019-02-01 01:22:12 +0000973 help='The optional path to a file containing the secret_bytes to use '
974 'with this task.')
maruel681d6802017-01-17 16:56:03 -0800975 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700976 '--hard-timeout', type='int', default=60*60, metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400977 help='Seconds to allow the task to complete.')
maruel681d6802017-01-17 16:56:03 -0800978 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700979 '--io-timeout', type='int', default=20*60, metavar='SECS',
Marc-Antoine Ruel2f6581a2014-10-03 11:09:53 -0400980 help='Seconds to allow the task to be silent.')
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +0000981 parser.add_option(
982 '--lower-priority', action='store_true',
983 help='Lowers the child process priority')
maruel681d6802017-01-17 16:56:03 -0800984 group.add_option(
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -0500985 '--raw-cmd', action='store_true', default=False,
986 help='When set, the command after -- is used as-is without run_isolated. '
maruel0a25f6c2017-05-10 10:43:23 -0700987 'In this case, the .isolated file is expected to not have a command')
maruel681d6802017-01-17 16:56:03 -0800988 group.add_option(
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -0500989 '--relative-cwd',
990 help='Ignore the isolated \'relative_cwd\' and use this one instead; '
991 'requires --raw-cmd')
992 group.add_option(
maruel5475ba62017-05-31 15:35:47 -0700993 '--cipd-package', action='append', default=[], metavar='PKG',
994 help='CIPD packages to install on the Swarming bot. Uses the format: '
borenet02f772b2016-06-22 12:42:19 -0700995 'path:package_name:version')
maruel681d6802017-01-17 16:56:03 -0800996 group.add_option(
997 '--named-cache', action='append', nargs=2, default=[],
maruel5475ba62017-05-31 15:35:47 -0700998 metavar='NAME RELPATH',
maruel681d6802017-01-17 16:56:03 -0800999 help='"<name> <relpath>" items to keep a persistent bot managed cache')
1000 group.add_option(
vadimsh93d167c2016-09-13 11:31:51 -07001001 '--service-account',
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001002 help='Email of a service account to run the task as, or literal "bot" '
1003 'string to indicate that the task should use the same account the '
1004 'bot itself is using to authenticate to Swarming. Don\'t use task '
1005 'service accounts if not given (default).')
maruel681d6802017-01-17 16:56:03 -08001006 group.add_option(
Robert Iannuccifafa7352018-06-13 17:08:17 +00001007 '--pool-task-template',
1008 choices=('AUTO', 'CANARY_PREFER', 'CANARY_NEVER', 'SKIP'),
1009 default='AUTO',
1010 help='Set how you want swarming to apply the pool\'s TaskTemplate. '
1011 'By default, the pool\'s TaskTemplate is automatically selected, '
1012 'according the pool configuration on the server. Choices are: '
1013 'AUTO, CANARY_PREFER, CANARY_NEVER, and SKIP (default: AUTO).')
1014 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001015 '-o', '--output', action='append', default=[], metavar='PATH',
1016 help='A list of files to return in addition to those written to '
1017 '${ISOLATED_OUTDIR}. An error will occur if a file specified by'
1018 'this option is also written directly to ${ISOLATED_OUTDIR}.')
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001019 group.add_option(
1020 '--wait-for-capacity', action='store_true', default=False,
1021 help='Instructs to leave the task PENDING even if there\'s no known bot '
1022 'that could run this task, otherwise the task will be denied with '
1023 'NO_RESOURCE')
maruel681d6802017-01-17 16:56:03 -08001024 parser.add_option_group(group)
1025
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001026 group = optparse.OptionGroup(parser, 'TaskRequest details')
maruel681d6802017-01-17 16:56:03 -08001027 group.add_option(
Marc-Antoine Ruel486c9b52018-07-23 19:30:47 +00001028 '--priority', type='int', default=200,
maruel681d6802017-01-17 16:56:03 -08001029 help='The lower value, the more important the task is')
1030 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001031 '-T', '--task-name', metavar='NAME',
maruel681d6802017-01-17 16:56:03 -08001032 help='Display name of the task. Defaults to '
1033 '<base_name>/<dimensions>/<isolated hash>/<timestamp> if an '
1034 'isolated file is provided, if a hash is provided, it defaults to '
1035 '<user>/<dimensions>/<isolated hash>/<timestamp>')
1036 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001037 '--tags', action='append', default=[], metavar='FOO:BAR',
maruel681d6802017-01-17 16:56:03 -08001038 help='Tags to assign to the task.')
1039 group.add_option(
1040 '--user', default='',
1041 help='User associated with the task. Defaults to authenticated user on '
1042 'the server.')
1043 group.add_option(
maruel5475ba62017-05-31 15:35:47 -07001044 '--expiration', type='int', default=6*60*60, metavar='SECS',
maruel681d6802017-01-17 16:56:03 -08001045 help='Seconds to allow the task to be pending for a bot to run before '
1046 'this task request expires.')
1047 group.add_option(
1048 '--deadline', type='int', dest='expiration',
1049 help=optparse.SUPPRESS_HELP)
1050 parser.add_option_group(group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001051
1052
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001053def process_trigger_options(parser, options, args):
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001054 """Processes trigger options and does preparatory steps.
1055
1056 Returns:
1057 NewTaskRequest instance.
1058 """
maruelaf6b06c2017-06-08 06:26:53 -07001059 process_filter_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001060 options.env = dict(options.env)
maruel0a25f6c2017-05-10 10:43:23 -07001061 if args and args[0] == '--':
1062 args = args[1:]
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001063
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001064 if not options.dimensions:
1065 parser.error('Please at least specify one --dimension')
Marc-Antoine Ruel33d198c2018-11-27 21:12:16 +00001066 if not any(k == 'pool' for k, _v in options.dimensions):
1067 parser.error('You must specify --dimension pool <value>')
maruel0a25f6c2017-05-10 10:43:23 -07001068 if not all(len(t.split(':', 1)) == 2 for t in options.tags):
1069 parser.error('--tags must be in the format key:value')
1070 if options.raw_cmd and not args:
1071 parser.error(
1072 'Arguments with --raw-cmd should be passed after -- as command '
1073 'delimiter.')
1074 if options.isolate_server and not options.namespace:
1075 parser.error(
1076 '--namespace must be a valid value when --isolate-server is used')
1077 if not options.isolated and not options.raw_cmd:
1078 parser.error('Specify at least one of --raw-cmd or --isolated or both')
1079
1080 # Isolated
1081 # --isolated is required only if --raw-cmd wasn't provided.
1082 # TODO(maruel): --isolate-server may be optional as Swarming may have its own
1083 # preferred server.
1084 isolateserver.process_isolate_server_options(
1085 parser, options, False, not options.raw_cmd)
1086 inputs_ref = None
1087 if options.isolate_server:
1088 inputs_ref = FilesRef(
1089 isolated=options.isolated,
1090 isolatedserver=options.isolate_server,
1091 namespace=options.namespace)
1092
1093 # Command
1094 command = None
1095 extra_args = None
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001096 if options.raw_cmd:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001097 command = args
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001098 if options.relative_cwd:
1099 a = os.path.normpath(os.path.abspath(options.relative_cwd))
1100 if not a.startswith(os.getcwd()):
1101 parser.error(
1102 '--relative-cwd must not try to escape the working directory')
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001103 else:
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001104 if options.relative_cwd:
1105 parser.error('--relative-cwd requires --raw-cmd')
maruel0a25f6c2017-05-10 10:43:23 -07001106 extra_args = args
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001107
maruel0a25f6c2017-05-10 10:43:23 -07001108 # CIPD
borenet02f772b2016-06-22 12:42:19 -07001109 cipd_packages = []
1110 for p in options.cipd_package:
1111 split = p.split(':', 2)
1112 if len(split) != 3:
1113 parser.error('CIPD packages must take the form: path:package:version')
1114 cipd_packages.append(CipdPackage(
1115 package_name=split[1],
1116 path=split[0],
1117 version=split[2]))
1118 cipd_input = None
1119 if cipd_packages:
1120 cipd_input = CipdInput(
1121 client_package=None,
1122 packages=cipd_packages,
1123 server=None)
1124
maruel0a25f6c2017-05-10 10:43:23 -07001125 # Secrets
iannuccidc80dfb2016-10-28 12:50:20 -07001126 secret_bytes = None
1127 if options.secret_bytes_path:
Marc-Antoine Ruel5c98fa72018-05-18 12:19:59 -04001128 with open(options.secret_bytes_path, 'rb') as f:
iannuccidc80dfb2016-10-28 12:50:20 -07001129 secret_bytes = f.read().encode('base64')
1130
maruel0a25f6c2017-05-10 10:43:23 -07001131 # Named caches
maruel681d6802017-01-17 16:56:03 -08001132 caches = [
1133 {u'name': unicode(i[0]), u'path': unicode(i[1])}
1134 for i in options.named_cache
1135 ]
maruel0a25f6c2017-05-10 10:43:23 -07001136
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001137 env_prefixes = {}
1138 for k, v in options.env_prefix:
1139 env_prefixes.setdefault(k, []).append(v)
1140
Brad Hallf78187a2018-10-19 17:08:55 +00001141 # Get dimensions into the key/value format we can manipulate later.
1142 orig_dims = [
1143 {'key': key, 'value': value} for key, value in options.dimensions]
1144 orig_dims.sort(key=lambda x: (x['key'], x['value']))
1145
1146 # Construct base properties that we will use for all the slices, adding in
1147 # optional dimensions for the fallback slices.
maruel77f720b2015-09-15 12:35:22 -07001148 properties = TaskProperties(
maruel681d6802017-01-17 16:56:03 -08001149 caches=caches,
borenet02f772b2016-06-22 12:42:19 -07001150 cipd_input=cipd_input,
maruel0a25f6c2017-05-10 10:43:23 -07001151 command=command,
Marc-Antoine Ruel89669dc2019-05-01 14:01:08 +00001152 containment=Containment(
1153 lower_priority=bool(options.lower_priority),
1154 ),
Marc-Antoine Ruelba1bf222017-12-21 21:41:01 -05001155 relative_cwd=options.relative_cwd,
Brad Hallf78187a2018-10-19 17:08:55 +00001156 dimensions=orig_dims,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001157 env=options.env,
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001158 env_prefixes=[StringListPair(k, v) for k, v in env_prefixes.iteritems()],
maruel77f720b2015-09-15 12:35:22 -07001159 execution_timeout_secs=options.hard_timeout,
maruel0a25f6c2017-05-10 10:43:23 -07001160 extra_args=extra_args,
maruel77f720b2015-09-15 12:35:22 -07001161 grace_period_secs=30,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001162 idempotent=options.idempotent,
maruel77f720b2015-09-15 12:35:22 -07001163 inputs_ref=inputs_ref,
aludwincc5524e2016-10-28 10:25:24 -07001164 io_timeout_secs=options.io_timeout,
iannuccidc80dfb2016-10-28 12:50:20 -07001165 outputs=options.output,
1166 secret_bytes=secret_bytes)
Brad Hallf78187a2018-10-19 17:08:55 +00001167
1168 slices = []
1169
1170 # Group the optional dimensions by expiration.
1171 dims_by_exp = {}
1172 for key, value, exp_secs in options.optional_dimensions:
1173 dims_by_exp.setdefault(int(exp_secs), []).append(
1174 {'key': key, 'value': value})
1175
1176 # Create the optional slices with expiration deltas, we fix up the properties
1177 # below.
1178 last_exp = 0
1179 for expiration_secs in sorted(dims_by_exp):
1180 t = TaskSlice(
1181 expiration_secs=expiration_secs - last_exp,
1182 properties=properties,
1183 wait_for_capacity=False)
1184 slices.append(t)
1185 last_exp = expiration_secs
1186
1187 # Add back in the default slice (the last one).
1188 exp = max(int(options.expiration) - last_exp, 60)
1189 base_task_slice = TaskSlice(
1190 expiration_secs=exp,
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001191 properties=properties,
1192 wait_for_capacity=options.wait_for_capacity)
Brad Hallf78187a2018-10-19 17:08:55 +00001193 slices.append(base_task_slice)
1194
Brad Hall7f463e62018-11-16 16:13:30 +00001195 # Add optional dimensions to the task slices, replacing a dimension that
1196 # has the same key if it is a dimension where repeating isn't valid (otherwise
1197 # we append it). Currently the only dimension we can repeat is "caches"; the
1198 # rest (os, cpu, etc) shouldn't be repeated.
Brad Hallf78187a2018-10-19 17:08:55 +00001199 extra_dims = []
Brad Hall7f463e62018-11-16 16:13:30 +00001200 for i, (_, kvs) in enumerate(sorted(dims_by_exp.iteritems(), reverse=True)):
Brad Hallf78187a2018-10-19 17:08:55 +00001201 dims = list(orig_dims)
Brad Hall7f463e62018-11-16 16:13:30 +00001202 # Replace or append the key/value pairs for this expiration in extra_dims;
1203 # we keep extra_dims around because we are iterating backwards and filling
1204 # in slices with shorter expirations. Dimensions expire as time goes on so
1205 # the slices that expire earlier will generally have more dimensions.
1206 for kv in kvs:
1207 if kv['key'] == 'caches':
1208 extra_dims.append(kv)
1209 else:
1210 extra_dims = [x for x in extra_dims if x['key'] != kv['key']] + [kv]
1211 # Then, add all the optional dimensions to the original dimension set, again
1212 # replacing if needed.
1213 for kv in extra_dims:
1214 if kv['key'] == 'caches':
1215 dims.append(kv)
1216 else:
1217 dims = [x for x in dims if x['key'] != kv['key']] + [kv]
Brad Hallf78187a2018-10-19 17:08:55 +00001218 dims.sort(key=lambda x: (x['key'], x['value']))
1219 slice_properties = properties._replace(dimensions=dims)
1220 slices[-2 - i] = slices[-2 - i]._replace(properties=slice_properties)
1221
maruel77f720b2015-09-15 12:35:22 -07001222 return NewTaskRequest(
maruel0a25f6c2017-05-10 10:43:23 -07001223 name=default_task_name(options),
maruel77f720b2015-09-15 12:35:22 -07001224 parent_task_id=os.environ.get('SWARMING_TASK_ID', ''),
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001225 priority=options.priority,
Brad Hallf78187a2018-10-19 17:08:55 +00001226 task_slices=slices,
Vadim Shtayura2d83a942017-08-14 17:41:24 -07001227 service_account=options.service_account,
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001228 tags=options.tags,
Robert Iannuccifafa7352018-06-13 17:08:17 +00001229 user=options.user,
1230 pool_task_template=options.pool_task_template)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001231
1232
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001233class TaskOutputStdoutOption(optparse.Option):
1234 """Where to output the each task's console output (stderr/stdout).
1235
1236 The output will be;
1237 none - not be downloaded.
1238 json - stored in summary.json file *only*.
1239 console - shown on stdout *only*.
1240 all - stored in summary.json and shown on stdout.
1241 """
1242
1243 choices = ['all', 'json', 'console', 'none']
1244
1245 def __init__(self, *args, **kw):
1246 optparse.Option.__init__(
1247 self,
1248 *args,
1249 choices=self.choices,
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001250 default=['console', 'json'],
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001251 help=re.sub(r'\s\s*', ' ', self.__doc__),
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001252 **kw)
1253
1254 def convert_value(self, opt, value):
1255 if value not in self.choices:
1256 raise optparse.OptionValueError("%s must be one of %s not %r" % (
1257 self.get_opt_string(), self.choices, value))
1258 stdout_to = []
1259 if value == 'all':
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001260 stdout_to = ['console', 'json']
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001261 elif value != 'none':
1262 stdout_to = [value]
1263 return stdout_to
1264
1265
maruel@chromium.org0437a732013-08-27 16:05:52 +00001266def add_collect_options(parser):
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001267 parser.server_group.add_option(
Marc-Antoine Ruele831f052018-04-20 15:01:03 -04001268 '-t', '--timeout', type='float', default=0.,
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001269 help='Timeout to wait for result, set to -1 for no timeout and get '
1270 'current state; defaults to waiting until the task completes')
Marc-Antoine Ruel5471e3d2013-11-11 19:10:32 -05001271 parser.group_logging.add_option(
1272 '--decorate', action='store_true', help='Decorate output')
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001273 parser.group_logging.add_option(
1274 '--print-status-updates', action='store_true',
1275 help='Print periodic status updates')
Marc-Antoine Ruelf74cffe2015-07-15 15:21:34 -04001276 parser.task_output_group = optparse.OptionGroup(parser, 'Task output')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001277 parser.task_output_group.add_option(
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001278 '--task-summary-json',
1279 metavar='FILE',
1280 help='Dump a summary of task results to this file as json. It contains '
1281 'only shards statuses as know to server directly. Any output files '
1282 'emitted by the task can be collected by using --task-output-dir')
1283 parser.task_output_group.add_option(
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001284 '--task-output-dir',
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001285 metavar='DIR',
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001286 help='Directory to put task results into. When the task finishes, this '
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001287 'directory contains per-shard directory with output files produced '
1288 'by shards: <task-output-dir>/<zero-based-shard-index>/.')
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001289 parser.task_output_group.add_option(TaskOutputStdoutOption(
Marc-Antoine Ruel28488842017-09-12 18:09:17 -04001290 '--task-output-stdout'))
maruel9531ce02016-04-13 06:11:23 -07001291 parser.task_output_group.add_option(
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001292 '--filepath-filter',
1293 help='This is regexp filter used to specify downloaded filepath when '
1294 'collecting isolated output.')
1295 parser.task_output_group.add_option(
maruel9531ce02016-04-13 06:11:23 -07001296 '--perf', action='store_true', default=False,
1297 help='Includes performance statistics')
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001298 parser.add_option_group(parser.task_output_group)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001299
1300
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001301def process_collect_options(parser, options):
1302 # Only negative -1 is allowed, disallow other negative values.
1303 if options.timeout != -1 and options.timeout < 0:
1304 parser.error('Invalid --timeout value')
1305
1306
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001307@subcommand.usage('bots...')
1308def CMDbot_delete(parser, args):
1309 """Forcibly deletes bots from the Swarming server."""
1310 parser.add_option(
1311 '-f', '--force', action='store_true',
1312 help='Do not prompt for confirmation')
1313 options, args = parser.parse_args(args)
1314 if not args:
maruelfd0a90c2016-06-10 11:51:10 -07001315 parser.error('Please specify bots to delete')
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001316
1317 bots = sorted(args)
1318 if not options.force:
1319 print('Delete the following bots?')
1320 for bot in bots:
1321 print(' %s' % bot)
1322 if raw_input('Continue? [y/N] ') not in ('y', 'Y'):
1323 print('Goodbye.')
1324 return 1
1325
1326 result = 0
1327 for bot in bots:
smut281c3902018-05-30 17:50:05 -07001328 url = '%s/_ah/api/swarming/v1/bot/%s/delete' % (options.swarming, bot)
vadimshe4c0e242015-09-30 11:53:54 -07001329 if net.url_read_json(url, data={}, method='POST') is None:
1330 print('Deleting %s failed. Probably already gone' % bot)
Marc-Antoine Ruel13e7c882015-03-26 18:19:10 -04001331 result = 1
1332 return result
1333
1334
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001335def CMDbots(parser, args):
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001336 """Returns information about the bots connected to the Swarming server."""
1337 add_filter_options(parser)
1338 parser.filter_group.add_option(
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001339 '--dead-only', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001340 help='Filter out bots alive, useful to reap them and reimage broken bots')
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001341 parser.filter_group.add_option(
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001342 '-k', '--keep-dead', action='store_true',
maruelaf6b06c2017-06-08 06:26:53 -07001343 help='Keep both dead and alive bots')
1344 parser.filter_group.add_option(
1345 '--busy', action='store_true', help='Keep only busy bots')
1346 parser.filter_group.add_option(
1347 '--idle', action='store_true', help='Keep only idle bots')
1348 parser.filter_group.add_option(
1349 '--mp', action='store_true',
1350 help='Keep only Machine Provider managed bots')
1351 parser.filter_group.add_option(
1352 '--non-mp', action='store_true',
1353 help='Keep only non Machine Provider managed bots')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001354 parser.filter_group.add_option(
1355 '-b', '--bare', action='store_true',
Marc-Antoine Ruele7b00162014-03-12 16:59:01 -04001356 help='Do not print out dimensions')
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001357 options, args = parser.parse_args(args)
maruelaf6b06c2017-06-08 06:26:53 -07001358 process_filter_options(parser, options)
Marc-Antoine Ruel28083112014-03-13 16:34:04 -04001359
1360 if options.keep_dead and options.dead_only:
maruelaf6b06c2017-06-08 06:26:53 -07001361 parser.error('Use only one of --keep-dead or --dead-only')
1362 if options.busy and options.idle:
1363 parser.error('Use only one of --busy or --idle')
1364 if options.mp and options.non_mp:
1365 parser.error('Use only one of --mp or --non-mp')
Vadim Shtayura6b555c12014-07-23 16:22:18 -07001366
smut281c3902018-05-30 17:50:05 -07001367 url = options.swarming + '/_ah/api/swarming/v1/bots/list?'
maruelaf6b06c2017-06-08 06:26:53 -07001368 values = []
1369 if options.dead_only:
1370 values.append(('is_dead', 'TRUE'))
1371 elif options.keep_dead:
1372 values.append(('is_dead', 'NONE'))
1373 else:
1374 values.append(('is_dead', 'FALSE'))
Marc-Antoine Ruelc6c579e2014-09-08 18:43:45 -04001375
maruelaf6b06c2017-06-08 06:26:53 -07001376 if options.busy:
1377 values.append(('is_busy', 'TRUE'))
1378 elif options.idle:
1379 values.append(('is_busy', 'FALSE'))
1380 else:
1381 values.append(('is_busy', 'NONE'))
1382
1383 if options.mp:
1384 values.append(('is_mp', 'TRUE'))
1385 elif options.non_mp:
1386 values.append(('is_mp', 'FALSE'))
1387 else:
1388 values.append(('is_mp', 'NONE'))
1389
1390 for key, value in options.dimensions:
1391 values.append(('dimensions', '%s:%s' % (key, value)))
1392 url += urllib.urlencode(values)
1393 try:
1394 data, yielder = get_yielder(url, 0)
1395 bots = data.get('items') or []
1396 for items in yielder():
1397 if items:
1398 bots.extend(items)
1399 except Failure as e:
1400 sys.stderr.write('\n%s\n' % e)
1401 return 1
maruel77f720b2015-09-15 12:35:22 -07001402 for bot in natsort.natsorted(bots, key=lambda x: x['bot_id']):
maruelaf6b06c2017-06-08 06:26:53 -07001403 print bot['bot_id']
1404 if not options.bare:
1405 dimensions = {i['key']: i.get('value') for i in bot.get('dimensions', {})}
1406 print ' %s' % json.dumps(dimensions, sort_keys=True)
1407 if bot.get('task_id'):
1408 print ' task: %s' % bot['task_id']
Marc-Antoine Ruel819fb162014-03-12 16:38:26 -04001409 return 0
1410
1411
maruelfd0a90c2016-06-10 11:51:10 -07001412@subcommand.usage('task_id')
1413def CMDcancel(parser, args):
1414 """Cancels a task."""
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001415 parser.add_option(
1416 '-k', '--kill-running', action='store_true', default=False,
1417 help='Kill the task even if it was running')
maruelfd0a90c2016-06-10 11:51:10 -07001418 options, args = parser.parse_args(args)
1419 if not args:
1420 parser.error('Please specify the task to cancel')
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001421 data = {'kill_running': options.kill_running}
maruelfd0a90c2016-06-10 11:51:10 -07001422 for task_id in args:
smut281c3902018-05-30 17:50:05 -07001423 url = '%s/_ah/api/swarming/v1/task/%s/cancel' % (options.swarming, task_id)
Marc-Antoine Ruel2e52c552018-03-26 19:27:36 -04001424 resp = net.url_read_json(url, data=data, method='POST')
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001425 if resp is None:
maruelfd0a90c2016-06-10 11:51:10 -07001426 print('Deleting %s failed. Probably already gone' % task_id)
1427 return 1
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001428 logging.info('%s', resp)
maruelfd0a90c2016-06-10 11:51:10 -07001429 return 0
1430
1431
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001432@subcommand.usage('--json file | task_id...')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001433def CMDcollect(parser, args):
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001434 """Retrieves results of one or multiple Swarming task by its ID.
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001435
1436 The result can be in multiple part if the execution was sharded. It can
1437 potentially have retries.
1438 """
1439 add_collect_options(parser)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001440 parser.add_option(
1441 '-j', '--json',
1442 help='Load the task ids from .json as saved by trigger --dump-json')
maruel77f720b2015-09-15 12:35:22 -07001443 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001444 process_collect_options(parser, options)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001445 if not args and not options.json:
1446 parser.error('Must specify at least one task id or --json.')
1447 if args and options.json:
1448 parser.error('Only use one of task id or --json.')
1449
1450 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001451 options.json = unicode(os.path.abspath(options.json))
Marc-Antoine Ruel9025a782015-03-17 16:42:59 -04001452 try:
maruel1ceb3872015-10-14 06:10:44 -07001453 with fs.open(options.json, 'rb') as f:
maruel71c61c82016-02-22 06:52:05 -08001454 data = json.load(f)
1455 except (IOError, ValueError):
1456 parser.error('Failed to open %s' % options.json)
1457 try:
1458 tasks = sorted(
1459 data['tasks'].itervalues(), key=lambda x: x['shard_index'])
1460 args = [t['task_id'] for t in tasks]
1461 except (KeyError, TypeError):
1462 parser.error('Failed to process %s' % options.json)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001463 if not options.timeout:
Marc-Antoine Ruelb73066b2018-04-19 20:16:55 -04001464 # Take in account all the task slices.
1465 offset = 0
1466 for s in data['request']['task_slices']:
1467 m = (offset + s['properties']['execution_timeout_secs'] +
1468 s['expiration_secs'])
1469 if m > options.timeout:
1470 options.timeout = m
1471 offset += s['expiration_secs']
Marc-Antoine Ruel9fc42612018-04-20 08:34:22 -04001472 options.timeout += 10.
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001473 else:
1474 valid = frozenset('0123456789abcdef')
1475 if any(not valid.issuperset(task_id) for task_id in args):
1476 parser.error('Task ids are 0-9a-f.')
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001477
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001478 try:
1479 return collect(
1480 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001481 args,
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001482 options.timeout,
1483 options.decorate,
1484 options.print_status_updates,
1485 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001486 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001487 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001488 options.perf,
1489 options.filepath_filter)
Marc-Antoine Ruel79940ae2014-09-23 17:55:41 -04001490 except Failure:
1491 on_error.report(None)
1492 return 1
1493
1494
maruel77f720b2015-09-15 12:35:22 -07001495@subcommand.usage('[method name]')
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001496def CMDpost(parser, args):
1497 """Sends a JSON RPC POST to one API endpoint and prints out the raw result.
1498
1499 Input data must be sent to stdin, result is printed to stdout.
1500
1501 If HTTP response code >= 400, returns non-zero.
1502 """
1503 options, args = parser.parse_args(args)
1504 if len(args) != 1:
1505 parser.error('Must specify only API name')
smut281c3902018-05-30 17:50:05 -07001506 url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
Marc-Antoine Ruel833f5eb2018-04-25 16:49:40 -04001507 data = sys.stdin.read()
1508 try:
1509 resp = net.url_read(url, data=data, method='POST')
1510 except net.TimeoutError:
1511 sys.stderr.write('Timeout!\n')
1512 return 1
1513 if not resp:
1514 sys.stderr.write('No response!\n')
1515 return 1
1516 sys.stdout.write(resp)
1517 return 0
1518
1519
@subcommand.usage('[method name]')
def CMDquery(parser, args):
  """Returns raw JSON information via an URL endpoint. Use 'query-list' to
  gather the list of API methods from the server.

  Examples:
    Raw task request and results:
      swarming.py query -S server-url.com task/123456/request
      swarming.py query -S server-url.com task/123456/result

    Listing all bots:
      swarming.py query -S server-url.com bots/list

    Listing last 10 tasks on a specific bot named 'bot1':
      swarming.py query -S server-url.com --limit 10 bot/bot1/tasks

    Listing last 10 tasks with tags os:Ubuntu-14.04 and pool:Chrome. Note that
    quoting is important!:
      swarming.py query -S server-url.com --limit 10 \\
          'tasks/list?tags=os:Ubuntu-14.04&tags=pool:Chrome'
  """
  parser.add_option(
      '-L', '--limit', type='int', default=200,
      help='Limit to enforce on limitless items (like number of tasks); '
      'default=%default')
  parser.add_option(
      '--json', help='Path to JSON output file (otherwise prints to stdout)')
  parser.add_option(
      '--progress', action='store_true',
      help='Prints a dot at each request to show progress')
  options, args = parser.parse_args(args)
  if len(args) != 1:
    parser.error(
        'Must specify only method name and optionally query args properly '
        'escaped.')

  base_url = options.swarming + '/_ah/api/swarming/v1/' + args[0]
  try:
    # The first page is fetched eagerly; yielder() lazily pages through the
    # rest, honoring --limit.
    data, yielder = get_yielder(base_url, options.limit)
    for page in yielder():
      if page:
        data['items'].extend(page)
      if options.progress:
        sys.stderr.write('.')
        sys.stderr.flush()
  except Failure as exc:
    sys.stderr.write('\n%s\n' % exc)
    return 1

  if options.progress:
    sys.stderr.write('\n')
    sys.stderr.flush()

  if options.json:
    options.json = unicode(os.path.abspath(options.json))
    tools.write_json(options.json, data, True)
    return 0
  try:
    tools.write_json(sys.stdout, data, False)
    sys.stdout.write('\n')
  except IOError:
    # e.g. output piped into a process that exited early; not an error.
    pass
  return 0
1580
1581
maruel77f720b2015-09-15 12:35:22 -07001582def CMDquery_list(parser, args):
1583 """Returns list of all the Swarming APIs that can be used with command
1584 'query'.
1585 """
1586 parser.add_option(
1587 '--json', help='Path to JSON output file (otherwise prints to stdout)')
1588 options, args = parser.parse_args(args)
1589 if args:
1590 parser.error('No argument allowed.')
1591
1592 try:
1593 apis = endpoints_api_discovery_apis(options.swarming)
1594 except APIError as e:
1595 parser.error(str(e))
1596 if options.json:
maruel1ceb3872015-10-14 06:10:44 -07001597 options.json = unicode(os.path.abspath(options.json))
1598 with fs.open(options.json, 'wb') as f:
maruel77f720b2015-09-15 12:35:22 -07001599 json.dump(apis, f)
1600 else:
1601 help_url = (
1602 'https://apis-explorer.appspot.com/apis-explorer/?base=%s/_ah/api#p/' %
1603 options.swarming)
maruel11e31af2017-02-15 07:30:50 -08001604 for i, (api_id, api) in enumerate(sorted(apis.iteritems())):
1605 if i:
1606 print('')
maruel77f720b2015-09-15 12:35:22 -07001607 print api_id
maruel11e31af2017-02-15 07:30:50 -08001608 print ' ' + api['description'].strip()
1609 if 'resources' in api:
1610 # Old.
Marc-Antoine Ruel793bff32019-04-18 17:50:48 +00001611 # TODO(maruel): Remove.
1612 # pylint: disable=too-many-nested-blocks
maruel11e31af2017-02-15 07:30:50 -08001613 for j, (resource_name, resource) in enumerate(
1614 sorted(api['resources'].iteritems())):
1615 if j:
1616 print('')
1617 for method_name, method in sorted(resource['methods'].iteritems()):
1618 # Only list the GET ones.
1619 if method['httpMethod'] != 'GET':
1620 continue
1621 print '- %s.%s: %s' % (
1622 resource_name, method_name, method['path'])
1623 print('\n'.join(
Sergey Berezina269e1a2018-05-16 16:55:12 -07001624 ' ' + l for l in textwrap.wrap(
1625 method.get('description', 'No description'), 78)))
maruel11e31af2017-02-15 07:30:50 -08001626 print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
1627 else:
1628 # New.
1629 for method_name, method in sorted(api['methods'].iteritems()):
maruel77f720b2015-09-15 12:35:22 -07001630 # Only list the GET ones.
1631 if method['httpMethod'] != 'GET':
1632 continue
maruel11e31af2017-02-15 07:30:50 -08001633 print '- %s: %s' % (method['id'], method['path'])
1634 print('\n'.join(
1635 ' ' + l for l in textwrap.wrap(method['description'], 78)))
maruel77f720b2015-09-15 12:35:22 -07001636 print ' %s%s%s' % (help_url, api['servicePath'], method['id'])
1637 return 0
1638
1639
Vadim Shtayuraae8085b2014-05-02 17:13:10 -07001640@subcommand.usage('(hash|isolated) [-- extra_args]')
maruel@chromium.org0437a732013-08-27 16:05:52 +00001641def CMDrun(parser, args):
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001642 """Triggers a task and wait for the results.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001643
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001644 Basically, does everything to run a command remotely.
maruel@chromium.org0437a732013-08-27 16:05:52 +00001645 """
1646 add_trigger_options(parser)
1647 add_collect_options(parser)
Vadim Shtayurab450c602014-05-12 19:23:25 -07001648 add_sharding_options(parser)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001649 options, args = parser.parse_args(args)
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001650 process_collect_options(parser, options)
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001651 task_request = process_trigger_options(parser, options, args)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001652 try:
Marc-Antoine Ruelefdc5282014-12-12 19:31:00 -05001653 tasks = trigger_task_shards(
1654 options.swarming, task_request, options.shards)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001655 except Failure as e:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001656 on_error.report(
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001657 'Failed to trigger %s(%s): %s' %
maruel0a25f6c2017-05-10 10:43:23 -07001658 (task_request.name, args[0], e.args[0]))
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001659 return 1
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001660 if not tasks:
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001661 on_error.report('Failed to trigger the task.')
Marc-Antoine Rueld6dbe762014-06-18 13:49:42 -04001662 return 1
maruel0a25f6c2017-05-10 10:43:23 -07001663 print('Triggered task: %s' % task_request.name)
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001664 task_ids = [
1665 t['task_id']
1666 for t in sorted(tasks.itervalues(), key=lambda x: x['shard_index'])
1667 ]
Marc-Antoine Ruelf24f09c2018-03-23 16:06:18 -04001668 if not options.timeout:
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001669 offset = 0
1670 for s in task_request.task_slices:
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001671 m = (offset + s.properties.execution_timeout_secs +
1672 s.expiration_secs)
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001673 if m > options.timeout:
1674 options.timeout = m
Marc-Antoine Ruel1f835c72018-05-25 12:29:42 -04001675 offset += s.expiration_secs
Marc-Antoine Ruel3a030bc2018-04-23 10:31:25 -04001676 options.timeout += 10.
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001677 try:
1678 return collect(
1679 options.swarming,
Marc-Antoine Ruel12a7da42014-10-01 08:29:47 -04001680 task_ids,
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001681 options.timeout,
Vadim Shtayura86a2cef2014-04-18 11:13:39 -07001682 options.decorate,
Vadim Shtayurae3fbd102014-04-29 17:05:21 -07001683 options.print_status_updates,
Vadim Shtayurac8437bf2014-07-09 19:45:36 -07001684 options.task_summary_json,
maruel9531ce02016-04-13 06:11:23 -07001685 options.task_output_dir,
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001686 options.task_output_stdout,
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001687 options.perf,
1688 options.filepath_filter)
Marc-Antoine Ruelcfb60852014-07-02 15:22:00 -04001689 except Failure:
1690 on_error.report(None)
Marc-Antoine Ruel7c543272013-11-26 13:26:15 -05001691 return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001692
1693
maruel18122c62015-10-23 06:31:23 -07001694@subcommand.usage('task_id -- <extra_args>')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001695def CMDreproduce(parser, args):
1696 """Runs a task locally that was triggered on the server.
1697
1698 This running locally the same commands that have been run on the bot. The data
1699 downloaded will be in a subdirectory named 'work' of the current working
1700 directory.
maruel18122c62015-10-23 06:31:23 -07001701
1702 You can pass further additional arguments to the target command by passing
1703 them after --.
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001704 """
maruelc070e672016-02-22 17:32:57 -08001705 parser.add_option(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001706 '--output', metavar='DIR', default='out',
maruelc070e672016-02-22 17:32:57 -08001707 help='Directory that will have results stored into')
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001708 parser.add_option(
1709 '--work', metavar='DIR', default='work',
1710 help='Directory to map the task input files into')
1711 parser.add_option(
1712 '--cache', metavar='DIR', default='cache',
1713 help='Directory that contains the input cache')
1714 parser.add_option(
1715 '--leak', action='store_true',
1716 help='Do not delete the working directory after execution')
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001717 options, args = parser.parse_args(args)
maruel18122c62015-10-23 06:31:23 -07001718 extra_args = []
1719 if not args:
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001720 parser.error('Must specify exactly one task id.')
maruel18122c62015-10-23 06:31:23 -07001721 if len(args) > 1:
1722 if args[1] == '--':
1723 if len(args) > 2:
1724 extra_args = args[2:]
1725 else:
1726 extra_args = args[1:]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001727
smut281c3902018-05-30 17:50:05 -07001728 url = options.swarming + '/_ah/api/swarming/v1/task/%s/request' % args[0]
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001729 request = net.url_read_json(url)
1730 if not request:
1731 print >> sys.stderr, 'Failed to retrieve request data for the task'
1732 return 1
1733
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001734 workdir = unicode(os.path.abspath(options.work))
maruele7cd38e2016-03-01 19:12:48 -08001735 if fs.isdir(workdir):
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001736 parser.error('Please delete the directory %r first' % options.work)
maruele7cd38e2016-03-01 19:12:48 -08001737 fs.mkdir(workdir)
iannucci31ab9192017-05-02 19:11:56 -07001738 cachedir = unicode(os.path.abspath('cipd_cache'))
1739 if not fs.exists(cachedir):
1740 fs.mkdir(cachedir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001741
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001742 properties = request['properties']
iannucci31ab9192017-05-02 19:11:56 -07001743 env = os.environ.copy()
1744 env['SWARMING_BOT_ID'] = 'reproduce'
1745 env['SWARMING_TASK_ID'] = 'reproduce'
maruel29ab2fd2015-10-16 11:44:01 -07001746 if properties.get('env'):
Marc-Antoine Ruel119b0842014-12-19 15:27:58 -05001747 logging.info('env: %r', properties['env'])
maruelb76604c2015-11-11 11:53:44 -08001748 for i in properties['env']:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001749 key = i['key']
maruelb76604c2015-11-11 11:53:44 -08001750 if not i['value']:
1751 env.pop(key, None)
1752 else:
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001753 env[key] = i['value']
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001754
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001755 if properties.get('env_prefixes'):
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001756 env_prefixes = properties['env_prefixes']
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001757 logging.info('env_prefixes: %r', env_prefixes)
Marc-Antoine Ruel36e09792018-01-09 14:03:25 -05001758 for i in env_prefixes:
1759 key = i['key']
1760 paths = [os.path.normpath(os.path.join(workdir, p)) for p in i['value']]
Robert Iannuccibf5f84c2017-11-22 12:56:50 -08001761 cur = env.get(key)
1762 if cur:
1763 paths.append(cur)
1764 env[key] = os.path.pathsep.join(paths)
1765
iannucci31ab9192017-05-02 19:11:56 -07001766 command = []
nodir152cba62016-05-12 16:08:56 -07001767 if (properties.get('inputs_ref') or {}).get('isolated'):
maruel29ab2fd2015-10-16 11:44:01 -07001768 # Create the tree.
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001769 server_ref = isolate_storage.ServerRef(
maruel29ab2fd2015-10-16 11:44:01 -07001770 properties['inputs_ref']['isolatedserver'],
Marc-Antoine Ruelb8513132018-11-20 19:48:53 +00001771 properties['inputs_ref']['namespace'])
1772 with isolateserver.get_storage(server_ref) as storage:
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001773 # Do not use MemoryContentAddressedCache here, as on 32-bits python,
1774 # inputs larger than ~1GiB will not fit in memory. This is effectively a
1775 # leak.
1776 policies = local_caching.CachePolicies(0, 0, 0, 0)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001777 cache = local_caching.DiskContentAddressedCache(
Marc-Antoine Ruel79d42192019-02-06 19:24:16 +00001778 unicode(os.path.abspath(options.cache)), policies, False)
maruel29ab2fd2015-10-16 11:44:01 -07001779 bundle = isolateserver.fetch_isolated(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001780 properties['inputs_ref']['isolated'], storage, cache, workdir, False)
maruel29ab2fd2015-10-16 11:44:01 -07001781 command = bundle.command
1782 if bundle.relative_cwd:
1783 workdir = os.path.join(workdir, bundle.relative_cwd)
maruela1b9e552016-01-06 12:42:03 -08001784 command.extend(properties.get('extra_args') or [])
iannucci31ab9192017-05-02 19:11:56 -07001785
1786 if properties.get('command'):
1787 command.extend(properties['command'])
1788
Marc-Antoine Ruelc7243592018-05-24 17:04:04 -04001789 # https://chromium.googlesource.com/infra/luci/luci-py.git/+/master/appengine/swarming/doc/Magic-Values.md
Robert Iannucci24ae76a2018-02-26 12:51:18 -08001790 command = tools.fix_python_cmd(command, env)
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001791 if not options.output:
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001792 new_command = run_isolated.process_command(command, 'invalid', None)
1793 if new_command != command:
Marc-Antoine Ruel29ba75c2018-01-10 15:04:14 -05001794 parser.error('The task has outputs, you must use --output-dir')
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001795 else:
1796 # Make the path absolute, as the process will run from a subdirectory.
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001797 options.output = os.path.abspath(options.output)
Marc-Antoine Ruel88229872018-01-10 16:35:29 -05001798 new_command = run_isolated.process_command(
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001799 command, options.output, None)
1800 if not os.path.isdir(options.output):
1801 os.makedirs(options.output)
iannucci31ab9192017-05-02 19:11:56 -07001802 command = new_command
1803 file_path.ensure_command_has_abs_path(command, workdir)
1804
1805 if properties.get('cipd_input'):
1806 ci = properties['cipd_input']
1807 cp = ci['client_package']
1808 client_manager = cipd.get_client(
1809 ci['server'], cp['package_name'], cp['version'], cachedir)
1810
1811 with client_manager as client:
1812 by_path = collections.defaultdict(list)
1813 for pkg in ci['packages']:
1814 path = pkg['path']
1815 # cipd deals with 'root' as ''
1816 if path == '.':
1817 path = ''
1818 by_path[path].append((pkg['package_name'], pkg['version']))
1819 client.ensure(workdir, by_path, cache_dir=cachedir)
1820
maruel77f720b2015-09-15 12:35:22 -07001821 try:
Marc-Antoine Ruel95c21872018-01-10 14:24:28 -05001822 return subprocess42.call(command + extra_args, env=env, cwd=workdir)
maruel77f720b2015-09-15 12:35:22 -07001823 except OSError as e:
maruel29ab2fd2015-10-16 11:44:01 -07001824 print >> sys.stderr, 'Failed to run: %s' % ' '.join(command)
maruel77f720b2015-09-15 12:35:22 -07001825 print >> sys.stderr, str(e)
1826 return 1
Marc-Antoine Ruel5aeb3bb2018-06-16 13:11:02 +00001827 finally:
1828 # Do not delete options.cache.
1829 if not options.leak:
1830 file_path.rmtree(workdir)
Marc-Antoine Ruel13a81272014-10-07 20:16:43 -04001831
1832
maruel0eb1d1b2015-10-02 14:48:21 -07001833@subcommand.usage('bot_id')
1834def CMDterminate(parser, args):
1835 """Tells a bot to gracefully shut itself down as soon as it can.
1836
1837 This is done by completing whatever current task there is then exiting the bot
1838 process.
1839 """
1840 parser.add_option(
1841 '--wait', action='store_true', help='Wait for the bot to terminate')
1842 options, args = parser.parse_args(args)
1843 if len(args) != 1:
1844 parser.error('Please provide the bot id')
smut281c3902018-05-30 17:50:05 -07001845 url = options.swarming + '/_ah/api/swarming/v1/bot/%s/terminate' % args[0]
maruel0eb1d1b2015-10-02 14:48:21 -07001846 request = net.url_read_json(url, data={})
1847 if not request:
1848 print >> sys.stderr, 'Failed to ask for termination'
1849 return 1
1850 if options.wait:
1851 return collect(
Tim 'mithro' Ansell5e8001d2017-09-08 09:32:52 +10001852 options.swarming,
1853 [request['task_id']],
1854 0.,
1855 False,
1856 False,
1857 None,
1858 None,
1859 [],
Takuto Ikuta1e6072c2018-11-06 20:42:43 +00001860 False,
1861 None)
maruelbfc5f872017-06-10 16:43:17 -07001862 else:
1863 print request['task_id']
maruel0eb1d1b2015-10-02 14:48:21 -07001864 return 0
1865
1866
@subcommand.usage("(hash|isolated) [-- extra_args|raw command]")
def CMDtrigger(parser, args):
  """Triggers a Swarming task.

  Passes all extra arguments provided after '--' as additional command line
  arguments for an isolated command specified in *.isolate file.
  """
  add_trigger_options(parser)
  add_sharding_options(parser)
  parser.add_option(
      '--dump-json',
      metavar='FILE',
      help='Dump details about the triggered task(s) to this file as json')
  options, args = parser.parse_args(args)
  task_request = process_trigger_options(parser, options, args)
  try:
    tasks = trigger_task_shards(
        options.swarming, task_request, options.shards)
    if tasks:
      print('Triggered task: %s' % task_request.name)
      # Order the shards by index so the output is stable.
      sorted_tasks = sorted(
          tasks.itervalues(), key=lambda t: t['shard_index'])
      if options.dump_json:
        # Persist the triggered task details for a later `collect --json`.
        details = {
          'base_task_name': task_request.name,
          'tasks': tasks,
          'request': task_request_to_raw_request(task_request),
        }
        tools.write_json(unicode(options.dump_json), details, True)
        print('To collect results, use:')
        print('  tools/swarming_client/swarming.py collect -S %s --json %s' %
            (options.swarming, options.dump_json))
      else:
        print('To collect results, use:')
        print('  tools/swarming_client/swarming.py collect -S %s %s' %
            (options.swarming,
             ' '.join(t['task_id'] for t in sorted_tasks)))
      print('Or visit:')
      for task in sorted_tasks:
        print('  ' + task['view_url'])
    # Falsy `tasks` means the trigger failed: exit 1, otherwise 0.
    return int(not tasks)
  except Failure:
    on_error.report(None)
    return 1
maruel@chromium.org0437a732013-08-27 16:05:52 +00001910
1911
class OptionParserSwarming(logging_utils.OptionParserWithLogging):
  """Shared option parser for all swarming.py subcommands.

  Adds the server selection flag and the standard authentication options on
  top of the logging options.
  """

  def __init__(self, **kwargs):
    logging_utils.OptionParserWithLogging.__init__(
        self, prog='swarming.py', **kwargs)
    # Keep the server flag in its own group so --help lists it separately.
    group = optparse.OptionGroup(self, 'Server')
    self.server_group = group
    group.add_option(
        '-S', '--swarming',
        metavar='URL', default=os.environ.get('SWARMING_SERVER', ''),
        help='Swarming server to use')
    self.add_option_group(group)
    auth.add_auth_options(self)

  def parse_args(self, *args, **kwargs):
    options, args = logging_utils.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    auth.process_auth_options(self, options)
    identity = self._process_swarming(options)
    # Subcommands that expose a --user flag default it to the authenticated
    # identity; getattr keeps this a no-op when the flag is absent.
    if not getattr(options, 'user', True):
      options.user = identity
    return options, args

  def _process_swarming(self, options):
    """Processes the --swarming option and aborts if not specified.

    Returns the identity as determined by the server.
    """
    if not options.swarming:
      self.error('--swarming is required.')
    try:
      options.swarming = net.fix_url(options.swarming)
    except ValueError as e:
      self.error('--swarming %s' % e)
    on_error.report_on_exception_exit(options.swarming)
    try:
      return auth.ensure_logged_in(options.swarming)
    except ValueError as e:
      self.error(str(e))
maruel@chromium.org0437a732013-08-27 16:05:52 +00001950
1951
def main(args):
  """Dispatches |args| to the matching CMD* subcommand in this module."""
  dispatcher = subcommand.CommandDispatcher(__name__)
  parser = OptionParserSwarming(version=__version__)
  return dispatcher.execute(parser, args)
maruel@chromium.org0437a732013-08-27 16:05:52 +00001955
1956
if __name__ == '__main__':
  # Presumably disables OS-level crash dialogs (e.g. Windows Error
  # Reporting) for child processes — TODO confirm in subprocess42.
  subprocess42.inhibit_os_error_reporting()
  # Normalize stdout/stderr encoding so unicode output works cross-platform.
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  # Enable ANSI color escape handling (a no-op outside Windows).
  colorama.init()
  sys.exit(main(sys.argv[1:]))