blob: afe49142a6227c9f6df397d9bd8798f3a5d9997a [file] [log] [blame]
maruel@chromium.org0437a732013-08-27 16:05:52 +00001#!/usr/bin/env python
2# Copyright 2013 The Chromium Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""Client tool to trigger tasks or retrieve results from a Swarming server."""
7
8__version__ = '0.1'
9
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000010import binascii
maruel@chromium.org0437a732013-08-27 16:05:52 +000011import hashlib
12import json
13import logging
14import os
15import re
16import shutil
maruel@chromium.org0437a732013-08-27 16:05:52 +000017import subprocess
18import sys
19import time
20import urllib
maruel@chromium.org0437a732013-08-27 16:05:52 +000021
22from third_party import colorama
23from third_party.depot_tools import fix_encoding
24from third_party.depot_tools import subcommand
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000025
26from utils import net
maruel@chromium.org0437a732013-08-27 16:05:52 +000027from utils import threading_utils
vadimsh@chromium.org6b706212013-08-28 15:03:46 +000028from utils import tools
29from utils import zip_package
maruel@chromium.org0437a732013-08-27 16:05:52 +000030
31import run_isolated
32
33
# Directory containing this script. The whole directory is bundled into the
# zip package sent to the Swarming bots (see Manifest.bundle).
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
# Location of helper scripts shipped along in the bundle (swarm_cleanup.py).
TOOLS_PATH = os.path.join(ROOT_DIR, 'tools')


# Default servers.
# TODO(maruel): Chromium-specific.
ISOLATE_SERVER = 'https://isolateserver-dev.appspot.com/'
SWARM_SERVER = 'https://chromium-swarm-dev.appspot.com'


# The default time to wait for a shard to finish running.
DEFAULT_SHARD_WAIT_TIME = 40 * 60.


# Placeholder inserted in decorated output when a shard produced no output.
NO_OUTPUT_FOUND = (
    'No output produced by the test, it may have failed to run.\n'
    '\n')


# Maps sys.platform values to the OS names used in Swarming server
# configurations.
PLATFORM_MAPPING = {
    'cygwin': 'Windows',
    'darwin': 'Mac',
    'linux2': 'Linux',
    'win32': 'Windows',
}
59
60
class Failure(Exception):
  """Generic failure raised when a Swarming operation cannot proceed."""
64
65
class Manifest(object):
  """Represents a Swarming task manifest.

  Also includes code to zip code and upload itself.
  """
  def __init__(
      self, manifest_hash, test_name, shards, test_filter, slave_os,
      working_dir, isolate_server, verbose, profile, priority):
    """Populates a manifest object.
    Args:
      manifest_hash - The manifest's sha-1 that the slave is going to fetch.
      test_name - The name to give the test request.
      shards - The number of swarm shards to request.
      test_filter - The gtest filter to apply when running the test.
      slave_os - OS to run on.
      working_dir - Relative working directory to start the script.
      isolate_server - isolate server url.
      verbose - if True, have the slave print more details.
      profile - if True, have the slave print more timing data.
      priority - int between 0 and 1000, lower the higher priority
    """
    self.manifest_hash = manifest_hash
    # Zip bundle of the scripts shipped to the bot; filled by chromium_setup().
    self.bundle = zip_package.ZipPackage(ROOT_DIR)

    self._test_name = test_name
    self._shards = shards
    self._test_filter = test_filter
    # Raises KeyError (not ValueError) on an unknown sys.platform value;
    # callers are expected to validate via process_trigger_options() first.
    self._target_platform = PLATFORM_MAPPING[slave_os]
    self._working_dir = working_dir

    # Isolate server endpoints used below and by chromium_setup().
    self.data_server_retrieval = isolate_server + '/content/retrieve/default/'
    self._data_server_storage = isolate_server + '/content/store/default/'
    self._data_server_has = isolate_server + '/content/contains/default'
    self._data_server_get_token = isolate_server + '/content/get_token'

    self.verbose = bool(verbose)
    self.profile = bool(profile)
    self.priority = priority

    # Sha-1 of the uploaded zip bundle; set by zip_and_upload().
    self._zip_file_hash = ''
    # Task dicts in execution order; appended via add_task().
    self._tasks = []
    # NOTE(review): appears unused in this file -- candidate for removal,
    # confirm no external user pokes at it.
    self._files = {}
    # Lazily fetched, url-quoted isolate server access token.
    self._token_cache = None

  def _token(self):
    # Fetches (once) and caches the isolate server access token.
    if not self._token_cache:
      result = net.url_open(self._data_server_get_token)
      if not result:
        # TODO(maruel): Implement authentication.
        raise Failure('Failed to get token, need authentication')
      # Quote it right away, so creating the urls is simpler.
      self._token_cache = urllib.quote(result.read())
    return self._token_cache

  def add_task(self, task_name, actions, time_out=600):
    """Appends a new task to the swarm manifest file."""
    # See swarming/src/common/test_request_message.py TestObject constructor for
    # the valid flags.
    self._tasks.append(
        {
          'action': actions,
          'decorate_output': self.verbose,
          'test_name': task_name,
          'time_out': time_out,
        })

  def zip_and_upload(self):
    """Zips up all the files necessary to run a shard and uploads to Swarming
    master.
    """
    # Must only be called once per Manifest instance.
    assert not self._zip_file_hash

    start_time = time.time()
    zip_contents = self.bundle.zip_into_buffer()
    self._zip_file_hash = hashlib.sha1(zip_contents).hexdigest()
    print 'Zipping completed, time elapsed: %f' % (time.time() - start_time)

    # Ask the server if it already has this exact zip (keyed by sha-1) to
    # avoid re-uploading it.
    response = net.url_open(
        self._data_server_has + '?token=%s' % self._token(),
        data=binascii.unhexlify(self._zip_file_hash),
        content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, (
          'Unable to query server for zip file presence, aborting.')
      return False

    # The server replies one byte per queried hash: chr(1) means present.
    if response.read(1) == chr(1):
      print 'Zip file already on server, no need to reupload.'
      return True

    print 'Zip file not on server, starting uploading.'

    url = '%s%s?priority=0&token=%s' % (
        self._data_server_storage, self._zip_file_hash, self._token())
    response = net.url_open(
        url, data=zip_contents, content_type='application/octet-stream')
    if response is None:
      print >> sys.stderr, 'Failed to upload the zip file: %s' % url
      return False

    return True

  def to_json(self):
    """Exports the current configuration into a swarm-readable manifest file.

    This function doesn't mutate the object.
    """
    test_case = {
      'test_case_name': self._test_name,
      'data': [
        [self.data_server_retrieval + urllib.quote(self._zip_file_hash),
         'swarm_data.zip'],
      ],
      'tests': self._tasks,
      'env_vars': {},
      'configurations': [
        {
          'min_instances': self._shards,
          'config_name': self._target_platform,
          'dimensions': {
            'os': self._target_platform,
          },
        },
      ],
      'working_dir': self._working_dir,
      'restart_on_failure': True,
      'cleanup': 'root',
      'priority': self.priority,
    }

    # These flags are googletest specific.
    if self._test_filter and self._test_filter != '*':
      test_case['env_vars']['GTEST_FILTER'] = self._test_filter
    if self._shards > 1:
      # %(instance_index)s / %(num_instances)s are expanded by the Swarming
      # bot, one value per shard.
      test_case['env_vars']['GTEST_SHARD_INDEX'] = '%(instance_index)s'
      test_case['env_vars']['GTEST_TOTAL_SHARDS'] = '%(num_instances)s'

    return json.dumps(test_case, separators=(',',':'))
204
205
def now():
  """Returns the current time, in seconds since epoch.

  Trivial wrapper around time.time() kept so tests can mock it easily.
  """
  current = time.time()
  return current
209
210
def get_test_keys(swarm_base_url, test_name):
  """Returns the Swarm test key for each shards of test_name.

  Retries a few times since the test request may not be registered on the
  server yet when this is called right after triggering.

  Args:
    swarm_base_url: base url of the Swarming server.
    test_name: name of the test request to look up.

  Raises:
    Failure if the server is unreachable or the test is still unknown after
    all the attempts.
  """
  key_data = urllib.urlencode([('name', test_name)])
  url = '%s/get_matching_test_cases?%s' % (swarm_base_url, key_data)

  for i in range(net.URL_OPEN_MAX_ATTEMPTS):
    response = net.url_open(url, retry_404=True)
    if response is None:
      raise Failure(
          'Error: Unable to find any tests with the name, %s, on swarm server'
          % test_name)

    result = response.read()
    # TODO(maruel): Compare exact string.
    if 'No matching' in result:
      logging.warning('Unable to find any tests with the name, %s, on swarm '
                      'server' % test_name)
      # Only sleep when at least one attempt remains. The previous condition
      # (i != URL_OPEN_MAX_ATTEMPTS) was always true for i in range(...), so
      # it slept one extra time before raising Failure below.
      if i != net.URL_OPEN_MAX_ATTEMPTS - 1:
        net.HttpService.sleep_before_retry(i, None)
      continue
    return json.loads(result)

  raise Failure(
      'Error: Unable to find any tests with the name, %s, on swarm server'
      % test_name)
236
237
def retrieve_results(base_url, test_key, timeout, should_stop):
  """Retrieves results for a single test_key.

  Polls the server until the shard reports output, the timeout expires or
  should_stop is set. Returns the decoded result dict, or {} on timeout or
  cancellation.

  Args:
    base_url: Swarming server url.
    test_key: key identifying one shard of a test request.
    timeout: max seconds to poll; 0 (falsy) means poll forever.
    should_stop: threading_utils.Bit checked between attempts to abort early.
  """
  assert isinstance(timeout, float)
  params = [('r', test_key)]
  result_url = '%s/get_result?%s' % (base_url, urllib.urlencode(params))
  start = now()
  while True:
    if timeout and (now() - start) >= timeout:
      logging.error('retrieve_results(%s) timed out', base_url)
      return {}
    # Do retries ourselves.
    response = net.url_open(result_url, retry_404=False, retry_50x=False)
    if response is None:
      # Aggressively poll for results. Do not use retry_404 so
      # should_stop is polled more often.
      # Sleep at most 5s, and never past the overall deadline.
      remaining = min(5, timeout - (now() - start)) if timeout else 5
      if remaining > 0:
        net.HttpService.sleep_before_retry(1, remaining)
    else:
      try:
        data = json.load(response) or {}
      except (ValueError, TypeError):
        logging.warning(
            'Received corrupted data for test_key %s. Retrying.', test_key)
      else:
        # An empty 'output' means the shard hasn't finished yet; keep polling.
        if data['output']:
          return data
    # Checked once per loop iteration so cancellation is reasonably prompt.
    if should_stop.get():
      return {}
267
268
def yield_results(swarm_base_url, test_keys, timeout, max_threads):
  """Yields swarm test results from the swarm server as (index, result).

  Duplicate shards are ignored, the first one to complete is returned.

  max_threads is optional and is used to limit the number of parallel fetches
  done. Since in general the number of test_keys is in the range <=10, it's not
  worth normally to limit the number threads. Mostly used for testing purposes.
  """
  # List of shard indexes still expected; items are removed as shards land.
  shards_remaining = range(len(test_keys))
  number_threads = (
      min(max_threads, len(test_keys)) if max_threads else len(test_keys))
  # Shared flag telling the retrieve_results() workers to give up.
  should_stop = threading_utils.Bit()
  results_remaining = len(test_keys)
  with threading_utils.ThreadPool(number_threads, number_threads, 0) as pool:
    try:
      for test_key in test_keys:
        pool.add_task(
            0, retrieve_results, swarm_base_url, test_key, timeout, should_stop)
      # Stop once every shard index was seen or every worker has reported.
      while shards_remaining and results_remaining:
        result = pool.get_one_result()
        results_remaining -= 1
        if not result:
          # Failed to retrieve one key.
          logging.error('Failed to retrieve the results for a swarm key')
          continue
        shard_index = result['config_instance_index']
        if shard_index in shards_remaining:
          shards_remaining.remove(shard_index)
          yield shard_index, result
        else:
          logging.warning('Ignoring duplicate shard index %d', shard_index)
          # Pop the last entry, there's no such shard.
          # (Shrinks the wait list by one so the loop still terminates even
          # though the duplicate's index was already consumed.)
          shards_remaining.pop()
    finally:
      # Done, kill the remaining threads.
      should_stop.set()
306
307
def chromium_setup(manifest):
  """Sets up the commands to run.

  Highly chromium specific.
  """
  # Ship run_isolated as an uncompressed inner zip; the whole bundle sent to
  # the Swarming server is compressed in one pass anyway.
  runner_name = 'run_isolated.zip'
  runner_blob = run_isolated.get_as_zip_package().zip_into_buffer(
      compress=False)
  manifest.bundle.add_buffer(runner_name, runner_blob)

  cleanup_name = 'swarm_cleanup.py'
  manifest.bundle.add_file(
      os.path.join(TOOLS_PATH, cleanup_name), cleanup_name)

  retrieval_url = manifest.data_server_retrieval.rstrip('/') + '-gzip/'
  command = [
    'python', runner_name,
    '--hash', manifest.manifest_hash,
    '--remote', retrieval_url,
  ]
  if manifest.verbose or manifest.profile:
    # Have it print the profiling section.
    command.append('--verbose')
  manifest.add_task('Run Test', command)

  # Always schedule the cleanup step after the test itself.
  manifest.add_task('Clean Up', ['python', cleanup_name])
335
336
def archive(isolated, isolate_server, verbose):
  """Archives a .isolated and all the dependencies on the CAC.

  Args:
    isolated: path to the .isolated file to archive.
    isolate_server: url of the isolate server to upload the files to.
    verbose: if True, have isolate.py print more details.

  Returns:
    The sha-1 hex digest of the .isolated file's content on success, None on
    failure.
  """
  logging.info('Archiving')
  cmd = [
    sys.executable,
    os.path.join(ROOT_DIR, 'isolate.py'),
    'hashtable',
    '--outdir', isolate_server,
    '--isolated', isolated,
  ]
  if verbose:
    cmd.append('--verbose')
  logging.info(' '.join(cmd))
  # Bug fix: 'verbose' used to be passed as subprocess.call()'s second
  # positional argument, which is Popen's bufsize, not a verbosity flag.
  if subprocess.call(cmd):
    return
  # Close the file deterministically instead of leaking the handle.
  with open(isolated, 'rb') as f:
    return hashlib.sha1(f.read()).hexdigest()
358
359
def process_manifest(
    file_sha1_or_isolated, test_name, shards, test_filter, slave_os,
    working_dir, isolate_server, swarming, verbose, profile, priority):
  """Process the manifest file and send off the swarm test request.

  Optionally archives an .isolated file.

  Returns 0 on success, 1 on any failure (archival, upload or request).
  """
  # Accept either a path to a .isolated file (archived on the fly) or the
  # 40-char sha-1 of an already-uploaded one.
  if file_sha1_or_isolated.endswith('.isolated'):
    file_sha1 = archive(file_sha1_or_isolated, isolate_server, verbose)
    if not file_sha1:
      print >> sys.stderr, 'Archival failure %s' % file_sha1_or_isolated
      return 1
  elif re.match(r'^[a-f0-9]{40}$', file_sha1_or_isolated):
    file_sha1 = file_sha1_or_isolated
  else:
    print >> sys.stderr, 'Invalid hash %s' % file_sha1_or_isolated
    return 1

  try:
    manifest = Manifest(
        file_sha1, test_name, shards, test_filter, slave_os,
        working_dir, isolate_server, verbose, profile, priority)
  except ValueError as e:
    print >> sys.stderr, 'Unable to process %s: %s' % (test_name, e)
    return 1

  # Add the chromium-specific run/cleanup tasks and bundle files.
  chromium_setup(manifest)

  # Zip up relevant files.
  print('Zipping up files...')
  if not manifest.zip_and_upload():
    return 1

  # Send test requests off to swarm.
  print('Sending test requests to swarm.')
  print('Server: %s' % swarming)
  print('Job name: %s' % test_name)
  test_url = swarming + '/test'
  manifest_text = manifest.to_json()
  result = net.url_open(test_url, data={'request': manifest_text})
  if not result:
    print >> sys.stderr, 'Failed to send test for %s\n%s' % (
        test_name, test_url)
    return 1
  try:
    # Response body is only parsed to validate the server accepted the
    # request; its content is otherwise unused.
    json.load(result)
  except (ValueError, TypeError) as e:
    print >> sys.stderr, 'Failed to send test for %s' % test_name
    print >> sys.stderr, 'Manifest: %s' % manifest_text
    print >> sys.stderr, str(e)
    return 1
  return 0
412
413
def trigger(
    slave_os,
    tasks,
    task_prefix,
    working_dir,
    isolate_server,
    swarming,
    verbose,
    profile,
    priority):
  """Sends off the hash swarming test requests.

  Each task is a (file_sha1, test_name, shards, testfilter) tuple. Returns
  the highest exit code reported by process_manifest(), 0 when everything
  succeeded.
  """
  highest_exit_code = 0
  for task in tasks:
    file_sha1, test_name, shards, testfilter = task
    # TODO(maruel): It should first create a request manifest object, then pass
    # it to a function to zip, archive and trigger.
    result = process_manifest(
        file_sha1,
        task_prefix + test_name,
        int(shards),
        testfilter,
        slave_os,
        working_dir,
        isolate_server,
        swarming,
        verbose,
        profile,
        priority)
    if result > highest_exit_code:
      highest_exit_code = result
  return highest_exit_code
443
444
def decorate_shard_output(result, shard_exit_code):
  """Returns wrapped output for swarming task shard."""
  # NOTE(review): result['machine_id'] is printed under the 'machine tag'
  # label and result['machine_tag'] under 'id'. Looks swapped but is kept
  # verbatim to preserve the existing output format -- confirm against the
  # server's result schema before changing.
  tag = 'index %s (machine tag: %s, id: %s)' % (
      result['config_instance_index'],
      result['machine_id'],
      result.get('machine_tag', 'unknown'))
  separator = '=' * 64
  body = result['output'] or NO_OUTPUT_FOUND
  header = '\n%s\nBegin output from shard %s\n%s\n\n' % (
      separator, tag, separator)
  footer = '%s\nEnd output from shard %s. Return %d\n%s\n' % (
      separator, tag, shard_exit_code, separator)
  return header + body + footer
462
463
def collect(url, test_name, timeout, decorate):
  """Retrieves results of a Swarming job.

  Prints each shard's output as it completes and returns an overall exit
  code: the first truthy (non-zero) shard exit code seen, 0 if all shards
  returned 0, or 1 if no shard reported at all.
  """
  test_keys = get_test_keys(url, test_name)
  if not test_keys:
    raise Failure('No test keys to get results with.')

  exit_code = None
  for _index, output in yield_results(url, test_keys, timeout, None):
    # 'exit_codes' is a comma-separated string; missing/empty defaults to '1'.
    shard_exit_codes = (output['exit_codes'] or '1').split(',')
    shard_exit_code = max(int(i) for i in shard_exit_codes)
    if decorate:
      print decorate_shard_output(output, shard_exit_code)
    else:
      print(
          '%s/%s: %s' % (
              output['machine_id'],
              output['machine_tag'],
              output['exit_codes']))
      print(''.join(' %s\n' % l for l in output['output'].splitlines()))
    # Keeps the FIRST non-zero shard exit code: a 0 from an earlier shard is
    # overwritten by a later non-zero one, but not vice versa.
    exit_code = exit_code or shard_exit_code
  return exit_code if exit_code is not None else 1
maruel@chromium.org0437a732013-08-27 16:05:52 +0000485
486
def add_trigger_options(parser):
  """Adds all options to trigger a task on Swarming."""
  parser.add_option(
      '-I', '--isolate-server',
      default=ISOLATE_SERVER,
      metavar='URL',
      help='Isolate server where data is stored. default: %default')
  parser.add_option(
      '-w', '--working_dir', default='swarm_tests',
      help='Working directory on the swarm slave side. default: %default.')
  parser.add_option(
      '-o', '--os', default=sys.platform,
      help='Swarm OS image to request. Should be one of the valid sys.platform '
           'values like darwin, linux2 or win32 default: %default.')
  parser.add_option(
      '-T', '--task-prefix', default='',
      help='Prefix to give the swarm test request. default: %default')
  parser.add_option(
      '--profile', action='store_true',
      # Defaults to on when the ISOLATE_DEBUG environment variable is set.
      default=bool(os.environ.get('ISOLATE_DEBUG')),
      help='Have run_isolated.py print profiling info')
  parser.add_option(
      '--priority', type='int', default=100,
      help='The lower value, the more important the task is')
511
512
def process_trigger_options(parser, options):
  """Validates and normalizes the trigger options in place.

  Calls parser.error() (which normally exits the process) on invalid input.
  """
  options.isolate_server = options.isolate_server.rstrip('/')
  if not options.isolate_server:
    parser.error('--isolate-server is required.')
  if options.os in ('', 'None'):
    # Fall back to the OS swarming.py itself is running on.
    options.os = sys.platform
  if options.os not in PLATFORM_MAPPING:
    parser.error('Invalid --os option.')
522
523
def add_collect_options(parser):
  """Adds all options needed to collect results of a Swarming task."""
  parser.add_option(
      '-t', '--timeout',
      type='float',
      default=DEFAULT_SHARD_WAIT_TIME,
      help='Timeout to wait for result, set to 0 for no timeout; default: '
           '%default s')
  parser.add_option('--decorate', action='store_true', help='Decorate output')
532
533
@subcommand.usage('test_name')
def CMDcollect(parser, args):
  """Retrieves results of a Swarming job.

  The result can be in multiple part if the execution was sharded. It can
  potentially have retries.
  """
  add_collect_options(parser)
  options, args = parser.parse_args(args)
  # Exactly one positional argument (the test name) is expected.
  if not args:
    parser.error('Must specify one test name.')
  if len(args) > 1:
    parser.error('Must specify only one test name.')

  try:
    return collect(options.swarming, args[0], options.timeout, options.decorate)
  except Failure as e:
    parser.error(e.args[0])
552
553
@subcommand.usage('[sha1|isolated ...]')
def CMDrun(parser, args):
  """Triggers a job and wait for the results.

  Basically, does everything to run command(s) remotely.
  """
  add_trigger_options(parser)
  add_collect_options(parser)
  options, args = parser.parse_args(args)

  if not args:
    parser.error('Must pass at least one .isolated file or its sha1.')
  process_trigger_options(parser, options)

  # Names of the jobs that triggered successfully; collected below.
  success = []
  for arg in args:
    logging.info('Triggering %s', arg)
    try:
      # Single task, one shard, no test filter.
      result = trigger(
          options.os,
          [(arg, os.path.basename(arg), '1', '')],
          options.task_prefix,
          options.working_dir,
          options.isolate_server,
          options.swarming,
          options.verbose,
          options.profile,
          options.priority)
    except Failure as e:
      # NOTE(review): on Failure, 'result' becomes the error message string,
      # which is later returned as the "exit code" when nothing triggered --
      # truthy, but not an int. Verify callers only truth-test it.
      result = e.args[0]
    if result:
      print >> sys.stderr, 'Failed to trigger %s: %s' % (arg, result)
    else:
      success.append(os.path.basename(arg))

  if not success:
    print >> sys.stderr, 'Failed to trigger any job.'
    return result

  # Collect results of every successfully triggered job; keep the highest
  # exit code seen across all of them.
  code = 0
  for arg in success:
    logging.info('Collecting %s', arg)
    try:
      new_code = collect(
          options.swarming,
          options.task_prefix + arg,
          options.timeout,
          options.decorate)
      code = max(code, new_code)
    except Failure as e:
      code = max(code, 1)
      print >> sys.stderr, e.args[0]
  return code
607
608
def CMDtrigger(parser, args):
  """Triggers Swarm request(s).

  Accepts one or multiple --task requests, with either the sha1 of a .isolated
  file already uploaded or the path to an .isolated file to archive, packages it
  if needed and sends a Swarm manifest file to the Swarm server.
  """
  add_trigger_options(parser)
  parser.add_option(
      '--task', nargs=4, action='append', default=[], dest='tasks',
      help='Task to trigger. The format is '
           '(hash|isolated, test_name, shards, test_filter). This may be '
           'used multiple times to send multiple hashes jobs. If an isolated '
           'file is specified instead of an hash, it is first archived.')
  (options, args) = parser.parse_args(args)

  # This subcommand takes no positional arguments; everything goes via --task.
  if args:
    parser.error('Unknown args: %s' % args)
  process_trigger_options(parser, options)
  if not options.tasks:
    parser.error('At least one --task is required.')

  try:
    # Returns the highest exit code across all the triggered tasks.
    return trigger(
        options.os,
        options.tasks,
        options.task_prefix,
        options.working_dir,
        options.isolate_server,
        options.swarming,
        options.verbose,
        options.profile,
        options.priority)
  except Failure as e:
    parser.error(e.args[0])
644
645
class OptionParserSwarming(tools.OptionParserWithLogging):
  """Option parser shared by all swarming.py subcommands.

  Adds the -S/--swarming server flag on top of the standard logging options.
  """
  def __init__(self, **kwargs):
    tools.OptionParserWithLogging.__init__(
        self, prog='swarming.py', **kwargs)
    self.add_option(
        '-S', '--swarming', default=SWARM_SERVER,
        help='Specify the url of the Swarming server, default: %default')

  def parse_args(self, *args, **kwargs):
    """Parses arguments, then normalizes and validates --swarming."""
    options, args = tools.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    # Strip any trailing slash so url joins below stay consistent.
    options.swarming = options.swarming.rstrip('/')
    if not options.swarming:
      self.error('--swarming is required.')
    return options, args
661
662
def main(args):
  """Dispatches to the requested subcommand; returns the process exit code."""
  dispatcher = subcommand.CommandDispatcher(__name__)
  try:
    parser = OptionParserSwarming(version=__version__)
    return dispatcher.execute(parser, args)
  except (
      Failure,
      run_isolated.MappingError,
      run_isolated.ConfigError) as e:
    # Known, user-facing failures: print a short error instead of a traceback.
    sys.stderr.write('\nError: %s\n' % e)
    return 1
675
676
if __name__ == '__main__':
  # Normalize stdout/stderr encoding, disable buffering and enable ANSI color
  # handling (colorama, mainly for Windows) before dispatching.
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main(sys.argv[1:]))