#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Archives a set of files to a server."""

__version__ = '0.1'
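# Example invocation, as a rough sketch (the script filename is an assumption;
# the 'archive' subcommand and the -I/--isolate-server flag are defined below):
#   python isolateserver_archive.py archive -I https://isolateserver-dev.appspot.com file1.bin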

import binascii
import cStringIO
import hashlib
import itertools
import logging
import os
import sys
import time
import urllib
import zlib

from third_party import colorama
from third_party.depot_tools import fix_encoding
from third_party.depot_tools import subcommand

import run_isolated

from utils import net
from utils import threading_utils
from utils import tools


# Default server.
# TODO(maruel): Chromium-specific.
ISOLATE_SERVER = 'https://isolateserver-dev.appspot.com/'


# The minimum size of files to upload directly to the blobstore.
MIN_SIZE_FOR_DIRECT_BLOBSTORE = 20 * 1024

# The number of files to check on the isolate server per /contains query.
# All files are sorted by likelihood of a change in the file content
# (currently file size is used to estimate this: the larger the file, the more
# likely it has changed). Then the first ITEMS_PER_CONTAINS_QUERIES[0] files
# are taken and sent to '/contains', then the next
# ITEMS_PER_CONTAINS_QUERIES[1], and so on. The numbers here are a trade-off;
# the more per request, the lower the effect of HTTP round trip latency and
# TCP-level chattiness. On the other hand, larger values cause longer lookups,
# increasing the initial latency to start uploading, which is especially an
# issue for large files. This value is optimized for the "few thousand files to
# look up with a minimal number of large files missing" case.
ITEMS_PER_CONTAINS_QUERIES = [20, 20, 50, 50, 50, 100]
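# As an illustration of the values above: checking 250 files would result in
# batches of 20, 20, 50, 50 and 50 files, followed by a final batch with the
# remaining 60 files (the last entry, 100, is reused for all further batches).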

# A list of already compressed extension types that should not receive any
# compression before being uploaded.
ALREADY_COMPRESSED_TYPES = [
    '7z', 'avi', 'cur', 'gif', 'h264', 'jar', 'jpeg', 'jpg', 'pdf', 'png',
    'wav', 'zip'
]


def randomness():
  """Generates low-entropy randomness for MIME encoding.

  Exists so it can be mocked out in unit tests.
  """
  return str(time.time())

def encode_multipart_formdata(fields, files,
                              mime_mapper=lambda _: 'application/octet-stream'):
  """Encodes a Multipart form data object.

  Args:
    fields: a sequence of (name, value) elements for regular form fields.
    files: a sequence of (name, filename, value) elements for data to be
      uploaded as files.
    mime_mapper: function to return the mime type from the filename.
  Returns:
    content_type: for httplib.HTTP instance
    body: for httplib.HTTP instance
  """
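  # A sketch of the generated body for a single regular field, assuming the
  # boundary is 'b' (the real boundary is an MD5 hex digest of randomness()):
  #   --b\r\n
  #   Content-Disposition: form-data; name="key"\r\n
  #   \r\n
  #   value\r\n
  #   --b--\r\n
  # The boundary of the very last part is closed with '--'.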
  boundary = hashlib.md5(randomness()).hexdigest()
  body_list = []
  for (key, value) in fields:
    if isinstance(key, unicode):
      key = key.encode('utf-8')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    body_list.append('--' + boundary)
    body_list.append('Content-Disposition: form-data; name="%s"' % key)
    body_list.append('')
    body_list.append(value)
    body_list.append('--' + boundary)
    body_list.append('')
  for (key, filename, value) in files:
    if isinstance(key, unicode):
      key = key.encode('utf-8')
    if isinstance(filename, unicode):
      filename = filename.encode('utf-8')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    body_list.append('--' + boundary)
    body_list.append('Content-Disposition: form-data; name="%s"; '
                     'filename="%s"' % (key, filename))
    body_list.append('Content-Type: %s' % mime_mapper(filename))
    body_list.append('')
    body_list.append(value)
    body_list.append('--' + boundary)
    body_list.append('')
  if body_list:
    body_list[-2] += '--'
  body = '\r\n'.join(body_list)
  content_type = 'multipart/form-data; boundary=%s' % boundary
  return content_type, body


def sha1_file(filepath):
  """Calculates the SHA-1 of a file without reading it all in memory at once."""
  digest = hashlib.sha1()
  with open(filepath, 'rb') as f:
    while True:
      # Read in 1mb chunks.
      chunk = f.read(1024*1024)
      if not chunk:
        break
      digest.update(chunk)
  return digest.hexdigest()


def url_read(url, **kwargs):
  """Wrapper around net.url_read() that raises MappingError on failure."""
  result = net.url_read(url, **kwargs)
  if result is None:
    # If we get no response from the server, assume it is down and raise an
    # exception.
    raise run_isolated.MappingError('Unable to connect to server %s' % url)
  return result


def upload_hash_content_to_blobstore(
    generate_upload_url, data, hash_key, content):
  """Uploads the given hash contents directly to the blobstore via a generated
  url.

  Arguments:
    generate_upload_url: The url to get the new upload url from.
    data: extra POST data.
    hash_key: sha1 of the uncompressed version of content.
    content: The contents to upload. Must fit in memory for now.
  """
  logging.debug('Generating url to directly upload file to blobstore')
  assert isinstance(hash_key, str), hash_key
  assert isinstance(content, str), (hash_key, content)
  # TODO(maruel): Support large files. This would require streaming support.
  content_type, body = encode_multipart_formdata(
      data, [('content', hash_key, content)])
  for _ in net.retry_loop(max_attempts=net.URL_OPEN_MAX_ATTEMPTS):
    # Retry HTTP 50x here.
    upload_url = net.url_read(generate_upload_url, data=data)
    if not upload_url:
      raise run_isolated.MappingError(
          'Unable to connect to server %s' % generate_upload_url)

    # Do not retry this request on HTTP 50x. Regenerate an upload url each time
    # since uploading "consumes" the upload url.
    result = net.url_read(
        upload_url, data=body, content_type=content_type, retry_50x=False)
    if result is not None:
      return result
  raise run_isolated.MappingError(
      'Unable to connect to server %s' % generate_upload_url)


class IsolateServer(object):
  """Client to store content on the isolate server."""

  def __init__(self, base_url, namespace):
    assert base_url.startswith('http'), base_url
    self.content_url = base_url.rstrip('/') + '/content/'
    self.namespace = namespace
    # TODO(maruel): Make this request much earlier asynchronously while the
    # files are being enumerated.
    self.token = urllib.quote(url_read(self.content_url + 'get_token'))

  def store(self, content, hash_key):
    # TODO(maruel): Detect failures.
    hash_key = str(hash_key)
    if len(content) > MIN_SIZE_FOR_DIRECT_BLOBSTORE:
      url = '%sgenerate_blobstore_url/%s/%s' % (
          self.content_url, self.namespace, hash_key)
      # The token is guaranteed to be already quoted, but the quoting is
      # unnecessary here (and only here), so undo it.
      data = [('token', urllib.unquote(self.token))]
      return upload_hash_content_to_blobstore(url, data, hash_key, content)
    else:
      url = '%sstore/%s/%s?token=%s' % (
          self.content_url, self.namespace, hash_key, self.token)
      return url_read(
          url, data=content, content_type='application/octet-stream')
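# A minimal usage sketch for the IsolateServer class above (values are
# illustrative; the constructor performs a blocking /content/get_token request,
# so a reachable server is required):
#   remote = IsolateServer('https://isolateserver-dev.appspot.com', 'default-gzip')
#   digest = hashlib.sha1('hello world').hexdigest()
#   remote.store(zlib.compress('hello world'), digest)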


def check_files_exist_on_server(query_url, queries):
  """Queries the server to see which files from this batch already exist there.

  Arguments:
    queries: The files, as (relfile, metadata) pairs, to potentially upload to
        the server.
  Returns:
    missing_files: list of files that are missing on the server.
  """
  # TODO(maruel): Move inside IsolateServer.
  logging.info('Checking existence of %d files...', len(queries))
  body = ''.join(
      (binascii.unhexlify(meta_data['h']) for (_, meta_data) in queries))
  assert (len(body) % 20) == 0, repr(body)

  response = url_read(
      query_url, data=body, content_type='application/octet-stream')
  if len(queries) != len(response):
    raise run_isolated.MappingError(
        'Got an incorrect number of responses from the server. Expected %d, '
        'but got %d' % (len(queries), len(response)))

  missing_files = [
      queries[i] for i, flag in enumerate(response) if flag == chr(0)
  ]
  logging.info('Queried %d files, %d cache hit',
               len(queries), len(queries) - len(missing_files))
  return missing_files


def compression_level(filename):
  """Given a filename calculates the ideal compression level to use."""
  # Strip the leading dot so the extension matches ALREADY_COMPRESSED_TYPES.
  file_ext = os.path.splitext(filename)[1].lstrip('.').lower()
  # TODO(csharp): Profile to find what compression level works best.
  return 0 if file_ext in ALREADY_COMPRESSED_TYPES else 7


def read_and_compress(filepath, level):
  """Reads a file and returns its content compressed with zlib."""
  compressor = zlib.compressobj(level)
  compressed_data = cStringIO.StringIO()
  with open(filepath, 'rb') as f:
    while True:
      chunk = f.read(run_isolated.ZIPPED_FILE_CHUNK)
      if not chunk:
        break
      compressed_data.write(compressor.compress(chunk))
  compressed_data.write(compressor.flush(zlib.Z_FINISH))
  value = compressed_data.getvalue()
  compressed_data.close()
  return value


def zip_and_trigger_upload(infile, metadata, upload_function):
  # TODO(csharp): Fix crbug.com/150823 and enable the touched logic again.
  # if not metadata['T']:
  compressed_data = read_and_compress(infile, compression_level(infile))
  priority = (
      run_isolated.RemoteOperation.HIGH if metadata.get('priority', '1') == '0'
      else run_isolated.RemoteOperation.MED)
  return upload_function(priority, compressed_data, metadata['h'], None)


def batch_files_for_check(infiles):
  """Splits the list of files to check for existence on the server into batches.

  Each batch corresponds to a single 'exists?' query to the server.

  Yields:
    Batches of files to check; each batch is a list of (relfile, metadata)
    pairs.
  """
  batch_count = 0
  batch_size_limit = ITEMS_PER_CONTAINS_QUERIES[0]
  next_queries = []
  items = ((k, v) for k, v in infiles.iteritems() if 's' in v)
  for relfile, metadata in sorted(items, key=lambda x: -x[1]['s']):
    next_queries.append((relfile, metadata))
    if len(next_queries) == batch_size_limit:
      yield next_queries
      next_queries = []
      batch_count += 1
      batch_size_limit = ITEMS_PER_CONTAINS_QUERIES[
          min(batch_count, len(ITEMS_PER_CONTAINS_QUERIES) - 1)]
  if next_queries:
    yield next_queries


def get_files_to_upload(contains_hash_url, infiles):
  """Yields files that are missing on the server."""
  with threading_utils.ThreadPool(1, 16, 0, prefix='get_files_to_upload') as tp:
    for files in batch_files_for_check(infiles):
      tp.add_task(0, check_files_exist_on_server, contains_hash_url, files)
    for missing_file in itertools.chain.from_iterable(tp.iter_results()):
      yield missing_file


def upload_sha1_tree(base_url, indir, infiles, namespace):
  """Uploads the given tree to the given url.

  Arguments:
    base_url: The base url; it is assumed that |base_url|/has/ can be used to
        query if an element was already uploaded, and |base_url|/store/ can be
        used to upload a new element.
    indir: Root directory the infiles are based in.
    infiles: dict of files to upload from |indir| to |base_url|.
    namespace: The namespace to use on the server.
  """
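  # |infiles| maps relative paths to their metadata, e.g. (the size and digest
  # below are illustrative):
  #   {'out/foo.dll': {'s': 12345, 'h': 'da39a3ee5e6b4b0d3255bfef95601890afd80709'}}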
  logging.info('upload tree(base_url=%s, indir=%s, files=%d)' %
               (base_url, indir, len(infiles)))

  # Create a pool of workers to zip and upload any files missing from
  # the server.
  num_threads = threading_utils.num_processors()
  zipping_pool = threading_utils.ThreadPool(min(2, num_threads),
                                            num_threads, 0, 'zip')
  remote = IsolateServer(base_url, namespace)
  remote_uploader = run_isolated.RemoteOperation(remote.store)

  # Starts the zip and upload process for files that are missing
  # from the server.
  contains_hash_url = '%scontains/%s?token=%s' % (
      remote.content_url, namespace, remote.token)
  uploaded = []
  for relfile, metadata in get_files_to_upload(contains_hash_url, infiles):
    infile = os.path.join(indir, relfile)
    zipping_pool.add_task(0, zip_and_trigger_upload, infile, metadata,
                          remote_uploader.add_item)
    uploaded.append((relfile, metadata))

  logging.info('Waiting for all files to finish zipping')
  zipping_pool.join()
  zipping_pool.close()
  logging.info('All files zipped.')

  logging.info('Waiting for all files to finish uploading')
  # Will raise if any exception occurred.
  remote_uploader.join()
  remote_uploader.close()
  logging.info('All files are uploaded')

  total = len(infiles)
  total_size = sum(metadata.get('s', 0) for metadata in infiles.itervalues())
  logging.info(
      'Total: %6d, %9.1fkb',
      total,
      sum(m.get('s', 0) for m in infiles.itervalues()) / 1024.)
  cache_hit = set(infiles.iterkeys()) - set(x[0] for x in uploaded)
  cache_hit_size = sum(infiles[i].get('s', 0) for i in cache_hit)
  logging.info(
      'cache hit: %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
      len(cache_hit),
      cache_hit_size / 1024.,
      len(cache_hit) * 100. / total,
      cache_hit_size * 100. / total_size if total_size else 0)
  cache_miss = uploaded
  cache_miss_size = sum(infiles[i[0]].get('s', 0) for i in cache_miss)
  logging.info(
      'cache miss: %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
      len(cache_miss),
      cache_miss_size / 1024.,
      len(cache_miss) * 100. / total,
      cache_miss_size * 100. / total_size if total_size else 0)
  return 0


@subcommand.usage('<file1..fileN> or - to read from stdin')
def CMDarchive(parser, args):
  """Archives data to the server."""
  options, files = parser.parse_args(args)

  if files == ['-']:
    # Strip the trailing newlines so the entries are usable as file paths.
    files = [l.rstrip('\r\n') for l in sys.stdin.readlines()]

  if not files:
    parser.error('Nothing to upload')
  if not options.isolate_server:
    parser.error('Nowhere to send. Please specify --isolate-server')

  # Load the necessary metadata. This is going to be rewritten eventually to be
  # more efficient.
  infiles = dict(
      (
        f,
        {
          's': os.stat(f).st_size,
          'h': sha1_file(f),
        }
      )
      for f in files)

  with tools.Profiler('Archive'):
    return upload_sha1_tree(
        base_url=options.isolate_server,
        indir=os.getcwd(),
        infiles=infiles,
        namespace=options.namespace)


def CMDdownload(parser, args):
  """Download data from the server."""
  _options, args = parser.parse_args(args)
  parser.error('Sorry, it\'s not really supported.')
  return 0


class OptionParserIsolateServer(tools.OptionParserWithLogging):
  def __init__(self, **kwargs):
    tools.OptionParserWithLogging.__init__(self, **kwargs)
    self.add_option(
        '-I', '--isolate-server',
        default=ISOLATE_SERVER,
        metavar='URL',
        help='Isolate server where data is stored. default: %default')
    self.add_option(
        '--namespace', default='default-gzip',
        help='The namespace to use on the server.')

  def parse_args(self, *args, **kwargs):
    options, args = tools.OptionParserWithLogging.parse_args(
        self, *args, **kwargs)
    options.isolate_server = options.isolate_server.rstrip('/')
    if not options.isolate_server:
      self.error('--isolate-server is required.')
    return options, args


def main(args):
  dispatcher = subcommand.CommandDispatcher(__name__)
  try:
    return dispatcher.execute(
        OptionParserIsolateServer(version=__version__), args)
  except (
      run_isolated.MappingError,
      run_isolated.ConfigError) as e:
    sys.stderr.write('\nError: ')
    sys.stderr.write(str(e))
    sys.stderr.write('\n')
    return 1


if __name__ == '__main__':
  fix_encoding.fix_encoding()
  tools.disable_buffering()
  colorama.init()
  sys.exit(main(sys.argv[1:]))