#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Archives a set of files to a server."""

import binascii
import cStringIO
import hashlib
import logging
import optparse
import os
import sys
import time
import urllib2
import zlib

import run_isolated


# The maximum number of upload attempts to try when uploading a single file.
MAX_UPLOAD_ATTEMPTS = 5

# The minimum size of files to upload directly to the blobstore.
MIN_SIZE_FOR_DIRECT_BLOBSTORE = 20 * 1024

# A list of already compressed extension types that should not receive any
# compression before being uploaded.
ALREADY_COMPRESSED_TYPES = [
    '7z', 'avi', 'cur', 'gif', 'h264', 'jar', 'jpeg', 'jpg', 'pdf', 'png',
    'wav', 'zip'
]


def encode_multipart_formdata(fields, files,
                              mime_mapper=lambda _: 'application/octet-stream'):
  """Encodes a Multipart form data object.

  Args:
    fields: a sequence of (name, value) elements for regular form fields.
    files: a sequence of (name, filename, value) elements for data to be
           uploaded as files.
    mime_mapper: function to return the mime type from the filename.
  Returns:
    content_type: for httplib.HTTP instance
    body: for httplib.HTTP instance
  """
  boundary = hashlib.md5(str(time.time())).hexdigest()
  body_list = []
  for (key, value) in fields:
    if isinstance(key, unicode):
      key = key.encode('utf-8')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    body_list.append('--' + boundary)
    body_list.append('Content-Disposition: form-data; name="%s"' % key)
    body_list.append('')
    body_list.append(value)
    body_list.append('--' + boundary)
    body_list.append('')
  for (key, filename, value) in files:
    if isinstance(key, unicode):
      key = key.encode('utf-8')
    if isinstance(filename, unicode):
      filename = filename.encode('utf-8')
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    body_list.append('--' + boundary)
    body_list.append('Content-Disposition: form-data; name="%s"; '
                     'filename="%s"' % (key, filename))
    body_list.append('Content-Type: %s' % mime_mapper(filename))
    body_list.append('')
    body_list.append(value)
    body_list.append('--' + boundary)
    body_list.append('')
  if body_list:
    body_list[-2] += '--'
  body = '\r\n'.join(body_list)
  content_type = 'multipart/form-data; boundary=%s' % boundary
  return content_type, body
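# Illustrative note, derived only from the code above (not from any protocol
# spec): for a single file entry the generated body looks roughly like the
# following, with lines joined by CRLF and the closing boundary suffixed '--':
#
#   --<boundary>
#   Content-Disposition: form-data; name="hash_contents"; filename="<hash>"
#   Content-Type: application/octet-stream
#
#   <file bytes>
#   --<boundary>--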


def gen_url_request(url, payload, content_type='application/octet-stream'):
  """Returns a request: a POST when payload is not None, otherwise a GET."""
  request = urllib2.Request(url, data=payload)
  if payload is not None:
    request.add_header('Content-Type', content_type)
    request.add_header('Content-Length', len(payload))
  return request


def sha1_file(filepath):
  """Calculates the SHA-1 of a file without reading it all in memory at once."""
  digest = hashlib.sha1()
  with open(filepath, 'rb') as f:
    while True:
      # Read in 1MiB chunks.
      chunk = f.read(1024*1024)
      if not chunk:
        break
      digest.update(chunk)
  return digest.hexdigest()
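# Illustrative comment only: sha1_file() returns the 40-character lowercase hex
# digest, e.g. an empty file hashes to
# 'da39a3ee5e6b4b0d3255bfef95601890afd80709'. These digests are what the rest
# of this script stores under the 'h' metadata key.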


def url_open(url, data, content_type='application/octet-stream'):
  """Opens the given url with the given data, repeating up to
  MAX_UPLOAD_ATTEMPTS times if it encounters an error.

  Arguments:
    url: The url to open.
    data: The data to send to the url.

  Returns:
    The response from the url, or raises an exception if it failed to get a
    response after MAX_UPLOAD_ATTEMPTS attempts.
  """
  request = gen_url_request(url, data, content_type)
  last_error = None
  for i in range(MAX_UPLOAD_ATTEMPTS):
    try:
      return urllib2.urlopen(request)
    except urllib2.URLError as e:
      last_error = e
      logging.warning('Unable to connect to %s, error msg: %s', url, e)
      time.sleep(0.5 + i)

  # If we get no response from the server after MAX_UPLOAD_ATTEMPTS attempts,
  # assume it is down and raise an exception.
  raise run_isolated.MappingError(
      'Unable to connect to server, %s, to see which files are present: %s' %
      (url, last_error))
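# Note on the retry pacing above (derived from the loop, not from any server
# contract): the sleep is 0.5 + i seconds, so with MAX_UPLOAD_ATTEMPTS = 5 the
# delays are 0.5, 1.5, 2.5, 3.5 and 4.5 seconds, roughly 12.5 seconds of
# waiting before giving up.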


def upload_hash_content_to_blobstore(generate_upload_url, hash_key, content):
  """Uploads the given hash contents directly to the blobstore via a generated
  url.

  Arguments:
    generate_upload_url: The url to fetch the new upload url from.
    hash_key: The hash of the content, used as the filename in the form data.
    content: The contents to upload.
  """
  logging.debug('Generating url to directly upload file to blobstore')
  assert isinstance(hash_key, str), hash_key
  assert isinstance(content, str), (hash_key, content)
  upload_url = url_open(generate_upload_url, None).read()

  if not upload_url:
    logging.error('Unable to generate upload url')
    return

  content_type, body = encode_multipart_formdata(
      [], [('hash_contents', hash_key, content)])
  url_open(upload_url, body, content_type)
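# The blobstore upload is a two-step flow, as implemented above: a GET to
# generate_upload_url returns a one-shot upload url (url_open() with data=None
# issues a GET), and the content is then POSTed to that url as multipart form
# data. The 'hash_contents' field name is simply what this file passes to
# encode_multipart_formdata(); the exact server-side contract is assumed, not
# documented here.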


class UploadRemote(run_isolated.Remote):
  """Remote that pushes file contents to the server.

  Small items go to the regular store url; items larger than
  MIN_SIZE_FOR_DIRECT_BLOBSTORE are uploaded straight to the blobstore.
  """
  def __init__(self, namespace, *args, **kwargs):
    super(UploadRemote, self).__init__(*args, **kwargs)
    self.namespace = str(namespace)

  def get_file_handler(self, base_url):
    base_url = str(base_url)
    def upload_file(content, hash_key):
      hash_key = str(hash_key)
      content_url = base_url.rstrip('/') + '/content/'
      if len(content) > MIN_SIZE_FOR_DIRECT_BLOBSTORE:
        upload_hash_content_to_blobstore(
            content_url + 'generate_blobstore_url/' + self.namespace + '/' +
                hash_key,
            hash_key,
            content)
      else:
        url_open(content_url + 'store/' + self.namespace + '/' + hash_key,
                 content)
    return upload_file
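# For reference, the two endpoint shapes composed above, with <base_url>,
# <namespace> and <hash> filled in by the caller; the paths are taken verbatim
# from the string concatenation in upload_file():
#
#   <base_url>/content/generate_blobstore_url/<namespace>/<hash>   (large files)
#   <base_url>/content/store/<namespace>/<hash>                    (small files)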


def update_files_to_upload(query_url, queries, files_to_upload):
  """Queries the server to see which files from this batch already exist there.

  Arguments:
    query_url: The url to query for the presence of the files.
    queries: The list of (relfile, metadata) pairs that may need to be uploaded
             to the server.
    files_to_upload: Any new files that need to be uploaded are added to
                     this list.
  """
  body = ''.join(
      (binascii.unhexlify(meta_data['h']) for (_, meta_data) in queries))
  assert (len(body) % 20) == 0, repr(body)

  response = url_open(query_url, body).read()
  if len(queries) != len(response):
    raise run_isolated.MappingError(
        'Got an incorrect number of responses from the server. Expected %d, '
        'but got %d' % (len(queries), len(response)))

  hit = 0
  for i in range(len(response)):
    if response[i] == chr(0):
      files_to_upload.append(queries[i])
    else:
      hit += 1
  logging.info('Queried %d files, %d cache hits', len(queries), hit)
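# Wire format used by the query above, as read from this function rather than
# from a server spec: the request body is the raw 20-byte SHA-1 digests of all
# queried files concatenated back to back (hence the len(body) % 20 assert),
# and the response carries exactly one byte per query, where chr(0) means
# "missing, please upload" and any other value counts as a cache hit.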


def compression_level(filename):
  """Given a filename calculates the ideal compression level to use."""
  # os.path.splitext() keeps the leading dot, so strip it before comparing
  # against ALREADY_COMPRESSED_TYPES, which lists bare extensions.
  file_ext = os.path.splitext(filename)[1].lstrip('.').lower()
  # TODO(csharp): Profile to find what compression level works best.
  return 0 if file_ext in ALREADY_COMPRESSED_TYPES else 7
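# Illustrative mapping with hypothetical filenames: 'archive.zip' and
# 'photo.jpg' get level 0 (stored without recompression), while 'binary.dll'
# or 'log.txt' get zlib level 7.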


def zip_and_trigger_upload(infile, metadata, upload_function):
  """Compresses a file with zlib and hands the result to |upload_function|."""
  compressor = zlib.compressobj(compression_level(infile))
  hash_data = cStringIO.StringIO()
  with open(infile, 'rb') as f:
    # TODO(csharp): Fix crbug.com/150823 and enable the touched logic again.
    while True:  # and not metadata['T']:
      chunk = f.read(run_isolated.ZIPPED_FILE_CHUNK)
      if not chunk:
        break
      hash_data.write(compressor.compress(chunk))

  hash_data.write(compressor.flush(zlib.Z_FINISH))
  priority = (
      run_isolated.Remote.HIGH if metadata.get('priority', '1') == '0'
      else run_isolated.Remote.MED)
  upload_function(priority, hash_data.getvalue(), metadata['h'], None)
  hash_data.close()
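# Shape of the per-file |metadata| dict, as used across this script (only the
# keys seen in this file; other keys may exist in the underlying format):
# 's' is the file size in bytes, 'h' is the SHA-1 hex digest, 'l' marks a
# symlink (skipped when uploading), 'priority' optionally bumps an item to high
# priority, and 'T' (touched-only) is currently disabled, see the TODO above.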


def upload_sha1_tree(base_url, indir, infiles, namespace):
  """Uploads the given tree to the given url.

  Arguments:
    base_url: The base url, it is assumed that |base_url|/content/contains/ can
              be used to query which elements are already uploaded, and
              |base_url|/content/store/ can be used to upload a new element.
    indir: Root directory the infiles are based in.
    infiles: dict mapping file paths relative to |indir| to their metadata.
    namespace: The namespace to use on the server.
  """
  logging.info('upload tree(base_url=%s, indir=%s, files=%d)' %
               (base_url, indir, len(infiles)))

  # Generate the list of files that need to be uploaded (since some may already
  # be on the server).
  base_url = base_url.rstrip('/')
  contains_hash_url = base_url + '/content/contains/' + namespace
  to_upload = []
  next_queries = []
  for relfile, metadata in infiles.iteritems():
    if 'l' in metadata:
      # Skip links when uploading.
      continue

    next_queries.append((relfile, metadata))
    # Query the server in batches of 1000 files.
    if len(next_queries) == 1000:
      update_files_to_upload(contains_hash_url, next_queries, to_upload)
      next_queries = []

  if next_queries:
    update_files_to_upload(contains_hash_url, next_queries, to_upload)

  # Zip the required files and then upload them.
  # TODO(csharp): use num_processors().
  zipping_pool = run_isolated.ThreadPool(num_threads=4)
  remote_uploader = UploadRemote(namespace, base_url)
  for relfile, metadata in to_upload:
    infile = os.path.join(indir, relfile)
    zipping_pool.add_task(zip_and_trigger_upload, infile, metadata,
                          remote_uploader.add_item)
  logging.info('Waiting for all files to finish zipping')
  zipping_pool.join()
  logging.info('All files zipped.')

  logging.info('Waiting for all files to finish uploading')
  remote_uploader.join()
  logging.info('All files are uploaded')

  exception = remote_uploader.next_exception()
  if exception:
    raise exception[0], exception[1], exception[2]
  total = len(infiles)
  total_size = sum(metadata.get('s', 0) for metadata in infiles.itervalues())
  logging.info(
      'Total: %6d, %9.1fkb',
      total,
      total_size / 1024.)
  cache_hit = set(infiles.iterkeys()) - set(x[0] for x in to_upload)
  cache_hit_size = sum(infiles[i].get('s', 0) for i in cache_hit)
  logging.info(
      'cache hit: %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
      len(cache_hit),
      cache_hit_size / 1024.,
      len(cache_hit) * 100. / total,
      cache_hit_size * 100. / total_size if total_size else 0)
  cache_miss = to_upload
  cache_miss_size = sum(infiles[i[0]].get('s', 0) for i in cache_miss)
  logging.info(
      'cache miss: %6d, %9.1fkb, %6.2f%% files, %6.2f%% size',
      len(cache_miss),
      cache_miss_size / 1024.,
      len(cache_miss) * 100. / total,
      cache_miss_size * 100. / total_size if total_size else 0)


def main():
  parser = optparse.OptionParser(
      usage='%prog [options] <file1..fileN> or - to read from stdin',
      description=sys.modules[__name__].__doc__)
  # TODO(maruel): Support both NFS and isolateserver.
  parser.add_option('-o', '--outdir', help='Remote server to archive to')
  parser.add_option(
      '-v', '--verbose',
      action='count', default=0,
      help='Use multiple times to increase verbosity')
  parser.add_option('--namespace', default='default-gzip',
                    help='The namespace to use on the server.')

  options, files = parser.parse_args()

  levels = [logging.ERROR, logging.INFO, logging.DEBUG]
  logging.basicConfig(
      level=levels[min(len(levels)-1, options.verbose)],
      format='%(levelname)5s %(module)15s(%(lineno)3d): %(message)s')
  if files == ['-']:
    # Strip the trailing newlines readlines() keeps, otherwise os.stat() below
    # is handed paths like 'foo\n'.
    files = [l.rstrip('\r\n') for l in sys.stdin.readlines()]

  if not files:
    parser.error('Nothing to upload')
  if not options.outdir:
    parser.error('Nowhere to send. Please specify --outdir')

  # Load the necessary metadata. This is going to be rewritten eventually to be
  # more efficient.
  infiles = dict(
      (
        f,
        {
          's': os.stat(f).st_size,
          'h': sha1_file(f),
        }
      )
      for f in files)

  with run_isolated.Profiler('Archive'):
    upload_sha1_tree(
        base_url=options.outdir,
        indir=os.getcwd(),
        infiles=infiles,
        namespace=options.namespace)
  return 0
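# Example command line, purely illustrative: the script name, server url and
# file paths below are hypothetical, only the flags come from the option parser
# above.
#
#   ./archive.py --outdir http://localhost:8080 --namespace default-gzip \
#       some/build/output.bin another/file.dll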


if __name__ == '__main__':
  sys.exit(main())