#!/usr/bin/python
# Copyright (c) 2010 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""This script is used to upload host prebuilts as well as board BINHOSTs.

If the URL starts with 'gs://', we upload using gsutil to Google Storage.
Otherwise, rsync is used.

After a build is successfully uploaded, a file is updated with the proper
BINHOST version as well as the target board. This file is defined in GIT_FILE.

To read more about prebuilts/binhost binary packages please refer to:
http://sites/chromeos/for-team-members/engineering/releng/prebuilt-binaries-for-streamlining-the-build-process

Example of uploading prebuilt amd64 host files to Google Storage:
./prebuilt.py -p /b/cbuild/build -s -u gs://chromeos-prebuilt

Example of uploading x86-dogfood binhosts to Google Storage:
./prebuilt.py -b x86-dogfood -p /b/cbuild/build/ -u gs://chromeos-prebuilt -g

Example of uploading prebuilt amd64 host files using rsync:
./prebuilt.py -p /b/cbuild/build -s -u codf30.jail:/tmp
"""

import datetime
import multiprocessing
import optparse
import os
import re
import sys
import tempfile
import time

from chromite.lib import cros_build_lib
from chromite.lib.binpkg import (GrabLocalPackageIndex, GrabRemotePackageIndex,
                                 PackageIndex)

# As per http://crosbug.com/5855, always filter the packages below.
_FILTER_PACKAGES = set()
_RETRIES = 3
_GSUTIL_BIN = '/b/build/third_party/gsutil/gsutil'
_HOST_PACKAGES_PATH = 'chroot/var/lib/portage/pkgs'
_HOST_TARGET = 'amd64'
_BOARD_PATH = 'chroot/build/%(board)s'
_BOTO_CONFIG = '/home/chrome-bot/external-boto'
# board/board-target/version/packages/
_REL_BOARD_PATH = 'board/%(board)s/%(version)s/packages'
# host/host-target/version/packages/
_REL_HOST_PATH = 'host/%(target)s/%(version)s/packages'
# Private overlays to look at for builds to filter, relative to build path.
_PRIVATE_OVERLAY_DIR = 'src/private-overlays'
_BINHOST_BASE_URL = 'http://commondatastorage.googleapis.com/chromeos-prebuilt'
_PREBUILT_BASE_DIR = 'src/third_party/chromiumos-overlay/chromeos/config/'
# Created in the event of new host targets becoming available.
_PREBUILT_MAKE_CONF = {'amd64': os.path.join(_PREBUILT_BASE_DIR,
                                             'make.conf.amd64-host')}
_BINHOST_CONF_DIR = 'src/third_party/chromiumos-overlay/chromeos/binhost'
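
# For illustration only (the board and version below are hypothetical): with
# the layout templates above, a board upload for 'x86-dogfood' at version
# '25.01.11.173512' lands under
#   <upload_location>/board/x86-dogfood/25.01.11.173512/packages/
# and the PORTAGE_BINHOST URL written for it becomes
#   <binhost_base_url>/board/x86-dogfood/25.01.11.173512/packages/
# Host uploads follow the same pattern under host/amd64/<version>/packages/.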


class FiltersEmpty(Exception):
  """Raised when filters are used but none are found."""
  pass


class UploadFailed(Exception):
  """Raised when one of the files uploaded failed."""
  pass


class UnknownBoardFormat(Exception):
  """Raised when a function finds an unknown board format."""
  pass


class GitPushFailed(Exception):
  """Raised when a git push failed after retry."""
  pass


def UpdateLocalFile(filename, value, key='PORTAGE_BINHOST'):
  """Update the key in file with the value passed.

  File format:
    key="value"
  Note quotes are added automatically.

  Args:
    filename: Name of file to modify.
    value: Value to write with the key.
    key: The variable key to update. (Default: PORTAGE_BINHOST)
  """
  if os.path.exists(filename):
    file_fh = open(filename)
  else:
    file_fh = open(filename, 'w+')
  file_lines = []
  found = False
  keyval_str = '%(key)s=%(value)s'
  for line in file_fh:
    # Strip newlines from end of line. We already add newlines below.
    line = line.rstrip("\n")

    if len(line.split('=')) != 2:
      # Skip any line that doesn't fit key=val.
      file_lines.append(line)
      continue

    file_var, file_val = line.split('=')
    if file_var == key:
      found = True
      print 'Updating %s=%s to %s="%s"' % (file_var, file_val, key, value)
      value = '"%s"' % value
      file_lines.append(keyval_str % {'key': key, 'value': value})
    else:
      file_lines.append(keyval_str % {'key': file_var, 'value': file_val})

  if not found:
    # Quote the value here too so appended keys match the documented format.
    value = '"%s"' % value
    file_lines.append(keyval_str % {'key': key, 'value': value})

  file_fh.close()
  # Write out the new file.
  new_file_fh = open(filename, 'w')
  new_file_fh.write('\n'.join(file_lines) + '\n')
  new_file_fh.close()
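
# Usage sketch for UpdateLocalFile (the file name and URLs are illustrative):
#
#   UpdateLocalFile('make.conf.amd64-host', 'http://<binhost>/packages/')
#
# rewrites an existing line such as
#   PORTAGE_BINHOST="http://<old-binhost>/packages/"
# to
#   PORTAGE_BINHOST="http://<binhost>/packages/"
# and appends the quoted key=value pair if the key was not already present.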


def RevGitPushWithRetry(retries=5):
  """Repo sync and then push git changes in flight.

  Args:
    retries: The number of times to retry before giving up, default: 5.

  Raises:
    GitPushFailed if push was unsuccessful after retries.
  """
  for retry in range(1, retries + 1):
    try:
      cros_build_lib.RunCommand('repo sync .', shell=True)
      cros_build_lib.RunCommand('git push', shell=True)
      break
    except cros_build_lib.RunCommandError:
      if retry < retries:
        print 'Error pushing changes, trying again (%s/%s)' % (retry, retries)
        time.sleep(5 * retry)
      else:
        raise GitPushFailed('Failed to push change after %s retries' % retries)


def RevGitFile(filename, value, retries=5, key='PORTAGE_BINHOST'):
  """Update and push the git file.

  Args:
    filename: file to modify that is in a git repo already.
    value: string representing the version of the prebuilt that has been
      uploaded.
    retries: The number of times to retry before giving up, default: 5.
    key: The variable key to update in the git file.
      (Default: PORTAGE_BINHOST)
  """
  prebuilt_branch = 'prebuilt_branch'
  old_cwd = os.getcwd()
  os.chdir(os.path.dirname(filename))

  cros_build_lib.RunCommand('repo sync .', shell=True)
  cros_build_lib.RunCommand('repo start %s .' % prebuilt_branch, shell=True)
  git_ssh_config_cmd = (
      'git config url.ssh://git@gitrw.chromium.org:9222.pushinsteadof '
      'http://git.chromium.org/git')
  cros_build_lib.RunCommand(git_ssh_config_cmd, shell=True)
  description = 'Update %s="%s" in %s' % (key, value, filename)
  print description
  try:
    UpdateLocalFile(filename, value, key)
    cros_build_lib.RunCommand('git config push.default tracking', shell=True)
    cros_build_lib.RunCommand('git commit -am "%s"' % description, shell=True)
    RevGitPushWithRetry(retries)
  finally:
    cros_build_lib.RunCommand('repo abandon %s .' % prebuilt_branch, shell=True)
    os.chdir(old_cwd)


def GetVersion():
  """Get the version to put in LATEST and update the git version with."""
  return datetime.datetime.now().strftime('%d.%m.%y.%H%M%S')
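
# For example, a run on 25 Jan 2011 at 17:35:12 yields '25.01.11.173512';
# with --prepend-version foo the uploaded version becomes
# 'foo-25.01.11.173512' (see main() below).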


def LoadPrivateFilters(build_path):
  """Load private filters based on ebuilds found under _PRIVATE_OVERLAY_DIR.

  This function adds filters to the global set _FILTER_PACKAGES.

  Args:
    build_path: Path that _PRIVATE_OVERLAY_DIR is in.
  """
  # TODO(scottz): eventually use manifest.xml to find the proper
  # private overlay path.
  filter_path = os.path.join(build_path, _PRIVATE_OVERLAY_DIR)
  files = cros_build_lib.ListFiles(filter_path)
  filters = []
  for file_path in files:
    if file_path.endswith('.ebuild'):
      basename = os.path.basename(file_path)
      match = re.match(r'(.*?)-\d.*\.ebuild', basename)
      if match:
        filters.append(match.group(1))

  if not filters:
    raise FiltersEmpty('No filters were returned')

  _FILTER_PACKAGES.update(filters)


def ShouldFilterPackage(file_path):
  """Skip a particular file if it matches a pattern.

  Skip any files that match the list of packages to filter in
  _FILTER_PACKAGES.

  Args:
    file_path: string of a file path to inspect against _FILTER_PACKAGES.

  Returns:
    True if we should filter the package,
    False otherwise.
  """
  for name in _FILTER_PACKAGES:
    if name in file_path:
      print 'FILTERING %s' % file_path
      return True

  return False


def _RetryRun(cmd, print_cmd=True, shell=False, cwd=None):
  """Run the specified command, retrying if necessary.

  Args:
    cmd: The command to run.
    print_cmd: Whether to print out the cmd.
    shell: Whether to treat the command as a shell.
    cwd: Working directory to run command in.

  Returns:
    True if the command succeeded. Otherwise, returns False.
  """
  # TODO(scottz): port to use _Run or similar when it is available in
  # cros_build_lib.
  for attempt in range(_RETRIES):
    try:
      output = cros_build_lib.RunCommand(cmd, print_cmd=print_cmd, shell=shell,
                                         cwd=cwd)
      return True
    except cros_build_lib.RunCommandError:
      print 'Failed to run %s' % cmd
  else:
    # The for/else branch only runs once every attempt above has failed.
    print 'Retry failed run %s, giving up' % cmd
    return False


def _GsUpload(args):
  """Upload to GS bucket.

  Args:
    args: a tuple of two arguments that contains local_file and remote_file.

  Returns:
    Returns the arg tuple of two if the upload failed.
  """
  (local_file, remote_file) = args

  cmd = '%s cp -a public-read %s %s' % (_GSUTIL_BIN, local_file, remote_file)
  if not _RetryRun(cmd, print_cmd=False, shell=True):
    return (local_file, remote_file)


def RemoteUpload(files, pool=10):
  """Upload to google storage.

  Create a pool of processes and call _GsUpload with the proper arguments.

  Args:
    files: dictionary with keys to local files and values to remote path.
    pool: integer of maximum processes to have at the same time.

  Returns:
    Returns a set of tuple arguments of the failed uploads.
  """
  # TODO(scottz) port this to use _RunManyParallel when it is available in
  # cros_build_lib.
  pool = multiprocessing.Pool(processes=pool)
  workers = []
  for local_file, remote_path in files.iteritems():
    workers.append((local_file, remote_path))

  result = pool.map_async(_GsUpload, workers, chunksize=1)
  while True:
    try:
      return set(result.get(60 * 60))
    except multiprocessing.TimeoutError:
      pass
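
# Note on failure reporting: _GsUpload returns None on success and the
# (local_file, remote_file) tuple on failure, so the set returned by
# RemoteUpload normally contains a single None entry when everything uploads
# cleanly, plus one tuple per failed file otherwise. UploadPrebuilt below
# relies on this when deciding whether to raise UploadFailed.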


def GenerateUploadDict(base_local_path, base_remote_path, pkgs):
  """Build a dictionary of local path/remote path pairs to upload.

  Args:
    base_local_path: The base path to the files on the local hard drive.
    base_remote_path: The base path to the remote paths.
    pkgs: The packages to upload.

  Returns:
    Returns a dictionary of local_path/remote_path pairs.
  """
  upload_files = {}
  for pkg in pkgs:
    suffix = pkg['CPV'] + '.tbz2'
    local_path = os.path.join(base_local_path, suffix)
    assert os.path.exists(local_path)
    remote_path = '%s/%s' % (base_remote_path.rstrip('/'), suffix)
    upload_files[local_path] = remote_path

  return upload_files


def GetBoardPathFromCrosOverlayList(build_path, target):
  """Use cros_overlay_list to determine the path to the board overlay.

  Args:
    build_path: The path to the root of the build directory.
    target: The target that we are looking for; it may consist of a board and
      a board_variant, which we handle properly.

  Returns:
    The last line from cros_overlay_list as a string.
  """
  script_dir = os.path.join(build_path, 'src/scripts/bin')
  cmd = ['./cros_overlay_list']
  if re.match('.*?_.*', target):
    (board, variant) = target.split('_')
    cmd += ['--board', board, '--variant', variant]
  elif re.match(r'.*?-\w+', target):
    cmd += ['--board', target]
  else:
    raise UnknownBoardFormat('Unknown format: %s' % target)

  cmd_output = cros_build_lib.RunCommand(cmd, redirect_stdout=True,
                                         cwd=script_dir)
  # We only care about the last entry.
  return cmd_output.output.splitlines().pop()
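
# Target parsing examples (the board names are illustrative): a target such as
# 'x86-generic_pretendvariant' is split into --board x86-generic
# --variant pretendvariant, while a plain 'x86-dogfood' is passed through as
# --board x86-dogfood.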


def DeterminePrebuiltConfFile(build_path, target):
  """Determine the prebuilt.conf file that needs to be updated for prebuilts.

  Args:
    build_path: The path to the root of the build directory.
    target: String representation of the board. This includes host and board
      targets.

  Returns:
    A string path to a prebuilt.conf file to be updated.
  """
  if _HOST_TARGET == target:
    # We are host.
    # Without more examples of hosts this is a kludge for now.
    # TODO(Scottz): as new host targets come online expand this to
    # work more like boards.
    make_path = _PREBUILT_MAKE_CONF[target]
  else:
    # We are a board.
    board = GetBoardPathFromCrosOverlayList(build_path, target)
    make_path = os.path.join(board, 'prebuilt.conf')

  return make_path


def UpdateBinhostConfFile(path, key, value):
  """Update a binhost config file with key=value.

  Args:
    path: Filename to update.
    key: Key to update.
    value: New value for key.
  """
  cwd = os.path.dirname(os.path.abspath(path))
  filename = os.path.basename(path)
  if not os.path.isdir(cwd):
    os.makedirs(cwd)
  if not os.path.isfile(path):
    config_file = open(path, 'w')
    config_file.write('FULL_BINHOST="$PORTAGE_BINHOST"\n')
    config_file.close()
  UpdateLocalFile(path, value, key)
  cros_build_lib.RunCommand('git add %s' % filename, cwd=cwd, shell=True)
  description = 'Update %s=%s in %s' % (key, value, filename)
  cros_build_lib.RunCommand('git commit -m "%s"' % description, cwd=cwd,
                            shell=True)
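
# For illustration (the conf path below is hypothetical): a freshly created
# binhost conf such as .../chromeos/binhost/target/x86-dogfood.conf ends up
# containing, with the default PORTAGE_BINHOST key,
#   FULL_BINHOST="$PORTAGE_BINHOST"
#   PORTAGE_BINHOST="<url_value>"
# and later runs only rewrite the line for the requested key.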


def UploadPrebuilt(build_path, upload_location, version, binhost_base_url,
                   board=None, git_sync=False, git_sync_retries=5,
                   key='PORTAGE_BINHOST', pkg_indexes=[],
                   sync_binhost_conf=False):
  """Upload prebuilt files to Google Storage or via rsync.

  Args:
    build_path: The path to the root of the chroot.
    upload_location: The upload location.
    version: The version string to embed in the upload path.
    binhost_base_url: Base URL to use when writing the new binhost value.
    board: The board to upload to Google Storage. If this is None, upload
      host packages.
    git_sync: If set, update make.conf of target to reference the latest
      prebuilt packages generated here.
    git_sync_retries: How many times to retry pushing when updating git files.
      This helps avoid failures when multiple bots are modifying the same Repo.
      default: 5
    key: The variable key to update in the git file. (Default: PORTAGE_BINHOST)
    pkg_indexes: Old uploaded prebuilts to compare against. Instead of
      uploading duplicate files, we just link to the old files.
    sync_binhost_conf: If set, update binhost config file in chromiumos-overlay
      for the current board or host.
  """
  if not board:
    # We are uploading host packages.
    # TODO(scottz): eventually add support for different host_targets.
    package_path = os.path.join(build_path, _HOST_PACKAGES_PATH)
    url_suffix = _REL_HOST_PATH % {'version': version, 'target': _HOST_TARGET}
    package_string = _HOST_TARGET
    git_file = os.path.join(build_path, _PREBUILT_MAKE_CONF[_HOST_TARGET])
    binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'host',
                                '%s.conf' % _HOST_TARGET)
  else:
    board_path = os.path.join(build_path, _BOARD_PATH % {'board': board})
    package_path = os.path.join(board_path, 'packages')
    package_string = board
    url_suffix = _REL_BOARD_PATH % {'board': board, 'version': version}
    git_file = DeterminePrebuiltConfFile(build_path, board)
    binhost_conf = os.path.join(build_path, _BINHOST_CONF_DIR, 'target',
                                '%s.conf' % board)
  remote_location = '%s/%s' % (upload_location.rstrip('/'), url_suffix)

  # Process the Packages file, removing duplicates and filtered packages.
  pkg_index = GrabLocalPackageIndex(package_path)
  pkg_index.SetUploadLocation(binhost_base_url, url_suffix)
  pkg_index.RemoveFilteredPackages(lambda pkg: ShouldFilterPackage(pkg))
  uploads = pkg_index.ResolveDuplicateUploads(pkg_indexes)

  # Write the updated Packages file.
  tmp_packages_file = pkg_index.WriteToNamedTemporaryFile()

  if upload_location.startswith('gs://'):
    # Build list of files to upload.
    upload_files = GenerateUploadDict(package_path, remote_location, uploads)
    remote_file = '%s/Packages' % remote_location.rstrip('/')
    upload_files[tmp_packages_file.name] = remote_file

    print 'Uploading %s' % package_string
    failed_uploads = RemoteUpload(upload_files)
    if len(failed_uploads) > 1 or (None not in failed_uploads):
      error_msg = ['%s -> %s\n' % args for args in failed_uploads if args]
      raise UploadFailed('Error uploading:\n%s' % ''.join(error_msg))
  else:
    pkgs = ' '.join(p['CPV'] + '.tbz2' for p in uploads)
    ssh_server, remote_path = remote_location.split(':', 1)
    d = {'pkg_index': tmp_packages_file.name,
         'pkgs': pkgs,
         'remote_packages': '%s/Packages' % remote_location.rstrip('/'),
         'remote_path': remote_path,
         'remote_location': remote_location,
         'ssh_server': ssh_server}
    cmds = ['ssh %(ssh_server)s mkdir -p %(remote_path)s' % d,
            'rsync -av --chmod=a+r %(pkg_index)s %(remote_packages)s' % d]
    if pkgs:
      cmds.append('rsync -Rav %(pkgs)s %(remote_location)s/' % d)
    for cmd in cmds:
      if not _RetryRun(cmd, shell=True, cwd=package_path):
        raise UploadFailed('Could not run %s' % cmd)

  url_value = '%s/%s/' % (binhost_base_url, url_suffix)

  if git_sync:
    RevGitFile(git_file, url_value, retries=git_sync_retries, key=key)

  if sync_binhost_conf:
    UpdateBinhostConfFile(binhost_conf, key, url_value)


def usage(parser, msg):
  """Display usage message and parser help then exit with 1."""
  print >> sys.stderr, msg
  parser.print_help()
  sys.exit(1)


def main():
  parser = optparse.OptionParser()
  parser.add_option('-H', '--binhost-base-url', dest='binhost_base_url',
                    default=_BINHOST_BASE_URL,
                    help='Base URL to use for binhost in make.conf updates')
  parser.add_option('--previous-binhost-url', action='append',
                    default=[], dest='previous_binhost_url',
                    help='Previous binhost URL')
  parser.add_option('-b', '--board', dest='board', default=None,
                    help='Board type that was built on this machine')
  parser.add_option('-p', '--build-path', dest='build_path',
                    help='Path to the chroot')
  parser.add_option('-s', '--sync-host', dest='sync_host',
                    default=False, action='store_true',
                    help='Sync host prebuilts')
  parser.add_option('-g', '--git-sync', dest='git_sync',
                    default=False, action='store_true',
                    help='Enable git version sync (This commits to a repo)')
  parser.add_option('-u', '--upload', dest='upload',
                    default=None,
                    help='Upload location')
  parser.add_option('-V', '--prepend-version', dest='prepend_version',
                    default=None,
                    help='Add an identifier to the front of the version')
  parser.add_option('-f', '--filters', dest='filters', action='store_true',
                    default=False,
                    help='Turn on filtering of private ebuild packages')
  parser.add_option('-k', '--key', dest='key',
                    default='PORTAGE_BINHOST',
                    help='Key to update in make.conf / binhost.conf')
  parser.add_option('--sync-binhost-conf', dest='sync_binhost_conf',
                    default=False, action='store_true',
                    help='Update binhost.conf')

  options, args = parser.parse_args()
  # Set up the boto environment for gsutil to use.
  os.environ['BOTO_CONFIG'] = _BOTO_CONFIG
  if not options.build_path:
    usage(parser, 'Error: you need to provide a chroot path using -p')

  if not options.upload:
    usage(parser, 'Error: you need to provide an upload location using -u')

  if options.filters:
    LoadPrivateFilters(options.build_path)

  version = GetVersion()
  if options.prepend_version:
    version = '%s-%s' % (options.prepend_version, version)

  pkg_indexes = []
  for url in options.previous_binhost_url:
    pkg_index = GrabRemotePackageIndex(url)
    if pkg_index:
      pkg_indexes.append(pkg_index)

  if options.sync_host:
    UploadPrebuilt(options.build_path, options.upload, version,
                   options.binhost_base_url, git_sync=options.git_sync,
                   key=options.key, pkg_indexes=pkg_indexes,
                   sync_binhost_conf=options.sync_binhost_conf)

  if options.board:
    UploadPrebuilt(options.build_path, options.upload, version,
                   options.binhost_base_url, board=options.board,
                   git_sync=options.git_sync, key=options.key,
                   pkg_indexes=pkg_indexes,
                   sync_binhost_conf=options.sync_binhost_conf)


if __name__ == '__main__':
  main()