# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Helper functions for interacting with the Dev Server."""

import base64
import binascii
import distutils.version
import errno
import hashlib
import os
import random
import re
import shutil
import time

import lockfile

import build_artifact
import gsutil_util
import log_util


# Module-local log function.
def _Log(message, *args, **kwargs):
  return log_util.LogWithTag('UTIL', message, *args, **kwargs)


AU_BASE = 'au'
NTON_DIR_SUFFIX = '_nton'
MTON_DIR_SUFFIX = '_mton'
UPLOADED_LIST = 'UPLOADED'
DEVSERVER_LOCK_FILE = 'devserver'

_HASH_BLOCK_SIZE = 8192


def CommaSeparatedList(value_list, is_quoted=False):
  """Concatenates a list of strings.

  This turns ['a', 'b', 'c'] into a single string 'a, b and c'. It optionally
  adds quotes (`a') around each element. Used for logging.
  """
  if is_quoted:
    value_list = ["`" + value + "'" for value in value_list]

  if len(value_list) > 1:
    return (', '.join(value_list[:-1]) + ' and ' + value_list[-1])
  elif value_list:
    return value_list[0]
  else:
    return ''

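# Illustrative output of CommaSeparatedList (the values are hypothetical):
#
#   CommaSeparatedList(['disk', 'mem', 'cpu'])          -> 'disk, mem and cpu'
#   CommaSeparatedList(['disk', 'mem'], is_quoted=True) -> "`disk' and `mem'"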

class CommonUtilError(Exception):
  """Exception class used by this module."""
  pass


def ParsePayloadList(archive_url, payload_list):
  """Parses and returns the full/delta payload URLs.

  Args:
    archive_url: The URL of the Google Storage bucket.
    payload_list: A list of payload filenames.

  Returns:
    Tuple of 3 payload URLs: (full, nton, mton).

  Raises:
    CommonUtilError: If the full payload is missing or has an unexpected name.
  """
  full_payload_url = None
  mton_payload_url = None
  nton_payload_url = None
  for payload in payload_list:
    if '_full_' in payload:
      full_payload_url = '/'.join([archive_url, payload])
    elif '_delta_' in payload:
      # e.g. chromeos_{from_version}_{to_version}_x86-generic_delta_dev.bin
      from_version, to_version = payload.split('_')[1:3]
      if from_version == to_version:
        nton_payload_url = '/'.join([archive_url, payload])
      else:
        mton_payload_url = '/'.join([archive_url, payload])

  if not full_payload_url:
    raise CommonUtilError(
        'Full payload is missing or has unexpected name format.', payload_list)

  return full_payload_url, nton_payload_url, mton_payload_url

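# Illustrative use of ParsePayloadList (the archive URL and filenames below
# are hypothetical, following the naming scheme documented above):
#
#   full, nton, mton = ParsePayloadList(
#       'gs://chromeos-image-archive/x86-generic-release/R17-1413.0.0-a1',
#       ['chromeos_R17-1413.0.0-a1_x86-generic_full_dev.bin',
#        'chromeos_1413.0.0_1413.0.0_x86-generic_delta_dev.bin',
#        'chromeos_1412.0.0_1413.0.0_x86-generic_delta_dev.bin'])
#
#   # full -> <archive_url>/chromeos_R17-1413.0.0-a1_x86-generic_full_dev.bin
#   # nton -> the delta whose from/to versions match (1413.0.0 -> 1413.0.0)
#   # mton -> the delta from the previous version (1412.0.0 -> 1413.0.0)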

def IsAvailable(pattern_list, uploaded_list):
  """Checks whether the target artifacts we wait for are available.

  This method searches the uploaded_list for a match for every pattern
  in the pattern_list. It aborts and returns False if no filename
  matches a given pattern.

  Args:
    pattern_list: List of regular expression patterns to identify
        the target artifacts.
    uploaded_list: List of all uploaded files.

  Returns:
    True if there is a match for every pattern; False otherwise.
  """
  # Pre-compile the regular expression patterns.
  compiled_patterns = [re.compile(p) for p in pattern_list]

  for pattern in compiled_patterns:
    found = False
    for filename in uploaded_list:
      if pattern.search(filename):
        found = True
        break
    if not found:
      return False

  return True

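# Illustrative use of IsAvailable (the filenames are hypothetical):
#
#   IsAvailable(['_full_', 'autotest.tar'],
#               ['chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin',
#                'autotest.tar.bz2'])
#
# returns True: the first pattern matches the full payload and the second
# matches 'autotest.tar.bz2', since patterns are searched, not fully matched.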

def WaitUntilAvailable(to_wait_list, archive_url, err_str, timeout=600,
                       delay=10):
  """Waits until target artifacts are available or the request times out.

  This method polls Google Storage until all target artifacts are
  available or until the timeout occurs. Because we may not know the
  exact name of the target artifacts, the method accepts to_wait_list, a
  list of filename patterns, to identify whether an artifact whose name
  matches the pattern exists (e.g. use pattern '_full_' to search for
  the full payload 'chromeos_R17-1413.0.0-a1_x86-mario_full_dev.bin').

  Args:
    to_wait_list: List of regular expression patterns to identify
        the target artifacts.
    archive_url: URL of the Google Storage bucket.
    err_str: String to display in the error message.
    timeout: Maximum number of seconds to wait before giving up.
    delay: Base delay, in seconds, between polling attempts.

  Returns:
    The list of artifacts in the Google Storage bucket.

  Raises:
    CommonUtilError: If timeout occurs.
  """
  cmd = 'gsutil cat %s/%s' % (archive_url, UPLOADED_LIST)
  msg = 'Failed to get a list of uploaded files.'

  deadline = time.time() + timeout
  while time.time() < deadline:
    uploaded_list = []
    to_delay = delay + random.uniform(.5 * delay, 1.5 * delay)
    try:
      # Run "gsutil cat" to retrieve the list.
      uploaded_list = gsutil_util.GSUtilRun(cmd, msg).splitlines()
    except gsutil_util.GSUtilError:
      # For backward compatibility, fall back to "gsutil ls" when the
      # manifest file is not present.
      cmd = 'gsutil ls %s/*' % archive_url
      msg = 'Failed to list payloads.'
      payload_list = gsutil_util.GSUtilRun(cmd, msg).splitlines()
      for payload in payload_list:
        uploaded_list.append(payload.rsplit('/', 1)[1])

    # Check if all target artifacts are available.
    if IsAvailable(to_wait_list, uploaded_list):
      return uploaded_list
    _Log('Waiting for %s. Retrying in %f seconds...' % (err_str, to_delay))
    time.sleep(to_delay)

  raise CommonUtilError('Missing %s for %s.' % (err_str, archive_url))


def GatherArtifactDownloads(main_staging_dir, archive_url, build_dir, build,
                            timeout=600, delay=10):
  """Generates artifacts that we mean to download and install for autotest.

  This method generates the list of artifacts we will need for autotest. These
  artifacts are instances of build_artifact.BuildArtifact.

  Note, these artifacts can be downloaded asynchronously iff
  !artifact.Synchronous().

  Args:
    main_staging_dir: Temporary directory into which artifacts are downloaded.
    archive_url: URL of the Google Storage bucket holding the build.
    build_dir: Directory into which the artifacts are staged.
    build: Full build string, e.g. R17-1413.0.0-a1-b983.
    timeout: Maximum number of seconds to wait for the artifacts to appear.
    delay: Base delay, in seconds, between polling attempts.

  Returns:
    List of build_artifact.BuildArtifact instances to download.
  """
  # Wait (up to |timeout| seconds, 10 minutes by default) for the full payload
  # to be uploaded, because we do not know its exact name.

  # We also wait for 'autotest.tar' because we do not know which type of
  # autotest tarball (tar or tar.bz2) is available
  # (crosbug.com/32312). This dependency can be removed once all
  # branches move to the new 'tar' format.
  to_wait_list = ['_full_', 'autotest.tar']
  err_str = 'full payload or autotest tarball'
  uploaded_list = WaitUntilAvailable(to_wait_list, archive_url, err_str,
                                     timeout=timeout, delay=delay)

  # First we gather the urls/paths for the update payloads.
  full_url, nton_url, mton_url = ParsePayloadList(archive_url, uploaded_list)

  full_payload = os.path.join(build_dir, build_artifact.ROOT_UPDATE)

  artifacts = []
  artifacts.append(build_artifact.BuildArtifact(
      full_url, main_staging_dir, full_payload, synchronous=True))

  if nton_url:
    nton_payload = os.path.join(build_dir, AU_BASE, build + NTON_DIR_SUFFIX,
                                build_artifact.ROOT_UPDATE)
    artifacts.append(build_artifact.AUTestPayloadBuildArtifact(
        nton_url, main_staging_dir, nton_payload))

  if mton_url:
    mton_payload = os.path.join(build_dir, AU_BASE, build + MTON_DIR_SUFFIX,
                                build_artifact.ROOT_UPDATE)
    artifacts.append(build_artifact.AUTestPayloadBuildArtifact(
        mton_url, main_staging_dir, mton_payload))

  # Gather information about autotest tarballs. Use autotest.tar if available.
  if build_artifact.AUTOTEST_PACKAGE in uploaded_list:
    autotest_url = '%s/%s' % (archive_url, build_artifact.AUTOTEST_PACKAGE)
  else:
    # Use autotest.tar.bz2 for backward compatibility. This can be
    # removed once all branches start using "autotest.tar".
    autotest_url = '%s/%s' % (
        archive_url, build_artifact.AUTOTEST_ZIPPED_PACKAGE)

  # Next we gather the miscellaneous payloads.
  stateful_url = archive_url + '/' + build_artifact.STATEFUL_UPDATE
  test_suites_url = (archive_url + '/' + build_artifact.TEST_SUITES_PACKAGE)

  stateful_payload = os.path.join(build_dir, build_artifact.STATEFUL_UPDATE)

  artifacts.append(build_artifact.BuildArtifact(
      stateful_url, main_staging_dir, stateful_payload, synchronous=True))
  artifacts.append(build_artifact.AutotestTarballBuildArtifact(
      autotest_url, main_staging_dir, build_dir))
  artifacts.append(build_artifact.TarballBuildArtifact(
      test_suites_url, main_staging_dir, build_dir, synchronous=True))
  return artifacts

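# Illustrative call of GatherArtifactDownloads (all paths, URLs and the build
# string below are hypothetical; only Synchronous() is part of the documented
# artifact API):
#
#   artifacts = GatherArtifactDownloads(
#       '/tmp/devserver-staging',
#       'gs://chromeos-image-archive/x86-mario-release/R17-1413.0.0-a1-b983',
#       '/var/www/static/x86-mario-release/R17-1413.0.0-a1-b983',
#       'R17-1413.0.0-a1-b983')
#   sync_artifacts = [a for a in artifacts if a.Synchronous()]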

def GatherSymbolArtifactDownloads(temp_download_dir, archive_url, staging_dir,
                                  timeout=600, delay=10):
  """Generates debug symbol artifacts that we mean to download and stage.

  This method generates the list of artifacts we will need to
  symbolicate crash dumps that occur during autotest runs. These
  artifacts are instances of build_artifact.BuildArtifact.

  This will poll Google Storage until the debug symbol artifact becomes
  available, or until the timeout (10 minutes by default) expires.

  @param temp_download_dir: the tempdir into which we're downloading artifacts
                            prior to staging them.
  @param archive_url: the google storage url of the bucket where the debug
                      symbols for the desired build are stored.
  @param staging_dir: the dir into which to stage the symbols
  @param timeout: maximum number of seconds to wait for the artifact.
  @param delay: base delay, in seconds, between polling attempts.

  @return an iterable of one DebugTarballBuildArtifact pointing to the right
          debug symbols. This is an iterable so that it's similar to
          GatherArtifactDownloads. Also, it's possible that someday we might
          have more than one.
  """
  artifact_name = build_artifact.DEBUG_SYMBOLS
  WaitUntilAvailable([artifact_name], archive_url, 'debug symbols',
                     timeout=timeout, delay=delay)
  artifact = build_artifact.DebugTarballBuildArtifact(
      archive_url + '/' + artifact_name,
      temp_download_dir,
      staging_dir)
  return [artifact]


def GatherImageArchiveArtifactDownloads(temp_download_dir, archive_url,
                                        staging_dir, image_file_list,
                                        timeout=600, delay=10):
  """Generates image archive artifact(s) for downloading / staging.

  Generates the list of artifacts used for extracting Chrome OS images.
  Currently, it returns a single artifact, which is a zipfile configured
  to extract a given list of images. It first polls Google Storage until
  the desired artifacts become available (or a timeout expires).

  Args:
    temp_download_dir: temporary directory, used for downloading artifacts
    archive_url: URI to the bucket where the artifacts are stored
    staging_dir: directory into which to stage the extracted files
    image_file_list: list of image files to be extracted
    timeout: maximum number of seconds to wait for the artifact
    delay: base delay, in seconds, between polling attempts
  Returns:
    list of downloadable artifacts (of type ZipfileBuildArtifact), currently
    containing a single object
  """
  artifact_name = build_artifact.IMAGE_ARCHIVE
  WaitUntilAvailable([artifact_name], archive_url, 'image archive',
                     timeout=timeout, delay=delay)
  artifact = build_artifact.ZipfileBuildArtifact(
      archive_url + '/' + artifact_name,
      temp_download_dir, staging_dir,
      unzip_file_list=image_file_list)
  return [artifact]

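# Illustrative call of GatherImageArchiveArtifactDownloads (directories and
# URL are hypothetical; the image name mirrors the chromiumos_test_image.bin
# referenced by PrepareBuildDirectory below):
#
#   [zip_artifact] = GatherImageArchiveArtifactDownloads(
#       '/tmp/devserver-staging',
#       'gs://chromeos-image-archive/x86-generic-release/R17-1413.0.0-a1',
#       '/var/www/static/x86-generic-release/R17-1413.0.0-a1',
#       ['chromiumos_test_image.bin'])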

def PrepareBuildDirectory(build_dir):
  """Preliminary staging of installation directory for build.

  Args:
    build_dir: Directory to install build components into.
  """
  if not os.path.isdir(build_dir):
    os.makedirs(build_dir)

  # Create blank chromiumos_test_image.bin. Otherwise the Dev Server will
  # try to rebuild it unnecessarily.
  test_image = os.path.join(build_dir, build_artifact.TEST_IMAGE)
  open(test_image, 'a').close()


def SafeSandboxAccess(static_dir, path):
  """Verify that the path is in static_dir.

  Args:
    static_dir: Directory where builds are served from.
    path: Path to verify.

  Returns:
    True if path is in static_dir, False otherwise.
  """
  static_dir = os.path.realpath(static_dir)
  path = os.path.realpath(path)
  return (path.startswith(static_dir) and path != static_dir)

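# Illustrative behavior of SafeSandboxAccess (paths are hypothetical and
# assume no symlinks):
#
#   SafeSandboxAccess('/var/www/static',
#                     '/var/www/static/x86-mario/R17-1413.0.0-a1')  -> True
#   SafeSandboxAccess('/var/www/static',
#                     '/var/www/static/../etc/passwd')              -> False
#   SafeSandboxAccess('/var/www/static', '/var/www/static')         -> False
#
# The sandbox root itself and anything resolving outside of it are rejected.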

def AcquireLock(static_dir, tag, create_once=True):
  """Acquires a lock for a given tag.

  Creates a directory for the specified tag, and atomically creates a lock file
  in it. This tells other components the resource/task represented by the tag
  is unavailable.

  Args:
    static_dir: Directory where builds are served from.
    tag: Unique resource/task identifier. Use '/' for nested tags.
    create_once: Determines whether the directory must be freshly created; this
        preserves previous semantics of the lock acquisition.

  Returns:
    Path to the locked directory.

  Raises:
    CommonUtilError: If lock can't be acquired.
  """
  build_dir = os.path.join(static_dir, tag)
  if not SafeSandboxAccess(static_dir, build_dir):
    raise CommonUtilError('Invalid tag "%s".' % tag)

  # Create the directory.
  is_created = False
  try:
    os.makedirs(build_dir)
    is_created = True
  except OSError, e:
    if e.errno == errno.EEXIST:
      if create_once:
        raise CommonUtilError(str(e))
    else:
      raise

  # Lock the directory.
  try:
    lock = lockfile.FileLock(os.path.join(build_dir, DEVSERVER_LOCK_FILE))
    lock.acquire(timeout=0)
  except lockfile.AlreadyLocked, e:
    raise CommonUtilError(str(e))
  except:
    # In any other case, remove the directory if we actually created it, so
    # that subsequent attempts won't fail to re-create it.
    if is_created:
      shutil.rmtree(build_dir)
    raise

  return build_dir


def ReleaseLock(static_dir, tag, destroy=False):
  """Releases the lock for a given tag.

  Optionally, removes the locked directory entirely.

  Args:
    static_dir: Directory where builds are served from.
    tag: Unique resource/task identifier. Use '/' for nested tags.
    destroy: Determines whether the locked directory should be removed
        entirely.

  Raises:
    CommonUtilError: If lock can't be released.
  """
  build_dir = os.path.join(static_dir, tag)
  if not SafeSandboxAccess(static_dir, build_dir):
    raise CommonUtilError('Invalid tag "%s".' % tag)

  lock = lockfile.FileLock(os.path.join(build_dir, DEVSERVER_LOCK_FILE))
  try:
    lock.break_lock()
    if destroy:
      shutil.rmtree(build_dir)
  except Exception, e:
    raise CommonUtilError(str(e))

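# Illustrative lock usage (static_dir and tag values are hypothetical):
#
#   build_dir = AcquireLock('/var/www/static', 'x86-mario/R17-1413.0.0-a1')
#   try:
#     pass  # ... stage artifacts into build_dir ...
#   finally:
#     ReleaseLock('/var/www/static', 'x86-mario/R17-1413.0.0-a1')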

def GetLatestBuildVersion(static_dir, target, milestone=None):
  """Retrieves the latest build version for a given board.

  Args:
    static_dir: Directory where builds are served from.
    target: The build target, typically a combination of the board and the
        type of build e.g. x86-mario-release.
    milestone: For latest build set to None, for builds only in a specific
        milestone set to a str of format Rxx (e.g. R16). Default: None.

  Returns:
    The latest build string, e.g. R17-1234.0.0-a1-b983.

  Raises:
    CommonUtilError: If for some reason the latest build cannot be
        determined; this could be due to the dir not existing or no builds
        being present after filtering on milestone.
  """
  target_path = os.path.join(static_dir, target)
  if not os.path.isdir(target_path):
    raise CommonUtilError('Cannot find path %s' % target_path)

  builds = [distutils.version.LooseVersion(build) for build in
            os.listdir(target_path)]

  if milestone and builds:
    # Check if milestone Rxx is in the string representation of the build.
    builds = filter(lambda x: milestone.upper() in str(x), builds)

  if not builds:
    raise CommonUtilError('Could not determine build for %s' % target)

  return str(max(builds))

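# Illustrative behavior of GetLatestBuildVersion (the directory layout is
# hypothetical): if /var/www/static/x86-mario-release/ contains the builds
# R16-1193.0.0-a1-b120 and R17-1413.0.0-a1-b983, then:
#
#   GetLatestBuildVersion('/var/www/static', 'x86-mario-release')
#       -> 'R17-1413.0.0-a1-b983'
#   GetLatestBuildVersion('/var/www/static', 'x86-mario-release', 'R16')
#       -> 'R16-1193.0.0-a1-b120'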

def GetControlFile(static_dir, build, control_path):
  """Attempts to pull the requested control file from the Dev Server.

  Args:
    static_dir: Directory where builds are served from.
    build: Fully qualified build string; e.g. R17-1234.0.0-a1-b983.
    control_path: Path to control file on Dev Server relative to Autotest root.

  Raises:
    CommonUtilError: If the requested path is outside the sandbox.

  Returns:
    Content of the requested control file.
  """
  # Be forgiving if the user passes in the control_path with a leading /
  control_path = control_path.lstrip('/')
  control_path = os.path.join(static_dir, build, 'autotest',
                              control_path)
  if not SafeSandboxAccess(static_dir, control_path):
    raise CommonUtilError('Invalid control file "%s".' % control_path)

  if not os.path.exists(control_path):
    # TODO(scottz): Come up with some sort of error mechanism.
    # crosbug.com/25040
    return 'Unknown control path %s' % control_path

  with open(control_path, 'r') as control_file:
    return control_file.read()


def GetControlFileList(static_dir, build):
  """Lists all control and control.* files in the specified board/build path.

  Args:
    static_dir: Directory where builds are served from.
    build: Fully qualified build string; e.g. R17-1234.0.0-a1-b983.

  Raises:
    CommonUtilError: If path is outside of sandbox.

  Returns:
    String of each file separated by a newline.
  """
  autotest_dir = os.path.join(static_dir, build, 'autotest/')
  if not SafeSandboxAccess(static_dir, autotest_dir):
    raise CommonUtilError('Autotest dir not in sandbox "%s".' % autotest_dir)

  control_files = set()
  if not os.path.exists(autotest_dir):
    # TODO(scottz): Come up with some sort of error mechanism.
    # crosbug.com/25040
    return 'Unknown build path %s' % autotest_dir

  for entry in os.walk(autotest_dir):
    dir_path, _, files = entry
    for file_entry in files:
      if file_entry.startswith('control.') or file_entry == 'control':
        control_files.add(os.path.join(dir_path,
                          file_entry).replace(autotest_dir, ''))

  return '\n'.join(control_files)

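# Illustrative result of GetControlFileList (build string and autotest file
# layout are hypothetical):
#
#   GetControlFileList('/var/www/static', 'x86-mario/R17-1413.0.0-a1')
#
# might return a newline-separated string such as
# 'server/site_tests/platform_Example/control\nclient/site_tests/...'.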

def GetFileSize(file_path):
  """Returns the size in bytes of the file given."""
  return os.path.getsize(file_path)


def GetFileHashes(file_path, do_sha1=False, do_sha256=False, do_md5=False):
  """Computes and returns the requested hashes.

  Args:
    file_path: path to file to be hashed
    do_sha1: whether or not to compute a SHA1 hash
    do_sha256: whether or not to compute a SHA256 hash
    do_md5: whether or not to compute a MD5 hash
  Returns:
    A dictionary containing binary hash values, keyed by 'sha1', 'sha256' and
    'md5', respectively.
  """
  hashes = {}
  if do_sha1 or do_sha256 or do_md5:
    # Initialize hashers.
    hasher_sha1 = hashlib.sha1() if do_sha1 else None
    hasher_sha256 = hashlib.sha256() if do_sha256 else None
    hasher_md5 = hashlib.md5() if do_md5 else None

    # Read blocks from file, update hashes.
    with open(file_path, 'rb') as fd:
      while True:
        block = fd.read(_HASH_BLOCK_SIZE)
        if not block:
          break
        hasher_sha1 and hasher_sha1.update(block)
        hasher_sha256 and hasher_sha256.update(block)
        hasher_md5 and hasher_md5.update(block)

    # Update return values.
    if hasher_sha1:
      hashes['sha1'] = hasher_sha1.digest()
    if hasher_sha256:
      hashes['sha256'] = hasher_sha256.digest()
    if hasher_md5:
      hashes['md5'] = hasher_md5.digest()

  return hashes

def GetFileSha1(file_path):
  """Returns the SHA1 checksum of the file given (base64 encoded)."""
  return base64.b64encode(GetFileHashes(file_path, do_sha1=True)['sha1'])


def GetFileSha256(file_path):
  """Returns the SHA256 checksum of the file given (base64 encoded)."""
  return base64.b64encode(GetFileHashes(file_path, do_sha256=True)['sha256'])


def GetFileMd5(file_path):
  """Returns the MD5 checksum of the file given (hex encoded)."""
  return binascii.hexlify(GetFileHashes(file_path, do_md5=True)['md5'])

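# Example: for an empty file (the path is illustrative), these helpers return
# the well-known empty-input digests:
#
#   GetFileSha1('/tmp/empty')  -> '2jmj7l5rSw0yVb/vlWAYkK/YBwk='
#   GetFileMd5('/tmp/empty')   -> 'd41d8cd98f00b204e9800998ecf8427e'
#   GetFileSize('/tmp/empty')  -> 0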


def CopyFile(source, dest):
  """Copies a file from |source| to |dest|."""
  _Log('Copy File %s -> %s' % (source, dest))
  shutil.copy(source, dest)