# Copyright 2018 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.

"""Define local cache policies."""

import contextlib
import errno
import io
import logging
import os
import random
import string
import sys

from utils import file_path
from utils import fs
from utils import lru
from utils import threading_utils
from utils import tools


# The file size to be used when we don't know the correct file size,
# generally used for .isolated files.
UNKNOWN_FILE_SIZE = None


def file_write(path, content_generator):
  """Writes file content as generated by content_generator.

  Creates the intermediary directory as needed.

  Returns the number of bytes written.

  Meant to be mocked out in unit tests.
  """
  file_path.ensure_tree(os.path.dirname(path))
  total = 0
  with fs.open(path, 'wb') as f:
    for d in content_generator:
      total += len(d)
      f.write(d)
  return total

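# Example use of file_write() (illustrative sketch; the path and chunks are
# made up):
#   written = file_write(u'/tmp/cache/deadbeef', iter(['chunk1', 'chunk2']))
#   assert written == 12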

def is_valid_file(path, size):
  """Returns True if the given file appears valid.

  Currently it just checks that the file exists and that its size matches the
  expectation.
  """
  if size == UNKNOWN_FILE_SIZE:
    return fs.isfile(path)
  try:
    actual_size = fs.stat(path).st_size
  except OSError as e:
    logging.warning(
        'Can\'t read item %s, assuming it\'s invalid: %s',
        os.path.basename(path), e)
    return False
  if size != actual_size:
    logging.warning(
        'Found invalid item %s; %d != %d',
        os.path.basename(path), actual_size, size)
    return False
  return True

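# Example use of is_valid_file() (illustrative sketch; the path and expected
# size are made up). A caller would typically re-fetch the entry when this
# returns False:
#   if not is_valid_file(u'/tmp/cache/deadbeef', 1024):
#     logging.warning('entry must be re-fetched')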

def _get_recursive_size(path):
  """Returns the total data size for the specified path.

  This function can be surprisingly slow on OSX, so its output should be
  cached.
  """
  try:
    total = 0
    for root, _, files in fs.walk(path):
      for f in files:
        total += fs.lstat(os.path.join(root, f)).st_size
    return total
  except (IOError, OSError, UnicodeEncodeError) as exc:
    logging.warning('Exception while getting the size of %s:\n%s', path, exc)
    return None


class NamedCacheError(Exception):
  """Named cache specific error."""


class NoMoreSpace(Exception):
  """Not enough space to map the whole directory."""
  pass


class CachePolicies(object):
  def __init__(self, max_cache_size, min_free_space, max_items, max_age_secs):
    """Common caching policies for the multiple caches (isolated, named, cipd).

    Arguments:
    - max_cache_size: Trim if the cache gets larger than this value. If 0, the
      cache is effectively a leak.
    - min_free_space: Trim if disk free space becomes lower than this value.
      If 0, it will unconditionally fill the disk.
    - max_items: Maximum number of items to keep in the cache. If 0, do not
      enforce a limit.
    - max_age_secs: Maximum age an item is kept in the cache until it is
      automatically evicted. Having a lot of dead luggage slows everything
      down.
    """
    self.max_cache_size = max_cache_size
    self.min_free_space = min_free_space
    self.max_items = max_items
    self.max_age_secs = max_age_secs

  def __str__(self):
    return (
        'CachePolicies(max_cache_size=%s; max_items=%s; min_free_space=%s; '
        'max_age_secs=%s)') % (
            self.max_cache_size, self.max_items, self.min_free_space,
            self.max_age_secs)

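# Illustrative sketch (the numbers are made up): a policy that caps the cache
# at 50 GiB and 100k items, keeps 2 GiB of disk free and evicts anything
# unused for 30 days.
#   _example_policies = CachePolicies(
#       max_cache_size=50*1024*1024*1024,
#       min_free_space=2*1024*1024*1024,
#       max_items=100000,
#       max_age_secs=30*24*60*60)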

class CacheMiss(Exception):
  """Raised when an item is not in cache."""
  def __init__(self, digest):
    self.digest = digest
    super(CacheMiss, self).__init__(
        'Item with digest %r is not found in cache' % digest)


class Cache(object):
  def __init__(self, cache_dir):
    if cache_dir is not None:
      assert isinstance(cache_dir, unicode), cache_dir
      assert file_path.isabs(cache_dir), cache_dir
    self.cache_dir = cache_dir
    self._lock = threading_utils.LockWithAssert()
    # Profiling values.
    self._added = []
    self._evicted = []
    self._used = []

  def __len__(self):
    """Returns the number of entries in the cache."""
    raise NotImplementedError()

  def __iter__(self):
    """Iterates over all the entry names."""
    raise NotImplementedError()

  def __contains__(self, name):
    """Returns True if an entry is in the cache."""
    raise NotImplementedError()

  @property
  def total_size(self):
    """Returns the total size of the cache in bytes."""
    raise NotImplementedError()

  @property
  def added(self):
    """Returns a list with the size of each entry added."""
    with self._lock:
      return self._added[:]

  @property
  def used(self):
    """Returns a list with the size of each entry used."""
    with self._lock:
      return self._used[:]

  def cleanup(self):
    """Deletes any corrupted item from the cache and trims it if necessary."""
    raise NotImplementedError()

  def trim(self):
    """Enforces cache policies.

    Returns:
      Number of items evicted.
    """
    raise NotImplementedError()


class ContentAddressedCache(Cache):
  """Content addressed cache that stores objects temporarily.

  It can be accessed concurrently from multiple threads, so it should protect
  its internal state with some lock.
  """

  def __enter__(self):
    """Context manager interface."""
    # TODO(maruel): Remove.
    return self

  def __exit__(self, _exc_type, _exc_value, _traceback):
    """Context manager interface."""
    # TODO(maruel): Remove.
    return False

  def touch(self, digest, size):
    """Ensures item is not corrupted and updates its LRU position.

    Arguments:
      digest: hash digest of item to check.
      size: expected size of this item.

    Returns:
      True if item is in cache and not corrupted.
    """
    raise NotImplementedError()

  def evict(self, digest):
    """Removes item from cache if it's there."""
    raise NotImplementedError()

  def getfileobj(self, digest):
    """Returns a readable file-like object.

    If the file exists on the file system, it will have a .name attribute with
    an absolute path to the file.
    """
    raise NotImplementedError()

  def write(self, digest, content):
    """Reads data from the |content| generator and stores it in the cache.

    It is possible to write to an object that already exists. It may be
    ignored (sent to /dev/null) but the timestamp is still updated.

    Returns digest to simplify chaining.
    """
    raise NotImplementedError()

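# Typical ContentAddressedCache usage (illustrative sketch only; the
# directory, policies, digest and data are made up, and tempfile/hashlib are
# not imported by this module):
#   cache = DiskContentAddressedCache(
#       unicode(tempfile.mkdtemp()), CachePolicies(0, 0, 0, 0), hashlib.sha1,
#       trim=False)
#   digest = cache.write('deadbeef', ['chunk1', 'chunk2'])
#   if cache.touch(digest, size=12):
#     with cache.getfileobj(digest) as f:
#       data = f.read()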

class MemoryContentAddressedCache(ContentAddressedCache):
  """ContentAddressedCache implementation that stores everything in memory."""

  def __init__(self, file_mode_mask=0500):
    """Args:
      file_mode_mask: bit mask to AND file mode with. Default value will make
          all mapped files read-only.
    """
    super(MemoryContentAddressedCache, self).__init__(None)
    self._file_mode_mask = file_mode_mask
    # Items in a LRU lookup dict(digest: data).
    self._lru = lru.LRUDict()

  # Cache interface implementation.

  def __len__(self):
    with self._lock:
      return len(self._lru)

  def __iter__(self):
    # This is not thread-safe.
    return self._lru.__iter__()

  def __contains__(self, digest):
    with self._lock:
      return digest in self._lru

  @property
  def total_size(self):
    with self._lock:
      return sum(len(i) for i in self._lru.itervalues())

  def trim(self):
    """Trimming is not implemented for MemoryContentAddressedCache."""
    return 0

  def cleanup(self):
    """Cleaning is irrelevant, as there's no stateful serialization."""
    pass

  # ContentAddressedCache interface implementation.

  def touch(self, digest, size):
    with self._lock:
      try:
        self._lru.touch(digest)
      except KeyError:
        return False
      return True

  def evict(self, digest):
    with self._lock:
      if digest in self._lru:
        v = self._lru.pop(digest)
        self._evicted.append(len(v))

  def getfileobj(self, digest):
    with self._lock:
      try:
        d = self._lru[digest]
      except KeyError:
        raise CacheMiss(digest)
      self._used.append(len(d))
      self._lru.touch(digest)
    return io.BytesIO(d)

  def write(self, digest, content):
    # Assemble whole stream before taking the lock.
    data = ''.join(content)
    with self._lock:
      self._lru.add(digest, data)
      self._added.append(len(data))
    return digest


class DiskContentAddressedCache(ContentAddressedCache):
  """Stateful LRU cache in a flat hash table in a directory.

  Saves its state as a json file.
  """
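  # On-disk layout (illustrative):
  #   <cache_dir>/<digest>     one flat file per item, named after its digest
  #   <cache_dir>/state.json   serialized LRU state (see STATE_FILE below)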
  STATE_FILE = u'state.json'

  def __init__(self, cache_dir, policies, hash_algo, trim, time_fn=None):
    """
    Arguments:
      cache_dir: directory where to place the cache.
      policies: CachePolicies instance, cache retention policies.
      hash_algo: hashing algorithm used.
      trim: if True, enforce |policies| right away.
        It can be done later by calling trim() explicitly.
    """
    # All protected methods (starting with '_') except _path should be called
    # with self._lock held.
    super(DiskContentAddressedCache, self).__init__(cache_dir)
    self.policies = policies
    self.hash_algo = hash_algo
    self.state_file = os.path.join(cache_dir, self.STATE_FILE)
    # Items in a LRU lookup dict(digest: size).
    self._lru = lru.LRUDict()
    # Current cached free disk space. It is updated by self._trim().
    file_path.ensure_tree(self.cache_dir)
    self._free_disk = file_path.get_free_space(self.cache_dir)
    # The first item in the LRU cache that must not be evicted during this run
    # since it was referenced. All items more recent than _protected in the
    # LRU cache are also inherently protected. It could be a set() of all
    # items referenced but this increases memory usage without a use case.
    self._protected = None
    # Cleanup operations done by self._load(), if any.
    self._operations = []
    with tools.Profiler('Setup'):
      with self._lock:
        self._load(trim, time_fn)

  # Cache interface implementation.

  def __len__(self):
    with self._lock:
      return len(self._lru)

  def __iter__(self):
    # This is not thread-safe.
    return self._lru.__iter__()

  def __contains__(self, digest):
    with self._lock:
      return digest in self._lru

  @property
  def total_size(self):
    with self._lock:
      return sum(self._lru.itervalues())

  def cleanup(self):
    """Cleans up the cache directory.

    Ensures there are no unknown files in cache_dir.
    Ensures the read-only bits are set correctly.

    At this point, the cache was already loaded and trimmed to respect cache
    policies.
    """
    with self._lock:
      fs.chmod(self.cache_dir, 0700)
      # Ensure that all files listed in the state still exist and add new ones.
      previous = set(self._lru)
      # It'd be faster if there were a readdir() function.
      for filename in fs.listdir(self.cache_dir):
        if filename == self.STATE_FILE:
          fs.chmod(os.path.join(self.cache_dir, filename), 0600)
          continue
        if filename in previous:
          fs.chmod(os.path.join(self.cache_dir, filename), 0400)
          previous.remove(filename)
          continue

        # An untracked file. Delete it.
        logging.warning('Removing unknown file %s from cache', filename)
        p = self._path(filename)
        if fs.isdir(p):
          try:
            file_path.rmtree(p)
          except OSError:
            pass
        else:
          file_path.try_remove(p)
        continue

      if previous:
        # Filter out entries that were not found.
        logging.warning('Removed %d lost files', len(previous))
        for filename in previous:
          self._lru.pop(filename)
        self._save()

    # What remains to be done is to hash every single item to
    # detect corruption, then save to ensure state.json is up to date.
    # Sadly, on a 50GiB cache with 100MiB/s I/O, this is still over 8 minutes.
    # TODO(maruel): Let's revisit once directory metadata is stored in
    # state.json so only the files that had been mapped since the last
    # cleanup() call are manually verified.
    #
    #with self._lock:
    #  for digest in self._lru:
    #    if not isolated_format.is_valid_hash(
    #        self._path(digest), self.hash_algo):
    #      self.evict(digest)
    #      logging.info('Deleted corrupted item: %s', digest)

  # ContentAddressedCache interface implementation.

  def touch(self, digest, size):
    """Verifies an actual file is valid and bumps its LRU position.

    Returns False if the file is missing or invalid. Doesn't kick it from LRU
    though (call 'evict' explicitly).

    Note that it doesn't compute the hash so it could still be corrupted if the
    file size didn't change.

    TODO(maruel): More stringent verification while keeping the check fast.
    """
    # Do the check outside the lock.
    if not is_valid_file(self._path(digest), size):
      return False

    # Update its LRU position.
    with self._lock:
      if digest not in self._lru:
        return False
      self._lru.touch(digest)
      self._protected = self._protected or digest
    return True

  def evict(self, digest):
    with self._lock:
      # Do not check for 'digest == self._protected' since it could be because
      # the object is corrupted.
      self._lru.pop(digest)
      self._delete_file(digest, UNKNOWN_FILE_SIZE)

  def getfileobj(self, digest):
    try:
      f = fs.open(self._path(digest), 'rb')
      with self._lock:
        self._used.append(self._lru[digest])
      return f
    except IOError:
      raise CacheMiss(digest)

  def write(self, digest, content):
    assert content is not None
    with self._lock:
      self._protected = self._protected or digest
    path = self._path(digest)
    # A stale broken file may remain. It is possible for the file to have its
    # write access bit removed which would cause the file_write() call to fail
    # to open in write mode. Take no chance here.
    file_path.try_remove(path)
    try:
      size = file_write(path, content)
    except:
      # There are two possible places where an exception can occur:
      #   1) Inside |content| generator in case of network or unzipping errors.
      #   2) Inside file_write itself in case of disk IO errors.
      # In any case delete an incomplete file and propagate the exception to
      # the caller, it will be logged there.
      file_path.try_remove(path)
      raise
    # Make the file read-only in the cache. This has a few side-effects since
    # the file node is modified, so every directory entry to this file becomes
    # read-only. It's fine here because it is a new file.
    file_path.set_read_only(path, True)
    with self._lock:
      self._add(digest, size)
    return digest

  def get_oldest(self):
    try:
      return self._lru.get_oldest()[0]
    except KeyError:
      return None

  def get_timestamp(self, digest):
    """Returns timestamp of last use of an item.

    Raises KeyError if item is not found.
    """
    return self._lru.get_timestamp(digest)

  def trim(self):
    with self._lock:
      return self._trim()

  # Internal functions.

  def _load(self, trim, time_fn):
    """Loads state of the cache from json file.

    If cache_dir does not exist on disk, it is created.
    """
    self._lock.assert_locked()

    if not fs.isfile(self.state_file):
      if not fs.isdir(self.cache_dir):
        fs.makedirs(self.cache_dir)
    else:
      # Load state of the cache.
      try:
        self._lru = lru.LRUDict.load(self.state_file)
      except ValueError as err:
        logging.error('Failed to load cache state: %s', err)
        # Don't want to keep broken state file.
        file_path.try_remove(self.state_file)
    if time_fn:
      self._lru.time_fn = time_fn
    if trim:
      self._trim()
    if self._evicted:
      logging.info(
          'Trimming evicted items with the following sizes: %s',
          sorted(self._evicted))

  def _save(self):
    """Saves the LRU ordering."""
    self._lock.assert_locked()
    if sys.platform != 'win32':
      d = os.path.dirname(self.state_file)
      if fs.isdir(d):
        # Necessary otherwise the file can't be created.
        file_path.set_read_only(d, False)
    if fs.isfile(self.state_file):
      file_path.set_read_only(self.state_file, False)
    self._lru.save(self.state_file)

  def _trim(self):
    """Trims anything we don't need and makes sure enough free space exists."""
    self._lock.assert_locked()

    # Trim old items.
    if self.policies.max_age_secs:
      cutoff = self._lru.time_fn() - self.policies.max_age_secs
      while self._lru:
        oldest = self._lru.get_oldest()
        if oldest[1][1] >= cutoff:
          break
        self._remove_lru_file(True)

    # Ensure maximum cache size.
    if self.policies.max_cache_size:
      total_size = sum(self._lru.itervalues())
      while total_size > self.policies.max_cache_size:
        total_size -= self._remove_lru_file(True)

    # Ensure maximum number of items in the cache.
    if self.policies.max_items and len(self._lru) > self.policies.max_items:
      for _ in xrange(len(self._lru) - self.policies.max_items):
        self._remove_lru_file(True)

    # Ensure enough free space.
    self._free_disk = file_path.get_free_space(self.cache_dir)
    trimmed_due_to_space = 0
    while (
        self.policies.min_free_space and
        self._lru and
        self._free_disk < self.policies.min_free_space):
      trimmed_due_to_space += 1
      self._remove_lru_file(True)

    if trimmed_due_to_space:
      total_usage = sum(self._lru.itervalues())
      usage_percent = 0.
      if total_usage:
        usage_percent = 100. * float(total_usage) / self.policies.max_cache_size

      logging.warning(
          'Trimmed %s file(s) due to not enough free disk space: %.1fkb free,'
          ' %.1fkb cache (%.1f%% of its maximum capacity of %.1fkb)',
          trimmed_due_to_space,
          self._free_disk / 1024.,
          total_usage / 1024.,
          usage_percent,
          self.policies.max_cache_size / 1024.)
    self._save()
    return trimmed_due_to_space

  def _path(self, digest):
    """Returns the path to one item."""
    return os.path.join(self.cache_dir, digest)

  def _remove_lru_file(self, allow_protected):
    """Removes the least recently used file and returns its size."""
    self._lock.assert_locked()
    try:
      digest, (size, _) = self._lru.get_oldest()
      if not allow_protected and digest == self._protected:
        total_size = sum(self._lru.itervalues()) + size
        msg = (
            'Not enough space to fetch the whole isolated tree.\n'
            ' %s\n cache=%dbytes, %d items; %sb free_space') % (
              self.policies, total_size, len(self._lru) + 1, self._free_disk)
        raise NoMoreSpace(msg)
    except KeyError:
      # That means an internal error.
      raise NoMoreSpace('Nothing to remove, can\'t happen')
    digest, (size, _) = self._lru.pop_oldest()
    logging.debug('Removing LRU file %s', digest)
    self._delete_file(digest, size)
    return size

  def _add(self, digest, size=UNKNOWN_FILE_SIZE):
    """Adds an item into LRU cache marking it as a newest one."""
    self._lock.assert_locked()
    if size == UNKNOWN_FILE_SIZE:
      size = fs.stat(self._path(digest)).st_size
    self._added.append(size)
    self._lru.add(digest, size)
    self._free_disk -= size
    # Do a quicker version of self._trim(). It only enforces free disk space,
    # not cache size limits. It doesn't actually look at real free disk space,
    # only uses its cache values. self._trim() will be called later to enforce
    # real trimming but doing this quick version here makes it possible to map
    # an isolated that is larger than the current amount of free disk space
    # when the cache size is already large.
    while (
        self.policies.min_free_space and
        self._lru and
        self._free_disk < self.policies.min_free_space):
      if self._remove_lru_file(False) == -1:
        break

  def _delete_file(self, digest, size=UNKNOWN_FILE_SIZE):
    """Deletes cache file from the file system."""
    self._lock.assert_locked()
    try:
      if size == UNKNOWN_FILE_SIZE:
        try:
          size = fs.stat(self._path(digest)).st_size
        except OSError:
          size = 0
      file_path.try_remove(self._path(digest))
      self._evicted.append(size)
      self._free_disk += size
    except OSError as e:
      if e.errno != errno.ENOENT:
        logging.error('Error attempting to delete a file %s:\n%s', digest, e)


class NamedCache(Cache):
  """Manages cache directories.

  A cache entry is a tuple (name, path), where
    name is a short identifier that describes the contents of the cache, e.g.
      "git_v8" could be all git repositories required by v8 builds, or
      "build_chromium" could be build artefacts of Chromium.
    path is a directory path relative to the task run dir. Cache installation
      puts the requested cache directory at the path.
  """
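  # Illustrative lifecycle (the directories and cache name are made up):
  #   cache = NamedCache(u'/b/swarming/c/named', policies)
  #   cache.install(u'/b/task/0/cache/git_v8', 'git_v8')
  #   ... the task runs and mutates the directory in place ...
  #   cache.uninstall(u'/b/task/0/cache/git_v8', 'git_v8')
  #   cache.trim()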
  _DIR_ALPHABET = string.ascii_letters + string.digits
  STATE_FILE = u'state.json'

  def __init__(self, cache_dir, policies, time_fn=None):
    """Initializes NamedCaches.

    Arguments:
    - cache_dir is a directory for persistent cache storage.
    - policies is a CachePolicies instance.
    - time_fn is a function that returns timestamp (float) and is used to take
      timestamps when new caches are requested. Used in unit tests.
    """
    super(NamedCache, self).__init__(cache_dir)
    self._policies = policies
    # LRU {cache_name -> tuple(cache_location, size)}
    self.state_file = os.path.join(cache_dir, self.STATE_FILE)
    self._lru = lru.LRUDict()
    if not fs.isdir(self.cache_dir):
      fs.makedirs(self.cache_dir)
    elif os.path.isfile(self.state_file):
      try:
        self._lru = lru.LRUDict.load(self.state_file)
      except ValueError:
        logging.exception('failed to load named cache state file')
        logging.warning('deleting named caches')
        file_path.rmtree(self.cache_dir)
    with self._lock:
      self._try_upgrade()
    if time_fn:
      self._lru.time_fn = time_fn

  def get_timestamp(self, name):
    """Returns timestamp of last use of an item.

    Raises KeyError if cache is not found.
    """
    with self._lock:
      assert isinstance(name, basestring), name
      return self._lru.get_timestamp(name)

  @property
  def available(self):
    """Returns a set of names of available caches."""
    with self._lock:
      return set(self._lru)

  def install(self, path, name):
    """Moves the directory for the specified named cache to |path|.

    path must be absolute, unicode and must not exist.

    Raises NamedCacheError if the cache cannot be installed.
    """
    logging.info('Installing named cache %r to %r', name, path)
    with self._lock:
      try:
        if os.path.isdir(path):
          raise NamedCacheError(
              'installation directory %r already exists' % path)

        if name in self._lru:
          rel_cache, _size = self._lru.get(name)
          abs_cache = os.path.join(self.cache_dir, rel_cache)
          if os.path.isdir(abs_cache):
            logging.info('Moving %r to %r', abs_cache, path)
            file_path.ensure_tree(os.path.dirname(path))
            fs.rename(abs_cache, path)
            self._remove(name)
            return

          logging.warning(
              'directory for named cache %r does not exist at %s', name,
              rel_cache)
          self._remove(name)

        # The named cache does not exist, create an empty directory.
        # When uninstalling, we will move it back to the cache and create an
        # entry.
        file_path.ensure_tree(path)
      except (IOError, OSError) as ex:
        raise NamedCacheError(
            'cannot install cache named %r at %r: %s' % (
              name, path, ex))
      finally:
        self._save()

  def uninstall(self, path, name):
    """Moves the cache directory back. Opposite to install().

    path must be absolute and unicode.

    Raises NamedCacheError if the cache cannot be uninstalled.
    """
    logging.info('Uninstalling named cache %r from %r', name, path)
    with self._lock:
      try:
        if not os.path.isdir(path):
          logging.warning(
              'Directory %r does not exist anymore. Cache lost.', path)
          return

        if name in self._lru:
          # This shouldn't happen but just remove the preexisting one and move
          # on.
          logging.warning('overwriting an existing named cache %r', name)
          self._remove(name)
        rel_cache = self._allocate_dir()

        # Move the dir and create an entry for the named cache.
        abs_cache = os.path.join(self.cache_dir, rel_cache)
        logging.info('Moving %r to %r', path, abs_cache)
        file_path.ensure_tree(os.path.dirname(abs_cache))
        fs.rename(path, abs_cache)

        # That succeeded, calculate its new size.
        size = _get_recursive_size(abs_cache)
        if not size:
          # Do not save empty named cache.
          return
        self._lru.add(name, (rel_cache, size))

        # Create symlink <cache_dir>/<named>/<name> -> <cache_dir>/<short name>
        # for user convenience.
        named_path = self._get_named_path(name)
        if os.path.exists(named_path):
          file_path.remove(named_path)
        else:
          file_path.ensure_tree(os.path.dirname(named_path))

        try:
          fs.symlink(abs_cache, named_path)
          logging.info('Created symlink %r to %r', named_path, abs_cache)
        except OSError:
          # Ignore on Windows. It happens when running as a normal user or when
          # UAC is enabled and the user is a filtered administrator account.
          if sys.platform != 'win32':
            raise
      except (IOError, OSError) as ex:
        raise NamedCacheError(
            'cannot uninstall cache named %r at %r: %s' % (
              name, path, ex))
      finally:
        self._save()

  # Cache interface implementation.

  def __len__(self):
    with self._lock:
      return len(self._lru)

  def __iter__(self):
    # This is not thread-safe.
    return self._lru.__iter__()

  def __contains__(self, digest):
    with self._lock:
      return digest in self._lru

  @property
  def total_size(self):
    with self._lock:
      return sum(size for _rel_path, size in self._lru.itervalues())

  def get_oldest(self):
    with self._lock:
      try:
        return self._lru.get_oldest()[0]
      except KeyError:
        return None

  def trim(self):
    with self._lock:
      if not os.path.isdir(self.cache_dir):
        return 0

      removed = []

      def _remove_lru_file():
        """Removes the oldest LRU entry. LRU must not be empty."""
        name, _data = self._lru.get_oldest()
        logging.info('Removing named cache %r', name)
        self._remove(name)
        removed.append(name)

      # Trim according to maximum number of items.
      while len(self._lru) > self._policies.max_items:
        _remove_lru_file()

      # Trim according to maximum age.
      if self._policies.max_age_secs:
        cutoff = self._lru.time_fn() - self._policies.max_age_secs
        while self._lru:
          _name, (_content, timestamp) = self._lru.get_oldest()
          if timestamp >= cutoff:
            break
          _remove_lru_file()

      # Trim according to minimum free space.
      if self._policies.min_free_space:
        while self._lru:
          free_space = file_path.get_free_space(self.cache_dir)
          if free_space >= self._policies.min_free_space:
            break
          _remove_lru_file()

      # Trim according to maximum total size.
      if self._policies.max_cache_size:
        while self._lru:
          total = sum(size for _rel_cache, size in self._lru.itervalues())
          if total <= self._policies.max_cache_size:
            break
          _remove_lru_file()

      self._save()
    return len(removed)

  def cleanup(self):
    # TODO(maruel): Implement. In particular, remove the unexpected files and
    # directories!
    pass

  # Internal functions.

  def _try_upgrade(self):
    """Upgrades from the old format to the new one if necessary.

    This code can be removed once all bots are known to have the new format.
    """
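    # Old format entries map name -> rel_cache (a string); new format entries
    # map name -> (rel_cache, size). The isinstance() check below looks at one
    # entry to decide whether the state was already upgraded.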
    if not self._lru:
      return
    _name, (data, _ts) = self._lru.get_oldest()
    if isinstance(data, (list, tuple)):
      return
    # Update to v2.
    def upgrade(_name, rel_cache):
      abs_cache = os.path.join(self.cache_dir, rel_cache)
      return rel_cache, _get_recursive_size(abs_cache)
    self._lru.transform(upgrade)
    self._save()

  def _allocate_dir(self):
    """Creates and returns relative path of a new cache directory."""
    # We randomly generate directory names that have two lower/upper case
    # letters or digits. Total number of possibilities is (26*2 + 10)^2 = 3844.
    abc_len = len(self._DIR_ALPHABET)
    tried = set()
    while len(tried) < 1000:
      i = random.randint(0, abc_len * abc_len - 1)
      rel_path = (
          self._DIR_ALPHABET[i / abc_len] +
          self._DIR_ALPHABET[i % abc_len])
      if rel_path in tried:
        continue
      abs_path = os.path.join(self.cache_dir, rel_path)
      if not fs.exists(abs_path):
        return rel_path
      tried.add(rel_path)
    raise NamedCacheError(
        'could not allocate a new cache dir, too many cache dirs')

  def _remove(self, name):
    """Removes a cache directory and entry.

    Does nothing if the cache entry does not exist.
    """
    self._lock.assert_locked()
    # First try to remove the alias if it exists.
    named_dir = self._get_named_path(name)
    if fs.islink(named_dir):
      fs.unlink(named_dir)

    # Then remove the actual data.
    if name not in self._lru:
      return
    rel_path, _size = self._lru.get(name)
    abs_path = os.path.join(self.cache_dir, rel_path)
    if os.path.isdir(abs_path):
      file_path.rmtree(abs_path)
    self._lru.pop(name)

  def _save(self):
    self._lock.assert_locked()
    file_path.ensure_tree(self.cache_dir)
    self._lru.save(self.state_file)

  def _get_named_path(self, name):
    return os.path.join(self.cache_dir, 'named', name)