# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
| 4 | """Model of source code organization and changes. |
| 5 | |
| 6 | This module modeled complex source code organization, i.e. nested git repos, |
| 7 | and their version relationship, i.e. pinned or floating git repo. In other |
| 8 | words, it's abstraction of chrome's gclient DEPS, and chromeos and Android's |
| 9 | repo manifest. |
| 10 | """ |

from __future__ import print_function
import copy
import json
import logging
import os
import re
import shutil

from bisect_kit import cli
from bisect_kit import git_util

logger = logging.getLogger(__name__)

_re_intra_rev = r'^([^,]+)~([^,]+)/(\d+)$'

SPEC_FIXED = 'fixed'
SPEC_FLOAT = 'float'
_DIFF_CACHE_DIR = 'bisectkit-cache'


def make_intra_rev(a, b, index):
  """Makes an intra-rev version string.

  Between two major "named" versions a and b, there are many small changes
  (commits) in between. bisect-kit identifies all those instances and bisects
  them. We give names to those instances and call these names "intra-revs",
  which stand for minor version numbers between two major versions.

  Note that a+index (without b) is not enough to identify a unique change,
  because of branches. Take chromeos as an example: both 9900.1.0 and 9901.0.0
  are derived from 9900.0.0, so "9900.0.0 plus 100 changes" may ambiguously
  refer to a state in either 9900.1.0 or 9901.0.0.

  Args:
    a: the start version
    b: the end version
    index: the index number of changes between a and b

  Returns:
    the intra-rev version string
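
  Example (version numbers are illustrative):
    make_intra_rev('9900.0.0', '9901.0.0', 42) => '9900.0.0~9901.0.0/42'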
| 52 | """ |
| 53 | return '%s~%s/%d' % (a, b, index) |
| 54 | |
| 55 | |
def parse_intra_rev(rev):
  """Decomposes an intra-rev string.

  See make_intra_rev for what an intra-rev is.

  Args:
    rev: intra-rev string or normal version number

  Returns:
    (start, end, index). If rev is not an intra-rev, it must be a normal
    version number, and (rev, rev, 0) is returned.
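
  Example:
    parse_intra_rev('9900.0.0~9901.0.0/42') => ('9900.0.0', '9901.0.0', 42)
    parse_intra_rev('9900.0.0') => ('9900.0.0', '9900.0.0', 0)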
| 67 | """ |
| 68 | m = re.match(_re_intra_rev, rev) |
| 69 | if m: |
| 70 | return m.group(1), m.group(2), int(m.group(3)) |
| 71 | else: |
| 72 | return rev, rev, 0 |
| 73 | |
| 74 | |
def argtype_intra_rev(argtype):
  """Validates that an argument is an intra-rev string.

  Args:
    argtype: argtype function which validates the major version number

  Returns:
    A new argtype function which matches intra-revs
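
  Example (illustrative; `argtype_version` is a hypothetical validator):
    parser.add_argument('rev', type=argtype_intra_rev(argtype_version))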
| 83 | """ |
| 84 | |
| 85 | def argtype_function(s): |
| 86 | m = re.match(_re_intra_rev, s) |
| 87 | if m: |
| 88 | try: |
| 89 | argtype(m.group(1)) |
| 90 | argtype(m.group(2)) |
| 91 | return s |
| 92 | except cli.ArgTypeError as e: |
| 93 | examples = [] |
| 94 | for example in e.example: |
| 95 | examples.append(make_intra_rev(example, example, 10)) |
| 96 | raise cli.ArgTypeError('Invalid intra rev', examples) |
| 97 | raise cli.ArgTypeError('Invalid intra rev', |
| 98 | [make_intra_rev('<rev1>', '<rev2>', 10)]) |
| 99 | |
| 100 | return argtype_function |
| 101 | |
| 102 | |
def _normalize_repo_url(repo_url):
  repo_url = re.sub(r'https://chrome-internal.googlesource.com/a/',
                    r'https://chrome-internal.googlesource.com/', repo_url)
  repo_url = re.sub(r'\.git$', '', repo_url)
  return repo_url


class PathSpec(object):
  """Specifies the code version of one path.

  Attributes:
    path: local path, relative to project base dir
    repo_url: code repository location
    at: code version; could be a git hash or branch name
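
  Example (illustrative values):
    PathSpec('src/v8', 'https://chromium.googlesource.com/v8/v8', 'f4c9f98')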
| 117 | """ |
| 118 | |
| 119 | def __init__(self, path, repo_url, at): |
| 120 | self.path = path |
| 121 | self.repo_url = repo_url |
| 122 | self.at = at |
| 123 | |
| 124 | def is_static(self): |
| 125 | return git_util.is_git_rev(self.at) |
| 126 | |
| 127 | def __eq__(self, rhs): |
| 128 | if self.path != rhs.path: |
| 129 | return False |
| 130 | if self.at != rhs.at: |
| 131 | return False |
| 132 | if _normalize_repo_url(self.repo_url) != _normalize_repo_url(rhs.repo_url): |
| 133 | return False |
| 134 | return True |
| 135 | |
| 136 | def __ne__(self, rhs): |
| 137 | return not self == rhs |
| 138 | |
| 139 | |
class Spec(object):
  """Collection of PathSpec objects.

  A Spec is analogous to gclient's DEPS and repo's manifest.

  Attributes:
    spec_type: type of spec, SPEC_FIXED or SPEC_FLOAT. SPEC_FIXED means the
        code version is pinned and fixed. On the other hand, SPEC_FLOAT is
        not pinned and the actual version (git commit) may change over time.
    name: name of this spec, for debugging purposes; usually a version number
        or git hash
    timestamp: timestamp of this spec
    path: path of the spec
    entries: dict mapping paths to PathSpec objects
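
  Example (illustrative; a fixed spec with a single pinned repo):
    Spec(SPEC_FIXED, '9900.0.0', 1528272918, 'default.xml',
         {'src/v8': PathSpec('src/v8',
                             'https://chromium.googlesource.com/v8/v8',
                             'f4c9f98')})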
| 154 | """ |
| 155 | |
| 156 | def __init__(self, spec_type, name, timestamp, path, entries=None): |
| 157 | self.spec_type = spec_type |
| 158 | self.name = name |
| 159 | self.timestamp = timestamp |
| 160 | self.path = path |
| 161 | self.entries = entries |
| 162 | |
  def copy(self):
    return copy.deepcopy(self)

  def similar_score(self, rhs):
    """Calculates a similarity score against another Spec.

    Returns:
      score of similarity. A smaller value is more similar.
    """
    score = 0
    for path in set(self.entries) & set(rhs.entries):
      if rhs[path] == self[path]:
        continue
      if rhs[path].at == self[path].at:
        # Remote repos often move around but should be treated as the same
        # one.
        score += 0.1
      else:
        score += 1
    score += len(set(self.entries) ^ set(rhs.entries))
    return score

  def is_static(self):
    return all(path_spec.is_static() for path_spec in self.entries.values())

  def is_subset(self, rhs):
    return set(self.entries.keys()) <= set(rhs.entries.keys())

  def __getitem__(self, path):
    return self.entries[path]

  def __contains__(self, path):
    return path in self.entries

  def apply(self, action_group):
    self.timestamp = action_group.timestamp
    self.name = '(%s)' % self.timestamp
    for action in action_group.actions:
      if isinstance(action, GitAddRepo):
        self.entries[action.path] = PathSpec(action.path, action.repo_url,
                                             action.rev)
      elif isinstance(action, GitCheckoutCommit):
        self.entries[action.path].at = action.rev
      elif isinstance(action, GitRemoveRepo):
        del self.entries[action.path]
      else:
        assert 0, 'unknown action: %s' % action.__class__.__name__

  def dump(self):
    # for debugging
    print(self.name, self.path, self.timestamp)
    print('size', len(self.entries))
    for path, path_spec in sorted(self.entries.items()):
      print(path, path_spec.at)

  def diff(self, rhs):
    logger.info('diff between %s and %s', self.name, rhs.name)
    expect = set(self.entries)
    actual = set(rhs.entries)
    common = 0
    for path in sorted(expect - actual):
      logger.info('-%s', path)
    for path in sorted(actual - expect):
      logger.info('+%s', path)
    for path in sorted(expect & actual):
      if self[path] == rhs[path]:
        common += 1
        continue
      if self[path].at != rhs[path].at:
        logger.info(' %s: at %s vs %s', path, self[path].at, rhs[path].at)
      if self[path].repo_url != rhs[path].repo_url:
        logger.info(' %s: repo_url %s vs %s', path, self[path].repo_url,
                    rhs[path].repo_url)
    logger.info('and common=%s', common)


class Action(object):
  """Actions describe changes from one Spec to another.

  Attributes:
    timestamp: action time
    path: action path, which is relative to project root
  """

  def __init__(self, timestamp, path):
    self.timestamp = timestamp
    self.path = path

  def apply(self, _root_dir):
    raise NotImplementedError

  def summary(self, _code_storage):
    raise NotImplementedError

  def __eq__(self, rhs):
    return self.__dict__ == rhs.__dict__

  def serialize(self):
    return self.__class__.__name__, self.__dict__


def unserialize_action(data):
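  """Reconstructs an Action object from the output of Action.serialize()."""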
  classes = [GitCheckoutCommit, GitAddRepo, GitRemoveRepo]
  class_name, values = data
  assert class_name in [cls.__name__ for cls in classes
                       ], 'unknown action class: %s' % class_name
  for cls in classes:
    if class_name == cls.__name__:
      return cls(**values)


class ActionGroup(object):
  """Atomic group of Action objects.

  This models atomic commits (for example, a gerrit topic or circular
  CQ-DEPEND). Otherwise, an ActionGroup usually consists of only one Action
  object.
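
  The serialized form (see serialize()) looks like, schematically:
    (timestamp, name, [(action class name, action attributes dict), ...])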
| 280 | """ |
| 281 | |
| 282 | def __init__(self, timestamp, comment=None): |
| 283 | self.timestamp = timestamp |
| 284 | self.name = None |
| 285 | self.actions = [] |
| 286 | self.comment = comment |
| 287 | |
| 288 | def add(self, action): |
| 289 | self.actions.append(action) |
| 290 | |
| 291 | def serialize(self): |
| 292 | return (self.timestamp, self.name, [a.serialize() for a in self.actions]) |
| 293 | |
| 294 | def summary(self, code_storage): |
| 295 | if self.comment: |
| 296 | return self.comment |
| 297 | # TODO(kcwu): support multiple Actions |
| 298 | assert len(self.actions) == 1 |
| 299 | return self.actions[0].summary(code_storage) |
| 300 | |
| 301 | @staticmethod |
| 302 | def unserialize(data): |
| 303 | ag = ActionGroup(data[0]) |
| 304 | ag.name = data[1] |
| 305 | for x in data[2]: |
| 306 | ag.add(unserialize_action(x)) |
| 307 | return ag |
| 308 | |
| 309 | def apply(self, root_dir): |
| 310 | for action in self.actions: |
| 311 | action.apply(root_dir) |
| 312 | |
| 313 | |
class GitCheckoutCommit(Action):
  """Describes a git commit action.

  Attributes:
    repo_url: the corresponding url of git repo
    rev: git commit to checkout
  """

  def __init__(self, timestamp, path, repo_url, rev):
    super(GitCheckoutCommit, self).__init__(timestamp, path)
    self.repo_url = repo_url
    self.rev = rev

  def apply(self, root_dir):
    git_repo = os.path.join(root_dir, self.path)
    assert git_util.is_git_root(git_repo)
    git_util.checkout_version(git_repo, self.rev)

  def summary(self, code_storage):
    git_root = code_storage.cached_git_root(self.repo_url)
    summary = git_util.get_commit_log(git_root, self.rev).splitlines()[0]
    return 'commit %s %s %r' % (self.rev[:10], self.path, summary)


class GitAddRepo(Action):
  """Describes a git repo add action.

  Attributes:
    repo_url: the corresponding url of git repo to add
    rev: git commit to checkout
  """

  def __init__(self, timestamp, path, repo_url, rev):
    super(GitAddRepo, self).__init__(timestamp, path)
    self.repo_url = repo_url
    self.rev = rev

  def apply(self, root_dir):
    git_repo = os.path.join(root_dir, self.path)
    assert os.path.exists(git_repo)
    assert git_util.is_git_root(git_repo)

  def summary(self, _code_storage):
    return 'add repo %s from %s@%s' % (self.path, self.repo_url, self.rev[:10])


class GitRemoveRepo(Action):
  """Describes a git repo remove action."""

  def __init__(self, timestamp, path):
    super(GitRemoveRepo, self).__init__(timestamp, path)

  def apply(self, root_dir):
    assert self.path
    git_repo = os.path.join(root_dir, self.path)
    assert git_util.is_git_root(git_repo)
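    # The assert below always fires: repo removal is effectively disabled
    # here, and the rmtree is never reached.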
    assert 0
    shutil.rmtree(git_repo)

  def summary(self, _code_storage):
    return 'remove repo %s' % self.path


def apply_actions(code_storage, action_groups, root_dir):
  # Speed optimization: only apply the last one of consecutive commits per
  # repo. It is possible to optimize further, but we would need to take care
  # of git repo add/remove within another repo.
  commits = {}

  def batch_apply(commits):
    for i, commit_action in sorted(commits.values()):
      logger.debug('[%d] applying %r', i, commit_action.summary(code_storage))
      commit_action.apply(root_dir)

  for i, action_group in enumerate(action_groups, 1):
    for action in action_group.actions:
      if not isinstance(action, GitCheckoutCommit):
        break
    else:
      # If all actions are commits, defer them for batch processing.
      for action in action_group.actions:
        commits[action.path] = (i, action)
      continue

    batch_apply(commits)
    commits = {}
    action_group.apply(root_dir)

  batch_apply(commits)


class SpecManager(object):
  """Spec related abstract operations.

  This class enumerates Spec instances and switches the disk state to match a
  Spec.

  In other words, this class abstracts:
   - discovery of gclient's DEPS and repo's manifest
   - gclient sync and repo sync
  """

  def collect_float_spec(self, old, new):
    """Collects float Specs between two versions.

    This method may fetch specs from the network. However, it should not
    switch the tree version state.
    """
    raise NotImplementedError

  def collect_fixed_spec(self, old, new):
    """Collects fixed Specs between two versions.

    This method may fetch specs from the network. However, it should not
    switch the tree version state.
    """
    raise NotImplementedError

  def parse_spec(self, spec):
    """Parses information for a Spec object.

    Args:
      spec: Spec object. It specifies what to parse, and the parsed
          information is stored inside.
    """
    raise NotImplementedError

  def sync_disk_state(self, rev):
    """Switches the source tree state to the given version."""
    raise NotImplementedError


class CodeStorage(object):
  """Queries code history and commit relationships without checkout.

  Because paths inside the source tree may be deleted or may map to different
  remote repos in different versions, we cannot query git information of one
  version while the tree state is at another version. To query information
  quickly and without changing the tree state, we need out-of-tree source
  code storage.

  This class assumes all git repos are mirrored somewhere on local disk.
  Subclasses just need to implement cached_git_root(), which returns the
  location.

  In other words, this class abstracts operations upon gclient's cache-dir
  and repo's mirror.
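
  A minimal subclass sketch (illustrative; the mirror layout is an
  assumption):

    class MirrorStorage(CodeStorage):
      def cached_git_root(self, repo_url):
        # Map each remote URL to a directory under a local mirror root.
        name = re.sub(r'[^\w.-]', '_', _normalize_repo_url(repo_url))
        return os.path.join('/var/cache/mirrors', name)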
| 460 | """ |
| 461 | |
| 462 | def cached_git_root(self, repo_url): |
| 463 | """The cached path of given remote git repo. |
| 464 | |
| 465 | Args: |
| 466 | repo_url: URL of git remote repo |
| 467 | |
| 468 | Returns: |
| 469 | path of cache folder |
| 470 | """ |
| 471 | raise NotImplementedError |
| 472 | |
  def is_ancestor_commit(self, spec, path, old, new):
    """Determines whether one commit is an ancestor of another.

    Args:
      spec: Spec object
      path: local path relative to project root
      old: commit id
      new: commit id

    Returns:
      True if `old` is an ancestor of `new`
    """
    git_root = self.cached_git_root(spec[path].repo_url)
    return git_util.is_ancestor_commit(git_root, old, new)

  def get_rev_by_time(self, spec, path, timestamp):
    """Gets the commit hash of the given spec path at the given time.

    Args:
      spec: Spec object
      path: local path relative to project root
      timestamp: the time to query

    Returns:
      The commit hash at the given time. If multiple commits share the given
      timestamp, the last one is returned.
    """
    git_root = self.cached_git_root(spec[path].repo_url)
    # spec[path].at is a remote reference name. Since git_root is a mirror,
    # there is no need to convert the name.
    return git_util.get_rev_by_time(git_root, timestamp, spec[path].at)

  def get_actions_between_two_commit(self, spec, path, old, new):
    git_root = self.cached_git_root(spec[path].repo_url)
    result = []
    for timestamp, git_rev in git_util.list_commits_between_commits(
        git_root, old, new):
      result.append(
          GitCheckoutCommit(timestamp, path, spec[path].repo_url, git_rev))
    return result

  def is_containing_commit(self, spec, path, rev):
    git_root = self.cached_git_root(spec[path].repo_url)
    return git_util.is_containing_commit(git_root, rev)

  def are_spec_commits_available(self, spec):
    for path, path_spec in spec.entries.items():
      if not path_spec.is_static():
        continue
      if not self.is_containing_commit(spec, path, path_spec.at):
        return False
    return True


class CodeManager(object):
  """Reconstructs historical source tree states.

  This class can reconstruct the source tree state of any moment, and the
  diffs between moments.

  Attributes:
    root_dir: root path of the project source tree
    spec_manager: SpecManager object
    code_storage: CodeStorage object
  """

  def __init__(self, root_dir, spec_manager, code_storage):
    self.root_dir = root_dir
    self.spec_manager = spec_manager
    self.code_storage = code_storage

  def generate_actions_between_specs(self, prev_float, next_float):
    """Generates actions between two float specs.

    Args:
      prev_float: the start float Spec (exclusive)
      next_float: the end float Spec (inclusive)

    Returns:
      list of Action objects (unordered)
    """
    actions = []
    for path in set(prev_float.entries) | set(next_float.entries):

      # Add repo
      if path not in prev_float:
        if next_float[path].is_static():
          next_at = next_float[path].at
        else:
          next_at = self.code_storage.get_rev_by_time(next_float, path,
                                                      next_float.timestamp)
        actions.append(
            GitAddRepo(next_float.timestamp, path, next_float[path].repo_url,
                       next_at))
        continue

      # The existing path is floating; enumerate commits until the next spec.
      #
      #                prev_at                              till_at
      # prev branch ---> o --------> o --------> o --------> o --------> ...
      #                  ^                                   ^
      #      prev_float.timestamp                 next_float.timestamp
      if not prev_float[path].is_static():
        prev_at = self.code_storage.get_rev_by_time(prev_float, path,
                                                    prev_float.timestamp)
        till_at = self.code_storage.get_rev_by_time(prev_float, path,
                                                    next_float.timestamp)

        actions.extend(
            self.code_storage.get_actions_between_two_commit(
                prev_float, path, prev_at, till_at))
      else:
        prev_at = till_at = prev_float[path].at

      # At next_float.timestamp.
      if path not in next_float:
        # remove repo
        actions.append(GitRemoveRepo(next_float.timestamp, path))
        next_at = None

      elif next_float[path].is_static():
        # pinned to a certain commit on a different branch
        next_at = next_float[path].at

      elif next_float[path].at == prev_float[path].at:
        # keep floating on the same branch
        next_at = till_at

      else:
        # Switch to another branch.
        #                prev_at                  till_at
        # prev branch ---> o --------> o --------> o --------> o --------> ...
        #
        #                                        next_at
        # next branch ...... o ------> o --------> o -----> ...
        #                  ^                       ^
        #      prev_float.timestamp      next_float.timestamp
        next_at = self.code_storage.get_rev_by_time(next_float, path,
                                                    next_float.timestamp)

      if next_at and next_at != till_at:
        actions.append(
            GitCheckoutCommit(next_float.timestamp, path,
                              next_float[path].repo_url, next_at))

    return actions

  def synthesize_fixed_spec(self, float_spec, timestamp):
    """Synthesizes a fixed spec from a float spec at the given time.

    Args:
      float_spec: the float spec
      timestamp: snapshot time

    Returns:
      Spec object
    """
    result = {}
    for path, path_spec in float_spec.entries.items():
      if not path_spec.is_static():
        at = self.code_storage.get_rev_by_time(float_spec, path, timestamp)
        path_spec = PathSpec(path_spec.path, path_spec.repo_url, at)

      result[path] = copy.deepcopy(path_spec)

    name = '%s@%s' % (float_spec.path, timestamp)
    return Spec(SPEC_FIXED, name, timestamp, float_spec.path, result)

  def reorder_actions(self, actions):
    """Reorders and clusters actions.

    Args:
      actions: list of Action objects

    Returns:
      list of ActionGroup objects
    """
    # TODO(kcwu): support atomic commits across repos
    actions.sort(key=lambda x: x.timestamp)
    result = []
    for action in actions:
      group = ActionGroup(action.timestamp)
      group.add(action)
      result.append(group)
    return result

  def match_spec(self, target, specs, start_index=0):
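    """Matches `target` against the most similar spec in `specs`.

    Args:
      target: the Spec object to match
      specs: list of Spec objects (assumed sorted by timestamp)
      start_index: index in `specs` to start searching from

    Returns:
      the index of the matched spec in `specs`, or None if no match
    """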
    threshold = 3600
    # ideal_index is the index of the last spec before target.
    # begin and end are the range of indexes within threshold (inclusive).
    ideal_index = None
    begin, end = None, None
    for i, spec in enumerate(specs[start_index:], start_index):
      if spec.timestamp <= target.timestamp:
        ideal_index = i
      if abs(spec.timestamp - target.timestamp) < threshold:
        if begin is None:
          begin = i
        end = i

    candidates = []
    if ideal_index is not None:
      candidates.append(ideal_index)
    if begin is not None:
      candidates.extend(range(begin, end + 1))
    if not candidates:
      logger.error('unable to match %s: all specs are after it', target.name)
      return None

    compatible_candidates = [
        i for i in candidates if specs[i].is_subset(target)
    ]
    if not compatible_candidates:
      logger.error('unable to match %s: no compatible specs', target.name)
      spec = specs[candidates[0]]
      target.diff(spec)
      return None

    scores = []
    for i in compatible_candidates:
      scores.append((specs[i].similar_score(target), i))
    scores.sort()

    score, index = scores[0]
    if score != 0:
      logger.warning('not an exact match (score=%s): %s', score, target.name)
      target.diff(specs[index])

    if index < ideal_index:
      logger.warning(
          '%s (%s) matched earlier spec at %s instead of %s, racing? offset %d',
          target.name, target.timestamp, specs[index].timestamp,
          specs[ideal_index].timestamp,
          specs[ideal_index].timestamp - target.timestamp)
    if index > ideal_index:
      logger.warning(
          'spec committed at %d matched later commit at %d. bad server clock?',
          target.timestamp, specs[index].timestamp)

    return index

  def associate_fixed_and_synthesized_specs(self, fixed_specs,
                                            synthesized_specs):
    # All fixed specs are snapshots of float specs. Theoretically, each
    # should be identical to one of the synthesized specs. However, this is
    # not always true, perhaps due to race conditions or bugs in bisect-kit.
    # To overcome such glitches, we match specs by similarity instead of
    # requiring an exact match.
    result = []
    last_index = 0
    for i, fixed_spec in enumerate(fixed_specs):
      matched_index = self.match_spec(fixed_spec, synthesized_specs, last_index)
      if matched_index is None:
        if i in (0, len(fixed_specs) - 1):
          logger.error('essential spec mismatch, unable to continue')
          assert 0
        else:
          logger.warning('%s does not match, skipping', fixed_spec.name)
          continue
      result.append((i, matched_index))
      last_index = matched_index

    return result

  def _create_make_up_actions(self, fixed_spec, synthesized):
    timestamp = synthesized.timestamp
    make_up = ActionGroup(
        timestamp, comment='make up glitch for %s' % fixed_spec.name)
    for path in set(fixed_spec.entries) & set(synthesized.entries):
      if fixed_spec[path].at == synthesized[path].at:
        continue
      action = GitCheckoutCommit(timestamp, path, synthesized[path].repo_url,
                                 synthesized[path].at)
      make_up.add(action)

    if not make_up.actions:
      return None
    return make_up

  def build_revlist(self, old, new):
    """Builds the revision list between two versions.

    Returns:
      list of rev strings
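
    Example (illustrative):
      build_revlist('9900.0.0', '9901.0.0') might return
      ['9900.0.0', '9900.0.0~9901.0.0/1', ..., '9901.0.0']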
| 758 | """ |
| 759 | logger.info('build_revlist') |
| 760 | revlist = [] |
| 761 | |
| 762 | # step 1, find all float and fixed specs in the given range. |
| 763 | fixed_specs = self.spec_manager.collect_fixed_spec(old, new) |
| 764 | float_specs = self.spec_manager.collect_float_spec(old, new) |
| 765 | while float_specs[-1].timestamp > fixed_specs[-1].timestamp: |
| 766 | float_specs.pop() |
| 767 | assert float_specs |
| 768 | for spec in float_specs + fixed_specs: |
| 769 | self.spec_manager.parse_spec(spec) |
| 770 | |
| 771 | # step 2, synthesize all fixed specs in the range from float specs. |
| 772 | specs = float_specs + [fixed_specs[-1]] |
| 773 | actions = [] |
| 774 | logger.debug('len(specs)=%d', len(specs)) |
| 775 | for i in range(len(specs) - 1): |
| 776 | prev_float = specs[i] |
| 777 | next_float = specs[i + 1] |
| 778 | logger.debug('[%d], between %s (%s) and %s (%s)', i, prev_float.name, |
| 779 | prev_float.timestamp, next_float.name, next_float.timestamp) |
| 780 | actions += self.generate_actions_between_specs(prev_float, next_float) |
| 781 | action_groups = self.reorder_actions(actions) |
| 782 | |
| 783 | spec = self.synthesize_fixed_spec(float_specs[0], fixed_specs[0].timestamp) |
| 784 | synthesized = [spec.copy()] |
| 785 | for action_group in action_groups: |
| 786 | spec.apply(action_group) |
| 787 | synthesized.append(spec.copy()) |
| 788 | |
| 789 | # step 3, associate fixed specs with synthesized specs. |
| 790 | associated_pairs = self.associate_fixed_and_synthesized_specs( |
| 791 | fixed_specs, synthesized) |
| 792 | |
| 793 | # step 4, group actions and cache them |
| 794 | for i, (fixed_index, synthesized_index) in enumerate(associated_pairs[:-1]): |
| 795 | next_fixed_index, next_synthesized_index = associated_pairs[i + 1] |
| 796 | revlist.append(fixed_specs[fixed_index].name) |
| 797 | this_action_groups = [] |
| 798 | |
| 799 | # handle glitch |
| 800 | if fixed_specs[fixed_index].similar_score( |
| 801 | synthesized[synthesized_index]) != 0: |
| 802 | assert synthesized[synthesized_index].is_subset( |
| 803 | fixed_specs[fixed_index]) |
| 804 | skipped = set(fixed_specs[fixed_index].entries) - set( |
| 805 | synthesized[synthesized_index].entries) |
| 806 | if skipped: |
| 807 | logger.warning( |
| 808 | 'between %s and %s, ' |
| 809 | 'bisect-kit cannot analyze commit history of following paths:', |
| 810 | fixed_specs[fixed_index].name, fixed_specs[next_fixed_index].name) |
| 811 | for path in sorted(skipped): |
| 812 | logger.warning(' %s', path) |
| 813 | |
| 814 | make_up = self._create_make_up_actions(fixed_specs[fixed_index], |
| 815 | synthesized[synthesized_index]) |
| 816 | if make_up: |
| 817 | this_action_groups.append(make_up) |
| 818 | |
| 819 | this_action_groups.extend( |
| 820 | action_groups[synthesized_index:next_synthesized_index]) |
| 821 | for idx, ag in enumerate(this_action_groups, 1): |
| 822 | rev = make_intra_rev(fixed_specs[fixed_index].name, |
| 823 | fixed_specs[next_fixed_index].name, idx) |
| 824 | ag.name = rev |
| 825 | revlist.append(rev) |
| 826 | |
| 827 | self.save_action_groups_between_releases( |
| 828 | fixed_specs[fixed_index].name, fixed_specs[next_fixed_index].name, |
| 829 | this_action_groups) |
| 830 | revlist.append(fixed_specs[associated_pairs[-1][0]].name) |
| 831 | |
| 832 | return revlist |
| 833 | |
  def save_action_groups_between_releases(self, old, new, action_groups):
    data = [ag.serialize() for ag in action_groups]

    cache_dir = os.path.join(self.root_dir, _DIFF_CACHE_DIR)
    if not os.path.exists(cache_dir):
      os.makedirs(cache_dir)
    cache_filename = os.path.join(cache_dir, '%s,%s.json' % (old, new))
    with open(cache_filename, 'w') as fp:
      json.dump(data, fp, indent=4, sort_keys=True)

  def load_action_groups_between_releases(self, old, new):
    cache_dir = os.path.join(self.root_dir, _DIFF_CACHE_DIR)
    cache_filename = os.path.join(cache_dir, '%s,%s.json' % (old, new))
    if not os.path.exists(cache_filename):
      raise Exception('cached revlist not found: %s' % cache_filename)

    result = []
    with open(cache_filename) as fp:
      for data in json.load(fp):
        result.append(ActionGroup.unserialize(data))

    return result

  def view_rev_diff(self, old, new):
    old_base, _, _ = parse_intra_rev(old)
    _, new_next, _ = parse_intra_rev(new)
    assert old_base != new_next

    revlist = []
    rev_summary = {}
    fixed_specs = self.spec_manager.collect_fixed_spec(old_base, new_next)
    for i, spec in enumerate(fixed_specs[:-1]):
      action_groups = self.load_action_groups_between_releases(
          fixed_specs[i].name, fixed_specs[i + 1].name)
      revlist.append(spec.name)
      rev_summary[spec.name] = ''
      for action_group in action_groups:
        revlist.append(action_group.name)
        rev_summary[action_group.name] = action_group.summary(self.code_storage)

    revlist.append(fixed_specs[-1].name)
    rev_summary[fixed_specs[-1].name] = ''

    old_index = revlist.index(old)
    new_index = revlist.index(new)
    for rev in revlist[old_index:new_index + 1]:
      logger.info('%s %s', rev, rev_summary[rev])

  def switch(self, rev):
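    """Switches the source tree state to the given rev.

    `rev` is either a plain version number (handled directly by
    sync_disk_state) or an intra-rev, in which case the tree is synced to the
    intra-rev's start version and the cached action groups are replayed on
    top of it.
    """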
    # easy case
    if not re.match(_re_intra_rev, rev):
      self.spec_manager.sync_disk_state(rev)
      return

    rev_old, rev_new, idx = parse_intra_rev(rev)
    action_groups = self.load_action_groups_between_releases(rev_old, rev_new)
    assert 0 <= idx <= len(action_groups)
    action_groups = action_groups[:idx]

    self.spec_manager.sync_disk_state(rev_old)

    apply_actions(self.code_storage, action_groups, self.root_dir)