#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script cleans up the vendor directory."""
import argparse
import collections
import hashlib
import json
import os
import pathlib
import re
import shutil
import subprocess
import textwrap
import toml

# We only care about crates we're actually going to use and that's usually
# limited to ones with cfg(linux). For running `cargo metadata`, limit results
# to only this platform.
DEFAULT_PLATFORM_FILTER = "x86_64-unknown-linux-gnu"


def _rerun_checksums(package_path):
    """Re-run checksums for given package.

    Writes resulting checksums to $package_path/.cargo-checksum.json.
    """
    hashes = dict()
    checksum_path = os.path.join(package_path, '.cargo-checksum.json')
    if not pathlib.Path(checksum_path).is_file():
        return False

    with open(checksum_path, 'r') as fread:
        contents = json.load(fread)

    for root, _, files in os.walk(package_path, topdown=True):
        for f in files:
            # Don't checksum an existing checksum file
            if f == ".cargo-checksum.json":
                continue

            file_path = os.path.join(root, f)
            with open(file_path, 'rb') as frb:
                m = hashlib.sha256()
                m.update(frb.read())
                d = m.hexdigest()

            # Key is relative to the package path so strip from beginning
            key = os.path.relpath(file_path, package_path)
            hashes[key] = d

    if hashes:
        print("{} regenerated {} hashes".format(package_path,
                                                len(hashes.keys())))
        contents['files'] = hashes
        with open(checksum_path, 'w') as fwrite:
            json.dump(contents, fwrite, sort_keys=True)

    return True


def _remove_OWNERS_checksum(root):
    """Delete all OWNERS files from the checksum file.

    Args:
        root: Root directory for the vendored crate.

    Returns:
        True if OWNERS was found and cleaned up. Otherwise False.
    """
    checksum_path = os.path.join(root, '.cargo-checksum.json')
    if not pathlib.Path(checksum_path).is_file():
        return False

    with open(checksum_path, 'r') as fread:
        contents = json.load(fread)

    del_keys = []
    for cfile in contents['files']:
        if 'OWNERS' in cfile:
            del_keys.append(cfile)

    for key in del_keys:
        del contents['files'][key]

    if del_keys:
        print('{} deleted: {}'.format(root, del_keys))
        with open(checksum_path, 'w') as fwrite:
            json.dump(contents, fwrite, sort_keys=True)

    return bool(del_keys)


def cleanup_owners(vendor_path):
    """Remove owners checksums from the vendor directory.

    We currently do not check in the OWNERS files from vendored crates because
    they interfere with the find-owners functionality in gerrit. This cleanup
    simply finds all instances of "OWNERS" in the checksum files within and
    removes them.

    Args:
        vendor_path: Absolute path to vendor directory.
    """
    deps_cleaned = []
    for root, dirs, _ in os.walk(vendor_path):
        for d in dirs:
            removed = _remove_OWNERS_checksum(os.path.join(root, d))
            if removed:
                deps_cleaned.append(d)

    if deps_cleaned:
        print('Cleanup owners:\n {}'.format("\n".join(deps_cleaned)))


def apply_single_patch(patch, workdir):
    """Apply a single patch and return whether it was successful.

    Returns:
        True if successful. False otherwise.
    """
    print(f"-- Applying {patch} to {workdir}")
    proc = subprocess.run(["patch", "-p1", "-i", patch], cwd=workdir)
    return proc.returncode == 0


def determine_vendor_crates(vendor_path):
    """Returns a map of {crate_name: [directory]} at the given vendor_path."""
    result = collections.defaultdict(list)
    for crate_name_plus_ver in os.listdir(vendor_path):
        name, _ = crate_name_plus_ver.rsplit('-', 1)
        result[name].append(crate_name_plus_ver)

    for crate_list in result.values():
        crate_list.sort()
    return result


def apply_patches(patches_path, vendor_path):
    """Finds patches and applies them to sub-folders in the vendored crates.

    Args:
        patches_path: Path to folder with patches. Expect all patches to be
            one level down (matching the crate name).
        vendor_path: Root path to vendored crates directory.
    """
    checksums_for = {}

    # Don't bother running if patches directory is empty
    if not pathlib.Path(patches_path).is_dir():
        return

    vendor_crate_map = determine_vendor_crates(vendor_path)
    # Look for all patches and apply them
    for d in os.listdir(patches_path):
        dir_path = os.path.join(patches_path, d)

        # We don't process patches in root dir
        if not os.path.isdir(dir_path):
            continue

        for patch in os.listdir(dir_path):
            file_path = os.path.join(dir_path, patch)

            # Skip if not a patch file
            if not os.path.isfile(file_path) or not patch.endswith(".patch"):
                continue

            # We accept one of two forms here:
            # - direct targets (these name `${crate_name}-${version}`)
            # - simply the crate name (which applies to all versions of the
            #   crate)
            direct_target = os.path.join(vendor_path, d)
            if os.path.isdir(direct_target):
                # If there are any patches, queue checksums for that folder.
                checksums_for[d] = True

                # Apply the patch. Exit from patch loop if patching failed.
                if not apply_single_patch(file_path, direct_target):
                    print("Failed to apply patch: {}".format(patch))
                    break
            elif d in vendor_crate_map:
                for crate in vendor_crate_map[d]:
                    checksums_for[crate] = True
                    target = os.path.join(vendor_path, crate)
                    if not apply_single_patch(file_path, target):
                        print(f'Failed to apply patch {patch} to {target}')
                        break
            else:
                raise RuntimeError(f'Unknown crate in {vendor_path}: {d}')

    # Re-run checksums for all modified packages since we applied patches.
    for key in checksums_for.keys():
        _rerun_checksums(os.path.join(vendor_path, key))


def fetch_project_cargo_toml_files(working_dir):
    """Returns all Cargo.toml files under working_dir."""
    projects = working_dir / 'projects'
    return sorted(projects.glob('**/Cargo.toml'))


def run_cargo_vendor(working_dir):
    """Runs cargo vendor.

    Args:
        working_dir: Directory to run inside. This should be the directory
            where Cargo.toml is kept.
    """
    # Cargo will refuse to revendor into versioned directories, which leads to
    # repeated `./vendor.py` invocations trying to apply patches to
    # already-patched sources. Remove the existing vendor directory to avoid
    # this.
    vendor_dir = working_dir / 'vendor'
    if vendor_dir.exists():
        shutil.rmtree(vendor_dir)

    cargo_cmdline = ['cargo', 'vendor', '--versioned-dirs', '-v']
    for i, cargo_toml in enumerate(fetch_project_cargo_toml_files(working_dir)):
        # `cargo vendor` requires a 'root' manifest; select an arbitrary one,
        # then tack other manifests on to it. Order doesn't really matter.
        if i == 0:
            cargo_cmdline.append('--manifest-path')
        else:
            cargo_cmdline.append('-s')
        cargo_cmdline.append(str(cargo_toml))

        # Autocreate src/lib.rs if necessary.
        lib_rs = cargo_toml.parent / 'src' / 'lib.rs'
        if not lib_rs.exists():
            lib_rs.parent.mkdir(exist_ok=True)
            lib_rs.write_bytes(b'')

    # Always place vendor/ at the top-level directory.
    cargo_cmdline += ('--', 'vendor')
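    # The final command ends up roughly like (illustrative):
    #   cargo vendor --versioned-dirs -v --manifest-path projects/a/Cargo.toml \
    #       -s projects/b/Cargo.toml -- vendor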
    subprocess.check_call(cargo_cmdline, cwd=working_dir)


def load_metadata(working_dir, filter_platform=DEFAULT_PLATFORM_FILTER):
    """Load metadata for all projects under a given directory.

    Args:
        working_dir: Base directory to run from.
        filter_platform: Filter packages to ones configured for this platform.
    """
    metadata_objects = []
    for manifest_path in fetch_project_cargo_toml_files(working_dir):
        cmd = [
            'cargo', 'metadata', '--format-version', '1', '--manifest-path',
            manifest_path
        ]
        # Conditionally add platform filter
        if filter_platform:
            cmd += ("--filter-platform", filter_platform)
        output = subprocess.check_output(cmd, cwd=working_dir)
        metadata_objects.append(json.loads(output))
    return metadata_objects


def load_all_metadata_packages(working_dir,
                               filter_platform=DEFAULT_PLATFORM_FILTER,
                               unique=False):
    """Returns a list of all packages returned by load_metadata."""
    results = []
    for metadata in load_metadata(working_dir, filter_platform):
        results += metadata['packages']

    if not unique:
        return results

    new_results = []
    seen_keys = set()
    for item in results:
        key = item['id']
        if key in seen_keys:
            continue
        seen_keys.add(key)
        new_results.append(item)
    return new_results


| 284 | class LicenseManager: |
| 285 | """ Manage consolidating licenses for all packages.""" |
| 286 | |
| 287 | # These are all the licenses we support. Keys are what is seen in metadata and |
| 288 | # values are what is expected by the ebuild. |
| 289 | SUPPORTED_LICENSES = { |
| 290 | 'Apache-2.0': 'Apache-2.0', |
| 291 | 'MIT': 'MIT', |
| 292 | 'BSD-3-Clause': 'BSD-3', |
| 293 | 'ISC': 'ISC' |
| 294 | } |
| 295 | |
| 296 | # Prefer to take attribution licenses in this order. All these require that |
| 297 | # we actually use the license file found in the package so they MUST have |
| 298 | # a license file set. |
| 299 | PREFERRED_ATTRIB_LICENSE_ORDER = ['MIT', 'BSD-3', 'ISC'] |
| 300 | |
| 301 | # If Apache license is found, always prefer it (simplifies attribution) |
| 302 | APACHE_LICENSE = 'Apache-2.0' |
| 303 | |
| 304 | # Regex for license files found in the vendored directories. Search for |
| 305 | # these files with re.IGNORECASE. |
| 306 | # |
| 307 | # These will be searched in order with the earlier entries being preferred. |
| 308 | LICENSE_NAMES_REGEX = [ |
| 309 | r'^license-mit$', |
| 310 | r'^copyright$', |
| 311 | r'^licen[cs]e.*$', |
| 312 | ] |
| 313 | |
| 314 | # Some crates have their license file in other crates. This usually occurs |
| 315 | # because multiple crates are published from the same git repository and the |
| 316 | # license isn't updated in each sub-crate. In these cases, we can just |
| 317 | # ignore these packages. |
| 318 | MAP_LICENSE_TO_OTHER = { |
| 319 | 'failure_derive': 'failure', |
| 320 | 'grpcio-compiler': 'grpcio', |
| 321 | 'grpcio-sys': 'grpcio', |
| 322 | 'rustyline-derive': 'rustyline', |
| 323 | } |
| 324 | |
| 325 | # Map a package to a specific license and license file. Only use this if |
| 326 | # a package doesn't have an easily discoverable license or exports its |
| 327 | # license in a weird way. Prefer to patch the project with a license and |
| 328 | # upstream the patch instead. |
| 329 | STATIC_LICENSE_MAP = { |
| 330 | # "package name": ( "license name", "license file relative location") |
| 331 | } |
| 332 | |
    def __init__(self, working_dir, vendor_dir):
        self.working_dir = working_dir
        self.vendor_dir = vendor_dir

    def _find_license_in_dir(self, search_dir):
        for p in os.listdir(search_dir):
            # Ignore anything that's not a file
            if not os.path.isfile(os.path.join(search_dir, p)):
                continue

            # Now check if the name matches any of the regexes.
            # We'll return the first matching file.
            for regex in self.LICENSE_NAMES_REGEX:
                if re.search(regex, p, re.IGNORECASE):
                    yield os.path.join(search_dir, p)
                    break

    def _guess_license_type(self, license_file):
        if '-MIT' in license_file:
            return 'MIT'
        elif '-APACHE' in license_file:
            return 'APACHE'
        elif '-BSD' in license_file:
            return 'BSD-3'

        with open(license_file, 'r') as f:
            lines = f.read()
            if 'MIT' in lines:
                return 'MIT'
            elif 'Apache' in lines:
                return 'APACHE'
            elif 'BSD 3-Clause' in lines:
                return 'BSD-3'

        return ''

    def generate_license(self, skip_license_check, print_map_to_file,
                         license_shorthand_file):
        """Generate single massive license file from metadata."""
        all_packages = load_all_metadata_packages(self.working_dir, unique=True)

        has_license_types = set()
        bad_licenses = {}

        # Keep license map ordered so it generates a consistent license map.
        license_map = {}
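        # Each entry ends up shaped roughly like (illustrative):
        #   "foo": {"license": "MIT",
        #           "license_file": "vendor/foo-1.0.0/LICENSE"}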

        skip_license_check = skip_license_check or []

        for package in all_packages:
            # Skip the synthesized Cargo.toml packages that exist solely to
            # list dependencies.
            if 'path+file:///' in package['id']:
                continue

            pkg_name = package['name']
            if pkg_name in skip_license_check:
                print("Skipped license check on {}. "
                      "Reason: Skipped from command line".format(pkg_name))
                continue

            if pkg_name in self.MAP_LICENSE_TO_OTHER:
                print("Skipped license check on {}. "
                      "Reason: License already in {}".format(
                          pkg_name, self.MAP_LICENSE_TO_OTHER[pkg_name]))
                continue

            # Check if we have a static license map for this package. Use the
            # static values if we have it already set.
            if pkg_name in self.STATIC_LICENSE_MAP:
                (license, license_file) = self.STATIC_LICENSE_MAP[pkg_name]
                license_map[pkg_name] = {
                    "license": license,
                    "license_file": license_file,
                }
                continue

            license_files = []
            # Use `or ''` instead of get's default, since `package` may have a
            # None value for 'license'.
            license = package.get('license') or ''

            # We ignore the metadata for license file because most crates
            # don't have it set. Just scan the source for licenses.
            pkg_version = package['version']
            license_files = list(
                self._find_license_in_dir(
                    os.path.join(self.vendor_dir,
                                 f'{pkg_name}-{pkg_version}')))

            # If there are multiple licenses, they are delimited with "OR" or
            # "/".
            delim = ' OR ' if ' OR ' in license else '/'
            found = [x.strip() for x in license.split(delim)]

            # Filter licenses to ones we support.
            licenses_or = [
                self.SUPPORTED_LICENSES[f] for f in found
                if f in self.SUPPORTED_LICENSES
            ]

            # If apache license is found, always prefer it because it
            # simplifies license attribution (we can use existing Apache
            # notice).
            if self.APACHE_LICENSE in licenses_or:
                has_license_types.add(self.APACHE_LICENSE)
                license_map[pkg_name] = {'license': self.APACHE_LICENSE}

            # Handle single license that has at least one license file.
            # We pick the first license file and the license.
            elif len(licenses_or) == 1:
                if license_files:
                    l = licenses_or[0]
                    lf = license_files[0]

                    has_license_types.add(l)
                    license_map[pkg_name] = {
                        'license': l,
                        'license_file': os.path.relpath(lf, self.working_dir),
                    }
                else:
                    bad_licenses[pkg_name] = "{} missing license file".format(
                        licenses_or[0])
            # Handle multiple licenses.
            elif len(licenses_or) > 1:
                # Check preferred licenses in order.
                license_found = False
                for l in self.PREFERRED_ATTRIB_LICENSE_ORDER:
                    if l not in licenses_or:
                        continue

                    for f in license_files:
                        if self._guess_license_type(f) == l:
                            license_found = True
                            has_license_types.add(l)
                            license_map[pkg_name] = {
                                'license': l,
                                'license_file':
                                    os.path.relpath(f, self.working_dir),
                            }
                            break

                    # Break out of loop if license is found.
                    if license_found:
                        break
            else:
                bad_licenses[pkg_name] = license

        # If we had any bad licenses, we need to abort.
        if bad_licenses:
            for k in bad_licenses.keys():
                print("{} had no acceptable licenses: {}".format(
                    k, bad_licenses[k]))
            raise Exception("Bad licenses in vendored packages.")

        # Write license map to file.
        if print_map_to_file:
            with open(os.path.join(self.working_dir, print_map_to_file),
                      'w') as lfile:
                json.dump(license_map, lfile, sort_keys=True)

        # Raise missing licenses unless we have a valid reason to ignore them.
        raise_missing_license = False
        for name, v in license_map.items():
            if 'license_file' not in v and v.get('license',
                                                 '') != self.APACHE_LICENSE:
                raise_missing_license = True
                print(" {}: Missing license file. Fix or add to ignorelist."
                      .format(name))

        if raise_missing_license:
            raise Exception(
                "Unhandled missing license file. "
                "Make sure all are accounted for before continuing.")

        sorted_licenses = sorted(has_license_types)
        print("Add the following licenses to the ebuild:\n", sorted_licenses)
| 509 | header = textwrap.dedent("""\ |
| 510 | # File to describe the licenses used by this registry. |
| 511 | # Used to it's easy to automatically verify ebuilds are updated. |
| 512 | # Each line is a license. Lines starting with # are comments. |
| 513 | """) |
        with open(license_shorthand_file, 'w', encoding='utf-8') as f:
            f.write(header)
            f.write('\n'.join(sorted_licenses))


# TODO(abps) - This needs to be replaced with datalog later. We should compile
#              all crab files into datalog and query it with our requirements
#              instead.
class CrabManager:
    """Manage audit files."""

    def __init__(self, working_dir, crab_dir):
        self.working_dir = working_dir
        self.crab_dir = crab_dir

    def _check_bad_traits(self, crabdata):
        """Checks that a package's crab audit meets our requirements.

        Args:
            crabdata: Dict with crab keys in standard templated format.
        """
        common = crabdata['common']
        # TODO(b/200578411) - Figure out what conditions we should enforce as
        # part of the audit.
        conditions = [
            common.get('deny', None),
        ]

        # If any conditions are true, this crate is not acceptable.
        return any(conditions)

    def verify_traits(self):
        """Verify that all required CRAB traits for this repository are met.
        """
        all_packages = load_all_metadata_packages(self.working_dir, unique=True)

        failing_crates = {}

        # Verify all packages have a CRAB file associated with it and they
        # meet all our required traits.
        for package in all_packages:
            # Skip the synthesized Cargo.toml packages that exist solely to
            # list dependencies.
            if 'path+file:///' in package['id']:
                continue

            crabname = "{}-{}".format(package['name'], package['version'])
            filename = os.path.join(self.crab_dir, "{}.toml".format(crabname))

            # If the crab file doesn't exist, the crate fails.
            if not os.path.isfile(filename):
                failing_crates[crabname] = "No crab file: {}".format(filename)
                continue

            with open(filename, 'r') as f:
                crabdata = toml.loads(f.read())

            # If crab file's crate_name and version keys don't match this
            # package, it also fails. This is just housekeeping...
            if (package['name'] != crabdata['crate_name']
                    or package['version'] != crabdata['version']):
                failing_crates[crabname] = "Crate name or version don't match"
                continue

            if self._check_bad_traits(crabdata):
                failing_crates[crabname] = "Failed bad traits check"

        # If we had any failing crates, list them now.
        if failing_crates:
            print('Failed CRAB audit:')
            for k, v in failing_crates.items():
                print(' {}: {}'.format(k, v))


class CrateDestroyer():
    LIB_RS_BODY = """compile_error!("This crate cannot be built for this configuration.");\n"""
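    # A "destroyed" crate keeps only its Cargo.toml, a stub src/lib.rs holding
    # the compile_error! above, and a regenerated .cargo-checksum.json.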

    def __init__(self, working_dir, vendor_dir):
        self.working_dir = working_dir
        self.vendor_dir = vendor_dir

    def _modify_cargo_toml(self, pkg_path):
        with open(os.path.join(pkg_path, "Cargo.toml"), "r") as cargo:
            contents = toml.load(cargo)

        # Change description, license and delete license key.
        contents["package"]["description"] = "Empty crate that should not build."
        contents["package"]["license"] = "Apache-2.0"
        if contents["package"].get("license_file"):
            del contents["package"]["license_file"]

        with open(os.path.join(pkg_path, "Cargo.toml"), "w") as cargo:
            toml.dump(contents, cargo)

    def _replace_source_contents(self, package_path):
        # First load the checksum file before starting.
        checksum_file = os.path.join(package_path, ".cargo-checksum.json")
        with open(checksum_file, 'r') as csum:
            checksum_contents = json.load(csum)

        # Also load the Cargo.toml file which we need to write back.
        cargo_file = os.path.join(package_path, "Cargo.toml")
        with open(cargo_file, 'rb') as cfile:
            cargo_contents = cfile.read()

        shutil.rmtree(package_path)

        # Make package and src dirs and replace lib.rs.
        os.makedirs(os.path.join(package_path, "src"), exist_ok=True)
        with open(os.path.join(package_path, "src", "lib.rs"), "w") as librs:
            librs.write(self.LIB_RS_BODY)

        # Restore Cargo.toml.
        with open(cargo_file, 'wb') as cfile:
            cfile.write(cargo_contents)

        # Restore checksum.
        with open(checksum_file, 'w') as csum:
            json.dump(checksum_contents, csum)

    def destroy_unused_crates(self):
        all_packages = load_all_metadata_packages(self.working_dir,
                                                  filter_platform=None,
                                                  unique=True)
        used_packages = {
            p["name"] for p in load_all_metadata_packages(self.working_dir)
        }

        cleaned_packages = []
        # Since we're asking for _all_ metadata packages, we may see
        # duplication.
        for package in all_packages:
            # Skip used packages.
            if package["name"] in used_packages:
                continue

            # Detect the correct package path to destroy.
            pkg_path = os.path.join(
                self.vendor_dir,
                "{}-{}".format(package["name"], package["version"]))
            if not os.path.isdir(pkg_path):
                print(f'Crate {package["name"]} not found at {pkg_path}')
                continue

            self._replace_source_contents(pkg_path)
            self._modify_cargo_toml(pkg_path)
            _rerun_checksums(pkg_path)
            cleaned_packages.append(package["name"])

        for pkg in cleaned_packages:
            print("Removed unused crate", pkg)


def main(args):
    current_path = pathlib.Path(__file__).parent.absolute()
    patches = os.path.join(current_path, "patches")
    vendor = os.path.join(current_path, "vendor")
    crab_dir = os.path.join(current_path, "crab", "crates")
    license_shorthand_file = os.path.join(current_path, "licenses_used.txt")

    # First, actually run cargo vendor.
    run_cargo_vendor(current_path)

    # Order matters here:
    # - Apply patches (also re-calculates checksums)
    # - Cleanup any owners files (otherwise, git check-in or checksums are
    #   unhappy)
    # - Destroy unused crates
    apply_patches(patches, vendor)
    cleanup_owners(vendor)
    destroyer = CrateDestroyer(current_path, vendor)
    destroyer.destroy_unused_crates()

    # Combine license file and check for any bad licenses.
    lm = LicenseManager(current_path, vendor)
    lm.generate_license(args.skip_license_check, args.license_map,
                        license_shorthand_file)

    # Run crab audit on all packages.
    crab = CrabManager(current_path, crab_dir)
    crab.verify_traits()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Vendor packages properly')
    parser.add_argument('--skip-license-check',
                        '-s',
                        help='Skip the license check on a specific package',
                        action='append')
    parser.add_argument('--license-map',
                        help='Write license map to this file')
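    # Example invocation (illustrative):
    #   ./vendor.py --license-map license_map.json --skip-license-check foo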
    args = parser.parse_args()

    main(args)