blob: db5ef1a5c97758dc4fb868b25b7bb480aff2f4e6 [file] [log] [blame]
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -08001#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3# Copyright 2021 The Chromium OS Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6""" This script cleans up the vendor directory.
7"""
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -07008import argparse
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +00009import hashlib
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080010import json
11import os
12import pathlib
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070013import re
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000014import subprocess
15
# We only care about crates we're actually going to use and that's usually
# limited to ones with cfg(linux). For running `cargo metadata`, limit results
# to only this platform.
# Passed as the --filter-platform argument to `cargo metadata` (see
# load_metadata below); callers may override it per-call.
DEFAULT_PLATFORM_FILTER = "x86_64-unknown-linux-gnu"
20
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000021
22def _rerun_checksums(package_path):
23 """Re-run checksums for given package.
24
25 Writes resulting checksums to $package_path/.cargo-checksum.json.
26 """
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070027 hashes = dict()
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000028 checksum_path = os.path.join(package_path, '.cargo-checksum.json')
29 if not pathlib.Path(checksum_path).is_file():
30 return False
31
32 with open(checksum_path, 'r') as fread:
33 contents = json.load(fread)
34
35 for root, _, files in os.walk(package_path, topdown=True):
36 for f in files:
37 # Don't checksum an existing checksum file
38 if f == ".cargo-checksum.json":
39 continue
40
41 file_path = os.path.join(root, f)
42 with open(file_path, 'rb') as frb:
43 m = hashlib.sha256()
44 m.update(frb.read())
45 d = m.hexdigest()
46
47 # Key is relative to the package path so strip from beginning
48 key = os.path.relpath(file_path, package_path)
49 hashes[key] = d
50
51 if hashes:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070052 print("{} regenerated {} hashes".format(package_path,
53 len(hashes.keys())))
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000054 contents['files'] = hashes
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000055 with open(checksum_path, 'w') as fwrite:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070056 json.dump(contents, fwrite, sort_keys=True)
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000057
58 return True
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080059
60
61def _remove_OWNERS_checksum(root):
62 """ Delete all OWNERS files from the checksum file.
63
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000064 Args:
65 root: Root directory for the vendored crate.
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080066
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000067 Returns:
68 True if OWNERS was found and cleaned up. Otherwise False.
69 """
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080070 checksum_path = os.path.join(root, '.cargo-checksum.json')
71 if not pathlib.Path(checksum_path).is_file():
72 return False
73
74 with open(checksum_path, 'r') as fread:
75 contents = json.load(fread)
76
77 del_keys = []
78 for cfile in contents['files']:
79 if 'OWNERS' in cfile:
80 del_keys.append(cfile)
81
82 for key in del_keys:
83 del contents['files'][key]
84
85 if del_keys:
86 print('{} deleted: {}'.format(root, del_keys))
87 with open(checksum_path, 'w') as fwrite:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070088 json.dump(contents, fwrite, sort_keys=True)
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080089
90 return bool(del_keys)
91
92
def cleanup_owners(vendor_path):
    """Remove owners checksums from the vendor directory.

    We currently do not check in the OWNERS files from vendored crates because
    they interfere with the find-owners functionality in gerrit. This cleanup
    simply finds all instances of "OWNERS" in the checksum files within and
    removes them.

    Args:
        vendor_path: Absolute path to vendor directory.
    """
    deps_cleaned = []
    for root, dirs, _ in os.walk(vendor_path):
        # Collect the crate names whose checksum files we actually touched.
        deps_cleaned.extend(
            d for d in dirs if _remove_OWNERS_checksum(os.path.join(root, d)))

    if deps_cleaned:
        print('Cleanup owners:\n {}'.format("\n".join(deps_cleaned)))
113
114
def apply_single_patch(patch, workdir):
    """Apply a single patch and return whether it was successful.

    Args:
        patch: Path to the patch file to apply.
        workdir: Directory the patch paths are relative to (-p1 strip).

    Returns:
        True if successful. False otherwise.
    """
    print("-- Applying {}".format(patch))
    result = subprocess.run(["patch", "-p1", "-i", patch], cwd=workdir)
    return result.returncode == 0
124
125
def apply_patches(patches_path, vendor_path):
    """Finds patches and applies them to sub-folders in the vendored crates.

    Args:
        patches_path: Path to folder with patches. Expect all patches to be one
                    level down (matching the crate name).
        vendor_path: Root path to vendored crates directory.
    """
    checksums_for = {}

    # Don't bother running if patches directory is empty
    if not pathlib.Path(patches_path).is_dir():
        return

    # Look for all patches and apply them
    for crate in os.listdir(patches_path):
        crate_dir = os.path.join(patches_path, crate)

        # We don't process patches in root dir
        if not os.path.isdir(crate_dir):
            continue

        for patch_name in os.listdir(crate_dir):
            patch_path = os.path.join(crate_dir, patch_name)

            # Skip if not a patch file
            if not patch_name.endswith(".patch") or not os.path.isfile(
                    patch_path):
                continue

            # If there are any patches, queue checksums for that folder.
            checksums_for[crate] = True

            # Apply the patch. Exit from patch loop if patching failed.
            if not apply_single_patch(patch_path,
                                      os.path.join(vendor_path, crate)):
                print("Failed to apply patch: {}".format(patch_name))
                break

    # Re-run checksums for all modified packages since we applied patches.
    for crate in checksums_for:
        _rerun_checksums(os.path.join(vendor_path, crate))
168
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700169
def run_cargo_vendor(working_dir):
    """Runs cargo vendor.

    Args:
        working_dir: Directory to run inside. This should be the directory where
        Cargo.toml is kept.

    Raises:
        subprocess.CalledProcessError: if cargo vendor exits non-zero.
    """
    subprocess.run(["cargo", "vendor"], cwd=working_dir, check=True)
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000178
def load_metadata(working_dir, filter_platform=DEFAULT_PLATFORM_FILTER):
    """Load metadata for manifest at given directory.

    Args:
        working_dir: Directory to run from.
        filter_platform: Filter packages to ones configured for this platform.

    Returns:
        Parsed `cargo metadata` output as a dict.
    """
    manifest_path = os.path.join(working_dir, 'Cargo.toml')
    cmd = [
        'cargo', 'metadata',
        '--format-version', '1',
        '--filter-platform', filter_platform,
        '--manifest-path', manifest_path,
    ]
    raw = subprocess.check_output(cmd, cwd=working_dir)
    return json.loads(raw)
194
195
class LicenseManager:
    """Manage consolidating licenses for all packages.

    Scans `cargo metadata` output plus the vendored sources, picks one
    acceptable license (and license file, where attribution requires one)
    per crate, and writes an aggregate license map for the ebuild.
    """

    # These are all the licenses we support. Keys are what is seen in metadata and
    # values are what is expected by the ebuild.
    SUPPORTED_LICENSES = {
        'Apache-2.0': 'Apache-2.0',
        'MIT': 'MIT',
        'BSD-3-Clause': 'BSD-3',
        'ISC': 'ISC'
    }

    # Prefer to take attribution licenses in this order. All these require that
    # we actually use the license file found in the package so they MUST have
    # a license file set.
    PREFERRED_ATTRIB_LICENSE_ORDER = ['MIT', 'BSD-3', 'ISC']

    # If Apache license is found, always prefer it (simplifies attribution)
    APACHE_LICENSE = 'Apache-2.0'

    # Regex for license files found in the vendored directories. Search for
    # these files with re.IGNORECASE.
    #
    # These will be searched in order with the earlier entries being preferred.
    LICENSE_NAMES_REGEX = [
        r'^license-mit$',
        r'^copyright$',
        r'^licen[cs]e.*$',
    ]

    # Some crates have their license file in other crates. This usually occurs
    # because multiple crates are published from the same git repository and the
    # license isn't updated in each sub-crate. In these cases, we can just
    # ignore these packages.
    MAP_LICENSE_TO_OTHER = {
        'failure_derive': 'failure',
        'grpcio-compiler': 'grpcio',
        'grpcio-sys': 'grpcio',
        'rustyline-derive': 'rustyline',
    }

    # Map a package to a specific license and license file. Only use this if
    # a package doesn't have an easily discoverable license or exports its
    # license in a weird way. Prefer to patch the project with a license and
    # upstream the patch instead.
    STATIC_LICENSE_MAP = {
        # "package name": ( "license name", "license file relative location")
    }

    def __init__(self, working_dir, vendor_dir):
        """Initialize the manager.

        Args:
            working_dir: Directory containing Cargo.toml.
            vendor_dir: Directory containing the vendored crates.
        """
        self.working_dir = working_dir
        self.vendor_dir = vendor_dir

    def _find_license_in_dir(self, search_dir):
        """Yield paths of files in search_dir that look like license files.

        Files are matched (case-insensitively) against LICENSE_NAMES_REGEX;
        each file is yielded at most once, on its first matching regex.
        """
        for p in os.listdir(search_dir):
            # Ignore anything that's not a file
            if not os.path.isfile(os.path.join(search_dir, p)):
                continue

            # Now check if the name matches any of the regexes
            # We'll return the first matching file.
            for regex in self.LICENSE_NAMES_REGEX:
                if re.search(regex, p, re.IGNORECASE):
                    yield os.path.join(search_dir, p)
                    break

    def _guess_license_type(self, license_file):
        """Guess which license a license file carries.

        First keys off the filename suffix (LICENSE-MIT etc), then falls back
        to a crude content scan. Returns 'MIT', 'APACHE', 'BSD-3' or '' when
        nothing is recognized. Note: 'APACHE' here intentionally differs from
        APACHE_LICENSE ('Apache-2.0'); callers compare against the
        PREFERRED_ATTRIB_LICENSE_ORDER names.
        """
        if '-MIT' in license_file:
            return 'MIT'
        elif '-APACHE' in license_file:
            return 'APACHE'
        elif '-BSD' in license_file:
            return 'BSD-3'

        with open(license_file, 'r') as f:
            lines = f.read()
            if 'MIT' in lines:
                return 'MIT'
            elif 'Apache' in lines:
                return 'APACHE'
            elif 'BSD 3-Clause' in lines:
                return 'BSD-3'

        return ''

    def generate_license(self, skip_license_check, print_map_to_file):
        """Generate single massive license file from metadata.

        Args:
            skip_license_check: Iterable of package names to skip, or None.
            print_map_to_file: Filename (relative to working_dir) to write the
                license map to, or None/'' to skip writing.

        Raises:
            Exception: if any package has no acceptable license, or an
                attribution license is missing its license file.
        """
        metadata = load_metadata(self.working_dir)

        has_license_types = set()
        bad_licenses = {}

        # Keep license map ordered so it generates a consistent license map
        license_map = {}

        skip_license_check = skip_license_check or []

        for package in metadata['packages']:
            pkg_name = package['name']

            # Skip vendor libs directly
            if pkg_name == "vendor_libs":
                continue

            if pkg_name in skip_license_check:
                print(
                    "Skipped license check on {}. Reason: Skipped from command line"
                    .format(pkg_name))
                continue

            if pkg_name in self.MAP_LICENSE_TO_OTHER:
                print(
                    'Skipped license check on {}. Reason: License already in {}'
                    .format(pkg_name, self.MAP_LICENSE_TO_OTHER[pkg_name]))
                continue

            # Check if we have a static license map for this package. Use the
            # static values if we have it already set.
            if pkg_name in self.STATIC_LICENSE_MAP:
                (static_license, static_file) = self.STATIC_LICENSE_MAP[pkg_name]
                license_map[pkg_name] = {
                    "license": static_license,
                    "license_file": static_file,
                }
                continue

            license_expr = package.get('license', '')

            # We ignore the metadata for license file because most crates don't
            # have it set. Just scan the source for licenses.
            license_files = list(
                self._find_license_in_dir(
                    os.path.join(self.vendor_dir, pkg_name)))

            # If there are multiple licenses, they are delimited with "OR" or "/"
            delim = ' OR ' if ' OR ' in license_expr else '/'
            found = license_expr.split(delim)

            # Filter licenses to ones we support
            licenses_or = [
                self.SUPPORTED_LICENSES[f] for f in found
                if f in self.SUPPORTED_LICENSES
            ]

            # If apache license is found, always prefer it because it simplifies
            # license attribution (we can use existing Apache notice)
            if self.APACHE_LICENSE in licenses_or:
                has_license_types.add(self.APACHE_LICENSE)
                license_map[pkg_name] = {'license': self.APACHE_LICENSE}

            # Handle single license that has at least one license file
            # We pick the first license file and the license
            elif len(licenses_or) == 1:
                if license_files:
                    chosen = licenses_or[0]
                    chosen_file = license_files[0]

                    has_license_types.add(chosen)
                    license_map[pkg_name] = {
                        'license': chosen,
                        'license_file': os.path.relpath(
                            chosen_file, self.working_dir),
                    }
                else:
                    bad_licenses[pkg_name] = "{} missing license file".format(
                        licenses_or[0])
            # Handle multiple licenses
            elif len(licenses_or) > 1:
                # Check preferred licenses in order
                license_found = False
                for preferred in self.PREFERRED_ATTRIB_LICENSE_ORDER:
                    if preferred not in licenses_or:
                        continue

                    for candidate in license_files:
                        if self._guess_license_type(candidate) == preferred:
                            license_found = True
                            has_license_types.add(preferred)
                            license_map[pkg_name] = {
                                'license':
                                preferred,
                                'license_file':
                                os.path.relpath(candidate, self.working_dir),
                            }
                            break

                    # Break out of loop if license is found
                    if license_found:
                        break
            else:
                # No supported license at all; record the raw expression.
                bad_licenses[pkg_name] = license_expr

        # If we had any bad licenses, we need to abort
        if bad_licenses:
            for k in bad_licenses.keys():
                print("{} had no acceptable licenses: {}".format(
                    k, bad_licenses[k]))
            raise Exception("Bad licenses in vendored packages.")

        # Write license map to file
        if print_map_to_file:
            with open(os.path.join(self.working_dir, print_map_to_file),
                      'w') as lfile:
                json.dump(license_map, lfile, sort_keys=True)

        # Raise missing licenses unless we have a valid reason to ignore them
        raise_missing_license = False
        for name, v in license_map.items():
            if 'license_file' not in v and v.get('license',
                                                 '') != self.APACHE_LICENSE:
                raise_missing_license = True
                print("  {}: Missing license file. Fix or add to ignorelist.".
                      format(name))

        if raise_missing_license:
            raise Exception(
                "Unhandled missing license file. "
                "Make sure all are accounted for before continuing.")

        print("Add the following licenses to the ebuild: \n",
              sorted(has_license_types))
418
419
def main(args):
    """Vendor the crates, patch them, and consolidate licenses.

    Args:
        args: Parsed command-line arguments (skip_license_check, license_map).
    """
    current_path = pathlib.Path(__file__).parent.absolute()
    patches = os.path.join(current_path, "patches")
    vendor = os.path.join(current_path, "vendor")

    # First, actually run cargo vendor
    run_cargo_vendor(current_path)

    # Order matters here:
    # - Apply patches (also re-calculates checksums)
    # - Cleanup any owners files (otherwise, git check-in or checksums are
    #   unhappy)
    apply_patches(patches, vendor)
    cleanup_owners(vendor)

    # Combine license file and check for any bad licenses
    license_manager = LicenseManager(current_path, vendor)
    license_manager.generate_license(args.skip_license_check, args.license_map)
438
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -0800439
if __name__ == '__main__':
    # Command-line entry point: parse flags and hand off to main().
    parser = argparse.ArgumentParser(description='Vendor packages properly')
    parser.add_argument(
        '--skip-license-check',
        '-s',
        action='append',
        help='Skip the license check on a specific package')
    parser.add_argument('--license-map', help='Write license map to this file')
    main(parser.parse_args())
449 main(args)