blob: eb3f3b3c2f3d04cf1aa32ca11afcd8ffeb8f0844 [file] [log] [blame]
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -08001#!/usr/bin/env python3
2# -*- coding: utf-8 -*-
3# Copyright 2021 The Chromium OS Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6""" This script cleans up the vendor directory.
7"""
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -07008import argparse
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +00009import hashlib
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080010import json
11import os
12import pathlib
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070013import re
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -070014import shutil
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000015import subprocess
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -070016import toml
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000017
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070018# We only care about crates we're actually going to use and that's usually
19# limited to ones with cfg(linux). For running `cargo metadata`, limit results
20# to only this platform
21DEFAULT_PLATFORM_FILTER = "x86_64-unknown-linux-gnu"
22
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000023
24def _rerun_checksums(package_path):
25 """Re-run checksums for given package.
26
27 Writes resulting checksums to $package_path/.cargo-checksum.json.
28 """
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070029 hashes = dict()
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000030 checksum_path = os.path.join(package_path, '.cargo-checksum.json')
31 if not pathlib.Path(checksum_path).is_file():
32 return False
33
34 with open(checksum_path, 'r') as fread:
35 contents = json.load(fread)
36
37 for root, _, files in os.walk(package_path, topdown=True):
38 for f in files:
39 # Don't checksum an existing checksum file
40 if f == ".cargo-checksum.json":
41 continue
42
43 file_path = os.path.join(root, f)
44 with open(file_path, 'rb') as frb:
45 m = hashlib.sha256()
46 m.update(frb.read())
47 d = m.hexdigest()
48
49 # Key is relative to the package path so strip from beginning
50 key = os.path.relpath(file_path, package_path)
51 hashes[key] = d
52
53 if hashes:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070054 print("{} regenerated {} hashes".format(package_path,
55 len(hashes.keys())))
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000056 contents['files'] = hashes
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000057 with open(checksum_path, 'w') as fwrite:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070058 json.dump(contents, fwrite, sort_keys=True)
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000059
60 return True
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080061
62
63def _remove_OWNERS_checksum(root):
64 """ Delete all OWNERS files from the checksum file.
65
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000066 Args:
67 root: Root directory for the vendored crate.
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080068
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +000069 Returns:
70 True if OWNERS was found and cleaned up. Otherwise False.
71 """
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080072 checksum_path = os.path.join(root, '.cargo-checksum.json')
73 if not pathlib.Path(checksum_path).is_file():
74 return False
75
76 with open(checksum_path, 'r') as fread:
77 contents = json.load(fread)
78
79 del_keys = []
80 for cfile in contents['files']:
81 if 'OWNERS' in cfile:
82 del_keys.append(cfile)
83
84 for key in del_keys:
85 del contents['files'][key]
86
87 if del_keys:
88 print('{} deleted: {}'.format(root, del_keys))
89 with open(checksum_path, 'w') as fwrite:
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -070090 json.dump(contents, fwrite, sort_keys=True)
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -080091
92 return bool(del_keys)
93
94
def cleanup_owners(vendor_path):
    """Remove owners checksums from the vendor directory.

    We currently do not check in the OWNERS files from vendored crates because
    they interfere with the find-owners functionality in gerrit. This cleanup
    simply finds all instances of "OWNERS" in the checksum files within and
    removes them.

    Args:
        vendor_path: Absolute path to vendor directory.
    """
    deps_cleaned = []
    # Walk every subdirectory and scrub its checksum file; record the crates
    # that actually had OWNERS entries so we can report them.
    for root, dirs, _ in os.walk(vendor_path):
        deps_cleaned.extend(
            d for d in dirs if _remove_OWNERS_checksum(os.path.join(root, d)))

    if deps_cleaned:
        print('Cleanup owners:\n {}'.format("\n".join(deps_cleaned)))
115
116
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000117def apply_single_patch(patch, workdir):
118 """Apply a single patch and return whether it was successful.
119
120 Returns:
121 True if successful. False otherwise.
122 """
123 print("-- Applying {}".format(patch))
124 proc = subprocess.run(["patch", "-p1", "-i", patch], cwd=workdir)
125 return proc.returncode == 0
126
127
def apply_patches(patches_path, vendor_path):
    """Finds patches and applies them to sub-folders in the vendored crates.

    Args:
        patches_path: Path to folder with patches. Expect all patches to be one
            level down (matching the crate name).
        vendor_path: Root path to vendored crates directory.
    """
    # Crates that had at least one patch queued; their checksums must be
    # regenerated afterwards. A dict preserves discovery order.
    checksums_for = {}

    # Don't bother running if patches directory is empty
    if not pathlib.Path(patches_path).is_dir():
        return

    # Look for all patches and apply them
    for crate_name in os.listdir(patches_path):
        crate_patch_dir = os.path.join(patches_path, crate_name)

        # We don't process patches in root dir
        if not os.path.isdir(crate_patch_dir):
            continue

        for patch_name in os.listdir(crate_patch_dir):
            patch_file = os.path.join(crate_patch_dir, patch_name)

            # Skip if not a patch file
            if not os.path.isfile(patch_file) or not patch_name.endswith(".patch"):
                continue

            # If there are any patches, queue checksums for that folder.
            checksums_for[crate_name] = True

            # Apply the patch. Exit from patch loop if patching failed.
            if not apply_single_patch(patch_file,
                                      os.path.join(vendor_path, crate_name)):
                print("Failed to apply patch: {}".format(patch_name))
                break

    # Re-run checksums for all modified packages since we applied patches.
    for crate_name in checksums_for:
        _rerun_checksums(os.path.join(vendor_path, crate_name))
170
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700171
Abhishek Pandit-Subedifa902382021-08-20 11:04:33 -0700172def run_cargo_vendor(working_dir):
173 """Runs cargo vendor.
174
175 Args:
176 working_dir: Directory to run inside. This should be the directory where
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -0700177 Cargo.toml is kept.
Abhishek Pandit-Subedifa902382021-08-20 11:04:33 -0700178 """
179 subprocess.check_call(["cargo", "vendor"], cwd=working_dir)
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000180
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -0700181
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700182def load_metadata(working_dir, filter_platform=DEFAULT_PLATFORM_FILTER):
183 """Load metadata for manifest at given directory.
184
185 Args:
186 working_dir: Directory to run from.
187 filter_platform: Filter packages to ones configured for this platform.
188 """
189 manifest_path = os.path.join(working_dir, 'Cargo.toml')
190 cmd = [
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700191 'cargo', 'metadata', '--format-version', '1', '--manifest-path',
192 manifest_path
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700193 ]
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700194
195 # Conditionally add platform filter
196 if filter_platform:
197 cmd.append("--filter-platform")
198 cmd.append(filter_platform)
199
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700200 output = subprocess.check_output(cmd, cwd=working_dir)
201
202 return json.loads(output)
203
204
class LicenseManager:
    """ Manage consolidating licenses for all packages."""

    # These are all the licenses we support. Keys are what is seen in metadata and
    # values are what is expected by the ebuild.
    SUPPORTED_LICENSES = {
        'Apache-2.0': 'Apache-2.0',
        'MIT': 'MIT',
        'BSD-3-Clause': 'BSD-3',
        'ISC': 'ISC'
    }

    # Prefer to take attribution licenses in this order. All these require that
    # we actually use the license file found in the package so they MUST have
    # a license file set.
    PREFERRED_ATTRIB_LICENSE_ORDER = ['MIT', 'BSD-3', 'ISC']

    # If Apache license is found, always prefer it (simplifies attribution)
    APACHE_LICENSE = 'Apache-2.0'

    # Regex for license files found in the vendored directories. Search for
    # these files with re.IGNORECASE.
    #
    # These will be searched in order with the earlier entries being preferred.
    LICENSE_NAMES_REGEX = [
        r'^license-mit$',
        r'^copyright$',
        r'^licen[cs]e.*$',
    ]

    # Some crates have their license file in other crates. This usually occurs
    # because multiple crates are published from the same git repository and the
    # license isn't updated in each sub-crate. In these cases, we can just
    # ignore these packages.
    MAP_LICENSE_TO_OTHER = {
        'failure_derive': 'failure',
        'grpcio-compiler': 'grpcio',
        'grpcio-sys': 'grpcio',
        'rustyline-derive': 'rustyline',
    }

    # Map a package to a specific license and license file. Only use this if
    # a package doesn't have an easily discoverable license or exports its
    # license in a weird way. Prefer to patch the project with a license and
    # upstream the patch instead.
    STATIC_LICENSE_MAP = {
        # "package name": ( "license name", "license file relative location")
    }

    def __init__(self, working_dir, vendor_dir):
        # Directory containing Cargo.toml; license-file paths in the generated
        # map are recorded relative to this directory.
        self.working_dir = working_dir
        # Root of the vendored crates that are scanned for license files.
        self.vendor_dir = vendor_dir

    def _find_license_in_dir(self, search_dir):
        """Yield paths of license-like files directly inside search_dir.

        Filenames are matched case-insensitively against LICENSE_NAMES_REGEX;
        each file is yielded at most once (on its first matching regex).
        Non-recursive: subdirectories are not searched.
        """
        for p in os.listdir(search_dir):
            # Ignore anything that's not a file
            if not os.path.isfile(os.path.join(search_dir, p)):
                continue

            # Now check if the name matches any of the regexes
            # We'll return the first matching file.
            for regex in self.LICENSE_NAMES_REGEX:
                if re.search(regex, p, re.IGNORECASE):
                    yield os.path.join(search_dir, p)
                    break

    def _guess_license_type(self, license_file):
        """Best-effort guess at the license type of a license file.

        First keys off suffixes in the file name (e.g. LICENSE-MIT), then
        falls back to scanning the file contents for telltale strings.
        Returns '' when nothing matched.

        NOTE(review): returns 'APACHE', which never equals APACHE_LICENSE
        ('Apache-2.0'); callers only compare against the attribution list
        (MIT/BSD-3/ISC), and 'ISC' is never guessed here — confirm intended.
        """
        if '-MIT' in license_file:
            return 'MIT'
        elif '-APACHE' in license_file:
            return 'APACHE'
        elif '-BSD' in license_file:
            return 'BSD-3'

        with open(license_file, 'r') as f:
            lines = f.read()
            if 'MIT' in lines:
                return 'MIT'
            elif 'Apache' in lines:
                return 'APACHE'
            elif 'BSD 3-Clause' in lines:
                return 'BSD-3'

        return ''

    def generate_license(self, skip_license_check, print_map_to_file):
        """Generate single massive license file from metadata.

        Walks all packages reported by `cargo metadata`, resolves each one to
        a supported license (and license file, where attribution requires it),
        optionally writes the resulting map to a JSON file, and prints the
        set of license names the ebuild must declare.

        Args:
            skip_license_check: Iterable of package names to skip, or None.
            print_map_to_file: Filename (relative to working_dir) for the JSON
                license map, or a falsy value to skip writing it.

        Raises:
            Exception: If any package has no acceptable license, or a package
                requiring an attribution license has no license file.
        """
        metadata = load_metadata(self.working_dir)

        has_license_types = set()
        bad_licenses = {}

        # Keep license map ordered so it generates a consistent license map
        license_map = {}

        skip_license_check = skip_license_check or []

        for package in metadata['packages']:
            pkg_name = package['name']

            # Skip vendor libs directly
            if pkg_name == "vendor_libs":
                continue

            if pkg_name in skip_license_check:
                print(
                    "Skipped license check on {}. Reason: Skipped from command line"
                    .format(pkg_name))
                continue

            if pkg_name in self.MAP_LICENSE_TO_OTHER:
                print(
                    'Skipped license check on {}. Reason: License already in {}'
                    .format(pkg_name, self.MAP_LICENSE_TO_OTHER[pkg_name]))
                continue

            # Check if we have a static license map for this package. Use the
            # static values if we have it already set.
            if pkg_name in self.STATIC_LICENSE_MAP:
                (license, license_file) = self.STATIC_LICENSE_MAP[pkg_name]
                license_map[pkg_name] = {
                    "license": license,
                    "license_file": license_file,
                }
                continue

            license_files = []
            # NOTE(review): `license` shadows the builtin module name here;
            # harmless in this scope but worth renaming eventually.
            license = package.get('license', '')

            # We ignore the metadata for license file because most crates don't
            # have it set. Just scan the source for licenses.
            license_files = [
                x for x in self._find_license_in_dir(
                    os.path.join(self.vendor_dir, pkg_name))
            ]

            # If there are multiple licenses, they are delimited with "OR" or "/"
            delim = ' OR ' if ' OR ' in license else '/'
            found = license.split(delim)

            # Filter licenses to ones we support
            licenses_or = [
                self.SUPPORTED_LICENSES[f] for f in found
                if f in self.SUPPORTED_LICENSES
            ]

            # If apache license is found, always prefer it because it simplifies
            # license attribution (we can use existing Apache notice)
            if self.APACHE_LICENSE in licenses_or:
                has_license_types.add(self.APACHE_LICENSE)
                license_map[pkg_name] = {'license': self.APACHE_LICENSE}

            # Handle single license that has at least one license file
            # We pick the first license file and the license
            elif len(licenses_or) == 1:
                if license_files:
                    l = licenses_or[0]
                    lf = license_files[0]

                    has_license_types.add(l)
                    license_map[pkg_name] = {
                        'license': l,
                        'license_file': os.path.relpath(lf, self.working_dir),
                    }
                else:
                    bad_licenses[pkg_name] = "{} missing license file".format(
                        licenses_or[0])
            # Handle multiple licenses
            elif len(licenses_or) > 1:
                # Check preferred licenses in order
                license_found = False
                for l in self.PREFERRED_ATTRIB_LICENSE_ORDER:
                    if not l in licenses_or:
                        continue

                    for f in license_files:
                        if self._guess_license_type(f) == l:
                            license_found = True
                            has_license_types.add(l)
                            license_map[pkg_name] = {
                                'license':
                                l,
                                'license_file':
                                os.path.relpath(f, self.working_dir),
                            }
                            break

                    # Break out of loop if license is found
                    if license_found:
                        break
                # NOTE(review): if no preferred license's file is identified,
                # the package silently ends up in neither license_map nor
                # bad_licenses — confirm this fall-through is intended.
            else:
                bad_licenses[pkg_name] = license

        # If we had any bad licenses, we need to abort
        if bad_licenses:
            for k in bad_licenses.keys():
                print("{} had no acceptable licenses: {}".format(
                    k, bad_licenses[k]))
            raise Exception("Bad licenses in vendored packages.")

        # Write license map to file
        if print_map_to_file:
            with open(os.path.join(self.working_dir, print_map_to_file),
                      'w') as lfile:
                json.dump(license_map, lfile, sort_keys=True)

        # Raise missing licenses unless we have a valid reason to ignore them
        raise_missing_license = False
        for name, v in license_map.items():
            if 'license_file' not in v and v.get('license',
                                                 '') != self.APACHE_LICENSE:
                raise_missing_license = True
                print(" {}: Missing license file. Fix or add to ignorelist.".
                      format(name))

        if raise_missing_license:
            raise Exception(
                "Unhandled missing license file. "
                "Make sure all are accounted for before continuing.")

        print("Add the following licenses to the ebuild: \n",
              sorted([x for x in has_license_types]))
427
428
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -0700429# TODO(abps) - This needs to be replaced with datalog later. We should compile
430# all crab files into datalog and query it with our requirements
431# instead.
432class CrabManager:
433 """Manage audit files."""
434 def __init__(self, working_dir, crab_dir):
435 self.working_dir = working_dir
436 self.crab_dir = crab_dir
437
438 def _check_bad_traits(self, crabdata):
439 """Checks that a package's crab audit meets our requirements.
440
441 Args:
442 crabdata: Dict with crab keys in standard templated format.
443 """
444 common = crabdata['common']
445 # TODO(b/200578411) - Figure out what conditions we should enforce as
446 # part of the audit.
447 conditions = [
448 common.get('deny', None),
449 ]
450
451 # If any conditions are true, this crate is not acceptable.
452 return any(conditions)
453
454 def verify_traits(self):
455 """ Verify that all required CRAB traits for this repository are met.
456 """
457 metadata = load_metadata(self.working_dir)
458
459 failing_crates = {}
460
461 # Verify all packages have a CRAB file associated with it and they meet
462 # all our required traits
463 for package in metadata['packages']:
464 # Skip vendor_libs
465 if package['name'] == 'vendor_libs':
466 continue
467
468 crabname = "{}-{}".format(package['name'], package['version'])
469 filename = os.path.join(self.crab_dir, "{}.toml".format(crabname))
470
471 # If crab file doesn't exist, the crate fails
472 if not os.path.isfile(filename):
473 failing_crates[crabname] = "No crab file".format(filename)
474 continue
475
476 with open(filename, 'r') as f:
477 crabdata = toml.loads(f.read())
478
479 # If crab file's crate_name and version keys don't match this
480 # package, it also fails. This is just housekeeping...
481 if package['name'] != crabdata['crate_name'] or package[
482 'version'] != crabdata['version']:
483 failing_crates[crabname] = "Crate name or version don't match"
484 continue
485
486 if self._check_bad_traits(crabdata):
487 failing_crates[crabname] = "Failed bad traits check"
488
489 # If we had any failing crates, list them now
490 if failing_crates:
491 print('Failed CRAB audit:')
492 for k, v in failing_crates.items():
493 print(' {}: {}'.format(k, v))
494
495
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700496class CrateDestroyer():
497 LIB_RS_BODY = """compile_error!("This crate cannot be built for this configuration.");\n"""
498
499 def __init__(self, working_dir, vendor_dir):
500 self.working_dir = working_dir
501 self.vendor_dir = vendor_dir
502
503 def _modify_cargo_toml(self, pkg_path):
504 with open(os.path.join(pkg_path, "Cargo.toml"), "r") as cargo:
505 contents = toml.load(cargo)
506
507 # Change description, license and delete license key
508 contents["package"]["description"] = "Empty crate that should not build."
509 contents["package"]["license"] = "Apache-2.0"
510 if contents["package"].get("license_file"):
511 del contents["package"]["license_file"]
512
513 with open(os.path.join(pkg_path, "Cargo.toml"), "w") as cargo:
514 toml.dump(contents, cargo)
515
516 def _replace_source_contents(self, package_path):
517 # First load the checksum file before starting
518 checksum_file = os.path.join(package_path, ".cargo-checksum.json")
519 with open(checksum_file, 'r') as csum:
520 checksum_contents = json.load(csum)
521
522 # Also load the cargo.toml file which we need to write back
523 cargo_file = os.path.join(package_path, "Cargo.toml")
George Burgess IV3e344e42022-08-09 21:07:04 -0700524 with open(cargo_file, 'rb') as cfile:
525 cargo_contents = cfile.read()
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700526
527 shutil.rmtree(package_path)
528
529 # Make package and src dirs and replace lib.rs
530 os.makedirs(os.path.join(package_path, "src"), exist_ok=True)
531 with open(os.path.join(package_path, "src", "lib.rs"), "w") as librs:
532 librs.write(self.LIB_RS_BODY)
533
534 # Restore cargo.toml
George Burgess IV3e344e42022-08-09 21:07:04 -0700535 with open(cargo_file, 'wb') as cfile:
536 cfile.write(cargo_contents)
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700537
538 # Restore checksum
539 with open(checksum_file, 'w') as csum:
540 json.dump(checksum_contents, csum)
541
542 def destroy_unused_crates(self):
543 all_packages = load_metadata(self.working_dir, filter_platform=None)
544 used_packages = set([p["name"] for p in load_metadata(self.working_dir)["packages"]])
545
546 cleaned_packages = []
547 for package in all_packages["packages"]:
548
549 # Skip used packages
550 if package["name"] in used_packages:
551 continue
552
553 # Detect the correct package path to destroy
554 pkg_path = os.path.join(self.vendor_dir, "{}-{}".format(package["name"], package["version"]))
555 if not os.path.isdir(pkg_path):
556 pkg_path = os.path.join(self.vendor_dir, package["name"])
557 if not os.path.isdir(pkg_path):
558 print("Crate {} not found at {}".format(package["name"], pkg_path))
559 continue
560
561 self._replace_source_contents(pkg_path)
562 self._modify_cargo_toml(pkg_path)
563 _rerun_checksums(pkg_path)
564 cleaned_packages.append(package["name"])
565
566 for pkg in cleaned_packages:
567 print("Removed unused crate ", pkg)
568
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700569def main(args):
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -0800570 current_path = pathlib.Path(__file__).parent.absolute()
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000571 patches = os.path.join(current_path, "patches")
572 vendor = os.path.join(current_path, "vendor")
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -0700573 crab_dir = os.path.join(current_path, "crab", "crates")
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -0800574
Abhishek Pandit-Subedifa902382021-08-20 11:04:33 -0700575 # First, actually run cargo vendor
576 run_cargo_vendor(current_path)
577
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000578 # Order matters here:
579 # - Apply patches (also re-calculates checksums)
580 # - Cleanup any owners files (otherwise, git check-in or checksums are
581 # unhappy)
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700582 # - Destroy unused crates
Abhishek Pandit-Subedi5065a0f2021-06-13 20:38:55 +0000583 apply_patches(patches, vendor)
584 cleanup_owners(vendor)
Abhishek Pandit-Subedif0eb6e02021-09-24 16:36:12 -0700585 destroyer = CrateDestroyer(current_path, vendor)
586 destroyer.destroy_unused_crates()
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -0800587
Abhishek Pandit-Subedie393cb72021-08-22 10:41:13 -0700588 # Combine license file and check for any bad licenses
589 lm = LicenseManager(current_path, vendor)
590 lm.generate_license(args.skip_license_check, args.license_map)
591
Abhishek Pandit-Subedice0f5b22021-09-10 15:50:08 -0700592 # Run crab audit on all packages
593 crab = CrabManager(current_path, crab_dir)
594 crab.verify_traits()
595
Abhishek Pandit-Subedib75bd562021-02-25 15:32:22 -0800596
if __name__ == '__main__':
    # Command-line entry point: parse flags and hand off to main().
    parser = argparse.ArgumentParser(description='Vendor packages properly')
    parser.add_argument(
        '--skip-license-check',
        '-s',
        action='append',
        help='Skip the license check on a specific package')
    parser.add_argument('--license-map', help='Write license map to this file')

    main(parser.parse_args())