Alex Deymo3cfb9cd2014-08-18 15:56:35 -07001# Copyright 2014 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
Mike Frysinger750c5f52014-09-16 16:16:57 -04005"""Script to discover dependencies and other file information from a build.
Alex Deymo3cfb9cd2014-08-18 15:56:35 -07006
7Some files in the image are installed to provide some functionality, such as
8chrome, shill or bluetoothd provide different functionality that can be
9present or not on a given build. Many other files are dependencies from these
10files that need to be present in the image for them to work. These dependencies
11come from needed shared libraries, executed files and other configuration files
12read.
13
14This script currently discovers dependencies between ELF files for libraries
15required at load time (libraries loaded by the dynamic linker) but not
Alex Deymo365b10c2014-08-25 13:14:28 -070016libraries loaded at runtime with dlopen(). It also computes size and file type
17in several cases to help understand the contents of the built image.
Alex Deymo3cfb9cd2014-08-18 15:56:35 -070018"""
19
Mike Frysinger383367e2014-09-16 15:06:17 -040020from __future__ import print_function
21
Alex Deymo3cfb9cd2014-08-18 15:56:35 -070022import itertools
23import json
24import multiprocessing
25import os
26import re
27import stat
28
29from chromite.lib import commandline
30from chromite.lib import cros_build_lib
Alex Deymo365b10c2014-08-25 13:14:28 -070031from chromite.lib import filetype
Alex Deymo3cfb9cd2014-08-18 15:56:35 -070032from chromite.lib import parseelf
33from chromite.scripts import lddtree
34
35
# Regex to parse Gentoo atoms. This should match the following ebuild names,
# splitting the package name from the version.
# without version:
#   chromeos-base/tty
#   chromeos-base/libchrome-271506
#   sys-kernel/chromeos-kernel-3_8
# with version:
#   chromeos-base/tty-0.0.1-r4
#   chromeos-base/libchrome-271506-r5
#   sys-kernel/chromeos-kernel-3_8-3.8.11-r35
# Group 1 captures the "category/package" atom; in RE_EBUILD_WITH_VERSION,
# group 2 captures the version (including an optional -rN revision suffix).
# NOTE(review): RE_EBUILD_WITH_VERSION also matches names listed above as
# "without version" when their last hyphen-separated component contains no
# '-' (e.g. "libchrome-271506", "chromeos-kernel-3_8"), splitting that
# component off as the version — confirm callers intend the WITH_VERSION
# match to win in that case.
RE_EBUILD_WITHOUT_VERSION = r'^([a-z0-9\-]+/[a-zA-Z0-9\_\+\-]+)$'
RE_EBUILD_WITH_VERSION = (
    r'^=?([a-z0-9\-]+/[a-zA-Z0-9\_\+\-]+)\-([^\-]+(\-r\d+)?)$')
49
50
def ParseELFWithArgs(args):
  """Wrapper to parseelf.ParseELF accepting a single arg.

  This wrapper is required to use multiprocessing.Pool.map function.

  Returns:
    A 2-tuple with the passed relative path and the result of ParseELF(). On
    error, when ParseELF() returns None, this function returns None.
  """
  parsed = parseelf.ParseELF(*args)
  # args[1] is the relative path originally handed to ParseELF.
  return None if parsed is None else (args[1], parsed)
64
65
class DepTracker(object):
  """Tracks dependencies and file information in a root directory.

  This class computes dependencies and other information related to the files
  in the root image.
  """

  def __init__(self, root, jobs=1):
    """Initializes the tracker for the given mounted rootfs.

    Args:
      root: Path to the directory where the rootfs is mounted.
      jobs: Number of parallel processes used for the expensive ELF parsing
          step. With jobs == 1 no pool is created (see comment below).

    Raises:
      ValueError: If |root| is not a directory. (ValueError subclasses
          Exception, so callers catching the previous generic Exception
          still work.)
    """
    root_st = os.lstat(root)
    if not stat.S_ISDIR(root_st.st_mode):
      raise ValueError('root (%s) must be a directory' % root)
    self._root = root.rstrip('/') + '/'
    self._file_type_decoder = filetype.FileTypeDecoder(root)

    # A wrapper to the multiprocess map function. We avoid launching a pool
    # of processes when jobs is 1 so python exceptions kill the main process,
    # useful for debugging.
    if jobs > 1:
      self._pool = multiprocessing.Pool(jobs)
      self._imap = self._pool.map
    else:
      self._imap = itertools.imap

    # Maps a file's relative path to a dict of computed attributes
    # ('size', 'deps', 'ftype', 'ebuild', ...).
    self._files = {}
    # Maps a package "category/name-version" string to its computed info.
    self._ebuilds = {}

    # Mapping of rel_paths for symlinks and hardlinks. Hardlinks are assumed
    # to point to the lowest lexicographically file with the same inode.
    self._symlinks = {}
    self._hardlinks = {}

  def Init(self):
    """Generates the initial list of files."""
    # First iteration over all the files in root searching for symlinks and
    # non-regular files.
    seen_inodes = {}
    for basepath, _, filenames in sorted(os.walk(self._root)):
      for filename in sorted(filenames):
        full_path = os.path.join(basepath, filename)
        rel_path = full_path[len(self._root):]
        st = os.lstat(full_path)

        file_data = {
            'size': st.st_size,
        }
        self._files[rel_path] = file_data

        # Track symlinks.
        if stat.S_ISLNK(st.st_mode):
          link_path = os.readlink(full_path)
          # lddtree's normpath handles a little more cases than the os.path
          # version. In particular, it handles the '//' case.
          self._symlinks[rel_path] = (
              link_path.lstrip('/') if link_path and link_path[0] == '/' else
              lddtree.normpath(os.path.join(os.path.dirname(rel_path),
                                            link_path)))
          file_data['deps'] = {
              'symlink': [self._symlinks[rel_path]]
          }

        # Track hardlinks: every later path sharing an inode maps to the
        # first (lowest lexicographically) path seen with that inode.
        if st.st_ino in seen_inodes:
          self._hardlinks[rel_path] = seen_inodes[st.st_ino]
          continue
        seen_inodes[st.st_ino] = rel_path

  def SaveJSON(self, filename):
    """Save the computed information to a JSON file.

    Args:
      filename: The destination JSON file.
    """
    data = {
        'files': self._files,
        'ebuilds': self._ebuilds,
    }
    # Use a context manager so the file is flushed and closed even if
    # json.dump() raises (the old code leaked the file object).
    with open(filename, 'w') as outfile:
      json.dump(data, outfile)

  def ComputeEbuildDeps(self, portage_db):
    """Compute the dependencies between ebuilds and files.

    Iterates over the list of ebuilds in the database and annotates the files
    with the ebuilds they are in. For each ebuild installing a file in the root,
    also compute the direct dependencies. Stores the information internally.

    Args:
      portage_db: The path to the portage db. Usually "/var/db/pkg".
    """
    portage_db = portage_db.rstrip('/') + '/'
    for basepath, _, filenames in sorted(os.walk(portage_db)):
      if 'CONTENTS' not in filenames:
        continue
      full_path = os.path.join(basepath, 'CONTENTS')
      # The package name-version is the path relative to the portage db.
      pkg = basepath[len(portage_db):]
      pkg_files = []
      pkg_size = 0
      with open(full_path) as contents:
        for line in contents:
          line = line.split()
          # Line format is: "type file_path [more space-separated fields]".
          # Discard any other line without at least the first two fields. The
          # remaining fields depend on the type.
          if len(line) < 2:
            continue
          typ, file_path = line[:2]
          # We ignore other entries like for example "dir".
          if typ not in ('obj', 'sym'):
            continue
          file_path = file_path.lstrip('/')
          # We ignore files installed in the SYSROOT that weren't copied to
          # the image.
          if file_path not in self._files:
            continue
          pkg_files.append(file_path)
          file_data = self._files[file_path]
          if 'ebuild' in file_data:
            # Bug fix: the last placeholder used to be a bare '%', which
            # broke the log message formatting.
            cros_build_lib.Warning('Duplicated entry for %s: %s and %s',
                                   file_path, file_data['ebuild'], pkg)
          file_data['ebuild'] = pkg
          pkg_size += file_data['size']
      if pkg_files:
        pkg_atom = pkg
        pkg_version = None
        # Try both forms; a WITH_VERSION match overrides the versionless one.
        m = re.match(RE_EBUILD_WITHOUT_VERSION, pkg)
        if m:
          pkg_atom = m.group(1)
        m = re.match(RE_EBUILD_WITH_VERSION, pkg)
        if m:
          pkg_atom = m.group(1)
          pkg_version = m.group(2)
        self._ebuilds[pkg] = {
            'size': pkg_size,
            'files': len(pkg_files),
            'atom': pkg_atom,
            'version': pkg_version,
        }
    # TODO(deymo): Parse dependencies between ebuilds.

  def ComputeELFFileDeps(self):
    """Computes the dependencies between files.

    Computes the dependencies between the files in the root directory passed
    during construction. The dependencies are inferred for ELF files.
    The list of dependencies for each file in the passed rootfs as a dict().
    The result's keys are the relative path of the files and the value of each
    file is a list of dependencies. A dependency is a tuple (dep_path,
    dep_type) where the dep_path is relative path from the passed root to the
    dependent file and dep_type is one of the following strings stating how
    the dependency was discovered:
      'ldd': The dependent ELF file is listed as needed in the dynamic section.
      'symlink': The dependent file is a symlink to the depending.
    If there are dependencies of a given type whose target file wasn't
    determined, a tuple (None, dep_type) is included. This is the case for
    example if a program uses a library that wasn't found.
    """
    ldpaths = lddtree.LoadLdpaths(self._root)

    # Collect the regular files (skipping symlinks and hardlink duplicates)
    # whose ELF headers need to be parsed.
    parseelf_args = []
    for rel_path in self._files:
      if rel_path in self._symlinks or rel_path in self._hardlinks:
        continue

      full_path = os.path.join(self._root, rel_path)
      st = os.lstat(full_path)
      if not stat.S_ISREG(st.st_mode):
        continue
      parseelf_args.append((self._root, rel_path, ldpaths))

    # Parallelize the ELF lookup step since it is quite expensive.
    elfs = dict(x for x in self._imap(ParseELFWithArgs, parseelf_args)
                if x is not None)

    for rel_path, elf in elfs.iteritems():
      file_data = self._files[rel_path]
      # Fill in the ftype if not set yet. We complete this value at this point
      # to avoid re-parsing the ELF file later.
      if 'ftype' not in file_data:
        ftype = self._file_type_decoder.GetType(rel_path, elf=elf)
        if ftype:
          file_data['ftype'] = ftype

      file_deps = file_data.get('deps', {})
      # Dependencies based on the result of ldd.
      for lib in elf.get('needed', []):
        lib_path = elf['libs'][lib]['path']
        file_deps.setdefault('ldd', []).append(lib_path)

      if file_deps:
        file_data['deps'] = file_deps

  def ComputeFileTypes(self):
    """Computes all the missing file types for the files in the root."""
    for rel_path, file_data in self._files.iteritems():
      if 'ftype' in file_data:
        continue
      ftype = self._file_type_decoder.GetType(rel_path)
      if ftype:
        file_data['ftype'] = ftype
267
Alex Deymo3cfb9cd2014-08-18 15:56:35 -0700268
def ParseArgs(argv):
  """Parses the command line arguments.

  Args:
    argv: List of command line arguments to parse.

  Returns:
    The frozen options namespace.
  """
  parser = commandline.ArgumentParser()
  parser.add_argument('-j', '--jobs', type=int,
                      default=multiprocessing.cpu_count(),
                      help='number of simultaneous jobs.')
  parser.add_argument('--portage-db', type='path', metavar='PORTAGE_DB',
                      help='parse portage DB for ebuild information')
  parser.add_argument('--json', type='path',
                      help='store information in JSON file')
  parser.add_argument('root', type='path',
                      help='path to the directory where the rootfs is mounted.')

  opts = parser.parse_args(argv)
  opts.Freeze()
  return opts
290
291
def main(argv):
  """Entry point: computes file information for a rootfs and reports it."""
  opts = ParseArgs(argv)
  cros_build_lib.Debug('Options are %s', opts)

  tracker = DepTracker(opts.root, jobs=opts.jobs)
  tracker.Init()
  tracker.ComputeELFFileDeps()
  tracker.ComputeFileTypes()

  # Ebuild annotation and JSON output are both optional.
  if opts.portage_db:
    tracker.ComputeEbuildDeps(opts.portage_db)
  if opts.json:
    tracker.SaveJSON(opts.json)