blob: 2150ce1b1c5361c3ccb0199d65dafe0b6b46dee8 [file] [log] [blame]
Dmitry Polukhincbdd21c2013-08-13 10:42:04 -07001#!/usr/bin/python
2# -*- coding: utf-8 -*-
3# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
4# Use of this source code is governed by a BSD-style license that can be
5# found in the LICENSE file.
6
7"""Generate and upload tarballs for default apps cache.
8
9Run inside the 'files' dir containing 'external_extensions.json' file:
10$ chromite/bin/chrome_update_extension_cache --create --upload \\
11 chromeos-default-apps-1.0.0
12
13Always increment the version when you update an existing package.
14If no new files are added, increment the third version number.
15 e.g. 1.0.0 -> 1.0.1
16If you change list of default extensions, increment the second version number.
17 e.g. 1.0.0 -> 1.1.0
18
19Also you need to regenerate the Manifest with the new tarball digest.
20Run inside the chroot:
21$ ebuild chromeos-default-apps-1.0.0.ebuild manifest --force
22"""
23
24import json
25import os
26import urllib
27import xml.dom.minidom
28
29from chromite.lib import commandline
30from chromite.lib import cros_build_lib
31from chromite.lib import gs
32from chromite.lib import osutils
33
34
Dmitry Polukhincbdd21c2013-08-13 10:42:04 -070035UPLOAD_URL_BASE = 'gs://chromeos-localmirror-private/distfiles'
36
37
def DownloadCrx(ext, extension, crxdir):
  """Download the .crx file for |ext| from the WebStore into |crxdir|.

  Args:
    ext: Extension id string (the WebStore identifier).
    extension: Dict describing the extension; must contain 'name' and
        'external_update_url'.
    crxdir: Directory whose 'extensions' subdirectory receives the .crx.

  Returns:
    True on success, False if the update check or the download failed.
  """
  cros_build_lib.Info('Extension "%s"(%s)...', extension['name'], ext)

  update_url = '%s?x=id%%3D%s%%26uc' % (extension['external_update_url'], ext)
  response = urllib.urlopen(update_url)
  try:
    if response.getcode() != 200:
      cros_build_lib.Error('Cannot get update response, URL: %s, error: %d',
                           update_url, response.getcode())
      return False
    dom = xml.dom.minidom.parse(response)
  finally:
    # Close explicitly; the original leaked the connection.
    response.close()

  status = dom.getElementsByTagName('app')[0].getAttribute('status')
  if status != 'ok':
    cros_build_lib.Error('Cannot fetch extension, status: %s', status)
    return False

  node = dom.getElementsByTagName('updatecheck')[0]
  url = node.getAttribute('codebase')
  version = node.getAttribute('version')
  filename = '%s-%s.crx' % (ext, version)
  response = urllib.urlopen(url)
  try:
    if response.getcode() != 200:
      cros_build_lib.Error('Cannot download extension, URL: %s, error: %d',
                           url, response.getcode())
      return False
    osutils.WriteFile(os.path.join(crxdir, 'extensions', filename),
                      response.read())
  finally:
    response.close()

  # Keep external_update_url in the json file; ExternalCache will take care
  # of replacing it with the proper external_crx path and version.

  cros_build_lib.Info('Downloaded, current version %s', version)
  return True
73
74
def CreateValidationFiles(validationdir, crxdir, identifier):
  """Write a sha256 validation file for every file cached under |crxdir|."""

  extensions_root = os.path.join(crxdir, 'extensions')
  prefix_len = len(crxdir) + 1

  # Gather every downloaded file (JSON files live elsewhere), expressed
  # relative to |crxdir| — strip the crxdir prefix plus the path separator —
  # so the recorded hashes match at verification time.
  relative_paths = []
  for parent, _, names in os.walk(extensions_root):
    relative_paths.extend(
        os.path.join(parent[prefix_len:], name) for name in names)

  osutils.SafeMakedirs(validationdir)
  validation_file = os.path.join(validationdir, '%s.validation' % identifier)
  cros_build_lib.RunCommand(['sha256sum'] + relative_paths,
                            log_stdout_to_file=validation_file,
                            cwd=crxdir, print_cmd=False)
  cros_build_lib.Info('Hashes created.')
95
96
def _WriteExtensionJson(path, extension):
  """Serialize |extension| as deterministic, pretty-printed JSON to |path|."""
  # 'with' closes the handle promptly; the original passed an anonymous
  # open() straight to json.dump and leaked it.
  with open(path, 'w') as json_file:
    json.dump(extension, json_file,
              sort_keys=True,
              indent=2,
              separators=(',', ': '))


def CreateCacheTarball(extensions, outputdir, identifier, tarball):
  """Cache |extensions| in |outputdir| and pack them in |tarball|.

  Args:
    extensions: Dict mapping extension id -> extension description dict.
        Mutated in place: 'cache_crx' and 'managed_users' keys are stripped.
    outputdir: Scratch directory in which the cache layout is built.
    identifier: Version identifier; names the validation file.
    tarball: Path of the tarball to create.

  Raises:
    SystemExit: via cros_build_lib.Die on bad 'cache_crx' values or when any
        extension failed to download.
  """
  crxdir = os.path.join(outputdir, 'crx')
  jsondir = os.path.join(outputdir, 'json')
  validationdir = os.path.join(outputdir, 'validation')

  osutils.SafeMakedirs(os.path.join(crxdir, 'extensions', 'managed_users'))
  osutils.SafeMakedirs(os.path.join(jsondir, 'extensions', 'managed_users'))
  was_errors = False
  for ext in extensions:
    managed_users = extensions[ext].get('managed_users', 'no')
    cache_crx = extensions[ext].get('cache_crx', 'yes')

    # Remove fields that shouldn't be in the output file.
    for key in ('cache_crx', 'managed_users'):
      extensions[ext].pop(key, None)

    if cache_crx == 'yes':
      if not DownloadCrx(ext, extensions[ext], crxdir):
        was_errors = True
    elif cache_crx == 'no':
      pass
    else:
      cros_build_lib.Die('Unknown value for "cache_crx" %s for %s',
                         cache_crx, ext)

    if managed_users == 'yes':
      _WriteExtensionJson(
          os.path.join(jsondir, 'extensions/managed_users/%s.json' % ext),
          extensions[ext])

    if managed_users != 'only':
      _WriteExtensionJson(
          os.path.join(jsondir, 'extensions/%s.json' % ext),
          extensions[ext])

  if was_errors:
    cros_build_lib.Die('Failed to download some extensions')

  CreateValidationFiles(validationdir, crxdir, identifier)
  cros_build_lib.CreateTarball(tarball, outputdir)
  cros_build_lib.Info('Tarball created %s', tarball)
147
148
def main(argv):
  """Entry point: parse args, then create and/or upload the cache tarball.

  Args:
    argv: Command-line arguments (a single positional version identifier,
        plus --path/--create/--upload options).
  """
  parser = commandline.ArgumentParser(
      '%%(prog)s [options] <version>\n\n%s' % __doc__)
  parser.add_argument('version', nargs=1)
  parser.add_argument('--path', default=None, type='path',
                      help='Path of files dir with external_extensions.json')
  parser.add_argument('--create', default=False, action='store_true',
                      help='Create cache tarball with specified name')
  parser.add_argument('--upload', default=False, action='store_true',
                      help='Upload cache tarball with specified name')
  options = parser.parse_args(argv)

  if options.path:
    os.chdir(options.path)

  if not (options.create or options.upload):
    cros_build_lib.Die('Need at least --create or --upload args')

  if not os.path.exists('external_extensions.json'):
    cros_build_lib.Die('No external_extensions.json in %s. Did you forget the '
                       '--path option?', os.getcwd())

  identifier = options.version[0]
  tarball = '%s.tar.xz' % identifier
  if options.create:
    # 'with' closes the handle promptly; the original passed an anonymous
    # open() straight to json.load and leaked it.
    with open('external_extensions.json', 'r') as extensions_file:
      extensions = json.load(extensions_file)
    with osutils.TempDir() as tempdir:
      CreateCacheTarball(extensions, tempdir, identifier,
                         os.path.abspath(tarball))

  if options.upload:
    ctx = gs.GSContext()
    url = os.path.join(UPLOAD_URL_BASE, tarball)
    if ctx.Exists(url):
      cros_build_lib.Die('This version already exists on Google Storage (%s)!\n'
                         'NEVER REWRITE EXISTING FILE. IT WILL BREAK CHROME OS '
                         'BUILD!!!', url)
    ctx.Copy(os.path.abspath(tarball), url, acl='project-private')
    cros_build_lib.Info('Tarball uploaded %s', url)
    osutils.SafeUnlink(os.path.abspath(tarball))