OLD | NEW |
---|---|
1 # This file is part of the Adblock Plus web scripts, | 1 # This file is part of the Adblock Plus web scripts, |
2 # Copyright (C) 2006-present eyeo GmbH | 2 # Copyright (C) 2006-present eyeo GmbH |
3 # | 3 # |
4 # Adblock Plus is free software: you can redistribute it and/or modify | 4 # Adblock Plus is free software: you can redistribute it and/or modify |
5 # it under the terms of the GNU General Public License version 3 as | 5 # it under the terms of the GNU General Public License version 3 as |
6 # published by the Free Software Foundation. | 6 # published by the Free Software Foundation. |
7 # | 7 # |
8 # Adblock Plus is distributed in the hope that it will be useful, | 8 # Adblock Plus is distributed in the hope that it will be useful, |
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
11 # GNU General Public License for more details. | 11 # GNU General Public License for more details. |
12 # | 12 # |
13 # You should have received a copy of the GNU General Public License | 13 # You should have received a copy of the GNU General Public License |
14 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. | 14 # along with Adblock Plus. If not, see <http://www.gnu.org/licenses/>. |
15 | 15 |
16 """ | 16 """ |
17 | 17 |
18 Nightly builds generation script | 18 Nightly builds generation script |
19 ================================ | 19 ================================ |
20 | 20 |
21 This script generates nightly builds of extensions, together | 21 This script generates nightly builds of extensions, together |
22 with changelogs and documentation. | 22 with changelogs and documentation. |
23 | 23 |
24 """ | 24 """ |
25 | 25 |
26 import argparse | |
26 import ConfigParser | 27 import ConfigParser |
27 import base64 | 28 import base64 |
28 import hashlib | 29 import hashlib |
29 import hmac | 30 import hmac |
30 import json | 31 import json |
31 import logging | 32 import logging |
32 import os | 33 import os |
33 import pipes | 34 import pipes |
34 import random | 35 import random |
35 import shutil | 36 import shutil |
(...skipping 27 matching lines...) | |
63 raise urllib2.HTTPError(req.get_full_url(), code, | 64 raise urllib2.HTTPError(req.get_full_url(), code, |
64 '{}\n{}'.format(msg, fp.read()), hdrs, fp) | 65 '{}\n{}'.format(msg, fp.read()), hdrs, fp) |
65 | 66 |
66 | 67 |
67 class NightlyBuild(object): | 68 class NightlyBuild(object): |
68 """ | 69 """ |
69 Performs the build process for an extension, | 70 Performs the build process for an extension, |
70 generating changelogs and documentation. | 71 generating changelogs and documentation. |
71 """ | 72 """ |
72 | 73 |
74 downloadable_repos = {'gecko'} | |
75 | |
73 def __init__(self, config): | 76 def __init__(self, config): |
74 """ | 77 """ |
75 Creates a NightlyBuild instance; we are simply | 78 Creates a NightlyBuild instance; we are simply |
76 recording the configuration settings here. | 79 recording the configuration settings here. |
77 """ | 80 """ |
78 self.config = config | 81 self.config = config |
79 self.revision = self.getCurrentRevision() | 82 self.revision = self.getCurrentRevision() |
80 try: | 83 try: |
81 self.previousRevision = config.latestRevision | 84 self.previousRevision = config.latestRevision |
82 except: | 85 except: |
(...skipping 15 matching lines...) | |
98 'defaults.id=', self.config.repository | 101 'defaults.id=', self.config.repository |
99 ] | 102 ] |
100 return subprocess.check_output(command).strip() | 103 return subprocess.check_output(command).strip() |
101 | 104 |
102 def getCurrentBuild(self): | 105 def getCurrentBuild(self): |
103 """ | 106 """ |
104 calculates the (typically numerical) build ID for the current build | 107 calculates the (typically numerical) build ID for the current build |
105 """ | 108 """ |
106 command = ['hg', 'id', '-n', '--config', 'defaults.id=', self.tempdir] | 109 command = ['hg', 'id', '-n', '--config', 'defaults.id=', self.tempdir] |
107 build = subprocess.check_output(command).strip() | 110 build = subprocess.check_output(command).strip() |
108 if self.config.type == 'gecko': | |
109 build += 'beta' | |
110 return build | 111 return build |
111 | 112 |
112 def getChanges(self): | 113 def getChanges(self): |
113 """ | 114 """ |
114 retrieve changes between the current and previous ("first") revision | 115 retrieve changes between the current and previous ("first") revision |
115 """ | 116 """ |
116 command = [ | 117 command = [ |
117 'hg', 'log', '-R', self.tempdir, '-r', | 118 'hg', 'log', '-R', self.tempdir, '-r', |
118 'reverse(ancestors({}))'.format(self.config.revision), '-l', '50', | 119 'reverse(ancestors({}))'.format(self.config.revision), '-l', '50', |
119 '--encoding', 'utf-8', '--template', | 120 '--encoding', 'utf-8', '--template', |
(...skipping 14 matching lines...) | |
134 self.tempdir = tempfile.mkdtemp(prefix=self.config.repositoryName) | 135 self.tempdir = tempfile.mkdtemp(prefix=self.config.repositoryName) |
135 command = ['hg', 'clone', '-q', self.config.repository, '-u', | 136 command = ['hg', 'clone', '-q', self.config.repository, '-u', |
136 self.config.revision, self.tempdir] | 137 self.config.revision, self.tempdir] |
137 subprocess.check_call(command) | 138 subprocess.check_call(command) |
138 | 139 |
139 # Make sure to run ensure_dependencies.py if present | 140 # Make sure to run ensure_dependencies.py if present |
140 depscript = os.path.join(self.tempdir, 'ensure_dependencies.py') | 141 depscript = os.path.join(self.tempdir, 'ensure_dependencies.py') |
141 if os.path.isfile(depscript): | 142 if os.path.isfile(depscript): |
142 subprocess.check_call([sys.executable, depscript, '-q']) | 143 subprocess.check_call([sys.executable, depscript, '-q']) |
143 | 144 |
145 def symlink_or_copy(self, source, target): | |
146 if hasattr(os, 'symlink'): | |
147 if os.path.exists(target): | |
148 os.remove(target) | |
149 os.symlink(os.path.basename(source), target) | |
150 else: | |
151 shutil.copyfile(source, target) | |
152 | |
144 def writeChangelog(self, changes): | 153 def writeChangelog(self, changes): |
145 """ | 154 """ |
146 write the changelog file into the cloned repository | 155 write the changelog file into the cloned repository |
147 """ | 156 """ |
148 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 157 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
149 if not os.path.exists(baseDir): | 158 if not os.path.exists(baseDir): |
150 os.makedirs(baseDir) | 159 os.makedirs(baseDir) |
151 changelogFile = '%s-%s.changelog.xhtml' % (self.basename, self.version) | 160 changelogFile = '%s-%s.changelog.xhtml' % (self.basename, self.version) |
152 changelogPath = os.path.join(baseDir, changelogFile) | 161 changelogPath = os.path.join(baseDir, changelogFile) |
153 self.changelogURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + changelogFile) | 162 self.changelogURL = urlparse.urljoin(self.config.nightliesURL, self.basename + '/' + changelogFile) |
154 | 163 |
155 template = get_template(get_config().get('extensions', 'changelogTemplate')) | 164 template = get_template(get_config().get('extensions', 'changelogTemplate')) |
156 template.stream({'changes': changes}).dump(changelogPath, encoding='utf-8') | 165 template.stream({'changes': changes}).dump(changelogPath, encoding='utf-8') |
157 | 166 |
158 linkPath = os.path.join(baseDir, '00latest.changelog.xhtml') | 167 linkPath = os.path.join(baseDir, '00latest.changelog.xhtml') |
159 if hasattr(os, 'symlink'): | 168 self.symlink_or_copy(changelogPath, linkPath) |
160 if os.path.exists(linkPath): | |
161 os.remove(linkPath) | |
162 os.symlink(os.path.basename(changelogPath), linkPath) | |
163 else: | |
164 shutil.copyfile(changelogPath, linkPath) | |
165 | 169 |
166 def readGeckoMetadata(self): | 170 def readGeckoMetadata(self): |
167 """ | 171 """ |
168 read Gecko-specific metadata file from a cloned repository | 172 read Gecko-specific metadata file from a cloned repository |
169 and parse id, version, basename and the compat section | 173 and parse id, version, basename and the compat section |
170 out of the file | 174 out of the file |
171 """ | 175 """ |
172 import buildtools.packagerChrome as packager | 176 import buildtools.packagerChrome as packager |
173 metadata = packager.readMetadata(self.tempdir, self.config.type) | 177 metadata = packager.readMetadata(self.tempdir, self.config.type) |
174 self.extensionID = metadata.get('general', 'id') | 178 self.extensionID = packager.get_app_id(False, metadata) |
175 self.version = packager.getBuildVersion(self.tempdir, metadata, False, | 179 self.version = packager.getBuildVersion(self.tempdir, metadata, False, |
176 self.buildNum) | 180 self.buildNum) |
177 self.basename = metadata.get('general', 'basename') | 181 self.basename = metadata.get('general', 'basename') |
182 self.min_version = metadata.get('compat', 'gecko') | |
178 | 183 |
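For orientation, the metadata consumed here is an INI-style file that the real code reads through `buildtools.packagerChrome.readMetadata()` and `packager.get_app_id()`. A minimal sketch of the same lookups, using ConfigParser directly with a hypothetical `metadata.gecko` file and made-up values:

```python
# Sketch only: the real code goes through buildtools.packagerChrome;
# the file name and the example values are hypothetical.
import ConfigParser

metadata = ConfigParser.SafeConfigParser()
metadata.read('metadata.gecko')  # hypothetical file with [general] and [compat] sections

basename = metadata.get('general', 'basename')  # used to name output files
min_gecko = metadata.get('compat', 'gecko')     # minimum supported Gecko version
```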
179 def readAndroidMetadata(self): | 184 def readAndroidMetadata(self): |
180 """ | 185 """ |
181 Read Android-specific metadata from AndroidManifest.xml file. | 186 Read Android-specific metadata from AndroidManifest.xml file. |
182 """ | 187 """ |
183 manifestFile = open(os.path.join(self.tempdir, 'AndroidManifest.xml'), 'r') | 188 manifestFile = open(os.path.join(self.tempdir, 'AndroidManifest.xml'), 'r') |
184 manifest = parseXml(manifestFile) | 189 manifest = parseXml(manifestFile) |
185 manifestFile.close() | 190 manifestFile.close() |
186 | 191 |
187 root = manifest.documentElement | 192 root = manifest.documentElement |
(...skipping 26 matching lines...) | |
214 self.version = packager.getBuildVersion(self.tempdir, metadata, False, | 219 self.version = packager.getBuildVersion(self.tempdir, metadata, False, |
215 self.buildNum) | 220 self.buildNum) |
216 self.basename = metadata.get('general', 'basename') | 221 self.basename = metadata.get('general', 'basename') |
217 | 222 |
218 self.compat = [] | 223 self.compat = [] |
219 if metadata.has_section('compat') and metadata.has_option('compat', 'chrome'): | 224 if metadata.has_section('compat') and metadata.has_option('compat', 'chrome'): |
220 self.compat.append({'id': 'chrome', 'minVersion': metadata.get('compat', 'chrome')}) | 225 self.compat.append({'id': 'chrome', 'minVersion': metadata.get('compat', 'chrome')}) |
221 | 226 |
222 def readSafariMetadata(self): | 227 def readSafariMetadata(self): |
223 import sitescripts.extensions.bin.legacy.packagerSafari as packager | 228 import sitescripts.extensions.bin.legacy.packagerSafari as packager |
224 from buildtools import xarfile | 229 from sitescripts.extensions.bin.legacy import xarfile |
225 metadata = packager.readMetadata(self.tempdir, self.config.type) | 230 metadata = packager.readMetadata(self.tempdir, self.config.type) |
226 certs = xarfile.read_certificates_and_key(self.config.keyFile)[0] | 231 certs = xarfile.read_certificates_and_key(self.config.keyFile)[0] |
227 | 232 |
228 self.certificateID = packager.get_developer_identifier(certs) | 233 self.certificateID = packager.get_developer_identifier(certs) |
229 self.version = packager.getBuildVersion(self.tempdir, metadata, False, | 234 self.version = packager.getBuildVersion(self.tempdir, metadata, False, |
230 self.buildNum) | 235 self.buildNum) |
231 self.shortVersion = metadata.get('general', 'version') | 236 self.shortVersion = metadata.get('general', 'version') |
232 self.basename = metadata.get('general', 'basename') | 237 self.basename = metadata.get('general', 'basename') |
233 self.updatedFromGallery = False | 238 self.updatedFromGallery = False |
234 | 239 |
(...skipping 16 matching lines...) | |
251 """ | 256 """ |
252 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 257 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
253 if self.config.type == 'safari': | 258 if self.config.type == 'safari': |
254 manifestPath = os.path.join(baseDir, 'updates.plist') | 259 manifestPath = os.path.join(baseDir, 'updates.plist') |
255 templateName = 'safariUpdateManifest' | 260 templateName = 'safariUpdateManifest' |
256 autoescape = True | 261 autoescape = True |
257 elif self.config.type == 'android': | 262 elif self.config.type == 'android': |
258 manifestPath = os.path.join(baseDir, 'updates.xml') | 263 manifestPath = os.path.join(baseDir, 'updates.xml') |
259 templateName = 'androidUpdateManifest' | 264 templateName = 'androidUpdateManifest' |
260 autoescape = True | 265 autoescape = True |
266 elif self.config.type == 'gecko': | |
267 manifestPath = os.path.join(baseDir, 'updates.json') | |
268 templateName = 'geckoUpdateManifest' | |
269 autoescape = True | |
261 else: | 270 else: |
262 return | 271 return |
263 | 272 |
264 if not os.path.exists(baseDir): | 273 if not os.path.exists(baseDir): |
265 os.makedirs(baseDir) | 274 os.makedirs(baseDir) |
266 | 275 |
267 # ABP for Android used to have its own update manifest format. We need to | 276 # ABP for Android used to have its own update manifest format. We need to |
268 # generate both that and the new one in the libadblockplus format as long | 277 # generate both that and the new one in the libadblockplus format as long |
269 # as a significant amount of users is on an old version. | 278 # as a significant amount of users is on an old version. |
270 if self.config.type == 'android': | 279 if self.config.type == 'android': |
(...skipping 24 matching lines...) | |
295 from sitescripts.extensions.utils import writeIEUpdateManifest as doWrite | 304 from sitescripts.extensions.utils import writeIEUpdateManifest as doWrite |
296 doWrite(manifestPath, [{ | 305 doWrite(manifestPath, [{ |
297 'basename': self.basename, | 306 'basename': self.basename, |
298 'version': version, | 307 'version': version, |
299 'updateURL': updateURL | 308 'updateURL': updateURL |
300 }]) | 309 }]) |
301 | 310 |
302 for suffix in ['-x86.msi', '-x64.msi', '-gpo-x86.msi', '-gpo-x64.msi']: | 311 for suffix in ['-x86.msi', '-x64.msi', '-gpo-x86.msi', '-gpo-x64.msi']: |
303 linkPath = os.path.join(baseDir, '00latest%s' % suffix) | 312 linkPath = os.path.join(baseDir, '00latest%s' % suffix) |
304 outputPath = os.path.join(baseDir, self.basename + '-' + version + suffix) | 313 outputPath = os.path.join(baseDir, self.basename + '-' + version + suffix) |
305 if hasattr(os, 'symlink'): | 314 self.symlink_or_copy(outputPath, linkPath) |
306 if os.path.exists(linkPath): | |
307 os.remove(linkPath) | |
308 os.symlink(os.path.basename(outputPath), linkPath) | |
309 else: | |
310 shutil.copyfile(outputPath, linkPath) | |
311 | 315 |
312 def build(self): | 316 def build(self): |
313 """ | 317 """ |
314 run the build command in the tempdir | 318 run the build command in the tempdir |
315 """ | 319 """ |
316 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 320 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
317 if not os.path.exists(baseDir): | 321 if not os.path.exists(baseDir): |
318 os.makedirs(baseDir) | 322 os.makedirs(baseDir) |
319 outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix) | 323 outputFile = '%s-%s%s' % (self.basename, self.version, self.config.packageSuffix) |
320 self.path = os.path.join(baseDir, outputFile) | 324 self.path = os.path.join(baseDir, outputFile) |
(...skipping 17 matching lines...) | |
338 # clear broken output if any | 342 # clear broken output if any |
339 if os.path.exists(self.path): | 343 if os.path.exists(self.path): |
340 os.remove(self.path) | 344 os.remove(self.path) |
341 raise | 345 raise |
342 else: | 346 else: |
343 env = os.environ | 347 env = os.environ |
344 spiderMonkeyBinary = self.config.spiderMonkeyBinary | 348 spiderMonkeyBinary = self.config.spiderMonkeyBinary |
345 if spiderMonkeyBinary: | 349 if spiderMonkeyBinary: |
346 env = dict(env, SPIDERMONKEY_BINARY=spiderMonkeyBinary) | 350 env = dict(env, SPIDERMONKEY_BINARY=spiderMonkeyBinary) |
347 | 351 |
348 command = [os.path.join(self.tempdir, 'build.py'), | 352 command = [os.path.join(self.tempdir, 'build.py')] |
349 'build', '-t', self.config.type, '-b', self.buildNum] | 353 if self.config.type == 'safari': |
354 command.extend(['-t', self.config.type, 'build']) | |
355 else: | |
356 command.extend(['build', '-t', self.config.type]) | |
357 command.extend(['-b', self.buildNum]) | |
358 | |
350 if self.config.type not in {'gecko', 'edge'}: | 359 if self.config.type not in {'gecko', 'edge'}: |
351 command.extend(['-k', self.config.keyFile]) | 360 command.extend(['-k', self.config.keyFile]) |
352 command.append(self.path) | 361 command.append(self.path) |
353 subprocess.check_call(command, env=env) | 362 subprocess.check_call(command, env=env) |
354 | 363 |
355 if not os.path.exists(self.path): | 364 if not os.path.exists(self.path): |
356 raise Exception("Build failed, output file hasn't been created") | 365 raise Exception("Build failed, output file hasn't been created") |
357 | 366 |
358 linkPath = os.path.join(baseDir, '00latest%s' % self.config.packageSuffix) | 367 if self.config.type not in self.downloadable_repos: |
359 if hasattr(os, 'symlink'): | 368 linkPath = os.path.join(baseDir, |
360 if os.path.exists(linkPath): | 369 '00latest' + self.config.packageSuffix) |
361 os.remove(linkPath) | 370 self.symlink_or_copy(self.path, linkPath) |
362 os.symlink(os.path.basename(self.path), linkPath) | |
363 else: | |
364 shutil.copyfile(self.path, linkPath) | |
365 | 371 |
366 def retireBuilds(self): | 372 def retireBuilds(self): |
367 """ | 373 """ |
368 removes outdated builds, returns the sorted version numbers of remaining | 374 removes outdated builds, returns the sorted version numbers of remaining |
369 builds | 375 builds |
370 """ | 376 """ |
371 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) | 377 baseDir = os.path.join(self.config.nightliesDirectory, self.basename) |
372 versions = [] | 378 versions = [] |
373 prefix = self.basename + '-' | 379 prefix = self.basename + '-' |
374 suffix = self.config.packageSuffix | 380 suffix = self.config.packageSuffix |
(...skipping 32 matching lines...) | |
407 'download': packageFile, | 413 'download': packageFile, |
408 'mtime': os.path.getmtime(os.path.join(baseDir, packageFile)), | 414 'mtime': os.path.getmtime(os.path.join(baseDir, packageFile)), |
409 'size': os.path.getsize(os.path.join(baseDir, packageFile)) | 415 'size': os.path.getsize(os.path.join(baseDir, packageFile)) |
410 } | 416 } |
411 if os.path.exists(os.path.join(baseDir, changelogFile)): | 417 if os.path.exists(os.path.join(baseDir, changelogFile)): |
412 link['changelog'] = changelogFile | 418 link['changelog'] = changelogFile |
413 links.append(link) | 419 links.append(link) |
414 template = get_template(get_config().get('extensions', 'nightlyIndexPage')) | 420 template = get_template(get_config().get('extensions', 'nightlyIndexPage')) |
415 template.stream({'config': self.config, 'links': links}).dump(outputPath) | 421 template.stream({'config': self.config, 'links': links}).dump(outputPath) |
416 | 422 |
417 def uploadToMozillaAddons(self): | 423 def read_downloads_lockfile(self): |
418 import urllib3 | 424 path = get_config().get('extensions', 'downloadLockFile') |
425 try: | |
426 with open(path, 'r') as fp: | |
427 current = json.load(fp) | |
428 except IOError: | |
429 logging.warning('No lockfile found. Creating ' + path) | |
430 current = {} | |
419 | 431 |
432 return current | |
433 | |
434 def write_downloads_lockfile(self, values): | |
435 path = get_config().get('extensions', 'downloadLockFile') | |
436 with open(path, 'w') as fp: | |
437 json.dump(values, fp) | |
438 | |
439 def add_to_downloads_lockfile(self, platform, values): | |
440 current = self.read_downloads_lockfile() | |
441 | |
442 current.setdefault(platform, []) | |
443 current[platform].append(values) | |
444 | |
445 self.write_downloads_lockfile(current) | |
446 | |
447 def remove_from_downloads_lockfile(self, platform, filter_key, | |
448 filter_value): | |
449 current = self.read_downloads_lockfile() | |
450 try: | |
451 for i, entry in enumerate(current[platform]): | |
452 if entry[filter_key] == filter_value: | |
453 del current[platform][i] | |
454 if len(current[platform]) == 0: | |
455 del current[platform] | |
456 except KeyError: | |
457 pass | |
458 self.write_downloads_lockfile(current) | |
459 | |
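The new lockfile helpers above keep a JSON object keyed by platform, each entry listing a build whose signed file still has to be downloaded. As an illustration (all values are made up), after `add_to_downloads_lockfile()` runs during a gecko upload the file would hold something like:

```python
# Illustrative only: the structure the lockfile helpers read and write
# (platform -> list of pending download entries); values are hypothetical.
import json

lockfile_contents = {
    'gecko': [
        {
            'buildtype': 'devbuild',
            'app_id': 'devbuild@example.org',  # hypothetical extension ID
            'version': '3.0.2.1987',           # hypothetical nightly version
        },
    ],
}
print(json.dumps(lockfile_contents, indent=4))
```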
460 def generate_jwt_request(self, issuer, secret, url, method, data=None, | |
461 add_headers=[]): | |
420 header = { | 462 header = { |
421 'alg': 'HS256', # HMAC-SHA256 | 463 'alg': 'HS256', # HMAC-SHA256 |
422 'typ': 'JWT', | 464 'typ': 'JWT', |
423 } | 465 } |
424 | 466 |
425 issued = int(time.time()) | 467 issued = int(time.time()) |
426 payload = { | 468 payload = { |
427 'iss': get_config().get('extensions', 'amo_key'), | 469 'iss': issuer, |
428 'jti': random.random(), | 470 'jti': random.random(), |
429 'iat': issued, | 471 'iat': issued, |
430 'exp': issued + 60, | 472 'exp': issued + 60, |
431 } | 473 } |
432 | 474 |
433 input = '{}.{}'.format( | 475 hmac_data = '{}.{}'.format( |
434 base64.b64encode(json.dumps(header)), | 476 base64.b64encode(json.dumps(header)), |
435 base64.b64encode(json.dumps(payload)) | 477 base64.b64encode(json.dumps(payload)) |
436 ) | 478 ) |
437 | 479 |
438 signature = hmac.new(get_config().get('extensions', 'amo_secret'), | 480 signature = hmac.new(secret, msg=hmac_data, |
439 msg=input, | |
440 digestmod=hashlib.sha256).digest() | 481 digestmod=hashlib.sha256).digest() |
441 token = '{}.{}'.format(input, base64.b64encode(signature)) | 482 token = '{}.{}'.format(hmac_data, base64.b64encode(signature)) |
483 | |
484 request = urllib2.Request(url, data) | |
485 request.add_header('Authorization', 'JWT ' + token) | |
486 for header in add_headers: | |
487 request.add_header(*header) | |
488 request.get_method = lambda: method | |
489 | |
490 return request | |
491 | |
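`generate_jwt_request()` hand-rolls the JWT that AMO's API expects: a base64-encoded header and payload joined with a dot and signed with HMAC-SHA256. A standalone sketch of just the token construction, mirroring the method above (Python 2, like the rest of the script; `issuer` and `secret` are placeholder credentials):

```python
# Sketch of the token format built above; the real method also wraps the
# token in a urllib2.Request with an 'Authorization: JWT <token>' header.
import base64
import hashlib
import hmac
import json
import random
import time

def make_token(issuer, secret):
    issued = int(time.time())
    header = {'alg': 'HS256', 'typ': 'JWT'}
    payload = {'iss': issuer, 'jti': random.random(),
               'iat': issued, 'exp': issued + 60}
    signing_input = '{}.{}'.format(base64.b64encode(json.dumps(header)),
                                   base64.b64encode(json.dumps(payload)))
    signature = hmac.new(secret, msg=signing_input,
                         digestmod=hashlib.sha256).digest()
    return '{}.{}'.format(signing_input, base64.b64encode(signature))
```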
492 def uploadToMozillaAddons(self): | |
493 import urllib3 | |
494 | |
495 config = get_config() | |
442 | 496 |
443 upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/' | 497 upload_url = ('https://addons.mozilla.org/api/v3/addons/{}/' |
444 'versions/{}/').format(self.extensionID, self.version) | 498 'versions/{}/').format(self.extensionID, self.version) |
445 | 499 |
446 with open(self.path, 'rb') as file: | 500 with open(self.path, 'rb') as file: |
447 data, content_type = urllib3.filepost.encode_multipart_formdata({ | 501 data, content_type = urllib3.filepost.encode_multipart_formdata({ |
448 'upload': ( | 502 'upload': ( |
449 os.path.basename(self.path), | 503 os.path.basename(self.path), |
450 file.read(), | 504 file.read(), |
451 'application/x-xpinstall' | 505 'application/x-xpinstall' |
452 ) | 506 ) |
453 }) | 507 }) |
454 | 508 |
455 request = urllib2.Request(upload_url, data=data) | 509 request = self.generate_jwt_request( |
456 request.add_header('Content-Type', content_type) | 510 config.get('extensions', 'amo_key'), |
457 request.add_header('Authorization', 'JWT ' + token) | 511 config.get('extensions', 'amo_secret'), |
458 request.get_method = lambda: 'PUT' | 512 upload_url, |
513 'PUT', | |
514 data, | |
515 (('Content-Type', content_type),), | |
Sebastian Noack, 2018/03/09 16:16:27:
Nit: I missed this one before. Again, comma after
tlucas, 2018/03/12 07:22:46:
Done.
Vasily Kuznetsov, 2018/03/13 17:49:59:
I actually prefer the comma after the last argument
516 ) | |
459 | 517 |
460 try: | 518 try: |
461 urllib2.urlopen(request).close() | 519 urllib2.urlopen(request).close() |
462 except urllib2.HTTPError as e: | 520 except urllib2.HTTPError as e: |
463 try: | 521 try: |
464 logging.error(e.read()) | 522 logging.error(e.read()) |
465 finally: | 523 finally: |
466 e.close() | 524 e.close() |
467 raise | 525 raise |
468 | 526 |
527 self.add_to_downloads_lockfile( | |
528 self.config.type, | |
529 { | |
530 'buildtype': 'devbuild', | |
531 'app_id': self.extensionID, | |
532 'version': self.version, | |
533 } | |
534 ) | |
535 os.remove(self.path) | |
536 | |
537 def download_from_mozilla_addons(self, buildtype, version, app_id): | |
538 config = get_config() | |
539 iss = config.get('extensions', 'amo_key') | |
540 secret = config.get('extensions', 'amo_secret') | |
541 | |
542 url = ('https://addons.mozilla.org/api/v3/addons/{}/' | |
543 'versions/{}/').format(app_id, version) | |
544 | |
545 request = self.generate_jwt_request(iss, secret, url, 'GET') | |
546 response = json.load(urllib2.urlopen(request)) | |
547 | |
548 necessary = ['passed_review', 'reviewed', 'processed', 'valid'] | |
549 if all(response[x] for x in necessary): | |
550 download_url = response['files'][0]['download_url'] | |
551 checksum = response['files'][0]['hash'] | |
552 | |
553 filename = '{}-{}.xpi'.format(self.basename, version) | |
554 file_path = os.path.join( | |
555 config.get('extensions', 'nightliesDirectory'), | |
556 self.basename, | |
557 filename | |
558 ) | |
559 | |
560 request = self.generate_jwt_request(iss, secret, download_url, | |
561 'GET') | |
562 try: | |
563 response = urllib2.urlopen(request) | |
564 except urllib2.HTTPError as e: | |
565 logging.error(e.read()) | |
566 | |
567 # Verify the extension's integrity | |
568 file_content = response.read() | |
569 sha256 = hashlib.sha256(file_content) | |
570 returned_checksum = '{}:{}'.format(sha256.name, sha256.hexdigest()) | |
571 | |
572 if returned_checksum != checksum: | |
573 logging.error('Checksum could not be verified: {} vs {}' | |
574 ''.format(checksum, returned_checksum)) | |
575 | |
576 with open(file_path, 'w') as fp: | |
577 fp.write(file_content) | |
578 | |
579 self.update_link = os.path.join( | |
580 config.get('extensions', 'nightliesURL'), | |
581 self.basename, | |
582 filename | |
583 ) | |
584 | |
585 self.remove_from_downloads_lockfile(self.config.type, | |
586 'version', | |
587 version) | |
588 elif not response['passed_review'] or not response['valid']: | |
589 # When the review failed for any reason, we want to know about it | |
590 logging.error(json.dumps(response, indent=4)) | |
591 self.remove_from_downloads_lockfile(self.config.type, | |
592 'version', | |
593 version) | |
594 | |
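`download_from_mozilla_addons()` polls AMO's version-status endpoint and only fetches the signed file once the review flags are all set. An abridged, invented example of the payload shape it relies on, showing only the fields the code actually reads:

```python
# Invented example of the AMO version-status response consumed above;
# field values are placeholders, only the keys used by the code are shown.
example_response = {
    'passed_review': True,
    'reviewed': True,
    'processed': True,
    'valid': True,
    'files': [
        {
            'download_url': 'https://addons.mozilla.org/.../example.xpi',  # placeholder
            'hash': 'sha256:0f343b0931126a20f133d67c2b018a3b',  # "<algorithm>:<hexdigest>"
        },
    ],
}

necessary = ['passed_review', 'reviewed', 'processed', 'valid']
ready_for_download = all(example_response[x] for x in necessary)
```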
469 def uploadToChromeWebStore(self): | 595 def uploadToChromeWebStore(self): |
470 | 596 |
471 opener = urllib2.build_opener(HTTPErrorBodyHandler) | 597 opener = urllib2.build_opener(HTTPErrorBodyHandler) |
472 | 598 |
473 # use refresh token to obtain a valid access token | 599 # use refresh token to obtain a valid access token |
474 # https://developers.google.com/accounts/docs/OAuth2WebServer#refresh | 600 # https://developers.google.com/accounts/docs/OAuth2WebServer#refresh |
475 | 601 |
476 response = json.load(opener.open( | 602 response = json.load(opener.open( |
477 'https://accounts.google.com/o/oauth2/token', | 603 'https://accounts.google.com/o/oauth2/token', |
478 | 604 |
(...skipping 176 matching lines...) | |
655 self.readSafariMetadata() | 781 self.readSafariMetadata() |
656 elif self.config.type == 'gecko': | 782 elif self.config.type == 'gecko': |
657 self.readGeckoMetadata() | 783 self.readGeckoMetadata() |
658 elif self.config.type == 'edge': | 784 elif self.config.type == 'edge': |
659 self.read_edge_metadata() | 785 self.read_edge_metadata() |
660 else: | 786 else: |
661 raise Exception('Unknown build type {}' % self.config.type) | 787 raise Exception('Unknown build type {}' % self.config.type) |
662 | 788 |
663 # create development build | 789 # create development build |
664 self.build() | 790 self.build() |
791 if self.config.type not in self.downloadable_repos: | |
792 # write out changelog | |
793 self.writeChangelog(self.getChanges()) | |
665 | 794 |
666 # write out changelog | 795 # write update manifest |
667 self.writeChangelog(self.getChanges()) | 796 self.writeUpdateManifest() |
668 | |
669 # write update manifest | |
670 self.writeUpdateManifest() | |
671 | 797 |
672 # retire old builds | 798 # retire old builds |
673 versions = self.retireBuilds() | 799 versions = self.retireBuilds() |
674 | 800 |
675 if self.config.type == 'ie': | 801 if self.config.type == 'ie': |
676 self.writeIEUpdateManifest(versions) | 802 self.writeIEUpdateManifest(versions) |
677 | 803 |
678 # update index page | 804 if self.config.type not in self.downloadable_repos: |
679 self.updateIndex(versions) | 805 # update index page |
806 self.updateIndex(versions) | |
680 | 807 |
681 # update nightlies config | 808 # update nightlies config |
682 self.config.latestRevision = self.revision | 809 self.config.latestRevision = self.revision |
683 | 810 |
684 if (self.config.type == 'gecko' and | 811 if (self.config.type == 'gecko' and |
685 self.config.galleryID and | 812 self.config.galleryID and |
686 get_config().has_option('extensions', 'amo_key')): | 813 get_config().has_option('extensions', 'amo_key')): |
687 self.uploadToMozillaAddons() | 814 self.uploadToMozillaAddons() |
688 elif self.config.type == 'chrome' and self.config.clientID and self.config.clientSecret and self.config.refreshToken: | 815 elif self.config.type == 'chrome' and self.config.clientID and self.config.clientSecret and self.config.refreshToken: |
689 self.uploadToChromeWebStore() | 816 self.uploadToChromeWebStore() |
690 elif self.config.type == 'edge' and self.config.clientID and self.config.clientSecret and self.config.refreshToken and self.config.tenantID: | 817 elif self.config.type == 'edge' and self.config.clientID and self.config.clientSecret and self.config.refreshToken and self.config.tenantID: |
691 self.upload_to_windows_store() | 818 self.upload_to_windows_store() |
692 | 819 |
693 finally: | 820 finally: |
694 # clean up | 821 # clean up |
695 if self.tempdir: | 822 if self.tempdir: |
696 shutil.rmtree(self.tempdir, ignore_errors=True) | 823 shutil.rmtree(self.tempdir, ignore_errors=True) |
697 | 824 |
825 def download(self): | |
826 with open(get_config().get('extensions', 'downloadLockFile')) as fp: | |
827 download_info = json.load(fp) | |
698 | 828 |
699 def main(): | 829 downloads = self.downloadable_repos.intersection(download_info.keys()) |
830 | |
831 if self.config.type in downloads: | |
832 try: | |
833 self.copyRepository() | |
834 self.readGeckoMetadata() | |
835 | |
836 for data in download_info[self.config.type]: | |
837 self.version = data['version'] | |
838 | |
839 self.download_from_mozilla_addons(**data) | |
840 | |
841 # write out changelog | |
842 self.writeChangelog(self.getChanges()) | |
843 | |
844 # write update manifest | |
845 self.writeUpdateManifest() | |
846 | |
847 # retire old builds | |
848 versions = self.retireBuilds() | |
849 # update index page | |
850 self.updateIndex(versions) | |
851 finally: | |
852 # clean up | |
853 if self.tempdir: | |
854 shutil.rmtree(self.tempdir, ignore_errors=True) | |
855 | |
856 | |
857 def main(download=False): | |
700 """ | 858 """ |
701 main function for createNightlies.py | 859 main function for createNightlies.py |
702 """ | 860 """ |
703 nightlyConfig = ConfigParser.SafeConfigParser() | 861 nightlyConfig = ConfigParser.SafeConfigParser() |
704 nightlyConfigFile = get_config().get('extensions', 'nightliesData') | 862 nightlyConfigFile = get_config().get('extensions', 'nightliesData') |
863 | |
705 if os.path.exists(nightlyConfigFile): | 864 if os.path.exists(nightlyConfigFile): |
706 nightlyConfig.read(nightlyConfigFile) | 865 nightlyConfig.read(nightlyConfigFile) |
707 | 866 |
708 # build all extensions specified in the configuration file | 867 # build all extensions specified in the configuration file |
709 # and generate changelogs and documentations for each: | 868 # and generate changelogs and documentations for each: |
710 data = None | 869 data = None |
711 for repo in Configuration.getRepositoryConfigurations(nightlyConfig): | 870 for repo in Configuration.getRepositoryConfigurations(nightlyConfig): |
712 build = None | 871 build = None |
713 try: | 872 try: |
714 build = NightlyBuild(repo) | 873 build = NightlyBuild(repo) |
715 if build.hasChanges(): | 874 if download: |
875 build.download() | |
876 elif build.hasChanges(): | |
716 build.run() | 877 build.run() |
717 except Exception as ex: | 878 except Exception as ex: |
718 logging.error('The build for %s failed:', repo) | 879 logging.error('The build for %s failed:', repo) |
719 logging.exception(ex) | 880 logging.exception(ex) |
720 | 881 |
721 file = open(nightlyConfigFile, 'wb') | 882 file = open(nightlyConfigFile, 'wb') |
722 nightlyConfig.write(file) | 883 nightlyConfig.write(file) |
723 | 884 |
724 | 885 |
725 if __name__ == '__main__': | 886 if __name__ == '__main__': |
726 main() | 887 parser = argparse.ArgumentParser() |
888 parser.add_argument('--download', action='store_true', default=False) | |
889 args = parser.parse_args() | |
890 main(args.download) | |
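With the new argparse entry point, the script has two modes: the default pass clones, builds and uploads, while `--download` fetches the signed gecko builds recorded in the lockfile and publishes them. A usage sketch (the module path is an assumption based on the sitescripts layout, not part of this patch):

```python
# Usage sketch; module path assumed, environment/config setup omitted.
from sitescripts.extensions.bin import createNightlies

createNightlies.main()               # build pass: build nightlies, upload gecko builds to AMO
createNightlies.main(download=True)  # later pass: download signed builds listed in the lockfile
```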