OLD | NEW |
1 # This file is part of the Adblock Plus web scripts, | 1 # This file is part of the Adblock Plus web scripts, |
2 # Copyright (C) 2006-present eyeo GmbH | 2 # Copyright (C) 2006-present eyeo GmbH |
3 # | 3 # |
4 # Adblock Plus is free software: you can redistribute it and/or modify | 4 # Adblock Plus is free software: you can redistribute it and/or modify |
5 # it under the terms of the GNU General Public License version 3 as | 5 # it under the terms of the GNU General Public License version 3 as |
6 # published by the Free Software Foundation. | 6 # published by the Free Software Foundation. |
7 # | 7 # |
8 # Adblock Plus is distributed in the hope that it will be useful, | 8 # Adblock Plus is distributed in the hope that it will be useful, |
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of | 9 # but WITHOUT ANY WARRANTY; without even the implied warranty of |
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | 10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
(...skipping 134 matching lines...)
145 regexp = re.compile(r'\s*\[((?:\w+,)*\w+)\]$') | 145 regexp = re.compile(r'\s*\[((?:\w+,)*\w+)\]$') |
146 match = re.search(regexp, value) | 146 match = re.search(regexp, value) |
147 if match: | 147 if match: |
148 value = re.sub(regexp, r'', value) | 148 value = re.sub(regexp, r'', value) |
149 for keyword in match.group(1).split(','): | 149 for keyword in match.group(1).split(','): |
150 keyword = keyword.lower() | 150 keyword = keyword.lower() |
151 if keyword in keywords: | 151 if keyword in keywords: |
152 keywords[keyword] = True | 152 keywords[keyword] = True |
153 else: | 153 else: |
154 warn('Unknown keyword %s given for attribute %s in %s' % (keyword, key, path)) | 154 warn('Unknown keyword %s given for attribute %s in %s' % (keyword, key, path)) |
155 (name, url) = (self.name, value) | 155 name, url = (self.name, value) |
156 if key == 'variant': | 156 if key == 'variant': |
157 match = re.search(r'(.+?)\s+(\S+)$', value) | 157 match = re.search(r'(.+?)\s+(\S+)$', value) |
158 if match: | 158 if match: |
159 (name, url) = (match.group(1), match.group(2)) | 159 name, url = (match.group(1), match.group(2)) |
160 else: | 160 else: |
161 warn('Invalid variant format in %s, no name given?' % (path)) | 161 warn('Invalid variant format in %s, no name given?' % path) |
162 if not _validate_URL(url): | 162 if not _validate_URL(url): |
163 warn('Invalid list URL %s given in %s' % (url, path)) | 163 warn('Invalid list URL %s given in %s' % (url, path)) |
164 self.variants.append([name, url, keywords['complete']]) | 164 self.variants.append([name, url, keywords['complete']]) |
165 if keywords['recommendation']: | 165 if keywords['recommendation']: |
166 self._data['recommendation'] = self._data['variants'][-1] | 166 self._data['recommendation'] = self._data['variants'][-1] |
167 self._data['catchall'] = keywords['catchall'] | 167 self._data['catchall'] = keywords['catchall'] |
168 | 168 |
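Note: a minimal sketch of the keyword and variant parsing shown above, using a hypothetical attribute value (the list name and URL are made up). A trailing [keyword,...] block is stripped from the value and each recognised keyword toggles a flag; for 'variant' attributes the remaining value then splits into a display name and a URL.

    import re

    value = 'EasyList https://example.com/easylist.txt [recommendation,catchall]'
    keywords = {'recommendation': False, 'catchall': False, 'complete': False}

    regexp = re.compile(r'\s*\[((?:\w+,)*\w+)\]$')
    match = re.search(regexp, value)
    if match:
        # Strip the trailing keyword block and record the recognised keywords.
        value = re.sub(regexp, '', value)  # 'EasyList https://example.com/easylist.txt'
        for keyword in match.group(1).split(','):
            keyword = keyword.lower()
            if keyword in keywords:
                keywords[keyword] = True

    # A 'variant' value additionally splits into a name and a trailing URL.
    variant = re.search(r'(.+?)\s+(\S+)$', value)
    if variant:
        name, url = variant.group(1), variant.group(2)  # ('EasyList', 'https://example.com/easylist.txt')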
169 elif key == 'deprecated' or key == 'unavailable': | 169 elif key == 'deprecated' or key == 'unavailable': |
170 self._data[key] = True | 170 self._data[key] = True |
171 | 171 |
(...skipping 20 matching lines...)
192 for group in mandatory: | 192 for group in mandatory: |
193 found = False | 193 found = False |
194 for key in group: | 194 for key in group: |
195 if self._data[key] != None: | 195 if self._data[key] != None: |
196 found = True | 196 found = True |
197 if not found: | 197 if not found: |
198 str = ', '.join(group) | 198 str = ', '.join(group) |
199 warn('None of the attributes %s present in %s' % (str, path)) | 199 warn('None of the attributes %s present in %s' % (str, path)) |
200 | 200 |
201 if len(self.variants) == 0: | 201 if len(self.variants) == 0: |
202 warn('No list locations given in %s' % (path)) | 202 warn('No list locations given in %s' % path) |
203 if self.type not in ('ads', 'anti-adblock', 'other', 'malware', 'social', 'privacy'): | 203 if self.type not in ('ads', 'anti-adblock', 'other', 'malware', 'social', 'privacy'): |
205 if self.digest != 'daily' and self.digest != 'weekly': | 205 if self.digest != 'daily' and self.digest != 'weekly': |
206 warn('Unknown digest frequency given in %s' % (path)) | 206 warn('Unknown digest frequency given in %s' % path) |
207 if not self.digestDay[0:3].lower() in weekdays: | 207 if not self.digestDay[0:3].lower() in weekdays: |
208 warn('Unknown digest day given in %s' % (path)) | 208 warn('Unknown digest day given in %s' % path) |
209 self.digestDay = 'wed' | 209 self.digestDay = 'wed' |
210 self.digestDay = weekdays[self.digestDay[0:3].lower()] | 210 self.digestDay = weekdays[self.digestDay[0:3].lower()] |
211 if self.recommendation is not None and self.type == 'ads' and not (self.languages and self.languages.strip()): | 211 if self.recommendation is not None and self.type == 'ads' and not (self.languages and self.languages.strip()): |
212 warn('Recommendation without languages in %s' % (path)) | 212 warn('Recommendation without languages in %s' % path) |
213 if len(self.supplements) == 0: | 213 if len(self.supplements) == 0: |
214 for [name, url, complete] in self.variants: | 214 for [name, url, complete] in self.variants: |
215 if complete: | 215 if complete: |
216 warn('Variant marked as complete for non-supplemental subscription in %s' % (path)) | 216 warn('Variant marked as complete for non-supplemental subscription in %s' % path) |
217 break | 217 break |
218 | 218 |
219 self.variants.sort(key=lambda variant: (self.recommendation == variant) * 2 + variant[2], reverse=True) | 219 self.variants.sort(key=lambda variant: (self.recommendation == variant) * 2 + variant[2], reverse=True) |
220 | 220 |
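Note: the sort key above orders each [name, url, complete] entry so that the recommended variant comes first, complete variants next, and everything else last. A minimal sketch with hypothetical variants:

    recommendation = ['EasyList', 'https://example.com/easylist.txt', False]
    variants = [
        ['EasyList without element hiding', 'https://example.com/easylist_noelemhide.txt', False],
        ['EasyList complete', 'https://example.com/easylist_complete.txt', True],
        recommendation,
    ]
    # Key: 2 for the recommended variant, plus 1 for complete variants;
    # reverse=True puts the highest-scoring entries first.
    variants.sort(key=lambda variant: (recommendation == variant) * 2 + variant[2],
                  reverse=True)
    # Resulting order: recommendation, then the complete variant, then the rest.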
221 | 221 |
222 def parse_file(path, data): | 222 def parse_file(path, data): |
223 return Subscription(path, data) | 223 return Subscription(path, data) |
224 | 224 |
225 | 225 |
226 def calculate_supplemented(lists): | 226 def calculate_supplemented(lists): |
(...skipping 21 matching lines...)
248 data = subprocess.check_output(['hg', 'archive', '-R', repo, '-r', 'default', '-t', 'tar', '-I', os.path.join(repo, '*.subscription'), '-']) | 248 data = subprocess.check_output(['hg', 'archive', '-R', repo, '-r', 'default', '-t', 'tar', '-I', os.path.join(repo, '*.subscription'), '-']) |
249 | 249 |
250 result = {} | 250 result = {} |
251 with tarfile.open(mode='r:', fileobj=StringIO(data)) as archive: | 251 with tarfile.open(mode='r:', fileobj=StringIO(data)) as archive: |
252 for fileinfo in archive: | 252 for fileinfo in archive: |
253 filedata = parse_file(fileinfo.name, codecs.getreader('utf8')(archive.extractfile(fileinfo))) | 253 filedata = parse_file(fileinfo.name, codecs.getreader('utf8')(archive.extractfile(fileinfo))) |
254 if filedata.unavailable: | 254 if filedata.unavailable: |
255 continue | 255 continue |
256 | 256 |
257 if filedata.name in result: | 257 if filedata.name in result: |
258 warn('Name %s is claimed by multiple files' % (filedata.name)) | 258 warn('Name %s is claimed by multiple files' % filedata.name) |
259 result[filedata.name] = filedata | 259 result[filedata.name] = filedata |
260 | 260 |
261 calculate_supplemented(result) | 261 calculate_supplemented(result) |
262 return result | 262 return result |
263 | 263 |
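Note: the loading code above reads the subscription files from an in-memory tarball produced by 'hg archive' rather than from a checkout on disk. A minimal sketch of that pattern (the repository path is hypothetical; Python 2, matching the StringIO usage in this file):

    import subprocess
    import tarfile
    from StringIO import StringIO

    # Ask Mercurial for a tar archive of the default branch, written to stdout.
    data = subprocess.check_output(['hg', 'archive', '-R', '/path/to/repo',
                                    '-r', 'default', '-t', 'tar', '-'])

    # Read the archive straight from memory, without touching the filesystem.
    with tarfile.open(mode='r:', fileobj=StringIO(data)) as archive:
        for fileinfo in archive:
            if not fileinfo.isreg():
                continue  # skip directory entries; extractfile() returns None for them
            contents = archive.extractfile(fileinfo).read().decode('utf-8')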
264 | 264 |
265 def getFallbackData(): | 265 def getFallbackData(): |
266 repo = os.path.abspath(get_config().get('subscriptions', 'repository')) | 266 repo = os.path.abspath(get_config().get('subscriptions', 'repository')) |
267 redirectdata = subprocess.check_output(['hg', '-R', repo, 'cat', '-r', 'default', os.path.join(repo, 'redirects')]) | 267 redirectdata = subprocess.check_output(['hg', '-R', repo, 'cat', '-r', 'default', os.path.join(repo, 'redirects')]) |
268 gonedata = subprocess.check_output(['hg', '-R', repo, 'cat', '-r', 'default', os.path.join(repo, 'gone')]) | 268 gonedata = subprocess.check_output(['hg', '-R', repo, 'cat', '-r', 'default', os.path.join(repo, 'gone')]) |
269 return (redirectdata, gonedata) | 269 return (redirectdata, gonedata) |
270 | 270 |
271 | 271 |
272 def _validate_URL(url): | 272 def _validate_URL(url): |
273 parse_result = urlparse(url) | 273 parse_result = urlparse(url) |
274 return parse_result.scheme in ('http', 'https') and parse_result.netloc != '' | 274 return parse_result.scheme in ('http', 'https') and parse_result.netloc != '' |
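Note: _validate_URL accepts only absolute http(s) URLs with a non-empty host. A minimal sketch of how the check behaves on a few hypothetical inputs (assuming the Python 2 'from urlparse import urlparse' import used by this module):

    from urlparse import urlparse  # urllib.parse.urlparse on Python 3

    def _validate_URL(url):
        parse_result = urlparse(url)
        return parse_result.scheme in ('http', 'https') and parse_result.netloc != ''

    _validate_URL('https://example.com/easylist.txt')  # True
    _validate_URL('ftp://example.com/easylist.txt')    # False: scheme is not http(s)
    _validate_URL('https://')                          # False: no network location
    _validate_URL('easylist.txt')                      # False: relative, no scheme or host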