# Copyright 2013-2021 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at https://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at https://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.

"""
Debian-specific tasks.
"""

import collections
import itertools
import json
import logging
import os
import re
import warnings
from enum import Enum

from bs4 import BeautifulSoup as soup, MarkupResemblesLocatorWarning

from debian import deb822, debian_support
from debian.debian_support import AptPkgVersion

import debianbts

from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import transaction
from django.db.models import Prefetch
from django.utils.http import urlencode

import yaml

from distro_tracker.accounts.models import UserEmail
from distro_tracker.core.models import (
    ActionItem,
    ActionItemType,
    BinaryPackageBugStats,
    BinaryPackageName,
    BugDisplayManagerMixin,
    PackageBugStats,
    PackageData,
    PackageName,
    Repository,
    SourcePackageDeps,
    SourcePackageName
)
from distro_tracker.core.tasks import BaseTask
from distro_tracker.core.tasks.mixins import ImportExternalData, PackageTagging
from distro_tracker.core.tasks.schedulers import IntervalScheduler
from distro_tracker.core.utils import get_or_none
from distro_tracker.core.utils.http import get_resource_text
from distro_tracker.core.utils.misc import get_data_checksum
from distro_tracker.core.utils.packages import (
    html_package_list,
    package_url
)
from distro_tracker.vendor.debian.models import (
    BuildLogCheckStats,
    LintianStats,
    PackageExcuses,
    PackageTransition,
    UbuntuPackage
)

from .models import DebianContributor

logger = logging.getLogger(__name__)
logger_input = logging.getLogger('distro_tracker.input')

warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning)


class RetrieveDebianMaintainersTask(BaseTask):
    """
    Retrieves (and updates if necessary) a list of Debian Maintainers.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def execute_main(self):
        url = "https://ftp-master.debian.org/dm.txt"
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # No need to do anything if the cached item was still not updated
            return

        maintainers = {}
        lines = content.splitlines()
        for stanza in deb822.Deb822.iter_paragraphs(lines):
            if 'Uid' in stanza and 'Allow' in stanza:
                # Allow is a comma-separated string of 'package (DD fpr)' items,
                # where DD fpr is the fingerprint of the DD that granted the
                # permission
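                # The Uid field is expected to look like "Full Name <email>";
                # keep only the bare email address.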
                name, email = stanza['Uid'].rsplit(' ', 1)
                email = email.strip('<>')
                for pair in stanza['Allow'].split(','):
                    pair = pair.strip()
                    pkg, dd_fpr = pair.split()
                    maintainers.setdefault(email, [])
                    maintainers[email].append(pkg)

        # Now update the developer information
        with transaction.atomic():
            # Reset all old maintainers first.
            qs = DebianContributor.objects.filter(is_debian_maintainer=True)
            qs.update(is_debian_maintainer=False)

            for email, packages in maintainers.items():
                try:
                    user_email, _ = UserEmail.objects.get_or_create(email=email)
                except ValidationError:
                    logger_input.info('%s refers to invalid email "%s".',
                                      url, email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.is_debian_maintainer = True
                contributor.allowed_packages = packages
                contributor.save()


class RetrieveLowThresholdNmuTask(BaseTask):
    """
    Updates the list of Debian Maintainers who agree with the low-threshold
    NMU.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def _retrieve_emails(self):
        """
        Helper function which obtains the list of emails of maintainers that
        agree with the low-threshold NMU.
        """
        url = 'https://wiki.debian.org/LowThresholdNmu?action=raw'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        emails = []
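        # The wiki page links to qa.debian.org developer.php pages; extract
        # the login from each such link and turn it into an email address.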
        devel_php_RE = re.compile(
            r'https?://qa\.debian\.org/developer\.php\?login=([^\s&|]+)')
        word_RE = re.compile(r'^\w+$')
        for line in content.splitlines():
            match = devel_php_RE.search(line)
            while match:  # look for several matches on the same line
                email = None
                login = match.group(1)
                if word_RE.match(login):
                    email = login + '@debian.org'
                elif login.find('@') >= 0:
                    email = login
                if email:
                    emails.append(email)
                line = line[match.end():]
                match = devel_php_RE.search(line)
        return emails

    def execute_main(self):
        emails = self._retrieve_emails()
        with transaction.atomic():
            # Reset all threshold flags first.
            qs = DebianContributor.objects.filter(
                agree_with_low_threshold_nmu=True)
            qs.update(agree_with_low_threshold_nmu=False)

            for email in emails:
                try:
                    email, _ = UserEmail.objects.get_or_create(email=email)
                except ValidationError:
                    logger_input.info(
                        'LowThresholdNmu refers to invalid email "%s".', email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=email)

                contributor.agree_with_low_threshold_nmu = True
                contributor.save()


class UpdatePackageBugStats(BaseTask, BugDisplayManagerMixin):
    """
    Updates the BTS bug stats for all packages (source, binary and pseudo).
    Creates :class:`distro_tracker.core.ActionItem` instances for packages
    which have bugs tagged help or patch.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    PATCH_BUG_ACTION_ITEM_TYPE_NAME = 'debian-patch-bugs-warning'
    HELP_BUG_ACTION_ITEM_TYPE_NAME = 'debian-help-bugs-warning'

    PATCH_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged patch in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    HELP_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged help in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/patch-bugs-action-item.html'
    HELP_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/help-bugs-action-item.html'

    bug_categories = (
        'rc',
        'normal',
        'wishlist',
        'fixed',
        'patch',
    )

    def initialize(self, *args, **kwargs):
        super(UpdatePackageBugStats, self).initialize(*args, **kwargs)
        # The :class:`distro_tracker.core.models.ActionItemType` instances which
        # this task can create.
        self.patch_item_type = ActionItemType.objects.create_or_update(
            type_name=self.PATCH_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE)
        self.help_item_type = ActionItemType.objects.create_or_update(
            type_name=self.HELP_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.HELP_ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_tagged_bug_stats(self, tag, user=None):
        """
        Using the BTS interface, retrieves the statistics of bugs with a
        particular tag.

        :param tag: The tag for which the statistics are required.
        :type tag: string
        :param user: The email of the user who tagged the bug with the given
            tag.
        :type user: string

        :returns: A dict mapping package names to the count of bugs with the
            given tag.
        """
        debian_ca_bundle = '/etc/ssl/ca-debian/ca-certificates.crt'
        if os.path.exists(debian_ca_bundle):
            os.environ['SSL_CERT_FILE'] = debian_ca_bundle
        if user:
            bug_numbers = debianbts.get_usertag(user, tags=[tag]).get(tag, [])
        else:
            bug_numbers = debianbts.get_bugs(tag=tag)

        # Match each retrieved bug ID to a package and then find the aggregate
        # count for each package.
        bug_stats = {}
        bugs = debianbts.get_status(bug_numbers)
        for bug in bugs:
            if bug.done or bug.fixed_versions or bug.pending == 'done':
                continue

            bug_stats.setdefault(bug.package, 0)
            bug_stats[bug.package] += 1

        return bug_stats

    def _extend_bug_stats(self, bug_stats, extra_stats, category_name):
        """
        Helper method which adds extra bug stats to an already existing list of
        stats.

        :param bug_stats: An already existing list of bug stats. Maps package
            names to list of bug category descriptions.
        :type bug_stats: dict
        :param extra_stats: Extra bug stats which should be added to
            ``bug_stats``. Maps package names to integers representing bug
            counts.
        :type extra_stats: dict
        :param category_name: The name of the bug category which is being added
        :type category_name: string
        """
        for package, count in extra_stats.items():
            bug_stats.setdefault(package, [])
            bug_stats[package].append({
                'category_name': category_name,
                'bug_count': count,
            })

    def _create_patch_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged patch.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.PATCH_BUG_ACTION_ITEM_TYPE_NAME)

        if 'patch' not in bug_stats or bug_stats['patch']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged patch anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged patch, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.patch_item_type)

        bug_count = bug_stats['patch']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'patch')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = \
            self.PATCH_ITEM_SHORT_DESCRIPTION.format(url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'merged_count': bug_stats['patch'].get('merged_count', 0),
            'url': url,
            'merged_url': self.bug_manager.get_bug_tracker_url(
                package.name, 'source', 'patch-merged'),
        }
        action_item.save()

    def _create_help_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged help.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.HELP_BUG_ACTION_ITEM_TYPE_NAME)

        if 'help' not in bug_stats or bug_stats['help']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged help anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged help, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.help_item_type)

        bug_count = bug_stats['help']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'help')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = self.HELP_ITEM_SHORT_DESCRIPTION.format(
            url=url, count=count)
        # Set additional URLs in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'url': url,
        }
        action_item.save()

    def _create_action_items(self, package_bug_stats):
        """
        Method which creates a :class:`distro_tracker.core.models.ActionItem`
        instance for a package based on the given package stats.

        For now, an action item is created if the package either has bugs
        tagged as help or patch.
        """
        # Transform the bug stats to a structure easier to pass to functions
        # for particular bug-category action items.
        bug_stats = {
            category['category_name']: category
            for category in package_bug_stats.stats
        }
        package = package_bug_stats.package
        self._create_patch_bug_action_item(package, bug_stats)
        self._create_help_bug_action_item(package, bug_stats)

    def _get_udd_bug_stats(self):
        url = 'https://udd.debian.org/cgi-bin/ddpo-bugs.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Each line in the response should be bug stats for a single package
        bug_stats = {}
        for line in response_content.splitlines():
            line = line.strip()
            try:
                package_name, bug_counts = line, ''
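                # Each line is expected to look like "src:<package>:<counts>"
                # or "<package>:<counts>"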
                if line.startswith('src:'):
                    src, package_name, bug_counts = line.split(':', 2)
                else:
                    package_name, bug_counts = line.split(':', 1)
                # Merged counts are in parentheses so remove those before
                # splitting the numbers
                bug_counts = re.sub(r'[()]', ' ', bug_counts).split()
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.warning(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts, exc_info=1)
                continue

            # Match the extracted counts with category names
            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                    'merged_count': merged_count,
                }
                for category_name, (bug_count, merged_count) in zip(
                    self.bug_categories, zip(bug_counts[::2], bug_counts[1::2]))
            ]

        return bug_stats

    def _remove_obsolete_action_items(self, package_names):
        """
        Removes action items for packages which no longer have any bug stats.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.patch_item_type, self.help_item_type],
            non_obsolete_packages=package_names)

    def update_source_and_pseudo_bugs(self):
        """
        Performs the update of bug statistics for source and pseudo packages.
        """
        # First get the bug stats exposed by the UDD.
        bug_stats = self._get_udd_bug_stats()
        if not bug_stats:
            bug_stats = {}

        # Add in help bugs from the BTS interface
        try:
            help_bugs = self._get_tagged_bug_stats('help')
            self._extend_bug_stats(bug_stats, help_bugs, 'help')
        except RuntimeError:
            logger.exception("Could not get bugs tagged help")

        # Add in newcomer bugs from the BTS interface
        try:
            newcomer_bugs = self._get_tagged_bug_stats('newcomer')
            self._extend_bug_stats(bug_stats, newcomer_bugs, 'newcomer')
        except RuntimeError:
            logger.exception("Could not get bugs tagged newcomer")

        with transaction.atomic():
            # Clear previous stats
            PackageBugStats.objects.all().delete()
            self._remove_obsolete_action_items(bug_stats.keys())
            # Get all packages which have updated stats, along with their
            # action items in 2 DB queries.
            packages = PackageName.objects.filter(name__in=bug_stats.keys())
            packages = packages.prefetch_related('action_items')

            # Update stats and action items.
            stats = []
            for package in packages:
                # Save the raw package bug stats
                package_bug_stats = PackageBugStats(
                    package=package, stats=bug_stats[package.name])
                stats.append(package_bug_stats)

                # Add action items for the package.
                self._create_action_items(package_bug_stats)

            PackageBugStats.objects.bulk_create(stats)

    def update_binary_bugs(self):
        """
        Performs the update of bug statistics for binary packages.
        """
        url = 'https://udd.debian.org/cgi-bin/bugs-binpkgs-pts.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Extract known binary package bug stats: each line is a separate pkg
        bug_stats = {}
        for line in response_content.splitlines():
            package_name, bug_counts = line.split(None, 1)
            bug_counts = bug_counts.split()
            try:
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.exception(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts)
                continue

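            # Pair each count with its bug category, following the order of
            # self.bug_categories.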
            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                }
                for category_name, bug_count in zip(
                    self.bug_categories, bug_counts)
            ]

        with transaction.atomic():
            # Clear previous stats
            BinaryPackageBugStats.objects.all().delete()
            packages = \
                BinaryPackageName.objects.filter(name__in=bug_stats.keys())
            # Create new stats in a single query
            stats = [
                BinaryPackageBugStats(package=package,
                                      stats=bug_stats[package.name])
                for package in packages
            ]
            BinaryPackageBugStats.objects.bulk_create(stats)

    def execute_main(self):
        # Stats for source and pseudo packages are retrieved from a different
        # resource (with a different structure) than stats for binary packages.
        self.update_source_and_pseudo_bugs()
        self.update_binary_bugs()


class UpdateLintianStatsTask(BaseTask):
    """
    Updates packages' lintian stats.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 4

    ACTION_ITEM_TYPE_NAME = 'lintian-warnings-and-errors'
    ITEM_DESCRIPTION = 'lintian reports <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/lintian-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateLintianStatsTask, self).initialize(*args, **kwargs)
        self.lintian_action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def get_lintian_stats(self):
        url = 'https://udd.debian.org/lintian-qa-list.txt'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        all_stats = {}
        categories = (
            'errors',
            'warnings',
            'pedantics',
            'experimentals',
            'overriddens',
        )
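        # Each line is expected to contain a package name followed by one
        # count per category, in the order listed above.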
        for line in content.splitlines():
            package, stats = line.split(None, 1)
            stats = stats.split()
            try:
                all_stats[package] = {
                    category: int(count)
                    for count, category in zip(stats, categories)
                }
            except ValueError:
                logger.exception(
                    'Failed to parse lintian information for %s: %s',
                    package, line)
                continue

        return all_stats

    def update_action_item(self, package, lintian_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        :class:`LintianStats <distro_tracker.vendor.debian.models.LintianStats>`
        given in ``package_stats``. If the package has errors or warnings, an
        :class:`ActionItem` is created.
        """
        package_stats = lintian_stats.stats
        warnings, errors = (
            package_stats.get('warnings'), package_stats.get('errors', 0))
        # Get the old action item for this warning, if it exists.
        lintian_action_item = package.get_action_item_for_type(
            self.lintian_action_item_type.type_name)
        if not warnings and not errors:
            if lintian_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                lintian_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if lintian_action_item is None:
            lintian_action_item = ActionItem(
                package=package,
                item_type=self.lintian_action_item_type)

        lintian_url = lintian_stats.get_lintian_url()
        new_extra_data = {
            'warnings': warnings,
            'errors': errors,
            'lintian_url': lintian_url,
        }
        if lintian_action_item.extra_data:
            old_extra_data = lintian_action_item.extra_data
            if (old_extra_data['warnings'] == warnings and
                    old_extra_data['errors'] == errors):
                # No need to update
                return

        lintian_action_item.extra_data = new_extra_data

        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')

        lintian_action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=lintian_url,
            report=report)

        # If there are errors make the item a high severity issue
        if errors:
            lintian_action_item.severity = ActionItem.SEVERITY_HIGH

        lintian_action_item.save()

    def execute_main(self):
        all_lintian_stats = self.get_lintian_stats()
        if not all_lintian_stats:
            return

        # Discard all old stats
        LintianStats.objects.all().delete()

        packages = PackageName.objects.filter(name__in=all_lintian_stats.keys())
        packages = packages.prefetch_related('action_items')
        # Remove action items for packages which no longer have associated
        # lintian data.
        ActionItem.objects.delete_obsolete_items(
            [self.lintian_action_item_type], all_lintian_stats.keys())

        stats = []
        for package in packages:
            package_stats = all_lintian_stats[package.name]
            # Save the raw lintian stats.
            lintian_stats = LintianStats(package=package, stats=package_stats)
            stats.append(lintian_stats)
            # Create an ActionItem if there are errors or warnings
            self.update_action_item(package, lintian_stats)

        LintianStats.objects.bulk_create(stats)


class UpdateAppStreamStatsTask(BaseTask):
    """
    Updates packages' AppStream issue hints data.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'appstream-issue-hints'
    ITEM_DESCRIPTION = 'AppStream hints: {report} for {packageurllist}'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/appstream-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateAppStreamStatsTask, self).initialize(*args, **kwargs)
        self.appstream_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)
        self._tag_severities = {}

    def _load_tag_severities(self):
        url = 'https://appstream.debian.org/hints/sid/hint-definitions.json'
        json_data = get_resource_text(url, force_update=True)

        data = json.loads(json_data)
        for tag, info in data.items():
            self._tag_severities[tag] = info['severity']

    def _load_appstream_hint_stats(self, section, arch, all_stats={}):
        url = 'https://appstream.debian.org/hints/sid/{}/Hints-{}.json.gz' \
            .format(section, arch)
        hints_json = get_resource_text(url, force_update=self.force_update)

        hints = json.loads(hints_json)
        for hint in hints:
            pkid = hint['package']
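            # The package id is a '/'-separated string whose first component
            # is the binary package name.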
            parts = pkid.split('/')
            package_name = parts[0]

            # get the source package for this binary package name
            src_pkgname = None
            if SourcePackageName.objects.exists_with_name(package_name):
                package = SourcePackageName.objects.get(name=package_name)
                src_pkgname = package.name
            elif BinaryPackageName.objects.exists_with_name(package_name):
                bin_package = BinaryPackageName.objects.get(name=package_name)
                package = bin_package.main_source_package_name
                src_pkgname = package.name
            else:
                src_pkgname = package_name

            if src_pkgname not in all_stats:
                all_stats[src_pkgname] = {}
            if package_name not in all_stats[src_pkgname]:
                all_stats[src_pkgname][package_name] = {}

            for cid, h in hint['hints'].items():
                for e in h:
                    severity = self._tag_severities[e['tag']]
                    if severity == "error":
                        sevkey = "errors"
                    elif severity == "warning":
                        sevkey = "warnings"
                    elif severity == "info":
                        sevkey = "infos"
                    else:
                        continue
                    if sevkey not in all_stats[src_pkgname][package_name]:
                        all_stats[src_pkgname][package_name][sevkey] = 1
                    else:
                        all_stats[src_pkgname][package_name][sevkey] += 1

        return all_stats

    def _get_appstream_url(self, package, bin_pkgname):
        """
        Returns the AppStream URL for the given ``package`` and binary
        package name ``bin_pkgname``.
        """

        src_package = get_or_none(SourcePackageName, pk=package.pk)
        if not src_package:
            return '#'

        if not src_package.main_version:
            return '#'

        component = 'main'
        main_entry = src_package.main_entry
        if main_entry:
            component = main_entry.component
            if not component:
                component = 'main'

        return (
            'https://appstream.debian.org/sid/{}/issues/{}.html'
            .format(component, bin_pkgname)
        )

    def _create_final_stats_report(self, package, package_stats):
        """
        Returns a transformed statistics report to be stored in the database.
        """

        as_report = package_stats.copy()
        for bin_package in list(as_report.keys()):
            # we currently don't want to display info-type hints
            as_report[bin_package].pop('infos', None)
            if as_report[bin_package]:
                as_report[bin_package]['url'] = \
                    self._get_appstream_url(package, bin_package)
            else:
                as_report.pop(bin_package)
        return as_report

    def update_action_item(self, package, package_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        AppStream hint statistics given in ``package_stats``.
        If the package has errors or warnings an
        :class:`ActionItem` is created.
        """

        total_warnings = 0
        total_errors = 0
        packageurllist = []
        for bin_pkgname, info in package_stats.items():
            total_warnings += info.get('warnings', 0)
            total_errors += info.get('errors', 0)
            url = self._get_appstream_url(package, bin_pkgname)
            packageurllist.append(f'<a href="{url}">{bin_pkgname}</a>')

        # Get the old action item for this warning, if it exists.
        appstream_action_item = package.get_action_item_for_type(
            self.appstream_action_item_type.type_name)
        if not total_warnings and not total_errors:
            if appstream_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                appstream_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if appstream_action_item is None:
            appstream_action_item = ActionItem(
                package=package,
                item_type=self.appstream_action_item_type)

        as_report = self._create_final_stats_report(package, package_stats)

        if appstream_action_item.extra_data:
            old_extra_data = appstream_action_item.extra_data
            if old_extra_data == as_report:
                # No need to update
                return

        appstream_action_item.extra_data = as_report

        if total_errors and total_warnings:
            short_report = '{} error{} and {} warning{}'.format(
                total_errors,
                's' if total_errors > 1 else '',
                total_warnings,
                's' if total_warnings > 1 else '')
        elif total_errors:
            short_report = '{} error{}'.format(
                total_errors,
                's' if total_errors > 1 else '')
        elif total_warnings:
            short_report = '{} warning{}'.format(
                total_warnings,
                's' if total_warnings > 1 else '')

        appstream_action_item.short_description = \
            self.ITEM_DESCRIPTION.format(packageurllist=",".join(
                packageurllist), report=short_report)

        # If there are errors make the item a high severity issue;
        # otherwise, make sure to set the severity as normal in case the item
        # existed already
        if total_errors:
            appstream_action_item.severity = ActionItem.SEVERITY_HIGH
        else:
            appstream_action_item.severity = ActionItem.SEVERITY_NORMAL

        appstream_action_item.save()

    def execute_main(self):
        self._load_tag_severities()
        all_stats = {}
        repository = Repository.objects.get(default=True)
        arch = "amd64"
        for component in repository.components:
            self._load_appstream_hint_stats(component, arch, all_stats)
        if not all_stats:
            return

        with transaction.atomic():
            # Delete obsolete data
            PackageData.objects.filter(key='appstream').delete()

            packages = PackageName.objects.filter(name__in=all_stats.keys())
            packages = packages.prefetch_related('action_items')

            stats = []
            for package in packages:
                package_stats = all_stats[package.name]
                stats.append(
                    PackageData(
                        package=package,
                        key='appstream',
                        value=package_stats
                    )
                )

                # Create an ActionItem if there are errors or warnings
                self.update_action_item(package, package_stats)

            PackageData.objects.bulk_create(stats)
            # Remove action items for packages which no longer have associated
            # AppStream hints.
            ActionItem.objects.delete_obsolete_items(
                [self.appstream_action_item_type], all_stats.keys())


class UpdateTransitionsTask(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600

    REJECT_LIST_URL = 'https://ftp-master.debian.org/transitions.yaml'
    PACKAGE_TRANSITION_LIST_URL = (
        'https://release.debian.org/transitions/export/packages.yaml')

    def _get_yaml_resource(self, url, **kwargs):
        """
        Gets the YAML resource at the given URL and returns it as a Python
        object.
        """
        content = get_resource_text(url, **kwargs)
        if content:
            return yaml.safe_load(content)

    def _add_reject_transitions(self, packages):
        """
        Adds the transitions which cause uploads to be rejected to the
        given ``packages`` dict.
        """
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL)
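        # The reject list maps each transition name to a description that
        # includes the list of packages affected by the transition.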
        for key, transition in reject_list.items():
            for package in transition['packages']:
                packages.setdefault(package, {})
                packages[package].setdefault(key, {})
                packages[package][key]['reject'] = True
                packages[package][key]['status'] = 'ongoing'

    def _add_package_transition_list(self, packages):
        """
        Adds the ongoing and planned transitions to the given ``packages``
        dict.
        """
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL)

        wanted_transition_statuses = ('ongoing', 'planned')
        for package_info in package_transition_list:
            package_name = package_info['name']
            for transition_name, status in package_info['list']:
                if status not in wanted_transition_statuses:
                    # Skip transitions with an unwanted status
                    continue

                packages.setdefault(package_name, {})
                packages[package_name].setdefault(transition_name, {})
                packages[package_name][transition_name]['status'] = status

    def execute_main(self):
        # Update the relevant resources first
        kwargs = {
            'force_update': self.force_update,
            'only_if_updated': True,
        }
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL, **kwargs)
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL, **kwargs)

        if reject_list is None and package_transition_list is None:
            # Nothing to do - at least one needs to be updated...
            return

        package_transitions = {}
        self._add_reject_transitions(package_transitions)
        self._add_package_transition_list(package_transitions)

        PackageTransition.objects.all().delete()
        # Get the packages which have transitions
        packages = PackageName.objects.filter(
            name__in=package_transitions.keys())
        transitions = []
        for package in packages:
            for transition_name, data in \
                    package_transitions[package.name].items():
                transitions.append(PackageTransition(
                    package=package,
                    transition_name=transition_name,
                    status=data.get('status', None),
                    reject=data.get('reject', False)))

        PackageTransition.objects.bulk_create(transitions)


class UpdateExcusesTask(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-testing-migration'
    ITEM_DESCRIPTION = (
        "The package has not entered testing even though the delay is over")
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/testing-migration-action-item.html'

    class AgeVerdict(Enum):
        PKG_OF_AGE = 0
        PKG_TOO_OLD = 1
        PKG_TOO_YOUNG = 2
        PKG_WO_POLICY = 3

    def initialize(self, *args, **kwargs):
        super(UpdateExcusesTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _adapt_excuse_links(self, excuse):
        """
        If the excuse contains any anchor links, convert them to links to Distro
        Tracker package pages. Return the original text unmodified, otherwise.
        """
        re_anchor_href = re.compile(r'^#(.*)$')
        html = soup(excuse, 'html.parser')
        for a_tag in html.findAll('a', {'href': True}):
            href = a_tag['href']
            match = re_anchor_href.match(href)
            if not match:
                continue
            package = match.group(1).split('/')[0]
            a_tag['href'] = package_url(package)

        return str(html)

    def _skip_excuses_item(self, item_text):
        if not item_text:
            return True
        # We ignore these excuses
        if "Section" in item_text or "Maintainer" in item_text:
            return True
        return False

    def _check_age(self, source):
        """Checks the age of the package and compares it to the age requirement
        for migration"""

        if 'policy_info' not in source or 'age' not in source['policy_info']:
            return (self.AgeVerdict.PKG_WO_POLICY, None, None)

        age = source['policy_info']['age']['current-age']
        limit = source['policy_info']['age']['age-requirement']
        if age > limit:
            return (self.AgeVerdict.PKG_TOO_OLD, age, limit)
        elif age < limit:
            return (self.AgeVerdict.PKG_TOO_YOUNG, age, limit)
        else:
            return (self.AgeVerdict.PKG_OF_AGE, age, limit)

    def _extract_problematic(self, source):
        verdict, age, limit = self._check_age(source)

        if verdict == self.AgeVerdict.PKG_TOO_OLD:
            return (source['item-name'], {'age': age, 'limit': limit})

    @staticmethod
    def _make_excuses_check_dependencies(source):
        """Checks the dependencies of the package (blocked-by and
        migrate-after) and returns a list to display."""

        addendum = []

        if 'dependencies' in source:
            blocked_by = source['dependencies'].get('blocked-by', [])
            after = source['dependencies'].get('migrate-after', [])
            after = [
                element
                for element in after
                if element not in blocked_by
            ]
            if blocked_by:
                addendum.append("Blocked by: %s" % (
                    html_package_list(blocked_by),
                ))
            if after:
                addendum.append("Migrates after: %s" % (
                    html_package_list(after),
                ))

        return addendum

    @staticmethod
    def _make_excuses_check_verdict(source):
        """Checks the migration policy verdict of the package and builds an
        excuses message depending on the result."""

        addendum = []

        if 'migration-policy-verdict' in source:
            verdict = source['migration-policy-verdict']
            if verdict == 'REJECTED_BLOCKED_BY_ANOTHER_ITEM':
                addendum.append("Migration status: Blocked. Can't migrate "
                                "due to a non-migratable dependency. Check "
                                "status below."
                                )

        return addendum

    def _make_excuses(self, source):
        """Make the excuses list for a source item using the yaml data it
        contains"""

        excuses = [
            self._adapt_excuse_links(excuse)
            for excuse in source['excuses']
        ]

        # This is the place where we compute some additional
        # messages that should be added to excuses.
        addendum = []

        addendum.extend(self._make_excuses_check_verdict(source))
        addendum.extend(self._make_excuses_check_dependencies(source))

        excuses = addendum + excuses

        if 'is-candidate' in source:
            if not source['is-candidate']:
                excuses.append("Not considered")

        return (
            source['item-name'],
            excuses,
        )

    def _get_excuses_and_problems(self, content):
        """
        Gets the excuses for each package.
        Also finds a list of packages which have not migrated to testing
        after the necessary time has passed.

        :returns: A two-tuple where the first element is a dict mapping
            package names to a list of excuses. The second element is a dict
            mapping package names to problem information. Problem information
            is a dict with the keys ``age`` and ``limit``.
        """
        if 'sources' not in content:
            logger.warning("Invalid format of excuses file")
            return

        sources = content['sources']
        excuses = [
            self._make_excuses(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problems = [
            self._extract_problematic(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problematic = [p for p in problems if p]
        return dict(excuses), dict(problematic)

    def _create_action_item(self, package, extra_data):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` for the given
        package including the given extra data. The item indicates that there is
        a problem with the package migrating to testing.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        action_item.short_description = self.ITEM_DESCRIPTION
        if package.main_entry:
            query_string = urlencode({'package': package.name})
            extra_data['check_why_url'] = (
                'https://qa.debian.org/excuses.php'
                '?{query_string}'.format(query_string=query_string))

        action_item.extra_data = extra_data
        action_item.save()

    def _remove_obsolete_action_items(self, problematic):
        """
        Remove action items for packages which are no longer problematic.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=problematic.keys())

    def _get_excuses_yaml(self):
        """
        Function returning the content of excuses from debian-release
        :returns: a dict of excuses or ``None`` if the content in the
            cache is up to date.
        """
        url = 'https://release.debian.org/britney/excuses.yaml'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        return yaml.safe_load(content)

    def execute_main(self):
        content_lines = self._get_excuses_yaml()
        if not content_lines:
            return

        result = self._get_excuses_and_problems(content_lines)
        if not result:
            return
        package_excuses, problematic = result

        with transaction.atomic():
            # Remove stale excuses data and action items which are not still
            # problematic.
            self._remove_obsolete_action_items(problematic)
            PackageExcuses.objects.all().delete()

            excuses = []
            packages = SourcePackageName.objects.filter(
                name__in=package_excuses.keys())
            packages = packages.prefetch_related('action_items')
            for package in packages:
                excuse = PackageExcuses(
                    package=package,
                    excuses=package_excuses[package.name])
                excuses.append(excuse)
                if package.name in problematic:
                    self._create_action_item(package, problematic[package.name])

            # Create all excuses in a single query
            PackageExcuses.objects.bulk_create(excuses)


class UpdateBuildLogCheckStats(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'debian-build-logcheck'
    ITEM_DESCRIPTION = 'Build log checks report <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/logcheck-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateBuildLogCheckStats, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_buildd_content(self):
        url = 'https://qa.debian.org/bls/logcheck.txt'
        return get_resource_text(url)

    def get_buildd_stats(self):
        content = self._get_buildd_content()
        stats = {}
        for line in content.splitlines():
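            # Each line is a '|'-separated record whose first three fields
            # are the package name, the error count and the warning count.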
            pkg, errors, warnings = line.split("|")[:3]
            try:
                errors, warnings = int(errors), int(warnings)
            except ValueError:
                continue
            stats[pkg] = {
                'errors': errors,
                'warnings': warnings,
            }
        return stats

    def create_action_item(self, package, stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if the build logcheck stats indicate any errors or
        warnings.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)

        errors = stats.get('errors', 0)
        warnings = stats.get('warnings', 0)

        if not errors and not warnings:
            # Remove the previous action item since the package no longer has
            # errors/warnings.
            if action_item is not None:
                action_item.delete()
            return

        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        if action_item.extra_data:
            if action_item.extra_data == stats:
                # Nothing has changed -- do not update the item
                return

        logcheck_url = "https://qa.debian.org/bls/packages/{hash}/{pkg}.html"\
            .format(hash=package.name[0], pkg=package.name)
        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
            action_item.severity = ActionItem.SEVERITY_HIGH
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
            action_item.severity = ActionItem.SEVERITY_HIGH
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')
            action_item.severity = ActionItem.SEVERITY_LOW

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=logcheck_url,
            report=report)
        action_item.extra_data = stats
        action_item.save()

    def execute_main(self):
        # Build a dict with the buildd log check stats
        stats = self.get_buildd_stats()

        BuildLogCheckStats.objects.all().delete()
        ActionItem.objects.delete_obsolete_items(
            [self.action_item_type], stats.keys())

        packages = SourcePackageName.objects.filter(name__in=stats.keys())
        packages = packages.prefetch_related('action_items')

        logcheck_stats = []
        for package in packages:
            logcheck_stat = BuildLogCheckStats(
                package=package,
                stats=stats[package.name])
            logcheck_stats.append(logcheck_stat)

            self.create_action_item(package, stats[package.name])

        # One SQL query to create all the stats.
        BuildLogCheckStats.objects.bulk_create(logcheck_stats)


class DebianWatchFileScannerUpdate(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAMES = (
        'new-upstream-version',
        'watch-failure',
    )
    ACTION_ITEM_TEMPLATES = {
        'new-upstream-version': "debian/new-upstream-version-action-item.html",
        'watch-failure': "debian/watch-failure-action-item.html",
    }
    ITEM_DESCRIPTIONS = {
        'new-upstream-version': lambda item: (
            'A new upstream version is available: '
            '<a href="{url}">{version}</a>'.format(
                url=item.extra_data['upstream_url'],
                version=item.extra_data['upstream_version'])),
        'watch-failure': lambda item: (
            'Problems while searching for a new upstream version'),
    }
    ITEM_SEVERITIES = {
        'new-upstream-version': ActionItem.SEVERITY_HIGH,
        'watch-failure': ActionItem.SEVERITY_HIGH,
    }

    def initialize(self, *args, **kwargs):
        super(DebianWatchFileScannerUpdate, self).initialize(*args, **kwargs)
        self.action_item_types = {
            type_name: ActionItemType.objects.create_or_update(
                type_name=type_name,
                full_description_template=self.ACTION_ITEM_TEMPLATES.get(
                    type_name, None))
            for type_name in self.ACTION_ITEM_TYPE_NAMES
        }

    def _get_upstream_status_content(self):
        url = 'https://udd.debian.org/cgi-bin/upstream-status.json.cgi'
        return get_resource_text(url)

    def _remove_obsolete_action_items(self, item_type_name,
                                      non_obsolete_packages):
        """
        Removes any existing :class:`ActionItem` with the given type name based
        on the list of package names which should still have the items based on
        the processed stats.
        """
        action_item_type = self.action_item_types[item_type_name]
        ActionItem.objects.delete_obsolete_items(
            item_types=[action_item_type],
            non_obsolete_packages=non_obsolete_packages)

    def get_upstream_status_stats(self, stats):
        """
        Gets the stats from the downloaded data and puts them in the given
        ``stats`` dictionary.
        The keys of the dict are package names.

        :returns: A two-tuple where the first item is a list of packages
            which have new upstream versions and the second is a list of
            packages which have watch failures.
        """
        content = self._get_upstream_status_content()
        dehs_data = None
        if content:
            dehs_data = json.loads(content)
        if not dehs_data:
            return [], []

        all_new_versions, all_failures = [], []
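        # Each DEHS entry describes the upstream status of a single package.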
        for entry in dehs_data:
            package_name = entry['package']
            stats.setdefault(package_name, {})
            stats[package_name]['upstream_version'] = entry['upstream-version']
            stats[package_name]['upstream_url'] = entry['upstream-url']
            if 'status' in entry and ('Newer version' in entry['status'] or
                                      'newer package' in entry['status']):
                stats[package_name]['new-upstream-version'] = {
                    'upstream_version': entry['upstream-version'],
                    'upstream_url': entry['upstream-url'],
                }
                all_new_versions.append(package_name)
            if entry.get('warnings') or entry.get('errors'):
                msg = '{}\n{}'.format(
                    entry.get('errors') or '',
                    entry.get('warnings') or '',
                ).strip()
                stats[package_name]['watch-failure'] = {
                    'warning': msg,
                }
                all_failures.append(package_name)

        return all_new_versions, all_failures

    def update_package_info(self, package, stats):
        """
        Updates upstream information of the given package based on the given
        stats. Upstream data is saved as a :class:`PackageData` under the
        `upstream-watch-status` key.

        :param package: The package to which the upstream info should be
            associated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats which are used to create the upstream info.
        :type stats: :class:`dict`
        """
        try:
            watch_data = package.watch_status[0]
        except IndexError:
            watch_data = PackageData(
                package=package,
                key='upstream-watch-status',
            )

        watch_data.value = stats
        watch_data.save()

    def update_action_item(self, item_type, package, stats):
        """
        Updates the action item of the given type for the given package based
        on the given stats.

        The severity of the item is defined by the :attr:`ITEM_SEVERITIES` dict.

        The short descriptions are created by passing the :class:`ActionItem`
        (with extra data already set) to the callables defined in
        :attr:`ITEM_DESCRIPTIONS`.

        :param item_type: The type of the :class:`ActionItem` that should be
            updated.
        :type item_type: string
        :param package: The package to which this action item should be
            associated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats which are used to create the action item.
        :type stats: :class:`dict`
        """
        action_item = package.get_action_item_for_type(item_type)
        if action_item is None:
            # Create an action item...
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_types[item_type])

        if item_type in self.ITEM_SEVERITIES:
            action_item.severity = self.ITEM_SEVERITIES[item_type]
        action_item.extra_data = stats
        action_item.short_description = \
            self.ITEM_DESCRIPTIONS[item_type](action_item)

        action_item.save()

    @transaction.atomic
    def execute_main(self):
        stats = {}
        new_upstream_version, failures = self.get_upstream_status_stats(stats)
        updated_packages_per_type = {
            'new-upstream-version': new_upstream_version,
            'watch-failure': failures,
        }

        # Remove obsolete action items for each of the categories...
        for item_type, packages in updated_packages_per_type.items():
            self._remove_obsolete_action_items(item_type, packages)

        packages = SourcePackageName.objects.filter(
            name__in=stats.keys())
        filter_qs = PackageData.objects.filter(key='upstream-watch-status')
        packages = packages.prefetch_related(
            'action_items',
            Prefetch('data', queryset=filter_qs, to_attr='watch_status')
        )

        # Update action items for each package
        for package in packages:
            for type_name in self.ACTION_ITEM_TYPE_NAMES:
                if type_name in stats[package.name]:
                    # method(package, stats[package.name][type_name])
                    self.update_action_item(
                        type_name, package, stats[package.name][type_name])

            self.update_package_info(package, stats[package.name])


1552class UpdateSecurityIssuesTask(BaseTask): 

1553 

1554 class Scheduler(IntervalScheduler): 

1555 interval = 3600 * 3 

1556 

1557 ACTION_ITEM_TYPE_NAME = 'debian-security-issue-in-{}' 

1558 ACTION_ITEM_TEMPLATE = 'debian/security-issue-action-item.html' 

1559 ITEM_DESCRIPTION_TEMPLATE = { 

1560 'open': '<a href="{url}">{count} security {issue}</a> in {release}', 

1561 'nodsa': 

1562 '<a href="{url}">{count} low-priority security {issue}</a> ' 

1563 'in {release}', 

1564 'none': 'No known security issue in {release}', 

1565 } 

1566 CVE_DATA_URL = 'https://security-tracker.debian.org/tracker/data/json' 

1567 DISTRIBUTIONS_URL = ( 

1568 'https://security-tracker.debian.org/tracker/distributions.json' 

1569 ) 

1570 

1571 def initialize(self, *args, **kwargs): 

1572 super(UpdateSecurityIssuesTask, self).initialize(*args, **kwargs) 

1573 self._action_item_type = {} 

1574 self._issues = None 

1575 self._distributions = None 

1576 

1577 def action_item_type(self, release): 

1578 return self._action_item_type.setdefault( 

1579 release, ActionItemType.objects.create_or_update( 

1580 type_name=self.ACTION_ITEM_TYPE_NAME.format(release), 

1581 full_description_template=self.ACTION_ITEM_TEMPLATE)) 

1582 

1583 def _get_distributions(self): 

1584 if not self._distributions: 

1585 content = get_resource_text(self.DISTRIBUTIONS_URL) 

1586 self._distributions = json.loads(content) 

1587 return self._distributions 

1588 

1589 def _get_support_status(self, release): 

1590 """ 

1591 Return support status of a given release as documented by the 

1592 security team in the security tracker. 

1593 """ 

1594 return self._get_distributions().get(release, {}).get('support', 

1595 'unknown') 

1596 

1597 def _get_issues_content(self): 

1598        if self._issues:

1599 return self._issues 

1600 content = get_resource_text(self.CVE_DATA_URL) 

1601        if content:

1602 self._issues = json.loads(content) 

1603 return self._issues 

1604 

1605 @classmethod 

1606 def _update_stats_with_nodsa_entry(cls, stats, nodsa_entry, 

1607 entry_id, description): 

1608 stats['nodsa'] += 1 

1609 

1610 nodsa_details = {'description': description, 

1611 'nodsa': nodsa_entry.get('nodsa', ''), 

1612 'nodsa_reason': nodsa_entry.get('nodsa_reason', '') 

1613 } 

1614 

1615 nodsa_reason = nodsa_details['nodsa_reason'] 

1616 if nodsa_reason == '': 

1617 nodsa_details['needs_triaging'] = True 

1618 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1619 nodsa_details 

1620        elif nodsa_reason == 'postponed':

1621 nodsa_details['fixed_via_stable_update'] = True 

1622 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1623 nodsa_details 

1624        elif nodsa_reason == 'ignored':

1625 stats['nodsa_ignored_details'][entry_id] = nodsa_details 

1626 

1627 @classmethod 

1628 def get_issues_summary(cls, issues): 

1629 result = {} 

1630 for issue_id, issue_data in issues.items(): 

1631 for release, data in issue_data['releases'].items(): 

1632 stats = result.setdefault(release, { 

1633 'open': 0, 

1634 'open_details': {}, 

1635 'nodsa': 0, 

1636 'unimportant': 0, 

1637 'next_point_update_details': {}, 

1638 'nodsa_maintainer_to_handle_details': {}, 

1639 'nodsa_ignored_details': {}, 

1640 }) 

1641 description = issue_data.get('description', '') 

1642 if (data.get('status', '') == 'resolved' or 

1643 data.get('urgency', '') == 'end-of-life'): 

1644 continue 

1645 elif data.get('urgency', '') == 'unimportant': 

1646 stats['unimportant'] += 1 

1647 elif data.get('next_point_update', False): 

1648 stats['next_point_update_details'][issue_id] = \ 

1649 {'description': description} 

1650 elif data.get('nodsa', False) is not False: 

1651 cls._update_stats_with_nodsa_entry(stats, 

1652 data, issue_id, 

1653 description 

1654 ) 

1655 else: 

1656 stats['open'] += 1 

1657 stats['open_details'][issue_id] = \ 

1658 {'description': description} 

1659 

1660 return result 

1661 
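    # Illustrative sketch (hypothetical CVE data, not used by the task):
    # an issue resolved in one release but still open in another is
    # summarized by get_issues_summary() roughly as follows:
    #
    #   >>> summary = UpdateSecurityIssuesTask.get_issues_summary({
    #   ...     'CVE-2099-0001': {
    #   ...         'description': 'example issue',
    #   ...         'releases': {
    #   ...             'bullseye': {'status': 'resolved'},
    #   ...             'sid': {'status': 'open', 'urgency': 'medium'},
    #   ...         },
    #   ...     },
    #   ... })
    #   >>> summary['sid']['open'], summary['bullseye']['open']
    #   (1, 0)
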

1662 @classmethod 

1663 def get_issues_stats(cls, content): 

1664 """ 

1665 Gets package issue stats from Debian's security tracker. 

1666 """ 

1667 stats = {} 

1668 for pkg, issues in content.items(): 

1669 stats[pkg] = cls.get_issues_summary(issues) 

1670 return stats 

1671 

1672 def _get_short_description(self, key, action_item): 

1673 count = action_item.extra_data['security_issues_count'] 

1674 url = 'https://security-tracker.debian.org/tracker/source-package/{}' 

1675 return self.ITEM_DESCRIPTION_TEMPLATE[key].format( 

1676 count=count, 

1677 issue='issues' if count > 1 else 'issue', 

1678 release=action_item.extra_data.get('release', 'sid'), 

1679 url=url.format(action_item.package.name), 

1680 ) 

1681 

1682 def update_action_item(self, stats, action_item): 

1683 """ 

1684        Updates the ``debian-security-issue-in-*`` action item of a release
1685        based on its security issues.

1686 """ 

1687 

1688 security_issues_count = stats['open'] + stats['nodsa'] 

1689 action_item.extra_data['security_issues_count'] = security_issues_count 

1690 action_item.extra_data['support_status'] = ( 

1691 self._get_support_status(action_item.extra_data['release']) 

1692 ) 

1693 

1694 for base_key in ['open', 

1695 'next_point_update', 

1696 'nodsa_maintainer_to_handle', 

1697 'nodsa_ignored']: 

1698 details_key = base_key + '_details' 

1699 count_key = base_key + '_count' 

1700 

1701 action_item.extra_data[details_key] = stats[details_key] 

1702 action_item.extra_data[count_key] = len(stats[details_key]) 

1703 

1704        # next_point_update / nodsa_ignored details are only displayed
1705        # when there is anything else to show

1706 nodsa_create_action = (stats['nodsa'] - 

1707 len(stats['nodsa_ignored_details'])) > 0 

1708 

1709 if stats['open']: 

1710 action_item.severity = ActionItem.SEVERITY_HIGH 

1711 action_item.short_description = \ 

1712 self._get_short_description('open', action_item) 

1713 elif nodsa_create_action: 

1714 action_item.severity = ActionItem.SEVERITY_LOW 

1715 action_item.short_description = \ 

1716 self._get_short_description('nodsa', action_item) 

1717 else: 

1718 action_item.severity = ActionItem.SEVERITY_WISHLIST 

1719 action_item.short_description = \ 

1720 self._get_short_description('none', action_item) 

1721 

1722 @classmethod 

1723 def generate_package_data(cls, issues): 

1724 return { 

1725 'details': issues, 

1726 'stats': cls.get_issues_summary(issues), 

1727 'checksum': get_data_checksum(issues) 

1728 } 

1729 

1730 def want_action_item(self, pkgdata, release): 

1731 stats = pkgdata.value.get('stats', {}).get(release) 

1732        if stats is None:

1733 return False 

1734 

1735 supported_by = self._get_support_status(release) 

1736 if supported_by == "end-of-life": 

1737 return False 

1738 elif supported_by == "security": 

1739 count = stats.get('open', 0) + stats.get('nodsa', 0) 

1740 else: 

1741 count = stats.get('open', 0) 

1742 

1743 if count == 0: 

1744 return False 

1745 

1746 return True 

1747 
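    # Decision sketch for want_action_item() above: end-of-life releases
    # never get an action item; releases still covered by security support
    # count both 'open' and 'nodsa' issues; other releases only count
    # 'open' issues.
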

1748 def process_pkg_action_items(self, pkgdata, existing_action_items): 

1749 release_ai = {} 

1750 to_add = [] 

1751 to_update = [] 

1752 to_drop = [] 

1753 global_stats = pkgdata.value.get('stats', {}) 

1754 for ai in existing_action_items: 

1755 release = ai.extra_data['release'] 

1756 release_ai[release] = ai 

1757 for release, stats in global_stats.items(): 

1758 ai = release_ai.get(release) 

1759 

1760 if self.want_action_item(pkgdata, release): 

1761 if ai: 

1762 to_update.append(ai) 

1763 else: 

1764 ai = ActionItem( 

1765 item_type=self.action_item_type(release), 

1766 package=pkgdata.package, 

1767 extra_data={'release': release} 

1768 ) 

1769 to_add.append(ai) 

1770 self.update_action_item(stats, ai) 

1771 else: 

1772 if ai: 

1773 to_drop.append(ai) 

1774 

1775 return to_add, to_update, to_drop 

1776 

1777 def execute_main(self): 

1778 # Fetch all debian-security PackageData 

1779 all_pkgdata = PackageData.objects.select_related( 

1780 'package').filter(key='debian-security').only( 

1781 'package__name', 'value') 

1782 

1783 all_data = {} 

1784 packages = {} 

1785 for pkgdata in all_pkgdata: 

1786 all_data[pkgdata.package.name] = pkgdata 

1787 packages[pkgdata.package.name] = pkgdata.package 

1788 

1789 # Fetch all debian-security ActionItems 

1790 pkg_action_items = collections.defaultdict(lambda: []) 

1791 all_action_items = ActionItem.objects.select_related( 

1792 'package').filter( 

1793 item_type__type_name__startswith='debian-security-issue-in-') 

1794 for action_item in all_action_items: 

1795 pkg_action_items[action_item.package.name].append(action_item) 

1796 

1797 # Check for changes on distributions.json 

1798 distributions_checksum = get_data_checksum(self._get_distributions()) 

1799 if self.data.get('distributions_checksum') != distributions_checksum: 

1800 # New distributions.json, force update all action items 

1801 self.force_update = True 

1802 self.data['distributions_checksum'] = distributions_checksum 

1803 

1804 # Scan the security tracker data 

1805 content = self._get_issues_content() 

1806 to_add = [] 

1807 to_update = [] 

1808 for pkgname, issues in content.items(): 

1809 if pkgname in all_data: 

1810 # Check if we need to update the existing data 

1811 checksum = get_data_checksum(issues) 

1812 if not self.force_update and \ 

1813 all_data[pkgname].value.get('checksum', '') == checksum: 

1814 continue 

1815 # Update the data 

1816 pkgdata = all_data[pkgname] 

1817 pkgdata.value = self.generate_package_data(issues) 

1818 to_update.append(pkgdata) 

1819 else: 

1820 # Add data for a new package 

1821 package, _ = PackageName.objects.get_or_create(name=pkgname) 

1822 to_add.append( 

1823 PackageData( 

1824 package=package, 

1825 key='debian-security', 

1826 value=self.generate_package_data(issues) 

1827 ) 

1828 ) 

1829 # Process action items 

1830 ai_to_add = [] 

1831 ai_to_update = [] 

1832 ai_to_drop = [] 

1833 for pkgdata in itertools.chain(to_add, to_update): 

1834 add, update, drop = self.process_pkg_action_items( 

1835 pkgdata, pkg_action_items[pkgdata.package.name]) 

1836 ai_to_add.extend(add) 

1837 ai_to_update.extend(update) 

1838 ai_to_drop.extend(drop) 

1839 # Sync in database 

1840 with transaction.atomic(): 

1841 # Delete obsolete data 

1842 PackageData.objects.filter( 

1843 key='debian-security').exclude( 

1844 package__name__in=content.keys()).delete() 

1845 ActionItem.objects.filter( 

1846 item_type__type_name__startswith='debian-security-issue-in-' 

1847 ).exclude(package__name__in=content.keys()).delete() 

1848 ActionItem.objects.filter( 

1849 item_type__type_name__startswith='debian-security-issue-in-', 

1850 id__in=[ai.id for ai in ai_to_drop]).delete() 

1851 # Add new entries 

1852 PackageData.objects.bulk_create(to_add) 

1853 ActionItem.objects.bulk_create(ai_to_add) 

1854 # Update existing entries 

1855 for pkgdata in to_update: 

1856 pkgdata.save() 

1857 for ai in ai_to_update: 

1858 ai.save() 

1859 

1860 
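# A minimal sketch of the checksum-based change detection used by
# UpdateSecurityIssuesTask.execute_main() above; the values passed in are
# hypothetical and get_data_checksum() is the helper already imported at
# the top of this module.
def _checksum_skip_sketch(old_value, new_issues):
    """Return 'skip' when the stored checksum matches the new content."""
    if old_value.get('checksum', '') == get_data_checksum(new_issues):
        return 'skip'    # nothing changed, no database write needed
    return 'update'      # content changed, regenerate the package data
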

1861class UpdatePiuPartsTask(BaseTask): 

1862 """ 

1863 Retrieves the piuparts stats for all the suites defined in the 

1864 :data:`distro_tracker.project.local_settings.DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES` 

1865 """ 

1866 

1867 class Scheduler(IntervalScheduler): 

1868 interval = 3600 * 3 

1869 

1870 ACTION_ITEM_TYPE_NAME = 'debian-piuparts-test-fail' 

1871 ACTION_ITEM_TEMPLATE = 'debian/piuparts-action-item.html' 

1872 ITEM_DESCRIPTION = 'piuparts found (un)installation error(s)' 

1873 

1874 def initialize(self, *args, **kwargs): 

1875 super(UpdatePiuPartsTask, self).initialize(*args, **kwargs) 

1876 self.action_item_type = ActionItemType.objects.create_or_update( 

1877 type_name=self.ACTION_ITEM_TYPE_NAME, 

1878 full_description_template=self.ACTION_ITEM_TEMPLATE) 

1879 

1880 def _get_piuparts_content(self, suite): 

1881 """ 

1882        :returns: The content of the piuparts sources report for the given
1883            suite, or ``None`` if there is no data for that suite.

1884 """ 

1885 url = 'https://piuparts.debian.org/{suite}/sources.txt' 

1886 return get_resource_text(url.format(suite=suite)) 

1887 

1888 def get_piuparts_stats(self): 

1889 suites = getattr(settings, 'DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES', []) 

1890 failing_packages = {} 

1891 for suite in suites: 

1892 content = self._get_piuparts_content(suite) 

1893 if content is None: 

1894 logger.info("There is no piuparts for suite: %s", suite) 

1895 continue 

1896 

1897 for line in content.splitlines(): 

1898 package_name, status = line.split(':', 1) 

1899 package_name, status = package_name.strip(), status.strip() 

1900 if status == 'fail': 

1901 failing_packages.setdefault(package_name, []) 

1902 failing_packages[package_name].append(suite) 

1903 

1904 return failing_packages 

1905 

1906 def create_action_item(self, package, suites): 

1907 """ 

1908 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

1909 instance for the package based on the list of suites in which the 

1910 piuparts installation test failed. 

1911 """ 

1912 action_item = package.get_action_item_for_type(self.action_item_type) 

1913 if action_item is None: 

1914 action_item = ActionItem( 

1915 package=package, 

1916 item_type=self.action_item_type, 

1917 short_description=self.ITEM_DESCRIPTION) 

1918 

1919 if action_item.extra_data: 

1920 existing_items = action_item.extra_data.get('suites', []) 

1921 if list(sorted(existing_items)) == list(sorted(suites)): 

1922 # No need to update this item 

1923 return 

1924 action_item.extra_data = { 

1925 'suites': suites, 

1926 } 

1927 action_item.save() 

1928 

1929 def execute_main(self): 

1930 failing_packages = self.get_piuparts_stats() 

1931 

1932 ActionItem.objects.delete_obsolete_items( 

1933 item_types=[self.action_item_type], 

1934 non_obsolete_packages=failing_packages.keys()) 

1935 

1936 packages = SourcePackageName.objects.filter( 

1937 name__in=failing_packages.keys()) 

1938 packages = packages.prefetch_related('action_items') 

1939 

1940 for package in packages: 

1941 self.create_action_item(package, failing_packages[package.name]) 

1942 

1943 
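# Sketch of the 'package: status' line format that
# UpdatePiuPartsTask.get_piuparts_stats() parses above; the sample line in
# the comment below is hypothetical.
def _parse_piuparts_line_sketch(line):
    """Split a piuparts sources.txt line into (package, status)."""
    package_name, status = line.split(':', 1)
    return package_name.strip(), status.strip()
# e.g. _parse_piuparts_line_sketch('foo: fail') == ('foo', 'fail')
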

1944class UpdateUbuntuStatsTask(BaseTask): 

1945 """ 

1946 The task updates Ubuntu stats for packages. These stats are displayed in a 

1947 separate panel. 

1948 """ 

1949 

1950 class Scheduler(IntervalScheduler): 

1951 interval = 3600 * 3 

1952 

1953 def initialize(self, *args, **kwargs): 

1954 super(UpdateUbuntuStatsTask, self).initialize(*args, **kwargs) 

1955 

1956 def _get_versions_content(self): 

1957 url = 'https://udd.debian.org/cgi-bin/ubuntupackages.cgi' 

1958 return get_resource_text(url) 

1959 

1960 def get_ubuntu_versions(self): 

1961 """ 

1962 Retrieves the Ubuntu package versions. 

1963 

1964 :returns: A dict mapping package names to Ubuntu versions. 

1965 """ 

1966 content = self._get_versions_content() 

1967 

1968 package_versions = {} 

1969 for line in content.splitlines(): 

1970 package, version = line.split(' ', 1) 

1971 version = version.strip() 

1972 package_versions[package] = version 

1973 

1974 return package_versions 

1975 

1976 def _get_bug_stats_content(self): 

1977 url = 'https://udd.debian.org/cgi-bin/ubuntubugs.cgi' 

1978 return get_resource_text(url) 

1979 

1980 def get_ubuntu_bug_stats(self): 

1981 """ 

1982        Retrieves the Ubuntu bug stats for all packages. Bug stats contain
1983        the count of bugs and the count of patches.

1984 

1985 :returns: A dict mapping package names to a dict of package stats. 

1986 """ 

1987 content = self._get_bug_stats_content() 

1988 

1989 bug_stats = {} 

1990 for line in content.splitlines(): 

1991 package_name, bug_count, patch_count = line.split("|", 2) 

1992 try: 

1993 bug_count, patch_count = int(bug_count), int(patch_count) 

1994 except ValueError: 

1995 continue 

1996 bug_stats[package_name] = { 

1997 'bug_count': bug_count, 

1998 'patch_count': patch_count, 

1999 } 

2000 

2001 return bug_stats 

2002 
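    # For illustration, a hypothetical ubuntubugs.cgi line such as
    #   'foo|3|1'
    # is parsed above into
    #   bug_stats['foo'] = {'bug_count': 3, 'patch_count': 1}
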

2003 def _get_ubuntu_patch_diff_content(self): 

2004 url = 'https://patches.ubuntu.com/PATCHES' 

2005 return get_resource_text(url) 

2006 

2007 def get_ubuntu_patch_diffs(self): 

2008 """ 

2009 Retrieves the Ubuntu patch diff information. The information consists 

2010        of the diff URL and the version of the Ubuntu package to which the
2011        diff belongs.

2012 

2013 :returns: A dict mapping package names to diff information. 

2014 """ 

2015 content = self._get_ubuntu_patch_diff_content() 

2016 

2017 patch_diffs = {} 

2018 re_diff_version = re.compile(r'_(\S+)\.patch') 

2019 for line in content.splitlines(): 

2020 package_name, diff_url = line.split(' ', 1) 

2021 # Extract the version of the package from the diff url 

2022 match = re_diff_version.search(diff_url) 

2023            if not match:

2024 # Invalid URL: no version 

2025 continue 

2026 version = match.group(1) 

2027 patch_diffs[package_name] = { 

2028 'version': version, 

2029 'diff_url': diff_url 

2030 } 

2031 

2032 return patch_diffs 

2033 
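    # For illustration, a hypothetical PATCHES line such as
    #   'foo foo_1.0-1ubuntu2.patch'
    # is parsed above into
    #   patch_diffs['foo'] = {'version': '1.0-1ubuntu2',
    #                         'diff_url': 'foo_1.0-1ubuntu2.patch'}
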

2034 def execute_main(self): 

2035 package_versions = self.get_ubuntu_versions() 

2036 bug_stats = self.get_ubuntu_bug_stats() 

2037 patch_diffs = self.get_ubuntu_patch_diffs() 

2038 

2039 obsolete_ubuntu_pkgs = UbuntuPackage.objects.exclude( 

2040 package__name__in=package_versions.keys()) 

2041 obsolete_ubuntu_pkgs.delete() 

2042 

2043 packages = PackageName.objects.filter(name__in=package_versions.keys()) 

2044 packages = packages.prefetch_related('ubuntu_package') 

2045 

2046 for package in packages: 

2047 version = package_versions[package.name] 

2048 bugs = bug_stats.get(package.name, None) 

2049 diff = patch_diffs.get(package.name, None) 

2050 

2051 try: 

2052 ubuntu_package = package.ubuntu_package 

2053 ubuntu_package.version = version 

2054 ubuntu_package.bugs = bugs 

2055 ubuntu_package.patch_diff = diff 

2056 ubuntu_package.save() 

2057 except UbuntuPackage.DoesNotExist: 

2058 ubuntu_package = UbuntuPackage.objects.create( 

2059 package=package, 

2060 version=version, 

2061 bugs=bugs, 

2062 patch_diff=diff) 

2063 

2064 

2065class UpdateWnppStatsTask(BaseTask): 

2066 """ 

2067 The task updates the WNPP bugs for all packages. 

2068 """ 

2069 

2070 class Scheduler(IntervalScheduler): 

2071 interval = 3600 * 3 

2072 

2073 ACTION_ITEM_TYPE_NAME = 'debian-wnpp-issue' 

2074 ACTION_ITEM_TEMPLATE = 'debian/wnpp-action-item.html' 

2075 ITEM_DESCRIPTION = '<a href="{url}">{wnpp_type}: {wnpp_msg}</a>' 

2076 

2077 def initialize(self, *args, **kwargs): 

2078 super(UpdateWnppStatsTask, self).initialize(*args, **kwargs) 

2079 self.action_item_type = ActionItemType.objects.create_or_update( 

2080 type_name=self.ACTION_ITEM_TYPE_NAME, 

2081 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2082 

2083 def get_wnpp_stats(self): 

2084 """ 

2085 Retrieves and parses the wnpp stats for all packages. WNPP stats 

2086 include the WNPP type and the BTS bug id. 

2087 

2088 :returns: A dict mapping package names to wnpp stats. 

2089 """ 

2090 url = 'https://qa.debian.org/data/bts/wnpp_rm' 

2091 content = get_resource_text(url, only_if_updated=True) 

2092        if content is None:

2093 return 

2094 

2095 wnpp_stats = {} 

2096 for line in content.splitlines(): 

2097 line = line.strip() 

2098 try: 

2099 package_name, wnpp_type, bug_id = line.split('|')[0].split() 

2100 bug_id = int(bug_id) 

2101 except ValueError: 

2102                # Badly formatted line or bug number

2103 continue 

2104 # Strip the colon from the end of the package name 

2105 package_name = package_name[:-1] 

2106 

2107 wnpp_stats[package_name] = { 

2108 'wnpp_type': wnpp_type, 

2109 'bug_id': bug_id, 

2110 } 

2111 

2112 return wnpp_stats 

2113 
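    # For illustration, a hypothetical wnpp_rm line such as
    #   'foo: O 123456 | orphaned package'
    # is parsed above into
    #   wnpp_stats['foo'] = {'wnpp_type': 'O', 'bug_id': 123456}
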

2114 def update_action_item(self, package, stats): 

2115 """ 

2116 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2117 instance for the given type indicating that the package has a WNPP 

2118 issue. 

2119 """ 

2120 action_item = package.get_action_item_for_type(self.action_item_type) 

2121 if not action_item: 

2122 action_item = ActionItem( 

2123 package=package, 

2124 item_type=self.action_item_type) 

2125 

2126 # Check if the stats have actually been changed 

2127 if action_item.extra_data: 

2128 if action_item.extra_data.get('wnpp_info', None) == stats: 

2129                # Nothing to do -- still the same data

2130 return 

2131 

2132 # Update the data since something has changed 

2133 try: 

2134 release = package.main_entry.repository.suite or \ 

2135 package.main_entry.repository.codename 

2136 except AttributeError: 

2137 release = None 

2138 

2139 msgs = { 

2140 'O': "This package has been orphaned and needs a maintainer.", 

2141 'ITA': "Someone intends to adopt this package.", 

2142            'RFA': "The maintainer wants to pass over package maintenance.",

2143 'RFH': "The maintainer is looking for help with this package.", 

2144 'ITP': "Someone is planning to reintroduce this package.", 

2145 'RFP': "There is a request to reintroduce this package.", 

2146 'RM': "This package has been requested to be removed.", 

2147 'RFS': "A sponsor is needed to update this package.", 

2148 '?': "The WNPP database contains an entry for this package." 

2149 } 

2150 wnpp_type = stats['wnpp_type'] 

2151 try: 

2152 wnpp_msg = msgs[wnpp_type] 

2153 except KeyError: 

2154 wnpp_msg = msgs['?'] 

2155 

2156 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2157 url='https://bugs.debian.org/{}'.format(stats['bug_id']), 

2158 wnpp_type=wnpp_type, wnpp_msg=wnpp_msg) 

2159 action_item.extra_data = { 

2160 'wnpp_info': stats, 

2161 'release': release, 

2162 } 

2163 action_item.save() 

2164 

2165 def update_depneedsmaint_action_item(self, package_needs_maintainer, stats): 

2166 short_description_template = \ 

2167 'Depends on packages which need a new maintainer' 

2168 package_needs_maintainer.get_absolute_url() 

2169 action_item_type = ActionItemType.objects.create_or_update( 

2170 type_name='debian-depneedsmaint', 

2171 full_description_template='debian/depneedsmaint-action-item.html') 

2172 dependencies = SourcePackageDeps.objects.filter( 

2173 dependency=package_needs_maintainer) 

2174        for dependency in dependencies:

2175 package = dependency.source 

2176 action_item = package.get_action_item_for_type(action_item_type) 

2177 if not action_item: 

2178 action_item = ActionItem( 

2179 package=package, 

2180 item_type=action_item_type, 

2181 extra_data={}) 

2182 

2183 pkgdata = { 

2184 'bug': stats['bug_id'], 

2185 'details': dependency.details, 

2186 } 

2187 

2188 if (action_item.extra_data.get(package_needs_maintainer.name, {}) == 

2189 pkgdata): 

2190 # Nothing has changed 

2191 continue 

2192 

2193 action_item.short_description = short_description_template 

2194 action_item.extra_data[package_needs_maintainer.name] = pkgdata 

2195 

2196 action_item.save() 

2197 

2198 @transaction.atomic 

2199 def execute_main(self): 

2200 wnpp_stats = self.get_wnpp_stats() 

2201        if wnpp_stats is None:

2202 # Nothing to do: cached content up to date 

2203 return 

2204 

2205 ActionItem.objects.delete_obsolete_items( 

2206 item_types=[self.action_item_type], 

2207 non_obsolete_packages=wnpp_stats.keys()) 

2208 # Remove obsolete action items for packages whose dependencies need a 

2209 # new maintainer. 

2210 packages_need_maintainer = [] 

2211 for name, stats in wnpp_stats.items(): 

2212 if stats['wnpp_type'] in ('O', 'RFA'): 

2213 packages_need_maintainer.append(name) 

2214 packages_depneeds_maint = [ 

2215 package.name for package in SourcePackageName.objects.filter( 

2216 source_dependencies__dependency__name__in=packages_need_maintainer) # noqa 

2217 ] 

2218 ActionItem.objects.delete_obsolete_items( 

2219 item_types=[ 

2220 ActionItemType.objects.get_or_create( 

2221 type_name='debian-depneedsmaint')[0], 

2222 ], 

2223 non_obsolete_packages=packages_depneeds_maint) 

2224 

2225 # Drop all reverse references 

2226        for ai in ActionItem.objects.filter(

2227 item_type__type_name='debian-depneedsmaint'): 

2228 ai.extra_data = {} 

2229 ai.save() 

2230 

2231 packages = SourcePackageName.objects.filter(name__in=wnpp_stats.keys()) 

2232 packages = packages.prefetch_related('action_items') 

2233 

2234 for package in packages: 

2235 stats = wnpp_stats[package.name] 

2236 self.update_action_item(package, stats) 

2237 # Update action items for packages which depend on this one to 

2238 # indicate that a dependency needs a new maintainer. 

2239 if package.name in packages_need_maintainer: 

2240 self.update_depneedsmaint_action_item(package, stats) 

2241 

2242 

2243class UpdateNewQueuePackages(BaseTask): 

2244 """ 

2245 Updates the versions of source packages found in the NEW queue. 

2246 """ 

2247 

2248 class Scheduler(IntervalScheduler): 

2249 interval = 3600 

2250 

2251 DATA_KEY = 'debian-new-queue-info' 

2252 

2253 def initialize(self, *args, **kwargs): 

2254 super(UpdateNewQueuePackages, self).initialize(*args, **kwargs) 

2255 

2256 def extract_package_info(self, content): 

2257 """ 

2258 Extracts the package information from the content of the NEW queue. 

2259 

2260        :returns: A dict mapping each package name to a dict that maps the
2261            distribution in which the package is found to the version
2262            information of the most recent version in that distribution.

2263 """ 

2264 packages = {} 

2265 for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()): 

2266 necessary_fields = ('Source', 'Queue', 'Version', 'Distribution') 

2267 if not all(field in stanza for field in necessary_fields): 

2268 continue 

2269            if stanza['Queue'] != 'new':

2270 continue 

2271 

2272 versions = stanza['Version'].split() 

2273 # Save only the most recent version 

2274 version = max(versions, key=lambda x: AptPkgVersion(x)) 

2275 

2276 package_name = stanza['Source'] 

2277 pkginfo = packages.setdefault(package_name, {}) 

2278 distribution = stanza['Distribution'] 

2279 if distribution in pkginfo: 

2280 current_version = pkginfo[distribution]['version'] 

2281 if debian_support.version_compare(version, current_version) < 0: 

2282 # The already saved version is more recent than this one. 

2283 continue 

2284 

2285 pkginfo[distribution] = { 

2286 'version': version, 

2287 } 

2288 

2289 return packages 

2290 
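    # Version selection sketch (hypothetical values): for a stanza with
    #   Version: 1.0-1 1.0-2~rc1 1.0-2
    # max(versions, key=AptPkgVersion) above keeps '1.0-2', since the
    # tilde makes 1.0-2~rc1 sort before 1.0-2.
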

2291 def _get_new_content(self): 

2292 url = 'https://ftp-master.debian.org/new.822' 

2293 return get_resource_text(url, force_update=self.force_update, 

2294 only_if_updated=True) 

2295 

2296 def execute_main(self): 

2297 content = self._get_new_content() 

2298        if content is None:

2299 return 

2300 

2301 all_package_info = self.extract_package_info(content) 

2302 

2303 packages = SourcePackageName.objects.filter( 

2304 name__in=all_package_info.keys()) 

2305 

2306 with transaction.atomic(): 

2307 # Drop old entries 

2308 PackageData.objects.filter(key=self.DATA_KEY).delete() 

2309 # Prepare current entries 

2310 data = [] 

2311 for package in packages: 

2312 new_queue_info = PackageData( 

2313 key=self.DATA_KEY, 

2314 package=package, 

2315 value=all_package_info[package.name]) 

2316 data.append(new_queue_info) 

2317 # Bulk create them 

2318 PackageData.objects.bulk_create(data) 

2319 

2320 

2321class UpdateAutoRemovalsStatsTask(BaseTask): 

2322 """ 

2323 A task for updating autoremovals information on all packages. 

2324 """ 

2325 

2326 class Scheduler(IntervalScheduler): 

2327 interval = 3600 

2328 

2329 ACTION_ITEM_TYPE_NAME = 'debian-autoremoval' 

2330 ACTION_ITEM_TEMPLATE = 'debian/autoremoval-action-item.html' 

2331 ITEM_DESCRIPTION = ('Marked for autoremoval on {removal_date}' + 

2332 '{dependencies}: {bugs}') 

2333 

2334 def initialize(self, *args, **kwargs): 

2335 super(UpdateAutoRemovalsStatsTask, self).initialize(*args, **kwargs) 

2336 self.action_item_type = ActionItemType.objects.create_or_update( 

2337 type_name=self.ACTION_ITEM_TYPE_NAME, 

2338 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2339 

2340 def get_autoremovals_stats(self): 

2341 """ 

2342 Retrieves and parses the autoremoval stats for all packages. 

2343 Autoremoval stats include the BTS bugs id. 

2344 

2345 :returns: A dict mapping package names to autoremoval stats. 

2346 """ 

2347 content = get_resource_text( 

2348 'https://udd.debian.org/cgi-bin/autoremovals.yaml.cgi', 

2349 force_update=self.force_update, 

2350 only_if_updated=True 

2351 ) 

2352        if content:

2353 return yaml.safe_load(content) 

2354 

2355 def update_action_item(self, package, stats): 

2356 """ 

2357 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2358 instance for the given type indicating that the package has an 

2359 autoremoval issue. 

2360 """ 

2361 action_item = package.get_action_item_for_type(self.action_item_type) 

2362        if not action_item:

2363 action_item = ActionItem( 

2364 package=package, 

2365 item_type=self.action_item_type, 

2366 severity=ActionItem.SEVERITY_HIGH) 

2367 

2368 bugs_dependencies = stats.get('bugs_dependencies', []) 

2369 buggy_dependencies = stats.get('buggy_dependencies', []) 

2370 reverse_dependencies = stats.get('rdeps', []) 

2371 all_bugs = stats['bugs'] + bugs_dependencies 

2372 link = '<a href="https://bugs.debian.org/{}">#{}</a>' 

2373 removal_date = stats['removal_date'].strftime('%d %B') 

2374        if isinstance(removal_date, bytes):

2375 removal_date = removal_date.decode('utf-8', 'ignore') 

2376 

2377 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2378 removal_date=removal_date, 

2379 dependencies=(' due to ' + html_package_list( 

2380 buggy_dependencies) if buggy_dependencies else ''), 

2381 bugs=', '.join(link.format(bug, bug) for bug in all_bugs)) 

2382 

2383 # datetime objects are not JSON-serializable, convert them ourselves 

2384 for key in stats.keys(): 

2385 if hasattr(stats[key], 'strftime'): 

2386 stats[key] = stats[key].strftime('%a %d %b %Y') 

2387 

2388 action_item.extra_data = { 

2389 'stats': stats, 

2390 'removal_date': stats['removal_date'], 

2391 'version': stats.get('version', ''), 

2392 'bugs': ', '.join(link.format(bug, bug) for bug in stats['bugs']), 

2393 'bugs_dependencies': ', '.join( 

2394 link.format(bug, bug) for bug in bugs_dependencies), 

2395 'buggy_dependencies': 

2396 html_package_list(buggy_dependencies), 

2397 'reverse_dependencies': 

2398 html_package_list(reverse_dependencies), 

2399 'number_rdeps': len(reverse_dependencies)} 

2400 action_item.save() 

2401 

2402 def execute_main(self): 

2403 autoremovals_stats = self.get_autoremovals_stats() 

2404        if autoremovals_stats is None:

2405 # Nothing to do: cached content up to date 

2406 return 

2407 

2408 ActionItem.objects.delete_obsolete_items( 

2409 item_types=[self.action_item_type], 

2410 non_obsolete_packages=autoremovals_stats.keys()) 

2411 

2412 packages = SourcePackageName.objects.filter( 

2413 name__in=autoremovals_stats.keys()) 

2414 packages = packages.prefetch_related('action_items') 

2415 

2416 for package in packages: 

2417 self.update_action_item(package, autoremovals_stats[package.name]) 

2418 

2419 

2420class UpdatePackageScreenshotsTask(BaseTask): 

2421 """ 

2422 Check if a screenshot exists on screenshots.debian.net, and add a 

2423 key to PackageData if it does. 

2424 """ 

2425 

2426 class Scheduler(IntervalScheduler): 

2427 interval = 3600 * 24 

2428 

2429 DATA_KEY = 'screenshots' 

2430 

2431 def _get_screenshots(self): 

2432 url = 'https://screenshots.debian.net/json/packages' 

2433 content = get_resource_text(url, force_update=self.force_update, 

2434 only_if_updated=True) 

2435        if content is None:

2436 return 

2437 

2438 data = json.loads(content) 

2439 return data 

2440 

2441 def execute_main(self): 

2442 content = self._get_screenshots() 

2443        if content is None:

2444 return 

2445 

2446 packages_with_screenshots = [] 

2447 for item in content['packages']: 

2448 try: 

2449 package = SourcePackageName.objects.get(name=item['name']) 

2450 packages_with_screenshots.append(package) 

2451 except SourcePackageName.DoesNotExist: 

2452 pass 

2453 

2454 with transaction.atomic(): 

2455 PackageData.objects.filter(key='screenshots').delete() 

2456 

2457 data = [] 

2458 for package in packages_with_screenshots: 

2459 try: 

2460 screenshot_info = package.data.get(key=self.DATA_KEY) 

2461 screenshot_info.value['screenshots'] = 'true' 

2462 except PackageData.DoesNotExist: 

2463 screenshot_info = PackageData( 

2464 key=self.DATA_KEY, 

2465 package=package, 

2466 value={'screenshots': 'true'}) 

2467 

2468 data.append(screenshot_info) 

2469 

2470 PackageData.objects.bulk_create(data) 

2471 

2472 

2473class UpdateBuildReproducibilityTask(BaseTask): 

2474 

2475 class Scheduler(IntervalScheduler): 

2476 interval = 3600 * 6 

2477 

2478 BASE_URL = 'https://tests.reproducible-builds.org' 

2479 ACTION_ITEM_TYPE_NAME = 'debian-build-reproducibility' 

2480 ACTION_ITEM_TEMPLATE = 'debian/build-reproducibility-action-item.html' 

2481 ITEM_DESCRIPTION = { 

2482 'blacklisted': '<a href="{url}">Blacklisted</a> from build ' 

2483 'reproducibility testing', 

2484 'FTBFS': '<a href="{url}">Fails to build</a> during reproducibility ' 

2485 'testing', 

2486 'reproducible': None, 

2487 'FTBR': '<a href="{url}">Does not build reproducibly</a> ' 

2488 'during testing', 

2489 '404': None, 

2490 'not for us': None, 

2491 } 

2492 

2493 def initialize(self, *args, **kwargs): 

2494 super(UpdateBuildReproducibilityTask, self).initialize(*args, **kwargs) 

2495 self.action_item_type = ActionItemType.objects.create_or_update( 

2496 type_name=self.ACTION_ITEM_TYPE_NAME, 

2497 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2498 

2499 def get_build_reproducibility(self): 

2500 url = '{}/debian/reproducible-tracker.json'.format(self.BASE_URL) 

2501 content = get_resource_text(url, force_update=self.force_update, 

2502 only_if_updated=True) 

2503        if content is None:

2504 return 

2505 

2506 reproducibilities = json.loads(content) 

2507 packages = {} 

2508 for item in reproducibilities: 

2509 package = item['package'] 

2510 status = item['status'] 

2511 missing = package not in packages 

2512 important = self.ITEM_DESCRIPTION.get(status) is not None 

2513            if important or missing:

2514 packages[package] = status 

2515 

2516 return packages 

2517 

2518 def update_action_item(self, package, status): 

2519 description = self.ITEM_DESCRIPTION.get(status) 

2520 

2521 if not description: # Not worth an action item 

2522 return False 

2523 

2524 action_item = package.get_action_item_for_type( 

2525 self.action_item_type.type_name) 

2526        if action_item is None:

2527 action_item = ActionItem( 

2528 package=package, 

2529 item_type=self.action_item_type, 

2530 severity=ActionItem.SEVERITY_NORMAL) 

2531 

2532 url = "{}/debian/rb-pkg/{}.html".format(self.BASE_URL, package.name) 

2533 action_item.short_description = description.format(url=url) 

2534 action_item.save() 

2535 return True 

2536 

2537 def execute_main(self): 

2538 reproducibilities = self.get_build_reproducibility() 

2539        if reproducibilities is None:

2540 return 

2541 

2542 with transaction.atomic(): 

2543 PackageData.objects.filter(key='reproducibility').delete() 

2544 

2545 packages = [] 

2546 data = [] 

2547 

2548 for name, status in reproducibilities.items(): 

2549 try: 

2550 package = SourcePackageName.objects.get(name=name) 

2551 if self.update_action_item(package, status): 

2552 packages.append(package) 

2553 except SourcePackageName.DoesNotExist: 

2554 continue 

2555 

2556 reproducibility_info = PackageData( 

2557 key='reproducibility', 

2558 package=package, 

2559 value={'reproducibility': status}) 

2560 data.append(reproducibility_info) 

2561 

2562 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2563 packages) 

2564 PackageData.objects.bulk_create(data) 

2565 

2566 

2567class MultiArchHintsTask(BaseTask): 

2568 

2569 class Scheduler(IntervalScheduler): 

2570 interval = 3600 * 6 

2571 

2572 ACTIONS_WEB = 'https://wiki.debian.org/MultiArch/Hints' 

2573 ACTIONS_URL = 'https://dedup.debian.net/static/multiarch-hints.yaml' 

2574 ACTION_ITEM_TYPE_NAME = 'debian-multiarch-hints' 

2575 ACTION_ITEM_TEMPLATE = 'debian/multiarch-hints.html' 

2576 ACTION_ITEM_DESCRIPTION = \ 

2577 '<a href="{link}">Multiarch hinter</a> reports {count} issue(s)' 

2578 

2579 def initialize(self, *args, **kwargs): 

2580 super(MultiArchHintsTask, self).initialize(*args, **kwargs) 

2581 self.action_item_type = ActionItemType.objects.create_or_update( 

2582 type_name=self.ACTION_ITEM_TYPE_NAME, 

2583 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2584 self.SEVERITIES = {} 

2585 for value, name in ActionItem.SEVERITIES: 

2586 self.SEVERITIES[name] = value 

2587 

2588 def get_data(self): 

2589 data = get_resource_text(self.ACTIONS_URL) 

2590 if data: 

2591 return yaml.safe_load(data) 

2592 

2593 def get_packages(self): 

2594 data = self.get_data() 

2595 if data is None: 

2596 return 

2597 if data['format'] != 'multiarch-hints-1.0': 

2598 return None 

2599 data = data['hints'] 

2600 packages = collections.defaultdict(dict) 

2601 for item in data: 

2602 if 'source' not in item: 

2603 continue 

2604 package = item['source'] 

2605 wishlist = ActionItem.SEVERITY_WISHLIST 

2606 severity = self.SEVERITIES.get(item['severity'], wishlist) 

2607 pkg_severity = packages[package].get('severity', wishlist) 

2608 packages[package]['severity'] = max(severity, pkg_severity) 

2609 packages[package].setdefault('hints', []).append( 

2610 (item['description'], item['link'])) 

2611 return packages 

2612 
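    # Aggregation sketch: the highest severity among a package's hints
    # wins, e.g. a hypothetical 'normal' hint combined with a 'wishlist'
    # hint leaves packages['foo']['severity'] at the 'normal' level,
    # because max() above compares the numeric severity values.
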

2613 def update_action_item(self, package, severity, description, extra_data): 

2614 action_item = package.get_action_item_for_type( 

2615 self.action_item_type.type_name) 

2616 if action_item is None: 

2617 action_item = ActionItem( 

2618 package=package, 

2619 item_type=self.action_item_type) 

2620 action_item.severity = severity 

2621 action_item.short_description = description 

2622 action_item.extra_data = extra_data 

2623 action_item.save() 

2624 

2625 def execute_main(self): 

2626 packages = self.get_packages() 

2627 if not packages: 

2628 return 

2629 

2630 with transaction.atomic(): 

2631 for name, data in packages.items(): 

2632 try: 

2633 package = SourcePackageName.objects.get(name=name) 

2634 except SourcePackageName.DoesNotExist: 

2635 continue 

2636 

2637 description = self.ACTION_ITEM_DESCRIPTION.format( 

2638 count=len(data['hints']), link=self.ACTIONS_WEB) 

2639 self.update_action_item(package, data['severity'], description, 

2640 data['hints']) 

2641 

2642 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2643 packages.keys()) 

2644 

2645 

2646class UpdateVcsWatchTask(BaseTask): 

2647 """ 

2648 Updates packages' vcswatch stats. 

2649 """ 

2650 

2651 class Scheduler(IntervalScheduler): 

2652 interval = 3600 

2653 

2654 ACTION_ITEM_TYPE_NAME = 'vcswatch-warnings-and-errors' 

2655 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/vcswatch-action-item.html' 

2656 VCSWATCH_URL = 'https://qa.debian.org/cgi-bin/vcswatch?package=%(package)s' 

2657 VCSWATCH_DATA_URL = 'https://qa.debian.org/data/vcswatch/vcswatch.json.gz' 

2658 

2659 VCSWATCH_STATUS_DICT = { 

2660 "NEW": { 

2661 "description": 

2662 '<a href="{vcswatch_url}">version in VCS is newer</a> than in ' 

2663 'repository, is it time to upload?', 

2664 "severity": ActionItem.SEVERITY_NORMAL, 

2665 }, 

2666 "COMMITS": { 

2667 "description": 

2668 '<a href="{vcswatch_url}">{commits} new commit{commits_s}</a> ' 

2669 'since last upload, is it time to release?', 

2670 "severity": ActionItem.SEVERITY_NORMAL, 

2671 }, 

2672 "OLD": { 

2673 'description': 

2674 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2675 'date</a>, push the missing commits.', 

2676 "severity": ActionItem.SEVERITY_HIGH, 

2677 }, 

2678 "UNREL": { 

2679 "description": 

2680 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2681 'date</a>, push the missing commits.', 

2682 "severity": ActionItem.SEVERITY_HIGH, 

2683 }, 

2684 "ERROR": { 

2685 "description": 

2686 '<a href="{vcswatch_url}">Failed to analyze the VCS ' 

2687 'repository</a>. Please troubleshoot and fix the issue.', 

2688 "severity": ActionItem.SEVERITY_HIGH, 

2689 }, 

2690 "DEFAULT": { 

2691 "description": 

2692 '<a href="{url}">Unexpected status</a> ({status}) reported by ' 

2693 'VcsWatch.', 

2694 "severity": ActionItem.SEVERITY_HIGH, 

2695 }, 

2696 } 

2697 

2698 def initialize(self, *args, **kwargs): 

2699 super(UpdateVcsWatchTask, self).initialize(*args, **kwargs) 

2700 self.vcswatch_ai_type = ActionItemType.objects.create_or_update( 

2701 type_name=self.ACTION_ITEM_TYPE_NAME, 

2702 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE 

2703 ) 

2704 

2705 def get_vcswatch_data(self): 

2706 text = get_resource_text(self.VCSWATCH_DATA_URL) 

2707 

2708        if text is None:

2709 return 

2710 

2711 # There's some text, let's load! 

2712 data = json.loads(text) 

2713 

2714 out = {} 

2715        # This saves a lot of list searching later.

2716 for entry in data: 

2717 out[entry[u'package']] = entry 

2718 

2719 return out 

2720 

2721 def clean_package_info(self, package_infos_without_watch, todo): 

2722        """Takes a list of :class:`PackageData` entries which do not
2723        have a vcswatch entry, cleans them, and then schedules in `todo`
2724        what to do with them.

2725 """ 

2726 for package_info in package_infos_without_watch: 

2727            if 'QA' in package_info.value:

2728 package_info.value.pop('QA') 

2729 if (list(package_info.value.keys()) == ['checksum'] or 

2730 not package_info.value.keys()): 

2731 todo['drop']['package_infos'].append(package_info) 

2732 else: 

2733 package_info.value['checksum'] = get_data_checksum( 

2734 package_info.value 

2735 ) 

2736 todo['update']['package_infos'].append(package_info) 

2737 

2738 def update_action_item(self, package, vcswatch_data, action_item, todo): 

2739 """ 

2740 For a given :class:`ActionItem` and a given vcswatch data, updates 

2741 properly the todo dict if required. 

2742 

2743        The return value depends on what has been done. If something is to
2744        be updated, returns True; if nothing is to be updated, returns
2745        False; if the calling loop should `continue`, returns `None`.

2746 

2747 :rtype: bool or `None` 

2748 """ 

2749 

2750 package_status = vcswatch_data['status'] 

2751 

2752 if package_status == "OK": 

2753 # Everything is fine, let's purge the action item. Not the 

2754 # package extracted info as its QA url is still relevant. 

2755            if action_item:

2756 todo['drop']['action_items'].append(action_item) 

2757 

2758 # Nothing more to do! 

2759 return False 

2760 

2761 # NOT BEFORE "OK" check!! 

2762        if package_status not in self.VCSWATCH_STATUS_DICT:

2763 package_status = "DEFAULT" 

2764 

2765 # If we are here, then something is not OK. Let's check if we 

2766 # already had some intel regarding the current package status. 

2767 if action_item is None: 

2768 action_item = ActionItem( 

2769 package=package, 

2770 item_type=self.vcswatch_ai_type) 

2771 todo['add']['action_items'].append(action_item) 

2772 else: 

2773 todo['update']['action_items'].append(action_item) 

2774 

2775 # Computes the watch URL 

2776 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2777 

2778 if action_item.extra_data: 

2779 extra_data = action_item.extra_data 

2780 else: 

2781 extra_data = {} 

2782 

2783 # Fetches the long description and severity from 

2784 # the VCSWATCH_STATUS_DICT dict. 

2785 action_item.severity = \ 

2786 self.VCSWATCH_STATUS_DICT[package_status]['severity'] 

2787 

2788 nb_commits = int(vcswatch_data["commits"] or 0) 

2789 

2790 # The new data 

2791 new_extra_data = { 

2792 'vcswatch_url': vcswatch_url, 

2793 } 

2794 new_extra_data.update(vcswatch_data) 

2795 

2796 extra_data_match = all([ 

2797 new_extra_data[key] == extra_data.get(key, None) 

2798 for key in new_extra_data 

2799 ]) 

2800 

2801 # If everything is fine and we are not forcing the update 

2802 # then we proceed to the next package. 

2803        if extra_data_match and not self.force_update:

2804 # Remove from the todolist 

2805 todo['update']['action_items'].remove(action_item) 

2806 return False 

2807 else: 

2808 # Report for short description of the :class:`ActionItem` 

2809 desc = self.VCSWATCH_STATUS_DICT[package_status]['description'] 

2810 commits_s = 's' if nb_commits != 1 else '' 

2811 action_item.short_description = \ 

2812 desc.format(commits_s=commits_s, **new_extra_data) 

2813 action_item.extra_data = new_extra_data 

2814 return True 

2815 

2816 def update_package_info(self, package, vcswatch_data, package_info, key, 

2817 todo): 

2818 # Same thing with PackageData 

2819 if package_info is None: 

2820 package_info = PackageData( 

2821 package=package, 

2822 key=key, 

2823 ) 

2824 todo['add']['package_infos'].append(package_info) 

2825 else: 

2826 todo['update']['package_infos'].append(package_info) 

2827 

2828 # Computes the watch URL 

2829 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2830 

2831 new_value = dict(package_info.value) 

2832 if key == 'vcs_extra_links': 

2833 new_value['QA'] = vcswatch_url 

2834        elif key == 'vcswatch':

2835            if 'package_version' in vcswatch_data:

2836 new_value['package_version'] = vcswatch_data['package_version'] 

2837            if 'changelog_version' in vcswatch_data:

2838 new_value['changelog_version'] = vcswatch_data[ 

2839 'changelog_version'] 

2840            if 'changelog_distribution' in vcswatch_data:

2841 new_value['changelog_distribution'] = vcswatch_data[ 

2842 'changelog_distribution'] 

2843 

2844 new_value['checksum'] = get_data_checksum(new_value) 

2845 

2846 package_info_match = ( 

2847 new_value['checksum'] == package_info.value.get('checksum', None) 

2848 ) 

2849 

2850 if package_info_match and not self.force_update: 

2851 todo['update']['package_infos'].remove(package_info) 

2852 return False 

2853 else: 

2854 package_info.value = new_value 

2855 return True 

2856 

2857 def update_packages_item(self, packages, vcswatch_datas): 

2858 """Generates the lists of :class:`ActionItem` to be added, 

2859 deleted or updated regarding the status of their packages. 

2860 

2861 Categories of statuses are: 

2862 {u'COMMITS', u'ERROR', u'NEW', u'OK', u'OLD', u'UNREL'} 

2863 

2864        Basically, it fetches all info from :class:`PackageData` with the
2865        vcswatch-related keys; the entries without data matching
2866        vcswatch_datas are stored in one variable that is iterated through
2867        directly, and any previously stored intel is purged. Then, all
2868        entries in that queryset that have no relevant intel anymore are
2869        scheduled to be deleted. The others are only updated.

2870 

2871 All :class:`PackageData` matching vcswatch_datas 

2872 are stored in another variable. The same is done with the list of 

2873 :class:`ActionItem` that match this task type. 

2874 

2875        Then, it iterates over all vcswatch_datas' packages and tries to
2876        determine whether there is anything new; if so, it appropriately
2877        updates the prospective :class:`ActionItem` and :class:`PackageData`,
2878        and schedules them to be updated. If no data existed, then
2879        it creates them and schedules them to be added to the database.

2880 

2881 At the end, this function returns a dict of all instances of 

2882 :class:`ActionItem` and :class:`PackageData` stored 

2883 in subdicts depending on their class and what is to be done 

2884 with them. 

2885 

2886 :rtype: dict 

2887 

2888 """ 

2889 

2890 todo = { 

2891 'drop': { 

2892 'action_items': [], 

2893 'package_infos': [], 

2894 }, 

2895 'update': { 

2896 'action_items': [], 

2897 'package_infos': [], 

2898 }, 

2899 'add': { 

2900 'action_items': [], 

2901 'package_infos': [], 

2902 }, 

2903 } 

2904 

2905 package_info_keys = ['vcs_extra_links', 'vcswatch'] 

2906 package_infos = {} 

2907 for key in package_info_keys: 

2908 # Fetches all PackageData with a given key for packages having 

2909 # a vcswatch key. As the pair (package, key) is unique, there is a 

2910 # bijection between these data, and we fetch them classifying them 

2911 # by package name. 

2912 for package_info in PackageData.objects.select_related( 

2913 'package').filter(key=key).only('package__name', 'value'): 

2914 if package_info.package.name not in package_infos: 

2915 package_infos[package_info.package.name] = {} 

2916 package_infos[package_info.package.name][key] = package_info 

2917 

2918 # As :class:`PackageData` key=vcs_extra_links is shared, we 

2919 # have to clean up those with vcs watch_url that aren't in vcs_data 

2920 package_infos_without_watch = PackageData.objects.filter( 

2921 key='vcs_extra_links').exclude( 

2922 package__name__in=vcswatch_datas.keys()).only('value') 

2923 

2924 # Do the actual clean. 

2925 self.clean_package_info(package_infos_without_watch, todo) 

2926 

2927 # Fetches all :class:`ActionItem` for packages concerned by a vcswatch 

2928 # action. 

2929 action_items = { 

2930 action_item.package.name: action_item 

2931 for action_item in ActionItem.objects.select_related( 

2932 'package' 

2933 ).filter(item_type=self.vcswatch_ai_type) 

2934 } 

2935 

2936 for package in packages: 

2937 # Get the vcswatch_data from the whole vcswatch_datas 

2938 vcswatch_data = vcswatch_datas[package.name] 

2939 

2940 # Get the old action item for this warning, if it exists. 

2941 action_item = action_items.get(package.name, None) 

2942 

2943 # Updates the :class:`ActionItem`. If _continue is None, 

2944 # then there is nothing more to do with this package. 

2945 # If it is False, then no update is pending for the 

2946 # :class:`ActionItem`, else there is an update 

2947 # to do. 

2948 _ai_continue = self.update_action_item( 

2949 package, 

2950 vcswatch_data, 

2951 action_item, 

2952 todo) 

2953 

2954 _pi_continue = False 

2955 for key in package_info_keys: 

2956 try: 

2957 package_info = package_infos[package.name][key] 

2958 except KeyError: 

2959 package_info = None 

2960 

2961 _pi_continue |= self.update_package_info( 

2962 package, 

2963 vcswatch_data, 

2964 package_info, 

2965 key, 

2966 todo) 

2967 

2968 if not _ai_continue and not _pi_continue: 

2969 continue 

2970 

2971 return todo 

2972 
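    # For reference, the todo dict returned above has this shape:
    #   {'add':    {'action_items': [...], 'package_infos': [...]},
    #    'update': {'action_items': [...], 'package_infos': [...]},
    #    'drop':   {'action_items': [...], 'package_infos': [...]}}
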

2973 def execute_main(self): 

2974 # Get the actual vcswatch json file from qa.debian.org 

2975 vcs_data = self.get_vcswatch_data() 

2976 

2977 # Only fetch the packages that are in the json dict. 

2978 packages = PackageName.objects.filter(name__in=vcs_data.keys()) 

2979 

2980 # Faster than fetching the action items one by one in a loop 

2981 # when handling each package. 

2982 packages.prefetch_related('action_items') 

2983 

2984        # Determine whether something is to be kept or dropped.

2985 todo = self.update_packages_item(packages, vcs_data) 

2986 

2987 with transaction.atomic(): 

2988            # Delete the :class:`ActionItem` that are obsolete, and also
2989            # the :class:`PackageData` of the same packages.

2990 ActionItem.objects.delete_obsolete_items( 

2991 [self.vcswatch_ai_type], 

2992 vcs_data.keys()) 

2993 PackageData.objects.filter( 

2994 key='vcs_extra_links', 

2995 id__in=[ 

2996 package_info.id 

2997 for package_info in todo['drop']['package_infos'] 

2998 ] 

2999 ).delete() 

3000 

3001 # Then delete the :class:`ActionItem` that are to be deleted. 

3002 ActionItem.objects.filter( 

3003 item_type__type_name=self.vcswatch_ai_type.type_name, 

3004 id__in=[ 

3005 action_item.id 

3006 for action_item in todo['drop']['action_items'] 

3007 ] 

3008 ).delete() 

3009 

3010 # Then bulk_create the :class:`ActionItem` to add and the 

3011 # :class:`PackageData` 

3012 ActionItem.objects.bulk_create(todo['add']['action_items']) 

3013 PackageData.objects.bulk_create(todo['add']['package_infos']) 

3014 

3015 # Update existing entries 

3016 for action_item in todo['update']['action_items']: 

3017 action_item.save() 

3018 for package_info in todo['update']['package_infos']: 

3019 package_info.save() 

3020 

3021 

3022class TagPackagesWithRcBugs(BaseTask, PackageTagging): 

3023 """ 

3024 Performs an update of 'rc-bugs' tag for packages. 

3025 """ 

3026 

3027 class Scheduler(IntervalScheduler): 

3028 interval = 3600 

3029 

3030 TAG_NAME = 'tag:rc-bugs' 

3031 TAG_DISPLAY_NAME = 'rc bugs' 

3032 TAG_COLOR_TYPE = 'danger' 

3033 TAG_DESCRIPTION = 'The package has Release Critical bugs' 

3034 TAG_TABLE_TITLE = 'Packages with RC bugs' 

3035 

3036 def packages_to_tag(self): 

3037 all_bug_stats = PackageBugStats.objects.prefetch_related('package') 

3038 packages_list = [] 

3039 for bug_stats in all_bug_stats: 

3040 categories = bug_stats.stats 

3041 found = False 

3042            for category in categories:

3043 if found: 

3044 break 

3045 if category['category_name'] == 'rc':

3046 found = True 

3047 if category['bug_count'] > 0: 

3048 packages_list.append(bug_stats.package) 

3049 return packages_list 

3050 

3051 
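# Shape of PackageBugStats.stats as read by
# TagPackagesWithRcBugs.packages_to_tag() above: a list of per-category
# dicts carrying 'category_name' and 'bug_count' keys. The values below
# are invented, purely for illustration.
_EXAMPLE_RC_BUG_STATS = [
    {'category_name': 'rc', 'bug_count': 2},
    {'category_name': 'normal', 'bug_count': 5},
]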

3052class TagPackagesWithNewUpstreamVersion(BaseTask, PackageTagging): 

3053 """ 

3054 Performs an update of the 'new-upstream-version' tag for packages.

3055 """ 

3056 

3057 class Scheduler(IntervalScheduler): 

3058 interval = 3600 * 3 

3059 

3060 TAG_NAME = 'tag:new-upstream-version' 

3061 TAG_DISPLAY_NAME = 'new upstream version' 

3062 TAG_COLOR_TYPE = 'warning' 

3063 TAG_DESCRIPTION = 'The upstream has a newer version available' 

3064 TAG_TABLE_TITLE = 'Newer upstream version' 

3065 

3066 def packages_to_tag(self): 

3067 try: 

3068 action_type = ActionItemType.objects.get( 

3069 type_name='new-upstream-version') 

3070 except ActionItemType.DoesNotExist: 

3071 return [] 

3072 

3073 packages_list = [] 

3074 items = action_type.action_items.prefetch_related('package') 

3075 for item in items: 

3076 packages_list.append(item.package) 

3077 return packages_list 

3078 

3079 
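# For reference, TagPackagesWithNewUpstreamVersion.packages_to_tag()
# above amounts to the comprehension below over the prefetched action
# items; shown only as an illustration of the equivalent query.
def _new_upstream_packages_sketch(action_type):
    return [
        item.package
        for item in action_type.action_items.prefetch_related('package')
    ]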

3080class UpdateDependencySatisfactionTask(BaseTask): 

3081 """ 

3082 Fetches binary package installability results from qa.debian.org/dose.

3083 """ 

3084 

3085 class Scheduler(IntervalScheduler): 

3086 interval = 3600 * 3 

3087 

3088 BASE_URL = 'https://qa.debian.org/dose/debcheck/unstable_main/latest' 

3089 ACTION_ITEM_TYPE_NAME = 'debian-dependency-satisfaction' 

3090 ACTION_ITEM_TEMPLATE = 'debian/dependency-satisfaction-action-item.html' 

3091 

3092 def __init__(self, force_update=False, *args, **kwargs): 

3093 super(UpdateDependencySatisfactionTask, self).__init__(*args, **kwargs) 

3094 self.force_update = force_update 

3095 self.action_item_type = ActionItemType.objects.create_or_update( 

3096 type_name=self.ACTION_ITEM_TYPE_NAME, 

3097 full_description_template=self.ACTION_ITEM_TEMPLATE) 

3098 

3099 def set_parameters(self, parameters): 

3100 if 'force_update' in parameters: 

3101 self.force_update = parameters['force_update'] 

3102 

3103 def get_dependency_satisfaction(self): 

3104 url = '{}/each.txt'.format(self.BASE_URL) 

3105 content = get_resource_text(url, force_update=self.force_update, 

3106 only_if_updated=True) 

3107 if content is None:

3108 return 

3109 

3110 dep_sats = collections.defaultdict(set) 

3111 for i, line in enumerate(content.splitlines()): 

3112 binpkg_name, ver, isnative, anchor, expl, arches = line.split('#') 

3113 try: 

3114 bin_package = BinaryPackageName.objects.get(name=binpkg_name) 

3115 srcpkg_name = bin_package.main_source_package_name 

3116 except BinaryPackageName.DoesNotExist: 

3117 continue 

3118 arches = set([arch.strip() for arch in arches.split()]) 

3119 # TODO: retrieve this list programmatically, either from 

3120 # https://api.ftp-master.debian.org/suite/testing 

3121 # or from the Architecture field in the Release file 

3122 # for testing (both lists should be equal). 

3123 arches = arches.intersection( 

3124 {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips', 

3125 'mips64el', 'mipsel', 'ppc64el', 's390x'}) 

3126 # only report problems for release architectures 

3127 if not arches: 

3128 continue 

3129 # if the package is arch:all, only report problems on amd64 

3130 if isnative != "True": 

3131 arches = arches.intersection({"amd64"}) 

3132 if not arches: 

3133 continue 

3134 dep_sats[srcpkg_name].add( 

3135 (binpkg_name, ver, tuple(arches), expl, anchor)) 

3136 # turn sets into lists 

3137 dep_sats = {k: list(v) for k, v in dep_sats.items()}

3138 return dep_sats 

3139 

3140 def update_action_item(self, package, unsats): 

3141 action_item = package.get_action_item_for_type( 

3142 self.action_item_type.type_name) 

3143 if action_item is None:

3144 action_item = ActionItem( 

3145 package=package, 

3146 item_type=self.action_item_type, 

3147 severity=ActionItem.SEVERITY_HIGH) 

3148 action_item.short_description = \ 

3149 "{count} binary package{plural} {have} unsatisfiable " \ 

3150 "dependencies".format( 

3151 count=len(unsats), 

3152 plural='' if len(unsats) == 1 else 's', 

3153 have='has' if len(unsats) == 1 else 'have', 

3154 ) 

3155 action_item.extra_data = { 

3156 'unsats': unsats, 

3157 'base_url': '{}/packages/'.format(self.BASE_URL), 

3158 } 

3159 action_item.save() 

3160 

3161 def execute(self): 

3162 dep_sats = self.get_dependency_satisfaction() 

3163 if dep_sats is None:

3164 return 

3165 

3166 with transaction.atomic(): 

3167 PackageData.objects.filter(key='dependency_satisfaction').delete() 

3168 

3169 packages = [] 

3170 pkgdata_list = [] 

3171 

3172 for name, unsats in dep_sats.items(): 

3173 try: 

3174 package = SourcePackageName.objects.get(name=name) 

3175 packages.append(package) 

3176 self.update_action_item(package, unsats) 

3177 except SourcePackageName.DoesNotExist: 

3178 continue 

3179 

3180 dep_sat_info = PackageData( 

3181 key='dependency_satisfaction', 

3182 package=package, 

3183 value={'dependency_satisfaction': unsats}) 

3184 pkgdata_list.append(dep_sat_info) 

3185 

3186 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

3187 packages) 

3188 PackageData.objects.bulk_create(pkgdata_list) 

3189 

3190 
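# The TODO in UpdateDependencySatisfactionTask.get_dependency_satisfaction()
# above suggests retrieving the release architecture list programmatically.
# A rough sketch of that idea follows; it assumes (unverified) that the
# ftp-master API returns a JSON object with an 'architectures' list, and is
# an illustration rather than the implemented behaviour.
def _fetch_release_architectures_sketch():
    import json  # local import keeps the sketch self-contained

    content = get_resource_text(
        'https://api.ftp-master.debian.org/suite/testing',
        force_update=False, only_if_updated=False)
    if content is None:
        return set()
    return set(json.loads(content).get('architectures', []))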

3191class UpdateBuildDependencySatisfactionTask(BaseTask): 

3192 """ 

3193 Fetches source package installability results from qa.debian.org/dose.

3194 """ 

3195 

3196 class Scheduler(IntervalScheduler): 

3197 interval = 3600 * 3 

3198 

3199 BASE_URL = 'https://qa.debian.org/dose/debcheck/src_unstable_main/latest' 

3200 ACTION_ITEM_TYPE_NAME = 'debian-builddependency-satisfaction' 

3201 ACTION_ITEM_TEMPLATE = \ 

3202 'debian/builddependency-satisfaction-action-item.html' 

3203 

3204 def __init__(self, *args, **kwargs): 

3205 super(UpdateBuildDependencySatisfactionTask, self).__init__(*args, 

3206 **kwargs) 

3207 self.action_item_type = ActionItemType.objects.create_or_update( 

3208 type_name=self.ACTION_ITEM_TYPE_NAME, 

3209 full_description_template=self.ACTION_ITEM_TEMPLATE) 

3210 

3211 def get_dependency_satisfaction(self): 

3212 url = '{}/each.txt'.format(self.BASE_URL) 

3213 content = get_resource_text(url, force_update=self.force_update, 

3214 only_if_updated=True) 

3215 if content is None:

3216 return 

3217 

3218 dep_sats = collections.defaultdict(set) 

3219 for i, line in enumerate(content.splitlines()): 

3220 srcpkg_name, ver, isnative, anchor, expl, arches = line.split('#') 

3221 arches = set([arch.strip() for arch in arches.split()]) 

3222 # TODO: retrieve this list programmatically, either from 

3223 # https://api.ftp-master.debian.org/suite/testing 

3224 # or from the Architecture field in the Release file 

3225 # for testing (both lists should be equal). 

3226 arches = arches.intersection( 

3227 {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips', 

3228 'mips64el', 'mipsel', 'ppc64el', 's390x'}) 

3229 # only report problems for release architectures 

3230 if not arches: 

3231 continue 

3232 # if the source package only builds arch:all binary packages, only 

3233 # report problems on amd64 

3234 if isnative != "True": 

3235 arches = arches.intersection({"amd64"}) 

3236 if not arches: 

3237 continue 

3238 dep_sats[srcpkg_name].add( 

3239 (srcpkg_name, tuple(arches), expl, anchor)) 

3240 # turn sets into lists 

3241 dep_sats = {k: list(v) for k, v in dep_sats.items()}

3242 return dep_sats 

3243 

3244 def update_action_item(self, package, unsats): 

3245 action_item = package.get_action_item_for_type( 

3246 self.action_item_type.type_name) 

3247 if action_item is None:

3248 action_item = ActionItem( 

3249 package=package, 

3250 item_type=self.action_item_type, 

3251 severity=ActionItem.SEVERITY_HIGH) 

3252 action_item.short_description = \ 

3253 "source package has {count} unsatisfiable " \ 

3254 "build dependenc{plural}".format( 

3255 count=len(unsats), 

3256 plural='y' if len(unsats) == 1 else 'ies', 

3257 ) 

3258 action_item.extra_data = { 

3259 'unsats': unsats, 

3260 'base_url': '{}/packages/'.format(self.BASE_URL), 

3261 } 

3262 action_item.save() 

3263 

3264 def execute(self): 

3265 dep_sats = self.get_dependency_satisfaction() 

3266 if dep_sats is None:

3267 return 

3268 

3269 with transaction.atomic(): 

3270 PackageData.objects.filter( 

3271 key='builddependency_satisfaction').delete() 

3272 

3273 packages = [] 

3274 pkgdata_list = [] 

3275 

3276 for name, unsats in dep_sats.items(): 

3277 try: 

3278 package = SourcePackageName.objects.get(name=name) 

3279 packages.append(package) 

3280 self.update_action_item(package, unsats) 

3281 except SourcePackageName.DoesNotExist: 

3282 continue 

3283 

3284 dep_sat_info = PackageData( 

3285 key='builddependency_satisfaction', 

3286 package=package, 

3287 value={'builddependency_satisfaction': unsats}) 

3288 pkgdata_list.append(dep_sat_info) 

3289 

3290 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

3291 packages) 

3292 PackageData.objects.bulk_create(pkgdata_list) 

3293 

3294 
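# Both dose tasks above unpack '#'-separated lines from each.txt. The
# sketch below shows that unpacking on a made-up sample line; the field
# names follow the unpacking in get_dependency_satisfaction().
def _parse_dose_each_line_sketch(line):
    # e.g. "foo#1.2-3#True#abc123#unsatisfied dependency on bar#amd64 arm64"
    name, ver, isnative, anchor, expl, arches = line.split('#')
    return {
        'name': name,
        'version': ver,
        # per the checks above, anything but "True" is treated as arch:all
        'isnative': isnative == "True",
        'anchor': anchor,
        'explanation': expl,
        'architectures': {arch.strip() for arch in arches.split()},
    }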

3295class UpdateDl10nStatsTask(BaseTask): 

3296 """ 

3297 Updates packages' l10n statistics. 

3298 """ 

3299 

3300 class Scheduler(IntervalScheduler): 

3301 interval = 3600 * 6 

3302 

3303 ACTION_ITEM_TYPE_NAME = 'dl10n' 

3304 ITEM_DESCRIPTION = \ 

3305 '<a href="{url}">Issues</a> found with some translations' 

3306 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/dl10n-action-item.html' 

3307 

3308 def initialize(self, *args, **kwargs): 

3309 super(UpdateDl10nStatsTask, self).initialize(*args, **kwargs) 

3310 self.l10n_action_item_type = \ 

3311 ActionItemType.objects.create_or_update( 

3312 type_name=self.ACTION_ITEM_TYPE_NAME, 

3313 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

3314 

3315 def _load_l10n_stats(self): 

3316 url = 'https://i18n.debian.org/l10n-pkg-status/pkglist' 

3317 content = get_resource_text(url, force_update=self.force_update, 

3318 only_if_updated=True) 

3319 if content is None:

3320 return 

3321 

3322 def parse_score(score): 

3323 if score == '-': 

3324 return None 

3325 return int(score) 

3326 

3327 all_stats = {} 

3328 

3329 # The format of the file is (copied from its header): 

3330 # <package> <version> (<comma separated scores>) <link> <todo>

3331 line_re = re.compile( 

3332 r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)') 

3333 for line in content.splitlines(): 

3334 if not line or line.startswith('#'):

3335 continue 

3336 match = line_re.search(line) 

3337 if not match:

3338 logger.warning('Failed to parse l10n pkglist line: %s', line) 

3339 continue 

3340 

3341 src_pkgname = match.group(1) 

3342 try: 

3343 scores = match.group(3).split(',') 

3344 score_debian = parse_score(scores[0]) 

3345 score_other = parse_score(scores[1]) 

3346 # <todo> is a "0" or "1" string, so convert through int to get 

3347 # a proper bool 

3348 todo = bool(int(match.group(5))) 

3349 except (IndexError, ValueError): 

3350 logger.warning( 

3351 'Failed to parse l10n scores: %s', 

3352 line, exc_info=1) 

3353 continue 

3354 link = match.group(4) 

3355 if not score_debian and not score_other:

3356 continue 

3357 

3358 all_stats[src_pkgname] = { 

3359 'score_debian': score_debian, 

3360 'score_other': score_other, 

3361 'link': link, 

3362 'todo': todo, 

3363 } 

3364 

3365 return all_stats 

3366 

3367 def update_action_item(self, package, package_stats): 

3368 todo = package_stats['todo'] 

3369 

3370 # Get the old action item, if it exists. 

3371 l10n_action_item = package.get_action_item_for_type( 

3372 self.l10n_action_item_type.type_name) 

3373 if not todo: 

3374 if l10n_action_item: 

3375 # If the item previously existed, delete it now since there 

3376 # are no longer any warnings/errors. 

3377 l10n_action_item.delete() 

3378 return 

3379 

3380 # The package didn't previously have an action item: create it now

3381 if l10n_action_item is None: 

3382 desc = self.ITEM_DESCRIPTION.format(url=package_stats['link']) 

3383 l10n_action_item = ActionItem( 

3384 package=package, 

3385 item_type=self.l10n_action_item_type, 

3386 severity=ActionItem.SEVERITY_LOW, 

3387 short_description=desc) 

3388 

3389 if l10n_action_item.extra_data: 

3390 old_extra_data = l10n_action_item.extra_data 

3391 if old_extra_data == package_stats:

3392 # No need to update 

3393 return 

3394 

3395 l10n_action_item.extra_data = package_stats 

3396 

3397 l10n_action_item.save() 

3398 

3399 def execute_main(self): 

3400 stats = self._load_l10n_stats() 

3401 if not stats:

3402 return 

3403 

3404 with transaction.atomic(): 

3405 PackageData.objects.filter(key='dl10n').delete() 

3406 

3407 packages = [] 

3408 pkgdata_list = [] 

3409 

3410 for name, stat in stats.items(): 

3411 try: 

3412 package = SourcePackageName.objects.get(name=name) 

3413 packages.append(package) 

3414 self.update_action_item(package, stat) 

3415 except SourcePackageName.DoesNotExist: 

3416 continue 

3417 

3418 dl10n_stat = PackageData( 

3419 key='dl10n', 

3420 package=package, 

3421 value=stat) 

3422 pkgdata_list.append(dl10n_stat) 

3423 

3424 ActionItem.objects.delete_obsolete_items( 

3425 [self.l10n_action_item_type], packages) 

3426 PackageData.objects.bulk_create(pkgdata_list) 

3427 

3428 
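# A small, self-contained illustration of the pkglist line format parsed
# by UpdateDl10nStatsTask._load_l10n_stats() above. The sample line in
# the comment is invented; the regex and score handling mirror the task.
def _parse_l10n_pkglist_line_sketch(line):
    # e.g. "foo 1.2-3 (10,-) https://i18n.debian.org/l10n-pkg-status/foo.html 1"
    line_re = re.compile(
        r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)')
    match = line_re.search(line)
    if not match:
        return None

    def parse_score(score):
        return None if score == '-' else int(score)

    scores = match.group(3).split(',')
    return {
        'package': match.group(1),
        'version': match.group(2),
        'score_debian': parse_score(scores[0]),
        'score_other': parse_score(scores[1]),
        'link': match.group(4),
        'todo': bool(int(match.group(5))),
    }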

3429class UpdateDebianPatchesTask(BaseTask, ImportExternalData): 

3430 """ 

3431 Import statistics about Debian patches from UDD. 

3432 """ 

3433 

3434 class Scheduler(IntervalScheduler): 

3435 interval = 3600 * 6 

3436 

3437 data_url = 'https://udd.debian.org/patches.cgi?json=1' 

3438 action_item_types = [ 

3439 { 

3440 'type_name': 'debian-patches', 

3441 'full_description_template': 

3442 'debian/debian-patches-action-item.html', 

3443 }, 

3444 ] 

3445 

3446 def generate_package_data(self): 

3447 pkgdata = {} 

3448 for entry in self.external_data: 

3449 source = entry.get('source') 

3450 if source:

3451 data = entry.copy() 

3452 data['url'] = self._generate_url(entry) 

3453 pkgdata[source] = data 

3454 

3455 return [ 

3456 ('debian-patches', pkgdata), 

3457 ] 

3458 

3459 @staticmethod 

3460 def _generate_url(entry): 

3461 query_string = urlencode({ 

3462 'src': entry.get('source'), 

3463 'version': entry.get('version'), 

3464 }) 

3465 return f"https://udd.debian.org/patches.cgi?{query_string}" 

3466 

3467 def generate_action_items(self): 

3468 pkgdata = {} 

3469 for entry in self.external_data: 

3470 # Skip invalid entries and those without (problematic) patches 

3471 source = entry.get('source') 

3472 forwarded_invalid = entry.get('forwarded_invalid', 0) 

3473 forwarded_no = entry.get('forwarded_no', 0) 

3474 if not source:

3475 continue # Invalid, no source package data 

3476 if entry.get('status') != 'patches': 

3477 continue # No patch at all 

3478 if forwarded_invalid == 0 and forwarded_no == 0: 

3479 continue # No problematic patch 

3480 

3481 # Build the parameters for the action item

3482 severity = ActionItem.SEVERITY_LOW 

3483 desc = '' 

3484 url = self._generate_url(entry) 

3485 

3486 if forwarded_invalid: 

3487 severity = ActionItem.SEVERITY_HIGH 

3488 count = f"{forwarded_invalid} patch" 

3489 if forwarded_invalid > 1: 

3490 count += 'es' 

3491 count = f'<a href="{url}">{count}</a>' 

3492 desc += f"{count} with invalid metadata" 

3493 

3494 if forwarded_no: 

3495 if desc: 

3496 desc += ', ' 

3497 count = f"{forwarded_no} patch" 

3498 if forwarded_no > 1: 

3499 count += 'es' 

3500 count = f'<a href="{url}">{count}</a>' 

3501 desc += f"{count} to forward upstream" 

3502 

3503 extra_data = entry.copy() 

3504 extra_data['url'] = url 

3505 

3506 # Record the action item parameters 

3507 pkgdata[source] = { 

3508 'short_description': f"debian/patches: {desc}", 

3509 'severity': severity, 

3510 'extra_data': extra_data, 

3511 } 

3512 

3513 return [ 

3514 ('debian-patches', pkgdata), 

3515 ]
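
# Worked example (with invented data) of the URL produced by
# UpdateDebianPatchesTask._generate_url() above: urlencode() turns the
# source/version pair into the query string of the UDD patches CGI.
def _debian_patches_url_sketch():
    from django.utils.http import urlencode  # same helper the task uses

    entry = {'source': 'foo', 'version': '1.2-3'}  # hypothetical entry
    query_string = urlencode({
        'src': entry.get('source'),
        'version': entry.get('version'),
    })
    # query_string == "src=foo&version=1.2-3"
    return f"https://udd.debian.org/patches.cgi?{query_string}"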