1# Copyright 2013-2021 The Distro Tracker Developers 

2# See the COPYRIGHT file at the top-level directory of this distribution and 

3# at https://deb.li/DTAuthors 

4# 

5# This file is part of Distro Tracker. It is subject to the license terms 

6# in the LICENSE file found in the top-level directory of this 

7# distribution and at https://deb.li/DTLicense. No part of Distro Tracker, 

8# including this file, may be copied, modified, propagated, or distributed 

9# except according to the terms contained in the LICENSE file. 

10 

11""" 

12Debian-specific tasks. 

13""" 

14 

15import collections 

16import itertools 

17import json 

18import logging 

19import os 

20import re 

21from enum import Enum 

22 

23from bs4 import BeautifulSoup as soup 

24 

25from debian import deb822, debian_support 

26from debian.debian_support import AptPkgVersion 

27 

28import debianbts 

29 

30from django.conf import settings 

31from django.core.exceptions import ValidationError 

32from django.db import transaction 

33from django.db.models import Prefetch 

34from django.utils.http import urlencode 

35 

36import yaml 

37 

38from distro_tracker.accounts.models import UserEmail 

39from distro_tracker.core.models import ( 

40 ActionItem, 

41 ActionItemType, 

42 BinaryPackageBugStats, 

43 BinaryPackageName, 

44 BugDisplayManagerMixin, 

45 PackageBugStats, 

46 PackageData, 

47 PackageName, 

48 Repository, 

49 SourcePackageDeps, 

50 SourcePackageName 

51) 

52from distro_tracker.core.tasks import BaseTask 

53from distro_tracker.core.tasks.mixins import ImportExternalData, PackageTagging 

54from distro_tracker.core.tasks.schedulers import IntervalScheduler 

55from distro_tracker.core.utils import get_or_none 

56from distro_tracker.core.utils.http import get_resource_text 

57from distro_tracker.core.utils.misc import get_data_checksum 

58from distro_tracker.core.utils.packages import ( 

59 html_package_list, 

60 package_url 

61) 

62from distro_tracker.vendor.debian.models import ( 

63 BuildLogCheckStats, 

64 LintianStats, 

65 PackageExcuses, 

66 PackageTransition, 

67 UbuntuPackage 

68) 

69 

70from .models import DebianContributor 

71 

72logger = logging.getLogger(__name__) 

73logger_input = logging.getLogger('distro_tracker.input') 

74 

75 

76class RetrieveDebianMaintainersTask(BaseTask): 

77 """ 

78 Retrieves (and updates if necessary) a list of Debian Maintainers. 

79 """ 

80 

81 class Scheduler(IntervalScheduler): 

82 interval = 3600 * 24 

83 

84 def execute_main(self): 

85 url = "https://ftp-master.debian.org/dm.txt" 

86 content = get_resource_text(url, force_update=self.force_update, 

87 only_if_updated=True) 

88 if content is None:

89 # No need to do anything if the cached resource has not been updated

90 return 

91 

92 maintainers = {} 

93 lines = content.splitlines() 

94 for stanza in deb822.Deb822.iter_paragraphs(lines): 

95 if 'Uid' in stanza and 'Allow' in stanza:

96 # Allow is a comma-separated string of 'package (DD fpr)' items, 

97 # where DD fpr is the fingerprint of the DD that granted the 

98 # permission 
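# Illustrative (not real) dm.txt stanza showing the two fields used here:
#   Fingerprint: 0123456789ABCDEF0123456789ABCDEF01234567
#   Uid: Jane Doe <jane@example.org>
#   Allow: foo (89ABCDEF0123456789ABCDEF0123456789ABCDEF),
#          bar (89ABCDEF0123456789ABCDEF0123456789ABCDEF)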

99 name, email = stanza['Uid'].rsplit(' ', 1) 

100 email = email.strip('<>') 

101 for pair in stanza['Allow'].split(','): 

102 pair = pair.strip() 

103 pkg, dd_fpr = pair.split() 

104 maintainers.setdefault(email, []) 

105 maintainers[email].append(pkg) 

106 

107 # Now update the developer information 

108 with transaction.atomic(): 

109 # Reset all old maintainers first. 

110 qs = DebianContributor.objects.filter(is_debian_maintainer=True) 

111 qs.update(is_debian_maintainer=False) 

112 

113 for email, packages in maintainers.items(): 

114 try: 

115 user_email, _ = UserEmail.objects.get_or_create(email=email) 

116 except ValidationError: 

117 logger_input.info('%s refers to invalid email "%s".', 

118 url, email) 

119 continue 

120 

121 contributor, _ = DebianContributor.objects.get_or_create( 

122 email=user_email) 

123 

124 contributor.is_debian_maintainer = True 

125 contributor.allowed_packages = packages 

126 contributor.save() 

127 

128 

129class RetrieveLowThresholdNmuTask(BaseTask): 

130 """ 

131 Updates the list of Debian Maintainers who agree with the low-threshold

132 NMU. 

133 """ 

134 

135 class Scheduler(IntervalScheduler): 

136 interval = 3600 * 24 

137 

138 def _retrieve_emails(self): 

139 """ 

140 Helper function which obtains the list of emails of maintainers that 

141 agree with the low-threshold NMU.

142 """ 

143 url = 'https://wiki.debian.org/LowThresholdNmu?action=raw' 

144 content = get_resource_text(url, force_update=self.force_update, 

145 only_if_updated=True) 

146 if content is None:

147 return 

148 

149 emails = [] 

150 devel_php_RE = re.compile( 

151 r'https?://qa\.debian\.org/developer\.php\?login=([^\s&|]+)') 

152 word_RE = re.compile(r'^\w+$') 
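# A hypothetical wiki line that the loop below would handle:
#   * [[https://qa.debian.org/developer.php?login=jdoe|Jane Doe]]
# yields the email jdoe@debian.org; a login containing '@' is used as-is.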

153 for line in content.splitlines(): 

154 match = devel_php_RE.search(line) 

155 while match: # look for several matches on the same line 

156 email = None 

157 login = match.group(1) 

158 if word_RE.match(login): 

159 email = login + '@debian.org' 

160 elif login.find('@') >= 0:

161 email = login 

162 if email:

163 emails.append(email) 

164 line = line[match.end():] 

165 match = devel_php_RE.search(line) 

166 return emails 

167 

168 def execute_main(self): 

169 emails = self._retrieve_emails() 

170 with transaction.atomic(): 

171 # Reset all threshold flags first. 

172 qs = DebianContributor.objects.filter( 

173 agree_with_low_threshold_nmu=True) 

174 qs.update(agree_with_low_threshold_nmu=False) 

175 

176 for email in emails: 

177 try: 

178 email, _ = UserEmail.objects.get_or_create(email=email) 

179 except ValidationError: 

180 logger_input.info( 

181 'LowThresholdNmu refers to invalid email "%s".', email) 

182 continue 

183 

184 contributor, _ = DebianContributor.objects.get_or_create( 

185 email=email) 

186 

187 contributor.agree_with_low_threshold_nmu = True 

188 contributor.save() 

189 

190 

191class UpdatePackageBugStats(BaseTask, BugDisplayManagerMixin): 

192 """ 

193 Updates the BTS bug stats for all packages (source, binary and pseudo). 

194 Creates :class:`distro_tracker.core.models.ActionItem` instances for packages

195 which have bugs tagged help or patch. 

196 """ 

197 

198 class Scheduler(IntervalScheduler): 

199 interval = 3600 

200 

201 PATCH_BUG_ACTION_ITEM_TYPE_NAME = 'debian-patch-bugs-warning' 

202 HELP_BUG_ACTION_ITEM_TYPE_NAME = 'debian-help-bugs-warning' 

203 

204 PATCH_ITEM_SHORT_DESCRIPTION = ( 

205 '<a href="{url}">{count}</a> tagged patch in the ' 

206 '<abbr title="Bug Tracking System">BTS</abbr>') 

207 HELP_ITEM_SHORT_DESCRIPTION = ( 

208 '<a href="{url}">{count}</a> tagged help in the ' 

209 '<abbr title="Bug Tracking System">BTS</abbr>') 

210 PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/patch-bugs-action-item.html' 

211 HELP_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/help-bugs-action-item.html' 

212 

213 bug_categories = ( 

214 'rc', 

215 'normal', 

216 'wishlist', 

217 'fixed', 

218 'patch', 

219 ) 

220 

221 def initialize(self, *args, **kwargs): 

222 super(UpdatePackageBugStats, self).initialize(*args, **kwargs) 

223 # The :class:`distro_tracker.core.models.ActionItemType` instances which 

224 # this task can create. 

225 self.patch_item_type = ActionItemType.objects.create_or_update( 

226 type_name=self.PATCH_BUG_ACTION_ITEM_TYPE_NAME, 

227 full_description_template=self.PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE) 

228 self.help_item_type = ActionItemType.objects.create_or_update( 

229 type_name=self.HELP_BUG_ACTION_ITEM_TYPE_NAME, 

230 full_description_template=self.HELP_ITEM_FULL_DESCRIPTION_TEMPLATE) 

231 

232 def _get_tagged_bug_stats(self, tag, user=None): 

233 """ 

234 Using the BTS interface, retrieves the statistics of bugs with a 

235 particular tag. 

236 

237 :param tag: The tag for which the statistics are required. 

238 :type tag: string 

239 :param user: The email of the user who tagged the bug with the given 

240 tag. 

241 :type user: string 

242 

243 :returns: A dict mapping package names to the count of bugs with the 

244 given tag. 

245 """ 

246 debian_ca_bundle = '/etc/ssl/ca-debian/ca-certificates.crt' 

247 if os.path.exists(debian_ca_bundle): 

248 os.environ['SSL_CERT_FILE'] = debian_ca_bundle 

249 if user: 

250 bug_numbers = debianbts.get_usertag(user, tags=[tag]).get(tag, []) 

251 else: 

252 bug_numbers = debianbts.get_bugs(tag=tag) 

253 

254 # Match each retrieved bug ID to a package and then find the aggregate 

255 # count for each package. 

256 bug_stats = {} 

257 bugs = debianbts.get_status(bug_numbers) 

258 for bug in bugs: 

259 if bug.done or bug.fixed_versions or bug.pending == 'done': 

260 continue 

261 

262 bug_stats.setdefault(bug.package, 0) 

263 bug_stats[bug.package] += 1 

264 

265 return bug_stats 

266 

267 def _extend_bug_stats(self, bug_stats, extra_stats, category_name): 

268 """ 

269 Helper method which adds extra bug stats to an already existing list of 

270 stats. 

271 

272 :param bug_stats: An already existing list of bug stats. Maps package 

273 names to list of bug category descriptions. 

274 :type bug_stats: dict 

275 :param extra_stats: Extra bug stats which should be added to 

276 ``bug_stats``. Maps package names to integers representing bug 

277 counts. 

278 :type extra_stats: dict 

279 :param category_name: The name of the bug category which is being added 

280 :type category_name: string 

281 """ 

282 for package, count in extra_stats.items(): 

283 bug_stats.setdefault(package, []) 

284 bug_stats[package].append({ 

285 'category_name': category_name, 

286 'bug_count': count, 

287 }) 

288 

289 def _create_patch_bug_action_item(self, package, bug_stats): 

290 """ 

291 Creates a :class:`distro_tracker.core.models.ActionItem` instance for 

292 the given package if it contains any bugs tagged patch. 

293 

294 :param package: The package for which the action item should be 

295 updated. 

296 :type package: :class:`distro_tracker.core.models.PackageName` 

297 :param bug_stats: A dictionary mapping category names to structures 

298 describing those categories. Those structures should be 

299 identical to the ones stored in the :class:`PackageBugStats` 

300 instance. 

301 :type bug_stats: dict 

302 """ 

303 # Get the old action item, if any 

304 action_item = package.get_action_item_for_type( 

305 self.PATCH_BUG_ACTION_ITEM_TYPE_NAME) 

306 

307 if 'patch' not in bug_stats or bug_stats['patch']['bug_count'] == 0: 

308 # Remove the old action item, since the package does not have any 

309 # bugs tagged patch anymore. 

310 if action_item is not None: 

311 action_item.delete() 

312 return 

313 

314 # If the package has bugs tagged patch, update the action item 

315 if action_item is None: 

316 action_item = ActionItem( 

317 package=package, 

318 item_type=self.patch_item_type) 

319 

320 bug_count = bug_stats['patch']['bug_count'] 

321 # Include the URL in the short description 

322 url = self.bug_manager.get_bug_tracker_url( 

323 package.name, 'source', 'patch') 

324 if not url:

325 url = '' 

326 # Include the bug count in the short description 

327 count = '{bug_count} bug'.format(bug_count=bug_count) 

328 if bug_count > 1: 

329 count += 's' 

330 action_item.short_description = \ 

331 self.PATCH_ITEM_SHORT_DESCRIPTION.format(url=url, count=count) 

332 # Set additional URLs and merged bug count in the extra data for a full 

333 # description 

334 action_item.extra_data = { 

335 'bug_count': bug_count, 

336 'merged_count': bug_stats['patch'].get('merged_count', 0), 

337 'url': url, 

338 'merged_url': self.bug_manager.get_bug_tracker_url( 

339 package.name, 'source', 'patch-merged'), 

340 } 

341 action_item.save() 

342 

343 def _create_help_bug_action_item(self, package, bug_stats): 

344 """ 

345 Creates a :class:`distro_tracker.core.models.ActionItem` instance for 

346 the given package if it contains any bugs tagged help. 

347 

348 :param package: The package for which the action item should be 

349 updated. 

350 :type package: :class:`distro_tracker.core.models.PackageName` 

351 :param bug_stats: A dictionary mapping category names to structures 

352 describing those categories. Those structures should be 

353 identical to the ones stored in the :class:`PackageBugStats` 

354 instance. 

355 :type bug_stats: dict 

356 """ 

357 # Get the old action item, if any 

358 action_item = package.get_action_item_for_type( 

359 self.HELP_BUG_ACTION_ITEM_TYPE_NAME) 

360 

361 if 'help' not in bug_stats or bug_stats['help']['bug_count'] == 0: 

362 # Remove the old action item, since the package does not have any 

363 # bugs tagged help anymore.

364 if action_item is not None: 

365 action_item.delete() 

366 return 

367 

368 # If the package has bugs tagged help, update the action item

369 if action_item is None: 

370 action_item = ActionItem( 

371 package=package, 

372 item_type=self.help_item_type) 

373 

374 bug_count = bug_stats['help']['bug_count'] 

375 # Include the URL in the short description 

376 url = self.bug_manager.get_bug_tracker_url( 

377 package.name, 'source', 'help') 

378 if not url:

379 url = '' 

380 # Include the bug count in the short description 

381 count = '{bug_count} bug'.format(bug_count=bug_count) 

382 if bug_count > 1: 

383 count += 's' 

384 action_item.short_description = self.HELP_ITEM_SHORT_DESCRIPTION.format( 

385 url=url, count=count) 

386 # Set additional URLs and merged bug count in the extra data for a full 

387 # description 

388 action_item.extra_data = { 

389 'bug_count': bug_count, 

390 'url': url, 

391 } 

392 action_item.save() 

393 

394 def _create_action_items(self, package_bug_stats): 

395 """ 

396 Method which creates a :class:`distro_tracker.core.models.ActionItem` 

397 instance for a package based on the given package stats. 

398 

399 For now, an action item is created if the package has bugs tagged

400 help or patch.

401 """ 

402 # Transform the bug stats to a structure easier to pass to functions 

403 # for particular bug-category action items. 
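# e.g. (illustrative) [{'category_name': 'patch', 'bug_count': 2}] becomes
# {'patch': {'category_name': 'patch', 'bug_count': 2}}.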

404 bug_stats = { 

405 category['category_name']: category 

406 for category in package_bug_stats.stats 

407 } 

408 package = package_bug_stats.package 

409 self._create_patch_bug_action_item(package, bug_stats) 

410 self._create_help_bug_action_item(package, bug_stats) 

411 

412 def _get_udd_bug_stats(self): 

413 url = 'https://udd.debian.org/cgi-bin/ddpo-bugs.cgi' 

414 response_content = get_resource_text(url) 

415 if not response_content: 

416 return 

417 

418 # Each line in the response should be bug stats for a single package 
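# A hypothetical line, matching what the parser below expects:
#   src:foo:3(1) 10(0) 2(0) 1(0) 4(2)
# i.e. count(merged) pairs for the categories in self.bug_categories.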

419 bug_stats = {} 

420 for line in response_content.splitlines(): 

421 line = line.strip() 

422 try: 

423 package_name, bug_counts = line, '' 

424 if line.startswith('src:'): 

425 src, package_name, bug_counts = line.split(':', 2) 

426 else: 

427 package_name, bug_counts = line.split(':', 1) 

428 # Merged counts are in parentheses so remove those before 

429 # splitting the numbers 

430 bug_counts = re.sub(r'[()]', ' ', bug_counts).split() 

431 bug_counts = [int(count) for count in bug_counts] 

432 except ValueError: 

433 logger.warning( 

434 'Failed to parse bug information for %s: %s', 

435 package_name, bug_counts, exc_info=1) 

436 continue 

437 

438 # Match the extracted counts with category names 

439 bug_stats[package_name] = [ 

440 { 

441 'category_name': category_name, 

442 'bug_count': bug_count, 

443 'merged_count': merged_count, 

444 } 

445 for category_name, (bug_count, merged_count) in zip( 

446 self.bug_categories, zip(bug_counts[::2], bug_counts[1::2])) 

447 ] 

448 

449 return bug_stats 

450 

451 def _remove_obsolete_action_items(self, package_names): 

452 """ 

453 Removes action items for packages which no longer have any bug stats. 

454 """ 

455 ActionItem.objects.delete_obsolete_items( 

456 item_types=[self.patch_item_type, self.help_item_type], 

457 non_obsolete_packages=package_names) 

458 

459 def update_source_and_pseudo_bugs(self): 

460 """ 

461 Performs the update of bug statistics for source and pseudo packages. 

462 """ 

463 # First get the bug stats exposed by the UDD. 

464 bug_stats = self._get_udd_bug_stats() 

465 if not bug_stats: 

466 bug_stats = {} 

467 

468 # Add in help bugs from the BTS interface 

469 try: 

470 help_bugs = self._get_tagged_bug_stats('help') 

471 self._extend_bug_stats(bug_stats, help_bugs, 'help') 

472 except RuntimeError: 

473 logger.exception("Could not get bugs tagged help") 

474 

475 # Add in newcomer bugs from the BTS interface 

476 try: 

477 newcomer_bugs = self._get_tagged_bug_stats('newcomer') 

478 self._extend_bug_stats(bug_stats, newcomer_bugs, 'newcomer') 

479 except RuntimeError: 

480 logger.exception("Could not get bugs tagged newcomer") 

481 

482 with transaction.atomic(): 

483 # Clear previous stats 

484 PackageBugStats.objects.all().delete() 

485 self._remove_obsolete_action_items(bug_stats.keys()) 

486 # Get all packages which have updated stats, along with their 

487 # action items in 2 DB queries. 

488 packages = PackageName.objects.filter(name__in=bug_stats.keys()) 

489 packages.prefetch_related('action_items') 

490 

491 # Update stats and action items. 

492 stats = [] 

493 for package in packages: 

494 # Save the raw package bug stats 

495 package_bug_stats = PackageBugStats( 

496 package=package, stats=bug_stats[package.name]) 

497 stats.append(package_bug_stats) 

498 

499 # Add action items for the package. 

500 self._create_action_items(package_bug_stats) 

501 

502 PackageBugStats.objects.bulk_create(stats) 

503 

504 def update_binary_bugs(self): 

505 """ 

506 Performs the update of bug statistics for binary packages. 

507 """ 

508 url = 'https://udd.debian.org/cgi-bin/bugs-binpkgs-pts.cgi' 

509 response_content = get_resource_text(url) 

510 if not response_content: 

511 return 

512 

513 # Extract known binary package bug stats: each line is a separate pkg 

514 bug_stats = {} 

515 for line in response_content.splitlines(): 

516 package_name, bug_counts = line.split(None, 1) 

517 bug_counts = bug_counts.split() 

518 try: 

519 bug_counts = [int(count) for count in bug_counts] 

520 except ValueError: 

521 logger.exception( 

522 'Failed to parse bug information for %s: %s', 

523 package_name, bug_counts) 

524 continue 

525 

526 bug_stats[package_name] = [ 

527 { 

528 'category_name': category_name, 

529 'bug_count': bug_count, 

530 } 

531 for category_name, bug_count in zip( 

532 self.bug_categories, bug_counts) 

533 ] 

534 

535 with transaction.atomic(): 

536 # Clear previous stats 

537 BinaryPackageBugStats.objects.all().delete() 

538 packages = \ 

539 BinaryPackageName.objects.filter(name__in=bug_stats.keys()) 

540 # Create new stats in a single query 

541 stats = [ 

542 BinaryPackageBugStats(package=package, 

543 stats=bug_stats[package.name]) 

544 for package in packages 

545 ] 

546 BinaryPackageBugStats.objects.bulk_create(stats) 

547 

548 def execute_main(self): 

549 # Stats for source and pseudo packages are retrieved from a different

550 # resource (with a different structure) than stats for binary packages. 

551 self.update_source_and_pseudo_bugs() 

552 self.update_binary_bugs() 

553 

554 

555class UpdateLintianStatsTask(BaseTask): 

556 """ 

557 Updates packages' lintian stats. 

558 """ 

559 

560 class Scheduler(IntervalScheduler): 

561 interval = 3600 * 4 

562 

563 ACTION_ITEM_TYPE_NAME = 'lintian-warnings-and-errors' 

564 ITEM_DESCRIPTION = 'lintian reports <a href="{url}">{report}</a>' 

565 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/lintian-action-item.html' 

566 

567 def initialize(self, *args, **kwargs): 

568 super(UpdateLintianStatsTask, self).initialize(*args, **kwargs) 

569 self.lintian_action_item_type = ActionItemType.objects.create_or_update( 

570 type_name=self.ACTION_ITEM_TYPE_NAME, 

571 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

572 

573 def get_lintian_stats(self): 

574 url = 'https://udd.debian.org/lintian-qa-list.txt' 

575 content = get_resource_text(url, force_update=self.force_update, 

576 only_if_updated=True) 

577 if content is None:

578 return 

579 

580 all_stats = {} 

581 categories = ( 

582 'errors', 

583 'warnings', 

584 'pedantics', 

585 'experimentals', 

586 'overriddens', 

587 ) 
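# A hypothetical input line, matching the parsing below:
#   foo 3 10 2 0 1
# i.e. package name followed by one count per category above.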

588 for line in content.splitlines(): 

589 package, stats = line.split(None, 1) 

590 stats = stats.split() 

591 try: 

592 all_stats[package] = { 

593 category: int(count) 

594 for count, category in zip(stats, categories) 

595 } 

596 except ValueError: 

597 logger.exception( 

598 'Failed to parse lintian information for %s: %s', 

599 package, line) 

600 continue 

601 

602 return all_stats 

603 

604 def update_action_item(self, package, lintian_stats): 

605 """ 

606 Updates the :class:`ActionItem` for the given package based on the 

607 :class:`LintianStats <distro_tracker.vendor.debian.models.LintianStats>`

608 given in ``package_stats``. If the package has errors or warnings, an

609 :class:`ActionItem` is created. 

610 """ 

611 package_stats = lintian_stats.stats 

612 warnings, errors = ( 

613 package_stats.get('warnings'), package_stats.get('errors', 0)) 

614 # Get the old action item for this warning, if it exists. 

615 lintian_action_item = package.get_action_item_for_type( 

616 self.lintian_action_item_type.type_name) 

617 if not warnings and not errors: 

618 if lintian_action_item: 

619 # If the item previously existed, delete it now since there 

620 # are no longer any warnings/errors. 

621 lintian_action_item.delete() 

622 return 

623 

624 # The item didn't previously have an action item: create it now 

625 if lintian_action_item is None: 

626 lintian_action_item = ActionItem( 

627 package=package, 

628 item_type=self.lintian_action_item_type) 

629 

630 lintian_url = lintian_stats.get_lintian_url() 

631 new_extra_data = { 

632 'warnings': warnings, 

633 'errors': errors, 

634 'lintian_url': lintian_url, 

635 } 

636 if lintian_action_item.extra_data: 

637 old_extra_data = lintian_action_item.extra_data 

638 if (old_extra_data['warnings'] == warnings and 

639 old_extra_data['errors'] == errors): 

640 # No need to update 

641 return 

642 

643 lintian_action_item.extra_data = new_extra_data 

644 

645 if errors and warnings: 

646 report = '{} error{} and {} warning{}'.format( 

647 errors, 

648 's' if errors > 1 else '', 

649 warnings, 

650 's' if warnings > 1 else '') 

651 elif errors: 

652 report = '{} error{}'.format( 

653 errors, 

654 's' if errors > 1 else '') 

655 elif warnings:

656 report = '{} warning{}'.format( 

657 warnings, 

658 's' if warnings > 1 else '') 

659 

660 lintian_action_item.short_description = self.ITEM_DESCRIPTION.format( 

661 url=lintian_url, 

662 report=report) 

663 

664 # If there are errors make the item a high severity issue 

665 if errors: 

666 lintian_action_item.severity = ActionItem.SEVERITY_HIGH 

667 

668 lintian_action_item.save() 

669 

670 def execute_main(self): 

671 all_lintian_stats = self.get_lintian_stats() 

672 if not all_lintian_stats: 

673 return 

674 

675 # Discard all old stats 

676 LintianStats.objects.all().delete() 

677 

678 packages = PackageName.objects.filter(name__in=all_lintian_stats.keys()) 

679 packages.prefetch_related('action_items') 

680 # Remove action items for packages which no longer have associated 

681 # lintian data. 

682 ActionItem.objects.delete_obsolete_items( 

683 [self.lintian_action_item_type], all_lintian_stats.keys()) 

684 

685 stats = [] 

686 for package in packages: 

687 package_stats = all_lintian_stats[package.name] 

688 # Save the raw lintian stats. 

689 lintian_stats = LintianStats(package=package, stats=package_stats) 

690 stats.append(lintian_stats) 

691 # Create an ActionItem if there are errors or warnings 

692 self.update_action_item(package, lintian_stats) 

693 

694 LintianStats.objects.bulk_create(stats) 

695 

696 

697class UpdateAppStreamStatsTask(BaseTask): 

698 """ 

699 Updates packages' AppStream issue hints data. 

700 """ 

701 

702 class Scheduler(IntervalScheduler): 

703 interval = 3600 * 6 

704 

705 ACTION_ITEM_TYPE_NAME = 'appstream-issue-hints' 

706 ITEM_DESCRIPTION = 'AppStream hints: {report} for {packageurllist}' 

707 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/appstream-action-item.html' 

708 

709 def initialize(self, *args, **kwargs): 

710 super(UpdateAppStreamStatsTask, self).initialize(*args, **kwargs) 

711 self.appstream_action_item_type = \ 

712 ActionItemType.objects.create_or_update( 

713 type_name=self.ACTION_ITEM_TYPE_NAME, 

714 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

715 self._tag_severities = {} 

716 

717 def _load_tag_severities(self): 

718 url = 'https://appstream.debian.org/hints/sid/hint-definitions.json' 

719 json_data = get_resource_text(url, force_update=True) 

720 

721 data = json.loads(json_data) 

722 for tag, info in data.items(): 

723 self._tag_severities[tag] = info['severity'] 

724 

725 def _load_appstream_hint_stats(self, section, arch, all_stats={}): 

726 url = 'https://appstream.debian.org/hints/sid/{}/Hints-{}.json.gz' \ 

727 .format(section, arch) 

728 hints_json = get_resource_text(url, force_update=self.force_update) 

729 

730 hints = json.loads(hints_json) 
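# Rough shape of a hint entry (illustrative, only the fields used here):
#   {'package': 'foo/1.2-3/amd64',
#    'hints': {'org.example.foo.desktop': [{'tag': 'some-tag', ...}]}}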

731 for hint in hints: 

732 pkid = hint['package'] 

733 parts = pkid.split('/') 

734 package_name = parts[0] 

735 

736 # get the source package for this binary package name 

737 src_pkgname = None 

738 if SourcePackageName.objects.exists_with_name(package_name): 

739 package = SourcePackageName.objects.get(name=package_name) 

740 src_pkgname = package.name 

741 elif BinaryPackageName.objects.exists_with_name(package_name): 

742 bin_package = BinaryPackageName.objects.get(name=package_name) 

743 package = bin_package.main_source_package_name 

744 src_pkgname = package.name 

745 else: 

746 src_pkgname = package_name 

747 

748 if src_pkgname not in all_stats: 

749 all_stats[src_pkgname] = {} 

750 if package_name not in all_stats[src_pkgname]:

751 all_stats[src_pkgname][package_name] = {} 

752 

753 for cid, h in hint['hints'].items(): 

754 for e in h: 

755 severity = self._tag_severities[e['tag']] 

756 if severity == "error": 

757 sevkey = "errors" 

758 elif severity == "warning": 

759 sevkey = "warnings" 

760 elif severity == "info":

761 sevkey = "infos" 

762 else: 

763 continue 

764 if sevkey not in all_stats[src_pkgname][package_name]: 

765 all_stats[src_pkgname][package_name][sevkey] = 1 

766 else: 

767 all_stats[src_pkgname][package_name][sevkey] += 1 

768 

769 return all_stats 

770 

771 def _get_appstream_url(self, package, bin_pkgname): 

772 """ 

773 Returns the AppStream URL for the given ``package`` and ``bin_pkgname``.

774 """ 

775 

776 src_package = get_or_none(SourcePackageName, pk=package.pk) 

777 if not src_package:

778 return '#' 

779 

780 if not src_package.main_version: 

781 return '#' 

782 

783 component = 'main' 

784 main_entry = src_package.main_entry 

785 if main_entry:

786 component = main_entry.component 

787 if not component: 

788 component = 'main' 

789 

790 return ( 

791 'https://appstream.debian.org/sid/{}/issues/{}.html' 

792 .format(component, bin_pkgname) 

793 ) 

794 

795 def _create_final_stats_report(self, package, package_stats): 

796 """ 

797 Returns a transformed statistics report to be stored in the database. 

798 """ 

799 

800 as_report = package_stats.copy() 

801 for bin_package in list(as_report.keys()): 

802 # we currently don't want to display info-type hints 

803 as_report[bin_package].pop('infos', None) 

804 if as_report[bin_package]:

805 as_report[bin_package]['url'] = \ 

806 self._get_appstream_url(package, bin_package) 

807 else: 

808 as_report.pop(bin_package) 

809 return as_report 

810 

811 def update_action_item(self, package, package_stats): 

812 """ 

813 Updates the :class:`ActionItem` for the given package based on the 

814 AppStream hint statistics given in ``package_stats``. 

815 If the package has errors or warnings an 

816 :class:`ActionItem` is created. 

817 """ 

818 

819 total_warnings = 0 

820 total_errors = 0 

821 packageurllist = [] 

822 for bin_pkgname, info in package_stats.items(): 

823 total_warnings += info.get('warnings', 0) 

824 total_errors += info.get('errors', 0) 

825 url = self._get_appstream_url(package, bin_pkgname) 

826 packageurllist.append(f'<a href="{url}">{bin_pkgname}</a>') 

827 

828 # Get the old action item for this warning, if it exists. 

829 appstream_action_item = package.get_action_item_for_type( 

830 self.appstream_action_item_type.type_name) 

831 if not total_warnings and not total_errors: 

832 if appstream_action_item: 

833 # If the item previously existed, delete it now since there 

834 # are no longer any warnings/errors. 

835 appstream_action_item.delete() 

836 return 

837 

838 # The item didn't previously have an action item: create it now 

839 if appstream_action_item is None: 

840 appstream_action_item = ActionItem( 

841 package=package, 

842 item_type=self.appstream_action_item_type) 

843 

844 as_report = self._create_final_stats_report(package, package_stats) 

845 

846 if appstream_action_item.extra_data: 

847 old_extra_data = appstream_action_item.extra_data 

848 if old_extra_data == as_report: 

849 # No need to update 

850 return 

851 

852 appstream_action_item.extra_data = as_report 

853 

854 if total_errors and total_warnings: 

855 short_report = '{} error{} and {} warning{}'.format( 

856 total_errors, 

857 's' if total_errors > 1 else '', 

858 total_warnings, 

859 's' if total_warnings > 1 else '') 

860 elif total_errors: 

861 short_report = '{} error{}'.format( 

862 total_errors, 

863 's' if total_errors > 1 else '') 

864 elif total_warnings:

865 short_report = '{} warning{}'.format( 

866 total_warnings, 

867 's' if total_warnings > 1 else '') 

868 

869 appstream_action_item.short_description = \ 

870 self.ITEM_DESCRIPTION.format(packageurllist=",".join( 

871 packageurllist), report=short_report) 

872 

873 # If there are errors make the item a high severity issue; 

874 # otherwise, make sure to set the severity as normal in case the item 

875 # existed already 

876 if total_errors: 

877 appstream_action_item.severity = ActionItem.SEVERITY_HIGH 

878 else: 

879 appstream_action_item.severity = ActionItem.SEVERITY_NORMAL 

880 

881 appstream_action_item.save() 

882 

883 def execute_main(self): 

884 self._load_tag_severities() 

885 all_stats = {} 

886 repository = Repository.objects.get(default=True) 

887 arch = "amd64" 

888 for component in repository.components: 

889 self._load_appstream_hint_stats(component, arch, all_stats) 

890 if not all_stats:

891 return 

892 

893 with transaction.atomic(): 

894 # Delete obsolete data 

895 PackageData.objects.filter(key='appstream').delete() 

896 

897 packages = PackageName.objects.filter(name__in=all_stats.keys()) 

898 packages.prefetch_related('action_items') 

899 

900 stats = [] 

901 for package in packages: 

902 package_stats = all_stats[package.name] 

903 stats.append( 

904 PackageData( 

905 package=package, 

906 key='appstream', 

907 value=package_stats 

908 ) 

909 ) 

910 

911 # Create an ActionItem if there are errors or warnings 

912 self.update_action_item(package, package_stats) 

913 

914 PackageData.objects.bulk_create(stats) 

915 # Remove action items for packages which no longer have associated 

916 # AppStream hints. 

917 ActionItem.objects.delete_obsolete_items( 

918 [self.appstream_action_item_type], all_stats.keys()) 

919 

920 

921class UpdateTransitionsTask(BaseTask): 

922 

923 class Scheduler(IntervalScheduler): 

924 interval = 3600 

925 

926 REJECT_LIST_URL = 'https://ftp-master.debian.org/transitions.yaml' 

927 PACKAGE_TRANSITION_LIST_URL = ( 

928 'https://release.debian.org/transitions/export/packages.yaml') 

929 

930 def _get_yaml_resource(self, url, **kwargs): 

931 """ 

932 Gets the YAML resource at the given URL and returns it as a Python 

933 object. 

934 """ 

935 content = get_resource_text(url, **kwargs) 

936 if content: 

937 return yaml.safe_load(content) 

938 

939 def _add_reject_transitions(self, packages): 

940 """ 

941 Adds the transitions which cause uploads to be rejected to the 

942 given ``packages`` dict. 

943 """ 

944 reject_list = self._get_yaml_resource(self.REJECT_LIST_URL) 

945 for key, transition in reject_list.items(): 

946 for package in transition['packages']: 

947 packages.setdefault(package, {}) 

948 packages[package].setdefault(key, {}) 

949 packages[package][key]['reject'] = True 

950 packages[package][key]['status'] = 'ongoing' 

951 

952 def _add_package_transition_list(self, packages): 

953 """ 

954 Adds the ongoing and planned transitions to the given ``packages`` 

955 dict. 

956 """ 

957 package_transition_list = self._get_yaml_resource( 

958 self.PACKAGE_TRANSITION_LIST_URL) 

959 

960 wanted_transition_statuses = ('ongoing', 'planned') 

961 for package_info in package_transition_list: 

962 package_name = package_info['name'] 

963 for transition_name, status in package_info['list']: 

964 if status not in wanted_transition_statuses: 

965 # Skip transitions with an unwanted status 

966 continue 

967 

968 packages.setdefault(package_name, {}) 

969 packages[package_name].setdefault(transition_name, {}) 

970 packages[package_name][transition_name]['status'] = status 

971 

972 def execute_main(self): 

973 # Update the relevant resources first 

974 kwargs = { 

975 'force_update': self.force_update, 

976 'only_if_updated': True, 

977 } 

978 reject_list = self._get_yaml_resource(self.REJECT_LIST_URL, **kwargs) 

979 package_transition_list = self._get_yaml_resource( 

980 self.PACKAGE_TRANSITION_LIST_URL, **kwargs) 

981 

982 if reject_list is None and package_transition_list is None: 

983 # Nothing to do - at least one needs to be updated... 

984 return 

985 

986 package_transitions = {} 

987 self._add_reject_transitions(package_transitions) 

988 self._add_package_transition_list(package_transitions) 

989 

990 PackageTransition.objects.all().delete() 

991 # Get the packages which have transitions 

992 packages = PackageName.objects.filter( 

993 name__in=package_transitions.keys()) 

994 transitions = [] 

995 for package in packages: 

996 for transition_name, data in \ 

997 package_transitions[package.name].items(): 

998 transitions.append(PackageTransition( 

999 package=package, 

1000 transition_name=transition_name, 

1001 status=data.get('status', None), 

1002 reject=data.get('reject', False))) 

1003 

1004 PackageTransition.objects.bulk_create(transitions) 

1005 

1006 

1007class UpdateExcusesTask(BaseTask): 

1008 

1009 class Scheduler(IntervalScheduler): 

1010 interval = 3600 

1011 

1012 ACTION_ITEM_TYPE_NAME = 'debian-testing-migration' 

1013 ITEM_DESCRIPTION = ( 

1014 "The package has not entered testing even though the delay is over") 

1015 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/testing-migration-action-item.html' 

1016 

1017 class AgeVerdict(Enum): 

1018 PKG_OF_AGE = 0 

1019 PKG_TOO_OLD = 1 

1020 PKG_TOO_YOUNG = 2 

1021 PKG_WO_POLICY = 3 

1022 

1023 def initialize(self, *args, **kwargs): 

1024 super(UpdateExcusesTask, self).initialize(*args, **kwargs) 

1025 self.action_item_type = ActionItemType.objects.create_or_update( 

1026 type_name=self.ACTION_ITEM_TYPE_NAME, 

1027 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

1028 

1029 def _adapt_excuse_links(self, excuse): 

1030 """ 

1031 If the excuse contains any anchor links, convert them to links to Distro 

1032 Tracker package pages. Otherwise, return the original text unmodified.

1033 """ 

1034 re_anchor_href = re.compile(r'^#(.*)$') 

1035 html = soup(excuse, 'html.parser') 

1036 for a_tag in html.findAll('a', {'href': True}): 

1037 href = a_tag['href'] 

1038 match = re_anchor_href.match(href) 

1039 if not match:

1040 continue 

1041 package = match.group(1).split('/')[0] 

1042 a_tag['href'] = package_url(package) 

1043 

1044 return str(html) 

1045 

1046 def _skip_excuses_item(self, item_text): 

1047 if not item_text: 

1048 return True 

1049 # We ignore these excuses 

1050 if "Section" in item_text or "Maintainer" in item_text: 

1051 return True 

1052 return False 

1053 

1054 def _check_age(self, source): 

1055 """Checks the age of the package and compares it to the age requirement 

1056 for migration""" 
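# The relevant part of a britney excuses entry looks roughly like
# (illustrative): {'policy_info': {'age': {'current-age': 7,
# 'age-requirement': 5}}}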

1057 

1058 if 'policy_info' not in source or 'age' not in source['policy_info']:

1059 return (self.AgeVerdict.PKG_WO_POLICY, None, None) 

1060 

1061 age = source['policy_info']['age']['current-age'] 

1062 limit = source['policy_info']['age']['age-requirement'] 

1063 if age > limit: 

1064 return (self.AgeVerdict.PKG_TOO_OLD, age, limit) 

1065 elif age < limit:

1066 return (self.AgeVerdict.PKG_TOO_YOUNG, age, limit) 

1067 else: 

1068 return (self.AgeVerdict.PKG_OF_AGE, age, limit) 

1069 

1070 def _extract_problematic(self, source): 

1071 verdict, age, limit = self._check_age(source) 

1072 

1073 if verdict == self.AgeVerdict.PKG_TOO_OLD: 

1074 return (source['item-name'], {'age': age, 'limit': limit}) 

1075 

1076 @staticmethod 

1077 def _make_excuses_check_dependencies(source): 

1078 """Checks the dependencies of the package (blocked-by and 

1079 migrate-after) and returns a list to display.""" 

1080 

1081 addendum = [] 

1082 

1083 if 'dependencies' in source: 

1084 blocked_by = source['dependencies'].get('blocked-by', []) 

1085 after = source['dependencies'].get('migrate-after', []) 

1086 after = [ 

1087 element 

1088 for element in after 

1089 if element not in blocked_by 

1090 ] 

1091 if blocked_by:

1092 addendum.append("Blocked by: %s" % ( 

1093 html_package_list(blocked_by), 

1094 )) 

1095 if after:

1096 addendum.append("Migrates after: %s" % ( 

1097 html_package_list(after), 

1098 )) 

1099 

1100 return addendum 

1101 

1102 @staticmethod 

1103 def _make_excuses_check_verdict(source): 

1104 """Checks the migration policy verdict of the package and builds an 

1105 excuses message depending on the result.""" 

1106 

1107 addendum = [] 

1108 

1109 if 'migration-policy-verdict' in source:

1110 verdict = source['migration-policy-verdict'] 

1111 if verdict == 'REJECTED_BLOCKED_BY_ANOTHER_ITEM': 

1112 addendum.append("Migration status: Blocked. Can't migrate " 

1113 "due to a non-migratable dependency. Check " 

1114 "status below." 

1115 ) 

1116 

1117 return addendum 

1118 

1119 def _make_excuses(self, source): 

1120 """Make the excuses list for a source item using the yaml data it 

1121 contains""" 

1122 

1123 excuses = [ 

1124 self._adapt_excuse_links(excuse) 

1125 for excuse in source['excuses'] 

1126 ] 

1127 

1128 # This is the place where we compute some additional

1129 # messages that should be added to excuses. 

1130 addendum = [] 

1131 

1132 addendum.extend(self._make_excuses_check_verdict(source)) 

1133 addendum.extend(self._make_excuses_check_dependencies(source)) 

1134 

1135 excuses = addendum + excuses 

1136 

1137 if 'is-candidate' in source:

1138 if not source['is-candidate']:

1139 excuses.append("Not considered") 

1140 

1141 return ( 

1142 source['item-name'], 

1143 excuses, 

1144 ) 

1145 

1146 def _get_excuses_and_problems(self, content): 

1147 """ 

1148 Gets the excuses for each package. 

1149 Also finds a list of packages which have not migrated to testing 

1150 after the necessary time has passed.

1151 

1152 :returns: A two-tuple where the first element is a dict mapping 

1153 package names to a list of excuses. The second element is a dict 

1154 mapping package names to problem information. Problem information

1155 is a dict with the keys ``age`` and ``limit``. 

1156 """ 

1157 if 'sources' not in content:

1158 logger.warning("Invalid format of excuses file") 

1159 return 

1160 

1161 sources = content['sources'] 

1162 excuses = [ 

1163 self._make_excuses(source) 

1164 for source in sources 

1165 if '/' not in source['item-name'] 

1166 ] 

1167 problems = [ 

1168 self._extract_problematic(source) 

1169 for source in sources 

1170 if '/' not in source['item-name'] 

1171 ] 

1172 problematic = [p for p in problems if p] 

1173 return dict(excuses), dict(problematic) 

1174 

1175 def _create_action_item(self, package, extra_data): 

1176 """ 

1177 Creates a :class:`distro_tracker.core.models.ActionItem` for the given 

1178 package including the given extra data. The item indicates that there is 

1179 a problem with the package migrating to testing. 

1180 """ 

1181 action_item = \ 

1182 package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME) 

1183 if action_item is None: 

1184 action_item = ActionItem( 

1185 package=package, 

1186 item_type=self.action_item_type) 

1187 

1188 action_item.short_description = self.ITEM_DESCRIPTION 

1189 if package.main_entry:

1190 query_string = urlencode({'package': package.name}) 

1191 extra_data['check_why_url'] = ( 

1192 'https://qa.debian.org/excuses.php' 

1193 '?{query_string}'.format(query_string=query_string)) 

1194 

1195 action_item.extra_data = extra_data 

1196 action_item.save() 

1197 

1198 def _remove_obsolete_action_items(self, problematic): 

1199 """ 

1200 Remove action items for packages which are no longer problematic. 

1201 """ 

1202 ActionItem.objects.delete_obsolete_items( 

1203 item_types=[self.action_item_type], 

1204 non_obsolete_packages=problematic.keys()) 

1205 

1206 def _get_excuses_yaml(self): 

1207 """ 

1208 Function returning the content of excuses from debian-release 

1209 :returns: a dict of excuses or ``None`` if the content in the 

1210 cache is up to date. 

1211 """ 

1212 url = 'https://release.debian.org/britney/excuses.yaml' 

1213 content = get_resource_text(url, force_update=self.force_update, 

1214 only_if_updated=True) 

1215 if content is None: 

1216 return 

1217 

1218 return yaml.safe_load(content) 

1219 

1220 def execute_main(self): 

1221 content_lines = self._get_excuses_yaml() 

1222 if not content_lines:

1223 return 

1224 

1225 result = self._get_excuses_and_problems(content_lines) 

1226 if not result:

1227 return 

1228 package_excuses, problematic = result 

1229 

1230 with transaction.atomic(): 

1231 # Remove stale excuses data and action items which are not still 

1232 # problematic. 

1233 self._remove_obsolete_action_items(problematic) 

1234 PackageExcuses.objects.all().delete() 

1235 

1236 excuses = [] 

1237 packages = SourcePackageName.objects.filter( 

1238 name__in=package_excuses.keys()) 

1239 packages.prefetch_related('action_items') 

1240 for package in packages: 

1241 excuse = PackageExcuses( 

1242 package=package, 

1243 excuses=package_excuses[package.name]) 

1244 excuses.append(excuse) 

1245 if package.name in problematic: 

1246 self._create_action_item(package, problematic[package.name]) 

1247 

1248 # Create all excuses in a single query 

1249 PackageExcuses.objects.bulk_create(excuses) 

1250 

1251 

1252class UpdateBuildLogCheckStats(BaseTask): 

1253 

1254 class Scheduler(IntervalScheduler): 

1255 interval = 3600 * 6 

1256 

1257 ACTION_ITEM_TYPE_NAME = 'debian-build-logcheck' 

1258 ITEM_DESCRIPTION = 'Build log checks report <a href="{url}">{report}</a>' 

1259 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/logcheck-action-item.html' 

1260 

1261 def initialize(self, *args, **kwargs): 

1262 super(UpdateBuildLogCheckStats, self).initialize(*args, **kwargs) 

1263 self.action_item_type = ActionItemType.objects.create_or_update( 

1264 type_name=self.ACTION_ITEM_TYPE_NAME, 

1265 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

1266 

1267 def _get_buildd_content(self): 

1268 url = 'https://qa.debian.org/bls/logcheck.txt' 

1269 return get_resource_text(url) 

1270 

1271 def get_buildd_stats(self): 

1272 content = self._get_buildd_content() 

1273 stats = {} 
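# Each line of logcheck.txt is pipe-separated; a hypothetical example:
#   foo|2|5|...
# i.e. package name, error count, warning count (further fields ignored).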

1274 for line in content.splitlines(): 

1275 pkg, errors, warnings = line.split("|")[:3] 

1276 try: 

1277 errors, warnings = int(errors), int(warnings) 

1278 except ValueError: 

1279 continue 

1280 stats[pkg] = { 

1281 'errors': errors, 

1282 'warnings': warnings, 

1283 } 

1284 return stats 

1285 

1286 def create_action_item(self, package, stats): 

1287 """ 

1288 Creates a :class:`distro_tracker.core.models.ActionItem` instance for 

1289 the given package if the build logcheck stats indicate 

1290 """ 

1291 action_item = \ 

1292 package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME) 

1293 

1294 errors = stats.get('errors', 0) 

1295 warnings = stats.get('warnings', 0) 

1296 

1297 if not errors and not warnings: 

1298 # Remove the previous action item since the package no longer has 

1299 # errors/warnings. 

1300 if action_item is not None: 

1301 action_item.delete() 

1302 return 

1303 

1304 if action_item is None: 

1305 action_item = ActionItem( 

1306 package=package, 

1307 item_type=self.action_item_type) 

1308 

1309 if action_item.extra_data: 

1310 if action_item.extra_data == stats:

1311 # Nothing has changed -- do not update the item 

1312 return 

1313 

1314 logcheck_url = "https://qa.debian.org/bls/packages/{hash}/{pkg}.html"\ 

1315 .format(hash=package.name[0], pkg=package.name) 

1316 if errors and warnings: 

1317 report = '{} error{} and {} warning{}'.format( 

1318 errors, 

1319 's' if errors > 1 else '', 

1320 warnings, 

1321 's' if warnings > 1 else '') 

1322 action_item.severity = ActionItem.SEVERITY_HIGH 

1323 elif errors: 

1324 report = '{} error{}'.format( 

1325 errors, 

1326 's' if errors > 1 else '') 

1327 action_item.severity = ActionItem.SEVERITY_HIGH 

1328 elif warnings:

1329 report = '{} warning{}'.format( 

1330 warnings, 

1331 's' if warnings > 1 else '') 

1332 action_item.severity = ActionItem.SEVERITY_LOW 

1333 

1334 action_item.short_description = self.ITEM_DESCRIPTION.format( 

1335 url=logcheck_url, 

1336 report=report) 

1337 action_item.extra_data = stats 

1338 action_item.save() 

1339 

1340 def execute_main(self): 

1341 # Build a dict with stats from both buildd and clang 

1342 stats = self.get_buildd_stats() 

1343 

1344 BuildLogCheckStats.objects.all().delete() 

1345 ActionItem.objects.delete_obsolete_items( 

1346 [self.action_item_type], stats.keys()) 

1347 

1348 packages = SourcePackageName.objects.filter(name__in=stats.keys()) 

1349 packages = packages.prefetch_related('action_items') 

1350 

1351 logcheck_stats = [] 

1352 for package in packages: 

1353 logcheck_stat = BuildLogCheckStats( 

1354 package=package, 

1355 stats=stats[package.name]) 

1356 logcheck_stats.append(logcheck_stat) 

1357 

1358 self.create_action_item(package, stats[package.name]) 

1359 

1360 # One SQL query to create all the stats. 

1361 BuildLogCheckStats.objects.bulk_create(logcheck_stats) 

1362 

1363 

1364class DebianWatchFileScannerUpdate(BaseTask): 

1365 

1366 class Scheduler(IntervalScheduler): 

1367 interval = 3600 * 6 

1368 

1369 ACTION_ITEM_TYPE_NAMES = ( 

1370 'new-upstream-version', 

1371 'watch-failure', 

1372 ) 

1373 ACTION_ITEM_TEMPLATES = { 

1374 'new-upstream-version': "debian/new-upstream-version-action-item.html", 

1375 'watch-failure': "debian/watch-failure-action-item.html", 

1376 } 

1377 ITEM_DESCRIPTIONS = { 

1378 'new-upstream-version': lambda item: ( 

1379 'A new upstream version is available: ' 

1380 '<a href="{url}">{version}</a>'.format( 

1381 url=item.extra_data['upstream_url'], 

1382 version=item.extra_data['upstream_version'])), 

1383 'watch-failure': lambda item: ( 

1384 'Problems while searching for a new upstream version'), 

1385 } 

1386 ITEM_SEVERITIES = { 

1387 'new-upstream-version': ActionItem.SEVERITY_HIGH, 

1388 'watch-failure': ActionItem.SEVERITY_HIGH, 

1389 } 

1390 

1391 def initialize(self, *args, **kwargs): 

1392 super(DebianWatchFileScannerUpdate, self).initialize(*args, **kwargs) 

1393 self.action_item_types = { 

1394 type_name: ActionItemType.objects.create_or_update( 

1395 type_name=type_name, 

1396 full_description_template=self.ACTION_ITEM_TEMPLATES.get( 

1397 type_name, None)) 

1398 for type_name in self.ACTION_ITEM_TYPE_NAMES 

1399 } 

1400 

1401 def _get_upstream_status_content(self): 

1402 url = 'https://udd.debian.org/cgi-bin/upstream-status.json.cgi' 

1403 return get_resource_text(url) 

1404 

1405 def _remove_obsolete_action_items(self, item_type_name, 

1406 non_obsolete_packages): 

1407 """ 

1408 Removes any existing :class:`ActionItem` with the given type name based 

1409 on the list of package names which should still have the items based on 

1410 the processed stats. 

1411 """ 

1412 action_item_type = self.action_item_types[item_type_name] 

1413 ActionItem.objects.delete_obsolete_items( 

1414 item_types=[action_item_type], 

1415 non_obsolete_packages=non_obsolete_packages) 

1416 

1417 def get_upstream_status_stats(self, stats): 

1418 """ 

1419 Gets the stats from the downloaded data and puts them in the given 

1420 ``stats`` dictionary. 

1421 The keys of the dict are package names. 

1422 

1423 :returns: A two-tuple where the first item is a list of packages

1424 which have new upstream versions and the second is a list of 

1425 packages which have watch failures. 

1426 """ 

1427 content = self._get_upstream_status_content() 

1428 dehs_data = None 

1429 if content: 

1430 dehs_data = json.loads(content) 

1431 if not dehs_data: 

1432 return [], [] 

1433 

1434 all_new_versions, all_failures = [], [] 

1435 for entry in dehs_data: 

1436 package_name = entry['package'] 

1437 stats.setdefault(package_name, {}) 

1438 stats[package_name]['upstream_version'] = entry['upstream-version'] 

1439 stats[package_name]['upstream_url'] = entry['upstream-url'] 

1440 if 'status' in entry and ('Newer version' in entry['status'] or 

1441 'newer package' in entry['status']): 

1442 stats[package_name]['new-upstream-version'] = { 

1443 'upstream_version': entry['upstream-version'], 

1444 'upstream_url': entry['upstream-url'], 

1445 } 

1446 all_new_versions.append(package_name) 

1447 if entry.get('warnings') or entry.get('errors'): 

1448 msg = '{}\n{}'.format( 

1449 entry.get('errors') or '', 

1450 entry.get('warnings') or '', 

1451 ).strip() 

1452 stats[package_name]['watch-failure'] = { 

1453 'warning': msg, 

1454 } 

1455 all_failures.append(package_name) 

1456 

1457 return all_new_versions, all_failures 

1458 

1459 def update_package_info(self, package, stats): 

1460 """ 

1461 Updates upstream information of the given package based on the given 

1462 stats. Upstream data is saved as a :class:`PackageData` within the 

1463 ``upstream-watch-status`` key.

1464 

1465 :param package: The package to which the upstream info should be 

1466 associated. 

1467 :type package: :class:`distro_tracker.core.models.PackageName` 

1468 :param stats: The stats which are used to create the upstream info. 

1469 :type stats: :class:`dict` 

1470 """ 

1471 try: 

1472 watch_data = package.watch_status[0] 

1473 except IndexError: 

1474 watch_data = PackageData( 

1475 package=package, 

1476 key='upstream-watch-status', 

1477 ) 

1478 

1479 watch_data.value = stats 

1480 watch_data.save() 

1481 

1482 def update_action_item(self, item_type, package, stats): 

1483 """ 

1484 Updates the action item of the given type for the given package based 

1485 on the given stats. 

1486 

1487 The severity of the item is defined by the :attr:`ITEM_SEVERITIES` dict. 

1488 

1489 The short descriptions are created by passing the :class:`ActionItem` 

1490 (with extra data already set) to the callables defined in 

1491 :attr:`ITEM_DESCRIPTIONS`. 

1492 

1493 :param item_type: The type of the :class:`ActionItem` that should be 

1494 updated. 

1495 :type item_type: string 

1496 :param package: The package to which this action item should be 

1497 associated. 

1498 :type package: :class:`distro_tracker.core.models.PackageName` 

1499 :param stats: The stats which are used to create the action item. 

1500 :type stats: :class:`dict` 

1501 """ 

1502 action_item = package.get_action_item_for_type(item_type) 

1503 if action_item is None: 

1504 # Create an action item... 

1505 action_item = ActionItem( 

1506 package=package, 

1507 item_type=self.action_item_types[item_type]) 

1508 

1509 if item_type in self.ITEM_SEVERITIES:

1510 action_item.severity = self.ITEM_SEVERITIES[item_type] 

1511 action_item.extra_data = stats 

1512 action_item.short_description = \ 

1513 self.ITEM_DESCRIPTIONS[item_type](action_item) 

1514 

1515 action_item.save() 

1516 

1517 @transaction.atomic 

1518 def execute_main(self): 

1519 stats = {} 

1520 new_upstream_version, failures = self.get_upstream_status_stats(stats) 

1521 updated_packages_per_type = { 

1522 'new-upstream-version': new_upstream_version, 

1523 'watch-failure': failures, 

1524 } 

1525 

1526 # Remove obsolete action items for each of the categories... 

1527 for item_type, packages in updated_packages_per_type.items(): 

1528 self._remove_obsolete_action_items(item_type, packages) 

1529 

1530 packages = SourcePackageName.objects.filter( 

1531 name__in=stats.keys()) 

1532 filter_qs = PackageData.objects.filter(key='upstream-watch-status') 

1533 packages = packages.prefetch_related( 

1534 'action_items', 

1535 Prefetch('data', queryset=filter_qs, to_attr='watch_status') 

1536 ) 

1537 

1538 # Update action items for each package 

1539 for package in packages: 

1540 for type_name in self.ACTION_ITEM_TYPE_NAMES: 

1541 if type_name in stats[package.name]: 

1542 # method(package, stats[package.name][type_name]) 

1543 self.update_action_item( 

1544 type_name, package, stats[package.name][type_name]) 

1545 

1546 self.update_package_info(package, stats[package.name]) 

1547 

1548 

1549class UpdateSecurityIssuesTask(BaseTask): 

1550 

1551 class Scheduler(IntervalScheduler): 

1552 interval = 3600 * 3 

1553 

1554 ACTION_ITEM_TYPE_NAME = 'debian-security-issue-in-{}' 

1555 ACTION_ITEM_TEMPLATE = 'debian/security-issue-action-item.html' 

1556 ITEM_DESCRIPTION_TEMPLATE = { 

1557 'open': '<a href="{url}">{count} security {issue}</a> in {release}', 

1558 'nodsa': 

1559 '<a href="{url}">{count} low-priority security {issue}</a> ' 

1560 'in {release}', 

1561 'none': 'No known security issue in {release}', 

1562 } 

1563 CVE_DATA_URL = 'https://security-tracker.debian.org/tracker/data/json' 

1564 DISTRIBUTIONS_URL = ( 

1565 'https://security-tracker.debian.org/tracker/distributions.json' 

1566 ) 

1567 

1568 def initialize(self, *args, **kwargs): 

1569 super(UpdateSecurityIssuesTask, self).initialize(*args, **kwargs) 

1570 self._action_item_type = {} 

1571 self._issues = None 

1572 self._distributions = None 

1573 

1574 def action_item_type(self, release): 

1575 return self._action_item_type.setdefault( 

1576 release, ActionItemType.objects.create_or_update( 

1577 type_name=self.ACTION_ITEM_TYPE_NAME.format(release), 

1578 full_description_template=self.ACTION_ITEM_TEMPLATE)) 

1579 

1580 def _get_distributions(self): 

1581 if not self._distributions: 

1582 content = get_resource_text(self.DISTRIBUTIONS_URL) 

1583 self._distributions = json.loads(content) 

1584 return self._distributions 

1585 

1586 def _get_support_status(self, release): 

1587 """ 

1588 Return support status of a given release as documented by the 

1589 security team in the security tracker. 

1590 """ 

1591 return self._get_distributions().get(release, {}).get('support', 

1592 'unknown') 

1593 

1594 def _get_issues_content(self): 

1595 if self._issues:

1596 return self._issues 

1597 content = get_resource_text(self.CVE_DATA_URL) 

1598 if content:

1599 self._issues = json.loads(content) 

1600 return self._issues 

1601 

1602 @classmethod 

1603 def _update_stats_with_nodsa_entry(cls, stats, nodsa_entry, 

1604 entry_id, description): 

1605 stats['nodsa'] += 1 

1606 

1607 nodsa_details = {'description': description, 

1608 'nodsa': nodsa_entry.get('nodsa', ''), 

1609 'nodsa_reason': nodsa_entry.get('nodsa_reason', '') 

1610 } 

1611 

1612 nodsa_reason = nodsa_details['nodsa_reason'] 

1613 if nodsa_reason == '': 

1614 nodsa_details['needs_triaging'] = True 

1615 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1616 nodsa_details 

1617 elif nodsa_reason == 'postponed':

1618 nodsa_details['fixed_via_stable_update'] = True 

1619 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1620 nodsa_details 

1621 elif nodsa_reason == 'ignored':

1622 stats['nodsa_ignored_details'][entry_id] = nodsa_details 

1623 

1624 @classmethod 

1625 def get_issues_summary(cls, issues): 

1626 result = {} 

1627 for issue_id, issue_data in issues.items(): 

1628 for release, data in issue_data['releases'].items(): 

1629 stats = result.setdefault(release, { 

1630 'open': 0, 

1631 'open_details': {}, 

1632 'nodsa': 0, 

1633 'unimportant': 0, 

1634 'next_point_update_details': {}, 

1635 'nodsa_maintainer_to_handle_details': {}, 

1636 'nodsa_ignored_details': {}, 

1637 }) 

1638 description = issue_data.get('description', '') 

1639 if (data.get('status', '') == 'resolved' or 

1640 data.get('urgency', '') == 'end-of-life'): 

1641 continue 

1642 elif data.get('urgency', '') == 'unimportant': 

1643 stats['unimportant'] += 1 

1644 elif data.get('next_point_update', False): 

1645 stats['next_point_update_details'][issue_id] = \ 

1646 {'description': description} 

1647 elif data.get('nodsa', False) is not False: 

1648 cls._update_stats_with_nodsa_entry(stats, 

1649 data, issue_id, 

1650 description 

1651 ) 

1652 else: 

1653 stats['open'] += 1 

1654 stats['open_details'][issue_id] = \ 

1655 {'description': description} 

1656 

1657 return result 

1658 

1659 @classmethod 

1660 def get_issues_stats(cls, content): 

1661 """ 

1662 Gets package issue stats from Debian's security tracker. 

1663 """ 

1664 stats = {} 

1665 for pkg, issues in content.items(): 

1666 stats[pkg] = cls.get_issues_summary(issues) 

1667 return stats 

1668 

1669 def _get_short_description(self, key, action_item): 

1670 count = action_item.extra_data['security_issues_count'] 

1671 url = 'https://security-tracker.debian.org/tracker/source-package/{}' 

1672 return self.ITEM_DESCRIPTION_TEMPLATE[key].format( 

1673 count=count, 

1674 issue='issues' if count > 1 else 'issue', 

1675 release=action_item.extra_data.get('release', 'sid'), 

1676 url=url.format(action_item.package.name), 

1677 ) 

1678 

1679 def update_action_item(self, stats, action_item): 

1680 """ 

1681 Updates the ``debian-security-issue`` action item based on the 

1682 security issues. 

1683 """ 

1684 

1685 security_issues_count = stats['open'] + stats['nodsa'] 

1686 action_item.extra_data['security_issues_count'] = security_issues_count 

1687 action_item.extra_data['support_status'] = ( 

1688 self._get_support_status(action_item.extra_data['release']) 

1689 ) 

1690 

1691 for base_key in ['open', 

1692 'next_point_update', 

1693 'nodsa_maintainer_to_handle', 

1694 'nodsa_ignored']: 

1695 details_key = base_key + '_details' 

1696 count_key = base_key + '_count' 

1697 

1698 action_item.extra_data[details_key] = stats[details_key] 

1699 action_item.extra_data[count_key] = len(stats[details_key]) 

1700 

1701 # nodsa_next_point_update / nodsa_ignored_details are displayed 

1702 # only if there is anything else to show 

1703 nodsa_create_action = (stats['nodsa'] - 

1704 len(stats['nodsa_ignored_details'])) > 0 

1705 

1706 if stats['open']: 

1707 action_item.severity = ActionItem.SEVERITY_HIGH 

1708 action_item.short_description = \ 

1709 self._get_short_description('open', action_item) 

1710 elif nodsa_create_action: 

1711 action_item.severity = ActionItem.SEVERITY_LOW 

1712 action_item.short_description = \ 

1713 self._get_short_description('nodsa', action_item) 

1714 else: 

1715 action_item.severity = ActionItem.SEVERITY_WISHLIST 

1716 action_item.short_description = \ 

1717 self._get_short_description('none', action_item) 

1718 

1719 @classmethod 

1720 def generate_package_data(cls, issues): 

1721 return { 

1722 'details': issues, 

1723 'stats': cls.get_issues_summary(issues), 

1724 'checksum': get_data_checksum(issues) 

1725 } 

1726 

1727 def want_action_item(self, pkgdata, release): 

1728 stats = pkgdata.value.get('stats', {}).get(release) 

1729 if stats is None:

1730 return False 

1731 

1732 supported_by = self._get_support_status(release) 

1733 if supported_by == "end-of-life": 

1734 return False 

1735 elif supported_by == "security": 

1736 count = stats.get('open', 0) + stats.get('nodsa', 0) 

1737 else: 

1738 count = stats.get('open', 0) 

1739 

1740 if count == 0: 

1741 return False 

1742 

1743 return True 

1744 

1745 def process_pkg_action_items(self, pkgdata, existing_action_items): 

1746 release_ai = {} 

1747 to_add = [] 

1748 to_update = [] 

1749 to_drop = [] 

1750 global_stats = pkgdata.value.get('stats', {}) 

1751 for ai in existing_action_items: 

1752 release = ai.extra_data['release'] 

1753 release_ai[release] = ai 

1754 for release, stats in global_stats.items(): 

1755 ai = release_ai.get(release) 

1756 

1757 if self.want_action_item(pkgdata, release): 

1758 if ai: 

1759 to_update.append(ai) 

1760 else: 

1761 ai = ActionItem( 

1762 item_type=self.action_item_type(release), 

1763 package=pkgdata.package, 

1764 extra_data={'release': release} 

1765 ) 

1766 to_add.append(ai) 

1767 self.update_action_item(stats, ai) 

1768 else: 

1769 if ai: 

1770 to_drop.append(ai) 

1771 

1772 return to_add, to_update, to_drop 

1773 

1774 def execute_main(self): 

1775 # Fetch all debian-security PackageData 

1776 all_pkgdata = PackageData.objects.select_related( 

1777 'package').filter(key='debian-security').only( 

1778 'package__name', 'value') 

1779 

1780 all_data = {} 

1781 packages = {} 

1782 for pkgdata in all_pkgdata: 

1783 all_data[pkgdata.package.name] = pkgdata 

1784 packages[pkgdata.package.name] = pkgdata.package 

1785 

1786 # Fetch all debian-security ActionItems 

1787 pkg_action_items = collections.defaultdict(lambda: []) 

1788 all_action_items = ActionItem.objects.select_related( 

1789 'package').filter( 

1790 item_type__type_name__startswith='debian-security-issue-in-') 

1791 for action_item in all_action_items: 

1792 pkg_action_items[action_item.package.name].append(action_item) 

1793 

1794 # Check for changes on distributions.json 

1795 distributions_checksum = get_data_checksum(self._get_distributions()) 

1796 if self.data.get('distributions_checksum') != distributions_checksum: 

1797 # New distributions.json, force update all action items 

1798 self.force_update = True 

1799 self.data['distributions_checksum'] = distributions_checksum 

1800 

1801 # Scan the security tracker data 

1802 content = self._get_issues_content() 

1803 to_add = [] 

1804 to_update = [] 

1805 for pkgname, issues in content.items(): 

1806 if pkgname in all_data: 

1807 # Check if we need to update the existing data 

1808 checksum = get_data_checksum(issues) 

1809 if not self.force_update and \ 

1810 all_data[pkgname].value.get('checksum', '') == checksum: 

1811 continue 

1812 # Update the data 

1813 pkgdata = all_data[pkgname] 

1814 pkgdata.value = self.generate_package_data(issues) 

1815 to_update.append(pkgdata) 

1816 else: 

1817 # Add data for a new package 

1818 package, _ = PackageName.objects.get_or_create(name=pkgname) 

1819 to_add.append( 

1820 PackageData( 

1821 package=package, 

1822 key='debian-security', 

1823 value=self.generate_package_data(issues) 

1824 ) 

1825 ) 

1826 # Process action items 

1827 ai_to_add = [] 

1828 ai_to_update = [] 

1829 ai_to_drop = [] 

1830 for pkgdata in itertools.chain(to_add, to_update): 

1831 add, update, drop = self.process_pkg_action_items( 

1832 pkgdata, pkg_action_items[pkgdata.package.name]) 

1833 ai_to_add.extend(add) 

1834 ai_to_update.extend(update) 

1835 ai_to_drop.extend(drop) 

1836 # Sync in database 

1837 with transaction.atomic(): 

1838 # Delete obsolete data 

1839 PackageData.objects.filter( 

1840 key='debian-security').exclude( 

1841 package__name__in=content.keys()).delete() 

1842 ActionItem.objects.filter( 

1843 item_type__type_name__startswith='debian-security-issue-in-' 

1844 ).exclude(package__name__in=content.keys()).delete() 

1845 ActionItem.objects.filter( 

1846 item_type__type_name__startswith='debian-security-issue-in-', 

1847 id__in=[ai.id for ai in ai_to_drop]).delete() 

1848 # Add new entries 

1849 PackageData.objects.bulk_create(to_add) 

1850 ActionItem.objects.bulk_create(ai_to_add) 

1851 # Update existing entries 

1852 for pkgdata in to_update: 

1853 pkgdata.save() 

1854 for ai in ai_to_update: 

1855 ai.save() 

1856 

1857 

1858class UpdatePiuPartsTask(BaseTask): 

1859 """ 

1860 Retrieves the piuparts stats for all the suites defined in the 

1861 :data:`distro_tracker.project.local_settings.DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES` 

1862 """ 

1863 

1864 class Scheduler(IntervalScheduler): 

1865 interval = 3600 * 3 

1866 

1867 ACTION_ITEM_TYPE_NAME = 'debian-piuparts-test-fail' 

1868 ACTION_ITEM_TEMPLATE = 'debian/piuparts-action-item.html' 

1869 ITEM_DESCRIPTION = 'piuparts found (un)installation error(s)' 

1870 

1871 def initialize(self, *args, **kwargs): 

1872 super(UpdatePiuPartsTask, self).initialize(*args, **kwargs) 

1873 self.action_item_type = ActionItemType.objects.create_or_update( 

1874 type_name=self.ACTION_ITEM_TYPE_NAME, 

1875 full_description_template=self.ACTION_ITEM_TEMPLATE) 

1876 

1877 def _get_piuparts_content(self, suite): 

1878 """ 

1879 :returns: The content of the piuparts report for the given suite

1880 or ``None`` if there is no data for the particular suite.

1881 """ 

1882 url = 'https://piuparts.debian.org/{suite}/sources.txt' 

1883 return get_resource_text(url.format(suite=suite)) 

1884 

1885 def get_piuparts_stats(self): 

1886 suites = getattr(settings, 'DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES', []) 

1887 failing_packages = {} 

1888 for suite in suites: 

1889 content = self._get_piuparts_content(suite) 

1890 if content is None: 

1891 logger.info("There is no piuparts for suite: %s", suite) 

1892 continue 

1893 

1894 for line in content.splitlines(): 

1895 package_name, status = line.split(':', 1) 

1896 package_name, status = package_name.strip(), status.strip() 

1897 if status == 'fail': 

1898 failing_packages.setdefault(package_name, []) 

1899 failing_packages[package_name].append(suite) 

1900 

1901 return failing_packages 

1902 

1903 def create_action_item(self, package, suites): 

1904 """ 

1905 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

1906 instance for the package based on the list of suites in which the 

1907 piuparts installation test failed. 

1908 """ 

1909 action_item = package.get_action_item_for_type(self.action_item_type) 

1910 if action_item is None: 

1911 action_item = ActionItem( 

1912 package=package, 

1913 item_type=self.action_item_type, 

1914 short_description=self.ITEM_DESCRIPTION) 

1915 

1916 if action_item.extra_data: 

1917 existing_items = action_item.extra_data.get('suites', []) 

1918 if list(sorted(existing_items)) == list(sorted(suites)): 

1919 # No need to update this item 

1920 return 

1921 action_item.extra_data = { 

1922 'suites': suites, 

1923 } 

1924 action_item.save() 

1925 

1926 def execute_main(self): 

1927 failing_packages = self.get_piuparts_stats() 

1928 

1929 ActionItem.objects.delete_obsolete_items( 

1930 item_types=[self.action_item_type], 

1931 non_obsolete_packages=failing_packages.keys()) 

1932 

1933 packages = SourcePackageName.objects.filter( 

1934 name__in=failing_packages.keys()) 

1935 packages = packages.prefetch_related('action_items') 

1936 

1937 for package in packages: 

1938 self.create_action_item(package, failing_packages[package.name]) 

1939 

1940 

1941class UpdateUbuntuStatsTask(BaseTask): 

1942 """ 

1943 The task updates Ubuntu stats for packages. These stats are displayed in a 

1944 separate panel. 

1945 """ 

1946 

1947 class Scheduler(IntervalScheduler): 

1948 interval = 3600 * 3 

1949 

1950 def initialize(self, *args, **kwargs): 

1951 super(UpdateUbuntuStatsTask, self).initialize(*args, **kwargs) 

1952 

1953 def _get_versions_content(self): 

1954 url = 'https://udd.debian.org/cgi-bin/ubuntupackages.cgi' 

1955 return get_resource_text(url) 

1956 

1957 def get_ubuntu_versions(self): 

1958 """ 

1959 Retrieves the Ubuntu package versions. 

1960 

1961 :returns: A dict mapping package names to Ubuntu versions. 

1962 """ 

1963 content = self._get_versions_content() 

1964 

1965 package_versions = {} 

1966 for line in content.splitlines(): 

1967 package, version = line.split(' ', 1) 

1968 version = version.strip() 

1969 package_versions[package] = version 

1970 

1971 return package_versions 

1972 

1973 def _get_bug_stats_content(self): 

1974 url = 'https://udd.debian.org/cgi-bin/ubuntubugs.cgi' 

1975 return get_resource_text(url) 

1976 

1977 def get_ubuntu_bug_stats(self): 

1978 """ 

1979 Retrieves the Ubuntu bug stats for packages. Bug stats contain the

1980 count of bugs and the count of patches. 

1981 

1982 :returns: A dict mapping package names to a dict of package stats. 

1983 """ 

1984 content = self._get_bug_stats_content() 

1985 

1986 bug_stats = {} 

1987 for line in content.splitlines(): 

1988 package_name, bug_count, patch_count = line.split("|", 2) 

1989 try: 

1990 bug_count, patch_count = int(bug_count), int(patch_count) 

1991 except ValueError: 

1992 continue 

1993 bug_stats[package_name] = { 

1994 'bug_count': bug_count, 

1995 'patch_count': patch_count, 

1996 } 

1997 

1998 return bug_stats 

1999 

2000 def _get_ubuntu_patch_diff_content(self): 

2001 url = 'https://patches.ubuntu.com/PATCHES' 

2002 return get_resource_text(url) 

2003 

2004 def get_ubuntu_patch_diffs(self): 

2005 """ 

2006 Retrieves the Ubuntu patch diff information. The information consists

2007 of the diff URL and the version of the Ubuntu package to which the

2008 diff belongs.

2009 

2010 :returns: A dict mapping package names to diff information. 

2011 """ 

2012 content = self._get_ubuntu_patch_diff_content() 

2013 

2014 patch_diffs = {} 

2015 re_diff_version = re.compile(r'_(\S+)\.patch') 

2016 for line in content.splitlines(): 

2017 package_name, diff_url = line.split(' ', 1) 

2018 # Extract the version of the package from the diff url 

2019 match = re_diff_version.search(diff_url) 

2020 if not match:

2021 # Invalid URL: no version 

2022 continue 

2023 version = match.group(1) 

2024 patch_diffs[package_name] = { 

2025 'version': version, 

2026 'diff_url': diff_url 

2027 } 

2028 

2029 return patch_diffs 

2030 

2031 def execute_main(self): 

2032 package_versions = self.get_ubuntu_versions() 

2033 bug_stats = self.get_ubuntu_bug_stats() 

2034 patch_diffs = self.get_ubuntu_patch_diffs() 

2035 

2036 obsolete_ubuntu_pkgs = UbuntuPackage.objects.exclude( 

2037 package__name__in=package_versions.keys()) 

2038 obsolete_ubuntu_pkgs.delete() 

2039 

2040 packages = PackageName.objects.filter(name__in=package_versions.keys()) 

2041 packages = packages.prefetch_related('ubuntu_package') 

2042 

2043 for package in packages: 

2044 version = package_versions[package.name] 

2045 bugs = bug_stats.get(package.name, None) 

2046 diff = patch_diffs.get(package.name, None) 

2047 

2048 try: 

2049 ubuntu_package = package.ubuntu_package 

2050 ubuntu_package.version = version 

2051 ubuntu_package.bugs = bugs 

2052 ubuntu_package.patch_diff = diff 

2053 ubuntu_package.save() 

2054 except UbuntuPackage.DoesNotExist: 

2055 ubuntu_package = UbuntuPackage.objects.create( 

2056 package=package, 

2057 version=version, 

2058 bugs=bugs, 

2059 patch_diff=diff) 

2060 

2061 

2062class UpdateWnppStatsTask(BaseTask): 

2063 """ 

2064 The task updates the WNPP bugs for all packages. 

2065 """ 

2066 

2067 class Scheduler(IntervalScheduler): 

2068 interval = 3600 * 3 

2069 

2070 ACTION_ITEM_TYPE_NAME = 'debian-wnpp-issue' 

2071 ACTION_ITEM_TEMPLATE = 'debian/wnpp-action-item.html' 

2072 ITEM_DESCRIPTION = '<a href="{url}">{wnpp_type}: {wnpp_msg}</a>' 

2073 

2074 def initialize(self, *args, **kwargs): 

2075 super(UpdateWnppStatsTask, self).initialize(*args, **kwargs) 

2076 self.action_item_type = ActionItemType.objects.create_or_update( 

2077 type_name=self.ACTION_ITEM_TYPE_NAME, 

2078 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2079 

2080 def get_wnpp_stats(self): 

2081 """ 

2082 Retrieves and parses the wnpp stats for all packages. WNPP stats 

2083 include the WNPP type and the BTS bug id. 

2084 

2085 :returns: A dict mapping package names to wnpp stats. 

2086 """ 

2087 url = 'https://qa.debian.org/data/bts/wnpp_rm' 

2088 content = get_resource_text(url, only_if_updated=True) 

2089 if content is None:

2090 return 

2091 

2092 wnpp_stats = {} 

2093 for line in content.splitlines(): 

2094 line = line.strip() 

2095 try: 

2096 package_name, wnpp_type, bug_id = line.split('|')[0].split() 

2097 bug_id = int(bug_id) 

2098 except ValueError: 

2099 # Badly formatted bug number 

2100 continue 

2101 # Strip the colon from the end of the package name 

2102 package_name = package_name[:-1] 

2103 

2104 wnpp_stats[package_name] = { 

2105 'wnpp_type': wnpp_type, 

2106 'bug_id': bug_id, 

2107 } 

2108 

2109 return wnpp_stats 

2110 

2111 def update_action_item(self, package, stats): 

2112 """ 

2113 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2114 instance for the given type indicating that the package has a WNPP 

2115 issue. 

2116 """ 

2117 action_item = package.get_action_item_for_type(self.action_item_type) 

2118 if not action_item: 

2119 action_item = ActionItem( 

2120 package=package, 

2121 item_type=self.action_item_type) 

2122 

2123 # Check if the stats have actually been changed 

2124 if action_item.extra_data: 

2125 if action_item.extra_data.get('wnpp_info', None) == stats: 

2126 # Nothing to do -- still the same data

2127 return 

2128 

2129 # Update the data since something has changed 

2130 try: 

2131 release = package.main_entry.repository.suite or \ 

2132 package.main_entry.repository.codename 

2133 except AttributeError: 

2134 release = None 

2135 

2136 msgs = { 

2137 'O': "This package has been orphaned and needs a maintainer.", 

2138 'ITA': "Someone intends to adopt this package.", 

2139 'RFA': "The maintainer wants to pass over package maintenance.",

2140 'RFH': "The maintainer is looking for help with this package.", 

2141 'ITP': "Someone is planning to reintroduce this package.", 

2142 'RFP': "There is a request to reintroduce this package.", 

2143 'RM': "This package has been requested to be removed.", 

2144 'RFS': "A sponsor is needed to update this package.", 

2145 '?': "The WNPP database contains an entry for this package." 

2146 } 

2147 wnpp_type = stats['wnpp_type'] 

2148 try: 

2149 wnpp_msg = msgs[wnpp_type] 

2150 except KeyError: 

2151 wnpp_msg = msgs['?'] 

2152 

2153 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2154 url='https://bugs.debian.org/{}'.format(stats['bug_id']), 

2155 wnpp_type=wnpp_type, wnpp_msg=wnpp_msg) 

2156 action_item.extra_data = { 

2157 'wnpp_info': stats, 

2158 'release': release, 

2159 } 

2160 action_item.save() 

2161 

2162 def update_depneedsmaint_action_item(self, package_needs_maintainer, stats): 

2163 short_description_template = \ 

2164 'Depends on packages which need a new maintainer' 

2165 package_needs_maintainer.get_absolute_url() 

2166 action_item_type = ActionItemType.objects.create_or_update( 

2167 type_name='debian-depneedsmaint', 

2168 full_description_template='debian/depneedsmaint-action-item.html') 

2169 dependencies = SourcePackageDeps.objects.filter( 

2170 dependency=package_needs_maintainer) 

2171 for dependency in dependencies:

2172 package = dependency.source 

2173 action_item = package.get_action_item_for_type(action_item_type) 

2174 if not action_item: 

2175 action_item = ActionItem( 

2176 package=package, 

2177 item_type=action_item_type, 

2178 extra_data={}) 

2179 

2180 pkgdata = { 

2181 'bug': stats['bug_id'], 

2182 'details': dependency.details, 

2183 } 

2184 

2185 if (action_item.extra_data.get(package_needs_maintainer.name, {}) == 

2186 pkgdata): 

2187 # Nothing has changed 

2188 continue 

2189 

2190 action_item.short_description = short_description_template 

2191 action_item.extra_data[package_needs_maintainer.name] = pkgdata 

2192 

2193 action_item.save() 

2194 

2195 @transaction.atomic 

2196 def execute_main(self): 

2197 wnpp_stats = self.get_wnpp_stats() 

2198 if wnpp_stats is None:

2199 # Nothing to do: cached content up to date 

2200 return 

2201 

2202 ActionItem.objects.delete_obsolete_items( 

2203 item_types=[self.action_item_type], 

2204 non_obsolete_packages=wnpp_stats.keys()) 

2205 # Remove obsolete action items for packages whose dependencies need a 

2206 # new maintainer. 

2207 packages_need_maintainer = [] 

2208 for name, stats in wnpp_stats.items(): 

2209 if stats['wnpp_type'] in ('O', 'RFA'): 

2210 packages_need_maintainer.append(name) 

2211 packages_depneeds_maint = [ 

2212 package.name for package in SourcePackageName.objects.filter( 

2213 source_dependencies__dependency__name__in=packages_need_maintainer) # noqa 

2214 ] 

2215 ActionItem.objects.delete_obsolete_items( 

2216 item_types=[ 

2217 ActionItemType.objects.get_or_create( 

2218 type_name='debian-depneedsmaint')[0], 

2219 ], 

2220 non_obsolete_packages=packages_depneeds_maint) 

2221 

2222 # Drop all reverse references 

2223 for ai in ActionItem.objects.filter(

2224 item_type__type_name='debian-depneedsmaint'): 

2225 ai.extra_data = {} 

2226 ai.save() 

2227 

2228 packages = SourcePackageName.objects.filter(name__in=wnpp_stats.keys()) 

2229 packages = packages.prefetch_related('action_items') 

2230 

2231 for package in packages: 

2232 stats = wnpp_stats[package.name] 

2233 self.update_action_item(package, stats) 

2234 # Update action items for packages which depend on this one to 

2235 # indicate that a dependency needs a new maintainer. 

2236 if package.name in packages_need_maintainer: 

2237 self.update_depneedsmaint_action_item(package, stats) 

2238 

2239 

2240class UpdateNewQueuePackages(BaseTask): 

2241 """ 

2242 Updates the versions of source packages found in the NEW queue. 

2243 """ 

2244 

2245 class Scheduler(IntervalScheduler): 

2246 interval = 3600 

2247 

2248 DATA_KEY = 'debian-new-queue-info' 

2249 

2250 def initialize(self, *args, **kwargs): 

2251 super(UpdateNewQueuePackages, self).initialize(*args, **kwargs) 

2252 

2253 def extract_package_info(self, content): 

2254 """ 

2255 Extracts the package information from the content of the NEW queue. 

2256 

2257 :returns: A dict mapping package names to a dict mapping the 

2258 distribution name in which the package is found to the version 

2259 information for the most recent version of the package in the dist. 

2260 """ 

2261 packages = {} 

2262 for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()): 

2263 necessary_fields = ('Source', 'Queue', 'Version', 'Distribution') 

2264 if not all(field in stanza for field in necessary_fields): 

2265 continue 

2266 if stanza['Queue'] != 'new':

2267 continue 

2268 

2269 versions = stanza['Version'].split() 

2270 # Save only the most recent version 

2271 version = max(versions, key=lambda x: AptPkgVersion(x)) 

2272 

2273 package_name = stanza['Source'] 

2274 pkginfo = packages.setdefault(package_name, {}) 

2275 distribution = stanza['Distribution'] 

2276 if distribution in pkginfo: 

2277 current_version = pkginfo[distribution]['version'] 

2278 if debian_support.version_compare(version, current_version) < 0: 

2279 # The already saved version is more recent than this one. 

2280 continue 

2281 

2282 pkginfo[distribution] = { 

2283 'version': version, 

2284 } 

2285 

2286 return packages 

2287 

2288 def _get_new_content(self): 

2289 url = 'https://ftp-master.debian.org/new.822' 

2290 return get_resource_text(url, force_update=self.force_update, 

2291 only_if_updated=True) 

2292 

2293 def execute_main(self): 

2294 content = self._get_new_content() 

2295 if content is None:

2296 return 

2297 

2298 all_package_info = self.extract_package_info(content) 

2299 

2300 packages = SourcePackageName.objects.filter( 

2301 name__in=all_package_info.keys()) 

2302 

2303 with transaction.atomic(): 

2304 # Drop old entries 

2305 PackageData.objects.filter(key=self.DATA_KEY).delete() 

2306 # Prepare current entries 

2307 data = [] 

2308 for package in packages: 

2309 new_queue_info = PackageData( 

2310 key=self.DATA_KEY, 

2311 package=package, 

2312 value=all_package_info[package.name]) 

2313 data.append(new_queue_info) 

2314 # Bulk create them 

2315 PackageData.objects.bulk_create(data) 

2316 

2317 

2318class UpdateAutoRemovalsStatsTask(BaseTask): 

2319 """ 

2320 A task for updating autoremovals information on all packages. 

2321 """ 

2322 

2323 class Scheduler(IntervalScheduler): 

2324 interval = 3600 

2325 

2326 ACTION_ITEM_TYPE_NAME = 'debian-autoremoval' 

2327 ACTION_ITEM_TEMPLATE = 'debian/autoremoval-action-item.html' 

2328 ITEM_DESCRIPTION = ('Marked for autoremoval on {removal_date}' + 

2329 '{dependencies}: {bugs}') 

2330 

2331 def initialize(self, *args, **kwargs): 

2332 super(UpdateAutoRemovalsStatsTask, self).initialize(*args, **kwargs) 

2333 self.action_item_type = ActionItemType.objects.create_or_update( 

2334 type_name=self.ACTION_ITEM_TYPE_NAME, 

2335 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2336 

2337 def get_autoremovals_stats(self): 

2338 """ 

2339 Retrieves and parses the autoremoval stats for all packages. 

2340 Autoremoval stats include the BTS bugs id. 

2341 

2342 :returns: A dict mapping package names to autoremoval stats. 

2343 """ 

2344 content = get_resource_text( 

2345 'https://udd.debian.org/cgi-bin/autoremovals.yaml.cgi', 

2346 force_update=self.force_update, 

2347 only_if_updated=True 

2348 ) 

2349 if content:

2350 return yaml.safe_load(content) 

2351 

2352 def update_action_item(self, package, stats): 

2353 """ 

2354 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2355 instance for the given type indicating that the package has an 

2356 autoremoval issue. 

2357 """ 

2358 action_item = package.get_action_item_for_type(self.action_item_type) 

2359 if not action_item:

2360 action_item = ActionItem( 

2361 package=package, 

2362 item_type=self.action_item_type, 

2363 severity=ActionItem.SEVERITY_HIGH) 

2364 

2365 bugs_dependencies = stats.get('bugs_dependencies', []) 

2366 buggy_dependencies = stats.get('buggy_dependencies', []) 

2367 reverse_dependencies = stats.get('rdeps', []) 

2368 all_bugs = stats['bugs'] + bugs_dependencies 

2369 link = '<a href="https://bugs.debian.org/{}">#{}</a>' 

2370 removal_date = stats['removal_date'].strftime('%d %B') 

2371 if isinstance(removal_date, bytes):

2372 removal_date = removal_date.decode('utf-8', 'ignore') 

2373 

2374 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2375 removal_date=removal_date, 

2376 dependencies=(' due to ' + html_package_list( 

2377 buggy_dependencies) if buggy_dependencies else ''), 

2378 bugs=', '.join(link.format(bug, bug) for bug in all_bugs)) 

2379 

2380 # datetime objects are not JSON-serializable, convert them ourselves 

2381 for key in stats.keys(): 

2382 if hasattr(stats[key], 'strftime'): 

2383 stats[key] = stats[key].strftime('%a %d %b %Y') 

2384 

2385 action_item.extra_data = { 

2386 'stats': stats, 

2387 'removal_date': stats['removal_date'], 

2388 'version': stats.get('version', ''), 

2389 'bugs': ', '.join(link.format(bug, bug) for bug in stats['bugs']), 

2390 'bugs_dependencies': ', '.join( 

2391 link.format(bug, bug) for bug in bugs_dependencies), 

2392 'buggy_dependencies': 

2393 html_package_list(buggy_dependencies), 

2394 'reverse_dependencies': 

2395 html_package_list(reverse_dependencies), 

2396 'number_rdeps': len(reverse_dependencies)} 

2397 action_item.save() 

2398 

2399 def execute_main(self): 

2400 autoremovals_stats = self.get_autoremovals_stats() 

2401 if autoremovals_stats is None:

2402 # Nothing to do: cached content up to date 

2403 return 

2404 

2405 ActionItem.objects.delete_obsolete_items( 

2406 item_types=[self.action_item_type], 

2407 non_obsolete_packages=autoremovals_stats.keys()) 

2408 

2409 packages = SourcePackageName.objects.filter( 

2410 name__in=autoremovals_stats.keys()) 

2411 packages = packages.prefetch_related('action_items') 

2412 

2413 for package in packages: 

2414 self.update_action_item(package, autoremovals_stats[package.name]) 

2415 

2416 

2417class UpdatePackageScreenshotsTask(BaseTask): 

2418 """ 

2419 Check if a screenshot exists on screenshots.debian.net, and add a 

2420 key to PackageData if it does. 

2421 """ 

2422 

2423 class Scheduler(IntervalScheduler): 

2424 interval = 3600 * 24 

2425 

2426 DATA_KEY = 'screenshots' 

2427 

2428 def _get_screenshots(self): 

2429 url = 'https://screenshots.debian.net/json/packages' 

2430 content = get_resource_text(url, force_update=self.force_update, 

2431 only_if_updated=True) 

2432 if content is None:

2433 return 

2434 

2435 data = json.loads(content) 

2436 return data 
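# Illustrative sketch (assumed shape): the endpoint is expected to return
# something like {"packages": [{"name": "<package>", ...}, ...]}; only the
# 'name' field of each entry is used in execute_main() below.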

2437 

2438 def execute_main(self): 

2439 content = self._get_screenshots() 

2440 if content is None:

2441 return 

2442 

2443 packages_with_screenshots = [] 

2444 for item in content['packages']: 

2445 try: 

2446 package = SourcePackageName.objects.get(name=item['name']) 

2447 packages_with_screenshots.append(package) 

2448 except SourcePackageName.DoesNotExist: 

2449 pass 

2450 

2451 with transaction.atomic(): 

2452 PackageData.objects.filter(key='screenshots').delete() 

2453 

2454 data = [] 

2455 for package in packages_with_screenshots: 

2456 try: 

2457 screenshot_info = package.data.get(key=self.DATA_KEY) 

2458 screenshot_info.value['screenshots'] = 'true' 

2459 except PackageData.DoesNotExist: 

2460 screenshot_info = PackageData( 

2461 key=self.DATA_KEY, 

2462 package=package, 

2463 value={'screenshots': 'true'}) 

2464 

2465 data.append(screenshot_info) 

2466 

2467 PackageData.objects.bulk_create(data) 

2468 

2469 

2470class UpdateBuildReproducibilityTask(BaseTask): 

2471 

2472 class Scheduler(IntervalScheduler): 

2473 interval = 3600 * 6 

2474 

2475 BASE_URL = 'https://tests.reproducible-builds.org' 

2476 ACTION_ITEM_TYPE_NAME = 'debian-build-reproducibility' 

2477 ACTION_ITEM_TEMPLATE = 'debian/build-reproducibility-action-item.html' 

2478 ITEM_DESCRIPTION = { 

2479 'blacklisted': '<a href="{url}">Blacklisted</a> from build ' 

2480 'reproducibility testing', 

2481 'FTBFS': '<a href="{url}">Fails to build</a> during reproducibility ' 

2482 'testing', 

2483 'reproducible': None, 

2484 'FTBR': '<a href="{url}">Does not build reproducibly</a> ' 

2485 'during testing', 

2486 '404': None, 

2487 'not for us': None, 

2488 } 

2489 

2490 def initialize(self, *args, **kwargs): 

2491 super(UpdateBuildReproducibilityTask, self).initialize(*args, **kwargs) 

2492 self.action_item_type = ActionItemType.objects.create_or_update( 

2493 type_name=self.ACTION_ITEM_TYPE_NAME, 

2494 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2495 

2496 def get_build_reproducibility(self): 

2497 url = '{}/debian/reproducible-tracker.json'.format(self.BASE_URL) 

2498 content = get_resource_text(url, force_update=self.force_update, 

2499 only_if_updated=True) 

2500 if content is None:

2501 return 

2502 

2503 reproducibilities = json.loads(content) 

2504 packages = {} 

2505 for item in reproducibilities: 

2506 package = item['package'] 

2507 status = item['status'] 

2508 missing = package not in packages 

2509 important = self.ITEM_DESCRIPTION.get(status) is not None 

2510 if important or missing:

2511 packages[package] = status 

2512 

2513 return packages 
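# Illustrative sketch (hypothetical package name): reproducible-tracker.json
# is expected to be a list of entries such as
#   [{"package": "foo", "status": "FTBR", ...}, ...];
# get_build_reproducibility() above keeps one status per package, preferring
# the statuses that warrant an action item (see ITEM_DESCRIPTION).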

2514 

2515 def update_action_item(self, package, status): 

2516 description = self.ITEM_DESCRIPTION.get(status) 

2517 

2518 if not description: # Not worth an action item 

2519 return False 

2520 

2521 action_item = package.get_action_item_for_type( 

2522 self.action_item_type.type_name) 

2523 if action_item is None:

2524 action_item = ActionItem( 

2525 package=package, 

2526 item_type=self.action_item_type, 

2527 severity=ActionItem.SEVERITY_NORMAL) 

2528 

2529 url = "{}/debian/rb-pkg/{}.html".format(self.BASE_URL, package.name) 

2530 action_item.short_description = description.format(url=url) 

2531 action_item.save() 

2532 return True 

2533 

2534 def execute_main(self): 

2535 reproducibilities = self.get_build_reproducibility() 

2536 if reproducibilities is None:

2537 return 

2538 

2539 with transaction.atomic(): 

2540 PackageData.objects.filter(key='reproducibility').delete() 

2541 

2542 packages = [] 

2543 data = [] 

2544 

2545 for name, status in reproducibilities.items(): 

2546 try: 

2547 package = SourcePackageName.objects.get(name=name) 

2548 if self.update_action_item(package, status): 

2549 packages.append(package) 

2550 except SourcePackageName.DoesNotExist: 

2551 continue 

2552 

2553 reproducibility_info = PackageData( 

2554 key='reproducibility', 

2555 package=package, 

2556 value={'reproducibility': status}) 

2557 data.append(reproducibility_info) 

2558 

2559 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2560 packages) 

2561 PackageData.objects.bulk_create(data) 

2562 

2563 

2564class MultiArchHintsTask(BaseTask): 

2565 

2566 class Scheduler(IntervalScheduler): 

2567 interval = 3600 * 6 

2568 

2569 ACTIONS_WEB = 'https://wiki.debian.org/MultiArch/Hints' 

2570 ACTIONS_URL = 'https://dedup.debian.net/static/multiarch-hints.yaml' 

2571 ACTION_ITEM_TYPE_NAME = 'debian-multiarch-hints' 

2572 ACTION_ITEM_TEMPLATE = 'debian/multiarch-hints.html' 

2573 ACTION_ITEM_DESCRIPTION = \ 

2574 '<a href="{link}">Multiarch hinter</a> reports {count} issue(s)' 

2575 

2576 def initialize(self, *args, **kwargs): 

2577 super(MultiArchHintsTask, self).initialize(*args, **kwargs) 

2578 self.action_item_type = ActionItemType.objects.create_or_update( 

2579 type_name=self.ACTION_ITEM_TYPE_NAME, 

2580 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2581 self.SEVERITIES = {} 

2582 for value, name in ActionItem.SEVERITIES: 

2583 self.SEVERITIES[name] = value 

2584 

2585 def get_data(self): 

2586 data = get_resource_text(self.ACTIONS_URL) 

2587 if data: 

2588 return yaml.safe_load(data) 

2589 

2590 def get_packages(self): 

2591 data = self.get_data() 

2592 if data is None: 

2593 return 

2594 if data['format'] != 'multiarch-hints-1.0': 

2595 return None 

2596 data = data['hints'] 

2597 packages = collections.defaultdict(dict) 

2598 for item in data: 

2599 if 'source' not in item: 

2600 continue 

2601 package = item['source'] 

2602 wishlist = ActionItem.SEVERITY_WISHLIST 

2603 severity = self.SEVERITIES.get(item['severity'], wishlist) 

2604 pkg_severity = packages[package].get('severity', wishlist) 

2605 packages[package]['severity'] = max(severity, pkg_severity) 

2606 packages[package].setdefault('hints', []).append( 

2607 (item['description'], item['link'])) 

2608 return packages 
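# Illustrative sketch (hypothetical values): multiarch-hints.yaml is
# expected to look roughly like
#   format: multiarch-hints-1.0
#   hints:
#     - source: foo
#       severity: normal
#       description: "..."
#       link: <url>
# get_packages() above groups the hints per source package and keeps the
# highest severity seen for each.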

2609 

2610 def update_action_item(self, package, severity, description, extra_data): 

2611 action_item = package.get_action_item_for_type( 

2612 self.action_item_type.type_name) 

2613 if action_item is None: 

2614 action_item = ActionItem( 

2615 package=package, 

2616 item_type=self.action_item_type) 

2617 action_item.severity = severity 

2618 action_item.short_description = description 

2619 action_item.extra_data = extra_data 

2620 action_item.save() 

2621 

2622 def execute_main(self): 

2623 packages = self.get_packages() 

2624 if not packages: 

2625 return 

2626 

2627 with transaction.atomic(): 

2628 for name, data in packages.items(): 

2629 try: 

2630 package = SourcePackageName.objects.get(name=name) 

2631 except SourcePackageName.DoesNotExist: 

2632 continue 

2633 

2634 description = self.ACTION_ITEM_DESCRIPTION.format( 

2635 count=len(data['hints']), link=self.ACTIONS_WEB) 

2636 self.update_action_item(package, data['severity'], description, 

2637 data['hints']) 

2638 

2639 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2640 packages.keys()) 

2641 

2642 

2643class UpdateVcsWatchTask(BaseTask): 

2644 """ 

2645 Updates packages' vcswatch stats. 

2646 """ 

2647 

2648 class Scheduler(IntervalScheduler): 

2649 interval = 3600 

2650 

2651 ACTION_ITEM_TYPE_NAME = 'vcswatch-warnings-and-errors' 

2652 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/vcswatch-action-item.html' 

2653 VCSWATCH_URL = 'https://qa.debian.org/cgi-bin/vcswatch?package=%(package)s' 

2654 VCSWATCH_DATA_URL = 'https://qa.debian.org/data/vcswatch/vcswatch.json.gz' 

2655 

2656 VCSWATCH_STATUS_DICT = { 

2657 "NEW": { 

2658 "description": 

2659 '<a href="{vcswatch_url}">version in VCS is newer</a> than in ' 

2660 'repository, is it time to upload?', 

2661 "severity": ActionItem.SEVERITY_NORMAL, 

2662 }, 

2663 "COMMITS": { 

2664 "description": 

2665 '<a href="{vcswatch_url}">{commits} new commit{commits_s}</a> ' 

2666 'since last upload, is it time to release?', 

2667 "severity": ActionItem.SEVERITY_NORMAL, 

2668 }, 

2669 "OLD": { 

2670 'description': 

2671 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2672 'date</a>, push the missing commits.', 

2673 "severity": ActionItem.SEVERITY_HIGH, 

2674 }, 

2675 "UNREL": { 

2676 "description": 

2677 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2678 'date</a>, push the missing commits.', 

2679 "severity": ActionItem.SEVERITY_HIGH, 

2680 }, 

2681 "ERROR": { 

2682 "description": 

2683 '<a href="{vcswatch_url}">Failed to analyze the VCS ' 

2684 'repository</a>. Please troubleshoot and fix the issue.', 

2685 "severity": ActionItem.SEVERITY_HIGH, 

2686 }, 

2687 "DEFAULT": { 

2688 "description": 

2689 '<a href="{url}">Unexpected status</a> ({status}) reported by ' 

2690 'VcsWatch.', 

2691 "severity": ActionItem.SEVERITY_HIGH, 

2692 }, 

2693 } 

2694 

2695 def initialize(self, *args, **kwargs): 

2696 super(UpdateVcsWatchTask, self).initialize(*args, **kwargs) 

2697 self.vcswatch_ai_type = ActionItemType.objects.create_or_update( 

2698 type_name=self.ACTION_ITEM_TYPE_NAME, 

2699 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE 

2700 ) 

2701 

2702 def get_vcswatch_data(self): 

2703 text = get_resource_text(self.VCSWATCH_DATA_URL) 

2704 

2705 if text is None:

2706 return 

2707 

2708 # There's some text, let's load! 

2709 data = json.loads(text) 

2710 

2711 out = {} 

2712 # This saves a lot of list lookups later.

2713 for entry in data: 

2714 out[entry[u'package']] = entry 

2715 

2716 return out 
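# Illustrative sketch (hypothetical values): each vcswatch.json entry is
# expected to look like {"package": "foo", "status": "COMMITS",
# "commits": "3", "package_version": "1.0-1", "changelog_version": "1.0-2",
# "changelog_distribution": "UNRELEASED", ...}; the dict built above is
# keyed by the 'package' field.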

2717 

2718 def clean_package_info(self, package_infos_without_watch, todo): 

2719 """Takes a list of :class:`PackageData` which do not 

2720 have a watch entry and cleans it. Then schedule in todo what 

2721 to do with them. 

2722 """ 

2723 for package_info in package_infos_without_watch: 

2724 if 'QA' in package_info.value:

2725 package_info.value.pop('QA') 

2726 if (list(package_info.value.keys()) == ['checksum'] or 

2727 not package_info.value.keys()): 

2728 todo['drop']['package_infos'].append(package_info) 

2729 else: 

2730 package_info.value['checksum'] = get_data_checksum( 

2731 package_info.value 

2732 ) 

2733 todo['update']['package_infos'].append(package_info) 

2734 

2735 def update_action_item(self, package, vcswatch_data, action_item, todo): 

2736 """ 

2737 For a given :class:`ActionItem` and the corresponding vcswatch data,

2738 updates the todo dict as required.

2739 

2740 The return value depends on what has been done: if something is to

2741 be updated, returns True; if nothing is to be updated, returns

2742 False. If the calling loop should `continue`, returns `None`.

2743 

2744 :rtype: bool or `None` 

2745 """ 

2746 

2747 package_status = vcswatch_data['status'] 

2748 

2749 if package_status == "OK": 

2750 # Everything is fine, let's purge the action item, but not the

2751 # package's extracted info, as its QA URL is still relevant.

2752 if action_item:

2753 todo['drop']['action_items'].append(action_item) 

2754 

2755 # Nothing more to do! 

2756 return False 

2757 

2758 # NOT BEFORE "OK" check!! 

2759 if package_status not in self.VCSWATCH_STATUS_DICT:

2760 package_status = "DEFAULT" 

2761 

2762 # If we are here, then something is not OK. Let's check if we 

2763 # already had some intel regarding the current package status. 

2764 if action_item is None: 

2765 action_item = ActionItem( 

2766 package=package, 

2767 item_type=self.vcswatch_ai_type) 

2768 todo['add']['action_items'].append(action_item) 

2769 else: 

2770 todo['update']['action_items'].append(action_item) 

2771 

2772 # Computes the watch URL 

2773 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2774 

2775 if action_item.extra_data: 

2776 extra_data = action_item.extra_data 

2777 else: 

2778 extra_data = {} 

2779 

2780 # Fetches the long description and severity from 

2781 # the VCSWATCH_STATUS_DICT dict. 

2782 action_item.severity = \ 

2783 self.VCSWATCH_STATUS_DICT[package_status]['severity'] 

2784 

2785 nb_commits = int(vcswatch_data["commits"] or 0) 

2786 

2787 # The new data 

2788 new_extra_data = { 

2789 'vcswatch_url': vcswatch_url, 

2790 } 

2791 new_extra_data.update(vcswatch_data) 

2792 

2793 extra_data_match = all([ 

2794 new_extra_data[key] == extra_data.get(key, None) 

2795 for key in new_extra_data 

2796 ]) 

2797 

2798 # If everything is fine and we are not forcing the update 

2799 # then we proceed to the next package. 

2800 if extra_data_match and not self.force_update:

2801 # Remove from the todolist 

2802 todo['update']['action_items'].remove(action_item) 

2803 return False 

2804 else: 

2805 # Report for short description of the :class:`ActionItem` 

2806 desc = self.VCSWATCH_STATUS_DICT[package_status]['description'] 

2807 commits_s = 's' if nb_commits != 1 else '' 

2808 action_item.short_description = \ 

2809 desc.format(commits_s=commits_s, **new_extra_data) 

2810 action_item.extra_data = new_extra_data 

2811 return True 

2812 

2813 def update_package_info(self, package, vcswatch_data, package_info, key, 

2814 todo): 

2815 # Same thing with PackageData 

2816 if package_info is None: 

2817 package_info = PackageData( 

2818 package=package, 

2819 key=key, 

2820 ) 

2821 todo['add']['package_infos'].append(package_info) 

2822 else: 

2823 todo['update']['package_infos'].append(package_info) 

2824 

2825 # Computes the watch URL 

2826 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2827 

2828 new_value = dict(package_info.value) 

2829 if key == 'vcs_extra_links': 

2830 new_value['QA'] = vcswatch_url 

2831 elif key == 'vcswatch':

2832 if 'package_version' in vcswatch_data:

2833 new_value['package_version'] = vcswatch_data['package_version'] 

2834 if 'changelog_version' in vcswatch_data:

2835 new_value['changelog_version'] = vcswatch_data[ 

2836 'changelog_version'] 

2837 if 'changelog_distribution' in vcswatch_data:

2838 new_value['changelog_distribution'] = vcswatch_data[ 

2839 'changelog_distribution'] 

2840 

2841 new_value['checksum'] = get_data_checksum(new_value) 

2842 

2843 package_info_match = ( 

2844 new_value['checksum'] == package_info.value.get('checksum', None) 

2845 ) 

2846 

2847 if package_info_match and not self.force_update: 

2848 todo['update']['package_infos'].remove(package_info) 

2849 return False 

2850 else: 

2851 package_info.value = new_value 

2852 return True 

2853 

2854 def update_packages_item(self, packages, vcswatch_datas): 

2855 """Generates the lists of :class:`ActionItem` to be added, 

2856 deleted or updated regarding the status of their packages. 

2857 

2858 Categories of statuses are: 

2859 {u'COMMITS', u'ERROR', u'NEW', u'OK', u'OLD', u'UNREL'} 

2860 

2861 Basically, it fetches all info from :class:`PackageData`

2862 with key='vcs'; the entries without data matching vcswatch_datas are

2863 stored in one variable that is iterated through directly, and any

2864 previously stored data is purged. Then, all entries in

2865 that queryset that no longer hold relevant data are scheduled

2866 to be deleted. The others are only updated.

2867 

2868 All :class:`PackageData` matching vcswatch_datas 

2869 are stored in another variable. The same is done with the list of 

2870 :class:`ActionItem` that match this task type. 

2871 

2872 Then, it iterates over all vcswatch_datas' packages and tries to

2873 determine whether there is anything new; if so, it updates the

2874 prospective :class:`ActionItem` and :class:`PackageData` appropriately

2875 and schedules them to be updated. If no data existed before, then

2876 it creates them and schedules them to be added to the database.

2877 

2878 At the end, this function returns a dict of all instances of 

2879 :class:`ActionItem` and :class:`PackageData` stored 

2880 in subdicts depending on their class and what is to be done 

2881 with them. 

2882 

2883 :rtype: dict 

2884 

2885 """ 

2886 

2887 todo = { 

2888 'drop': { 

2889 'action_items': [], 

2890 'package_infos': [], 

2891 }, 

2892 'update': { 

2893 'action_items': [], 

2894 'package_infos': [], 

2895 }, 

2896 'add': { 

2897 'action_items': [], 

2898 'package_infos': [], 

2899 }, 

2900 } 

2901 

2902 package_info_keys = ['vcs_extra_links', 'vcswatch'] 

2903 package_infos = {} 

2904 for key in package_info_keys: 

2905 # Fetches all PackageData with a given key for packages having 

2906 # a vcswatch key. As the pair (package, key) is unique, there is a 

2907 # bijection between these data, and we fetch them classifying them 

2908 # by package name. 

2909 for package_info in PackageData.objects.select_related( 

2910 'package').filter(key=key).only('package__name', 'value'): 

2911 if package_info.package.name not in package_infos: 

2912 package_infos[package_info.package.name] = {} 

2913 package_infos[package_info.package.name][key] = package_info 

2914 

2915 # As :class:`PackageData` key=vcs_extra_links is shared, we 

2916 # have to clean up those with vcs watch_url that aren't in vcs_data 

2917 package_infos_without_watch = PackageData.objects.filter( 

2918 key='vcs_extra_links').exclude( 

2919 package__name__in=vcswatch_datas.keys()).only('value') 

2920 

2921 # Do the actual clean. 

2922 self.clean_package_info(package_infos_without_watch, todo) 

2923 

2924 # Fetches all :class:`ActionItem` for packages concerned by a vcswatch 

2925 # action. 

2926 action_items = { 

2927 action_item.package.name: action_item 

2928 for action_item in ActionItem.objects.select_related( 

2929 'package' 

2930 ).filter(item_type=self.vcswatch_ai_type) 

2931 } 

2932 

2933 for package in packages: 

2934 # Get the vcswatch_data from the whole vcswatch_datas 

2935 vcswatch_data = vcswatch_datas[package.name] 

2936 

2937 # Get the old action item for this warning, if it exists. 

2938 action_item = action_items.get(package.name, None) 

2939 

2940 # Updates the :class:`ActionItem`. If _ai_continue is None,

2941 # then there is nothing more to do with this package. 

2942 # If it is False, then no update is pending for the 

2943 # :class:`ActionItem`, else there is an update 

2944 # to do. 

2945 _ai_continue = self.update_action_item( 

2946 package, 

2947 vcswatch_data, 

2948 action_item, 

2949 todo) 

2950 

2951 _pi_continue = False 

2952 for key in package_info_keys: 

2953 try: 

2954 package_info = package_infos[package.name][key] 

2955 except KeyError: 

2956 package_info = None 

2957 

2958 _pi_continue |= self.update_package_info( 

2959 package, 

2960 vcswatch_data, 

2961 package_info, 

2962 key, 

2963 todo) 

2964 

2965 if not _ai_continue and not _pi_continue: 

2966 continue 

2967 

2968 return todo 

2969 

2970 def execute_main(self): 

2971 # Get the actual vcswatch json file from qa.debian.org 

2972 vcs_data = self.get_vcswatch_data() 

2973 

2974 # Only fetch the packages that are in the json dict. 

2975 packages = PackageName.objects.filter(name__in=vcs_data.keys()) 

2976 

2977 # Faster than fetching the action items one by one in a loop 

2978 # when handling each package. 

2979 packages.prefetch_related('action_items') 

2980 

2981 # Determine whether something is to be kept or dropped.

2982 todo = self.update_packages_item(packages, vcs_data) 

2983 

2984 with transaction.atomic(): 

2985 # Delete the :class:`ActionItem` that are obsolete, and also

2986 # the corresponding :class:`PackageData`.

2987 ActionItem.objects.delete_obsolete_items( 

2988 [self.vcswatch_ai_type], 

2989 vcs_data.keys()) 

2990 PackageData.objects.filter( 

2991 key='vcs_extra_links', 

2992 id__in=[ 

2993 package_info.id 

2994 for package_info in todo['drop']['package_infos'] 

2995 ] 

2996 ).delete() 

2997 

2998 # Then delete the :class:`ActionItem` that are to be deleted. 

2999 ActionItem.objects.filter( 

3000 item_type__type_name=self.vcswatch_ai_type.type_name, 

3001 id__in=[ 

3002 action_item.id 

3003 for action_item in todo['drop']['action_items'] 

3004 ] 

3005 ).delete() 

3006 

3007 # Then bulk_create the :class:`ActionItem` to add and the 

3008 # :class:`PackageData` 

3009 ActionItem.objects.bulk_create(todo['add']['action_items']) 

3010 PackageData.objects.bulk_create(todo['add']['package_infos']) 

3011 

3012 # Update existing entries 

3013 for action_item in todo['update']['action_items']: 

3014 action_item.save() 

3015 for package_info in todo['update']['package_infos']: 

3016 package_info.save() 

3017 

3018 

3019class TagPackagesWithRcBugs(BaseTask, PackageTagging): 

3020 """ 

3021 Performs an update of 'rc-bugs' tag for packages. 

3022 """ 

3023 

3024 class Scheduler(IntervalScheduler): 

3025 interval = 3600 

3026 

3027 TAG_NAME = 'tag:rc-bugs' 

3028 TAG_DISPLAY_NAME = 'rc bugs' 

3029 TAG_COLOR_TYPE = 'danger' 

3030 TAG_DESCRIPTION = 'The package has Release Critical bugs' 

3031 TAG_TABLE_TITLE = 'Packages with RC bugs' 

3032 

3033 def packages_to_tag(self): 

3034 all_bug_stats = PackageBugStats.objects.prefetch_related('package') 

3035 packages_list = [] 

3036 for bug_stats in all_bug_stats: 

3037 categories = bug_stats.stats 

3038 found = False 

3039 for category in categories:

3040 if found: 

3041 break 

3042 if category['category_name'] == 'rc':

3043 found = True 

3044 if category['bug_count'] > 0: 

3045 packages_list.append(bug_stats.package) 

3046 return packages_list 

3047 

3048 

3049class TagPackagesWithNewUpstreamVersion(BaseTask, PackageTagging): 

3050 """ 

3051     Performs an update of the 'new-upstream-version' tag for packages.

3052 """ 

3053 

3054 class Scheduler(IntervalScheduler): 

3055 interval = 3600 * 3 

3056 

3057 TAG_NAME = 'tag:new-upstream-version' 

3058 TAG_DISPLAY_NAME = 'new upstream version' 

3059 TAG_COLOR_TYPE = 'warning' 

3060 TAG_DESCRIPTION = 'The upstream has a newer version available' 

3061 TAG_TABLE_TITLE = 'Newer upstream version' 

3062 

3063 def packages_to_tag(self): 

3064 try: 

3065 action_type = ActionItemType.objects.get( 

3066 type_name='new-upstream-version') 

3067 except ActionItemType.DoesNotExist: 

3068 return [] 

3069 

3070 packages_list = [] 

3071 items = action_type.action_items.prefetch_related('package') 

3072 for item in items: 

3073 packages_list.append(item.package) 

3074 return packages_list 

3075 

3076 

3077class UpdateDependencySatisfactionTask(BaseTask): 

3078 """ 

3079 Fetches binary package installability results from qa.debian.org/dose 

3080 """ 

3081 

3082 class Scheduler(IntervalScheduler): 

3083 interval = 3600 * 3 

3084 

3085 BASE_URL = 'https://qa.debian.org/dose/debcheck/unstable_main/latest' 

3086 ACTION_ITEM_TYPE_NAME = 'debian-dependency-satisfaction' 

3087 ACTION_ITEM_TEMPLATE = 'debian/dependency-satisfaction-action-item.html' 

3088 

3089 def __init__(self, force_update=False, *args, **kwargs): 

3090 super(UpdateDependencySatisfactionTask, self).__init__(*args, **kwargs) 

3091 self.force_update = force_update 

3092 self.action_item_type = ActionItemType.objects.create_or_update( 

3093 type_name=self.ACTION_ITEM_TYPE_NAME, 

3094 full_description_template=self.ACTION_ITEM_TEMPLATE) 

3095 

3096 def set_parameters(self, parameters): 

3097 if 'force_update' in parameters: 

3098 self.force_update = parameters['force_update'] 

3099 

3100 def get_dependency_satisfaction(self): 

3101 url = '{}/each.txt'.format(self.BASE_URL) 

3102 content = get_resource_text(url, force_update=self.force_update, 

3103 only_if_updated=True) 

3104         if content is None:

3105 return 

3106 

3107 dep_sats = collections.defaultdict(set) 
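        # Each line of each.txt is assumed to be '#'-separated into the
        # fields unpacked below; a purely illustrative example:
        #   somepkg#1.2-3#True#anchor42#explanation text#amd64 arm64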

3108 for i, line in enumerate(content.splitlines()): 

3109 binpkg_name, ver, isnative, anchor, expl, arches = line.split('#') 

3110 try: 

3111 bin_package = BinaryPackageName.objects.get(name=binpkg_name) 

3112 srcpkg_name = bin_package.main_source_package_name 

3113 except BinaryPackageName.DoesNotExist: 

3114 continue 

3115 arches = set([arch.strip() for arch in arches.split()]) 

3116 # TODO: retrieve this list programmatically, either from 

3117 # https://api.ftp-master.debian.org/suite/testing 

3118 # or from the Architecture field in the Release file 

3119 # for testing (both lists should be equal). 

3120 arches = arches.intersection( 

3121 {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips', 

3122 'mips64el', 'mipsel', 'ppc64el', 's390x'}) 

3123 # only report problems for release architectures 

3124 if not arches: 

3125 continue 

3126 # if the package is arch:all, only report problems on amd64 

3127 if isnative != "True": 

3128 arches = arches.intersection({"amd64"}) 

3129 if not arches: 

3130 continue 

3131 dep_sats[srcpkg_name].add( 

3132 (binpkg_name, ver, tuple(arches), expl, anchor)) 

3133 # turn sets into lists 

3134 dep_sats = dict([(k, list(v)) for k, v in dep_sats.items()]) 

3135 return dep_sats 

3136 

3137 def update_action_item(self, package, unsats): 

3138 action_item = package.get_action_item_for_type( 

3139 self.action_item_type.type_name) 

3140         if action_item is None:

3141 action_item = ActionItem( 

3142 package=package, 

3143 item_type=self.action_item_type, 

3144 severity=ActionItem.SEVERITY_HIGH) 

3145 action_item.short_description = \ 

3146 "{count} binary package{plural} {have} unsatisfiable " \ 

3147 "dependencies".format( 

3148 count=len(unsats), 

3149 plural='' if len(unsats) == 1 else 's', 

3150 have='has' if len(unsats) == 1 else 'have', 

3151 ) 

3152 action_item.extra_data = { 

3153 'unsats': unsats, 

3154 'base_url': '{}/packages/'.format(self.BASE_URL), 

3155 } 

3156 action_item.save() 

3157 

3158 def execute(self): 

3159 dep_sats = self.get_dependency_satisfaction() 

3160         if dep_sats is None:

3161 return 

3162 

3163 with transaction.atomic(): 

3164 PackageData.objects.filter(key='dependency_satisfaction').delete() 

3165 

3166 packages = [] 

3167 pkgdata_list = [] 

3168 

3169 for name, unsats in dep_sats.items(): 

3170 try: 

3171 package = SourcePackageName.objects.get(name=name) 

3172 packages.append(package) 

3173 self.update_action_item(package, unsats) 

3174 except SourcePackageName.DoesNotExist: 

3175 continue 

3176 

3177 dep_sat_info = PackageData( 

3178 key='dependency_satisfaction', 

3179 package=package, 

3180 value={'dependency_satisfaction': unsats}) 

3181 pkgdata_list.append(dep_sat_info) 

3182 

3183 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

3184 packages) 

3185 PackageData.objects.bulk_create(pkgdata_list) 

3186 

3187 

3188class UpdateBuildDependencySatisfactionTask(BaseTask): 

3189 """ 

3190 Fetches source package installability results from qa.debian.org/dose 

3191 """ 

3192 

3193 class Scheduler(IntervalScheduler): 

3194 interval = 3600 * 3 

3195 

3196 BASE_URL = 'https://qa.debian.org/dose/debcheck/src_unstable_main/latest' 

3197 ACTION_ITEM_TYPE_NAME = 'debian-builddependency-satisfaction' 

3198 ACTION_ITEM_TEMPLATE = \ 

3199 'debian/builddependency-satisfaction-action-item.html' 

3200 

3201 def __init__(self, *args, **kwargs): 

3202 super(UpdateBuildDependencySatisfactionTask, self).__init__(*args, 

3203 **kwargs) 

3204 self.action_item_type = ActionItemType.objects.create_or_update( 

3205 type_name=self.ACTION_ITEM_TYPE_NAME, 

3206 full_description_template=self.ACTION_ITEM_TEMPLATE) 

3207 

3208 def get_dependency_satisfaction(self): 

3209 url = '{}/each.txt'.format(self.BASE_URL) 

3210 content = get_resource_text(url, force_update=self.force_update, 

3211 only_if_updated=True) 

3212         if content is None:

3213 return 

3214 

3215 dep_sats = collections.defaultdict(set) 
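        # Same assumed '#'-separated layout as in
        # UpdateDependencySatisfactionTask, except that the first field is a
        # source package name; a purely illustrative example:
        #   somesrc#1.2-3#False#anchor42#explanation text#amd64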

3216 for i, line in enumerate(content.splitlines()): 

3217 srcpkg_name, ver, isnative, anchor, expl, arches = line.split('#') 

3218 arches = set([arch.strip() for arch in arches.split()]) 

3219 # TODO: retrieve this list programmatically, either from 

3220 # https://api.ftp-master.debian.org/suite/testing 

3221 # or from the Architecture field in the Release file 

3222 # for testing (both lists should be equal). 

3223 arches = arches.intersection( 

3224 {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips', 

3225 'mips64el', 'mipsel', 'ppc64el', 's390x'}) 

3226 # only report problems for release architectures 

3227 if not arches: 

3228 continue 

3229 # if the source package only builds arch:all binary packages, only 

3230 # report problems on amd64 

3231 if isnative != "True": 

3232 arches = arches.intersection({"amd64"}) 

3233 if not arches: 

3234 continue 

3235 dep_sats[srcpkg_name].add( 

3236 (srcpkg_name, tuple(arches), expl, anchor)) 

3237 # turn sets into lists 

3238 dep_sats = dict([(k, list(v)) for k, v in dep_sats.items()]) 

3239 return dep_sats 

3240 

3241 def update_action_item(self, package, unsats): 

3242 action_item = package.get_action_item_for_type( 

3243 self.action_item_type.type_name) 

3244         if action_item is None:

3245 action_item = ActionItem( 

3246 package=package, 

3247 item_type=self.action_item_type, 

3248 severity=ActionItem.SEVERITY_HIGH) 

3249 action_item.short_description = \ 

3250 "source package has {count} unsatisfiable " \ 

3251 "build dependenc{plural}".format( 

3252 count=len(unsats), 

3253 plural='y' if len(unsats) == 1 else 'ies', 

3254 ) 

3255 action_item.extra_data = { 

3256 'unsats': unsats, 

3257 'base_url': '{}/packages/'.format(self.BASE_URL), 

3258 } 

3259 action_item.save() 

3260 

3261 def execute(self): 

3262 dep_sats = self.get_dependency_satisfaction() 

3263         if dep_sats is None:

3264 return 

3265 

3266 with transaction.atomic(): 

3267 PackageData.objects.filter( 

3268 key='builddependency_satisfaction').delete() 

3269 

3270 packages = [] 

3271 pkgdata_list = [] 

3272 

3273 for name, unsats in dep_sats.items(): 

3274 try: 

3275 package = SourcePackageName.objects.get(name=name) 

3276 packages.append(package) 

3277 self.update_action_item(package, unsats) 

3278 except SourcePackageName.DoesNotExist: 

3279 continue 

3280 

3281 dep_sat_info = PackageData( 

3282 key='builddependency_satisfaction', 

3283 package=package, 

3284 value={'builddependency_satisfaction': unsats}) 

3285 pkgdata_list.append(dep_sat_info) 

3286 

3287 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

3288 packages) 

3289 PackageData.objects.bulk_create(pkgdata_list) 

3290 

3291 

3292class UpdateDl10nStatsTask(BaseTask): 

3293 """ 

3294 Updates packages' l10n statistics. 

3295 """ 

3296 

3297 class Scheduler(IntervalScheduler): 

3298 interval = 3600 * 6 

3299 

3300 ACTION_ITEM_TYPE_NAME = 'dl10n' 

3301 ITEM_DESCRIPTION = \ 

3302 '<a href="{url}">Issues</a> found with some translations' 

3303 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/dl10n-action-item.html' 

3304 

3305 def initialize(self, *args, **kwargs): 

3306 super(UpdateDl10nStatsTask, self).initialize(*args, **kwargs) 

3307 self.l10n_action_item_type = \ 

3308 ActionItemType.objects.create_or_update( 

3309 type_name=self.ACTION_ITEM_TYPE_NAME, 

3310 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE) 

3311 

3312 def _load_l10n_stats(self): 

3313 url = 'https://i18n.debian.org/l10n-pkg-status/pkglist' 

3314 content = get_resource_text(url, force_update=self.force_update, 

3315 only_if_updated=True) 

3316         if content is None:

3317 return 

3318 

3319 def parse_score(score): 

3320 if score == '-': 

3321 return None 

3322 return int(score) 

3323 

3324 all_stats = {} 

3325 

3326 # The format of the file is (copied from its header): 

3327 # <package> <version> (<comma sperated scores>) <link> <todo> 

3328 line_re = re.compile( 

3329 r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)') 
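        # A matching line would look roughly like this (values made up for
        # illustration only):
        #   somepkg 1.0-1 (82,64) https://example.org/l10n/somepkg 1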

3330 for line in content.splitlines(): 

3331             if not line or line.startswith('#'):

3332 continue 

3333 match = line_re.search(line) 

3334             if not match:

3335 logger.warning('Failed to parse l10n pkglist line: %s', line) 

3336 continue 

3337 

3338 src_pkgname = match.group(1) 

3339 try: 

3340 scores = match.group(3).split(',') 

3341 score_debian = parse_score(scores[0]) 

3342 score_other = parse_score(scores[1]) 

3343 # <todo> is a "0" or "1" string, so convert through int to get 

3344 # a proper bool 

3345 todo = bool(int(match.group(5))) 

3346 except (IndexError, ValueError): 

3347 logger.warning( 

3348 'Failed to parse l10n scores: %s', 

3349 line, exc_info=1) 

3350 continue 

3351 link = match.group(4) 

3352             if not score_debian and not score_other:

3353 continue 

3354 

3355 all_stats[src_pkgname] = { 

3356 'score_debian': score_debian, 

3357 'score_other': score_other, 

3358 'link': link, 

3359 'todo': todo, 

3360 } 

3361 

3362 return all_stats 

3363 

3364 def update_action_item(self, package, package_stats): 

3365 todo = package_stats['todo'] 

3366 

3367 # Get the old action item, if it exists. 

3368 l10n_action_item = package.get_action_item_for_type( 

3369 self.l10n_action_item_type.type_name) 

3370 if not todo: 

3371 if l10n_action_item: 

3372 # If the item previously existed, delete it now since there 

3373 # are no longer any warnings/errors. 

3374 l10n_action_item.delete() 

3375 return 

3376 

3377         # The package didn't previously have an action item: create it now

3378 if l10n_action_item is None: 

3379 desc = self.ITEM_DESCRIPTION.format(url=package_stats['link']) 

3380 l10n_action_item = ActionItem( 

3381 package=package, 

3382 item_type=self.l10n_action_item_type, 

3383 severity=ActionItem.SEVERITY_LOW, 

3384 short_description=desc) 

3385 

3386 if l10n_action_item.extra_data: 

3387 old_extra_data = l10n_action_item.extra_data 

3388             if old_extra_data == package_stats:

3389 # No need to update 

3390 return 

3391 

3392 l10n_action_item.extra_data = package_stats 

3393 

3394 l10n_action_item.save() 

3395 

3396 def execute_main(self): 

3397 stats = self._load_l10n_stats() 

3398         if not stats:

3399 return 

3400 

3401 with transaction.atomic(): 

3402 PackageData.objects.filter(key='dl10n').delete() 

3403 

3404 packages = [] 

3405 pkgdata_list = [] 

3406 

3407 for name, stat in stats.items(): 

3408 try: 

3409 package = SourcePackageName.objects.get(name=name) 

3410 packages.append(package) 

3411 self.update_action_item(package, stat) 

3412 except SourcePackageName.DoesNotExist: 

3413 continue 

3414 

3415 dl10n_stat = PackageData( 

3416 key='dl10n', 

3417 package=package, 

3418 value=stat) 

3419 pkgdata_list.append(dl10n_stat) 

3420 

3421 ActionItem.objects.delete_obsolete_items( 

3422 [self.l10n_action_item_type], packages) 

3423 PackageData.objects.bulk_create(pkgdata_list) 

3424 

3425 

3426class UpdateDebianPatchesTask(BaseTask, ImportExternalData): 

3427 """ 

3428 Import statistics about Debian patches from UDD. 

3429 """ 

3430 

3431 class Scheduler(IntervalScheduler): 

3432 interval = 3600 * 6 

3433 

3434 data_url = 'https://udd.debian.org/patches.cgi?json=1' 

3435 action_item_types = [ 

3436 { 

3437 'type_name': 'debian-patches', 

3438 'full_description_template': 

3439 'debian/debian-patches-action-item.html', 

3440 }, 

3441 ] 
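    # Sketch of one entry in the JSON payload, based only on the keys read
    # in this class (field names from the code, values made up):
    #   {'source': 'somepkg', 'version': '1.0-1', 'status': 'patches',
    #    'forwarded_invalid': 1, 'forwarded_no': 2}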

3442 

3443 def generate_package_data(self): 
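        # Returns a list of (PackageData key, {package_name: data}) pairs;
        # the ImportExternalData mixin is assumed to store each mapping
        # under the given key (assumption inferred from how the data is
        # shaped here, not from the mixin's documented API).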

3444 pkgdata = {} 

3445 for entry in self.external_data: 

3446 source = entry.get('source') 

3447             if source:

3448 data = entry.copy() 

3449 data['url'] = self._generate_url(entry) 

3450 pkgdata[source] = data 

3451 

3452 return [ 

3453 ('debian-patches', pkgdata), 

3454 ] 

3455 

3456 @staticmethod 

3457 def _generate_url(entry): 
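        # For example, with hypothetical values, an entry of
        # {'source': 'somepkg', 'version': '1.0-1'} would yield
        # https://udd.debian.org/patches.cgi?src=somepkg&version=1.0-1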

3458 query_string = urlencode({ 

3459 'src': entry.get('source'), 

3460 'version': entry.get('version'), 

3461 }) 

3462 return f"https://udd.debian.org/patches.cgi?{query_string}" 

3463 

3464 def generate_action_items(self): 

3465 pkgdata = {} 

3466 for entry in self.external_data: 

3467 # Skip invalid entries and those without (problematic) patches 

3468 source = entry.get('source') 

3469 forwarded_invalid = entry.get('forwarded_invalid', 0) 

3470 forwarded_no = entry.get('forwarded_no', 0) 

3471             if not source:

3472 continue # Invalid, no source package data 

3473 if entry.get('status') != 'patches': 

3474 continue # No patch at all 

3475 if forwarded_invalid == 0 and forwarded_no == 0: 

3476 continue # No problematic patch 

3477 

3478             # Build the parameters for the action item

3479 severity = ActionItem.SEVERITY_LOW 

3480 desc = '' 

3481 url = self._generate_url(entry) 

3482 

3483 if forwarded_invalid: 

3484 severity = ActionItem.SEVERITY_HIGH 

3485 count = f"{forwarded_invalid} patch" 

3486 if forwarded_invalid > 1: 

3487 count += 'es' 

3488 count = f'<a href="{url}">{count}</a>' 

3489 desc += f"{count} with invalid metadata" 

3490 

3491 if forwarded_no: 

3492 if desc: 

3493 desc += ', ' 

3494 count = f"{forwarded_no} patch" 

3495 if forwarded_no > 1: 

3496 count += 'es' 

3497 count = f'<a href="{url}">{count}</a>' 

3498 desc += f"{count} to forward upstream" 

3499 

3500 extra_data = entry.copy() 

3501 extra_data['url'] = url 

3502 

3503 # Record the action item parameters 

3504 pkgdata[source] = { 

3505 'short_description': f"debian/patches: {desc}", 

3506 'severity': severity, 

3507 'extra_data': extra_data, 

3508 } 

3509 

3510 return [ 

3511 ('debian-patches', pkgdata), 

3512 ]