Coverage for distro_tracker/vendor/debian/tracker_tasks.py: 84%

1778 statements  

« prev     ^ index     » next       coverage.py v6.5.0, created at 2025-08-19 06:37 +0000

1# Copyright 2013-2021 The Distro Tracker Developers 

2# See the COPYRIGHT file at the top-level directory of this distribution and 

3# at https://deb.li/DTAuthors 

4# 

5# This file is part of Distro Tracker. It is subject to the license terms 

6# in the LICENSE file found in the top-level directory of this 

7# distribution and at https://deb.li/DTLicense. No part of Distro Tracker, 

8# including this file, may be copied, modified, propagated, or distributed 

9# except according to the terms contained in the LICENSE file. 

10 

11""" 

12Debian-specific tasks. 

13""" 

14 

15import collections 

16import itertools 

17import json 

18import logging 

19import os 

20import re 

21import urllib.parse 

22import warnings 

23from enum import Enum 

24 

25from bs4 import BeautifulSoup as soup, MarkupResemblesLocatorWarning 

26 

27from debian import deb822, debian_support 

28from debian.debian_support import AptPkgVersion 

29 

30import debianbts 

31 

32from django.conf import settings 

33from django.core.exceptions import ValidationError 

34from django.db import transaction 

35from django.db.models import Prefetch 

36from django.utils.http import urlencode 

37 

38import yaml 

39 

40from distro_tracker.accounts.models import UserEmail 

41from distro_tracker.core.models import ( 

42 ActionItem, 

43 ActionItemType, 

44 BinaryPackageBugStats, 

45 BinaryPackageName, 

46 BugDisplayManagerMixin, 

47 PackageBugStats, 

48 PackageData, 

49 PackageName, 

50 Repository, 

51 SourcePackageDeps, 

52 SourcePackageName 

53) 

54from distro_tracker.core.tasks import BaseTask 

55from distro_tracker.core.tasks.mixins import ImportExternalData, PackageTagging 

56from distro_tracker.core.tasks.schedulers import IntervalScheduler 

57from distro_tracker.core.utils import get_or_none 

58from distro_tracker.core.utils.http import get_resource_text 

59from distro_tracker.core.utils.misc import get_data_checksum 

60from distro_tracker.core.utils.packages import ( 

61 html_package_list, 

62 package_url 

63) 

64from distro_tracker.vendor.debian.models import ( 

65 BuildLogCheckStats, 

66 LintianStats, 

67 PackageExcuses, 

68 PackageTransition, 

69 UbuntuPackage 

70) 

71 

72from .models import DebianContributor 

73 

74logger = logging.getLogger(__name__) 

75logger_input = logging.getLogger('distro_tracker.input') 

76 

77warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning) 

78 

79 

class RetrieveDebianMaintainersTask(BaseTask):
    """
    Retrieves (and updates if necessary) a list of Debian Maintainers.

    The list is fetched from ftp-master's ``dm.txt`` and each contributor's
    :class:`DebianContributor <distro_tracker.vendor.debian.models.DebianContributor>`
    record is updated with the ``is_debian_maintainer`` flag and the list of
    packages they are allowed to upload.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24  # run daily

    def execute_main(self):
        url = "https://ftp-master.debian.org/dm.txt"
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # No need to do anything if the cached item was still not updated
            return

        # Map each maintainer email to the list of packages they may upload.
        maintainers = {}
        lines = content.splitlines()
        for stanza in deb822.Deb822.iter_paragraphs(lines):
            if 'Uid' in stanza and 'Allow' in stanza:
                # Allow is a comma-separated string of 'package (DD fpr)'
                # items, where DD fpr is the fingerprint of the DD that
                # granted the permission
                _, email = stanza['Uid'].rsplit(' ', 1)
                email = email.strip('<>')
                for pair in stanza['Allow'].split(','):
                    # Only the package name is needed; the granting DD's
                    # fingerprint is discarded.
                    pkg, _ = pair.strip().split()
                    maintainers.setdefault(email, [])
                    maintainers[email].append(pkg)

        # Now update the developer information
        with transaction.atomic():
            # Reset all old maintainers first.
            qs = DebianContributor.objects.filter(is_debian_maintainer=True)
            qs.update(is_debian_maintainer=False)

            for email, packages in maintainers.items():
                try:
                    user_email, _ = UserEmail.objects.get_or_create(email=email)
                except ValidationError:
                    # Skip malformed addresses but record them for the admins.
                    logger_input.info('%s refers to invalid email "%s".',
                                      url, email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.is_debian_maintainer = True
                contributor.allowed_packages = packages
                contributor.save()

131 

132 

class RetrieveLowThresholdNmuTask(BaseTask):
    """
    Updates the list of Debian Maintainers which agree with the lowthreshold
    NMU.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24  # run daily

    def _retrieve_emails(self):
        """
        Helper function which obtains the list of emails of maintainers that
        agree with the lowthreshold NMU.

        :returns: A (possibly empty) list of email addresses. Always a list,
            so callers can iterate the result unconditionally.
        """
        url = 'https://wiki.debian.org/LowThresholdNmu?action=raw'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # Bug fix: return an empty list instead of the implicit None so
            # that execute_main() does not raise TypeError when iterating.
            return []

        emails = []
        devel_php_RE = re.compile(
            r'https?://qa\.debian\.org/developer\.php\?login=([^\s&|]+)')
        word_RE = re.compile(r'^\w+$')
        for line in content.splitlines():
            match = devel_php_RE.search(line)
            while match:  # look for several matches on the same line
                email = None
                login = match.group(1)
                if word_RE.match(login):
                    # A bare login name maps to a @debian.org address.
                    email = login + '@debian.org'
                elif login.find('@') >= 0:
                    # The login is already a full email address.
                    email = login
                if email:
                    emails.append(email)
                # Continue scanning the remainder of the line.
                line = line[match.end():]
                match = devel_php_RE.search(line)
        return emails

    def execute_main(self):
        emails = self._retrieve_emails()
        with transaction.atomic():
            # Reset all threshold flags first.
            qs = DebianContributor.objects.filter(
                agree_with_low_threshold_nmu=True)
            qs.update(agree_with_low_threshold_nmu=False)

            for email in emails:
                try:
                    # Renamed from the original (which rebound `email`) for
                    # clarity and consistency with the sibling tasks.
                    user_email, _ = UserEmail.objects.get_or_create(
                        email=email)
                except ValidationError:
                    logger_input.info(
                        'LowThresholdNmu refers to invalid email "%s".',
                        email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.agree_with_low_threshold_nmu = True
                contributor.save()

193 

194 

class UpdatePackageBugStats(BaseTask, BugDisplayManagerMixin):
    """
    Updates the BTS bug stats for all packages (source, binary and pseudo).
    Creates :class:`distro_tracker.core.ActionItem` instances for packages
    which have bugs tagged help or patch.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600  # run hourly

    PATCH_BUG_ACTION_ITEM_TYPE_NAME = 'debian-patch-bugs-warning'
    HELP_BUG_ACTION_ITEM_TYPE_NAME = 'debian-help-bugs-warning'

    PATCH_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged patch in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    HELP_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged help in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/patch-bugs-action-item.html'
    HELP_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/help-bugs-action-item.html'

    # Bug categories exposed by the UDD resources, in the order in which
    # their counts appear on each line of the response.
    bug_categories = (
        'rc',
        'normal',
        'wishlist',
        'fixed',
        'patch',
    )

    def initialize(self, *args, **kwargs):
        super(UpdatePackageBugStats, self).initialize(*args, **kwargs)
        # The :class:`distro_tracker.core.models.ActionItemType` instances
        # which this task can create.
        self.patch_item_type = ActionItemType.objects.create_or_update(
            type_name=self.PATCH_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE)
        self.help_item_type = ActionItemType.objects.create_or_update(
            type_name=self.HELP_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.HELP_ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_tagged_bug_stats(self, tag, user=None):
        """
        Using the BTS interface, retrieves the statistics of bugs with a
        particular tag.

        :param tag: The tag for which the statistics are required.
        :type tag: string
        :param user: The email of the user who tagged the bug with the given
            tag.
        :type user: string

        :returns: A dict mapping package names to the count of bugs with the
            given tag.
        """
        # Prefer the Debian CA bundle when it is available on the host.
        debian_ca_bundle = '/etc/ssl/ca-debian/ca-certificates.crt'
        if os.path.exists(debian_ca_bundle):
            os.environ['SSL_CERT_FILE'] = debian_ca_bundle
        if user:
            bug_numbers = debianbts.get_usertag(user, tags=[tag]).get(tag, [])
        else:
            bug_numbers = debianbts.get_bugs(tag=tag)

        # Match each retrieved bug ID to a package and then find the aggregate
        # count for each package.
        bug_stats = {}
        bugs = debianbts.get_status(bug_numbers)
        for bug in bugs:
            # Closed or fixed bugs do not count towards the stats.
            if bug.done or bug.fixed_versions or bug.pending == 'done':
                continue

            bug_stats.setdefault(bug.package, 0)
            bug_stats[bug.package] += 1

        return bug_stats

    def _extend_bug_stats(self, bug_stats, extra_stats, category_name):
        """
        Helper method which adds extra bug stats to an already existing list of
        stats.

        :param bug_stats: An already existing list of bug stats. Maps package
            names to list of bug category descriptions.
        :type bug_stats: dict
        :param extra_stats: Extra bug stats which should be added to
            ``bug_stats``. Maps package names to integers representing bug
            counts.
        :type extra_stats: dict
        :param category_name: The name of the bug category which is being added
        :type category_name: string
        """
        for package, count in extra_stats.items():
            bug_stats.setdefault(package, [])
            bug_stats[package].append({
                'category_name': category_name,
                'bug_count': count,
            })

    def _create_patch_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged patch.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.PATCH_BUG_ACTION_ITEM_TYPE_NAME)

        if 'patch' not in bug_stats or bug_stats['patch']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged patch anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged patch, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.patch_item_type)

        bug_count = bug_stats['patch']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'patch')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = \
            self.PATCH_ITEM_SHORT_DESCRIPTION.format(url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'merged_count': bug_stats['patch'].get('merged_count', 0),
            'url': url,
            'merged_url': self.bug_manager.get_bug_tracker_url(
                package.name, 'source', 'patch-merged'),
        }
        action_item.save()

    def _create_help_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged help.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.HELP_BUG_ACTION_ITEM_TYPE_NAME)

        if 'help' not in bug_stats or bug_stats['help']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged help anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged help, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.help_item_type)

        bug_count = bug_stats['help']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'help')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = self.HELP_ITEM_SHORT_DESCRIPTION.format(
            url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'url': url,
        }
        action_item.save()

    def _create_action_items(self, package_bug_stats):
        """
        Method which creates a :class:`distro_tracker.core.models.ActionItem`
        instance for a package based on the given package stats.

        For now, an action item is created if the package either has bugs
        tagged as help or patch.
        """
        # Transform the bug stats to a structure easier to pass to functions
        # for particular bug-category action items.
        bug_stats = {
            category['category_name']: category
            for category in package_bug_stats.stats
        }
        package = package_bug_stats.package
        self._create_patch_bug_action_item(package, bug_stats)
        self._create_help_bug_action_item(package, bug_stats)

    def _get_udd_bug_stats(self):
        """
        Fetches and parses the UDD ddpo-bugs resource.

        :returns: A dict mapping package names to lists of per-category bug
            stat dicts, or ``None`` when the resource could not be fetched.
        """
        url = 'https://udd.debian.org/cgi-bin/ddpo-bugs.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Each line in the response should be bug stats for a single package
        bug_stats = {}
        for line in response_content.splitlines():
            line = line.strip()
            try:
                package_name, bug_counts = line, ''
                if line.startswith('src:'):
                    # The 'src' prefix itself is not needed.
                    _, package_name, bug_counts = line.split(':', 2)
                else:
                    package_name, bug_counts = line.split(':', 1)
                # Merged counts are in parentheses so remove those before
                # splitting the numbers
                bug_counts = re.sub(r'[()]', ' ', bug_counts).split()
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.warning(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts, exc_info=1)
                continue

            # Match the extracted counts with category names. Counts come in
            # (bug_count, merged_count) pairs, one pair per category.
            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                    'merged_count': merged_count,
                }
                for category_name, (bug_count, merged_count) in zip(
                    self.bug_categories, zip(bug_counts[::2], bug_counts[1::2]))
            ]

        return bug_stats

    def _remove_obsolete_action_items(self, package_names):
        """
        Removes action items for packages which no longer have any bug stats.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.patch_item_type, self.help_item_type],
            non_obsolete_packages=package_names)

    def update_source_and_pseudo_bugs(self):
        """
        Performs the update of bug statistics for source and pseudo packages.
        """
        # First get the bug stats exposed by the UDD.
        bug_stats = self._get_udd_bug_stats()
        if not bug_stats:
            bug_stats = {}

        # Add in help bugs from the BTS interface
        try:
            help_bugs = self._get_tagged_bug_stats('help')
            self._extend_bug_stats(bug_stats, help_bugs, 'help')
        except RuntimeError:
            logger.exception("Could not get bugs tagged help")

        # Add in newcomer bugs from the BTS interface
        try:
            newcomer_bugs = self._get_tagged_bug_stats('newcomer')
            self._extend_bug_stats(bug_stats, newcomer_bugs, 'newcomer')
        except RuntimeError:
            logger.exception("Could not get bugs tagged newcomer")

        with transaction.atomic():
            # Clear previous stats
            PackageBugStats.objects.all().delete()
            self._remove_obsolete_action_items(bug_stats.keys())
            # Get all packages which have updated stats, along with their
            # action items in 2 DB queries.
            packages = PackageName.objects.filter(name__in=bug_stats.keys())
            # Bug fix: prefetch_related() returns a new queryset and does not
            # modify the original, so its result must be assigned back.
            packages = packages.prefetch_related('action_items')

            # Update stats and action items.
            stats = []
            for package in packages:
                # Save the raw package bug stats
                package_bug_stats = PackageBugStats(
                    package=package, stats=bug_stats[package.name])
                stats.append(package_bug_stats)

                # Add action items for the package.
                self._create_action_items(package_bug_stats)

            PackageBugStats.objects.bulk_create(stats)

    def update_binary_bugs(self):
        """
        Performs the update of bug statistics for binary packages.
        """
        url = 'https://udd.debian.org/cgi-bin/bugs-binpkgs-pts.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Extract known binary package bug stats: each line is a separate pkg
        bug_stats = {}
        for line in response_content.splitlines():
            package_name, bug_counts = line.split(None, 1)
            bug_counts = bug_counts.split()
            try:
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.exception(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts)
                continue

            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                }
                for category_name, bug_count in zip(
                    self.bug_categories, bug_counts)
            ]

        with transaction.atomic():
            # Clear previous stats
            BinaryPackageBugStats.objects.all().delete()
            packages = \
                BinaryPackageName.objects.filter(name__in=bug_stats.keys())
            # Create new stats in a single query
            stats = [
                BinaryPackageBugStats(package=package,
                                      stats=bug_stats[package.name])
                for package in packages
            ]
            BinaryPackageBugStats.objects.bulk_create(stats)

    def execute_main(self):
        # Stats for source and pseudo packages is retrieved from a different
        # resource (with a different structure) than stats for binary packages.
        self.update_source_and_pseudo_bugs()
        self.update_binary_bugs()

557 

558 

class UpdateLintianStatsTask(BaseTask):
    """
    Updates packages' lintian stats.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 4  # run every four hours

    ACTION_ITEM_TYPE_NAME = 'lintian-warnings-and-errors'
    ITEM_DESCRIPTION = 'lintian reports <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/lintian-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateLintianStatsTask, self).initialize(*args, **kwargs)
        # The ActionItemType this task creates for packages with
        # lintian warnings or errors.
        self.lintian_action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def get_lintian_stats(self):
        """
        Fetches and parses the lintian QA list from UDD.

        :returns: A dict mapping package names to dicts of per-category
            counts, or ``None`` when the cached resource was not updated.
        """
        url = 'https://udd.debian.org/lintian-qa-list.txt'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        all_stats = {}
        # Category names in the order in which the counts appear on each
        # line of the resource.
        categories = (
            'errors',
            'warnings',
            'pedantics',
            'experimentals',
            'overriddens',
        )
        for line in content.splitlines():
            package, stats = line.split(None, 1)
            stats = stats.split()
            try:
                all_stats[package] = {
                    category: int(count)
                    for count, category in zip(stats, categories)
                }
            except ValueError:
                logger.exception(
                    'Failed to parse lintian information for %s: %s',
                    package, line)
                continue

        return all_stats

    def update_action_item(self, package, lintian_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        :class:`LintianStats <distro_tracker.vendor.debian.models.LintianStats`
        given in ``package_stats``. If the package has errors or warnings an
        :class:`ActionItem` is created.
        """
        package_stats = lintian_stats.stats
        # Default both counts to 0: the original defaulted only 'errors',
        # which could store None in the action item's extra_data.
        warnings, errors = (
            package_stats.get('warnings', 0), package_stats.get('errors', 0))
        # Get the old action item for this warning, if it exists.
        lintian_action_item = package.get_action_item_for_type(
            self.lintian_action_item_type.type_name)
        if not warnings and not errors:
            if lintian_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                lintian_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if lintian_action_item is None:
            lintian_action_item = ActionItem(
                package=package,
                item_type=self.lintian_action_item_type)

        lintian_url = lintian_stats.get_lintian_url()
        new_extra_data = {
            'warnings': warnings,
            'errors': errors,
            'lintian_url': lintian_url,
        }
        if lintian_action_item.extra_data:
            old_extra_data = lintian_action_item.extra_data
            if (old_extra_data['warnings'] == warnings and
                    old_extra_data['errors'] == errors):
                # No need to update
                return

        lintian_action_item.extra_data = new_extra_data

        # Build a human-readable summary with proper pluralization.
        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')

        lintian_action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=lintian_url,
            report=report)

        # If there are errors make the item a high severity issue
        if errors:
            lintian_action_item.severity = ActionItem.SEVERITY_HIGH

        lintian_action_item.save()

    def execute_main(self):
        all_lintian_stats = self.get_lintian_stats()
        if not all_lintian_stats:
            return

        # Discard all old stats
        LintianStats.objects.all().delete()

        packages = PackageName.objects.filter(name__in=all_lintian_stats.keys())
        # Bug fix: prefetch_related() returns a new queryset; the result must
        # be assigned back or the prefetch is silently lost.
        packages = packages.prefetch_related('action_items')
        # Remove action items for packages which no longer have associated
        # lintian data.
        ActionItem.objects.delete_obsolete_items(
            [self.lintian_action_item_type], all_lintian_stats.keys())

        stats = []
        for package in packages:
            package_stats = all_lintian_stats[package.name]
            # Save the raw lintian stats.
            lintian_stats = LintianStats(package=package, stats=package_stats)
            stats.append(lintian_stats)
            # Create an ActionItem if there are errors or warnings
            self.update_action_item(package, lintian_stats)

        LintianStats.objects.bulk_create(stats)

699 

700 

class UpdateAppStreamStatsTask(BaseTask):
    """
    Updates packages' AppStream issue hints data.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6  # run every six hours

    ACTION_ITEM_TYPE_NAME = 'appstream-issue-hints'
    ITEM_DESCRIPTION = 'AppStream hints: {report} for {packageurllist}'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/appstream-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateAppStreamStatsTask, self).initialize(*args, **kwargs)
        self.appstream_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)
        # Maps AppStream hint tag names to their severity string.
        self._tag_severities = {}

    def _load_tag_severities(self):
        """Populates ``self._tag_severities`` from the hint definitions."""
        url = 'https://appstream.debian.org/hints/sid/hint-definitions.json'
        json_data = get_resource_text(url, force_update=True)

        data = json.loads(json_data)
        for tag, info in data.items():
            self._tag_severities[tag] = info['severity']

    def _load_appstream_hint_stats(self, section, arch, all_stats=None):
        """
        Accumulates per-source-package AppStream hint counts for one
        section/architecture into ``all_stats``.

        :param all_stats: An optional dict to accumulate into; a fresh dict
            is created when omitted.
        :returns: The updated ``all_stats`` dict.
        """
        # Bug fix: the original used a mutable default argument
        # (``all_stats={}``), which is shared across calls and would
        # accumulate stale stats between task runs.
        if all_stats is None:
            all_stats = {}
        url = 'https://appstream.debian.org/hints/sid/{}/Hints-{}.json.gz' \
            .format(section, arch)
        hints_json = get_resource_text(url, force_update=self.force_update)
        if not hints_json:
            # Nothing retrieved: leave the accumulated stats untouched.
            return all_stats

        hints = json.loads(hints_json)
        for hint in hints:
            pkid = hint['package']
            parts = pkid.split('/')
            package_name = parts[0]

            # get the source package for this binary package name
            src_pkgname = None
            if SourcePackageName.objects.exists_with_name(package_name):
                package = SourcePackageName.objects.get(name=package_name)
                src_pkgname = package.name
            elif BinaryPackageName.objects.exists_with_name(package_name):
                bin_package = BinaryPackageName.objects.get(name=package_name)
                package = bin_package.main_source_package_name
                src_pkgname = package.name
            else:
                src_pkgname = package_name

            if src_pkgname not in all_stats:
                all_stats[src_pkgname] = {}
            if package_name not in all_stats[src_pkgname]:
                all_stats[src_pkgname][package_name] = {}

            # Tally hints by severity; only errors, warnings and infos are
            # tracked, anything else is ignored.
            for h in hint['hints'].values():
                for e in h:
                    severity = self._tag_severities[e['tag']]
                    if severity == "error":
                        sevkey = "errors"
                    elif severity == "warning":
                        sevkey = "warnings"
                    elif severity == "info":
                        sevkey = "infos"
                    else:
                        continue
                    if sevkey not in all_stats[src_pkgname][package_name]:
                        all_stats[src_pkgname][package_name][sevkey] = 1
                    else:
                        all_stats[src_pkgname][package_name][sevkey] += 1

        return all_stats

    def _get_appstream_url(self, package, bin_pkgname):
        """
        Returns the AppStream URL for the given PackageName in :package.
        """

        src_package = get_or_none(SourcePackageName, pk=package.pk)
        if not src_package:
            return '#'

        if not src_package.main_version:
            return '#'

        component = 'main'
        main_entry = src_package.main_entry
        if main_entry:
            component = main_entry.component
            if not component:
                component = 'main'

        return (
            'https://appstream.debian.org/sid/{}/issues/{}.html'
            .format(component, bin_pkgname)
        )

    def _create_final_stats_report(self, package, package_stats):
        """
        Returns a transformed statistics report to be stored in the database.
        """

        as_report = package_stats.copy()
        for bin_package in list(as_report.keys()):
            # we currently don't want to display info-type hints
            as_report[bin_package].pop('infos', None)
            if as_report[bin_package]:
                as_report[bin_package]['url'] = \
                    self._get_appstream_url(package, bin_package)
            else:
                # Drop binary packages left with no displayable hints.
                as_report.pop(bin_package)
        return as_report

    def update_action_item(self, package, package_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        AppStream hint statistics given in ``package_stats``.
        If the package has errors or warnings an
        :class:`ActionItem` is created.
        """

        total_warnings = 0
        total_errors = 0
        packageurllist = []
        for bin_pkgname, info in package_stats.items():
            total_warnings += info.get('warnings', 0)
            total_errors += info.get('errors', 0)
            url = self._get_appstream_url(package, bin_pkgname)
            packageurllist.append(f'<a href="{url}">{bin_pkgname}</a>')

        # Get the old action item for this warning, if it exists.
        appstream_action_item = package.get_action_item_for_type(
            self.appstream_action_item_type.type_name)
        if not total_warnings and not total_errors:
            if appstream_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                appstream_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if appstream_action_item is None:
            appstream_action_item = ActionItem(
                package=package,
                item_type=self.appstream_action_item_type)

        as_report = self._create_final_stats_report(package, package_stats)

        if appstream_action_item.extra_data:
            old_extra_data = appstream_action_item.extra_data
            if old_extra_data == as_report:
                # No need to update
                return

        appstream_action_item.extra_data = as_report

        # Build a human-readable summary with proper pluralization.
        if total_errors and total_warnings:
            short_report = '{} error{} and {} warning{}'.format(
                total_errors,
                's' if total_errors > 1 else '',
                total_warnings,
                's' if total_warnings > 1 else '')
        elif total_errors:
            short_report = '{} error{}'.format(
                total_errors,
                's' if total_errors > 1 else '')
        elif total_warnings:
            short_report = '{} warning{}'.format(
                total_warnings,
                's' if total_warnings > 1 else '')

        appstream_action_item.short_description = \
            self.ITEM_DESCRIPTION.format(packageurllist=",".join(
                packageurllist), report=short_report)

        # If there are errors make the item a high severity issue;
        # otherwise, make sure to set the severity as normal in case the item
        # existed already
        if total_errors:
            appstream_action_item.severity = ActionItem.SEVERITY_HIGH
        else:
            appstream_action_item.severity = ActionItem.SEVERITY_NORMAL

        appstream_action_item.save()

    def execute_main(self):
        self._load_tag_severities()
        all_stats = {}
        repository = Repository.objects.get(default=True)
        # NOTE(review): stats are only collected for amd64 — presumably
        # representative of all architectures; confirm.
        arch = "amd64"
        for component in repository.components:
            self._load_appstream_hint_stats(component, arch, all_stats)
        if not all_stats:
            return

        with transaction.atomic():
            # Delete obsolete data
            PackageData.objects.filter(key='appstream').delete()

            packages = PackageName.objects.filter(name__in=all_stats.keys())
            # Bug fix: prefetch_related() returns a new queryset; the result
            # must be assigned back or the prefetch is silently lost.
            packages = packages.prefetch_related('action_items')

            stats = []
            for package in packages:
                package_stats = all_stats[package.name]
                stats.append(
                    PackageData(
                        package=package,
                        key='appstream',
                        value=package_stats
                    )
                )

                # Create an ActionItem if there are errors or warnings
                self.update_action_item(package, package_stats)

            PackageData.objects.bulk_create(stats)
            # Remove action items for packages which no longer have associated
            # AppStream hints.
            ActionItem.objects.delete_obsolete_items(
                [self.appstream_action_item_type], all_stats.keys())

923 

924 

class UpdateTransitionsTask(BaseTask):
    """
    Import the ongoing/planned package transitions published by the
    release team and the ftp-masters.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    REJECT_LIST_URL = 'https://ftp-master.debian.org/transitions.yaml'
    PACKAGE_TRANSITION_LIST_URL = (
        'https://release.debian.org/transitions/export/packages.yaml')

    def _get_yaml_resource(self, url, **kwargs):
        """
        Fetch the YAML resource at ``url`` and deserialize it into a
        Python object; ``None`` when no content is available.
        """
        text = get_resource_text(url, **kwargs)
        if text:
            return yaml.safe_load(text)

    def _add_reject_transitions(self, packages):
        """
        Record in the ``packages`` dict the transitions which cause
        uploads to be rejected.
        """
        rejected = self._get_yaml_resource(self.REJECT_LIST_URL)
        for transition_name, transition in rejected.items():
            for pkg in transition['packages']:
                entry = packages.setdefault(pkg, {})
                entry = entry.setdefault(transition_name, {})
                entry['reject'] = True
                entry['status'] = 'ongoing'

    def _add_package_transition_list(self, packages):
        """
        Record the ongoing and planned transitions in the ``packages``
        dict.
        """
        transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL)

        wanted_statuses = ('ongoing', 'planned')
        for package_info in transition_list:
            pkg_name = package_info['name']
            for transition_name, status in package_info['list']:
                if status not in wanted_statuses:
                    # Skip transitions with an unwanted status
                    continue

                entry = packages.setdefault(pkg_name, {})
                entry.setdefault(transition_name, {})['status'] = status

    def execute_main(self):
        # Refresh the remote resources first; only_if_updated lets us
        # bail out early when neither file has changed.
        fetch_kwargs = {
            'force_update': self.force_update,
            'only_if_updated': True,
        }
        reject_list = self._get_yaml_resource(
            self.REJECT_LIST_URL, **fetch_kwargs)
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL, **fetch_kwargs)

        if reject_list is None and package_transition_list is None:
            # Nothing to do - at least one needs to be updated...
            return

        package_transitions = {}
        self._add_reject_transitions(package_transitions)
        self._add_package_transition_list(package_transitions)

        PackageTransition.objects.all().delete()
        # Only packages known to the tracker get transition records.
        known_packages = PackageName.objects.filter(
            name__in=package_transitions.keys())
        new_transitions = []
        for package in known_packages:
            per_package = package_transitions[package.name]
            for transition_name, data in per_package.items():
                new_transitions.append(PackageTransition(
                    package=package,
                    transition_name=transition_name,
                    status=data.get('status', None),
                    reject=data.get('reject', False)))

        PackageTransition.objects.bulk_create(new_transitions)

1009 

1010 

class UpdateExcusesTask(BaseTask):
    """
    Import the britney excuses and create action items for packages
    whose migration to testing is overdue.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-testing-migration'
    ITEM_DESCRIPTION = (
        "The package has not entered testing even though the delay is over")
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/testing-migration-action-item.html'

    class AgeVerdict(Enum):
        PKG_OF_AGE = 0
        PKG_TOO_OLD = 1
        PKG_TOO_YOUNG = 2
        PKG_WO_POLICY = 3

    def initialize(self, *args, **kwargs):
        super(UpdateExcusesTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _adapt_excuse_links(self, excuse):
        """
        If the excuse contains any anchor links, convert them to links to
        Distro Tracker package pages. Return the original text unmodified,
        otherwise.
        """
        re_anchor_href = re.compile(r'^#(.*)$')
        html = soup(excuse, 'html.parser')
        for a_tag in html.findAll('a', {'href': True}):
            href = a_tag['href']
            match = re_anchor_href.match(href)
            if not match:
                continue
            # Anchor targets look like "package/..." -- keep the package part.
            package = match.group(1).split('/')[0]
            a_tag['href'] = package_url(package)

        return str(html)

    def _skip_excuses_item(self, item_text):
        """Return ``True`` for excuse items which should not be shown."""
        if not item_text:
            return True
        # We ignore these excuses
        if "Section" in item_text or "Maintainer" in item_text:
            return True
        return False

    def _check_age(self, source):
        """
        Check the age of the package and compare it to the age
        requirement for migration.

        :returns: a ``(verdict, age, limit)`` tuple; ``age`` and ``limit``
            are ``None`` when the excuses provide no age policy info.
        """
        if 'policy_info' not in source or 'age' not in source['policy_info']:
            return (self.AgeVerdict.PKG_WO_POLICY, None, None)

        age = source['policy_info']['age']['current-age']
        limit = source['policy_info']['age']['age-requirement']
        if age > limit:
            return (self.AgeVerdict.PKG_TOO_OLD, age, limit)
        elif age < limit:
            return (self.AgeVerdict.PKG_TOO_YOUNG, age, limit)
        else:
            return (self.AgeVerdict.PKG_OF_AGE, age, limit)

    def _extract_problematic(self, source):
        """
        Return ``(item-name, {'age': ..., 'limit': ...})`` when the
        package is older than its migration age requirement, ``None``
        otherwise.
        """
        verdict, age, limit = self._check_age(source)

        if verdict == self.AgeVerdict.PKG_TOO_OLD:
            return (source['item-name'], {'age': age, 'limit': limit})

    @staticmethod
    def _make_excuses_check_dependencies(source):
        """
        Check the dependencies of the package (blocked-by and
        migrate-after) and return a list of messages to display.
        """
        addendum = []

        if 'dependencies' in source:
            blocked_by = source['dependencies'].get('blocked-by', [])
            after = source['dependencies'].get('migrate-after', [])
            # Don't list a package twice when it appears in both groups.
            after = [
                element
                for element in after
                if element not in blocked_by
            ]
            if blocked_by:
                addendum.append("Blocked by: %s" % (
                    html_package_list(blocked_by),
                ))
            if after:
                addendum.append("Migrates after: %s" % (
                    html_package_list(after),
                ))

        return addendum

    @staticmethod
    def _make_excuses_check_verdict(source):
        """
        Check the migration policy verdict of the package and build an
        excuses message depending on the result.
        """
        addendum = []

        if 'migration-policy-verdict' in source:
            verdict = source['migration-policy-verdict']
            if verdict == 'REJECTED_BLOCKED_BY_ANOTHER_ITEM':
                addendum.append("Migration status: Blocked. Can't migrate "
                                "due to a non-migratable dependency. Check "
                                "status below."
                                )

        return addendum

    def _make_excuses(self, source):
        """
        Build the excuses list for a source item using the YAML data it
        contains.

        :returns: an ``(item-name, excuses)`` two-tuple.
        """
        excuses = [
            self._adapt_excuse_links(excuse)
            for excuse in source['excuses']
        ]

        # This is the place where we compute some additional
        # messages that should be added to excuses.
        addendum = []

        addendum.extend(self._make_excuses_check_verdict(source))
        addendum.extend(self._make_excuses_check_dependencies(source))

        excuses = addendum + excuses

        if 'is-candidate' in source:
            if not source['is-candidate']:
                excuses.append("Not considered")

        return (
            source['item-name'],
            excuses,
        )

    def _get_excuses_and_problems(self, content):
        """
        Get the excuses for each package.
        Also find the packages which have not migrated to testing
        after the necessary time has passed.

        :returns: A two-tuple where the first element is a dict mapping
            package names to a list of excuses. The second element is a
            dict mapping package names to problem information. Problem
            information is a dict with the keys ``age`` and ``limit``.
        """
        if 'sources' not in content:
            logger.warning("Invalid format of excuses file")
            return

        sources = content['sources']
        # NOTE(review): item-names containing '/' look arch-qualified and
        # are skipped here -- confirm against the britney export format.
        excuses = [
            self._make_excuses(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problems = [
            self._extract_problematic(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        # _extract_problematic() returns None for unproblematic packages.
        problematic = [p for p in problems if p]
        return dict(excuses), dict(problematic)

    def _create_action_item(self, package, extra_data):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` for the
        given package including the given extra data. The item indicates
        that there is a problem with the package migrating to testing.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        action_item.short_description = self.ITEM_DESCRIPTION
        if package.main_entry:
            query_string = urlencode({'package': package.name})
            extra_data['check_why_url'] = (
                'https://qa.debian.org/excuses.php'
                '?{query_string}'.format(query_string=query_string))

        action_item.extra_data = extra_data
        action_item.save()

    def _remove_obsolete_action_items(self, problematic):
        """
        Remove action items for packages which are no longer problematic.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=problematic.keys())

    def _get_excuses_yaml(self):
        """
        Return the parsed excuses published by debian-release.

        :returns: a dict of excuses or ``None`` if the content in the
            cache is up to date.
        """
        url = 'https://release.debian.org/britney/excuses.yaml'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        return yaml.safe_load(content)

    def execute_main(self):
        content = self._get_excuses_yaml()
        if not content:
            return

        result = self._get_excuses_and_problems(content)
        if not result:
            return
        package_excuses, problematic = result

        with transaction.atomic():
            # Remove stale excuses data and action items which are not
            # still problematic.
            self._remove_obsolete_action_items(problematic)
            PackageExcuses.objects.all().delete()

            excuses = []
            packages = SourcePackageName.objects.filter(
                name__in=package_excuses.keys())
            # BUG FIX: prefetch_related() returns a new queryset; the
            # original code discarded the return value, so the prefetch
            # never took effect.
            packages = packages.prefetch_related('action_items')
            for package in packages:
                excuse = PackageExcuses(
                    package=package,
                    excuses=package_excuses[package.name])
                excuses.append(excuse)
                if package.name in problematic:
                    self._create_action_item(
                        package, problematic[package.name])

            # Create all excuses in a single query
            PackageExcuses.objects.bulk_create(excuses)

1254 

1255 

class UpdateBuildLogCheckStats(BaseTask):
    """
    Refresh build log check (logcheck) statistics from qa.debian.org and
    maintain the corresponding action items.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'debian-build-logcheck'
    ITEM_DESCRIPTION = 'Build log checks report <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/logcheck-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateBuildLogCheckStats, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_buildd_content(self):
        """Download the raw logcheck report."""
        return get_resource_text('https://qa.debian.org/bls/logcheck.txt')

    def get_buildd_stats(self):
        """
        Parse the logcheck report into a mapping of
        ``{package: {'errors': n, 'warnings': n}}``.
        """
        stats = {}
        for line in self._get_buildd_content().splitlines():
            pkg, error_count, warning_count = line.split("|")[:3]
            try:
                stats[pkg] = {
                    'errors': int(error_count),
                    'warnings': int(warning_count),
                }
            except ValueError:
                # Non-numeric counts (e.g. a header line): skip.
                continue
        return stats

    def create_action_item(self, package, stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance
        for the given package if the build logcheck stats indicate
        """
        item = package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)

        errors = stats.get('errors', 0)
        warnings = stats.get('warnings', 0)

        if not (errors or warnings):
            # Nothing to report: retire any previously created item.
            if item is not None:
                item.delete()
            return

        if item is None:
            item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        if item.extra_data and item.extra_data == stats:
            # Nothing has changed -- do not update the item
            return

        logcheck_url = "https://qa.debian.org/bls/packages/{hash}/{pkg}.html"\
            .format(hash=package.name[0], pkg=package.name)

        def plural(count):
            # Pluralize 'error'/'warning' for counts above one.
            return 's' if count > 1 else ''

        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors, plural(errors), warnings, plural(warnings))
            item.severity = ActionItem.SEVERITY_HIGH
        elif errors:
            report = '{} error{}'.format(errors, plural(errors))
            item.severity = ActionItem.SEVERITY_HIGH
        else:
            report = '{} warning{}'.format(warnings, plural(warnings))
            item.severity = ActionItem.SEVERITY_LOW

        item.short_description = self.ITEM_DESCRIPTION.format(
            url=logcheck_url,
            report=report)
        item.extra_data = stats
        item.save()

    def execute_main(self):
        # Build a dict with stats from both buildd and clang
        stats = self.get_buildd_stats()

        BuildLogCheckStats.objects.all().delete()
        ActionItem.objects.delete_obsolete_items(
            [self.action_item_type], stats.keys())

        packages = SourcePackageName.objects.filter(
            name__in=stats.keys()).prefetch_related('action_items')

        new_stats = []
        for package in packages:
            package_stats = stats[package.name]
            new_stats.append(BuildLogCheckStats(
                package=package,
                stats=package_stats))

            self.create_action_item(package, package_stats)

        # One SQL query to create all the stats.
        BuildLogCheckStats.objects.bulk_create(new_stats)

1366 

1367 

class DebianWatchFileScannerUpdate(BaseTask):
    """
    Track upstream status (new upstream versions and watch file
    failures) using the UDD upstream-status export.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAMES = (
        'new-upstream-version',
        'watch-failure',
    )
    ACTION_ITEM_TEMPLATES = {
        'new-upstream-version': "debian/new-upstream-version-action-item.html",
        'watch-failure': "debian/watch-failure-action-item.html",
    }
    ITEM_DESCRIPTIONS = {
        'new-upstream-version': lambda item: (
            'A new upstream version is available: '
            '<a href="{url}">{version}</a>'.format(
                url=item.extra_data['upstream_url'],
                version=item.extra_data['upstream_version'])),
        'watch-failure': lambda item: (
            'Problems while searching for a new upstream version'),
    }
    ITEM_SEVERITIES = {
        'new-upstream-version': ActionItem.SEVERITY_HIGH,
        'watch-failure': ActionItem.SEVERITY_HIGH,
    }

    def initialize(self, *args, **kwargs):
        super(DebianWatchFileScannerUpdate, self).initialize(*args, **kwargs)
        self.action_item_types = {}
        for type_name in self.ACTION_ITEM_TYPE_NAMES:
            template = self.ACTION_ITEM_TEMPLATES.get(type_name, None)
            self.action_item_types[type_name] = \
                ActionItemType.objects.create_or_update(
                    type_name=type_name,
                    full_description_template=template)

    def _get_upstream_status_content(self):
        """Download the raw DEHS JSON export from UDD."""
        return get_resource_text(
            'https://udd.debian.org/cgi-bin/upstream-status.json.cgi')

    def _remove_obsolete_action_items(self, item_type_name,
                                      non_obsolete_packages):
        """
        Drop every existing :class:`ActionItem` of the given type whose
        package is not listed in ``non_obsolete_packages``.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_types[item_type_name]],
            non_obsolete_packages=non_obsolete_packages)

    def get_upstream_status_stats(self, stats):
        """
        Fill the given ``stats`` dict (keyed by package name) from the
        downloaded DEHS data.

        :returns: A two-tuple: the list of packages with a new upstream
            version, then the list of packages with watch failures.
        """
        content = self._get_upstream_status_content()
        dehs_data = json.loads(content) if content else None
        if not dehs_data:
            return [], []

        new_versions = []
        failures = []
        for entry in dehs_data:
            name = entry['package']
            pkg_stats = stats.setdefault(name, {})
            pkg_stats['upstream_version'] = entry['upstream-version']
            pkg_stats['upstream_url'] = entry['upstream-url']
            if 'status' in entry:
                status = entry['status']
                if 'Newer version' in status or 'newer package' in status:
                    pkg_stats['new-upstream-version'] = {
                        'upstream_version': entry['upstream-version'],
                        'upstream_url': entry['upstream-url'],
                    }
                    new_versions.append(name)
            if entry.get('warnings') or entry.get('errors'):
                # Fold errors and warnings into one message block.
                msg = '{}\n{}'.format(
                    entry.get('errors') or '',
                    entry.get('warnings') or '',
                ).strip()
                pkg_stats['watch-failure'] = {
                    'warning': msg,
                }
                failures.append(name)

        return new_versions, failures

    def update_package_info(self, package, stats):
        """
        Updates upstream information of the given package based on the
        given stats, stored as a :class:`PackageData` entry under the
        ``upstream-watch-status`` key.

        :param package: The package to which the upstream info should be
            associated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats which are used to create the upstream
            info.
        :type stats: :class:`dict`
        """
        if package.watch_status:
            watch_data = package.watch_status[0]
        else:
            watch_data = PackageData(
                package=package,
                key='upstream-watch-status',
            )

        watch_data.value = stats
        watch_data.save()

    def update_action_item(self, item_type, package, stats):
        """
        Updates the action item of the given type for the given package
        based on the given stats.

        The severity of the item is defined by :attr:`ITEM_SEVERITIES`;
        the short description is produced by the matching callable in
        :attr:`ITEM_DESCRIPTIONS`, which reads the freshly-set
        ``extra_data``.

        :param item_type: The type of the :class:`ActionItem` to update.
        :type item_type: string
        :param package: The package the action item belongs to.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats used to build the action item.
        :type stats: :class:`dict`
        """
        action_item = package.get_action_item_for_type(item_type)
        if action_item is None:
            # First occurrence for this package: start a fresh item.
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_types[item_type])

        severity = self.ITEM_SEVERITIES.get(item_type)
        if severity is not None:
            action_item.severity = severity
        action_item.extra_data = stats
        action_item.short_description = \
            self.ITEM_DESCRIPTIONS[item_type](action_item)

        action_item.save()

    @transaction.atomic
    def execute_main(self):
        stats = {}
        new_versions, failures = self.get_upstream_status_stats(stats)
        per_type = {
            'new-upstream-version': new_versions,
            'watch-failure': failures,
        }

        # Remove obsolete action items for each of the categories...
        for item_type, non_obsolete in per_type.items():
            self._remove_obsolete_action_items(item_type, non_obsolete)

        watch_qs = PackageData.objects.filter(key='upstream-watch-status')
        packages = SourcePackageName.objects.filter(
            name__in=stats.keys()
        ).prefetch_related(
            'action_items',
            Prefetch('data', queryset=watch_qs, to_attr='watch_status'),
        )

        # Update action items and stored watch status for each package.
        for package in packages:
            package_stats = stats[package.name]
            for type_name in self.ACTION_ITEM_TYPE_NAMES:
                if type_name in package_stats:
                    self.update_action_item(
                        type_name, package, package_stats[type_name])

            self.update_package_info(package, package_stats)

1551 

1552 

1553class UpdateSecurityIssuesTask(BaseTask): 

1554 

1555 class Scheduler(IntervalScheduler): 

1556 interval = 3600 * 3 

1557 

1558 ACTION_ITEM_TYPE_NAME = 'debian-security-issue-in-{}' 

1559 ACTION_ITEM_TEMPLATE = 'debian/security-issue-action-item.html' 

1560 ITEM_DESCRIPTION_TEMPLATE = { 

1561 'open': '<a href="{url}">{count} security {issue}</a> in {release}', 

1562 'nodsa': 

1563 '<a href="{url}">{count} low-priority security {issue}</a> ' 

1564 'in {release}', 

1565 'none': 'No known security issue in {release}', 

1566 } 

1567 CVE_DATA_URL = 'https://security-tracker.debian.org/tracker/data/json' 

1568 DISTRIBUTIONS_URL = ( 

1569 'https://security-tracker.debian.org/tracker/distributions.json' 

1570 ) 

1571 

1572 def initialize(self, *args, **kwargs): 

1573 super(UpdateSecurityIssuesTask, self).initialize(*args, **kwargs) 

1574 self._action_item_type = {} 

1575 self._issues = None 

1576 self._distributions = None 

1577 

1578 def action_item_type(self, release): 

1579 return self._action_item_type.setdefault( 

1580 release, ActionItemType.objects.create_or_update( 

1581 type_name=self.ACTION_ITEM_TYPE_NAME.format(release), 

1582 full_description_template=self.ACTION_ITEM_TEMPLATE)) 

1583 

1584 def _get_distributions(self): 

1585 if not self._distributions: 

1586 content = get_resource_text(self.DISTRIBUTIONS_URL) 

1587 self._distributions = json.loads(content) 

1588 return self._distributions 

1589 

1590 def _get_support_status(self, release): 

1591 """ 

1592 Return support status of a given release as documented by the 

1593 security team in the security tracker. 

1594 """ 

1595 return self._get_distributions().get(release, {}).get('support', 

1596 'unknown') 

1597 

1598 def _get_issues_content(self): 

1599 if self._issues: 1599 ↛ 1600line 1599 didn't jump to line 1600, because the condition on line 1599 was never true

1600 return self._issues 

1601 content = get_resource_text(self.CVE_DATA_URL) 

1602 if content: 1602 ↛ exitline 1602 didn't return from function '_get_issues_content', because the condition on line 1602 was never false

1603 self._issues = json.loads(content) 

1604 return self._issues 

1605 

1606 @classmethod 

1607 def _update_stats_with_nodsa_entry(cls, stats, nodsa_entry, 

1608 entry_id, description): 

1609 stats['nodsa'] += 1 

1610 

1611 nodsa_details = {'description': description, 

1612 'nodsa': nodsa_entry.get('nodsa', ''), 

1613 'nodsa_reason': nodsa_entry.get('nodsa_reason', '') 

1614 } 

1615 

1616 nodsa_reason = nodsa_details['nodsa_reason'] 

1617 if nodsa_reason == '': 

1618 nodsa_details['needs_triaging'] = True 

1619 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1620 nodsa_details 

1621 elif nodsa_reason == 'postponed': 1621 ↛ 1622line 1621 didn't jump to line 1622, because the condition on line 1621 was never true

1622 nodsa_details['fixed_via_stable_update'] = True 

1623 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1624 nodsa_details 

1625 elif nodsa_reason == 'ignored': 1625 ↛ exitline 1625 didn't return from function '_update_stats_with_nodsa_entry', because the condition on line 1625 was never false

1626 stats['nodsa_ignored_details'][entry_id] = nodsa_details 

1627 

1628 @classmethod 

1629 def get_issues_summary(cls, issues): 

1630 result = {} 

1631 for issue_id, issue_data in issues.items(): 

1632 for release, data in issue_data['releases'].items(): 

1633 stats = result.setdefault(release, { 

1634 'open': 0, 

1635 'open_details': {}, 

1636 'nodsa': 0, 

1637 'unimportant': 0, 

1638 'next_point_update_details': {}, 

1639 'nodsa_maintainer_to_handle_details': {}, 

1640 'nodsa_ignored_details': {}, 

1641 }) 

1642 description = issue_data.get('description', '') 

1643 if (data.get('status', '') == 'resolved' or 

1644 data.get('urgency', '') == 'end-of-life'): 

1645 continue 

1646 elif data.get('urgency', '') == 'unimportant': 

1647 stats['unimportant'] += 1 

1648 elif data.get('next_point_update', False): 

1649 stats['next_point_update_details'][issue_id] = \ 

1650 {'description': description} 

1651 elif data.get('nodsa', False) is not False: 

1652 cls._update_stats_with_nodsa_entry(stats, 

1653 data, issue_id, 

1654 description 

1655 ) 

1656 else: 

1657 stats['open'] += 1 

1658 stats['open_details'][issue_id] = \ 

1659 {'description': description} 

1660 

1661 return result 

1662 

1663 @classmethod 

1664 def get_issues_stats(cls, content): 

1665 """ 

1666 Gets package issue stats from Debian's security tracker. 

1667 """ 

1668 stats = {} 

1669 for pkg, issues in content.items(): 

1670 stats[pkg] = cls.get_issues_summary(issues) 

1671 return stats 

1672 

1673 def _get_short_description(self, key, action_item): 

1674 count = action_item.extra_data['security_issues_count'] 

1675 url = 'https://security-tracker.debian.org/tracker/source-package/{}' 

1676 return self.ITEM_DESCRIPTION_TEMPLATE[key].format( 

1677 count=count, 

1678 issue='issues' if count > 1 else 'issue', 

1679 release=action_item.extra_data.get('release', 'sid'), 

1680 url=url.format(action_item.package.name), 

1681 ) 

1682 

    def update_action_item(self, stats, action_item):
        """
        Updates the ``debian-security-issue`` action item based on the
        security issues.

        Mutates ``action_item`` in place (``extra_data``, ``severity``,
        ``short_description``) but does not save it; the caller is
        responsible for persisting the item.

        :param stats: per-release issue summary with ``open``/``nodsa``
            counts and ``*_details`` mappings.
        :param action_item: the item to refresh; its ``extra_data`` must
            already contain the ``release`` key.
        """

        # Total shown to the user: both unfixed (open) and no-DSA issues.
        security_issues_count = stats['open'] + stats['nodsa']
        action_item.extra_data['security_issues_count'] = security_issues_count
        action_item.extra_data['support_status'] = (
            self._get_support_status(action_item.extra_data['release'])
        )

        # Copy each detail bucket and its count into extra_data so the
        # full-description template can render them without recomputation.
        for base_key in ['open',
                         'next_point_update',
                         'nodsa_maintainer_to_handle',
                         'nodsa_ignored']:
            details_key = base_key + '_details'
            count_key = base_key + '_count'

            action_item.extra_data[details_key] = stats[details_key]
            action_item.extra_data[count_key] = len(stats[details_key])

        # nodsa_next_point_update / nodsa_ignored_details are displayed
        # only if there is anything else to show
        nodsa_create_action = (stats['nodsa'] -
                               len(stats['nodsa_ignored_details'])) > 0

        # Severity: open issues > actionable no-DSA issues > nothing left.
        if stats['open']:
            action_item.severity = ActionItem.SEVERITY_HIGH
            action_item.short_description = \
                self._get_short_description('open', action_item)
        elif nodsa_create_action:
            action_item.severity = ActionItem.SEVERITY_LOW
            action_item.short_description = \
                self._get_short_description('nodsa', action_item)
        else:
            action_item.severity = ActionItem.SEVERITY_WISHLIST
            action_item.short_description = \
                self._get_short_description('none', action_item)

1722 

1723 @classmethod 

1724 def generate_package_data(cls, issues): 

1725 return { 

1726 'details': issues, 

1727 'stats': cls.get_issues_summary(issues), 

1728 'checksum': get_data_checksum(issues) 

1729 } 

1730 

1731 def want_action_item(self, pkgdata, release): 

1732 stats = pkgdata.value.get('stats', {}).get(release) 

1733 if stats is None: 1733 ↛ 1734line 1733 didn't jump to line 1734, because the condition on line 1733 was never true

1734 return False 

1735 

1736 supported_by = self._get_support_status(release) 

1737 if supported_by == "end-of-life": 

1738 return False 

1739 elif supported_by == "security": 

1740 count = stats.get('open', 0) + stats.get('nodsa', 0) 

1741 else: 

1742 count = stats.get('open', 0) 

1743 

1744 if count == 0: 

1745 return False 

1746 

1747 return True 

1748 

1749 def process_pkg_action_items(self, pkgdata, existing_action_items): 

1750 release_ai = {} 

1751 to_add = [] 

1752 to_update = [] 

1753 to_drop = [] 

1754 global_stats = pkgdata.value.get('stats', {}) 

1755 for ai in existing_action_items: 

1756 release = ai.extra_data['release'] 

1757 release_ai[release] = ai 

1758 for release, stats in global_stats.items(): 

1759 ai = release_ai.get(release) 

1760 

1761 if self.want_action_item(pkgdata, release): 

1762 if ai: 

1763 to_update.append(ai) 

1764 else: 

1765 ai = ActionItem( 

1766 item_type=self.action_item_type(release), 

1767 package=pkgdata.package, 

1768 extra_data={'release': release} 

1769 ) 

1770 to_add.append(ai) 

1771 self.update_action_item(stats, ai) 

1772 else: 

1773 if ai: 

1774 to_drop.append(ai) 

1775 

1776 return to_add, to_update, to_drop 

1777 

    def execute_main(self):
        """
        Synchronize the security-tracker data and the per-release
        ``debian-security-issue-in-*`` action items with the database,
        adding/updating/removing PackageData and ActionItem rows in a
        single transaction.
        """
        # Fetch all debian-security PackageData
        all_pkgdata = PackageData.objects.select_related(
            'package').filter(key='debian-security').only(
                'package__name', 'value')

        all_data = {}
        packages = {}
        for pkgdata in all_pkgdata:
            all_data[pkgdata.package.name] = pkgdata
            packages[pkgdata.package.name] = pkgdata.package

        # Fetch all debian-security ActionItems
        pkg_action_items = collections.defaultdict(lambda: [])
        all_action_items = ActionItem.objects.select_related(
            'package').filter(
                item_type__type_name__startswith='debian-security-issue-in-')
        for action_item in all_action_items:
            pkg_action_items[action_item.package.name].append(action_item)

        # Check for changes on distributions.json
        distributions_checksum = get_data_checksum(self._get_distributions())
        if self.data.get('distributions_checksum') != distributions_checksum:
            # New distributions.json, force update all action items
            self.force_update = True
            self.data['distributions_checksum'] = distributions_checksum

        # Scan the security tracker data
        content = self._get_issues_content()
        to_add = []
        to_update = []
        for pkgname, issues in content.items():
            if pkgname in all_data:
                # Check if we need to update the existing data
                checksum = get_data_checksum(issues)
                # Skip unchanged packages unless a full refresh was forced.
                if not self.force_update and \
                        all_data[pkgname].value.get('checksum', '') == checksum:
                    continue
                # Update the data
                pkgdata = all_data[pkgname]
                pkgdata.value = self.generate_package_data(issues)
                to_update.append(pkgdata)
            else:
                # Add data for a new package
                package, _ = PackageName.objects.get_or_create(name=pkgname)
                to_add.append(
                    PackageData(
                        package=package,
                        key='debian-security',
                        value=self.generate_package_data(issues)
                    )
                )
        # Process action items (only for packages whose data changed)
        ai_to_add = []
        ai_to_update = []
        ai_to_drop = []
        for pkgdata in itertools.chain(to_add, to_update):
            add, update, drop = self.process_pkg_action_items(
                pkgdata, pkg_action_items[pkgdata.package.name])
            ai_to_add.extend(add)
            ai_to_update.extend(update)
            ai_to_drop.extend(drop)
        # Sync in database
        with transaction.atomic():
            # Delete obsolete data (packages gone from the tracker feed)
            PackageData.objects.filter(
                key='debian-security').exclude(
                    package__name__in=content.keys()).delete()
            ActionItem.objects.filter(
                item_type__type_name__startswith='debian-security-issue-in-'
            ).exclude(package__name__in=content.keys()).delete()
            ActionItem.objects.filter(
                item_type__type_name__startswith='debian-security-issue-in-',
                id__in=[ai.id for ai in ai_to_drop]).delete()
            # Add new entries
            PackageData.objects.bulk_create(to_add)
            ActionItem.objects.bulk_create(ai_to_add)
            # Update existing entries
            for pkgdata in to_update:
                pkgdata.save()
            for ai in ai_to_update:
                ai.save()

1860 

1861 

class UpdatePiuPartsTask(BaseTask):
    """
    Retrieves the piuparts stats for all the suites defined in the
    :data:`distro_tracker.project.local_settings.DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES`
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    ACTION_ITEM_TYPE_NAME = 'debian-piuparts-test-fail'
    ACTION_ITEM_TEMPLATE = 'debian/piuparts-action-item.html'
    ITEM_DESCRIPTION = 'piuparts found (un)installation error(s)'

    def initialize(self, *args, **kwargs):
        super(UpdatePiuPartsTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def _get_piuparts_content(self, suite):
        """
        :returns: The content of the piuparts report for the given package
            or ``None`` if there is no data for the particular suite.
        """
        url = 'https://piuparts.debian.org/{suite}/sources.txt'
        return get_resource_text(url.format(suite=suite))

    def get_piuparts_stats(self):
        """
        Collect the packages failing the piuparts test in any of the
        configured suites.

        :returns: a dict mapping package names to the list of suites in
            which the piuparts run failed.
        """
        suites = getattr(settings, 'DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES', [])
        # defaultdict replaces the manual setdefault/append dance; the
        # returned mapping is used exactly like a plain dict by callers.
        failing_packages = collections.defaultdict(list)
        for suite in suites:
            content = self._get_piuparts_content(suite)
            if content is None:
                logger.info("There is no piuparts for suite: %s", suite)
                continue

            for line in content.splitlines():
                package_name, status = line.split(':', 1)
                package_name, status = package_name.strip(), status.strip()
                if status == 'fail':
                    failing_packages[package_name].append(suite)

        return failing_packages

    def create_action_item(self, package, suites):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the package based on the list of suites in which the
        piuparts installation test failed.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                short_description=self.ITEM_DESCRIPTION)

        if action_item.extra_data:
            existing_items = action_item.extra_data.get('suites', [])
            # sorted() already returns a list; compare directly.
            if sorted(existing_items) == sorted(suites):
                # No need to update this item
                return
        action_item.extra_data = {
            'suites': suites,
        }
        action_item.save()

    def execute_main(self):
        failing_packages = self.get_piuparts_stats()

        # Drop items for packages that no longer fail anywhere.
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=failing_packages.keys())

        packages = SourcePackageName.objects.filter(
            name__in=failing_packages.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            self.create_action_item(package, failing_packages[package.name])

1943 

1944 

class UpdateUbuntuStatsTask(BaseTask):
    """
    The task updates Ubuntu stats for packages. These stats are displayed in a
    separate panel.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    # NOTE: the previous no-op initialize() override (it only called
    # super()) was removed; the inherited BaseTask.initialize is used.

    def _get_versions_content(self):
        url = 'https://udd.debian.org/cgi-bin/ubuntupackages.cgi'
        return get_resource_text(url)

    def get_ubuntu_versions(self):
        """
        Retrieves the Ubuntu package versions.

        :returns: A dict mapping package names to Ubuntu versions.
        """
        content = self._get_versions_content()

        package_versions = {}
        for line in content.splitlines():
            package, version = line.split(' ', 1)
            version = version.strip()
            package_versions[package] = version

        return package_versions

    def _get_bug_stats_content(self):
        url = 'https://udd.debian.org/cgi-bin/ubuntubugs.cgi'
        return get_resource_text(url)

    def get_ubuntu_bug_stats(self):
        """
        Retrieves the Ubuntu bug stats of a package. Bug stats contain the
        count of bugs and the count of patches.

        :returns: A dict mapping package names to a dict of package stats.
        """
        content = self._get_bug_stats_content()

        bug_stats = {}
        for line in content.splitlines():
            package_name, bug_count, patch_count = line.split("|", 2)
            try:
                bug_count, patch_count = int(bug_count), int(patch_count)
            except ValueError:
                # Malformed line: skip it.
                continue
            bug_stats[package_name] = {
                'bug_count': bug_count,
                'patch_count': patch_count,
            }

        return bug_stats

    def _get_ubuntu_patch_diff_content(self):
        url = 'https://patches.ubuntu.com/PATCHES'
        return get_resource_text(url)

    def get_ubuntu_patch_diffs(self):
        """
        Retrieves the Ubuntu patch diff information. The information consists
        of the diff URL and the version of the Ubuntu package to which the
        diff belongs to.

        :returns: A dict mapping package names to diff information.
        """
        content = self._get_ubuntu_patch_diff_content()

        patch_diffs = {}
        re_diff_version = re.compile(r'_(\S+)\.patch')
        for line in content.splitlines():
            package_name, diff_url = line.split(' ', 1)
            # Extract the version of the package from the diff url
            match = re_diff_version.search(diff_url)
            if not match:
                # Invalid URL: no version
                continue
            version = match.group(1)
            patch_diffs[package_name] = {
                'version': version,
                'diff_url': diff_url
            }

        return patch_diffs

    def execute_main(self):
        package_versions = self.get_ubuntu_versions()
        bug_stats = self.get_ubuntu_bug_stats()
        patch_diffs = self.get_ubuntu_patch_diffs()

        # Remove entries for packages no longer present in Ubuntu.
        obsolete_ubuntu_pkgs = UbuntuPackage.objects.exclude(
            package__name__in=package_versions.keys())
        obsolete_ubuntu_pkgs.delete()

        packages = PackageName.objects.filter(name__in=package_versions.keys())
        packages = packages.prefetch_related('ubuntu_package')

        for package in packages:
            version = package_versions[package.name]
            # dict.get already defaults to None; no explicit default needed.
            bugs = bug_stats.get(package.name)
            diff = patch_diffs.get(package.name)

            try:
                ubuntu_package = package.ubuntu_package
                ubuntu_package.version = version
                ubuntu_package.bugs = bugs
                ubuntu_package.patch_diff = diff
                ubuntu_package.save()
            except UbuntuPackage.DoesNotExist:
                ubuntu_package = UbuntuPackage.objects.create(
                    package=package,
                    version=version,
                    bugs=bugs,
                    patch_diff=diff)

2064 

2065 

class UpdateWnppStatsTask(BaseTask):
    """
    The task updates the WNPP bugs for all packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    ACTION_ITEM_TYPE_NAME = 'debian-wnpp-issue'
    ACTION_ITEM_TEMPLATE = 'debian/wnpp-action-item.html'
    ITEM_DESCRIPTION = '<a href="{url}">{wnpp_type}: {wnpp_msg}</a>'

    def initialize(self, *args, **kwargs):
        super(UpdateWnppStatsTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_wnpp_stats(self):
        """
        Retrieves and parses the wnpp stats for all packages. WNPP stats
        include the WNPP type and the BTS bug id.

        :returns: A dict mapping package names to wnpp stats, or ``None``
            when the cached upstream content is still up to date.
        """
        url = 'https://qa.debian.org/data/bts/wnpp_rm'
        content = get_resource_text(url, only_if_updated=True)
        if content is None:
            return

        wnpp_stats = {}
        for line in content.splitlines():
            line = line.strip()
            try:
                package_name, wnpp_type, bug_id = line.split('|')[0].split()
                bug_id = int(bug_id)
            except ValueError:
                # Badly formatted bug number
                continue
            # Strip the colon from the end of the package name
            package_name = package_name[:-1]

            wnpp_stats[package_name] = {
                'wnpp_type': wnpp_type,
                'bug_id': bug_id,
            }

        return wnpp_stats

    def update_action_item(self, package, stats):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the given type indicating that the package has a WNPP
        issue.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if not action_item:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        # Check if the stats have actually been changed
        if action_item.extra_data:
            if action_item.extra_data.get('wnpp_info', None) == stats:
                # Nothing to do -- still the same data
                return

        # Update the data since something has changed
        try:
            release = package.main_entry.repository.suite or \
                package.main_entry.repository.codename
        except AttributeError:
            # No main entry / repository: release stays unknown.
            release = None

        msgs = {
            'O': "This package has been orphaned and needs a maintainer.",
            'ITA': "Someone intends to adopt this package.",
            'RFA': "The maintainer wants to pass over package maintenance.",
            'RFH': "The maintainer is looking for help with this package.",
            'ITP': "Someone is planning to reintroduce this package.",
            'RFP': "There is a request to reintroduce this package.",
            'RM': "This package has been requested to be removed.",
            'RFS': "A sponsor is needed to update this package.",
            '?': "The WNPP database contains an entry for this package."
        }
        wnpp_type = stats['wnpp_type']
        try:
            wnpp_msg = msgs[wnpp_type]
        except KeyError:
            wnpp_msg = msgs['?']

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            url='https://bugs.debian.org/{}'.format(stats['bug_id']),
            wnpp_type=wnpp_type, wnpp_msg=wnpp_msg)
        action_item.extra_data = {
            'wnpp_info': stats,
            'release': release,
        }
        action_item.save()

    def update_depneedsmaint_action_item(self, package_needs_maintainer, stats):
        """
        Refresh the ``debian-depneedsmaint`` action items of every package
        that depends on ``package_needs_maintainer``.
        """
        short_description_template = \
            'Depends on packages which need a new maintainer'
        action_item_type = ActionItemType.objects.create_or_update(
            type_name='debian-depneedsmaint',
            full_description_template='debian/depneedsmaint-action-item.html')
        dependencies = SourcePackageDeps.objects.filter(
            dependency=package_needs_maintainer)
        for dependency in dependencies:
            package = dependency.source
            action_item = package.get_action_item_for_type(action_item_type)
            if not action_item:
                action_item = ActionItem(
                    package=package,
                    item_type=action_item_type,
                    extra_data={})

            pkgdata = {
                'bug': stats['bug_id'],
                'details': dependency.details,
            }

            if (action_item.extra_data.get(package_needs_maintainer.name, {}) ==
                    pkgdata):
                # Nothing has changed
                continue

            action_item.short_description = short_description_template
            action_item.extra_data[package_needs_maintainer.name] = pkgdata

            action_item.save()

    @transaction.atomic
    def execute_main(self):
        wnpp_stats = self.get_wnpp_stats()
        if wnpp_stats is None:
            # Nothing to do: cached content up to date
            return

        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=wnpp_stats.keys())
        # Remove obsolete action items for packages whose dependencies need a
        # new maintainer.
        packages_need_maintainer = []
        for name, stats in wnpp_stats.items():
            if stats['wnpp_type'] in ('O', 'RFA'):
                packages_need_maintainer.append(name)
        packages_depneeds_maint = [
            package.name for package in SourcePackageName.objects.filter(
                source_dependencies__dependency__name__in=packages_need_maintainer)  # noqa
        ]
        ActionItem.objects.delete_obsolete_items(
            item_types=[
                ActionItemType.objects.get_or_create(
                    type_name='debian-depneedsmaint')[0],
            ],
            non_obsolete_packages=packages_depneeds_maint)

        # Drop all reverse references
        for ai in ActionItem.objects.filter(
                item_type__type_name='debian-depneedsmaint'):
            ai.extra_data = {}
            ai.save()

        packages = SourcePackageName.objects.filter(name__in=wnpp_stats.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            stats = wnpp_stats[package.name]
            self.update_action_item(package, stats)
            # Update action items for packages which depend on this one to
            # indicate that a dependency needs a new maintainer.
            if package.name in packages_need_maintainer:
                self.update_depneedsmaint_action_item(package, stats)

2242 

2243 

class UpdateNewQueuePackages(BaseTask):
    """
    Updates the versions of source packages found in the NEW queue.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    DATA_KEY = 'debian-new-queue-info'

    # NOTE: the previous no-op initialize() override (it only called
    # super()) was removed; the inherited BaseTask.initialize is used.

    def extract_package_info(self, content):
        """
        Extracts the package information from the content of the NEW queue.

        :returns: A dict mapping package names to a dict mapping the
            distribution name in which the package is found to the version
            information for the most recent version of the package in the dist.
        """
        packages = {}
        for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()):
            necessary_fields = ('Source', 'Queue', 'Version', 'Distribution')
            if not all(field in stanza for field in necessary_fields):
                continue
            if stanza['Queue'] != 'new':
                continue

            versions = stanza['Version'].split()
            # Save only the most recent version; AptPkgVersion is directly
            # usable as the sort key (no lambda wrapper needed).
            version = max(versions, key=AptPkgVersion)

            package_name = stanza['Source']
            pkginfo = packages.setdefault(package_name, {})
            distribution = stanza['Distribution']
            if distribution in pkginfo:
                current_version = pkginfo[distribution]['version']
                if debian_support.version_compare(version, current_version) < 0:
                    # The already saved version is more recent than this one.
                    continue

            pkginfo[distribution] = {
                'version': version,
            }

        return packages

    def _get_new_content(self):
        url = 'https://ftp-master.debian.org/new.822'
        return get_resource_text(url, force_update=self.force_update,
                                 only_if_updated=True)

    def execute_main(self):
        content = self._get_new_content()
        if content is None:
            # Cached content up to date: nothing to do.
            return

        all_package_info = self.extract_package_info(content)

        packages = SourcePackageName.objects.filter(
            name__in=all_package_info.keys())

        with transaction.atomic():
            # Drop old entries
            PackageData.objects.filter(key=self.DATA_KEY).delete()
            # Prepare current entries
            data = []
            for package in packages:
                new_queue_info = PackageData(
                    key=self.DATA_KEY,
                    package=package,
                    value=all_package_info[package.name])
                data.append(new_queue_info)
            # Bulk create them
            PackageData.objects.bulk_create(data)

2320 

2321 

class UpdateAutoRemovalsStatsTask(BaseTask):
    """
    A task for updating autoremovals information on all packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-autoremoval'
    ACTION_ITEM_TEMPLATE = 'debian/autoremoval-action-item.html'
    ITEM_DESCRIPTION = ('Marked for autoremoval on {removal_date}' +
                        '{dependencies}: {bugs}')

    def initialize(self, *args, **kwargs):
        super(UpdateAutoRemovalsStatsTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_autoremovals_stats(self):
        """
        Retrieves and parses the autoremoval stats for all packages.
        Autoremoval stats include the BTS bugs id.

        :returns: A dict mapping package names to autoremoval stats, or
            ``None`` when the cached content is still up to date.
        """
        content = get_resource_text(
            'https://udd.debian.org/cgi-bin/autoremovals.yaml.cgi',
            force_update=self.force_update,
            only_if_updated=True
        )
        if content:
            return yaml.safe_load(content)
        # Falls through to an implicit None when content is unchanged.

    def update_action_item(self, package, stats):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the given type indicating that the package has an
        autoremoval issue.

        NOTE: ``stats`` is mutated in place below (datetime values are
        converted to strings), and the short description must be built
        *before* that conversion since it formats the raw datetime.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if not action_item:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)

        bugs_dependencies = stats.get('bugs_dependencies', [])
        buggy_dependencies = stats.get('buggy_dependencies', [])
        reverse_dependencies = stats.get('rdeps', [])
        all_bugs = stats['bugs'] + bugs_dependencies
        link = '<a href="https://bugs.debian.org/{}">#{}</a>'
        # stats['removal_date'] is still a datetime here (parsed from YAML).
        removal_date = stats['removal_date'].strftime('%d %B')
        if isinstance(removal_date, bytes):
            # Legacy safety net for byte strings; presumably unreachable on
            # Python 3 — kept for compatibility.
            removal_date = removal_date.decode('utf-8', 'ignore')

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            removal_date=removal_date,
            dependencies=(' due to ' + html_package_list(
                buggy_dependencies) if buggy_dependencies else ''),
            bugs=', '.join(link.format(bug, bug) for bug in all_bugs))

        # datetime objects are not JSON-serializable, convert them ourselves
        for key in stats.keys():
            if hasattr(stats[key], 'strftime'):
                stats[key] = stats[key].strftime('%a %d %b %Y')

        action_item.extra_data = {
            'stats': stats,
            'removal_date': stats['removal_date'],
            'version': stats.get('version', ''),
            'bugs': ', '.join(link.format(bug, bug) for bug in stats['bugs']),
            'bugs_dependencies': ', '.join(
                link.format(bug, bug) for bug in bugs_dependencies),
            'buggy_dependencies':
                html_package_list(buggy_dependencies),
            'reverse_dependencies':
                html_package_list(reverse_dependencies),
            'number_rdeps': len(reverse_dependencies)}
        action_item.save()

    def execute_main(self):
        autoremovals_stats = self.get_autoremovals_stats()
        if autoremovals_stats is None:
            # Nothing to do: cached content up to date
            return

        # Drop items for packages no longer marked for autoremoval.
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=autoremovals_stats.keys())

        packages = SourcePackageName.objects.filter(
            name__in=autoremovals_stats.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            self.update_action_item(package, autoremovals_stats[package.name])

2419 

2420 

class UpdatePackageScreenshotsTask(BaseTask):
    """
    Check if a screenshot exists on screenshots.debian.net, and add a
    key to PackageData if it does.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    DATA_KEY = 'screenshots'

    def _get_screenshots(self):
        """
        :returns: the parsed JSON index of packages with screenshots, or
            ``None`` when the cached content is still up to date.
        """
        url = 'https://screenshots.debian.net/json/packages'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        data = json.loads(content)
        return data

    def execute_main(self):
        content = self._get_screenshots()
        if content is None:
            return

        packages_with_screenshots = []
        for item in content['packages']:
            try:
                package = SourcePackageName.objects.get(name=item['name'])
                packages_with_screenshots.append(package)
            except SourcePackageName.DoesNotExist:
                # Not a known source package: ignore.
                pass

        with transaction.atomic():
            # Use the class constant consistently (was a hardcoded
            # 'screenshots' literal).
            PackageData.objects.filter(key=self.DATA_KEY).delete()

            data = []
            for package in packages_with_screenshots:
                try:
                    screenshot_info = package.data.get(key=self.DATA_KEY)
                    screenshot_info.value['screenshots'] = 'true'
                except PackageData.DoesNotExist:
                    screenshot_info = PackageData(
                        key=self.DATA_KEY,
                        package=package,
                        value={'screenshots': 'true'})

                data.append(screenshot_info)

            PackageData.objects.bulk_create(data)

2472 

2473 

class UpdateBuildReproducibilityTask(BaseTask):
    """
    Imports build-reproducibility statuses from
    tests.reproducible-builds.org and flags problematic packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    BASE_URL = 'https://tests.reproducible-builds.org'
    # PackageData key; a named constant consistent with the other tasks
    # (was a twice-repeated 'reproducibility' literal).
    DATA_KEY = 'reproducibility'
    ACTION_ITEM_TYPE_NAME = 'debian-build-reproducibility'
    ACTION_ITEM_TEMPLATE = 'debian/build-reproducibility-action-item.html'
    ITEM_DESCRIPTION = {
        'blacklisted': '<a href="{url}">Blacklisted</a> from build '
                       'reproducibility testing',
        'FTBFS': '<a href="{url}">Fails to build</a> during reproducibility '
                 'testing',
        'reproducible': None,
        'FTBR': '<a href="{url}">Does not build reproducibly</a> '
                'during testing',
        '404': None,
        'not for us': None,
    }

    def initialize(self, *args, **kwargs):
        super(UpdateBuildReproducibilityTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_build_reproducibility(self):
        """
        :returns: a dict mapping package names to their reproducibility
            status, or ``None`` when the cached content is up to date.
        """
        url = '{}/debian/reproducible-tracker.json'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        reproducibilities = json.loads(content)
        packages = {}
        for item in reproducibilities:
            package = item['package']
            status = item['status']
            missing = package not in packages
            # A status that maps to a description takes precedence over
            # one that does not.
            important = self.ITEM_DESCRIPTION.get(status) is not None
            if important or missing:
                packages[package] = status

        return packages

    def update_action_item(self, package, status):
        """
        Create or refresh the package's reproducibility action item.

        :returns: True when the status warranted an action item.
        """
        description = self.ITEM_DESCRIPTION.get(status)

        if not description:  # Not worth an action item
            return False

        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_NORMAL)

        url = "{}/debian/rb-pkg/{}.html".format(self.BASE_URL, package.name)
        action_item.short_description = description.format(url=url)
        action_item.save()
        return True

    def execute_main(self):
        reproducibilities = self.get_build_reproducibility()
        if reproducibilities is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(key=self.DATA_KEY).delete()

            packages = []
            data = []

            for name, status in reproducibilities.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    if self.update_action_item(package, status):
                        packages.append(package)
                except SourcePackageName.DoesNotExist:
                    continue

                reproducibility_info = PackageData(
                    key=self.DATA_KEY,
                    package=package,
                    value={'reproducibility': status})
                data.append(reproducibility_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(data)

2566 

2567 

class MultiArchHintsTask(BaseTask):
    """
    Imports multiarch hints from dedup.debian.net and turns them into
    per-package action items.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTIONS_WEB = 'https://wiki.debian.org/MultiArch/Hints'
    ACTIONS_URL = 'https://dedup.debian.net/static/multiarch-hints.yaml'
    ACTION_ITEM_TYPE_NAME = 'debian-multiarch-hints'
    ACTION_ITEM_TEMPLATE = 'debian/multiarch-hints.html'
    ACTION_ITEM_DESCRIPTION = \
        '<a href="{link}">Multiarch hinter</a> reports {count} issue(s)'

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)
        # Map severity names to their numeric values for quick lookup.
        self.SEVERITIES = {
            name: value for value, name in ActionItem.SEVERITIES
        }

    def get_data(self):
        """Download and parse the multiarch hints YAML file."""
        raw = get_resource_text(self.ACTIONS_URL)
        if not raw:
            return None
        return yaml.safe_load(raw)

    def get_packages(self):
        """
        Group the downloaded hints by source package.

        :returns: a dict mapping package names to a dict with keys
            ``severity`` (highest severity of the hints) and ``hints``
            (list of (description, link) tuples), or None when the data
            is unavailable or has an unexpected format.
        """
        parsed = self.get_data()
        if parsed is None or parsed['format'] != 'multiarch-hints-1.0':
            return None

        wishlist = ActionItem.SEVERITY_WISHLIST
        packages = collections.defaultdict(dict)
        for hint in parsed['hints']:
            if 'source' not in hint:
                continue
            entry = packages[hint['source']]
            # Unknown severity names fall back to wishlist.
            severity = self.SEVERITIES.get(hint['severity'], wishlist)
            entry['severity'] = max(severity,
                                    entry.get('severity', wishlist))
            entry.setdefault('hints', []).append(
                (hint['description'], hint['link']))
        return packages

    def update_action_item(self, package, severity, description, extra_data):
        """Create or refresh the multiarch-hints action item of *package*."""
        item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if item is None:
            item = ActionItem(
                package=package,
                item_type=self.action_item_type)
        item.severity = severity
        item.short_description = description
        item.extra_data = extra_data
        item.save()

    def execute_main(self):
        packages = self.get_packages()
        if not packages:
            return

        with transaction.atomic():
            for name, info in packages.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                except SourcePackageName.DoesNotExist:
                    continue

                short_description = self.ACTION_ITEM_DESCRIPTION.format(
                    count=len(info['hints']), link=self.ACTIONS_WEB)
                self.update_action_item(package, info['severity'],
                                        short_description, info['hints'])

            # Drop items for packages that no longer have any hints.
            ActionItem.objects.delete_obsolete_items(
                [self.action_item_type], packages.keys())

2645 

2646 

class UpdateVcsWatchTask(BaseTask):
    """
    Updates packages' vcswatch stats.

    Two kinds of action items are maintained from the vcswatch JSON dump:
    warnings/errors about the packaging VCS state, and notices about open
    Salsa merge requests.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    VCSWATCH_ACTION_ITEM_TYPE_NAME = 'vcswatch-warnings-and-errors'
    VCSWATCH_ACTION_ITEM_TEMPLATE = 'debian/vcswatch-action-item.html'
    SALSA_MR_ACTION_ITEM_TYPE_NAME = 'salsa-merge-requests'
    SALSA_MR_ACTION_ITEM_TEMPLATE = 'debian/salsa-mr-action-item.html'
    SALSA_MR_SHORT_DESCRIPTION = '<a href="{url}">{count_str}</a> in Salsa'
    VCSWATCH_URL = 'https://qa.debian.org/cgi-bin/vcswatch?package=%(package)s'
    VCSWATCH_DATA_URL = 'https://qa.debian.org/data/vcswatch/vcswatch.json.gz'

    # Maps a vcswatch status to the short description template and the
    # severity of the corresponding action item. "OK" is handled
    # separately (no action item); unknown statuses use "DEFAULT".
    VCSWATCH_STATUS_DICT = {
        "NEW": {
            "description":
                '<a href="{vcswatch_url}">version in VCS is newer</a> than in '
                'repository, is it time to upload?',
            "severity": ActionItem.SEVERITY_NORMAL,
        },
        "COMMITS": {
            "description":
                '<a href="{vcswatch_url}">{commits} new commit{commits_s}</a> '
                'since last upload, is it time to release?',
            "severity": ActionItem.SEVERITY_NORMAL,
        },
        "OLD": {
            'description':
                'The <a href="{vcswatch_url}">VCS repository is not up to '
                'date</a>, push the missing commits.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "UNREL": {
            "description":
                'The <a href="{vcswatch_url}">VCS repository is not up to '
                'date</a>, push the missing commits.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "ERROR": {
            "description":
                '<a href="{vcswatch_url}">Failed to analyze the VCS '
                'repository</a>. Please troubleshoot and fix the issue.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "DEFAULT": {
            "description":
                '<a href="{url}">Unexpected status</a> ({status}) reported by '
                'VcsWatch.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
    }

    def initialize(self, *args, **kwargs):
        super(UpdateVcsWatchTask, self).initialize(*args, **kwargs)
        self.vcswatch_ai_type = ActionItemType.objects.create_or_update(
            type_name=self.VCSWATCH_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.VCSWATCH_ACTION_ITEM_TEMPLATE
        )
        self.salsa_mr_ai_type = ActionItemType.objects.create_or_update(
            type_name=self.SALSA_MR_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.SALSA_MR_ACTION_ITEM_TEMPLATE
        )

    def get_vcswatch_data(self):
        """
        Download the vcswatch JSON dump.

        :returns: a dict mapping package names to their vcswatch entry,
            or None when the resource could not be fetched.
        """
        text = get_resource_text(self.VCSWATCH_DATA_URL)

        if text is None:
            return

        # There's some text, let's load!
        data = json.loads(text)

        # Index by package name; this allows to save a lot of list
        # search later.
        return {entry['package']: entry for entry in data}

    def clean_package_info(self, package_infos_without_watch, todo):
        """Takes a list of :class:`PackageData` which do not
        have a watch entry and cleans it. Then schedule in todo what
        to do with them.
        """
        for package_info in package_infos_without_watch:
            if 'QA' in package_info.value:
                package_info.value.pop('QA')
                # Once the QA link is gone, an entry holding nothing but
                # its checksum (or nothing at all) is useless: drop it.
                if (list(package_info.value.keys()) == ['checksum'] or
                        not package_info.value.keys()):
                    todo['drop']['package_infos'].append(package_info)
                else:
                    package_info.value['checksum'] = get_data_checksum(
                        package_info.value
                    )
                    todo['update']['package_infos'].append(package_info)

    def update_action_item(self, package, vcswatch_data, action_item, todo):
        """
        For a given :class:`ActionItem` and a given vcswatch data, updates
        properly the todo dict if required.

        Returns dependingly on what has been done. If something is to
        be updated, returns True, if nothing is to be updated, returns
        False.

        :rtype: bool
        """

        package_status = vcswatch_data['status']

        if package_status == "OK":
            # Everything is fine, let's purge the action item. Not the
            # package extracted info as its QA url is still relevant.
            if action_item:
                todo['drop']['action_items'].append(action_item)

            # Nothing more to do!
            return False

        # NOT BEFORE "OK" check!!
        if package_status not in self.VCSWATCH_STATUS_DICT:
            package_status = "DEFAULT"

        # If we are here, then something is not OK. Let's check if we
        # already had some intel regarding the current package status.
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.vcswatch_ai_type)
            todo['add']['action_items'].append(action_item)
        else:
            todo['update']['action_items'].append(action_item)

        # Computes the watch URL
        vcswatch_url = self.VCSWATCH_URL % {'package': package.name}

        if action_item.extra_data:
            extra_data = action_item.extra_data
        else:
            extra_data = {}

        # Fetches the severity from the VCSWATCH_STATUS_DICT dict.
        action_item.severity = \
            self.VCSWATCH_STATUS_DICT[package_status]['severity']

        # "commits" may be missing/None in the feed; treat that as zero.
        nb_commits = int(vcswatch_data["commits"] or 0)

        # The new data
        new_extra_data = {
            'vcswatch_url': vcswatch_url,
        }
        new_extra_data.update(vcswatch_data)

        extra_data_match = all(
            new_extra_data[key] == extra_data.get(key, None)
            for key in new_extra_data
        )

        # If everything is fine and we are not forcing the update
        # then we proceed to the next package.
        if extra_data_match and not self.force_update:
            # Remove from the todolist
            todo['update']['action_items'].remove(action_item)
            return False
        else:
            # Report for short description of the :class:`ActionItem`
            desc = self.VCSWATCH_STATUS_DICT[package_status]['description']
            commits_s = 's' if nb_commits != 1 else ''
            action_item.short_description = \
                desc.format(commits_s=commits_s, **new_extra_data)
            action_item.extra_data = new_extra_data
            return True

    def update_package_info(self, package, vcswatch_data, package_info, key,
                            todo):
        """
        Create or refresh the :class:`PackageData` entry of *package* for
        the given *key*, scheduling the appropriate operation in *todo*.

        :returns: True when an update/add is scheduled, False otherwise.
        """
        # Same thing with PackageData
        if package_info is None:
            package_info = PackageData(
                package=package,
                key=key,
            )
            todo['add']['package_infos'].append(package_info)
        else:
            todo['update']['package_infos'].append(package_info)

        # Computes the watch URL
        vcswatch_url = self.VCSWATCH_URL % {'package': package.name}

        new_value = dict(package_info.value)
        if key == 'vcs_extra_links':
            new_value['QA'] = vcswatch_url
        elif key == 'vcswatch':
            if 'package_version' in vcswatch_data:
                new_value['package_version'] = vcswatch_data['package_version']
            if 'changelog_version' in vcswatch_data:
                new_value['changelog_version'] = vcswatch_data[
                    'changelog_version']
            if 'changelog_distribution' in vcswatch_data:
                new_value['changelog_distribution'] = vcswatch_data[
                    'changelog_distribution']

        new_value['checksum'] = get_data_checksum(new_value)

        # The checksum tells whether anything actually changed.
        package_info_match = (
            new_value['checksum'] == package_info.value.get('checksum', None)
        )

        if package_info_match and not self.force_update:
            todo['update']['package_infos'].remove(package_info)
            return False
        else:
            package_info.value = new_value
            return True

    def update_packages_item(self, packages, vcswatch_datas):
        """Generates the lists of :class:`ActionItem` to be added,
        deleted or updated regarding the status of their packages.

        Categories of statuses are:
        {u'COMMITS', u'ERROR', u'NEW', u'OK', u'OLD', u'UNREL'}

        Basically, it fetches all info from :class:`PackageData`
        with key='vcs', the ones without data matching vcswatch_datas are
        stored in one variable that's iterated through directly, and if
        there was something before, it is purged. Then, all entries in
        that queryset that have no relevant intel anymore are scheduled
        to be deleted. The others are only updated.

        All :class:`PackageData` matching vcswatch_datas
        are stored in another variable. The same is done with the list of
        :class:`ActionItem` that match this task type.

        Then, it iterates on all vcswatch_datas' packages and it tries to
        determine if there are any news, if so, it updates apopriately the
        prospective :class:`ActionItem` and :class:`PackageData`,
        and schedule them to be updated. If no data was existent, then
        it creates them and schedule them to be added to the database.

        At the end, this function returns a dict of all instances of
        :class:`ActionItem` and :class:`PackageData` stored
        in subdicts depending on their class and what is to be done
        with them.

        :rtype: dict
        """

        todo = {
            'drop': {
                'action_items': [],
                'package_infos': [],
            },
            'update': {
                'action_items': [],
                'package_infos': [],
            },
            'add': {
                'action_items': [],
                'package_infos': [],
            },
        }

        package_info_keys = ['vcs_extra_links', 'vcswatch']
        package_infos = {}
        for key in package_info_keys:
            # Fetches all PackageData with a given key for packages having
            # a vcswatch key. As the pair (package, key) is unique, there is a
            # bijection between these data, and we fetch them classifying them
            # by package name.
            for package_info in PackageData.objects.select_related(
                    'package').filter(key=key).only('package__name', 'value'):
                if package_info.package.name not in package_infos:
                    package_infos[package_info.package.name] = {}
                package_infos[package_info.package.name][key] = package_info

        # As :class:`PackageData` key=vcs_extra_links is shared, we
        # have to clean up those with vcs watch_url that aren't in vcs_data
        package_infos_without_watch = PackageData.objects.filter(
            key='vcs_extra_links').exclude(
            package__name__in=vcswatch_datas.keys()).only('value')

        # Do the actual clean.
        self.clean_package_info(package_infos_without_watch, todo)

        # Fetches all :class:`ActionItem` for packages concerned by a vcswatch
        # action.
        action_items = {
            action_item.package.name: action_item
            for action_item in ActionItem.objects.select_related(
                'package'
            ).filter(item_type=self.vcswatch_ai_type)
        }

        for package in packages:
            # Get the vcswatch_data from the whole vcswatch_datas
            vcswatch_data = vcswatch_datas[package.name]

            # Get the old action item for this warning, if it exists.
            action_item = action_items.get(package.name, None)

            # Schedules the add/update/drop of the vcswatch ActionItem.
            # The boolean return values are not needed here; the methods
            # record everything in `todo`.
            self.update_action_item(
                package,
                vcswatch_data,
                action_item,
                todo)

            for key in package_info_keys:
                package_info = package_infos.get(package.name, {}).get(key)

                self.update_package_info(
                    package,
                    vcswatch_data,
                    package_info,
                    key,
                    todo)

        return todo

    def update_action_item_for_salsa_mrs(self, package, vcswatch_data,
                                         action_item, todo):
        """
        For a given :class:`ActionItem` and a given vcswatch data, updates
        properly the todo dict if required.

        :rtype: `None`
        """

        try:
            parsed_url = urllib.parse.urlparse(vcswatch_data['url'])
        except (KeyError, ValueError):
            is_salsa = False
        else:
            is_salsa = parsed_url.netloc == 'salsa.debian.org'
        merge_requests = vcswatch_data.get('merge_requests', 0)
        need_ai = is_salsa and merge_requests

        if not need_ai:
            # There are no open Salsa MRs (or we can't check) so remove the AI
            if action_item:
                todo['drop'].append(action_item)

            # Nothing more to do!
            return

        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.salsa_mr_ai_type)
            todo['add'].append(action_item)
        else:
            todo['update'].append(action_item)

        # Computes the Salsa MR URL; drop any trailing branch indication
        # (" -b branch") and a ".git" suffix from the repository path.
        salsa_path = parsed_url.path.split(' ')[0]
        if salsa_path.endswith('.git'):
            salsa_path = salsa_path[:-4]
        url = 'https://salsa.debian.org{}/-/merge_requests'.format(salsa_path)

        if action_item.extra_data:
            extra_data = action_item.extra_data
        else:
            extra_data = {}

        new_extra_data = {
            'count': merge_requests,
            'url': url,
        }

        extra_data_match = all(
            new_extra_data[key] == extra_data.get(key, None)
            for key in new_extra_data
        )

        # If everything is fine and we are not forcing the update
        # then we proceed to the next package.
        if extra_data_match and not self.force_update:
            # Remove from the todolist
            todo['update'].remove(action_item)
        else:
            # Report for short description of the :class:`ActionItem`
            count_str = '{} open merge request{}'.format(
                merge_requests,
                's' if merge_requests != 1 else '')
            action_item.short_description = \
                self.SALSA_MR_SHORT_DESCRIPTION.format(
                    count_str=count_str,
                    **new_extra_data)
            action_item.severity = ActionItem.SEVERITY_NORMAL
            action_item.extra_data = new_extra_data

    def update_packages_item_for_salsa_mrs(self, packages, vcswatch_datas):
        """Generates the lists of :class:`ActionItem` to be added,
        deleted or updated regarding open Salsa MRs for their packages.

        At the end, this function returns a dict of all instances of
        :class:`ActionItem` stored in subdicts depending on their class
        and what is to be done with them.

        :rtype: dict
        """

        todo = {
            'drop': [],
            'update': [],
            'add': [],
        }

        # Fetches all :class:`ActionItem` for packages concerned by a salsa mr
        # action.
        action_items = {
            action_item.package.name: action_item
            for action_item in ActionItem.objects.select_related(
                'package'
            ).filter(item_type=self.salsa_mr_ai_type)
        }

        for package in packages:
            # Get the vcswatch_data from the whole vcswatch_datas
            vcswatch_data = vcswatch_datas[package.name]

            # Get the old action item for this warning, if it exists.
            action_item = action_items.get(package.name, None)

            # Updates the :class:`ActionItem`.
            self.update_action_item_for_salsa_mrs(
                package,
                vcswatch_data,
                action_item,
                todo)

        return todo

    def execute_main(self):
        # Get the actual vcswatch json file from qa.debian.org
        vcs_data = self.get_vcswatch_data()

        # The download can fail; in that case there is nothing to update.
        # Without this check the code below would crash on None.keys().
        if vcs_data is None:
            return

        # Only fetch the packages that are in the json dict.
        packages = PackageName.objects.filter(name__in=vcs_data.keys())

        # Faster than fetching the action items one by one in a loop
        # when handling each package. Note: prefetch_related() returns a
        # *new* queryset; the result must be kept, otherwise the
        # prefetch is silently lost.
        packages = packages.prefetch_related('action_items')

        # Determine wether something is to be kept or dropped.
        todo = self.update_packages_item(packages, vcs_data)
        todo_salsa_mrs = self.update_packages_item_for_salsa_mrs(
            packages,
            vcs_data)

        with transaction.atomic():
            # Delete the :class:`ActionItem` that are osbolete, and also
            # the :class:`PackageData` of the same.
            ActionItem.objects.delete_obsolete_items(
                [self.vcswatch_ai_type, self.salsa_mr_ai_type],
                vcs_data.keys())
            PackageData.objects.filter(
                key='vcs_extra_links',
                id__in=[
                    package_info.id
                    for package_info in todo['drop']['package_infos']
                ]
            ).delete()

            # Then delete the :class:`ActionItem` that are to be deleted.
            ActionItem.objects.filter(
                item_type__type_name=self.vcswatch_ai_type.type_name,
                id__in=[
                    action_item.id
                    for action_item in todo['drop']['action_items']
                ]
            ).delete()
            ActionItem.objects.filter(
                item_type__type_name=self.salsa_mr_ai_type.type_name,
                id__in=[
                    action_item.id
                    for action_item in todo_salsa_mrs['drop']
                ]
            ).delete()

            # Then bulk_create the :class:`ActionItem` to add and the
            # :class:`PackageData`
            ActionItem.objects.bulk_create(todo['add']['action_items'])
            PackageData.objects.bulk_create(todo['add']['package_infos'])
            ActionItem.objects.bulk_create(todo_salsa_mrs['add'])

            # Update existing entries
            for action_item in todo['update']['action_items']:
                action_item.save()
            for package_info in todo['update']['package_infos']:
                package_info.save()
            for action_item in todo_salsa_mrs['update']:
                action_item.save()

3156 

3157 

class TagPackagesWithRcBugs(BaseTask, PackageTagging):
    """
    Performs an update of 'rc-bugs' tag for packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    TAG_NAME = 'tag:rc-bugs'
    TAG_DISPLAY_NAME = 'rc bugs'
    TAG_COLOR_TYPE = 'danger'
    TAG_DESCRIPTION = 'The package has Release Critical bugs'
    TAG_TABLE_TITLE = 'Packages with RC bugs'

    def packages_to_tag(self):
        """Return the packages whose 'rc' bug category has open bugs."""
        tagged = []
        for bug_stats in PackageBugStats.objects.prefetch_related('package'):
            # Only the first 'rc' category entry is considered.
            rc_category = next(
                (category for category in bug_stats.stats
                 if category['category_name'] == 'rc'),
                None)
            if rc_category is not None and rc_category['bug_count'] > 0:
                tagged.append(bug_stats.package)
        return tagged

3186 

3187 

class TagPackagesWithNewUpstreamVersion(BaseTask, PackageTagging):
    """
    Performs an update of 'new-upstream-version' tag for packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    TAG_NAME = 'tag:new-upstream-version'
    TAG_DISPLAY_NAME = 'new upstream version'
    TAG_COLOR_TYPE = 'warning'
    TAG_DESCRIPTION = 'The upstream has a newer version available'
    TAG_TABLE_TITLE = 'Newer upstream version'

    def packages_to_tag(self):
        """Return every package with a 'new-upstream-version' action item."""
        try:
            action_type = ActionItemType.objects.get(
                type_name='new-upstream-version')
        except ActionItemType.DoesNotExist:
            # The action item type has never been created: nothing to tag.
            return []

        return [
            item.package
            for item in action_type.action_items.prefetch_related('package')
        ]

3214 

3215 

class UpdateDependencySatisfactionTask(BaseTask):
    """
    Fetches binary package installability results from qa.debian.org/dose
    and maintains the corresponding action items and
    'dependency_satisfaction' :class:`PackageData` entries.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-dependency-satisfaction'
    ACTION_ITEM_TEMPLATE = 'debian/dependency-satisfaction-action-item.html'

    # Only problems on these release architectures are reported.
    # TODO: retrieve this list programmatically, either from
    # https://api.ftp-master.debian.org/suite/testing
    # or from the Architecture field in the Release file
    # for testing (both lists should be equal).
    RELEASE_ARCHITECTURES = frozenset({
        'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
        'mips64el', 'mipsel', 'ppc64el', 's390x',
    })

    def __init__(self, force_update=False, *args, **kwargs):
        super(UpdateDependencySatisfactionTask, self).__init__(*args, **kwargs)
        self.force_update = force_update
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def set_parameters(self, parameters):
        """Allow ``force_update`` to be overridden via task parameters."""
        if 'force_update' in parameters:
            self.force_update = parameters['force_update']

    def get_dependency_satisfaction(self):
        """
        Download and parse the dose 'each.txt' report.

        :returns: a dict mapping source package names to a list of
            ``(binary name, version, architectures, explanation, anchor)``
            tuples, or None when the resource has not changed.
        """
        url = '{}/each.txt'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
        for line in content.splitlines():
            # Fields are '#'-separated in the each.txt format.
            binpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            try:
                bin_package = BinaryPackageName.objects.get(name=binpkg_name)
                srcpkg_name = bin_package.main_source_package_name
            except BinaryPackageName.DoesNotExist:
                # Unknown binary package: nothing to attach the data to.
                continue
            arches = {arch.strip() for arch in arches.split()}
            # only report problems for release architectures
            arches &= self.RELEASE_ARCHITECTURES
            if not arches:
                continue
            # if the package is arch:all, only report problems on amd64
            if isnative != "True":
                arches &= {"amd64"}
                if not arches:
                    continue
            dep_sats[srcpkg_name].add(
                (binpkg_name, ver, tuple(arches), expl, anchor))
        # turn sets into lists
        return {name: list(unsats) for name, unsats in dep_sats.items()}

    def update_action_item(self, package, unsats):
        """Create or refresh the dependency-satisfaction item of *package*."""
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        action_item.short_description = \
            "{count} binary package{plural} {have} unsatisfiable " \
            "dependencies".format(
                count=len(unsats),
                plural='' if len(unsats) == 1 else 's',
                have='has' if len(unsats) == 1 else 'have',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            # The data set is rebuilt from scratch on every run.
            PackageData.objects.filter(key='dependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='dependency_satisfaction',
                    package=package,
                    value={'dependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)

3325 

3326 

class UpdateBuildDependencySatisfactionTask(BaseTask):
    """
    Fetches source package installability results from qa.debian.org/dose
    and maintains the corresponding action items and
    'builddependency_satisfaction' :class:`PackageData` entries.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/src_unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-builddependency-satisfaction'
    ACTION_ITEM_TEMPLATE = \
        'debian/builddependency-satisfaction-action-item.html'

    # Only problems on these release architectures are reported.
    # TODO: retrieve this list programmatically, either from
    # https://api.ftp-master.debian.org/suite/testing
    # or from the Architecture field in the Release file
    # for testing (both lists should be equal).
    RELEASE_ARCHITECTURES = frozenset({
        'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
        'mips64el', 'mipsel', 'ppc64el', 's390x',
    })

    def __init__(self, *args, **kwargs):
        super(UpdateBuildDependencySatisfactionTask, self).__init__(*args,
                                                                    **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_dependency_satisfaction(self):
        """
        Download and parse the dose 'each.txt' report for source packages.

        :returns: a dict mapping source package names to a list of
            ``(source name, architectures, explanation, anchor)`` tuples,
            or None when the resource has not changed.
        """
        url = '{}/each.txt'.format(self.BASE_URL)
        # NOTE(review): self.force_update is not set in __init__ here;
        # presumably provided by BaseTask — confirm.
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
        for line in content.splitlines():
            # Fields are '#'-separated in the each.txt format.
            srcpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            arches = {arch.strip() for arch in arches.split()}
            # only report problems for release architectures
            arches &= self.RELEASE_ARCHITECTURES
            if not arches:
                continue
            # if the source package only builds arch:all binary packages, only
            # report problems on amd64
            if isnative != "True":
                arches &= {"amd64"}
                if not arches:
                    continue
            dep_sats[srcpkg_name].add(
                (srcpkg_name, tuple(arches), expl, anchor))
        # turn sets into lists
        return {name: list(unsats) for name, unsats in dep_sats.items()}

    def update_action_item(self, package, unsats):
        """Create or refresh the build-dep-satisfaction item of *package*."""
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        action_item.short_description = \
            "source package has {count} unsatisfiable " \
            "build dependenc{plural}".format(
                count=len(unsats),
                plural='y' if len(unsats) == 1 else 'ies',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            # The data set is rebuilt from scratch on every run.
            PackageData.objects.filter(
                key='builddependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='builddependency_satisfaction',
                    package=package,
                    value={'builddependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)

3429 

3430 

class UpdateDl10nStatsTask(BaseTask):
    """
    Updates packages' l10n statistics.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'dl10n'
    ITEM_DESCRIPTION = \
        '<a href="{url}">Issues</a> found with some translations'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/dl10n-action-item.html'

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.l10n_action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _load_l10n_stats(self):
        """Fetch and parse the i18n pkglist.

        :returns: a dict mapping source package names to their l10n stats,
            or ``None`` when the resource was not re-downloaded.
        """
        url = 'https://i18n.debian.org/l10n-pkg-status/pkglist'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        def to_score(raw):
            # A dash means "no score available".
            return None if raw == '-' else int(raw)

        # The format of the file is (copied from its header):
        # <package> <version> (<comma sperated scores>) <link> <todo>
        pkglist_re = re.compile(
            r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)')

        stats_by_package = {}
        for line in content.splitlines():
            if not line or line.startswith('#'):
                continue
            match = pkglist_re.search(line)
            if match is None:
                logger.warning('Failed to parse l10n pkglist line: %s', line)
                continue

            src_pkgname = match.group(1)
            try:
                raw_scores = match.group(3).split(',')
                score_debian = to_score(raw_scores[0])
                score_other = to_score(raw_scores[1])
                # <todo> is a "0" or "1" string, so convert through int to
                # get a proper bool
                todo = bool(int(match.group(5)))
            except (IndexError, ValueError):
                logger.warning(
                    'Failed to parse l10n scores: %s',
                    line, exc_info=1)
                continue

            # Entries without any score are not worth recording.
            if not score_debian and not score_other:
                continue

            stats_by_package[src_pkgname] = {
                'score_debian': score_debian,
                'score_other': score_other,
                'link': match.group(4),
                'todo': todo,
            }

        return stats_by_package

    def update_action_item(self, package, package_stats):
        """Create, update or delete the dl10n action item for a package."""
        existing = package.get_action_item_for_type(
            self.l10n_action_item_type.type_name)

        if not package_stats['todo']:
            # Nothing left to fix: drop any stale item.
            if existing:
                existing.delete()
            return

        if existing is None:
            # No item yet for this package: create one now.
            existing = ActionItem(
                package=package,
                item_type=self.l10n_action_item_type,
                severity=ActionItem.SEVERITY_LOW,
                short_description=self.ITEM_DESCRIPTION.format(
                    url=package_stats['link']))

        if existing.extra_data and existing.extra_data == package_stats:
            # Stats unchanged since the last run: no need to update.
            return

        existing.extra_data = package_stats
        existing.save()

    def execute_main(self):
        stats = self._load_l10n_stats()
        if not stats:
            return

        with transaction.atomic():
            # Rebuild the dl10n data from scratch on every run.
            PackageData.objects.filter(key='dl10n').delete()

            known_packages = []
            new_pkgdata = []

            for name, stat in stats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    known_packages.append(package)
                    self.update_action_item(package, stat)
                except SourcePackageName.DoesNotExist:
                    continue

                new_pkgdata.append(PackageData(
                    key='dl10n',
                    package=package,
                    value=stat))

            ActionItem.objects.delete_obsolete_items(
                [self.l10n_action_item_type], known_packages)
            PackageData.objects.bulk_create(new_pkgdata)

3563 

3564 

class UpdateDebianPatchesTask(BaseTask, ImportExternalData):
    """
    Import statistics about Debian patches from UDD.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    data_url = 'https://udd.debian.org/patches.cgi?json=1'
    action_item_types = [
        {
            'type_name': 'debian-patches',
            'full_description_template':
                'debian/debian-patches-action-item.html',
        },
    ]

    def generate_package_data(self):
        """Build the 'debian-patches' PackageData payload from UDD entries."""
        per_source = {}
        for entry in self.external_data:
            name = entry.get('source')
            if not name:
                continue  # entries without a source package are unusable
            augmented = dict(entry)
            augmented['url'] = self._generate_url(entry)
            per_source[name] = augmented

        return [
            ('debian-patches', per_source),
        ]

    @staticmethod
    def _generate_url(entry):
        """Return the UDD patches.cgi URL for the entry's source/version."""
        params = {
            'src': entry.get('source'),
            'version': entry.get('version'),
        }
        return f"https://udd.debian.org/patches.cgi?{urlencode(params)}"

    def generate_action_items(self):
        """Compute per-package action item parameters for debian-patches."""

        def linked_count(number, url):
            # Build the '<a href=...>N patch(es)</a>' description fragment.
            label = f"{number} patch"
            if number > 1:
                label += 'es'
            return f'<a href="{url}">{label}</a>'

        items = {}
        for entry in self.external_data:
            # Skip invalid entries and those without (problematic) patches
            source = entry.get('source')
            forwarded_invalid = entry.get('forwarded_invalid', 0)
            forwarded_no = entry.get('forwarded_no', 0)
            if not source:
                continue  # Invalid, no source package data
            if entry.get('status') != 'patches':
                continue  # No patch at all
            if forwarded_invalid == 0 and forwarded_no == 0:
                continue  # No problematic patch

            # Build the parameters for the action item
            url = self._generate_url(entry)
            severity = ActionItem.SEVERITY_LOW
            parts = []

            if forwarded_invalid:
                # Invalid metadata is the more serious problem.
                severity = ActionItem.SEVERITY_HIGH
                parts.append(
                    f"{linked_count(forwarded_invalid, url)}"
                    " with invalid metadata")

            if forwarded_no:
                parts.append(
                    f"{linked_count(forwarded_no, url)}"
                    " to forward upstream")

            desc = ', '.join(parts)

            extra_data = dict(entry)
            extra_data['url'] = url

            # Record the action item parameters
            items[source] = {
                'short_description': f"debian/patches: {desc}",
                'severity': severity,
                'extra_data': extra_data,
            }

        return [
            ('debian-patches', items),
        ]