# Copyright 2013-2021 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at https://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at https://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.

"""
Debian-specific tasks.
"""

import collections
import itertools
import json
import logging
import os
import re
from enum import Enum

from bs4 import BeautifulSoup as soup

from debian import deb822, debian_support
from debian.debian_support import AptPkgVersion

import debianbts

from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import transaction
from django.db.models import Prefetch
from django.utils.http import urlencode

import yaml

from distro_tracker.accounts.models import UserEmail
from distro_tracker.core.models import (
    ActionItem,
    ActionItemType,
    BinaryPackageBugStats,
    BinaryPackageName,
    BugDisplayManagerMixin,
    PackageBugStats,
    PackageData,
    PackageName,
    Repository,
    SourcePackageDeps,
    SourcePackageName
)
from distro_tracker.core.tasks import BaseTask
from distro_tracker.core.tasks.mixins import PackageTagging
from distro_tracker.core.tasks.schedulers import IntervalScheduler
from distro_tracker.core.utils import get_or_none
from distro_tracker.core.utils.http import get_resource_text
from distro_tracker.core.utils.misc import get_data_checksum
from distro_tracker.core.utils.packages import (
    html_package_list,
    package_url
)
from distro_tracker.vendor.debian.models import (
    BuildLogCheckStats,
    LintianStats,
    PackageExcuses,
    PackageTransition,
    UbuntuPackage
)

from .models import DebianContributor

logger = logging.getLogger(__name__)
logger_input = logging.getLogger('distro_tracker.input')


class RetrieveDebianMaintainersTask(BaseTask):
    """
    Retrieves (and updates if necessary) a list of Debian Maintainers.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def execute_main(self):
        url = "https://ftp-master.debian.org/dm.txt"
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # No need to do anything if the cached item was still not updated
            return

        maintainers = {}
        lines = content.splitlines()
        for stanza in deb822.Deb822.iter_paragraphs(lines):
            if 'Uid' in stanza and 'Allow' in stanza:
                # Allow is a comma-separated string of 'package (DD fpr)' items,
                # where DD fpr is the fingerprint of the DD that granted the
                # permission
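                # Uid is typically of the form "Full Name <email>", e.g.
                # "Jane Doe <jane@example.org>" (illustrative example), hence
                # the rsplit on the last space and the stripping of "<>".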
                name, email = stanza['Uid'].rsplit(' ', 1)
                email = email.strip('<>')
                for pair in stanza['Allow'].split(','):
                    pair = pair.strip()
                    pkg, dd_fpr = pair.split()
                    maintainers.setdefault(email, [])
                    maintainers[email].append(pkg)

        # Now update the developer information
        with transaction.atomic():
            # Reset all old maintainers first.
            qs = DebianContributor.objects.filter(is_debian_maintainer=True)
            qs.update(is_debian_maintainer=False)

            for email, packages in maintainers.items():
                try:
                    user_email, _ = UserEmail.objects.get_or_create(email=email)
                except ValidationError:
                    logger_input.info('%s refers to invalid email "%s".',
                                      url, email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.is_debian_maintainer = True
                contributor.allowed_packages = packages
                contributor.save()


class RetrieveLowThresholdNmuTask(BaseTask):
    """
    Updates the list of Debian Maintainers who agree with the low-threshold
    NMU.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def _retrieve_emails(self):
        """
        Helper function which obtains the list of emails of maintainers that
        agree with the low-threshold NMU.
        """
        url = 'https://wiki.debian.org/LowThresholdNmu?action=raw'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        emails = []
        devel_php_RE = re.compile(
            r'https?://qa\.debian\.org/developer\.php\?login=([^\s&|]+)')
        word_RE = re.compile(r'^\w+$')
        for line in content.splitlines():
            match = devel_php_RE.search(line)
            while match:  # look for several matches on the same line
                email = None
                login = match.group(1)
                if word_RE.match(login):
                    email = login + '@debian.org'
                elif login.find('@') >= 0:
                    email = login
                if email:
                    emails.append(email)
                line = line[match.end():]
                match = devel_php_RE.search(line)
        return emails

    def execute_main(self):
        emails = self._retrieve_emails()
        with transaction.atomic():
            # Reset all threshold flags first.
            qs = DebianContributor.objects.filter(
                agree_with_low_threshold_nmu=True)
            qs.update(agree_with_low_threshold_nmu=False)

            for email in emails:
                try:
                    email, _ = UserEmail.objects.get_or_create(email=email)
                except ValidationError:
                    logger_input.info(
                        'LowThresholdNmu refers to invalid email "%s".', email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=email)

                contributor.agree_with_low_threshold_nmu = True
                contributor.save()


class UpdatePackageBugStats(BaseTask, BugDisplayManagerMixin):
    """
    Updates the BTS bug stats for all packages (source, binary and pseudo).
    Creates :class:`distro_tracker.core.models.ActionItem` instances for
    packages which have bugs tagged help or patch.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    PATCH_BUG_ACTION_ITEM_TYPE_NAME = 'debian-patch-bugs-warning'
    HELP_BUG_ACTION_ITEM_TYPE_NAME = 'debian-help-bugs-warning'

    PATCH_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged patch in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    HELP_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged help in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/patch-bugs-action-item.html'
    HELP_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/help-bugs-action-item.html'

    bug_categories = (
        'rc',
        'normal',
        'wishlist',
        'fixed',
        'patch',
    )

    def initialize(self, *args, **kwargs):
        super(UpdatePackageBugStats, self).initialize(*args, **kwargs)
        # The :class:`distro_tracker.core.models.ActionItemType` instances
        # which this task can create.
        self.patch_item_type = ActionItemType.objects.create_or_update(
            type_name=self.PATCH_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE)
        self.help_item_type = ActionItemType.objects.create_or_update(
            type_name=self.HELP_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.HELP_ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_tagged_bug_stats(self, tag, user=None):
        """
        Using the BTS interface, retrieves the statistics of bugs with a
        particular tag.

        :param tag: The tag for which the statistics are required.
        :type tag: string
        :param user: The email of the user who tagged the bug with the given
            tag.
        :type user: string

        :returns: A dict mapping package names to the count of bugs with the
            given tag.
        """
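        # If the Debian-specific CA bundle is installed, point SSL_CERT_FILE
        # at it so that the HTTPS requests made by debianbts below validate
        # against it.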
        debian_ca_bundle = '/etc/ssl/ca-debian/ca-certificates.crt'
        if os.path.exists(debian_ca_bundle):
            os.environ['SSL_CERT_FILE'] = debian_ca_bundle
        if user:
            bug_numbers = debianbts.get_usertag(user, tags=[tag]).get(tag, [])
        else:
            bug_numbers = debianbts.get_bugs(tag=tag)

        # Match each retrieved bug ID to a package and then find the aggregate
        # count for each package.
        bug_stats = {}
        bugs = debianbts.get_status(bug_numbers)
        for bug in bugs:
            if bug.done or bug.fixed_versions or bug.pending == 'done':
                continue

            bug_stats.setdefault(bug.package, 0)
            bug_stats[bug.package] += 1

        return bug_stats

    def _extend_bug_stats(self, bug_stats, extra_stats, category_name):
        """
        Helper method which adds extra bug stats to an already existing list
        of stats.

        :param bug_stats: An already existing list of bug stats. Maps package
            names to a list of bug category descriptions.
        :type bug_stats: dict
        :param extra_stats: Extra bug stats which should be added to
            ``bug_stats``. Maps package names to integers representing bug
            counts.
        :type extra_stats: dict
        :param category_name: The name of the bug category which is being
            added.
        :type category_name: string
        """
        for package, count in extra_stats.items():
            bug_stats.setdefault(package, [])
            bug_stats[package].append({
                'category_name': category_name,
                'bug_count': count,
            })

    def _create_patch_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged patch.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.PATCH_BUG_ACTION_ITEM_TYPE_NAME)

        if 'patch' not in bug_stats or bug_stats['patch']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged patch anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged patch, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.patch_item_type)

        bug_count = bug_stats['patch']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'patch')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = \
            self.PATCH_ITEM_SHORT_DESCRIPTION.format(url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'merged_count': bug_stats['patch'].get('merged_count', 0),
            'url': url,
            'merged_url': self.bug_manager.get_bug_tracker_url(
                package.name, 'source', 'patch-merged'),
        }
        action_item.save()

    def _create_help_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged help.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.HELP_BUG_ACTION_ITEM_TYPE_NAME)

        if 'help' not in bug_stats or bug_stats['help']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged help anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged help, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.help_item_type)

        bug_count = bug_stats['help']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'help')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = self.HELP_ITEM_SHORT_DESCRIPTION.format(
            url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'url': url,
        }
        action_item.save()

    def _create_action_items(self, package_bug_stats):
        """
        Method which creates a :class:`distro_tracker.core.models.ActionItem`
        instance for a package based on the given package stats.

        For now, an action item is created if the package has bugs tagged
        either help or patch.
        """
        # Transform the bug stats to a structure easier to pass to functions
        # for particular bug-category action items.
        bug_stats = {
            category['category_name']: category
            for category in package_bug_stats.stats
        }
        package = package_bug_stats.package
        self._create_patch_bug_action_item(package, bug_stats)
        self._create_help_bug_action_item(package, bug_stats)

    def _get_udd_bug_stats(self):
        url = 'https://udd.debian.org/cgi-bin/ddpo-bugs.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Each line in the response should be bug stats for a single package
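        # A line is expected to look roughly like
        # "src:foo: 1(0) 2(1) 0(0) 0(0) 3(2)", i.e. a package name followed
        # by "count(merged)" pairs, one per category in ``bug_categories``
        # (illustrative example, not real data).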
        bug_stats = {}
        for line in response_content.splitlines():
            line = line.strip()
            try:
                package_name, bug_counts = line, ''
                if line.startswith('src:'):
                    src, package_name, bug_counts = line.split(':', 2)
                else:
                    package_name, bug_counts = line.split(':', 1)
                # Merged counts are in parentheses so remove those before
                # splitting the numbers
                bug_counts = re.sub(r'[()]', ' ', bug_counts).split()
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.warning(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts, exc_info=1)
                continue

            # Match the extracted counts with category names
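            # The cleaned-up counts alternate between a category's total and
            # its merged count, hence the even/odd slicing below.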
            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                    'merged_count': merged_count,
                }
                for category_name, (bug_count, merged_count) in zip(
                    self.bug_categories, zip(bug_counts[::2], bug_counts[1::2]))
            ]

        return bug_stats

    def _remove_obsolete_action_items(self, package_names):
        """
        Removes action items for packages which no longer have any bug stats.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.patch_item_type, self.help_item_type],
            non_obsolete_packages=package_names)

    def update_source_and_pseudo_bugs(self):
        """
        Performs the update of bug statistics for source and pseudo packages.
        """
        # First get the bug stats exposed by the UDD.
        bug_stats = self._get_udd_bug_stats()
        if not bug_stats:
            bug_stats = {}

        # Add in help bugs from the BTS interface
        try:
            help_bugs = self._get_tagged_bug_stats('help')
            self._extend_bug_stats(bug_stats, help_bugs, 'help')
        except RuntimeError:
            logger.exception("Could not get bugs tagged help")

        # Add in newcomer bugs from the BTS interface
        try:
            newcomer_bugs = self._get_tagged_bug_stats('newcomer')
            self._extend_bug_stats(bug_stats, newcomer_bugs, 'newcomer')
        except RuntimeError:
            logger.exception("Could not get bugs tagged newcomer")

        with transaction.atomic():
            # Clear previous stats
            PackageBugStats.objects.all().delete()
            self._remove_obsolete_action_items(bug_stats.keys())
            # Get all packages which have updated stats, along with their
            # action items in 2 DB queries.
            packages = PackageName.objects.filter(name__in=bug_stats.keys())
            packages.prefetch_related('action_items')

            # Update stats and action items.
            stats = []
            for package in packages:
                # Save the raw package bug stats
                package_bug_stats = PackageBugStats(
                    package=package, stats=bug_stats[package.name])
                stats.append(package_bug_stats)

                # Add action items for the package.
                self._create_action_items(package_bug_stats)

            PackageBugStats.objects.bulk_create(stats)

    def update_binary_bugs(self):
        """
        Performs the update of bug statistics for binary packages.
        """
        url = 'https://udd.debian.org/cgi-bin/bugs-binpkgs-pts.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Extract known binary package bug stats: each line is a separate pkg
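        # A line is expected to hold the binary package name followed by
        # whitespace-separated counts in the order of ``bug_categories``,
        # e.g. "foo 1 2 0 0 1" (illustrative example).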
        bug_stats = {}
        for line in response_content.splitlines():
            package_name, bug_counts = line.split(None, 1)
            bug_counts = bug_counts.split()
            try:
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.exception(
                    'Failed to parse bug information for %s: %s',
                    package_name, bug_counts)
                continue

            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                }
                for category_name, bug_count in zip(
                    self.bug_categories, bug_counts)
            ]

        with transaction.atomic():
            # Clear previous stats
            BinaryPackageBugStats.objects.all().delete()
            packages = \
                BinaryPackageName.objects.filter(name__in=bug_stats.keys())
            # Create new stats in a single query
            stats = [
                BinaryPackageBugStats(package=package,
                                      stats=bug_stats[package.name])
                for package in packages
            ]
            BinaryPackageBugStats.objects.bulk_create(stats)

    def execute_main(self):
        # Stats for source and pseudo packages are retrieved from a different
        # resource (with a different structure) than stats for binary packages.
        self.update_source_and_pseudo_bugs()
        self.update_binary_bugs()


class UpdateLintianStatsTask(BaseTask):
    """
    Updates packages' lintian stats.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 4

    ACTION_ITEM_TYPE_NAME = 'lintian-warnings-and-errors'
    ITEM_DESCRIPTION = 'lintian reports <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/lintian-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateLintianStatsTask, self).initialize(*args, **kwargs)
        self.lintian_action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def get_lintian_stats(self):
        url = 'https://lintian.debian.org/static/qa-list.txt'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        all_stats = {}
        categories = (
            'errors',
            'warnings',
            'pedantics',
            'experimentals',
            'overriddens',
        )
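        # Each line of qa-list.txt is expected to hold a package name
        # followed by counts matching the categories above, e.g.
        # "foo 1 2 0 0 3" (illustrative example).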
        for line in content.splitlines():
            package, stats = line.split(None, 1)
            stats = stats.split()
            try:
                all_stats[package] = {
                    category: int(count)
                    for count, category in zip(stats, categories)
                }
            except ValueError:
                logger.exception(
                    'Failed to parse lintian information for %s: %s',
                    package, line)
                continue

        return all_stats

    def update_action_item(self, package, lintian_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        :class:`LintianStats <distro_tracker.vendor.debian.models.LintianStats>`
        given in ``lintian_stats``. If the package has errors or warnings an
        :class:`ActionItem` is created.
        """
        package_stats = lintian_stats.stats
        warnings, errors = (
            package_stats.get('warnings'), package_stats.get('errors', 0))
        # Get the old action item for this warning, if it exists.
        lintian_action_item = package.get_action_item_for_type(
            self.lintian_action_item_type.type_name)
        if not warnings and not errors:
            if lintian_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                lintian_action_item.delete()
            return

        # The package didn't previously have an action item: create it now
        if lintian_action_item is None:
            lintian_action_item = ActionItem(
                package=package,
                item_type=self.lintian_action_item_type)

        lintian_url = lintian_stats.get_lintian_url()
        new_extra_data = {
            'warnings': warnings,
            'errors': errors,
            'lintian_url': lintian_url,
        }
        if lintian_action_item.extra_data:
            old_extra_data = lintian_action_item.extra_data
            if (old_extra_data['warnings'] == warnings and
                    old_extra_data['errors'] == errors):
                # No need to update
                return

        lintian_action_item.extra_data = new_extra_data

        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')

        lintian_action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=lintian_url,
            report=report)

        # If there are errors make the item a high severity issue
        if errors:
            lintian_action_item.severity = ActionItem.SEVERITY_HIGH

        lintian_action_item.save()

    def execute_main(self):
        all_lintian_stats = self.get_lintian_stats()
        if not all_lintian_stats:
            return

        # Discard all old stats
        LintianStats.objects.all().delete()

        packages = PackageName.objects.filter(name__in=all_lintian_stats.keys())
        packages.prefetch_related('action_items')
        # Remove action items for packages which no longer have associated
        # lintian data.
        ActionItem.objects.delete_obsolete_items(
            [self.lintian_action_item_type], all_lintian_stats.keys())

        stats = []
        for package in packages:
            package_stats = all_lintian_stats[package.name]
            # Save the raw lintian stats.
            lintian_stats = LintianStats(package=package, stats=package_stats)
            stats.append(lintian_stats)
            # Create an ActionItem if there are errors or warnings
            self.update_action_item(package, lintian_stats)

        LintianStats.objects.bulk_create(stats)


class UpdateAppStreamStatsTask(BaseTask):
    """
    Updates packages' AppStream issue hints data.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'appstream-issue-hints'
    ITEM_DESCRIPTION = 'AppStream hints: {report}'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/appstream-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateAppStreamStatsTask, self).initialize(*args, **kwargs)
        self.appstream_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)
        self._tag_severities = {}

    def _load_tag_severities(self):
        url = 'https://appstream.debian.org/hints/sid/hint-definitions.json'
        json_data = get_resource_text(url, force_update=True)

        data = json.loads(json_data)
        for tag, info in data.items():
            self._tag_severities[tag] = info['severity']

    def _load_appstream_hint_stats(self, section, arch, all_stats={}):
        url = 'https://appstream.debian.org/hints/sid/{}/Hints-{}.json.gz' \
            .format(section, arch)
        hints_json = get_resource_text(url, force_update=self.force_update)

        hints = json.loads(hints_json)
        for hint in hints:
            pkid = hint['package']
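            # The package id is a '/'-separated string whose first component
            # is the binary package name; only that component is used here.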
            parts = pkid.split('/')
            package_name = parts[0]

            # get the source package for this binary package name
            src_pkgname = None
            if SourcePackageName.objects.exists_with_name(package_name):
                package = SourcePackageName.objects.get(name=package_name)
                src_pkgname = package.name
            elif BinaryPackageName.objects.exists_with_name(package_name):
                bin_package = BinaryPackageName.objects.get(name=package_name)
                package = bin_package.main_source_package_name
                src_pkgname = package.name
            else:
                src_pkgname = package_name

            if src_pkgname not in all_stats:
                all_stats[src_pkgname] = {}
            if package_name not in all_stats[src_pkgname]:
                all_stats[src_pkgname][package_name] = {}

            for cid, h in hint['hints'].items():
                for e in h:
                    severity = self._tag_severities[e['tag']]
                    if severity == "error":
                        sevkey = "errors"
                    elif severity == "warning":
                        sevkey = "warnings"
                    elif severity == "info":
                        sevkey = "infos"
                    else:
                        continue
                    if sevkey not in all_stats[src_pkgname][package_name]:
                        all_stats[src_pkgname][package_name][sevkey] = 1
                    else:
                        all_stats[src_pkgname][package_name][sevkey] += 1

        return all_stats

    def _get_appstream_url(self, package, bin_pkgname):
        """
        Returns the AppStream issues URL for the given binary package name of
        the given source package.
        """

        src_package = get_or_none(SourcePackageName, pk=package.pk)
        if not src_package:
            return '#'

        if not src_package.main_version:
            return '#'

        component = 'main'
        main_entry = src_package.main_entry
        if main_entry:
            component = main_entry.component
            if not component:
                component = 'main'

        return (
            'https://appstream.debian.org/sid/{}/issues/{}.html'
            .format(component, bin_pkgname)
        )

    def _create_final_stats_report(self, package, package_stats):
        """
        Returns a transformed statistics report to be stored in the database.
        """

        as_report = package_stats.copy()
        for bin_package in list(as_report.keys()):
            # we currently don't want to display info-type hints
            as_report[bin_package].pop('infos', None)
            if as_report[bin_package]:
                as_report[bin_package]['url'] = \
                    self._get_appstream_url(package, bin_package)
            else:
                as_report.pop(bin_package)
        return as_report

    def update_action_item(self, package, package_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        AppStream hint statistics given in ``package_stats``.
        If the package has errors or warnings an
        :class:`ActionItem` is created.
        """

        total_warnings = 0
        total_errors = 0
        for bin_pkgname, info in package_stats.items():
            total_warnings += info.get('warnings', 0)
            total_errors += info.get('errors', 0)

        # Get the old action item for this warning, if it exists.
        appstream_action_item = package.get_action_item_for_type(
            self.appstream_action_item_type.type_name)
        if not total_warnings and not total_errors:
            if appstream_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                appstream_action_item.delete()
            return

        # The package didn't previously have an action item: create it now
        if appstream_action_item is None:
            appstream_action_item = ActionItem(
                package=package,
                item_type=self.appstream_action_item_type)

        as_report = self._create_final_stats_report(package, package_stats)

        if appstream_action_item.extra_data:
            old_extra_data = appstream_action_item.extra_data
            if old_extra_data == as_report:
                # No need to update
                return

        appstream_action_item.extra_data = as_report

        if total_errors and total_warnings:
            short_report = '{} error{} and {} warning{}'.format(
                total_errors,
                's' if total_errors > 1 else '',
                total_warnings,
                's' if total_warnings > 1 else '')
        elif total_errors:
            short_report = '{} error{}'.format(
                total_errors,
                's' if total_errors > 1 else '')
        elif total_warnings:
            short_report = '{} warning{}'.format(
                total_warnings,
                's' if total_warnings > 1 else '')

        appstream_action_item.short_description = \
            self.ITEM_DESCRIPTION.format(report=short_report)

        # If there are errors make the item a high severity issue;
        # otherwise, make sure to set the severity as normal in case the item
        # existed already
        if total_errors:
            appstream_action_item.severity = ActionItem.SEVERITY_HIGH
        else:
            appstream_action_item.severity = ActionItem.SEVERITY_NORMAL

        appstream_action_item.save()

    def execute_main(self):
        self._load_tag_severities()
        all_stats = {}
        repository = Repository.objects.get(default=True)
        arch = "amd64"
        for component in repository.components:
            self._load_appstream_hint_stats(component, arch, all_stats)
        if not all_stats:
            return

        with transaction.atomic():
            # Delete obsolete data
            PackageData.objects.filter(key='appstream').delete()

            packages = PackageName.objects.filter(name__in=all_stats.keys())
            packages.prefetch_related('action_items')

            stats = []
            for package in packages:
                package_stats = all_stats[package.name]
                stats.append(
                    PackageData(
                        package=package,
                        key='appstream',
                        value=package_stats
                    )
                )

                # Create an ActionItem if there are errors or warnings
                self.update_action_item(package, package_stats)

            PackageData.objects.bulk_create(stats)
            # Remove action items for packages which no longer have associated
            # AppStream hints.
            ActionItem.objects.delete_obsolete_items(
                [self.appstream_action_item_type], all_stats.keys())


class UpdateTransitionsTask(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600

    REJECT_LIST_URL = 'https://ftp-master.debian.org/transitions.yaml'
    PACKAGE_TRANSITION_LIST_URL = (
        'https://release.debian.org/transitions/export/packages.yaml')

    def _get_yaml_resource(self, url, **kwargs):
        """
        Gets the YAML resource at the given URL and returns it as a Python
        object.
        """
        content = get_resource_text(url, **kwargs)
        if content:
            return yaml.safe_load(content)

    def _add_reject_transitions(self, packages):
        """
        Adds the transitions which cause uploads to be rejected to the
        given ``packages`` dict.
        """
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL)
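        # The reject list maps a transition name to a description containing
        # a 'packages' list; every package listed there gets an ongoing,
        # rejecting transition entry.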
        for key, transition in reject_list.items():
            for package in transition['packages']:
                packages.setdefault(package, {})
                packages[package].setdefault(key, {})
                packages[package][key]['reject'] = True
                packages[package][key]['status'] = 'ongoing'

    def _add_package_transition_list(self, packages):
        """
        Adds the ongoing and planned transitions to the given ``packages``
        dict.
        """
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL)

        wanted_transition_statuses = ('ongoing', 'planned')
        for package_info in package_transition_list:
            package_name = package_info['name']
            for transition_name, status in package_info['list']:
                if status not in wanted_transition_statuses:
                    # Skip transitions with an unwanted status
                    continue

                packages.setdefault(package_name, {})
                packages[package_name].setdefault(transition_name, {})
                packages[package_name][transition_name]['status'] = status

    def execute_main(self):
        # Update the relevant resources first
        kwargs = {
            'force_update': self.force_update,
            'only_if_updated': True,
        }
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL, **kwargs)
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL, **kwargs)

        if reject_list is None and package_transition_list is None:
            # Nothing to do - at least one needs to be updated...
            return

        package_transitions = {}
        self._add_reject_transitions(package_transitions)
        self._add_package_transition_list(package_transitions)

        PackageTransition.objects.all().delete()
        # Get the packages which have transitions
        packages = PackageName.objects.filter(
            name__in=package_transitions.keys())
        transitions = []
        for package in packages:
            for transition_name, data in \
                    package_transitions[package.name].items():
                transitions.append(PackageTransition(
                    package=package,
                    transition_name=transition_name,
                    status=data.get('status', None),
                    reject=data.get('reject', False)))

        PackageTransition.objects.bulk_create(transitions)


class UpdateExcusesTask(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-testing-migration'
    ITEM_DESCRIPTION = (
        "The package has not entered testing even though the delay is over")
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/testing-migration-action-item.html'

    class AgeVerdict(Enum):
        PKG_OF_AGE = 0
        PKG_TOO_OLD = 1
        PKG_TOO_YOUNG = 2
        PKG_WO_POLICY = 3

    def initialize(self, *args, **kwargs):
        super(UpdateExcusesTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _adapt_excuse_links(self, excuse):
        """
        If the excuse contains any anchor links, convert them to links to
        Distro Tracker package pages. Otherwise, return the original text
        unmodified.
        """
        re_anchor_href = re.compile(r'^#(.*)$')
        html = soup(excuse, 'html.parser')
        for a_tag in html.findAll('a', {'href': True}):
            href = a_tag['href']
            match = re_anchor_href.match(href)
            if not match:
                continue
            package = match.group(1).split('/')[0]
            a_tag['href'] = package_url(package)

        return str(html)

    def _skip_excuses_item(self, item_text):
        if not item_text:
            return True
        # We ignore these excuses
        if "Section" in item_text or "Maintainer" in item_text:
            return True
        return False

    def _check_age(self, source):
        """Checks the age of the package and compares it to the age
        requirement for migration."""

        if 'policy_info' not in source or 'age' not in source['policy_info']:
            return (self.AgeVerdict.PKG_WO_POLICY, None, None)

        age = source['policy_info']['age']['current-age']
        limit = source['policy_info']['age']['age-requirement']
        if age > limit:
            return (self.AgeVerdict.PKG_TOO_OLD, age, limit)
        elif age < limit:
            return (self.AgeVerdict.PKG_TOO_YOUNG, age, limit)
        else:
            return (self.AgeVerdict.PKG_OF_AGE, age, limit)

    def _extract_problematic(self, source):
        verdict, age, limit = self._check_age(source)

        if verdict == self.AgeVerdict.PKG_TOO_OLD:
            return (source['item-name'], {'age': age, 'limit': limit})

    @staticmethod
    def _make_excuses_check_dependencies(source):
        """Checks the dependencies of the package (blocked-by and
        migrate-after) and returns a list to display."""

        addendum = []

        if 'dependencies' in source:
            blocked_by = source['dependencies'].get('blocked-by', [])
            after = source['dependencies'].get('migrate-after', [])
            after = [
                element
                for element in after
                if element not in blocked_by
            ]
            if blocked_by:
                addendum.append("Blocked by: %s" % (
                    html_package_list(blocked_by),
                ))
            if after:
                addendum.append("Migrates after: %s" % (
                    html_package_list(after),
                ))

        return addendum

    @staticmethod
    def _make_excuses_check_verdict(source):
        """Checks the migration policy verdict of the package and builds an
        excuses message depending on the result."""

        addendum = []

        if 'migration-policy-verdict' in source:
            verdict = source['migration-policy-verdict']
            if verdict == 'REJECTED_BLOCKED_BY_ANOTHER_ITEM':
                addendum.append("Migration status: Blocked. Can't migrate "
                                "due to a non-migratable dependency. Check "
                                "status below."
                                )

        return addendum

    def _make_excuses(self, source):
        """Make the excuses list for a source item using the yaml data it
        contains."""

        excuses = [
            self._adapt_excuse_links(excuse)
            for excuse in source['excuses']
        ]

        # This is the place where we compute some additional
        # messages that should be added to excuses.
        addendum = []

        addendum.extend(self._make_excuses_check_verdict(source))
        addendum.extend(self._make_excuses_check_dependencies(source))

        excuses = addendum + excuses

        if 'is-candidate' in source:
            if not source['is-candidate']:
                excuses.append("Not considered")

        return (
            source['item-name'],
            excuses,
        )

    def _get_excuses_and_problems(self, content):
        """
        Gets the excuses for each package.
        Also finds a list of packages which have not migrated to testing
        after the necessary time has passed.

        :returns: A two-tuple where the first element is a dict mapping
            package names to a list of excuses. The second element is a dict
            mapping package names to problem information. Problem information
            is a dict with the keys ``age`` and ``limit``.
        """
        if 'sources' not in content:
            logger.warning("Invalid format of excuses file")
            return

        sources = content['sources']
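        # Entries whose item-name contains a '/' (e.g. architecture-specific
        # items) are skipped; only plain source items are processed.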
        excuses = [
            self._make_excuses(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problems = [
            self._extract_problematic(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problematic = [p for p in problems if p]
        return dict(excuses), dict(problematic)

    def _create_action_item(self, package, extra_data):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` for the given
        package including the given extra data. The item indicates that there
        is a problem with the package migrating to testing.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        action_item.short_description = self.ITEM_DESCRIPTION
        if package.main_entry:
            query_string = urlencode({'package': package.name})
            extra_data['check_why_url'] = (
                'https://qa.debian.org/excuses.php'
                '?{query_string}'.format(query_string=query_string))

        action_item.extra_data = extra_data
        action_item.save()

    def _remove_obsolete_action_items(self, problematic):
        """
        Remove action items for packages which are no longer problematic.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=problematic.keys())

    def _get_excuses_yaml(self):
        """
        Returns the excuses.yaml content published by debian-release.

        :returns: a dict of excuses or ``None`` if the content in the
            cache is still up to date.
        """
        url = 'https://release.debian.org/britney/excuses.yaml'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        return yaml.safe_load(content)

    def execute_main(self):
        content_lines = self._get_excuses_yaml()
        if not content_lines:
            return

        result = self._get_excuses_and_problems(content_lines)
        if not result:
            return
        package_excuses, problematic = result

        with transaction.atomic():
            # Remove stale excuses data and action items which are not still
            # problematic.
            self._remove_obsolete_action_items(problematic)
            PackageExcuses.objects.all().delete()

            excuses = []
            packages = SourcePackageName.objects.filter(
                name__in=package_excuses.keys())
            packages.prefetch_related('action_items')
            for package in packages:
                excuse = PackageExcuses(
                    package=package,
                    excuses=package_excuses[package.name])
                excuses.append(excuse)
                if package.name in problematic:
                    self._create_action_item(package, problematic[package.name])

            # Create all excuses in a single query
            PackageExcuses.objects.bulk_create(excuses)


class UpdateBuildLogCheckStats(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'debian-build-logcheck'
    ITEM_DESCRIPTION = 'Build log checks report <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/logcheck-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateBuildLogCheckStats, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_buildd_content(self):
        url = 'https://qa.debian.org/bls/logcheck.txt'
        return get_resource_text(url)

    def get_buildd_stats(self):
        content = self._get_buildd_content()
        stats = {}
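        # Each line of logcheck.txt is pipe-separated and starts with the
        # package name, the error count and the warning count, e.g.
        # "foo|2|1|..." (illustrative example).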
        for line in content.splitlines():
            pkg, errors, warnings = line.split("|")[:3]
            try:
                errors, warnings = int(errors), int(warnings)
            except ValueError:
                continue
            stats[pkg] = {
                'errors': errors,
                'warnings': warnings,
            }
        return stats

    def create_action_item(self, package, stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if the build logcheck stats indicate any errors or
        warnings.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)

        errors = stats.get('errors', 0)
        warnings = stats.get('warnings', 0)

        if not errors and not warnings:
            # Remove the previous action item since the package no longer has
            # errors/warnings.
            if action_item is not None:
                action_item.delete()
            return

        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        if action_item.extra_data:
            if action_item.extra_data == stats:
                # Nothing has changed -- do not update the item
                return

        logcheck_url = "https://qa.debian.org/bls/packages/{hash}/{pkg}.html"\
            .format(hash=package.name[0], pkg=package.name)
        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
            action_item.severity = ActionItem.SEVERITY_HIGH
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
            action_item.severity = ActionItem.SEVERITY_HIGH
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')
            action_item.severity = ActionItem.SEVERITY_LOW

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=logcheck_url,
            report=report)
        action_item.extra_data = stats
        action_item.save()

    def execute_main(self):
        # Build a dict with the buildd stats
        stats = self.get_buildd_stats()

        BuildLogCheckStats.objects.all().delete()
        ActionItem.objects.delete_obsolete_items(
            [self.action_item_type], stats.keys())

        packages = SourcePackageName.objects.filter(name__in=stats.keys())
        packages = packages.prefetch_related('action_items')

        logcheck_stats = []
        for package in packages:
            logcheck_stat = BuildLogCheckStats(
                package=package,
                stats=stats[package.name])
            logcheck_stats.append(logcheck_stat)

            self.create_action_item(package, stats[package.name])

        # One SQL query to create all the stats.
        BuildLogCheckStats.objects.bulk_create(logcheck_stats)


class DebianWatchFileScannerUpdate(BaseTask):

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAMES = (
        'new-upstream-version',
        'watch-failure',
    )
    ACTION_ITEM_TEMPLATES = {
        'new-upstream-version': "debian/new-upstream-version-action-item.html",
        'watch-failure': "debian/watch-failure-action-item.html",
    }
    ITEM_DESCRIPTIONS = {
        'new-upstream-version': lambda item: (
            'A new upstream version is available: '
            '<a href="{url}">{version}</a>'.format(
                url=item.extra_data['upstream_url'],
                version=item.extra_data['upstream_version'])),
        'watch-failure': lambda item: (
            'Problems while searching for a new upstream version'),
    }
    ITEM_SEVERITIES = {
        'new-upstream-version': ActionItem.SEVERITY_HIGH,
        'watch-failure': ActionItem.SEVERITY_HIGH,
    }

    def initialize(self, *args, **kwargs):
        super(DebianWatchFileScannerUpdate, self).initialize(*args, **kwargs)
        self.action_item_types = {
            type_name: ActionItemType.objects.create_or_update(
                type_name=type_name,
                full_description_template=self.ACTION_ITEM_TEMPLATES.get(
                    type_name, None))
            for type_name in self.ACTION_ITEM_TYPE_NAMES
        }

    def _get_upstream_status_content(self):
        url = 'https://udd.debian.org/cgi-bin/upstream-status.json.cgi'
        return get_resource_text(url)

    def _remove_obsolete_action_items(self, item_type_name,
                                      non_obsolete_packages):
        """
        Removes any existing :class:`ActionItem` with the given type name,
        except for the packages which should still have such items according
        to the processed stats.
        """
        action_item_type = self.action_item_types[item_type_name]
        ActionItem.objects.delete_obsolete_items(
            item_types=[action_item_type],
            non_obsolete_packages=non_obsolete_packages)

    def get_upstream_status_stats(self, stats):
        """
        Gets the stats from the downloaded data and puts them in the given
        ``stats`` dictionary.
        The keys of the dict are package names.

        :returns: A two-tuple where the first item is a list of packages
            which have new upstream versions and the second is a list of
            packages which have watch failures.
        """
        content = self._get_upstream_status_content()
        dehs_data = None
        if content:
            dehs_data = json.loads(content)
        if not dehs_data:
            return [], []

        all_new_versions, all_failures = [], []
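        # Each DEHS entry is expected to be a dict with at least 'package',
        # 'upstream-version' and 'upstream-url' keys, plus optional 'status',
        # 'warnings' and 'errors' fields which are inspected below.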
1431 for entry in dehs_data: 

1432 package_name = entry['package'] 

1433 stats.setdefault(package_name, {}) 

1434 stats[package_name]['upstream_version'] = entry['upstream-version'] 

1435 stats[package_name]['upstream_url'] = entry['upstream-url'] 

1436 if 'status' in entry and ('Newer version' in entry['status'] or 

1437 'newer package' in entry['status']): 

1438 stats[package_name]['new-upstream-version'] = { 

1439 'upstream_version': entry['upstream-version'], 

1440 'upstream_url': entry['upstream-url'], 

1441 } 

1442 all_new_versions.append(package_name) 

1443 if entry.get('warnings') or entry.get('errors'): 

1444 msg = '{}\n{}'.format( 

1445 entry.get('errors') or '', 

1446 entry.get('warnings') or '', 

1447 ).strip() 

1448 stats[package_name]['watch-failure'] = { 

1449 'warning': msg, 

1450 } 

1451 all_failures.append(package_name) 

1452 

1453 return all_new_versions, all_failures 

1454 

1455 def update_package_info(self, package, stats): 

1456 """ 

1457 Updates upstream information of the given package based on the given 

1458 stats. Upstream data is saved as a :class:`PackageData` within the 

1459 `general` key 

1460 

1461 :param package: The package to which the upstream info should be 

1462 associated. 

1463 :type package: :class:`distro_tracker.core.models.PackageName` 

1464 :param stats: The stats which are used to create the upstream info. 

1465 :type stats: :class:`dict` 

1466 """ 

1467 try: 

1468 watch_data = package.watch_status[0] 

1469 except IndexError: 

1470 watch_data = PackageData( 

1471 package=package, 

1472 key='upstream-watch-status', 

1473 ) 

1474 

1475 watch_data.value = stats 

1476 watch_data.save() 

1477 

1478 def update_action_item(self, item_type, package, stats): 

1479 """ 

1480 Updates the action item of the given type for the given package based 

1481 on the given stats. 

1482 

1483 The severity of the item is defined by the :attr:`ITEM_SEVERITIES` dict. 

1484 

1485 The short descriptions are created by passing the :class:`ActionItem` 

1486 (with extra data already set) to the callables defined in 

1487 :attr:`ITEM_DESCRIPTIONS`. 

1488 

1489 :param item_type: The type of the :class:`ActionItem` that should be 

1490 updated. 

1491 :type item_type: string 

1492 :param package: The package to which this action item should be 

1493 associated. 

1494 :type package: :class:`distro_tracker.core.models.PackageName` 

1495 :param stats: The stats which are used to create the action item. 

1496 :type stats: :class:`dict` 

1497 """ 

1498 action_item = package.get_action_item_for_type(item_type) 

1499 if action_item is None: 

1500 # Create an action item... 

1501 action_item = ActionItem( 

1502 package=package, 

1503 item_type=self.action_item_types[item_type]) 

1504 

1505 if item_type in self.ITEM_SEVERITIES: 1505 ↛ 1507line 1505 didn't jump to line 1507, because the condition on line 1505 was never false

1506 action_item.severity = self.ITEM_SEVERITIES[item_type] 

1507 action_item.extra_data = stats 

1508 action_item.short_description = \ 

1509 self.ITEM_DESCRIPTIONS[item_type](action_item) 

1510 

1511 action_item.save() 

1512 

1513 @transaction.atomic 

1514 def execute_main(self): 

1515 stats = {} 

1516 new_upstream_version, failures = self.get_upstream_status_stats(stats) 

1517 updated_packages_per_type = { 

1518 'new-upstream-version': new_upstream_version, 

1519 'watch-failure': failures, 

1520 } 

1521 

1522 # Remove obsolete action items for each of the categories... 

1523 for item_type, packages in updated_packages_per_type.items(): 

1524 self._remove_obsolete_action_items(item_type, packages) 

1525 

1526 packages = SourcePackageName.objects.filter( 

1527 name__in=stats.keys()) 

1528 filter_qs = PackageData.objects.filter(key='upstream-watch-status') 

1529 packages = packages.prefetch_related( 

1530 'action_items', 

1531 Prefetch('data', queryset=filter_qs, to_attr='watch_status') 

1532 ) 

1533 

1534 # Update action items for each package 

1535 for package in packages: 

1536 for type_name in self.ACTION_ITEM_TYPE_NAMES: 

1537 if type_name in stats[package.name]: 

1538 # method(package, stats[package.name][type_name]) 

1539 self.update_action_item( 

1540 type_name, package, stats[package.name][type_name]) 

1541 

1542 self.update_package_info(package, stats[package.name]) 

1543 

1544 

1545class UpdateSecurityIssuesTask(BaseTask): 
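    """
    Updates Debian security issue (CVE) information for all packages, based
    on the JSON data published by the Debian security tracker, and maintains
    the per-release 'debian-security-issue-in-*' action items.
    """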

1546 

1547 class Scheduler(IntervalScheduler): 

1548 interval = 3600 * 3 

1549 

1550 ACTION_ITEM_TYPE_NAME = 'debian-security-issue-in-{}' 

1551 ACTION_ITEM_TEMPLATE = 'debian/security-issue-action-item.html' 

1552 ITEM_DESCRIPTION_TEMPLATE = { 

1553 'open': '<a href="{url}">{count} security {issue}</a> in {release}', 

1554 'nodsa': 

1555 '<a href="{url}">{count} low-priority security {issue}</a> ' 

1556 'in {release}', 

1557 'none': 'No known security issue in {release}', 

1558 } 

1559 CVE_DATA_URL = 'https://security-tracker.debian.org/tracker/data/json' 

1560 DISTRIBUTIONS_URL = ( 

1561 'https://security-tracker.debian.org/tracker/distributions.json' 

1562 ) 

1563 

1564 def initialize(self, *args, **kwargs): 

1565 super(UpdateSecurityIssuesTask, self).initialize(*args, **kwargs) 

1566 self._action_item_type = {} 

1567 self._issues = None 

1568 self._distributions = None 

1569 

1570 def action_item_type(self, release): 

1571 return self._action_item_type.setdefault( 

1572 release, ActionItemType.objects.create_or_update( 

1573 type_name=self.ACTION_ITEM_TYPE_NAME.format(release), 

1574 full_description_template=self.ACTION_ITEM_TEMPLATE)) 

1575 

1576 def _get_distributions(self): 

1577 if not self._distributions: 

1578 content = get_resource_text(self.DISTRIBUTIONS_URL) 

1579 self._distributions = json.loads(content) 

1580 return self._distributions 

1581 

1582 def _get_support_status(self, release): 

1583 """ 

1584 Return support status of a given release as documented by the 

1585 security team in the security tracker. 

1586 """ 

1587 return self._get_distributions().get(release, {}).get('support', 

1588 'unknown') 

1589 

1590 def _get_issues_content(self): 

1591 if self._issues:

1592 return self._issues 

1593 content = get_resource_text(self.CVE_DATA_URL) 

1594 if content:

1595 self._issues = json.loads(content) 

1596 return self._issues 

1597 

1598 @classmethod 

1599 def _update_stats_with_nodsa_entry(cls, stats, nodsa_entry, 

1600 entry_id, description): 

1601 stats['nodsa'] += 1 

1602 

1603 nodsa_details = {'description': description, 

1604 'nodsa': nodsa_entry.get('nodsa', ''), 

1605 'nodsa_reason': nodsa_entry.get('nodsa_reason', '') 

1606 } 

1607 

1608 nodsa_reason = nodsa_details['nodsa_reason'] 

1609 if nodsa_reason == '': 

1610 nodsa_details['needs_triaging'] = True 

1611 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1612 nodsa_details 

1613 elif nodsa_reason == 'postponed':

1614 nodsa_details['fixed_via_stable_update'] = True 

1615 stats['nodsa_maintainer_to_handle_details'][entry_id] = \ 

1616 nodsa_details 

1617 elif nodsa_reason == 'ignored':

1618 stats['nodsa_ignored_details'][entry_id] = nodsa_details 

1619 

1620 @classmethod 

1621 def get_issues_summary(cls, issues): 

1622 result = {} 

1623 for issue_id, issue_data in issues.items(): 

1624 for release, data in issue_data['releases'].items(): 

1625 stats = result.setdefault(release, { 

1626 'open': 0, 

1627 'open_details': {}, 

1628 'nodsa': 0, 

1629 'unimportant': 0, 

1630 'next_point_update_details': {}, 

1631 'nodsa_maintainer_to_handle_details': {}, 

1632 'nodsa_ignored_details': {}, 

1633 }) 

1634 description = issue_data.get('description', '') 

1635 if (data.get('status', '') == 'resolved' or 

1636 data.get('urgency', '') == 'end-of-life'): 

1637 continue 

1638 elif data.get('urgency', '') == 'unimportant': 

1639 stats['unimportant'] += 1 

1640 elif data.get('next_point_update', False): 

1641 stats['next_point_update_details'][issue_id] = \ 

1642 {'description': description} 

1643 elif data.get('nodsa', False) is not False: 

1644 cls._update_stats_with_nodsa_entry(stats, 

1645 data, issue_id, 

1646 description 

1647 ) 

1648 else: 

1649 stats['open'] += 1 

1650 stats['open_details'][issue_id] = \ 

1651 {'description': description} 

1652 

1653 return result 

1654 

1655 @classmethod 

1656 def get_issues_stats(cls, content): 

1657 """ 

1658 Gets package issue stats from Debian's security tracker. 

1659 """ 

1660 stats = {} 

1661 for pkg, issues in content.items(): 

1662 stats[pkg] = cls.get_issues_summary(issues) 

1663 return stats 

1664 

1665 def _get_short_description(self, key, action_item): 

1666 count = action_item.extra_data['security_issues_count'] 

1667 url = 'https://security-tracker.debian.org/tracker/source-package/{}' 

1668 return self.ITEM_DESCRIPTION_TEMPLATE[key].format( 

1669 count=count, 

1670 issue='issues' if count > 1 else 'issue', 

1671 release=action_item.extra_data.get('release', 'sid'), 

1672 url=url.format(action_item.package.name), 

1673 ) 

1674 

1675 def update_action_item(self, stats, action_item): 

1676 """ 

1677 Updates the ``debian-security-issue`` action item based on the 

1678 security issues. 

1679 """ 

1680 

1681 security_issues_count = stats['open'] + stats['nodsa'] 

1682 action_item.extra_data['security_issues_count'] = security_issues_count 

1683 action_item.extra_data['support_status'] = ( 

1684 self._get_support_status(action_item.extra_data['release']) 

1685 ) 

1686 

1687 for base_key in ['open', 

1688 'next_point_update', 

1689 'nodsa_maintainer_to_handle', 

1690 'nodsa_ignored']: 

1691 details_key = base_key + '_details' 

1692 count_key = base_key + '_count' 

1693 

1694 action_item.extra_data[details_key] = stats[details_key] 

1695 action_item.extra_data[count_key] = len(stats[details_key]) 

1696 

1697 # nodsa_next_point_update / nodsa_ignored_details are displayed 

1698 # only if there is anything else to show 

1699 nodsa_create_action = (stats['nodsa'] - 

1700 len(stats['nodsa_ignored_details'])) > 0 

1701 

1702 if stats['open']: 

1703 action_item.severity = ActionItem.SEVERITY_HIGH 

1704 action_item.short_description = \ 

1705 self._get_short_description('open', action_item) 

1706 elif nodsa_create_action: 

1707 action_item.severity = ActionItem.SEVERITY_LOW 

1708 action_item.short_description = \ 

1709 self._get_short_description('nodsa', action_item) 

1710 else: 

1711 action_item.severity = ActionItem.SEVERITY_WISHLIST 

1712 action_item.short_description = \ 

1713 self._get_short_description('none', action_item) 

1714 

1715 @classmethod 

1716 def generate_package_data(cls, issues): 

1717 return { 

1718 'details': issues, 

1719 'stats': cls.get_issues_summary(issues), 

1720 'checksum': get_data_checksum(issues) 

1721 } 

1722 

1723 def want_action_item(self, pkgdata, release): 

1724 stats = pkgdata.value.get('stats', {}).get(release) 

1725 if stats is None:

1726 return False 

1727 

1728 supported_by = self._get_support_status(release) 

1729 if supported_by == "end-of-life": 

1730 return False 

1731 elif supported_by == "security": 

1732 count = stats.get('open', 0) + stats.get('nodsa', 0) 

1733 else: 

1734 count = stats.get('open', 0) 

1735 

1736 if count == 0: 

1737 return False 

1738 

1739 return True 

1740 

1741 def process_pkg_action_items(self, pkgdata, existing_action_items): 

1742 release_ai = {} 

1743 to_add = [] 

1744 to_update = [] 

1745 to_drop = [] 

1746 global_stats = pkgdata.value.get('stats', {}) 

1747 for ai in existing_action_items: 

1748 release = ai.extra_data['release'] 

1749 release_ai[release] = ai 

1750 for release, stats in global_stats.items(): 

1751 ai = release_ai.get(release) 

1752 

1753 if self.want_action_item(pkgdata, release): 

1754 if ai: 

1755 to_update.append(ai) 

1756 else: 

1757 ai = ActionItem( 

1758 item_type=self.action_item_type(release), 

1759 package=pkgdata.package, 

1760 extra_data={'release': release} 

1761 ) 

1762 to_add.append(ai) 

1763 self.update_action_item(stats, ai) 

1764 else: 

1765 if ai: 

1766 to_drop.append(ai) 

1767 

1768 return to_add, to_update, to_drop 

1769 

1770 def execute_main(self): 

1771 # Fetch all debian-security PackageData 

1772 all_pkgdata = PackageData.objects.select_related( 

1773 'package').filter(key='debian-security').only( 

1774 'package__name', 'value') 

1775 

1776 all_data = {} 

1777 packages = {} 

1778 for pkgdata in all_pkgdata: 

1779 all_data[pkgdata.package.name] = pkgdata 

1780 packages[pkgdata.package.name] = pkgdata.package 

1781 

1782 # Fetch all debian-security ActionItems 

1783 pkg_action_items = collections.defaultdict(lambda: []) 

1784 all_action_items = ActionItem.objects.select_related( 

1785 'package').filter( 

1786 item_type__type_name__startswith='debian-security-issue-in-') 

1787 for action_item in all_action_items: 

1788 pkg_action_items[action_item.package.name].append(action_item) 

1789 

1790 # Check for changes on distributions.json 

1791 distributions_checksum = get_data_checksum(self._get_distributions()) 

1792 if self.data.get('distributions_checksum') != distributions_checksum: 

1793 # New distributions.json, force update all action items 

1794 self.force_update = True 

1795 self.data['distributions_checksum'] = distributions_checksum 

1796 

1797 # Scan the security tracker data 

1798 content = self._get_issues_content() 

1799 to_add = [] 

1800 to_update = [] 

1801 for pkgname, issues in content.items(): 

1802 if pkgname in all_data: 

1803 # Check if we need to update the existing data 

1804 checksum = get_data_checksum(issues) 

1805 if not self.force_update and \ 

1806 all_data[pkgname].value.get('checksum', '') == checksum: 

1807 continue 

1808 # Update the data 

1809 pkgdata = all_data[pkgname] 

1810 pkgdata.value = self.generate_package_data(issues) 

1811 to_update.append(pkgdata) 

1812 else: 

1813 # Add data for a new package 

1814 package, _ = PackageName.objects.get_or_create(name=pkgname) 

1815 to_add.append( 

1816 PackageData( 

1817 package=package, 

1818 key='debian-security', 

1819 value=self.generate_package_data(issues) 

1820 ) 

1821 ) 

1822 # Process action items 

1823 ai_to_add = [] 

1824 ai_to_update = [] 

1825 ai_to_drop = [] 

1826 for pkgdata in itertools.chain(to_add, to_update): 

1827 add, update, drop = self.process_pkg_action_items( 

1828 pkgdata, pkg_action_items[pkgdata.package.name]) 

1829 ai_to_add.extend(add) 

1830 ai_to_update.extend(update) 

1831 ai_to_drop.extend(drop) 

1832 # Sync in database 

1833 with transaction.atomic(): 

1834 # Delete obsolete data 

1835 PackageData.objects.filter( 

1836 key='debian-security').exclude( 

1837 package__name__in=content.keys()).delete() 

1838 ActionItem.objects.filter( 

1839 item_type__type_name__startswith='debian-security-issue-in-' 

1840 ).exclude(package__name__in=content.keys()).delete() 

1841 ActionItem.objects.filter( 

1842 item_type__type_name__startswith='debian-security-issue-in-', 

1843 id__in=[ai.id for ai in ai_to_drop]).delete() 

1844 # Add new entries 

1845 PackageData.objects.bulk_create(to_add) 

1846 ActionItem.objects.bulk_create(ai_to_add) 

1847 # Update existing entries 

1848 for pkgdata in to_update: 

1849 pkgdata.save() 

1850 for ai in ai_to_update: 

1851 ai.save() 

1852 

1853 

1854class UpdatePiuPartsTask(BaseTask): 

1855 """ 

1856 Retrieves the piuparts stats for all the suites defined in the 

1857 :data:`distro_tracker.project.local_settings.DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES` 

1858 """ 

1859 

1860 class Scheduler(IntervalScheduler): 

1861 interval = 3600 * 3 

1862 

1863 ACTION_ITEM_TYPE_NAME = 'debian-piuparts-test-fail' 

1864 ACTION_ITEM_TEMPLATE = 'debian/piuparts-action-item.html' 

1865 ITEM_DESCRIPTION = 'piuparts found (un)installation error(s)' 

1866 

1867 def initialize(self, *args, **kwargs): 

1868 super(UpdatePiuPartsTask, self).initialize(*args, **kwargs) 

1869 self.action_item_type = ActionItemType.objects.create_or_update( 

1870 type_name=self.ACTION_ITEM_TYPE_NAME, 

1871 full_description_template=self.ACTION_ITEM_TEMPLATE) 

1872 

1873 def _get_piuparts_content(self, suite): 

1874 """ 

1875 :returns: The content of the piuparts report for the given suite,

1876 or ``None`` if there is no data for the particular suite.

1877 """ 

1878 url = 'https://piuparts.debian.org/{suite}/sources.txt' 

1879 return get_resource_text(url.format(suite=suite)) 

1880 

1881 def get_piuparts_stats(self): 

1882 suites = getattr(settings, 'DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES', []) 

1883 failing_packages = {} 

1884 for suite in suites: 

1885 content = self._get_piuparts_content(suite) 

1886 if content is None: 

1887 logger.info("There is no piuparts for suite: %s", suite) 

1888 continue 

1889 
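            # Each line of sources.txt is expected to look like
            # "<source package>: <status>"; only packages whose status is
            # "fail" are recorded (illustrative example: "foo: fail").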

1890 for line in content.splitlines(): 

1891 package_name, status = line.split(':', 1) 

1892 package_name, status = package_name.strip(), status.strip() 

1893 if status == 'fail': 

1894 failing_packages.setdefault(package_name, []) 

1895 failing_packages[package_name].append(suite) 

1896 

1897 return failing_packages 

1898 

1899 def create_action_item(self, package, suites): 

1900 """ 

1901 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

1902 instance for the package based on the list of suites in which the 

1903 piuparts installation test failed. 

1904 """ 

1905 action_item = package.get_action_item_for_type(self.action_item_type) 

1906 if action_item is None: 

1907 action_item = ActionItem( 

1908 package=package, 

1909 item_type=self.action_item_type, 

1910 short_description=self.ITEM_DESCRIPTION) 

1911 

1912 if action_item.extra_data: 

1913 existing_items = action_item.extra_data.get('suites', []) 

1914 if list(sorted(existing_items)) == list(sorted(suites)): 

1915 # No need to update this item 

1916 return 

1917 action_item.extra_data = { 

1918 'suites': suites, 

1919 } 

1920 action_item.save() 

1921 

1922 def execute_main(self): 

1923 failing_packages = self.get_piuparts_stats() 

1924 

1925 ActionItem.objects.delete_obsolete_items( 

1926 item_types=[self.action_item_type], 

1927 non_obsolete_packages=failing_packages.keys()) 

1928 

1929 packages = SourcePackageName.objects.filter( 

1930 name__in=failing_packages.keys()) 

1931 packages = packages.prefetch_related('action_items') 

1932 

1933 for package in packages: 

1934 self.create_action_item(package, failing_packages[package.name]) 

1935 

1936 

1937class UpdateUbuntuStatsTask(BaseTask): 

1938 """ 

1939 The task updates Ubuntu stats for packages. These stats are displayed in a 

1940 separate panel. 

1941 """ 

1942 

1943 class Scheduler(IntervalScheduler): 

1944 interval = 3600 * 3 

1945 

1946 def initialize(self, *args, **kwargs): 

1947 super(UpdateUbuntuStatsTask, self).initialize(*args, **kwargs) 

1948 

1949 def _get_versions_content(self): 

1950 url = 'https://udd.debian.org/cgi-bin/ubuntupackages.cgi' 

1951 return get_resource_text(url) 

1952 

1953 def get_ubuntu_versions(self): 

1954 """ 

1955 Retrieves the Ubuntu package versions. 

1956 

1957 :returns: A dict mapping package names to Ubuntu versions. 

1958 """ 

1959 content = self._get_versions_content() 

1960 

1961 package_versions = {} 

1962 for line in content.splitlines(): 

1963 package, version = line.split(' ', 1) 

1964 version = version.strip() 

1965 package_versions[package] = version 

1966 

1967 return package_versions 

1968 

1969 def _get_bug_stats_content(self): 

1970 url = 'https://udd.debian.org/cgi-bin/ubuntubugs.cgi' 

1971 return get_resource_text(url) 

1972 

1973 def get_ubuntu_bug_stats(self): 

1974 """ 

1975 Retrieves the Ubuntu bug stats for all packages. Bug stats contain the

1976 count of bugs and the count of patches. 

1977 

1978 :returns: A dict mapping package names to a dict of package stats. 

1979 """ 

1980 content = self._get_bug_stats_content() 

1981 

1982 bug_stats = {} 
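        # Each line is expected to be pipe-separated as
        # "<package>|<bug count>|<patch count>" (e.g. "foo|3|1", illustrative);
        # lines whose counts are not integers are skipped.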

1983 for line in content.splitlines(): 

1984 package_name, bug_count, patch_count = line.split("|", 2) 

1985 try: 

1986 bug_count, patch_count = int(bug_count), int(patch_count) 

1987 except ValueError: 

1988 continue 

1989 bug_stats[package_name] = { 

1990 'bug_count': bug_count, 

1991 'patch_count': patch_count, 

1992 } 

1993 

1994 return bug_stats 

1995 

1996 def _get_ubuntu_patch_diff_content(self): 

1997 url = 'https://patches.ubuntu.com/PATCHES' 

1998 return get_resource_text(url) 

1999 

2000 def get_ubuntu_patch_diffs(self): 

2001 """ 

2002 Retrieves the Ubuntu patch diff information. The information consists 

2003 of the diff URL and the version of the Ubuntu package to which the 

2004 diff belongs.

2005 

2006 :returns: A dict mapping package names to diff information. 

2007 """ 

2008 content = self._get_ubuntu_patch_diff_content() 

2009 

2010 patch_diffs = {} 

2011 re_diff_version = re.compile(r'_(\S+)\.patch') 
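        # Each line maps a package name to its patch URL; the Ubuntu version
        # is recovered from the "_<version>.patch" suffix of that URL
        # (illustrative example: "foo .../f/foo/foo_1.0-1ubuntu1.patch").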

2012 for line in content.splitlines(): 

2013 package_name, diff_url = line.split(' ', 1) 

2014 # Extract the version of the package from the diff url 

2015 match = re_diff_version.search(diff_url) 

2016 if not match:

2017 # Invalid URL: no version 

2018 continue 

2019 version = match.group(1) 

2020 patch_diffs[package_name] = { 

2021 'version': version, 

2022 'diff_url': diff_url 

2023 } 

2024 

2025 return patch_diffs 

2026 

2027 def execute_main(self): 

2028 package_versions = self.get_ubuntu_versions() 

2029 bug_stats = self.get_ubuntu_bug_stats() 

2030 patch_diffs = self.get_ubuntu_patch_diffs() 

2031 

2032 obsolete_ubuntu_pkgs = UbuntuPackage.objects.exclude( 

2033 package__name__in=package_versions.keys()) 

2034 obsolete_ubuntu_pkgs.delete() 

2035 

2036 packages = PackageName.objects.filter(name__in=package_versions.keys()) 

2037 packages = packages.prefetch_related('ubuntu_package') 

2038 

2039 for package in packages: 

2040 version = package_versions[package.name] 

2041 bugs = bug_stats.get(package.name, None) 

2042 diff = patch_diffs.get(package.name, None) 

2043 

2044 try: 

2045 ubuntu_package = package.ubuntu_package 

2046 ubuntu_package.version = version 

2047 ubuntu_package.bugs = bugs 

2048 ubuntu_package.patch_diff = diff 

2049 ubuntu_package.save() 

2050 except UbuntuPackage.DoesNotExist: 

2051 ubuntu_package = UbuntuPackage.objects.create( 

2052 package=package, 

2053 version=version, 

2054 bugs=bugs, 

2055 patch_diff=diff) 

2056 

2057 

2058class UpdateWnppStatsTask(BaseTask): 

2059 """ 

2060 The task updates the WNPP bugs for all packages. 

2061 """ 

2062 

2063 class Scheduler(IntervalScheduler): 

2064 interval = 3600 * 3 

2065 

2066 ACTION_ITEM_TYPE_NAME = 'debian-wnpp-issue' 

2067 ACTION_ITEM_TEMPLATE = 'debian/wnpp-action-item.html' 

2068 ITEM_DESCRIPTION = '<a href="{url}">{wnpp_type}: {wnpp_msg}</a>' 

2069 

2070 def initialize(self, *args, **kwargs): 

2071 super(UpdateWnppStatsTask, self).initialize(*args, **kwargs) 

2072 self.action_item_type = ActionItemType.objects.create_or_update( 

2073 type_name=self.ACTION_ITEM_TYPE_NAME, 

2074 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2075 

2076 def get_wnpp_stats(self): 

2077 """ 

2078 Retrieves and parses the wnpp stats for all packages. WNPP stats 

2079 include the WNPP type and the BTS bug id. 

2080 

2081 :returns: A dict mapping package names to wnpp stats. 

2082 """ 

2083 url = 'https://qa.debian.org/data/bts/wnpp_rm' 

2084 content = get_resource_text(url, only_if_updated=True) 

2085 if content is None:

2086 return 

2087 

2088 wnpp_stats = {} 
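        # Each relevant line starts with "<package>: <wnpp type> <bug number>"
        # followed by a "|"-separated remainder (illustrative example:
        # "foo: O 123456 | ..."); entries with a malformed bug number are
        # skipped.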

2089 for line in content.splitlines(): 

2090 line = line.strip() 

2091 try: 

2092 package_name, wnpp_type, bug_id = line.split('|')[0].split() 

2093 bug_id = int(bug_id) 

2094 except ValueError: 

2095 # Badly formatted bug number 

2096 continue 

2097 # Strip the colon from the end of the package name 

2098 package_name = package_name[:-1] 

2099 

2100 wnpp_stats[package_name] = { 

2101 'wnpp_type': wnpp_type, 

2102 'bug_id': bug_id, 

2103 } 

2104 

2105 return wnpp_stats 

2106 

2107 def update_action_item(self, package, stats): 

2108 """ 

2109 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2110 instance for the given type indicating that the package has a WNPP 

2111 issue. 

2112 """ 

2113 action_item = package.get_action_item_for_type(self.action_item_type) 

2114 if not action_item: 

2115 action_item = ActionItem( 

2116 package=package, 

2117 item_type=self.action_item_type) 

2118 

2119 # Check if the stats have actually been changed 

2120 if action_item.extra_data: 

2121 if action_item.extra_data.get('wnpp_info', None) == stats: 

2122 # Nothing to do -- still the same data

2123 return 

2124 

2125 # Update the data since something has changed 

2126 try: 

2127 release = package.main_entry.repository.suite or \ 

2128 package.main_entry.repository.codename 

2129 except AttributeError: 

2130 release = None 

2131 

2132 msgs = { 

2133 'O': "This package has been orphaned and needs a maintainer.", 

2134 'ITA': "Someone intends to adopt this package.", 

2135 'RFA': "The maintainer wants to pass over package maintenance.",

2136 'RFH': "The maintainer is looking for help with this package.", 

2137 'ITP': "Someone is planning to reintroduce this package.", 

2138 'RFP': "There is a request to reintroduce this package.", 

2139 'RM': "This package has been requested to be removed.", 

2140 'RFS': "A sponsor is needed to update this package.", 

2141 '?': "The WNPP database contains an entry for this package." 

2142 } 

2143 wnpp_type = stats['wnpp_type'] 

2144 try: 

2145 wnpp_msg = msgs[wnpp_type] 

2146 except KeyError: 

2147 wnpp_msg = msgs['?'] 

2148 

2149 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2150 url='https://bugs.debian.org/{}'.format(stats['bug_id']), 

2151 wnpp_type=wnpp_type, wnpp_msg=wnpp_msg) 

2152 action_item.extra_data = { 

2153 'wnpp_info': stats, 

2154 'release': release, 

2155 } 

2156 action_item.save() 

2157 

2158 def update_depneedsmaint_action_item(self, package_needs_maintainer, stats): 

2159 short_description_template = \ 

2160 'Depends on packages which need a new maintainer' 

2161 package_needs_maintainer.get_absolute_url() 

2162 action_item_type = ActionItemType.objects.create_or_update( 

2163 type_name='debian-depneedsmaint', 

2164 full_description_template='debian/depneedsmaint-action-item.html') 

2165 dependencies = SourcePackageDeps.objects.filter( 

2166 dependency=package_needs_maintainer) 

2167 for dependency in dependencies:

2168 package = dependency.source 

2169 action_item = package.get_action_item_for_type(action_item_type) 

2170 if not action_item: 

2171 action_item = ActionItem( 

2172 package=package, 

2173 item_type=action_item_type, 

2174 extra_data={}) 

2175 

2176 pkgdata = { 

2177 'bug': stats['bug_id'], 

2178 'details': dependency.details, 

2179 } 

2180 

2181 if (action_item.extra_data.get(package_needs_maintainer.name, {}) == 

2182 pkgdata): 

2183 # Nothing has changed 

2184 continue 

2185 

2186 action_item.short_description = short_description_template 

2187 action_item.extra_data[package_needs_maintainer.name] = pkgdata 

2188 

2189 action_item.save() 

2190 

2191 @transaction.atomic 

2192 def execute_main(self): 

2193 wnpp_stats = self.get_wnpp_stats() 

2194 if wnpp_stats is None:

2195 # Nothing to do: cached content up to date 

2196 return 

2197 

2198 ActionItem.objects.delete_obsolete_items( 

2199 item_types=[self.action_item_type], 

2200 non_obsolete_packages=wnpp_stats.keys()) 

2201 # Remove obsolete action items for packages whose dependencies need a 

2202 # new maintainer. 

2203 packages_need_maintainer = [] 

2204 for name, stats in wnpp_stats.items(): 

2205 if stats['wnpp_type'] in ('O', 'RFA'): 

2206 packages_need_maintainer.append(name) 

2207 packages_depneeds_maint = [ 

2208 package.name for package in SourcePackageName.objects.filter( 

2209 source_dependencies__dependency__name__in=packages_need_maintainer) # noqa 

2210 ] 

2211 ActionItem.objects.delete_obsolete_items( 

2212 item_types=[ 

2213 ActionItemType.objects.get_or_create( 

2214 type_name='debian-depneedsmaint')[0], 

2215 ], 

2216 non_obsolete_packages=packages_depneeds_maint) 

2217 

2218 # Drop all reverse references 

2219 for ai in ActionItem.objects.filter(

2220 item_type__type_name='debian-depneedsmaint'): 

2221 ai.extra_data = {} 

2222 ai.save() 

2223 

2224 packages = SourcePackageName.objects.filter(name__in=wnpp_stats.keys()) 

2225 packages = packages.prefetch_related('action_items') 

2226 

2227 for package in packages: 

2228 stats = wnpp_stats[package.name] 

2229 self.update_action_item(package, stats) 

2230 # Update action items for packages which depend on this one to 

2231 # indicate that a dependency needs a new maintainer. 

2232 if package.name in packages_need_maintainer: 

2233 self.update_depneedsmaint_action_item(package, stats) 

2234 

2235 

2236class UpdateNewQueuePackages(BaseTask): 

2237 """ 

2238 Updates the versions of source packages found in the NEW queue. 

2239 """ 

2240 

2241 class Scheduler(IntervalScheduler): 

2242 interval = 3600 

2243 

2244 DATA_KEY = 'debian-new-queue-info' 

2245 

2246 def initialize(self, *args, **kwargs): 

2247 super(UpdateNewQueuePackages, self).initialize(*args, **kwargs) 

2248 

2249 def extract_package_info(self, content): 

2250 """ 

2251 Extracts the package information from the content of the NEW queue. 

2252 

2253 :returns: A dict mapping package names to a dict mapping the 

2254 distribution name in which the package is found to the version 

2255 information for the most recent version of the package in the dist. 

2256 """ 

2257 packages = {} 

2258 for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()): 

2259 necessary_fields = ('Source', 'Queue', 'Version', 'Distribution') 

2260 if not all(field in stanza for field in necessary_fields): 

2261 continue 

2262 if stanza['Queue'] != 'new':

2263 continue 

2264 

2265 versions = stanza['Version'].split() 

2266 # Save only the most recent version 

2267 version = max(versions, key=lambda x: AptPkgVersion(x)) 

2268 

2269 package_name = stanza['Source'] 

2270 pkginfo = packages.setdefault(package_name, {}) 

2271 distribution = stanza['Distribution'] 

2272 if distribution in pkginfo: 

2273 current_version = pkginfo[distribution]['version'] 

2274 if debian_support.version_compare(version, current_version) < 0: 

2275 # The already saved version is more recent than this one. 

2276 continue 

2277 

2278 pkginfo[distribution] = { 

2279 'version': version, 

2280 } 

2281 

2282 return packages 

2283 

2284 def _get_new_content(self): 

2285 url = 'https://ftp-master.debian.org/new.822' 

2286 return get_resource_text(url, force_update=self.force_update, 

2287 only_if_updated=True) 

2288 

2289 def execute_main(self): 

2290 content = self._get_new_content() 

2291 if content is None:

2292 return 

2293 

2294 all_package_info = self.extract_package_info(content) 

2295 

2296 packages = SourcePackageName.objects.filter( 

2297 name__in=all_package_info.keys()) 

2298 

2299 with transaction.atomic(): 

2300 # Drop old entries 

2301 PackageData.objects.filter(key=self.DATA_KEY).delete() 

2302 # Prepare current entries 

2303 data = [] 

2304 for package in packages: 

2305 new_queue_info = PackageData( 

2306 key=self.DATA_KEY, 

2307 package=package, 

2308 value=all_package_info[package.name]) 

2309 data.append(new_queue_info) 

2310 # Bulk create them 

2311 PackageData.objects.bulk_create(data) 

2312 

2313 

2314class UpdateAutoRemovalsStatsTask(BaseTask): 

2315 """ 

2316 A task for updating autoremovals information on all packages. 

2317 """ 

2318 

2319 class Scheduler(IntervalScheduler): 

2320 interval = 3600 

2321 

2322 ACTION_ITEM_TYPE_NAME = 'debian-autoremoval' 

2323 ACTION_ITEM_TEMPLATE = 'debian/autoremoval-action-item.html' 

2324 ITEM_DESCRIPTION = ('Marked for autoremoval on {removal_date}' + 

2325 '{dependencies}: {bugs}') 

2326 

2327 def initialize(self, *args, **kwargs): 

2328 super(UpdateAutoRemovalsStatsTask, self).initialize(*args, **kwargs) 

2329 self.action_item_type = ActionItemType.objects.create_or_update( 

2330 type_name=self.ACTION_ITEM_TYPE_NAME, 

2331 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2332 

2333 def get_autoremovals_stats(self): 

2334 """ 

2335 Retrieves and parses the autoremoval stats for all packages. 

2336 Autoremoval stats include the BTS bug ids.

2337 

2338 :returns: A dict mapping package names to autoremoval stats. 

2339 """ 

2340 content = get_resource_text( 

2341 'https://udd.debian.org/cgi-bin/autoremovals.yaml.cgi', 

2342 force_update=self.force_update, 

2343 only_if_updated=True 

2344 ) 

2345 if content:

2346 return yaml.safe_load(content) 

2347 

2348 def update_action_item(self, package, stats): 

2349 """ 

2350 Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>` 

2351 instance for the given type indicating that the package has an 

2352 autoremoval issue. 

2353 """ 

2354 action_item = package.get_action_item_for_type(self.action_item_type) 

2355 if not action_item:

2356 action_item = ActionItem( 

2357 package=package, 

2358 item_type=self.action_item_type, 

2359 severity=ActionItem.SEVERITY_HIGH) 

2360 

2361 bugs_dependencies = stats.get('bugs_dependencies', []) 

2362 buggy_dependencies = stats.get('buggy_dependencies', []) 

2363 reverse_dependencies = stats.get('rdeps', []) 

2364 all_bugs = stats['bugs'] + bugs_dependencies 

2365 link = '<a href="https://bugs.debian.org/{}">#{}</a>' 

2366 removal_date = stats['removal_date'].strftime('%d %B') 

2367 if isinstance(removal_date, bytes):

2368 removal_date = removal_date.decode('utf-8', 'ignore') 

2369 

2370 action_item.short_description = self.ITEM_DESCRIPTION.format( 

2371 removal_date=removal_date, 

2372 dependencies=(' due to ' + html_package_list( 

2373 buggy_dependencies) if buggy_dependencies else ''), 

2374 bugs=', '.join(link.format(bug, bug) for bug in all_bugs)) 

2375 

2376 # datetime objects are not JSON-serializable, convert them ourselves 

2377 for key in stats.keys(): 

2378 if hasattr(stats[key], 'strftime'): 

2379 stats[key] = stats[key].strftime('%a %d %b %Y') 

2380 

2381 action_item.extra_data = { 

2382 'stats': stats, 

2383 'removal_date': stats['removal_date'], 

2384 'version': stats.get('version', ''), 

2385 'bugs': ', '.join(link.format(bug, bug) for bug in stats['bugs']), 

2386 'bugs_dependencies': ', '.join( 

2387 link.format(bug, bug) for bug in bugs_dependencies), 

2388 'buggy_dependencies': 

2389 html_package_list(buggy_dependencies), 

2390 'reverse_dependencies': 

2391 html_package_list(reverse_dependencies), 

2392 'number_rdeps': len(reverse_dependencies)} 

2393 action_item.save() 

2394 

2395 def execute_main(self): 

2396 autoremovals_stats = self.get_autoremovals_stats() 

2397 if autoremovals_stats is None:

2398 # Nothing to do: cached content up to date 

2399 return 

2400 

2401 ActionItem.objects.delete_obsolete_items( 

2402 item_types=[self.action_item_type], 

2403 non_obsolete_packages=autoremovals_stats.keys()) 

2404 

2405 packages = SourcePackageName.objects.filter( 

2406 name__in=autoremovals_stats.keys()) 

2407 packages = packages.prefetch_related('action_items') 

2408 

2409 for package in packages: 

2410 self.update_action_item(package, autoremovals_stats[package.name]) 

2411 

2412 

2413class UpdatePackageScreenshotsTask(BaseTask): 

2414 """ 

2415 Check if a screenshot exists on screenshots.debian.net, and add a 

2416 key to PackageData if it does. 

2417 """ 

2418 

2419 class Scheduler(IntervalScheduler): 

2420 interval = 3600 * 24 

2421 

2422 DATA_KEY = 'screenshots' 

2423 

2424 def _get_screenshots(self): 

2425 url = 'https://screenshots.debian.net/json/packages' 

2426 content = get_resource_text(url, force_update=self.force_update, 

2427 only_if_updated=True) 

2428 if content is None:

2429 return 

2430 

2431 data = json.loads(content) 

2432 return data 

2433 

2434 def execute_main(self): 

2435 content = self._get_screenshots() 

2436 if content is None:

2437 return 

2438 

2439 packages_with_screenshots = [] 

2440 for item in content['packages']: 

2441 try: 

2442 package = SourcePackageName.objects.get(name=item['name']) 

2443 packages_with_screenshots.append(package) 

2444 except SourcePackageName.DoesNotExist: 

2445 pass 

2446 

2447 with transaction.atomic(): 

2448 PackageData.objects.filter(key='screenshots').delete() 

2449 

2450 data = [] 

2451 for package in packages_with_screenshots: 

2452 try: 

2453 screenshot_info = package.data.get(key=self.DATA_KEY) 

2454 screenshot_info.value['screenshots'] = 'true' 

2455 except PackageData.DoesNotExist: 

2456 screenshot_info = PackageData( 

2457 key=self.DATA_KEY, 

2458 package=package, 

2459 value={'screenshots': 'true'}) 

2460 

2461 data.append(screenshot_info) 

2462 

2463 PackageData.objects.bulk_create(data) 

2464 

2465 

2466class UpdateBuildReproducibilityTask(BaseTask): 
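    """
    Fetches build reproducibility statuses from tests.reproducible-builds.org
    and raises an action item for packages that fail to build or do not build
    reproducibly; the raw status is also stored as 'reproducibility' data.
    """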

2467 

2468 class Scheduler(IntervalScheduler): 

2469 interval = 3600 * 6 

2470 

2471 BASE_URL = 'https://tests.reproducible-builds.org' 

2472 ACTION_ITEM_TYPE_NAME = 'debian-build-reproducibility' 

2473 ACTION_ITEM_TEMPLATE = 'debian/build-reproducibility-action-item.html' 

2474 ITEM_DESCRIPTION = { 

2475 'blacklisted': '<a href="{url}">Blacklisted</a> from build ' 

2476 'reproducibility testing', 

2477 'FTBFS': '<a href="{url}">Fails to build</a> during reproducibility ' 

2478 'testing', 

2479 'reproducible': None, 

2480 'FTBR': '<a href="{url}">Does not build reproducibly</a> ' 

2481 'during testing', 

2482 '404': None, 

2483 'not for us': None, 

2484 } 

2485 

2486 def initialize(self, *args, **kwargs): 

2487 super(UpdateBuildReproducibilityTask, self).initialize(*args, **kwargs) 

2488 self.action_item_type = ActionItemType.objects.create_or_update( 

2489 type_name=self.ACTION_ITEM_TYPE_NAME, 

2490 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2491 

2492 def get_build_reproducibility(self): 

2493 url = '{}/debian/reproducible-tracker.json'.format(self.BASE_URL) 

2494 content = get_resource_text(url, force_update=self.force_update, 

2495 only_if_updated=True) 

2496 if content is None:

2497 return 

2498 

2499 reproducibilities = json.loads(content) 

2500 packages = {} 

2501 for item in reproducibilities: 

2502 package = item['package'] 

2503 status = item['status'] 

2504 missing = package not in packages 

2505 important = self.ITEM_DESCRIPTION.get(status) is not None 

2506 if important or missing:

2507 packages[package] = status 

2508 

2509 return packages 

2510 

2511 def update_action_item(self, package, status): 

2512 description = self.ITEM_DESCRIPTION.get(status) 

2513 

2514 if not description: # Not worth an action item 

2515 return False 

2516 

2517 action_item = package.get_action_item_for_type( 

2518 self.action_item_type.type_name) 

2519 if action_item is None:

2520 action_item = ActionItem( 

2521 package=package, 

2522 item_type=self.action_item_type, 

2523 severity=ActionItem.SEVERITY_NORMAL) 

2524 

2525 url = "{}/debian/rb-pkg/{}.html".format(self.BASE_URL, package.name) 

2526 action_item.short_description = description.format(url=url) 

2527 action_item.save() 

2528 return True 

2529 

2530 def execute_main(self): 

2531 reproducibilities = self.get_build_reproducibility() 

2532 if reproducibilities is None:

2533 return 

2534 

2535 with transaction.atomic(): 

2536 PackageData.objects.filter(key='reproducibility').delete() 

2537 

2538 packages = [] 

2539 data = [] 

2540 

2541 for name, status in reproducibilities.items(): 

2542 try: 

2543 package = SourcePackageName.objects.get(name=name) 

2544 if self.update_action_item(package, status): 

2545 packages.append(package) 

2546 except SourcePackageName.DoesNotExist: 

2547 continue 

2548 

2549 reproducibility_info = PackageData( 

2550 key='reproducibility', 

2551 package=package, 

2552 value={'reproducibility': status}) 

2553 data.append(reproducibility_info) 

2554 

2555 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2556 packages) 

2557 PackageData.objects.bulk_create(data) 

2558 

2559 

2560class MultiArchHintsTask(BaseTask): 
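    """
    Fetches the multiarch hints published at dedup.debian.net and creates an
    action item summarizing the reported issues for each affected source
    package.
    """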

2561 

2562 class Scheduler(IntervalScheduler): 

2563 interval = 3600 * 6 

2564 

2565 ACTIONS_WEB = 'https://wiki.debian.org/MultiArch/Hints' 

2566 ACTIONS_URL = 'https://dedup.debian.net/static/multiarch-hints.yaml' 

2567 ACTION_ITEM_TYPE_NAME = 'debian-multiarch-hints' 

2568 ACTION_ITEM_TEMPLATE = 'debian/multiarch-hints.html' 

2569 ACTION_ITEM_DESCRIPTION = \ 

2570 '<a href="{link}">Multiarch hinter</a> reports {count} issue(s)' 

2571 

2572 def initialize(self, *args, **kwargs): 

2573 super(MultiArchHintsTask, self).initialize(*args, **kwargs) 

2574 self.action_item_type = ActionItemType.objects.create_or_update( 

2575 type_name=self.ACTION_ITEM_TYPE_NAME, 

2576 full_description_template=self.ACTION_ITEM_TEMPLATE) 

2577 self.SEVERITIES = {} 

2578 for value, name in ActionItem.SEVERITIES: 

2579 self.SEVERITIES[name] = value 

2580 

2581 def get_data(self): 

2582 data = get_resource_text(self.ACTIONS_URL) 

2583 if data: 

2584 return yaml.safe_load(data) 

2585 

2586 def get_packages(self): 

2587 data = self.get_data() 

2588 if data is None: 

2589 return 

2590 if data['format'] != 'multiarch-hints-1.0': 

2591 return None 

2592 data = data['hints'] 

2593 packages = collections.defaultdict(dict) 

2594 for item in data: 

2595 if 'source' not in item: 

2596 continue 

2597 package = item['source'] 

2598 wishlist = ActionItem.SEVERITY_WISHLIST 

2599 severity = self.SEVERITIES.get(item['severity'], wishlist) 

2600 pkg_severity = packages[package].get('severity', wishlist) 

2601 packages[package]['severity'] = max(severity, pkg_severity) 

2602 packages[package].setdefault('hints', []).append( 

2603 (item['description'], item['link'])) 

2604 return packages 

2605 

2606 def update_action_item(self, package, severity, description, extra_data): 

2607 action_item = package.get_action_item_for_type( 

2608 self.action_item_type.type_name) 

2609 if action_item is None: 

2610 action_item = ActionItem( 

2611 package=package, 

2612 item_type=self.action_item_type) 

2613 action_item.severity = severity 

2614 action_item.short_description = description 

2615 action_item.extra_data = extra_data 

2616 action_item.save() 

2617 

2618 def execute_main(self): 

2619 packages = self.get_packages() 

2620 if not packages: 

2621 return 

2622 

2623 with transaction.atomic(): 

2624 for name, data in packages.items(): 

2625 try: 

2626 package = SourcePackageName.objects.get(name=name) 

2627 except SourcePackageName.DoesNotExist: 

2628 continue 

2629 

2630 description = self.ACTION_ITEM_DESCRIPTION.format( 

2631 count=len(data['hints']), link=self.ACTIONS_WEB) 

2632 self.update_action_item(package, data['severity'], description, 

2633 data['hints']) 

2634 

2635 ActionItem.objects.delete_obsolete_items([self.action_item_type], 

2636 packages.keys()) 

2637 

2638 

2639class UpdateVcsWatchTask(BaseTask): 

2640 """ 

2641 Updates packages' vcswatch stats. 

2642 """ 

2643 

2644 class Scheduler(IntervalScheduler): 

2645 interval = 3600 

2646 

2647 ACTION_ITEM_TYPE_NAME = 'vcswatch-warnings-and-errors' 

2648 ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/vcswatch-action-item.html' 

2649 VCSWATCH_URL = 'https://qa.debian.org/cgi-bin/vcswatch?package=%(package)s' 

2650 VCSWATCH_DATA_URL = 'https://qa.debian.org/data/vcswatch/vcswatch.json.gz' 

2651 

2652 VCSWATCH_STATUS_DICT = { 

2653 "NEW": { 

2654 "description": 

2655 '<a href="{vcswatch_url}">version in VCS is newer</a> than in ' 

2656 'repository, is it time to upload?', 

2657 "severity": ActionItem.SEVERITY_NORMAL, 

2658 }, 

2659 "COMMITS": { 

2660 "description": 

2661 '<a href="{vcswatch_url}">{commits} new commit{commits_s}</a> ' 

2662 'since last upload, is it time to release?', 

2663 "severity": ActionItem.SEVERITY_NORMAL, 

2664 }, 

2665 "OLD": { 

2666 'description': 

2667 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2668 'date</a>, push the missing commits.', 

2669 "severity": ActionItem.SEVERITY_HIGH, 

2670 }, 

2671 "UNREL": { 

2672 "description": 

2673 'The <a href="{vcswatch_url}">VCS repository is not up to ' 

2674 'date</a>, push the missing commits.', 

2675 "severity": ActionItem.SEVERITY_HIGH, 

2676 }, 

2677 "ERROR": { 

2678 "description": 

2679 '<a href="{vcswatch_url}">Failed to analyze the VCS ' 

2680 'repository</a>. Please troubleshoot and fix the issue.', 

2681 "severity": ActionItem.SEVERITY_HIGH, 

2682 }, 

2683 "DEFAULT": { 

2684 "description": 

2685 '<a href="{url}">Unexpected status</a> ({status}) reported by ' 

2686 'VcsWatch.', 

2687 "severity": ActionItem.SEVERITY_HIGH, 

2688 }, 

2689 } 

2690 

2691 def initialize(self, *args, **kwargs): 

2692 super(UpdateVcsWatchTask, self).initialize(*args, **kwargs) 

2693 self.vcswatch_ai_type = ActionItemType.objects.create_or_update( 

2694 type_name=self.ACTION_ITEM_TYPE_NAME, 

2695 full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE 

2696 ) 

2697 

2698 def get_vcswatch_data(self): 

2699 text = get_resource_text(self.VCSWATCH_DATA_URL) 

2700 

2701 if text is None:

2702 return 

2703 

2704 # There's some text, let's load! 

2705 data = json.loads(text) 

2706 

2707 out = {} 

2708 # This avoids a lot of list searching later.

2709 for entry in data: 

2710 out[entry[u'package']] = entry 

2711 

2712 return out 

2713 

2714 def clean_package_info(self, package_infos_without_watch, todo): 

2715 """Takes a list of :class:`PackageData` which do not 

2716 have a watch entry and cleans it. Then schedule in todo what 

2717 to do with them. 

2718 """ 

2719 for package_info in package_infos_without_watch: 

2720 if 'QA' in package_info.value:

2721 package_info.value.pop('QA') 

2722 if (list(package_info.value.keys()) == ['checksum'] or 

2723 not package_info.value.keys()): 

2724 todo['drop']['package_infos'].append(package_info) 

2725 else: 

2726 package_info.value['checksum'] = get_data_checksum( 

2727 package_info.value 

2728 ) 

2729 todo['update']['package_infos'].append(package_info) 

2730 

2731 def update_action_item(self, package, vcswatch_data, action_item, todo): 

2732 """ 

2733 For a given :class:`ActionItem` and a given vcswatch data, updates 

2734 properly the todo dict if required. 

2735 

2736 The return value depends on what has been done: returns True if

2737 something needs to be updated, False if nothing needs to be updated,

2738 and `None` if the calling loop should `continue`.

2739 

2740 :rtype: bool or `None` 

2741 """ 

2742 

2743 package_status = vcswatch_data['status'] 

2744 

2745 if package_status == "OK": 

2746 # Everything is fine, let's purge the action item. Not the 

2747 # package extracted info as its QA url is still relevant. 

2748 if action_item:

2749 todo['drop']['action_items'].append(action_item) 

2750 

2751 # Nothing more to do! 

2752 return False 

2753 

2754 # NOT BEFORE "OK" check!! 

2755 if package_status not in self.VCSWATCH_STATUS_DICT:

2756 package_status = "DEFAULT" 

2757 

2758 # If we are here, then something is not OK. Let's check if we 

2759 # already had some intel regarding the current package status. 

2760 if action_item is None: 

2761 action_item = ActionItem( 

2762 package=package, 

2763 item_type=self.vcswatch_ai_type) 

2764 todo['add']['action_items'].append(action_item) 

2765 else: 

2766 todo['update']['action_items'].append(action_item) 

2767 

2768 # Computes the watch URL 

2769 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2770 

2771 if action_item.extra_data: 

2772 extra_data = action_item.extra_data 

2773 else: 

2774 extra_data = {} 

2775 

2776 # Fetches the long description and severity from 

2777 # the VCSWATCH_STATUS_DICT dict. 

2778 action_item.severity = \ 

2779 self.VCSWATCH_STATUS_DICT[package_status]['severity'] 

2780 

2781 nb_commits = int(vcswatch_data["commits"] or 0) 

2782 

2783 # The new data 

2784 new_extra_data = { 

2785 'vcswatch_url': vcswatch_url, 

2786 } 

2787 new_extra_data.update(vcswatch_data) 

2788 

2789 extra_data_match = all([ 

2790 new_extra_data[key] == extra_data.get(key, None) 

2791 for key in new_extra_data 

2792 ]) 

2793 

2794 # If everything is fine and we are not forcing the update 

2795 # then we proceed to the next package. 

2796 if extra_data_match and not self.force_update:

2797 # Remove from the todolist 

2798 todo['update']['action_items'].remove(action_item) 

2799 return False 

2800 else: 

2801 # Report for short description of the :class:`ActionItem` 

2802 desc = self.VCSWATCH_STATUS_DICT[package_status]['description'] 

2803 commits_s = 's' if nb_commits != 1 else '' 

2804 action_item.short_description = \ 

2805 desc.format(commits_s=commits_s, **new_extra_data) 

2806 action_item.extra_data = new_extra_data 

2807 return True 

2808 

2809 def update_package_info(self, package, vcswatch_data, package_info, key, 

2810 todo): 

2811 # Same thing with PackageData 

2812 if package_info is None: 

2813 package_info = PackageData( 

2814 package=package, 

2815 key=key, 

2816 ) 

2817 todo['add']['package_infos'].append(package_info) 

2818 else: 

2819 todo['update']['package_infos'].append(package_info) 

2820 

2821 # Computes the watch URL 

2822 vcswatch_url = self.VCSWATCH_URL % {'package': package.name} 

2823 

2824 new_value = dict(package_info.value) 

2825 if key == 'vcs_extra_links': 

2826 new_value['QA'] = vcswatch_url 

2827 elif key == 'vcswatch':

2828 if 'package_version' in vcswatch_data:

2829 new_value['package_version'] = vcswatch_data['package_version'] 

2830 if 'changelog_version' in vcswatch_data:

2831 new_value['changelog_version'] = vcswatch_data[ 

2832 'changelog_version'] 

2833 if 'changelog_distribution' in vcswatch_data:

2834 new_value['changelog_distribution'] = vcswatch_data[ 

2835 'changelog_distribution'] 

2836 

2837 new_value['checksum'] = get_data_checksum(new_value) 

2838 

2839 package_info_match = ( 

2840 new_value['checksum'] == package_info.value.get('checksum', None) 

2841 ) 

2842 

2843 if package_info_match and not self.force_update: 

2844 todo['update']['package_infos'].remove(package_info) 

2845 return False 

2846 else: 

2847 package_info.value = new_value 

2848 return True 

2849 

2850 def update_packages_item(self, packages, vcswatch_datas): 

2851 """Generates the lists of :class:`ActionItem` to be added, 

2852 deleted or updated regarding the status of their packages. 

2853 

2854 Categories of statuses are: 

2855 {u'COMMITS', u'ERROR', u'NEW', u'OK', u'OLD', u'UNREL'} 

2856 

2857 Basically, it fetches all :class:`PackageData` entries with the

2858 VCS-related keys; the entries without a match in vcswatch_datas are

2859 stored in one variable that is iterated through directly, and any

2860 previously stored data is purged. Then, all entries in that queryset

2861 that no longer hold any relevant information are scheduled to be

2862 deleted. The others are only updated.

2863 

2864 All :class:`PackageData` matching vcswatch_datas 

2865 are stored in another variable. The same is done with the list of 

2866 :class:`ActionItem` that match this task type. 

2867 

2868 Then, it iterates over all packages in vcswatch_datas and tries to

2869 determine whether there is any news; if so, it updates the

2870 prospective :class:`ActionItem` and :class:`PackageData` appropriately

2871 and schedules them to be updated. If no data existed before, then

2872 it creates them and schedules them to be added to the database.

2873 

2874 At the end, this function returns a dict of all instances of 

2875 :class:`ActionItem` and :class:`PackageData` stored 

2876 in subdicts depending on their class and what is to be done 

2877 with them. 

2878 

2879 :rtype: dict 

2880 

2881 """ 

2882 

2883 todo = { 

2884 'drop': { 

2885 'action_items': [], 

2886 'package_infos': [], 

2887 }, 

2888 'update': { 

2889 'action_items': [], 

2890 'package_infos': [], 

2891 }, 

2892 'add': { 

2893 'action_items': [], 

2894 'package_infos': [], 

2895 }, 

2896 } 

2897 

2898 package_info_keys = ['vcs_extra_links', 'vcswatch'] 

2899 package_infos = {} 

2900 for key in package_info_keys: 

2901 # Fetches all PackageData with a given key for packages having 

2902 # a vcswatch key. As the pair (package, key) is unique, there is a 

2903 # bijection between these data, and we fetch them classifying them 

2904 # by package name. 

2905 for package_info in PackageData.objects.select_related( 

2906 'package').filter(key=key).only('package__name', 'value'): 

2907 if package_info.package.name not in package_infos: 

2908 package_infos[package_info.package.name] = {} 

2909 package_infos[package_info.package.name][key] = package_info 

2910 

2911 # As :class:`PackageData` key=vcs_extra_links is shared, we 

2912 # have to clean up those with vcs watch_url that aren't in vcs_data 

2913 package_infos_without_watch = PackageData.objects.filter( 

2914 key='vcs_extra_links').exclude( 

2915 package__name__in=vcswatch_datas.keys()).only('value') 

2916 

2917 # Do the actual clean. 

2918 self.clean_package_info(package_infos_without_watch, todo) 

2919 

2920 # Fetches all :class:`ActionItem` for packages concerned by a vcswatch 

2921 # action. 

2922 action_items = { 

2923 action_item.package.name: action_item 

2924 for action_item in ActionItem.objects.select_related( 

2925 'package' 

2926 ).filter(item_type=self.vcswatch_ai_type) 

2927 } 

2928 

2929 for package in packages: 

2930 # Get the vcswatch_data from the whole vcswatch_datas 

2931 vcswatch_data = vcswatch_datas[package.name] 

2932 

2933 # Get the old action item for this warning, if it exists. 

2934 action_item = action_items.get(package.name, None) 

2935 

2936 # Updates the :class:`ActionItem`. If _ai_continue is None,

2937 # then there is nothing more to do with this package.

2938 # If it is False, then no update is pending for the

2939 # :class:`ActionItem`, else there is an update

2940 # to do.

2941 _ai_continue = self.update_action_item( 

2942 package, 

2943 vcswatch_data, 

2944 action_item, 

2945 todo) 

2946 

2947 _pi_continue = False 

2948 for key in package_info_keys: 

2949 try: 

2950 package_info = package_infos[package.name][key] 

2951 except KeyError: 

2952 package_info = None 

2953 

2954 _pi_continue |= self.update_package_info( 

2955 package, 

2956 vcswatch_data, 

2957 package_info, 

2958 key, 

2959 todo) 

2960 

2961 if not _ai_continue and not _pi_continue: 

2962 continue 

2963 

2964 return todo 

2965 

2966 def execute_main(self): 

2967 # Get the actual vcswatch json file from qa.debian.org 

2968 vcs_data = self.get_vcswatch_data() 

2969 

2970 # Only fetch the packages that are in the json dict. 

2971 packages = PackageName.objects.filter(name__in=vcs_data.keys()) 

2972 

2973 # Faster than fetching the action items one by one in a loop 

2974 # when handling each package. 

2975 packages.prefetch_related('action_items') 

2976 

2977 # Determine whether something is to be kept or dropped.

2978 todo = self.update_packages_item(packages, vcs_data) 

2979 

2980 with transaction.atomic(): 

2981 # Delete the :class:`ActionItem` that are obsolete, and also

2982 # the corresponding :class:`PackageData`.

2983 ActionItem.objects.delete_obsolete_items( 

2984 [self.vcswatch_ai_type], 

2985 vcs_data.keys()) 

2986 PackageData.objects.filter( 

2987 key='vcs_extra_links', 

2988 id__in=[ 

2989 package_info.id 

2990 for package_info in todo['drop']['package_infos'] 

2991 ] 

2992 ).delete() 

2993 

2994 # Then delete the :class:`ActionItem` that are to be deleted. 

2995 ActionItem.objects.filter( 

2996 item_type__type_name=self.vcswatch_ai_type.type_name, 

2997 id__in=[ 

2998 action_item.id 

2999 for action_item in todo['drop']['action_items'] 

3000 ] 

3001 ).delete() 

3002 

3003 # Then bulk_create the :class:`ActionItem` to add and the 

3004 # :class:`PackageData` 

3005 ActionItem.objects.bulk_create(todo['add']['action_items']) 

3006 PackageData.objects.bulk_create(todo['add']['package_infos']) 

3007 

3008 # Update existing entries 

3009 for action_item in todo['update']['action_items']: 

3010 action_item.save() 

3011 for package_info in todo['update']['package_infos']: 

3012 package_info.save() 

3013 

3014 

3015class TagPackagesWithRcBugs(BaseTask, PackageTagging): 

3016 """ 

3017 Performs an update of 'rc-bugs' tag for packages. 

3018 """ 

3019 

3020 class Scheduler(IntervalScheduler): 

3021 interval = 3600 

3022 

3023 TAG_NAME = 'tag:rc-bugs' 

3024 TAG_DISPLAY_NAME = 'rc bugs' 

3025 TAG_COLOR_TYPE = 'danger' 

3026 TAG_DESCRIPTION = 'The package has Release Critical bugs' 

3027 TAG_TABLE_TITLE = 'Packages with RC bugs' 

3028 

3029 def packages_to_tag(self): 

3030 all_bug_stats = PackageBugStats.objects.prefetch_related('package') 

3031 packages_list = [] 

3032 for bug_stats in all_bug_stats: 

3033 categories = bug_stats.stats 

3034 found = False 

3035 for category in categories:

3036 if found: 

3037 break 

3038 if category['category_name'] == 'rc':

3039 found = True 

3040 if category['bug_count'] > 0: 

3041 packages_list.append(bug_stats.package) 

3042 return packages_list 

3043 

3044 

class TagPackagesWithNewUpstreamVersion(BaseTask, PackageTagging):
    """
    Performs an update of 'new-upstream-version' tag for packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    TAG_NAME = 'tag:new-upstream-version'
    TAG_DISPLAY_NAME = 'new upstream version'
    TAG_COLOR_TYPE = 'warning'
    TAG_DESCRIPTION = 'The upstream has a newer version available'
    TAG_TABLE_TITLE = 'Newer upstream version'

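    # Note: this relies on the 'new-upstream-version' ActionItemType being
    # created and populated by a separate task; if that type does not exist
    # yet, packages_to_tag() simply returns an empty list.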

    def packages_to_tag(self):
        try:
            action_type = ActionItemType.objects.get(
                type_name='new-upstream-version')
        except ActionItemType.DoesNotExist:
            return []

        packages_list = []
        items = action_type.action_items.prefetch_related('package')
        for item in items:
            packages_list.append(item.package)
        return packages_list


class UpdateDependencySatisfactionTask(BaseTask):
    """
    Fetches binary package installability results from qa.debian.org/dose
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-dependency-satisfaction'
    ACTION_ITEM_TEMPLATE = 'debian/dependency-satisfaction-action-item.html'

    def __init__(self, force_update=False, *args, **kwargs):
        super(UpdateDependencySatisfactionTask, self).__init__(*args, **kwargs)
        self.force_update = force_update
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def set_parameters(self, parameters):
        if 'force_update' in parameters:
            self.force_update = parameters['force_update']

    def get_dependency_satisfaction(self):
        url = '{}/each.txt'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
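        # Each line of each.txt is '#'-separated; based on the parsing below
        # it looks like (illustrative example, not a real entry):
        #   <binary>#<version>#<isnative>#<anchor>#<explanation>#<arch list>
        # e.g. "foo#1.2-3#True#a1#unsatisfied dependency on bar#amd64 arm64"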

        for i, line in enumerate(content.splitlines()):
            binpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            try:
                bin_package = BinaryPackageName.objects.get(name=binpkg_name)
                srcpkg_name = bin_package.main_source_package_name
            except BinaryPackageName.DoesNotExist:
                continue
            arches = set([arch.strip() for arch in arches.split()])
            # TODO: retrieve this list programmatically, either from
            # https://api.ftp-master.debian.org/suite/testing
            # or from the Architecture field in the Release file
            # for testing (both lists should be equal).
            arches = arches.intersection(
                {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
                 'mips64el', 'mipsel', 'ppc64el', 's390x'})
            # only report problems for release architectures
            if not arches:
                continue
            # if the package is arch:all, only report problems on amd64
            if isnative != "True":
                arches = arches.intersection({"amd64"})
                if not arches:
                    continue
            dep_sats[srcpkg_name].add(
                (binpkg_name, ver, tuple(arches), expl, anchor))
        # turn sets into lists
        dep_sats = dict([(k, list(v)) for k, v in dep_sats.items()])
        return dep_sats

    def update_action_item(self, package, unsats):
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        action_item.short_description = \
            "{count} binary package{plural} {have} unsatisfiable " \
            "dependencies".format(
                count=len(unsats),
                plural='' if len(unsats) == 1 else 's',
                have='has' if len(unsats) == 1 else 'have',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(key='dependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='dependency_satisfaction',
                    package=package,
                    value={'dependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)


class UpdateBuildDependencySatisfactionTask(BaseTask):
    """
    Fetches source package installability results from qa.debian.org/dose
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/src_unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-builddependency-satisfaction'
    ACTION_ITEM_TEMPLATE = \
        'debian/builddependency-satisfaction-action-item.html'

    def __init__(self, *args, **kwargs):
        super(UpdateBuildDependencySatisfactionTask, self).__init__(*args,
                                                                    **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_dependency_satisfaction(self):
        url = '{}/each.txt'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
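        # Same '#'-separated layout as the binary each.txt above, except that
        # the first field is already the source package name; only amd64 is
        # kept for sources that build only arch:all binaries.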

        for i, line in enumerate(content.splitlines()):
            srcpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            arches = set([arch.strip() for arch in arches.split()])
            # TODO: retrieve this list programmatically, either from
            # https://api.ftp-master.debian.org/suite/testing
            # or from the Architecture field in the Release file
            # for testing (both lists should be equal).
            arches = arches.intersection(
                {'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
                 'mips64el', 'mipsel', 'ppc64el', 's390x'})
            # only report problems for release architectures
            if not arches:
                continue
            # if the source package only builds arch:all binary packages, only
            # report problems on amd64
            if isnative != "True":
                arches = arches.intersection({"amd64"})
                if not arches:
                    continue
            dep_sats[srcpkg_name].add(
                (srcpkg_name, tuple(arches), expl, anchor))
        # turn sets into lists
        dep_sats = dict([(k, list(v)) for k, v in dep_sats.items()])
        return dep_sats

    def update_action_item(self, package, unsats):
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        action_item.short_description = \
            "source package has {count} unsatisfiable " \
            "build dependenc{plural}".format(
                count=len(unsats),
                plural='y' if len(unsats) == 1 else 'ies',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(
                key='builddependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='builddependency_satisfaction',
                    package=package,
                    value={'builddependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)


class UpdateDl10nStatsTask(BaseTask):
    """
    Updates packages' l10n statistics.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'dl10n'
    ITEM_DESCRIPTION = \
        '<a href="{url}">Issues</a> found with some translations'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/dl10n-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateDl10nStatsTask, self).initialize(*args, **kwargs)
        self.l10n_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _load_l10n_stats(self):
        url = 'https://i18n.debian.org/l10n-pkg-status/pkglist'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        def parse_score(score):
            if score == '-':
                return None
            return int(score)

        all_stats = {}

        # The format of the file is (copied from its header):
        # <package> <version> (<comma separated scores>) <link> <todo>
        line_re = re.compile(
            r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)')
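        # Illustrative line that this regex would match (not a real entry):
        #   "foo 1.2-3 (85,90) https://i18n.debian.org/... 1"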

        for line in content.splitlines():
            if not line or line.startswith('#'):
                continue
            match = line_re.search(line)
            if not match:
                logger.warning('Failed to parse l10n pkglist line: %s', line)
                continue

            src_pkgname = match.group(1)
            try:
                scores = match.group(3).split(',')
                score_debian = parse_score(scores[0])
                score_other = parse_score(scores[1])
                # <todo> is a "0" or "1" string, so convert through int to get
                # a proper bool
                todo = bool(int(match.group(5)))
            except (IndexError, ValueError):
                logger.warning(
                    'Failed to parse l10n scores: %s',
                    line, exc_info=1)
                continue
            link = match.group(4)
            if not score_debian and not score_other:
                continue

            all_stats[src_pkgname] = {
                'score_debian': score_debian,
                'score_other': score_other,
                'link': link,
                'todo': todo,
            }

        return all_stats

    def update_action_item(self, package, package_stats):
        todo = package_stats['todo']

        # Get the old action item, if it exists.
        l10n_action_item = package.get_action_item_for_type(
            self.l10n_action_item_type.type_name)
        if not todo:
            if l10n_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                l10n_action_item.delete()
            return

        # The package didn't previously have an action item: create it now.
        if l10n_action_item is None:
            desc = self.ITEM_DESCRIPTION.format(url=package_stats['link'])
            l10n_action_item = ActionItem(
                package=package,
                item_type=self.l10n_action_item_type,
                severity=ActionItem.SEVERITY_LOW,
                short_description=desc)

        if l10n_action_item.extra_data:
            old_extra_data = l10n_action_item.extra_data
            if old_extra_data == package_stats:
                # No need to update
                return

        l10n_action_item.extra_data = package_stats

        l10n_action_item.save()

    def execute_main(self):
        stats = self._load_l10n_stats()
        if not stats:
            return

        with transaction.atomic():
            PackageData.objects.filter(key='dl10n').delete()

            packages = []
            pkgdata_list = []

            for name, stat in stats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, stat)
                except SourcePackageName.DoesNotExist:
                    continue

                dl10n_stat = PackageData(
                    key='dl10n',
                    package=package,
                    value=stat)
                pkgdata_list.append(dl10n_stat)

            ActionItem.objects.delete_obsolete_items(
                [self.l10n_action_item_type], packages)
            PackageData.objects.bulk_create(pkgdata_list)
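

# The tasks in this module are normally run by the task scheduler according
# to each task's Scheduler.interval. For ad-hoc runs (e.g. while debugging),
# a task can also be driven directly, along the lines of:
#
#     task = UpdateDependencySatisfactionTask(force_update=True)
#     task.execute()
#
# This is only a sketch: it uses the force_update keyword and execute() entry
# point visible in this module; see distro_tracker.core.tasks.BaseTask for
# the authoritative task API.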