Coverage for distro_tracker/vendor/debian/tracker_tasks.py: 84%
1716 statements
« prev ^ index » next coverage.py v6.5.0, created at 2025-10-07 08:16 +0000
« prev ^ index » next coverage.py v6.5.0, created at 2025-10-07 08:16 +0000
1# Copyright 2013-2021 The Distro Tracker Developers
2# See the COPYRIGHT file at the top-level directory of this distribution and
3# at https://deb.li/DTAuthors
4#
5# This file is part of Distro Tracker. It is subject to the license terms
6# in the LICENSE file found in the top-level directory of this
7# distribution and at https://deb.li/DTLicense. No part of Distro Tracker,
8# including this file, may be copied, modified, propagated, or distributed
9# except according to the terms contained in the LICENSE file.
11"""
12Debian-specific tasks.
13"""
15import collections
16import itertools
17import json
18import logging
19import os
20import re
21import urllib.parse
22import warnings
23from enum import Enum
25from bs4 import BeautifulSoup as soup, MarkupResemblesLocatorWarning
27from debian import deb822, debian_support
28from debian.debian_support import AptPkgVersion
30import debianbts
32from django.conf import settings
33from django.core.exceptions import ValidationError
34from django.db import transaction
35from django.db.models import Prefetch
36from django.utils.http import urlencode
38import yaml
40from distro_tracker.accounts.models import UserEmail
41from distro_tracker.core.models import (
42 ActionItem,
43 ActionItemType,
44 BinaryPackageBugStats,
45 BinaryPackageName,
46 BugDisplayManagerMixin,
47 PackageBugStats,
48 PackageData,
49 PackageName,
50 Repository,
51 SourcePackageDeps,
52 SourcePackageName
53)
54from distro_tracker.core.tasks import BaseTask
55from distro_tracker.core.tasks.mixins import ImportExternalData, PackageTagging
56from distro_tracker.core.tasks.schedulers import IntervalScheduler
57from distro_tracker.core.utils import get_or_none
58from distro_tracker.core.utils.http import get_resource_text
59from distro_tracker.core.utils.misc import get_data_checksum
60from distro_tracker.core.utils.packages import (
61 html_package_list,
62 package_url
63)
64from distro_tracker.vendor.debian.models import (
65 LintianStats,
66 PackageExcuses,
67 PackageTransition,
68 UbuntuPackage
69)
71from .models import DebianContributor
73logger = logging.getLogger(__name__)
74logger_input = logging.getLogger('distro_tracker.input')
76warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning)
class RetrieveDebianMaintainersTask(BaseTask):
    """
    Retrieves (and updates if necessary) a list of Debian Maintainers.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def execute_main(self):
        url = "https://ftp-master.debian.org/dm.txt"
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # The cached copy is still current, so there is nothing to do.
            return

        # Map each maintainer email to the list of packages they may upload.
        maintainers = collections.defaultdict(list)
        for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()):
            if 'Uid' not in stanza or 'Allow' not in stanza:
                continue
            # Allow is a comma-separated string of 'package (DD fpr)' items,
            # where DD fpr is the fingerprint of the DD that granted the
            # permission
            _, email = stanza['Uid'].rsplit(' ', 1)
            email = email.strip('<>')
            for grant in stanza['Allow'].split(','):
                pkg, _ = grant.strip().split()
                maintainers[email].append(pkg)

        # Now update the developer information
        with transaction.atomic():
            # Reset all old maintainers first.
            DebianContributor.objects.filter(
                is_debian_maintainer=True).update(is_debian_maintainer=False)

            for email, packages in maintainers.items():
                try:
                    user_email, _ = UserEmail.objects.get_or_create(
                        email=email)
                except ValidationError:
                    logger_input.info('%s refers to invalid email "%s".',
                                      url, email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.is_debian_maintainer = True
                contributor.allowed_packages = packages
                contributor.save()
class RetrieveLowThresholdNmuTask(BaseTask):
    """
    Updates the list of Debian Maintainers which agree with the lowthreshold
    NMU.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    def _retrieve_emails(self):
        """
        Helper function which obtains the list of emails of maintainers that
        agree with the lowthreshold NMU.

        :returns: a list of email addresses, or ``None`` when the remote
            wiki page has not changed since the last retrieval.
        """
        url = 'https://wiki.debian.org/LowThresholdNmu?action=raw'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            # Not updated since the last run: nothing to parse.
            return

        emails = []
        devel_php_RE = re.compile(
            r'https?://qa\.debian\.org/developer\.php\?login=([^\s&|]+)')
        word_RE = re.compile(r'^\w+$')
        for line in content.splitlines():
            match = devel_php_RE.search(line)
            while match:  # look for several matches on the same line
                email = None
                login = match.group(1)
                if word_RE.match(login):
                    # A bare login name maps to a @debian.org address
                    email = login + '@debian.org'
                elif login.find('@') >= 0:
                    email = login
                if email:
                    emails.append(email)
                line = line[match.end():]
                match = devel_php_RE.search(line)
        return emails

    def execute_main(self):
        emails = self._retrieve_emails()
        if emails is None:
            # The wiki page was not updated; keep the current flags instead
            # of resetting them based on no data (iterating None would
            # otherwise raise TypeError here).
            return
        with transaction.atomic():
            # Reset all threshold flags first.
            qs = DebianContributor.objects.filter(
                agree_with_low_threshold_nmu=True)
            qs.update(agree_with_low_threshold_nmu=False)

            for email in emails:
                try:
                    user_email, _ = UserEmail.objects.get_or_create(
                        email=email)
                except ValidationError:
                    logger_input.info(
                        'LowThresholdNmu refers to invalid email "%s".', email)
                    continue

                contributor, _ = DebianContributor.objects.get_or_create(
                    email=user_email)

                contributor.agree_with_low_threshold_nmu = True
                contributor.save()
class UpdatePackageBugStats(BaseTask, BugDisplayManagerMixin):
    """
    Updates the BTS bug stats for all packages (source, binary and pseudo).
    Creates :class:`distro_tracker.core.ActionItem` instances for packages
    which have bugs tagged help or patch.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    PATCH_BUG_ACTION_ITEM_TYPE_NAME = 'debian-patch-bugs-warning'
    HELP_BUG_ACTION_ITEM_TYPE_NAME = 'debian-help-bugs-warning'

    PATCH_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged patch in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    HELP_ITEM_SHORT_DESCRIPTION = (
        '<a href="{url}">{count}</a> tagged help in the '
        '<abbr title="Bug Tracking System">BTS</abbr>')
    PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/patch-bugs-action-item.html'
    HELP_ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/help-bugs-action-item.html'

    # Bug categories reported by the UDD ddpo-bugs interface, in the order
    # the counts appear on each line of its output.
    bug_categories = (
        'rc',
        'normal',
        'wishlist',
        'fixed',
        'patch',
    )

    def initialize(self, *args, **kwargs):
        super(UpdatePackageBugStats, self).initialize(*args, **kwargs)
        # The :class:`distro_tracker.core.models.ActionItemType` instances
        # which this task can create.
        self.patch_item_type = ActionItemType.objects.create_or_update(
            type_name=self.PATCH_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.PATCH_ITEM_FULL_DESCRIPTION_TEMPLATE)
        self.help_item_type = ActionItemType.objects.create_or_update(
            type_name=self.HELP_BUG_ACTION_ITEM_TYPE_NAME,
            full_description_template=self.HELP_ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _get_tagged_bug_stats(self, tag, user=None):
        """
        Using the BTS interface, retrieves the statistics of bugs with a
        particular tag.

        :param tag: The tag for which the statistics are required.
        :type tag: string
        :param user: The email of the user who tagged the bug with the given
            tag.
        :type user: string

        :returns: A dict mapping package names to the count of bugs with the
            given tag.
        """
        debian_ca_bundle = '/etc/ssl/ca-debian/ca-certificates.crt'
        if os.path.exists(debian_ca_bundle):
            os.environ['SSL_CERT_FILE'] = debian_ca_bundle
        if user:
            bug_numbers = debianbts.get_usertag(user, tags=[tag]).get(tag, [])
        else:
            bug_numbers = debianbts.get_bugs(tag=tag)

        # Match each retrieved bug ID to a package and then find the aggregate
        # count for each package.
        bug_stats = {}
        bugs = debianbts.get_status(bug_numbers)
        for bug in bugs:
            # Closed or fixed bugs are not counted.
            if bug.done or bug.fixed_versions or bug.pending == 'done':
                continue

            bug_stats.setdefault(bug.package, 0)
            bug_stats[bug.package] += 1

        return bug_stats

    def _extend_bug_stats(self, bug_stats, extra_stats, category_name):
        """
        Helper method which adds extra bug stats to an already existing list of
        stats.

        :param bug_stats: An already existing list of bug stats. Maps package
            names to list of bug category descriptions.
        :type bug_stats: dict
        :param extra_stats: Extra bug stats which should be added to
            ``bug_stats``. Maps package names to integers representing bug
            counts.
        :type extra_stats: dict
        :param category_name: The name of the bug category which is being added
        :type category_name: string
        """
        for package, count in extra_stats.items():
            bug_stats.setdefault(package, [])
            bug_stats[package].append({
                'category_name': category_name,
                'bug_count': count,
            })

    def _create_patch_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged patch.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.PATCH_BUG_ACTION_ITEM_TYPE_NAME)

        if 'patch' not in bug_stats or bug_stats['patch']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged patch anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged patch, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.patch_item_type)

        bug_count = bug_stats['patch']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'patch')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = \
            self.PATCH_ITEM_SHORT_DESCRIPTION.format(url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'merged_count': bug_stats['patch'].get('merged_count', 0),
            'url': url,
            'merged_url': self.bug_manager.get_bug_tracker_url(
                package.name, 'source', 'patch-merged'),
        }
        action_item.save()

    def _create_help_bug_action_item(self, package, bug_stats):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` instance for
        the given package if it contains any bugs tagged help.

        :param package: The package for which the action item should be
            updated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param bug_stats: A dictionary mapping category names to structures
            describing those categories. Those structures should be
            identical to the ones stored in the :class:`PackageBugStats`
            instance.
        :type bug_stats: dict
        """
        # Get the old action item, if any
        action_item = package.get_action_item_for_type(
            self.HELP_BUG_ACTION_ITEM_TYPE_NAME)

        if 'help' not in bug_stats or bug_stats['help']['bug_count'] == 0:
            # Remove the old action item, since the package does not have any
            # bugs tagged help anymore.
            if action_item is not None:
                action_item.delete()
            return

        # If the package has bugs tagged help, update the action item
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.help_item_type)

        bug_count = bug_stats['help']['bug_count']
        # Include the URL in the short description
        url = self.bug_manager.get_bug_tracker_url(
            package.name, 'source', 'help')
        if not url:
            url = ''
        # Include the bug count in the short description
        count = '{bug_count} bug'.format(bug_count=bug_count)
        if bug_count > 1:
            count += 's'
        action_item.short_description = self.HELP_ITEM_SHORT_DESCRIPTION.format(
            url=url, count=count)
        # Set additional URLs and merged bug count in the extra data for a full
        # description
        action_item.extra_data = {
            'bug_count': bug_count,
            'url': url,
        }
        action_item.save()

    def _create_action_items(self, package_bug_stats):
        """
        Method which creates a :class:`distro_tracker.core.models.ActionItem`
        instance for a package based on the given package stats.

        For now, an action item is created if the package either has bugs
        tagged as help or patch.
        """
        # Transform the bug stats to a structure easier to pass to functions
        # for particular bug-category action items.
        bug_stats = {
            category['category_name']: category
            for category in package_bug_stats.stats
        }
        package = package_bug_stats.package
        self._create_patch_bug_action_item(package, bug_stats)
        self._create_help_bug_action_item(package, bug_stats)

    def _get_udd_bug_stats(self):
        """
        Retrieves bug statistics for source and pseudo packages from the UDD
        ddpo-bugs interface.

        :returns: a dict mapping package names to lists of per-category bug
            count descriptions, or ``None`` when the resource could not be
            fetched.
        """
        url = 'https://udd.debian.org/cgi-bin/ddpo-bugs.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Each line in the response should be bug stats for a single package
        bug_stats = {}
        for line in response_content.splitlines():
            line = line.strip()
            # Split the line before the try block so the logging below never
            # references unbound names (the original code raised NameError
            # when a line contained no colon at all).
            package_name, sep, counts_field = line.rpartition(':')
            if not sep:
                logger.warning('Failed to parse bug information: %s', line)
                continue
            if package_name.startswith('src:'):
                package_name = package_name[len('src:'):]
            try:
                # Merged counts are in parentheses so remove those before
                # splitting the numbers
                bug_counts = re.sub(r'[()]', ' ', counts_field).split()
                bug_counts = [int(count) for count in bug_counts]
            except ValueError:
                logger.warning(
                    'Failed to parse bug information for %s: %s',
                    package_name, counts_field, exc_info=1)
                continue

            # Match the extracted counts with category names; counts come in
            # (bug_count, merged_count) pairs, one pair per category.
            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                    'merged_count': merged_count,
                }
                for category_name, (bug_count, merged_count) in zip(
                    self.bug_categories, zip(bug_counts[::2], bug_counts[1::2]))
            ]

        return bug_stats

    def _remove_obsolete_action_items(self, package_names):
        """
        Removes action items for packages which no longer have any bug stats.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.patch_item_type, self.help_item_type],
            non_obsolete_packages=package_names)

    def update_source_and_pseudo_bugs(self):
        """
        Performs the update of bug statistics for source and pseudo packages.
        """
        # First get the bug stats exposed by the UDD.
        bug_stats = self._get_udd_bug_stats()
        if not bug_stats:
            bug_stats = {}

        # Add in help bugs from the BTS interface
        try:
            help_bugs = self._get_tagged_bug_stats('help')
            self._extend_bug_stats(bug_stats, help_bugs, 'help')
        except RuntimeError:
            logger.exception("Could not get bugs tagged help")

        # Add in newcomer bugs from the BTS interface
        try:
            newcomer_bugs = self._get_tagged_bug_stats('newcomer')
            self._extend_bug_stats(bug_stats, newcomer_bugs, 'newcomer')
        except RuntimeError:
            logger.exception("Could not get bugs tagged newcomer")

        with transaction.atomic():
            # Clear previous stats
            PackageBugStats.objects.all().delete()
            self._remove_obsolete_action_items(bug_stats.keys())
            # Get all packages which have updated stats, along with their
            # action items in 2 DB queries. Note: prefetch_related() returns
            # a new queryset, so its result must be kept.
            packages = PackageName.objects.filter(name__in=bug_stats.keys())
            packages = packages.prefetch_related('action_items')

            # Update stats and action items.
            stats = []
            for package in packages:
                # Save the raw package bug stats
                package_bug_stats = PackageBugStats(
                    package=package, stats=bug_stats[package.name])
                stats.append(package_bug_stats)

                # Add action items for the package.
                self._create_action_items(package_bug_stats)

            PackageBugStats.objects.bulk_create(stats)

    def update_binary_bugs(self):
        """
        Performs the update of bug statistics for binary packages.
        """
        url = 'https://udd.debian.org/cgi-bin/bugs-binpkgs-pts.cgi'
        response_content = get_resource_text(url)
        if not response_content:
            return

        # Extract known binary package bug stats: each line is a separate pkg
        bug_stats = {}
        for line in response_content.splitlines():
            # Guard the initial split too: a malformed line used to raise an
            # uncaught ValueError (or log unbound names) and abort the task.
            try:
                package_name, counts_field = line.split(None, 1)
            except ValueError:
                logger.warning('Failed to parse bug information: %s', line)
                continue
            try:
                bug_counts = [int(count) for count in counts_field.split()]
            except ValueError:
                logger.exception(
                    'Failed to parse bug information for %s: %s',
                    package_name, counts_field)
                continue

            bug_stats[package_name] = [
                {
                    'category_name': category_name,
                    'bug_count': bug_count,
                }
                for category_name, bug_count in zip(
                    self.bug_categories, bug_counts)
            ]

        with transaction.atomic():
            # Clear previous stats
            BinaryPackageBugStats.objects.all().delete()
            packages = \
                BinaryPackageName.objects.filter(name__in=bug_stats.keys())
            # Create new stats in a single query
            stats = [
                BinaryPackageBugStats(package=package,
                                      stats=bug_stats[package.name])
                for package in packages
            ]
            BinaryPackageBugStats.objects.bulk_create(stats)

    def execute_main(self):
        # Stats for source and pseudo packages is retrieved from a different
        # resource (with a different structure) than stats for binary packages.
        self.update_source_and_pseudo_bugs()
        self.update_binary_bugs()
class UpdateLintianStatsTask(BaseTask):
    """
    Updates packages' lintian stats.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 4

    ACTION_ITEM_TYPE_NAME = 'lintian-warnings-and-errors'
    ITEM_DESCRIPTION = 'lintian reports <a href="{url}">{report}</a>'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/lintian-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateLintianStatsTask, self).initialize(*args, **kwargs)
        self.lintian_action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def get_lintian_stats(self):
        """
        Retrieves and parses the lintian QA list from UDD.

        :returns: a dict mapping package names to per-category issue counts,
            or ``None`` when the resource has not changed since the last run.
        """
        url = 'https://udd.debian.org/lintian-qa-list.txt'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        all_stats = {}
        # Category names matching the order of the counts on each line.
        categories = (
            'errors',
            'warnings',
            'pedantics',
            'experimentals',
            'overriddens',
        )
        for line in content.splitlines():
            # Parse inside the try block: a line without two fields used to
            # raise an uncaught ValueError, and the logging call could
            # reference unbound names.
            try:
                package, stats = line.split(None, 1)
                all_stats[package] = {
                    category: int(count)
                    for count, category in zip(stats.split(), categories)
                }
            except ValueError:
                logger.exception(
                    'Failed to parse lintian information: %s', line)
                continue

        return all_stats

    def update_action_item(self, package, lintian_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        :class:`LintianStats <distro_tracker.vendor.debian.models.LintianStats`
        given in ``package_stats``. If the package has errors or warnings an
        :class:`ActionItem` is created.
        """
        package_stats = lintian_stats.stats
        # Default both counts to 0 so extra_data never stores None for a
        # missing 'warnings' key.
        warnings, errors = (
            package_stats.get('warnings', 0), package_stats.get('errors', 0))
        # Get the old action item for this warning, if it exists.
        lintian_action_item = package.get_action_item_for_type(
            self.lintian_action_item_type.type_name)
        if not warnings and not errors:
            if lintian_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                lintian_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if lintian_action_item is None:
            lintian_action_item = ActionItem(
                package=package,
                item_type=self.lintian_action_item_type)

        lintian_url = lintian_stats.get_lintian_url()
        new_extra_data = {
            'warnings': warnings,
            'errors': errors,
            'lintian_url': lintian_url,
        }
        if lintian_action_item.extra_data:
            old_extra_data = lintian_action_item.extra_data
            if (old_extra_data['warnings'] == warnings and
                    old_extra_data['errors'] == errors):
                # No need to update
                return

        lintian_action_item.extra_data = new_extra_data

        if errors and warnings:
            report = '{} error{} and {} warning{}'.format(
                errors,
                's' if errors > 1 else '',
                warnings,
                's' if warnings > 1 else '')
        elif errors:
            report = '{} error{}'.format(
                errors,
                's' if errors > 1 else '')
        elif warnings:
            report = '{} warning{}'.format(
                warnings,
                's' if warnings > 1 else '')

        lintian_action_item.short_description = self.ITEM_DESCRIPTION.format(
            url=lintian_url,
            report=report)

        # If there are errors make the item a high severity issue
        if errors:
            lintian_action_item.severity = ActionItem.SEVERITY_HIGH

        lintian_action_item.save()

    def execute_main(self):
        all_lintian_stats = self.get_lintian_stats()
        if not all_lintian_stats:
            return

        # Discard all old stats
        LintianStats.objects.all().delete()

        # prefetch_related() returns a new queryset; keep its result so the
        # action items are really fetched in one extra query.
        packages = PackageName.objects.filter(name__in=all_lintian_stats.keys())
        packages = packages.prefetch_related('action_items')
        # Remove action items for packages which no longer have associated
        # lintian data.
        ActionItem.objects.delete_obsolete_items(
            [self.lintian_action_item_type], all_lintian_stats.keys())

        stats = []
        for package in packages:
            package_stats = all_lintian_stats[package.name]
            # Save the raw lintian stats.
            lintian_stats = LintianStats(package=package, stats=package_stats)
            stats.append(lintian_stats)
            # Create an ActionItem if there are errors or warnings
            self.update_action_item(package, lintian_stats)

        LintianStats.objects.bulk_create(stats)
class UpdateAppStreamStatsTask(BaseTask):
    """
    Updates packages' AppStream issue hints data.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'appstream-issue-hints'
    ITEM_DESCRIPTION = 'AppStream hints: {report} for {packageurllist}'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/appstream-action-item.html'

    def initialize(self, *args, **kwargs):
        super(UpdateAppStreamStatsTask, self).initialize(*args, **kwargs)
        self.appstream_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)
        # Maps AppStream hint tag names to their severity string.
        self._tag_severities = {}

    def _load_tag_severities(self):
        """Fetches the hint tag definitions and caches tag severities."""
        url = 'https://appstream.debian.org/hints/sid/hint-definitions.json'
        json_data = get_resource_text(url, force_update=True)

        data = json.loads(json_data)
        for tag, info in data.items():
            self._tag_severities[tag] = info['severity']

    def _load_appstream_hint_stats(self, section, arch, all_stats=None):
        """
        Accumulates AppStream hint statistics for one section/architecture
        into ``all_stats`` (grouped by source package, then binary package).

        :returns: the updated ``all_stats`` dict.
        """
        # NOTE: the default used to be a mutable ``{}``, which is shared
        # between calls and silently accumulates state.
        if all_stats is None:
            all_stats = {}
        url = 'https://appstream.debian.org/hints/sid/{}/Hints-{}.json.gz' \
            .format(section, arch)
        hints_json = get_resource_text(url, force_update=self.force_update)
        if not hints_json:
            # Resource unavailable: leave the stats untouched instead of
            # failing on json.loads(None).
            return all_stats

        hints = json.loads(hints_json)
        for hint in hints:
            pkid = hint['package']
            parts = pkid.split('/')
            package_name = parts[0]

            # get the source package for this binary package name
            src_pkgname = None
            if SourcePackageName.objects.exists_with_name(package_name):
                package = SourcePackageName.objects.get(name=package_name)
                src_pkgname = package.name
            elif BinaryPackageName.objects.exists_with_name(package_name):
                bin_package = BinaryPackageName.objects.get(name=package_name)
                package = bin_package.main_source_package_name
                src_pkgname = package.name
            else:
                src_pkgname = package_name

            if src_pkgname not in all_stats:
                all_stats[src_pkgname] = {}
            if package_name not in all_stats[src_pkgname]:
                all_stats[src_pkgname][package_name] = {}

            for cid, h in hint['hints'].items():
                for e in h:
                    # Skip tags missing from the definitions instead of
                    # aborting the whole task with a KeyError.
                    severity = self._tag_severities.get(e['tag'])
                    if severity == "error":
                        sevkey = "errors"
                    elif severity == "warning":
                        sevkey = "warnings"
                    elif severity == "info":
                        sevkey = "infos"
                    else:
                        continue
                    if sevkey not in all_stats[src_pkgname][package_name]:
                        all_stats[src_pkgname][package_name][sevkey] = 1
                    else:
                        all_stats[src_pkgname][package_name][sevkey] += 1

        return all_stats

    def _get_appstream_url(self, package, bin_pkgname):
        """
        Returns the AppStream URL for the given PackageName in :package.
        """

        src_package = get_or_none(SourcePackageName, pk=package.pk)
        if not src_package:
            return '#'

        if not src_package.main_version:
            return '#'

        component = 'main'
        main_entry = src_package.main_entry
        if main_entry:
            component = main_entry.component
            if not component:
                component = 'main'

        return (
            'https://appstream.debian.org/sid/{}/issues/{}.html'
            .format(component, bin_pkgname)
        )

    def _create_final_stats_report(self, package, package_stats):
        """
        Returns a transformed statistics report to be stored in the database.
        """

        as_report = package_stats.copy()
        for bin_package in list(as_report.keys()):
            # we currently don't want to display info-type hints
            as_report[bin_package].pop('infos', None)
            if as_report[bin_package]:
                as_report[bin_package]['url'] = \
                    self._get_appstream_url(package, bin_package)
            else:
                as_report.pop(bin_package)
        return as_report

    def update_action_item(self, package, package_stats):
        """
        Updates the :class:`ActionItem` for the given package based on the
        AppStream hint statistics given in ``package_stats``.
        If the package has errors or warnings an
        :class:`ActionItem` is created.
        """

        total_warnings = 0
        total_errors = 0
        packageurllist = []
        for bin_pkgname, info in package_stats.items():
            total_warnings += info.get('warnings', 0)
            total_errors += info.get('errors', 0)
            url = self._get_appstream_url(package, bin_pkgname)
            packageurllist.append(f'<a href="{url}">{bin_pkgname}</a>')

        # Get the old action item for this warning, if it exists.
        appstream_action_item = package.get_action_item_for_type(
            self.appstream_action_item_type.type_name)
        if not total_warnings and not total_errors:
            if appstream_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                appstream_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if appstream_action_item is None:
            appstream_action_item = ActionItem(
                package=package,
                item_type=self.appstream_action_item_type)

        as_report = self._create_final_stats_report(package, package_stats)

        if appstream_action_item.extra_data:
            old_extra_data = appstream_action_item.extra_data
            if old_extra_data == as_report:
                # No need to update
                return

        appstream_action_item.extra_data = as_report

        if total_errors and total_warnings:
            short_report = '{} error{} and {} warning{}'.format(
                total_errors,
                's' if total_errors > 1 else '',
                total_warnings,
                's' if total_warnings > 1 else '')
        elif total_errors:
            short_report = '{} error{}'.format(
                total_errors,
                's' if total_errors > 1 else '')
        elif total_warnings:
            short_report = '{} warning{}'.format(
                total_warnings,
                's' if total_warnings > 1 else '')

        appstream_action_item.short_description = \
            self.ITEM_DESCRIPTION.format(packageurllist=",".join(
                packageurllist), report=short_report)

        # If there are errors make the item a high severity issue;
        # otherwise, make sure to set the severity as normal in case the item
        # existed already
        if total_errors:
            appstream_action_item.severity = ActionItem.SEVERITY_HIGH
        else:
            appstream_action_item.severity = ActionItem.SEVERITY_NORMAL

        appstream_action_item.save()

    def execute_main(self):
        self._load_tag_severities()
        all_stats = {}
        repository = Repository.objects.get(default=True)
        arch = "amd64"
        for component in repository.components:
            self._load_appstream_hint_stats(component, arch, all_stats)
        if not all_stats:
            return

        with transaction.atomic():
            # Delete obsolete data
            PackageData.objects.filter(key='appstream').delete()

            # prefetch_related() returns a new queryset; keep its result.
            packages = PackageName.objects.filter(name__in=all_stats.keys())
            packages = packages.prefetch_related('action_items')

            stats = []
            for package in packages:
                package_stats = all_stats[package.name]
                stats.append(
                    PackageData(
                        package=package,
                        key='appstream',
                        value=package_stats
                    )
                )

                # Create an ActionItem if there are errors or warnings
                self.update_action_item(package, package_stats)

            PackageData.objects.bulk_create(stats)
            # Remove action items for packages which no longer have associated
            # AppStream hints.
            ActionItem.objects.delete_obsolete_items(
                [self.appstream_action_item_type], all_stats.keys())
class UpdateTransitionsTask(BaseTask):
    """
    Import the list of ongoing/planned Debian package transitions (from
    ftp-master and release.debian.org) and mirror them as
    :class:`PackageTransition` entries.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    REJECT_LIST_URL = 'https://ftp-master.debian.org/transitions.yaml'
    PACKAGE_TRANSITION_LIST_URL = (
        'https://release.debian.org/transitions/export/packages.yaml')

    def _get_yaml_resource(self, url, **kwargs):
        """
        Gets the YAML resource at the given URL and returns it as a Python
        object.

        :returns: the deserialized document, or ``None`` when the resource
            could not be retrieved (or is unchanged, with
            ``only_if_updated=True``).
        """
        content = get_resource_text(url, **kwargs)
        if content:
            return yaml.safe_load(content)

    def _add_reject_transitions(self, packages):
        """
        Adds the transitions which cause uploads to be rejected to the
        given ``packages`` dict.
        """
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL)
        # The resource may be unavailable; previously this crashed with
        # AttributeError on None — just add nothing in that case.
        if not reject_list:
            return
        for key, transition in reject_list.items():
            for package in transition['packages']:
                entry = packages.setdefault(package, {}).setdefault(key, {})
                entry['reject'] = True
                entry['status'] = 'ongoing'

    def _add_package_transition_list(self, packages):
        """
        Adds the ongoing and planned transitions to the given ``packages``
        dict.
        """
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL)
        # Guard against a failed fetch (None) — see _add_reject_transitions.
        if not package_transition_list:
            return

        wanted_transition_statuses = ('ongoing', 'planned')
        for package_info in package_transition_list:
            package_name = package_info['name']
            for transition_name, status in package_info['list']:
                if status not in wanted_transition_statuses:
                    # Skip transitions with an unwanted status
                    continue

                packages.setdefault(package_name, {})
                packages[package_name].setdefault(transition_name, {})
                packages[package_name][transition_name]['status'] = status

    def execute_main(self):
        # Update the relevant resources first
        kwargs = {
            'force_update': self.force_update,
            'only_if_updated': True,
        }
        reject_list = self._get_yaml_resource(self.REJECT_LIST_URL, **kwargs)
        package_transition_list = self._get_yaml_resource(
            self.PACKAGE_TRANSITION_LIST_URL, **kwargs)

        if reject_list is None and package_transition_list is None:
            # Nothing to do - at least one needs to be updated...
            return

        package_transitions = {}
        self._add_reject_transitions(package_transitions)
        self._add_package_transition_list(package_transitions)

        # Rebuild the transition table atomically so readers never observe
        # the emptied state between delete() and bulk_create().
        with transaction.atomic():
            PackageTransition.objects.all().delete()
            # Get the packages which have transitions
            packages = PackageName.objects.filter(
                name__in=package_transitions.keys())
            transitions = []
            for package in packages:
                for transition_name, data in \
                        package_transitions[package.name].items():
                    transitions.append(PackageTransition(
                        package=package,
                        transition_name=transition_name,
                        status=data.get('status', None),
                        reject=data.get('reject', False)))

            PackageTransition.objects.bulk_create(transitions)
class UpdateExcusesTask(BaseTask):
    """
    Import testing-migration excuses from release.debian.org (britney's
    ``excuses.yaml``) and create action items for packages whose migration
    is overdue.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-testing-migration'
    ITEM_DESCRIPTION = (
        "The package has not entered testing even though the delay is over")
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/testing-migration-action-item.html'

    class AgeVerdict(Enum):
        PKG_OF_AGE = 0
        PKG_TOO_OLD = 1
        PKG_TOO_YOUNG = 2
        PKG_WO_POLICY = 3

    def initialize(self, *args, **kwargs):
        super(UpdateExcusesTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _adapt_excuse_links(self, excuse):
        """
        If the excuse contains any anchor links, convert them to links to
        Distro Tracker package pages. Return the original text unmodified,
        otherwise.
        """
        re_anchor_href = re.compile(r'^#(.*)$')
        html = soup(excuse, 'html.parser')
        for a_tag in html.findAll('a', {'href': True}):
            href = a_tag['href']
            match = re_anchor_href.match(href)
            if not match:
                continue
            # Anchors look like "#<package>[/...]"; keep only the package.
            package = match.group(1).split('/')[0]
            a_tag['href'] = package_url(package)

        return str(html)

    def _skip_excuses_item(self, item_text):
        """Return ``True`` for excuse items that should not be displayed."""
        if not item_text:
            return True
        # We ignore these excuses
        if "Section" in item_text or "Maintainer" in item_text:
            return True
        return False

    def _check_age(self, source):
        """Checks the age of the package and compares it to the age
        requirement for migration.

        :returns: a ``(verdict, age, limit)`` tuple; ``age`` and ``limit``
            are ``None`` when the source has no age policy information.
        """
        if 'policy_info' not in source or 'age' not in source['policy_info']:
            return (self.AgeVerdict.PKG_WO_POLICY, None, None)

        age = source['policy_info']['age']['current-age']
        limit = source['policy_info']['age']['age-requirement']
        if age > limit:
            return (self.AgeVerdict.PKG_TOO_OLD, age, limit)
        elif age < limit:
            return (self.AgeVerdict.PKG_TOO_YOUNG, age, limit)
        else:
            return (self.AgeVerdict.PKG_OF_AGE, age, limit)

    def _extract_problematic(self, source):
        """Return ``(item-name, {age, limit})`` for overdue packages,
        ``None`` otherwise."""
        verdict, age, limit = self._check_age(source)

        if verdict == self.AgeVerdict.PKG_TOO_OLD:
            return (source['item-name'], {'age': age, 'limit': limit})

    @staticmethod
    def _make_excuses_check_dependencies(source):
        """Checks the dependencies of the package (blocked-by and
        migrate-after) and returns a list to display."""

        addendum = []

        if 'dependencies' in source:
            blocked_by = source['dependencies'].get('blocked-by', [])
            after = source['dependencies'].get('migrate-after', [])
            # Avoid listing a package under both headings.
            after = [
                element
                for element in after
                if element not in blocked_by
            ]
            if blocked_by:
                addendum.append("Blocked by: %s" % (
                    html_package_list(blocked_by),
                ))
            if after:
                addendum.append("Migrates after: %s" % (
                    html_package_list(after),
                ))

        return addendum

    @staticmethod
    def _make_excuses_check_verdict(source):
        """Checks the migration policy verdict of the package and builds an
        excuses message depending on the result."""

        addendum = []

        if 'migration-policy-verdict' in source:
            verdict = source['migration-policy-verdict']
            if verdict == 'REJECTED_BLOCKED_BY_ANOTHER_ITEM':
                addendum.append("Migration status: Blocked. Can't migrate "
                                "due to a non-migratable dependency. Check "
                                "status below."
                                )

        return addendum

    def _make_excuses(self, source):
        """Make the excuses list for a source item using the yaml data it
        contains.

        :returns: an ``(item-name, excuses)`` tuple.
        """

        excuses = [
            self._adapt_excuse_links(excuse)
            for excuse in source['excuses']
        ]

        # This is the place where we compute some additional
        # messages that should be added to excuses.
        addendum = []

        addendum.extend(self._make_excuses_check_verdict(source))
        addendum.extend(self._make_excuses_check_dependencies(source))

        excuses = addendum + excuses

        if 'is-candidate' in source:
            if not source['is-candidate']:
                excuses.append("Not considered")

        return (
            source['item-name'],
            excuses,
        )

    def _get_excuses_and_problems(self, content):
        """
        Gets the excuses for each package.
        Also finds a list of packages which have not migrated to testing
        after the necessary time has passed.

        :returns: A two-tuple where the first element is a dict mapping
            package names to a list of excuses. The second element is a dict
            mapping packages names to a problem information. Problem
            information is a dict with the keys ``age`` and ``limit``.
        """
        if 'sources' not in content:
            logger.warning("Invalid format of excuses file")
            return

        sources = content['sources']
        # Items containing '/' are architecture-specific entries; skip them.
        excuses = [
            self._make_excuses(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        problems = [
            self._extract_problematic(source)
            for source in sources
            if '/' not in source['item-name']
        ]
        # _extract_problematic() returns None for non-overdue packages.
        problematic = [p for p in problems if p]
        return dict(excuses), dict(problematic)

    def _create_action_item(self, package, extra_data):
        """
        Creates a :class:`distro_tracker.core.models.ActionItem` for the
        given package including the given extra data. The item indicates
        that there is a problem with the package migrating to testing.
        """
        action_item = \
            package.get_action_item_for_type(self.ACTION_ITEM_TYPE_NAME)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        action_item.short_description = self.ITEM_DESCRIPTION
        if package.main_entry:
            query_string = urlencode({'package': package.name})
            extra_data['check_why_url'] = (
                'https://qa.debian.org/excuses.php'
                '?{query_string}'.format(query_string=query_string))

        action_item.extra_data = extra_data
        action_item.save()

    def _remove_obsolete_action_items(self, problematic):
        """
        Remove action items for packages which are no longer problematic.
        """
        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=problematic.keys())

    def _get_excuses_yaml(self):
        """
        Function returning the content of excuses from debian-release

        :returns: a dict of excuses or ``None`` if the content in the
            cache is up to date.
        """
        url = 'https://release.debian.org/britney/excuses.yaml'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        return yaml.safe_load(content)

    def execute_main(self):
        content_lines = self._get_excuses_yaml()
        if not content_lines:
            return

        result = self._get_excuses_and_problems(content_lines)
        if not result:
            return
        package_excuses, problematic = result

        with transaction.atomic():
            # Remove stale excuses data and action items which are not still
            # problematic.
            self._remove_obsolete_action_items(problematic)
            PackageExcuses.objects.all().delete()

            excuses = []
            packages = SourcePackageName.objects.filter(
                name__in=package_excuses.keys())
            # prefetch_related() returns a new queryset; the previous code
            # discarded the result, making the prefetch a no-op.
            packages = packages.prefetch_related('action_items')
            for package in packages:
                excuse = PackageExcuses(
                    package=package,
                    excuses=package_excuses[package.name])
                excuses.append(excuse)
                if package.name in problematic:
                    self._create_action_item(package, problematic[package.name])

            # Create all excuses in a single query
            PackageExcuses.objects.bulk_create(excuses)
class DebianWatchFileScannerUpdate(BaseTask):
    """
    Import uscan/DEHS upstream-status data from UDD and maintain the
    ``new-upstream-version`` and ``watch-failure`` action items along with
    per-package ``upstream-watch-status`` data entries.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    # The two action item categories managed by this task.
    ACTION_ITEM_TYPE_NAMES = (
        'new-upstream-version',
        'watch-failure',
    )
    ACTION_ITEM_TEMPLATES = {
        'new-upstream-version': "debian/new-upstream-version-action-item.html",
        'watch-failure': "debian/watch-failure-action-item.html",
    }
    # Callables producing the short description from an ActionItem whose
    # extra_data has already been filled in (see update_action_item()).
    ITEM_DESCRIPTIONS = {
        'new-upstream-version': lambda item: (
            'A new upstream version is available: '
            '<a href="{url}">{version}</a>'.format(
                url=item.extra_data['upstream_url'],
                version=item.extra_data['upstream_version'])),
        'watch-failure': lambda item: (
            'Problems while searching for a new upstream version'),
    }
    ITEM_SEVERITIES = {
        'new-upstream-version': ActionItem.SEVERITY_HIGH,
        'watch-failure': ActionItem.SEVERITY_HIGH,
    }

    def initialize(self, *args, **kwargs):
        super(DebianWatchFileScannerUpdate, self).initialize(*args, **kwargs)
        # Map each type name to its (created-or-updated) ActionItemType.
        self.action_item_types = {
            type_name: ActionItemType.objects.create_or_update(
                type_name=type_name,
                full_description_template=self.ACTION_ITEM_TEMPLATES.get(
                    type_name, None))
            for type_name in self.ACTION_ITEM_TYPE_NAMES
        }

    def _get_upstream_status_content(self):
        """Download the raw DEHS JSON export from UDD (or ``None``)."""
        url = 'https://udd.debian.org/cgi-bin/upstream-status.json.cgi'
        return get_resource_text(url)

    def _remove_obsolete_action_items(self, item_type_name,
                                      non_obsolete_packages):
        """
        Removes any existing :class:`ActionItem` with the given type name
        based on the list of package names which should still have the items
        based on the processed stats.
        """
        action_item_type = self.action_item_types[item_type_name]
        ActionItem.objects.delete_obsolete_items(
            item_types=[action_item_type],
            non_obsolete_packages=non_obsolete_packages)

    def get_upstream_status_stats(self, stats):
        """
        Gets the stats from the downloaded data and puts them in the given
        ``stats`` dictionary.
        The keys of the dict are package names.

        :param stats: mutated in place; each package maps to a dict that may
            contain ``upstream_version``, ``upstream_url`` and the
            ``new-upstream-version`` / ``watch-failure`` sub-dicts.
        :returns: A two-tuple where the first item is a list of packages
            which have new upstream versions and the second is a list of
            packages which have watch failures.
        """
        content = self._get_upstream_status_content()
        dehs_data = None
        if content:
            dehs_data = json.loads(content)
        if not dehs_data:
            return [], []

        all_new_versions, all_failures = [], []
        for entry in dehs_data:
            package_name = entry['package']
            stats.setdefault(package_name, {})
            stats[package_name]['upstream_version'] = entry['upstream-version']
            stats[package_name]['upstream_url'] = entry['upstream-url']
            if 'status' in entry and ('Newer version' in entry['status'] or
                                      'newer package' in entry['status']):
                stats[package_name]['new-upstream-version'] = {
                    'upstream_version': entry['upstream-version'],
                    'upstream_url': entry['upstream-url'],
                }
                all_new_versions.append(package_name)
            if entry.get('warnings') or entry.get('errors'):
                # Collapse errors and warnings into a single message;
                # either part may be absent.
                msg = '{}\n{}'.format(
                    entry.get('errors') or '',
                    entry.get('warnings') or '',
                ).strip()
                stats[package_name]['watch-failure'] = {
                    'warning': msg,
                }
                all_failures.append(package_name)

        return all_new_versions, all_failures

    def update_package_info(self, package, stats):
        """
        Updates upstream information of the given package based on the given
        stats. Upstream data is saved as a :class:`PackageData` within the
        `general` key

        :param package: The package to which the upstream info should be
            associated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats which are used to create the upstream info.
        :type stats: :class:`dict`
        """
        try:
            # watch_status is prefetched in execute_main(); reuse the
            # existing entry when present.
            watch_data = package.watch_status[0]
        except IndexError:
            watch_data = PackageData(
                package=package,
                key='upstream-watch-status',
            )

        watch_data.value = stats
        watch_data.save()

    def update_action_item(self, item_type, package, stats):
        """
        Updates the action item of the given type for the given package based
        on the given stats.

        The severity of the item is defined by the :attr:`ITEM_SEVERITIES`
        dict.

        The short descriptions are created by passing the
        :class:`ActionItem` (with extra data already set) to the callables
        defined in :attr:`ITEM_DESCRIPTIONS`.

        :param item_type: The type of the :class:`ActionItem` that should be
            updated.
        :type item_type: string
        :param package: The package to which this action item should be
            associated.
        :type package: :class:`distro_tracker.core.models.PackageName`
        :param stats: The stats which are used to create the action item.
        :type stats: :class:`dict`
        """
        action_item = package.get_action_item_for_type(item_type)
        if action_item is None:
            # Create an action item...
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_types[item_type])

        if item_type in self.ITEM_SEVERITIES:
            action_item.severity = self.ITEM_SEVERITIES[item_type]
        # extra_data must be set before computing the short description:
        # the ITEM_DESCRIPTIONS callables read item.extra_data.
        action_item.extra_data = stats
        action_item.short_description = \
            self.ITEM_DESCRIPTIONS[item_type](action_item)

        action_item.save()

    @transaction.atomic
    def execute_main(self):
        stats = {}
        new_upstream_version, failures = self.get_upstream_status_stats(stats)
        updated_packages_per_type = {
            'new-upstream-version': new_upstream_version,
            'watch-failure': failures,
        }

        # Remove obsolete action items for each of the categories...
        for item_type, packages in updated_packages_per_type.items():
            self._remove_obsolete_action_items(item_type, packages)

        packages = SourcePackageName.objects.filter(
            name__in=stats.keys())
        filter_qs = PackageData.objects.filter(key='upstream-watch-status')
        packages = packages.prefetch_related(
            'action_items',
            Prefetch('data', queryset=filter_qs, to_attr='watch_status')
        )

        # Update action items for each package
        for package in packages:
            for type_name in self.ACTION_ITEM_TYPE_NAMES:
                if type_name in stats[package.name]:
                    # Refresh the action item of this category from the
                    # freshly computed stats.
                    self.update_action_item(
                        type_name, package, stats[package.name][type_name])

            self.update_package_info(package, stats[package.name])
class UpdateSecurityIssuesTask(BaseTask):
    """
    Import CVE data from the Debian security tracker, store per-package
    summaries as ``debian-security`` :class:`PackageData` and maintain one
    ``debian-security-issue-in-<release>`` action item per affected release.

    Updates are incremental: a checksum of each package's issue data is
    stored and unchanged packages are skipped unless ``force_update`` is set.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    # One ActionItemType per release, e.g. 'debian-security-issue-in-sid'.
    ACTION_ITEM_TYPE_NAME = 'debian-security-issue-in-{}'
    ACTION_ITEM_TEMPLATE = 'debian/security-issue-action-item.html'
    ITEM_DESCRIPTION_TEMPLATE = {
        'open': '<a href="{url}">{count} security {issue}</a> in {release}',
        'nodsa':
            '<a href="{url}">{count} low-priority security {issue}</a> '
            'in {release}',
        'none': 'No known security issue in {release}',
    }
    CVE_DATA_URL = 'https://security-tracker.debian.org/tracker/data/json'
    DISTRIBUTIONS_URL = (
        'https://security-tracker.debian.org/tracker/distributions.json'
    )

    def initialize(self, *args, **kwargs):
        super(UpdateSecurityIssuesTask, self).initialize(*args, **kwargs)
        # Per-release ActionItemType cache, filled lazily.
        self._action_item_type = {}
        # In-process caches of the two downloaded JSON documents.
        self._issues = None
        self._distributions = None

    def action_item_type(self, release):
        # NOTE(review): setdefault evaluates create_or_update() on every
        # call even when the release is already cached — presumably cheap
        # enough here; confirm before relying on it as a pure cache lookup.
        return self._action_item_type.setdefault(
            release, ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME.format(release),
                full_description_template=self.ACTION_ITEM_TEMPLATE))

    def _get_distributions(self):
        """Return the distributions.json document, downloading it once."""
        if not self._distributions:
            content = get_resource_text(self.DISTRIBUTIONS_URL)
            self._distributions = json.loads(content)
        return self._distributions

    def _get_support_status(self, release):
        """
        Return support status of a given release as documented by the
        security team in the security tracker.
        """
        return self._get_distributions().get(release, {}).get('support',
                                                              'unknown')

    def _get_issues_content(self):
        """Return the security tracker JSON data, downloading it once."""
        if self._issues:
            return self._issues
        content = get_resource_text(self.CVE_DATA_URL)
        if content:
            self._issues = json.loads(content)
            return self._issues

    @classmethod
    def _update_stats_with_nodsa_entry(cls, stats, nodsa_entry,
                                       entry_id, description):
        """
        Account one "no DSA" issue in ``stats``, classifying it by its
        ``nodsa_reason`` into the maintainer-to-handle or ignored buckets.
        """
        stats['nodsa'] += 1

        nodsa_details = {'description': description,
                         'nodsa': nodsa_entry.get('nodsa', ''),
                         'nodsa_reason': nodsa_entry.get('nodsa_reason', '')
                         }

        nodsa_reason = nodsa_details['nodsa_reason']
        if nodsa_reason == '':
            # No reason given: the maintainer should triage the issue.
            nodsa_details['needs_triaging'] = True
            stats['nodsa_maintainer_to_handle_details'][entry_id] = \
                nodsa_details
        elif nodsa_reason == 'postponed':
            nodsa_details['fixed_via_stable_update'] = True
            stats['nodsa_maintainer_to_handle_details'][entry_id] = \
                nodsa_details
        elif nodsa_reason == 'ignored':
            stats['nodsa_ignored_details'][entry_id] = nodsa_details

    @classmethod
    def get_issues_summary(cls, issues):
        """
        Build per-release counters and detail dicts from a package's issue
        mapping (issue id -> issue data, as found in the tracker JSON).
        """
        result = {}
        for issue_id, issue_data in issues.items():
            for release, data in issue_data['releases'].items():
                stats = result.setdefault(release, {
                    'open': 0,
                    'open_details': {},
                    'nodsa': 0,
                    'unimportant': 0,
                    'next_point_update_details': {},
                    'nodsa_maintainer_to_handle_details': {},
                    'nodsa_ignored_details': {},
                })
                description = issue_data.get('description', '')
                # Resolved and end-of-life entries are not counted at all.
                if (data.get('status', '') == 'resolved' or
                        data.get('urgency', '') == 'end-of-life'):
                    continue
                elif data.get('urgency', '') == 'unimportant':
                    stats['unimportant'] += 1
                elif data.get('next_point_update', False):
                    stats['next_point_update_details'][issue_id] = \
                        {'description': description}
                elif data.get('nodsa', False) is not False:
                    cls._update_stats_with_nodsa_entry(stats,
                                                       data, issue_id,
                                                       description
                                                       )
                else:
                    stats['open'] += 1
                    stats['open_details'][issue_id] = \
                        {'description': description}

        return result

    @classmethod
    def get_issues_stats(cls, content):
        """
        Gets package issue stats from Debian's security tracker.
        """
        stats = {}
        for pkg, issues in content.items():
            stats[pkg] = cls.get_issues_summary(issues)
        return stats

    def _get_short_description(self, key, action_item):
        """Render the ITEM_DESCRIPTION_TEMPLATE entry for ``key``."""
        count = action_item.extra_data['security_issues_count']
        url = 'https://security-tracker.debian.org/tracker/source-package/{}'
        return self.ITEM_DESCRIPTION_TEMPLATE[key].format(
            count=count,
            issue='issues' if count > 1 else 'issue',
            release=action_item.extra_data.get('release', 'sid'),
            url=url.format(action_item.package.name),
        )

    def update_action_item(self, stats, action_item):
        """
        Updates the ``debian-security-issue`` action item based on the
        security issues.
        """

        security_issues_count = stats['open'] + stats['nodsa']
        action_item.extra_data['security_issues_count'] = security_issues_count
        action_item.extra_data['support_status'] = (
            self._get_support_status(action_item.extra_data['release'])
        )

        for base_key in ['open',
                         'next_point_update',
                         'nodsa_maintainer_to_handle',
                         'nodsa_ignored']:
            details_key = base_key + '_details'
            count_key = base_key + '_count'

            action_item.extra_data[details_key] = stats[details_key]
            action_item.extra_data[count_key] = len(stats[details_key])

        # nodsa_next_point_update / nodsa_ignored_details are displayed
        # only if there is anything else to show
        nodsa_create_action = (stats['nodsa'] -
                               len(stats['nodsa_ignored_details'])) > 0

        # Severity: open issues > actionable nodsa issues > nothing.
        if stats['open']:
            action_item.severity = ActionItem.SEVERITY_HIGH
            action_item.short_description = \
                self._get_short_description('open', action_item)
        elif nodsa_create_action:
            action_item.severity = ActionItem.SEVERITY_LOW
            action_item.short_description = \
                self._get_short_description('nodsa', action_item)
        else:
            action_item.severity = ActionItem.SEVERITY_WISHLIST
            action_item.short_description = \
                self._get_short_description('none', action_item)

    @classmethod
    def generate_package_data(cls, issues):
        """Build the value stored in the ``debian-security`` PackageData."""
        return {
            'details': issues,
            'stats': cls.get_issues_summary(issues),
            'checksum': get_data_checksum(issues)
        }

    def want_action_item(self, pkgdata, release):
        """
        Decide whether the package deserves an action item for ``release``,
        depending on that release's support status and issue counts.
        """
        stats = pkgdata.value.get('stats', {}).get(release)
        if stats is None:
            return False

        supported_by = self._get_support_status(release)
        if supported_by == "end-of-life":
            return False
        elif supported_by == "security":
            # nodsa issues count too while the release has security support.
            count = stats.get('open', 0) + stats.get('nodsa', 0)
        else:
            count = stats.get('open', 0)

        if count == 0:
            return False

        return True

    def process_pkg_action_items(self, pkgdata, existing_action_items):
        """
        Reconcile a package's existing per-release action items with its
        current stats.

        :returns: ``(to_add, to_update, to_drop)`` lists of ActionItems.
        """
        release_ai = {}
        to_add = []
        to_update = []
        to_drop = []
        global_stats = pkgdata.value.get('stats', {})
        for ai in existing_action_items:
            release = ai.extra_data['release']
            release_ai[release] = ai
        for release, stats in global_stats.items():
            ai = release_ai.get(release)

            if self.want_action_item(pkgdata, release):
                if ai:
                    to_update.append(ai)
                else:
                    ai = ActionItem(
                        item_type=self.action_item_type(release),
                        package=pkgdata.package,
                        extra_data={'release': release}
                    )
                    to_add.append(ai)
                self.update_action_item(stats, ai)
            else:
                if ai:
                    to_drop.append(ai)

        return to_add, to_update, to_drop

    def execute_main(self):
        # Fetch all debian-security PackageData
        all_pkgdata = PackageData.objects.select_related(
            'package').filter(key='debian-security').only(
                'package__name', 'value')

        all_data = {}
        packages = {}
        for pkgdata in all_pkgdata:
            all_data[pkgdata.package.name] = pkgdata
            packages[pkgdata.package.name] = pkgdata.package

        # Fetch all debian-security ActionItems
        pkg_action_items = collections.defaultdict(lambda: [])
        all_action_items = ActionItem.objects.select_related(
            'package').filter(
                item_type__type_name__startswith='debian-security-issue-in-')
        for action_item in all_action_items:
            pkg_action_items[action_item.package.name].append(action_item)

        # Check for changes on distributions.json
        distributions_checksum = get_data_checksum(self._get_distributions())
        if self.data.get('distributions_checksum') != distributions_checksum:
            # New distributions.json, force update all action items
            self.force_update = True
            self.data['distributions_checksum'] = distributions_checksum

        # Scan the security tracker data
        content = self._get_issues_content()
        to_add = []
        to_update = []
        for pkgname, issues in content.items():
            if pkgname in all_data:
                # Check if we need to update the existing data
                checksum = get_data_checksum(issues)
                if not self.force_update and \
                        all_data[pkgname].value.get('checksum', '') == checksum:
                    continue
                # Update the data
                pkgdata = all_data[pkgname]
                pkgdata.value = self.generate_package_data(issues)
                to_update.append(pkgdata)
            else:
                # Add data for a new package
                package, _ = PackageName.objects.get_or_create(name=pkgname)
                to_add.append(
                    PackageData(
                        package=package,
                        key='debian-security',
                        value=self.generate_package_data(issues)
                    )
                )
        # Process action items
        ai_to_add = []
        ai_to_update = []
        ai_to_drop = []
        for pkgdata in itertools.chain(to_add, to_update):
            add, update, drop = self.process_pkg_action_items(
                pkgdata, pkg_action_items[pkgdata.package.name])
            ai_to_add.extend(add)
            ai_to_update.extend(update)
            ai_to_drop.extend(drop)
        # Sync in database
        with transaction.atomic():
            # Delete obsolete data
            PackageData.objects.filter(
                key='debian-security').exclude(
                    package__name__in=content.keys()).delete()
            ActionItem.objects.filter(
                item_type__type_name__startswith='debian-security-issue-in-'
            ).exclude(package__name__in=content.keys()).delete()
            ActionItem.objects.filter(
                item_type__type_name__startswith='debian-security-issue-in-',
                id__in=[ai.id for ai in ai_to_drop]).delete()
            # Add new entries
            PackageData.objects.bulk_create(to_add)
            ActionItem.objects.bulk_create(ai_to_add)
            # Update existing entries
            for pkgdata in to_update:
                pkgdata.save()
            for ai in ai_to_update:
                ai.save()
class UpdatePiuPartsTask(BaseTask):
    """
    Retrieves the piuparts stats for all the suites defined in the
    :data:`distro_tracker.project.local_settings.DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES`
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    ACTION_ITEM_TYPE_NAME = 'debian-piuparts-test-fail'
    ACTION_ITEM_TEMPLATE = 'debian/piuparts-action-item.html'
    ITEM_DESCRIPTION = 'piuparts found (un)installation error(s)'

    def initialize(self, *args, **kwargs):
        super(UpdatePiuPartsTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def _get_piuparts_content(self, suite):
        """
        :returns: The content of the piuparts report for the given suite
            or ``None`` if there is no data for the particular suite.
        """
        url = 'https://piuparts.debian.org/{suite}/sources.txt'
        return get_resource_text(url.format(suite=suite))

    def get_piuparts_stats(self):
        """
        :returns: A dict mapping package names to the list of suites in
            which their piuparts run is in state ``fail``.
        """
        suites = getattr(settings, 'DISTRO_TRACKER_DEBIAN_PIUPARTS_SUITES', [])
        failing_packages = {}
        for suite in suites:
            content = self._get_piuparts_content(suite)
            if content is None:
                logger.info("There is no piuparts for suite: %s", suite)
                continue

            for line in content.splitlines():
                # Report lines look like "package: status"; skip anything
                # malformed instead of aborting the whole task.
                if ':' not in line:
                    continue
                package_name, status = line.split(':', 1)
                package_name, status = package_name.strip(), status.strip()
                if status == 'fail':
                    failing_packages.setdefault(package_name, []).append(suite)

        return failing_packages

    def create_action_item(self, package, suites):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the package based on the list of suites in which the
        piuparts installation test failed.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                short_description=self.ITEM_DESCRIPTION)

        if action_item.extra_data:
            existing_items = action_item.extra_data.get('suites', [])
            # sorted() already returns a list; no need to wrap it in list().
            if sorted(existing_items) == sorted(suites):
                # No need to update this item
                return
        action_item.extra_data = {
            'suites': suites,
        }
        action_item.save()

    def execute_main(self):
        failing_packages = self.get_piuparts_stats()

        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=failing_packages.keys())

        packages = SourcePackageName.objects.filter(
            name__in=failing_packages.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            self.create_action_item(package, failing_packages[package.name])
1830class UpdateUbuntuStatsTask(BaseTask):
1831 """
1832 The task updates Ubuntu stats for packages. These stats are displayed in a
1833 separate panel.
1834 """
    class Scheduler(IntervalScheduler):
        # Run every three hours.
        interval = 3600 * 3
1839 def initialize(self, *args, **kwargs):
1840 super(UpdateUbuntuStatsTask, self).initialize(*args, **kwargs)
1842 def _get_versions_content(self):
1843 url = 'https://udd.debian.org/cgi-bin/ubuntupackages.cgi'
1844 return get_resource_text(url)
1846 def get_ubuntu_versions(self):
1847 """
1848 Retrieves the Ubuntu package versions.
1850 :returns: A dict mapping package names to Ubuntu versions.
1851 """
1852 content = self._get_versions_content()
1854 package_versions = {}
1855 for line in content.splitlines():
1856 package, version = line.split(' ', 1)
1857 version = version.strip()
1858 package_versions[package] = version
1860 return package_versions
1862 def _get_bug_stats_content(self):
1863 url = 'https://udd.debian.org/cgi-bin/ubuntubugs.cgi'
1864 return get_resource_text(url)
1866 def get_ubuntu_bug_stats(self):
1867 """
1868 Retrieves the Ubuntu bug stats of a package. Bug stats contain the
1869 count of bugs and the count of patches.
1871 :returns: A dict mapping package names to a dict of package stats.
1872 """
1873 content = self._get_bug_stats_content()
1875 bug_stats = {}
1876 for line in content.splitlines():
1877 package_name, bug_count, patch_count = line.split("|", 2)
1878 try:
1879 bug_count, patch_count = int(bug_count), int(patch_count)
1880 except ValueError:
1881 continue
1882 bug_stats[package_name] = {
1883 'bug_count': bug_count,
1884 'patch_count': patch_count,
1885 }
1887 return bug_stats
1889 def _get_ubuntu_patch_diff_content(self):
1890 url = 'https://patches.ubuntu.com/PATCHES'
1891 return get_resource_text(url)
1893 def get_ubuntu_patch_diffs(self):
1894 """
1895 Retrieves the Ubuntu patch diff information. The information consists
1896 of the diff URL and the version of the Ubuntu package to which the
1897 diff belongs to.
1899 :returns: A dict mapping package names to diff information.
1900 """
1901 content = self._get_ubuntu_patch_diff_content()
1903 patch_diffs = {}
1904 re_diff_version = re.compile(r'_(\S+)\.patch')
1905 for line in content.splitlines():
1906 package_name, diff_url = line.split(' ', 1)
1907 # Extract the version of the package from the diff url
1908 match = re_diff_version.search(diff_url)
1909 if not match: 1909 ↛ 1911line 1909 didn't jump to line 1911, because the condition on line 1909 was never true
1910 # Invalid URL: no version
1911 continue
1912 version = match.group(1)
1913 patch_diffs[package_name] = {
1914 'version': version,
1915 'diff_url': diff_url
1916 }
1918 return patch_diffs
1920 def execute_main(self):
1921 package_versions = self.get_ubuntu_versions()
1922 bug_stats = self.get_ubuntu_bug_stats()
1923 patch_diffs = self.get_ubuntu_patch_diffs()
1925 obsolete_ubuntu_pkgs = UbuntuPackage.objects.exclude(
1926 package__name__in=package_versions.keys())
1927 obsolete_ubuntu_pkgs.delete()
1929 packages = PackageName.objects.filter(name__in=package_versions.keys())
1930 packages = packages.prefetch_related('ubuntu_package')
1932 for package in packages:
1933 version = package_versions[package.name]
1934 bugs = bug_stats.get(package.name, None)
1935 diff = patch_diffs.get(package.name, None)
1937 try:
1938 ubuntu_package = package.ubuntu_package
1939 ubuntu_package.version = version
1940 ubuntu_package.bugs = bugs
1941 ubuntu_package.patch_diff = diff
1942 ubuntu_package.save()
1943 except UbuntuPackage.DoesNotExist:
1944 ubuntu_package = UbuntuPackage.objects.create(
1945 package=package,
1946 version=version,
1947 bugs=bugs,
1948 patch_diff=diff)
class UpdateWnppStatsTask(BaseTask):
    """
    The task updates the WNPP bugs for all packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    ACTION_ITEM_TYPE_NAME = 'debian-wnpp-issue'
    ACTION_ITEM_TEMPLATE = 'debian/wnpp-action-item.html'
    ITEM_DESCRIPTION = '<a href="{url}">{wnpp_type}: {wnpp_msg}</a>'

    # Human-readable explanation for each known WNPP bug type; '?' is the
    # fallback used for unknown types. Hoisted to a class constant so the
    # dict is not rebuilt on every update_action_item() call.
    WNPP_MESSAGES = {
        'O': "This package has been orphaned and needs a maintainer.",
        'ITA': "Someone intends to adopt this package.",
        'RFA': "The maintainer wants to pass over package maintenance.",
        'RFH': "The maintainer is looking for help with this package.",
        'ITP': "Someone is planning to reintroduce this package.",
        'RFP': "There is a request to reintroduce this package.",
        'RM': "This package has been requested to be removed.",
        'RFS': "A sponsor is needed to update this package.",
        '?': "The WNPP database contains an entry for this package."
    }

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_wnpp_stats(self):
        """
        Retrieves and parses the wnpp stats for all packages. WNPP stats
        include the WNPP type and the BTS bug id.

        :returns: A dict mapping package names to wnpp stats, or ``None``
            when the cached content is already up to date.
        """
        url = 'https://qa.debian.org/data/bts/wnpp_rm'
        content = get_resource_text(url, only_if_updated=True)
        if content is None:
            return

        wnpp_stats = {}
        for line in content.splitlines():
            line = line.strip()
            try:
                package_name, wnpp_type, bug_id = line.split('|')[0].split()
                bug_id = int(bug_id)
            except ValueError:
                # Badly formatted line (wrong field count or bug number)
                continue
            # Strip the colon from the end of the package name
            package_name = package_name[:-1]

            wnpp_stats[package_name] = {
                'wnpp_type': wnpp_type,
                'bug_id': bug_id,
            }

        return wnpp_stats

    def update_action_item(self, package, stats):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the given type indicating that the package has a WNPP
        issue.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if not action_item:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)

        # Check if the stats have actually been changed
        if action_item.extra_data:
            if action_item.extra_data.get('wnpp_info', None) == stats:
                # Nothing to do -- still the same data
                return

        # Update the data since something has changed
        try:
            release = package.main_entry.repository.suite or \
                package.main_entry.repository.codename
        except AttributeError:
            # No main entry / repository information available
            release = None

        wnpp_type = stats['wnpp_type']
        # Fall back to the generic message for unknown WNPP types
        wnpp_msg = self.WNPP_MESSAGES.get(wnpp_type, self.WNPP_MESSAGES['?'])

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            url='https://bugs.debian.org/{}'.format(stats['bug_id']),
            wnpp_type=wnpp_type, wnpp_msg=wnpp_msg)
        action_item.extra_data = {
            'wnpp_info': stats,
            'release': release,
        }
        action_item.save()

    def update_depneedsmaint_action_item(self, package_needs_maintainer, stats):
        """
        Updates the 'debian-depneedsmaint' action items of every package
        which depends on ``package_needs_maintainer``.
        """
        short_description_template = \
            'Depends on packages which need a new maintainer'
        # NOTE(review): a stray `package_needs_maintainer.get_absolute_url()`
        # call whose result was discarded has been removed here.
        action_item_type = ActionItemType.objects.create_or_update(
            type_name='debian-depneedsmaint',
            full_description_template='debian/depneedsmaint-action-item.html')
        dependencies = SourcePackageDeps.objects.filter(
            dependency=package_needs_maintainer)
        for dependency in dependencies:
            package = dependency.source
            action_item = package.get_action_item_for_type(action_item_type)
            if not action_item:
                action_item = ActionItem(
                    package=package,
                    item_type=action_item_type,
                    extra_data={})

            pkgdata = {
                'bug': stats['bug_id'],
                'details': dependency.details,
            }

            if (action_item.extra_data.get(package_needs_maintainer.name, {}) ==
                    pkgdata):
                # Nothing has changed
                continue

            action_item.short_description = short_description_template
            action_item.extra_data[package_needs_maintainer.name] = pkgdata

            action_item.save()

    @transaction.atomic
    def execute_main(self):
        wnpp_stats = self.get_wnpp_stats()
        if wnpp_stats is None:
            # Nothing to do: cached content up to date
            return

        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=wnpp_stats.keys())
        # Remove obsolete action items for packages whose dependencies need a
        # new maintainer.
        packages_need_maintainer = [
            name for name, stats in wnpp_stats.items()
            if stats['wnpp_type'] in ('O', 'RFA')
        ]
        packages_depneeds_maint = [
            package.name for package in SourcePackageName.objects.filter(
                source_dependencies__dependency__name__in=packages_need_maintainer)  # noqa
        ]
        ActionItem.objects.delete_obsolete_items(
            item_types=[
                ActionItemType.objects.get_or_create(
                    type_name='debian-depneedsmaint')[0],
            ],
            non_obsolete_packages=packages_depneeds_maint)

        # Drop all reverse references
        for ai in ActionItem.objects.filter(
                item_type__type_name='debian-depneedsmaint'):
            ai.extra_data = {}
            ai.save()

        packages = SourcePackageName.objects.filter(name__in=wnpp_stats.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            stats = wnpp_stats[package.name]
            self.update_action_item(package, stats)
            # Update action items for packages which depend on this one to
            # indicate that a dependency needs a new maintainer.
            if package.name in packages_need_maintainer:
                self.update_depneedsmaint_action_item(package, stats)
class UpdateNewQueuePackages(BaseTask):
    """
    Updates the versions of source packages found in the NEW queue.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    # PackageData key under which the NEW queue information is stored.
    DATA_KEY = 'debian-new-queue-info'

    def extract_package_info(self, content):
        """
        Extracts the package information from the content of the NEW queue.

        :returns: A dict mapping package names to a dict mapping the
            distribution name in which the package is found to the version
            information for the most recent version of the package in the dist.
        """
        packages = {}
        # Loop-invariant: the required fields never change per stanza.
        necessary_fields = ('Source', 'Queue', 'Version', 'Distribution')
        for stanza in deb822.Deb822.iter_paragraphs(content.splitlines()):
            if not all(field in stanza for field in necessary_fields):
                continue
            if stanza['Queue'] != 'new':
                continue

            # Save only the most recent version; AptPkgVersion provides
            # Debian version ordering directly as the key function.
            version = max(stanza['Version'].split(), key=AptPkgVersion)

            package_name = stanza['Source']
            pkginfo = packages.setdefault(package_name, {})
            distribution = stanza['Distribution']
            if distribution in pkginfo:
                current_version = pkginfo[distribution]['version']
                if debian_support.version_compare(version, current_version) < 0:
                    # The already saved version is more recent than this one.
                    continue

            pkginfo[distribution] = {
                'version': version,
            }

        return packages

    def _get_new_content(self):
        url = 'https://ftp-master.debian.org/new.822'
        return get_resource_text(url, force_update=self.force_update,
                                 only_if_updated=True)

    def execute_main(self):
        content = self._get_new_content()
        if content is None:
            # Cached content is up to date: nothing to do.
            return

        all_package_info = self.extract_package_info(content)

        packages = SourcePackageName.objects.filter(
            name__in=all_package_info.keys())

        with transaction.atomic():
            # Drop old entries
            PackageData.objects.filter(key=self.DATA_KEY).delete()
            # Prepare and bulk-create the current entries
            data = [
                PackageData(
                    key=self.DATA_KEY,
                    package=package,
                    value=all_package_info[package.name])
                for package in packages
            ]
            PackageData.objects.bulk_create(data)
class UpdateAutoRemovalsStatsTask(BaseTask):
    """
    A task for updating autoremovals information on all packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    ACTION_ITEM_TYPE_NAME = 'debian-autoremoval'
    ACTION_ITEM_TEMPLATE = 'debian/autoremoval-action-item.html'
    ITEM_DESCRIPTION = ('Marked for autoremoval on {removal_date}' +
                        '{dependencies}: {bugs}')

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_autoremovals_stats(self):
        """
        Retrieves and parses the autoremoval stats for all packages.
        Autoremoval stats include the BTS bugs id.

        :returns: A dict mapping package names to autoremoval stats, or
            ``None`` when the cached content is already up to date.
        """
        content = get_resource_text(
            'https://udd.debian.org/cgi-bin/autoremovals.yaml.cgi',
            force_update=self.force_update,
            only_if_updated=True
        )
        if content:
            return yaml.safe_load(content)

    def update_action_item(self, package, stats):
        """
        Creates an :class:`ActionItem <distro_tracker.core.models.ActionItem>`
        instance for the given type indicating that the package has an
        autoremoval issue.
        """
        action_item = package.get_action_item_for_type(self.action_item_type)
        if not action_item:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)

        bugs_dependencies = stats.get('bugs_dependencies', [])
        buggy_dependencies = stats.get('buggy_dependencies', [])
        reverse_dependencies = stats.get('rdeps', [])
        all_bugs = stats['bugs'] + bugs_dependencies
        link = '<a href="https://bugs.debian.org/{}">#{}</a>'
        # strftime() always returns str on Python 3, so the former
        # bytes-decoding fallback was dead code and has been removed.
        removal_date = stats['removal_date'].strftime('%d %B')

        action_item.short_description = self.ITEM_DESCRIPTION.format(
            removal_date=removal_date,
            dependencies=(' due to ' + html_package_list(
                buggy_dependencies) if buggy_dependencies else ''),
            bugs=', '.join(link.format(bug, bug) for bug in all_bugs))

        # datetime objects are not JSON-serializable, convert them ourselves
        for key, value in stats.items():
            if hasattr(value, 'strftime'):
                stats[key] = value.strftime('%a %d %b %Y')

        action_item.extra_data = {
            'stats': stats,
            'removal_date': stats['removal_date'],
            'version': stats.get('version', ''),
            'bugs': ', '.join(link.format(bug, bug) for bug in stats['bugs']),
            'bugs_dependencies': ', '.join(
                link.format(bug, bug) for bug in bugs_dependencies),
            'buggy_dependencies':
                html_package_list(buggy_dependencies),
            'reverse_dependencies':
                html_package_list(reverse_dependencies),
            'number_rdeps': len(reverse_dependencies)}
        action_item.save()

    def execute_main(self):
        autoremovals_stats = self.get_autoremovals_stats()
        if autoremovals_stats is None:
            # Nothing to do: cached content up to date
            return

        ActionItem.objects.delete_obsolete_items(
            item_types=[self.action_item_type],
            non_obsolete_packages=autoremovals_stats.keys())

        packages = SourcePackageName.objects.filter(
            name__in=autoremovals_stats.keys())
        packages = packages.prefetch_related('action_items')

        for package in packages:
            self.update_action_item(package, autoremovals_stats[package.name])
class UpdatePackageScreenshotsTask(BaseTask):
    """
    Check if a screenshot exists on screenshots.debian.net, and add a
    key to PackageData if it does.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 24

    # PackageData key under which the screenshot flag is stored.
    DATA_KEY = 'screenshots'

    def _get_screenshots(self):
        """Fetch and decode the JSON package list, or None when unchanged."""
        url = 'https://screenshots.debian.net/json/packages'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        return json.loads(content)

    def execute_main(self):
        content = self._get_screenshots()
        if content is None:
            return

        packages_with_screenshots = []
        for item in content['packages']:
            try:
                package = SourcePackageName.objects.get(name=item['name'])
                packages_with_screenshots.append(package)
            except SourcePackageName.DoesNotExist:
                pass

        with transaction.atomic():
            # Use DATA_KEY here too (was a duplicated string literal).
            PackageData.objects.filter(key=self.DATA_KEY).delete()

            data = []
            for package in packages_with_screenshots:
                try:
                    screenshot_info = package.data.get(key=self.DATA_KEY)
                    screenshot_info.value['screenshots'] = 'true'
                except PackageData.DoesNotExist:
                    screenshot_info = PackageData(
                        key=self.DATA_KEY,
                        package=package,
                        value={'screenshots': 'true'})

                data.append(screenshot_info)

            PackageData.objects.bulk_create(data)
class UpdateBuildReproducibilityTask(BaseTask):
    """
    Tracks the reproducible-builds status of packages and raises action
    items for packages that fail or do not build reproducibly.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    BASE_URL = 'https://tests.reproducible-builds.org'
    ACTION_ITEM_TYPE_NAME = 'debian-build-reproducibility'
    ACTION_ITEM_TEMPLATE = 'debian/build-reproducibility-action-item.html'
    # PackageData key under which the reproducibility status is stored
    # (was a duplicated string literal in execute_main()).
    DATA_KEY = 'reproducibility'
    # Maps a status to its action item short description; a None value
    # means the status does not warrant an action item.
    ITEM_DESCRIPTION = {
        'blacklisted': '<a href="{url}">Blacklisted</a> from build '
                       'reproducibility testing',
        'FTBFS': '<a href="{url}">Fails to build</a> during reproducibility '
                 'testing',
        'reproducible': None,
        'FTBR': '<a href="{url}">Does not build reproducibly</a> '
                'during testing',
        '404': None,
        'not for us': None,
    }

    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_build_reproducibility(self):
        """
        Downloads and parses the reproducibility tracker export.

        :returns: A dict mapping package names to their status, or ``None``
            when the cached content is already up to date.
        """
        url = '{}/debian/reproducible-tracker.json'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        packages = {}
        for item in json.loads(content):
            package = item['package']
            status = item['status']
            # Record every package once, letting an "important" status (one
            # with an action item description) override earlier entries.
            missing = package not in packages
            important = self.ITEM_DESCRIPTION.get(status) is not None
            if important or missing:
                packages[package] = status

        return packages

    def update_action_item(self, package, status):
        """
        Creates or refreshes the reproducibility action item of a package.

        :returns: True when an action item was saved, False when the status
            does not warrant one.
        """
        description = self.ITEM_DESCRIPTION.get(status)

        if not description:  # Not worth an action item
            return False

        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_NORMAL)

        url = "{}/debian/rb-pkg/{}.html".format(self.BASE_URL, package.name)
        action_item.short_description = description.format(url=url)
        action_item.save()
        return True

    def execute_main(self):
        reproducibilities = self.get_build_reproducibility()
        if reproducibilities is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(key=self.DATA_KEY).delete()

            packages = []
            data = []

            for name, status in reproducibilities.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    if self.update_action_item(package, status):
                        packages.append(package)
                except SourcePackageName.DoesNotExist:
                    continue

                reproducibility_info = PackageData(
                    key=self.DATA_KEY,
                    package=package,
                    value={'reproducibility': status})
                data.append(reproducibility_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(data)
class MultiArchHintsTask(BaseTask):
    """
    Raises action items for packages flagged by the multiarch hinter.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTIONS_WEB = 'https://wiki.debian.org/MultiArch/Hints'
    ACTIONS_URL = 'https://dedup.debian.net/static/multiarch-hints.yaml'
    ACTION_ITEM_TYPE_NAME = 'debian-multiarch-hints'
    ACTION_ITEM_TEMPLATE = 'debian/multiarch-hints.html'
    ACTION_ITEM_DESCRIPTION = \
        '<a href="{link}">Multiarch hinter</a> reports {count} issue(s)'

    def initialize(self, *args, **kwargs):
        super(MultiArchHintsTask, self).initialize(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)
        # Reverse mapping: severity name -> numeric severity value.
        self.SEVERITIES = {
            name: value for value, name in ActionItem.SEVERITIES
        }

    def get_data(self):
        """Download and parse the multiarch hints YAML export."""
        raw = get_resource_text(self.ACTIONS_URL)
        if raw:
            return yaml.safe_load(raw)

    def get_packages(self):
        """
        Aggregates hints per source package, keeping the highest severity
        seen for each package.
        """
        data = self.get_data()
        if data is None:
            return
        if data['format'] != 'multiarch-hints-1.0':
            return None

        wishlist = ActionItem.SEVERITY_WISHLIST
        packages = collections.defaultdict(dict)
        for hint in data['hints']:
            if 'source' not in hint:
                continue
            entry = packages[hint['source']]
            severity = self.SEVERITIES.get(hint['severity'], wishlist)
            entry['severity'] = max(severity, entry.get('severity', wishlist))
            entry.setdefault('hints', []).append(
                (hint['description'], hint['link']))
        return packages

    def update_action_item(self, package, severity, description, extra_data):
        """Create or refresh the multiarch-hints action item of a package."""
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type)
        action_item.severity = severity
        action_item.short_description = description
        action_item.extra_data = extra_data
        action_item.save()

    def execute_main(self):
        packages = self.get_packages()
        if not packages:
            return

        with transaction.atomic():
            for name, data in packages.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                except SourcePackageName.DoesNotExist:
                    continue

                description = self.ACTION_ITEM_DESCRIPTION.format(
                    count=len(data['hints']), link=self.ACTIONS_WEB)
                self.update_action_item(package, data['severity'], description,
                                        data['hints'])

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages.keys())
class UpdateVcsWatchTask(BaseTask):
    """
    Updates packages' vcswatch stats.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    # Action item bookkeeping for vcswatch warnings and errors.
    VCSWATCH_ACTION_ITEM_TYPE_NAME = 'vcswatch-warnings-and-errors'
    VCSWATCH_ACTION_ITEM_TEMPLATE = 'debian/vcswatch-action-item.html'
    # Action item bookkeeping for open Salsa merge requests.
    SALSA_MR_ACTION_ITEM_TYPE_NAME = 'salsa-merge-requests'
    SALSA_MR_ACTION_ITEM_TEMPLATE = 'debian/salsa-mr-action-item.html'
    SALSA_MR_SHORT_DESCRIPTION = '<a href="{url}">{count_str}</a> in Salsa'
    # Per-package vcswatch QA page and the bulk JSON export read by the task.
    VCSWATCH_URL = 'https://qa.debian.org/cgi-bin/vcswatch?package=%(package)s'
    VCSWATCH_DATA_URL = 'https://qa.debian.org/data/vcswatch/vcswatch.json.gz'

    # Maps each vcswatch status to the action item short description
    # template and severity. "OK" is deliberately absent: it clears the
    # action item instead of creating one (see update_action_item()).
    # "DEFAULT" is the fallback for statuses not listed here.
    VCSWATCH_STATUS_DICT = {
        "NEW": {
            "description":
                '<a href="{vcswatch_url}">version in VCS is newer</a> than in '
                'repository, is it time to upload?',
            "severity": ActionItem.SEVERITY_NORMAL,
        },
        "COMMITS": {
            "description":
                '<a href="{vcswatch_url}">{commits} new commit{commits_s}</a> '
                'since last upload, is it time to release?',
            "severity": ActionItem.SEVERITY_NORMAL,
        },
        "OLD": {
            'description':
                'The <a href="{vcswatch_url}">VCS repository is not up to '
                'date</a>, push the missing commits.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "UNREL": {
            "description":
                'The <a href="{vcswatch_url}">VCS repository is not up to '
                'date</a>, push the missing commits.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "ERROR": {
            "description":
                '<a href="{vcswatch_url}">Failed to analyze the VCS '
                'repository</a>. Please troubleshoot and fix the issue.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
        "DEFAULT": {
            "description":
                '<a href="{url}">Unexpected status</a> ({status}) reported by '
                'VcsWatch.',
            "severity": ActionItem.SEVERITY_HIGH,
        },
    }
2587 def initialize(self, *args, **kwargs):
2588 super(UpdateVcsWatchTask, self).initialize(*args, **kwargs)
2589 self.vcswatch_ai_type = ActionItemType.objects.create_or_update(
2590 type_name=self.VCSWATCH_ACTION_ITEM_TYPE_NAME,
2591 full_description_template=self.VCSWATCH_ACTION_ITEM_TEMPLATE
2592 )
2593 self.salsa_mr_ai_type = ActionItemType.objects.create_or_update(
2594 type_name=self.SALSA_MR_ACTION_ITEM_TYPE_NAME,
2595 full_description_template=self.SALSA_MR_ACTION_ITEM_TEMPLATE
2596 )
2598 def get_vcswatch_data(self):
2599 text = get_resource_text(self.VCSWATCH_DATA_URL)
2601 if text is None: 2601 ↛ 2602line 2601 didn't jump to line 2602, because the condition on line 2601 was never true
2602 return
2604 # There's some text, let's load!
2605 data = json.loads(text)
2607 out = {}
2608 # This allows to save a lot of list search later.
2609 for entry in data:
2610 out[entry[u'package']] = entry
2612 return out
2614 def clean_package_info(self, package_infos_without_watch, todo):
2615 """Takes a list of :class:`PackageData` which do not
2616 have a watch entry and cleans it. Then schedule in todo what
2617 to do with them.
2618 """
2619 for package_info in package_infos_without_watch:
2620 if 'QA' in package_info.value: 2620 ↛ 2619line 2620 didn't jump to line 2619, because the condition on line 2620 was never false
2621 package_info.value.pop('QA')
2622 if (list(package_info.value.keys()) == ['checksum'] or
2623 not package_info.value.keys()):
2624 todo['drop']['package_infos'].append(package_info)
2625 else:
2626 package_info.value['checksum'] = get_data_checksum(
2627 package_info.value
2628 )
2629 todo['update']['package_infos'].append(package_info)
    def update_action_item(self, package, vcswatch_data, action_item, todo):
        """
        For a given :class:`ActionItem` and a given vcswatch data, updates
        properly the todo dict if required.

        Returns dependingly on what has been done. If something is to
        be updated, returns True, if nothing is to be updated, returns
        False. If the calling loop should `continue`, returns `None`.

        :param package: the package the action item belongs to
        :param vcswatch_data: the vcswatch entry for this package
        :param action_item: the existing action item, or None
        :param todo: dict of add/update/drop work lists mutated in place

        :rtype: bool or `None`
        """

        package_status = vcswatch_data['status']

        if package_status == "OK":
            # Everything is fine, let's purge the action item. Not the
            # package extracted info as its QA url is still relevant.
            if action_item:
                todo['drop']['action_items'].append(action_item)

            # Nothing more to do!
            return False

        # NOT BEFORE "OK" check!!
        # Unknown statuses fall back to the DEFAULT description/severity.
        if package_status not in self.VCSWATCH_STATUS_DICT:
            package_status = "DEFAULT"

        # If we are here, then something is not OK. Let's check if we
        # already had some intel regarding the current package status.
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.vcswatch_ai_type)
            todo['add']['action_items'].append(action_item)
        else:
            todo['update']['action_items'].append(action_item)

        # Computes the watch URL
        vcswatch_url = self.VCSWATCH_URL % {'package': package.name}

        if action_item.extra_data:
            extra_data = action_item.extra_data
        else:
            extra_data = {}

        # Fetches the long description and severity from
        # the VCSWATCH_STATUS_DICT dict.
        action_item.severity = \
            self.VCSWATCH_STATUS_DICT[package_status]['severity']

        # "commits" may be None in the JSON; treat it as zero.
        nb_commits = int(vcswatch_data["commits"] or 0)

        # The new data
        new_extra_data = {
            'vcswatch_url': vcswatch_url,
        }
        new_extra_data.update(vcswatch_data)

        # True only when every key of the new data already matches the
        # stored extra_data (a freshly created item never matches, since
        # its extra_data is empty and 'vcswatch_url' is always present).
        extra_data_match = all([
            new_extra_data[key] == extra_data.get(key, None)
            for key in new_extra_data
        ])

        # If everything is fine and we are not forcing the update
        # then we proceed to the next package.
        if extra_data_match and not self.force_update:
            # Remove from the todolist
            todo['update']['action_items'].remove(action_item)
            return False
        else:
            # Report for short description of the :class:`ActionItem`
            desc = self.VCSWATCH_STATUS_DICT[package_status]['description']
            commits_s = 's' if nb_commits != 1 else ''
            action_item.short_description = \
                desc.format(commits_s=commits_s, **new_extra_data)
            action_item.extra_data = new_extra_data
            return True
    def update_package_info(self, package, vcswatch_data, package_info, key,
                            todo):
        """
        Creates or refreshes the :class:`PackageData` entry of ``package``
        for the given ``key``, scheduling the add/update/removal in the
        ``todo`` dict (mutated in place).

        :returns: True when the entry changed and must be saved, False when
            it is already up to date.
        """
        # Same thing with PackageData
        if package_info is None:
            package_info = PackageData(
                package=package,
                key=key,
            )
            todo['add']['package_infos'].append(package_info)
        else:
            todo['update']['package_infos'].append(package_info)

        # Computes the watch URL
        vcswatch_url = self.VCSWATCH_URL % {'package': package.name}

        # Build the prospective new value on a copy so the comparison
        # below can still see the stored checksum.
        new_value = dict(package_info.value)
        if key == 'vcs_extra_links':
            new_value['QA'] = vcswatch_url
        elif key == 'vcswatch':
            if 'package_version' in vcswatch_data:
                new_value['package_version'] = vcswatch_data['package_version']
            if 'changelog_version' in vcswatch_data:
                new_value['changelog_version'] = vcswatch_data[
                    'changelog_version']
            if 'changelog_distribution' in vcswatch_data:
                new_value['changelog_distribution'] = vcswatch_data[
                    'changelog_distribution']

        new_value['checksum'] = get_data_checksum(new_value)

        # Unchanged checksum means the stored data is already current.
        package_info_match = (
            new_value['checksum'] == package_info.value.get('checksum', None)
        )

        if package_info_match and not self.force_update:
            # Undo the scheduling done above: nothing to save after all.
            todo['update']['package_infos'].remove(package_info)
            return False
        else:
            package_info.value = new_value
            return True
2750 def update_packages_item(self, packages, vcswatch_datas):
2751 """Generates the lists of :class:`ActionItem` to be added,
2752 deleted or updated regarding the status of their packages.
2754 Categories of statuses are:
2755 {u'COMMITS', u'ERROR', u'NEW', u'OK', u'OLD', u'UNREL'}
2757 Basically, it fetches all info from :class:`PackageData`
2758 with key='vcs', the ones without data matching vcswatch_datas are
2759 stored in one variable that's iterated through directly, and if
2760 there was something before, it is purged. Then, all entries in
2761 that queryset that have no relevant intel anymore are scheduled
2762 to be deleted. The others are only updated.
2764 All :class:`PackageData` matching vcswatch_datas
2765 are stored in another variable. The same is done with the list of
2766 :class:`ActionItem` that match this task type.
2768 Then, it iterates on all vcswatch_datas' packages and it tries to
2769 determine if there are any news, if so, it updates apopriately the
2770 prospective :class:`ActionItem` and :class:`PackageData`,
2771 and schedule them to be updated. If no data was existent, then
2772 it creates them and schedule them to be added to the database.
2774 At the end, this function returns a dict of all instances of
2775 :class:`ActionItem` and :class:`PackageData` stored
2776 in subdicts depending on their class and what is to be done
2777 with them.
2779 :rtype: dict
2781 """
2783 todo = {
2784 'drop': {
2785 'action_items': [],
2786 'package_infos': [],
2787 },
2788 'update': {
2789 'action_items': [],
2790 'package_infos': [],
2791 },
2792 'add': {
2793 'action_items': [],
2794 'package_infos': [],
2795 },
2796 }
2798 package_info_keys = ['vcs_extra_links', 'vcswatch']
2799 package_infos = {}
2800 for key in package_info_keys:
2801 # Fetches all PackageData with a given key for packages having
2802 # a vcswatch key. As the pair (package, key) is unique, there is a
2803 # bijection between these data, and we fetch them classifying them
2804 # by package name.
2805 for package_info in PackageData.objects.select_related(
2806 'package').filter(key=key).only('package__name', 'value'):
2807 if package_info.package.name not in package_infos:
2808 package_infos[package_info.package.name] = {}
2809 package_infos[package_info.package.name][key] = package_info
2811 # As :class:`PackageData` key=vcs_extra_links is shared, we
2812 # have to clean up those with vcs watch_url that aren't in vcs_data
2813 package_infos_without_watch = PackageData.objects.filter(
2814 key='vcs_extra_links').exclude(
2815 package__name__in=vcswatch_datas.keys()).only('value')
2817 # Do the actual clean.
2818 self.clean_package_info(package_infos_without_watch, todo)
2820 # Fetches all :class:`ActionItem` for packages concerned by a vcswatch
2821 # action.
2822 action_items = {
2823 action_item.package.name: action_item
2824 for action_item in ActionItem.objects.select_related(
2825 'package'
2826 ).filter(item_type=self.vcswatch_ai_type)
2827 }
2829 for package in packages:
2830 # Get the vcswatch_data from the whole vcswatch_datas
2831 vcswatch_data = vcswatch_datas[package.name]
2833 # Get the old action item for this warning, if it exists.
2834 action_item = action_items.get(package.name, None)
2836 # Updates the :class:`ActionItem`. If _continue is None,
2837 # then there is nothing more to do with this package.
2838 # If it is False, then no update is pending for the
2839 # :class:`ActionItem`, else there is an update
2840 # to do.
2841 _ai_continue = self.update_action_item(
2842 package,
2843 vcswatch_data,
2844 action_item,
2845 todo)
2847 _pi_continue = False
2848 for key in package_info_keys:
2849 try:
2850 package_info = package_infos[package.name][key]
2851 except KeyError:
2852 package_info = None
2854 _pi_continue |= self.update_package_info(
2855 package,
2856 vcswatch_data,
2857 package_info,
2858 key,
2859 todo)
2861 if not _ai_continue and not _pi_continue:
2862 continue
2864 return todo
2866 def update_action_item_for_salsa_mrs(self, package, vcswatch_data,
2867 action_item, todo):
2868 """
2869 For a given :class:`ActionItem` and a given vcswatch data, updates
2870 properly the todo dict if required.
2872 :rtype: `None`
2873 """
2875 try:
2876 parsed_url = urllib.parse.urlparse(vcswatch_data['url'])
2877 except (KeyError, ValueError):
2878 is_salsa = False
2879 else:
2880 is_salsa = parsed_url.netloc == 'salsa.debian.org'
2881 merge_requests = vcswatch_data.get('merge_requests', 0)
2882 need_ai = is_salsa and merge_requests
2884 if not need_ai:
2885 # There are no open Salsa MRs (or we can't check) so remove the AI
2886 if action_item:
2887 todo['drop'].append(action_item)
2889 # Nothing more to do!
2890 return
2892 if action_item is None:
2893 action_item = ActionItem(
2894 package=package,
2895 item_type=self.salsa_mr_ai_type)
2896 todo['add'].append(action_item)
2897 else:
2898 todo['update'].append(action_item)
2900 # Computes the Salsa MR URL
2901 salsa_path = parsed_url.path.split(' ')[0]
2902 if salsa_path.endswith('.git'): 2902 ↛ 2904line 2902 didn't jump to line 2904, because the condition on line 2902 was never false
2903 salsa_path = salsa_path[:-4]
2904 url = 'https://salsa.debian.org{}/-/merge_requests'.format(salsa_path)
2906 if action_item.extra_data:
2907 extra_data = action_item.extra_data
2908 else:
2909 extra_data = {}
2911 new_extra_data = {
2912 'count': merge_requests,
2913 'url': url,
2914 }
2916 extra_data_match = all([
2917 new_extra_data[key] == extra_data.get(key, None)
2918 for key in new_extra_data
2919 ])
2921 # If everything is fine and we are not forcing the update
2922 # then we proceed to the next package.
2923 if extra_data_match and not self.force_update: 2923 ↛ 2925line 2923 didn't jump to line 2925, because the condition on line 2923 was never true
2924 # Remove from the todolist
2925 todo['update'].remove(action_item)
2926 else:
2927 # Report for short description of the :class:`ActionItem`
2928 count_str = '{} open merge request{}'.format(
2929 merge_requests,
2930 's' if merge_requests != 1 else '')
2931 action_item.short_description = \
2932 self.SALSA_MR_SHORT_DESCRIPTION.format(
2933 count_str=count_str,
2934 **new_extra_data)
2935 action_item.severity = ActionItem.SEVERITY_NORMAL
2936 action_item.extra_data = new_extra_data
2938 def update_packages_item_for_salsa_mrs(self, packages, vcswatch_datas):
2939 """Generates the lists of :class:`ActionItem` to be added,
2940 deleted or updated regarding open Salsa MRs for their packages.
2942 At the end, this function returns a dict of all instances of
2943 :class:`ActionItem` stored in subdicts depending on their class
2944 and what is to be done with them.
2946 :rtype: dict
2948 """
2950 todo = {
2951 'drop': [],
2952 'update': [],
2953 'add': [],
2954 }
2956 # Fetches all :class:`ActionItem` for packages concerned by a salsa mr
2957 # action.
2958 action_items = {
2959 action_item.package.name: action_item
2960 for action_item in ActionItem.objects.select_related(
2961 'package'
2962 ).filter(item_type=self.salsa_mr_ai_type)
2963 }
2965 for package in packages:
2966 # Get the vcswatch_data from the whole vcswatch_datas
2967 vcswatch_data = vcswatch_datas[package.name]
2969 # Get the old action item for this warning, if it exists.
2970 action_item = action_items.get(package.name, None)
2972 # Updates the :class:`ActionItem`.
2973 self.update_action_item_for_salsa_mrs(
2974 package,
2975 vcswatch_data,
2976 action_item,
2977 todo)
2979 return todo
2981 def execute_main(self):
2982 # Get the actual vcswatch json file from qa.debian.org
2983 vcs_data = self.get_vcswatch_data()
2985 # Only fetch the packages that are in the json dict.
2986 packages = PackageName.objects.filter(name__in=vcs_data.keys())
2988 # Faster than fetching the action items one by one in a loop
2989 # when handling each package.
2990 packages.prefetch_related('action_items')
2992 # Determine wether something is to be kept or dropped.
2993 todo = self.update_packages_item(packages, vcs_data)
2994 todo_salsa_mrs = self.update_packages_item_for_salsa_mrs(
2995 packages,
2996 vcs_data)
2998 with transaction.atomic():
2999 # Delete the :class:`ActionItem` that are osbolete, and also
3000 # the :class:`PackageData` of the same.
3001 ActionItem.objects.delete_obsolete_items(
3002 [self.vcswatch_ai_type, self.salsa_mr_ai_type],
3003 vcs_data.keys())
3004 PackageData.objects.filter(
3005 key='vcs_extra_links',
3006 id__in=[
3007 package_info.id
3008 for package_info in todo['drop']['package_infos']
3009 ]
3010 ).delete()
3012 # Then delete the :class:`ActionItem` that are to be deleted.
3013 ActionItem.objects.filter(
3014 item_type__type_name=self.vcswatch_ai_type.type_name,
3015 id__in=[
3016 action_item.id
3017 for action_item in todo['drop']['action_items']
3018 ]
3019 ).delete()
3020 ActionItem.objects.filter(
3021 item_type__type_name=self.salsa_mr_ai_type.type_name,
3022 id__in=[
3023 action_item.id
3024 for action_item in todo_salsa_mrs['drop']
3025 ]
3026 ).delete()
3028 # Then bulk_create the :class:`ActionItem` to add and the
3029 # :class:`PackageData`
3030 ActionItem.objects.bulk_create(todo['add']['action_items'])
3031 PackageData.objects.bulk_create(todo['add']['package_infos'])
3032 ActionItem.objects.bulk_create(todo_salsa_mrs['add'])
3034 # Update existing entries
3035 for action_item in todo['update']['action_items']:
3036 action_item.save()
3037 for package_info in todo['update']['package_infos']:
3038 package_info.save()
3039 for action_item in todo_salsa_mrs['update']:
3040 action_item.save()
class TagPackagesWithRcBugs(BaseTask, PackageTagging):
    """
    Performs an update of 'rc-bugs' tag for packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600

    TAG_NAME = 'tag:rc-bugs'
    TAG_DISPLAY_NAME = 'rc bugs'
    TAG_COLOR_TYPE = 'danger'
    TAG_DESCRIPTION = 'The package has Release Critical bugs'
    TAG_TABLE_TITLE = 'Packages with RC bugs'

    def packages_to_tag(self):
        """Return the packages whose 'rc' bug category counts any bug."""
        tagged_packages = []
        for bug_stats in PackageBugStats.objects.prefetch_related('package'):
            # Scan the per-category stats until the 'rc' entry is found;
            # remaining categories are irrelevant once it is.
            for category in bug_stats.stats:
                if category['category_name'] != 'rc':
                    continue
                if category['bug_count'] > 0:
                    tagged_packages.append(bug_stats.package)
                break
        return tagged_packages
class TagPackagesWithNewUpstreamVersion(BaseTask, PackageTagging):
    """
    Performs an update of 'new-upstream-version' tag for packages.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    TAG_NAME = 'tag:new-upstream-version'
    TAG_DISPLAY_NAME = 'new upstream version'
    TAG_COLOR_TYPE = 'warning'
    TAG_DESCRIPTION = 'The upstream has a newer version available'
    TAG_TABLE_TITLE = 'Newer upstream version'

    def packages_to_tag(self):
        """Return packages that carry a 'new-upstream-version' action item."""
        try:
            action_type = ActionItemType.objects.get(
                type_name='new-upstream-version')
        except ActionItemType.DoesNotExist:
            # The action item type has never been created: nothing to tag.
            return []

        return [
            item.package
            for item in action_type.action_items.prefetch_related('package')
        ]
class UpdateDependencySatisfactionTask(BaseTask):
    """
    Fetches binary package installability results from qa.debian.org/dose
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-dependency-satisfaction'
    ACTION_ITEM_TEMPLATE = 'debian/dependency-satisfaction-action-item.html'
    # Release architectures for which problems are reported.
    # TODO: retrieve this list programmatically, either from
    # https://api.ftp-master.debian.org/suite/testing
    # or from the Architecture field in the Release file
    # for testing (both lists should be equal).
    RELEASE_ARCHITECTURES = frozenset({
        'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
        'mips64el', 'mipsel', 'ppc64el', 's390x',
    })

    def __init__(self, force_update=False, *args, **kwargs):
        """
        :param force_update: when True, refresh the action items even if
            the upstream report has not changed.
        """
        super().__init__(*args, **kwargs)
        self.force_update = force_update
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def set_parameters(self, parameters):
        """Allow overriding the force_update flag at run time."""
        if 'force_update' in parameters:
            self.force_update = parameters['force_update']

    def get_dependency_satisfaction(self):
        """
        Download and parse the dose "each.txt" report.

        :returns: a dict mapping source package names to lists of
            (binary package name, version, architectures, explanation,
            anchor) tuples, or None when the report was not re-downloaded.
        :rtype: dict or None
        """
        url = '{}/each.txt'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
        for line in content.splitlines():
            binpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            try:
                bin_package = BinaryPackageName.objects.get(name=binpkg_name)
                srcpkg_name = bin_package.main_source_package_name
            except BinaryPackageName.DoesNotExist:
                continue
            arches = {arch.strip() for arch in arches.split()}
            # only report problems for release architectures
            arches = arches.intersection(self.RELEASE_ARCHITECTURES)
            if not arches:
                continue
            # if the package is arch:all, only report problems on amd64
            if isnative != "True":
                arches = arches.intersection({"amd64"})
            if not arches:
                continue
            dep_sats[srcpkg_name].add(
                (binpkg_name, ver, tuple(arches), expl, anchor))
        # turn sets into lists so the values are JSON-serializable
        return {name: list(unsats) for name, unsats in dep_sats.items()}

    def update_action_item(self, package, unsats):
        """
        Create or refresh the dependency-satisfaction action item of
        the given package from the list of unsatisfiable dependencies.
        """
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        count = len(unsats)
        action_item.short_description = \
            "{count} binary package{plural} {have} unsatisfiable " \
            "dependencies".format(
                count=count,
                plural='' if count == 1 else 's',
                have='has' if count == 1 else 'have',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        """Refresh action items and PackageData from the latest report."""
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(key='dependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='dependency_satisfaction',
                    package=package,
                    value={'dependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)
class UpdateBuildDependencySatisfactionTask(BaseTask):
    """
    Fetches source package installability results from qa.debian.org/dose
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 3

    BASE_URL = 'https://qa.debian.org/dose/debcheck/src_unstable_main/latest'
    ACTION_ITEM_TYPE_NAME = 'debian-builddependency-satisfaction'
    ACTION_ITEM_TEMPLATE = \
        'debian/builddependency-satisfaction-action-item.html'
    # Release architectures for which problems are reported.
    # TODO: retrieve this list programmatically, either from
    # https://api.ftp-master.debian.org/suite/testing
    # or from the Architecture field in the Release file
    # for testing (both lists should be equal).
    RELEASE_ARCHITECTURES = frozenset({
        'amd64', 'arm64', 'armel', 'armhf', 'i386', 'mips',
        'mips64el', 'mipsel', 'ppc64el', 's390x',
    })

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.action_item_type = ActionItemType.objects.create_or_update(
            type_name=self.ACTION_ITEM_TYPE_NAME,
            full_description_template=self.ACTION_ITEM_TEMPLATE)

    def get_dependency_satisfaction(self):
        """
        Download and parse the dose "each.txt" report for source packages.

        NOTE(review): this reads self.force_update, which — unlike the
        sibling UpdateDependencySatisfactionTask — this class never sets;
        presumably inherited from BaseTask, confirm.

        :returns: a dict mapping source package names to lists of
            (source package name, architectures, explanation, anchor)
            tuples, or None when the report was not re-downloaded.
        :rtype: dict or None
        """
        url = '{}/each.txt'.format(self.BASE_URL)
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        dep_sats = collections.defaultdict(set)
        for line in content.splitlines():
            srcpkg_name, ver, isnative, anchor, expl, arches = line.split('#')
            arches = {arch.strip() for arch in arches.split()}
            # only report problems for release architectures
            arches = arches.intersection(self.RELEASE_ARCHITECTURES)
            if not arches:
                continue
            # if the source package only builds arch:all binary packages,
            # only report problems on amd64
            if isnative != "True":
                arches = arches.intersection({"amd64"})
            if not arches:
                continue
            dep_sats[srcpkg_name].add(
                (srcpkg_name, tuple(arches), expl, anchor))
        # turn sets into lists so the values are JSON-serializable
        return {name: list(unsats) for name, unsats in dep_sats.items()}

    def update_action_item(self, package, unsats):
        """
        Create or refresh the build-dependency-satisfaction action item
        of the given package from the list of unsatisfiable dependencies.
        """
        action_item = package.get_action_item_for_type(
            self.action_item_type.type_name)
        if action_item is None:
            action_item = ActionItem(
                package=package,
                item_type=self.action_item_type,
                severity=ActionItem.SEVERITY_HIGH)
        count = len(unsats)
        action_item.short_description = \
            "source package has {count} unsatisfiable " \
            "build dependenc{plural}".format(
                count=count,
                plural='y' if count == 1 else 'ies',
            )
        action_item.extra_data = {
            'unsats': unsats,
            'base_url': '{}/packages/'.format(self.BASE_URL),
        }
        action_item.save()

    def execute(self):
        """Refresh action items and PackageData from the latest report."""
        dep_sats = self.get_dependency_satisfaction()
        if dep_sats is None:
            return

        with transaction.atomic():
            PackageData.objects.filter(
                key='builddependency_satisfaction').delete()

            packages = []
            pkgdata_list = []

            for name, unsats in dep_sats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, unsats)
                except SourcePackageName.DoesNotExist:
                    continue

                dep_sat_info = PackageData(
                    key='builddependency_satisfaction',
                    package=package,
                    value={'builddependency_satisfaction': unsats})
                pkgdata_list.append(dep_sat_info)

            ActionItem.objects.delete_obsolete_items([self.action_item_type],
                                                     packages)
            PackageData.objects.bulk_create(pkgdata_list)
class UpdateDl10nStatsTask(BaseTask):
    """
    Updates packages' l10n statistics.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    ACTION_ITEM_TYPE_NAME = 'dl10n'
    ITEM_DESCRIPTION = \
        '<a href="{url}">Issues</a> found with some translations'
    ITEM_FULL_DESCRIPTION_TEMPLATE = 'debian/dl10n-action-item.html'

    def initialize(self, *args, **kwargs):
        """Register (or refresh) the dl10n :class:`ActionItemType`."""
        super(UpdateDl10nStatsTask, self).initialize(*args, **kwargs)
        self.l10n_action_item_type = \
            ActionItemType.objects.create_or_update(
                type_name=self.ACTION_ITEM_TYPE_NAME,
                full_description_template=self.ITEM_FULL_DESCRIPTION_TEMPLATE)

    def _load_l10n_stats(self):
        """
        Download and parse the i18n pkglist file.

        :returns: a dict mapping source package names to a dict with the
            keys 'score_debian', 'score_other', 'link' and 'todo', or None
            when the file was not re-downloaded.
        """
        url = 'https://i18n.debian.org/l10n-pkg-status/pkglist'
        content = get_resource_text(url, force_update=self.force_update,
                                    only_if_updated=True)
        if content is None:
            return

        def parse_score(score):
            # '-' means "no score available" and maps to None.
            if score == '-':
                return None
            return int(score)

        all_stats = {}

        # The format of the file is (copied from its header):
        # <package> <version> (<comma sperated scores>) <link> <todo>
        line_re = re.compile(
            r'^([^\s]+) ([^\s]+) \(([^)]+)\) ([^\s]+) ([^\s]+)')
        for line in content.splitlines():
            # Skip blank lines and comments.
            if not line or line.startswith('#'):
                continue
            match = line_re.search(line)
            if not match:
                logger.warning('Failed to parse l10n pkglist line: %s', line)
                continue

            src_pkgname = match.group(1)
            try:
                scores = match.group(3).split(',')
                score_debian = parse_score(scores[0])
                score_other = parse_score(scores[1])
                # <todo> is a "0" or "1" string, so convert through int to get
                # a proper bool
                todo = bool(int(match.group(5)))
            except (IndexError, ValueError):
                # Malformed score list or todo field: skip the entry.
                logger.warning(
                    'Failed to parse l10n scores: %s',
                    line, exc_info=1)
                continue
            link = match.group(4)
            # Entries without any score carry no useful information.
            if not score_debian and not score_other:
                continue

            all_stats[src_pkgname] = {
                'score_debian': score_debian,
                'score_other': score_other,
                'link': link,
                'todo': todo,
            }

        return all_stats

    def update_action_item(self, package, package_stats):
        """
        Create, update or delete the dl10n action item of the given
        package so that it reflects the given stats.
        """
        todo = package_stats['todo']

        # Get the old action item, if it exists.
        l10n_action_item = package.get_action_item_for_type(
            self.l10n_action_item_type.type_name)
        if not todo:
            if l10n_action_item:
                # If the item previously existed, delete it now since there
                # are no longer any warnings/errors.
                l10n_action_item.delete()
            return

        # The item didn't previously have an action item: create it now
        if l10n_action_item is None:
            desc = self.ITEM_DESCRIPTION.format(url=package_stats['link'])
            l10n_action_item = ActionItem(
                package=package,
                item_type=self.l10n_action_item_type,
                severity=ActionItem.SEVERITY_LOW,
                short_description=desc)

        if l10n_action_item.extra_data:
            old_extra_data = l10n_action_item.extra_data
            if old_extra_data == package_stats:
                # No need to update
                return

        l10n_action_item.extra_data = package_stats

        l10n_action_item.save()

    def execute_main(self):
        """Refresh dl10n action items and PackageData from the stats."""
        stats = self._load_l10n_stats()
        if not stats:
            return

        with transaction.atomic():
            # Rebuild the dl10n PackageData from scratch.
            PackageData.objects.filter(key='dl10n').delete()

            packages = []
            pkgdata_list = []

            for name, stat in stats.items():
                try:
                    package = SourcePackageName.objects.get(name=name)
                    packages.append(package)
                    self.update_action_item(package, stat)
                except SourcePackageName.DoesNotExist:
                    continue

                dl10n_stat = PackageData(
                    key='dl10n',
                    package=package,
                    value=stat)
                pkgdata_list.append(dl10n_stat)

            # Drop action items of packages no longer in the stats.
            ActionItem.objects.delete_obsolete_items(
                [self.l10n_action_item_type], packages)
            PackageData.objects.bulk_create(pkgdata_list)
class UpdateDebianPatchesTask(BaseTask, ImportExternalData):
    """
    Import statistics about Debian patches from UDD.
    """

    class Scheduler(IntervalScheduler):
        interval = 3600 * 6

    data_url = 'https://udd.debian.org/patches.cgi?json=1'
    action_item_types = [
        {
            'type_name': 'debian-patches',
            'full_description_template':
                'debian/debian-patches-action-item.html',
        },
    ]

    def generate_package_data(self):
        """
        Build the 'debian-patches' PackageData payload: one entry per
        source package, copying the UDD record and adding its patches URL.
        """
        pkgdata = {}
        for entry in self.external_data:
            source = entry.get('source')
            if source:
                data = entry.copy()
                data['url'] = self._generate_url(entry)
                pkgdata[source] = data

        return [
            ('debian-patches', pkgdata),
        ]

    @staticmethod
    def _generate_url(entry):
        """Return the UDD patches page URL for the given UDD record."""
        query_string = urlencode({
            'src': entry.get('source'),
            'version': entry.get('version'),
        })
        return f"https://udd.debian.org/patches.cgi?{query_string}"

    def generate_action_items(self):
        """
        Build the parameters of the 'debian-patches' action items for
        packages whose patches have invalid metadata or still have to be
        forwarded upstream.
        """
        pkgdata = {}
        for entry in self.external_data:
            # Skip invalid entries and those without (problematic) patches
            source = entry.get('source')
            forwarded_invalid = entry.get('forwarded_invalid', 0)
            forwarded_no = entry.get('forwarded_no', 0)
            if not source:
                continue  # Invalid, no source package data
            if entry.get('status') != 'patches':
                continue  # No patch at all
            if forwarded_invalid == 0 and forwarded_no == 0:
                continue  # No problematic patch

            # Build the parameters for the action item
            severity = ActionItem.SEVERITY_LOW
            desc = ''
            url = self._generate_url(entry)

            # Invalid metadata raises the severity of the whole item.
            if forwarded_invalid:
                severity = ActionItem.SEVERITY_HIGH
                count = f"{forwarded_invalid} patch"
                if forwarded_invalid > 1:
                    count += 'es'
                count = f'<a href="{url}">{count}</a>'
                desc += f"{count} with invalid metadata"

            if forwarded_no:
                if desc:
                    desc += ', '
                count = f"{forwarded_no} patch"
                if forwarded_no > 1:
                    count += 'es'
                count = f'<a href="{url}">{count}</a>'
                desc += f"{count} to forward upstream"

            extra_data = entry.copy()
            extra_data['url'] = url

            # Record the action item parameters
            pkgdata[source] = {
                'short_description': f"debian/patches: {desc}",
                'severity': severity,
                'extra_data': extra_data,
            }

        return [
            ('debian-patches', pkgdata),
        ]