# Coverage report artifact (coverage.py v6.5.0, created at 2024-04-18 20:48 +0000):
# britney2/policies/policy.py — 86% of 1218 statements covered.
1import json
2import logging
3import os
4import re
5import sys
6import time
7from typing import Optional, Callable, Any
9import yaml
10from enum import IntEnum, unique
11from collections import defaultdict
12from urllib.parse import quote
14import apt_pkg
16from britney2 import PackageId, SourcePackage, SuiteClass, Suites
17from britney2.hints import Hint, HintCollection, HintParser, split_into_one_hint_per_package
18from britney2.inputs.suiteloader import SuiteContentLoader
19from britney2.migrationitem import MigrationItem
20from britney2.policies import PolicyVerdict, ApplySrcPolicy
21from britney2.utils import (compute_reverse_tree,
22 find_newer_binaries,
23 get_dependency_solvers,
24 is_smooth_update_allowed,
25 parse_option,
26 )
27from britney2 import DependencyType
28from britney2.excusedeps import DependencySpec
class PolicyLoadRequest:
    """Describes how (and whether) a single policy should be instantiated.

    A request pairs a policy constructor with an optional configuration
    switch; the switch decides at load time whether the policy is enabled.
    """

    __slots__ = ('_options_name', '_default_value', '_policy_constructor')

    def __init__(self,
                 policy_constructor: Callable[[Any, 'Suites'], 'BasePolicy'],
                 options_name: Optional[str],
                 default_value: bool):
        self._policy_constructor = policy_constructor
        self._options_name = options_name
        self._default_value = default_value

    def is_enabled(self, options) -> bool:
        """Return True if this policy should be loaded given `options`."""
        name = self._options_name
        if name is None:
            # Unconditional requests must default to enabled.
            assert self._default_value
            return True
        configured = getattr(options, name, None)
        if configured is None:
            return self._default_value
        return configured.lower() in ('yes', 'y', 'true', 't')

    def load(self, options: Any, suite_info: 'Suites') -> 'BasePolicy':
        """Instantiate the policy."""
        return self._policy_constructor(options, suite_info)

    @classmethod
    def always_load(cls, policy_constructor: Callable[[Any, 'Suites'], 'BasePolicy']) -> 'PolicyLoadRequest':
        """Create a request for a policy that is loaded unconditionally."""
        return cls(policy_constructor, None, True)

    @classmethod
    def conditionally_load(cls,
                           policy_constructor: Callable[[Any, 'Suites'], 'BasePolicy'],
                           option_name: str,
                           default_value: bool) -> 'PolicyLoadRequest':
        """Create a request gated on the configuration option `option_name`."""
        return cls(policy_constructor, option_name, default_value)
class PolicyEngine(object):
    """Holds the loaded policies and runs them against migration items.

    Each policy's verdict is folded into the excuse's overall
    policy_verdict, always keeping the "worst" verdict seen so far.
    """

    def __init__(self):
        self._policies: list["BasePolicy"] = []

    def add_policy(self, policy: "BasePolicy") -> None:
        """Register a single policy with the engine."""
        self._policies.append(policy)

    def load_policies(self, options, suite_info: 'Suites', policy_load_requests: 'list[PolicyLoadRequest]') -> None:
        """Instantiate and register every request that is enabled by `options`."""
        for request in policy_load_requests:
            if request.is_enabled(options):
                self.add_policy(request.load(options, suite_info))

    def register_policy_hints(self, hint_parser: 'HintParser') -> None:
        """Let each policy register the hint types it understands."""
        for policy in self._policies:
            policy.register_hints(hint_parser)

    def initialise(self, britney, hints) -> None:
        """Hand the parsed hints to each policy, then initialise it."""
        for policy in self._policies:
            policy.hints = hints
            policy.initialise(britney)

    def save_state(self, britney) -> None:
        """Let each policy persist its state at the end of the run."""
        for policy in self._policies:
            policy.save_state(britney)

    def apply_src_policies(self, item: 'MigrationItem', source_t: 'Optional[SourcePackage]', source_u: 'SourcePackage',
                           excuse):
        """Run all applicable policies on a source migration and update the excuse."""
        excuse_verdict = excuse.policy_verdict
        suite_class = item.suite.suite_class
        for policy in self._policies:
            pinfo = {}
            policy_verdict = PolicyVerdict.NOT_APPLICABLE
            if suite_class in policy.applicable_suites:
                # A policy may run per-architecture, per-source, or both.
                if policy.src_policy.run_arch:
                    for arch in policy.options.architectures:
                        arch_verdict = policy.apply_srcarch_policy_impl(pinfo, item, arch, source_t, source_u, excuse)
                        policy_verdict = PolicyVerdict.worst_of(policy_verdict, arch_verdict)
                if policy.src_policy.run_src:
                    src_verdict = policy.apply_src_policy_impl(pinfo, item, source_t, source_u, excuse)
                    policy_verdict = PolicyVerdict.worst_of(policy_verdict, src_verdict)
            # The base policy provides this field, so the subclass should leave it blank
            assert 'verdict' not in pinfo
            if policy_verdict != PolicyVerdict.NOT_APPLICABLE:
                excuse.policy_info[policy.policy_id] = pinfo
                pinfo['verdict'] = policy_verdict.name
                excuse_verdict = PolicyVerdict.worst_of(policy_verdict, excuse_verdict)
        excuse.policy_verdict = excuse_verdict

    def apply_srcarch_policies(self, item: 'MigrationItem', arch, source_t: 'Optional[SourcePackage]',
                               source_u: 'SourcePackage', excuse):
        """Run all applicable policies on a binary (per-arch) migration and update the excuse."""
        excuse_verdict = excuse.policy_verdict
        suite_class = item.suite.suite_class
        for policy in self._policies:
            pinfo = {}
            if suite_class in policy.applicable_suites:
                policy_verdict = policy.apply_srcarch_policy_impl(pinfo, item, arch, source_t, source_u, excuse)
                excuse_verdict = PolicyVerdict.worst_of(policy_verdict, excuse_verdict)
                # The base policy provides this field, so the subclass should leave it blank
                assert 'verdict' not in pinfo
                if policy_verdict != PolicyVerdict.NOT_APPLICABLE:
                    excuse.policy_info[policy.policy_id] = pinfo
                    pinfo['verdict'] = policy_verdict.name
        excuse.policy_verdict = excuse_verdict
class BasePolicy(object):
    """Abstract base class for migration policies.

    A policy inspects a migration item and renders a PolicyVerdict for it.
    Subclasses override apply_src_policy_impl and/or apply_srcarch_policy_impl
    and may hook into register_hints / initialise / save_state.
    """

    def __init__(self, policy_id: str, options, suite_info: Suites, applicable_suites: set[SuiteClass],
                 src_policy: ApplySrcPolicy = ApplySrcPolicy.RUN_SRC):
        """The BasePolicy constructor

        :param policy_id: A string identifying the policy. It determines the
            key used for the excuses.yaml etc.
        :param options: The options member of Britney with all the config options.
        :param suite_info: The suites under consideration.
        :param applicable_suites: A set of suite classes where this policy applies.
        :param src_policy: Whether the policy runs per-source, per-arch or both.
        """
        self.policy_id = policy_id
        self.options = options
        self.suite_info = suite_info
        self.applicable_suites = applicable_suites
        self.src_policy = src_policy
        # Populated by the PolicyEngine before initialise() is invoked.
        self.hints: Optional[HintCollection] = None
        self.logger = logging.getLogger("%s.%s" % (type(self).__module__, type(self).__name__))

    @property
    def state_dir(self):
        """Directory in which policies keep their persistent state."""
        return self.options.state_dir

    def register_hints(self, hint_parser: HintParser) -> None:  # pragma: no cover
        """Register new hints that this policy accepts

        :param hint_parser: An instance of HintParser (see HintParser.register_hint_type)
        """
        pass

    def initialise(self, britney) -> None:  # pragma: no cover
        """Called once to make the policy initialise any data structures

        This is useful for e.g. parsing files or other "heavy do-once" work.

        :param britney: This is the instance of the "Britney" class.
        """
        pass

    def save_state(self, britney) -> None:  # pragma: no cover
        """Called once at the end of the run to save any persistent data

        Note this will *not* be called for "dry-runs" as such runs should not
        change the state.

        :param britney: This is the instance of the "Britney" class.
        """
        pass

    def apply_src_policy_impl(self, policy_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse):  # pragma: no cover
        """Apply a policy on a given source migration

        Britney will call this method on a given source package, when
        Britney is considering to migrate it from the given source
        suite to the target suite. The policy will then evaluate the
        migration and return a verdict.

        :param policy_info: A dictionary of all policy results. The
            policy can add a value stored in a key related to its name
            (e.g. policy_info['age'] = {...}). This will go directly into
            the "excuses.yaml" output.
        :param item: The migration item the policy is applied to.
        :param source_data_tdist: Information about the source package
            in the target distribution (e.g. "testing"). This is the
            data structure in source_suite.sources[source_name].
        :param source_data_srcdist: Information about the source package
            in the source distribution (e.g. "unstable" or "tpu"). This is
            the data structure in target_suite.sources[source_name].
        :return: A PolicyVerdict (e.g. PolicyVerdict.PASS)
        """
        return PolicyVerdict.NOT_APPLICABLE

    def apply_srcarch_policy_impl(self, policy_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Apply a policy on a given binary migration

        Britney will call this method on binaries from a given source package
        on a given architecture, when Britney is considering to migrate them
        from the given source suite to the target suite. The policy will then
        evaluate the migration and return a verdict.

        :param policy_info: A dictionary of all policy results. The
            policy can add a value stored in a key related to its name
            (e.g. policy_info['age'] = {...}). This will go directly into
            the "excuses.yaml" output.
        :param item: The migration item the policy is applied to.
        :param arch: The architecture the item is applied to. This is mostly
            relevant for policies where src_policy is not ApplySrcPolicy.RUN_SRC
            (as that is the only case where arch can differ from item.architecture).
        :param source_data_tdist: Information about the source package
            in the target distribution (e.g. "testing"). This is the
            data structure in source_suite.sources[source_name].
        :param source_data_srcdist: Information about the source package
            in the source distribution (e.g. "unstable" or "tpu"). This is
            the data structure in target_suite.sources[source_name].
        :return: A PolicyVerdict (e.g. PolicyVerdict.PASS)
        """
        # If the policy doesn't implement this function, assume it's OK.
        return PolicyVerdict.NOT_APPLICABLE
class SimplePolicyHint(Hint):
    """Hint carrying one extra policy-specific parameter.

    Subclasses expose the parameter under a domain-specific name
    (e.g. AgeDayHint.days).
    """

    def __init__(self, user, hint_type, policy_parameter, packages):
        super().__init__(user, hint_type, packages)
        self._policy_parameter = policy_parameter

    def __eq__(self, other):
        # Hints only match when both the type and the extra parameter agree.
        if self.type != other.type or self._policy_parameter != other._policy_parameter:
            return False
        return super().__eq__(other)

    def str(self) -> str:
        package_list = ' '.join(x.name for x in self._packages)
        return '%s %s %s' % (self._type, str(self._policy_parameter), package_list)
class AgeDayHint(SimplePolicyHint):
    """`age-days` hint: overrides the required age (in days) of a package."""

    @property
    def days(self):
        # The policy parameter of an age-days hint is the day count.
        return self._policy_parameter
class IgnoreRCBugHint(SimplePolicyHint):
    """`ignore-rc-bugs` hint: names the RC bugs to disregard for a package."""

    @property
    def ignored_rcbugs(self):
        # The policy parameter of an ignore-rc-bugs hint is the bug collection.
        return self._policy_parameter
def simple_policy_hint_parser_function(class_name, converter):
    """Build a hint-parser callback for a SimplePolicyHint subclass.

    :param class_name: the hint class to instantiate (e.g. AgeDayHint)
    :param converter: callable converting the raw policy-parameter string
    :return: a parser function suitable for HintParser.register_hint_type
    """
    def parse(mi_factory, hints, who, hint_name, policy_parameter, *args):
        # One hint is emitted per parsed migration item.
        for migration_item in mi_factory.parse_items(*args):
            hints.add_hint(class_name(who, hint_name, converter(policy_parameter), [migration_item]))
    return parse
class AgePolicy(BasePolicy):
    """Configurable Aging policy for source migrations

    The AgePolicy will let packages stay in the source suite for a pre-defined
    amount of days before letting migrate (based on their urgency, if any).

    The AgePolicy's decision is influenced by the following:

    State files:
     * ${STATE_DIR}/age-policy-urgencies: File containing urgencies for source
       packages. Note that urgencies are "sticky" and the most "urgent" urgency
       will be used (i.e. the one with lowest age-requirements).
       - This file needs to be updated externally, if the policy should take
         urgencies into consideration. If empty (or not updated), the policy
         will simply use the default urgency (see the "Config" section below)
       - In Debian, these values are taken from the .changes file, but that is
         not a requirement for Britney.
     * ${STATE_DIR}/age-policy-dates: File containing the age of all source
       packages.
       - The policy will automatically update this file.
    Config:
     * DEFAULT_URGENCY: Name of the urgency used for packages without an urgency
       (or for unknown urgencies). Will also be used to set the "minimum"
       aging requirements for packages not in the target suite.
     * MINDAYS_<URGENCY>: The age-requirements in days for packages with the
       given urgency.
       - Commonly used urgencies are: low, medium, high, emergency, critical
    Hints:
     * urgent <source>/<version>: Disregard the age requirements for a given
       source/version.
     * age-days X <source>/<version>: Set the age requirements for a given
       source/version to X days. Note that X can exceed the highest
       age-requirement normally given.
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('age', options, suite_info, {SuiteClass.PRIMARY_SOURCE_SUITE})
        # urgency name -> minimum age in days (from the MINDAYS_* options)
        self._min_days: dict[str, int] = self._generate_mindays_table()
        self._min_days_default: Optional[int] = None  # initialised later
        # britney's "day" begins at 7pm (we want aging to occur in the 22:00Z run and we run Britney 2-4 times a day)
        # NB: _date_now is used in tests
        time_now = time.time()
        if hasattr(self.options, 'fake_runtime'):
            time_now = int(self.options.fake_runtime)
            self.logger.info("overriding runtime with fake_runtime %d" % time_now)

        self._date_now = int(((time_now / (60*60)) - 19) / 24)
        # source name -> (version, day-of-arrival); persisted via the dates file
        self._dates: dict[str, tuple[str, int]] = {}
        # source name -> urgency name (only "stickier" urgencies are recorded)
        self._urgencies: dict[str, str] = {}
        self._default_urgency: str = self.options.default_urgency
        # urgencies exempt from penalties (from the NO_PENALTIES option)
        self._penalty_immune_urgencies: frozenset[str] = frozenset()
        if hasattr(self.options, 'no_penalties'):
            self._penalty_immune_urgencies = frozenset(x.strip() for x in self.options.no_penalties.split())
        self._bounty_min_age: Optional[int] = None  # initialised later

    def _generate_mindays_table(self):
        """Build the urgency -> minimum-age table from the MINDAYS_* options.

        :raises ValueError: if a MINDAYS_* value is not a non-negative integer.
        """
        mindays = {}
        for k in dir(self.options):
            if not k.startswith('mindays_'):
                continue
            v = getattr(self.options, k)
            try:
                as_days = int(v)
            except ValueError:
                raise ValueError("Unable to parse " + k + " as a number of days. Must be 0 or a positive integer")
            if as_days < 0:
                raise ValueError("The value of " + k + " must be zero or a positive integer")
            # "mindays_low" -> "low"
            mindays[k.split("_")[1]] = as_days
        return mindays

    def register_hints(self, hint_parser: HintParser) -> None:
        # "age-days <days> <source>/<version>" and "urgent <source>/<version>"
        hint_parser.register_hint_type('age-days', simple_policy_hint_parser_function(AgeDayHint, int), min_args=2)
        hint_parser.register_hint_type('urgent', split_into_one_hint_per_package)

    def initialise(self, britney) -> None:
        """Load the dates/urgencies state files and validate the configuration."""
        super().initialise(britney)
        self._read_dates_file()
        self._read_urgencies_file()
        if self._default_urgency not in self._min_days:  # pragma: no cover
            raise ValueError("Missing age-requirement for default urgency (MINDAYS_%s)" % self._default_urgency)
        self._min_days_default = self._min_days[self._default_urgency]
        try:
            self._bounty_min_age = int(self.options.bounty_min_age)
        except ValueError:
            # BOUNTY_MIN_AGE may also name an urgency instead of a number
            if self.options.bounty_min_age in self._min_days:
                self._bounty_min_age = self._min_days[self.options.bounty_min_age]
            else:  # pragma: no cover
                raise ValueError('Please fix BOUNTY_MIN_AGE in the britney configuration')
        except AttributeError:
            # The option wasn't defined in the configuration
            self._bounty_min_age = 0

    def save_state(self, britney) -> None:
        """Persist the dates file (only called for non-dry runs)."""
        super().save_state(britney)
        self._write_dates_file()

    def apply_src_policy_impl(self, age_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse):
        """Check whether the upload is old enough to migrate and update the excuse."""
        # retrieve the urgency for the upload, ignoring it if this is a NEW package
        # (not present in the target suite)
        source_name = item.package
        urgency = self._urgencies.get(source_name, self._default_urgency)

        if urgency not in self._min_days:
            age_info['unknown-urgency'] = urgency
            urgency = self._default_urgency

        if not source_data_tdist:
            # NEW packages are never aged faster than the default urgency
            if self._min_days[urgency] < self._min_days_default:
                age_info['urgency-reduced'] = {
                    'from': urgency,
                    'to': self._default_urgency,
                }
                urgency = self._default_urgency

        # (Re)start the aging clock when the package is first seen or re-uploaded
        if source_name not in self._dates:
            self._dates[source_name] = (source_data_srcdist.version, self._date_now)
        elif self._dates[source_name][0] != source_data_srcdist.version:
            self._dates[source_name] = (source_data_srcdist.version, self._date_now)

        days_old = self._date_now - self._dates[source_name][1]
        min_days = self._min_days[urgency]
        # Bounties reduce the required age; penalties increase it
        for bounty in excuse.bounty:
            if excuse.bounty[bounty]:
                self.logger.info('Applying bounty for %s granted by %s: %d days',
                                 source_name, bounty, excuse.bounty[bounty])
                excuse.addinfo('Required age reduced by %d days because of %s' %
                               (excuse.bounty[bounty], bounty))
                assert excuse.bounty[bounty] > 0, "negative bounties shouldn't happen"
                min_days -= excuse.bounty[bounty]
        if urgency not in self._penalty_immune_urgencies:
            for penalty in excuse.penalty:
                if excuse.penalty[penalty]:
                    self.logger.info('Applying penalty for %s given by %s: %d days',
                                     source_name, penalty, excuse.penalty[penalty])
                    excuse.addinfo('Required age increased by %d days because of %s' %
                                   (excuse.penalty[penalty], penalty))
                    assert excuse.penalty[penalty] > 0, "negative penalties should be handled earlier"
                    min_days += excuse.penalty[penalty]

        # the age in BOUNTY_MIN_AGE can be higher than the one associated with
        # the real urgency, so don't forget to take it into account
        bounty_min_age = min(self._bounty_min_age, self._min_days[urgency])
        if min_days < bounty_min_age:
            min_days = bounty_min_age
            excuse.addinfo('Required age is not allowed to drop below %d days' % min_days)
        age_info['current-age'] = days_old

        # An age-days hint overrides the computed requirement entirely
        for age_days_hint in self.hints.search('age-days', package=source_name,
                                               version=source_data_srcdist.version):
            new_req = age_days_hint.days
            age_info['age-requirement-reduced'] = {
                'new-requirement': new_req,
                'changed-by': age_days_hint.user
            }
            if 'original-age-requirement' not in age_info:
                age_info['original-age-requirement'] = min_days
            min_days = new_req

        age_info['age-requirement'] = min_days
        res = PolicyVerdict.PASS

        if days_old < min_days:
            # An urgent hint waives the remaining age requirement
            urgent_hints = self.hints.search('urgent', package=source_name,
                                             version=source_data_srcdist.version)
            if urgent_hints:
                age_info['age-requirement-reduced'] = {
                    'new-requirement': 0,
                    'changed-by': urgent_hints[0].user
                }
                res = PolicyVerdict.PASS_HINTED
            else:
                res = PolicyVerdict.REJECTED_TEMPORARILY

        # update excuse
        age_hint = age_info.get('age-requirement-reduced', None)
        age_min_req = age_info['age-requirement']
        if age_hint:
            new_req = age_hint['new-requirement']
            who = age_hint['changed-by']
            if new_req:
                excuse.addinfo("Overriding age needed from %d days to %d by %s" % (
                    age_min_req, new_req, who))
                age_min_req = new_req
            else:
                excuse.addinfo("Too young, but urgency pushed by %s" % who)
                age_min_req = 0
        excuse.setdaysold(age_info['current-age'], age_min_req)

        if age_min_req == 0:
            excuse.addinfo("%d days old" % days_old)
        elif days_old < age_min_req:
            excuse.add_verdict_info(res, "Too young, only %d of %d days old" %
                                    (days_old, age_min_req))
        else:
            excuse.addinfo("%d days old (needed %d days)" %
                           (days_old, age_min_req))

        return res

    def _read_dates_file(self):
        """Parse the dates file"""
        dates = self._dates
        fallback_filename = os.path.join(self.suite_info.target_suite.path, 'Dates')
        using_new_name = False
        try:
            filename = os.path.join(self.state_dir, 'age-policy-dates')
            if not os.path.exists(filename) and os.path.exists(fallback_filename):
                filename = fallback_filename
            else:
                using_new_name = True
        except AttributeError:
            # No STATE_DIR configured; fall back to the legacy location
            if os.path.exists(fallback_filename):
                filename = fallback_filename
            else:
                raise RuntimeError("Please set STATE_DIR in the britney configuration")

        try:
            with open(filename, encoding='utf-8') as fd:
                for line in fd:
                    if line.startswith('#'):
                        # Ignore comment lines (mostly used for tests)
                        continue
                    # <source> <version> <date>
                    ln = line.split()
                    if len(ln) != 3:  # pragma: no cover
                        continue
                    try:
                        dates[ln[0]] = (ln[1], int(ln[2]))
                    except ValueError:  # pragma: no cover
                        pass
        except FileNotFoundError:
            if not using_new_name:
                # If we using the legacy name, then just give up
                raise
            self.logger.info("%s does not appear to exist. Creating it", filename)
            # mode='x' ensures we only create the file, never truncate one
            with open(filename, mode='x', encoding='utf-8'):
                pass

    def _read_urgencies_file(self):
        """Parse the urgencies file, keeping only the "stickiest" urgency per source."""
        urgencies = self._urgencies
        min_days_default = self._min_days_default
        fallback_filename = os.path.join(self.suite_info.target_suite.path, 'Urgency')
        try:
            filename = os.path.join(self.state_dir, 'age-policy-urgencies')
            if not os.path.exists(filename) and os.path.exists(fallback_filename):
                filename = fallback_filename
        except AttributeError:
            filename = fallback_filename

        sources_s = self.suite_info.primary_source_suite.sources
        sources_t = self.suite_info.target_suite.sources

        with open(filename, errors='surrogateescape', encoding='ascii') as fd:
            for line in fd:
                if line.startswith('#'):
                    # Ignore comment lines (mostly used for tests)
                    continue
                # <source> <version> <urgency>
                ln = line.split()
                if len(ln) != 3:
                    continue

                # read the minimum days associated with the urgencies
                urgency_old = urgencies.get(ln[0], None)
                mindays_old = self._min_days.get(urgency_old, 1000)
                mindays_new = self._min_days.get(ln[2], min_days_default)

                # if the new urgency is lower (so the min days are higher), do nothing
                if mindays_old <= mindays_new:
                    continue

                # if the package exists in the target suite and it is more recent, do nothing
                tsrcv = sources_t.get(ln[0], None)
                if tsrcv and apt_pkg.version_compare(tsrcv.version, ln[1]) >= 0:
                    continue

                # if the package doesn't exist in the primary source suite or it is older, do nothing
                usrcv = sources_s.get(ln[0], None)
                if not usrcv or apt_pkg.version_compare(usrcv.version, ln[1]) < 0:
                    continue

                # update the urgency for the package
                urgencies[ln[0]] = ln[2]

    def _write_dates_file(self):
        """Atomically write the dates file (write a temp file, then rename)."""
        dates = self._dates
        try:
            directory = self.state_dir
            basename = 'age-policy-dates'
            old_file = os.path.join(self.suite_info.target_suite.path, 'Dates')
        except AttributeError:
            # No STATE_DIR configured; fall back to the legacy location
            directory = self.suite_info.target_suite.path
            basename = 'Dates'
            old_file = None
        filename = os.path.join(directory, basename)
        filename_tmp = os.path.join(directory, '%s_new' % basename)
        with open(filename_tmp, 'w', encoding='utf-8') as fd:
            for pkg in sorted(dates):
                version, date = dates[pkg]
                fd.write("%s %s %d\n" % (pkg, version, date))
        os.rename(filename_tmp, filename)
        if old_file is not None and os.path.exists(old_file):
            # We migrated to the state-dir name; drop the legacy file
            self.logger.info("Removing old age-policy-dates file %s", old_file)
            os.unlink(old_file)
class RCBugPolicy(BasePolicy):
    """RC bug regression policy for source migrations

    The RCBugPolicy will read provided list of RC bugs and block any
    source upload that would introduce a *new* RC bug in the target
    suite.

    The RCBugPolicy's decision is influenced by the following:

    State files:
     * ${STATE_DIR}/rc-bugs-${SUITE_NAME}: File containing RC bugs for packages in
       the given suite (one for both primary source suite and the target suite is
       needed).
       - These files need to be updated externally.
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('rc-bugs', options, suite_info, {SuiteClass.PRIMARY_SOURCE_SUITE})
        # package/source name -> set of open RC bug numbers; loaded in initialise()
        self._bugs_source: Optional[dict[str, set[str]]] = None
        self._bugs_target: Optional[dict[str, set[str]]] = None

    def register_hints(self, hint_parser: HintParser) -> None:
        # "ignore-rc-bugs <bug>[,<bug>...] <source>/<version>"
        f = simple_policy_hint_parser_function(IgnoreRCBugHint, lambda x: frozenset(x.split(',')))
        hint_parser.register_hint_type('ignore-rc-bugs',
                                       f,
                                       min_args=2)

    def initialise(self, britney) -> None:
        """Load the RC bug lists for the primary source suite and the target suite."""
        super().initialise(britney)
        source_suite = self.suite_info.primary_source_suite
        target_suite = self.suite_info.target_suite
        fallback_unstable = os.path.join(source_suite.path, 'BugsV')
        fallback_testing = os.path.join(target_suite.path, 'BugsV')
        try:
            filename_unstable = os.path.join(self.state_dir, 'rc-bugs-%s' % source_suite.name)
            filename_testing = os.path.join(self.state_dir, 'rc-bugs-%s' % target_suite.name)
            # Only fall back to the legacy "BugsV" files when neither new-style file exists
            if not os.path.exists(filename_unstable) and not os.path.exists(filename_testing) and \
                    os.path.exists(fallback_unstable) and os.path.exists(fallback_testing):
                filename_unstable = fallback_unstable
                filename_testing = fallback_testing
        except AttributeError:
            # No STATE_DIR configured; use the legacy locations
            filename_unstable = fallback_unstable
            filename_testing = fallback_testing
        self._bugs_source = self._read_bugs(filename_unstable)
        self._bugs_target = self._read_bugs(filename_testing)

    def apply_src_policy_impl(self, rcbugs_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse):
        """Reject the migration if it would introduce new RC bugs in the target suite."""
        assert self._bugs_source is not None  # for type checking
        assert self._bugs_target is not None  # for type checking
        bugs_t = set()
        bugs_u = set()
        source_name = item.package

        # Collect bugs filed against the source package (plain and "src:" keys)
        for src_key in (source_name, 'src:%s' % source_name):
            if source_data_tdist and src_key in self._bugs_target:
                bugs_t.update(self._bugs_target[src_key])
            if src_key in self._bugs_source:
                bugs_u.update(self._bugs_source[src_key])

        # ... and bugs filed against each binary built from it
        for pkg, _, _ in source_data_srcdist.binaries:
            if pkg in self._bugs_source:
                bugs_u |= self._bugs_source[pkg]
        if source_data_tdist:
            for pkg, _, _ in source_data_tdist.binaries:
                if pkg in self._bugs_target:
                    bugs_t |= self._bugs_target[pkg]

        # If a package is not in the target suite, it has no RC bugs per
        # definition. Unfortunately, it seems that the live-data is
        # not always accurate (e.g. live-2011-12-13 suggests that
        # obdgpslogger had the same bug in testing and unstable,
        # but obdgpslogger was not in testing at that time).
        # - For the curious, obdgpslogger was removed on that day
        #   and the BTS probably had not caught up with that fact.
        #   (https://tracker.debian.org/news/415935)
        assert not bugs_t or source_data_tdist, "%s had bugs in the target suite but is not present" % source_name

        verdict = PolicyVerdict.PASS

        for ignore_hint in self.hints.search('ignore-rc-bugs', package=source_name,
                                             version=source_data_srcdist.version):
            ignored_bugs = ignore_hint.ignored_rcbugs

            # Only handle one hint for now
            if 'ignored-bugs' in rcbugs_info:
                self.logger.info("Ignoring ignore-rc-bugs hint from %s on %s due to another hint from %s",
                                 ignore_hint.user, source_name, rcbugs_info['ignored-bugs']['issued-by'])
                continue
            if not ignored_bugs.isdisjoint(bugs_u):
                bugs_u -= ignored_bugs
                bugs_t -= ignored_bugs
                rcbugs_info['ignored-bugs'] = {
                    'bugs': sorted(ignored_bugs),
                    'issued-by': ignore_hint.user
                }
                verdict = PolicyVerdict.PASS_HINTED
            else:
                self.logger.info("Ignoring ignore-rc-bugs hint from %s on %s as none of %s affect the package",
                                 ignore_hint.user, source_name, str(ignored_bugs))

        rcbugs_info['shared-bugs'] = sorted(bugs_u & bugs_t)
        rcbugs_info['unique-source-bugs'] = sorted(bugs_u - bugs_t)
        rcbugs_info['unique-target-bugs'] = sorted(bugs_t - bugs_u)

        # update excuse
        new_bugs = rcbugs_info['unique-source-bugs']
        old_bugs = rcbugs_info['unique-target-bugs']
        excuse.setbugs(old_bugs, new_bugs)

        if new_bugs:
            verdict = PolicyVerdict.REJECTED_PERMANENTLY
            excuse.add_verdict_info(verdict, "Updating %s would introduce bugs in %s: %s" % (
                source_name, self.suite_info.target_suite.name, ", ".join(
                    ["<a href=\"https://bugs.debian.org/%s\">#%s</a>" % (quote(a), a) for a in new_bugs])))

        if old_bugs:
            excuse.addinfo("Updating %s will fix bugs in %s: %s" % (
                source_name, self.suite_info.target_suite.name, ", ".join(
                    ["<a href=\"https://bugs.debian.org/%s\">#%s</a>" % (quote(a), a) for a in old_bugs])))

        return verdict

    def _read_bugs(self, filename: str) -> dict[str, set[str]]:
        """Read the release critical bug summary from the specified file

        The file contains rows with the format:

        <package-name> <bug number>[,<bug number>...]

        The method returns a dictionary where the key is the binary package
        name and the value is the set of open RC bugs for it.
        """
        bugs: dict[str, set[str]] = {}
        self.logger.info("Loading RC bugs data from %s", filename)
        with open(filename, encoding='ascii') as f:
            for line in f:
                ln = line.split()
                if len(ln) != 2:  # pragma: no cover
                    self.logger.warning("Malformed line found in line %s", line)
                    continue
                pkg = ln[0]
                if pkg not in bugs:
                    bugs[pkg] = set()
                bugs[pkg].update(ln[1].split(","))
        return bugs
743class PiupartsPolicy(BasePolicy):
    def __init__(self, options, suite_info: Suites):
        super().__init__('piuparts', options, suite_info, {SuiteClass.PRIMARY_SOURCE_SUITE})
        # binary/source name -> (state, url); populated in initialise()
        self._piuparts_source: Optional[dict[str, tuple[str, str]]] = None
        self._piuparts_target: Optional[dict[str, tuple[str, str]]] = None
    def register_hints(self, hint_parser: HintParser) -> None:
        # "ignore-piuparts <source>/<version>"
        hint_parser.register_hint_type('ignore-piuparts', split_into_one_hint_per_package)
753 def initialise(self, britney):
754 super().initialise(britney)
755 source_suite = self.suite_info.primary_source_suite
756 target_suite = self.suite_info.target_suite
757 try:
758 filename_unstable = os.path.join(self.state_dir, 'piuparts-summary-%s.json' % source_suite.name)
759 filename_testing = os.path.join(self.state_dir, 'piuparts-summary-%s.json' % target_suite.name)
760 except AttributeError as e: # pragma: no cover
761 raise RuntimeError("Please set STATE_DIR in the britney configuration") from e
762 self._piuparts_source = self._read_piuparts_summary(filename_unstable, keep_url=True)
763 self._piuparts_target = self._read_piuparts_summary(filename_testing, keep_url=False)
def apply_src_policy_impl(self, piuparts_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                          source_data_srcdist: SourcePackage, excuse):
    """Judge the item based on its piuparts state in source vs. target suite.

    A failure in the source suite only blocks migration when the target
    suite's state differs (i.e. it is a regression); an 'ignore-piuparts'
    hint can override a rejection.
    """
    assert self._piuparts_source is not None  # for type checking
    assert self._piuparts_target is not None  # for type checking
    src = item.package

    target_entry = self._piuparts_target.get(src)
    testing_state = target_entry[0] if target_entry is not None else 'X'

    url: Optional[str]
    unstable_state, url = self._piuparts_source.get(src, ('X', None))

    if url is None:
        url_html = "(no link yet)"
    else:
        url_html = '<a href="{0}">{0}</a>'.format(url)

    if unstable_state == 'P':
        # passed in the source suite: not a regression by definition
        piuparts_info['test-results'] = 'pass'
        result = PolicyVerdict.PASS
        msg = 'Piuparts tested OK - {0}'.format(url_html)
    elif unstable_state == 'F':
        if testing_state == unstable_state:
            # fails in both suites, so migrating changes nothing
            piuparts_info['test-results'] = 'failed'
            result = PolicyVerdict.PASS
            msg = 'Ignoring piuparts failure (Not a regression) - {0}'.format(url_html)
        else:
            piuparts_info['test-results'] = 'regression'
            result = PolicyVerdict.REJECTED_PERMANENTLY
            msg = 'Rejected due to piuparts regression - {0}'.format(url_html)
    elif unstable_state == 'W':
        piuparts_info['test-results'] = 'waiting-for-test-results'
        result = PolicyVerdict.REJECTED_TEMPORARILY
        msg = 'Waiting for piuparts test results (stalls migration) - {0}'.format(url_html)
    else:
        piuparts_info['test-results'] = 'cannot-be-tested'
        result = PolicyVerdict.PASS
        msg = 'Cannot be tested by piuparts (not a blocker) - {0}'.format(url_html)

    if url is not None:
        piuparts_info['piuparts-test-url'] = url

    if result.is_rejected:
        excuse.add_verdict_info(result, msg)
        # a matching ignore-piuparts hint downgrades the rejection
        for ignore_hint in self.hints.search('ignore-piuparts',
                                             package=src,
                                             version=source_data_srcdist.version):
            piuparts_info['ignored-piuparts'] = {
                'issued-by': ignore_hint.user
            }
            result = PolicyVerdict.PASS_HINTED
            excuse.addinfo("Ignoring piuparts issue as requested by {0}".format(ignore_hint.user))
            break
    else:
        excuse.addinfo(msg)

    return result
828 def _read_piuparts_summary(self, filename, keep_url=True):
829 summary = {}
830 self.logger.info("Loading piuparts report from %s", filename)
831 with open(filename) as fd:
832 if os.fstat(fd.fileno()).st_size < 1:
833 return summary
834 data = json.load(fd)
835 try:
836 if data['_id'] != 'Piuparts Package Test Results Summary' or data['_version'] != '1.0': # pragma: no cover
837 raise ValueError('Piuparts results in {0} does not have the correct ID or version'.format(filename))
838 except KeyError as e: # pragma: no cover
839 raise ValueError('Piuparts results in {0} is missing id or version field'.format(filename)) from e
840 for source, suite_data in data['packages'].items():
841 if len(suite_data) != 1: # pragma: no cover
842 raise ValueError('Piuparts results in {0}, the source {1} does not have exactly one result set'.format(
843 filename, source
844 ))
845 item = next(iter(suite_data.values()))
846 state, _, url = item
847 if not keep_url:
848 url = None
849 summary[source] = (state, url)
851 return summary
class DependsPolicy(BasePolicy):
    """Reject items whose binaries would have unsatisfiable dependencies.

    Runs once per architecture (RUN_ON_EVERY_ARCH_ONLY) and checks, for
    every binary of the item built from the current source version,
    whether its dependencies can be satisfied by the target suite plus
    the binaries migrating with the same item.  It also records
    per-architecture installability facts consumed by the autopkgtest
    policy (see the trade-off comment near the end of
    apply_srcarch_policy_impl).
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('depends', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE},
                         ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY)
        # all of these are filled in by initialise() from the britney instance
        self._britney = None
        self.pkg_universe = None
        self.broken_packages = None
        self.all_binaries = None
        self.nobreakall_arches = None
        self.new_arches = None
        self.break_arches = None
        self.allow_uninst = None

    def initialise(self, britney):
        """Cache references to britney's package universe and arch options."""
        super().initialise(britney)
        self._britney = britney
        self.pkg_universe = britney.pkg_universe
        self.broken_packages = britney.pkg_universe.broken_packages
        self.all_binaries = britney.all_binaries
        self.nobreakall_arches = self.options.nobreakall_arches
        self.new_arches = self.options.new_arches
        self.break_arches = self.options.break_arches
        self.allow_uninst = britney.allow_uninst

    def apply_srcarch_policy_impl(self, deps_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Check dependency satisfiability of the item's binaries on *arch*."""
        verdict = PolicyVerdict.PASS

        if arch in self.break_arches or arch in self.new_arches:
            # we don't check these in the policy (TODO - for now?)
            return verdict

        source_suite = item.suite
        target_suite = self.suite_info.target_suite

        packages_s_a = source_suite.binaries[arch]
        packages_t_a = target_suite.binaries[arch]

        my_bins = sorted(excuse.packages[arch])

        # sets of booleans: which installability outcomes were observed for
        # arch:all vs arch-specific binaries of this item on this arch
        arch_all_installable = set()
        arch_arch_installable = set()
        consider_it_regression = True

        for pkg_id in my_bins:
            pkg_name = pkg_id.package_name
            binary_u = packages_s_a[pkg_name]
            pkg_arch = binary_u.architecture

            # in some cases, we want to track the uninstallability of a
            # package (because the autopkgtest policy uses this), but we still
            # want to allow the package to be uninstallable
            skip_dep_check = False

            if (binary_u.source_version != source_data_srcdist.version):
                # don't check cruft in unstable
                continue

            if (item.architecture != 'source' and pkg_arch == 'all'):
                # we don't care about the existing arch: all binaries when
                # checking a binNMU item, because the arch: all binaries won't
                # migrate anyway
                skip_dep_check = True

            if pkg_arch == 'all' and arch not in self.nobreakall_arches:
                # arch:all binaries only need to be installable on the
                # NOBREAKALL architectures
                skip_dep_check = True

            if pkg_name in self.allow_uninst[arch]:
                # this binary is allowed to become uninstallable, so we don't
                # need to check anything
                skip_dep_check = True

            if pkg_name in packages_t_a:
                oldbin = packages_t_a[pkg_name]
                if not target_suite.is_installable(oldbin.pkg_id):
                    # as the current binary in testing is already
                    # uninstallable, the newer version is allowed to be
                    # uninstallable as well, so we don't need to check
                    # anything
                    skip_dep_check = True
                    consider_it_regression = False

            if pkg_id in self.broken_packages:
                if pkg_arch == 'all':
                    arch_all_installable.add(False)
                else:
                    arch_arch_installable.add(False)
                # dependencies can't be satisfied by all the known binaries -
                # this certainly won't work...
                excuse.add_unsatisfiable_on_arch(arch)
                if skip_dep_check:
                    # ...but if the binary is allowed to become uninstallable,
                    # we don't care
                    # we still want the binary to be listed as uninstallable,
                    continue
                verdict = PolicyVerdict.REJECTED_PERMANENTLY
                excuse.add_verdict_info(verdict, "%s/%s has unsatisfiable dependency" % (
                    pkg_name, arch))
                excuse.addreason("depends")
            else:
                if pkg_arch == 'all':
                    arch_all_installable.add(True)
                else:
                    arch_arch_installable.add(True)

            if skip_dep_check:
                continue

            deps = self.pkg_universe.dependencies_of(pkg_id)

            for dep in deps:
                # dep is a list of packages, each of which satisfy the
                # dependency

                if dep == frozenset():
                    continue
                is_ok = False
                needed_for_dep = set()

                for alternative in dep:
                    if target_suite.is_pkg_in_the_suite(alternative):
                        # dep can be satisfied in testing - ok
                        is_ok = True
                    elif alternative in my_bins:
                        # can be satisfied by binary from same item: will be
                        # ok if item migrates
                        is_ok = True
                    else:
                        needed_for_dep.add(alternative)

                if not is_ok:
                    spec = DependencySpec(DependencyType.DEPENDS, arch)
                    excuse.add_package_depends(spec, needed_for_dep)

        # The autopkgtest policy needs delicate trade offs for
        # non-installability. The current choice (considering source
        # migration and only binaries built by the version of the
        # source):
        #
        # * Run autopkgtest if all arch:$arch binaries are installable
        #   (but some or all arch:all binaries are not)
        #
        # * Don't schedule nor wait for not installable arch:all only package
        #   on ! NOBREAKALL_ARCHES
        #
        # * Run autopkgtest if installability isn't a regression (there are (or
        #   rather, should) not be a lot of packages in this state, and most
        #   likely they'll just fail quickly)
        #
        # * Don't schedule, but wait otherwise
        if arch_arch_installable == {True} and False in arch_all_installable:
            deps_info.setdefault('autopkgtest_run_anyways', []).append(arch)
        elif (arch not in self.nobreakall_arches and
              arch_arch_installable == set() and
              False in arch_all_installable):
            deps_info.setdefault('arch_all_not_installable', []).append(arch)
        elif not consider_it_regression:
            deps_info.setdefault('autopkgtest_run_anyways', []).append(arch)

        return verdict
@unique
class BuildDepResult(IntEnum):
    """Outcome of checking one Build-Depends(-Arch|-Indep) relation.

    Lower numeric values are better, so comparisons like ``<`` pick the
    more favourable result when several architectures are examined.
    """

    # relation is satisfied in target
    OK = 1
    # relation can be satisfied by other packages in source
    DEPENDS = 2
    # relation cannot be satisfied
    FAILED = 3
class BuildDependsPolicy(BasePolicy):
    """Check that a source's Build-Depends(-Arch|-Indep) can be satisfied.

    Build-Depends must be satisfiable (by the target suite or by packages
    migrating with the item) on every relevant architecture;
    Build-Depends-Indep only needs to be satisfiable on a single
    architecture, preferring the configured ALL_BUILDARCH list.
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('build-depends', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE})
        self._britney = None
        # architectures to try first for Build-Depends-Indep checks,
        # filled in initialise() from the ALL_BUILDARCH option
        self._all_buildarch = []

        parse_option(options, 'all_buildarch')

    def initialise(self, britney):
        """Cache the britney instance and parse the ALL_BUILDARCH option."""
        super().initialise(britney)
        self._britney = britney
        if self.options.all_buildarch:
            self._all_buildarch = SuiteContentLoader.config_str_as_list(self.options.all_buildarch, [])

    def apply_src_policy_impl(self, build_deps_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse,
                              get_dependency_solvers=get_dependency_solvers):
        """Check both Build-Depends-Arch and Build-Depends-Indep; worst verdict wins."""
        verdict = PolicyVerdict.PASS

        # analyze the dependency fields (if present)
        deps = source_data_srcdist.build_deps_arch
        if deps:
            v = self._check_build_deps(deps, DependencyType.BUILD_DEPENDS, build_deps_info, item,
                                       source_data_tdist, source_data_srcdist, excuse,
                                       get_dependency_solvers=get_dependency_solvers)
            verdict = PolicyVerdict.worst_of(verdict, v)

        ideps = source_data_srcdist.build_deps_indep
        if ideps:
            v = self._check_build_deps(ideps, DependencyType.BUILD_DEPENDS_INDEP, build_deps_info, item,
                                       source_data_tdist, source_data_srcdist, excuse,
                                       get_dependency_solvers=get_dependency_solvers)
            verdict = PolicyVerdict.worst_of(verdict, v)

        return verdict

    def _get_check_archs(self, archs, dep_type):
        """Return the architectures to check *dep_type* on, in preference order.

        For BUILD_DEPENDS this is simply the configured architectures that
        appear in *archs*; for BUILD_DEPENDS_INDEP an ordered candidate
        list is built instead (any single architecture suffices there).
        OUTOFSYNC_ARCHES are always excluded.
        """
        oos = self.options.outofsync_arches

        if dep_type == DependencyType.BUILD_DEPENDS:
            return [arch for arch in self.options.architectures if arch in archs and arch not in oos]

        # first try the all buildarch
        checkarchs = list(self._all_buildarch)
        # then try the architectures where this source has arch specific
        # binaries (in the order of the architecture config file)
        checkarchs.extend(arch for arch in self.options.architectures if arch in archs and arch not in checkarchs)
        # then try all other architectures
        checkarchs.extend(arch for arch in self.options.architectures if arch not in checkarchs)

        # and drop OUTOFSYNC_ARCHES
        return [arch for arch in checkarchs if arch not in oos]

    def _add_info_for_arch(self, arch, excuses_info, blockers, results, dep_type, target_suite, source_suite, excuse, verdict):
        """Fold one architecture's check results into the excuse; return the updated verdict."""
        if arch in blockers:
            packages = blockers[arch]

            # for the solving packages, update the excuse to add the dependencies
            for p in packages:
                if arch not in self.options.break_arches:
                    spec = DependencySpec(dep_type, arch)
                    excuse.add_package_depends(spec, {p})

        if arch in results and results[arch] == BuildDepResult.FAILED:
            verdict = PolicyVerdict.worst_of(verdict, PolicyVerdict.REJECTED_PERMANENTLY)

        if arch in excuses_info:
            for excuse_text in excuses_info[arch]:
                if verdict.is_rejected:
                    excuse.add_verdict_info(verdict, excuse_text)
                else:
                    excuse.addinfo(excuse_text)

        return verdict

    def _check_build_deps(self, deps, dep_type, build_deps_info, item: MigrationItem,
                          source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage, excuse,
                          get_dependency_solvers=get_dependency_solvers):
        """Check one build-dependency field (*deps*) of the item.

        For Build-Depends-Indep (any_arch_ok) architectures are tried in
        preference order and the first fully-satisfied one wins; for
        Build-Depends every checked architecture must be satisfiable.
        """
        verdict = PolicyVerdict.PASS
        any_arch_ok = dep_type == DependencyType.BUILD_DEPENDS_INDEP

        britney = self._britney

        # local copies for better performance
        parse_src_depends = apt_pkg.parse_src_depends

        source_name = item.package
        source_suite = item.suite
        target_suite = self.suite_info.target_suite
        binaries_s = source_suite.binaries
        provides_s = source_suite.provides_table
        binaries_t = target_suite.binaries
        provides_t = target_suite.provides_table
        unsat_bd = {}
        relevant_archs = {binary.architecture for binary in source_data_srcdist.binaries
                          if britney.all_binaries[binary].architecture != 'all'}

        excuses_info = defaultdict(list)
        blockers = defaultdict(set)
        arch_results = {}
        result_archs = defaultdict(list)
        bestresult = BuildDepResult.FAILED
        check_archs = self._get_check_archs(relevant_archs, dep_type)
        if not check_archs:
            # when the arch list is empty, we check the b-d on any arch, instead of all archs
            # this happens for Build-Depends on a source package that only produces arch: all binaries
            any_arch_ok = True
            check_archs = self._get_check_archs(self.options.architectures, DependencyType.BUILD_DEPENDS_INDEP)

        for arch in check_archs:
            # retrieve the binary package from the specified suite and arch
            binaries_s_a = binaries_s[arch]
            provides_s_a = provides_s[arch]
            binaries_t_a = binaries_t[arch]
            provides_t_a = provides_t[arch]
            arch_results[arch] = BuildDepResult.OK
            # for every dependency block (formed as conjunction of disjunction)
            for block_txt in deps.split(','):
                block = parse_src_depends(block_txt, False, arch)
                # Unlike regular dependencies, some clauses of the Build-Depends(-Arch|-Indep) can be
                # filtered out by (e.g.) architecture restrictions. We need to cope with this while
                # keeping block_txt and block aligned.
                if not block:
                    # Relation is not relevant for this architecture.
                    continue
                block = block[0]
                # if the block is satisfied in the target suite, then skip the block
                if get_dependency_solvers(block, binaries_t_a, provides_t_a, build_depends=True):
                    # Satisfied in the target suite; all ok.
                    continue

                # check if the block can be satisfied in the source suite, and list the solving packages
                packages = get_dependency_solvers(block, binaries_s_a, provides_s_a, build_depends=True)
                sources = sorted(p.source for p in packages)

                # if the dependency can be satisfied by the same source package, skip the block:
                # obviously both binary packages will enter the target suite together
                if source_name in sources:
                    continue

                # if no package can satisfy the dependency, add this information to the excuse
                if not packages:
                    excuses_info[arch].append("%s unsatisfiable %s on %s: %s" % (source_name, dep_type, arch, block_txt.strip()))
                    if arch not in unsat_bd:
                        unsat_bd[arch] = []
                    unsat_bd[arch].append(block_txt.strip())
                    arch_results[arch] = BuildDepResult.FAILED
                    continue

                blockers[arch].update(p.pkg_id for p in packages)
                if arch_results[arch] < BuildDepResult.DEPENDS:
                    arch_results[arch] = BuildDepResult.DEPENDS

            if any_arch_ok:
                if arch_results[arch] < bestresult:
                    bestresult = arch_results[arch]
                result_archs[arch_results[arch]].append(arch)
                if bestresult == BuildDepResult.OK:
                    # we found an architecture where the b-deps-indep are
                    # satisfied in the target suite, so we can stop
                    break

        if any_arch_ok:
            # report only the best architecture we found
            arch = result_archs[bestresult][0]
            excuse.add_detailed_info("Checking %s on %s" % (dep_type.get_description(), arch))
            key = "check-%s-on-arch" % dep_type.get_reason()
            build_deps_info[key] = arch
            verdict = self._add_info_for_arch(
                arch, excuses_info, blockers, arch_results,
                dep_type, target_suite, source_suite, excuse, verdict)

        else:
            for arch in check_archs:
                verdict = self._add_info_for_arch(
                    arch, excuses_info, blockers, arch_results,
                    dep_type, target_suite, source_suite, excuse, verdict)

        if unsat_bd:
            build_deps_info['unsatisfiable-arch-build-depends'] = unsat_bd

        return verdict
class BuiltUsingPolicy(BasePolicy):
    """Built-Using policy

    Binaries that incorporate (part of) another source package must list these
    sources under 'Built-Using'.

    This policy checks if the corresponding sources are available in the
    target suite. If they are not, but they are candidates for migration, a
    dependency is added.

    If the binary incorporates a newer version of a source, that is not (yet)
    a candidate, we don't want to accept that binary. A rebuild later in the
    primary suite wouldn't fix the issue, because that would incorporate the
    newer version again.

    If the binary incorporates an older version of the source, a newer version
    will be accepted as a replacement. We assume that this can be fixed by
    rebuilding the binary at some point during the development cycle.

    Requiring exact version of the source would not be useful in practice. A
    newer upload of that source wouldn't be blocked by this policy, so the
    built-using would be outdated anyway.

    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('built-using', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE},
                         ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY)

    def initialise(self, britney):
        super().initialise(britney)

    def apply_srcarch_policy_impl(self, build_deps_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Check the Built-Using entries of the item's binaries on *arch*."""
        verdict = PolicyVerdict.PASS

        source_suite = item.suite
        target_suite = self.suite_info.target_suite
        binaries_s = source_suite.binaries

        def check_bu_in_suite(bu_source, bu_version, source_suite):
            # Return True if *source_suite* carries bu_source at a version
            # >= bu_version; when it does, record a BUILT_USING dependency
            # (skipped with just a note on break arches).  Uses pkg_name
            # and arch from the enclosing loop.
            found = False
            if bu_source not in source_suite.sources:
                return found
            s_source = source_suite.sources[bu_source]
            s_ver = s_source.version
            if apt_pkg.version_compare(s_ver, bu_version) >= 0:
                found = True
                dep = PackageId(bu_source, s_ver, "source")
                if arch in self.options.break_arches:
                    excuse.add_detailed_info("Ignoring Built-Using for %s/%s on %s" % (
                        pkg_name, arch, dep.uvname))
                else:
                    spec = DependencySpec(DependencyType.BUILT_USING, arch)
                    excuse.add_package_depends(spec, {dep})
                    excuse.add_detailed_info("%s/%s has Built-Using on %s" % (
                        pkg_name, arch, dep.uvname))

            return found

        for pkg_id in sorted(x for x in source_data_srcdist.binaries if x.architecture == arch):
            pkg_name = pkg_id.package_name

            # retrieve the testing (if present) and unstable corresponding binary packages
            binary_s = binaries_s[arch][pkg_name]

            for bu in binary_s.builtusing:
                bu_source = bu[0]
                bu_version = bu[1]
                found = False
                # first, the target suite itself may already satisfy it
                if bu_source in target_suite.sources:
                    t_source = target_suite.sources[bu_source]
                    t_ver = t_source.version
                    if apt_pkg.version_compare(t_ver, bu_version) >= 0:
                        found = True

                # otherwise it may migrate along from the item's own suite...
                if not found:
                    found = check_bu_in_suite(bu_source, bu_version, source_suite)

                # ...or, for additional source suites, from the primary one
                if not found and source_suite.suite_class.is_additional_source:
                    found = check_bu_in_suite(bu_source, bu_version, self.suite_info.primary_source_suite)

                if not found:
                    if arch in self.options.break_arches:
                        excuse.add_detailed_info("Ignoring unsatisfiable Built-Using for %s/%s on %s %s" % (
                            pkg_name, arch, bu_source, bu_version))
                    else:
                        verdict = PolicyVerdict.worst_of(verdict, PolicyVerdict.REJECTED_PERMANENTLY)
                        excuse.add_verdict_info(verdict, "%s/%s has unsatisfiable Built-Using on %s %s" % (
                            pkg_name, arch, bu_source, bu_version))

        return verdict
class BlockPolicy(BasePolicy):
    """Enforce block/unblock hints and the block-all variants.

    A migration item is rejected (needs approval) when a matching
    block(-*) hint exists without a corresponding unblock for the same
    version/suite/architecture.  'block-all' hints can block all
    sources, all new sources, key packages, or packages without a fully
    successful autopkgtest.  Items from non-primary source suites are
    implicitly blocked pending approval.
    """

    # group(1) is the optional 'un' prefix, group(2) the block command
    # (e.g. 'block', 'block-udeb')
    BLOCK_HINT_REGEX = re.compile('^(un)?(block-?.*)$')

    def __init__(self, options, suite_info: Suites):
        super().__init__('block', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE})
        self._britney = None
        # maps block-all target ('source', 'new-source', 'key',
        # 'no-autopkgtest') to the hint that set it
        self._blockall = {}

    def initialise(self, britney):
        """Collect block-all hints and, if needed, the key package list."""
        super().initialise(britney)
        self._britney = britney
        for hint in self.hints.search(type='block-all'):
            self._blockall[hint.package] = hint

        self._key_packages = []
        if 'key' in self._blockall:
            self._key_packages = self._read_key_packages()

    def _read_key_packages(self):
        """Read the list of key packages

        The file contains data in the yaml format :

        - reason: <something>
          source: <package>

        The method returns a list of all key packages.
        """
        filename = os.path.join(self.state_dir, 'key_packages.yaml')
        self.logger.info("Loading key packages from %s", filename)
        if os.path.exists(filename):
            with open(filename) as f:
                data = yaml.safe_load(f)
            key_packages = [item['source'] for item in data]
        else:
            self.logger.error("Britney was asked to block key packages, " +
                              "but no key_packages.yaml file was found.")
            sys.exit(1)

        return key_packages

    def register_hints(self, hint_parser: HintParser) -> None:
        # block related hints are currently defined in hint.py
        pass

    def _check_blocked(self, item, arch, version, excuse):
        """Return the verdict for *item* on *arch* given block/unblock hints."""
        verdict = PolicyVerdict.PASS
        # block command -> issuing user (or suite name for implicit blocks)
        blocked = {}
        unblocked = {}
        # optional custom rejection text per block command
        block_info = {}
        source_suite = item.suite
        suite_name = source_suite.name
        src = item.package
        is_primary = source_suite.suite_class == SuiteClass.PRIMARY_SOURCE_SUITE

        tooltip = "please contact %s-release if update is needed" % self.options.distribution

        shints = self.hints.search(package=src)
        mismatches = False
        r = self.BLOCK_HINT_REGEX
        for hint in shints:
            m = r.match(hint.type)
            if m:
                if m.group(1) == 'un':
                    # unblock hints must match version, suite and arch exactly
                    if hint.version != version or hint.suite.name != suite_name or \
                            (hint.architecture != arch and hint.architecture != 'source'):
                        self.logger.info('hint mismatch: %s %s %s', version, arch, suite_name)
                        mismatches = True
                    else:
                        unblocked[m.group(2)] = hint.user
                        excuse.add_hint(hint)
                else:
                    # block(-*) hint: only accepts a source, so this will
                    # always match
                    blocked[m.group(2)] = hint.user
                    excuse.add_hint(hint)

        if 'block' not in blocked and is_primary:
            # if there is a specific block hint for this package, we don't
            # check for the general hints

            if self.options.distribution == "debian":
                url = "https://release.debian.org/testing/freeze_policy.html"
                tooltip = 'Follow the <a href="%s">freeze policy</a> when applying for an unblock' % url

            if 'source' in self._blockall:
                blocked['block'] = self._blockall['source'].user
                excuse.add_hint(self._blockall['source'])
            elif 'new-source' in self._blockall and \
                    src not in self.suite_info.target_suite.sources:
                blocked['block'] = self._blockall['new-source'].user
                excuse.add_hint(self._blockall['new-source'])
                # no tooltip: new sources will probably not be accepted anyway
                block_info['block'] = "blocked by %s: is not in %s" % \
                    (self._blockall['new-source'].user,
                     self.suite_info.target_suite.name)
            elif 'key' in self._blockall and src in self._key_packages:
                blocked['block'] = self._blockall['key'].user
                excuse.add_hint(self._blockall['key'])
                block_info['block'] = "blocked by %s: is a key package (%s)" % \
                    (self._blockall['key'].user, tooltip)
            elif 'no-autopkgtest' in self._blockall:
                if excuse.autopkgtest_results == {'PASS'}:
                    if not blocked:
                        excuse.addinfo("not blocked: has successful autopkgtest")
                else:
                    blocked['block'] = self._blockall['no-autopkgtest'].user
                    excuse.add_hint(self._blockall['no-autopkgtest'])
                    if not excuse.autopkgtest_results:
                        block_info['block'] = "blocked by %s: does not have autopkgtest (%s)" % \
                            (self._blockall['no-autopkgtest'].user, tooltip)
                    else:
                        block_info['block'] = "blocked by %s: autopkgtest not fully successful (%s)" % \
                            (self._blockall['no-autopkgtest'].user, tooltip)

        elif not is_primary:
            # items from additional source suites always need approval
            blocked['block'] = suite_name
            excuse.needs_approval = True

        for block_cmd in blocked:
            unblock_cmd = 'un'+block_cmd
            if block_cmd in unblocked:
                if is_primary or block_cmd == 'block-udeb':
                    excuse.addinfo("Ignoring %s request by %s, due to %s request by %s" %
                                   (block_cmd, blocked[block_cmd], unblock_cmd, unblocked[block_cmd]))
                else:
                    excuse.addinfo("Approved by %s" % (unblocked[block_cmd]))
            else:
                verdict = PolicyVerdict.REJECTED_NEEDS_APPROVAL
                if is_primary or block_cmd == 'block-udeb':
                    # redirect people to d-i RM for udeb things:
                    if block_cmd == 'block-udeb':
                        tooltip = "please contact the d-i release manager if an update is needed"
                    if block_cmd in block_info:
                        info = block_info[block_cmd]
                    else:
                        info = "Not touching package due to %s request by %s (%s)" % \
                            (block_cmd, blocked[block_cmd], tooltip)
                    excuse.add_verdict_info(verdict, info)
                else:
                    excuse.add_verdict_info(verdict, "NEEDS APPROVAL BY RM")
                excuse.addreason("block")
        if mismatches:
            excuse.add_detailed_info("Some hints for %s do not match this item" % src)
        return verdict

    def apply_src_policy_impl(self, block_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse):
        """Source-level entry point: check block hints for the whole item."""
        return self._check_blocked(item, "source", source_data_srcdist.version, excuse)

    def apply_srcarch_policy_impl(self, block_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Per-arch entry point: check block hints for a single architecture."""
        return self._check_blocked(item, arch, source_data_srcdist.version, excuse)
class BuiltOnBuilddPolicy(BasePolicy):
    """Require binaries to be built and signed by a buildd.

    Uses the signer information from ``signers.json`` in STATE_DIR.
    Maintainer uploads are tolerated for non-main components and, when an
    'allow-archall-maintainer-upload' hint exists, for arch:all binaries.
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('builtonbuildd', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE},
                         ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY)
        self._britney = None
        self._builtonbuildd = {
            'signerinfo': None,
        }

    def register_hints(self, hint_parser: HintParser) -> None:
        """Register the 'allow-archall-maintainer-upload' hint type."""
        hint_parser.register_hint_type('allow-archall-maintainer-upload', split_into_one_hint_per_package)

    def initialise(self, britney):
        """Load signer information from STATE_DIR/signers.json.

        :raises RuntimeError: if STATE_DIR is not configured
        """
        super().initialise(britney)
        self._britney = britney
        try:
            filename_signerinfo = os.path.join(self.state_dir, 'signers.json')
        except AttributeError as e:  # pragma: no cover
            raise RuntimeError("Please set STATE_DIR in the britney configuration") from e
        self._builtonbuildd['signerinfo'] = self._read_signerinfo(filename_signerinfo)

    def apply_srcarch_policy_impl(self, buildd_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Check that the item's binaries on *arch* were signed by a buildd."""
        verdict = PolicyVerdict.PASS
        signers = self._builtonbuildd['signerinfo']

        if "signed-by" not in buildd_info:
            buildd_info["signed-by"] = {}

        source_suite = item.suite

        # horrible hard-coding, but currently, we don't keep track of the
        # component when loading the packages files
        component = "main"
        # we use the source component, because a binary in contrib can
        # belong to a source in main
        section = source_data_srcdist.section
        if section.find("/") > -1:
            component = section.split('/')[0]

        packages_s_a = source_suite.binaries[arch]

        for pkg_id in sorted(x for x in source_data_srcdist.binaries if x.architecture == arch):
            pkg_name = pkg_id.package_name
            binary_u = packages_s_a[pkg_name]
            pkg_arch = binary_u.architecture

            if (binary_u.source_version != source_data_srcdist.version):
                # skip cruft that was not built from this source version
                continue

            if (item.architecture != 'source' and pkg_arch == 'all'):
                # we don't care about the existing arch: all binaries when
                # checking a binNMU item, because the arch: all binaries won't
                # migrate anyway
                continue

            signer = None
            uid = None
            uidinfo = ""
            buildd_ok = False
            failure_verdict = PolicyVerdict.REJECTED_PERMANENTLY
            try:
                signer = signers[pkg_name][pkg_id.version][pkg_arch]
                if signer["buildd"]:
                    buildd_ok = True
                uid = signer['uid']
                uidinfo = "arch %s binaries uploaded by %s" % (pkg_arch, uid)
            except KeyError:
                # no signer record: we cannot tell whether this is permanent
                self.logger.info("signer info for %s %s (%s) on %s not found " % (pkg_name, binary_u.version, pkg_arch, arch))
                uidinfo = "upload info for arch %s binaries not found" % (pkg_arch)
                failure_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT
            if not buildd_ok:
                if component != "main":
                    # maintainer uploads are tolerated outside main
                    # NOTE(review): the 'not buildd_ok' in the next condition is
                    # redundant (already guaranteed by the enclosing if)
                    if not buildd_ok and pkg_arch not in buildd_info["signed-by"]:
                        excuse.add_detailed_info("%s, but package in %s" % (uidinfo, component))
                    buildd_ok = True
                elif pkg_arch == 'all':
                    allow_hints = self.hints.search('allow-archall-maintainer-upload', package=item.package)
                    if allow_hints:
                        buildd_ok = True
                        verdict = PolicyVerdict.worst_of(verdict, PolicyVerdict.PASS_HINTED)
                        if pkg_arch not in buildd_info["signed-by"]:
                            excuse.addinfo("%s, but whitelisted by %s" % (uidinfo, allow_hints[0].user))
            if not buildd_ok:
                verdict = failure_verdict
                # only report each architecture once
                if pkg_arch not in buildd_info["signed-by"]:
                    if pkg_arch == 'all':
                        uidinfo += ', a new source-only upload is needed to allow migration'
                    excuse.add_verdict_info(verdict, "Not built on buildd: %s" % (uidinfo))

            if pkg_arch in buildd_info["signed-by"] and buildd_info["signed-by"][pkg_arch] != uid:
                self.logger.info("signer mismatch for %s (%s %s) on %s: %s, while %s already listed" %
                                 (pkg_name, binary_u.source, binary_u.source_version,
                                  pkg_arch, uid, buildd_info["signed-by"][pkg_arch]))

            buildd_info["signed-by"][pkg_arch] = uid

        return verdict

    def _read_signerinfo(self, filename):
        """Load signer info from *filename*; an empty file yields an empty dict."""
        signerinfo = {}
        self.logger.info("Loading signer info from %s", filename)
        with open(filename) as fd:
            if os.fstat(fd.fileno()).st_size < 1:
                return signerinfo
            signerinfo = json.load(fd)

        return signerinfo
class ImplicitDependencyPolicy(BasePolicy):
    """Implicit Dependency policy

    Upgrading a package pkg-a can break the installability of a package pkg-b.
    A newer version (or the removal) of pkg-b might fix the issue. In that
    case, pkg-a has an 'implicit dependency' on pkg-b, because pkg-a can only
    migrate if pkg-b also migrates.

    This policy tries to discover a few common cases, and adds the relevant
    info to the excuses. If another item is needed to fix the
    uninstallability, a dependency is added. If no newer item can fix it, this
    excuse will be blocked.

    Note that the migration step will check the installability of every
    package, so this policy doesn't need to handle every corner case. It
    must, however, make sure that no excuse is unnecessarily blocked.

    Some cases that should be detected by this policy:

    * pkg-a is upgraded from 1.0-1 to 2.0-1, while
      pkg-b has "Depends: pkg-a (<< 2.0)"
      This typically happens if pkg-b has a strict dependency on pkg-a because
      it uses some non-stable internal interface (examples are glibc,
      binutils, python3-defaults, ...)

    * pkg-a is upgraded from 1.0-1 to 2.0-1, and
      pkg-a 1.0-1 has "Provides: provides-1",
      pkg-a 2.0-1 has "Provides: provides-2",
      pkg-b has "Depends: provides-1"
      This typically happens when pkg-a has an interface that changes between
      versions, and a virtual package is used to identify the version of this
      interface (e.g. perl-api-x.y)

    """

    def __init__(self, options, suite_info: Suites):
        # Runs once per (source item, architecture) because breakage is
        # per-arch (ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY).
        super().__init__('implicit-deps', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE, SuiteClass.ADDITIONAL_SOURCE_SUITE},
                         ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY)
        # All of these are populated from the britney instance in
        # initialise(); they are None until then.
        self._pkg_universe = None
        self._all_binaries = None
        self._smooth_updates = None
        self._nobreakall_arches = None
        self._new_arches = None
        self._break_arches = None
        self._allow_uninst = None

    def initialise(self, britney):
        """Cache the britney state this policy needs for its checks."""
        super().initialise(britney)
        self._pkg_universe = britney.pkg_universe
        self._all_binaries = britney.all_binaries
        self._smooth_updates = britney.options.smooth_updates
        self._nobreakall_arches = self.options.nobreakall_arches
        self._new_arches = self.options.new_arches
        self._break_arches = self.options.break_arches
        self._allow_uninst = britney.allow_uninst
        # NOTE(review): _outofsync_arches is only set here, not in __init__;
        # apply_srcarch_policy_impl relies on initialise() running first.
        self._outofsync_arches = self.options.outofsync_arches

    def can_be_removed(self, pkg):
        """Return True if *pkg* is a plausible candidate for removal.

        Used to decide that breaking *pkg* is acceptable because the
        migration run may remove it anyway.
        """
        src = pkg.source
        target_suite = self.suite_info.target_suite

        # TODO these conditions shouldn't be hardcoded here
        # ideally, we would be able to look up excuses to see if the removal
        # is in there, but in the current flow, this policy is called before
        # all possible excuses exist, so there is no list for us to check

        if src not in self.suite_info.primary_source_suite.sources:
            # source for pkg not in unstable: candidate for removal
            return True

        source_t = target_suite.sources[src]
        for hint in self.hints.search('remove', package=src, version=source_t.version):
            # removal hint for the source in testing: candidate for removal
            return True

        if target_suite.is_cruft(pkg):
            # if pkg is cruft in testing, removal will be tried
            return True

        # the case were the newer version of the source no longer includes the
        # binary (or includes a cruft version of the binary) will be handled
        # separately (in that case there might be an implicit dependency on
        # the newer source)

        return False

    def should_skip_rdep(self, pkg, source_name, myarch):
        """Return True if reverse dependency *pkg* needs no breakage check.

        Covers the cases where migrating *source_name* on *myarch* either
        cannot break *pkg*, or where breaking it is explicitly tolerated.
        """
        target_suite = self.suite_info.target_suite

        if not target_suite.is_pkg_in_the_suite(pkg.pkg_id):
            # it is not in the target suite, migration cannot break anything
            return True

        if pkg.source == source_name:
            # if it is built from the same source, it will be upgraded
            # with the source
            return True

        if self.can_be_removed(pkg):
            # could potentially be removed, so if that happens, it won't be
            # broken
            return True

        if pkg.architecture == 'all' and \
                myarch not in self._nobreakall_arches:
            # arch all on non nobreakarch is allowed to become uninstallable
            return True

        if pkg.pkg_id.package_name in self._allow_uninst[myarch]:
            # there is a hint to allow this binary to become uninstallable
            return True

        if not target_suite.is_installable(pkg.pkg_id):
            # it is already uninstallable in the target suite, migration
            # cannot break anything
            return True

        return False

    def breaks_installability(self, pkg_id_t, pkg_id_s, pkg_to_check):
        """
        Check if upgrading pkg_id_t to pkg_id_s breaks the installability of
        pkg_to_check.

        To check if removing pkg_id_t breaks pkg_to_check, set pkg_id_s to
        None.
        """
        # NOTE(review): returns True when breakage is detected and falls off
        # the end (None, i.e. falsy) otherwise; callers only test truthiness.

        pkg_universe = self._pkg_universe
        negative_deps = pkg_universe.negative_dependencies_of(pkg_to_check)

        for dep in pkg_universe.dependencies_of(pkg_to_check):
            if pkg_id_t not in dep:
                # this depends doesn't have pkg_id_t as alternative, so
                # upgrading pkg_id_t cannot break this dependency clause
                continue

            # We check all the alternatives for this dependency, to find one
            # that can satisfy it when pkg_id_t is upgraded to pkg_id_s
            found_alternative = False
            for d in dep:
                if d in negative_deps:
                    # If this alternative dependency conflicts with
                    # pkg_to_check, it cannot be used to satisfy the
                    # dependency.
                    # This commonly happens when breaks are added to pkg_id_s.
                    continue

                if d.package_name != pkg_id_t.package_name:
                    # a binary different from pkg_id_t can satisfy the dep, so
                    # upgrading pkg_id_t won't break this dependency
                    found_alternative = True
                    break

                if d != pkg_id_s:
                    # We want to know the impact of the upgrade of
                    # pkg_id_t to pkg_id_s. If pkg_id_s migrates to the
                    # target suite, any other version of this binary will
                    # not be there, so it cannot satisfy this dependency.
                    # This includes pkg_id_t, but also other versions.
                    continue

                # pkg_id_s can satisfy the dep
                found_alternative = True

            if not found_alternative:
                return True

    def check_upgrade(self, pkg_id_t, pkg_id_s, source_name, myarch, broken_binaries, excuse):
        """Check every reverse dependency of pkg_id_t for breakage.

        For each reverse dependency broken by the pkg_id_t -> pkg_id_s
        upgrade: if a newer version can fix it, record an implicit dependency
        on the excuse; otherwise add the binary to *broken_binaries* (mutated
        in place) and reject.  Returns the resulting PolicyVerdict.
        """
        verdict = PolicyVerdict.PASS

        pkg_universe = self._pkg_universe
        all_binaries = self._all_binaries

        # check all rdeps of the package in testing
        rdeps_t = pkg_universe.reverse_dependencies_of(pkg_id_t)

        for rdep_pkg in sorted(rdeps_t):
            rdep_p = all_binaries[rdep_pkg]

            # check some cases where the rdep won't become uninstallable, or
            # where we don't care if it does
            if self.should_skip_rdep(rdep_p, source_name, myarch):
                continue

            if not self.breaks_installability(pkg_id_t, pkg_id_s, rdep_pkg):
                # if upgrading pkg_id_t to pkg_id_s doesn't break rdep_pkg,
                # there is no implicit dependency
                continue

            # The upgrade breaks the installability of the rdep. We need to
            # find out if there is a newer version of the rdep that solves the
            # uninstallability. If that is the case, there is an implicit
            # dependency. If not, the upgrade will fail.

            # check source versions
            newer_versions = find_newer_binaries(self.suite_info, rdep_p,
                                                 add_source_for_dropped_bin=True)
            good_newer_versions = set()
            for npkg, suite in newer_versions:
                if npkg.architecture == 'source':
                    # When a newer version of the source package doesn't have
                    # the binary, we get the source as 'newer version'. In
                    # this case, the binary will not be uninstallable if the
                    # newer source migrates, because it is no longer there.
                    good_newer_versions.add(npkg)
                    continue
                if not self.breaks_installability(pkg_id_t, pkg_id_s, npkg):
                    good_newer_versions.add(npkg)

            if good_newer_versions:
                # a fix exists: record the implicit dependency instead of
                # blocking the excuse
                spec = DependencySpec(DependencyType.IMPLICIT_DEPENDENCY, myarch)
                excuse.add_package_depends(spec, good_newer_versions)
            else:
                # no good newer versions: no possible solution
                broken_binaries.add(rdep_pkg.name)
                if pkg_id_s:
                    action = "migrating %s to %s" % (
                        pkg_id_s.name,
                        self.suite_info.target_suite.name)
                else:
                    action = "removing %s from %s" % (
                        pkg_id_t.name,
                        self.suite_info.target_suite.name)
                info = '{0} makes <a href="#{1}">{1}</a> uninstallable'.format(
                    action, rdep_pkg.name)
                verdict = PolicyVerdict.REJECTED_PERMANENTLY
                excuse.add_verdict_info(verdict, info)

        return verdict

    def apply_srcarch_policy_impl(self, implicit_dep_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Apply the implicit-dependency check for *item* on *arch*.

        Walks the binaries of the version currently in testing, pairs each
        with its counterpart in the source suite (or None when it is
        dropped/cruft) and delegates the breakage check to check_upgrade().
        Aggregates broken binaries across architectures into
        implicit_dep_info['implicit-deps']['broken-binaries'].
        """
        verdict = PolicyVerdict.PASS

        if not source_data_tdist:
            # this item is not currently in testing: no implicit dependency
            return verdict

        if excuse.hasreason("missingbuild"):
            # if the build is missing, the policy would treat this as if the
            # binaries would be removed, which would give incorrect (and
            # confusing) info
            info = "missing build, not checking implicit dependencies on %s" % (arch)
            excuse.add_detailed_info(info)
            return verdict

        source_suite = item.suite
        source_name = item.package
        target_suite = self.suite_info.target_suite
        all_binaries = self._all_binaries

        # we check all binaries for this excuse that are currently in testing
        relevant_binaries = [x for x in source_data_tdist.binaries if (arch == 'source' or x.architecture == arch)
                             and x.package_name in target_suite.binaries[x.architecture]
                             and x.architecture not in self._new_arches
                             and x.architecture not in self._break_arches
                             and x.architecture not in self._outofsync_arches]

        broken_binaries = set()

        for pkg_id_t in sorted(relevant_binaries):
            mypkg = pkg_id_t.package_name
            myarch = pkg_id_t.architecture
            binaries_t_a = target_suite.binaries[myarch]
            binaries_s_a = source_suite.binaries[myarch]

            if target_suite.is_cruft(all_binaries[pkg_id_t]):
                # this binary is cruft in testing: it will stay around as long
                # as necessary to satisfy dependencies, so we don't need to
                # care
                continue

            if mypkg in binaries_s_a:
                mybin = binaries_s_a[mypkg]
                pkg_id_s = mybin.pkg_id
                if mybin.source != source_name:
                    # hijack: this is too complicated to check, so we ignore
                    # it (the migration code will check the installability
                    # later anyway)
                    pass
                elif mybin.source_version != source_data_srcdist.version:
                    # cruft in source suite: pretend the binary doesn't exist
                    pkg_id_s = None
                elif pkg_id_t == pkg_id_s:
                    # same binary (probably arch: all from a binNMU):
                    # 'upgrading' doesn't change anything, for this binary, so
                    # it won't break anything
                    continue
            else:
                # binary not present in the source suite at all
                pkg_id_s = None

            if not pkg_id_s and \
                    is_smooth_update_allowed(binaries_t_a[mypkg], self._smooth_updates, self.hints):
                # the binary isn't in the new version (or is cruft there), and
                # smooth updates are allowed: the binary can stay around if
                # that is necessary to satisfy dependencies, so we don't need
                # to check it
                continue

            if not pkg_id_s and \
                    source_data_tdist.version == source_data_srcdist.version and \
                    source_suite.suite_class == SuiteClass.ADDITIONAL_SOURCE_SUITE and \
                    binaries_t_a[mypkg].architecture == 'all':
                # we're very probably migrating a binNMU built in tpu where the arch:all
                # binaries were not copied to it as that's not needed. This policy could
                # needlessly block.
                continue

            v = self.check_upgrade(pkg_id_t, pkg_id_s, source_name, myarch, broken_binaries, excuse)
            verdict = PolicyVerdict.worst_of(verdict, v)

        # each arch is processed separately, so if we already have info from
        # other archs, we need to merge the info from this arch
        broken_old = set()
        if 'implicit-deps' not in implicit_dep_info:
            implicit_dep_info['implicit-deps'] = {}
        else:
            broken_old = set(implicit_dep_info['implicit-deps']['broken-binaries'])

        implicit_dep_info['implicit-deps']['broken-binaries'] = \
            sorted(broken_old | broken_binaries)

        return verdict
class ReverseRemovalPolicy(BasePolicy):
    """Block migration of sources that (transitively) depend on a
    to-be-removed package.

    During initialise() the policy expands every 'remove' hint to the set of
    binaries that reverse-depend on the hinted source's binaries, and maps
    the owning sources ("src/version") to the hints that implicate them.
    apply_src_policy_impl() then rejects any matching migration item unless
    an 'ignore-reverse-remove' hint overrides the block.
    """

    def __init__(self, options, suite_info: Suites):
        super().__init__('reverseremoval', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE,
                          SuiteClass.ADDITIONAL_SOURCE_SUITE})
        self._britney = None
        # Map of "source/version" -> set of hinted source names (reasons).
        # Initialised empty so apply_src_policy_impl() is safe even if
        # initialise() has not run yet.
        self._block_src_for_rm_hint = {}

    def register_hints(self, hint_parser: HintParser) -> None:
        """Register the 'ignore-reverse-remove' override hint type."""
        hint_parser.register_hint_type('ignore-reverse-remove',
                                       split_into_one_hint_per_package)

    def initialise(self, britney):
        """Precompute which sources are blocked by 'remove' hints."""
        super().initialise(britney)
        self._britney = britney

        pkg_universe = britney.pkg_universe
        source_suites = britney.suite_info.source_suites
        target_suite = britney.suite_info.target_suite

        # Build set of the sources of reverse (Build-) Depends
        hints = self.hints.search('remove')

        # binary pkg id -> set of hinted source names that implicate it
        rev_bin = defaultdict(set)
        for hint in hints:
            for item in hint.packages:
                # I think we don't need to look at the target suite
                for src_suite in source_suites:
                    try:
                        my_bins = set(src_suite.sources[item.uvname].binaries)
                    except KeyError:
                        # hinted source not present in this suite
                        continue
                    # expands my_bins in place to the full reverse tree
                    compute_reverse_tree(pkg_universe, my_bins)
                    for bin_id in my_bins:
                        rev_bin[bin_id].add(item.uvname)

        rev_src = defaultdict(set)
        for bin_pkg, reasons in rev_bin.items():
            # If the pkg is in the target suite, there's nothing this
            # policy wants to do.
            if target_suite.is_pkg_in_the_suite(bin_pkg):
                continue
            binary = britney.all_binaries[bin_pkg]
            bin_src = binary.source + '/' + binary.source_version
            rev_src[bin_src].update(reasons)
        self._block_src_for_rm_hint = rev_src

    def apply_src_policy_impl(self, rev_remove_info, item: MigrationItem, source_data_tdist: Optional[SourcePackage],
                              source_data_srcdist: SourcePackage, excuse):
        """Reject *item* if it is blocked by a remove hint, unless an
        'ignore-reverse-remove' hint forces it through."""
        verdict = PolicyVerdict.PASS

        if item.name in self._block_src_for_rm_hint:
            reason = ", ".join(sorted(self._block_src_for_rm_hint[item.name]))
            ignore_hints = self.hints.search('ignore-reverse-remove',
                                             package=item.uvname,
                                             version=item.version)
            excuse.addreason('reverseremoval')
            if ignore_hints:
                excuse.addreason('ignore-reverse-remove')
                excuse.addinfo(
                    "Should block migration because of remove hint for %s, but forced by %s" %
                    (reason, ignore_hints[0].user))
                verdict = PolicyVerdict.PASS_HINTED
            else:
                excuse.addinfo(
                    "Remove hint for (transitive) dependency: %s" % reason)
                verdict = PolicyVerdict.REJECTED_PERMANENTLY

        return verdict
class ReproduciblePolicy(BasePolicy):
    """Judge migration items on their build reproducibility test results.

    Results for the source and target suite are read from
    ``reproducible.json`` in the state directory; the verdict depends on the
    (source state, target state) combination.  Regressions can be turned
    into penalties (instead of blocks) or overridden with an
    'ignore-reproducible' hint; reproducible builds can earn a bounty.
    """

    def __init__(self, options, suite_info: Suites):
        # Runs per architecture (RUN_ON_EVERY_ARCH_ONLY) since
        # reproducibility results are per-arch.
        super().__init__('reproducible', options, suite_info,
                         {SuiteClass.PRIMARY_SOURCE_SUITE},
                         ApplySrcPolicy.RUN_ON_EVERY_ARCH_ONLY)
        # arch -> package -> result record, per suite role; replaced by the
        # real data in initialise().
        self._reproducible = {
            'source': {},
            'target': {},
        }

        # Default values for this policy's options
        parse_option(options, 'repro_success_bounty', default=0, to_int=True)
        parse_option(options, 'repro_regression_penalty', default=0, to_int=True)
        parse_option(options, 'repro_url')
        parse_option(options, 'repro_retry_url')
        parse_option(options, 'repro_components')

    def register_hints(self, hint_parser: HintParser) -> None:
        """Register the 'ignore-reproducible' override hint type."""
        hint_parser.register_hint_type('ignore-reproducible',
                                       split_into_one_hint_per_package)

    def initialise(self, britney):
        """Load reproducibility results from STATE_DIR/reproducible.json."""
        super().initialise(britney)
        source_suite = self.suite_info.primary_source_suite
        target_suite = self.suite_info.target_suite
        try:
            filename = os.path.join(self.state_dir, 'reproducible.json')
        except AttributeError as e:  # pragma: no cover
            raise RuntimeError("Please set STATE_DIR in the britney configuration") from e

        # Suites may be referenced by name or codename in the report, so
        # match against both.
        self._reproducible = self._read_repro_status(
            filename,
            source=set((source_suite.name, source_suite.codename)),
            target=set((target_suite.name, target_suite.codename)))

    def apply_srcarch_policy_impl(self, reproducible_info, item: MigrationItem, arch,
                                  source_data_tdist: Optional[SourcePackage], source_data_srcdist: SourcePackage,
                                  excuse):
        """Compute the reproducibility verdict for *item* on *arch*.

        Returns PASS for skipped/acceptable cases, REJECTED_TEMPORARILY while
        results are pending, REJECTED_PERMANENTLY for regressions, and
        PASS_HINTED when an 'ignore-reproducible' hint overrides a rejection.
        """
        verdict = PolicyVerdict.PASS

        # we don't want to apply this policy (yet) on binNMUs
        if item.architecture != 'source':
            return verdict

        # we're not supposed to judge on this arch
        if arch not in self.options.repro_arches:
            return verdict

        # bail out if this arch has no packages for this source (not build
        # here)
        if arch not in excuse.packages:
            return verdict

        # horrible hard-coding, but currently, we don't keep track of the
        # component when loading the packages files
        component = "main"
        section = source_data_srcdist.section
        if "/" in section:
            component = section.split('/')[0]

        if self.options.repro_components and component not in self.options.repro_components:
            return verdict

        source_name = item.package
        try:
            tar_res = self._reproducible['target'][arch]
            src_res = self._reproducible['source'][arch]
        except KeyError:
            # no data for this arch at all: hold the item until data exists
            verdict = PolicyVerdict.REJECTED_TEMPORARILY
            msg = "No reproducible data available at all for %s" % arch
            excuse.add_verdict_info(verdict, msg)
            return verdict

        # Classify the reference (target suite) state; 'stale' means the
        # recorded result is for a different version than the one in testing.
        if source_data_tdist is None:
            target_suite_state = 'new'
        elif source_name not in tar_res:
            target_suite_state = 'unknown'
        elif tar_res[source_name]['version'] == source_data_tdist.version:
            target_suite_state = tar_res[source_name]['status']
        else:
            target_suite_state = 'stale'

        # Only accept a source-suite result that matches the migrating
        # version exactly.
        if (source_name in src_res and
                src_res[source_name]['version'] == item.version):
            source_suite_state = src_res[source_name]['status']
        else:
            source_suite_state = 'unknown'

        # status of ['E404', 'FTBFS', 'FTBR', 'NFU', 'blacklisted', 'depwait',
        # 'reproducible', 'timeout'] with ['new', 'stale', 'unknown']
        wait_states = ('E404', 'depwait', 'stale', 'timeout', 'unknown')
        no_build_states = ('FTBFS', 'NFU', 'blacklisted')

        # if this package doesn't build on this architecture, we don't need to
        # judge it
        # FTBFS: Fails to build from source on r-b infra
        # NFU: the package explicitly doesn't support building on this arch
        # blacklisted: per package per arch per suite
        if source_suite_state in no_build_states:
            return verdict
        # Assume depwait in the source suite only are intermittent (might not
        # be true, e.g. with new build depends)
        if (source_suite_state == target_suite_state and
                target_suite_state == 'depwait'):
            return verdict

        if self.options.repro_url:
            url = self.options.repro_url.format(package=quote(source_name),
                                                arch=arch)
            url_html = ' - <a href="%s">info</a>' % url
            if self.options.repro_retry_url:
                url_html += ' <a href="%s">♻ </a>' % \
                    self.options.repro_retry_url.format(
                        package=quote(source_name),
                        arch=arch)
            # When run on multiple archs, the last one "wins"
            reproducible_info['reproducible-test-url'] = url
        else:
            url = None
            url_html = ''

        # Main decision table on (source_suite_state, target_suite_state);
        # every branch sets both `verdict` and `msg` (or raises), and `msg`
        # is consumed at the end of the method.
        eligible_for_bounty = False
        if source_suite_state == 'reproducible':
            verdict = PolicyVerdict.PASS
            msg = 'Reproducible on %s%s' % (arch, url_html)
            reproducible_info.setdefault('test-results', []).append('reproducible on %s' % arch)
            eligible_for_bounty = True
        elif source_suite_state == 'FTBR':
            if target_suite_state == 'new':
                verdict = PolicyVerdict.REJECTED_PERMANENTLY
                msg = 'New but not reproducible on %s%s' % (arch, url_html)
                reproducible_info.setdefault('test-results', []).append('new but not reproducible on %s' % arch)
            elif target_suite_state in wait_states:
                verdict = PolicyVerdict.REJECTED_TEMPORARILY
                msg = 'Waiting for reproducibility reference results on %s%s' % (arch, url_html)
                reproducible_info.setdefault(
                    'test-results',
                    []).append('waiting-for-reference-results on %s' % arch)
            elif target_suite_state == 'reproducible':
                verdict = PolicyVerdict.REJECTED_PERMANENTLY
                msg = 'Reproducibility regression on %s%s' % (arch, url_html)
                reproducible_info.setdefault('test-results', []).append('regression on %s' % arch)
            elif target_suite_state == 'FTBR':
                # not reproducible in either suite: not a regression
                verdict = PolicyVerdict.PASS
                msg = 'Ignoring non-reproducibility on %s (not a regression)%s' % (
                    arch, url_html)
                reproducible_info.setdefault('test-results', []).append('not reproducible on %s' % arch)
            else:
                verdict = PolicyVerdict.REJECTED_PERMANENTLY
                msg = 'No reference result, but not reproducibility on %s%s' % (arch, url_html)
                reproducible_info.setdefault('test-results', []).append('reference %s on %s' %
                                                                        (target_suite_state, arch))
        elif source_suite_state in wait_states:
            verdict = PolicyVerdict.REJECTED_TEMPORARILY
            msg = 'Waiting for reproducibility test results on %s%s' % (arch, url_html)
            reproducible_info.setdefault('test-results', []).append('waiting-for-test-results on %s' % arch)
        else:
            raise KeyError('Unhandled reproducibility state %s' % source_suite_state)

        # An 'ignore-reproducible' hint (arch-specific or for 'source')
        # downgrades a rejection to PASS_HINTED.
        if verdict.is_rejected:
            for hint_arch in ('source', arch):
                for ignore_hint in self.hints.search(
                        'ignore-reproducible',
                        package=source_name,
                        version=source_data_srcdist.version,
                        architecture=hint_arch):
                    verdict = PolicyVerdict.PASS_HINTED
                    reproducible_info.setdefault(
                        'ignored-reproducible', {}).setdefault(arch, {}).setdefault(
                        'issued-by', []).append(ignore_hint.user)
                    excuse.addinfo("Ignoring reproducibility issue on %s as requested "
                                   "by %s" % (arch, ignore_hint.user))
                    break

        if (self.options.repro_success_bounty and eligible_for_bounty):
            excuse.add_bounty('reproducibility', self.options.repro_success_bounty)

        if self.options.repro_regression_penalty and \
                verdict in {PolicyVerdict.REJECTED_PERMANENTLY, PolicyVerdict.REJECTED_TEMPORARILY}:
            if self.options.repro_regression_penalty > 0:
                excuse.add_penalty('reproducibility', self.options.repro_regression_penalty)
            # In case we give penalties instead of blocking, we must always pass
            verdict = PolicyVerdict.PASS

        if verdict.is_rejected:
            excuse.add_verdict_info(verdict, msg)
        else:
            excuse.addinfo(msg)

        return verdict

    def _read_repro_status(self, filename, source, target):
        """Read the reproducibility JSON report from *filename*.

        *source* and *target* are sets of suite names/codenames; each result
        record is filed under 'source' and/or 'target' keyed by architecture
        then package.  An empty file yields the existing (empty) summary.
        """
        summary = self._reproducible
        self.logger.info("Loading reproducibility report from %s", filename)
        with open(filename) as fd:
            if os.fstat(fd.fileno()).st_size < 1:
                # tolerate an empty placeholder file
                return summary
            data = json.load(fd)

        for result in data:
            # a record may apply to both roles if suites overlap
            if result['suite'] in source:
                summary['source'].setdefault(
                    result['architecture'], {})[result['package']] = result
            if result['suite'] in target:
                summary['target'].setdefault(
                    result['architecture'], {})[result['package']] = result

        return summary