Coverage for britney2/excusefinder.py: 93%

331 statements  

coverage.py v6.5.0, created at 2024-04-18 20:48 +0000

1from itertools import chain 

2from urllib.parse import quote 

3 

4import apt_pkg 

5import logging 

6 

7from britney2 import PackageId, Suites 

8from britney2.excuse import Excuse 

9from britney2.migrationitem import MigrationItem 

10from britney2.policies import PolicyVerdict 

11from britney2.utils import invalidate_excuses, find_smooth_updateable_binaries 

12 

13 

14class ExcuseFinder(object): 

15 

16 def __init__(self, options, suite_info: Suites, all_binaries, pkg_universe, policy_engine, mi_factory, hints): 

17 logger_name = ".".join((self.__class__.__module__, self.__class__.__name__)) 

18 self.logger = logging.getLogger(logger_name) 

19 self.options = options 

20 self.suite_info = suite_info 

21 self.all_binaries = all_binaries 

22 self.pkg_universe = pkg_universe 

23 self._policy_engine = policy_engine 

24 self._migration_item_factory = mi_factory 

25 self.hints = hints 

26 self.excuses = {} 

27 

28 def _get_build_link(self, arch, src, ver, label=None): 

29 """Return a link to the build logs, labelled 'arch' per default""" 

30 if label is None: 

31 label = arch 

32 if self.options.build_url: 

33 url = self.options.build_url.format( 

34 arch=arch, source=quote(src), version=quote(ver)) 

35 return "<a href=\"%s\" target=\"_blank\">%s</a>" % (url, label) 

36 else: 

37 return label 

38 
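As a rough illustration of the templating that _get_build_link relies on (the URL pattern and package data below are made up; the real value comes from options.build_url in the britney configuration):

from urllib.parse import quote

build_url = "https://buildd.example.org/logs?arch={arch}&pkg={source}&ver={version}"
arch, src, ver = "amd64", "glibc", "2.36-9+deb12u4"

url = build_url.format(arch=arch, source=quote(src), version=quote(ver))
# quote() escapes characters such as '+' so the version survives as a query value
print("<a href=\"%s\" target=\"_blank\">%s</a>" % (url, arch))
# <a href="https://buildd.example.org/logs?arch=amd64&pkg=glibc&ver=2.36-9%2Bdeb12u4" target="_blank">amd64</a>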

39 def _should_remove_source(self, item: MigrationItem) -> bool: 

40 """Check if a source package should be removed from testing 

41 

42 This method checks if a source package should be removed from the 

43 target suite; this happens if the source package is not 

44 present in the primary source suite anymore. 

45 

46 It returns True if the package can be removed, False otherwise. 

47 In the former case, a new excuse is appended to the object 

48 attribute excuses. 

49 """ 

50 if hasattr(self.options, 'partial_source'):  [50 ↛ 51: line 50 didn't jump to line 51, because the condition on line 50 was never true]

51 return False 

52 # if the source package is available in unstable, then do nothing 

53 source_suite = self.suite_info.primary_source_suite 

54 pkg = item.package 

55 if pkg in source_suite.sources:  [55 ↛ 56: line 55 didn't jump to line 56, because the condition on line 55 was never true]

56 return False 

57 # otherwise, add a new excuse for its removal 

58 src = item.suite.sources[pkg] 

59 excuse = Excuse(item) 

60 excuse.addinfo("Package not in %s, will try to remove" % source_suite.name) 

61 excuse.set_vers(src.version, None) 

62 src.maintainer and excuse.set_maint(src.maintainer) 

63 src.section and excuse.set_section(src.section) 

64 

65 # if the package is blocked, skip it 

66 for hint in self.hints.search('block', package=pkg, removal=True): 

67 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

68 excuse.add_verdict_info( 

69 excuse.policy_verdict, 

70 "Not touching package, as requested by %s " 

71 "(contact %s-release if update is needed)" % (hint.user, 

72 self.options.distribution)) 

73 excuse.addreason("block") 

74 self.excuses[excuse.name] = excuse 

75 return False 

76 

77 excuse.policy_verdict = PolicyVerdict.PASS 

78 self.excuses[excuse.name] = excuse 

79 return True 

80 

81 def _should_upgrade_srcarch(self, item: MigrationItem) -> bool: 

82 """Check if a set of binary packages should be upgraded 

83 

84 This method checks if the binary packages produced by the source 

85 package on the given architecture should be upgraded; this can 

86 happen also if the migration is a binary-NMU for the given arch. 

87 

88 It returns False if the given packages don't need to be upgraded, 

89 True otherwise. In the former case, a new excuse is appended to 

90 the object attribute excuses. 

91 """ 

92 # retrieve the source packages for the target suite and the source suite 

93 

94 target_suite = self.suite_info.target_suite 

95 source_suite = item.suite 

96 src = item.package 

97 arch = item.architecture 

98 source_t = target_suite.sources[src] 

99 source_u = source_suite.sources[src] 

100 

101 excuse = Excuse(item) 

102 excuse.set_vers(source_t.version, source_t.version) 

103 source_u.maintainer and excuse.set_maint(source_u.maintainer) 

104 source_u.section and excuse.set_section(source_u.section) 

105 

106 # if there is a `remove' hint and the requested version is the same as the 

107 # version in testing, then stop here and return False 

108 # (as a side effect, a removal may generate such excuses for both the source 

109 # package and its binary packages on each architecture) 

110 for hint in self.hints.search('remove', package=src, version=source_t.version): 

111 excuse.add_hint(hint) 

112 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

113 excuse.add_verdict_info(excuse.policy_verdict, "Removal request by %s" % (hint.user)) 

114 excuse.add_verdict_info(excuse.policy_verdict, "Trying to remove package, not update it") 

115 self.excuses[excuse.name] = excuse 

116 return False 

117 

118 # the starting point is that there is nothing wrong and nothing worth doing 

119 anywrongver = False 

120 anyworthdoing = False 

121 

122 packages_t_a = target_suite.binaries[arch] 

123 packages_s_a = source_suite.binaries[arch] 

124 

125 wrong_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

126 

127 # for every binary package produced by this source in unstable for this architecture 

128 for pkg_id in sorted(x for x in source_u.binaries if x.architecture == arch): 

129 pkg_name = pkg_id.package_name 

130 # TODO filter binaries based on checks below? 

131 excuse.add_package(pkg_id) 

132 

133 # retrieve the testing (if present) and unstable corresponding binary packages 

134 binary_t = packages_t_a[pkg_name] if pkg_name in packages_t_a else None 

135 binary_u = packages_s_a[pkg_name] 

136 

137 # this is the source version for the new binary package 

138 pkgsv = binary_u.source_version 

139 

140 # if the new binary package is architecture-independent, then skip it 

141 if binary_u.architecture == 'all': 

142 if pkg_id not in source_t.binaries: 

143 # only add a note if the arch:all does not match the expected version 

144 excuse.add_detailed_info("Ignoring %s %s (from %s) as it is arch: all" % (pkg_name, binary_u.version, pkgsv)) 

145 continue 

146 

147 # if the new binary package is not from the same source as the testing one, then skip it 

148 # this implies that this binary migration is part of a source migration 

149 if source_u.version == pkgsv and source_t.version != pkgsv:  [149 ↛ 150: line 149 didn't jump to line 150, because the condition on line 149 was never true]

150 anywrongver = True 

151 excuse.add_verdict_info( 

152 wrong_verdict, 

153 "From wrong source: %s %s (%s not %s)" % 

154 (pkg_name, binary_u.version, pkgsv, source_t.version)) 

155 continue 

156 

157 # cruft in unstable 

158 if source_u.version != pkgsv and source_t.version != pkgsv: 

159 if self.options.ignore_cruft: 

160 excuse.add_detailed_info("Old cruft: %s %s (but ignoring cruft, so nevermind)" % (pkg_name, pkgsv)) 

161 else: 

162 anywrongver = True 

163 excuse.add_verdict_info(wrong_verdict, "Old cruft: %s %s" % (pkg_name, pkgsv)) 

164 continue 

165 

166 # if the source package has been updated in unstable and this is a binary migration, skip it 

167 # (the binaries are now out-of-date) 

168 if source_t.version == pkgsv and source_t.version != source_u.version:  [168 ↛ 169: line 168 didn't jump to line 169, because the condition on line 168 was never true]

169 anywrongver = True 

170 excuse.add_verdict_info( 

171 wrong_verdict, 

172 "From wrong source: %s %s (%s not %s)" % 

173 (pkg_name, binary_u.version, pkgsv, source_u.version)) 

174 continue 

175 

176 # if the binary is not present in testing, then it is a new binary; 

177 # in this case, there is something worth doing 

178 if not binary_t: 

179 excuse.add_detailed_info("New binary: %s (%s)" % (pkg_name, binary_u.version)) 

180 anyworthdoing = True 

181 continue 

182 

183 # at this point, the binary package is present in testing, so we can compare 

184 # the versions of the packages ... 

185 vcompare = apt_pkg.version_compare(binary_t.version, binary_u.version) 

186 

187 # ... if updating would mean downgrading, then stop here: there is something wrong 

188 if vcompare > 0:  [188 ↛ 189: line 188 didn't jump to line 189, because the condition on line 188 was never true]

189 anywrongver = True 

190 excuse.add_verdict_info( 

191 wrong_verdict, 

192 "Not downgrading: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version)) 

193 break 

194 # ... if updating would mean upgrading, then there is something worth doing 

195 elif vcompare < 0: 

196 excuse.add_detailed_info("Updated binary: %s (%s to %s)" % (pkg_name, binary_t.version, binary_u.version)) 

197 anyworthdoing = True 

198 

199 srcv = source_u.version 

200 same_source = source_t.version == srcv 

201 primary_source_suite = self.suite_info.primary_source_suite 

202 is_primary_source = source_suite == primary_source_suite 

203 

204 # if there is nothing wrong and there is something worth doing or the source 

205 # package is not fake, then check what packages should be removed 

206 if not anywrongver and (anyworthdoing or not source_u.is_fakesrc): 

207 # we want to remove binaries that are no longer produced by the 

208 # new source, but there are some special cases: 

209 # - if this is binary-only (same_source) and not from the primary 

210 # source, we don't do any removals: 

211 # binNMUs in *pu on some architectures would otherwise result in 

212 # the removal of binaries on other architectures 

213 # - for the primary source, smooth binaries in the target suite 

214 # are not considered for removal 

215 if not same_source or is_primary_source: 

216 smoothbins = set() 

217 if is_primary_source:  [217 ↛ 231: line 217 didn't jump to line 231, because the condition on line 217 was never false]

218 binaries_t = target_suite.binaries 

219 possible_smooth_updates = [p for p in source_t.binaries if p.architecture == arch] 

220 smoothbins = find_smooth_updateable_binaries(possible_smooth_updates, 

221 source_u, 

222 self.pkg_universe, 

223 target_suite, 

224 binaries_t, 

225 source_suite.binaries, 

226 frozenset(), 

227 self.options.smooth_updates, 

228 self.hints) 

229 

230 # for every binary package produced by this source in testing for this architecture 

231 for pkg_id in sorted(x for x in source_t.binaries if x.architecture == arch): 

232 pkg = pkg_id.package_name 

233 # if the package is architecture-independent, then ignore it 

234 tpkg_data = packages_t_a[pkg] 

235 if tpkg_data.architecture == 'all': 

236 if pkg_id not in source_u.binaries: 

237 # only add a note if the arch:all does not match the expected version 

238 excuse.add_detailed_info("Ignoring removal of %s as it is arch: all" % (pkg)) 

239 continue 

240 # if the package is not produced by the new source package, then remove it from testing 

241 if pkg not in packages_s_a: 

242 excuse.add_detailed_info("Removed binary: %s %s" % (pkg, tpkg_data.version)) 

243 # the removed binary is only interesting if this is a binary-only migration, 

244 # as otherwise the updated source will already cause the binary packages 

245 # to be updated 

246 if same_source and pkg_id not in smoothbins: 

247 # Special case: if the binary is a candidate for a smooth update, we do not consider 

248 # it "interesting" on its own. This case happens quite often with smooth updatable 

249 # packages, where the old binary "survives" a full run because it still has 

250 # reverse dependencies. 

251 anyworthdoing = True 

252 

253 if not anyworthdoing and not (self.options.archall_inconsistency_allowed and 

254 excuse.detailed_info): 

255 # nothing worth doing, we don't add an excuse to the list, we just return false 

256 return False 

257 

258 if not anyworthdoing: 

259 # This source has binary differences between the target and source 

260 # suite, but we're not going to upgrade them. Part of the purpose 

261 # of options.archall_inconsistency_allowed is to log the excuse 

262 # with a temporary failure such that the administrators can take 

263 # action as they wish. 

264 excuse.policy_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT 

265 excuse.addreason("everything-ignored") 

266 

267 else: 

268 # there is something worth doing 

269 # we assume that this package will be ok, if not invalidated below 

270 excuse.policy_verdict = PolicyVerdict.PASS 

271 

272 # if there is something wrong, reject this package 

273 if anywrongver: 

274 excuse.policy_verdict = wrong_verdict 

275 

276 self._policy_engine.apply_srcarch_policies(item, arch, source_t, source_u, excuse) 

277 

278 self.excuses[excuse.name] = excuse 

279 return excuse.is_valid 

280 
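A note on the comparison primitive used throughout this method: apt_pkg.version_compare(a, b) returns a positive number when a is the newer version, zero when the versions are equal, and a negative number when a is older. A tiny standalone check, illustrative only and not part of britney:

import apt_pkg

apt_pkg.init()  # load apt's configuration before using the version comparison
assert apt_pkg.version_compare("1.0-2", "1.0-1") > 0     # newer Debian revision
assert apt_pkg.version_compare("1.0-1", "1.0-1") == 0    # identical versions
assert apt_pkg.version_compare("1.0-1", "1.0-1+b1") < 0  # a binNMU sorts higher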

281 def _should_upgrade_src(self, item): 

282 """Check if source package should be upgraded 

283 

284 This method checks if a source package should be upgraded. The analysis 

285 is performed for the source package and source suite specified by 

286 the `item' parameter. 

287 

288 It returns False if the given package doesn't need to be upgraded, 

289 True otherwise. In the former case, a new excuse is appended to 

290 the object attribute excuses. 

291 """ 

292 

293 src = item.package 

294 source_suite = item.suite 

295 suite_name = source_suite.name 

296 source_u = source_suite.sources[src] 

297 if source_u.is_fakesrc:  [297 ↛ 299: line 297 didn't jump to line 299, because the condition on line 297 was never true]

298 # it is a fake package created to satisfy Britney implementation details; silently ignore it 

299 return False 

300 

301 target_suite = self.suite_info.target_suite 

302 # retrieve the source packages for testing (if available) and the source suite 

303 if src in target_suite.sources: 

304 source_t = target_suite.sources[src] 

305 # if testing and unstable have the same version, then this is a candidate for binary-NMUs only 

306 if apt_pkg.version_compare(source_t.version, source_u.version) == 0:  [306 ↛ 307: line 306 didn't jump to line 307, because the condition on line 306 was never true]

307 return False 

308 else: 

309 source_t = None 

310 

311 excuse = Excuse(item) 

312 excuse.set_vers(source_t and source_t.version or None, source_u.version) 

313 source_u.maintainer and excuse.set_maint(source_u.maintainer) 

314 source_u.section and excuse.set_section(source_u.section) 

315 excuse.add_package(PackageId(src, source_u.version, "source")) 

316 

317 # if the version in unstable is older, then stop here with a warning in the excuse and return False 

318 if source_t and apt_pkg.version_compare(source_u.version, source_t.version) < 0: 

319 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

320 excuse.add_verdict_info( 

321 excuse.policy_verdict, 

322 "ALERT: %s is newer in the target suite (%s %s)" % (src, source_t.version, source_u.version)) 

323 self.excuses[excuse.name] = excuse 

324 excuse.addreason("newerintesting") 

325 return False 

326 

327 # the starting point is that we will update the candidate 

328 excuse.policy_verdict = PolicyVerdict.PASS 

329 

330 # if there is a `remove' hint and the requested version is the same as the 

331 # version in testing, then stop here and return False 

332 for hint in self.hints.search('remove', package=src): 

333 if source_t and source_t.version == hint.version or \  [333 ↛ 332: line 333 didn't jump to line 332, because the condition on line 333 was never false]

334 source_u.version == hint.version: 

335 excuse.add_hint(hint) 

336 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

337 excuse.add_verdict_info(excuse.policy_verdict, "Removal request by %s" % (hint.user)) 

338 excuse.add_verdict_info(excuse.policy_verdict, "Trying to remove package, not update it") 

339 break 

340 

341 all_binaries = self.all_binaries 

342 

343 # at this point, we check the status of the builds on all the supported architectures 

344 # to catch the out-of-date ones 

345 archs_to_consider = list(self.options.architectures) 

346 archs_to_consider.append('all') 

347 for arch in archs_to_consider: 

348 oodbins = {} 

349 uptodatebins = False 

350 # for every binary package produced by this source in the suite for this architecture 

351 if arch == 'all': 

352 consider_binaries = source_u.binaries 

353 else: 

354 # Will also include arch:all for the given architecture (they are filtered out 

355 # below) 

356 consider_binaries = sorted(x for x in source_u.binaries if x.architecture == arch) 

357 for pkg_id in consider_binaries: 

358 pkg = pkg_id.package_name 

359 

360 # retrieve the binary package and its source version 

361 binary_u = all_binaries[pkg_id] 

362 pkgsv = binary_u.source_version 

363 

364 # arch:all packages are treated separately from arch:arch 

365 if binary_u.architecture != arch: 

366 continue 

367 

368 # TODO filter binaries based on checks below? 

369 excuse.add_package(pkg_id) 

370 

371 # if it wasn't built by the same source, it is out-of-date 

372 # if there is at least one binary on this arch which is 

373 # up-to-date, there is a build on this arch 

374 if source_u.version != pkgsv or pkg_id.architecture == "faux": 

375 if pkgsv not in oodbins: 

376 oodbins[pkgsv] = set() 

377 oodbins[pkgsv].add(pkg) 

378 if pkg_id.architecture != "faux": 

379 excuse.add_old_binary(pkg, pkgsv) 

380 continue 

381 else: 

382 uptodatebins = True 

383 

384 # if there are out-of-date packages, warn about them in the excuse and set excuse.is_valid 

385 # to False to block the update; if the architecture where the package is out-of-date is 

386 # in the `outofsync_arches' list, then do not block the update 

387 if oodbins: 

388 oodtxt = "" 

389 for v in sorted(oodbins): 

390 if oodtxt:  [390 ↛ 391: line 390 didn't jump to line 391, because the condition on line 390 was never true]

391 oodtxt = oodtxt + "; " 

392 oodtxt = oodtxt + "%s (from %s)" % \ 

393 (", ".join(sorted(oodbins[v])), 

394 self._get_build_link(arch, src, v, label=v)) 

395 

396 if uptodatebins: 

397 text = "old binaries left on %s: %s" % \ 

398 (self._get_build_link(arch, src, source_u.version), oodtxt) 

399 else: 

400 text = "missing build on %s" % \ 

401 (self._get_build_link(arch, src, source_u.version)) 

402 

403 if arch in self.options.outofsync_arches: 

404 text = text + " (but %s isn't keeping up, so nevermind)" % (arch) 

405 if not uptodatebins:  [405 ↛ 347: line 405 didn't jump to line 347, because the condition on line 405 was never false]

406 excuse.missing_build_on_ood_arch(arch) 

407 else: 

408 if uptodatebins: 

409 if self.options.ignore_cruft: 

410 text = text + " (but ignoring cruft, so nevermind)" 

411 excuse.add_detailed_info(text) 

412 else: 

413 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

414 excuse.addreason("cruft") 

415 excuse.add_verdict_info(excuse.policy_verdict, text) 

416 else: 

417 excuse.policy_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT 

418 excuse.missing_build_on_arch(arch) 

419 excuse.addreason("missingbuild") 

420 excuse.add_verdict_info(excuse.policy_verdict, text) 

421 if excuse.old_binaries: 

422 excuse.add_detailed_info("old binaries on %s: %s" % (arch, oodtxt)) 

423 

424 # if the source package has no binaries, set is_valid to False to block the update 

425 if not {x for x in source_u.binaries if x[2] != "faux"}: 

426 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

427 excuse.add_verdict_info(excuse.policy_verdict, "%s has no binaries on any arch" % src) 

428 excuse.addreason("no-binaries") 

429 

430 self._policy_engine.apply_src_policies(item, source_t, source_u, excuse) 

431 

432 if source_suite.suite_class.is_additional_source and source_t: 

433 # o-o-d(ish) checks for (t-)p-u 

434 # This only makes sense if the package is actually in testing. 

435 for arch in self.options.architectures: 

436 # if the package in testing has no binaries on this 

437 # architecture, it can't be out-of-date 

438 if not any(x for x in source_t.binaries 

439 if x.architecture == arch and all_binaries[x].architecture != 'all'): 

440 continue 

441 

442 # if the (t-)p-u package has produced any binaries on 

443 # this architecture then we assume it's ok. this allows for 

444 # uploads to (t-)p-u which intentionally drop binary 

445 # packages 

446 if any(x for x in source_suite.binaries[arch].values() 

447 if x.source == src and x.source_version == source_u.version and x.architecture != 'all'): 

448 continue 

449 

450 # TODO: Find a way to avoid hardcoding pu/stable relation. 

451 if suite_name == 'pu':  [451 ↛ 452: line 451 didn't jump to line 452, because the condition on line 451 was never true]

452 base = 'stable' 

453 else: 

454 base = target_suite.name 

455 text = "Not yet built on %s (relative to target suite)" % \ 

456 (self._get_build_link(arch, src, source_u.version)) 

457 

458 if arch in self.options.outofsync_arches:  [458 ↛ 459: line 458 didn't jump to line 459, because the condition on line 458 was never true]

459 text = text + " (but %s isn't keeping up, so never mind)" % (arch) 

460 excuse.missing_build_on_ood_arch(arch) 

461 excuse.addinfo(text) 

462 else: 

463 excuse.policy_verdict = PolicyVerdict.REJECTED_CANNOT_DETERMINE_IF_PERMANENT 

464 excuse.missing_build_on_arch(arch) 

465 excuse.addreason("missingbuild") 

466 excuse.add_verdict_info(excuse.policy_verdict, text) 

467 

468 # check if there is a `force' hint for this package, which allows it to go in even if it is not updateable 

469 forces = self.hints.search('force', package=src, version=source_u.version) 

470 if forces: 

471 # force() updates the final verdict for us 

472 changed_state = excuse.force() 

473 if changed_state: 

474 excuse.addinfo("Should ignore, but forced by %s" % (forces[0].user)) 

475 

476 self.excuses[excuse.name] = excuse 

477 return excuse.is_valid 

478 
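To make the out-of-date bookkeeping above concrete, here is a stripped-down sketch of what the per-architecture loop computes; the package names and versions are invented for illustration:

# binaries currently published for one source on one architecture,
# as (binary name, source version it was built from) pairs
binaries = [("foo", "2.0-1"), ("foo-utils", "2.0-1"), ("libfoo1", "1.9-3")]
current_source_version = "2.0-1"

oodbins = {}           # old source version -> binary names still built from it
uptodatebins = False
for name, pkgsv in binaries:
    if pkgsv != current_source_version:
        oodbins.setdefault(pkgsv, set()).add(name)
    else:
        uptodatebins = True

# oodbins == {"1.9-3": {"libfoo1"}} and uptodatebins is True, so the excuse
# would report "old binaries left" rather than "missing build" for this arch.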

479 def _compute_excuses_and_initial_actionable_items(self): 

480 # list of local methods and variables (for better performance) 

481 excuses = self.excuses 

482 suite_info = self.suite_info 

483 pri_source_suite = suite_info.primary_source_suite 

484 architectures = self.options.architectures 

485 should_remove_source = self._should_remove_source 

486 should_upgrade_srcarch = self._should_upgrade_srcarch 

487 should_upgrade_src = self._should_upgrade_src 

488 

489 sources_ps = pri_source_suite.sources 

490 sources_t = suite_info.target_suite.sources 

491 

492 # this set will contain the packages which are valid candidates; 

493 # if a package is going to be removed, it will have a "-" prefix 

494 actionable_items = set() 

495 actionable_items_add = actionable_items.add # Every . in a loop slows it down 

496 

497 # for every source package in testing, check if it should be removed 

498 for pkg in sources_t: 

499 if pkg not in sources_ps: 

500 src = sources_t[pkg] 

501 item = MigrationItem(package=pkg, 

502 version=src.version, 

503 suite=suite_info.target_suite, 

504 is_removal=True) 

505 if should_remove_source(item): 

506 actionable_items_add(item) 

507 

508 # for every source package in the source suites, check if it should be upgraded 

509 for suite in chain((pri_source_suite, *suite_info.additional_source_suites)): 

510 sources_s = suite.sources 

511 for pkg in sources_s: 

512 src_s_data = sources_s[pkg] 

513 if src_s_data.is_fakesrc: 

514 continue 

515 src_t_data = sources_t.get(pkg) 

516 

517 if src_t_data is None or apt_pkg.version_compare(src_s_data.version, src_t_data.version) != 0: 

518 item = MigrationItem(package=pkg, 

519 version=src_s_data.version, 

520 suite=suite) 

521 # check if the source package should be upgraded 

522 if should_upgrade_src(item): 

523 actionable_items_add(item) 

524 else: 

525 # package has same version in source and target suite; check if any of the 

526 # binaries have changed on the various architectures 

527 for arch in architectures: 

528 item = MigrationItem(package=pkg, 

529 version=src_s_data.version, 

530 architecture=arch, 

531 suite=suite) 

532 if should_upgrade_srcarch(item): 

533 actionable_items_add(item) 

534 

535 # process the `remove' hints, if the given package is not yet in actionable_items 

536 for hint in self.hints['remove']: 

537 src = hint.package 

538 if src not in sources_t: 

539 continue 

540 

541 existing_items = set(x for x in actionable_items if x.package == src) 

542 if existing_items: 

543 self.logger.info("removal hint '%s' ignored due to existing item(s) %s" % 

544 (hint, [i.name for i in existing_items])) 

545 continue 

546 

547 tsrcv = sources_t[src].version 

548 item = MigrationItem(package=src, 

549 version=tsrcv, 

550 suite=suite_info.target_suite, 

551 is_removal=True) 

552 

553 # check if the version specified in the hint is the same as the considered package 

554 if tsrcv != hint.version:  [554 ↛ 555: line 554 didn't jump to line 555, because the condition on line 554 was never true]

555 continue 

556 

557 # add the removal of the package to actionable_items and build a new excuse 

558 excuse = Excuse(item) 

559 excuse.set_vers(tsrcv, None) 

560 excuse.addinfo("Removal request by %s" % (hint.user)) 

561 # if the removal of the package is blocked, skip it 

562 blocked = False 

563 for blockhint in self.hints.search('block', package=src, removal=True): 

564 excuse.policy_verdict = PolicyVerdict.REJECTED_PERMANENTLY 

565 excuse.add_verdict_info( 

566 excuse.policy_verdict, 

567 "Not removing package, due to block hint by %s " 

568 "(contact %s-release if update is needed)" % (blockhint.user, 

569 self.options.distribution)) 

570 excuse.addreason("block") 

571 blocked = True 

572 

573 if blocked: 

574 excuses[excuse.name] = excuse 

575 continue 

576 

577 actionable_items_add(item) 

578 excuse.addinfo("Package is broken, will try to remove") 

579 excuse.add_hint(hint) 

580 # Using "PASS" here as "Created by a hint" != "accepted due to hint". In a future 

581 # where there might be policy checks on removals, it would make sense to distinguish 

582 # those two states. Not sure that future will ever be. 

583 excuse.policy_verdict = PolicyVerdict.PASS 

584 excuses[excuse.name] = excuse 

585 

586 return actionable_items 

587 

588 def find_actionable_excuses(self): 

589 excuses = self.excuses 

590 actionable_items = self._compute_excuses_and_initial_actionable_items() 

591 valid = {x.name for x in actionable_items} 

592 

593 # extract the not-considered packages, which have excuses but are not in the valid set 

594 unconsidered = {ename for ename in excuses if ename not in valid} 

595 invalidated = set() 

596 

597 invalidate_excuses(excuses, valid, unconsidered, invalidated) 

598 

599 # check that the list of actionable items matches the list of valid 

600 # excuses 

601 assert_sets_equal(valid, {x for x in excuses if excuses[x].is_valid}) 

602 

603 # check that the rdeps for all invalid excuses were invalidated 

604 assert_sets_equal(invalidated, {x for x in excuses if not excuses[x].is_valid}) 

605 

606 actionable_items = {x for x in actionable_items if x.name in valid} 

607 return excuses, actionable_items 

608 

609 

610def assert_sets_equal(a, b): 

611 if a != b:  [611 ↛ 612: line 611 didn't jump to line 612, because the condition on line 611 was never true]

612 raise AssertionError("sets not equal a-b {} b-a {}".format(a-b, b-a))
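
assert_sets_equal exists so that the two consistency checks in find_actionable_excuses fail with a message naming the offending items rather than a bare AssertionError. For example, with hypothetical item names:

valid = {"foo", "bar/amd64"}
computed = {"foo", "baz"}
try:
    assert_sets_equal(valid, computed)
except AssertionError as e:
    print(e)  # sets not equal a-b {'bar/amd64'} b-a {'baz'}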