
Source Code for Module entropy.client.interfaces.dep

# -*- coding: utf-8 -*-
"""

    @author: Fabio Erculiani <[email protected]>
    @contact: [email protected]
    @copyright: Fabio Erculiani
    @license: GPL-2

    B{Entropy Package Manager Client Dependency handling Interface}.

"""
import os
import collections
import hashlib

from entropy.const import etpConst, const_debug_write, \
    const_isnumber, const_convert_to_rawstring, const_convert_to_unicode, \
    const_debug_enabled, const_file_readable
from entropy.exceptions import RepositoryError, SystemDatabaseError, \
    DependenciesNotFound, DependenciesNotRemovable, DependenciesCollision
from entropy.graph import Graph
from entropy.misc import Lifo
from entropy.output import bold, darkgreen, darkred, blue, purple, teal, brown
from entropy.i18n import _
from entropy.db.exceptions import IntegrityError, OperationalError, \
    DatabaseError, InterfaceError, Error as EntropyRepositoryError
from entropy.db.skel import EntropyRepositoryBase
from entropy.client.interfaces.db import InstalledPackagesRepository
from entropy.client.misc import sharedinstlock

import entropy.dep

class CalculatorsMixin:

    @sharedinstlock
    def dependencies_test(self):

        # get all the installed packages
        installed_packages = self.installed_repository().listAllPackageIds()

        pdepend_id = etpConst['dependency_type_ids']['pdepend_id']
        bdepend_id = etpConst['dependency_type_ids']['bdepend_id']
        deps_not_matched = set()
        deps_cache = set()

        total = len(installed_packages)
        for count, package_id in enumerate(installed_packages, 1):

            if (count % 150 == 0) or (count == total) or (count == 1):
                atom = self.installed_repository().retrieveAtom(package_id)
                self.output(
                    darkgreen(_("Checking %s") % (bold(atom),)),
                    importance = 0,
                    level = "info",
                    back = True,
                    count = (count, total),
                    header = darkred(" @@ ")
                )

            xdeps = self.installed_repository().retrieveDependencies(package_id,
                exclude_deptypes = (pdepend_id, bdepend_id,))

            # filter out already matched pkgs
            xdeps = [x for x in xdeps if x not in deps_cache]
            deps_cache.update(xdeps)

            needed_deps = [(x, self.installed_repository().atomMatch(x),) for \
                x in xdeps]
            deps_not_matched |= set(
                [x for x, (y, z,) in needed_deps if y == -1])

        return deps_not_matched

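    # Illustrative usage sketch (not part of the original module), assuming
    # "client" is the Entropy client object this mixin is part of: the
    # returned set contains installed-package dependency strings that no
    # longer match anything in the installed packages repository.
    #
    #     missing = client.dependencies_test()
    #     for dependency in sorted(missing):
    #         print("unmatched dependency:", dependency)
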
    def __handle_multi_repo_matches(self, results, extended_results,
                                    valid_repos):

        pkg_info = {}
        ver_info = {}
        # package repos have always the precedence, so if we find them,
        # we should second what user wants, installing his package
        pkg_repos = [x for x in results if x.endswith(etpConst['packagesext'])]
        if pkg_repos:
            newrepos = results.copy()
            for x in newrepos:
                if x.endswith(etpConst['packagesext']):
                    continue
                del results[x]

        version_duplicates = set()
        versions = set()
        for repo in results:
            pkg_info[repo] = {}
            if extended_results:
                version = results[repo][1]
                pkg_info[repo]['versiontag'] = results[repo][2]
                pkg_info[repo]['revision'] = results[repo][3]
            else:
                dbconn = self.open_repository(repo)
                pkg_info[repo]['versiontag'] = dbconn.retrieveTag(results[repo])
                pkg_info[repo]['revision'] = dbconn.retrieveRevision(
                    results[repo])
                version = dbconn.retrieveVersion(results[repo])
            pkg_info[repo]['version'] = version
            ver_info[version] = repo
            if version in versions:
                version_duplicates.add(version)
            versions.add(version)

        newer_ver = entropy.dep.get_newer_version(list(versions))[0]
        # if no duplicates are found or newer version is not in
        # duplicates we're done
        if (not version_duplicates) or (newer_ver not in version_duplicates):
            reponame = ver_info.get(newer_ver)
            return (results[reponame], reponame)

        # we have two repositories with >two packages with the same version
        # check package tag

        conflict_entries = {}
        tags_duplicates = set()
        tags = set()
        tagsInfo = {}
        for repo in pkg_info:
            if pkg_info[repo]['version'] != newer_ver:
                continue
            conflict_entries[repo] = {}
            versiontag = pkg_info[repo]['versiontag']
            if versiontag in tags:
                tags_duplicates.add(versiontag)
            tags.add(versiontag)
            tagsInfo[versiontag] = repo
            conflict_entries[repo]['versiontag'] = versiontag
            conflict_entries[repo]['revision'] = pkg_info[repo]['revision']

        # tags will always be != []
        newer_tag = entropy.dep.sort_entropy_package_tags(tags)[-1]
        if newer_tag not in tags_duplicates:
            reponame = tagsInfo.get(newer_tag)
            return (results[reponame], reponame)

        # in this case, we have >two packages with the same version and tag
        # check package revision

        conflictingRevisions = {}
        revisions = set()
        revisions_duplicates = set()
        revisionInfo = {}
        for repo in conflict_entries:
            if conflict_entries[repo]['versiontag'] == newer_tag:
                conflictingRevisions[repo] = {}
                versionrev = conflict_entries[repo]['revision']
                if versionrev in revisions:
                    revisions_duplicates.add(versionrev)
                revisions.add(versionrev)
                revisionInfo[versionrev] = repo
                conflictingRevisions[repo]['revision'] = versionrev

        newerRevision = max(revisions)
        if newerRevision not in revisions_duplicates:
            reponame = revisionInfo.get(newerRevision)
            return (results[reponame], reponame)

        # final step, in this case we have >two packages with
        # the same version, tag and revision
        # get the repository with the biggest priority
        for reponame in valid_repos:
            if reponame in conflictingRevisions:
                return (results[reponame], reponame)

    def atom_match(self, atom, match_slot = None, mask_filter = True,
                   multi_match = False, multi_repo = False, match_repo = None,
                   extended_results = False, use_cache = True):
        """
        Match one or more packages inside all the available repositories.
        """
        # support match in repository from shell
        # [email protected],repo2,repo3
        atom, repos = entropy.dep.dep_get_match_in_repos(atom)
        if (match_repo is None) and (repos is not None):
            match_repo = repos
        if match_repo is None:
            match_repo = tuple()

        cache_key = None
        if self.xcache and use_cache:
            sha = hashlib.sha1()

            cache_fmt = "a{%s}mr{%s}ms{%s}rh{%s}mf{%s}"
            cache_fmt += "ar{%s}m{%s}cm{%s}s{%s;%s;%s}"
            cache_s = cache_fmt % (
                atom,
                ";".join(match_repo),
                match_slot,
                self.repositories_checksum(),
                mask_filter,
                ";".join(sorted(self._settings['repositories']['available'])),
                self._settings.packages_configuration_hash(),
                self._settings_client_plugin.packages_configuration_hash(),
                multi_match,
                multi_repo,
                extended_results)
            sha.update(const_convert_to_rawstring(cache_s))

            cache_key = "atom_match/atom_match_%s" % (sha.hexdigest(),)

            cached = self._cacher.pop(cache_key)
            if cached is not None:
                return cached

        valid_repos = self._enabled_repos
        if match_repo and (type(match_repo) in (list, tuple, set)):
            valid_repos = list(match_repo)

        repo_results = {}

        # simple "or" dependency support
        # app-foo/foo-1.2.3;app-foo/bar-1.4.3?
        if atom.endswith(etpConst['entropyordepquestion']):
            # or dependency!
            atoms = atom[:-1].split(etpConst['entropyordepsep'])
            for s_atom in atoms:
                for repo in valid_repos:
                    data, rc = self.atom_match(s_atom, match_slot = match_slot,
                        mask_filter = mask_filter, multi_match = multi_match,
                        multi_repo = multi_repo, match_repo = match_repo,
                        extended_results = extended_results,
                        use_cache = use_cache)
                    if rc != 1:
                        # checking against 1 works in any case here
                        # for simple, multi and extended match
                        return data, rc
        else:
            for repo in valid_repos:

                # search
                try:
                    dbconn = self.open_repository(repo)
                except (RepositoryError, SystemDatabaseError):
                    # ouch, repository not available or corrupted !
                    continue
                xuse_cache = use_cache

                while True:
                    try:
                        query_data, query_rc = dbconn.atomMatch(
                            atom,
                            matchSlot = match_slot,
                            maskFilter = mask_filter,
                            extendedResults = extended_results,
                            useCache = xuse_cache
                        )
                        if query_rc == 0:
                            # package found, add to our dictionary
                            if extended_results:
                                repo_results[repo] = (query_data[0],
                                    query_data[2], query_data[3],
                                    query_data[4])
                            else:
                                repo_results[repo] = query_data
                    except TypeError:
                        if not xuse_cache:
                            raise
                        xuse_cache = False
                        continue
                    except (OperationalError, DatabaseError):
                        # OperationalError => error in data format
                        # DatabaseError => database disk image is malformed
                        # repository fooked, skip!
                        break
                    break

        dbpkginfo = (-1, 1)
        if extended_results:
            dbpkginfo = ((-1, None, None, None), 1)

        if multi_repo and repo_results:

            data = set()
            for repoid in repo_results:
                data.add((repo_results[repoid], repoid))
            dbpkginfo = (data, 0)

        elif len(repo_results) == 1:
            # one result found
            repo = list(repo_results.keys())[0]
            dbpkginfo = (repo_results[repo], repo)

        elif len(repo_results) > 1:

            # we have to decide which version should be taken
            mypkginfo = self.__handle_multi_repo_matches(repo_results,
                extended_results, valid_repos)
            if mypkginfo is not None:
                dbpkginfo = mypkginfo

        # multimatch support
        if multi_match:

            if dbpkginfo[1] == 1:
                dbpkginfo = set(), 1
            else: # can be "0" or a string, but 1 means failure
                if multi_repo:
                    data = set()
                    for q_id, q_repo in dbpkginfo[0]:
                        dbconn = self.open_repository(q_repo)
                        query_data, query_rc = dbconn.atomMatch(
                            atom,
                            matchSlot = match_slot,
                            maskFilter = mask_filter,
                            multiMatch = True,
                            extendedResults = extended_results
                        )
                        if extended_results:
                            for item in query_data:
                                _item_d = (item[0], item[2], item[3], item[4])
                                data.add((_item_d, q_repo))
                        else:
                            for x in query_data:
                                data.add((x, q_repo))
                    dbpkginfo = (data, 0)
                else:
                    dbconn = self.open_repository(dbpkginfo[1])
                    query_data, query_rc = dbconn.atomMatch(
                        atom,
                        matchSlot = match_slot,
                        maskFilter = mask_filter,
                        multiMatch = True,
                        extendedResults = extended_results
                    )
                    if extended_results:
                        dbpkginfo = (
                            set([((x[0], x[2], x[3], x[4]), dbpkginfo[1]) \
                                for x in query_data]), 0)
                    else:
                        dbpkginfo = (
                            set([(x, dbpkginfo[1]) for x in query_data]), 0)

        if cache_key is not None:
            self._cacher.push(cache_key, dbpkginfo)

        return dbpkginfo

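    # Illustrative usage sketch (not part of the original module), assuming
    # "client" is the Entropy client object this mixin is part of: a simple
    # match returns a (package_id, repository_id) tuple, or (-1, 1) when
    # nothing matches.
    #
    #     package_id, repository_id = client.atom_match("app-shells/bash")
    #     if package_id != -1:
    #         repo = client.open_repository(repository_id)
    #         print(repo.retrieveAtom(package_id))
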
    def atom_search(self, keyword, description = False, repositories = None,
                    use_cache = True):
        """
        Search packages inside all the available repositories, including the
        installed packages one.
        Results are returned in random order by default, and as a list of
        package matches (pkg_id_int, repo_string).

        @param keyword: string to search
        @type keyword: string
        @keyword description: if True, also search through package description
        @type description: bool
        @keyword repositories: list of repository identifiers to search
            packages into
        @type repositories: list
        @keyword use_cache: if True, on-disk caching is used
        @type use_cache: bool
        """
        if repositories is None:
            repositories = self.repositories()[:]
            repositories.insert(0, InstalledPackagesRepository.NAME)

        cache_key = None
        if self.xcache and use_cache:
            sha = hashlib.sha1()

            cache_s = "k{%s}re{%s}de{%s}rh{%s}m{%s}cm{%s}ar{%s}" % (
                keyword,
                ";".join(repositories),
                description,
                self.repositories_checksum(),
                self._settings.packages_configuration_hash(),
                self._settings_client_plugin.packages_configuration_hash(),
                ";".join(sorted(self._settings['repositories']['available'])),
                )
            sha.update(const_convert_to_rawstring(cache_s))

            cache_key = "atom_search/s_%s" % (sha.hexdigest(),)

            cached = self._cacher.pop(cache_key)
            if cached is not None:
                return cached

        atom = keyword[:]
        match_slot = entropy.dep.dep_getslot(atom)
        if match_slot:
            atom = entropy.dep.remove_slot(atom)
        search_tag = entropy.dep.dep_gettag(atom)
        if search_tag:
            atom = entropy.dep.remove_tag(atom)

        matches = []

        for repository in repositories:

            try:
                repo = self.open_repository(repository)
            except (RepositoryError, SystemDatabaseError):
                # ouch, repository not available or corrupted !
                continue

            pkg_ids = repo.searchPackages(
                atom, slot = match_slot,
                tag = search_tag,
                just_id = True)

            matches.extend((pkg_id, repository) for pkg_id in pkg_ids)

        # less relevance
        if description:
            matches_cache = set()
            matches_cache.update(matches)

            for repository in repositories:

                try:
                    repo = self.open_repository(repository)
                except (RepositoryError, SystemDatabaseError):
                    # ouch, repository not available or corrupted !
                    continue

                pkg_ids = repo.searchDescription(keyword, just_id = True)
                pkg_matches = [(pkg_id, repository) for pkg_id in pkg_ids]
                matches.extend(pkg_match for pkg_match in pkg_matches if \
                    pkg_match not in matches_cache)
                matches_cache.update(pkg_matches)

            matches_cache.clear()

        if cache_key is not None:
            self._cacher.push(cache_key, matches)

        return matches

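    # Illustrative usage sketch (not part of the original module): the
    # search returns (package_id, repository_id) tuples, including matches
    # from the installed packages repository.
    #
    #     for package_id, repository_id in client.atom_search("bash"):
    #         repo = client.open_repository(repository_id)
    #         print(repository_id, repo.retrieveAtom(package_id))
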
    def _resolve_or_dependencies(self, dependencies, selected_matches,
                                 _selected_matches_cache = None):
        """
        Resolve a simple or dependency like "foo;bar;baz?" by looking at the
        currently installed packages and those that would be installed.
        The outcome is the selected dependency, if possible.

        @param dependencies: ordered list of or dependencies, recursion not
            supported.
        @type dependencies: list
        @param selected_matches: a list of package matches that
            compose the dependency graph.
        @type selected_matches: list
        @return: the new dependency string
        @rtype: string
        """
        inst_repo = self.installed_repository()
        if _selected_matches_cache is None:
            cache = {}
        else:
            cache = _selected_matches_cache

        def _generate_keyslot_cache():
            keyslot_map = {}
            keyslot_set = set()
            for package_id, repository_id in selected_matches:
                repo = self.open_repository(repository_id)
                keyslot = repo.retrieveKeySlot(package_id)
                keyslot_set.add(keyslot)

                obj = keyslot_map.setdefault(keyslot, set())
                obj.add((package_id, repository_id))
            cache['map'] = keyslot_map
            cache['set'] = keyslot_set

        selected = False
        found_matches = []
        for dep in dependencies:

            # determine if dependency has been explicitly selected
            matches, _pkg_rc = self.atom_match(
                dep, multi_match = True, multi_repo = True)
            if matches:
                found_matches.append((dep, matches))
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency, filtering %s, got matches: %s" % (
                            dep, matches,))

        if const_debug_enabled():
            const_debug_write(
                __name__,
                "_resolve_or_dependency, "
                "filtered list: %s" % (found_matches,))

        for dep, matches in found_matches:
            common = set(matches) & selected_matches
            if common:
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency candidate => %s, "
                        "has been explicitly selected. "
                        "Found the dependency though." % (dep,))
                dependency = dep
                selected = True
                break

            package_ids, _pkg_rc = inst_repo.atomMatch(
                dep, multiMatch = True)
            if not package_ids:
                # no matches, skip this.
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency candidate => %s, no "
                        "installed matches, skipping for now" % (dep,))
                continue

            if const_debug_enabled():
                const_debug_write(
                    __name__,
                    "_resolve_or_dependency, "
                    "or dependency candidate => %s ?" % (
                        dep,))

            # generate cache now.
            if not cache:
                _generate_keyslot_cache()

            dep_keyslot_set = set()
            for package_id in package_ids:
                dep_keyslot_set.add(
                    inst_repo.retrieveKeySlot(package_id))
            common = cache['set'] & dep_keyslot_set

            if not common:
                # there is nothing in common between the
                # dependency and the selected matches.
                # We found it !
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency candidate => %s, "
                        "no common keyslots between selected and this. "
                        "Found the dependency though." % (dep,))
                dependency = dep
                selected = True
                break

            if const_debug_enabled():
                const_debug_write(
                    __name__,
                    "_resolve_or_dependency, "
                    "or dependency candidate => %s, "
                    "common slots with selected matches: %s "
                    "(selected matches: %s)" % (
                        dep, common, selected_matches,))

            if common:
                common_pkg_matches = set()
                for keyslot in common:
                    common_pkg_matches.update(cache['map'][keyslot])

                # determining if the new packages are still matching
                # the selected dependency in the or literal.
                repo_matches, repo_rc = self.atom_match(
                    dep, multi_match = True, multi_repo = True)
                common = set(repo_matches) & common_pkg_matches

                if const_debug_enabled():
                    if common:
                        const_debug_write(
                            __name__,
                            "_resolve_or_dependency, "
                            "or dependency candidate => %s, "
                            "common slots with selected matches: %s "
                            "(selected matches: %s)" % (
                                dep, common, selected_matches,))
                    else:
                        const_debug_write(
                            __name__,
                            "_resolve_or_dependency, "
                            "or dependency candidate => %s, "
                            "installing %s would make the dependency "
                            "invalid." % (dep, common,))

                if not common:
                    if const_debug_enabled():
                        const_debug_write(
                            __name__,
                            "_resolve_or_dependency, "
                            "or dependency candidate => %s, "
                            "no common packages found. Sorry." % (
                                dep,))
                    continue

                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency, selected => %s, from: %s" % (
                            dep, dependencies,))
                # found it, rewrite dependency and c_ids
                dependency = dep
                selected = True
                break

        if not selected:
            # then pick the first available in repositories, if any,
            # which is considered the default choice.
            if found_matches:
                dependency, _matches = found_matches[0]
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency candidate => %s, will "
                        "pick this (the default one)" % (dependency,))
            else:
                dependency = dependencies[0]
                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "_resolve_or_dependency, "
                        "or dependency candidate => %s, nothing found, "
                        "will pick this (the first one)" % (dependency,))

        return dependency

    DISABLE_SLOT_INTERSECTION = os.getenv("ETP_DISABLE_SLOT_INTERSECTION")

    def _get_unsatisfied_dependencies(self, dependencies, deep_deps = False,
                                      relaxed_deps = False, depcache = None,
                                      match_repo = None):
638 639 inst_repo = self.installed_repository() 640 cl_settings = self.ClientSettings() 641 misc_settings = cl_settings['misc'] 642 ignore_spm_downgrades = misc_settings['ignore_spm_downgrades'] 643 cache_key = None 644 645 if self.xcache: 646 sha = hashlib.sha1() 647 648 cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|v7" % ( 649 ";".join(sorted(dependencies)), 650 deep_deps, 651 inst_repo.checksum(), 652 self.repositories_checksum(), 653 self._settings.packages_configuration_hash(), 654 self._settings_client_plugin.packages_configuration_hash(), 655 ";".join(sorted(self._settings['repositories']['available'])), 656 relaxed_deps, 657 ignore_spm_downgrades, 658 match_repo) 659 sha.update(const_convert_to_rawstring(cache_s)) 660 661 cache_key = "unsat_deps/%s" % ( 662 sha.hexdigest()) 663 664 cached = self._cacher.pop(cache_key) 665 if cached is not None: 666 return cached 667 668 if const_debug_enabled(): 669 const_debug_write(__name__, 670 "_get_unsatisfied_dependencies (not cached, deep: %s) for => %s" % ( 671 deep_deps, dependencies,)) 672 673 etp_cmp = entropy.dep.entropy_compare_versions 674 etp_get_rev = entropy.dep.dep_get_entropy_revision 675 676 if depcache is None: 677 depcache = {} 678 679 def push_to_cache(dependency, is_unsat): 680 # push to cache 681 depcache[dependency] = is_unsat
682 683 def _my_get_available_tags(dependency, installed_tags): 684 available_tags = set() 685 matches, t_rc = self.atom_match(dependency, multi_match = True, 686 multi_repo = True, match_repo = match_repo) 687 for pkg_id, repo_id in matches: 688 dbconn = self.open_repository(repo_id) 689 t_ver_tag = dbconn.retrieveTag(pkg_id) 690 if installed_tags is None: 691 available_tags.add(t_ver_tag) 692 elif t_ver_tag in installed_tags: 693 available_tags.add(t_ver_tag) 694 return sorted(available_tags, reverse = True) 695 696 def _is_matching_tag(c_ids, pkg_dep, tag): 697 for c_id in c_ids: 698 c_slot = inst_repo.retrieveSlot(c_id) 699 # pkg_dep already contains the tag part 700 a_id, a_repo_id = self.atom_match(pkg_dep, 701 match_slot = c_slot, match_repo = match_repo) 702 if a_repo_id == 1: 703 continue 704 return True 705 return False 706 707 unsatisfied = set() 708 for dependency in dependencies: 709 710 if dependency in depcache: 711 # already analized ? 712 is_unsat = depcache[dependency] 713 if is_unsat: 714 unsatisfied.add(dependency) 715 if const_debug_enabled(): 716 const_debug_write(__name__, 717 "_get_unsatisfied_dependencies control cached for => %s" % ( 718 dependency,)) 719 const_debug_write(__name__, "...") 720 continue 721 722 ### conflict 723 if dependency.startswith("!"): 724 package_id, rc = inst_repo.atomMatch( 725 dependency[1:]) 726 if package_id != -1: 727 if const_debug_enabled(): 728 const_debug_write( 729 __name__, 730 "_get_unsatisfied_dependencies conflict not " 731 "found on system for => %s" % (dependency,)) 732 const_debug_write(__name__, "...") 733 unsatisfied.add(dependency) 734 push_to_cache(dependency, True) 735 continue 736 737 if const_debug_enabled(): 738 const_debug_write(__name__, "...") 739 push_to_cache(dependency, False) 740 continue 741 742 c_ids, c_rc = inst_repo.atomMatch(dependency, 743 multiMatch = True) 744 if c_rc != 0: 745 746 # check if dependency can be matched in available repos and 747 # if it is a tagged package, in this case, we need to rewrite 748 # the dependency string to restrict its scope 749 dependency_tag = entropy.dep.dep_gettag(dependency) 750 if not dependency_tag: 751 # also filter out empty tags (pkgs without tags) 752 av_tags = [x for x in \ 753 _my_get_available_tags(dependency, None) if x] 754 if av_tags: 755 matching_tags = set() 756 i_key = entropy.dep.dep_getkey(dependency) 757 for a_tag in av_tags: 758 a_dep_tag = i_key + \ 759 etpConst['entropytagprefix'] + a_tag 760 c_ids, c_rc = inst_repo.atomMatch( 761 a_dep_tag, multiMatch = True) 762 if c_rc != 0: 763 continue 764 if _is_matching_tag(c_ids, a_dep_tag, a_tag): 765 matching_tags.add(a_tag) 766 767 if matching_tags: 768 best_tag = entropy.dep.sort_entropy_package_tags( 769 matching_tags)[-1] 770 dependency += etpConst['entropytagprefix'] + \ 771 best_tag 772 773 if const_debug_enabled(): 774 const_debug_write( 775 __name__, 776 "_get_unsatisfied_dependencies not " 777 "satisfied on system for => %s" % ( 778 dependency,)) 779 const_debug_write(__name__, "...") 780 unsatisfied.add(dependency) 781 push_to_cache(dependency, True) 782 continue 783 784 # support for app-foo/foo-123~-1 785 # -1 revision means, always pull the latest 786 do_rev_deep = False 787 if not deep_deps: 788 string_rev = etp_get_rev(dependency) 789 if string_rev == -1: 790 do_rev_deep = True 791 792 # force_unsatisfied is another way to see "deep_deps". 793 # in this case, we are going to consider valid any dep that 794 # matches something in installed packages repo. 
795 if (not deep_deps) and (not do_rev_deep) and (relaxed_deps): 796 if const_debug_enabled(): 797 const_debug_write( 798 __name__, 799 "_get_unsatisfied_dependencies " 800 "(force unsat) SATISFIED => %s" % ( 801 dependency,)) 802 const_debug_write(__name__, "...") 803 push_to_cache(dependency, False) 804 continue 805 806 # WARN: unfortunately, need to deal with Portage (and other 807 # backends) old-style PROVIDE metadata 808 if entropy.dep.dep_getcat(dependency) == \ 809 EntropyRepositoryBase.VIRTUAL_META_PACKAGE_CATEGORY: 810 provide_stop = False 811 for c_id in c_ids: 812 # optimize speed with a trick 813 _provide = dict( 814 inst_repo.retrieveProvide(c_id)) 815 if dependency in _provide: 816 if const_debug_enabled(): 817 const_debug_write( 818 __name__, 819 "_get_unsatisfied_dependencies old-style " 820 "provide, satisfied => %s" % ( 821 dependency,)) 822 const_debug_write(__name__, "...") 823 push_to_cache(dependency, False) 824 provide_stop = True 825 break 826 if provide_stop: 827 continue 828 829 r_id, r_repo = self.atom_match(dependency, match_repo = match_repo) 830 if r_id == -1: 831 if const_debug_enabled(): 832 const_debug_write(__name__, 833 "_get_unsatisfied_dependencies repository match " 834 "not found for => %s, CONSIDER SATISFIED !" % (dependency,)) 835 const_debug_write(__name__, "...") 836 push_to_cache(dependency, False) 837 continue 838 839 # Slot intersection support: 840 # certain dependency strings could have 841 # cross-SLOT scope (multiple slots for same package are valid) 842 # causing unwanted dependencies to be pulled in. 843 # For example: if dependency is "dev-lang/python" 844 # and we have dev-lang/python-2 installed, python-3 845 # should be filtered out (if possible) by checking if 846 # the installed best dependency match slot is still 847 # available in repositories. 848 # If it is, restrict the dependency scope to the intersection 849 # between available SLOTs and installed SLOT. 850 multi_repo = False 851 if match_repo is None: 852 multi_repo = True 853 854 available_slots = set() 855 if not self.DISABLE_SLOT_INTERSECTION: 856 r_matches, r_rcs = self.atom_match( 857 dependency, match_repo = match_repo, 858 multi_match = True, multi_repo = multi_repo) 859 available_slots |= set(self.open_repository(x[1]).retrieveSlot( 860 x[0]) for x in r_matches) 861 if len(available_slots) > 1: 862 # more than one slot available 863 # pick the best one by calling atomMatch() without multiMatch 864 c_id, c_rc = inst_repo.atomMatch( 865 dependency) 866 installed_slot = None 867 if c_id != -1: 868 installed_slot = inst_repo.retrieveSlot( 869 c_id) 870 if installed_slot in available_slots: 871 # restrict my matching to installed_slot, rewrite 872 # r_id r_repo 873 # NOTE: assume that dependency has no tag nor etp rev 874 # also, if we got multiple slots, it means that the 875 # same dep is expressed without slot. 
876 old_r_id = r_id 877 old_r_repo = r_repo 878 r_id, r_repo = self.atom_match( 879 dependency, match_slot = installed_slot) 880 if r_id != -1: 881 # append slot to dependency 882 dependency += etpConst['entropyslotprefix'] \ 883 + installed_slot 884 885 if const_debug_enabled(): 886 from_atom = self.open_repository( 887 old_r_repo).retrieveAtom(old_r_id) 888 to_atom = self.open_repository( 889 r_repo).retrieveAtom(r_id) 890 const_debug_write( 891 __name__, 892 "_get_unsatisfied_dependencies " 893 " SLOT intersection: installed: " 894 "%s, available: %s, from: %s [%s], to: %s [%s]" % ( 895 installed_slot, available_slots, 896 (old_r_id, old_r_repo), 897 from_atom, (r_id, r_repo), 898 to_atom,)) 899 900 dbconn = self.open_repository(r_repo) 901 try: 902 repo_pkgver, repo_pkgtag, repo_pkgrev = \ 903 dbconn.getVersioningData(r_id) 904 # note: read rationale below 905 repo_digest = dbconn.retrieveDigest(r_id) 906 except (InterfaceError, TypeError,): 907 # package entry is broken 908 if const_debug_enabled(): 909 const_debug_write( 910 __name__, 911 "_get_unsatisfied_dependencies repository " 912 "entry broken for match => %s" % ( 913 (r_id, r_repo),)) 914 const_debug_write(__name__, "...") 915 unsatisfied.add(dependency) 916 push_to_cache(dependency, True) 917 continue 918 919 client_data = set() 920 for c_id in c_ids: 921 try: 922 installed_ver, installed_tag, installed_rev = \ 923 inst_repo.getVersioningData(c_id) 924 # note: read rationale below 925 installed_digest = inst_repo.retrieveDigest( 926 c_id) 927 except TypeError: # corrupted entry? 928 installed_ver = "0" 929 installed_tag = '' 930 installed_rev = 0 931 installed_digest = None 932 client_data.add((installed_ver, installed_tag, installed_rev, 933 installed_digest,)) 934 935 # restrict dependency matching scope inside mutually available 936 # package tags. Equals to tags available in both installed and 937 # available repositories. 938 dependency_tag = entropy.dep.dep_gettag(dependency) 939 installed_tags = [x[1] for x in client_data if x[1]] 940 if installed_tags and not dependency_tag: 941 942 installed_tags = set(installed_tags) 943 available_tags = _my_get_available_tags(dependency, 944 installed_tags) 945 946 if available_tags: 947 # always take the higher tag. 948 # NOW, reset variables used here below to make them 949 # pointing to proper tagged package, keeping scoped 950 # handling. 951 best_tag = entropy.dep.sort_entropy_package_tags( 952 available_tags)[-1] 953 954 # also change "dependency" to make it pointing to a 955 # stricter set of possible matches. 
956 dependency = dependency + \ 957 etpConst['entropytagprefix'] + best_tag 958 r_id, r_repo = self.atom_match(dependency, 959 match_repo = match_repo) 960 dbconn = self.open_repository(r_repo) 961 repo_pkgver, repo_pkgtag, repo_pkgrev = \ 962 dbconn.getVersioningData(r_id) 963 repo_digest = dbconn.retrieveDigest(r_id) 964 965 # this is required for multi-slotted packages (like python) 966 # and when people mix Entropy and Portage 967 do_cont = False 968 for installed_ver, installed_tag, installed_rev, cdigest in client_data: 969 970 vcmp = etp_cmp((repo_pkgver, repo_pkgtag, repo_pkgrev,), 971 (installed_ver, installed_tag, installed_rev,)) 972 973 # check if both pkgs share the same branch and digest, this must 974 # be done to avoid system inconsistencies across branch upgrades 975 if vcmp == 0: 976 # cdigest == "0" if repo has been manually (user-side) 977 # generated 978 if (cdigest != repo_digest) and (cdigest != "0"): 979 vcmp = 1 980 981 # check against SPM downgrades and ignore_spm_downgrades 982 if (vcmp < 0) and ignore_spm_downgrades and \ 983 (installed_rev == etpConst['spmetprev']) \ 984 and (installed_rev != repo_pkgrev): 985 # In this case, do not override Source Package Manager 986 # installed pkgs 987 if const_debug_enabled(): 988 const_debug_write(__name__, 989 "_get_unsatisfied_dependencies => SPM downgrade! " + \ 990 "(not cached, deep: %s) => %s" % ( 991 deep_deps, dependency,)) 992 vcmp = 0 993 994 if vcmp == 0: 995 if const_debug_enabled(): 996 const_debug_write(__name__, 997 "_get_unsatisfied_dependencies SATISFIED equals " + \ 998 "(not cached, deep: %s) => %s" % ( 999 deep_deps, dependency,)) 1000 const_debug_write(__name__, "...") 1001 do_cont = True 1002 push_to_cache(dependency, False) 1003 break 1004 1005 ver_tag_repo = (repo_pkgver, repo_pkgtag,) 1006 ver_tag_inst = (installed_ver, installed_tag,) 1007 rev_match = repo_pkgrev != installed_rev 1008 1009 if do_rev_deep and rev_match and (ver_tag_repo == ver_tag_inst): 1010 # this is unsatisfied then, need to continue to exit from 1011 # for cycle and add it to unsatisfied 1012 continue 1013 1014 if deep_deps: 1015 # also this is clearly unsatisfied if deep is enabled 1016 continue 1017 1018 if (ver_tag_repo == ver_tag_inst) and rev_match: 1019 if const_debug_enabled(): 1020 const_debug_write(__name__, 1021 "_get_unsatisfied_dependencies SATISFIED " + \ 1022 "w/o rev (not cached, deep: %s) => %s" % ( 1023 deep_deps, dependency,)) 1024 const_debug_write(__name__, "...") 1025 do_cont = True 1026 push_to_cache(dependency, False) 1027 break 1028 1029 if do_cont: 1030 continue 1031 1032 # if we get here it means that there are no matching packages 1033 if const_debug_enabled(): 1034 const_debug_write( 1035 __name__, 1036 "_get_unsatisfied_dependencies NOT SATISFIED " 1037 "(not cached, deep: %s) => %s" % ( 1038 deep_deps, dependency,)) 1039 const_debug_write(__name__, "...") 1040 1041 unsatisfied.add(dependency) 1042 push_to_cache(dependency, True) 1043 1044 if self.xcache: 1045 self._cacher.push(cache_key, unsatisfied) 1046 1047 return unsatisfied 1048
    def packages_expand(self, packages):
        """
        Given a list of user requested packages, expands it resolving for
        instance, items such as package sets.

        @param packages: list of user requested packages
        @type packages: list
        @return: expanded list
        @rtype: list
        """
        new_packages = []
        sets = self.Sets()

        set_pfx = etpConst['packagesetprefix']
        for pkg_id in range(len(packages)):
            package = packages[pkg_id]

            # expand package sets
            if package.startswith(set_pfx):
                cur_sets = sets.expand(package, raise_exceptions = False)
                set_pkgs = sorted(cur_sets)
                new_packages.extend([x for x in set_pkgs if x not in packages])
            else:
                new_packages.append(package)

        return new_packages

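    # Illustrative usage sketch (not part of the original module): package
    # set names start with etpConst['packagesetprefix'] (commonly "@"), so a
    # request mixing a set and a plain atom would be expanded to the set
    # members plus the atom itself.
    #
    #     expanded = client.packages_expand(["@xfce", "app-shells/bash"])
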
    def __generate_dependency_tree_inst_hooks(self, installed_match,
                                              pkg_match, build_deps,
                                              elements_cache,
                                              ldpaths):

        if const_debug_enabled():
            inst_atom = self.installed_repository().retrieveAtom(
                installed_match[0])
            atom = self.open_repository(pkg_match[1]
                ).retrieveAtom(pkg_match[0])
            const_debug_write(__name__,
                "__generate_dependency_tree_inst_hooks "
                "input: installed %s, avail %s" % (inst_atom, atom,))

        # these are inverse dependencies
        broken_children_matches = self._lookup_library_drops(pkg_match,
            installed_match[0])
        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_inst_hooks "
                "_lookup_library_drops, broken_children_matches => %s" % (
                    broken_children_matches,))

        after_pkgs, before_pkgs = self._lookup_library_breakages(
            pkg_match, installed_match[0], ldpaths)
        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_inst_hooks "
                "_lookup_library_breakages, "
                "after => %s, before => %s" % (
                    after_pkgs, before_pkgs,))

        inverse_deps = self._lookup_inverse_dependencies(pkg_match,
            installed_match[0], build_deps, elements_cache)
        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_inst_hooks "
                "_lookup_inverse_dependencies, inverse_deps => %s" % (
                    inverse_deps,))

        return broken_children_matches, after_pkgs, before_pkgs, inverse_deps

    def __generate_dependency_tree_analyze_conflict(self, pkg_match,
        conflict_str, conflicts, stack, graph, deep_deps):

        conflict_atom = conflict_str[1:]
        c_package_id, xst = self.installed_repository().atomMatch(conflict_atom)
        if c_package_id == -1:
            return # conflicting pkg is not installed

        confl_replacement = self._lookup_conflict_replacement(
            conflict_atom, c_package_id, deep_deps = deep_deps)

        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_analyze_conflict "
                "replacement => %s" % (confl_replacement,))

        if confl_replacement is not None:
            graph.add(pkg_match, set([confl_replacement]))
            stack.push(confl_replacement)
            return

        # conflict is installed, we need to record it
        conflicts.add(c_package_id)

    def __generate_dependency_tree_resolve_conditional(self, unsatisfied_deps,
        selected_matches, selected_matches_cache):

        # expand list of package dependencies evaluating conditionals
        unsatisfied_deps = entropy.dep.expand_dependencies(unsatisfied_deps,
            [self.open_repository(repo_id) for repo_id in self._enabled_repos],
            selected_matches = selected_matches)

        def _simple_or_dep_map(dependency):
            # simple or dependency format support.
            if dependency.endswith(etpConst['entropyordepquestion']):
                deps = dependency[:-1].split(etpConst['entropyordepsep'])
                return self._resolve_or_dependencies(
                    deps, selected_matches,
                    _selected_matches_cache=selected_matches_cache)
            return dependency

        return set(map(_simple_or_dep_map, unsatisfied_deps))

    DISABLE_REWRITE_SELECTED_MATCHES = os.getenv(
        "ETP_DISABLE_REWRITE_SELECTED_MATCHES")

    def __rewrite_selected_matches(self, unsatisfied_deps, selected_matches):
        """
        This function scans the unsatisfied dependencies and tries to rewrite
        them if they are in the "selected_matches" set. This set contains the
        unordered list of package matches requested by the user. We should
        respect them as much as we can.

        See Sabayon bug #4475. This is a fixup code and hopefully runs in
        O(len(unsatisfied_deps)) thanks to memoization.
        """
        if (not selected_matches) or self.DISABLE_REWRITE_SELECTED_MATCHES:
            return unsatisfied_deps

        def _in_selected_matches(dep):
            matches, m_rc = self.atom_match(
                dep, multi_match = True, multi_repo = True)
            common = selected_matches & matches
            if common:
                # we deterministically pick the first entry
                # because the other ones will be pulled in anyway.
                for package_id, repository_id in sorted(common):
                    repo = self.open_repository(repository_id)
                    keyslot = repo.retrieveKeySlotAggregated(package_id)
                    if keyslot is None:
                        continue

                    const_debug_write(
                        __name__,
                        "__rewrite_selected_matches, rewritten: "
                        "%s to %s" % (dep, keyslot,))
                    return keyslot
            return dep

        return set(map(_in_selected_matches, unsatisfied_deps))

    DISABLE_AUTOCONFLICT = os.getenv("ETP_DISABLE_AUTOCONFLICT")

    def __generate_dependency_tree_analyze_deplist(self, pkg_match, repo_db,
        stack, graph, deps_not_found, conflicts, unsat_cache, relaxed_deps,
        build_deps, deep_deps, empty_deps, recursive, selected_matches,
        elements_cache, selected_matches_cache):

        pkg_id, repo_id = pkg_match
        # exclude build dependencies
        excluded_deptypes = [etpConst['dependency_type_ids']['pdepend_id']]
        if not build_deps:
            excluded_deptypes += [etpConst['dependency_type_ids']['bdepend_id']]

        myundeps = repo_db.retrieveDependenciesList(pkg_id,
            exclude_deptypes = excluded_deptypes,
            resolve_conditional_deps = False)

        # this solves some conditional dependencies using selected_matches.
        # also expands all the conditional dependencies using
        # entropy.dep.expand_dependencies()
        if const_debug_enabled():
            atom = repo_db.retrieveAtom(pkg_id)
            const_debug_write(__name__,
                "__generate_dependency_tree_analyze_deplist conditionals "
                "%s, %s, current dependency list => %s" % (
                    pkg_match, atom, myundeps,))
        myundeps = self.__generate_dependency_tree_resolve_conditional(
            myundeps, selected_matches, selected_matches_cache)
        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_analyze_deplist conditionals, "
                "new dependency list => %s" % (myundeps,))

        my_conflicts = set([x for x in myundeps if x.startswith("!")])

        auto_conflicts = self._generate_dependency_inverse_conflicts(
            pkg_match)
        my_conflicts |= auto_conflicts

        # check conflicts
        if my_conflicts:
            myundeps -= my_conflicts
            for my_conflict in my_conflicts:
                self.__generate_dependency_tree_analyze_conflict(
                    pkg_match, my_conflict,
                    conflicts, stack, graph, deep_deps)

        if const_debug_enabled():
            const_debug_write(__name__,
                "__generate_dependency_tree_analyze_deplist filtered "
                "dependency list => %s" % (myundeps,))

        if not empty_deps:

            myundeps = self._get_unsatisfied_dependencies(myundeps,
                deep_deps = deep_deps, relaxed_deps = relaxed_deps,
                depcache = unsat_cache)
            myundeps = self.__rewrite_selected_matches(
                myundeps, selected_matches)

            if const_debug_enabled():
                const_debug_write(__name__,
                    "__generate_dependency_tree_analyze_deplist " + \
                    "filtered UNSATISFIED dependencies => %s" % (myundeps,))

        def _post_deps_filter(post_dep):
            pkg_matches, rc = self.atom_match(post_dep,
                multi_match = True, multi_repo = True)
            commons = pkg_matches & elements_cache
            if commons:
                return False
            return True

        post_deps = []
        # PDEPENDs support
        myundeps, post_deps = self._lookup_post_dependencies(repo_db,
            pkg_id, myundeps)
        if (not empty_deps) and post_deps:
            # validate post dependencies, make them not contain matches already
            # pulled in, this cuts potential circular dependencies:
            # nvidia-drivers pulls in nvidia-userspace which has nvidia-drivers
            # listed as post-dependency
            post_deps = list(filter(_post_deps_filter, post_deps))
            post_deps = self.__generate_dependency_tree_resolve_conditional(
                post_deps, selected_matches, selected_matches_cache)
            post_deps = self._get_unsatisfied_dependencies(post_deps,
                deep_deps = deep_deps, relaxed_deps = relaxed_deps,
                depcache = unsat_cache)

        if const_debug_enabled():
            const_debug_write(__name__,
                "generate_dependency_tree POST dependencies ADDED => %s" % (
                    post_deps,))

        deps = set()
        for unsat_dep in myundeps:
            match_pkg_id, match_repo_id = self.atom_match(unsat_dep)
            if match_pkg_id == -1:
                # dependency not found !
                deps_not_found.add(unsat_dep)
                continue

            deps.add((match_pkg_id, match_repo_id))
            if recursive:
                # push to stack only if recursive
                stack.push((match_pkg_id, match_repo_id))

        post_deps_matches = set()
        for post_dep in post_deps:
            match_pkg_id, match_repo_id = self.atom_match(post_dep)
            # if post dependency is not found, we can happily ignore the fact
            if match_pkg_id == -1:
                # not adding to deps_not_found
                continue
            post_deps_matches.add((match_pkg_id, match_repo_id))
            if recursive:
                # push to stack only if recursive
                stack.push((match_pkg_id, match_repo_id))

        return deps, post_deps_matches

    def _generate_dependency_inverse_conflicts(self, package_match,
        just_id = False):
        """
        Given a package match, generate a list of conflicts by looking
        at the installed packages repository and its "!<dep>" dependency
        strings. This is useful because sometimes, packages miss conflict
        information on both sides. A hates B, but B doesn't say anything about
        A, A is the installed package.

        @param package_match: an Entropy package match
        @type package_match: tuple
        @keyword just_id: if True, return installed package ids instead of
            conflict dependency strings
        @type just_id: bool
        @return: a list (set) of conflicts
        @rtype: set
        """
        conflicts = set()
        # XXX Experimental feature, make possible to override it XXX
        if self.DISABLE_AUTOCONFLICT is not None:
            return conflicts

        pkg_id, repository_id = package_match
        repo_db = self.open_repository(repository_id)

        pkg_key = entropy.dep.dep_getkey(repo_db.retrieveAtom(pkg_id))
        potential_conflicts = self.installed_repository().searchConflict(
            pkg_key)

        for dep_package_id, conflict_str in potential_conflicts:
            confl_pkg_ids, confl_pkg_rc = repo_db.atomMatch(
                conflict_str, multiMatch = True)

            # is this really me? ignore the rc, just go straight to ids
            if pkg_id not in confl_pkg_ids:
                continue

            if just_id:
                conflicts.add(dep_package_id)
                break
            else:
                # yes, this is really me!
                dep_key_slot = self.installed_repository().retrieveKeySlot(
                    dep_package_id)
                if dep_key_slot is not None:
                    dep_key, dep_slot = dep_key_slot
                    dep_confl_str = "!%s%s%s" % (dep_key,
                        etpConst['entropyslotprefix'], dep_slot)
                    conflicts.add(dep_confl_str)
                    if const_debug_enabled():
                        const_debug_write(__name__,
                            "_generate_dependency_inverse_conflict "
                            "adding auto-conflict => %s, conflict_str was: %s" % (
                                dep_confl_str, conflict_str,))
                    break

        return conflicts

    def _generate_dependency_tree(self, matched_atom, graph,
        empty_deps = False, relaxed_deps = False, build_deps = False,
        only_deps = False, deep_deps = False, unsatisfied_deps_cache = None,
        elements_cache = None, post_deps_cache = None, recursive = True,
        selected_matches = None, selected_matches_cache = None, ldpaths = None):

        pkg_id, pkg_repo = matched_atom
        if (pkg_id == -1) or (pkg_repo == 1):
            raise AttributeError("invalid matched_atom: %s" % (matched_atom,))

        # this cache avoids adding the same element to graph
        # several times, when it is supposed to be already handled
        if elements_cache is None:
            elements_cache = set()
        if unsatisfied_deps_cache is None:
            unsatisfied_deps_cache = {}
        if post_deps_cache is None:
            post_deps_cache = {}

        if selected_matches is None:
            selected_matches = set()

        if ldpaths is None:
            ldpaths = frozenset()

        deps_not_found = set()
        conflicts = set()
        first_element = True

        stack = Lifo()
        stack.push(matched_atom)
        inverse_dep_stack_cache = {}
        graph_cache = set()

        while stack.is_filled():

            # get item from stack
            pkg_id, repo_id = stack.pop()
            pkg_match = (pkg_id, repo_id)

            if pkg_match in elements_cache:
                # already pushed to graph
                continue
            elements_cache.add(pkg_match)

            # now we are ready to open repository
            repo_db = self.open_repository(repo_id)

            ## first element checks
            add_to_graph = True
            if first_element:
                first_element = False

                if only_deps:
                    # in this case, we only add pkg_match to
                    # the graph if it's a dependency of something else
                    # also, with only_deps we should ignore if pkg is masked
                    add_to_graph = False
                else:
                    # we need to check if first element is masked because of
                    # course, we don't trust function caller.
                    mask_pkg_id, idreason = repo_db.maskFilter(pkg_id)
                    if mask_pkg_id == -1:
                        mask_atom = repo_db.retrieveAtom(pkg_id)
                        if mask_atom is None:
                            mask_atom = 'N/A' # wtf?
                        deps_not_found.add(mask_atom)
                        continue # back to while

            # search inside installed packages repository if there's something
            # in the same slot, if so, do some extra checks first.
            try:
                pkg_key, pkg_slot = repo_db.retrieveKeySlot(pkg_id)
            except TypeError:
                deps_not_found.add("unknown_%s_%s" % (pkg_id, repo_id,))
                continue
            cm_package_id, cm_result = self.installed_repository().atomMatch(
                pkg_key, matchSlot = pkg_slot)

            if cm_package_id != -1:
                # this method does:
                # - broken libraries detection
                # - inverse dependencies check
                children_matches, after_pkgs, before_pkgs, inverse_deps = \
                    self.__generate_dependency_tree_inst_hooks(
                        (cm_package_id, cm_result), pkg_match,
                        build_deps, elements_cache, ldpaths)
                # this is fine this way, these are strong inverse deps
                # and their order is already written in stone
                for inv_match in inverse_deps:
                    stack.push(inv_match)
                # children_matches are always inverse dependencies, and
                # must be stated as such, once they eventually end into
                # the graph (see below)
                for child_match in children_matches:
                    obj = inverse_dep_stack_cache.setdefault(child_match, set())
                    obj.add(pkg_match)
                    stack.push(child_match)

                # these are misc and cannot be differentiated
                for br_match in after_pkgs: # don't care about the position
                    if br_match in children_matches:
                        # already pushed and inverse dep
                        continue
                    stack.push(br_match)
                for br_match in before_pkgs:
                    # enforce dependency explicitly?
                    if br_match in children_matches:
                        # already pushed and inverse dep
                        continue
                    stack.push(br_match)
                if before_pkgs:
                    graph.add(pkg_match, before_pkgs)

            dep_matches, post_dep_matches = \
                self.__generate_dependency_tree_analyze_deplist(
                    pkg_match, repo_db, stack, graph, deps_not_found,
                    conflicts, unsatisfied_deps_cache, relaxed_deps,
                    build_deps, deep_deps, empty_deps, recursive,
                    selected_matches, elements_cache, selected_matches_cache)

            if post_dep_matches:
                obj = post_deps_cache.setdefault(pkg_match, set())
                obj.update(post_dep_matches)

            # eventually add our package match to depgraph
            if add_to_graph:
                graph.add(pkg_match, dep_matches)
            graph_cache.add(pkg_match)
            pkg_match_set = set([pkg_match])
            for post_dep_match in post_dep_matches:
                graph.add(post_dep_match, pkg_match_set)

        # add cached "inverse of inverse (==direct)" deps, if available
        for pkg_match in graph_cache:
            inv_deps = inverse_dep_stack_cache.get(pkg_match)
            if inv_deps:
                graph.add(pkg_match, inv_deps)
                if const_debug_enabled():
                    atom = self.open_repository(pkg_match[1]).retrieveAtom(
                        pkg_match[0])
                    wanted_deps = [self.open_repository(y).retrieveAtom(x) \
                        for x, y in inv_deps]
                    const_debug_write(__name__,
                        "_generate_dependency_tree(revdep cache) %s wants %s" % (
                            purple(atom), blue(" ".join(wanted_deps)),))

        graph_cache.clear()
        inverse_dep_stack_cache.clear()
        # if deps not found, we won't do dep-sorting at all
        if deps_not_found:
            #del stack
            raise DependenciesNotFound(deps_not_found)

        return graph, conflicts

    def _lookup_post_dependencies(self, repo_db, repo_package_id,
        unsatisfied_deps):

        post_deps = repo_db.retrievePostDependencies(repo_package_id)

        if const_debug_enabled():
            const_debug_write(__name__,
                "_lookup_post_dependencies POST dependencies for %s => %s" % (
                    (repo_package_id, repo_db.repository_id()), post_deps,))

        if post_deps:

            # do some filtering
            # it is correct to not use my_dep_filter here
            unsatisfied_deps = [x for x in unsatisfied_deps \
                if x not in post_deps]

        return unsatisfied_deps, post_deps

    def _lookup_system_mask_repository_deps(self):

        client_settings = self.ClientSettings()
        data = client_settings['repositories']['system_mask']

        if not data:
            return []
        mydata = []
        cached_items = set()
        for atom in data:
            mymatch = self.atom_match(atom)
            if mymatch[0] == -1: # ignore missing ones intentionally
                continue
            if mymatch in cached_items:
                continue
            if mymatch not in mydata:
                # check if not found
                myaction = self._get_package_action(mymatch)
                # only if the package is not installed
                if myaction == 1:
                    mydata.append(mymatch)
            cached_items.add(mymatch)
        return mydata

    def _lookup_conflict_replacement(self, conflict_atom, client_package_id,
        deep_deps):

        if entropy.dep.isjustname(conflict_atom):
            return

        conflict_match = self.atom_match(conflict_atom)
        mykey, myslot = self.installed_repository().retrieveKeySlot(
            client_package_id)
        new_match = self.atom_match(mykey, match_slot = myslot)
        if (conflict_match == new_match) or (new_match[1] == 1):
            return

        action = self._get_package_action(
            new_match, installed_package_id = client_package_id)
        if (action == 0) and (not deep_deps):
            return

        return new_match

    def _lookup_inverse_dependencies(self, match, installed_package_id,
        build_deps, elements_cache):
        """
        Lookup inverse dependencies and return them as a list of package
        matches.
        """
        cmpstat = self._get_package_action(
            match, installed_package_id = installed_package_id)
        if cmpstat == 0:
            return set()

        keyslots_cache = set()
        match_cache = {}
        results = set()
        inst_repo = self.installed_repository()

        excluded_dep_types = (
            etpConst['dependency_type_ids']['bdepend_id'],)
        if build_deps:
            excluded_dep_types = None

        reverse_deps = inst_repo.retrieveReverseDependencies(
            installed_package_id, exclude_deptypes = excluded_dep_types)

        for inst_package_id in reverse_deps:

            key_slot = inst_repo.retrieveKeySlotAggregated(
                inst_package_id)
            if key_slot is None:
                continue
            if key_slot in keyslots_cache:
                continue

            keyslots_cache.add(key_slot)

            # grab its deps
            mydeps = inst_repo.retrieveDependencies(
                inst_package_id, exclude_deptypes = excluded_dep_types)
            found = False

            for mydep in mydeps:
                mymatch = match_cache.get(mydep, 0)
                if mymatch == 0:
                    mymatch = self.atom_match(mydep)
                    match_cache[mydep] = mymatch
                if mymatch == match:
                    found = True
                    break

            if not found:
                mymatch = self.atom_match(key_slot)
                if mymatch[0] == -1:
                    continue
                cmpstat = self._get_package_action(
                    mymatch, installed_package_id = inst_package_id)
                if cmpstat == 0:
                    continue

                # this will take a life, also check if we haven't already
                # pulled this match in.
                # This happens because the reverse dependency string is
                # too much generic and could pull in conflicting packages.
                # NOTE: this is a hack and real weighted graph
                # would be required
                mymatches, rc = self.atom_match(
                    key_slot, multi_match = True,
                    multi_repo = True)
                got_it = mymatches & elements_cache
                if got_it:
                    if const_debug_enabled():
                        atom = self.open_repository(
                            mymatch[1]).retrieveAtom(mymatch[0])
                        const_debug_write(__name__,
                            "_lookup_inverse_dependencies, ignoring "
                            "%s, %s -- because already pulled in as: %s" % (
                                atom, mymatch, got_it,))
                    # yeah, pulled in, ignore
                    continue

                if const_debug_enabled():
                    atom = self.open_repository(mymatch[1]).retrieveAtom(
                        mymatch[0])
                    const_debug_write(__name__,
                        "_lookup_inverse_dependencies, "
                        "adding inverse dep => %s" % (atom,))
                results.add(mymatch)

        return results

    def _lookup_library_drops(self, match, installed_package_id):
        """
        Look for packages that would break if package match
        at "match" would be installed and the current version
        at "installed_package_id" replaced.
        This method looks at what a package provides in terms of
        libraries.

        @param match: the package match that would be installed
        @type match: tuple
        @param installed_package_id: the installed package identifier
            that would be replaced
        @type installed_package_id: int
        @return: package matches that should be updated as well
        @rtype: set
        """
        match_package_id, match_repo_id = match

        inst_repo = self.installed_repository()
        match_repo = self.open_repository(match_repo_id)
        repo_libs = match_repo.retrieveProvidedLibraries(match_package_id)

        # compute a list of sonames that are going to be dropped
        client_libs = inst_repo.retrieveProvidedLibraries(
            installed_package_id)
        removed_libs = [x for x in client_libs if x not in repo_libs]

        if not removed_libs:
            if const_debug_enabled():
                inst_atom = inst_repo.retrieveAtom(installed_package_id)
                atom = match_repo.retrieveAtom(match_package_id)
                const_debug_write(
                    __name__,
                    "_lookup_library_drops, "
                    "no libraries would be removed for: "
                    "[%s] and [%s] (%s -> %s)" % (
                        match, installed_package_id,
                        atom, inst_atom))
            return set()

        # look for installed packages needing these to-be-dropped
        # sonames
        inst_package_ids = set()
        for lib, path, elf in removed_libs:
            inst_package_ids |= inst_repo.searchNeeded(lib,
                elfclass = elf)
        if not inst_package_ids:
            return set()

        # this is used to filter out "match" from broken_matches
        # in the for loop below
        match_keyslot = None

        broken_matches = set()
        for inst_package_id in inst_package_ids:

            # is this package available in repos?
            # maybe it's been dropped upstream...
            keyslot = inst_repo.retrieveKeySlotAggregated(
                inst_package_id)
            if keyslot is None:
                continue
            package_id, repository_id = self.atom_match(keyslot)
            if package_id == -1:
                continue

            # do we already have the latest version installed?
            cmpstat = self._get_package_action(
                (package_id, repository_id),
                installed_package_id = inst_package_id)
            if cmpstat == 0:
                const_debug_write(
                    __name__,
                    "_lookup_library_drops, "
                    "a package would break but no updates are available. "
                    "(%s, %s)" % (keyslot, match,))
                continue

            # not against myself. it can happen...
            # this is faster than key+slot lookup
            if (package_id, repository_id) == match:
                const_debug_write(
                    __name__,
                    "_lookup_library_drops, not adding myself. "
                    "match %s is the same." % (match,))
                continue

            # not against the same key+slot
            if match_keyslot is None:
                match_keyslot = match_repo.retrieveKeySlotAggregated(
                    match_package_id)
                # assuming that a repeatedly None value does not hurt
            if keyslot == match_keyslot:
                const_debug_write(
                    __name__,
                    "_lookup_library_drops, not adding myself. "
                    "keyslot %s is the same for %s and %s" % (
                        keyslot, match,
                        (package_id, repository_id),)
                )
                continue

            if const_debug_enabled():
                atom = self.open_repository(repository_id).retrieveAtom(
                    package_id)
                const_debug_write(__name__,
                    "_lookup_library_drops, "
                    "adding broken library link package => %s, pulling: %s" % (
                        keyslot, atom,))

            broken_matches.add((package_id, repository_id))

        if const_debug_enabled() and broken_matches:
            const_debug_write(__name__,
                "_lookup_library_drops, "
                "total removed libs for iteration: %s" % (removed_libs,))

        return broken_matches

1806 - def __get_library_breakages(self, package_match, installed_package_id):
1807 """ 1808 Get a list of library dependencies (at ELF metadata level) 1809 that have been bumped for the given package. 1810 The newly added ones, are considered a bump. In this way, whether 1811 they are already present in the package dependencies or not, a 1812 proper relation will be inserted on the dependency graph. 1813 It can happen that a library may be considered satisfied 1814 as package dependency but not on the current system state. 1815 """ 1816 package_id, repository_id = package_match 1817 inst_repo = self.installed_repository() 1818 repo = self.open_repository(repository_id) 1819 1820 # Ignore user library path and user library soname, not relevant. 1821 repo_needed = { 1822 (soname, elf, rpath) for _usr_path, _usr_soname, soname, elf, rpath 1823 in repo.retrieveNeededLibraries(package_id)} 1824 installed_needed = { 1825 (soname, elf, rpath) for _usr_path, _usr_soname, soname, elf, rpath 1826 in inst_repo.retrieveNeededLibraries(installed_package_id)} 1827 1828 # intersect the two dicts and find the libraries that 1829 # have not changed. We assume that a pkg cannot link 1830 # the same SONAME with two different elf classes. 1831 # but that is what retrieveNeededLibraries() assumes as well 1832 common_libs = repo_needed & installed_needed 1833 for lib_data in common_libs: 1834 repo_needed.discard(lib_data) 1835 installed_needed.discard(lib_data) 1836 1837 soname_ext = const_convert_to_unicode(".so") 1838 # x[0] is soname. 1839 repo_split = {x: tuple(x[0].split(soname_ext)) for x in repo_needed} 1840 installed_split = { 1841 x: tuple(x[0].split(soname_ext)) for x in installed_needed} 1842 1843 inst_lib_dumps = set() # was installed_side 1844 repo_lib_dumps = set() # was repo_side 1845 # ^^ library dumps using repository NEEDED metadata 1846 1847 for lib_data, lib_name in installed_split.items(): 1848 lib, elfclass, rpath = lib_data 1849 if lib_name in repo_split.values(): 1850 # (library name, elf class) 1851 inst_lib_dumps.add((lib, elfclass, rpath)) 1852 1853 for lib_data, lib_name in repo_split.items(): 1854 lib, elfclass, rpath = lib_data 1855 if lib_name in installed_split.values(): 1856 repo_lib_dumps.add((lib, elfclass, rpath)) 1857 1858 # now consider the case in where we have new libraries 1859 # that are not in the installed libraries set. 1860 new_libraries = set(repo_split.values()) - set(installed_split.values()) 1861 if new_libraries: 1862 1863 # Reverse repo_split in order to generate a mapping 1864 # between a library name and its set of full libraries 1865 reversed_repo_split = {} 1866 for lib_data, lib_name in repo_split.items(): 1867 lib, elfclass, rpath = lib_data 1868 obj = reversed_repo_split.setdefault(lib_name, set()) 1869 obj.add((lib, elfclass, rpath)) 1870 1871 for lib_name in new_libraries: 1872 repo_lib_dumps |= reversed_repo_split[lib_name] 1873 1874 return inst_lib_dumps, repo_lib_dumps
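Reduced to its essentials, the diff above discards NEEDED entries common to both sides and keeps what changed; the repository-side leftovers are the bumped library requirements that must be propagated through the dependency graph. A toy sketch of that reduction, with invented entries:

# (soname, elfclass, rpath) entries, shaped like retrieveNeededLibraries() output
installed_needed = {("libreadline.so.6", 2, ""), ("libz.so.1", 2, "")}
repo_needed = {("libreadline.so.7", 2, ""), ("libz.so.1", 2, "")}

# entries present on both sides carry no signal, drop them
common = installed_needed & repo_needed
installed_only = installed_needed - common
repo_only = repo_needed - common

print(sorted(x[0] for x in installed_only))   # ['libreadline.so.6']
print(sorted(x[0] for x in repo_only))        # ['libreadline.so.7'] -- the bump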
1875
1876 - def _lookup_library_breakages(self, match, installed_package_id, ldpaths):
1877 """ 1878 Lookup packages that need to be bumped because "match" is being 1879 installed and "installed_package_id" removed. 1880 1881 This method uses ELF NEEDED package metadata in order to accomplish 1882 this task. 1883 """ 1884 inst_repo = self.installed_repository() 1885 cache_key = None 1886 1887 if self.xcache: 1888 cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|r8" % ( 1889 match, 1890 installed_package_id, 1891 inst_repo.checksum(), 1892 self.repositories_checksum(), 1893 self._settings.packages_configuration_hash(), 1894 self._settings_client_plugin.packages_configuration_hash(), 1895 ";".join(sorted(self._settings['repositories']['available'])), 1896 ";".join(sorted(ldpaths)), 1897 ) 1898 sha = hashlib.sha1() 1899 sha.update(const_convert_to_rawstring(cache_s)) 1900 1901 cache_key = "library_breakage/%s" % (sha.hexdigest(),) 1902 1903 cached = self._cacher.pop(cache_key) 1904 if cached is not None: 1905 return cached 1906 1907 client_side, repo_side = self.__get_library_breakages( 1908 match, installed_package_id) 1909 1910 matches = self._lookup_library_breakages_available( 1911 match, repo_side, ldpaths) 1912 installed_matches = self._lookup_library_breakages_installed( 1913 installed_package_id, client_side) 1914 1915 # filter out myself 1916 installed_matches.discard(match) 1917 # drop items in repo_patches from installed_matches 1918 installed_matches -= matches 1919 1920 if self.xcache: 1921 self._cacher.push(cache_key, (installed_matches, matches)) 1922 1923 return installed_matches, matches
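The cache key above is simply a SHA1 digest of a version-tagged state string: whenever any input (repository checksums, configuration hashes, linker paths) changes, the digest changes and stale entries stop being hit. A reduced sketch of that pattern; all state values are invented.

import hashlib

def make_cache_key(prefix, *state_parts):
    # the trailing token ("r8" above) is bumped when the payload layout changes
    cache_s = "|".join(str(x) for x in state_parts) + "|r8"
    sha = hashlib.sha1()
    sha.update(cache_s.encode("utf-8"))
    return "%s/%s" % (prefix, sha.hexdigest())

key = make_cache_key(
    "library_breakage",
    (1234, "some-repo"),        # package match (invented)
    5678,                       # installed package id (invented)
    "installed-repo-checksum",  # stand-in for inst_repo.checksum()
    "/lib64;/usr/lib64",        # sorted linker paths
)
print(key)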
1924
1925 - def _lookup_library_breakages_available(self, package_match, 1926 bumped_needed_libs, 1927 ldpaths):
1928 """ 1929 Generate a list of package matches that should be bumped 1930 if the given libraries were installed. 1931 The returned list is composed by packages which are providing 1932 the new libraries. 1933 1934 We assume that a repository is in a consistent state and 1935 packages requiring libfoo.so.1 have been dropped alltogether. 1936 """ 1937 package_id, repository_id = package_match 1938 excluded_dep_types = ( 1939 etpConst['dependency_type_ids']['bdepend_id'],) 1940 1941 matched_deps = set() 1942 virtual_cat = EntropyRepositoryBase.VIRTUAL_META_PACKAGE_CATEGORY 1943 1944 repo = self.open_repository(repository_id) 1945 dependencies = repo.retrieveDependencies( 1946 package_id, exclude_deptypes = excluded_dep_types) 1947 for dependency in dependencies: 1948 depmatch = self.atom_match(dependency) 1949 if depmatch[0] == -1: 1950 continue 1951 1952 # Properly handle virtual packages 1953 dep_pkg_id, dep_repo = depmatch 1954 dep_db = self.open_repository(dep_repo) 1955 depcat = dep_db.retrieveCategory(dep_pkg_id) 1956 1957 if depcat == virtual_cat: 1958 # in this case, we must go down one level in order to catch 1959 # the real, underlying dependencies. Otherwise, the 1960 # condition "if x in matched_deps" below will fail. 1961 # Scenario: dev-libs/glib depends against virtual/libffi. 1962 # virtual/libffi points to dev-libs/libffi which got a 1963 # soname bump. Buggy outcome: dev-libs/libffi is not 1964 # pulled in as dependency when it should be. 1965 virtual_dependencies = dep_db.retrieveDependencies( 1966 dep_pkg_id, exclude_deptypes = excluded_dep_types) 1967 for virtual_dependency in virtual_dependencies: 1968 virtualmatch = self.atom_match(virtual_dependency) 1969 if virtualmatch[0] == -1: 1970 continue 1971 matched_deps.add(virtualmatch) 1972 1973 matched_deps.add(depmatch) 1974 1975 found_matches = set() 1976 keyslot = repo.retrieveKeySlotAggregated(package_id) 1977 for needed, elfclass, rpath in bumped_needed_libs: 1978 1979 package_ldpaths = ldpaths | set(entropy.tools.parse_rpath(rpath)) 1980 1981 found = False 1982 for s_repo_id in self._settings['repositories']['order']: 1983 1984 s_repo = self.open_repository(s_repo_id) 1985 solved_needed = s_repo.resolveNeeded( 1986 needed, elfclass = elfclass, extended = True) 1987 1988 # Filter out resolved needed that are not in package LDPATH. 1989 solved_needed = filter( 1990 lambda x: os.path.dirname(x[1]) in package_ldpaths, 1991 solved_needed) 1992 1993 for repo_pkg_id, path in solved_needed: 1994 repo_pkg_match = (repo_pkg_id, s_repo_id) 1995 1996 if package_match == repo_pkg_match: 1997 # myself? no! 1998 continue 1999 2000 if repo_pkg_match not in matched_deps: 2001 # not a matched dep! 2002 continue 2003 2004 s_keyslot = s_repo.retrieveKeySlotAggregated( 2005 repo_pkg_id) 2006 if s_keyslot == keyslot: 2007 # do not pull anything inside the same keyslot! 
2008 continue 2009 2010 found_matches.add(repo_pkg_match) 2011 found = True 2012 break 2013 2014 if found: 2015 break 2016 2017 if not found: 2018 # TODO: make it a real warning 2019 const_debug_write( 2020 __name__, 2021 "_lookup_library_breakages_available, HUGE QA BUG, " 2022 "no (%s, %s) needed dependency for %s" % ( 2023 needed, elfclass, package_match,)) 2024 2025 matches = set() 2026 for _package_id, _repository_id in found_matches: 2027 _match = _package_id, _repository_id 2028 2029 cmpstat = self._get_package_action(_match) 2030 if cmpstat == 0: 2031 continue 2032 2033 if const_debug_enabled(): 2034 atom = self.open_repository( 2035 _repository_id).retrieveAtom(_package_id) 2036 const_debug_write( 2037 __name__, 2038 "_lookup_library_breakages_available, " 2039 "adding repo atom => %s" % (atom,)) 2040 2041 matches.add(_match) 2042 2043 return matches
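The LDPATH filter in the loop above keeps only providers whose resolved library file lives in a directory the dynamic linker would actually search for this package (system linker paths plus the package RPATH). A small sketch of that filter with invented paths:

import os

# hypothetical resolveNeeded()-style result: (package_id, resolved library path)
solved_needed = [
    (101, "/usr/lib64/libpng.so.16"),
    (202, "/opt/weird/libpng.so.16"),
]

ldpaths = {"/lib64", "/usr/lib64"}
rpath_dirs = {"/usr/lib64/myapp"}        # e.g. parsed from the package RPATH
package_ldpaths = ldpaths | rpath_dirs

usable = [(pkg_id, path) for pkg_id, path in solved_needed
          if os.path.dirname(path) in package_ldpaths]

print(usable)   # [(101, '/usr/lib64/libpng.so.16')] -- /opt/weird is ignored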
2044
2045 - def _lookup_library_breakages_installed(self, 2046 installed_package_id, bumped_needed_libs):
2047 """ 2048 Generate a list of package matches that should be bumped 2049 if the given libraries were removed. 2050 2051 For instance: a package needs libfoo.so.2 while 2052 its installed version needs libfoo.so.1. This method will 2053 produce a list of updatable package matches that were 2054 relying on libfoo.so.1. 2055 We assume that a repository is in a consistent state and 2056 packages requiring libfoo.so.1 have been dropped alltogether. 2057 """ 2058 inst_repo = self.installed_repository() 2059 2060 # all the packages in bumped_needed_libs should be 2061 # pulled in and updated 2062 installed_package_ids = set() 2063 for needed, elfclass, rpath in bumped_needed_libs: 2064 found_neededs = inst_repo.searchNeeded( 2065 needed, elfclass = elfclass) 2066 installed_package_ids |= found_neededs 2067 # drop myself 2068 installed_package_ids.discard(installed_package_id) 2069 2070 inst_keyslots = {inst_repo.retrieveKeySlotAggregated(x): x 2071 for x in installed_package_ids} 2072 inst_keyslots.pop(None, None) 2073 2074 # these can be pulled in after 2075 installed_matches = set() 2076 for keyslot, inst_package_id in inst_keyslots.items(): 2077 2078 package_id, repository_id = self.atom_match(keyslot) 2079 if package_id == -1: 2080 continue 2081 pkg_match = package_id, repository_id 2082 2083 cmpstat = self._get_package_action( 2084 pkg_match, installed_package_id = inst_package_id) 2085 if cmpstat == 0: 2086 continue 2087 2088 if const_debug_enabled(): 2089 atom = self.open_repository( 2090 repository_id).retrieveAtom(package_id) 2091 const_debug_write( 2092 __name__, 2093 "_lookup_library_breakages, " 2094 "adding client atom => %s (%s)" % (atom, pkg_match)) 2095 2096 installed_matches.add(pkg_match) 2097 2098 return installed_matches
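The installed-side pass boils down to: collect every installed consumer of a bumped soname, then collapse the result by key:slot so each slot is evaluated once. A tiny sketch of that collapse, with invented consumers:

# installed consumers of the bumped sonames, paired with their "key:slot" scope
consumers = [(5, "dev-libs/foo:0"), (9, "dev-libs/foo:0"), (12, "app-misc/bar:0")]

inst_keyslots = {}
for pkg_id, keyslot in consumers:
    inst_keyslots[keyslot] = pkg_id      # one representative package per slot
inst_keyslots.pop(None, None)            # drop entries that failed to resolve

print(sorted(inst_keyslots))   # ['app-misc/bar:0', 'dev-libs/foo:0']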
2099 2100 DISABLE_ASAP_SCHEDULING = os.getenv("ETP_DISABLE_ASAP_SCHEDULING") 2101
2102 - def __get_required_packages_asap_scheduling(self, deptree, adj_map, 2103 post_deps_cache):
2104          """ 2105          Rewrite the dependency tree generated by Graph in order to have 2106          post-dependencies scheduled as soon as possible. 2107          """ 2108          def _shift_deptree(): 2109              for lvl in sorted(deptree.keys(), reverse = True): 2110                  deptree[lvl+1] = deptree[lvl] 2111              min_lvl = min(deptree.keys()) 2112              deptree[min_lvl] = tuple()
2113 2114 def _make_room(xlevel): 2115 for lvl in sorted(deptree.keys(), reverse = True): 2116 if lvl >= xlevel: 2117 deptree[lvl+1] = deptree[lvl] 2118 else: 2119 break 2120 deptree[xlevel] = tuple() 2121 2122 def _find_first_requiring(dep_match, start_level): 2123 # find the closest 2124 for lvl in sorted(deptree.keys(), reverse = True): 2125 if lvl >= start_level: 2126 continue 2127 deps = deptree[lvl] 2128 for dep in deps: 2129 if dep_match in adj_map[dep]: 2130 # found ! 2131 return dep 2132 2133 levels = {} 2134 def _setup_levels(): 2135 for lvl, deps in deptree.items(): 2136 for dep in deps: 2137 levels[dep] = lvl 2138 _setup_levels() 2139 2140 for pkg_match, post_deps in post_deps_cache.items(): 2141 for post_dep in post_deps: 2142 level = levels[post_dep] 2143 first_requiring = _find_first_requiring(post_dep, level) 2144 # NOTE: this heuristic only works if nothing is requiring 2145 # post dependency 2146 if first_requiring is None: 2147 # add it right after 2148 stick_level = levels[pkg_match] - 1 2149 if stick_level == 0: 2150 _shift_deptree() 2151 _setup_levels() 2152 stick_level = levels[pkg_match] - 1 2153 level = levels[post_dep] 2154 2155 # NOTE: this can leave holes in the tree 2156 # rewrite 2157 deptree[level] = tuple((x for x in deptree[level] \ 2158 if x != post_dep)) 2159 deptree[stick_level] = tuple((x for x in \ 2160 deptree[stick_level] if x != post_dep)) 2161 2162 if deptree[stick_level]: 2163 _make_room(stick_level) 2164 2165 deptree[stick_level] = (post_dep,) 2166 _setup_levels() 2167
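The helpers above rewrite a level-indexed tree in place: levels are shifted upwards to open a slot, and the post-dependency is moved into it. A self-contained sketch of the _make_room() idea on a toy tree; package names are invented.

# deptree maps level -> tuple of package matches; higher levels install earlier
deptree = {1: ("pkgA",), 2: ("pkgB",), 3: ("pkgC",)}

def make_room(xlevel):
    # shift every level >= xlevel up by one, freeing xlevel for a new entry
    for lvl in sorted(deptree.keys(), reverse=True):
        if lvl >= xlevel:
            deptree[lvl + 1] = deptree[lvl]
        else:
            break
    deptree[xlevel] = tuple()

make_room(2)
deptree[2] = ("post-dep",)

print(deptree)   # {1: ('pkgA',), 2: ('post-dep',), 3: ('pkgB',), 4: ('pkgC',)}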
2168 - def _get_required_packages(self, package_matches, empty_deps = False, 2169 deep_deps = False, relaxed_deps = False, build_deps = False, 2170 only_deps = False, quiet = False, recursive = True):
2171 2172 ldpaths = frozenset(entropy.tools.collect_linker_paths()) 2173 inst_repo = self.installed_repository() 2174 cache_key = None 2175 2176 if self.xcache: 2177 sha = hashlib.sha1() 2178 2179 cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|v8" % ( 2180 ";".join(["%s" % (x,) for x in sorted(package_matches)]), 2181 empty_deps, 2182 deep_deps, 2183 relaxed_deps, 2184 build_deps, 2185 only_deps, 2186 recursive, 2187 inst_repo.checksum(), 2188 self.repositories_checksum(), 2189 self._settings.packages_configuration_hash(), 2190 self._settings_client_plugin.packages_configuration_hash(), 2191 ";".join(sorted(self._settings['repositories']['available'])), 2192 # needed when users do bogus things like editing config files 2193 # manually (branch setting) 2194 self._settings['repositories']['branch'], 2195 ";".join(sorted(ldpaths))) 2196 2197 sha.update(const_convert_to_rawstring(cache_s)) 2198 cache_key = "deptree/dep_tree_%s" % (sha.hexdigest(),) 2199 2200 cached = self._cacher.pop(cache_key) 2201 if cached is not None: 2202 return cached 2203 2204 graph = Graph() 2205 deptree_conflicts = set() 2206 atomlen = len(package_matches) 2207 count = 0 2208 deps_not_found = set() 2209 2210 # check if there are repositories needing some mandatory packages 2211 forced_matches = self._lookup_system_mask_repository_deps() 2212 if forced_matches: 2213 # XXX: can cause conflicting packages to be listed together. 2214 # should verify if each package_match points to the same match? 2215 # we can have conflicting pkgs in repo or even across repos. 2216 if isinstance(package_matches, (tuple, list)): 2217 package_matches = forced_matches + [x for x in package_matches \ 2218 if x not in forced_matches] 2219 elif isinstance(package_matches, set): 2220 # we cannot do anything about the order here 2221 package_matches |= set(forced_matches) 2222 else: 2223 raise AttributeError("unsupported package_matches type") 2224 2225 sort_dep_text = _("Sorting dependencies") 2226 unsat_deps_cache = {} 2227 elements_cache = set() 2228 selected_matches_cache = {} 2229 selected_matches_set = set(package_matches) 2230 post_deps_cache = {} 2231 matchfilter = set() 2232 for matched_atom in package_matches: 2233 2234 pkg_id, pkg_repo = matched_atom 2235 if (pkg_id == -1) or (pkg_repo == 1): 2236 raise AttributeError("invalid matched_atom: %s" % ( 2237 matched_atom,)) 2238 2239 if const_debug_enabled(): 2240 const_debug_write(__name__, 2241 "_get_required_packages matched_atom => %s" % ( 2242 matched_atom,)) 2243 2244 if not quiet: 2245 count += 1 2246 if (count%10 == 0) or (count == atomlen) or (count == 1): 2247 self.output(sort_dep_text, importance = 0, 2248 level = "info", back = True, header = ":: ", 2249 footer = " ::", percent = True, 2250 count = (count, atomlen) 2251 ) 2252 2253 if matched_atom in matchfilter: 2254 continue 2255 2256 try: 2257 mygraph, conflicts = self._generate_dependency_tree( 2258 matched_atom, graph, empty_deps = empty_deps, 2259 deep_deps = deep_deps, relaxed_deps = relaxed_deps, 2260 build_deps = build_deps, only_deps = only_deps, 2261 elements_cache = elements_cache, 2262 unsatisfied_deps_cache = unsat_deps_cache, 2263 post_deps_cache = post_deps_cache, 2264 recursive = recursive, 2265 selected_matches = selected_matches_set, 2266 selected_matches_cache = selected_matches_cache, 2267 ldpaths = ldpaths 2268 ) 2269 except DependenciesNotFound as err: 2270 deps_not_found |= err.value 2271 conflicts = set() 2272 2273 deptree_conflicts |= conflicts 2274 2275 if deps_not_found: 2276 graph.destroy() 
2277 raise DependenciesNotFound(deps_not_found) 2278 2279 # get adjacency map before it gets destroyed by solve() 2280 adj_map = dict((x.item(), set(k.item() for k in y)) \ 2281 for x, y in graph.get_adjacency_map().items()) 2282 # solve depgraph and append conflicts 2283 deptree = graph.solve() 2284 if 0 in deptree: 2285 graph.destroy() 2286 raise KeyError("Graph contains a dep_level == 0") 2287 2288 # now check and report dependencies with colliding scope and in case, 2289 # raise DependenciesCollision, containing information about collisions 2290 # and what requires the packages involved 2291 _dup_deps_collisions = {} 2292 for _level, _deps in deptree.items(): 2293 for pkg_id, pkg_repo in _deps: 2294 keyslot = self.open_repository(pkg_repo).retrieveKeySlot(pkg_id) 2295 ks_set = _dup_deps_collisions.setdefault(keyslot, set()) 2296 ks_set.add((pkg_id, pkg_repo)) 2297 _colliding_deps = [x for x in _dup_deps_collisions.values() if \ 2298 len(x) > 1] 2299 2300 if _colliding_deps: 2301 _pkg_revdeps = {} 2302 for _pkg_matches in _colliding_deps: 2303 for _pkg_match in _pkg_matches: 2304 _pkg_node = graph.get_node(_pkg_match) 2305 _pkg_revdeps[_pkg_match] = [x.origin().item() for \ 2306 x in _pkg_node.arches() if \ 2307 not _pkg_node.is_arch_outgoing(x)] 2308 2309 graph.destroy() 2310 raise DependenciesCollision((_colliding_deps, _pkg_revdeps)) 2311 2312 # now use the ASAP herustic to anticipate post-dependencies 2313 # as much as possible 2314 if self.DISABLE_ASAP_SCHEDULING is None: 2315 # NOTE: this method can leave holes in deptree 2316 # they are removed right below 2317 self.__get_required_packages_asap_scheduling(deptree, 2318 adj_map, post_deps_cache) 2319 2320 # reverse ketys in deptree, this allows correct order (not inverse) 2321 level_count = 0 2322 reverse_tree = {} 2323 for key in sorted(deptree, reverse = True): 2324 level_count += 1 2325 # fixup possible holes 2326 if not deptree[key]: 2327 continue 2328 reverse_tree[level_count] = deptree[key] 2329 2330 graph.destroy() 2331 reverse_tree[0] = deptree_conflicts 2332 2333 if self.xcache: 2334 self._cacher.push(cache_key, reverse_tree) 2335 2336 return reverse_tree
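The collision check above groups every solved match by its key and slot and raises when a key:slot occurs more than once, since two such packages cannot be installed together. A compact sketch of the grouping, with invented matches:

# solved dependency matches paired with their "category/name:slot" scope
matches = [
    ((10, "repo-a"), "dev-libs/openssl:0"),
    ((11, "repo-b"), "dev-libs/openssl:0"),
    ((12, "repo-a"), "sys-libs/zlib:0"),
]

by_keyslot = {}
for match, keyslot in matches:
    by_keyslot.setdefault(keyslot, set()).add(match)

colliding = [sorted(group) for group in by_keyslot.values() if len(group) > 1]
print(colliding)   # [[(10, 'repo-a'), (11, 'repo-b')]] -- same key:slot twice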
2337
2338 - def __filter_depends_multimatched_atoms(self, package_id, repo_id, depends, 2339 filter_match_cache = None):
2340 2341 remove_depends = set() 2342 excluded_dep_types = (etpConst['dependency_type_ids']['bdepend_id'],) 2343 if filter_match_cache is None: 2344 filter_match_cache = {} 2345 # filter_match_cache dramatically improves performance 2346 2347 for d_package_id, d_repo_id in depends: 2348 2349 cached = filter_match_cache.get((d_package_id, d_repo_id)) 2350 if cached is None: 2351 2352 my_remove_depends = set() 2353 2354 dbconn = self.open_repository(d_repo_id) 2355 mydeps = dbconn.retrieveDependencies(d_package_id, 2356 exclude_deptypes = excluded_dep_types) 2357 2358 for mydep in mydeps: 2359 2360 matches, rslt = dbconn.atomMatch(mydep, 2361 multiMatch = True) 2362 if rslt != 0: 2363 continue 2364 matches = set((x, d_repo_id) for x in matches) 2365 2366 if len(matches) > 1: 2367 if (package_id, repo_id) in matches: 2368 # are all in depends? 2369 matches -= depends 2370 if matches: 2371 # no, they aren't 2372 my_remove_depends.add((d_package_id, d_repo_id)) 2373 2374 filter_match_cache[(d_package_id, d_repo_id)] = my_remove_depends 2375 cached = my_remove_depends 2376 2377 remove_depends |= cached 2378 2379 depends -= remove_depends 2380 return depends
2381
2382 - def _get_installed_packages_system_mask(self):
2383 """ 2384 Get the installed packages matches system mask metadata. 2385 """ 2386 sha = hashlib.sha1() 2387 2388 inst_repo = self.installed_repository() 2389 cache_s = "%s|%s|v1" % ( 2390 inst_repo.checksum(), 2391 self._settings['repositories']['branch']) 2392 2393 sha.update(const_convert_to_rawstring(cache_s)) 2394 cache_key = "system_mask/mask_%s" % (sha.hexdigest(),) 2395 2396 if self.xcache: 2397 cached = self._cacher.pop(cache_key) 2398 if cached is not None: 2399 return cached 2400 2401 settings = self.Settings() 2402 cl_settings = self.ClientSettings() 2403 repo_settings = cl_settings['repositories'] 2404 repos_mask_list = repo_settings['system_mask'] 2405 m_list = repos_mask_list + settings['system_mask'] 2406 2407 mc_cache = set() 2408 mask_installed = [] 2409 mask_installed_keys = {} 2410 2411 for atom in m_list: 2412 try: 2413 m_ids, m_r = inst_repo.atomMatch( 2414 atom, multiMatch = True) 2415 if m_r != 0: 2416 continue 2417 except EntropyRepositoryError: 2418 continue 2419 2420 mykey = entropy.dep.dep_getkey(atom) 2421 obj = mask_installed_keys.setdefault(mykey, set()) 2422 for m_id in m_ids: 2423 if m_id in mc_cache: 2424 continue 2425 mc_cache.add(m_id) 2426 mask_installed.append(m_id) 2427 obj.add(m_id) 2428 2429 data = { 2430 'ids': mask_installed, 2431 'keys': mask_installed_keys, 2432 } 2433 2434 if self.xcache: 2435 self._cacher.push(cache_key, data, async = False) 2436 2437 return data
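The bookkeeping above keeps a flat, duplicate-free list of masked installed package ids ('ids') next to a per-key grouping ('keys'). The same pattern in isolation, with invented matches:

# each system-mask atom resolved to a package key and some installed ids
matched = [
    ("sys-apps/shadow", [3, 5]),
    ("sys-devel/gcc", [9]),
    ("sys-apps/shadow", [5]),      # overlapping atom, id already seen
]

seen = set()
mask_ids = []      # flat, ordered, duplicate-free ('ids')
mask_keys = {}     # key -> set of package ids ('keys')

for key, pkg_ids in matched:
    bucket = mask_keys.setdefault(key, set())
    for pkg_id in pkg_ids:
        if pkg_id in seen:
            continue
        seen.add(pkg_id)
        mask_ids.append(pkg_id)
        bucket.add(pkg_id)

print(mask_ids)            # [3, 5, 9]
print(sorted(mask_keys))   # ['sys-apps/shadow', 'sys-devel/gcc']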
2438 2439 DISABLE_NEEDED_SCANNING = os.getenv("ETP_DISABLE_ELF_NEEDED_SCANNING") 2440
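The method below drives the whole inverse-dependency calculation with an explicit LIFO stack and a visited set rather than recursion. A reduced, self-contained sketch of that walk; the reverse-dependency map and package names are invented stand-ins for the repository queries.

# invented map: package -> packages that depend on it (its reverse deps)
revdeps = {
    "libpng": {"gtk", "imagemagick"},
    "gtk": {"gimp"},
    "imagemagick": set(),
    "gimp": set(),
}

def removal_closure(root):
    stack = [root]
    seen = set()
    while stack:
        pkg = stack.pop()
        if pkg in seen:
            continue               # already analyzed
        seen.add(pkg)
        for rev_dep in revdeps.get(pkg, ()):
            stack.append(rev_dep)  # recurse iteratively
    return seen

print(sorted(removal_closure("libpng")))
# ['gimp', 'gtk', 'imagemagick', 'libpng'] -- everything that must go too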
2441 - def _generate_reverse_dependency_tree(self, matched_atoms, deep = False, 2442 recursive = True, empty = False, system_packages = True, 2443 elf_needed_scanning = True):
2444 2445 """ 2446 @raise DependenciesNotRemovable: if at least one dependencies is 2447 considered vital for the system. 2448 """ 2449 2450 # experimental feature, make possible to override it 2451 # please remove in future. 2452 if self.DISABLE_NEEDED_SCANNING: 2453 elf_needed_scanning = False 2454 2455 if const_debug_enabled(): 2456 const_debug_write(__name__, 2457 "\n_generate_reverse_dependency_tree " \ 2458 "[m:%s => %s|d:%s|r:%s|e:%s|s:%s|es:%s]" \ 2459 % (matched_atoms, 2460 [self.open_repository(x[1]).retrieveAtom(x[0]) \ 2461 for x in matched_atoms], deep, recursive, empty, 2462 system_packages, elf_needed_scanning)) 2463 2464 inst_repo = self.installed_repository() 2465 cache_key = None 2466 2467 if self.xcache: 2468 sha = hashlib.sha1() 2469 2470 cache_s = "ma{%s}s{%s;%s;%s;%s;%s;%s;%s;%s;%s;%s}v5" % ( 2471 ";".join(["%s" % (x,) for x in sorted(matched_atoms)]), 2472 deep, 2473 recursive, 2474 empty, 2475 system_packages, 2476 elf_needed_scanning, 2477 inst_repo.checksum(), 2478 self.repositories_checksum(), 2479 self._settings.packages_configuration_hash(), 2480 self._settings_client_plugin.packages_configuration_hash(), 2481 ";".join(sorted(self._settings['repositories']['available'])), 2482 ) 2483 sha.update(const_convert_to_rawstring(cache_s)) 2484 2485 cache_key = "depends/tree_%s" % (sha.hexdigest(),) 2486 2487 cached = self._cacher.pop(cache_key) 2488 if cached is not None: 2489 return cached 2490 2491 if const_debug_enabled(): 2492 const_debug_write(__name__, 2493 "\n_generate_reverse_dependency_tree [m:%s] not cached!" % ( 2494 matched_atoms,)) 2495 2496 count = 0 2497 match_cache = set() 2498 stack = Lifo() 2499 graph = Graph() 2500 not_removable_deps = set() 2501 deep_dep_map = {} 2502 filter_multimatch_cache = {} 2503 needed_providers_left = {} 2504 2505 system_mask_data = self._get_installed_packages_system_mask() 2506 2507 # post-dependencies won't be pulled in 2508 pdepend_id = etpConst['dependency_type_ids']['pdepend_id'] 2509 bdepend_id = etpConst['dependency_type_ids']['bdepend_id'] 2510 rem_dep_text = _("Calculating inverse dependencies for") 2511 for match in matched_atoms: 2512 stack.push(match) 2513 2514 def get_deps(repo_db, d_deps): 2515 deps = set() 2516 for d_dep in d_deps: 2517 if repo_db is self.installed_repository(): 2518 m_package_id, m_rc_x = repo_db.atomMatch(d_dep) 2519 m_rc = InstalledPackagesRepository.NAME 2520 else: 2521 m_package_id, m_rc = self.atom_match(d_dep) 2522 2523 if m_package_id != -1: 2524 deps.add((m_package_id, m_rc)) 2525 2526 return deps
2527 2528 def get_direct_deps(repo_db, pkg_id): 2529 return repo_db.retrieveDependencies(pkg_id, 2530 exclude_deptypes = (bdepend_id,)) 2531 2532 def filter_deps(raw_deps): 2533 filtered_deps = set() 2534 for mydep, m_repo_id in raw_deps: 2535 m_repo_db = self.open_repository(m_repo_id) 2536 2537 if system_packages: 2538 if m_repo_db.isSystemPackage(mydep): 2539 if const_debug_enabled(): 2540 const_debug_write(__name__, 2541 "\n_generate_reverse_dependency_tree [md:%s] " 2542 "cannot calculate, it's a system package" \ 2543 % ((mydep, m_repo_id),)) 2544 continue 2545 if m_repo_db is self.installed_repository(): 2546 if mydep in system_mask_data['ids']: 2547 if const_debug_enabled(): 2548 const_debug_write(__name__, 2549 "\n_generate_reverse_dependency_tree [md:%s] " 2550 "cannot calculate, it's in sysmask" \ 2551 % ((mydep, m_repo_id),)) 2552 continue 2553 2554 filtered_deps.add((mydep, m_repo_id,)) 2555 return filtered_deps 2556 2557 def _filter_simple_or_revdeps(pkg_id, repo_id, repo_db, 2558 reverse_deps_ids): 2559 # filter out reverse dependencies whose or dependencies 2560 # are anyway satisfied 2561 reverse_deps = set() 2562 for dep_pkg_id, dep_str in reverse_deps_ids: 2563 if dep_str.endswith(etpConst['entropyordepquestion']): 2564 or_dep_lst = dep_str[:-1].split(etpConst['entropyordepsep']) 2565 # how many are currently installed? 2566 or_dep_ids = set() 2567 for or_dep in or_dep_lst: 2568 or_pkg_id, or_rc = repo_db.atomMatch(or_dep) 2569 if or_rc == 0: 2570 or_dep_ids.add(or_pkg_id) 2571 if pkg_id in or_dep_ids: 2572 or_dep_ids.discard(pkg_id) 2573 if or_dep_ids: 2574 # drop already analyzed matches 2575 or_dep_matches = set((x, repo_id) for x in or_dep_ids) 2576 or_dep_matches -= match_cache 2577 if or_dep_matches: 2578 # we can ignore this 2579 if const_debug_enabled(): 2580 const_debug_write(__name__, 2581 brown("\n_generate_reverse_dependency_tree" \ 2582 ".get_revdeps ignoring %s => %s " \ 2583 "due to: %s, for %s" % ( 2584 (pkg_id, repo_id), 2585 self.open_repository(repo_id).retrieveAtom( 2586 pkg_id), 2587 dep_str, 2588 self.open_repository(repo_id).retrieveAtom( 2589 dep_pkg_id)))) 2590 continue 2591 elif const_debug_enabled(): 2592 const_debug_write(__name__, 2593 teal("\n_generate_reverse_dependency_tree" \ 2594 ".get_revdeps cannot ignore %s :: %s " \ 2595 ":: dep_str: %s, for : %s" % ( 2596 (pkg_id, repo_id), 2597 self.open_repository(repo_id).retrieveAtom( 2598 pkg_id), 2599 dep_str, 2600 self.open_repository(repo_id).retrieveAtom( 2601 dep_pkg_id)))) 2602 reverse_deps.add((dep_pkg_id, repo_id)) 2603 return reverse_deps 2604 2605 def get_revdeps(pkg_id, repo_id, repo_db): 2606 # obtain its inverse deps 2607 reverse_deps_ids = repo_db.retrieveReverseDependencies( 2608 pkg_id, exclude_deptypes = (pdepend_id, bdepend_id,), 2609 extended = True) 2610 if const_debug_enabled(): 2611 const_debug_write(__name__, 2612 "\n_generate_reverse_dependency_tree.get_revdeps: " \ 2613 "orig revdeps: %s => %s" % (sorted(reverse_deps_ids), 2614 sorted([repo_db.retrieveAtom(x[0]) for x in \ 2615 reverse_deps_ids]),)) 2616 2617 reverse_deps = _filter_simple_or_revdeps(pkg_id, repo_id, repo_db, 2618 reverse_deps_ids) 2619 if const_debug_enabled(): 2620 const_debug_write(__name__, 2621 "\n_generate_reverse_dependency_tree.get_revdeps: " \ 2622 "after filter: %s => %s" % (sorted(reverse_deps), 2623 sorted([repo_db.retrieveAtom(x[0]) for x in \ 2624 reverse_deps]),)) 2625 2626 if reverse_deps: 2627 reverse_deps = self.__filter_depends_multimatched_atoms( 2628 pkg_id, repo_id, reverse_deps, 
2629 filter_match_cache = filter_multimatch_cache) 2630 if const_debug_enabled(): 2631 const_debug_write(__name__, 2632 "\n_generate_reverse_dependency_tree.get_revdeps: " \ 2633 "after filter_depends: %s => %s" % ( 2634 sorted(reverse_deps), 2635 sorted([repo_db.retrieveAtom(x[0]) for x in \ 2636 reverse_deps]),)) 2637 2638 return reverse_deps 2639 2640 def get_revdeps_lib(pkg_id, repo_id, repo_db): 2641 provided_libs = repo_db.retrieveProvidedLibraries(pkg_id) 2642 reverse_deps = set() 2643 2644 for needed, path, elfclass in provided_libs: 2645 # let's see what package is actually resolving 2646 # this library, if there are more than one, we 2647 # can still be happy. 2648 needed_key = (needed, elfclass) 2649 needed_providers = needed_providers_left.get(needed_key) 2650 if needed_providers is None: 2651 needed_providers = set(repo_db.resolveNeeded( 2652 needed, elfclass = elfclass)) 2653 needed_providers_left[needed_key] = needed_providers 2654 2655 # remove myself 2656 needed_providers.discard(pkg_id) 2657 if needed_providers: 2658 # another package is providing the same library 2659 # so it's not a problem to skip this package. 2660 if const_debug_enabled(): 2661 const_debug_write( 2662 __name__, 2663 "_generate_reverse_dependency_tree.get_revdeps_lib:" 2664 " skipping needed dependencies for (%s, %s, %s)," 2665 " still having: %s" % ( 2666 needed, path, elfclass, 2667 [repo_db.retrieveAtom(x) 2668 for x in needed_providers])) 2669 continue 2670 2671 for needed_package_id in repo_db.searchNeeded( 2672 needed, elfclass = elfclass): 2673 reverse_deps.add((needed_package_id, repo_id)) 2674 2675 if reverse_deps: 2676 reverse_deps = self.__filter_depends_multimatched_atoms( 2677 pkg_id, repo_id, reverse_deps, 2678 filter_match_cache = filter_multimatch_cache) 2679 # remove myself 2680 reverse_deps.discard((pkg_id, repo_id)) 2681 2682 # remove packages in the same slot, this is required in a case 2683 # like this: 2684 # _generate_reverse_dependency_tree [m:(17434, '__system__') 2685 # => x11-drivers/xf86-video-virtualbox-4.0.4#2.6.37-sabayon] 2686 # rev_deps: set([(17432, '__system__'), 2687 # (17435, '__system__')]) => 2688 # ['x11-drivers/xf86-video-virtualbox-4.0.4#2.6.38-sabayon', 2689 # 'app-emulation/virtualbox-guest-additions-4.0.4#2.6.37-sabayon'] 2690 # :: reverse_deps_lib: set([(17432, '__system__')]) 2691 # where xf86-video-virtualbox erroneously pulls in its cousin :-) 2692 keyslot = None 2693 pkg_tag = None 2694 if reverse_deps: 2695 # only if we advertise a package tag 2696 pkg_tag = repo_db.retrieveTag(pkg_id) 2697 if pkg_tag: 2698 keyslot = repo_db.retrieveKeySlotAggregated(pkg_id) 2699 2700 if keyslot and pkg_tag: 2701 keyslot = entropy.dep.remove_tag_from_slot(keyslot) 2702 filtered_reverse_deps = set() 2703 for revdep_match in reverse_deps: 2704 revdep_pkg_id, revdep_repo_id = revdep_match 2705 revdep_db = self.open_repository(revdep_repo_id) 2706 revdep_keyslot = revdep_db.retrieveKeySlotAggregated( 2707 revdep_pkg_id) 2708 if revdep_keyslot is not None: 2709 revdep_keyslot = entropy.dep.remove_tag_from_slot( 2710 revdep_keyslot) 2711 if revdep_keyslot != keyslot: 2712 filtered_reverse_deps.add(revdep_match) 2713 reverse_deps = filtered_reverse_deps 2714 2715 return reverse_deps 2716 2717 def setup_revdeps(filtered_deps): 2718 for d_rev_dep, d_repo_id in filtered_deps: 2719 d_repo_db = self.open_repository(d_repo_id) 2720 mydepends = d_repo_db.retrieveReverseDependencies( 2721 d_rev_dep, exclude_deptypes = \ 2722 (pdepend_id, bdepend_id,)) 2723 deep_dep_map[(d_rev_dep, 
d_repo_id)] = \ 2724 set((x, d_repo_id) for x in mydepends) 2725 2726 if const_debug_enabled(): 2727 const_debug_write(__name__, 2728 "\n_generate_reverse_dependency_tree [d_dep:%s] " \ 2729 "reverse deps: %s" % ((d_rev_dep, d_repo_id), 2730 mydepends,)) 2731 2732 while stack.is_filled(): 2733 2734 pkg_id, repo_id = stack.pop() 2735 if (pkg_id, repo_id) in match_cache: 2736 # already analyzed 2737 continue 2738 match_cache.add((pkg_id, repo_id)) 2739 2740 if system_packages: 2741 system_pkg = not self.validate_package_removal(pkg_id, 2742 repo_id = repo_id) 2743 2744 if system_pkg: 2745 # this is a system package, removal forbidden 2746 not_removable_deps.add((pkg_id, repo_id)) 2747 if const_debug_enabled(): 2748 const_debug_write(__name__, 2749 "\n_generate_reverse_dependency_tree %s is sys_pkg!" % ( 2750 (pkg_id, repo_id),)) 2751 continue 2752 2753 repo_db = self.open_repository(repo_id) 2754 2755 count += 1 2756 p_atom = repo_db.retrieveAtom(pkg_id) 2757 if p_atom is None: 2758 if const_debug_enabled(): 2759 const_debug_write(__name__, 2760 "\n_generate_reverse_dependency_tree %s not available!" % ( 2761 (pkg_id, repo_id),)) 2762 continue 2763 self.output( 2764 blue(rem_dep_text + " %s" % (purple(p_atom),)), 2765 importance = 0, 2766 level = "info", 2767 back = True, 2768 header = '|/-\\'[count%4]+" " 2769 ) 2770 2771 reverse_deps = get_revdeps(pkg_id, repo_id, repo_db) 2772 if const_debug_enabled(): 2773 const_debug_write(__name__, 2774 "\n_generate_reverse_dependency_tree, [m:%s => %s], " \ 2775 "get_revdeps: %s => %s" % ( 2776 (pkg_id, repo_id), p_atom, reverse_deps, 2777 [self.open_repository(x[1]).retrieveAtom(x[0]) \ 2778 for x in reverse_deps])) 2779 2780 reverse_deps_lib = set() 2781 if elf_needed_scanning: 2782 # use metadata collected during package generation to 2783 # look for dependencies based on ELF NEEDED. 2784 # a nice example is libpng-1.2 vs libpng-1.4 when pkg 2785 # lists a generic media-libs/libpng as dependency. 
2786 reverse_deps_lib = get_revdeps_lib(pkg_id, repo_id, repo_db) 2787 reverse_deps |= reverse_deps_lib 2788 2789 if const_debug_enabled(): 2790 const_debug_write(__name__, 2791 "\n_generate_reverse_dependency_tree [m:%s => %s] " \ 2792 "rev_deps: %s => %s :: reverse_deps_lib: %s" % ( 2793 (pkg_id, repo_id), p_atom, reverse_deps, 2794 [self.open_repository(x[1]).retrieveAtom(x[0]) \ 2795 for x in reverse_deps], 2796 reverse_deps_lib,)) 2797 2798 if deep: 2799 2800 d_deps = get_direct_deps(repo_db, pkg_id) 2801 if const_debug_enabled(): 2802 const_debug_write(__name__, 2803 "\n_generate_reverse_dependency_tree [m:%s] d_deps: %s" % ( 2804 (pkg_id, repo_id), d_deps,)) 2805 2806 # now filter them 2807 mydeps = filter_deps(get_deps(repo_db, d_deps)) 2808 2809 if const_debug_enabled(): 2810 const_debug_write(__name__, 2811 "\n_generate_reverse_dependency_tree done filtering out" \ 2812 " direct dependencies: %s" % (mydeps,)) 2813 2814 if empty: 2815 reverse_deps |= mydeps 2816 if const_debug_enabled(): 2817 const_debug_write(__name__, 2818 "\n_generate_reverse_dependency_tree done empty=True," \ 2819 " adding: %s" % (mydeps,)) 2820 else: 2821 # to properly pull in every direct dependency with no 2822 # reverse dependencies, we need to setup a dependency 2823 # map first, and then make sure there are no chained 2824 # package identifiers by removing direct dependencies 2825 # from the list of reverse dependencies 2826 setup_revdeps(mydeps) 2827 2828 if empty: 2829 empty = False 2830 2831 if recursive: 2832 for rev_dep in reverse_deps: 2833 stack.push(rev_dep) 2834 graph.add((pkg_id, repo_id), reverse_deps) 2835 2836 2837 del stack 2838 if not_removable_deps: 2839 raise DependenciesNotRemovable(not_removable_deps) 2840 deptree = graph.solve() 2841 2842 if deep: 2843 # in order to catch unused reverse dependencies 2844 # it is required to iterate over the direct dependencies 2845 # every time a new direct dependency gets pulled in in 2846 # the removal queue. 2847 # in this way, every orphan package will be considered 2848 # for removal automatically. 2849 2850 flat_dep_tree = set() 2851 for r_deps in deptree.values(): 2852 flat_dep_tree.update(r_deps) 2853 2854 while True: 2855 change = False 2856 # now try to deeply remove unused packages 2857 # iterate over a copy 2858 for pkg_match in deep_dep_map.keys(): 2859 deep_dep_map[pkg_match] -= flat_dep_tree 2860 if (not deep_dep_map[pkg_match]) and \ 2861 (pkg_match not in flat_dep_tree): 2862 2863 graph.add(pkg_match, set()) 2864 flat_dep_tree.add(pkg_match) 2865 2866 # get direct dependencies 2867 pkg_id, pkg_repo = pkg_match 2868 repo_db = self.open_repository(pkg_repo) 2869 pkg_d_deps = get_direct_deps(repo_db, pkg_id) 2870 pkg_d_matches = filter_deps( 2871 get_deps(repo_db, pkg_d_deps)) 2872 setup_revdeps(pkg_d_matches) 2873 change = True 2874 2875 if not change: 2876 break 2877 2878 deptree = graph.solve() 2879 del flat_dep_tree 2880 2881 graph.destroy() 2882 2883 if cache_key is not None: 2884 self._cacher.push(cache_key, deptree) 2885 2886 return deptree 2887 2888 @sharedinstlock
2889 - def calculate_masked_packages(self, use_cache = True):
2890 """ 2891 Compute a list of masked packages. For masked packages it is meant 2892 a list of packages that cannot be installed without explicit user 2893 confirmation. 2894 2895 @keyword use_cache: use on-disk cache 2896 @type use_cache: bool 2897 @return: list of masked package matches + mask reason id 2898 [((package_id, repository_id), reason_id), ...] 2899 @rtype: list 2900 """ 2901 sha = hashlib.sha1() 2902 2903 cache_s = "{%s;%s}v2" % ( 2904 self.repositories_checksum(), 2905 # needed when users do bogus things like editing config files 2906 # manually (branch setting) 2907 self._settings['repositories']['branch']) 2908 sha.update(const_convert_to_rawstring(cache_s)) 2909 2910 cache_key = "available/masked_%s" % (sha.hexdigest(),) 2911 2912 if use_cache and self.xcache: 2913 cached = self._cacher.pop(cache_key) 2914 if cached is not None: 2915 return cached 2916 2917 masked = [] 2918 for repository_id in self.filter_repositories(self.repositories()): 2919 repo = self.open_repository(repository_id) 2920 try: 2921 # db may be corrupted, we cannot deal with it here 2922 package_ids = repo.listAllPackageIds() 2923 except OperationalError: 2924 continue 2925 2926 def fm(pkg_id): 2927 pkg_id_filtered, reason_id = repo.maskFilter(pkg_id) 2928 if pkg_id_filtered == -1: 2929 return ((pkg_id, repository_id,), reason_id) 2930 return None
2931 masked += [x for x in map(fm, package_ids) if x is not None] 2932 2933 # add live unmasked elements too 2934 unmasks = self._settings['live_packagemasking']['unmask_matches'] 2935 live_reason_id = etpConst['pkg_masking_reference']['user_live_unmask'] 2936 for package_id, repository_id in unmasks: 2937 match_data = ((package_id, repository_id), live_reason_id,) 2938 if match_data in masked: 2939 continue 2940 masked.append(match_data) 2941 2942 if self.xcache: 2943 self._cacher.push(cache_key, masked) 2944 2945 return masked 2946 2947 @sharedinstlock
2948 - def calculate_available_packages(self, use_cache = True):
2949 """ 2950 Compute a list of available packages in repositories. For available 2951 packages it is meant a list of non-installed packages. 2952 2953 @keyword use_cache: use on-disk cache 2954 @type use_cache: bool 2955 @return: list of available package matches 2956 @rtype: list 2957 """ 2958 sha = hashlib.sha1() 2959 2960 cache_s = "{%s;%s}v2" % ( 2961 self.repositories_checksum(), 2962 # needed when users do bogus things like editing config files 2963 # manually (branch setting) 2964 self._settings['repositories']['branch']) 2965 sha.update(const_convert_to_rawstring(cache_s)) 2966 2967 cache_key = "available/packages_%s" % (sha.hexdigest(),) 2968 2969 if use_cache and self.xcache: 2970 cached = self._cacher.pop(cache_key) 2971 if cached is not None: 2972 return cached 2973 2974 available = [] 2975 for repository_id in self.filter_repositories(self.repositories()): 2976 repo = self.open_repository(repository_id) 2977 try: 2978 # db may be corrupted, we cannot deal with it here 2979 package_ids = [x for x in repo.listAllPackageIds( 2980 order_by = 'atom') if repo.maskFilter(x)[0] != -1] 2981 except OperationalError: 2982 continue 2983 myavailable = [] 2984 do_break = False 2985 for package_id in package_ids: 2986 if do_break: 2987 break 2988 # get key + slot 2989 try: 2990 key_slot = repo.retrieveKeySlot(package_id) 2991 if key_slot is None: 2992 # mmh... invalid entry, ignore 2993 continue 2994 key, slot = key_slot 2995 matches = self.installed_repository().searchKeySlot(key, slot) 2996 except (DatabaseError, IntegrityError, OperationalError,): 2997 do_break = True 2998 continue 2999 if not matches: 3000 myavailable.append((package_id, repository_id)) 3001 3002 available += myavailable[:] 3003 3004 if self.xcache: 3005 self._cacher.push(cache_key, available) 3006 3007 return available
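The availability test above reduces to: a repository package is "available" when nothing with the same key and slot is currently installed. A toy version with invented entries:

# repository package id -> (key, slot); installed side is a set of (key, slot)
repo_packages = {
    1: ("app-editors/vim", "0"),
    2: ("dev-lang/python", "3.9"),
}
installed_keyslots = {("dev-lang/python", "3.9")}

available = [pkg_id for pkg_id, keyslot in repo_packages.items()
             if keyslot not in installed_keyslots]

print(available)   # [1] -- vim is not installed in that slot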
3008 3009 @sharedinstlock
3010 - def calculate_critical_updates(self, use_cache = True):
3011 3012 # check if we are branch migrating 3013 # in this case, critical pkgs feature is disabled 3014 in_branch_upgrade = etpConst['etp_in_branch_upgrade_file'] 3015 if const_file_readable(in_branch_upgrade): 3016 return set(), [] 3017 3018 enabled_repos = self.filter_repositories(self.repositories()) 3019 repo_order = [x for x in self._settings['repositories']['order'] if 3020 x in enabled_repos] 3021 3022 inst_repo = self.installed_repository() 3023 3024 cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|v5" % ( 3025 enabled_repos, 3026 inst_repo.checksum(), 3027 self.repositories_checksum(), 3028 self._settings.packages_configuration_hash(), 3029 self._settings_client_plugin.packages_configuration_hash(), 3030 ";".join(sorted(self._settings['repositories']['available'])), 3031 repo_order, 3032 # needed when users do bogus things like editing config files 3033 # manually (branch setting) 3034 self._settings['repositories']['branch'], 3035 ) 3036 sha = hashlib.sha1() 3037 sha.update(const_convert_to_rawstring(cache_s)) 3038 3039 cache_key = "critical/%s" % (sha.hexdigest(),) 3040 3041 if use_cache and self.xcache: 3042 cached = self._cacher.pop(cache_key) 3043 if cached is not None: 3044 return cached 3045 3046 client_settings = self.ClientSettings() 3047 critical_data = client_settings['repositories']['critical_updates'] 3048 3049 # do not match package repositories, never consider them in updates! 3050 # that would be a nonsense, since package repos are temporary. 3051 enabled_repos = self.filter_repositories(self.repositories()) 3052 match_repos = tuple([x for x in \ 3053 self._settings['repositories']['order'] if x in enabled_repos]) 3054 3055 atoms = set() 3056 atom_matches = {} 3057 for repoid in critical_data: 3058 for atom in critical_data[repoid]: 3059 match_id, match_repo = self.atom_match(atom, 3060 match_repo = match_repos) 3061 if match_repo == 1: 3062 continue 3063 atom_matches[atom] = (match_id, match_repo,) 3064 atoms.add(atom) 3065 3066 atoms = self._get_unsatisfied_dependencies(atoms, relaxed_deps = True, 3067 match_repo = match_repos) 3068 matches = [atom_matches.get(atom) for atom in atoms] 3069 data = (atoms, matches) 3070 3071 if self.xcache: 3072 self._cacher.push(cache_key, data, async = False) 3073 3074 return data
3075 3076 @sharedinstlock
3077 - def calculate_security_updates(self, use_cache = True):
3078 """ 3079 Return a list of security updates available using Entropy Security 3080 interface and Client.calculate_updates(). 3081 3082 @keyword use_cache: Use Entropy cache, if available 3083 @type use_cache: bool 3084 @return: list of Entropy package matches that should be updated 3085 @rtype: list 3086 """ 3087 outcome = self.calculate_updates( 3088 critical_updates = False, use_cache = use_cache) 3089 update, remove = outcome['update'], outcome['remove'] 3090 fine, spm_fine = outcome['fine'], outcome['spm_fine'] 3091 if not update: 3092 return [] 3093 3094 deps = set() 3095 3096 security = self.Security() 3097 for advisory_id in security.list(): 3098 deps.update(security.affected_id(advisory_id)) 3099 3100 sec_updates = [] 3101 inst_repo = self.installed_repository() 3102 for vul_dep in deps: 3103 pkg_id, rc = inst_repo.atomMatch(vul_dep) 3104 if pkg_id == -1: 3105 continue 3106 3107 matches, rc = self.atom_match(vul_dep, multi_repo = True, 3108 multi_match = True) 3109 3110 # filter dups, keeping order 3111 matches = [x for x in matches if x not in sec_updates] 3112 sec_updates += [x for x in matches if x in update] 3113 3114 return sec_updates
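The two list comprehensions at the end perform an order-preserving intersection: advisory matches are kept only if they are also scheduled updates, and duplicates are dropped along the way. The same filter in isolation, with invented matches:

update = [(10, "repo"), (20, "repo"), (30, "repo")]        # scheduled updates
advisory_matches = [
    [(20, "repo"), (99, "repo")],     # matches of a first advisory
    [(20, "repo"), (30, "repo")],     # matches of a second advisory
]

sec_updates = []
for matches in advisory_matches:
    matches = [x for x in matches if x not in sec_updates]  # drop duplicates
    sec_updates += [x for x in matches if x in update]      # keep real updates

print(sec_updates)   # [(20, 'repo'), (30, 'repo')]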
3115 3116 @sharedinstlock
3117 - def calculate_updates(self, empty = False, use_cache = True, 3118 critical_updates = True, quiet = False):
3119 """ 3120 Calculate package updates. By default, this method also handles critical 3121 updates priority. Updates (as well as other objects here) are returned 3122 in alphabetical order. To generate a valid installation queue, have a 3123 look at Client.get_install_queue(). 3124 3125 @keyword empty: consider the installed packages repository 3126 empty. Mark every package as update. 3127 @type empty: bool 3128 @keyword use_cache: use Entropy cache 3129 @type use_cache: bool 3130 @keyword critical_updates: if False, disable critical updates check 3131 priority. 3132 @type critical_updates: bool 3133 @keyword quiet: do not print any status info if True 3134 @type quiet: bool 3135 @return: dict composed by (list of package matches ("update" key), 3136 list of installed package identifiers ("remove" key), list of 3137 package names already up-to-date ("fine" key), list of package names 3138 already up-to-date when user enabled "ignore-spm-downgrades", 3139 "spm_fine" key), if critical updates were found ("critical_found" 3140 key). If critical_found is True, relaxed dependencies calculation 3141 must be enforced. 3142 @rtype: tuple 3143 """ 3144 cl_settings = self.ClientSettings() 3145 misc_settings = cl_settings['misc'] 3146 3147 # critical updates hook, if enabled 3148 # this will force callers to receive only critical updates 3149 if misc_settings.get('forcedupdates') and critical_updates: 3150 _atoms, update = self.calculate_critical_updates( 3151 use_cache = use_cache) 3152 if update: 3153 return { 3154 'update': update, 3155 'remove': [], 3156 'fine': [], 3157 'spm_fine': [], 3158 'critical_found': True, 3159 } 3160 3161 inst_repo = self.installed_repository() 3162 ignore_spm_downgrades = misc_settings['ignore_spm_downgrades'] 3163 enabled_repos = self.filter_repositories(self.repositories()) 3164 repo_order = [x for x in self._settings['repositories']['order'] if 3165 x in enabled_repos] 3166 3167 cache_s = "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|v7" % ( 3168 empty, 3169 enabled_repos, 3170 inst_repo.checksum(), 3171 self.repositories_checksum(), 3172 self._settings.packages_configuration_hash(), 3173 self._settings_client_plugin.packages_configuration_hash(), 3174 ";".join(sorted(self._settings['repositories']['available'])), 3175 repo_order, 3176 ignore_spm_downgrades, 3177 # needed when users do bogus things like editing config files 3178 # manually (branch setting) 3179 self._settings['repositories']['branch'], 3180 ) 3181 3182 sha = hashlib