Package entropy :: Package client :: Package interfaces :: Package package :: Package actions :: Module install

Source Code for Module entropy.client.interfaces.package.actions.install

   1  # -*- coding: utf-8 -*- 
   2  """ 
   3   
   4      @author: Fabio Erculiani <[email protected]> 
   5      @contact: [email protected] 
   6      @copyright: Fabio Erculiani 
   7      @license: GPL-2 
   8   
   9      B{Entropy Package Manager Client Package Interface}. 
  10   
  11  """ 
  12  import errno 
  13  import os 
  14  import shutil 
  15  import stat 
  16  import time 
  17   
  18  from entropy.const import etpConst, const_convert_to_unicode, \ 
  19      const_mkdtemp, const_mkstemp, const_convert_to_rawstring, \ 
  20      const_is_python3, const_debug_write 
  21  from entropy.exceptions import EntropyException 
  22  from entropy.i18n import _ 
  23  from entropy.output import darkred, red, purple, brown, blue, darkgreen, teal 
  24   
  25  import entropy.dep 
  26  import entropy.tools 
  27   
  28  from ._manage import _PackageInstallRemoveAction 
  29  from ._triggers import Trigger 
  30   
  31  from .. import _content as Content 
  32  from .. import preservedlibs 
  33   
  34   
35 -class _PackageInstallAction(_PackageInstallRemoveAction):
36 """ 37 PackageAction used for package installation. 38 """ 39
    class InvalidArchitecture(EntropyException):
        """
        Raised when a package for another architecture is attempted
        to be installed.
        """

    # Action identifier; also passed to Trigger() as the action name.
    NAME = "install"
48 - def __init__(self, entropy_client, package_match, opts = None):
49 """ 50 Object constructor. 51 """ 52 super(_PackageInstallAction, self).__init__( 53 entropy_client, package_match, opts = opts)
54
55 - def finalize(self):
56 """ 57 Finalize the object, release all its resources. 58 """ 59 super(_PackageInstallAction, self).finalize() 60 if self._meta is not None: 61 meta = self._meta 62 self._meta = None 63 meta.clear()
64
65 - def _get_remove_package_id_unlocked(self, inst_repo):
66 """ 67 Return the installed packages repository package id 68 that would be removed. 69 """ 70 repo = self._entropy.open_repository(self._repository_id) 71 key_slot = repo.retrieveKeySlotAggregated(self._package_id) 72 remove_package_id, _inst_rc = inst_repo.atomMatch(key_slot) 73 return remove_package_id
74
    def setup(self):
        """
        Setup the PackageAction.

        Builds the self._meta metadata dictionary consumed by all the
        phase methods. Idempotent: returns immediately when self._meta
        is already populated. May raise InvalidArchitecture when the
        package repository was compiled for another architecture.
        """
        if self._meta is not None:
            # already configured
            return

        metadata = {}
        splitdebug_metadata = self._get_splitdebug_metadata()
        metadata.update(splitdebug_metadata)

        misc_settings = self._entropy.ClientSettings()['misc']
        metadata['edelta_support'] = misc_settings['edelta_support']
        is_package_repo = self._entropy._is_package_repository(
            self._repository_id)

        # These are used by Spm.entropy_install_unpack_hook()
        metadata['package_id'] = self._package_id
        metadata['repository_id'] = self._repository_id

        # if splitdebug is enabled, check if it's also enabled
        # via package.splitdebug
        if metadata['splitdebug']:
            # yeah, this has to affect exported splitdebug setting
            # because it is read during package files installation.
            # Older splitdebug data was in the same package file of
            # the actual content. Later on, splitdebug data was moved
            # to its own package file that gets downloaded and unpacked
            # only if required (if splitdebug is enabled)
            metadata['splitdebug'] = self._package_splitdebug_enabled(
                self._package_match)

        # fetch abort function
        metadata['fetch_abort_function'] = self._opts.get(
            'fetch_abort_function')

        # Used by Spm.entropy_install_unpack_hook()
        metadata['repository_id'] = self._repository_id
        metadata['package_id'] = self._package_id

        install_source = etpConst['install_sources']['unknown']
        meta_inst_source = self._opts.get('install_source', install_source)
        if meta_inst_source in list(etpConst['install_sources'].values()):
            install_source = meta_inst_source
        metadata['install_source'] = install_source

        metadata['already_protected_config_files'] = {}
        metadata['configprotect_data'] = []

        repo = self._entropy.open_repository(self._repository_id)

        metadata['atom'] = repo.retrieveAtom(self._package_id)

        # used by Spm.entropy_install_unpack_hook(),
        # and remove_installed_package()
        metadata['category'] = repo.retrieveCategory(self._package_id)
        metadata['name'] = repo.retrieveName(self._package_id)
        metadata['version'] = repo.retrieveVersion(self._package_id)
        metadata['versiontag'] = repo.retrieveTag(self._package_id)
        metadata['slot'] = repo.retrieveSlot(self._package_id)

        metadata['extra_download'] = []
        metadata['splitdebug_pkgfile'] = True
        if not is_package_repo:
            # extra download entries (e.g. splitdebug files) only exist
            # for regular repositories; debug entries are dropped when
            # splitdebug is disabled
            metadata['splitdebug_pkgfile'] = False
            extra_download = repo.retrieveExtraDownload(self._package_id)
            if not metadata['splitdebug']:
                extra_download = [x for x in extra_download if \
                    x['type'] != "debug"]
            metadata['extra_download'] += extra_download

        metadata['download'] = repo.retrieveDownloadURL(self._package_id)

        description = repo.retrieveDescription(self._package_id)
        if description:
            # keep UI output short: truncate long descriptions
            if len(description) > 74:
                description = description[:74].strip()
                description += "..."
        metadata['description'] = description

        metadata['remove_metaopts'] = {
            'removeconfig': True,
        }
        metadata['remove_metaopts'].update(
            self._opts.get('remove_metaopts', {}))

        metadata['merge_from'] = None
        mf = self._opts.get('merge_from')
        if mf is not None:
            metadata['merge_from'] = const_convert_to_unicode(mf)
        metadata['removeconfig'] = self._opts.get('removeconfig', False)

        # collects directories whose content has been modified
        # this information is then handed to the Trigger
        metadata['affected_directories'] = set()
        metadata['affected_infofiles'] = set()

        # create an atomically safe unpack directory path
        unpack_dir = os.path.join(
            etpConst['entropyunpackdir'],
            self._escape_path(metadata['atom']).lstrip(os.path.sep))
        try:
            os.makedirs(unpack_dir, 0o755)
        except OSError as err:
            # already existing directory is fine
            if err.errno != errno.EEXIST:
                raise

        metadata['smartpackage'] = False
        # set unpack dir and image dir
        if is_package_repo:

            try:
                compiled_arch = repo.getSetting("arch")
                arch_fine = compiled_arch == etpConst['currentarch']
            except KeyError:
                arch_fine = True # sorry, old db, cannot check

            if not arch_fine:
                raise self.InvalidArchitecture(
                    "Package compiled for a different architecture")

            repo_data = self._settings['repositories']
            repo_meta = repo_data['available'][self._repository_id]
            metadata['smartpackage'] = repo_meta['smartpackage']

            # create a symlink into a generic entropy temp directory
            # and reference the file from there. This will avoid
            # Entropy locking code to change ownership and permissions
            # of the directory containing the package file.
            pkg_dir = const_mkdtemp(dir=unpack_dir, prefix="repository_pkgdir")
            pkgpath = os.path.join(
                pkg_dir, os.path.basename(repo_meta['pkgpath']))

            os.symlink(repo_meta['pkgpath'], pkgpath)

            metadata['pkgpath'] = pkgpath

        else:
            metadata['pkgpath'] = self.get_standard_fetch_disk_path(
                metadata['download'])

        metadata['unpackdir'] = const_mkdtemp(dir=unpack_dir)

        metadata['imagedir'] = os.path.join(
            metadata['unpackdir'],
            etpConst['entropyimagerelativepath'])

        metadata['pkgdbpath'] = os.path.join(metadata['unpackdir'],
            "edb", "pkg.db")

        # ordered list of bound methods executed by _run()
        metadata['phases'] = []
        metadata['phases'].append(self._remove_conflicts_phase)

        # merge_from replaces the tarball unpack with a filesystem copy
        if metadata['merge_from']:
            metadata['phases'].append(self._merge_phase)
        else:
            metadata['phases'].append(self._unpack_phase)

        metadata['phases'].append(self._setup_package_phase)
        metadata['phases'].append(self._tarball_ownership_fixup_phase)
        metadata['phases'].append(self._pre_install_phase)
        metadata['phases'].append(self._install_phase)
        metadata['phases'].append(self._post_install_phase)
        metadata['phases'].append(self._cleanup_phase)

        # SPM can place metadata here if it should be copied to
        # the install trigger
        metadata['__install_trigger__'] = {}

        self._meta = metadata
247 - def _run(self):
248 """ 249 Execute the action. Return an exit status. 250 """ 251 self.setup() 252 253 spm_class = self._entropy.Spm_class() 254 exit_st = spm_class.entropy_install_setup_hook( 255 self._entropy, self._meta) 256 if exit_st != 0: 257 return exit_st 258 259 for method in self._meta['phases']: 260 exit_st = method() 261 if exit_st != 0: 262 break 263 return exit_st
264
265 - def _escape_path(self, path):
266 """ 267 Some applications (like ld) don't like ":" in path, others just don't 268 escape paths at all. So, it's better to avoid to use field separators 269 in path. 270 """ 271 path = path.replace(":", "_") 272 path = path.replace("~", "_") 273 return path
274
275 - def _get_package_conflicts_unlocked(self, inst_repo, entropy_repository, 276 package_id):
277 """ 278 Return a set of conflict dependencies for the given package. 279 """ 280 conflicts = entropy_repository.retrieveConflicts(package_id) 281 282 found_conflicts = set() 283 for conflict in conflicts: 284 inst_package_id, _inst_rc = inst_repo.atomMatch(conflict) 285 if inst_package_id == -1: 286 continue 287 288 # check if the package shares the same key and slot 289 match_data = entropy_repository.retrieveKeySlot(package_id) 290 installed_match_data = inst_repo.retrieveKeySlot(inst_package_id) 291 if match_data != installed_match_data: 292 found_conflicts.add(inst_package_id) 293 294 # auto conflicts support 295 found_conflicts |= self._entropy._generate_dependency_inverse_conflicts( 296 (package_id, entropy_repository.name), just_id=True) 297 298 return found_conflicts
299
    def _remove_conflicts_phase(self):
        """
        Execute the package conflicts removal phase.

        Computes the set of installed packages conflicting with the one
        being installed, filters out those that are going to be replaced
        by the install queue anyway, and removes the remainder. Returns
        a non-zero exit status on removal failure.
        """
        inst_repo = self._entropy.installed_repository()
        with inst_repo.shared():

            repo = self._entropy.open_repository(self._repository_id)
            confl_package_ids = self._get_package_conflicts_unlocked(
                inst_repo, repo, self._package_id)
            if not confl_package_ids:
                return 0

            # calculate removal dependencies
            # system_packages must be False because we should not exclude
            # them from the dependency tree in any case. Also, we cannot trigger
            # DependenciesNotRemovable() exception, too.
            proposed_pkg_ids = self._entropy.get_removal_queue(
                confl_package_ids, system_packages = False)
            # we don't want to remove the whole inverse dependencies of course,
            # but just the conflicting ones, in a proper order
            package_ids = [x for x in proposed_pkg_ids if x in
                confl_package_ids]
            # make sure that every package is listed in package_ids before
            # proceeding, cannot keep packages behind anyway, and must be fault
            # tolerant. Besides, having missing packages here should never
            # happen.
            package_ids += [x for x in confl_package_ids if x not in \
                package_ids]

            if not package_ids:
                const_debug_write(
                    __name__, "_remove_conflict_phase: no package_ids, no need to filter.")
                return 0

            # Now we have a list of packages that should be removed, but
            # we may want to check whether a new version of such package
            # would not cause the removal to begin with.
            # We have two options:
            # 1. we are already planning to install the package, and perhaps
            #    we should just let this happen.
            # 2. we are not planning to install the package, and perhaps
            #    we should.
            install_queue = self._opts.get('install_queue', [])
            if not install_queue:
                const_debug_write(
                    __name__, "_remove_conflict_phase: empty install_queue.")

            # This is O(nm) [m = number of packages to install that could get us here]
            # But the chance to get here is low (basically m is low), the cardinality
            # of n is within 1-2000 worst case, typical case for a 12 months old system
            # is around 700-1000 (according to my experience). Plus, this data is
            # super fast to retrieve (retrieveKeySlotTag is heavily cached, see
            # entropy/db/sqlite.py.) So, as long as this is not a real problem, there is
            # no need for further optimizations.
            key_slot_tags = set()
            for pkg_id, repo_id in install_queue:
                repo = self._entropy.open_repository(repo_id)
                key_slot_tag = repo.retrieveKeySlotTag(pkg_id)
                if key_slot_tag:
                    key_slot_tags.add(key_slot_tag)

            # drop conflicting packages that are being replaced by the
            # install queue itself (same key/slot/tag)
            filtered = set()
            for package_id in package_ids:
                inst_key_slot_tag = inst_repo.retrieveKeySlotTag(package_id)
                if inst_key_slot_tag in key_slot_tags:
                    const_debug_write(
                        __name__,
                        "_remove_conflict_phase: %s is being installed, skipping." % (
                            inst_key_slot_tag,))
                    filtered.add(package_id)

            key_slot_tags.clear() # help Python gc, FWIW.
            package_ids = [x for x in package_ids if x not in filtered]

            if not package_ids:
                const_debug_write(
                    __name__,
                    "_remove_conflict_phase: no package_ids left after "
                    "install_queue filtering.")
                return 0

        # make sure to run this without locks, or deadlock happenz
        # (the removal actions below take the repository locks themselves)
        factory = self._entropy.PackageActionFactory()
        for package_id in package_ids:

            pkg = factory.get(
                factory.REMOVE_ACTION,
                (package_id, inst_repo.name),
                opts = self._meta['remove_metaopts'])
            pkg.set_xterm_header(self._xterm_header)

            exit_st = pkg.start()
            pkg.finalize()
            if exit_st != 0:
                return exit_st

        return 0
    def _unpack_package(self, package_path, image_dir, pkg_dbpath):
        """
        Effectively unpack the package tarballs.

        @param package_path: path to the package file on disk
        @param image_dir: directory where package content is extracted
        @param pkg_dbpath: destination path for the embedded Entropy
            metadata database, or None for extra package files (which
            carry no edb payload)
        @return: 0 on success, non-zero otherwise
        """
        txt = "%s: %s" % (
            blue(_("Unpacking")),
            red(os.path.basename(package_path)),
        )
        self._entropy.output(
            txt,
            importance = 1,
            level = "info",
            header = red(" ## ")
        )

        self._entropy.logger.log(
            "[Package]",
            etpConst['logging']['normal_loglevel_id'],
            "Unpacking package: %s" % (package_path,)
        )

        # removed in the meantime? fail.
        # this is just a safety measure, but won't do anything
        # against races.
        if not os.path.isfile(package_path):
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "Error, package was removed: %s" % (package_path,)
            )
            return 1

        # make sure image_dir always exists
        # pkgs not providing any file would cause image_dir
        # to not be created by uncompress_tarball
        try:
            os.makedirs(image_dir, 0o755)
        except OSError as err:
            # pre-existing directory is fine; anything else is fatal
            if err.errno != errno.EEXIST:
                self._entropy.logger.log(
                    "[Package]", etpConst['logging']['normal_loglevel_id'],
                    "Unable to mkdir: %s, error: %s" % (
                        image_dir, repr(err),)
                )
                self._entropy.output(
                    "%s: %s" % (brown(_("Unpack error")), err.errno,),
                    importance = 1,
                    level = "error",
                    header = red(" ## ")
                )
                return 1

        # pkg_dbpath is only non-None for the base package file
        # extra package files don't carry any other edb information
        if pkg_dbpath is not None:
            # extract entropy database from package file
            # in order to avoid having to read content data
            # from the repository database, which, in future
            # is allowed to not provide such info.
            pkg_dbdir = os.path.dirname(pkg_dbpath)
            try:
                os.makedirs(pkg_dbdir, 0o755)
            except OSError as err:
                if err.errno != errno.EEXIST:
                    raise
            # extract edb
            dump_exit_st = entropy.tools.dump_entropy_metadata(
                package_path, pkg_dbpath)
            if not dump_exit_st:
                # error during entropy db extraction from package file
                # might be because edb entry point is not found or
                # because there is not enough space for it
                self._entropy.logger.log(
                    "[Package]", etpConst['logging']['normal_loglevel_id'],
                    "Unable to dump edb for: " + pkg_dbpath
                )
                self._entropy.output(
                    brown(_("Unable to find Entropy metadata in package")),
                    importance = 1,
                    level = "error",
                    header = red(" ## ")
                )
                return 1

        try:
            exit_st = entropy.tools.uncompress_tarball(
                package_path,
                extract_path = image_dir,
                catch_empty = True
            )
        except EOFError as err:
            # likely a truncated download
            self._entropy.logger.log(
                "[Package]", etpConst['logging']['normal_loglevel_id'],
                "EOFError on " + package_path + " " + \
                repr(err)
            )
            entropy.tools.print_traceback()
            # try again until unpack_tries goes to 0
            exit_st = 1
        except Exception as err:
            # deliberately broad: any unpack failure is reported as a
            # non-zero exit status rather than propagated
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "Ouch! error while unpacking " + \
                package_path + " " + repr(err)
            )
            entropy.tools.print_traceback()
            # try again until unpack_tries goes to 0
            exit_st = 1

        if exit_st != 0:
            self._entropy.logger.log(
                "[Package]", etpConst['logging']['normal_loglevel_id'],
                "Unable to unpack: %s" % (package_path,)
            )
            self._entropy.output(
                brown(_("Unable to unpack package")),
                importance = 1,
                level = "error",
                header = red(" ## ")
            )

        return exit_st
523 - def _fill_image_dir(self, merge_from, image_dir):
524 """ 525 Fill the image directory with content from a filesystme path. 526 """ 527 repo = self._entropy.open_repository(self._repository_id) 528 # this is triggered by merge_from pkgmeta metadata 529 # even if repositories are allowed to not have content 530 # metadata, in this particular case, it is mandatory 531 contents = repo.retrieveContentIter( 532 self._package_id, 533 order_by = "file") 534 535 for path, ftype in contents: 536 # convert back to filesystem str 537 encoded_path = path 538 path = os.path.join(merge_from, encoded_path[1:]) 539 topath = os.path.join(image_dir, encoded_path[1:]) 540 path = const_convert_to_rawstring(path) 541 topath = const_convert_to_rawstring(topath) 542 543 try: 544 exist = os.lstat(path) 545 except OSError: 546 continue # skip file 547 548 if "dir" == ftype and \ 549 not stat.S_ISDIR(exist.st_mode) and \ 550 os.path.isdir(path): 551 # workaround for directory symlink issues 552 path = os.path.realpath(path) 553 554 copystat = False 555 # if our directory is a symlink instead, then copy the symlink 556 if os.path.islink(path): 557 tolink = os.readlink(path) 558 if os.path.islink(topath): 559 os.remove(topath) 560 os.symlink(tolink, topath) 561 elif os.path.isdir(path): 562 try: 563 os.makedirs(topath) 564 copystat = True 565 except OSError as err: 566 if err.errno != errno.EEXIST: 567 raise 568 elif os.path.isfile(path): 569 if os.path.isfile(topath): 570 os.remove(topath) # should never happen 571 shutil.copy2(path, topath) 572 copystat = True 573 574 if copystat: 575 user = os.stat(path)[stat.ST_UID] 576 group = os.stat(path)[stat.ST_GID] 577 os.chown(topath, user, group) 578 shutil.copystat(path, topath)
579
580 - def _merge_phase(self):
581 """ 582 Execute the merge (from) phase. 583 """ 584 xterm_title = "%s %s: %s" % ( 585 self._xterm_header, 586 _("Merging"), 587 self._meta['atom'], 588 ) 589 self._entropy.set_title(xterm_title) 590 591 txt = "%s: %s" % ( 592 blue(_("Merging package")), 593 red(self._meta['atom']), 594 ) 595 self._entropy.output( 596 txt, 597 importance = 1, 598 level = "info", 599 header = red(" ## ") 600 ) 601 self._entropy.logger.log( 602 "[Package]", 603 etpConst['logging']['normal_loglevel_id'], 604 "Merging package: %s" % (self._meta['atom'],) 605 ) 606 607 self._fill_image_dir(self._meta['merge_from'], 608 self._meta['imagedir']) 609 spm_class = self._entropy.Spm_class() 610 return spm_class.entropy_install_unpack_hook(self._entropy, 611 self._meta)
612
    def _unpack_phase(self):
        """
        Execute the unpack phase.

        Unpacks the main package file and every extra download (e.g.
        splitdebug payloads) into the image directory, holding a shared
        path lock on each file while it is being read. Returns 0 on
        success or a non-zero exit status.
        """
        xterm_title = "%s %s: %s" % (
            self._xterm_header,
            _("Unpacking"),
            self._meta['download'],
        )
        self._entropy.set_title(xterm_title)

        def _unpack_error(exit_st):
            # report a user-visible unpack error with the given status
            msg = _("An error occurred while trying to unpack the package")
            errormsg = "%s. %s. %s: %s" % (
                red(msg),
                red(_("Check if your system is healthy")),
                blue(_("Error")),
                exit_st,
            )
            self._entropy.output(
                errormsg,
                importance = 1,
                level = "error",
                header = red(" ## ")
            )

        # all acquired locks are closed in the finally block below,
        # whichever return path is taken
        locks = []
        try:
            download_path = self._meta['pkgpath']
            lock = self.path_lock(download_path)
            locks.append(lock)

            with lock.shared():

                # the file may have vanished between download and unpack
                if not self._stat_path(download_path):
                    const_debug_write(
                        __name__,
                        "_unpack_phase: %s vanished" % (
                            download_path,))
                    _unpack_error(2)
                    return 2

                exit_st = self._unpack_package(
                    download_path,
                    self._meta['imagedir'],
                    self._meta['pkgdbpath'])

                if exit_st != 0:
                    const_debug_write(
                        __name__,
                        "_unpack_phase: %s unpack error: %s" % (
                            download_path, exit_st))
                    _unpack_error(exit_st)
                    return exit_st

            for extra_download in self._meta['extra_download']:
                download = extra_download['download']
                download_path = self.get_standard_fetch_disk_path(download)
                extra_lock = self.path_lock(download_path)
                locks.append(extra_lock)

                with extra_lock.shared():
                    if not self._stat_path(download_path):
                        const_debug_write(
                            __name__,
                            "_unpack_phase: %s vanished" % (
                                download_path,))
                        _unpack_error(2)
                        return 2

                    # extra files carry no edb payload, hence pkg_dbpath
                    # is None here
                    exit_st = self._unpack_package(
                        download_path,
                        self._meta['imagedir'],
                        None)

                    if exit_st != 0:
                        const_debug_write(
                            __name__,
                            "_unpack_phase: %s unpack error: %s" % (
                                download_path, exit_st,))
                        _unpack_error(exit_st)
                        return exit_st

        finally:
            for l in locks:
                l.close()

        spm_class = self._entropy.Spm_class()
        # call Spm unpack hook
        return spm_class.entropy_install_unpack_hook(self._entropy,
            self._meta)
705 - def _setup_package_phase(self):
706 """ 707 Execute the package setup phase. 708 """ 709 xterm_title = "%s %s: %s" % ( 710 self._xterm_header, 711 _("Setup"), 712 self._meta['atom'], 713 ) 714 self._entropy.set_title(xterm_title) 715 716 data = self._get_install_trigger_data() 717 trigger = Trigger( 718 self._entropy, 719 self.NAME, 720 "setup", 721 data, 722 data) 723 724 exit_st = 0 725 ack = trigger.prepare() 726 if ack: 727 exit_st = trigger.run() 728 trigger.kill() 729 730 if exit_st != 0: 731 return exit_st 732 733 return 0
734
735 - def _pre_install_phase(self):
736 """ 737 Execute the pre-install phase. 738 """ 739 xterm_title = "%s %s: %s" % ( 740 self._xterm_header, 741 _("Pre-install"), 742 self._meta['atom'], 743 ) 744 self._entropy.set_title(xterm_title) 745 746 data = self._get_install_trigger_data() 747 trigger = Trigger( 748 self._entropy, 749 self.NAME, 750 "preinstall", 751 data, 752 data) 753 754 exit_st = 0 755 ack = trigger.prepare() 756 if ack: 757 exit_st = trigger.run() 758 trigger.kill() 759 760 return exit_st
761
    def _tarball_ownership_fixup_phase(self):
        """
        Execute the tarball file ownership fixup phase.
        New uid or gids could have been created after the setup phase,
        so re-apply the ownership recorded in each package tarball to
        the already unpacked image directory. Returns 0 on success,
        1 on error.
        """
        # NOTE: fixup permissions in the image directory
        # the setup phase could have created additional users and groups
        package_paths = [self._meta['pkgpath']]
        for extra_download in self._meta['extra_download']:
            package_paths.append(
                self.get_standard_fetch_disk_path(extra_download['download'])
            )

        for package_path in package_paths:
            # one shared path lock per package file, always closed in
            # the finally block
            lock = None

            try:
                lock = self.path_lock(package_path)
                with lock.shared():

                    # package file may have been removed meanwhile
                    if not self._stat_path(package_path):
                        const_debug_write(
                            __name__,
                            "_tarball_ownership_fixup_phase: %s vanished" % (
                                package_path,))

                        self._entropy.output(
                            "%s: vanished" % (
                                brown(_("Error during package files "
                                        "permissions setup"))
                                ,),
                            importance = 1,
                            level = "error",
                            header = darkred(" !!! ")
                        )
                        return 1

                    try:
                        entropy.tools.apply_tarball_ownership(
                            package_path, self._meta['imagedir'])
                    except IOError as err:
                        msg = "%s: %s" % (
                            brown(_("Error during package files "
                                    "permissions setup")),
                            err,)

                        self._entropy.output(
                            msg,
                            importance = 1,
                            level = "error",
                            header = darkred(" !!! ")
                        )
                        return 1

            finally:
                if lock is not None:
                    lock.close()

        return 0
822 - def _get_install_trigger_data(self):
823 """ 824 Get the metadata used during removal phases by Trigger. 825 """ 826 repo = self._entropy.open_repository(self._repository_id) 827 828 data = {} 829 data.update(repo.getTriggerData(self._package_id)) 830 831 splitdebug_metadata = self._get_splitdebug_metadata() 832 data.update(splitdebug_metadata) 833 834 data['unpackdir'] = self._meta['unpackdir'] 835 data['imagedir'] = self._meta['imagedir'] 836 837 data['affected_directories'] = self._meta['affected_directories'] 838 data['affected_infofiles'] = self._meta['affected_infofiles'] 839 data['spm_repository'] = repo.retrieveSpmRepository(self._package_id) 840 data['accept_license'] = self._get_licenses(repo, self._package_id) 841 842 # replace current empty "content" metadata info 843 # content metadata is required by 844 # _spm_install_package() -> Spm.add_installed_package() 845 # in case of injected packages (SPM metadata might be 846 # incomplete). 847 data['content'] = self._meta.get('content', data['content']) 848 849 # SPM hook 850 data.update(self._meta['__install_trigger__']) 851 852 return data
853
854 - def _pre_remove_package_unlocked(self, data):
855 """ 856 Execute the pre-remove phase. 857 """ 858 xterm_title = "%s %s: %s" % ( 859 self._xterm_header, 860 _("Pre-remove"), 861 self._meta['atom'], 862 ) 863 self._entropy.set_title(xterm_title) 864 865 trigger = Trigger( 866 self._entropy, 867 self.NAME, 868 "preremove", 869 data, 870 self._get_install_trigger_data()) 871 872 exit_st = 0 873 ack = trigger.prepare() 874 if ack: 875 exit_st = trigger.run() 876 trigger.kill() 877 878 return exit_st
879
    def _install_clean_unlocked(self, inst_repo, installed_package_id,
                                clean_content, removecontent_file,
                                remove_atom, removed_libs,
                                config_protect_metadata):
        """
        Cleanup package files not used anymore by newly installed version.
        This is part of the atomic install, which overwrites the live fs with
        new files and removes old afterwards.

        @param inst_repo: the installed packages repository
        @param installed_package_id: package id inside inst_repo
        @param clean_content: whether to remove the old version's files
        @param removecontent_file: path of the file listing content to
            remove
        @param remove_atom: atom string of the package version removed
        @param removed_libs: libraries dropped by the upgrade, candidates
            for library preservation
        @param config_protect_metadata: dict carrying the
            'config_protect+mask' paths used to protect config files
        @return: 0 (always succeeds, removal is best-effort)
        """
        sys_root = self._get_system_root(self._meta)

        preserved_mgr = preservedlibs.PreservedLibraries(
            inst_repo, installed_package_id,
            removed_libs, root = sys_root)

        if clean_content:
            self._entropy.output(
                blue(_("Cleaning previously installed application data.")),
                importance = 1,
                level = "info",
                header = red(" ## ")
            )

            self._remove_content_from_system(
                inst_repo,
                remove_atom,
                self._meta['removeconfig'],
                sys_root,
                config_protect_metadata['config_protect+mask'],
                removecontent_file,
                self._meta['already_protected_config_files'],
                self._meta['affected_directories'],
                self._meta['affected_infofiles'],
                preserved_mgr)

        # garbage collect preserved libraries that are no longer needed
        self._garbage_collect_preserved_libs(preserved_mgr)

        return 0
920 - def _post_remove_package_unlocked(self, data):
921 """ 922 Execute the post-remove phase. 923 """ 924 xterm_title = "%s %s: %s" % ( 925 self._xterm_header, 926 _("Post-remove"), 927 self._meta['atom'], 928 ) 929 self._entropy.set_title(xterm_title) 930 931 trigger = Trigger( 932 self._entropy, 933 self.NAME, 934 "postremove", 935 data, 936 self._get_install_trigger_data()) 937 938 exit_st = 0 939 ack = trigger.prepare() 940 if ack: 941 exit_st = trigger.run() 942 trigger.kill() 943 944 return exit_st
945
946 - def _post_remove_install_package_unlocked(self, atom):
947 """ 948 Execute the post-remove SPM package metadata phase. 949 """ 950 self._entropy.logger.log( 951 "[Package]", 952 etpConst['logging']['normal_loglevel_id'], 953 "Remove old package (spm data): %s" % (atom,) 954 ) 955 956 return self._spm_remove_package(atom, self._meta)
957
958 - def _install_spm_package_unlocked(self, inst_repo, installed_package_id):
959 """ 960 Execute the installation of SPM package metadata. 961 """ 962 spm = self._entropy.Spm() 963 964 self._entropy.logger.log( 965 "[Package]", 966 etpConst['logging']['normal_loglevel_id'], 967 "Installing new SPM entry: %s" % (self._meta['atom'],) 968 ) 969 970 spm_uid = spm.add_installed_package(self._meta) 971 if spm_uid != -1: 972 inst_repo.insertSpmUid(installed_package_id, spm_uid) 973 inst_repo.commit() 974 975 return 0
976
977 - def _post_install_phase(self):
978 """ 979 Execute the post-install phase. 980 """ 981 xterm_title = "%s %s: %s" % ( 982 self._xterm_header, 983 _("Post-install"), 984 self._meta['atom'], 985 ) 986 self._entropy.set_title(xterm_title) 987 988 data = self._get_install_trigger_data() 989 trigger = Trigger( 990 self._entropy, 991 self.NAME, 992 "postinstall", 993 data, 994 data) 995 996 exit_st = 0 997 ack = trigger.prepare() 998 if ack: 999 exit_st = trigger.run() 1000 trigger.kill() 1001 1002 return exit_st
1003
1004 - def _cleanup_phase(self):
1005 """ 1006 Execute the cleanup phase. 1007 """ 1008 xterm_title = "%s %s: %s" % ( 1009 self._xterm_header, 1010 _("Cleaning"), 1011 self._meta['atom'], 1012 ) 1013 self._entropy.set_title(xterm_title) 1014 1015 txt = "%s: %s" % ( 1016 blue(_("Cleaning")), 1017 red(self._meta['atom']), 1018 ) 1019 self._entropy.output( 1020 txt, 1021 importance = 1, 1022 level = "info", 1023 header = red(" ## ") 1024 ) 1025 1026 # shutil.rmtree wants raw strings, otherwise it will explode 1027 unpack_dir = const_convert_to_rawstring(self._meta['unpackdir']) 1028 1029 # best-effort below. 1030 try: 1031 shutil.rmtree(unpack_dir, True) 1032 except shutil.Error as err: 1033 self._entropy.logger.log( 1034 "[Package]", etpConst['logging']['normal_loglevel_id'], 1035 "WARNING!!! Failed to cleanup directory %s," \ 1036 " error: %s" % (unpack_dir, err,)) 1037 try: 1038 os.rmdir(unpack_dir) 1039 except OSError: 1040 pass 1041 1042 return 0
1043
    def _filter_out_files_installed_on_diff_path(self, content_file,
                                                 installed_content):
        """
        Use case: if a package provided files in /lib then, a new version
        of that package moved the same files under /lib64, we need to check
        if both directory paths solve to the same inode and if so,
        add to our set that we're going to return.

        @param content_file: path to the on-disk content list that will
            be filtered in place via Content.filter_content_file()
        @param installed_content: collection of paths installed by the
            new package version
        """
        sys_root = self._get_system_root(self._meta)
        second_pass_removal = set()

        if not installed_content:
            # nothing to filter, no-op
            return

        def _main_filter(_path):
            # resolve the parent directory through symlinks; if the
            # resolved path was just installed, drop it from the content
            # file and remember it for the second pass
            item_dir = os.path.dirname("%s%s" % (
                sys_root, _path,))
            item = os.path.join(
                os.path.realpath(item_dir),
                os.path.basename(_path))
            if item in installed_content:
                second_pass_removal.add(item)
                return False
            return True

        # first pass, remove direct matches, schedule a second pass
        # list of files
        Content.filter_content_file(content_file, _main_filter)

        if not second_pass_removal:
            # done then
            return

        # second pass, drop remaining files
        # unfortunately, this is the only way to work it out
        # with iterators
        def _filter(_path):
            return _path not in second_pass_removal
        Content.filter_content_file(content_file, _filter)
    def _add_installed_package_unlocked(self, inst_repo, removecontent_file,
                                        items_installed, items_not_installed):
        """
        For internal use only.
        Copy package from repository to installed packages one.

        @param inst_repo: the installed packages repository (the caller is
            expected to hold its exclusive lock)
        @param removecontent_file: path to the file listing the content of
            the package being replaced, or None when nothing is replaced
        @param items_installed: set of paths actually merged to the live
            filesystem (filled by _move_image_to_system_unlocked)
        @param items_not_installed: set of paths that were skipped (e.g.
            splitdebug files when splitdebug is disabled)
        @return: the identifier of the package added to inst_repo
        """

        def _merge_removecontent(inst_repo, repo, _package_id):
            # Merge the content diff between the old installed package and
            # the new one into removecontent_file, so that only paths no
            # longer provided get scheduled for removal.

            # nothing to do if there is no content to remove
            if removecontent_file is None:
                return

            # determine if there is a package to remove first
            remove_package_id = self._get_remove_package_id_unlocked(inst_repo)
            if remove_package_id == -1:
                return

            # NOTE: this could be a source of memory consumption
            # but generally, the difference between two contents
            # is really small
            content_diff = list(inst_repo.contentDiff(
                remove_package_id,
                repo,
                _package_id,
                extended=True))

            if content_diff:

                # reverse-order compare
                def _cmp_func(_path, _spath):
                    if _path > _spath:
                        return -1
                    elif _path == _spath:
                        return 0
                    return 1

                # must be sorted, and in reverse order
                # or the merge step won't work
                content_diff.sort(reverse=True)

                Content.merge_content_file(
                    removecontent_file,
                    content_diff, _cmp_func)

        smart_pkg = self._meta['smartpackage']
        repo = self._entropy.open_repository(self._repository_id)

        splitdebug, splitdebug_dirs = (
            self._meta['splitdebug'],
            self._meta['splitdebug_dirs'])

        if smart_pkg or self._meta['merge_from']:
            # smart packages / merge-from: metadata and content come straight
            # from the configured repository object

            data = repo.getPackageData(self._package_id,
                content_insert_formatted = True,
                get_changelog = False, get_content = False,
                get_content_safety = False)

            content = repo.retrieveContentIter(
                self._package_id)
            content_file = self._generate_content_file(
                content, package_id = self._package_id,
                filter_splitdebug = True,
                splitdebug = splitdebug,
                splitdebug_dirs = splitdebug_dirs)

            content_safety = repo.retrieveContentSafetyIter(
                self._package_id)
            content_safety_file = self._generate_content_safety_file(
                content_safety)

            _merge_removecontent(inst_repo, repo, self._package_id)

        else:

            # normal repositories: open the embedded package database
            # shipped with the downloaded package file

            data = repo.getPackageData(self._package_id,
                get_content = False, get_changelog = False)

            # indexing_override = False : no need to index tables
            # xcache = False : no need to use on-disk cache
            # skipChecks = False : creating missing tables is unwanted,
            # and also no foreign keys update
            # readOnly = True: no need to open in write mode
            pkg_repo = self._entropy.open_generic_repository(
                self._meta['pkgdbpath'], skip_checks = True,
                indexing_override = False, read_only = True,
                xcache = False)

            # it is safe to consider that package dbs coming from repos
            # contain only one entry
            pkg_package_id = sorted(pkg_repo.listAllPackageIds(),
                reverse = True)[0]
            content = pkg_repo.retrieveContentIter(
                pkg_package_id)
            content_file = self._generate_content_file(
                content, package_id = self._package_id,
                filter_splitdebug = True,
                splitdebug = splitdebug,
                splitdebug_dirs = splitdebug_dirs)

            # setup content safety metadata, get from package
            content_safety = pkg_repo.retrieveContentSafetyIter(
                pkg_package_id)
            content_safety_file = self._generate_content_safety_file(
                content_safety)

            _merge_removecontent(inst_repo, pkg_repo, pkg_package_id)

            pkg_repo.close()

        # items_installed is useful to avoid the removal of installed
        # files by __remove_package just because
        # there's a difference in the directory path, perhaps,
        # which is not handled correctly by
        # EntropyRepository.contentDiff for obvious reasons
        # (think about stuff in /usr/lib and /usr/lib64,
        # where the latter is just a symlink to the former)
        # --
        # fix removecontent, need to check if we just installed files
        # that resolves at the same directory path (different symlink)
        if removecontent_file is not None:
            self._filter_out_files_installed_on_diff_path(
                removecontent_file, items_installed)

        # filter out files not installed from content metadata
        # these include splitdebug files, when splitdebug is
        # disabled.
        if items_not_installed:
            def _filter(_path):
                return _path not in items_not_installed
            Content.filter_content_file(
                content_file, _filter)

        # always set data['injected'] to False
        # installed packages database SHOULD never have more
        # than one package for scope (key+slot)
        data['injected'] = False
        # spm counter will be set in self._install_package_into_spm_database()
        data['counter'] = -1
        # branch must be always set properly, it could happen it's not
        # when installing packages through their .tbz2s
        data['branch'] = self._settings['repositories']['branch']
        # there is no need to store needed paths into db
        if "needed_paths" in data:
            del data['needed_paths']
        # there is no need to store changelog data into db
        if "changelog" in data:
            del data['changelog']
        # we don't want it to be added now, we want to add install source
        # info too.
        if "original_repository" in data:
            del data['original_repository']
        # rewrite extra_download metadata with the currently provided,
        # and accepted extra_download items (in case of splitdebug being
        # disable, we're not going to add those entries, for example)
        data['extra_download'] = self._meta['extra_download']

        data['content'] = None
        data['content_safety'] = None
        try:
            # now we are ready to craft a 'content' iter object
            data['content'] = Content.FileContentReader(
                content_file)
            data['content_safety'] = Content.FileContentSafetyReader(
                content_safety_file)
            package_id = inst_repo.handlePackage(
                data, revision = data['revision'],
                formattedContent = True)
        finally:
            # make sure both readers are closed even if handlePackage
            # raises; a failed close() still clears the reference
            if data['content'] is not None:
                try:
                    data['content'].close()
                    data['content'] = None
                except (OSError, IOError):
                    data['content'] = None
            if data['content_safety'] is not None:
                try:
                    data['content_safety'].close()
                    data['content_safety'] = None
                except (OSError, IOError):
                    data['content_safety'] = None

        # update datecreation
        ctime = time.time()
        inst_repo.setCreationDate(package_id, str(ctime))

        # add idpk to the installedtable
        inst_repo.dropInstalledPackageFromStore(package_id)
        inst_repo.storeInstalledPackage(package_id,
            self._repository_id, self._meta['install_source'])

        automerge_data = self._meta.get('configprotect_data')
        if automerge_data:
            inst_repo.insertAutomergefiles(package_id, automerge_data)

        inst_repo.commit()

        # replace current empty "content" metadata info
        # content metadata is required by
        # _spm_install_package() -> Spm.add_installed_package()
        # in case of injected packages (SPM metadata might be
        # incomplete).
        self._meta['content'] = Content.FileContentReader(content_file)

        return package_id
1292 - def _install_package_unlocked(self, inst_repo, remove_package_id):
1293 """ 1294 Execute the package installation code. 1295 """ 1296 self._entropy.clear_cache() 1297 1298 self._entropy.logger.log( 1299 "[Package]", 1300 etpConst['logging']['normal_loglevel_id'], 1301 "Installing package: %s" % (self._meta['atom'],) 1302 ) 1303 1304 if remove_package_id != -1: 1305 am_files = inst_repo.retrieveAutomergefiles( 1306 remove_package_id, 1307 get_dict = True) 1308 self._meta['already_protected_config_files'].clear() 1309 self._meta['already_protected_config_files'].update(am_files) 1310 1311 # items_*installed will be filled by _move_image_to_system 1312 # then passed to _add_installed_package() 1313 items_installed = set() 1314 items_not_installed = set() 1315 exit_st = self._move_image_to_system_unlocked( 1316 inst_repo, remove_package_id, 1317 items_installed, items_not_installed) 1318 1319 if exit_st != 0: 1320 txt = "%s. %s. %s: %s" % ( 1321 red(_("An error occurred while trying to install the package")), 1322 red(_("Check if your system is healthy")), 1323 blue(_("Error")), 1324 exit_st, 1325 ) 1326 self._entropy.output( 1327 txt, 1328 importance = 1, 1329 level = "error", 1330 header = red(" ## ") 1331 ) 1332 return exit_st, None, None 1333 1334 txt = "%s: %s" % ( 1335 blue(_("Updating installed packages repository")), 1336 teal(self._meta['atom']), 1337 ) 1338 self._entropy.output( 1339 txt, 1340 importance = 1, 1341 level = "info", 1342 header = red(" ## ") 1343 ) 1344 1345 # generate the files and directories that would be removed 1346 removecontent_file = None 1347 if remove_package_id != -1: 1348 removecontent_file = self._generate_content_file( 1349 inst_repo.retrieveContentIter( 1350 remove_package_id, 1351 order_by="file", 1352 reverse=True) 1353 ) 1354 1355 package_id = self._add_installed_package_unlocked( 1356 inst_repo, removecontent_file, 1357 items_installed, items_not_installed) 1358 1359 return 0, package_id, removecontent_file
1360
1361 - def _install_phase(self):
1362 """ 1363 Execute the install phase. 1364 """ 1365 xterm_title = "%s %s: %s" % ( 1366 self._xterm_header, 1367 _("Installing"), 1368 self._meta['atom'], 1369 ) 1370 self._entropy.set_title(xterm_title) 1371 1372 txt = "%s: %s" % ( 1373 blue(_("Installing package")), 1374 red(self._meta['atom']), 1375 ) 1376 self._entropy.output( 1377 txt, 1378 importance = 1, 1379 level = "info", 1380 header = red(" ## ") 1381 ) 1382 1383 self._entropy.output( 1384 "[%s]" % ( 1385 purple(self._meta['description']), 1386 ), 1387 importance = 1, 1388 level = "info", 1389 header = red(" ## ") 1390 ) 1391 1392 if self._meta['splitdebug']: 1393 if self._meta.get('splitdebug_pkgfile'): 1394 txt = "[%s]" % ( 1395 teal(_("unsupported splitdebug usage (package files)")),) 1396 level = "warning" 1397 else: 1398 txt = "[%s]" % ( 1399 teal(_("<3 debug files installation enabled <3")),) 1400 level = "info" 1401 self._entropy.output( 1402 txt, 1403 importance = 1, 1404 level = level, 1405 header = red(" ## ") 1406 ) 1407 1408 inst_repo = self._entropy.installed_repository() 1409 with inst_repo.exclusive(): 1410 return self._install_phase_unlocked(inst_repo)
1411
1412 - def _install_phase_unlocked(self, inst_repo):
1413 """ 1414 _install_phase(), assuming that the installed packages repository 1415 lock is held in exclusive mode. 1416 """ 1417 remove_package_id = self._get_remove_package_id_unlocked(inst_repo) 1418 1419 remove_atom = None 1420 if remove_package_id != -1: 1421 remove_atom = inst_repo.retrieveAtom(remove_package_id) 1422 1423 # save trigger data 1424 remove_trigger_data = None 1425 if remove_package_id != -1: 1426 remove_trigger_data = self._get_remove_trigger_data( 1427 inst_repo, remove_package_id) 1428 1429 if remove_package_id == -1: 1430 removed_libs = frozenset() 1431 else: 1432 repo = self._entropy.open_repository(self._repository_id) 1433 repo_libs = repo.retrieveProvidedLibraries(self._package_id) 1434 inst_libs = inst_repo.retrieveProvidedLibraries( 1435 remove_package_id) 1436 removed_libs = frozenset(inst_libs - repo_libs) 1437 1438 config_protect_metadata = None 1439 if remove_package_id != -1: 1440 config_protect_metadata = self._get_config_protect_metadata( 1441 inst_repo, remove_package_id, _metadata = self._meta) 1442 1443 # after this point, old package metadata is no longer available 1444 1445 (exit_st, installed_package_id, 1446 removecontent_file) = self._install_package_unlocked( 1447 inst_repo, remove_package_id) 1448 if exit_st != 0: 1449 return exit_st 1450 1451 if remove_trigger_data: 1452 exit_st = self._pre_remove_package_unlocked(remove_trigger_data) 1453 if exit_st != 0: 1454 return exit_st 1455 1456 clean_content = remove_package_id != -1 1457 exit_st = self._install_clean_unlocked( 1458 inst_repo, installed_package_id, 1459 clean_content, removecontent_file, 1460 remove_atom, removed_libs, 1461 config_protect_metadata) 1462 if exit_st != 0: 1463 return exit_st 1464 1465 if remove_trigger_data: 1466 exit_st = self._post_remove_package_unlocked( 1467 remove_trigger_data) 1468 if exit_st != 0: 1469 return exit_st 1470 1471 if remove_package_id != -1: 1472 exit_st = self._post_remove_install_package_unlocked( 1473 remove_atom) 1474 
if exit_st != 0: 1475 return exit_st 1476 1477 exit_st = self._install_spm_package_unlocked( 1478 inst_repo, installed_package_id) 1479 if exit_st != 0: 1480 return exit_st 1481 1482 return 0
1483
1484 - def _handle_install_collision_protect_unlocked(self, inst_repo, 1485 remove_package_id, 1486 tofile, 1487 todbfile):
1488 """ 1489 Handle files collition protection for the install phase. 1490 """ 1491 1492 avail = inst_repo.isFileAvailable( 1493 const_convert_to_unicode(todbfile), 1494 get_id = True) 1495 1496 if (remove_package_id not in avail) and avail: 1497 mytxt = darkred(_("Collision found during install for")) 1498 mytxt += "%s %s - %s" % ( 1499 blue(_("QA:")), 1500 blue(tofile), 1501 darkred(_("cannot overwrite")), 1502 ) 1503 self._entropy.output( 1504 mytxt, 1505 importance = 1, 1506 level = "warning", 1507 header = darkred(" ## ") 1508 ) 1509 self._entropy.logger.log( 1510 "[Package]", 1511 etpConst['logging']['normal_loglevel_id'], 1512 "WARNING!!! Collision found during install " \ 1513 "for %s - cannot overwrite" % (tofile,) 1514 ) 1515 return False 1516 1517 return True
1518
    def _move_image_to_system_unlocked(self, inst_repo, remove_package_id,
                                       items_installed, items_not_installed):
        """
        Internal method that moves the package image directory to the live
        filesystem.

        Walks the unpacked package image with os.walk and merges first the
        directories (workout_subdir), then the files (workout_file),
        honoring config-protect, collision-protect and splitdebug settings.

        @param inst_repo: the installed packages repository
        @param remove_package_id: identifier of the installed package being
            replaced, -1 if none
        @param items_installed: output set, filled with the (unicode,
            realpath-resolved) paths actually merged to the system
        @param items_not_installed: output set, filled with paths that were
            deliberately skipped (splitdebug content when disabled)
        @return: 0 on success, non-zero (1 or 4) on failure
        """
        metadata = self.metadata()
        repo = self._entropy.open_repository(self._repository_id)
        protect = self._get_config_protect(repo, self._package_id)
        mask = self._get_config_protect(repo, self._package_id,
                                        mask = True)
        protectskip = self._get_config_protect_skip()

        # support for unit testing settings
        sys_root = self._get_system_root(metadata)
        misc_data = self._entropy.ClientSettings()['misc']
        col_protect = misc_data['collisionprotect']
        splitdebug, splitdebug_dirs = metadata['splitdebug'], \
            metadata['splitdebug_dirs']
        info_dirs = self._get_info_directories()

        # setup image_dir properly
        image_dir = metadata['imagedir'][:]
        if not const_is_python3():
            # image_dir comes from unpackdir, which comes from download
            # metadatum, which is utf-8 (conf_encoding)
            image_dir = const_convert_to_rawstring(image_dir,
                from_enctype = etpConst['conf_encoding'])
        movefile = entropy.tools.movefile

        def workout_subdir(currentdir, subdir):
            """
            Materialize one image sub-directory onto the live filesystem,
            fixing up broken symlinks, file-vs-directory and
            symlink-vs-directory mismatches. Returns 0 on success, 4 on
            unrecoverable permission setup errors.
            """

            imagepath_dir = os.path.join(currentdir, subdir)
            rel_imagepath_dir = imagepath_dir[len(image_dir):]
            rootdir = sys_root + rel_imagepath_dir

            # splitdebug (.debug files) support
            # If splitdebug is not enabled, do not create splitdebug directories
            # and move on instead (return)
            if not splitdebug:
                for split_dir in splitdebug_dirs:
                    if rootdir.startswith(split_dir):
                        # also drop item from content metadata. In this way
                        # SPM has in sync information on what the package
                        # content really is.
                        # ---
                        # we should really use unicode
                        # strings for items_not_installed
                        unicode_rootdir = const_convert_to_unicode(rootdir)
                        items_not_installed.add(unicode_rootdir)
                        return 0

            # handle broken symlinks
            if os.path.islink(rootdir) and not os.path.exists(rootdir):
                # broken symlink
                os.remove(rootdir)

            # if our directory is a file on the live system
            elif os.path.isfile(rootdir): # really weird...!

                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! %s is a file when it should be " \
                    "a directory" % (rootdir,)
                )
                mytxt = darkred(_("QA: %s is a file when it should "
                                  "be a directory") % (rootdir,))

                self._entropy.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = red(" !!! ")
                )
                # move the offending file out of the way, atomically,
                # into a temp file sitting in the same directory
                rootdir_dir = os.path.dirname(rootdir)
                rootdir_name = os.path.basename(rootdir)
                tmp_fd, tmp_path = None, None
                try:
                    tmp_fd, tmp_path = const_mkstemp(
                        dir = rootdir_dir, prefix=rootdir_name)
                    os.rename(rootdir, tmp_path)
                finally:
                    if tmp_fd is not None:
                        try:
                            os.close(tmp_fd)
                        except OSError:
                            pass

                self._entropy.output(
                    "%s: %s -> %s" % (
                        darkred(_("File moved")),
                        blue(rootdir),
                        darkred(tmp_path),
                    ),
                    importance = 1,
                    level = "warning",
                    header = brown(" @@ ")
                )

            # if our directory is a symlink instead, then copy the symlink
            if os.path.islink(imagepath_dir):

                # if our live system features a directory instead of
                # a symlink, we should consider removing the directory
                if not os.path.islink(rootdir) and os.path.isdir(rootdir):
                    self._entropy.logger.log(
                        "[Package]",
                        etpConst['logging']['normal_loglevel_id'],
                        "WARNING!!! %s is a directory when it should be " \
                        "a symlink !!" % (rootdir,)
                    )
                    txt = "%s: %s" % (
                        _("QA: symlink expected, directory found"),
                        rootdir,
                    )
                    self._entropy.output(
                        darkred(txt),
                        importance = 1,
                        level = "warning",
                        header = red(" !!! ")
                    )

                    return 0

                tolink = os.readlink(imagepath_dir)
                live_tolink = None
                if os.path.islink(rootdir):
                    live_tolink = os.readlink(rootdir)

                if tolink != live_tolink:
                    _symfail = False
                    if os.path.lexists(rootdir):
                        # at this point, it must be a file
                        try:
                            os.remove(rootdir)
                        except OSError as err:
                            _symfail = True
                            # must be atomic, too bad if it fails
                            # NOTE(review): the "[workout_file/0]" tag below
                            # is historical; this code lives in
                            # workout_subdir
                            self._entropy.logger.log(
                                "[Package]",
                                etpConst['logging']['normal_loglevel_id'],
                                "WARNING!!! Failed to remove %s " \
                                "file ! [workout_file/0]: %s" % (
                                    rootdir, err,
                                )
                            )
                            msg = _("Cannot remove symlink")
                            mytxt = "%s: %s => %s" % (
                                purple(msg),
                                blue(rootdir),
                                repr(err),
                            )
                            self._entropy.output(
                                mytxt,
                                importance = 1,
                                level = "warning",
                                header = brown(" ## ")
                            )
                    if not _symfail:
                        os.symlink(tolink, rootdir)

            elif not os.path.isdir(rootdir):
                # directory not found, we need to create it
                try:
                    # really force a simple mkdir first of all
                    os.mkdir(rootdir)
                except (OSError, IOError) as err:
                    # the only two allowed errors are these
                    if err.errno not in (errno.EEXIST, errno.ENOENT):
                        raise

                    # if the error is about ENOENT, try creating
                    # the whole directory tree and check against races
                    # (EEXIST).
                    if err.errno == errno.ENOENT:
                        try:
                            os.makedirs(rootdir)
                        except (OSError, IOError) as err2:
                            if err2.errno != errno.EEXIST:
                                raise

            if not os.path.islink(rootdir):

                # symlink doesn't need permissions, also
                # until os.walk ends they might be broken
                user = os.stat(imagepath_dir)[stat.ST_UID]
                group = os.stat(imagepath_dir)[stat.ST_GID]
                try:
                    os.chown(rootdir, user, group)
                    shutil.copystat(imagepath_dir, rootdir)
                except (OSError, IOError) as err:
                    self._entropy.logger.log(
                        "[Package]",
                        etpConst['logging']['normal_loglevel_id'],
                        "Error during workdir setup " \
                        "%s, %s, errno: %s" % (
                            rootdir,
                            err,
                            err.errno,
                        )
                    )
                    # skip some errors because we may have
                    # unwritable directories
                    if err.errno not in (
                            errno.EPERM, errno.ENOENT,
                            errno.ENOTDIR):
                        mytxt = "%s: %s, %s, %s" % (
                            brown("Error during workdir setup"),
                            purple(rootdir), err,
                            err.errno
                        )
                        self._entropy.output(
                            mytxt,
                            importance = 1,
                            level = "error",
                            header = darkred(" !!! ")
                        )
                        return 4

            # record the realpath-resolved, unicode form of the directory
            item_dir, item_base = os.path.split(rootdir)
            item_dir = os.path.realpath(item_dir)
            item_inst = os.path.join(item_dir, item_base)
            item_inst = const_convert_to_unicode(item_inst)
            items_installed.add(item_inst)

            return 0


        def workout_file(currentdir, item):
            """
            Merge one image file onto the live filesystem, applying
            collision-protect, config-protect/automerge and splitdebug
            policies. Returns 0 on success (or deliberate skip), 1 when a
            directory sits where a file should go, 4 on move failure.
            """

            fromfile = os.path.join(currentdir, item)
            rel_fromfile = fromfile[len(image_dir):]
            rel_fromfile_dir = os.path.dirname(rel_fromfile)
            tofile = sys_root + rel_fromfile

            rel_fromfile_dir_utf = const_convert_to_unicode(
                rel_fromfile_dir)
            metadata['affected_directories'].add(
                rel_fromfile_dir_utf)

            # account for info files, if any
            if rel_fromfile_dir_utf in info_dirs:
                rel_fromfile_utf = const_convert_to_unicode(
                    rel_fromfile)
                for _ext in self._INFO_EXTS:
                    if rel_fromfile_utf.endswith(_ext):
                        metadata['affected_infofiles'].add(
                            rel_fromfile_utf)
                        break

            # splitdebug (.debug files) support
            # If splitdebug is not enabled, do not create
            # splitdebug directories and move on instead (return)
            if not splitdebug:
                for split_dir in splitdebug_dirs:
                    if tofile.startswith(split_dir):
                        # also drop item from content metadata. In this way
                        # SPM has in sync information on what the package
                        # content really is.
                        # ---
                        # we should really use unicode
                        # strings for items_not_installed
                        unicode_tofile = const_convert_to_unicode(tofile)
                        items_not_installed.add(unicode_tofile)
                        return 0

            if col_protect > 1:
                todbfile = fromfile[len(image_dir):]
                myrc = self._handle_install_collision_protect_unlocked(
                    inst_repo, remove_package_id, tofile, todbfile)
                if not myrc:
                    return 0

            prot_old_tofile = tofile[len(sys_root):]
            # configprotect_data is passed to insertAutomergefiles()
            # which always expects unicode data.
            # revert back to unicode (we previously called encode on
            # image_dir (which is passed to os.walk, which generates
            # raw strings)
            prot_old_tofile = const_convert_to_unicode(prot_old_tofile)

            pre_tofile = tofile[:]
            (in_mask, protected,
             tofile, do_return) = self._handle_config_protect(
                 protect, mask, protectskip, fromfile, tofile)

            # collect new config automerge data
            if in_mask and os.path.exists(fromfile):
                try:
                    prot_md5 = const_convert_to_unicode(
                        entropy.tools.md5sum(fromfile))
                    metadata['configprotect_data'].append(
                        (prot_old_tofile, prot_md5,))
                except (IOError,) as err:
                    self._entropy.logger.log(
                        "[Package]",
                        etpConst['logging']['normal_loglevel_id'],
                        "WARNING!!! Failed to get md5 of %s " \
                        "file ! [workout_file/1]: %s" % (
                            fromfile, err,
                        )
                    )

            # check if it's really necessary to protect file
            if protected:

                # second task
                # prot_old_tofile is always unicode, it must be, see above
                oldprot_md5 = metadata['already_protected_config_files'].get(
                    prot_old_tofile)

                if oldprot_md5:

                    try:
                        in_system_md5 = entropy.tools.md5sum(pre_tofile)
                    except (OSError, IOError) as err:
                        if err.errno != errno.ENOENT:
                            raise
                        in_system_md5 = "?"

                    if oldprot_md5 == in_system_md5:
                        # we can merge it, files, even if
                        # contains changes have not been modified
                        # by the user
                        msg = _("Automerging config file, never modified")
                        mytxt = "%s: %s" % (
                            darkgreen(msg),
                            blue(pre_tofile),
                        )
                        self._entropy.output(
                            mytxt,
                            importance = 1,
                            level = "info",
                            header = red(" ## ")
                        )
                        protected = False
                        do_return = False
                        tofile = pre_tofile

            if do_return:
                return 0

            try:
                from_r_path = os.path.realpath(fromfile)
            except RuntimeError:
                # circular symlink, fuck!
                # really weird...!
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! %s is a circular symlink !!!" % (fromfile,)
                )
                txt = "%s: %s" % (
                    _("QA: circular symlink issue"),
                    const_convert_to_unicode(fromfile),
                )
                self._entropy.output(
                    darkred(txt),
                    importance = 1,
                    level = "warning",
                    header = red(" !!! ")
                )
                from_r_path = fromfile

            try:
                to_r_path = os.path.realpath(tofile)
            except RuntimeError:
                # circular symlink, fuck!
                # really weird...!
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! %s is a circular symlink !!!" % (tofile,)
                )
                mytxt = "%s: %s" % (
                    _("QA: circular symlink issue"),
                    const_convert_to_unicode(tofile),
                )
                self._entropy.output(
                    darkred(mytxt),
                    importance = 1,
                    level = "warning",
                    header = red(" !!! ")
                )
                to_r_path = tofile

            if from_r_path == to_r_path and os.path.islink(tofile):
                # there is a serious issue here, better removing tofile,
                # happened to someone.

                try:
                    # try to cope...
                    os.remove(tofile)
                except (OSError, IOError,) as err:
                    self._entropy.logger.log(
                        "[Package]",
                        etpConst['logging']['normal_loglevel_id'],
                        "WARNING!!! Failed to cope to oddity of %s " \
                        "file ! [workout_file/2]: %s" % (
                            tofile, err,
                        )
                    )

            # if our file is a dir on the live system
            if os.path.isdir(tofile) and not os.path.islink(tofile):

                # really weird...!
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! %s is a directory when it should " \
                    "be a file !!" % (tofile,)
                )

                txt = "%s: %s" % (
                    _("Fatal: file expected, directory found"),
                    const_convert_to_unicode(tofile),
                )
                self._entropy.output(
                    darkred(txt),
                    importance = 1,
                    level = "error",
                    header = red(" !!! ")
                )
                return 1

            # moving file using the raw format
            try:
                done = movefile(fromfile, tofile, src_basedir = image_dir)
            except (IOError,) as err:
                # try to move forward, sometimes packages might be
                # fucked up and contain broken things
                if err.errno not in (errno.ENOENT, errno.EACCES,):
                    raise

                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! Error during file move" \
                    " to system: %s => %s | IGNORED: %s" % (
                        const_convert_to_unicode(fromfile),
                        const_convert_to_unicode(tofile),
                        err,
                    )
                )
                done = True

            if not done:
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! Error during file move" \
                    " to system: %s => %s" % (fromfile, tofile,)
                )
                mytxt = "%s: %s => %s, %s" % (
                    _("QA: file move error"),
                    const_convert_to_unicode(fromfile),
                    const_convert_to_unicode(tofile),
                    _("please report"),
                )
                self._entropy.output(
                    darkred(mytxt),
                    importance = 1,
                    level = "warning",
                    header = red(" !!! ")
                )
                return 4

            # record the realpath-resolved, unicode form of the file path
            item_dir = os.path.realpath(os.path.dirname(tofile))
            item_inst = os.path.join(item_dir, os.path.basename(tofile))
            item_inst = const_convert_to_unicode(item_inst)
            items_installed.add(item_inst)

            if protected and \
                    os.getenv("ENTROPY_CLIENT_ENABLE_OLD_FILEUPDATES"):
                # add to disk cache
                file_updates = self._entropy.PackageFileUpdates()
                file_updates.add(tofile, quiet = True)

            return 0

        # merge data into system
        for currentdir, subdirs, files in os.walk(image_dir):

            # create subdirs
            for subdir in subdirs:
                exit_st = workout_subdir(currentdir, subdir)
                if exit_st != 0:
                    return exit_st

            for item in files:
                move_st = workout_file(currentdir, item)
                if move_st != 0:
                    return move_st

        return 0