
Source Code for Module entropy.core.settings.base

   1  # -*- coding: utf-8 -*- 
   2  """ 
   3   
   4      @author: Fabio Erculiani <[email protected]> 
   5      @contact: [email protected] 
   6      @copyright: Fabio Erculiani 
   7      @license: GPL-2 
   8   
   9      B{Entropy Framework SystemSettings module}. 
  10   
  11      SystemSettings is a singleton, pluggable interface which contains 
  12      all the runtime settings (mostly parsed from configuration files 
  13      and inherited from entropy.const -- which contains almost all the 
  14      default values). 
   15      SystemSettings works as a I{dict} object. Due to limitations of 
   16      multiple inheritance when using the Singleton class, SystemSettings 
   17      ONLY mimics a I{dict}; it is not a subclass of it. 
  18   
  19  """ 
  20  import codecs 
  21  import errno 
  22  import functools 
  23  import hashlib 
  24  import os 
  25  import sys 
  26  import threading 
  27  import warnings 
  28   
  29  from entropy.const import etpConst, etpSys, \ 
  30      const_secure_config_file, const_set_nice_level, \ 
  31      const_convert_to_unicode, const_convert_to_rawstring, \ 
  32      const_debug_write, const_is_python3, const_file_readable 
  33  from entropy.core import Singleton, EntropyPluginStore, BaseConfigParser 
  34  from entropy.cache import EntropyCacher 
  35  from entropy.core.settings.plugins.skel import SystemSettingsPlugin 
  36  from entropy.output import nocolor 
  37  from entropy.i18n import _ 
  38   
  39  import entropy.tools 
   40   
   41   
   42  class RepositoryConfigParser(BaseConfigParser): 
   43      """ 
   44      Entropy .ini-like repository configuration file parser. 
   45   
   46      This is backward compatible with the previous repository 
   47      implementation, in the sense that old repository syntax 
   48      is just ignored. However, mixing old and new statements 
   49      may result in undefined behaviour. 
   50   
   51      This is an example of the new syntax (with a complete listing 
   52      of the supported arguments): 
   53   
   54      [sabayon-limbo] 
   55      desc = Sabayon Linux Official Testing Repository 
   56      repo = http://pkg.sabayon.org 
   57      repo = http://pkg.repo.sabayon.org 
   58      pkg = http://pkg.sabayon.org 
   59      pkg = http://dl.sabayon.org/entropy 
   60      enabled = <true/false> 
   61   
   62      [sabayon-limbo] 
   63      desc = This statement will be ignored. 
   64      repo = This url will be ignored. 
   65      pkg = http://some.more.mirror.org/entropy 
   66      pkg = http://some.more.mirror.net/entropy 
   67   
   68      As you can see, multiple statements for the same repository 
   69      are allowed. However, only the first desc = statement will be 
   70      considered, while there can be as many pkg = and repo = statements 
   71      as you want. 
   72   
   73      Statements description: 
   74      - "desc": the repository description string. 
   75      - "repo": the repository database URL string, plus other parameters 
   76        as supported in the previous configuration file syntax: 
   77        <db url prefix>[#<compression>]. 
   78      - "pkg": the repository packages URL string. This must be a valid URL. 
   79        The supported protocols are those supported by entropy.fetchers. 
   80      - "enabled": if set, its value can be either "true" or "false". The 
   81        default value is "true". It indicates whether the repository is 
   82        configured but currently disabled, or enabled. Please take into 
   83        account that config files in /etc/entropy/repositories.conf.d/ 
   84        starting with "_" are considered to contain disabled repositories. 
   85        This is just provided for convenience. 
   86      - "username": if set, it is used for HTTP Basic Authentication when 
   87        retrieving data from the remote repository. 
   88      - "password": if set, it is used for HTTP Basic Authentication when 
   89        retrieving data from the remote repository. 
   90      - "https_validate_cert": if set to "false", SSL certificate validation 
   91        of the remote repository is disabled. 
   92      """ 
   93   
   94      _SUPPORTED_KEYS = ("desc", "repo", "pkg", "enabled", \ 
   95          "username", "password", "https_validate_cert") 
   96   
   97      _DEFAULT_ENABLED_VALUE = True 
   98   
   99      # Repository configuration file suggested prefix. If config files 
  100      # are prefixed with this string, they can be automatically handled 
  101      # by Entropy. 
  102      FILENAME_PREFIX = "entropy_" 
  103   
  104      def __init__(self, encoding = None): 
  106   
  107      @classmethod 
  108      def _validate_section(cls, match): 
  109          """ 
  110          Reimplemented from BaseConfigParser. 
  111          """ 
  112          # a new repository begins 
  113          groups = match.groups() 
  114          if not groups: 
  115              return 
  116   
  117          candidate = groups[0] 
  118          # Note, candidate must not start with server= 
  119          # as this is used for Entropy Server repositories. 
  120          if not entropy.tools.validate_repository_id(candidate): 
  121              return 
  122          return candidate 
123
124 - def add(self, repository_id, desc, repos, pkgs, enabled = True, 125 username = None, password = None, https_validate_cert = True):
126 """ 127 Add a repository to the repository configuration files directory. 128 Older repository configuration may get overwritten. This method 129 only writes repository configuration in the new .ini format and to 130 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 131 132 @param repository_id: repository identifier 133 @type repository_id: string 134 @param desc: repository description 135 @type desc: string 136 @param repos: list of "repo=" uri dicts (containing "uri" and 137 "dbcformat" keys) 138 @type repos: list 139 @param pkgs: list of packages mirrors uris 140 @type pkgs: list 141 @keyword enabled: True, if the repository is enabled 142 @type enabled: bool 143 @return: True, if success 144 @rtype: bool 145 """ 146 settings = SystemSettings() 147 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 148 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 149 # as per specifications, enabled config files handled by 150 # Entropy Client (see repositories.conf.d/README) start with 151 # entropy_ prefix. 152 base_name = self.FILENAME_PREFIX + repository_id 153 enabled_conf_file = os.path.join(conf_d_dir, base_name) 154 # while disabled config files start with _ 155 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 156 157 self.write(enabled_conf_file, repository_id, desc, repos, pkgs, 158 enabled = enabled, username = username, password = password, 159 https_validate_cert = https_validate_cert) 160 161 # if any disabled entry file is around, kill it with fire! 162 try: 163 os.remove(disabled_conf_file) 164 except OSError as err: 165 if err.errno != errno.ENOENT: 166 raise 167 168 return True
169
170 - def remove(self, repository_id):
171 """ 172 Remove a repository from the repositories configuration files directory. 173 174 This method only removes repository configuration at 175 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 176 177 @param repository_id: repository identifier 178 @type repository_id: string 179 @return: True, if success 180 @rtype: bool 181 """ 182 settings = SystemSettings() 183 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 184 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 185 # as per specifications, enabled config files handled by 186 # Entropy Client (see repositories.conf.d/README) start with 187 # entropy_ prefix. 188 base_name = self.FILENAME_PREFIX + repository_id 189 enabled_conf_file = os.path.join(conf_d_dir, base_name) 190 # while disabled config files start with _ 191 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 192 193 accomplished = False 194 try: 195 os.remove(enabled_conf_file) 196 accomplished = True 197 except OSError as err: 198 if err.errno != errno.ENOENT: 199 raise 200 201 # since we want to remove, also drop disabled 202 # config files 203 try: 204 os.remove(disabled_conf_file) 205 accomplished = True 206 except OSError as err: 207 if err.errno != errno.ENOENT: 208 raise 209 210 return accomplished
211
212 - def enable(self, repository_id):
213 """ 214 Enable a repository. 215 216 This method only handles repository configuration at 217 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 218 219 @param repository_id: repository identifier 220 @type repository_id: string 221 @return: True, if success 222 @rtype: bool 223 """ 224 settings = SystemSettings() 225 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 226 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 227 # as per specifications, enabled config files handled by 228 # Entropy Client (see repositories.conf.d/README) start with 229 # entropy_ prefix. 230 base_name = self.FILENAME_PREFIX + repository_id 231 enabled_conf_file = os.path.join(conf_d_dir, base_name) 232 # while disabled config files start with _ 233 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 234 235 # enabling or disabling the repo is just a rename() 236 # away for the new style files in repositories.conf.d/ 237 accomplished = False 238 239 try: 240 os.rename(disabled_conf_file, enabled_conf_file) 241 os.utime(enabled_conf_file, None) 242 accomplished = True 243 except OSError as err: 244 if err.errno != errno.ENOENT: 245 # do not handle EPERM ? 246 raise 247 248 return accomplished
249
250 - def disable(self, repository_id):
251 """ 252 Disable a repository. 253 254 This method only handles repository configuration at 255 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 256 257 @param repository_id: repository identifier 258 @type repository_id: string 259 @return: True, if success 260 @rtype: bool 261 """ 262 settings = SystemSettings() 263 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 264 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 265 # as per specifications, enabled config files handled by 266 # Entropy Client (see repositories.conf.d/README) start with 267 # entropy_ prefix. 268 base_name = self.FILENAME_PREFIX + repository_id 269 enabled_conf_file = os.path.join(conf_d_dir, base_name) 270 # while disabled config files start with _ 271 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 272 273 # enabling or disabling the repo is just a rename() 274 # away for the new style files in repositories.conf.d/ 275 accomplished = False 276 277 try: 278 os.rename(enabled_conf_file, disabled_conf_file) 279 os.utime(disabled_conf_file, None) 280 accomplished = True 281 except OSError as err: 282 if err.errno != errno.ENOENT: 283 # do not handle EPERM ? 284 raise 285 286 return accomplished
287
288 - def write(self, path, repository_id, desc, repos, pkgs, enabled = True, 289 username = None, password = None, https_validate_cert = True):
290 """ 291 Write the repository configuration to the given file. 292 293 @param path: configuration file to write 294 @type path: string 295 @param repository_id: repository identifier 296 @type repository_id: string 297 @param desc: repository description 298 @type desc: string 299 @param repos: list of "repo=" uri dicts (containing "uri" and 300 "dbcformat" keys) 301 @type repos: list 302 @param pkgs: list of packages mirrors uris 303 @type pkgs: list 304 @keyword enabled: True, if the repository is enabled 305 @type enabled: bool 306 """ 307 if enabled: 308 enabled_str = "true" 309 else: 310 enabled_str = "false" 311 312 repos_str = "" 313 for repo_meta in repos: 314 repos_str += "repo = %(uri)s#%(dbcformat)s\n" % repo_meta 315 316 config = """\ 317 # Repository configuration file automatically generated 318 # by Entropy on your behalf. 319 320 [%(repository_id)s] 321 desc = %(desc)s 322 %(repos)s 323 enabled = %(enabled)s 324 %(username)s 325 %(password)s 326 %(https_validate_cert)s 327 """ % { 328 "repository_id": repository_id, 329 "desc": desc, 330 "repos": repos_str.rstrip(), 331 "enabled": enabled_str, 332 "username": ("", "username = %s" % username)[username or 0], 333 "password": ("", "password = %s" % password)[password or 0], 334 "https_validate_cert" : ("https_validate_cert = false", "")[https_validate_cert] 335 } 336 for pkg in pkgs: 337 config += "pkg = %s\n" % (pkg,) 338 config += "\n" 339 340 entropy.tools.atomic_write(path, config, self._encoding)
341
342 - def repositories(self):
343 """ 344 Return a list of valid parsed repositories. 345 346 A repository is considered valid iff it contains 347 at least one "repo" and "pkg" parameter. 348 """ 349 required_keys = set(("repo", "pkg")) 350 repositories = [] 351 352 for repository_id in self._ordered_sections: 353 repo_data = self[repository_id] 354 remaining = required_keys - set(repo_data.keys()) 355 if not remaining: 356 # then required_keys are there 357 repositories.append(repository_id) 358 359 return repositories
360
361 - def repo(self, repository_id):
362 """ 363 Return the list of database URLs for the given repository. 364 This includes the default one, which is the first element 365 listed. 366 367 @param repository_id: the repository identifier 368 @type repository_id: string 369 @raise KeyError: if repository_id is not found or 370 metadata is not available 371 @return: the list of repository URLs 372 @rtype: list 373 """ 374 return self[repository_id]["repo"]
375
376 - def pkgs(self, repository_id):
377 """ 378 Return the list of package URLs for the given repository. 379 380 @param repository_id: the repository identifier 381 @type repository_id: string 382 @raise KeyError: if repository_id is not found or 383 metadata is not available 384 @return: the package URLs 385 @rtype: list 386 """ 387 return self[repository_id]["pkg"]
388
389 - def desc(self, repository_id):
390 """ 391 Return the description of the repository. 392 393 @param repository_id: the repository identifier 394 @type repository_id: string 395 @raise KeyError: if repository_id is not found or 396 metadata is not available 397 @return: the repository description 398 @rtype: string 399 """ 400 return self[repository_id]["desc"][0]
401
402 - def enabled(self, repository_id):
403 """ 404 Return whether the repository is enabled or disabled. 405 406 @param repository_id: the repository identifier 407 @type repository_id: string 408 @return: the repository status 409 @rtype: bool 410 """ 411 try: 412 enabled = self[repository_id]["enabled"][0] 413 return enabled.strip().lower() == "true" 414 except KeyError: 415 return self._DEFAULT_ENABLED_VALUE
416
417 - def username(self, repository_id):
418 """ 419 Return the username to use with the repository. 420 421 @param repository_id: the repository identifier 422 @type repository_id: string 423 @raise KeyError: if repository_id is not found or 424 metadata is not available 425 @return: the repository username. 426 @rtype: string 427 """ 428 return self[repository_id]["username"][0]
429
430 - def password(self, repository_id):
431 """ 432 Return the password to use with the repository. 433 434 @param repository_id: the repository identifier 435 @type repository_id: string 436 @raise KeyError: if repository_id is not found or 437 metadata is not available 438 @return: the repository password. 439 @rtype: string 440 """ 441 return self[repository_id]["password"][0]
442
443 - def https_validate_cert(self, repository_id):
444 """ 445 Return whether SSL cert validation of remote repository 446 is enabled. It is used only for HTTPS. 447 448 @param repository_id: the repository identifier 449 @type repository_id: string 450 @return: status of ssl certificate validation. 451 @rtype: bool 452 """ 453 try: 454 https_validate_cert = self[repository_id]["https_validate_cert"][0] 455 return https_validate_cert.strip().lower() == "true" 456 except KeyError: 457 return True # Default is enabled
458
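Reading the accessors together: once the configuration files have been parsed (the actual file loading is handled by the BaseConfigParser machinery, which is not shown in this listing), repository metadata can be queried roughly like this (illustrative sketch):

    >>> parser = RepositoryConfigParser()
    >>> ids = parser.repositories()        # sections with both "repo" and "pkg"
    >>> enabled_ids = [x for x in ids if parser.enabled(x)]
    >>> descriptions = dict((x, parser.desc(x)) for x in ids)
    >>> db_urls = dict((x, parser.repo(x)) for x in ids)    # default URL first
    >>> mirrors = dict((x, parser.pkgs(x)) for x in ids)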
459 -class SystemSettings(Singleton, EntropyPluginStore):
460 461 """ 462 This is the place where all the Entropy settings are stored if 463 they are not considered instance constants (etpConst). 464 For example, here we store package masking cache information and 465 settings, client-side, server-side and services settings. 466 Also, this class mimics a dictionary (even if not inheriting it 467 due to development choices). 468 469 Sample code: 470 471 >>> from entropy.core.settings.base import SystemSettings 472 >>> system_settings = SystemSettings() 473 >>> system_settings.clear() 474 >>> system_settings.destroy() 475 476 """ 477
  478      class CachingList(list): 
  479          """ 
  480          This object subclasses list, making it possible to store 
  481          cache information in the same place as the data to be 
  482          cached. 
  483          """ 
  484          def __init__(self, *args, **kwargs): 
  485              list.__init__(self, *args, **kwargs) 
  486              self.__cache = None 
  487              self.__lock = threading.RLock() 
  488   
  489          def __enter__(self): 
  490              """ 
  491              Make it possible to acquire the whole cache content in 
  492              a thread-safe way. 
  493              """ 
  494              self.__lock.acquire() 
  495   
  496          def __exit__(self, exc_type, exc_value, traceback): 
  497              """ 
  498              Release the lock acquired in __enter__, making the cache 
  499              content accessible to other threads again. 
  500              """ 
  501              self.__lock.release() 
  502   
  503          def get(self): 
  504              """ 
  505              Get the cache object. 
  506              """ 
  507              return self.__cache 
  508   
  509          def set(self, cache_obj): 
  510              """ 
  511              Set the cache object. 
  512              """ 
  513              self.__cache = cache_obj 
514
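CachingList is used by the *.d parsers below to keep per-parser cache data attached to the parsed content itself. A small sketch of the intended pattern (the values are made up):

    >>> entries = SystemSettings.CachingList(["app-foo/bar"])
    >>> with entries:
    ...     entries.set({"parsed": True})
    >>> entries.get()
    {'parsed': True}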
515 - def init_singleton(self):
516 517 """ 518 Replaces __init__ because SystemSettings is a Singleton. 519 see Singleton API reference for more information. 520 521 """ 522 EntropyPluginStore.__init__(self) 523 524 from entropy.core.settings.plugins.factory import get_available_plugins 525 self.__get_external_plugins = get_available_plugins 526 527 from threading import RLock 528 self.__lock = RLock() 529 self.__cacher = EntropyCacher() 530 self.__data = {} 531 self.__parsables = {} 532 self.__is_destroyed = False 533 self.__inside_with_stmt = 0 534 self.__pkg_comment_tag = "##" 535 536 self.__external_plugins = {} 537 self.__setting_files_order = [] 538 self.__setting_files_pre_run = [] 539 self.__setting_files = {} 540 self.__setting_dirs = {} 541 self.__persistent_settings = { 542 'pkg_masking_reasons': etpConst['pkg_masking_reasons'].copy(), 543 'pkg_masking_reference': etpConst['pkg_masking_reference'].copy(), 544 'backed_up': {}, 545 # package masking, live 546 'live_packagemasking': { 547 'unmask_matches': set(), 548 'mask_matches': set(), 549 }, 550 } 551 552 self.__setup_const() 553 self.__scan()
554
555 - def __enter__(self):
556 """ 557 Make possible to add plugins without triggering parse() every time. 558 """ 559 self.__inside_with_stmt += 1
560
561 - def __exit__(self, exc_type, exc_value, traceback):
562 """ 563 Make possible to add plugins without triggering parse() every time. 564 Reload SystemSettings on exit 565 """ 566 self.__inside_with_stmt -= 1 567 if self.__inside_with_stmt == 0: 568 self.clear()
569
570 - def destroy(self):
571 """ 572 Overloaded method from Singleton. 573 "Destroys" the instance. 574 575 @return: None 576 @rtype: None 577 """ 578 self.__is_destroyed = True
579
  580      def add_plugin(self, system_settings_plugin_instance): 
  581          """ 
  582          This method lets you add custom parsers to SystemSettings. 
  583          Mind that you are responsible for handling your plugin instance 
  584          and removing it before it is destroyed. You can remove the plugin 
  585          instance at any time by issuing remove_plugin. 
  586          Every add_plugin or remove_plugin call will also issue clear() 
  587          for you. This could be bad and it might be removed in the future. 
  588   
  589          @param system_settings_plugin_instance: valid SystemSettingsPlugin 
  590              instance 
  591          @type system_settings_plugin_instance: SystemSettingsPlugin instance 
  592          @return: None 
  593          @rtype: None 
  594          """ 
  595          inst = system_settings_plugin_instance 
  596          if not isinstance(inst, SystemSettingsPlugin): 
  597              raise AttributeError("SystemSettings: expected valid " + \ 
  598                  "SystemSettingsPlugin instance") 
  599          EntropyPluginStore.add_plugin(self, inst.get_id(), inst) 
  600          if self.__inside_with_stmt == 0: 
  601              self.clear() 
602
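Since add_plugin() issues clear() (and therefore a full re-parse) every time it is called outside a with block, several plugins are best registered within a single with statement; the re-parse then happens only once, on __exit__. Sketch (my_plugin and other_plugin stand for hypothetical SystemSettingsPlugin instances):

    >>> settings = SystemSettings()
    >>> with settings:
    ...     settings.add_plugin(my_plugin)
    ...     settings.add_plugin(other_plugin)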
603 - def remove_plugin(self, plugin_id):
604 """ 605 This method lets you remove previously added custom parsers from 606 SystemSettings through its plugin identifier. If plugin_id is not 607 available, KeyError exception will be raised. 608 Every add_plugin or remove_plugin method will also issue clear() 609 for you. This could be bad and it might be removed in future. 610 611 @param plugin_id: plugin identifier 612 @type plugin_id: basestring 613 @return: None 614 @rtype: None 615 """ 616 EntropyPluginStore.remove_plugin(self, plugin_id) 617 self.clear()
618
619 - def get_updatable_configuration_files(self, repository_id):
620 """ 621 Poll SystemSettings plugins and get a list of updatable configuration 622 files. For "updatable" it is meant, configuration files that expose 623 package matches (not just keys) at the beginning of new lines. 624 This makes possible to implement automatic configuration files updates 625 upon package name renames. 626 627 @param repository_id: repository identifier, if needed to return 628 a list of specific configuration files 629 @type repository_id: string or None 630 @return: list (set) of package files paths (must check for path avail) 631 @rtype: set 632 """ 633 own_list = set([ 634 self.__setting_files['keywords'], 635 self.__setting_files['mask'], 636 self.__setting_files['unmask'], 637 self.__setting_files['system_mask'], 638 self.__setting_files['splitdebug'], 639 ]) 640 for setting_id, setting_data in self.__setting_dirs.items(): 641 conf_dir, dir_sett, skipped_sett, auto_update = setting_data 642 if not auto_update: 643 continue 644 for conf_file, mtime_conf_file in dir_sett: 645 own_list.add(conf_file) 646 647 # poll plugins 648 for plugin in self.get_plugins().values(): 649 files = plugin.get_updatable_configuration_files(repository_id) 650 if files: 651 own_list.update(files) 652 653 for plugin in self.__external_plugins.values(): 654 files = plugin.get_updatable_configuration_files(repository_id) 655 if files: 656 own_list.update(files) 657 658 return own_list
659 660 @staticmethod
662 """ 663 Return the actual {ROOT}etc/entropy/packages path. 664 665 @return: path 666 @type: string 667 """ 668 return os.path.join(etpConst['confdir'], "packages")
669 670 @staticmethod
672 """ 673 Return the actual {ROOT}etc/entropy/packages/sets path. 674 675 @return: path 676 @type: string 677 """ 678 return os.path.join( 679 SystemSettings.packages_config_directory(), 680 etpConst['confsetsdirname'])
681
682 - def __maybe_lazy_load(self, key):
683 """ 684 Lazy load a dict item if it's in the parsable dict. 685 """ 686 if key is None: 687 for item, func in self.__parsables.items(): 688 const_debug_write( 689 __name__, "%s was lazy loaded (slow path!!)" % (item,)) 690 self.__data[item] = func() 691 return 692 693 if key in self.__parsables: 694 if key not in self.__data: 695 const_debug_write(__name__, "%s was lazy loaded" % (key,)) 696 self.__data[key] = self.__parsables[key]()
697
698 - def __setup_const(self):
699 700 """ 701 Internal method. Does constants initialization. 702 703 @return: None 704 @rtype: None 705 """ 706 707 del self.__setting_files_order[:] 708 del self.__setting_files_pre_run[:] 709 self.__setting_files.clear() 710 self.__setting_dirs.clear() 711 712 packages_dir = SystemSettings.packages_config_directory() 713 self.__setting_files.update({ 714 # keywording configuration files 715 'keywords': os.path.join( 716 packages_dir, "package.keywords"), 717 # unmasking configuration files 718 'unmask': os.path.join( 719 packages_dir, "package.unmask"), 720 # masking configuration files 721 'mask': os.path.join(packages_dir, "package.mask"), 722 # selectively enable splitdebug for packages 723 'splitdebug': os.path.join( 724 packages_dir, "package.splitdebug"), 725 'splitdebug_mask': os.path.join( 726 packages_dir, "package.splitdebug.mask"), 727 # masking configuration files 728 'license_mask': os.path.join( 729 packages_dir, "license.mask"), 730 'license_accept': os.path.join( 731 packages_dir, "license.accept"), 732 'system_mask': os.path.join( 733 packages_dir, "system.mask"), 734 'system_dirs': os.path.join( 735 etpConst['confdir'], "fsdirs.conf"), 736 'system_dirs_mask': os.path.join( 737 etpConst['confdir'], "fsdirsmask.conf"), 738 'extra_ldpaths': os.path.join( 739 etpConst['confdir'], "fsldpaths.conf"), 740 'system_rev_symlinks': os.path.join( 741 etpConst['confdir'], "fssymlinks.conf"), 742 'broken_syms': os.path.join(etpConst['confdir'], "brokensyms.conf"), 743 'broken_libs_mask': os.path.join( 744 etpConst['confdir'], "brokenlibsmask.conf"), 745 'broken_links_mask': os.path.join( 746 etpConst['confdir'], "brokenlinksmask.conf"), 747 'hw_hash': os.path.join(etpConst['confdir'], ".hw.hash"), 748 'system': os.path.join(etpConst['confdir'], "entropy.conf"), 749 'repositories': os.path.join( 750 etpConst['confdir'], "repositories.conf"), 751 'system_package_sets': {}, 752 }) 753 self.__setting_files_order.extend([ 754 'keywords', 'unmask', 'mask', 'license_mask', 755 'license_accept', 'system_mask', 'system_package_sets', 756 'system_dirs', 'system_dirs_mask', 'extra_ldpaths', 757 'splitdebug', 'splitdebug_mask', 'system', 758 'system_rev_symlinks', 'hw_hash', 'broken_syms', 759 'broken_libs_mask', 'broken_links_mask' 760 ]) 761 self.__setting_files_pre_run.extend(['repositories']) 762 763 dmp_dir = etpConst['dumpstoragedir'] 764 765 conf_d_descriptors = [ 766 ("mask_d", "package.mask.d", 767 packages_dir, True, True), 768 ("unmask_d", "package.unmask.d", 769 packages_dir, True, True), 770 ("license_mask_d", "license.mask.d", 771 packages_dir, False, True), 772 ("license_accept_d", "license.accept.d", 773 packages_dir, False, True), 774 ("system_mask_d", "system.mask.d", 775 packages_dir, True, True), 776 # this will be parsed from inside _repositories_parser 777 ("repositories_conf_d", "repositories.conf.d", 778 etpConst['confdir'], False, False), 779 ] 780 for setting_id, rel_dir, base_dir, auto_update, add_parser \ 781 in conf_d_descriptors: 782 conf_dir = base_dir + os.path.sep + rel_dir 783 self.__setting_dirs[setting_id] = [conf_dir, [], [], auto_update] 784 785 try: 786 dir_cont = list(os.listdir(conf_dir)) 787 except (OSError, IOError): 788 continue 789 790 conf_files = [] 791 for item in dir_cont: 792 if item == "README": 793 continue 794 if item.startswith(".keep"): 795 continue 796 if item.endswith(".example"): 797 continue 798 799 conf_file = os.path.join(conf_dir, item) 800 if not os.path.isfile(conf_file): 801 continue 802 803 if const_file_readable(conf_file): 
  804                      conf_files.append(conf_file) 
  805   
  806              # ignore files starting with _ 
  807              skipped_conf_files = [x for x in conf_files if \ 
  808                  os.path.basename(x).startswith("_")] 
  809              conf_files = [x for x in conf_files if not \ 
  810                  os.path.basename(x).startswith("_")] 
  811   
  812              mtime_base_file = os.path.join(dmp_dir, rel_dir + "_") 
  813              skipped_conf_files = [ 
  814                  (x, mtime_base_file + os.path.basename(x) + ".mtime") for \ 
  815                  x in skipped_conf_files] 
  816              conf_files = [ 
  817                  (x, mtime_base_file + os.path.basename(x) + ".mtime") for \ 
  818                  x in conf_files] 
  819   
  820              self.__setting_dirs[setting_id][1] += conf_files 
  821              self.__setting_dirs[setting_id][2] += skipped_conf_files 
  822              if add_parser: 
  823                  # this will make us call _<setting_id>_parser() 
  824                  # and that must return None, because the outcome 
  825                  # has to be written into the '<setting_id/_d>' metadata object 
  826                  # thus, these have to always run AFTER their alter-egos 
  827                  self.__setting_files_order.append(setting_id) 
828
829 - def __scan(self):
830 831 """ 832 Internal method. Scan settings and fill variables. 833 834 @return: None 835 @rtype: None 836 """ 837 838 def enforce_persistent(): 839 # merge persistent settings back 840 self.__data.update(self.__persistent_settings) 841 # restore backed-up settings 842 self.__data.update(self.__persistent_settings['backed_up'].copy())
843 844 self.__parse() 845 enforce_persistent() 846 847 # plugins support 848 local_plugins = self.get_plugins() 849 for plugin_id in sorted(local_plugins): 850 851 self.__parsables[plugin_id] = functools.partial( 852 local_plugins[plugin_id].parse, self) 853 854 # external plugins support 855 external_plugins = self.__get_external_plugins() 856 for external_plugin_id in sorted(external_plugins): 857 external_plugin = external_plugins[external_plugin_id]() 858 859 self.__parsables[external_plugin_id] = functools.partial( 860 external_plugin.parse, self) 861 862 self.__external_plugins[external_plugin_id] = external_plugin 863 864 enforce_persistent() 865 866 # run post-SystemSettings setup, plugins hook 867 for plugin_id in sorted(local_plugins): 868 local_plugins[plugin_id].post_setup(self) 869 870 # run post-SystemSettings setup for external plugins too 871 for external_plugin_id in sorted(self.__external_plugins): 872 self.__external_plugins[external_plugin_id].post_setup(self)
873
874 - def __setitem__(self, mykey, myvalue):
875 """ 876 dict method. See Python dict API reference. 877 """ 878 # backup here too 879 if mykey in self.__persistent_settings: 880 self.__persistent_settings[mykey] = myvalue 881 self.__data[mykey] = myvalue
882
883 - def __getitem__(self, key):
884 """ 885 dict method. See Python dict API reference. 886 """ 887 with self.__lock: 888 self.__maybe_lazy_load(key) 889 return self.__data[key]
890
891 - def __delitem__(self, key):
892 """ 893 dict method. See Python dict API reference. 894 """ 895 with self.__lock: 896 try: 897 del self.__data[key] 898 except KeyError: 899 if key not in self.__parsables: 900 raise
901
902 - def __iter__(self):
903 """ 904 dict method. See Python dict API reference. 905 """ 906 self.__maybe_lazy_load(None) 907 return iter(self.__data)
908
909 - def __contains__(self, item):
910 """ 911 dict method. See Python dict API reference. 912 """ 913 return item in self.__data or item in self.__parsables
914
915 - def __hash__(self):
916 """ 917 dict method. See Python dict API reference. 918 """ 919 self.__maybe_lazy_load(None) 920 return hash(self.__data)
921
922 - def __len__(self):
923 """ 924 dict method. See Python dict API reference. 925 """ 926 self.__maybe_lazy_load(None) 927 return len(self.__data)
928
929 - def get(self, key, *args, **kwargs):
930 """ 931 dict method. See Python dict API reference. 932 """ 933 self.__maybe_lazy_load(key) 934 return self.__data.get(key, *args, **kwargs)
935
936 - def copy(self):
937 """ 938 dict method. See Python dict API reference. 939 """ 940 self.__maybe_lazy_load(None) 941 return self.__data.copy()
942
943 - def fromkeys(self, *args, **kwargs):
944 """ 945 dict method. See Python dict API reference. 946 """ 947 self.__maybe_lazy_load(None) 948 return self.__data.fromkeys(*args, **kwargs)
949
950 - def items(self):
951 """ 952 dict method. See Python dict API reference. 953 """ 954 self.__maybe_lazy_load(None) 955 return self.__data.items()
956
957 - def iteritems(self):
958 """ 959 dict method. See Python dict API reference. 960 """ 961 self.__maybe_lazy_load(None) 962 return self.__data.iteritems()
963
964 - def iterkeys(self):
965 """ 966 dict method. See Python dict API reference. 967 """ 968 self.__maybe_lazy_load(None) 969 return self.__data.iterkeys()
970
971 - def keys(self):
972 """ 973 dict method. See Python dict API reference. 974 """ 975 self.__maybe_lazy_load(None) 976 return self.__data.keys()
977
978 - def pop(self, key, *args, **kwargs):
979 """ 980 dict method. See Python dict API reference. 981 """ 982 self.__maybe_lazy_load(key) 983 return self.__data.pop(key, *args, **kwargs)
984
985 - def popitem(self):
986 """ 987 dict method. See Python dict API reference. 988 """ 989 self.__maybe_lazy_load(None) 990 return self.__data.popitem()
991
992 - def setdefault(self, key, *args, **kwargs):
993 """ 994 dict method. See Python dict API reference. 995 """ 996 self.__maybe_lazy_load(key) 997 return self.__data.setdefault(key, *args, **kwargs)
998
999 - def update(self, kwargs):
1000 """ 1001 dict method. See Python dict API reference. 1002 """ 1003 self.__maybe_lazy_load(None) 1004 return self.__data.update(kwargs)
1005
1006 - def values(self):
1007 """ 1008 dict method. See Python dict API reference. 1009 """ 1010 self.__maybe_lazy_load(None) 1011 return self.__data.values()
1012
1013 - def clear(self):
1014 """ 1015 dict method. See Python dict API reference. 1016 Settings are also re-initialized here. 1017 1018 @return None 1019 """ 1020 with self.__lock: 1021 self.__data.clear() 1022 self.__parsables.clear() 1023 self.__setup_const() 1024 self.__scan()
1025
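Since SystemSettings mimics a dict, parsed metadata is reached by key; values are computed lazily by the registered parsers on first access, and clear() drops them so that the next lookup re-parses the configuration. Illustrative sketch (the available keys depend on the active parsers and plugins):

    >>> settings = SystemSettings()
    >>> 'repositories' in settings
    True
    >>> order = settings['repositories']['order']   # enabled repositories, in order
    >>> settings.clear()                            # force a re-parse on next access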
1026 - def set_persistent_setting(self, persistent_dict):
1027 """ 1028 Make metadata persistent, the input dict will be merged 1029 with the base one at every reset call (clear()). 1030 1031 @param persistent_dict: dictionary to merge 1032 @type persistent_dict: dict 1033 1034 @return: None 1035 @rtype: None 1036 """ 1037 self.__persistent_settings.update(persistent_dict)
1038
1039 - def unset_persistent_setting(self, persistent_key):
1040 """ 1041 Remove dict key from persistent dictionary 1042 1043 @param persistent_key: key to remove 1044 @type persistent_dict: dict 1045 1046 @return: None 1047 @rtype: None 1048 """ 1049 del self.__persistent_settings[persistent_key] 1050 del self.__data[persistent_key]
1051
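Persistent settings survive clear(): they are merged back into the metadata at every re-scan until explicitly unset. Sketch with a made-up key:

    >>> settings = SystemSettings()
    >>> settings.set_persistent_setting({'my_setting': {'foo': 'bar'}})
    >>> settings.clear()
    >>> settings['my_setting']
    {'foo': 'bar'}
    >>> settings.unset_persistent_setting('my_setting')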
1052 - def __setup_package_sets_vars(self):
1053 1054 """ 1055 This function setups the *files* dictionary about package sets 1056 that will be read and parsed afterwards by the respective 1057 internal parser. 1058 1059 @return: None 1060 @rtype: None 1061 """ 1062 1063 # user defined package sets 1064 sets_dir = SystemSettings.packages_sets_directory() 1065 pkg_set_data = {} 1066 1067 try: 1068 dir_list = list(os.listdir(sets_dir)) 1069 except (OSError, IOError): 1070 dir_list = None 1071 1072 if dir_list is not None: 1073 1074 set_files = [] 1075 for item in dir_list: 1076 set_file = os.path.join(sets_dir, item) 1077 if const_file_readable(set_file): 1078 set_files.append(set_file) 1079 1080 for set_file in set_files: 1081 try: 1082 set_file = const_convert_to_unicode( 1083 set_file, etpConst['conf_encoding']) 1084 except UnicodeDecodeError: 1085 set_file = const_convert_to_unicode(set_file, 1086 sys.getfilesystemencoding()) 1087 1088 path = os.path.join(sets_dir, set_file) 1089 if not const_is_python3(): 1090 path = const_convert_to_rawstring( 1091 path, etpConst['conf_encoding']) 1092 pkg_set_data[set_file] = path 1093 1094 self.__setting_files['system_package_sets'].update(pkg_set_data)
1095
1096 - def __parse(self):
1097 """ 1098 This is the main internal parsing method. 1099 *files* and *mtimes* dictionaries are prepared and 1100 parsed just a few lines later. 1101 1102 @return: None 1103 @rtype: None 1104 """ 1105 # some parsers must be run BEFORE everything: 1106 for item in self.__setting_files_pre_run: 1107 myattr = '_%s_parser' % (item,) 1108 if not hasattr(self, myattr): 1109 continue 1110 func = getattr(self, myattr) 1111 self.__parsables[item] = func 1112 1113 # parse main settings 1114 self.__setup_package_sets_vars() 1115 1116 for item in self.__setting_files_order: 1117 myattr = '_%s_parser' % (item,) 1118 if not hasattr(self, myattr): 1119 continue 1120 func = getattr(self, myattr) 1121 self.__parsables[item] = func
1122
1123 - def get_setting_files_data(self):
1124 """ 1125 Return a copy of the internal *files* dictionary. 1126 This dict contains config file paths and their identifiers. 1127 1128 @return: dict __setting_files 1129 @rtype: dict 1130 """ 1131 return self.__setting_files.copy()
1132
1133 - def get_setting_dirs_data(self):
1134 """ 1135 Return a copy of the internal *dirs* dictionary. 1136 This dict contains *.d config dirs enclosing respective 1137 config files. 1138 1139 @return: dict __setting_dirs 1140 @rtype: dict 1141 """ 1142 return self.__setting_dirs.copy()
1143
1144 - def packages_configuration_hash(self):
1145 """ 1146 Return a SHA1 hash of the current packages configuration. 1147 This includes masking, unmasking, keywording, system masking 1148 settings. 1149 """ 1150 cache_key = "__packages_configuration_hash__" 1151 cached = self.get(cache_key) 1152 if cached is not None: 1153 return cached 1154 1155 sha = hashlib.sha1() 1156 1157 configs = ( 1158 ("mask", self['mask']), 1159 ("unmask", self['unmask']), 1160 ("keyword_mask", self['keywords']), 1161 ("license_mask", self['license_mask']), 1162 ("license_accept", self['license_accept']), 1163 ("system_mask", self['system_mask']), 1164 ("live_unmask", self['live_packagemasking']['unmask_matches']), 1165 ("live_mask", self['live_packagemasking']['mask_matches']), 1166 ) 1167 1168 sha.update(const_convert_to_rawstring("-begin-")) 1169 for name, config in configs: 1170 cache_s = "%s:{%s}|" % ( 1171 name, ",".join(sorted(config)), 1172 ) 1173 sha.update(const_convert_to_rawstring(cache_s)) 1174 1175 sha.update(const_convert_to_rawstring("-end-")) 1176 1177 outcome = sha.hexdigest() 1178 self[cache_key] = outcome 1179 return outcome
1180
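The hash is handy for detecting changes in the package masking and keywording configuration between runs, for instance to decide whether caches are stale (sketch):

    >>> settings = SystemSettings()
    >>> before = settings.packages_configuration_hash()
    >>> settings.clear()     # e.g. after package.mask/package.unmask were edited
    >>> changed = settings.packages_configuration_hash() != before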
1181 - def _keywords_parser(self):
1182 """ 1183 Parser returning package keyword masking metadata 1184 read from package.keywords file. 1185 This file contains package mask or unmask directives 1186 based on package keywords. 1187 1188 @return: parsed metadata 1189 @rtype: dict 1190 """ 1191 keywords_conf = self.__setting_files['keywords'] 1192 1193 # merge universal keywords 1194 data = { 1195 'universal': set(), 1196 'packages': {}, 1197 'repositories': {}, 1198 } 1199 1200 content = [x.split() for x in \ 1201 self.__generic_parser(keywords_conf, 1202 comment_tag = self.__pkg_comment_tag) \ 1203 if len(x.split()) < 4] 1204 for keywordinfo in content: 1205 # skip wrong lines 1206 if len(keywordinfo) > 3: 1207 continue 1208 # inversal keywording, check if it's not repo= 1209 if len(keywordinfo) == 1: 1210 if keywordinfo[0].startswith("repo="): 1211 continue 1212 # convert into entropy format 1213 if keywordinfo[0] == "**": 1214 keywordinfo[0] = "" 1215 data['universal'].add(keywordinfo[0]) 1216 continue 1217 # inversal keywording, check if it's not repo= 1218 if len(keywordinfo) in (2, 3,): 1219 # repo=? 1220 if keywordinfo[0].startswith("repo="): 1221 continue 1222 # add to repo? 1223 items = keywordinfo[1:] 1224 # convert into entropy format 1225 if keywordinfo[0] == "**": 1226 keywordinfo[0] = "" 1227 reponame = [x for x in items if x.startswith("repo=") \ 1228 and (len(x.split("=")) == 2)] 1229 if reponame: 1230 reponame = reponame[0].split("=")[1] 1231 if reponame not in data['repositories']: 1232 data['repositories'][reponame] = {} 1233 # repository unmask or package in repository unmask? 1234 if keywordinfo[0] not in data['repositories'][reponame]: 1235 data['repositories'][reponame][keywordinfo[0]] = set() 1236 if len(items) == 1: 1237 # repository unmask 1238 data['repositories'][reponame][keywordinfo[0]].add('*') 1239 elif "*" not in \ 1240 data['repositories'][reponame][keywordinfo[0]]: 1241 1242 item = [x for x in items if not x.startswith("repo=")] 1243 data['repositories'][reponame][keywordinfo[0]].add( 1244 item[0]) 1245 elif len(items) == 2: 1246 # it's going to be a faulty line!!?? 1247 # can't have two items and no repo= 1248 continue 1249 else: 1250 # add keyword to packages 1251 if keywordinfo[0] not in data['packages']: 1252 data['packages'][keywordinfo[0]] = set() 1253 data['packages'][keywordinfo[0]].add(items[0]) 1254 1255 # merge universal keywords 1256 etpConst['keywords'].clear() 1257 etpConst['keywords'].update(etpSys['keywords']) 1258 for keyword in data['universal']: 1259 etpConst['keywords'].add(keyword) 1260 1261 return data
1262 1263
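Judging from the parser above, every package.keywords line starts with a keyword ("**" stands for the empty keyword), optionally followed by a package atom and/or a "repo=<id>" restriction; a keyword alone applies universally, and lines with more than three tokens are skipped. A sketch with made-up names:

    ~amd64
    ~amd64 app-foo/bar
    ~amd64 repo=example-repo
    ~amd64 app-foo/bar repo=example-repo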
1264 - def _unmask_parser(self):
1265 """ 1266 Parser returning package unmasking metadata read from 1267 package.unmask file. 1268 This file contains package unmask directives, allowing 1269 to enable experimental or *secret* packages. 1270 1271 @return: parsed metadata 1272 @rtype: dict 1273 """ 1274 return self.__generic_parser(self.__setting_files['unmask'], 1275 comment_tag = self.__pkg_comment_tag)
1276
1277 - def _mask_parser(self):
1278 """ 1279 Parser returning package masking metadata read from 1280 package.mask file. 1281 This file contains package mask directives, allowing 1282 to disable experimental or *secret* packages. 1283 1284 @return: parsed metadata 1285 @rtype: dict 1286 """ 1287 return self.__generic_parser(self.__setting_files['mask'], 1288 comment_tag = self.__pkg_comment_tag)
1289
1290 - def _mask_d_parser(self):
1291 """ 1292 Parser returning package masking metadata read from 1293 packages/package.mask.d/* files (alpha sorting). 1294 It writes directly to __data['mask'] in append. 1295 """ 1296 return self.__generic_d_parser("mask_d", "mask")
1297
1298 - def _unmask_d_parser(self):
1299 """ 1300 Parser returning package masking metadata read from 1301 packages/package.unmask.d/* files (alpha sorting). 1302 It writes directly to __data['unmask'] in append. 1303 """ 1304 return self.__generic_d_parser("unmask_d", "unmask")
1305
1306 - def _license_mask_d_parser(self):
1307 """ 1308 Parser returning package masking metadata read from 1309 packages/license.mask.d/* files (alpha sorting). 1310 It writes directly to __data['license_mask'] in append. 1311 """ 1312 return self.__generic_d_parser("license_mask_d", "license_mask")
1313
1314 - def _system_mask_d_parser(self):
1315 """ 1316 Parser returning package masking metadata read from 1317 packages/system.mask.d/* files (alpha sorting). 1318 It writes directly to __data['system_mask'] in append. 1319 """ 1320 return self.__generic_d_parser("system_mask_d", "system_mask")
1321
1322 - def _license_accept_d_parser(self):
1323 """ 1324 Parser returning package masking metadata read from 1325 packages/license.accept.d/* files (alpha sorting). 1326 It writes directly to __data['license_accept'] in append. 1327 """ 1328 return self.__generic_d_parser("license_accept_d", "license_accept")
1329
1330 - def __generic_d_parser(self, setting_dirs_id, setting_id, 1331 parse_skipped = False):
1332 """ 1333 Generic parser used by _*_d_parser() functions. 1334 """ 1335 _conf_dir, setting_files, skipped_files, auto_upd = \ 1336 self.__setting_dirs[setting_dirs_id] 1337 1338 content = [] 1339 files = setting_files 1340 if parse_skipped: 1341 files = skipped_files 1342 for sett_file, _mtime_sett_file in files: 1343 content += self.__generic_parser(sett_file, 1344 comment_tag = self.__pkg_comment_tag) 1345 1346 if setting_id is not None: 1347 # Always push out CachingList objects if 1348 # metadata is not available in self.__data 1349 # It doesn't harm to have it like this since 1350 # CachingList is just a list(). 1351 # Moreover, DO keep the same object and use 1352 # extend rather than throwing it away. 1353 self.__data.get( 1354 setting_id, 1355 SystemSettings.CachingList([])).extend(content) 1356 else: 1357 return content
1358
1359 - def _system_mask_parser(self):
1360 """ 1361 Parser returning system packages mask metadata read from 1362 package.system_mask file. 1363 This file contains packages that should be always kept 1364 installed, extending the already defined (in repository database) 1365 set of atoms. 1366 1367 @return: parsed metadata 1368 @rtype: dict 1369 """ 1370 return self.__generic_parser(self.__setting_files['system_mask'], 1371 comment_tag = self.__pkg_comment_tag)
1372
1373 - def _splitdebug_parser(self):
1374 """ 1375 Parser returning packages for which the splitdebug feature 1376 should be enabled. Splitdebug is about installing /usr/lib/debug 1377 files into the system. If no entries are listed in here and 1378 splitdebug is enabled in client.conf, the feature will be considered 1379 enabled for any package. 1380 1381 @return: parsed metadata 1382 @rtype: dict 1383 """ 1384 return self.__generic_parser(self.__setting_files['splitdebug'], 1385 comment_tag = self.__pkg_comment_tag)
1386
1387 - def _splitdebug_mask_parser(self):
1388 """ 1389 Parser returning packages for which the splitdebug feature 1390 should be always disabled. This takes the precedence over 1391 package.splitdebug. 1392 Splitdebug is about installing /usr/lib/debug files into the system. 1393 If no entries are listed in here and splitdebug is enabled in 1394 client.conf, the feature will be considered enabled for any package. 1395 1396 @return: parsed metadata 1397 @rtype: dict 1398 """ 1399 return self.__generic_parser(self.__setting_files['splitdebug_mask'], 1400 comment_tag = self.__pkg_comment_tag)
1401
1402 - def _license_mask_parser(self):
1403 """ 1404 Parser returning packages masked by license metadata read from 1405 license.mask file. 1406 Packages shipped with licenses listed there will be masked. 1407 1408 @return: parsed metadata 1409 @rtype: dict 1410 """ 1411 return self.__generic_parser(self.__setting_files['license_mask'])
1412
 1413      def _license_accept_parser(self): 
 1414          """ 
 1415          Parser returning packages unmasked by license metadata read from 
 1416          the license.accept file. 
 1417          Packages shipped with licenses listed there will be unmasked. 
 1418   
 1419          @return: parsed metadata 
 1420          @rtype: dict 
 1421          """ 
 1422          return self.__generic_parser(self.__setting_files['license_accept']) 
1423
 1424      def _extract_packages_from_set_file(self, filepath): 
 1425          """ 
 1426          Parse a package set file and return the package entries it contains. 
 1427   
 1428          @param filepath: path to the package set file 
 1429          @type filepath: string 
 1430          @return: set of package entries (comments stripped) 
 1431          @rtype: set 
 1432          """ 
 1433          enc = etpConst['conf_encoding'] 
 1434          f = None 
 1435          try: 
 1436              f = codecs.open(filepath, "r", encoding=enc) 
 1437              items = set() 
 1438              line = f.readline() 
 1439              while line: 
 1440                  x = line.strip().rsplit("#", 1)[0] 
 1441                  if x and (not x.startswith('#')): 
 1442                      items.add(x) 
 1443                  line = f.readline() 
 1444          finally: 
 1445              if f is not None: 
 1446                  f.close() 
 1447          return items 
1448
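A package set file is therefore a plain list of entries, one per line; everything after a "#" is discarded and blank or comment-only lines are skipped. Hypothetical content of a set file placed under /etc/entropy/packages/sets:

    app-editors/vim
    app-misc/screen
    sys-apps/lsof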
1449 - def _system_package_sets_parser(self):
1450 """ 1451 Parser returning system defined package sets read from 1452 /etc/entropy/packages/sets. 1453 1454 @return: parsed metadata 1455 @rtype: dict 1456 """ 1457 data = {} 1458 for set_name in self.__setting_files['system_package_sets']: 1459 set_filepath = self.__setting_files['system_package_sets'][set_name] 1460 set_elements = self._extract_packages_from_set_file(set_filepath) 1461 if set_elements: 1462 data[set_name] = set_elements.copy() 1463 return data
1464
 1465      def _extra_ldpaths_parser(self): 
 1466          """ 
 1467          Parser returning extra library search paths (ld paths) read from 
 1468          fsldpaths.conf. 
 1469   
 1470          @return: parsed metadata 
 1471          @rtype: dict 
 1472          """ 
 1473          return self.__generic_parser(self.__setting_files['extra_ldpaths']) 
1473
1474 - def _system_dirs_parser(self):
1475 """ 1476 Parser returning directories considered part of the base system. 1477 1478 @return: parsed metadata 1479 @rtype: dict 1480 """ 1481 return self.__generic_parser(self.__setting_files['system_dirs'])
1482
1483 - def _system_dirs_mask_parser(self):
1484 """ 1485 Parser returning directories NOT considered part of the base system. 1486 Settings here overlay system_dirs_parser. 1487 1488 @return: parsed metadata 1489 @rtype: dict 1490 """ 1491 return self.__generic_parser(self.__setting_files['system_dirs_mask'])
1492
1493 - def _broken_syms_parser(self):
1494 """ 1495 Parser returning a list of shared objects symbols that can be used by 1496 QA tools to scan the filesystem or a subset of it. 1497 1498 @return: parsed metadata 1499 @rtype: dict 1500 """ 1501 return self.__generic_parser(self.__setting_files['broken_syms'])
1502
1503 - def _broken_libs_mask_parser(self):
1504 """ 1505 Parser returning a list of broken shared libraries which are 1506 always considered sane. 1507 1508 @return: parsed metadata 1509 @rtype: dict 1510 """ 1511 return self.__generic_parser(self.__setting_files['broken_libs_mask'])
1512 1522
 1523      def _hw_hash_parser(self): 
 1524          """ 
 1525          Hardware hash metadata parser and generator. It returns a theoretically 
 1526          unique SHA256 hash bound to the computer running this Framework. 
 1527   
 1528          @return: string containing SHA256 hexdigest 
 1529          @rtype: string 
 1530          """ 
 1531          hw_hash_file = self.__setting_files['hw_hash'] 
 1532   
 1533          enc = etpConst['conf_encoding'] 
 1534          hash_data = None 
 1535          try: 
 1536              with codecs.open(hw_hash_file, "r", encoding=enc) as hash_f: 
 1537                  hash_data = hash_f.readline().strip() 
 1538   
 1539          except IOError as err: 
 1540              if err.errno not in (errno.ENOENT, errno.EPERM): 
 1541                  raise 
 1542   
 1543          if hash_data is not None: 
 1544              return hash_data 
 1545   
 1546          hash_file_dir = os.path.dirname(hw_hash_file) 
 1547          hw_hash_exec = etpConst['etp_hw_hash_gen'] 
 1548   
 1549          pipe = None 
 1550          try: 
 1551              try: 
 1552                  pipe = os.popen('{ ' + hw_hash_exec + '; } 2>&1', 'r') 
 1553              except (OSError, IOError): 
 1554                  return None 
 1555   
 1556              hash_data = pipe.read().strip() 
 1557              sts = pipe.close() 
 1558              pipe = None 
 1559   
 1560              if sts is not None: 
 1561                  return None 
 1562   
 1563              # expecting ascii cruft, don't worry about hash_data type 
 1564              with codecs.open(hw_hash_file, "w", encoding=enc) as hash_f: 
 1565                  hash_f.write(hash_data) 
 1566   
 1567              return hash_data 
 1568   
 1569          finally: 
 1570              if pipe is not None: 
 1571                  try: 
 1572                      pipe.close() 
 1573                  except (OSError, IOError): 
 1574                      # this handles the gap between .close() 
 1575                      # and = None 
 1576                      pass 
1577 1596
1597 - def _system_parser(self):
1598 1599 """ 1600 Parses Entropy system configuration file. 1601 1602 @return: parsed metadata 1603 @rtype: dict 1604 """ 1605 etp_conf = self.__setting_files['system'] 1606 1607 data = { 1608 'proxy': etpConst['proxy'].copy(), 1609 'name': etpConst['systemname'], 1610 'log_level': etpConst['entropyloglevel'], 1611 'spm_backend': None, 1612 } 1613 1614 if not const_file_readable(etp_conf): 1615 return data 1616 1617 const_secure_config_file(etp_conf) 1618 enc = etpConst['conf_encoding'] 1619 with codecs.open(etp_conf, "r", encoding=enc) as entropy_f: 1620 entropyconf = [x.strip() for x in entropy_f.readlines() if \ 1621 x.strip() and not x.strip().startswith("#")] 1622 1623 def _loglevel(setting): 1624 try: 1625 loglevel = int(setting) 1626 except ValueError: 1627 return 1628 if (loglevel > -1) and (loglevel < 3): 1629 data['log_level'] = loglevel
1630 1631 def _ftp_proxy(setting): 1632 ftpproxy = setting.strip().split() 1633 if ftpproxy: 1634 data['proxy']['ftp'] = ftpproxy[-1] 1635 1636 def _http_proxy(setting): 1637 httpproxy = setting.strip().split() 1638 if httpproxy: 1639 data['proxy']['http'] = httpproxy[-1] 1640 1641 def _rsync_proxy(setting): 1642 rsyncproxy = setting.strip().split() 1643 if rsyncproxy: 1644 data['proxy']['rsync'] = rsyncproxy[-1] 1645 1646 def _proxy_username(setting): 1647 username = setting.strip().split() 1648 if username: 1649 data['proxy']['username'] = username[-1] 1650 1651 def _proxy_password(setting): 1652 password = setting.strip().split() 1653 if password: 1654 data['proxy']['password'] = password[-1] 1655 1656 def _name(setting): 1657 data['name'] = setting.strip() 1658 1659 def _colors(setting): 1660 bool_setting = entropy.tools.setting_to_bool(setting) 1661 if (bool_setting is not None) and not bool_setting: 1662 nocolor() 1663 1664 def _spm_backend(setting): 1665 data['spm_backend'] = setting.strip() 1666 1667 def _nice_level(setting): 1668 mylevel = setting.strip() 1669 try: 1670 mylevel = int(mylevel) 1671 if (mylevel >= -19) and (mylevel <= 19): 1672 const_set_nice_level(mylevel) 1673 except (ValueError,): 1674 return 1675 1676 settings_map = { 1677 'loglevel': _loglevel, 1678 'colors': _colors, 1679 'ftp-proxy': _ftp_proxy, 1680 'http-proxy': _http_proxy, 1681 'rsync-proxy': _rsync_proxy, 1682 'proxy-username': _proxy_username, 1683 'proxy-password': _proxy_password, 1684 'system-name': _name, 1685 'spm-backend': _spm_backend, 1686 'nice-level': _nice_level, 1687 } 1688 1689 for line in entropyconf: 1690 1691 key, value = entropy.tools.extract_setting(line) 1692 if key is None: 1693 continue 1694 1695 func = settings_map.get(key) 1696 if func is None: 1697 continue 1698 func(value) 1699 1700 return data 1701
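To illustrate the settings_map above, an entropy.conf could carry lines such as the following (a sketch, assuming the pipe-separated key|value form handled by entropy.tools.extract_setting; the exact separators accepted live in that helper, which is not shown here). Unknown keys are simply ignored:

    loglevel|2
    system-name|My Workstation
    nice-level|15
    http-proxy|http://proxy.example.org:3128
    spm-backend|portage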
1702 - def _analyze_client_repo_string(self, repostring, branch = None, 1703 product = None, _skip_repository_validation = False):
1704 """ 1705 Extract repository information from the provided repository string, 1706 usually contained in the repository settings file, repositories.conf. 1707 1708 @param repostring: valid repository identifier 1709 @type repostring: string 1710 @rtype: tuple (string, dict) 1711 @return: tuple composed by (repository identifier, extracted repository 1712 metadata) 1713 @raise AttributeError: when repostring passed is invalid. 1714 """ 1715 if branch is None: 1716 branch = etpConst['branch'] 1717 if product is None: 1718 product = etpConst['product'] 1719 1720 repo_key, repostring = entropy.tools.extract_setting(repostring) 1721 if repo_key != "repository": 1722 raise AttributeError("repostring must start with 'repository|'") 1723 1724 repo_split = repostring.split("|") 1725 if len(repo_split) < 4: 1726 raise AttributeError("repostring must have at least 5 pipe separated parts") 1727 1728 name = repo_split[0].strip() 1729 if not _skip_repository_validation: 1730 # validate repository id string 1731 if not entropy.tools.validate_repository_id(name): 1732 raise AttributeError("invalid repository identifier") 1733 1734 desc = repo_split[1].strip() 1735 # protocol filter takes place inside entropy.fetchers 1736 packages = [x.strip() for x in repo_split[2].strip().split() \ 1737 if x.strip()] 1738 database = repo_split[3].strip() 1739 return name, self._generate_repository_metadata( 1740 name, desc, packages, [database], product, branch)
1741
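For reference, the legacy repository line handled by this method looks roughly as follows (a sketch with a made-up identifier and URLs; the splitting of the leading "repository" key is delegated to entropy.tools.extract_setting, and the trailing #<compression> suffix is interpreted by _generate_repository_metadata below):

    repository|example-repo|Example Repository|http://pkg.example.org/entropy http://mirror.example.net/entropy|http://pkg.example.org/entropy#bz2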
1742 - def _generate_repository_metadata(self, name, desc, packages, databases, 1743 product, branch, username = None, 1744 password = None, https_validate_cert = True):
1745 """ 1746 Given a set of raw repository metadata information, like name, 1747 description, a list of package urls and the database url, generate 1748 the appropriate metadata. 1749 """ 1750 def _extract(database): 1751 # Support for custom database file compression 1752 dbformat = None 1753 for testno in range(2): 1754 dbformatcolon = database.rfind("#") 1755 if dbformatcolon == -1: 1756 break 1757 1758 try: 1759 dbformat = database[dbformatcolon+1:] 1760 except (IndexError, ValueError, TypeError,): 1761 pass 1762 database = database[:dbformatcolon] 1763 1764 if dbformat not in etpConst['etpdatabasesupportedcformats']: 1765 # fallback to default 1766 dbformat = etpConst['etpdatabasefileformat'] 1767 1768 # strip off, if exists, the deprecated service_uri part (EAPI3 shit) 1769 uricol = database.rfind(",") 1770 if uricol != -1: 1771 database = database[:uricol] 1772 1773 return database, dbformat
1774 1775 data = {} 1776 data['repoid'] = name 1777 1778 databases = [_extract(x) for x in databases] 1779 databases = [(x, y) for x, y in databases if \ 1780 entropy.tools.is_valid_uri(x)] 1781 if not databases: 1782 raise AttributeError("no valid repository database URLs") 1783 1784 data['databases'] = [] 1785 data['plain_databases'] = [] 1786 for index, (database, dbformat) in enumerate(databases): 1787 1788 database_expanded = entropy.tools.expand_plain_database_mirror( 1789 database, product, name, branch) 1790 if database_expanded is None: 1791 database_expanded = const_convert_to_unicode("") 1792 1793 1794 # XXX: backward compatibility support, consider the first 1795 # databases entry as "database". 1796 if index == 0: 1797 data['dbcformat'] = dbformat 1798 data['plain_database'] = database 1799 data['database'] = database_expanded 1800 1801 data['databases'].append({ 1802 'uri': database_expanded, 1803 'dbcformat': dbformat, 1804 }) 1805 data['plain_databases'].append({ 1806 'uri': database, 1807 'dbcformat': dbformat, 1808 }) 1809 1810 data['description'] = desc 1811 data['packages'] = [] 1812 data['plain_packages'] = [] 1813 1814 if username and password: 1815 data['username'] = username 1816 data['password'] = password 1817 1818 if not https_validate_cert: 1819 data['https_validate_cert'] = "false" 1820 1821 data['dbpath'] = etpConst['etpdatabaseclientdir'] + os.path.sep + \ 1822 name + os.path.sep + product + os.path.sep + \ 1823 etpConst['currentarch'] + os.path.sep + branch 1824 1825 data['notice_board'] = data['database'] + os.path.sep + \ 1826 etpConst['rss-notice-board'] 1827 1828 data['local_notice_board'] = data['dbpath'] + os.path.sep + \ 1829 etpConst['rss-notice-board'] 1830 1831 data['local_notice_board_userdata'] = data['dbpath'] + \ 1832 os.path.sep + etpConst['rss-notice-board-userdata'] 1833 1834 data['dbrevision'] = "0" 1835 dbrevision_file = os.path.join(data['dbpath'], 1836 etpConst['etpdatabaserevisionfile']) 1837 1838 try: 1839 enc = etpConst['conf_encoding'] 1840 with codecs.open(dbrevision_file, "r", encoding=enc) as dbrev_f: 1841 data['dbrevision'] = dbrev_f.readline().strip() 1842 except (OSError, IOError): 1843 pass 1844 1845 # setup GPG key path 1846 data['gpg_pubkey'] = data['dbpath'] + os.path.sep + \ 1847 etpConst['etpdatabasegpgfile'] 1848 1849 # setup script paths 1850 data['post_branch_hop_script'] = data['dbpath'] + os.path.sep + \ 1851 etpConst['etp_post_branch_hop_script'] 1852 data['post_branch_upgrade_script'] = data['dbpath'] + \ 1853 os.path.sep + etpConst['etp_post_branch_upgrade_script'] 1854 data['post_repo_update_script'] = data['dbpath'] + os.path.sep + \ 1855 etpConst['etp_post_repo_update_script'] 1856 1857 data['webservices_config'] = data['dbpath'] + os.path.sep + \ 1858 etpConst['etpdatabasewebservicesfile'] 1859 1860 # initialize CONFIG_PROTECT 1861 # will be filled the first time the db will be opened 1862 data['configprotect'] = None 1863 data['configprotectmask'] = None 1864 1865 for repo_package in packages: 1866 new_repo_package = entropy.tools.expand_plain_package_mirror( 1867 repo_package, product, name) 1868 if new_repo_package is None: 1869 continue 1870 data['plain_packages'].append(repo_package) 1871 data['packages'].append(new_repo_package) 1872 1873 return data 1874
1875 -    def _repositories_parser(self):
1876 """ 1877 Setup Entropy Client repository settings reading them from 1878 the relative config file specified in /etc/entropy/repositories.conf 1879 1880 @return: parsed metadata 1881 @rtype: dict 1882 """ 1883 repo_conf = self.__setting_files['repositories'] 1884 data = { 1885 'available': {}, 1886 'excluded': {}, 1887 'order': [], 1888 'product': etpConst['product'], 1889 'branch': etpConst['branch'], 1890 'arch': etpConst['currentarch'], 1891 'default_repository': etpConst['officialrepositoryid'], 1892 'transfer_limit': etpConst['downloadspeedlimit'], 1893 'timeout': etpConst['default_download_timeout'], 1894 'security_advisories_url': etpConst['securityurl'], 1895 'developer_repo': False, 1896 'differential_update': True, 1897 } 1898 1899 enc = etpConst['conf_encoding'] 1900 # TODO: repository = statements in repositories.conf 1901 # will be deprecated by mid 2014 1902 try: 1903 with codecs.open(repo_conf, "r", encoding=enc) as repo_f: 1904 repositoriesconf = [x.strip() for x in \ 1905 repo_f.readlines() if x.strip()] 1906 except (OSError, IOError) as err: 1907 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EISDIR): 1908 raise 1909 return data 1910 1911 repositories_d_conf = self.__generic_d_parser( 1912 "repositories_conf_d", None) 1913 1914 # add content of skipped (disabled) files as commented 1915 # out stuff 1916 skipped_conf = ["#" + x for x in self.__generic_d_parser( 1917 "repositories_conf_d", None, parse_skipped=True)] 1918 repositories_d_conf += skipped_conf 1919 1920 repoids = set() 1921 1922 def _product_func(line, setting): 1923 data['product'] = setting
1924 
1925          def _branch_func(line, setting):
1926              data['branch'] = setting
1927 
1928          def _repository_func(line, setting):
1929 
1930              excluded = False
1931              my_repodata = data['available']
1932 
1933              if line.startswith("#"):
1934                  excluded = True
1935                  my_repodata = data['excluded']
1936                  line = line.lstrip(" #")
1937 
1938              try:
1939                  reponame, repodata = self._analyze_client_repo_string(line,
1940                      data['branch'], data['product'])
1941              except AttributeError:
1942                  return
1943 
1944              # validate repository id string
1945              if not entropy.tools.validate_repository_id(reponame):
1946                  sys.stderr.write("!!! invalid repository id '%s' in '%s'\n" % (
1947                      reponame, repo_conf))
1948                  return
1949 
1950              repoids.add(reponame)
1951              obj = my_repodata.get(reponame)
1952              if obj is not None:
1953 
1954                  obj['plain_packages'].extend(repodata['plain_packages'])
1955                  obj['packages'].extend(repodata['packages'])
1956 
1957                  if (not obj['plain_database']) and \
1958                      repodata['plain_database']:
1959 
1960                      obj['dbrevision'] = repodata['dbrevision']
1961                      obj['plain_database'] = repodata['plain_database']
1962                      obj['database'] = repodata['database']
1963                      obj['dbcformat'] = repodata['dbcformat']
1964 
1965              else:
1966                  my_repodata[reponame] = repodata.copy()
1967                  if not excluded:
1968                      data['order'].append(reponame)
1969 
1970          def _offrepoid(line, setting):
1971              data['default_repository'] = setting
1972 
1973          def _developer_repo(line, setting):
1974              bool_setting = entropy.tools.setting_to_bool(setting)
1975              if bool_setting is not None:
1976                  data['developer_repo'] = bool_setting
1977 
1978          def _differential_update(line, setting):
1979              bool_setting = entropy.tools.setting_to_bool(setting)
1980              if bool_setting is not None:
1981                  data['differential_update'] = bool_setting
1982 
1983          def _down_speed_limit(line, setting):
1984              data['transfer_limit'] = None
1985              try:
1986                  myval = int(setting)
1987                  if myval > 0:
1988                      data['transfer_limit'] = myval
1989              except ValueError:
1990                  data['transfer_limit'] = None
1991 
1992          def _down_timeout(line, setting):
1993              try:
1994                  data['timeout'] = int(setting)
1995              except ValueError:
1996                  return
1997 
1998          def _security_url(line, setting):
1999              data['security_advisories_url'] = setting
2000 
2001          settings_map = {
2002              'product': _product_func,
2003              'branch': _branch_func,
2004              'repository': _repository_func,
2005              '#repository': _repository_func,
2006              # backward compatibility
2007              'officialrepositoryid': _offrepoid,
2008              'official-repository-id': _offrepoid,
2009              'developer-repo': _developer_repo,
2010              'differential-update': _differential_update,
2011              # backward compatibility
2012              'downloadspeedlimit': _down_speed_limit,
2013              'download-speed-limit': _down_speed_limit,
2014              # backward compatibility
2015              'downloadtimeout': _down_timeout,
2016              'download-timeout': _down_timeout,
2017              # backward compatibility
2018              'securityurl': _security_url,
2019              'security-url': _security_url,
2020          }
2021 
2022          # setup product and branch first
2023          for line in repositoriesconf:
2024 
2025              key, value = entropy.tools.extract_setting(line)
2026              if key is None:
2027                  continue
2028              key = key.replace(" ", "")
2029              key = key.replace("\t", "")
2030 
2031              if key not in ("product", "branch"):
2032                  continue
2033 
2034              func = settings_map.get(key)
2035              if func is None:
2036                  continue
2037              func(line, value)
2038 
2039          for line in repositoriesconf:
2040 
2041              key, value = entropy.tools.extract_setting(line)
2042              if key is None:
2043                  continue
2044              key = key.replace(" ", "")
2045              key = key.replace("\t", "")
2046 
2047              func = settings_map.get(key)
2048              if func is None:
2049                  continue
2050              func(line, value)
2051 
2052          for line in repositories_d_conf:
2053 
2054              key, value = entropy.tools.extract_setting(line)
2055              if key is None:
2056                  continue
2057              key = key.replace(" ", "")
2058              key = key.replace("\t", "")
2059              if key not in ("repository", "#repository"):
2060                  # no other statements supported from here
2061                  continue
2062 
2063              func = settings_map.get(key)
2064              if func is None:
2065                  continue
2066              func(line, value)
2067 
2068          # .ini-like file support.
2069          _conf_dir, setting_files, skipped_files, _auto_upd = \
2070              self.__setting_dirs["repositories_conf_d"]
2071          candidate_inis = [x for x, y in setting_files]
2072          disabled_candidate_inis = [x for x, y in skipped_files]
2073 
2074          for inis in (candidate_inis, disabled_candidate_inis):
2075              ini_parser = RepositoryConfigParser(encoding = enc)
2076              try:
2077                  ini_parser.read(inis)
2078              except (IOError, OSError) as err:
2079                  sys.stderr.write("Cannot parse %s: %s\n" % (
2080                      " ".join(inis),
2081                      err))
2082                  ini_parser = None
2083 
2084              if ini_parser:
2085                  ini_conf_excluded = inis is disabled_candidate_inis
2086                  ini_repositories = ini_parser.repositories()
2087                  for ini_repository in ini_repositories:
2088                      if ini_repository in repoids:
2089                          # double syntax is not supported.
2090                          continue
2091 
2092                      repoids.add(ini_repository)
2093                      ini_dbs = ini_parser.repo(ini_repository)
2094                      try:
2095                          ini_pkgs = ini_parser.pkgs(ini_repository)
2096                      except KeyError:
2097                          ini_pkgs = []
2098 
2099                      try:
2100                          ini_desc = ini_parser.desc(ini_repository)
2101                      except KeyError:
2102                          ini_desc = _("No description")
2103 
2104                      try:
2105                          ini_username = ini_parser.username(ini_repository)
2106                      except KeyError:
2107                          ini_username = None
2108 
2109                      try:
2110                          ini_password = ini_parser.password(ini_repository)
2111                      except KeyError:
2112                          ini_password = None
2113 
2114                      ini_https_validate_cert = ini_parser.https_validate_cert(ini_repository)
2115 
2116                      ini_excluded = not ini_parser.enabled(ini_repository)
2117                      ini_data = self._generate_repository_metadata(
2118                          ini_repository, ini_desc, ini_pkgs, ini_dbs,
2119                          data['product'], data['branch'],
2120                          ini_username, ini_password,
2121                          ini_https_validate_cert)
2122                      if ini_excluded or ini_conf_excluded:
2123                          data['excluded'][ini_repository] = ini_data
2124                      else:
2125                          data['available'][ini_repository] = ini_data
2126                          data['order'].append(ini_repository)
2127 
2128          try:
2129              tx_limit = int(os.getenv("ETP_DOWNLOAD_KB"))
2130          except (ValueError, TypeError,):
2131              tx_limit = None
2132          if tx_limit is not None:
2133              data['transfer_limit'] = tx_limit
2134 
2135          for repoid in repoids:
2136 
2137              found_into = 'available'
2138              if repoid in data['available']:
2139                  repo_data = data['available'][repoid]
2140              elif repoid in data['excluded']:
2141                  repo_data = data['excluded'][repoid]
2142                  found_into = 'excluded'
2143              else:
2144                  continue
2145 
2146              # fixup repository settings
2147              if not repo_data['plain_database'].strip():
2148                  data[found_into].pop(repoid)
2149                  if repoid in data['order']:
2150                      data['order'].remove(repoid)
2151 
2152          # insert extra package mirrors directly from repository dirs,
2153          # if they actually exist. use data['order'] because it reflects
2154          # the list of available repos.
2155          for repoid in data['order']:
2156              if repoid in data['available']:
2157                  obj = data['available'][repoid]
2158              elif repoid in data['excluded']:
2159                  obj = data['excluded'][repoid]
2160              else:
2161                  continue
2162 
2163              mirrors_file = os.path.join(obj['dbpath'],
2164                  etpConst['etpdatabasemirrorsfile'])
2165 
2166              try:
2167                  raw_mirrors = entropy.tools.generic_file_content_parser(
2168                      mirrors_file, encoding = etpConst['conf_encoding'])
2169              except (OSError, IOError):
2170                  raw_mirrors = []
2171 
2172              mirrors_data = []
2173              for mirror in raw_mirrors:
2174                  expanded_mirror = entropy.tools.expand_plain_package_mirror(
2175                      mirror, data['product'], repoid)
2176                  if expanded_mirror is None:
2177                      continue
2178                  mirrors_data.append((mirror, expanded_mirror))
2179 
2180              # add in reverse order, at the beginning of the list
2181              mirrors_data.reverse()
2182              for mirror, expanded_mirror in mirrors_data:
2183                  obj['plain_packages'].insert(0, mirror)
2184                  obj['packages'].insert(0, expanded_mirror)
2185 
2186              # now use fallback mirrors information to properly sort
2187              # fallback mirrors, giving them the lowest priority even if
2188              # they are listed on top.
2189              fallback_mirrors_file = os.path.join(obj['dbpath'],
2190                  etpConst['etpdatabasefallbackmirrorsfile'])
2191 
2192              try:
2193                  fallback_mirrors = entropy.tools.generic_file_content_parser(
2194                      fallback_mirrors_file, encoding = etpConst['conf_encoding'])
2195              except (OSError, IOError):
2196                  fallback_mirrors = []
2197 
2198              pkgs_map = {}
2199              if fallback_mirrors:
2200                  for pkg_url in obj['plain_packages']:
2201                      urlobj = entropy.tools.spliturl(pkg_url)
2202                      try:
2203                          url_key = urlobj.netloc
2204                      except AttributeError as err:
2205                          const_debug_write(__name__,
2206                              "error splitting url: %s" % (err,))
2207                          url_key = None
2208                      if url_key is None:
2209                          break
2210                      map_obj = pkgs_map.setdefault(url_key, [])
2211                      map_obj.append(pkg_url)
2212 
2213              fallback_urls = []
2214              if pkgs_map:
2215                  for fallback_mirror in fallback_mirrors:
2216                      belonging_urls = pkgs_map.get(fallback_mirror)
2217                      if belonging_urls is None:
2218                          # nothing to do
2219                          continue
2220                      fallback_urls.extend(belonging_urls)
2221 
2222              if fallback_urls:
2223                  for fallback_url in fallback_urls:
2224                      expanded_fallback_url = \
2225                          entropy.tools.expand_plain_package_mirror(
2226                              fallback_url, data['product'], repoid)
2227                      while True:
2228                          try:
2229                              obj['plain_packages'].remove(fallback_url)
2230                          except ValueError:
2231                              break
2232                      while True:
2233                          try:
2234                              obj['packages'].remove(expanded_fallback_url)
2235                          except ValueError:
2236                              break
2237                      obj['plain_packages'].insert(0, fallback_url)
2238                      obj['packages'].insert(0, expanded_fallback_url)
2239 
2240          # override parsed branch from env
2241          override_branch = os.getenv('ETP_BRANCH')
2242          if override_branch is not None:
2243              data['branch'] = override_branch
2244 
2245          # remove repositories from excluded if they are also in available
2246          for repoid in set(data['excluded']):
2247              if repoid in data['available']:
2248                  try:
2249                      del data['excluded'][repoid]
2250                  except KeyError:
2251                      continue
2252 
2253          return data
2254 
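To see the .ini-like branch of _repositories_parser() in isolation, the sketch below drives RepositoryConfigParser directly, the same way the method does. The configuration file path is made up for illustration; only accessor methods already used above (repositories(), enabled(), repo(), pkgs(), desc()) are called:

# Hedged usage sketch; assumes this module's namespace (RepositoryConfigParser,
# etpConst) and a hypothetical config file under /etc/entropy/repositories.conf.d/.
enc = etpConst['conf_encoding']
parser = RepositoryConfigParser(encoding = enc)
parser.read(["/etc/entropy/repositories.conf.d/entropy_sabayon-limbo"])

for repoid in parser.repositories():
    print(repoid, "enabled:", parser.enabled(repoid))
    print("  desc:", parser.desc(repoid))
    print("  repo urls:", parser.repo(repoid))
    try:
        print("  pkg urls:", parser.pkgs(repoid))
    except KeyError:
        # optional statement, mirrors the try/except in _repositories_parser()
        print("  pkg urls: none configured")

KeyError is only expected for optional statements such as "pkg =" and "desc =", which is why _repositories_parser() wraps those accessors in try/except blocks.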
2255 -    def __generic_parser(self, filepath, comment_tag = "#"):
2256 """ 2257 Internal method. This is the generic file parser here. 2258 2259 @param filepath: valid path 2260 @type filepath: string 2261 @keyword comment_tag: default comment tag (column where comments starts) 2262 @type: string 2263 @return: raw text extracted from file 2264 @rtype: list 2265 """ 2266 enc = etpConst['conf_encoding'] 2267 lines = [] 2268 try: 2269 lines += entropy.tools.generic_file_content_parser( 2270 filepath, comment_tag = comment_tag, encoding = enc) 2271 data = SystemSettings.CachingList(lines) 2272 except IOError as err: 2273 const_debug_write(__name__, "IOError __generic_parser, %s: %s" % ( 2274 filepath, err,)) 2275 except OSError as err: 2276 const_debug_write(__name__, "OSError __generic_parser, %s: %s" % ( 2277 filepath, err,)) 2278 except UnicodeEncodeError as err: 2279 const_debug_write(__name__, "UEE __generic_parser, %s: %s" % ( 2280 filepath, err,)) 2281 except UnicodeDecodeError as err: 2282 const_debug_write(__name__, "UDE __generic_parser, %s: %s" % ( 2283 filepath, err,)) 2284 2285 return SystemSettings.CachingList(lines)
2286
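For reference, here is a minimal sketch of the parsing behaviour that __generic_parser() delegates to entropy.tools.generic_file_content_parser(), under the assumption that the helper simply drops blank lines and lines starting with the comment tag (a simplification of the real implementation, and not the Entropy function itself):

# Hedged, self-contained approximation of the generic file parsing step.
import codecs

def sketch_generic_parser(filepath, comment_tag = "#", encoding = "UTF-8"):
    """Return the stripped, non-comment lines of a text file as a list."""
    lines = []
    try:
        with codecs.open(filepath, "r", encoding = encoding) as parse_f:
            for line in parse_f.readlines():
                line = line.strip()
                if not line or line.startswith(comment_tag):
                    continue
                lines.append(line)
    except (OSError, IOError):
        # mirror __generic_parser(): unreadable files yield an empty list
        pass
    return lines

The real method additionally wraps the result in SystemSettings.CachingList and logs I/O and Unicode errors through const_debug_write() instead of raising them.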
2287 -    def validate_entropy_cache(self, *args, **kwargs):
2288 """ 2289 A call to this method is no longer necessary. 2290 """ 2291 warnings.warn("A call to this method is no longer necessary")
2292