
Source Code for Module entropy.client.interfaces.db

# -*- coding: utf-8 -*-
"""

    @author: Fabio Erculiani <lxnay@sabayon.org>
    @contact: lxnay@sabayon.org
    @copyright: Fabio Erculiani
    @license: GPL-2

    B{Entropy Package Manager Client EntropyRepository plugin code}.

"""
import codecs
import errno
import os
import shutil
import subprocess
import sys
import threading
import time

from entropy.const import const_debug_write, const_setup_perms, etpConst, \
    const_set_nice_level, const_setup_file, const_convert_to_unicode, \
    const_debug_enabled, const_mkdtemp, const_mkstemp, const_file_readable, \
    const_file_writable
from entropy.output import blue, darkred, red, darkgreen, purple, teal, brown, \
    bold, TextInterface
from entropy.dump import dumpobj, loadobj
from entropy.cache import EntropyCacher
from entropy.db import EntropyRepository
from entropy.exceptions import RepositoryError, SystemDatabaseError, \
    PermissionDenied
from entropy.security import Repository as RepositorySecurity
from entropy.misc import TimeScheduled, ParallelTask
from entropy.fetchers import UrlFetcher
from entropy.i18n import _
from entropy.db.skel import EntropyRepositoryPlugin, EntropyRepositoryBase
from entropy.db.exceptions import IntegrityError, OperationalError, Error, \
    DatabaseError
from entropy.core.settings.base import SystemSettings
from entropy.services.client import WebService
from entropy.client.services.interfaces import RepositoryWebService, \
    RepositoryWebServiceFactory

import entropy.dep
import entropy.tools

__all__ = ["CachedRepository", "ClientEntropyRepositoryPlugin",
    "InstalledPackagesRepository", "AvailablePackagesRepository",
    "GenericRepository"]

class ClientEntropyRepositoryPlugin(EntropyRepositoryPlugin):

    def __init__(self, client_interface, metadata = None):
        """
        Entropy client-side repository EntropyRepository Plugin class.
        This class will be instantiated and automatically added to
        EntropyRepository instances generated by Entropy Client.

        @param client_interface: Entropy Client interface instance
        @type client_interface: entropy.client.interfaces.Client class
        @param metadata: any dict form metadata map (key => value)
        @type metadata: dict
        """
        EntropyRepositoryPlugin.__init__(self)
        self._client = client_interface
        if metadata is None:
            self._metadata = {}
        else:
            self._metadata = metadata

    def get_id(self):
        return "__client__"

    def get_metadata(self):
        return self._metadata

    def add_plugin_hook(self, entropy_repository_instance):
        const_debug_write(__name__,
            "ClientEntropyRepositoryPlugin: calling add_plugin_hook => %s" % (
                self,)
        )

        out_intf = self._metadata.get('output_interface')
        if out_intf is not None:
            entropy_repository_instance.output = out_intf.output
            entropy_repository_instance.ask_question = out_intf.ask_question

        return 0


class CachedRepository(EntropyRepository):
    """
    This kind of repository cannot have close() called directly, without
    a valid token passed. This is because the class object is cached somewhere
    and calling close() would turn into a software bug.
    """
    def setCloseToken(self, token):
        """
        Set a token that can be used to validate close() calls. Calling
        close() on these repos is prohibited and considered a software bug.
        Only Entropy Client should be able to close them.
        """
        self._close_token = token

    def close(self, safe=False, _token = None):
        """
        Reimplemented from EntropyRepository
        """
        close_token = getattr(self, "_close_token", None)
        if close_token is not None:
            if (_token is None) or (_token != close_token):
                raise PermissionDenied(
                    "cannot close this repository directly. Software bug!")
        return EntropyRepository.close(self, safe=safe)


class InstalledPackagesRepository(CachedRepository):
    """
    This class represents the installed packages repository and is a
    subclass of CachedRepository.
    """

    # Name of the repository
    NAME = "__system__"

    def __init__(self, *args, **kwargs):
        # force our own name, always.
        kwargs = kwargs.copy()
        kwargs['name'] = self.NAME
        super(InstalledPackagesRepository, self).__init__(
            *args, **kwargs)

        # ensure proper repository file permissions
        if entropy.tools.is_root() and os.path.isfile(self._db):
            const_setup_file(self._db, etpConst['entropygid'], 0o644,
                uid = etpConst['uid'])

    def handlePackage(self, pkg_data, revision = None,
                      formattedContent = False):
        """
        Reimplemented from EntropyRepository.
        """
        removelist = self.getPackagesToRemove(
            pkg_data['name'], pkg_data['category'],
            pkg_data['slot'], pkg_data['injected']
        )
        for r_package_id in removelist:
            self.removePackage(r_package_id)
        return self.addPackage(pkg_data, revision = revision,
            formatted_content = formattedContent)


class AvailablePackagesRepositoryUpdater(object):

    """
    AvailablePackagesRepository update logic class.
    The required logic for updating a repository is stored here.
    """
    WEBSERV_CACHE_ID = 'webserv_repo/segment_'

    FETCH_ERRORS = (
        UrlFetcher.GENERIC_FETCH_WARN,
        UrlFetcher.TIMEOUT_FETCH_ERROR,
        UrlFetcher.GENERIC_FETCH_ERROR)

    def __init__(self, entropy_client, repository_id, force, gpg):
        self.__force = force
        self.__big_sock_timeout = 20
        self._repository_id = repository_id
        self._cacher = EntropyCacher()
        self._entropy = entropy_client
        self._settings = SystemSettings()
        self._gpg_feature = gpg
        self._supported_apis = etpConst['supportedapis']
        self._supported_download_items = (
            "db", "dbck", "dblight", "ck", "cklight", "compck",
            "lock", "dbdump", "dbdumplight", "dbdumplightck", "dbdumpck",
            "meta_file", "meta_file_gpg", "notice_board"
        )
        self._developer_repo = \
            self._settings['repositories']['developer_repo']
        self._differential_update = \
            self._settings['repositories']['differential_update']
        if self._developer_repo:
            const_debug_write(__name__,
                "__init__: developer repo mode enabled")
        self.__webservices = None
        self.__webservice = None
        self.__repo_eapi = None

        avail_data = self._settings['repositories']['available']
        if self._repository_id not in avail_data:
            raise KeyError("Repository not available")

    @property
    def _repo_eapi(self):
        if self.__repo_eapi is None:
            self.__repo_eapi = self.__get_repo_eapi()
        return self.__repo_eapi

    @_repo_eapi.setter
    def _repo_eapi(self, value):
        self.__repo_eapi = value

    @property
    def _webservices(self):
        if self.__webservices is None:
            if hasattr(self._entropy, "RepositoryWebServices"):
                self.__webservices = self._entropy.RepositoryWebServices()
            else:
                # in case self._entropy is a simple TextInterface()
                # like how it's called in remote_revision().
                self.__webservices = RepositoryWebServiceFactory(self._entropy)
                # cross fingers!

        return self.__webservices

    @property
    def _webservice(self):
        if self.__webservice is None:
            self.__webservice = self._webservices.new(self._repository_id)
            self.__webservice._set_timeout(self.__big_sock_timeout)
        return self.__webservice

    def __get_webserv_repository_metadata(self):
        try:
            data = self._webservice.get_repository_metadata()
        except WebService.WebServiceException as err:
            const_debug_write(__name__,
                "__get_webserv_repository_metadata: error: %s" % (err,))
            data = {}
        return data

    def __get_webserv_repository_revision(self):
        try:
            revision = self._webservice.get_revision()
        except WebService.WebServiceException as err:
            const_debug_write(__name__,
                "__get_webserv_repository_revision: error: %s" % (err,))
            revision = None
        return revision

    def __check_webserv_availability(self):
        try:
            webserv = self._webservices.new(self._repository_id)
        except WebService.UnsupportedService:
            return False

        try:
            available = webserv.update_service_available(cache = False)
        except WebService.WebServiceException:
            available = False
        return available

    def __get_webserv_local_database(self):

        avail_data = self._settings['repositories']['available']
        repo_data = avail_data[self._repository_id]

        dbfile = os.path.join(repo_data['dbpath'],
            etpConst['etpdatabasefile'])
        dbconn = None
        try:
            dbconn = self._entropy.open_generic_repository(dbfile,
                xcache = False, indexing_override = False)
            dbconn.validate()
        except (OperationalError, IntegrityError, SystemDatabaseError,
            IOError, OSError,):
            dbconn = None
        return dbconn

    def __get_webserv_database_differences(self, webserv, package_ids):

        try:
            remote_package_ids = webserv.get_package_ids()
        except WebService.WebServiceException as err:
            const_debug_write(__name__,
                "__get_webserv_database_differences: error: %s" % (err,))
            return None, None

        added = [x for x in remote_package_ids if x not in package_ids]
        removed = [x for x in package_ids if x not in remote_package_ids]
        # sort, to increase determinism
        added.sort()
        return added, removed

    def __eapi1_eapi2_databases_alignment(self, dbfile, dbfile_old):

        dbconn = self._entropy.open_generic_repository(dbfile,
            xcache = False, indexing_override = False)
        old_dbconn = self._entropy.open_generic_repository(dbfile_old,
            xcache = False, indexing_override = False)
        upd_rc = 0
        try:
            upd_rc = old_dbconn.alignDatabases(dbconn, output_header = "\t")
        except (OperationalError, IntegrityError, DatabaseError,):
            pass
        old_dbconn.close()
        dbconn.close()
        if upd_rc > 0:
            # -1 means no changes, == force used
            # 0 means too much hassle
            os.rename(dbfile_old, dbfile)
        return upd_rc

    def __eapi2_inject_downloaded_dump(self, dumpfile, dbfile, cmethod):

        # load the dump into database
        mytxt = "%s %s, %s %s" % (
            red(_("Injecting downloaded dump")),
            darkgreen(etpConst['etpdatabasedumplight']),
            red(_("please wait")),
            red("..."),
        )
        self._entropy.output(
            mytxt,
            importance = 0,
            level = "info",
            header = "\t"
        )
        dbconn = self._entropy.open_generic_repository(dbfile,
            xcache = False, indexing_override = False)
        rc = dbconn.importRepository(dumpfile, dbfile)
        dbconn.close()
        return rc

    def __get_repo_eapi(self):

        eapi_env = os.getenv("FORCE_EAPI")
        sqlite3_access = os.path.lexists("/usr/bin/sqlite3")
        sqlite3_rc = subprocess.call("/usr/bin/sqlite3 -version > /dev/null",
            shell = True)
        try:
            eapi_env_clear = int(eapi_env)
            if eapi_env_clear not in self._supported_apis:
                raise ValueError()
        except (ValueError, TypeError,):
            eapi_env_clear = None

        repo_eapi = 2
        eapi_avail = self.__check_webserv_availability()
        if eapi_avail:
            repo_eapi = 3
        else:
            if not sqlite3_access or entropy.tools.islive():
                repo_eapi = 1
            elif sqlite3_rc != 0:
                repo_eapi = 1

        # if differential update is disabled and FORCE_EAPI is not overriding
        # we cannot use EAPI=3
        if (eapi_env_clear is None) and (not self._differential_update) and \
            (repo_eapi == 3):
            const_debug_write(__name__,
                "__get_repo_eapi: differential update is disabled !")
            repo_eapi -= 1

        # check EAPI
        if eapi_env_clear is not None:
            repo_eapi = eapi_env_clear

            # FORCE_EAPI is triggered, disable
            # developer_repo mode
            self._developer_repo = False
            const_debug_write(__name__,
                "__get_repo_eapi: developer repo mode disabled FORCE_EAPI")

        elif repo_eapi > 1 and self._developer_repo:
            # enforce EAPI=1
            repo_eapi = 1

        const_debug_write(__name__,
            "__get_repo_eapi: final eapi set to %s" % (repo_eapi,))

        return repo_eapi
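
    # A recap of what the EAPI levels chosen above mean in this module
    # (drawn from the code paths below, not an exhaustive specification):
    #
    #   EAPI=1  fetch the whole (or "light") SQLite database file and
    #           unpack it locally;
    #   EAPI=2  fetch a compressed dump ("dbdumplight") and inject it
    #           via importRepository();
    #   EAPI=3  differential sync through the repository web service.
    #
    # Setting FORCE_EAPI in the environment pins the level and, as a side
    # effect, disables developer-repo mode.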

    def _is_repository_updatable(self, revision):
        """
        Given a remote repository revision, return whether
        the repository is updatable or not.
        """
        if self.__force:
            return True

        local = AvailablePackagesRepository.revision(
            self._repository_id)
        return local != revision

    def _select_database_mirror(self):
        """
        Verify that the database mirror URL is available and working.
        If this is not the case, fall back to the first available
        and working packages mirror URL.
        """
        repos_data = self._settings['repositories']
        avail_data = repos_data['available']
        repo_data = avail_data[self._repository_id]
        database_uris = repo_data['databases']
        basic_user = repo_data.get('username')
        basic_pwd = repo_data.get('password')
        https_validate_cert = not repo_data.get('https_validate_cert') == "false"

        ws_revision = self._remote_webservice_revision()

        # Setup the repository uri
        revision = None
        uri = None
        cformat = None
        for counter, uri_meta in enumerate(database_uris, 1):

            mytxt = "%s: %s [%s]" % (
                red(_("Scanning URL")),
                teal(uri_meta['uri']),
                brown(uri_meta['dbcformat']),)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "info",
                header = blue(" # "),
                back = True,
                count = (counter, len(database_uris))
            )

            repo_uri = uri_meta['uri']

            uri_revision = self._remote_revision(repo_uri,
                http_basic_user = basic_user,
                http_basic_pwd = basic_pwd,
                https_validate_cert = https_validate_cert)

            if uri_revision != -1:

                mytxt = "%s: %s [%s]" % (
                    darkgreen(_("Selected URL")),
                    teal(uri_meta['uri']),
                    brown(uri_meta['dbcformat']),)
                self._entropy.output(
                    mytxt,
                    importance = 1,
                    level = "info",
                    header = blue(" # ")
                )

                revision = uri_revision
                uri = repo_uri
                cformat = uri_meta['dbcformat']
                break

        if uri is not None:
            if ws_revision is not None:
                return ws_revision, uri, cformat
            elif revision is not None:
                return revision, uri, cformat

        self._entropy.output(
            "%s: %s" % (
                darkred(_("Attention")),
                brown(_("repository is not available at the database URLs")),
            ),
            importance = 1, level = "warning", header = "\t",
        )
        self._entropy.output(
            "%s" % (
                purple(
                    _("Looking for an alternate route using package mirrors")
                ),),
            importance = 1, level = "warning", header = "\t",
        )

        package_uris = repo_data['plain_packages']
        default_cformat = etpConst['etpdatabasefileformat']
        for package_uri in package_uris:
            self._entropy.output(
                "%s:" % (brown(_("Checking repository URL")),),
                importance = 1, level = "warning", header = "\t",
            )
            self._entropy.output(
                "%s" % (package_uri,),
                importance = 1, level = "warning", header = "\t",
            )

            url = entropy.tools.expand_plain_database_mirror(
                package_uri, repos_data['product'],
                self._repository_id,
                repos_data['branch'])
            if url == uri:
                # skip same URL
                continue

            revision = self._remote_revision(url,
                http_basic_user = basic_user,
                http_basic_pwd = basic_pwd,
                https_validate_cert = https_validate_cert)
            if revision != -1:
                # found
                self._entropy.output(
                    "%s:" % (brown(_("Found repository at URL")),),
                    importance = 1, level = "warning", header = "\t",
                )
                self._entropy.output(
                    "%s" % (url,),
                    importance = 1, level = "warning", header = "\t",
                )
                return revision, url, default_cformat

        self._entropy.output(
            "%s" % (
                purple(
                    _("Unable to find alternate repository mirrors. Sorry.")
                ),),
            importance = 1, level = "warning", header = "\t",
        )
        return None

    def _show_repository_information(self):
        """
        Show the repository information to the user.
        """
        avail_data = self._settings['repositories']['available']
        repo_data = avail_data[self._repository_id]

        self._entropy.output(
            bold("%s") % ( repo_data['description'] ),
            importance = 2,
            level = "info",
            header = blue(" # ")
        )

        for uri_meta in repo_data['databases']:
            mytxt = "%s: %s [%s]" % (
                red(_("Repository URL")),
                darkgreen(uri_meta['uri']),
                brown(uri_meta['dbcformat']),)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "info",
                header = blue(" # ")
            )

        mytxt = "%s: %s" % (red(_("Repository local path")),
            darkgreen(repo_data['dbpath']),)
        self._entropy.output(
            mytxt,
            importance = 0,
            level = "info",
            header = blue(" # ")
        )
        mytxt = "%s: %s" % (red(_("Repository API")),
            darkgreen(str(self._repo_eapi)),)
        self._entropy.output(
            mytxt,
            importance = 0,
            level = "info",
            header = blue(" # ")
        )

    def __append_gpg_signature_to_path(self, path):
        return path + etpConst['etpgpgextension']

    def _ensure_repository_path(self):
        """
        Make sure that the local repository directory has valid
        permissions and ownership.
        """
        avail_data = self._settings['repositories']['available']
        repo_data = avail_data[self._repository_id]

        # create dir if it doesn't exist
        if not os.path.isdir(repo_data['dbpath']):
            os.makedirs(repo_data['dbpath'], 0o755)

        try:
            items = os.listdir(etpConst['etpdatabaseclientdir'])
        except OSError:
            items = []

        # we cannot operate dir wide (etpdatabaseclientdir) because
        # there are lock files in there and we should not touch them
        for item in items:
            repo_dir_path = os.path.join(
                etpConst['etpdatabaseclientdir'],
                item)
            if not os.path.isdir(repo_dir_path):
                continue
            const_setup_perms(
                repo_dir_path,
                etpConst['entropygid'],
                f_perms = 0o644)

    def __remove_repository_files(self):
        sys_set = self._settings
        avail_data = sys_set['repositories']['available']
        repo_dbpath = avail_data[self._repository_id]['dbpath']
        shutil.rmtree(repo_dbpath, True)

    def __database_download(self, uri, cmethod):

        mytxt = "%s ..." % (red(_("Downloading repository")),)
        self._entropy.output(
            mytxt,
            importance = 1,
            level = "info",
            header = "\t"
        )

        downloaded_item = None
        down_status = False
        sig_status = False
        if self._repo_eapi == 2:

            down_item = "dbdumplight"

            down_status = self._download_item(
                uri, down_item, cmethod,
                disallow_redirect = True)
            if down_status:
                # get GPG file if available
                sig_status = self._download_item(
                    uri, down_item, cmethod,
                    disallow_redirect = True,
                    get_signature = True)

            downloaded_item = down_item

        if not down_status: # fallback to old db

            self._repo_eapi = 1
            down_item = "dblight"
            if self._developer_repo:
                # if developer repo mode is enabled, fetch full-blown db
                down_item = "db"
                const_debug_write(__name__,
                    "__handle_database_download: developer repo mode enabled")

            down_status = self._download_item(
                uri, down_item, cmethod,
                disallow_redirect = True)
            if down_status:
                sig_status = self._download_item(
                    uri, down_item, cmethod,
                    disallow_redirect = True,
                    get_signature = True)

            downloaded_item = down_item

        if not down_status:
            mytxt = "%s: %s." % (bold(_("Attention")),
                red(_("unable to download the repository")),)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = "\t"
            )

        return down_status, sig_status, downloaded_item

    def __database_checksum_download(self, uri, cmethod):

        downitem = 'cklight'
        if self._developer_repo:
            downitem = 'dbck'
        if self._repo_eapi == 2: # EAPI = 2
            downitem = 'dbdumplightck'

        garbage_url, hashfile = self._construct_paths(
            uri, downitem, cmethod)
        mytxt = "%s %s %s" % (
            red(_("Downloading checksum")),
            darkgreen(os.path.basename(hashfile)),
            red("..."),
        )
        # download checksum
        self._entropy.output(
            mytxt,
            importance = 0,
            level = "info",
            header = "\t"
        )

        db_down_status = self._download_item(
            uri, downitem, cmethod,
            disallow_redirect = True)

        if not db_down_status and (downitem not in ('cklight', 'dbck',)):
            # fallback to old method
            retryitem = 'cklight'
            if self._developer_repo:
                retryitem = 'dbck'
            db_down_status = self._download_item(
                uri, retryitem, cmethod,
                disallow_redirect = True)

        if not db_down_status:
            mytxt = "%s %s !" % (
                red(_("Cannot fetch checksum")),
                red(_("Cannot verify repository integrity")),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = "\t"
            )
        return db_down_status

    def __verify_file_checksum(self, file_path, md5_checksum_path):
        enc = etpConst['conf_encoding']
        with codecs.open(md5_checksum_path, "r", encoding=enc) as ck_f:
            md5hash = ck_f.readline().strip()
            if not md5hash: # invalid !! => [] would cause IndexError
                return False
            md5hash = md5hash.split()[0]
        return entropy.tools.compare_md5(file_path, md5hash)
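
    # The checksum file read above follows the md5sum(1) layout: hash first,
    # then the file name, on the first line. Only the first whitespace
    # separated token is used. Example content (made-up values):
    #
    #   d41d8cd98f00b204e9800998ecf8427e  packages.db.light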

    def __verify_database_checksum(self, uri, cmethod = None):

        sys_settings_repos = self._settings['repositories']
        avail_config = sys_settings_repos['available'][self._repository_id]

        sep = os.path.sep
        if self._repo_eapi == 1:
            if self._developer_repo:
                remote_gb, dbfile = self._construct_paths(
                    uri, "db", cmethod)
                remote_gb, md5file = self._construct_paths(
                    uri, "dbck", cmethod)
            else:
                remote_gb, dbfile = self._construct_paths(
                    uri, "dblight", cmethod)
                remote_gb, md5file = self._construct_paths(
                    uri, "cklight", cmethod)

        elif self._repo_eapi == 2:
            remote_gb, dbfile = self._construct_paths(
                uri, "dbdumplight", cmethod)
            remote_gb, md5file = self._construct_paths(
                uri, "dbdumplightck", cmethod)

        else:
            raise AttributeError("EAPI must be = 1 or 2")

        try:
            return self.__verify_file_checksum(dbfile, md5file)
        except (OSError, IOError) as err:
            if err.errno != errno.ENOENT:
                raise
            return -1

    def __unpack_downloaded_database(self, uri, down_item, cmethod):

        rc = 0
        path = None
        sys_set_repos = self._settings['repositories']['available']
        repo_data = sys_set_repos[self._repository_id]

        garbage, myfile = self._construct_paths(uri, down_item, cmethod)

        if self._repo_eapi in (1, 2,):
            try:

                myfunc = getattr(entropy.tools, cmethod[1])
                path = myfunc(myfile)
                # rename path correctly
                if self._repo_eapi == 1:
                    new_path = os.path.join(os.path.dirname(path),
                        etpConst['etpdatabasefile'])
                    os.rename(path, new_path)
                    path = new_path

            except (OSError, EOFError):
                rc = 1

        else:
            mytxt = "invalid EAPI must be = 1 or 2"
            raise AttributeError(mytxt)

        if rc == 0:
            const_setup_file(path, etpConst['entropygid'], 0o644,
                uid = etpConst['uid'])

        return rc

    def _downloaded_database_unpack(self, uri, cmethod):
        """
        Unpack the downloaded database.
        """
        file_to_unpack = etpConst['etpdatabasedump']
        if self._repo_eapi == 1:
            file_to_unpack = etpConst['etpdatabasefile']
        elif self._repo_eapi == 2:
            file_to_unpack = etpConst['etpdatabasedumplight']

        mytxt = "%s %s %s" % (red(_("Unpacking database to")),
            darkgreen(file_to_unpack), red("..."),)
        self._entropy.output(
            mytxt,
            importance = 0,
            level = "info",
            header = "\t"
        )

        myitem = 'dblight'
        if self._repo_eapi == 2:
            myitem = 'dbdumplight'
        elif self._developer_repo:
            myitem = 'db'

        myrc = self.__unpack_downloaded_database(uri, myitem, cmethod)
        if myrc != 0:
            mytxt = "%s %s !" % (red(_("Cannot unpack compressed package")),
                red(_("Skipping repository")),)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = "\t"
            )
            return False, myitem
        return True, myitem

    def __update_repository_revision(self, revision):
        cur_rev = AvailablePackagesRepository.revision(self._repository_id)
        repo_data = self._settings['repositories']
        db_data = repo_data['available'][self._repository_id]
        db_data['dbrevision'] = "0"
        if cur_rev != -1:
            db_data['dbrevision'] = str(cur_rev)

        rev_file = os.path.join(db_data['dbpath'],
            etpConst['etpdatabaserevisionfile'])
        enc = etpConst['conf_encoding']
        with codecs.open(rev_file, "w", encoding=enc) as rev_f:
            # safe anyway
            rev_f.write("%s\n" % (revision,))
            rev_f.flush()

    def __validate_database(self):
        def tell_error(err):
            mytxt = "%s: %s" % (darkred(_("Repository is invalid")),
                repr(err),)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "error",
                header = "\t"
            )
        try:
            dbconn = self._entropy.open_repository(self._repository_id)
        except RepositoryError as err:
            tell_error(err)
            return False
        try:
            dbconn.validate()
        except SystemDatabaseError as err:
            tell_error(err)
            return False
        return True

    def __database_indexing(self):

        # renice a bit, to avoid eating resources
        old_prio = const_set_nice_level(15)
        mytxt = red("%s ...") % (_("Indexing Repository metadata"),)
        self._entropy.output(
            mytxt,
            importance = 1,
            level = "info",
            header = "\t"
        )
        dbconn = self._entropy.open_repository(self._repository_id)
        dbconn.createAllIndexes()
        dbconn.commit(force = True)

        inst_repo = self._entropy.installed_repository()
        if inst_repo is not None:
            with inst_repo.exclusive():
                try: # client db can be absent
                    inst_repo.createAllIndexes()
                except (DatabaseError, OperationalError, IntegrityError,):
                    pass
        const_set_nice_level(old_prio)

    def _construct_paths(self, uri, item, cmethod, get_signature = False):
        """
        Build a remote URL and a local path for a supported resource item.
        """
        if item not in self._supported_download_items:
            raise AttributeError("Invalid item: %s" % (item,))

        items_needing_cmethod = (
            "db", "dbck", "dblight", "cklight", "dbdump", "dbdumpck",
            "dbdumplight", "dbdumplightck", "compck",
        )
        if (item in items_needing_cmethod) and (cmethod is None):
            mytxt = "For %s, cmethod can't be None" % (item,)
            raise AttributeError(mytxt)

        avail_data = self._settings['repositories']['available']
        repo_data = avail_data[self._repository_id]

        repo_dbpath = repo_data['dbpath']
        ec_hash = etpConst['etpdatabasehashfile']
        repo_lock_file = etpConst['etpdatabasedownloadlockfile']
        notice_board_filename = os.path.basename(repo_data['notice_board'])
        meta_file = etpConst['etpdatabasemetafilesfile']
        meta_file_gpg = etpConst['etpdatabasemetafilesfile'] + \
            etpConst['etpgpgextension']
        md5_ext = etpConst['packagesmd5fileext']
        ec_cm2 = None
        ec_cm3 = None
        ec_cm4 = None
        ec_cm5 = None
        ec_cm6 = None
        ec_cm7 = None
        ec_cm8 = None
        ec_cm9 = None
        if cmethod is not None:
            ec_cm2 = etpConst[cmethod[2]]
            ec_cm3 = etpConst[cmethod[3]]
            ec_cm4 = etpConst[cmethod[4]]
            ec_cm5 = etpConst[cmethod[5]]
            ec_cm6 = etpConst[cmethod[6]]
            ec_cm7 = etpConst[cmethod[7]]
            ec_cm8 = etpConst[cmethod[8]]
            ec_cm9 = etpConst[cmethod[9]]

        mymap = {
            'db': (
                "%s/%s" % (uri, ec_cm2,),
                "%s/%s" % (repo_dbpath, ec_cm2,),
            ),
            'dbck': (
                "%s/%s" % (uri, ec_cm9,),
                "%s/%s" % (repo_dbpath, ec_cm9,),
            ),
            'dblight': (
                "%s/%s" % (uri, ec_cm7,),
                "%s/%s" % (repo_dbpath, ec_cm7,),
            ),
            'dbdump': (
                "%s/%s" % (uri, ec_cm3,),
                "%s/%s" % (repo_dbpath, ec_cm3,),
            ),
            'dbdumplight': (
                "%s/%s" % (uri, ec_cm5,),
                "%s/%s" % (repo_dbpath, ec_cm5,),
            ),
            'ck': (
                "%s/%s" % (uri, ec_hash,),
                "%s/%s" % (repo_dbpath, ec_hash,),
            ),
            'cklight': (
                "%s/%s" % (uri, ec_cm8,),
                "%s/%s" % (repo_dbpath, ec_cm8,),
            ),
            'compck': (
                "%s/%s%s" % (uri, ec_cm2, md5_ext,),
                "%s/%s%s" % (repo_dbpath, ec_cm2, md5_ext,),
            ),
            'dbdumpck': (
                "%s/%s" % (uri, ec_cm4,),
                "%s/%s" % (repo_dbpath, ec_cm4,),
            ),
            'dbdumplightck': (
                "%s/%s" % (uri, ec_cm6,),
                "%s/%s" % (repo_dbpath, ec_cm6,),
            ),
            'lock': (
                "%s/%s" % (uri, repo_lock_file,),
                "%s/%s" % (repo_dbpath, repo_lock_file,),
            ),
            'notice_board': (
                repo_data['notice_board'],
                "%s/%s" % (repo_dbpath, notice_board_filename,),
            ),
            'meta_file': (
                "%s/%s" % (uri, meta_file,),
                "%s/%s" % (repo_dbpath, meta_file,),
            ),
            'meta_file_gpg': (
                "%s/%s" % (uri, meta_file_gpg,),
                "%s/%s" % (repo_dbpath, meta_file_gpg,),
            ),
        }

        url, path = mymap.get(item)
        if get_signature:
            url = self.__append_gpg_signature_to_path(url)
            path = self.__append_gpg_signature_to_path(path)

        return url, path

    def _download_item(self, uri, item, cmethod = None,
                       disallow_redirect = True, get_signature = False):

        my_repos = self._settings['repositories']
        avail_data = my_repos['available']
        repo_data = avail_data[self._repository_id]

        basic_user = repo_data.get('username')
        basic_pwd = repo_data.get('password')
        https_validate_cert = not repo_data.get('https_validate_cert') == "false"

        url, filepath = self._construct_paths(
            uri, item, cmethod, get_signature = get_signature)

        # See bug #3495, download the file to
        # a temporary location and then move it
        # if we are successful
        temp_filepath = filepath + ".edownload"

        # to avoid having permissions issues
        # it's better to remove the file before,
        # otherwise new permissions won't be written
        if os.path.isfile(temp_filepath):
            os.remove(temp_filepath)
        filepath_dir = os.path.dirname(temp_filepath)

        if not os.path.isdir(filepath_dir) and not \
            os.path.lexists(filepath_dir):

            os.makedirs(filepath_dir, 0o755)
            const_setup_perms(filepath_dir, etpConst['entropygid'],
                f_perms = 0o644)

        try:

            fetcher = self._entropy._url_fetcher(
                url,
                temp_filepath,
                resume = False,
                disallow_redirect = disallow_redirect,
                http_basic_user = basic_user,
                http_basic_pwd = basic_pwd,
                https_validate_cert = https_validate_cert
            )

            rc = fetcher.download()
            if rc in self.FETCH_ERRORS:
                return False
            try:
                os.rename(temp_filepath, filepath)
            except (OSError, IOError) as err:
                if err.errno != errno.ENOENT:
                    raise
                return False # not downloaded?

            const_setup_file(filepath, etpConst['entropygid'], 0o644,
                uid = etpConst['uid'])
            return True

        finally:
            # cleanup temp file
            try:
                os.remove(temp_filepath)
            except (OSError, IOError) as err:
                if err.errno != errno.ENOENT:
                    raise
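
    # The method above downloads to a ".edownload" sibling and renames it
    # into place only on success, so a partially fetched file can never be
    # taken for a complete one. The bare pattern, as generic stand-in code:
    #
    #   temp_path = final_path + ".part"
    #   fetch(url, temp_path)              # may fail or be interrupted
    #   os.rename(temp_path, final_path)   # atomic within one filesystem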

    def _is_repository_unlocked(self, uri):
        """
        Return whether the repository is remotely unlocked. The check is
        done by fetching the remote lock file: if the download succeeds,
        the repository is locked.

        @return: whether the repository is remotely unlocked
        @rtype: bool
        """
        rc = self._download_item(uri, "lock", disallow_redirect = True)
        if rc: # lock file downloaded, repository is locked
            return False
        return True

    def _standard_items_download(self, uri):
        """
        Download a set of "standard" files from the repository mirror.
        """
        repos_data = self._settings['repositories']
        repo_data = repos_data['available'][self._repository_id]
        notice_board = os.path.basename(repo_data['local_notice_board'])
        db_meta_file = etpConst['etpdatabasemetafilesfile']
        db_meta_file_gpg = etpConst['etpdatabasemetafilesfile'] + \
            etpConst['etpgpgextension']

        objects_to_unpack = ("meta_file",)

        download_items = [
            (
                "meta_file",
                db_meta_file,
                False,
                "%s %s %s" % (
                    red(_("Downloading repository metafile")),
                    darkgreen(db_meta_file),
                    red("..."),
                )
            ),
            (
                "meta_file_gpg",
                db_meta_file_gpg,
                True,
                "%s %s %s" % (
                    red(_("Downloading GPG signature of repository metafile")),
                    darkgreen(db_meta_file_gpg),
                    red("..."),
                )
            ),
            (
                "notice_board",
                notice_board,
                True,
                "%s %s %s" % (
                    red(_("Downloading Notice Board")),
                    darkgreen(notice_board),
                    red("..."),
                )
            ),
        ]

        def my_show_info(txt):
            self._entropy.output(
                txt,
                importance = 0,
                level = "info",
                header = "\t",
                back = True
            )

        def my_show_down_status(message, mytype):
            self._entropy.output(
                message,
                importance = 0,
                level = mytype,
                header = "\t"
            )

        def my_show_file_unpack(fp):
            self._entropy.output(
                "%s: %s" % (darkgreen(_("unpacked meta file")), brown(fp),),
                header = blue("\t << ")
            )

        def my_show_file_rm(fp):
            self._entropy.output(
                "%s: %s" % (darkgreen(_("removed meta file")), purple(fp),),
                header = blue("\t << ")
            )

        downloaded_files = []

        for item, myfile, ignorable, mytxt in download_items:

            my_show_info(mytxt)
            mystatus = self._download_item(uri, item, disallow_redirect = True)
            mytype = 'info'
            myurl, mypath = self._construct_paths(uri, item, None)

            # download failed, is it critical?
            if not mystatus:
                if ignorable:
                    message = "%s: %s." % (blue(myfile),
                        red(_("not available, it's ok")))
                else:
                    mytype = 'warning'
                    message = "%s: %s." % (blue(myfile),
                        darkred(_("not available, not very ok!")))
                my_show_down_status(message, mytype)

                # remove garbage
                if os.path.isfile(mypath):
                    try:
                        os.remove(mypath)
                    except OSError:
                        continue

                continue

            message = "%s: %s." % (blue(myfile),
                darkgreen(_("available, w00t!")))
            my_show_down_status(message, mytype)
            downloaded_files.append(mypath)

            if item not in objects_to_unpack:
                continue
            if not const_file_readable(mypath):
                continue

            tmpdir = const_mkdtemp(prefix="_standard_items_download")
            repo_dir = repo_data['dbpath']
            enc = etpConst['conf_encoding']
            try:
                done = entropy.tools.universal_uncompress(mypath, tmpdir,
                    catch_empty = True)
                if not done:
                    mytype = 'warning'
                    message = "%s: %s." % (blue(myfile),
                        darkred(_("cannot be unpacked, not very ok!")))
                    my_show_down_status(message, mytype)
                    continue
                myfiles_to_move = set(os.listdir(tmpdir))

                # exclude files not available by default
                files_not_found_file = etpConst['etpdatabasemetafilesnotfound']
                if files_not_found_file in myfiles_to_move:
                    myfiles_to_move.remove(files_not_found_file)
                    fnf_path = os.path.join(tmpdir, files_not_found_file)

                    f_nf = []
                    try:
                        with codecs.open(fnf_path, "r", encoding=enc) as f:
                            f_nf += [x.strip() for x in f.readlines()]
                    except (OSError, IOError) as err:
                        if err.errno != errno.ENOENT:
                            raise
                    else:
                        for myfile in f_nf:
                            myfile = os.path.basename(myfile) # avoid lamerz
                            myfpath = os.path.join(repo_dir, myfile)
                            try:
                                os.remove(myfpath)
                                my_show_file_rm(myfile)
                            except (OSError, IOError) as err:
                                if err.errno != errno.ENOENT:
                                    raise

                for myfile in sorted(myfiles_to_move):
                    from_mypath = os.path.join(tmpdir, myfile)
                    to_mypath = os.path.join(repo_dir, myfile)
                    try:
                        os.rename(from_mypath, to_mypath)
                        my_show_file_unpack(myfile)
                    except OSError:
                        # try non atomic way
                        try:
                            shutil.copy2(from_mypath, to_mypath)
                            my_show_file_unpack(myfile)
                            os.remove(from_mypath)
                        except (shutil.Error, IOError, OSError,):
                            continue
                        continue

                    const_setup_file(to_mypath, etpConst['entropygid'], 0o644,
                        uid = etpConst['uid'])

            finally:
                shutil.rmtree(tmpdir, True)

        repo_r = AvailablePackagesRepository.revision(self._repository_id)
        mytxt = "%s: %s" % (
            red(_("Repository revision")),
            bold(str(repo_r)),
        )
        self._entropy.output(
            mytxt,
            importance = 1,
            level = "info",
            header = "\t"
        )

        return downloaded_files

    def _check_downloaded_database(self, uri, cmethod):

        dbitem = "dblight"
        if self._repo_eapi == 2:
            dbitem = "dbdumplight"
        elif self._developer_repo:
            dbitem = "db"
        garbage, dbfilename = self._construct_paths(uri, dbitem, cmethod)

        # verify checksum
        mytxt = "%s %s %s" % (
            red(_("Checking downloaded repository")),
            darkgreen(os.path.basename(dbfilename)),
            red("..."),
        )
        self._entropy.output(
            mytxt,
            importance = 0,
            back = True,
            level = "info",
            header = "\t"
        )
        db_status = self.__verify_database_checksum(uri, cmethod)
        if db_status == -1:
            mytxt = "%s. %s !" % (
                red(_("Cannot open digest")),
                red(_("Cannot verify repository integrity")),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = "\t"
            )
        elif db_status:
            mytxt = "%s: %s" % (
                red(_("Downloaded repository status")),
                bold(_("OK")),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "info",
                header = "\t"
            )
        else:
            mytxt = "%s: %s" % (
                red(_("Downloaded repository status")),
                darkred(_("ERROR")),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "error",
                header = "\t"
            )
            mytxt = "%s. %s" % (
                red(_("An error occurred while checking repository integrity")),
                red(_("Giving up")),
            )
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "error",
                header = "\t"
            )
            return 1
        return 0

    def _install_gpg_key_if_available(self):

        my_repos = self._settings['repositories']
        avail_data = my_repos['available']
        repo_data = avail_data[self._repository_id]
        gpg_path = repo_data['gpg_pubkey']

        if not const_file_readable(gpg_path):
            return False # gpg key not available

        def do_warn_user(fingerprint):
            mytxt = purple(_("Make sure to verify the imported key and "
                "set an appropriate trust level"))
            self._entropy.output(
                mytxt + ":",
                level = "warning",
                header = "\t"
            )
            mytxt = brown("gpg --homedir '%s' --edit-key '%s'" % (
                etpConst['etpclientgpgdir'], fingerprint,)
            )
            self._entropy.output(
                "$ " + mytxt,
                level = "warning",
                header = "\t"
            )

        try:
            repo_sec = self._entropy.RepositorySecurity()
        except RepositorySecurity.GPGError:
            mytxt = "%s," % (
                purple(_("This repository supports GPG-signed packages")),
            )
            self._entropy.output(
                mytxt,
                level = "warning",
                header = "\t"
            )
            mytxt = purple(_("you may want to install GnuPG to take "
                "advantage of this feature"))
            self._entropy.output(
                mytxt,
                level = "warning",
                header = "\t"
            )
            return False # GPG not available

        pk_expired = False
        try:
            pk_avail = repo_sec.is_pubkey_available(self._repository_id)
        except repo_sec.KeyExpired:
            pk_avail = False
            pk_expired = True

        if pk_avail:

            tmp_dir = const_mkdtemp(prefix="_install_gpg_key")
            repo_tmp_sec = self._entropy.RepositorySecurity(
                keystore_dir = tmp_dir)
            # try to install and get fingerprint
            try:
                downloaded_key_fp = repo_tmp_sec.install_key(
                    self._repository_id, gpg_path)
            except RepositorySecurity.GPGError:
                downloaded_key_fp = None

            fingerprint = repo_sec.get_key_metadata(
                self._repository_id)['fingerprint']
            shutil.rmtree(tmp_dir, True)

            if downloaded_key_fp != fingerprint and \
                (downloaded_key_fp is not None):
                mytxt = "%s: %s !!!" % (
                    purple(_("GPG key changed for")),
                    bold(self._repository_id),
                )
                self._entropy.output(
                    mytxt,
                    level = "warning",
                    header = "\t"
                )
                mytxt = "[%s => %s]" % (
                    darkgreen(fingerprint),
                    purple(downloaded_key_fp),
                )
                self._entropy.output(
                    mytxt,
                    level = "warning",
                    header = "\t"
                )
                do_warn_user(downloaded_key_fp)
            else:
                mytxt = "%s: %s" % (
                    purple(_("GPG key already installed for")),
                    bold(self._repository_id),
                )
                self._entropy.output(
                    mytxt,
                    level = "info",
                    header = "\t"
                )
                do_warn_user(fingerprint)
            return True # already installed

        elif pk_expired:
            mytxt = "%s: %s" % (
                purple(_("GPG key EXPIRED for repository")),
                bold(self._repository_id),
            )
            self._entropy.output(
                mytxt,
                level = "warning",
                header = "\t"
            )

        # actually install
        mytxt = "%s: %s" % (
            purple(_("Installing GPG key for repository")),
            brown(self._repository_id),
        )
        self._entropy.output(
            mytxt,
            level = "info",
            header = "\t",
            back = True
        )

        try_ignore = False
        while True:
            try:
                fingerprint = repo_sec.install_key(self._repository_id,
                    gpg_path)
            except RepositorySecurity.NothingImported as err:
                if try_ignore:
                    mytxt = "%s: %s" % (
                        darkred(_("Error during GPG key installation")),
                        err,
                    )
                    self._entropy.output(
                        mytxt,
                        level = "error",
                        header = "\t"
                    )
                    return False
                self._entropy.output(
                    purple(_("GPG key seems already installed but "
                        "not properly recorded, resetting")),
                    level = "warning",
                    header = "\t"
                )
                target_fingerprint = repo_sec.get_key_fingerprint(gpg_path)
                if target_fingerprint is not None:
                    # kill it, this is usually caused by shadow repos
                    # like sabayon-weekly
                    dead_repository_ids = set()
                    for _repository_id, key_meta in repo_sec.get_keys().items():
                        if key_meta['fingerprint'] == target_fingerprint:
                            dead_repository_ids.add(_repository_id)
                    for _repository_id in dead_repository_ids:
                        try:
                            repo_sec.delete_pubkey(_repository_id)
                        except KeyError:
                            # wtf, fault tolerance
                            pass
                try_ignore = True
                continue
            except RepositorySecurity.GPGError as err:
                mytxt = "%s: %s" % (
                    darkred(_("Error during GPG key installation")),
                    err,
                )
                self._entropy.output(
                    mytxt,
                    level = "error",
                    header = "\t"
                )
                return False
            break

        mytxt = "%s: %s" % (
            purple(_("Successfully installed GPG key for repository")),
            brown(self._repository_id),
        )
        self._entropy.output(
            mytxt,
            level = "info",
            header = "\t"
        )
        mytxt = "%s: %s" % (
            darkgreen(_("Fingerprint")),
            bold(fingerprint),
        )
        self._entropy.output(
            mytxt,
            level = "info",
            header = "\t"
        )
        do_warn_user(fingerprint)
        return True

    def _gpg_verify_downloaded_files(self, downloaded_files):

        try:
            repo_sec = self._entropy.RepositorySecurity()
        except RepositorySecurity.GPGServiceNotAvailable:
            # wtf! it was available a while ago!
            return 0 # GPG not available

        gpg_sign_ext = etpConst['etpgpgextension']
        sign_files = [x for x in downloaded_files if \
            x.endswith(gpg_sign_ext)]
        sign_files = [x for x in sign_files if const_file_readable(x)]

        to_be_verified = []

        for sign_path in sign_files:
            target_path = sign_path[:-len(gpg_sign_ext)]
            if const_file_readable(target_path):
                to_be_verified.append((target_path, sign_path,))

        gpg_rc = 0

        for target_path, sign_path in to_be_verified:

            file_name = os.path.basename(target_path)

            mytxt = "%s: %s ..." % (
                darkgreen(_("Verifying GPG signature of")),
                brown(file_name),
            )
            self._entropy.output(
                mytxt,
                level = "info",
                header = blue("\t@@ "),
                back = True
            )

            is_valid, err_msg = repo_sec.verify_file(self._repository_id,
                target_path, sign_path)
            if is_valid:
                mytxt = "%s: %s" % (
                    darkgreen(_("Verified GPG signature of")),
                    brown(file_name),
                )
                self._entropy.output(
                    mytxt,
                    level = "info",
                    header = blue("\t@@ ")
                )
            else:
                mytxt = "%s: %s" % (
                    darkred(_("Error during GPG verification of")),
                    file_name,
                )
                self._entropy.output(
                    mytxt,
                    level = "error",
                    header = "\t%s " % (bold("!!!"),)
                )
                mytxt = "%s: %s" % (
                    purple(_("It could mean a potential security risk")),
                    err_msg,
                )
                self._entropy.output(
                    mytxt,
                    level = "error",
                    header = "\t%s " % (bold("!!!"),)
                )
                gpg_rc = 1

        return gpg_rc

    def _webservice_database_sync(self):
        """
        Update the local repository database through the webservice
        using a differential sync.
        """
        repo_db = None
        try:
            repo_db = self.__get_webserv_local_database()
            if repo_db is None:
                raise AttributeError()
            return self.__handle_webserv_database_sync(repo_db)
        except (DatabaseError, IntegrityError, OperationalError,
            AttributeError,):
            return False
        finally:
            if repo_db is not None:
                repo_db.commit()
                repo_db.close()

        return False

    def __handle_webserv_database_sync(self, mydbconn):

        try:
            webserv = self._webservice
        except WebService.UnsupportedService as err:
            const_debug_write(__name__,
                "__handle_webserv_database_sync: error: %s" % (err,))
            return False

        try:
            mypackage_ids = mydbconn.listAllPackageIds()
        except (DatabaseError, IntegrityError, OperationalError,):
            return False

        added_ids, removed_ids = self.__get_webserv_database_differences(
            webserv, mypackage_ids)
        if (None in (added_ids, removed_ids)) or \
            (not added_ids and not removed_ids and self.__force):
            # nothing to sync, it seems, if force is True, fallback to EAPI2
            return False

        threshold = 100
        # is it worth it?
        if len(added_ids) > threshold:
            mytxt = "%s: %s (%s: %s/%s)" % (
                blue(_("Web Service")),
                darkred(_("skipping differential sync")),
                brown(_("threshold")),
                blue(str(len(added_ids))),
                darkred(str(threshold)),
            )
            self._entropy.output(
                mytxt,
                importance = 0,
                level = "info",
                header = blue(" # "),
            )
            return False

        count = 0
        chunk_size = RepositoryWebService.MAXIMUM_PACKAGE_REQUEST_SIZE
        added_segments = []
        mytmp = []

        for package_id in added_ids:
            count += 1
            mytmp.append(package_id)
            if count % chunk_size == 0:
                added_segments.append(mytmp[:])
                del mytmp[:]
        if mytmp:
            added_segments.append(mytmp[:])

        def _do_fetch(fetch_sts_map, segment, count, maxcount):
            try:
                try:
                    pkg_meta = webserv.get_packages_metadata(segment)
                except WebService.WebServiceException as err:
                    const_debug_write(__name__,
                        "__handle_webserv_database_sync: error: %s" % (err,))
                    mytxt = "%s: %s" % (
                        blue(_("Web Service communication error")),
                        err,
                    )
                    self._entropy.output(
                        mytxt, importance = 1, level = "info",
                        header = "\t", count = (count, maxcount,)
                    )
                    fetch_sts_map['error'] = True
                    return
                except KeyboardInterrupt:
                    const_debug_write(__name__,
                        "__handle_webserv_database_sync: keyboard interrupt")
                    fetch_sts_map['error'] = True
                    return

                if not pkg_meta:
                    const_debug_write(__name__,
                        "__handle_webserv_database_sync: empty data: %s" % (
                            pkg_meta,))
                    self._entropy.output(
                        _("Web Service data error"), importance = 1,
                        level = "info", header = "\t",
                        count = (count, maxcount,)
                    )
                    fetch_sts_map['error'] = True
                    return

                try:
                    for package_id, pkg_data in pkg_meta.items():
                        dumpobj(
                            "%s%s" % (self.WEBSERV_CACHE_ID, package_id,),
                            pkg_data,
                            ignore_exceptions = False
                        )
                except (IOError, EOFError, OSError,) as e:
                    mytxt = "%s: %s: %s." % (
                        blue(_("Local status")),
                        darkred("Error storing data"),
                        e,
                    )
                    self._entropy.output(
                        mytxt, importance = 1, level = "info",
                        header = "\t", count = (count, maxcount,)
                    )
                    fetch_sts_map['error'] = True
                    return

                return
            finally:
                fetch_sts_map['sem'].release()

        # do not exaggerate or you're going to need a way to block
        # further requests as long as some threads are still running
        # to avoid timeout errors
        max_threads = 4
        fetch_sts_map = {
            'sem': threading.Semaphore(max_threads),
            'error': False,
        }

        # fetch and store
        count = 0
        maxcount = len(added_segments)
        product = self._settings['repositories']['product']
        segment = None
        threads = []
        for segment in added_segments:
            count += 1
            mytxt = "%s %s" % (blue(_("Fetching segments")), "...",)
            self._entropy.output(
                mytxt, importance = 0, level = "info",
                header = "\t", back = True, count = (count, maxcount,)
            )
            fetch_sts_map['sem'].acquire()
            if len(threads) >= max_threads:
                const_debug_write(__name__,
                    purple("joining all the parallel threads"))
                # give them the chance to complete
                # since long delays on socket could cause timeouts
                for th in threads:
                    th.join()
                const_debug_write(__name__, purple("parallel threads joined"))
                del threads[:]
                if fetch_sts_map['error']:
                    return None
            th = ParallelTask(_do_fetch, fetch_sts_map, segment, count,
                maxcount)
            th.daemon = True
            th.start()
            threads.append(th)

        for th in threads:
            th.join()

        if fetch_sts_map['error']:
            return None

        del added_segments

        # get repository metadata
        repo_metadata = self.__get_webserv_repository_metadata()
        # this gives us the "checksum" data too
        if not repo_metadata:
            mytxt = "%s: %s" % (
                blue(_("Web Service status")),
                darkred(_("cannot fetch repository metadata")),
            )
            self._entropy.output(
                mytxt,
                importance = 0,
                level = "info",
                header = blue(" # "),
            )
            return None

        # update treeupdates
        try:
            mydbconn.setRepositoryUpdatesDigest(self._repository_id,
                repo_metadata['treeupdates_digest'])
            mydbconn.bumpTreeUpdatesActions(
                repo_metadata['treeupdates_actions'])
        except (Error,):
            mytxt = "%s: %s" % (
                blue(_("Web Service status")),
                darkred(_("cannot update treeupdates data")),
            )
            self._entropy.output(
                mytxt,
                importance = 0,
                level = "info",
                header = blue(" # "),
            )
            return None

        # update package sets
        try:
            mydbconn.clearPackageSets()
            mydbconn.insertPackageSets(repo_metadata['sets'])
        except (Error,):
            mytxt = "%s: %s" % (
                blue(_("Web Service status")),
                darkred(_("cannot update package sets data")),
            )
            self._entropy.output(
                mytxt,
                importance = 0,
                level = "info",
                header = blue(" # "),
            )
            return None

        # now that we have all stored, add
        for package_id in added_ids:
            mydata = self._cacher.pop("%s%s" % (self.WEBSERV_CACHE_ID,
                package_id,))
            if mydata is None:
                mytxt = "%s: %s" % (
                    blue(_("Fetch error on segment while adding")),
                    darkred(str(segment)),
                )
                self._entropy.output(
                    mytxt, importance = 1, level = "warning",
                    header = " "
                )
                return False

            mytxt = "%s %s" % (
                darkgreen("++"),
                teal(mydata['atom']),
            )
            self._entropy.output(
                mytxt, importance = 0, level = "info",
                header = " ")
            try:
                mydbconn.addPackage(
                    mydata, revision = mydata['revision'],
                    package_id = package_id,
                    formatted_content = True
                )
            except (Error,) as err:
                if const_debug_enabled():
                    entropy.tools.print_traceback()
                self._entropy.output("%s: %s" % (
                    blue(_("repository error while adding packages")),
                    err,),
                    importance = 1, level = "warning",
                    header = " "
                )
                return False

        # now remove
        # preload atoms names to improve speed during removePackage
        atoms_map = dict((x, mydbconn.retrieveAtom(x),) for x in removed_ids)
        for package_id in removed_ids:
            myatom = atoms_map.get(package_id)

            mytxt = "%s %s" % (
                darkred("--"),
                purple(str(myatom)),)
            self._entropy.output(
                mytxt, importance = 0, level = "info",
                header = " ")
            try:
                mydbconn.removePackage(package_id)
            except (Error,):
                self._entropy.output(
                    blue(_("repository error while removing packages")),
                    importance = 1, level = "warning",
                    header = " "
                )
                return False

        mydbconn.commit()
        mydbconn.clearCache()
        # now verify if both checksums match
        result = False
        mychecksum = mydbconn.checksum(do_order = True,
            strict = False, include_signatures = True)
        if repo_metadata['checksum'] == mychecksum:
            result = True
        else:
            self._entropy.output(
                blue(_("Repository checksum doesn't match remote.")),
                importance = 0, level = "info", header = "\t",
            )
            mytxt = "%s: %s" % (_('local'), mychecksum,)
            self._entropy.output(
                mytxt, importance = 0,
                level = "info", header = "\t",
            )
            mytxt = "%s: %s" % (_('remote'), repo_metadata['checksum'],)
            self._entropy.output(
                mytxt, importance = 0,
                level = "info", header = "\t",
            )

        return result
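
    # The chunking performed above, in isolation: package ids are grouped
    # into segments of at most MAXIMUM_PACKAGE_REQUEST_SIZE. Equivalent
    # logic with a made-up chunk size, runnable as a doctest:
    #
    #   >>> ids = list(range(7))
    #   >>> [ids[i:i + 3] for i in range(0, len(ids), 3)]
    #   [[0, 1, 2], [3, 4, 5], [6]]
    #
    # Up to max_threads (4) ParallelTask workers, gated by a semaphore,
    # fetch the segments; results are staged via dumpobj()/EntropyCacher
    # and only then written into the local database.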

    def remote_revision(self):
        rev = self._remote_webservice_revision()
        if rev is not None:
            return rev

        avail_data = self._settings['repositories']['available']
        repo_data = avail_data[self._repository_id]
        # default to the first entry, which is expected
        # to be always available.
        try:
            uri = repo_data['databases'][0]['uri']
        except IndexError:
            rev = -1
            return rev

        basic_user = repo_data.get('username')
        basic_pwd = repo_data.get('password')
        https_validate_cert = not repo_data.get('https_validate_cert') == "false"

        rev = self._remote_revision(uri,
            http_basic_user = basic_user,
            http_basic_pwd = basic_pwd,
            https_validate_cert = https_validate_cert)

        return rev

    def _remote_webservice_revision(self):
        """
        Return the remote repository revision using the webservice.
        This method returns None if the request failed or the
        webservice is not available.
        """
        if self._repo_eapi < 3:
            return

        # ask WebService then
        revision = self.__get_webserv_repository_revision()
        if revision is not None:
            try:
                revision = int(revision)
            except ValueError:
                revision = None

        if revision is not None:
            return revision

        # otherwise, fallback to previous EAPI
        self._repo_eapi -= 1

    def _remote_revision(self, uri, http_basic_user = None,
                         http_basic_pwd = None,
                         https_validate_cert = True):
        """
        Return the remote repository revision by downloading
        the revision file from the given uri.
        """
        sep = const_convert_to_unicode("/")
        url = uri + sep + etpConst['etpdatabaserevisionfile']

        tmp_fd, tmp_path = None, None
        rev = "-1"
        try:
            tmp_fd, tmp_path = const_mkstemp(
                prefix = "AvailableEntropyRepository.remote_revision")
            fetcher = self._entropy._url_fetcher(
                url, tmp_path, resume = False,
                http_basic_user = http_basic_user,
                http_basic_pwd = http_basic_pwd,
                https_validate_cert = https_validate_cert)
            fetch_rc = fetcher.download()
            if fetch_rc not in self.FETCH_ERRORS:
                with codecs.open(tmp_path, "r") as tmp_f:
                    rev = tmp_f.readline().strip()
        except (IOError, OSError):
            # ignore any errors, especially read ones
            pass
        finally:
            if tmp_fd is not None:
                try:
                    os.close(tmp_fd)
                except OSError:
                    pass
            if tmp_path is not None:
                try:
                    os.remove(tmp_path)
                except OSError:
                    pass

        # try to convert rev into integer now
        try:
            rev = int(rev)
        except ValueError:
            # corrupted data
            rev = -1

        return rev
2019
2020 - def update(self):
2021 2022 # disallow unprivileged update 2023 if not entropy.tools.is_root(): 2024 raise PermissionDenied( 2025 "cannot update repository as unprivileged user") 2026 2027 self._show_repository_information() 2028 2029 selected = self._select_database_mirror() 2030 if selected is None: 2031 return EntropyRepositoryBase.REPOSITORY_NOT_AVAILABLE 2032 revision, uri, cformat = selected 2033 2034 updatable = self._is_repository_updatable(revision) 2035 if not self.__force: 2036 if not updatable: 2037 mytxt = "%s: %s." % (bold(_("Attention")), 2038 red(_("repository is already up to date")),) 2039 self._entropy.output( 2040 mytxt, 2041 importance = 1, 2042 level = "info", 2043 header = "\t" 2044 ) 2045 return EntropyRepositoryBase.REPOSITORY_ALREADY_UPTODATE 2046 2047 locked = not self._is_repository_unlocked(uri) 2048 if locked: 2049 mytxt = "%s: %s. %s." % ( 2050 bold(_("Attention")), 2051 red(_("Repository is being updated")), 2052 red(_("Try again in a few minutes")), 2053 ) 2054 self._entropy.output( 2055 mytxt, 2056 importance = 1, 2057 level = "warning", 2058 header = "\t" 2059 ) 2060 return EntropyRepositoryBase.REPOSITORY_NOT_AVAILABLE 2061 2062 # clear database interface cache belonging to this repository 2063 self._ensure_repository_path() 2064 2065 # dealing with EAPI 2066 # setting some vars 2067 db_checksum_down_status = False 2068 do_db_update_transfer = False 2069 rc = 0 2070 2071 my_repos = self._settings['repositories'] 2072 avail_data = my_repos['available'] 2073 repo_data = avail_data[self._repository_id] 2074 2075 # some variables 2076 dumpfile = os.path.join(repo_data['dbpath'], 2077 etpConst['etpdatabasedumplight']) 2078 dbfile = os.path.join(repo_data['dbpath'], 2079 etpConst['etpdatabasefile']) 2080 dbfile_old = dbfile+".sync" 2081 cmethod = etpConst['etpdatabasecompressclasses'].get( 2082 cformat) 2083 2084 while True: 2085 2086 downloaded_db_item = None 2087 sig_down_status = False 2088 db_checksum_down_status = False 2089 if self._repo_eapi < 3: 2090 2091 down_status, sig_down_status, downloaded_db_item = \ 2092 self.__database_download(uri, cmethod) 2093 if not down_status: 2094 return EntropyRepositoryBase.REPOSITORY_NOT_AVAILABLE 2095 db_checksum_down_status = \ 2096 self.__database_checksum_download(uri, cmethod) 2097 break 2098 2099 elif self._repo_eapi == 3 and not const_file_writable(dbfile): 2100 do_db_update_transfer = None 2101 self._repo_eapi -= 1 2102 continue 2103 2104 elif self._repo_eapi == 3: 2105 2106 status = False 2107 try: 2108 status = self._webservice_database_sync() 2109 except: 2110 # avoid broken entries, deal with every exception 2111 entropy.tools.print_traceback() 2112 self.__remove_repository_files() 2113 raise 2114 2115 if not status: 2116 # set to none and completely skip database alignment 2117 do_db_update_transfer = None 2118 self._repo_eapi -= 1 2119 continue 2120 2121 break 2122 2123 downloaded_files = self._standard_items_download(uri) 2124 # also add db file to downloaded item 2125 # and md5 check repository 2126 if downloaded_db_item is not None: 2127 2128 durl, dpath = self._construct_paths( 2129 uri, downloaded_db_item, cmethod) 2130 downloaded_files.append(dpath) 2131 if sig_down_status: 2132 d_sig_path = self.__append_gpg_signature_to_path(dpath) 2133 downloaded_files.append(d_sig_path) 2134 2135 # 1. we're always in EAPI1 or 2 here 2136 # 2. 
            # 2. new policy: always deny the repository if
            #    its database checksum cannot be fetched
            if not db_checksum_down_status:
                # delete all
                self.__remove_repository_files()
                return EntropyRepositoryBase.REPOSITORY_NOT_AVAILABLE

            rc = self._check_downloaded_database(uri, cmethod)
            if rc != 0:
                # delete all
                self.__remove_repository_files()
                return EntropyRepositoryBase.REPOSITORY_CHECKSUM_ERROR

        # GPG pubkey install hook
        if self._gpg_feature:
            gpg_available = self._install_gpg_key_if_available()
            if gpg_available:
                gpg_rc = self._gpg_verify_downloaded_files(downloaded_files)

        # Now we can unpack
        files_to_remove = []
        if self._repo_eapi in (1, 2,):

            # if do_db_update_transfer == False and not None
            if (do_db_update_transfer is not None) and not \
                do_db_update_transfer:

                try:
                    os.rename(dbfile, dbfile_old)
                    do_db_update_transfer = True
                except OSError as err:
                    const_debug_write(
                        __name__, "rename failed: %s" % (err,))
                    do_db_update_transfer = False

            unpack_status, unpacked_item = \
                self._downloaded_database_unpack(uri, cmethod)

            if not unpack_status:
                # delete all
                self.__remove_repository_files()
                return EntropyRepositoryBase.REPOSITORY_GENERIC_ERROR

            unpack_url, unpack_path = self._construct_paths(
                uri, unpacked_item, cmethod)
            files_to_remove.append(unpack_path)

            # re-validate
            if not os.path.isfile(dbfile):
                do_db_update_transfer = False

            elif os.path.isfile(dbfile) and not do_db_update_transfer and \
                (self._repo_eapi != 1):
                os.remove(dbfile)

            if self._repo_eapi == 2:
                rc = self.__eapi2_inject_downloaded_dump(dumpfile,
                    dbfile, cmethod)

            if do_db_update_transfer:
                self.__eapi1_eapi2_databases_alignment(dbfile, dbfile_old)

            if self._repo_eapi == 2:
                # remove the dump
                files_to_remove.append(dumpfile)

        if rc != 0:
            # delete all
            self.__remove_repository_files()
            files_to_remove.append(dbfile_old)
            for path in files_to_remove:
                try:
                    os.remove(path)
                except OSError:
                    continue
            return EntropyRepositoryBase.REPOSITORY_GENERIC_ERROR

        # make sure that all the repository files are stored with proper
        # permissions to avoid possible XSS and trust boundary problems.
        downloaded_files.append(dbfile)
        for downloaded_file in sorted(set(downloaded_files)):
            try:
                const_setup_file(downloaded_file,
                    etpConst['entropygid'], 0o644,
                    uid = etpConst['uid'])
            except (OSError, IOError) as err:
                if err.errno != errno.ENOENT:
                    raise

        # remove garbage left around
        for path in files_to_remove:
            try:
                os.remove(path)
            except OSError:
                continue

        valid = self.__validate_database()
        if not valid:
            # repository failed validation
            return EntropyRepositoryBase.REPOSITORY_GENERIC_ERROR

        self.__update_repository_revision(revision)
        if self._entropy._indexing:
            self.__database_indexing()

        try:
            spm_class = self._entropy.Spm_class()
            spm_class.entropy_client_post_repository_update_hook(
                self._entropy, self._repository_id)
        except Exception as err:
            entropy.tools.print_traceback()
            mytxt = "%s: %s" % (
                blue(_("Configuration files update error, "
                       "not critical, continuing")),
                err,
            )
            self._entropy.output(mytxt, importance = 0,
                level = "info", header = blue(" # "),)

        # remove garbage
        try:
            os.remove(dbfile_old)
        except OSError:
            pass

        return EntropyRepositoryBase.REPOSITORY_UPDATED_OK
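
    # Illustrative sketch (hypothetical caller code): update() reports its
    # outcome through the EntropyRepositoryBase.REPOSITORY_* constants used
    # above, so callers are expected to branch on the return value:
    #
    #   outcome = updater.update()
    #   if outcome == EntropyRepositoryBase.REPOSITORY_UPDATED_OK:
    #       pass    # new revision in place, database validated and indexed
    #   elif outcome == EntropyRepositoryBase.REPOSITORY_ALREADY_UPTODATE:
    #       pass    # nothing to do
    #   else:
    #       pass    # unavailable, checksum error or generic error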


class MaskableRepository(EntropyRepositoryBase):
    """
    Objects inheriting from this class support package masking.
    A masked package is a package that is not visible to the user, and is
    thus neither selectable in dependency calculation nor directly
    installable. The only repositories that need to support this feature
    are those containing installable packages, like
    AvailablePackagesRepository.
    """

    _real_client_settings = None
    _real_client_settings_lock = threading.Lock()

    def __init__(self, *args, **kwargs):
        super(MaskableRepository, self).__init__(*args, **kwargs)

    @property
    def _client_settings(self):
        """
        Load the Entropy Client settings object.
        """
        if self._real_client_settings is None:
            with self._real_client_settings_lock:
                if self._real_client_settings is None:
                    from entropy.client.interfaces import Client

                    self._real_client_settings = Client().ClientSettings()

        return self._real_client_settings
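
    # The property above relies on double-checked locking: the unlocked
    # first read keeps the hot path cheap, while the re-check under the
    # lock ensures Client().ClientSettings() is invoked at most once even
    # if several threads race past the first check. A minimal standalone
    # sketch of the same pattern (all names hypothetical):
    #
    #   if self._cached is None:
    #       with self._lock:
    #           if self._cached is None:   # re-check: we may have lost the race
    #               self._cached = expensive_factory()
    #   return self._cached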

    @property
    def _settings_client_plugin(self):
        """
        Get the ClientSystemSettingsPlugin instance from Entropy Client.
        """
        from entropy.client.interfaces import Client
        return Client()._settings_client_plugin

    def _mask_filter_fetch_cache(self, package_id):
        if self._caching:
            return loadobj(
                "MaskableRepositoryFilter/%s_%s/%s" % (
                    self.name,
                    self.atomMatchCacheKey(),
                    package_id,
                )
            )

    def _mask_filter_store_cache(self, package_id, value):
        if self._caching:
            dumpobj(
                "MaskableRepositoryFilter/%s_%s/%s" % (
                    self.name,
                    self.atomMatchCacheKey(),
                    package_id,
                ),
                value)
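
    # Both helpers above address the on-disk cache via loadobj()/dumpobj()
    # from entropy.dump, using a key built from the repository name, the
    # atomMatchCacheKey() configuration hash and the package identifier.
    # Sketch of a resulting key, assuming a repository named "myrepo":
    #
    #   "MaskableRepositoryFilter/myrepo_<config-hash>/1234"
    #
    # Since atomMatchCacheKey() hashes the packages configuration, editing
    # any masking or keywording file naturally invalidates these entries.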

    def _maskFilter_live(self, package_id):

        ref = self._settings['pkg_masking_reference']
        if (package_id, self.name) in \
            self._settings['live_packagemasking']['mask_matches']:

            # do not cache this
            return -1, ref['user_live_mask']

        elif (package_id, self.name) in \
            self._settings['live_packagemasking']['unmask_matches']:

            return package_id, ref['user_live_unmask']

    def _maskFilter_user_package_mask(self, package_id, live):

        with self._settings['mask']:
            # thread-safe in here
            cache_obj = self._settings['mask'].get()
            if cache_obj is None:
                cache_obj = {}
                self._settings['mask'].set(cache_obj)
            user_package_mask_ids = cache_obj.get(self.name)

            if user_package_mask_ids is None:
                user_package_mask_ids = set()

                for atom in self._settings['mask']:
                    atom, repository_ids = \
                        entropy.dep.dep_get_match_in_repos(atom)
                    if repository_ids is not None:
                        if self.name not in repository_ids:
                            # then the mask doesn't involve us
                            continue
                    # check if @repository is specified
                    matches, r = self.atomMatch(atom, multiMatch = True,
                        maskFilter = False)
                    if r != 0:
                        continue
                    user_package_mask_ids |= set(matches)

                cache_obj[self.name] = user_package_mask_ids

        if package_id in user_package_mask_ids:
            # sorry, masked
            ref = self._settings['pkg_masking_reference']
            myr = ref['user_package_mask']

            try:
                clset = self._client_settings
                validator_cache = clset['masking_validation']['cache']
                validator_cache[(package_id, self.name, live)] = -1, myr
            except KeyError:
                # system settings client plugin not found
                pass

            return -1, myr
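
    # Illustrative sketch (the atom and the exact return shape are
    # assumptions for illustration): dep_get_match_in_repos() is used
    # above to split an optional repository qualifier off each
    # package.mask entry, conceptually:
    #
    #   atom, repository_ids = entropy.dep.dep_get_match_in_repos(
    #       "app-foo/bar@repo1,repo2")
    #   # atom -> "app-foo/bar", repository_ids -> ["repo1", "repo2"]
    #
    # so a mask entry qualified for other repositories never masks
    # packages belonging to this one.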

    def _maskFilter_user_package_unmask(self, package_id, live):

        with self._settings['unmask']:
            # thread-safe in here
            cache_obj = self._settings['unmask'].get()
            if cache_obj is None:
                cache_obj = {}
                self._settings['unmask'].set(cache_obj)
            user_package_unmask_ids = cache_obj.get(self.name)

            if user_package_unmask_ids is None:

                user_package_unmask_ids = set()
                for atom in self._settings['unmask']:
                    atom, repository_ids = \
                        entropy.dep.dep_get_match_in_repos(atom)
                    if repository_ids is not None:
                        if self.name not in repository_ids:
                            # then the mask doesn't involve us
                            continue
                    matches, r = self.atomMatch(atom, multiMatch = True,
                        maskFilter = False)
                    if r != 0:
                        continue
                    user_package_unmask_ids |= set(matches)

                cache_obj[self.name] = user_package_unmask_ids

        if package_id in user_package_unmask_ids:

            ref = self._settings['pkg_masking_reference']
            myr = ref['user_package_unmask']
            try:
                clset = self._client_settings
                validator_cache = clset['masking_validation']['cache']
                validator_cache[(package_id, self.name, live)] = \
                    package_id, myr
            except KeyError:
                # system settings client plugin not found
                pass

            return package_id, myr

    def _maskFilter_packages_db_mask(self, package_id, live):

        # check if repository packages.db.mask wants it masked
        repos_mask = {}
        clset = self._client_settings
        if clset:
            repos_mask = clset['repositories']['mask']

        repomask = repos_mask.get(self.name)
        if isinstance(repomask, (list, set, frozenset)):

            # first, seek into generic masking, all branches
            # (below) avoid issues with repository names
            mask_repo_id = "%s_ids@@:of:%s" % (self.name, self.name,)
            repomask_ids = repos_mask.get(mask_repo_id)

            if not isinstance(repomask_ids, set):
                repomask_ids = set()
                for atom in repomask:
                    matches, r = self.atomMatch(atom, multiMatch = True,
                        maskFilter = False)
                    if r != 0:
                        continue
                    repomask_ids |= set(matches)
                repos_mask[mask_repo_id] = repomask_ids

            if package_id in repomask_ids:

                ref = self._settings['pkg_masking_reference']
                myr = ref['repository_packages_db_mask']

                try:
                    validator_cache = clset['masking_validation']['cache']
                    validator_cache[(package_id, self.name, live)] = \
                        -1, myr
                except KeyError:
                    # system settings client plugin not found
                    pass

                return -1, myr
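
    # Note on the synthetic key above: the resolved package_id set is
    # cached inside the very same repos_mask mapping that holds the raw
    # atom lists, and the "@@:of:" infix just makes collisions with real
    # repository names implausible. Sketch of the resulting layout
    # (repository name and atom are hypothetical):
    #
    #   repos_mask = {
    #       "myrepo": ["app-foo/bar"],               # atoms from config
    #       "myrepo_ids@@:of:myrepo": set([1234]),   # resolved cache
    #   }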

    def _maskFilter_package_license_mask(self, package_id, live):

        if not self._settings['license_mask']:
            return

        mylicenses = self.retrieveLicense(package_id)
        mylicenses = mylicenses.strip().split()
        lic_mask = self._settings['license_mask']
        for mylicense in mylicenses:

            if mylicense not in lic_mask:
                continue

            ref = self._settings['pkg_masking_reference']
            myr = ref['user_license_mask']
            try:
                clset = self._client_settings
                validator_cache = clset['masking_validation']['cache']
                validator_cache[(package_id, self.name, live)] = -1, myr
            except KeyError:
                # system settings client plugin not found
                pass

            return -1, myr
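
    # Illustrative sketch: retrieveLicense() is assumed to return a
    # whitespace-separated license string, so with "AGPL-3" listed in
    # license.mask:
    #
    #   "GPL-2 AGPL-3".strip().split()   # -> ["GPL-2", "AGPL-3"] -> masked
    #   "GPL-2".strip().split()          # -> ["GPL-2"] -> falls through,
    #                                    #    the next maskFilter() step runs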

    def _maskFilter_keyword_mask(self, package_id, live):

        # WORKAROUND for buggy entries
        # ** is fine then
        # TODO: remove this before 31-12-2011
        mykeywords = self.retrieveKeywords(package_id)
        if mykeywords == set([""]):
            mykeywords = set(['**'])

        mask_ref = self._settings['pkg_masking_reference']

        # first, check if package keywords are in etpConst['keywords']
        # (universal keywords have been merged from package.keywords)
        same_keywords = etpConst['keywords'] & mykeywords
        if same_keywords:
            myr = mask_ref['system_keyword']
            try:
                clset = self._client_settings
                validator_cache = clset['masking_validation']['cache']
                validator_cache[(package_id, self.name, live)] = \
                    package_id, myr
            except KeyError:
                # system settings client plugin not found
                pass

            return package_id, myr

        # if we get here, it means we didn't find mykeywords in
        # etpConst['keywords'], so we need to seek
        # self._settings['keywords']; seek in the repository section first
        keyword_repo = self._settings['keywords']['repositories']

        for keyword in tuple(keyword_repo.get(self.name, {}).keys()):

            if keyword not in mykeywords:
                continue

            keyword_data = keyword_repo[self.name].get(keyword)
            if not keyword_data:
                continue

            if "*" in keyword_data:
                # all packages in this repo with keyword "keyword" are fine
                myr = mask_ref['user_repo_package_keywords_all']
                try:
                    clset = self._client_settings
                    validator_cache = clset['masking_validation']['cache']
                    validator_cache[(package_id, self.name, live)] = \
                        package_id, myr
                except KeyError:
                    # system settings client plugin not found
                    pass

                return package_id, myr

            kwd_key = "%s_ids" % (keyword,)
            keyword_data_ids = keyword_repo[self.name].get(kwd_key)
            if not isinstance(keyword_data_ids, set):

                keyword_data_ids = set()
                for atom in keyword_data:
                    matches, r = self.atomMatch(atom, multiMatch = True,
                        maskFilter = False)
                    if r != 0:
                        continue
                    keyword_data_ids |= matches

                keyword_repo[self.name][kwd_key] = keyword_data_ids

            if package_id in keyword_data_ids:

                myr = mask_ref['user_repo_package_keywords']
                try:
                    clset = self._client_settings
                    validator_cache = clset['masking_validation']['cache']
                    validator_cache[(package_id, self.name, live)] = \
                        package_id, myr
                except KeyError:
                    # system settings client plugin not found
                    pass
                return package_id, myr

        keyword_pkg = self._settings['keywords']['packages']

        # if we get here, it means we didn't find a match in repositories,
        # so we scan packages, last chance
        for keyword in keyword_pkg.keys():
            # use .keys() because keyword_pkg gets modified during iteration

            # first of all, check if the keyword is in mykeywords
            if keyword not in mykeywords:
                continue

            keyword_data = keyword_pkg.get(keyword)
            if not keyword_data:
                continue

            kwd_key = "%s_ids" % (keyword,)
            keyword_data_ids = keyword_pkg.get(self.name + kwd_key)

            if not isinstance(keyword_data_ids, (list, set)):
                keyword_data_ids = set()
                for atom in keyword_data:
                    # match atom
                    matches, r = self.atomMatch(atom, multiMatch = True,
                        maskFilter = False)
                    if r != 0:
                        continue
                    keyword_data_ids |= matches

                keyword_pkg[self.name + kwd_key] = keyword_data_ids

            if package_id in keyword_data_ids:

                # valid!
                myr = mask_ref['user_package_keywords']
                try:
                    clset = self._client_settings
                    validator_cache = clset['masking_validation']['cache']
                    validator_cache[(package_id, self.name, live)] = \
                        package_id, myr
                except KeyError:
                    # system settings client plugin not found
                    pass

                return package_id, myr

        ## if we get here, it means that the package is keyword masked
        ## and we should look at the very last resort: per-repository
        ## package keywords
        # check if the repository contains keyword unmasking data

        clset = self._client_settings
        if clset is None:
            # SystemSettings Entropy Client plugin not available
            return

        # let's see if something is available in the repository config
        repo_keywords = clset['repositories']['repos_keywords'].get(
            self.name)
        if repo_keywords is None:
            # nope, sorry!
            return

        # check universal keywords
        same_keywords = repo_keywords.get('universal') & mykeywords
        if same_keywords:
            # universal keyword matches!
            myr = mask_ref['repository_packages_db_keywords']
            validator_cache = clset['masking_validation']['cache']
            validator_cache[(package_id, self.name, live)] = \
                package_id, myr
            return package_id, myr

        ## if we get here, it means that even universal masking failed
        ## and we need to look at per-package settings
        repo_settings = repo_keywords.get('packages')
        if not repo_settings:
            # it's empty, not worth checking
            return

        cached_key = "packages_ids"
        keyword_data_ids = repo_keywords.get(cached_key)
        if not isinstance(keyword_data_ids, dict):
            # create the cache

            keyword_data_ids = {}
            for atom, values in repo_settings.items():
                matches, r = self.atomMatch(atom, multiMatch = True,
                    maskFilter = False)
                if r != 0:
                    continue
                for match in matches:
                    obj = keyword_data_ids.setdefault(match, set())
                    obj.update(values)

            repo_keywords[cached_key] = keyword_data_ids

        pkg_keywords = keyword_data_ids.get(package_id, set())
        if "**" in pkg_keywords:
            same_keywords = True
        else:
            same_keywords = pkg_keywords & etpConst['keywords']
        if same_keywords:
            # found! this pkg is not masked, yay!
            myr = mask_ref['repository_packages_db_keywords']
            validator_cache = clset['masking_validation']['cache']
            validator_cache[(package_id, self.name, live)] = \
                package_id, myr
            return package_id, myr
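
    # The lookup order implemented above, from strongest to weakest:
    #
    #   1. etpConst['keywords'], the system-wide accepted keywords
    #      (universal entries merged from package.keywords)
    #   2. per-repository keywords from the user configuration
    #   3. per-package keywords from the user configuration
    #   4. repository-provided keywords (universal first, then per-package)
    #
    # Every positive hit returns (package_id, reason_id); falling through
    # all steps returns None, which maskFilter(), this being the last
    # filter in its chain, resolves to "completely masked".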

    def maskFilter(self, package_id, live = True):
        """
        Reimplemented from EntropyRepositoryBase.
        """
        validator_cache = self._client_settings.get(
            'masking_validation', {}).get('cache', {})

        cached = validator_cache.get((package_id, self.name, live))
        if cached is not None:
            return cached

        # use on-disk cache?
        cached = self._mask_filter_fetch_cache(package_id)
        if cached is not None:
            return cached

        # avoid memleaks
        if len(validator_cache) > 100000:
            validator_cache.clear()

        if live:
            data = self._maskFilter_live(package_id)
            if data:
                return data

        data = self._maskFilter_user_package_mask(package_id, live)
        if data:
            self._mask_filter_store_cache(package_id, data)
            return data

        data = self._maskFilter_user_package_unmask(package_id, live)
        if data:
            self._mask_filter_store_cache(package_id, data)
            return data

        data = self._maskFilter_packages_db_mask(package_id, live)
        if data:
            self._mask_filter_store_cache(package_id, data)
            return data

        data = self._maskFilter_package_license_mask(package_id, live)
        if data:
            self._mask_filter_store_cache(package_id, data)
            return data

        data = self._maskFilter_keyword_mask(package_id, live)
        if data:
            self._mask_filter_store_cache(package_id, data)
            return data

        # holy crap, can't validate
        myr = self._settings['pkg_masking_reference']['completely_masked']
        validator_cache[(package_id, self.name, live)] = -1, myr
        # store the actual result: "data" is falsy at this point and
        # caching it would make the on-disk lookup always miss
        self._mask_filter_store_cache(package_id, (-1, myr))
        return -1, myr
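
    # Illustrative usage sketch (``repo`` stands for any repository
    # instance inheriting from MaskableRepository; 1234 is a hypothetical
    # package_id). The contract is (package_id, reason_id) when the
    # package is visible and (-1, reason_id) when it is masked, with
    # reason_id indexing self._settings['pkg_masking_reference']:
    #
    #   package_id, reason_id = repo.maskFilter(1234)
    #   if package_id == -1:
    #       print("masked, reason id:", reason_id)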

    def atomMatchCacheKey(self):
        """
        Reimplemented from EntropyRepositoryBase.
        """
        return "%s_%s" % (
            SystemSettings().packages_configuration_hash(),
            self._settings_client_plugin.packages_configuration_hash(),
        )


class AvailablePackagesRepository(CachedRepository, MaskableRepository):
    """
    This class represents the available packages repository and is a direct
    subclass of EntropyRepository. It implements the update() method to
    make it possible to update the repository.
    """
    def __init__(self, *args, **kwargs):
        super(AvailablePackagesRepository, self).__init__(*args, **kwargs)

        # ensure proper repository file permissions
        if entropy.tools.is_root() and os.path.isfile(self._db):
            const_setup_file(self._db, etpConst['entropygid'], 0o644,
                uid = etpConst['uid'])

    @staticmethod
    def update(entropy_client, repository_id, force, gpg):
        """
        Reimplemented from EntropyRepositoryBase.
        """
        try:
            updater = AvailablePackagesRepositoryUpdater(
                entropy_client, repository_id,
                force, gpg)
        except KeyError:
            return EntropyRepositoryBase.REPOSITORY_NOT_AVAILABLE
        else:
            return updater.update()

    @staticmethod
    def revision(repository_id):
        """
        Reimplemented from EntropyRepositoryBase.
        """
        db_data = SystemSettings()['repositories']['available'][repository_id]
        fname = os.path.join(db_data['dbpath'],
            etpConst['etpdatabaserevisionfile'])

        revision = -1
        enc = etpConst['conf_encoding']
        try:
            with codecs.open(fname, "r", encoding=enc) as f:
                read_data = f.readline().strip()
                try:
                    revision = int(read_data)
                except ValueError:
                    pass
        except (OSError, IOError) as err:
            if err.errno != errno.ENOENT:
                raise

        return revision

    @staticmethod
    def remote_revision(repository_id):
        """
        Reimplemented from EntropyRepositoryBase.
        """
        return AvailablePackagesRepositoryUpdater(TextInterface(),
            repository_id, False, False).remote_revision()

    def handlePackage(self, pkg_data, revision = None,
                      formattedContent = False):
        """
        Reimplemented from EntropyRepository.
        """
        raise PermissionDenied(
            "cannot execute handlePackage on this repository")

    def addPackage(self, pkg_data, revision = -1, package_id = None,
                   formatted_content = False):
        """
        Reimplemented from EntropyRepository.
        """
        raise PermissionDenied(
            "cannot execute addPackage on this repository")

    def removePackage(self, package_id, from_add_package = False):
        """
        Reimplemented from EntropyRepository.
        """
        raise PermissionDenied(
            "cannot execute removePackage on this repository")

    def clearCache(self):
        # clear the package masking filter
        try:
            clset = self._client_settings
        except AttributeError:
            # EntropyBaseRepository's constructor calls
            # _maybeDatabaseSchemaUpdates(), which ends up calling us
            # while _real_client_settings is not set yet.
            pass
        else:
            clset.get('masking_validation', {}).get('cache', {}).clear()
        EntropyRepository.clearCache(self)


class GenericRepository(CachedRepository, MaskableRepository):
    """
    This class represents a generic packages repository and is a direct
    subclass of EntropyRepository.
    GenericRepository is a CachedRepository as well, because its objects
    could get cached by third parties. We actually require this because
    our installed packages repository could end up being a
    GenericRepository when running in fail-safe mode.
    """

    def handlePackage(self, pkg_data, revision = None,
                      formattedContent = False):
        """
        Reimplemented from EntropyRepository.
        A generic repository is not supposed to support handlePackage().
        You can override this (at your own risk) by setting the
        "override_handlePackage" property to True. In that case, a plain
        addPackage() call is issued.
        """
        override = getattr(self, 'override_handlePackage', False)
        if not override:
            raise PermissionDenied(
                "cannot execute handlePackage on this repository")

        return self.addPackage(pkg_data, revision = revision,
            formatted_content = formattedContent)
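
    # Illustrative opt-in sketch (``repo`` is a hypothetical
    # GenericRepository instance, ``pkg_data`` a package metadata dict):
    #
    #   repo.override_handlePackage = True
    #   result = repo.handlePackage(pkg_data)  # now delegates to addPackage()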

    def maskFilter(self, package_id, live = True):
        """
        Reimplemented from EntropyRepository.
        A generic repository is not supposed to support package masking.
        You can override this by setting the "enable_mask_filter"
        property to True.
        """
        enabled = getattr(self, 'enable_mask_filter', False)
        if not enabled:
            return package_id, 0
        return MaskableRepository.maskFilter(self, package_id, live = live)
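
    # Illustrative sketch (``repo`` and 1234 are hypothetical): with the
    # filter disabled, every package is reported visible with a null
    # reason; enabling it restores the full MaskableRepository chain:
    #
    #   repo.maskFilter(1234)            # -> (1234, 0), no filtering
    #   repo.enable_mask_filter = True
    #   repo.maskFilter(1234)            # -> full masking validation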