
Source Code for Module entropy.server.interfaces.main

   1  # -*- coding: utf-8 -*- 
   2  """ 
   3   
   4      @author: Fabio Erculiani <[email protected]> 
   5      @contact: [email protected] 
   6      @copyright: Fabio Erculiani 
   7      @license: GPL-2 
   8   
   9      B{Entropy Package Manager Server Main Interfaces}. 
  10   
  11  """ 
  12  import codecs 
  13  import collections 
  14  import copy 
  15  import errno 
  16  import hashlib 
  17  import os 
  18  import re 
  19  import shutil 
  20  import stat 
  21  import subprocess 
  22  import sys 
  23  import time 
  24  import threading 
  25   
  26  from entropy.exceptions import OnlineMirrorError, PermissionDenied, \ 
  27      SystemDatabaseError, RepositoryError 
  28  from entropy.const import etpConst, etpSys, const_setup_perms, \ 
  29      const_create_working_dirs, const_convert_to_unicode, \ 
  30      const_setup_file, const_get_stringtype, const_debug_write, \ 
  31      const_debug_enabled, const_convert_to_rawstring, const_mkdtemp, \ 
  32      const_mkstemp, const_file_readable 
  33  from entropy.output import purple, red, darkgreen, \ 
  34      bold, brown, blue, darkred, teal 
  35  from entropy.cache import EntropyCacher 
  36  from entropy.server.interfaces.mirrors import Server as MirrorsServer 
  37  from entropy.i18n import _ 
  38  from entropy.core import BaseConfigParser 
  39  from entropy.core.settings.base import SystemSettings 
  40  from entropy.core.settings.plugins.skel import SystemSettingsPlugin 
  41  from entropy.transceivers import EntropyTransceiver 
  42  from entropy.db import EntropyRepository 
  43  from entropy.db.skel import EntropyRepositoryPlugin 
  44  from entropy.server.interfaces.db import ServerRepositoryStatus, \ 
  45      ServerPackagesRepository 
  46  from entropy.spm.plugins.factory import get_default_instance as get_spm, \ 
  47      get_default_class as get_spm_class 
  48  from entropy.qa import QAInterfacePlugin 
  49  from entropy.security import Repository as RepositorySecurity 
  50  from entropy.db.exceptions import ProgrammingError 
  51  from entropy.client.interfaces import Client 
  52  from entropy.client.interfaces.db import InstalledPackagesRepository, \ 
  53      GenericRepository 
  54  from entropy.client.misc import ConfigurationUpdates, ConfigurationFiles 
  55   
  56  import entropy.dep 
  57  import entropy.tools 
  58  import entropy.dump 
  59   
  60  SERVER_QA_PLUGIN = "ServerQAInterfacePlugin" 
61 62 63 -class ServerEntropyRepositoryPlugin(EntropyRepositoryPlugin):
64 65 PLUGIN_ID = "__server__" 66
67 - def __init__(self, server_interface, metadata = None):
68 """ 69 Entropy server-side repository ServerPackagesRepository Plugin class. 70 This class will be instantiated and automatically added to 71 ServerPackagesRepository instances generated by Entropy Server. 72 73 @param server_interface: Entropy Server interface instance 74 @type server_interface: entropy.server.interfaces.Server class 75 @param metadata: any dict form metadata map (key => value) 76 @type metadata: dict 77 """ 78 EntropyRepositoryPlugin.__init__(self) 79 self._cacher = EntropyCacher() 80 self._settings = SystemSettings() 81 self.srv_sys_settings_plugin = \ 82 etpConst['system_settings_plugins_ids']['server_plugin'] 83 self._server = server_interface 84 if metadata is None: 85 self._metadata = {} 86 else: 87 self._metadata = metadata
88
89 - def get_id(self):
90 return ServerEntropyRepositoryPlugin.PLUGIN_ID 91
92 - def get_metadata(self):
93 """ 94 This method should always return a direct reference to the object and 95 NOT a copy. 96 """ 97 return self._metadata
98
99 - def add_plugin_hook(self, entropy_repository_instance):
100 const_debug_write(__name__, 101 "ServerEntropyRepositoryPlugin: calling add_plugin_hook => %s" % ( 102 self,) 103 ) 104 105 repo = entropy_repository_instance.repository_id() 106 local_dbfile = self._metadata['local_dbfile'] 107 if local_dbfile is not None: 108 taint_file = self._server._get_local_repository_taint_file( 109 repo) 110 if os.path.isfile(taint_file): 111 dbs = ServerRepositoryStatus() 112 dbs.set_tainted(local_dbfile) 113 dbs.set_bumped(local_dbfile) 114 115 if "__temporary__" in self._metadata: # in-memory db? 116 local_dbfile_exists = True 117 else: 118 local_dbfile_exists = os.path.lexists(local_dbfile) 119 120 if not local_dbfile_exists: 121 # better than having a completely broken db 122 self._metadata['read_only'] = False 123 # force parameters, only ServerEntropyRepository exposes 124 # the setReadonly method 125 entropy_repository_instance.setReadonly(False) 126 entropy_repository_instance.initializeRepository() 127 entropy_repository_instance.commit() 128 129 out_intf = self._metadata.get('output_interface') 130 if out_intf is not None: 131 entropy_repository_instance.output = out_intf.output 132 entropy_repository_instance.ask_question = out_intf.ask_question 133 134 return 0
135
136 - def close_repo_hook(self, entropy_repository_instance):
137 const_debug_write(__name__, 138 "ServerEntropyRepositoryPlugin: calling close_repo_hook => %s" % ( 139 self,) 140 ) 141 142 # this happens because close_repositories() might be called 143 # before _setup_services() and in general, at any time, so, in this 144 # case, there is no need to print bullshit to dev. 145 if self._server.Mirrors is None: 146 return 0 147 148 repo = entropy_repository_instance.repository_id() 149 dbfile = self._metadata['local_dbfile'] 150 if dbfile is None: 151 # fake repo, or temporary one 152 return 0 153 154 read_only = self._metadata['read_only'] 155 if not read_only: 156 sts = ServerRepositoryStatus() 157 if sts.is_tainted(dbfile) and not sts.is_unlock_msg(dbfile): 158 u_msg = "[%s] %s" % (brown(repo), 159 darkgreen(_("mirrors have not been unlocked. Sync them.")),) 160 self._server.output( 161 u_msg, 162 importance = 1, 163 level = "warning", 164 header = brown(" * ") 165 ) 166 # avoid spamming 167 sts.set_unlock_msg(dbfile) 168 169 return 0
170
171 - def commit_hook(self, entropy_repository_instance):
172 173 const_debug_write(__name__, 174 "ServerEntropyRepositoryPlugin: calling commit_hook => %s" % ( 175 self,) 176 ) 177 178 dbs = ServerRepositoryStatus() 179 dbfile = self._metadata['local_dbfile'] 180 if dbfile is None: 181 # fake repo, or temporary one 182 return 0 183 repo = entropy_repository_instance.repository_id() 184 read_only = self._metadata['read_only'] 185 if read_only: 186 # do not taint database 187 return 0 188 189 # taint the database status 190 taint_file = self._server._get_local_repository_taint_file(repo) 191 enc = etpConst['conf_encoding'] 192 with codecs.open(taint_file, "w", encoding=enc) as f: 193 f.write("repository tainted\n") 194 195 const_setup_file(taint_file, etpConst['entropygid'], 0o664) 196 dbs.set_tainted(dbfile) 197 198 if not dbs.is_bumped(dbfile): 199 # bump revision, setting DatabaseBump causes 200 # the session to just bump once 201 dbs.set_bumped(dbfile) 202 """ 203 Entropy repository revision bumping function. 204 Every time it's called, 205 revision is incremented by 1. 206 """ 207 revision_file = self._server._get_local_repository_revision_file( 208 repo) 209 enc = etpConst['conf_encoding'] 210 if not os.path.isfile(revision_file): 211 revision = 1 212 else: 213 with codecs.open(revision_file, "r", encoding=enc) as rev_f: 214 revision = int(rev_f.readline().strip()) 215 revision += 1 216 217 tmp_revision_file = revision_file + ".tmp" 218 with codecs.open(tmp_revision_file, "w", encoding=enc) as rev_fw: 219 rev_fw.write(str(revision)+"\n") 220 221 # atomic ! 222 os.rename(tmp_revision_file, revision_file) 223 224 if not dbs.are_sets_synced(dbfile): 225 # auto-update package sets 226 self._server._sync_package_sets(entropy_repository_instance) 227 dbs.set_synced_sets(dbfile) 228 229 return 0
230
231 - def _get_category_description_from_disk(self, category):
232 """ 233 Get category name description from Source Package Manager. 234 235 @param category: category name 236 @type category: string 237 @return: category description 238 @rtype: string 239 """ 240 spm = self._server.Spm() 241 return spm.get_package_category_description_metadata(category)
242
243 - def __save_rss(self, srv_repo, rss_name, srv_updates):
244 # save to disk 245 try: 246 self._cacher.save(rss_name, srv_updates, 247 cache_dir = Server.CACHE_DIR) 248 except IOError as err: 249 e_msg = "[%s] %s: %s" % (brown(srv_repo), 250 purple(_("cannot store updates RSS cache")), 251 repr(err),) 252 self._server.output( 253 e_msg, 254 importance = 1, 255 level = "warning", 256 header = brown(" * ") 257 )
258
259 - def _write_rss_for_removed_package(self, repo_db, package_id):
260 261 # setup variables we're going to use 262 srv_repo = repo_db.repository_id() 263 rss_revision = repo_db.retrieveRevision(package_id) 264 rss_atom = "%s~%s" % (repo_db.retrieveAtom(package_id), rss_revision,) 265 status = ServerRepositoryStatus() 266 srv_updates = status.get_updates_log(srv_repo) 267 rss_name = srv_repo + etpConst['rss-dump-name'] 268 269 # load metadata from on disk cache, if available 270 rss_obj = self._cacher.pop(rss_name, cache_dir = Server.CACHE_DIR) 271 if rss_obj: 272 srv_updates.update(rss_obj) 273 274 # setup metadata keys, if not available 275 if 'added' not in srv_updates: 276 srv_updates['added'] = {} 277 if 'removed' not in srv_updates: 278 srv_updates['removed'] = {} 279 if 'light' not in srv_updates: 280 srv_updates['light'] = {} 281 282 # if pkgatom (rss_atom) is in the "added" metadata, drop it 283 if rss_atom in srv_updates['added']: 284 del srv_updates['added'][rss_atom] 285 # same thing for light key 286 if rss_atom in srv_updates['light']: 287 del srv_updates['light'][rss_atom] 288 289 # add metadata 290 mydict = {} 291 try: 292 mydict['description'] = repo_db.retrieveDescription(package_id) 293 except TypeError: 294 mydict['description'] = "N/A" 295 try: 296 mydict['homepage'] = repo_db.retrieveHomepage(package_id) 297 except TypeError: 298 mydict['homepage'] = "" 299 srv_updates['removed'][rss_atom] = mydict 300 301 # save to disk 302 self.__save_rss(srv_repo, rss_name, srv_updates)
303
304 - def _write_rss_for_added_package(self, repo_db, package_id, package_data):
305 306 # setup variables we're going to use 307 srv_repo = repo_db.repository_id() 308 rss_atom = "%s~%s" % (package_data['atom'], package_data['revision'],) 309 status = ServerRepositoryStatus() 310 srv_updates = status.get_updates_log(srv_repo) 311 rss_name = srv_repo + etpConst['rss-dump-name'] 312 313 # load metadata from on disk cache, if available 314 rss_obj = self._cacher.pop(rss_name, cache_dir = Server.CACHE_DIR) 315 if rss_obj: 316 srv_updates.update(rss_obj) 317 318 # setup metadata keys, if not available 319 if 'added' not in srv_updates: 320 srv_updates['added'] = {} 321 if 'removed' not in srv_updates: 322 srv_updates['removed'] = {} 323 if 'light' not in srv_updates: 324 srv_updates['light'] = {} 325 326 # if package_data['atom'] (rss_atom) is in the 327 # "removed" metadata, drop it 328 if rss_atom in srv_updates['removed']: 329 del srv_updates['removed'][rss_atom] 330 331 # add metadata 332 srv_updates['added'][rss_atom] = {} 333 srv_updates['added'][rss_atom]['description'] = \ 334 package_data['description'] 335 srv_updates['added'][rss_atom]['homepage'] = \ 336 package_data['homepage'] 337 338 srv_updates['light'][rss_atom] = {} 339 srv_updates['light'][rss_atom]['description'] = \ 340 package_data['description'] 341 srv_updates['light'][rss_atom]['homepage'] = \ 342 package_data['homepage'] 343 srv_updates['light'][rss_atom]['package_id'] = package_id 344 date_raw_str = const_convert_to_rawstring(package_data['datecreation']) 345 srv_updates['light'][rss_atom]['time_hash'] = \ 346 hashlib.sha256(date_raw_str).hexdigest() 347 348 # save to disk 349 self.__save_rss(srv_repo, rss_name, srv_updates)
350
351 - def add_package_hook(self, entropy_repository_instance, package_id, 352 package_data):
353 354 const_debug_write(__name__, 355 "ServerEntropyRepositoryPlugin: calling add_package_hook => %s" % ( 356 self,) 357 ) 358 359 # handle server-side repo RSS support 360 sys_set_plug = self.srv_sys_settings_plugin 361 if self._settings[sys_set_plug]['server']['rss']['enabled']: 362 self._write_rss_for_added_package(entropy_repository_instance, 363 package_id, package_data) 364 365 try: 366 descdata = self._get_category_description_from_disk( 367 package_data['category']) 368 entropy_repository_instance.setCategoryDescription( 369 package_data['category'], descdata) 370 except (IOError, OSError, EOFError,): 371 pass 372 entropy_repository_instance.commit() 373 374 return 0
375
376 - def remove_package_hook(self, entropy_repository_instance, package_id, 377 from_add_package):
378 379 const_debug_write(__name__, 380 "ServerEntropyRepositoryPlugin: calling remove_package_hook => %s" % ( 381 self,) 382 ) 383 384 # handle server-side repo RSS support 385 sys_set_plug = self.srv_sys_settings_plugin 386 if self._settings[sys_set_plug]['server']['rss']['enabled'] \ 387 and (not from_add_package): 388 389 # store removePackage action 390 self._write_rss_for_removed_package(entropy_repository_instance, 391 package_id) 392 393 return 0
394
395 - def treeupdates_move_action_hook(self, entropy_repository_instance, 396 package_id):
397 # check for injection and warn the developer 398 injected = entropy_repository_instance.isInjected(package_id) 399 new_atom = entropy_repository_instance.retrieveAtom(package_id) 400 if injected: 401 mytxt = "%s: %s %s. %s !!! %s." % ( 402 bold(_("INJECT")), 403 blue(str(new_atom)), 404 red(_("has been injected")), 405 red(_("quickpkg manually to update embedded db")), 406 red(_("Repository updated anyway")), 407 ) 408 self._server.output( 409 mytxt, 410 importance = 1, 411 level = "warning", 412 header = darkred(" * ") 413 ) 414 return 0
415
416 - def treeupdates_slot_move_action_hook(self, entropy_repository_instance, 417 package_id):
418 return self.treeupdates_move_action_hook(entropy_repository_instance, 419 package_id)
420
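Note: Entropy Server instantiates the plugin above itself and attaches it to the ServerPackagesRepository instances it creates. The sketch below is purely illustrative (the srv variable and the path are hypothetical); it only shows the metadata keys that the hooks above actually read.

    # 'srv' is assumed to be an already initialized entropy.server.interfaces.Server instance
    plugin = ServerEntropyRepositoryPlugin(
        srv,
        metadata = {
            'local_dbfile': "/path/to/packages.db",  # None for fake or temporary repositories
            'read_only': False,                      # checked by commit_hook and close_repo_hook
            'output_interface': None,                # optional object exposing output() and ask_question()
        })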
421 422 -class RepositoryConfigParser(BaseConfigParser):
423 """ 424 Entropy .ini-like server-side repository configuration file parser. 425 426 Entropy Server now supports repositories defined inside 427 /etc/entropy/repositories.conf.d/ files, written using the 428 syntax detailed below. This improves the ability to enable, disable, 429 add and remove repositories programmatically. Furthermore, it 430 makes possible to extend the supported parameters without breaking 431 backward compatibility. 432 433 In order to differentiate Entropy Client repository definitions between 434 Entropy Server ones, each repository section must start with "[server=". 435 436 This is an example of the syntax (with a complete listing 437 of the supported arguments): 438 439 [server=sabayon-limbo] 440 desc = Sabayon Linux Official Testing Repository 441 repo = ssh://[email protected]:~username/sabayon-limbo 442 enabled = <true/false> 443 444 [server=sabayon-limbo] 445 desc = This statement will be ignored. 446 repo-only = ssh://[email protected]:~username/sabayon-limbo 447 pkg-only = ssh://[email protected]:~username/sabayon-limbo 448 449 [server=sabayon-base] 450 desc = This is the base repository. 451 repo-only = ssh://[email protected]:~username/sabayon-base 452 pkg-only = ssh://[email protected]:~username/sabayon-base 453 base = <true/false> 454 455 As you can see, multiple statements for the same repository 456 are allowed. However, only the first desc = statement will be 457 considered, while there can be as many {pkg,repo}* = as you want. 458 459 The repository order is important, but this is guaranteed by the 460 fact that configuration files are parsed in lexical order. 461 462 Statements description: 463 - "desc": stands for description, the repository name description. 464 - "repo": the push & pull URI, for both packages and repository database. 465 - "repo-only": same as repo, but only for the repository database 466 push & pull. 467 - "pkg-only": same as repo, but only for the packages push & pull. 468 The supported protocols are those supported by entropy.fetchers. 469 - "enabled": if set, its value can be either "true" or "false". The default 470 value is "true". It indicates if a repository is configured 471 but currently disabled or enabled. Please take into account 472 that config files in /etc/entropy/repositories.conf.d/ starting 473 with "_" are considered to contain disabled repositories. This 474 is just provided for convienence. 475 - "base": if set, its value can be either "true" or "false". The default 476 value is "false". If no repository has the flag set, the first 477 listed repository will be the base one. Only the first repository 478 with "base = true" will be considered. The base repository is the 479 repository that is considered base for all the others 480 (the main one). 481 - "exclude-qa": if set, its value can be either "true" or "false". 482 The default value is "false". If "true", the repository is 483 excluded from QA checks. 484 """ 485 486 _SUPPORTED_KEYS = ("desc", "repo", "repo-only", "pkg-only", 487 "base", "enabled", "exclude-qa") 488 489 _DEFAULT_ENABLED_VALUE = True 490 _DEFAULT_QA_VALUE = False 491 _DEFAULT_BASE_VALUE = False 492 493 # Repository configuration file suggested prefix. If config files 494 # are prefixed with this string, they can be automatically handled 495 # by Entropy. 496 FILENAME_PREFIX = "entropysrv_" 497
498 - def __init__(self, encoding = None):
500 501 @classmethod
502 - def _validate_section(cls, match):
503 """ 504 Reimpemented from BaseConfigParser. 505 """ 506 # a new repository begins 507 groups = match.groups() 508 if not groups: 509 return 510 511 candidate = groups[0] 512 prefix = "server=" 513 if not candidate.startswith(prefix): 514 return 515 candidate = candidate[len(prefix):] 516 if not entropy.tools.validate_repository_id(candidate): 517 return 518 return candidate
519
520 - def base_repository(self):
521 """ 522 Return the base repository, if any, or None. 523 524 @return: the base repository identifier 525 @rtype: string or None 526 """ 527 repositories = self.repositories() 528 base = None 529 for repository_id in repositories: 530 try: 531 p_value = self[repository_id]["base"][0] 532 value = False 533 if p_value.strip().lower() == "true": 534 value = True 535 except KeyError: 536 value = self._DEFAULT_BASE_VALUE 537 if value: 538 base = repository_id 539 break 540 541 if base is None and repositories: 542 base = repositories[0] 543 return base
544
545 - def add(self, repository_id, desc, repo, repo_only, pkg_only, 546 base, enabled = True, exclude_qa = False):
547 """ 548 Add a repository to the repository configuration files directory. 549 Older repository configuration may get overwritten. This method 550 only writes repository configuration in the new .ini format and to 551 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 552 553 @param repository_id: repository identifier 554 @type repository_id: string 555 @param desc: repository description 556 @type desc: string 557 @param repo: list of "repo=" uris 558 @type repo: list 559 @param repo_only: list of "repo-only=" uris 560 @type repo_only: list 561 @param pkg_only: list of "pkg-only=" uris 562 @type pkg_only: list 563 @param base: True, if this is the base repository 564 @type base: bool 565 @keyword enabled: True, if the repository is enabled 566 @type enabled: bool 567 @keyword exclude_qa: True, if the repository should be excluded from QA 568 @type exclude_qa: bool 569 """ 570 settings = SystemSettings() 571 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 572 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 573 # as per specifications, enabled config files handled by 574 # Entropy Server (see repositories.conf.d/README) start with 575 # entropysrv_ prefix. 576 base_name = self.FILENAME_PREFIX + repository_id 577 enabled_conf_file = os.path.join(conf_d_dir, base_name) 578 # while disabled config files start with _ 579 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 580 581 self.write(enabled_conf_file, repository_id, desc, repo, repo_only, 582 pkg_only, base, enabled = enabled, exclude_qa = exclude_qa) 583 584 # if any disabled entry file is around, kill it with fire! 585 try: 586 os.remove(disabled_conf_file) 587 except OSError as err: 588 if err.errno != errno.ENOENT: 589 raise 590 591 return True
592
593 - def remove(self, repository_id):
594 """ 595 Remove a repository from the repositories configuration files directory. 596 597 This method only removes repository configuration at 598 /etc/entropy/repositories.conf.d/<filename prefix><repository id>. 599 600 @param repository_id: repository identifier 601 @type repository_id: string 602 @return: True, if success 603 @rtype: bool 604 """ 605 settings = SystemSettings() 606 repo_d_conf = settings.get_setting_dirs_data()['repositories_conf_d'] 607 conf_d_dir, _conf_files_mtime, _skipped_files, _auto_upd = repo_d_conf 608 # as per specifications, enabled config files handled by 609 # Entropy Server (see repositories.conf.d/README) start with 610 # entropysrv_ prefix. 611 base_name = self.FILENAME_PREFIX + repository_id 612 enabled_conf_file = os.path.join(conf_d_dir, base_name) 613 # while disabled config files start with _ 614 disabled_conf_file = os.path.join(conf_d_dir, "_" + base_name) 615 616 accomplished = False 617 try: 618 os.remove(enabled_conf_file) 619 accomplished = True 620 except OSError as err: 621 if err.errno != errno.ENOENT: 622 raise 623 624 # since we want to remove, also drop disabled 625 # config files 626 try: 627 os.remove(disabled_conf_file) 628 accomplished = True 629 except OSError as err: 630 if err.errno != errno.ENOENT: 631 raise 632 633 return accomplished
634
635 - def write(self, path, repository_id, desc, repo, repo_only, 636 pkg_only, base, enabled = True, exclude_qa = False):
637 """ 638 Write the repository configuration to the given file. 639 640 @param path: configuration file to write 641 @type path: string 642 @param repository_id: repository identifier 643 @type repository_id: string 644 @param desc: repository description 645 @type desc: string 646 @param repo: list of "repo=" uris 647 @type repo: list 648 @param repo_only: list of "repo-only=" uris 649 @type repo_only: list 650 @param pkg_only: list of "pkg-only=" uris 651 @type pkg_only: list 652 @param base: True, if this is the base repository, False if not, None 653 if unset. 654 @type base: bool 655 @keyword enabled: True, if the repository is enabled 656 @type enabled: bool 657 @keyword exclude_qa: True, if the repository should be excluded from QA 658 @type exclude_qa: bool 659 """ 660 if enabled: 661 enabled_str = "true" 662 else: 663 enabled_str = "false" 664 665 if exclude_qa: 666 qa_str = "true" 667 else: 668 qa_str = "false" 669 670 if base: 671 base_str = "base = true" 672 elif base is None: 673 base_str = "# base = false" 674 else: 675 base_str = "base = false" 676 677 repos_str = "" 678 for r in repo: 679 repos_str += "repo = %s\n" % (r,) 680 681 repo_only_str = "" 682 for r in repo_only: 683 repo_only_str += "repo-only = %s\n" % (r,) 684 if not repo_only_str: 685 repo_only_str = "# repo-only = " 686 687 pkg_only_str = "" 688 for pkg in pkg_only: 689 pkg_only_str += "pkg-only = %s\n" % (pkg,) 690 if not pkg_only_str: 691 pkg_only_str = "# pkg-only = " 692 693 meta = { 694 "repository_id": repository_id, 695 "desc": desc, 696 "repos": repos_str.rstrip(), 697 "repo_only": repo_only_str.rstrip(), 698 "pkg_only": pkg_only_str.rstrip(), 699 "enabled": enabled_str, 700 "exclude_qa": qa_str, 701 "base": base_str, 702 } 703 704 config = """\ 705 # Repository configuration file automatically generated 706 # by Entropy Server on your behalf. 707 708 [server=%(repository_id)s] 709 %(base)s 710 exclude-qa = %(exclude_qa)s 711 desc = %(desc)s 712 %(repos)s 713 %(repo_only)s 714 %(pkg_only)s 715 enabled = %(enabled)s 716 """ % meta 717 718 entropy.tools.atomic_write(path, config, self._encoding)
719
720 - def repositories(self):
721 """ 722 Return a list of valid parsed repositories. 723 724 A repository is considered valid iff it contains 725 at least "repo". The parse order is preserved. 726 """ 727 required_keys = set(("repo",)) 728 repositories = [] 729 730 for repository_id in self._ordered_sections: 731 repo_data = self[repository_id] 732 remaining = required_keys - set(repo_data.keys()) 733 if not remaining: 734 # then required_keys are there 735 repositories.append(repository_id) 736 737 return repositories
738
739 - def repo(self, repository_id):
740 """ 741 Return the repository push & pull URIs for both packages and 742 repository database. 743 744 @param repository_id: the repository identifier 745 @type repository_id: string 746 @raise KeyError: if repository_id is not found or 747 metadata is not available 748 @return: the repository push & pull URIs. 749 @rtype: list 750 """ 751 return self[repository_id]["repo"]
752
753 - def repo_only(self, repository_id):
754 """ 755 Return the repository push & pull URIs for the repository 756 database only. 757 758 @param repository_id: the repository identifier 759 @type repository_id: string 760 @raise KeyError: if repository_id is not found or 761 metadata is not available 762 @return: the repository push & pull URIs for the repository 763 database only. 764 @rtype: list 765 """ 766 return self[repository_id]["repo-only"]
767
768 - def pkg_only(self, repository_id):
769 """ 770 Return the repository push & pull URIs for the repository only. 771 772 @param repository_id: the repository identifier 773 @type repository_id: string 774 @raise KeyError: if repository_id is not found or 775 metadata is not available 776 @return: the repository push & pull URIs for the packages only. 777 @rtype: list 778 """ 779 return self[repository_id]["pkg-only"]
780
781 - def desc(self, repository_id):
782 """ 783 Return the description of the repository. 784 785 @param repository_id: the repository identifier 786 @type repository_id: string 787 @raise KeyError: if repository_id is not found or 788 metadata is not available 789 @return: the repository description 790 @rtype: string 791 """ 792 return self[repository_id]["desc"][0]
793
794 - def enabled(self, repository_id):
795 """ 796 Return whether the repository is enabled or disabled. 797 798 @param repository_id: the repository identifier 799 @type repository_id: string 800 @return: the repository status 801 @rtype: bool 802 """ 803 try: 804 enabled = self[repository_id]["enabled"][0] 805 return enabled.strip().lower() == "true" 806 except KeyError: 807 return self._DEFAULT_ENABLED_VALUE
808
809 - def exclude_qa(self, repository_id):
810 """ 811 Return whether the repository is excluded from QA. 812 813 @param repository_id: the repository identifier 814 @type repository_id: string 815 @return: the repository QA exclusion status 816 @rtype: bool 817 """ 818 try: 819 exclude = self[repository_id]["exclude-qa"][0] 820 return exclude.strip().lower() == "true" 821 except KeyError: 822 return self._DEFAULT_QA_VALUE
823
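A minimal usage sketch of RepositoryConfigParser follows. The repository name and paths are hypothetical; add(), repositories(), desc() and repo() are defined above, while read() is inherited from BaseConfigParser (it is used the same way by server_parser further below).

    parser = RepositoryConfigParser()
    # writes /etc/entropy/repositories.conf.d/entropysrv_myrepo
    parser.add("myrepo", "My testing repository",
        ["ssh://[email protected]:~username/myrepo"], [], [],
        False, enabled = True, exclude_qa = False)

    parser = RepositoryConfigParser()
    parser.read(["/etc/entropy/repositories.conf.d/entropysrv_myrepo"])
    for repository_id in parser.repositories():
        print("%s: %s -> %s" % (repository_id,
            parser.desc(repository_id), parser.repo(repository_id)))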
824 825 -class ServerSystemSettingsPlugin(SystemSettingsPlugin):
826 827 # List of static server-side repositories that must survive 828 # a repositories metadata reload 829 REPOSITORIES = {} 830
831 - def __init__(self, plugin_id, helper_interface):
832 SystemSettingsPlugin.__init__(self, plugin_id, helper_interface)
833 834 @staticmethod
835 - def server_conf_path():
836 """ 837 Return current server.conf path, this takes into account the current 838 configuration files directory path (which is affected by "root" path 839 changes [default: /]) 840 """ 841 # path to /etc/entropy/server.conf (usually, depends on systemroot) 842 return os.path.join(etpConst['confdir'], "server.conf")
843 844 @classmethod
845 - def analyze_server_repo_string(cls, repostring, product = None):
846 """ 847 Analyze a server repository string (usually contained in server.conf), 848 extracting all the parameters. 849 850 @param repostring: repository string 851 @type repostring: string 852 @keyword product: system product which repository belongs to 853 @rtype: None 854 @return: None 855 """ 856 857 if product is None: 858 product = etpConst['product'] 859 860 data = {} 861 repo_key, repostring = entropy.tools.extract_setting(repostring) 862 if repo_key != "repository": 863 raise AttributeError("invalid repostring passed") 864 865 repo_split = repostring.split("|") 866 if len(repo_split) < 3: 867 raise AttributeError("invalid repostring passed (2)") 868 869 repository_id = repo_split[0].strip() 870 desc = repo_split[1].strip() 871 uris = repo_split[2].strip().split() 872 exclude_qa = False # not supported through server.conf 873 874 repo_mirrors = [] 875 pkg_mirrors = [] 876 for uri in uris: 877 do_pkg = False 878 do_repo = False 879 while True: 880 if uri.startswith("<p>"): 881 do_pkg = True 882 uri = uri[3:] 883 continue 884 if uri.startswith("<r>"): 885 do_repo = True 886 uri = uri[3:] 887 continue 888 break 889 890 if not (do_repo or do_pkg): 891 do_repo = True 892 do_pkg = True 893 if do_repo: 894 repo_mirrors.append(uri) 895 if do_pkg: 896 pkg_mirrors.append(uri) 897 898 return repository_id, cls._generate_repository_metadata( 899 repository_id, desc, repo_mirrors, pkg_mirrors, exclude_qa)
900 901 @classmethod
902 - def _generate_repository_metadata(cls, repository_id, desc, 903 repo_mirrors, pkg_mirrors, 904 exclude_qa):
905 """ 906 Generate the repository metadata given raw information. 907 908 @param repository_id: the repository identifier 909 @type repository_id: string 910 @param desc: repository description 911 @type desc: string 912 @param repo_mirrors: list of repository database mirrors 913 @type repo_mirrors: list 914 @param pkg_mirrors: list of repository packages mirrors 915 @type pkg_mirrors: list 916 @param exclude_qa: exclude from QA checks 917 @type exclude_qa: bool 918 @return: the repository metadata 919 @rtype: dict 920 """ 921 data = {} 922 data['repoid'] = repository_id 923 data['description'] = desc 924 data['pkg_mirrors'] = pkg_mirrors[:] 925 data['repo_mirrors'] = repo_mirrors[:] 926 data['community'] = False 927 data['exclude_qa'] = exclude_qa 928 return data
929
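For illustration, a server.conf repository line such as the one below (all values are made up) would be split by analyze_server_repo_string() into a repository identifier plus the metadata dictionary built by _generate_repository_metadata():

    repository_id, meta = ServerSystemSettingsPlugin.analyze_server_repo_string(
        "repository = sabayon-limbo|Testing Repository|"
        "<r>ssh://[email protected]:~username/limbo "
        "<p>ssh://[email protected]:~username/limbo-packages")
    # meta['repo_mirrors'] -> ["ssh://[email protected]:~username/limbo"]
    # meta['pkg_mirrors']  -> ["ssh://[email protected]:~username/limbo-packages"]
    # a URI without a <r>/<p> prefix would end up in both lists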
930 - def __generic_parser(self, filepath):
931 """ 932 Internal method. This is the generic file parser here. 933 934 @param filepath: valid path 935 @type filepath: string 936 @return: raw text extracted from file 937 @rtype: list 938 """ 939 return entropy.tools.generic_file_content_parser( 940 filepath, 941 comment_tag = "##", 942 encoding = etpConst['conf_encoding'])
943
944 - def get_updatable_configuration_files(self, repository_id):
945 """ 946 Overridden from SystemSettings. 947 """ 948 files = set() 949 # hope that all the repos get synchronized with respect to 950 # package names moves 951 dep_rewrite_file = Server._get_conf_dep_rewrite_file() 952 dep_blacklist_file = Server._get_conf_dep_blacklist_file() 953 files.add(dep_rewrite_file) 954 files.add(dep_blacklist_file) 955 956 if (repository_id is not None) and \ 957 (repository_id in self._helper.repositories()): 958 959 critical_file = self._helper._get_local_critical_updates_file( 960 repository_id) 961 files.add(critical_file) 962 keywords_file = self._helper._get_local_repository_keywords_file( 963 repository_id) 964 files.add(keywords_file) 965 mask_file = self._helper._get_local_repository_mask_file( 966 repository_id) 967 files.add(mask_file) 968 bl_file = self._helper._get_missing_dependencies_blacklist_file( 969 repository_id) 970 files.add(bl_file) 971 restricted_file = self._helper._get_local_restricted_file( 972 repository_id) 973 files.add(restricted_file) 974 system_mask_file = \ 975 self._helper._get_local_repository_system_mask_file( 976 repository_id) 977 files.add(system_mask_file) 978 979 return files
980
981 - def dep_rewrite_parser(self, sys_set):
982 983 cached = getattr(self, '_mod_rewrite_data', None) 984 if cached is not None: 985 return cached 986 987 data = {} 988 rewrite_file = Server._get_conf_dep_rewrite_file() 989 if not os.path.isfile(rewrite_file): 990 return data 991 rewrite_content = self.__generic_parser(rewrite_file) 992 993 for line in rewrite_content: 994 params = line.split() 995 if len(params) < 2: 996 continue 997 pkg_match, pattern, replaces = params[0], params[1], params[2:] 998 if pattern.startswith("++"): 999 compiled_pattern = None 1000 pattern = pattern[2:] 1001 if not pattern: 1002 # malformed 1003 continue 1004 else: 1005 try: 1006 compiled_pattern = re.compile(pattern) 1007 except re.error: 1008 # invalid pattern 1009 continue 1010 # use this key to make sure to not overwrite similar entries 1011 data[(pkg_match, pattern)] = (compiled_pattern, replaces) 1012 1013 self._mod_rewrite_data = data 1014 return data
1015
1016 - def dep_blacklist_parser(self, sys_set):
1017 1018 data = {} 1019 blacklist_file = Server._get_conf_dep_blacklist_file() 1020 if not os.path.isfile(blacklist_file): 1021 return data 1022 blacklist_content = self.__generic_parser(blacklist_file) 1023 1024 for line in blacklist_content: 1025 params = line.split() 1026 if len(params) < 2: 1027 continue 1028 pkg_match, blacklisted_deps = params[0], params[1:] 1029 # use this key to make sure to not overwrite similar entries 1030 obj = data.setdefault(pkg_match, []) 1031 obj.extend(blacklisted_deps) 1032 1033 return data
1034
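Each line of the dependency blacklist file parsed above is whitespace-separated: the first token is the package match, the remaining tokens are the blacklisted dependencies. A made-up line such as the following (comments start with "##", per __generic_parser):

    ## hypothetical example entry
    app-foo/bar dev-libs/baz dev-libs/qux

would be collected, following the code above, into {"app-foo/bar": ["dev-libs/baz", "dev-libs/qux"]}.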
1035 - def qa_sets_parser(self, sys_set):
1036 1037 data = {} 1038 sets_file = Server._get_conf_qa_sets_file() 1039 if not os.path.isfile(sets_file): 1040 return data 1041 sets_content = self.__generic_parser(sets_file) 1042 1043 for line in sets_content: 1044 params = line.split() 1045 if len(params) < 2: 1046 continue 1047 repo_id, qa_sets = params[0], params[1:] 1048 obj = data.setdefault(repo_id, set()) 1049 obj.update(qa_sets) 1050 1051 return data
1052
1053 - def server_parser(self, sys_set):
1054 """ 1055 Parses Entropy server system configuration file. 1056 1057 @return dict data 1058 """ 1059 srv_plugin_class = ServerSystemSettingsPlugin 1060 server_conf = srv_plugin_class.server_conf_path() 1061 enc = etpConst['conf_encoding'] 1062 1063 try: 1064 with codecs.open(server_conf, "r", encoding=enc) \ 1065 as server_f: 1066 serverconf = [x.strip() for x in server_f.readlines() \ 1067 if x.strip()] 1068 except IOError as err: 1069 if err.errno != errno.ENOENT: 1070 raise 1071 # if file doesn't exist, provide empty 1072 # serverconf list. In this way, we make sure that 1073 # any additional metadata gets added. 1074 # see the for loop iterating through the 1075 # repository identifiers 1076 serverconf = [] 1077 1078 data = { 1079 'repositories': srv_plugin_class.REPOSITORIES.copy(), 1080 'community_mode': False, 1081 'qa_langs': [const_convert_to_unicode("en_US"), 1082 const_convert_to_unicode("C")], 1083 'default_repository_id': const_convert_to_unicode( 1084 etpConst['defaultserverrepositoryid']), 1085 'base_repository_id': None, 1086 'packages_expiration_days': etpConst['packagesexpirationdays'], 1087 'database_file_format': const_convert_to_unicode( 1088 etpConst['etpdatabasefileformat']), 1089 'disabled_eapis': set(), 1090 'broken_revdeps_qa_check': True, 1091 'exp_based_scope': etpConst['expiration_based_scope'], 1092 # disabled by default for now 1093 'nonfree_packages_dir_support': False, 1094 'sync_speed_limit': None, 1095 'weak_package_files': False, 1096 'changelog': True, 1097 'rss': { 1098 'enabled': etpConst['rss-feed'], 1099 'name': const_convert_to_unicode(etpConst['rss-name']), 1100 'light_name': const_convert_to_unicode( 1101 etpConst['rss-light-name']), 1102 'base_url': const_convert_to_unicode(etpConst['rss-base-url']), 1103 'website_url': const_convert_to_unicode( 1104 etpConst['rss-website-url']), 1105 'editor': const_convert_to_unicode( 1106 etpConst['rss-managing-editor']), 1107 'max_entries': etpConst['rss-max-entries'], 1108 'light_max_entries': etpConst['rss-light-max-entries'], 1109 }, 1110 } 1111 1112 fake_instance = self._helper.fake_default_repo 1113 default_repo_changed = False 1114 1115 def _offservrepoid(line, setting): 1116 # NOTE: remove this in future, supported for backward compat. 1117 # NOTE: added for backward and mixed compat. 1118 if default_repo_changed: 1119 return 1120 if not fake_instance: 1121 data['default_repository_id'] = setting.strip()
1122 1123 def _default_repo(line, setting): 1124 if not fake_instance: 1125 data['default_repository_id'] = setting.strip() 1126 default_repo_changed = True
1127 1128 def _exp_days(line, setting): 1129 mydays = setting.strip() 1130 try: 1131 mydays = int(mydays) 1132 data['packages_expiration_days'] = mydays 1133 except ValueError: 1134 return 1135 1136 def _exp_based_scope(line, setting): 1137 exp_opt = entropy.tools.setting_to_bool(setting) 1138 if exp_opt is not None: 1139 data['exp_based_scope'] = exp_opt 1140 1141 def _nf_packages_dir_sup(line, setting): 1142 opt = entropy.tools.setting_to_bool(setting) 1143 if opt is not None: 1144 data['nonfree_packages_dir_support'] = opt 1145 1146 def _disabled_eapis(line, setting): 1147 mydis = setting.strip().split(",") 1148 try: 1149 mydis = [int(x) for x in mydis] 1150 mydis = set([x for x in mydis if x in (1, 2, 3,)]) 1151 except ValueError: 1152 return 1153 if (len(mydis) < 3) and mydis: 1154 data['disabled_eapis'] = mydis 1155 1156 def _server_basic_lang(line, setting): 1157 data['qa_langs'] = setting.strip().split() 1158 1159 def _broken_revdeps_qa(line, setting): 1160 opt = entropy.tools.setting_to_bool(setting) 1161 if opt is not None: 1162 data['broken_revdeps_qa_check'] = opt 1163 1164 def _repository_func(line, setting): 1165 # TODO: deprecate in 2015. repositories.conf.d/ is the 1166 # supported way to define repositories. 1167 try: 1168 repoid, repodata = \ 1169 srv_plugin_class.analyze_server_repo_string( 1170 line, product = sys_set['repositories']['product']) 1171 except AttributeError: 1172 # error parsing string 1173 return 1174 1175 # validate repository id string 1176 if not entropy.tools.validate_repository_id(repoid): 1177 sys.stderr.write("!!! invalid repository id '%s' in '%s'\n" % ( 1178 repoid, srv_plugin_class.server_conf_path())) 1179 return 1180 1181 if repoid in data['repositories']: 1182 # just update mirrors 1183 data['repositories'][repoid]['pkg_mirrors'].extend( 1184 repodata['pkg_mirrors']) 1185 data['repositories'][repoid]['repo_mirrors'].extend( 1186 repodata['repo_mirrors']) 1187 else: 1188 data['repositories'][repoid] = repodata.copy() 1189 1190 # base_repository_id support 1191 if data['base_repository_id'] is None: 1192 data['base_repository_id'] = repoid 1193 1194 def _database_format(line, setting): 1195 if setting in etpConst['etpdatabasesupportedcformats']: 1196 data['database_file_format'] = setting 1197 1198 def _syncspeedlimit(line, setting): 1199 try: 1200 speed_limit = int(setting) 1201 except ValueError: 1202 speed_limit = None 1203 data['sync_speed_limit'] = speed_limit 1204 1205 def _weak_package_files(line, setting): 1206 opt = entropy.tools.setting_to_bool(setting) 1207 if opt is not None: 1208 data['weak_package_files'] = opt 1209 1210 def _changelog(line, setting): 1211 bool_setting = entropy.tools.setting_to_bool(setting) 1212 if bool_setting is not None: 1213 data['changelog'] = bool_setting 1214 1215 def _community_mode(line, setting): 1216 bool_setting = entropy.tools.setting_to_bool(setting) 1217 if bool_setting is not None: 1218 data['community_mode'] = bool_setting 1219 1220 def _rss_feed(line, setting): 1221 bool_setting = entropy.tools.setting_to_bool(setting) 1222 if bool_setting is not None: 1223 data['rss']['enabled'] = bool_setting 1224 1225 def _rss_name(line, setting): 1226 data['rss']['name'] = setting 1227 1228 def _rss_light_name(line, setting): 1229 data['rss']['light_name'] = setting 1230 1231 def _rss_base_url(line, setting): 1232 data['rss']['base_url'] = setting 1233 1234 def _rss_website_url(line, setting): 1235 data['rss']['website_url'] = setting 1236 1237 def _managing_editor(line, setting): 1238 
data['rss']['editor'] = setting 1239 1240 def _max_rss_entries(line, setting): 1241 try: 1242 entries = int(setting) 1243 data['rss']['max_entries'] = entries 1244 except (ValueError, IndexError,): 1245 return 1246 1247 def _max_rss_light_entries(line, setting): 1248 try: 1249 entries = int(setting) 1250 data['rss']['light_max_entries'] = entries 1251 except (ValueError, IndexError,): 1252 return 1253 1254 settings_map = { 1255 'officialserverrepositoryid': _offservrepoid, 1256 'default-repository': _default_repo, 1257 'expiration-days': _exp_days, 1258 'community-mode': _community_mode, 1259 'expiration-based-scope': _exp_based_scope, 1260 'nonfree-packages-directory-support': _nf_packages_dir_sup, 1261 'disabled-eapis': _disabled_eapis, 1262 'broken-reverse-deps': _broken_revdeps_qa, 1263 'server-basic-languages': _server_basic_lang, 1264 'repository': _repository_func, 1265 'database-format': _database_format, 1266 # backward compatibility 1267 'sync-speed-limit': _syncspeedlimit, 1268 'syncspeedlimit': _syncspeedlimit, 1269 'weak-package-files': _weak_package_files, 1270 'changelog': _changelog, 1271 'rss-feed': _rss_feed, 1272 'rss-name': _rss_name, 1273 'rss-light-name': _rss_light_name, 1274 'rss-base-url': _rss_base_url, 1275 'rss-website-url': _rss_website_url, 1276 'managing-editor': _managing_editor, 1277 'max-rss-entries': _max_rss_entries, 1278 'max-rss-light-entries': _max_rss_light_entries, 1279 } 1280 1281 for line in serverconf: 1282 1283 key, value = entropy.tools.extract_setting(line) 1284 if key is None: 1285 continue 1286 1287 func = settings_map.get(key) 1288 if func is None: 1289 continue 1290 func(line, value) 1291 1292 # .ini-like file support. 1293 repositories_d_conf = sys_set.get_setting_dirs_data( 1294 )['repositories_conf_d'] 1295 _conf_dir, setting_files, _skipped_files, _upd = repositories_d_conf 1296 candidate_inis = [x for x,y in setting_files] 1297 1298 ini_parser = RepositoryConfigParser(encoding = enc) 1299 try: 1300 ini_parser.read(candidate_inis) 1301 except (IOError, OSError) as err: 1302 sys.stderr.write("Cannot parse %s: %s\n" % ( 1303 " ".join(candidate_inis), 1304 err)) 1305 ini_parser = None 1306 1307 if ini_parser: 1308 repositories = set(data['repositories'].keys()) 1309 ini_repositories = ini_parser.repositories() 1310 if data['base_repository_id'] is None: 1311 # if base_repository_id is not set, then 1312 # take the value of ini config files. 1313 ini_base = ini_parser.base_repository() 1314 if ini_base: 1315 data['base_repository_id'] = ini_base 1316 1317 for ini_repository in ini_repositories: 1318 if ini_repository in repositories: 1319 # double syntax is not supported. 
1320 continue 1321 ini_enabled = ini_parser.enabled(ini_repository) 1322 if not ini_enabled: 1323 continue 1324 1325 ini_exclude_qa = ini_parser.exclude_qa(ini_repository) 1326 1327 try: 1328 ini_desc = ini_parser.desc(ini_repository) 1329 except KeyError: 1330 ini_desc = _("No description") 1331 try: 1332 ini_mirrors = ini_parser.repo(ini_repository) 1333 except KeyError: 1334 ini_mirrors = [] 1335 1336 repo_mirrors = [] 1337 pkg_mirrors = [] 1338 repo_mirrors.extend(ini_mirrors) 1339 pkg_mirrors.extend(ini_mirrors) 1340 1341 try: 1342 repo_mirrors.extend(ini_parser.repo_only(ini_repository)) 1343 except KeyError: 1344 pass 1345 try: 1346 pkg_mirrors.extend(ini_parser.pkg_only(ini_repository)) 1347 except KeyError: 1348 pass 1349 1350 repo_data = srv_plugin_class._generate_repository_metadata( 1351 ini_repository, ini_desc, repo_mirrors, pkg_mirrors, 1352 ini_exclude_qa) 1353 data['repositories'][ini_repository] = repo_data 1354 1355 env_community_mode = os.getenv("ETP_COMMUNITY_MODE") 1356 if env_community_mode == "0": 1357 data['community_mode'] = False 1358 elif env_community_mode == "1": 1359 data['community_mode'] = True 1360 1361 # add system database if community repository mode is enabled 1362 if data['community_mode']: 1363 client_repository_id = InstalledPackagesRepository.NAME 1364 1365 mydata = srv_plugin_class._generate_repository_metadata( 1366 client_repository_id, 1367 const_convert_to_unicode( 1368 "Community Repositories System Repository"), 1369 [],[], False) 1370 1371 data['repositories'][client_repository_id] = mydata 1372 srv_plugin_class.REPOSITORIES[client_repository_id] = \ 1373 mydata 1374 # installed packages repository is now the base repository 1375 data['base_repository_id'] = client_repository_id 1376 1377 # expand paths 1378 for repoid in data['repositories']: 1379 srv_plugin_class.extend_repository_metadata( 1380 sys_set, repoid, data['repositories'][repoid]) 1381 1382 # Support for shell variables 1383 shell_repoid = os.getenv('ETP_REPO') 1384 if shell_repoid: 1385 data['default_repository_id'] = shell_repoid 1386 1387 expiration_days = os.getenv('ETP_EXPIRATION_DAYS') 1388 if expiration_days: 1389 try: 1390 expiration_days = int(expiration_days) 1391 data['packages_expiration_days'] = expiration_days 1392 except ValueError: 1393 pass 1394 1395 return data 1396 1397 @staticmethod
1398 - def extend_repository_metadata(system_settings, repository_id, metadata):
1399 """ 1400 Extend server-side Repository metadata dictionary 1401 with information required by Entropy Server. 1402 """ 1403 metadata['repo_basedir'] = os.path.join( 1404 etpConst['entropyworkdir'], 1405 "server", 1406 repository_id) 1407 1408 metadata['packages_dir'] = os.path.join( 1409 etpConst['entropyworkdir'], 1410 "server", 1411 repository_id, 1412 etpConst['packagesrelativepath_basedir'], 1413 etpConst['currentarch']) 1414 1415 metadata['packages_dir_nonfree'] = os.path.join( 1416 etpConst['entropyworkdir'], 1417 "server", 1418 repository_id, 1419 etpConst['packagesrelativepath_basedir_nonfree'], 1420 etpConst['currentarch']) 1421 1422 metadata['packages_dir_restricted'] = os.path.join( 1423 etpConst['entropyworkdir'], 1424 "server", 1425 repository_id, 1426 etpConst['packagesrelativepath_basedir_restricted'], 1427 etpConst['currentarch']) 1428 1429 metadata['store_dir'] = os.path.join( 1430 etpConst['entropyworkdir'], 1431 "server", 1432 repository_id, 1433 "store", 1434 etpConst['currentarch']) 1435 1436 # consider this a base dir 1437 metadata['upload_basedir'] = os.path.join( 1438 etpConst['entropyworkdir'], 1439 "server", 1440 repository_id, 1441 "upload") 1442 1443 metadata['database_dir'] = os.path.join( 1444 etpConst['entropyworkdir'], 1445 "server", 1446 repository_id, 1447 "database", 1448 etpConst['currentarch']) 1449 1450 metadata['remote_repo_basedir'] = os.path.join( 1451 system_settings['repositories']['product'], 1452 repository_id) 1453 1454 metadata['database_remote_path'] = \ 1455 ServerSystemSettingsPlugin.get_repository_remote_path( 1456 system_settings, repository_id) 1457 metadata['override_database_remote_path'] = None
1458 1459 @staticmethod
1460 - def get_repository_remote_path(system_settings, repository_id):
1461 return system_settings['repositories']['product'] + "/" + \ 1462 repository_id + "/database/" + etpConst['currentarch']
1463 1464 @staticmethod
1465 - def set_override_remote_repository(system_settings, repository_id, 1466 override_repository_id):
1467 """ 1468 Used to set an overridden remote path where to push repository 1469 database. This can be used for quickly testing repository changes 1470 without directly overwriting the real repository. 1471 """ 1472 repo_path = ServerSystemSettingsPlugin.get_repository_remote_path( 1473 system_settings, override_repository_id) 1474 1475 sys_settings_plugin_id = \ 1476 etpConst['system_settings_plugins_ids']['server_plugin'] 1477 srv_data = system_settings[sys_settings_plugin_id]['server'] 1478 repo_data = srv_data['repositories'][repository_id] 1479 repo_data['override_database_remote_path'] = repo_path
1480
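A short sketch of the override mechanism described above. The repository identifiers are hypothetical, and it assumes an Entropy Server instance has already registered its SystemSettings plugin so that the server 'repositories' metadata exists:

    settings = SystemSettings()
    # push the "sabayon-limbo" repository database to the remote path
    # normally used by "sabayon-limbo-test", without touching the real path
    ServerSystemSettingsPlugin.set_override_remote_repository(
        settings, "sabayon-limbo", "sabayon-limbo-test")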
1481 1482 -class ServerFatscopeSystemSettingsPlugin(SystemSettingsPlugin):
1483
1484 - def repos_parser(self, sys_set):
1485 1486 cached = getattr(self, '_repos_data', None) 1487 if cached is not None: 1488 return cached 1489 1490 data = {} 1491 srv_plug_id = etpConst['system_settings_plugins_ids']['server_plugin'] 1492 # if support is not enabled, don't waste time scanning files 1493 srv_parser_data = sys_set[srv_plug_id]['server'] 1494 if not srv_parser_data['exp_based_scope']: 1495 return data 1496 1497 # get expiration-based packages removal data from config files 1498 for repoid in srv_parser_data['repositories']: 1499 1500 # filter out system repository if community repository 1501 # mode is enabled 1502 if repoid == InstalledPackagesRepository.NAME: 1503 continue 1504 1505 package_ids = set() 1506 exp_fp = self._helper._get_local_exp_based_pkgs_rm_whitelist_file( 1507 repoid) 1508 try: 1509 dbconn = self._helper.open_server_repository( 1510 repoid, just_reading = True) 1511 except RepositoryError: 1512 # ignore 1513 continue 1514 1515 pkgs = [] 1516 if const_file_readable(exp_fp): 1517 # don't worry about the race. 1518 pkgs += entropy.tools.generic_file_content_parser( 1519 exp_fp, encoding = etpConst['conf_encoding']) 1520 if '*' in pkgs: # wildcard support 1521 package_ids.add(-1) 1522 else: 1523 for pkg in pkgs: 1524 package_id, rc_match = dbconn.atomMatch(pkg) 1525 if rc_match: 1526 continue 1527 package_ids.add(package_id) 1528 1529 data[repoid] = package_ids 1530 1531 self._repos_data = data 1532 return data
1533
1534 -class ServerFakeClientSystemSettingsPlugin(SystemSettingsPlugin):
1535
1536 - def fake_cli_parser(self, sys_set):
1537 """ 1538 This is just fake, doesn't bring any new metadata but just tweak 1539 Entropy client ones. 1540 """ 1541 data = {} 1542 srv_plug_id = etpConst['system_settings_plugins_ids']['server_plugin'] 1543 # if support is not enabled, don't waste time scanning files 1544 srv_parser_data = sys_set[srv_plug_id]['server'] 1545 1546 # now setup fake Entropy Client repositories, so that Entropy Server 1547 # can use Entropy Client interfaces transparently 1548 srv_repodata = srv_parser_data['repositories'] 1549 cli_repodata = sys_set['repositories'] 1550 # remove unavailable server repos in client metadata first 1551 cli_repodata['available'].clear() 1552 1553 for repoid, repo_data in srv_repodata.items(): 1554 1555 try: 1556 # we must skip the repository validation because 1557 # repositories have been already validated. 1558 # moreover, the system repository_id might not be 1559 # valid (__system__). But still, this is the wanted 1560 # behaviour. 1561 xxx, my_data = sys_set._analyze_client_repo_string( 1562 "repository = %s|%s|http://--fake--|http://--fake--" \ 1563 % (repoid, repo_data['description'],), 1564 _skip_repository_validation=True) 1565 except AttributeError as err: 1566 # yeah, at least let stderr know. 1567 sys.stderr.write(repr(err) + "\n") 1568 continue # sorry! 1569 1570 my_data['repoid'] = repoid 1571 if '__temporary__' in repo_data: 1572 # fake repositories, temp ones 1573 # can't go into Entropy Client, they miss 1574 # 'database_dir' and other metadata 1575 my_data['dbpath'] = None 1576 my_data['__temporary__'] = repo_data['__temporary__'] 1577 my_data['dbrevision'] = 0 1578 else: 1579 my_data['dbpath'] = self._helper._get_local_repository_dir( 1580 repoid) 1581 my_data['dbrevision'] = \ 1582 self._helper.local_repository_revision( 1583 repoid) 1584 cli_repodata['available'][repoid] = my_data 1585 1586 cli_repodata['default_repository'] = \ 1587 srv_parser_data['default_repository_id'] 1588 1589 del cli_repodata['order'][:] 1590 if srv_parser_data['base_repository_id'] is not None: 1591 cli_repodata['order'].append(srv_parser_data['base_repository_id']) 1592 for repoid in sorted(srv_repodata): 1593 if repoid not in cli_repodata['order']: 1594 cli_repodata['order'].append(repoid) 1595 1596 return data
1597
1598 -class ServerQAInterfacePlugin(QAInterfacePlugin):
1599
1600 - def __init__(self, entropy_server_instance):
1601 self._server = entropy_server_instance
1602
1603 - def __check_package_using_spm(self, package_path):
1604 1605 spm_class = get_spm_class() 1606 spm_rc, spm_msg = spm_class.execute_qa_tests(package_path) 1607 1608 if spm_rc == 0: 1609 return True 1610 sys.stderr.write("QA Error: " + spm_msg + "\n") 1611 sys.stderr.flush() 1612 return False
1613
1614 - def __extract_edb_analyze_metadata(self, package_path):
1615 1616 def _is_supported(keywords): 1617 for arch in etpConst['keywords']: 1618 if arch in keywords: 1619 return True 1620 return False
1621 1622 tmp_fd, tmp_f = const_mkstemp(prefix = 'entropy.server') 1623 dbc = None 1624 try: 1625 found_edb = entropy.tools.dump_entropy_metadata(package_path, tmp_f) 1626 if not found_edb: 1627 return False 1628 dbc = self._server._open_temp_repository("test", temp_file = tmp_f, 1629 initialize = False) 1630 for package_id in dbc.listAllPackageIds(): 1631 # NOTE: content is tested in entropy.qa builtin package test 1632 # test content safety 1633 dbc.retrieveContentSafety(package_id) 1634 # test keywords 1635 keywords = dbc.retrieveKeywords(package_id) 1636 if not _is_supported(keywords): 1637 atom = dbc.retrieveAtom(package_id) 1638 # big PHAT warning !! 1639 self._server.output(darkred("~"*40), level = "warning") 1640 self._server.output("[%s, %s] %s" % ( 1641 brown(os.path.basename(package_path)), teal(atom), 1642 purple(_("package has no keyword set, it will be masked !"))), 1643 level = "warning", header = darkred(" !!! ")) 1644 self._server.output(darkred("~"*40), level = "warning") 1645 time.sleep(10) 1646 finally: 1647 if dbc is not None: 1648 dbc.close() 1649 os.close(tmp_fd) 1650 1651 return True
1652
1653 - def get_tests(self):
1654 return [self.__check_package_using_spm, 1655 self.__extract_edb_analyze_metadata]
1656
1657 - def get_id(self):
1658 return SERVER_QA_PLUGIN
1659
1660 1661 -class ServerConfigurationFiles(ConfigurationFiles):
1662 1663 """ 1664 Subclass Entropy Client version in order to return 1665 our repository identifiers 1666 """ 1667 1668 @property
1669 - def _repository_ids(self):
1670 """ 1671 Return a the list of repository identifiers the object 1672 is using. 1673 """ 1674 return self._entropy.repositories()
1675
1676 1677 -class Server(Client):
1678 1679 # Entropy Server cache directory, mainly used for storing commit changes 1680 CACHE_DIR = os.path.join(etpConst['entropyworkdir'], "server_cache") 1681 1682 # SystemSettings class variables 1683 SYSTEM_SETTINGS_PLG_ID = etpConst['system_settings_plugins_ids']['server_plugin'] 1684 1685 # Make possible to disable tree updates completely. 1686 _inhibit_treeupdates = False 1687
1688 - def init_singleton(self, default_repository = None, save_repository = False, 1689 fake_default_repo = False, fake_default_repo_id = None, 1690 fake_default_repo_desc = None, handle_uninitialized = True, 1691 **kwargs):
1692 1693 self._indexing = False 1694 1695 # initialize Entropy Client superclass 1696 if "installed_repo" not in kwargs: 1697 kwargs["installed_repo"] = False 1698 if "repo_validation" not in kwargs: 1699 kwargs["repo_validation"] = False 1700 Client.init_singleton(self, 1701 indexing = self._indexing, 1702 **kwargs 1703 ) 1704 1705 if fake_default_repo_desc is None: 1706 fake_default_repo_desc = 'this is a fake repository' 1707 self.__instance_destroyed = False 1708 1709 # settings 1710 self._memory_db_srv_instances = {} 1711 self._treeupdates_repos = set() 1712 self._server_dbcache = {} 1713 etpSys['serverside'] = True 1714 self.fake_default_repo = fake_default_repo 1715 self.fake_default_repo_id = fake_default_repo_id 1716 self.Mirrors = None 1717 self._settings_to_backup = [] 1718 self._save_repository = save_repository 1719 self._sync_lock_cache = set() 1720 1721 self.sys_settings_fake_cli_plugin_id = \ 1722 etpConst['system_settings_plugins_ids']['server_plugin_fake_client'] 1723 self.sys_settings_fatscope_plugin_id = \ 1724 etpConst['system_settings_plugins_ids']['server_plugin_fatscope'] 1725 1726 # create our SystemSettings plugin 1727 with self._settings: 1728 self.sys_settings_plugin = ServerSystemSettingsPlugin( 1729 Server.SYSTEM_SETTINGS_PLG_ID, self) 1730 self._settings.add_plugin(self.sys_settings_plugin) 1731 1732 # Fatscope support SystemSettings plugin 1733 self.sys_settings_fatscope_plugin = \ 1734 ServerFatscopeSystemSettingsPlugin( 1735 self.sys_settings_fatscope_plugin_id, self) 1736 self._settings.add_plugin(self.sys_settings_fatscope_plugin) 1737 1738 # Fatscope support SystemSettings plugin 1739 self.sys_settings_fake_cli_plugin = \ 1740 ServerFakeClientSystemSettingsPlugin( 1741 self.sys_settings_fake_cli_plugin_id, self) 1742 self._settings.add_plugin(self.sys_settings_fake_cli_plugin) 1743 1744 # setup fake repository 1745 if fake_default_repo: 1746 default_repository = fake_default_repo_id 1747 self._init_generic_memory_server_repository( 1748 fake_default_repo_id, 1749 fake_default_repo_desc, set_as_default = True) 1750 1751 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1752 self._repository = default_repository 1753 if self._repository is None: 1754 self._repository = srv_set['default_repository_id'] 1755 1756 if not fake_default_repo: 1757 if self._repository in srv_set['repositories']: 1758 try: 1759 self._ensure_paths(self._repository) 1760 except OSError as err: 1761 if err.errno != errno.EACCES: 1762 raise 1763 # sigh, ignore during init 1764 1765 # if repository is still None, fallback to internal 1766 # fake repository. This way Entropy Server will work 1767 # out of the box without any server.conf tweak 1768 # (and eit bashcomp is happy) 1769 if self._repository is None: 1770 repository_id = "__builtin__" 1771 self._init_generic_memory_server_repository( 1772 repository_id, "Built-in fallback fake repository", 1773 set_as_default=True) 1774 self._repository = repository_id 1775 1776 if self._repository not in srv_set['repositories']: 1777 raise PermissionDenied("PermissionDenied: %s %s" % ( 1778 self._repository, 1779 _("repository not configured"), 1780 ) 1781 ) 1782 if InstalledPackagesRepository.NAME == self._repository: 1783 raise PermissionDenied("PermissionDenied: %s %s" % ( 1784 InstalledPackagesRepository.NAME, 1785 _("protected repository id, can't use this, sorry dude..."), 1786 ) 1787 ) 1788 1789 self.switch_default_repository( 1790 self._repository, handle_uninitialized=handle_uninitialized)
1791
1792 - def destroy(self, _from_shutdown = False):
1793 """ 1794 Destroy this singleton instance. 1795 """ 1796 self.__instance_destroyed = True 1797 Client.close_repositories(self, mask_clear = False) 1798 Client.destroy(self, _from_shutdown = _from_shutdown) 1799 1800 if not _from_shutdown: 1801 plug_id2 = self.sys_settings_fake_cli_plugin_id 1802 plug_id1 = self.sys_settings_fatscope_plugin_id 1803 plug_id = Server.SYSTEM_SETTINGS_PLG_ID 1804 # reverse insert order 1805 plugs = [plug_id2, plug_id1, plug_id] 1806 for plug in plugs: 1807 if plug is None: 1808 continue 1809 if not self._settings.has_plugin(plug): 1810 continue 1811 self._settings.remove_plugin(plug) 1812 1813 self.close_repositories()
1814 1815 @property
1816 - def _cacher(self):
1817 """ 1818 Return an EntropyCacher object instance. 1819 """ 1820 return EntropyCacher()
1821
1822 - def is_destroyed(self):
1823 """ 1824 Return whether the singleton instance is destroyed. 1825 """ 1826 return self.__instance_destroyed
1827
1828 - def _cache_prefix(self, caller):
1829 """ 1830 Generate a cache object key prefix to use with EntropyCacher. 1831 1832 @param caller: a custom function caller name 1833 @type caller: string 1834 @return: the cache prefix 1835 @rtype: string 1836 """ 1837 return "%s/%s/%s" % ( 1838 __name__, self.__class__.__name__, caller)
1839
1840 - def _get_branch_from_download_relative_uri(self, db_download_uri):
1841 return db_download_uri.split("/")[2]
1842
1843 - def _swap_branch_in_download_relative_uri(self, new_branch, 1844 db_download_uri):
1845 cur_branch = self._get_branch_from_download_relative_uri( 1846 db_download_uri) 1847 return db_download_uri.replace("/%s/" % (cur_branch,), 1848 "/%s/" % (new_branch,))
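A standalone sketch of the two helpers above; the example relative URI layout (packages/&lt;arch&gt;/&lt;branch&gt;/...) is an assumption used only for illustration:

    db_download_uri = "packages/amd64/5/app-misc/foo-1.0.tbz2"  # hypothetical layout
    cur_branch = db_download_uri.split("/")[2]                   # -> "5"
    new_branch = "6"
    swapped = db_download_uri.replace("/%s/" % (cur_branch,), "/%s/" % (new_branch,))
    print(swapped)  # packages/amd64/6/app-misc/foo-1.0.tbz2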
1849
1850 - def _get_basedir_pkg_listing(self, base_dir, extension, branch = None):
1851 1852 pkgs_dir_types = set(self._get_pkg_dir_names()) 1853 basedir_raw_content = [] 1854 entropy.tools.recursive_directory_relative_listing( 1855 basedir_raw_content, base_dir) 1856 1857 pkg_ext = extension 1858 pkg_list = [x for x in basedir_raw_content if x.endswith(pkg_ext)] 1859 pkg_list = [x for x in pkg_list if \ 1860 x.split(os.path.sep)[0] in pkgs_dir_types] 1861 1862 if branch is not None: 1863 branch_extractor = \ 1864 self._get_branch_from_download_relative_uri 1865 pkg_list = [x for x in pkg_list if branch_extractor(x) == branch] 1866 1867 return pkg_list
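A standalone sketch of the filtering performed above: keep entries carrying the package extension whose top-level directory is one of the known package directories, optionally restricting to one branch. The sample paths, directory names and extension are hypothetical:

    import os

    basedir_raw_content = [
        "packages/amd64/5/app-misc/foo-1.0.tbz2",
        "packages-nonfree/amd64/5/app-misc/bar-2.0.tbz2",
        "packages/amd64/4/app-misc/old-0.1.tbz2",
        "random/readme.txt",
    ]
    pkgs_dir_types = {"packages", "packages-nonfree", "packages-restricted"}
    pkg_ext = ".tbz2"
    branch = "5"

    pkg_list = [x for x in basedir_raw_content if x.endswith(pkg_ext)]
    pkg_list = [x for x in pkg_list if x.split(os.path.sep)[0] in pkgs_dir_types]
    pkg_list = [x for x in pkg_list if x.split("/")[2] == branch]
    print(pkg_list)  # the two entries living under branch "5"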
1868
1869 - def _get_pkg_dir_names(self):
1870 return [etpConst['packagesrelativepath_basedir'], 1871 etpConst['packagesrelativepath_basedir_nonfree'], 1872 etpConst['packagesrelativepath_basedir_restricted']]
1873
1874 - def _get_remote_repository_relative_path(self, repository_id):
1875 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1876 return srv_set['repositories'][repository_id]['database_remote_path']
1877
1878 - def _get_override_remote_repository_relative_path(self, repository_id):
1879 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1880 repo_data = srv_set['repositories'][repository_id] 1881 return repo_data['override_database_remote_path']
1882
1883 - def _get_local_repository_file(self, repository_id, branch = None):
1884 return os.path.join(self._get_local_repository_dir(repository_id, 1885 branch = branch), etpConst['etpdatabasefile'])
1886
1887 - def _get_local_store_directory(self, repository_id):
1888 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1889 return srv_set['repositories'][repository_id]['store_dir']
1890
1891 - def _get_local_upload_directory(self, repository_id):
1892 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1893 return srv_set['repositories'][repository_id]['upload_basedir']
1894
1895 - def _get_local_repository_base_directory(self, repository_id):
1896 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1897 return srv_set['repositories'][repository_id]['repo_basedir']
1898
1899 - def _get_local_repository_taint_file(self, repository_id, branch = None):
1900 return os.path.join(self._get_local_repository_dir(repository_id, 1901 branch = branch), etpConst['etpdatabasetaintfile'])
1902
1903 - def _get_local_repository_revision_file(self, repository_id, branch = None):
1904 return os.path.join( 1905 self._get_local_repository_dir(repository_id, branch = branch), 1906 etpConst['etpdatabaserevisionfile'])
1907
1908 - def _get_local_repository_timestamp_file(self, repository_id, 1909 branch = None):
1910 return os.path.join(self._get_local_repository_dir(repository_id, 1911 branch = branch), etpConst['etpdatabasetimestampfile'])
1912
1913 - def _get_local_repository_mask_file(self, repository_id, branch = None):
1914 return os.path.join(self._get_local_repository_dir(repository_id, 1915 branch = branch), etpConst['etpdatabasemaskfile'])
1916
1917 - def _get_local_repository_system_mask_file(self, repository_id, 1918 branch = None):
1919 return os.path.join(self._get_local_repository_dir(repository_id, 1920 branch = branch), etpConst['etpdatabasesytemmaskfile'])
1921
1922 - def _get_local_repository_licensewhitelist_file(self, repository_id, 1923 branch = None):
1924 return os.path.join(self._get_local_repository_dir(repository_id, 1925 branch = branch), etpConst['etpdatabaselicwhitelistfile'])
1926
1927 - def _get_local_repository_mirrors_file(self, repository_id, branch = None):
1928 return os.path.join(self._get_local_repository_dir(repository_id, 1929 branch = branch), etpConst['etpdatabasemirrorsfile'])
1930
1931 - def _get_local_repository_fallback_mirrors_file(self, repository_id, 1932 branch = None):
1933 return os.path.join(self._get_local_repository_dir(repository_id, 1934 branch = branch), etpConst['etpdatabasefallbackmirrorsfile'])
1935
1936 - def _get_local_repository_rss_file(self, repository_id, branch = None):
1937 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1938 return os.path.join(self._get_local_repository_dir(repository_id, 1939 branch = branch), srv_set['rss']['name'])
1940
1941 - def _get_local_repository_changelog_file(self, repository_id, 1942 branch = None):
1943 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1944 return os.path.join(self._get_local_repository_dir(repository_id, 1945 branch = branch), etpConst['changelog_filename'])
1946
1947 - def _get_local_repository_compressed_changelog_file(self, repository_id, 1948 branch = None):
1949 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1950 return os.path.join(self._get_local_repository_dir(repository_id, 1951 branch = branch), etpConst['changelog_filename_compressed'])
1952
1953 - def _get_local_repository_rsslight_file(self, repository_id, branch = None):
1954 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 1955 return os.path.join(self._get_local_repository_dir(repository_id, 1956 branch = branch), srv_set['rss']['light_name'])
1957
1958 - def _get_local_repository_notice_board_file(self, repository_id, 1959 branch = None):
1960 return os.path.join(self._get_local_repository_dir(repository_id, 1961 branch = branch), etpConst['rss-notice-board'])
1962
1963 - def _get_local_repository_treeupdates_file(self, repository_id, 1964 branch = None):
1965 return os.path.join(self._get_local_repository_dir(repository_id, 1966 branch = branch), etpConst['etpdatabaseupdatefile'])
1967
1968 - def _get_local_repository_compressed_metafiles_file(self, repository_id, 1969 branch = None):
1970 return os.path.join(self._get_local_repository_dir(repository_id, 1971 branch = branch), etpConst['etpdatabasemetafilesfile'])
1972
1973 - def _get_local_repository_metafiles_not_found_file(self, repository_id, 1974 branch = None):
1975 return os.path.join(self._get_local_repository_dir(repository_id, 1976 branch = branch), etpConst['etpdatabasemetafilesnotfound'])
1977
1978 - def _get_local_repository_gpg_signature_file(self, repository_id, 1979 branch = None):
1980 return os.path.join(self._get_local_repository_dir(repository_id, 1981 branch = branch), etpConst['etpdatabasegpgfile'])
1982
1983 - def _get_local_exp_based_pkgs_rm_whitelist_file(self, repository_id, 1984 branch = None):
1985 return os.path.join(self._get_local_repository_dir(repository_id, 1986 branch = branch), etpConst['etpdatabaseexpbasedpkgsrm'])
1987
1988 - def _get_local_pkglist_file(self, repository_id, branch = None):
1989 return os.path.join(self._get_local_repository_dir(repository_id, 1990 branch = branch), etpConst['etpdatabasepkglist'])
1991
1992 - def _get_local_extra_pkglist_file(self, repository_id, branch = None):
1993 return os.path.join(self._get_local_repository_dir(repository_id, 1994 branch = branch), etpConst['etpdatabaseextrapkglist'])
1995
1996 - def _get_local_database_sets_dir(self, repository_id, branch = None):
1997 return os.path.join(self._get_local_repository_dir(repository_id, 1998 branch = branch), etpConst['confsetsdirname'])
1999
2000 - def _get_local_post_branch_mig_script(self, repository_id, branch = None):
2001 return os.path.join(self._get_local_repository_dir(repository_id, 2002 branch = branch), etpConst['etp_post_branch_hop_script'])
2003
2004 - def _get_local_post_branch_upg_script(self, repository_id, branch = None):
2005 return os.path.join(self._get_local_repository_dir(repository_id, 2006 branch = branch), etpConst['etp_post_branch_upgrade_script'])
2007
2008 - def _get_local_post_repo_update_script(self, repository_id, branch = None):
2009 return os.path.join(self._get_local_repository_dir(repository_id, 2010 branch = branch), etpConst['etp_post_repo_update_script'])
2011
2012 - def _get_local_critical_updates_file(self, repository_id, branch = None):
2013 return os.path.join(self._get_local_repository_dir(repository_id, 2014 branch = branch), etpConst['etpdatabasecriticalfile'])
2015
2016 - def _get_local_restricted_file(self, repository_id, branch = None):
2017 return os.path.join(self._get_local_repository_dir(repository_id, 2018 branch = branch), etpConst['etpdatabaserestrictedfile'])
2019
2020 - def _get_local_repository_keywords_file(self, repository_id, branch = None):
2021 return os.path.join(self._get_local_repository_dir(repository_id, 2022 branch = branch), etpConst['etpdatabasekeywordsfile'])
2023
2024 - def _get_local_repository_webserv_file(self, repository_id, branch = None):
2025 return os.path.join(self._get_local_repository_dir(repository_id, 2026 branch = branch), etpConst['etpdatabasewebservicesfile'])
2027
2028 - def _get_local_repository_dir(self, repository_id, branch = None):
2029 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2030 if branch is None: 2031 branch = self._settings['repositories']['branch'] 2032 return os.path.join( 2033 srv_set['repositories'][repository_id]['database_dir'], branch)
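A standalone sketch of how the path helpers above compose locations, namely &lt;database_dir&gt;/&lt;branch&gt;/&lt;file name&gt;. The directory, branch and file name are hypothetical stand-ins for the configured values and etpConst entries:

    import os

    database_dir = "/var/lib/entropy/server/my-repo/database/amd64"  # hypothetical
    branch = "5"                                                     # hypothetical
    repo_dir = os.path.join(database_dir, branch)
    repo_file = os.path.join(repo_dir, "packages.db")                # stands in for etpConst['etpdatabasefile']
    print(repo_file)  # /var/lib/entropy/server/my-repo/database/amd64/5/packages.db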
2034
2035 - def _get_missing_dependencies_blacklist_file(self, repository_id, 2036 branch = None):
2037 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2038 if branch is None: 2039 branch = self._settings['repositories']['branch'] 2040 return os.path.join( 2041 srv_set['repositories'][repository_id]['database_dir'], 2042 branch, etpConst['etpdatabasemissingdepsblfile'])
2043
2044 - def _get_repository_lockfile(self, repository_id):
2045 return os.path.join(self._get_local_repository_dir(repository_id), 2046 etpConst['etpdatabaselockfile'])
2047
2048 - def _get_repository_download_lockfile(self, repository_id):
2049 return os.path.join(self._get_local_repository_dir(repository_id), 2050 etpConst['etpdatabasedownloadlockfile'])
2051
2052 - def _create_local_repository_download_lockfile(self, repository_id):
2053 lock_file = self._get_repository_download_lockfile(repository_id) 2054 enc = etpConst['conf_encoding'] 2055 with codecs.open(lock_file, "w", encoding=enc) as f_lock: 2056 f_lock.write("download locked")
2057
2058 - def _create_local_repository_lockfile(self, repository_id):
2059 lock_file = self._get_repository_lockfile(repository_id) 2060 enc = etpConst['conf_encoding'] 2061 with codecs.open(lock_file, "w", encoding=enc) as f_lock: 2062 f_lock.write("database locked")
2063
2064 - def _remove_local_repository_lockfile(self, repository_id):
2065 lock_file = self._get_repository_lockfile(repository_id) 2066 try: 2067 os.remove(lock_file) 2068 except OSError: 2069 pass
2070
2071 - def _remove_local_repository_download_lockfile(self, repository_id):
2072 lock_file = self._get_repository_download_lockfile(repository_id) 2073 try: 2074 os.remove(lock_file) 2075 except OSError: 2076 pass
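A standalone sketch of the lock file pattern used by the four methods above: the lock is taken by writing a small marker file and released by removing it, silently ignoring a missing file on release. The path and marker text are hypothetical:

    import codecs
    import os

    lock_file = "/tmp/example.database.lock"  # hypothetical path

    def create_lock(path):
        with codecs.open(path, "w", encoding="UTF-8") as f_lock:
            f_lock.write("database locked")

    def remove_lock(path):
        try:
            os.remove(path)
        except OSError:
            pass  # already gone, nothing to do

    create_lock(lock_file)
    remove_lock(lock_file)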
2077 2078 @staticmethod
2079 - def _get_conf_dep_rewrite_file():
2080 packages_dir = SystemSettings.packages_config_directory() 2081 return os.path.join(packages_dir, "packages.server.dep_rewrite")
2082 2083 @staticmethod
2084 - def _get_conf_dep_blacklist_file():
2085 packages_dir = SystemSettings.packages_config_directory() 2086 return os.path.join(packages_dir, "packages.server.dep_blacklist")
2087 2088 @staticmethod
2089 - def _get_conf_sets_file():
2090 packages_dir = SystemSettings.packages_config_directory() 2091 return os.path.join(packages_dir, "packages.server.sets")
2092
2093 - def complete_remote_package_relative_path(self, pkg_rel_url, repository_id):
2094 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2095 return os.path.join( 2096 srv_set['repositories'][repository_id]['remote_repo_basedir'], 2097 pkg_rel_url)
2098
2099 - def complete_local_upload_package_path(self, pkg_rel_url, repository_id):
2100 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2101 return os.path.join( 2102 srv_set['repositories'][repository_id]['upload_basedir'], 2103 pkg_rel_url)
2104
2105 - def complete_local_package_path(self, pkg_rel_url, repository_id):
2106 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2107 return os.path.join( 2108 srv_set['repositories'][repository_id]['repo_basedir'], 2109 pkg_rel_url)
2110
2111 - def remote_repository_mirrors(self, repository_id):
2112 """ 2113 Return a list of remote repository mirrors (database) for given 2114 repository. 2115 2116 @param repository_id: repository identifier 2117 @type repository_id: string 2118 @return: list of available repository mirrors 2119 @rtype: list 2120 @raise KeyError: if repository_id is invalid 2121 """ 2122 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2123 return srv_set['repositories'][repository_id]['repo_mirrors'][:]
2124
2125 - def remote_packages_mirrors(self, repository_id):
2126 """ 2127 Return a list of remote packages mirrors (packages) for given 2128 repository. 2129 2130 @param repository_id: repository identifier 2131 @type repository_id: string 2132 @return: list of available packages mirrors 2133 @rtype: list 2134 @raise KeyError: if repository_id is invalid 2135 """ 2136 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2137 return srv_set['repositories'][repository_id]['pkg_mirrors'][:]
2138
2139 - def local_repository_revision(self, repository_id):
2140 """ 2141 Return local repository revision. 2142 2143 @param repository_id: repository identifier 2144 @type repository_id: string 2145 @return: the actual repository revision 2146 @rtype: int 2147 """ 2148 dbrev_file = self._get_local_repository_revision_file(repository_id) 2149 if not os.path.isfile(dbrev_file): 2150 return 0 2151 2152 enc = etpConst['conf_encoding'] 2153 with codecs.open(dbrev_file, "r", encoding=enc) as f_rev: 2154 rev = f_rev.readline().strip() 2155 try: 2156 rev = int(rev) 2157 except ValueError: 2158 self.output( 2159 "[%s] %s: %s - %s" % ( 2160 darkgreen(repository_id), 2161 blue(_("invalid repository revision")), 2162 bold(rev), 2163 blue(_("defaulting to 0")), 2164 ), 2165 importance = 2, 2166 level = "error", 2167 header = darkred(" !!! ") 2168 ) 2169 rev = 0 2170 return rev
2171
2172 - def remote_repository_revision(self, repository_id):
2173 """ 2174 Return the highest repository revision available on mirrors for 2175 given repository. 2176 2177 @param repository_id: repository identifier 2178 @type repository_id: string 2179 @return: remote repository revision 2180 @rtype: int 2181 """ 2182 repo_status = self.Mirrors.remote_repository_status(repository_id) 2183 remote_status = list(repo_status.items()) 2184 if not [x for x in remote_status if x[1]]: 2185 return 0 2186 return max([x[1] for x in remote_status])
2187
2188 - def repositories(self):
2189 """ 2190 Return a list of available Entropy Server repositories. 2191 2192 @return: list of available Entropy Server repositories 2193 @rtype: list 2194 """ 2195 return self._enabled_repos
2196 2197 @property
2198 - def _enabled_repos(self):
2199 """ 2200 Monkey-patched Entropy Client property provided for compatibility. 2201 """ 2202 try: 2203 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2204 except KeyError: 2205 # this happens during the init phase, ignore 2206 return [] 2207 else: 2208 return sorted(srv_set['repositories'])
2209
2210 - def qa_repositories(self):
2211 """ 2212 Return a list of QA-testable available Entropy Server repositories. 2213 2214 @return: list of QA-testable available Entropy Server repositories 2215 @rtype: list 2216 """ 2217 srv_set = self._settings[Server.SYSTEM_SETTINGS_PLG_ID]['server'] 2218 repos = srv_set['repositories'] 2219 return sorted([x for x, y in repos.items() if not y['exclude_qa']])
2220
2221 - def repository(self):
2222 """ 2223 Return the current repository marked as default. 2224 """ 2225 return self._repository
2226
2227 - def QA(self):
2228 """ 2229 Get Entropy QA Interface instance. 2230 2231 @return: Entropy QA Interface instance 2232 @rtype: entropy.qa.QAInterface instance 2233 """ 2234 qa_plugin = ServerQAInterfacePlugin(self) 2235 qa = Client.QA(self) 2236 qa.add_plugin(qa_plugin) 2237 return qa
2238
2239 - def Spm(self):
2240 """ 2241 Get Source Package Manager interface instance. 2242 2243 #@return: Source Package Manager interface instance 2244 @rtype: entropy.spm.plugins.skel.SpmPlugin based instance 2245 """ 2246 return get_spm(self)
2247
2248 - def Spm_class(self):
2249 """ 2250 Get Source Package Manager interface class. 2251 """ 2252 return get_spm_class()
2253
2254 - def ConfigurationUpdates(self):
2255 """ 2256 Return Entropy Configuration File Updates management object. 2257 """ 2258 return ConfigurationUpdates( 2259 self, _config_class=ServerConfigurationFiles)
2260
2261 - def Transceiver(self, uri):
2262 """ 2263 Get EntropyTransceiver interface instance. 2264 2265 @param uri: EntropyTransceiver URI 2266 @type uri: string 2267 @return: EntropyTransceiver instance 2268 @rtype: entropy.transceivers.EntropyTransceiver 2269 """ 2270 txc = EntropyTransceiver(uri) 2271 txc.set_output_interface(self) 2272 return txc
2273
2274 - def _sync_package_sets(self, entropy_repository):
2275 """ 2276 Synchronize repository package sets. 2277 2278 @param entropy_repository: EntropyRepositoryBase object 2279 @type entropy_repository: entropy.db.skel.EntropyRepositoryBase 2280 """ 2281 repository_id = entropy_repository.repository_id() 2282 self.output( 2283 "[%s|%s] %s" % ( 2284 blue(repository_id), 2285 red(_("repository")), 2286 blue(_("syncing package sets")), 2287 ), 2288 importance = 1, 2289 level = "info", 2290 header = brown(" @@ ") 2291 ) 2292 cur_sets = entropy_repository.retrievePackageSets() 2293 sys_sets = self._get_configured_package_sets(repository_id) 2294 if cur_sets != sys_sets: 2295 self._update_package_sets(repository_id, entropy_repository) 2296 # NOTE: this is called by the commit hook plugin, keep no_plugins=True! 2297 entropy_repository.commit(no_plugins = True)
2298
2299 - def sets_available(self, *args, **kwargs):
2300 sets = Client.Sets(self) 2301 return sets.available(*args, **kwargs)
2302
2303 - def sets_search(self, *args, **kwargs):
2304 sets = Client.Sets(self) 2305 return sets.search(*args, **kwargs)
2306
2307 - def sets_match(self, *args, **kwargs):
2308 sets = Client.Sets(self) 2309 return sets.match(*args, **kwargs)
2310
2311 - def atom_match(self, *args, **kwargs):
2312 # disable masked packages for server-side repos 2313 kwargs['mask_filter'] = False 2314 return Client.atom_match(self, *args, **kwargs)
2315
2316 - def _match_packages(self, repository_id, packages):
2317 2318 dbconn = self.open_server_repository(repository_id, read_only = True, 2319 no_upload = True) 2320 if ("world" in packages) or not packages: 2321 return dbconn.listAllPackageIds(), True 2322 else: 2323 package_ids = set() 2324 for package in packages: 2325 matches = dbconn.atomMatch(package, multiMatch = True) 2326 if matches[1] == 0: 2327 package_ids |= matches[0] 2328 else: 2329 mytxt = "%s: %s: %s" % ( 2330 red(_("Attention")), 2331 blue(_("cannot match")), 2332 bold(package), 2333 ) 2334 self.output( 2335 mytxt, 2336 importance = 1, 2337 level = "warning", 2338 header = darkred(" !!! ") 2339 ) 2340 return package_ids, False
2341
2342 - def mask_packages(self, repository_id, packages):
2343 """ 2344 Mask given package dependencies for given repository, if any (otherwise 2345 use default one). 2346 2347 @param repository_id: repository identifier 2348 @type repository_id: string 2349 @param packages: list of package dependency strings 2350 @type packages: list 2351 @return: mask status, True if ok, False if not 2352 @rtype: bool 2353 """ 2354 mask_file = self._get_local_repository_mask_file(repository_id) 2355 current_packages = [] 2356 2357 if const_file_readable(mask_file): 2358 # don't worry about the race. 2359 current_packages += entropy.tools.generic_file_content_parser( 2360 mask_file, comment_tag = "##", filter_comments = False, 2361 encoding = etpConst['conf_encoding']) 2362 # this is untrusted input, it's fine because that config file is 2363 # untrusted too 2364 current_packages.extend(packages) 2365 2366 mask_file_tmp = mask_file + ".mask_packages_tmp" 2367 enc = etpConst['conf_encoding'] 2368 with codecs.open(mask_file_tmp, "w", encoding=enc) as mask_f: 2369 for package in current_packages: 2370 mask_f.write(package + "\n") 2371 2372 os.rename(mask_file_tmp, mask_file) 2373 2374 return True
2375
2376 - def unmask_packages(self, repository_id, packages):
2377 """ 2378 Unmask given package dependencies for given repository, if any 2379 (otherwise use default one). 2380 2381 @param repository_id: repository identifier 2382 @type repository_id: string 2383 @param packages: list of package dependency strings 2384 @type packages: list 2385 @return: mask status, True if ok, False if not 2386 @rtype: bool 2387 """ 2388 mask_file = self._get_local_repository_mask_file(repository_id) 2389 current_packages = [] 2390 2391 if const_file_readable(mask_file): 2392 # don't worry about the race. 2393 current_packages += entropy.tools.generic_file_content_parser( 2394 mask_file, comment_tag = "##", filter_comments = False, 2395 encoding = etpConst['conf_encoding']) 2396 2397 def mask_filter(package): 2398 if package.startswith("#"): 2399 # comment, always valid 2400 return True 2401 in_file_pkg_match = self.atom_match(package) 2402 for req_package in packages: 2403 if package == req_package: 2404 # of course remove if it's equal 2405 return False 2406 req_package_match = self.atom_match(req_package) 2407 if req_package_match == in_file_pkg_match: 2408 # drop it, they point to the same package match 2409 return False 2410 return True
2411 2412 current_packages = list(filter(mask_filter, current_packages)) 2413 2414 mask_file_tmp = mask_file + ".mask_packages_tmp" 2415 enc = etpConst['conf_encoding'] 2416 with codecs.open(mask_file_tmp, "w", encoding=enc) as mask_f: 2417 for package in current_packages: 2418 mask_f.write(package + "\n") 2419 2420 os.rename(mask_file_tmp, mask_file) 2421 2422 return True
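A standalone sketch of the filtering idea in mask_filter() above: comments are always kept, and a line is dropped when it equals one of the requested packages. The real code additionally drops lines whose atom_match() result equals that of a requested package, which is omitted here:

    requested = ["app-misc/foo"]
    current_packages = ["## masked for QA reasons", "app-misc/foo", "app-misc/bar"]

    def mask_filter(package):
        if package.startswith("#"):
            return True  # comment, always valid
        return package not in requested

    print(list(filter(mask_filter, current_packages)))
    # ['## masked for QA reasons', 'app-misc/bar']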
2423
2424 - def initialize_repository(self, repository_id, ask = True):
2425 """ 2426 Initialize (and wipe all data!) given repository to empty status. 2427 2428 @param repository_id: repository identifier 2429 @type repository_id: string 2430 @keyword ask: ask before making any change? 2431 @type ask: bool 2432 @return: execution status (0 = fine) 2433 @rtype: int 2434 """ 2435 self.output( 2436 "[%s] %s..." % ( 2437 purple(repository_id), darkgreen(_("initializing repository")), 2438 ), 2439 importance = 1, 2440 level = "info", header = darkgreen(" * ") 2441 ) 2442 self.close_repositories() 2443 2444 rc_question = self.ask_question( 2445 "[%s] %s" % ( 2446 purple(repository_id), 2447 teal(_("do you really want to initialize this repository ?")) 2448 ) 2449 ) 2450 if rc_question == _("No"): 2451 return 1 2452 2453 try: 2454 os.remove(self._get_local_repository_file(repository_id)) 2455 except OSError as err: 2456 if err.errno != errno.ENOENT: 2457 raise 2458 2459 # initialize 2460 dbconn = self.open_server_repository(repository_id, read_only = False, 2461 no_upload = True, is_new = True) 2462 dbconn.initializeRepository() 2463 dbconn.commit() 2464 2465 # create the store directory 2466 store_dir = self._get_local_store_directory(repository_id) 2467 if not os.path.isdir(store_dir): 2468 try: 2469 os.makedirs(store_dir) 2470 except (IOError, OSError) as err: 2471 self.output( 2472 "%s: %s" % (_("Cannot create store directory"), err), 2473 header=brown(" !!! "), 2474 importance=1, 2475 level="error") 2476 return 1 2477 2478 # create the upload directory 2479 upload_dir = self._get_local_upload_directory(repository_id) 2480 if not os.path.isdir(upload_dir): 2481 try: 2482 os.makedirs(upload_dir) 2483 except (IOError, OSError) as err: 2484 self.output( 2485 "%s: %s" % (_("Cannot create upload directory"), err), 2486 header=brown(" !!! "), 2487 importance=1, 2488 level="error") 2489 return 1 2490 2491 return 0
2492
2493 - def tag_packages(self, package_matches, package_tag, ask = True):
2494 """ 2495 Change version tag for given package matches. 2496 2497 @param package_matches: list of Entropy package matches 2498 @type package_matches: list 2499 @param package_tag: new Entropy package tag string 2500 @type package_tag: string 2501 @return: execution status (0 = fine) 2502 @rtype: int 2503 """ 2504 try: 2505 package_tag = str(package_tag) 2506 if " " in package_tag: 2507 raise ValueError 2508 except (UnicodeDecodeError, UnicodeEncodeError, ValueError,): 2509 self.output( 2510 "%s: %s" % ( 2511 blue(_("Invalid tag specified")), 2512 package_tag, 2513 ), 2514 importance = 1, level = "error", header = darkred(" !! ") 2515 ) 2516 return 1 2517 2518 pkg_map = {} 2519 for pkg_id, pkg_repo in package_matches: 2520 obj = pkg_map.setdefault(pkg_repo, []) 2521 obj.append(pkg_id) 2522 2523 for pkg_repo in sorted(pkg_map.keys()): 2524 switched = self._move_packages(pkg_map[pkg_repo], pkg_repo, 2525 pkg_repo, ask = ask, do_copy = True, new_tag = package_tag) 2526 if not switched: 2527 return 1 2528 return 0
2529
2530 - def flushback_packages(self, repository_id, from_branches, ask = True):
2531 """ 2532 When creating a new branch, for space reasons, packages are not 2533 moved to a new location. This works fine until old branch is removed. 2534 To avoid inconsistences, before deciding to do that, all the packages 2535 in the old branch should be flushed back to the the currently configured 2536 branch. 2537 2538 @param repository_id: repository identifier 2539 @type repository_id: string 2540 @param from_branches: list of branches to move packages from 2541 @type from_branches: list 2542 @keyword ask: ask before making any change? 2543 @type ask: bool 2544 @return execution status (0 = fine) 2545 @rtype: int 2546 """ 2547 branch = self._settings['repositories']['branch'] 2548 2549 if branch in from_branches: 2550 from_branches = [x for x in from_branches if x != branch] 2551 2552 self.output( 2553 "[%s=>%s|%s] %s" % ( 2554 darkgreen(', '.join(from_branches)), 2555 darkred(branch), 2556 brown(repository_id), 2557 blue(_("flushing back selected packages from branches")), 2558 ), 2559 importance = 2, 2560 level = "info", 2561 header = red(" @@ ") 2562 ) 2563 2564 dbconn = self.open_server_repository(repository_id, read_only = True, 2565 no_upload = True) 2566 2567 package_id_map = dict(((x, [],) for x in from_branches)) 2568 package_ids = dbconn.listAllPackageIds(order_by = 'atom') 2569 for package_id in package_ids: 2570 download_url = dbconn.retrieveDownloadURL(package_id) 2571 url_br = self._get_branch_from_download_relative_uri( 2572 download_url) 2573 if url_br in from_branches: 2574 package_id_map[url_br].append(package_id) 2575 2576 mapped_branches = [x for x in package_id_map if package_id_map[x]] 2577 if not mapped_branches: 2578 self.output( 2579 "[%s=>%s|%s] %s !" % ( 2580 darkgreen(', '.join(from_branches)), 2581 darkred(branch), 2582 brown(repository_id), 2583 blue(_("nothing to do")), 2584 ), 2585 importance = 0, 2586 level = "warning", 2587 header = blue(" @@ ") 2588 ) 2589 return 0 2590 2591 2592 all_fine = True 2593 tmp_down_dir = const_mkdtemp(prefix="entropy.server") 2594 2595 download_queue = {} 2596 dbconn = self.open_server_repository(repository_id, read_only = False, 2597 no_upload = True) 2598 2599 def generate_queue(branch, repository_id, from_branch, down_q, 2600 package_id_map): 2601 2602 self.output( 2603 "[%s=>%s|%s] %s" % ( 2604 darkgreen(from_branch), 2605 darkred(branch), 2606 brown(repository_id), 2607 brown(_("these are the packages that will be flushed")), 2608 ), 2609 importance = 1, 2610 level = "info", 2611 header = brown(" @@ ") 2612 ) 2613 2614 2615 for package_id in package_id_map[from_branch]: 2616 atom = dbconn.retrieveAtom(package_id) 2617 self.output( 2618 "[%s=>%s|%s] %s" % ( 2619 darkgreen(from_branch), 2620 darkred(branch), 2621 brown(repository_id), 2622 purple(atom), 2623 ), 2624 importance = 0, 2625 level = "info", 2626 header = blue(" # ") 2627 ) 2628 pkg_fp = os.path.basename( 2629 dbconn.retrieveDownloadURL(package_id)) 2630 pkg_fp = os.path.join(tmp_down_dir, pkg_fp) 2631 down_q.append((pkg_fp, package_id,))
2632 2633 2634 for from_branch in sorted(mapped_branches): 2635 2636 download_queue[from_branch] = [] 2637 all_fine = False 2638 generate_queue(branch, repository_id, from_branch, 2639 download_queue[from_branch], package_id_map) 2640 2641 if ask: 2642 rc_question = self.ask_question( 2643 _("Would you like to continue ?")) 2644 if rc_question == _("No"): 2645 continue 2646 2647 for uri in self.remote_packages_mirrors(repository_id): 2648 2649 crippled_uri = EntropyTransceiver.get_uri_name(uri) 2650 2651 queue_map = {} 2652 2653 for pkg_fp, package_id in download_queue[from_branch]: 2654 down_url = dbconn.retrieveDownloadURL(package_id) 2655 down_rel = self.complete_remote_package_relative_path( 2656 down_url, repository_id) 2657 down_rel_dir = os.path.dirname(down_rel) 2658 obj = queue_map.setdefault(down_rel_dir, []) 2659 obj.append(pkg_fp) 2660 2661 errors = False 2662 m_fine_uris = set() 2663 m_broken_uris = set() 2664 2665 for down_rel_dir, downloader_queue in queue_map.items(): 2666 2667 downloader = self.Mirrors.TransceiverServerHandler( 2668 self, 2669 [uri], 2670 downloader_queue, 2671 critical_files = downloader_queue, 2672 txc_basedir = down_rel_dir, 2673 local_basedir = tmp_down_dir, 2674 download = True, 2675 repo = repository_id 2676 ) 2677 xerrors, xm_fine_uris, xm_broken_uris = downloader.go() 2678 if xerrors: 2679 errors = True 2680 m_fine_uris.update(xm_fine_uris) 2681 m_broken_uris.update(xm_broken_uris) 2682 2683 if not errors: 2684 for downloaded_path, package_id in \ 2685 download_queue[from_branch]: 2686 2687 self.output( 2688 "[%s=>%s|%s|%s] %s: %s" % ( 2689 darkgreen(from_branch), 2690 darkred(branch), 2691 brown(repository_id), 2692 dbconn.retrieveAtom(package_id), 2693 blue(_("checking package hash")), 2694 darkgreen(os.path.basename(downloaded_path)), 2695 ), 2696 importance = 0, 2697 level = "info", 2698 header = brown(" "), 2699 back = True 2700 ) 2701 2702 md5hash = entropy.tools.md5sum(downloaded_path) 2703 db_md5hash = dbconn.retrieveDigest(package_id) 2704 if md5hash != db_md5hash: 2705 errors = True 2706 self.output( 2707 "[%s=>%s|%s|%s] %s: %s" % ( 2708 darkgreen(from_branch), 2709 darkred(branch), 2710 brown(repository_id), 2711 dbconn.retrieveAtom(package_id), 2712 blue(_("hash does not match for")), 2713 darkgreen(os.path.basename(downloaded_path)), 2714 ), 2715 importance = 0, 2716 level = "error", 2717 header = brown(" ") 2718 ) 2719 continue 2720 2721 if errors: 2722 reason = _("wrong md5") 2723 if m_broken_uris: 2724 my_broken_uris = [ 2725 (EntropyTransceiver.get_uri_name(x), y,) \ 2726 for x, y in m_broken_uris] 2727 reason = my_broken_uris[0][1] 2728 2729 self.output( 2730 "[%s=>%s|%s] %s, %s: %s" % ( 2731 darkgreen(from_branch), 2732 darkred(branch), 2733 brown(repository_id), 2734 blue(_("download errors")), 2735 blue(_("reason")), 2736 reason, 2737 ), 2738 importance = 1, 2739 level = "error", 2740 header = darkred(" !!! 
") 2741 ) 2742 # continuing if possible 2743 continue 2744 2745 all_fine = True 2746 2747 self.output( 2748 "[%s=>%s|%s] %s: %s" % ( 2749 darkgreen(from_branch), 2750 darkred(branch), 2751 brown(repository_id), 2752 blue(_("download completed successfully")), 2753 darkgreen(crippled_uri), 2754 ), 2755 importance = 1, 2756 level = "info", 2757 header = darkgreen(" * ") 2758 ) 2759 2760 if not all_fine: 2761 self.output( 2762 "[%s=>%s|%s] %s" % ( 2763 darkgreen(', '.join(from_branches)), 2764 darkred(branch), 2765 brown(repository_id), 2766 blue(_("error downloading packages from mirrors")), 2767 ), 2768 importance = 2, 2769 level = "error", 2770 header = darkred(" !!! ") 2771 ) 2772 return 1 2773 2774 for from_branch in sorted(mapped_branches): 2775 2776 self.output( 2777 "[%s=>%s|%s] %s: %s" % ( 2778 darkgreen(from_branch), 2779 darkred(branch), 2780 brown(repository_id), 2781 blue(_("working on branch")), 2782 darkgreen(from_branch), 2783 ), 2784 importance = 1, 2785 level = "info", 2786 header = brown(" @@ ") 2787 ) 2788 2789 down_queue = download_queue[from_branch] 2790 for package_path, package_id in down_queue: 2791 2792 self.output( 2793 "[%s=>%s|%s] %s: %s" % ( 2794 darkgreen(from_branch), 2795 darkred(branch), 2796 brown(repository_id), 2797 blue(_("updating package")), 2798 darkgreen(os.path.basename(package_path)), 2799 ), 2800 importance = 1, 2801 level = "info", 2802 header = brown(" "), 2803 back = True 2804 ) 2805 2806 # build new download url 2807 download_url = dbconn.retrieveDownloadURL(package_id) 2808 download_url = \ 2809 self._swap_branch_in_download_relative_uri( 2810 branch, download_url) 2811 2812 # move files to upload 2813 new_package_path = self.complete_local_upload_package_path( 2814 download_url, repository_id) 2815 self._ensure_dir_path(os.path.dirname(new_package_path)) 2816 2817 try: 2818 os.rename(package_path, new_package_path) 2819 except OSError as err: 2820 if err.errno != errno.EXDEV: 2821 raise 2822 shutil.move(package_path, new_package_path) 2823 2824 # update database 2825 dbconn.setDownloadURL(package_id, download_url) 2826 dbconn.commit() 2827 dbconn.switchBranch(package_id, branch) 2828 dbconn.commit() 2829 2830 self.output( 2831 "[%s=>%s|%s] %s: %s" %</