#!/usr/bin/python
# Copyright 2009-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

from __future__ import print_function

import signal
import sys

# This block ensures that ^C interrupts are handled quietly.
try:
	def exithandler(signum, frame):
		signal.signal(signal.SIGINT, signal.SIG_IGN)
		signal.signal(signal.SIGTERM, signal.SIG_IGN)
		sys.exit(128 + signum)

	signal.signal(signal.SIGINT, exithandler)
	signal.signal(signal.SIGTERM, exithandler)

except KeyboardInterrupt:
	sys.exit(128 + signal.SIGINT)

import io
import logging
import optparse
import subprocess
import time
import textwrap
import re

try:
	import portage
except ImportError:
	from os import path as osp
	sys.path.insert(0, osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym"))
	import portage

from portage import os, _encodings, _unicode_encode, _unicode_decode
from _emerge.MetadataRegen import MetadataRegen
from portage.cache.cache_errors import CacheError, StatCollision
from portage.manifest import guessManifestFileType
from portage.util import cmp_sort_key, writemsg_level
from portage import cpv_getkey
from portage.dep import Atom, isjustname
from portage.versions import pkgcmp, pkgsplit, vercmp

try:
	from xml.etree import ElementTree
except ImportError:
	pass
else:
	try:
		from xml.parsers.expat import ExpatError
	except ImportError:
		pass
	else:
		from repoman.utilities import parse_metadata_use
		from repoman.utilities import FindVCS

if sys.hexversion >= 0x3000000:
	long = int

def parse_args(args):
	usage = "egencache [options] ... [atom] ..."
	parser = optparse.OptionParser(usage=usage)

	actions = optparse.OptionGroup(parser, 'Actions')
	actions.add_option("--update",
		action="store_true",
		help="update metadata/cache/ (generate as necessary)")
	actions.add_option("--update-use-local-desc",
		action="store_true",
		help="update the use.local.desc file from metadata.xml")
	actions.add_option("--update-changelogs",
		action="store_true",
		help="update the ChangeLog files from SCM logs")
	parser.add_option_group(actions)

	common = optparse.OptionGroup(parser, 'Common options')
	common.add_option("--repo",
		action="store",
		help="name of repo to operate on (default repo is located at $PORTDIR)")
	common.add_option("--config-root",
		help="location of portage config files",
		dest="portage_configroot")
	common.add_option("--portdir",
		help="override the portage tree location",
		dest="portdir")
	common.add_option("--portdir-overlay",
		help="override the PORTDIR_OVERLAY variable (requires that --repo is also specified)",
		dest="portdir_overlay")
	common.add_option("--tolerant",
		action="store_true",
		help="exit successfully if only minor errors occurred")
	common.add_option("--ignore-default-opts",
		action="store_true",
		help="do not use the EGENCACHE_DEFAULT_OPTS environment variable")
	parser.add_option_group(common)

	update = optparse.OptionGroup(parser, '--update options')
	update.add_option("--cache-dir",
		help="location of the metadata cache",
		dest="cache_dir")
	update.add_option("--jobs",
		action="store",
		help="max ebuild processes to spawn")
	update.add_option("--load-average",
		action="store",
		help="max load allowed when spawning multiple jobs",
		dest="load_average")
	update.add_option("--rsync",
		action="store_true",
		help="enable rsync stat collision workaround " + \
			"for bug 139134 (use with --update)")
	parser.add_option_group(update)

	uld = optparse.OptionGroup(parser, '--update-use-local-desc options')
	uld.add_option("--preserve-comments",
		action="store_true",
		help="preserve the comments from the existing use.local.desc file")
	uld.add_option("--use-local-desc-output",
		help="output file for use.local.desc data (or '-' for stdout)",
		dest="uld_output")
	parser.add_option_group(uld)

	options, args = parser.parse_args(args)

	if options.jobs:
		jobs = None
		try:
			jobs = int(options.jobs)
		except ValueError:
			jobs = -1

		if jobs < 1:
			parser.error("Invalid: --jobs='%s'" % \
				(options.jobs,))

		options.jobs = jobs
	else:
		options.jobs = None

	if options.load_average:
		try:
			load_average = float(options.load_average)
		except ValueError:
			load_average = 0.0

		if load_average <= 0.0:
			parser.error("Invalid: --load-average='%s'" % \
				(options.load_average,))

		options.load_average = load_average
	else:
		options.load_average = None

	options.config_root = options.portage_configroot
	if options.config_root is not None and \
		not os.path.isdir(options.config_root):
		parser.error("Not a directory: --config-root='%s'" % \
			(options.config_root,))

	if options.cache_dir is not None:
		if not os.path.isdir(options.cache_dir):
			parser.error("Not a directory: --cache-dir='%s'" % \
				(options.cache_dir,))
		if not os.access(options.cache_dir, os.W_OK):
			parser.error("Write access denied: --cache-dir='%s'" % \
				(options.cache_dir,))

	if options.portdir_overlay is not None and \
		options.repo is None:
		parser.error("--portdir-overlay option requires --repo option")

	for atom in args:
		try:
			atom = portage.dep.Atom(atom)
		except portage.exception.InvalidAtom:
			parser.error('Invalid atom: %s' % (atom,))

		if not isjustname(atom):
			parser.error('Atom is too specific: %s' % (atom,))

	if options.update_use_local_desc:
		try:
			ElementTree
			ExpatError
		except NameError:
			parser.error('--update-use-local-desc requires python with USE=xml!')

	if options.uld_output == '-' and options.preserve_comments:
		parser.error('--preserve-comments can not be used when outputting to stdout')

	return parser, options, args

class GenCache(object):
	def __init__(self, portdb, cp_iter=None, max_jobs=None, max_load=None,
		rsync=False):
		# The caller must set portdb.porttrees in order to constrain
		# findname, cp_list, and cpv_list to the desired tree.
		tree = portdb.porttrees[0]
		self._portdb = portdb
		self._eclass_db = portdb._repo_info[tree].eclass_db
		self._auxdbkeys = portdb._known_keys
		# We can globally cleanse stale cache only if we
		# iterate over every single cp.
		self._global_cleanse = cp_iter is None
		if cp_iter is not None:
			self._cp_set = set(cp_iter)
			cp_iter = iter(self._cp_set)
			self._cp_missing = self._cp_set.copy()
		else:
			self._cp_set = None
			self._cp_missing = set()
		self._regen = MetadataRegen(portdb, cp_iter=cp_iter,
			consumer=self._metadata_callback,
			max_jobs=max_jobs, max_load=max_load)
		self.returncode = os.EX_OK
		conf = portdb.repositories.get_repo_for_location(tree)
		self._trg_caches = tuple(conf.iter_pregenerated_caches(
			self._auxdbkeys, force=True, readonly=False))
		if not self._trg_caches:
			raise Exception("cache formats '%s' aren't supported" %
				(" ".join(conf.cache_formats),))

		if rsync:
			for trg_cache in self._trg_caches:
				if hasattr(trg_cache, 'raise_stat_collision'):
					trg_cache.raise_stat_collision = True
					# Make _metadata_callback write this cache first, in case
					# it raises a StatCollision and triggers mtime
					# modification.
					self._trg_caches = tuple([trg_cache] +
						[x for x in self._trg_caches if x is not trg_cache])

		self._existing_nodes = set()

	def _metadata_callback(self, cpv, repo_path, metadata, ebuild_hash):
		self._existing_nodes.add(cpv)
		self._cp_missing.discard(cpv_getkey(cpv))
		if metadata is not None:
			if metadata.get('EAPI') == '0':
				del metadata['EAPI']
			for trg_cache in self._trg_caches:
				self._write_cache(trg_cache,
					cpv, repo_path, metadata, ebuild_hash)

	def _write_cache(self, trg_cache, cpv, repo_path, metadata, ebuild_hash):

		if not hasattr(trg_cache, 'raise_stat_collision'):
			# This cache does not avoid redundant writes automatically,
			# so check for an identical existing entry before writing.
			# This prevents unnecessary disk writes and can also prevent
			# unnecessary rsync transfers.
			try:
				dest = trg_cache[cpv]
			except (KeyError, CacheError):
				pass
			else:
				if trg_cache.validate_entry(dest,
					ebuild_hash, self._eclass_db):
					identical = True
					for k in self._auxdbkeys:
						if dest.get(k, '') != metadata.get(k, ''):
							identical = False
							break
					if identical:
						return

		try:
			chf = trg_cache.validation_chf
			metadata['_%s_' % chf] = getattr(ebuild_hash, chf)
			try:
				trg_cache[cpv] = metadata
			except StatCollision as sc:
				# If the content of a cache entry changes and neither the
				# file mtime nor size changes, it will prevent rsync from
				# detecting changes. Cache backends may raise this
				# exception from _setitem() if they detect this type of stat
				# collision. These exceptions are handled by bumping the
				# mtime on the ebuild (and the corresponding cache entry).
				# See bug #139134. It is convenient to include checks for
				# redundant writes along with the internal StatCollision
				# detection code, so for caches with the
				# raise_stat_collision attribute, we do not need to
				# explicitly check for redundant writes like we do for the
				# other cache types above.
				max_mtime = sc.mtime
				for ec, ec_hash in metadata['_eclasses_'].items():
					if max_mtime < ec_hash.mtime:
						max_mtime = ec_hash.mtime
				if max_mtime == sc.mtime:
					max_mtime += 1
				max_mtime = long(max_mtime)
				try:
					os.utime(ebuild_hash.location, (max_mtime, max_mtime))
				except OSError as e:
					self.returncode |= 1
					writemsg_level(
						"%s writing target: %s\n" % (cpv, e),
						level=logging.ERROR, noiselevel=-1)
				else:
					ebuild_hash.mtime = max_mtime
					metadata['_mtime_'] = max_mtime
					trg_cache[cpv] = metadata
					self._portdb.auxdb[repo_path][cpv] = metadata

		except CacheError as ce:
			self.returncode |= 1
			writemsg_level(
				"%s writing target: %s\n" % (cpv, ce),
				level=logging.ERROR, noiselevel=-1)

	def run(self):
		received_signal = []

		def sighandler(signum, frame):
			signal.signal(signal.SIGINT, signal.SIG_IGN)
			signal.signal(signal.SIGTERM, signal.SIG_IGN)
			self._regen.terminate()
			received_signal.append(128 + signum)

		earlier_sigint_handler = signal.signal(signal.SIGINT, sighandler)
		earlier_sigterm_handler = signal.signal(signal.SIGTERM, sighandler)

		try:
			self._regen.run()
		finally:
			# Restore previous handlers
			if earlier_sigint_handler is not None:
				signal.signal(signal.SIGINT, earlier_sigint_handler)
			else:
				signal.signal(signal.SIGINT, signal.SIG_DFL)
			if earlier_sigterm_handler is not None:
				signal.signal(signal.SIGTERM, earlier_sigterm_handler)
			else:
				signal.signal(signal.SIGTERM, signal.SIG_DFL)

		if received_signal:
			sys.exit(received_signal[0])

		self.returncode |= self._regen.returncode

		for trg_cache in self._trg_caches:
			self._cleanse_cache(trg_cache)

	def _cleanse_cache(self, trg_cache):
		cp_missing = self._cp_missing
		dead_nodes = set()
		if self._global_cleanse:
			try:
				for cpv in trg_cache:
					cp = cpv_getkey(cpv)
					if cp is None:
						self.returncode |= 1
						writemsg_level(
							"Unable to parse cp for '%s'\n" % (cpv,),
							level=logging.ERROR, noiselevel=-1)
					else:
						dead_nodes.add(cpv)
			except CacheError as ce:
				self.returncode |= 1
				writemsg_level(
					"Error listing cache entries for " + \
					"'%s/metadata/cache': %s, continuing...\n" % \
					(self._portdb.porttree_root, ce),
					level=logging.ERROR, noiselevel=-1)
		else:
			cp_set = self._cp_set
			try:
				for cpv in trg_cache:
					cp = cpv_getkey(cpv)
					if cp is None:
						self.returncode |= 1
						writemsg_level(
							"Unable to parse cp for '%s'\n" % (cpv,),
							level=logging.ERROR, noiselevel=-1)
					else:
						cp_missing.discard(cp)
						if cp in cp_set:
							dead_nodes.add(cpv)
			except CacheError as ce:
				self.returncode |= 1
				writemsg_level(
					"Error listing cache entries for " + \
					"'%s/metadata/cache': %s, continuing...\n" % \
					(self._portdb.porttree_root, ce),
					level=logging.ERROR, noiselevel=-1)

		if cp_missing:
			self.returncode |= 1
			for cp in sorted(cp_missing):
				writemsg_level(
					"No ebuilds or cache entries found for '%s'\n" % (cp,),
					level=logging.ERROR, noiselevel=-1)

		if dead_nodes:
			dead_nodes.difference_update(self._existing_nodes)
			for k in dead_nodes:
				try:
					del trg_cache[k]
				except KeyError:
					pass
				except CacheError as ce:
					self.returncode |= 1
					writemsg_level(
						"%s deleting stale cache: %s\n" % (k, ce),
						level=logging.ERROR, noiselevel=-1)

		if not trg_cache.autocommits:
			try:
				trg_cache.commit()
			except CacheError as ce:
				self.returncode |= 1
				writemsg_level(
					"committing target: %s\n" % (ce,),
					level=logging.ERROR, noiselevel=-1)

class GenUseLocalDesc(object):
	def __init__(self, portdb, output=None,
			preserve_comments=False):
		self.returncode = os.EX_OK
		self._portdb = portdb
		self._output = output
		self._preserve_comments = preserve_comments

	def run(self):
		repo_path = self._portdb.porttrees[0]
		ops = {'<':0, '<=':1, '=':2, '>=':3, '>':4}

		if self._output is None or self._output != '-':
			if self._output is None:
				prof_path = os.path.join(repo_path, 'profiles')
				desc_path = os.path.join(prof_path, 'use.local.desc')
				try:
					os.mkdir(prof_path)
				except OSError:
					pass
			else:
				desc_path = self._output

			try:
				if self._preserve_comments:
					# Probe in binary mode, in order to avoid
					# potential character encoding issues.
					output = open(_unicode_encode(desc_path,
						encoding=_encodings['fs'], errors='strict'), 'r+b')
				else:
					output = io.open(_unicode_encode(desc_path,
						encoding=_encodings['fs'], errors='strict'),
						mode='w', encoding=_encodings['repo.content'],
						errors='backslashreplace')
			except IOError as e:
				if not self._preserve_comments or \
					os.path.isfile(desc_path):
					writemsg_level(
						"ERROR: failed to open output file %s: %s\n" \
						% (desc_path, e), level=logging.ERROR, noiselevel=-1)
					self.returncode |= 2
					return

				# Open in r+b mode failed because the file doesn't
				# exist yet. We can probably recover if we disable
				# preserve_comments mode now.
				writemsg_level(
					"WARNING: --preserve-comments enabled, but " + \
					"output file not found: %s\n" % (desc_path,),
					level=logging.WARNING, noiselevel=-1)
				self._preserve_comments = False
				try:
					output = io.open(_unicode_encode(desc_path,
						encoding=_encodings['fs'], errors='strict'),
						mode='w', encoding=_encodings['repo.content'],
						errors='backslashreplace')
				except IOError as e:
					writemsg_level(
						"ERROR: failed to open output file %s: %s\n" \
						% (desc_path, e), level=logging.ERROR, noiselevel=-1)
					self.returncode |= 2
					return
		else:
			output = sys.stdout

		if self._preserve_comments:
			while True:
				pos = output.tell()
				if not output.readline().startswith(b'#'):
					break
			output.seek(pos)
			output.truncate()
			output.close()

			# Finished probing comments in binary mode, now append
			# in text mode.
			output = io.open(_unicode_encode(desc_path,
				encoding=_encodings['fs'], errors='strict'),
				mode='a', encoding=_encodings['repo.content'],
				errors='backslashreplace')
			output.write(_unicode_decode('\n'))
		else:
			output.write(_unicode_decode('''
# This file is deprecated as per GLEP 56 in favor of metadata.xml. Please add
# your descriptions to your package's metadata.xml ONLY.
# * generated automatically using egencache *

'''.lstrip()))

		# The cmp function no longer exists in python3, so we'll
		# implement our own here under a slightly different name
		# since we don't want any confusion given that we never
		# want to rely on the builtin cmp function.
		def cmp_func(a, b):
			if a is None or b is None:
				# None can't be compared with other types in python3.
				if a is None and b is None:
					return 0
				elif a is None:
					return -1
				else:
					return 1
			return (a > b) - (a < b)

		class _MetadataTreeBuilder(ElementTree.TreeBuilder):
			"""
			Implements doctype() as required to avoid deprecation warnings
			since Python >=2.7
			"""
			def doctype(self, name, pubid, system):
				pass

		for cp in self._portdb.cp_all():
			metadata_path = os.path.join(repo_path, cp, 'metadata.xml')
			try:
				metadata = ElementTree.parse(metadata_path,
					parser=ElementTree.XMLParser(
					target=_MetadataTreeBuilder()))
			except IOError:
				pass
			except (ExpatError, EnvironmentError) as e:
				writemsg_level(
					"ERROR: failed parsing %s/metadata.xml: %s\n" % (cp, e),
					level=logging.ERROR, noiselevel=-1)
				self.returncode |= 1
			else:
				try:
					usedict = parse_metadata_use(metadata)
				except portage.exception.ParseError as e:
					writemsg_level(
						"ERROR: failed parsing %s/metadata.xml: %s\n" % (cp, e),
						level=logging.ERROR, noiselevel=-1)
					self.returncode |= 1
				else:
					for flag in sorted(usedict):
						def atomcmp(atoma, atomb):
							# None is better than an atom, that's why we reverse the args
							if atoma is None or atomb is None:
								return cmp_func(atomb, atoma)
							# Same for plain PNs (.operator is None then)
							elif atoma.operator is None or atomb.operator is None:
								return cmp_func(atomb.operator, atoma.operator)
							# Version matching
							elif atoma.cpv != atomb.cpv:
								return pkgcmp(pkgsplit(atoma.cpv), pkgsplit(atomb.cpv))
							# Versions match, let's fallback to operator matching
							else:
								return cmp_func(ops.get(atoma.operator, -1),
									ops.get(atomb.operator, -1))

						def _Atom(key):
							if key is not None:
								return Atom(key)
							return None

						resdict = usedict[flag]
						if len(resdict) == 1:
							resdesc = next(iter(resdict.items()))[1]
						else:
							try:
								reskeys = dict((_Atom(k), k) for k in resdict)
							except portage.exception.InvalidAtom as e:
								writemsg_level(
									"ERROR: failed parsing %s/metadata.xml: %s\n" % (cp, e),
									level=logging.ERROR, noiselevel=-1)
								self.returncode |= 1
								resdesc = next(iter(resdict.items()))[1]
							else:
								resatoms = sorted(reskeys, key=cmp_sort_key(atomcmp))
								resdesc = resdict[reskeys[resatoms[-1]]]

						output.write(_unicode_decode(
							'%s:%s - %s\n' % (cp, flag, resdesc)))

		output.close()

if sys.hexversion < 0x3000000:
	_filename_base = unicode
else:
	_filename_base = str

class _special_filename(_filename_base):
	"""
	Helps to sort file names by file type and other criteria.
	"""
	def __new__(cls, status_change, file_name):
		return _filename_base.__new__(cls, status_change + file_name)

	def __init__(self, status_change, file_name):
		_filename_base.__init__(status_change + file_name)
		self.status_change = status_change
		self.file_name = file_name
		self.file_type = guessManifestFileType(file_name)

	def file_type_lt(self, a, b):
		"""
		Defines an ordering between file types.
		"""
		first = a.file_type
		second = b.file_type
		if first == second:
			return False

		if first == "EBUILD":
			return True
		elif first == "MISC":
			return second in ("EBUILD",)
		elif first == "AUX":
			return second in ("EBUILD", "MISC")
		elif first == "DIST":
			return second in ("EBUILD", "MISC", "AUX")
		elif first is None:
			return False
		else:
			raise ValueError("Unknown file type '%s'" % first)

	def __lt__(self, other):
		"""
		Compare different file names, first by file type and then
		for ebuilds by version and lexicographically for others.
		EBUILD < MISC < AUX < DIST < None
		"""
		if self.__class__ != other.__class__:
			raise NotImplementedError

		# Sort by file type as defined by file_type_lt().
		if self.file_type_lt(self, other):
			return True
		elif self.file_type_lt(other, self):
			return False

		# Files have the same type.
		if self.file_type == "EBUILD":
			# Sort by version. Lowest first.
			ver = "-".join(pkgsplit(self.file_name[:-7])[1:3])
			other_ver = "-".join(pkgsplit(other.file_name[:-7])[1:3])
			return vercmp(ver, other_ver) < 0
		else:
			# Sort lexicographically.
			return self.file_name < other.file_name

class GenChangeLogs(object):
	def __init__(self, portdb):
		self.returncode = os.EX_OK
		self._portdb = portdb
		self._wrapper = textwrap.TextWrapper(
				width = 78,
				initial_indent = '  ',
				subsequent_indent = '  '
			)

	@staticmethod
	def grab(cmd):
		p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
		return _unicode_decode(p.communicate()[0],
				encoding=_encodings['stdio'], errors='strict')

	def generate_changelog(self, cp):
		try:
			output = io.open('ChangeLog',
				mode='w', encoding=_encodings['repo.content'],
				errors='backslashreplace')
		except IOError as e:
			writemsg_level(
				"ERROR: failed to open ChangeLog for %s: %s\n" % (cp, e,),
				level=logging.ERROR, noiselevel=-1)
			self.returncode |= 2
			return

		output.write(_unicode_decode('''
# ChangeLog for %s
# Copyright 1999-%s Gentoo Foundation; Distributed under the GPL v2
# $Header: $

''' % (cp, time.strftime('%Y'))).lstrip())

		# now grab all the commits
		commits = self.grab(['git', 'rev-list', 'HEAD', '--', '.']).split()

		for c in commits:
			# Explaining the arguments:
			# --name-status to get a list of added/removed files
			# --no-renames to avoid getting more complex records on the list
			# --format to get the timestamp, author and commit description
			# --root to make it work fine even with the initial commit
			# --relative to get paths relative to ebuilddir
			# -r (recursive) to get per-file changes
			# then the commit-id and path.
			cinfo = self.grab(['git', 'diff-tree', '--name-status',
					'--no-renames', '--format=%ct %cN <%cE>%n%B',
					'--root', '--relative', '-r',
					c, '--', '.']).rstrip('\n').split('\n')

			# Expected output:
			# timestamp Author Name <author@email>
			# commit message l1
			# ...
			# commit message ln
			#
			# status1 filename1
			# ...
			# statusn filenamen

			changed = []
			for n, l in enumerate(reversed(cinfo)):
				if not l:
					body = cinfo[1:-n-1]
					break
				else:
					f = l.split()
					if f[1] == 'Manifest':
						pass # XXX: remanifest commits?
					elif f[1] == 'ChangeLog':
						pass
					elif f[0].startswith('A'):
						changed.append(_special_filename("+", f[1]))
					elif f[0].startswith('D'):
						changed.append(_special_filename("-", f[1]))
					elif f[0].startswith('M'):
						changed.append(_special_filename("", f[1]))
					else:
						writemsg_level(
							"ERROR: unexpected git file status for %s: %s\n" % (cp, f,),
							level=logging.ERROR, noiselevel=-1)
						self.returncode |= 1

			if not changed:
				continue

			(ts, author) = cinfo[0].split(' ', 1)
			date = time.strftime('%d %b %Y', time.gmtime(float(ts)))

			changed = [str(x) for x in sorted(changed)]

			wroteheader = False
			# Reverse the sort order for headers.
			for c in reversed(changed):
				if c.startswith('+') and c.endswith('.ebuild'):
					output.write(_unicode_decode(
						'*%s (%s)\n' % (c[1:-7], date)))
					wroteheader = True
			if wroteheader:
				output.write(_unicode_decode('\n'))

			# strip '<cp>: ', '[<cp>] ', and similar
			body[0] = re.sub(r'^\W*' + re.escape(cp) + r'\W+', '', body[0])

			# strip trailing newline
			if not body[-1]:
				body = body[:-1]

			# strip git-svn id
			if body[-1].startswith('git-svn-id:') and not body[-2]:
				body = body[:-2]
			# strip the repoman version/manifest note
			if body[-1] == ' (Signed Manifest commit)' or body[-1] == ' (Unsigned Manifest commit)':
				body = body[:-1]
			if body[-1].startswith('(Portage version:') and body[-1].endswith(')'):
				body = body[:-1]
			if not body[-1]:
				body = body[:-1]

			# don't break filenames on hyphens
			self._wrapper.break_on_hyphens = False
			output.write(_unicode_decode(
				self._wrapper.fill(
					'%s; %s %s:' % (date, author, ', '.join(changed)))))
			# but feel free to break commit messages there
			self._wrapper.break_on_hyphens = True
			output.write(_unicode_decode(
				'\n%s\n\n' % '\n'.join(self._wrapper.fill(x) for x in body)))

		output.close()

	def run(self):
		repo_path = self._portdb.porttrees[0]
		os.chdir(repo_path)

		if 'git' not in FindVCS():
			writemsg_level(
				"ERROR: --update-changelogs supported only in git repos\n",
				level=logging.ERROR, noiselevel=-1)
			self.returncode = 127
			return

		for cp in self._portdb.cp_all():
			os.chdir(os.path.join(repo_path, cp))
			# Determine whether ChangeLog is up-to-date by comparing
			# the newest commit timestamp with the ChangeLog timestamp.
			lmod = self.grab(['git', 'log', '--format=%ct', '-1', '.'])
			if not lmod:
				# This cp has not been added to the repo.
				continue

			try:
				cmod = os.stat('ChangeLog').st_mtime
			except OSError:
				cmod = 0

			if float(cmod) < float(lmod):
				self.generate_changelog(cp)

def egencache_main(args):
	parser, options, atoms = parse_args(args)

	config_root = options.config_root

	# The calling environment is ignored, so the program is
	# completely controlled by commandline arguments.
	env = {}

	if options.repo is None:
		env['PORTDIR_OVERLAY'] = ''
	elif options.portdir_overlay:
		env['PORTDIR_OVERLAY'] = options.portdir_overlay

	if options.cache_dir is not None:
		env['PORTAGE_DEPCACHEDIR'] = options.cache_dir

	if options.portdir is not None:
		env['PORTDIR'] = options.portdir

	eprefix = os.environ.get("__PORTAGE_TEST_EPREFIX")

	settings = portage.config(config_root=config_root,
		local_config=False, env=env, _eprefix=eprefix)

	default_opts = None
	if not options.ignore_default_opts:
		default_opts = settings.get('EGENCACHE_DEFAULT_OPTS', '').split()

	if default_opts:
		parser, options, atoms = parse_args(default_opts + args)

		if options.cache_dir is not None:
			env['PORTAGE_DEPCACHEDIR'] = options.cache_dir

		settings = portage.config(config_root=config_root,
			local_config=False, env=env, _eprefix=eprefix)

	if not options.update and not options.update_use_local_desc \
			and not options.update_changelogs:
		parser.error('No action specified')
		return 1

	if options.update and 'metadata-transfer' not in settings.features:
		settings.features.add('metadata-transfer')

	settings.lock()

	portdb = portage.portdbapi(mysettings=settings)

	if options.update:
		if options.cache_dir is not None:
			# already validated earlier
			pass
		else:
			# We check write access after the portdbapi constructor
			# has had an opportunity to create it. This ensures that
			# we don't use the cache in the "volatile" mode which is
			# undesirable for egencache.
			if not os.access(settings["PORTAGE_DEPCACHEDIR"], os.W_OK):
				writemsg_level("egencache: error: " + \
					"write access denied: %s\n" % (settings["PORTAGE_DEPCACHEDIR"],),
					level=logging.ERROR, noiselevel=-1)
				return 1

	if options.repo is not None:
		repo_path = portdb.getRepositoryPath(options.repo)
		if repo_path is None:
			parser.error("Unable to locate repository named '%s'" % \
				(options.repo,))
			return 1

		# Limit ebuilds to the specified repo.
		portdb.porttrees = [repo_path]
	else:
		portdb.porttrees = [portdb.porttree_root]

	ret = [os.EX_OK]

	if options.update:
		cp_iter = None
		if atoms:
			cp_iter = iter(atoms)
		gen_cache = GenCache(portdb, cp_iter=cp_iter,
			max_jobs=options.jobs,
			max_load=options.load_average,
			rsync=options.rsync)
		gen_cache.run()
		if options.tolerant:
			ret.append(os.EX_OK)
		else:
			ret.append(gen_cache.returncode)

	if options.update_use_local_desc:
		gen_desc = GenUseLocalDesc(portdb,
			output=options.uld_output,
			preserve_comments=options.preserve_comments)
		gen_desc.run()
		ret.append(gen_desc.returncode)

	if options.update_changelogs:
		gen_clogs = GenChangeLogs(portdb)
		gen_clogs.run()
		ret.append(gen_clogs.returncode)

	return max(ret)

if __name__ == "__main__":
	portage._disable_legacy_globals()
	portage.util.noiselimit = -1
	sys.exit(egencache_main(sys.argv[1:]))