Diffstat (limited to 'lib/portage')
-rw-r--r-- lib/portage/__init__.py | 177
-rw-r--r-- lib/portage/_compat_upgrade/binpkg_compression.py | 4
-rw-r--r-- lib/portage/_compat_upgrade/binpkg_format.py | 51
-rw-r--r-- lib/portage/_compat_upgrade/binpkg_multi_instance.py | 8
-rw-r--r-- lib/portage/_compat_upgrade/default_locations.py | 12
-rw-r--r-- lib/portage/_compat_upgrade/meson.build | 11
-rw-r--r-- lib/portage/_emirrordist/Config.py | 17
-rw-r--r-- lib/portage/_emirrordist/ContentDB.py | 16
-rw-r--r-- lib/portage/_emirrordist/DeletionIterator.py | 26
-rw-r--r-- lib/portage/_emirrordist/DeletionTask.py | 42
-rw-r--r-- lib/portage/_emirrordist/FetchIterator.py | 41
-rw-r--r-- lib/portage/_emirrordist/FetchTask.py | 121
-rw-r--r-- lib/portage/_emirrordist/MirrorDistTask.py | 65
-rw-r--r-- lib/portage/_emirrordist/main.py | 26
-rw-r--r-- lib/portage/_emirrordist/meson.build | 15
-rw-r--r-- lib/portage/_global_updates.py | 91
-rw-r--r-- lib/portage/_selinux.py | 21
-rw-r--r-- lib/portage/_sets/ProfilePackageSet.py | 6
-rw-r--r-- lib/portage/_sets/__init__.py | 50
-rw-r--r-- lib/portage/_sets/base.py | 18
-rw-r--r-- lib/portage/_sets/dbapi.py | 74
-rw-r--r-- lib/portage/_sets/files.py | 59
-rw-r--r-- lib/portage/_sets/libs.py | 7
-rw-r--r-- lib/portage/_sets/meson.build | 15
-rw-r--r-- lib/portage/_sets/profiles.py | 12
-rw-r--r-- lib/portage/_sets/security.py | 4
-rw-r--r-- lib/portage/_sets/shell.py | 6
-rw-r--r-- lib/portage/binpkg.py | 76
-rw-r--r-- lib/portage/binrepo/config.py | 2
-rw-r--r-- lib/portage/binrepo/meson.build | 8
-rw-r--r-- lib/portage/cache/anydbm.py | 26
-rw-r--r-- lib/portage/cache/cache_errors.py | 17
-rw-r--r-- lib/portage/cache/ebuild_xattr.py | 17
-rw-r--r-- lib/portage/cache/flat_hash.py | 22
-rw-r--r-- lib/portage/cache/fs_template.py | 7
-rw-r--r-- lib/portage/cache/index/IndexStreamIterator.py | 4
-rw-r--r-- lib/portage/cache/index/meson.build | 9
-rw-r--r-- lib/portage/cache/index/pkg_desc_index.py | 3
-rw-r--r-- lib/portage/cache/mappings.py | 329
-rw-r--r-- lib/portage/cache/meson.build | 20
-rw-r--r-- lib/portage/cache/metadata.py | 20
-rw-r--r-- lib/portage/cache/sql_template.py | 28
-rw-r--r-- lib/portage/cache/sqlite.py | 44
-rw-r--r-- lib/portage/cache/template.py | 41
-rw-r--r-- lib/portage/cache/volatile.py | 3
-rw-r--r-- lib/portage/checksum.py | 262
-rw-r--r-- lib/portage/const.py | 127
-rw-r--r-- lib/portage/cvstree.py | 12
-rw-r--r-- lib/portage/data.py | 63
-rw-r--r-- lib/portage/dbapi/IndexedPortdb.py | 8
-rw-r--r-- lib/portage/dbapi/_ContentsCaseSensitivityManager.py | 8
-rw-r--r-- lib/portage/dbapi/_MergeProcess.py | 142
-rw-r--r-- lib/portage/dbapi/_SyncfsProcess.py | 31
-rw-r--r-- lib/portage/dbapi/_VdbMetadataDelta.py | 11
-rw-r--r-- lib/portage/dbapi/__init__.py | 113
-rw-r--r-- lib/portage/dbapi/_similar_name_search.py | 1
-rw-r--r-- lib/portage/dbapi/bintree.py | 1245
-rw-r--r-- lib/portage/dbapi/meson.build | 22
-rw-r--r-- lib/portage/dbapi/porttree.py | 244
-rw-r--r-- lib/portage/dbapi/vartree.py | 665
-rw-r--r-- lib/portage/dbapi/virtual.py | 21
-rw-r--r-- lib/portage/debug.py | 12
-rw-r--r-- lib/portage/dep/__init__.py | 242
-rw-r--r-- lib/portage/dep/_dnf.py | 10
-rw-r--r-- lib/portage/dep/_slot_operator.py | 18
-rw-r--r-- lib/portage/dep/dep_check.py | 61
-rw-r--r-- lib/portage/dep/libc.py | 83
-rw-r--r-- lib/portage/dep/meson.build | 13
-rw-r--r-- lib/portage/dep/soname/SonameAtom.py | 15
-rw-r--r-- lib/portage/dep/soname/meson.build | 10
-rw-r--r-- lib/portage/dep/soname/multilib_category.py | 41
-rw-r--r-- lib/portage/dispatch_conf.py | 106
-rw-r--r-- lib/portage/eapi.py | 495
-rw-r--r-- lib/portage/eclass_cache.py | 11
-rw-r--r-- lib/portage/elog/__init__.py | 9
-rw-r--r-- lib/portage/elog/meson.build | 16
-rw-r--r-- lib/portage/elog/messages.py | 4
-rw-r--r-- lib/portage/elog/mod_custom.py | 73
-rw-r--r-- lib/portage/elog/mod_echo.py | 16
-rw-r--r-- lib/portage/elog/mod_mail.py | 2
-rw-r--r-- lib/portage/elog/mod_mail_summary.py | 4
-rw-r--r-- lib/portage/elog/mod_save.py | 10
-rw-r--r-- lib/portage/elog/mod_save_summary.py | 9
-rw-r--r-- lib/portage/elog/mod_syslog.py | 2
-rw-r--r-- lib/portage/emaint/main.py | 44
-rw-r--r-- lib/portage/emaint/meson.build | 11
-rw-r--r-- lib/portage/emaint/modules/binhost/binhost.py | 8
-rw-r--r-- lib/portage/emaint/modules/binhost/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/config/config.py | 5
-rw-r--r-- lib/portage/emaint/modules/config/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/logs/logs.py | 5
-rw-r--r-- lib/portage/emaint/modules/logs/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/merges/merges.py | 32
-rw-r--r-- lib/portage/emaint/modules/merges/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/meson.build | 16
-rw-r--r-- lib/portage/emaint/modules/move/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/move/move.py | 13
-rw-r--r-- lib/portage/emaint/modules/resume/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/resume/resume.py | 9
-rw-r--r-- lib/portage/emaint/modules/sync/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/sync/sync.py | 8
-rw-r--r-- lib/portage/emaint/modules/world/meson.build | 8
-rw-r--r-- lib/portage/emaint/modules/world/world.py | 9
-rw-r--r-- lib/portage/env/config.py | 18
-rw-r--r-- lib/portage/env/loaders.py | 8
-rw-r--r-- lib/portage/env/meson.build | 10
-rw-r--r-- lib/portage/exception.py | 38
-rw-r--r-- lib/portage/getbinpkg.py | 254
-rw-r--r-- lib/portage/glsa.py | 206
-rw-r--r-- lib/portage/gpg.py | 108
-rw-r--r-- lib/portage/gpkg.py | 2130
-rw-r--r-- lib/portage/installation.py | 21
-rw-r--r-- lib/portage/localization.py | 8
-rw-r--r-- lib/portage/locks.py | 82
-rw-r--r-- lib/portage/mail.py | 21
-rw-r--r-- lib/portage/manifest.py | 424
-rw-r--r-- lib/portage/meson.build | 74
-rw-r--r-- lib/portage/metadata.py | 34
-rw-r--r-- lib/portage/module.py | 74
-rw-r--r-- lib/portage/news.py | 192
-rw-r--r-- lib/portage/output.py | 136
-rw-r--r-- lib/portage/package/ebuild/_config/LicenseManager.py | 1
-rw-r--r-- lib/portage/package/ebuild/_config/LocationsManager.py | 29
-rw-r--r-- lib/portage/package/ebuild/_config/UseManager.py | 176
-rw-r--r-- lib/portage/package/ebuild/_config/VirtualsManager.py | 4
-rw-r--r-- lib/portage/package/ebuild/_config/env_var_validation.py | 2
-rw-r--r-- lib/portage/package/ebuild/_config/helper.py | 2
-rw-r--r-- lib/portage/package/ebuild/_config/meson.build | 17
-rw-r--r-- lib/portage/package/ebuild/_config/special_env_vars.py | 512
-rw-r--r-- lib/portage/package/ebuild/_config/unpack_dependencies.py | 55
-rw-r--r-- lib/portage/package/ebuild/_ipc/ExitCommand.py | 2
-rw-r--r-- lib/portage/package/ebuild/_ipc/IpcCommand.py | 1
-rw-r--r-- lib/portage/package/ebuild/_ipc/QueryCommand.py | 29
-rw-r--r-- lib/portage/package/ebuild/_ipc/meson.build | 10
-rw-r--r-- lib/portage/package/ebuild/_metadata_invalid.py | 5
-rw-r--r-- lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py | 37
-rw-r--r-- lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py | 7
-rw-r--r-- lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py | 3
-rw-r--r-- lib/portage/package/ebuild/_parallel_manifest/meson.build | 10
-rw-r--r-- lib/portage/package/ebuild/config.py | 269
-rw-r--r-- lib/portage/package/ebuild/deprecated_profile_check.py | 4
-rw-r--r-- lib/portage/package/ebuild/digestcheck.py | 2
-rw-r--r-- lib/portage/package/ebuild/digestgen.py | 10
-rw-r--r-- lib/portage/package/ebuild/doebuild.py | 571
-rw-r--r-- lib/portage/package/ebuild/fetch.py | 84
-rw-r--r-- lib/portage/package/ebuild/getmaskingstatus.py | 13
-rw-r--r-- lib/portage/package/ebuild/meson.build | 23
-rw-r--r-- lib/portage/package/ebuild/prepare_build_dirs.py | 41
-rw-r--r-- lib/portage/package/meson.build | 9
-rw-r--r-- lib/portage/process.py | 997
-rw-r--r-- lib/portage/proxy/lazyimport.py | 12
-rw-r--r-- lib/portage/proxy/meson.build | 9
-rw-r--r-- lib/portage/proxy/objectproxy.py | 1
-rw-r--r-- lib/portage/repository/config.py | 84
-rw-r--r-- lib/portage/repository/meson.build | 10
-rw-r--r-- lib/portage/repository/storage/hardlink_quarantine.py | 8
-rw-r--r-- lib/portage/repository/storage/hardlink_rcu.py | 10
-rw-r--r-- lib/portage/repository/storage/meson.build | 11
-rw-r--r-- lib/portage/sync/controller.py | 24
-rw-r--r-- lib/portage/sync/meson.build | 14
-rw-r--r-- lib/portage/sync/modules/cvs/cvs.py | 4
-rw-r--r-- lib/portage/sync/modules/cvs/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/git/__init__.py | 1
-rw-r--r-- lib/portage/sync/modules/git/git.py | 444
-rw-r--r-- lib/portage/sync/modules/git/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/mercurial/mercurial.py | 40
-rw-r--r-- lib/portage/sync/modules/mercurial/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/meson.build | 14
-rw-r--r-- lib/portage/sync/modules/rsync/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/rsync/rsync.py | 109
-rw-r--r-- lib/portage/sync/modules/svn/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/svn/svn.py | 10
-rw-r--r-- lib/portage/sync/modules/webrsync/__init__.py | 4
-rw-r--r-- lib/portage/sync/modules/webrsync/meson.build | 8
-rw-r--r-- lib/portage/sync/modules/webrsync/webrsync.py | 56
-rw-r--r-- lib/portage/sync/modules/zipfile/__init__.py | 34
-rw-r--r-- lib/portage/sync/modules/zipfile/zipfile.py | 143
-rw-r--r-- lib/portage/sync/old_tree_timestamp.py | 4
-rw-r--r-- lib/portage/sync/syncbase.py | 49
-rw-r--r-- lib/portage/tests/.gnupg/openpgp-revocs.d/06B3A311BD775C280D22A9305D90EA06352177F6.rev | 37
-rw-r--r-- lib/portage/tests/.gnupg/openpgp-revocs.d/8DEDA2CDED49C8809287B89D8812797DDF1DD192.rev | 37
-rw-r--r-- lib/portage/tests/.gnupg/private-keys-v1.d/273B030399E7BEA66A9AD42216DE7CA17BA5D42E.key | bin 0 -> 2055 bytes
-rw-r--r-- lib/portage/tests/.gnupg/private-keys-v1.d/C99796FB85B0C3DF03314A11B5850C51167D6282.key | bin 0 -> 2055 bytes
-rw-r--r-- lib/portage/tests/.gnupg/pubring.kbx | bin 0 -> 2774 bytes
-rw-r--r-- lib/portage/tests/.gnupg/trustdb.gpg | bin 0 -> 1360 bytes
-rw-r--r-- lib/portage/tests/__init__.py | 259
-rw-r--r-- lib/portage/tests/bin/meson.build | 14
-rw-r--r-- lib/portage/tests/bin/setup_env.py | 7
-rw-r--r-- lib/portage/tests/bin/test_doins.py | 12
-rw-r--r-- lib/portage/tests/bin/test_eapi7_ver_funcs.py | 30
-rw-r--r-- lib/portage/tests/bin/test_filter_bash_env.py | 27
-rw-r--r-- lib/portage/tests/conftest.py | 91
-rw-r--r-- lib/portage/tests/dbapi/meson.build | 12
-rw-r--r-- lib/portage/tests/dbapi/test_auxdb.py | 123
-rw-r--r-- lib/portage/tests/dbapi/test_bintree.py | 231
-rw-r--r-- lib/portage/tests/dbapi/test_fakedbapi.py | 2
-rw-r--r-- lib/portage/tests/dbapi/test_portdb_cache.py | 38
-rw-r--r-- lib/portage/tests/dep/meson.build | 29
-rw-r--r-- lib/portage/tests/dep/test_atom.py (renamed from lib/portage/tests/dep/testAtom.py) | 61
-rw-r--r-- lib/portage/tests/dep/test_check_required_use.py (renamed from lib/portage/tests/dep/testCheckRequiredUse.py) | 0
-rw-r--r-- lib/portage/tests/dep/test_dep_getcpv.py | 1
-rw-r--r-- lib/portage/tests/dep/test_dep_getrepo.py | 1
-rw-r--r-- lib/portage/tests/dep/test_dep_getslot.py | 1
-rw-r--r-- lib/portage/tests/dep/test_dep_getusedeps.py | 3
-rw-r--r-- lib/portage/tests/dep/test_dnf_convert.py | 1
-rw-r--r-- lib/portage/tests/dep/test_extended_atom_dict.py (renamed from lib/portage/tests/dep/testExtendedAtomDict.py) | 0
-rw-r--r-- lib/portage/tests/dep/test_extract_affecting_use.py (renamed from lib/portage/tests/dep/testExtractAffectingUSE.py) | 0
-rw-r--r-- lib/portage/tests/dep/test_get_operator.py | 3
-rw-r--r-- lib/portage/tests/dep/test_get_required_use_flags.py | 2
-rw-r--r-- lib/portage/tests/dep/test_isjustname.py | 5
-rw-r--r-- lib/portage/tests/dep/test_isvalidatom.py | 12
-rw-r--r-- lib/portage/tests/dep/test_libc.py | 81
-rw-r--r-- lib/portage/tests/dep/test_match_from_list.py | 2
-rw-r--r-- lib/portage/tests/dep/test_overlap_dnf.py | 50
-rw-r--r-- lib/portage/tests/dep/test_paren_reduce.py | 1
-rw-r--r-- lib/portage/tests/dep/test_soname_atom_pickle.py | 1
-rw-r--r-- lib/portage/tests/dep/test_standalone.py (renamed from lib/portage/tests/dep/testStandalone.py) | 5
-rw-r--r-- lib/portage/tests/dep/test_use_reduce.py | 3
-rw-r--r-- lib/portage/tests/ebuild/meson.build | 17
-rw-r--r-- lib/portage/tests/ebuild/test_array_fromfile_eof.py | 2
-rw-r--r-- lib/portage/tests/ebuild/test_config.py | 10
-rw-r--r-- lib/portage/tests/ebuild/test_doebuild_fd_pipes.py | 136
-rw-r--r-- lib/portage/tests/ebuild/test_doebuild_spawn.py | 6
-rw-r--r-- lib/portage/tests/ebuild/test_fetch.py | 92
-rw-r--r-- lib/portage/tests/ebuild/test_ipc_daemon.py | 33
-rw-r--r-- lib/portage/tests/ebuild/test_shell_quote.py | 2
-rw-r--r-- lib/portage/tests/ebuild/test_spawn.py | 10
-rw-r--r-- lib/portage/tests/ebuild/test_use_expand_incremental.py | 8
-rw-r--r-- lib/portage/tests/emerge/conftest.py | 858
-rw-r--r-- lib/portage/tests/emerge/meson.build | 16
-rw-r--r-- lib/portage/tests/emerge/test_actions.py | 68
-rw-r--r-- lib/portage/tests/emerge/test_baseline.py | 221
-rw-r--r-- lib/portage/tests/emerge/test_binpkg_fetch.py | 226
-rw-r--r-- lib/portage/tests/emerge/test_config_protect.py | 28
-rw-r--r-- lib/portage/tests/emerge/test_emerge_blocker_file_collision.py | 19
-rw-r--r-- lib/portage/tests/emerge/test_emerge_slot_abi.py | 26
-rw-r--r-- lib/portage/tests/emerge/test_libc_dep_inject.py | 552
-rw-r--r-- lib/portage/tests/emerge/test_simple.py | 704
-rw-r--r-- lib/portage/tests/env/config/meson.build | 12
-rw-r--r-- lib/portage/tests/env/config/test_PackageKeywordsFile.py | 3
-rw-r--r-- lib/portage/tests/env/config/test_PackageUseFile.py | 3
-rw-r--r-- lib/portage/tests/env/config/test_PortageModulesFile.py | 6
-rw-r--r-- lib/portage/tests/env/meson.build | 10
-rw-r--r-- lib/portage/tests/glsa/meson.build | 9
-rw-r--r-- lib/portage/tests/glsa/test_security_set.py | 143
-rw-r--r-- lib/portage/tests/gpkg/__init__.py | 2
-rw-r--r-- lib/portage/tests/gpkg/__test__.py | 0
-rw-r--r-- lib/portage/tests/gpkg/meson.build | 15
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_checksum.py | 376
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_gpg.py | 395
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_metadata_update.py | 56
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_metadata_url.py | 159
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_path.py | 371
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_size.py | 54
-rw-r--r-- lib/portage/tests/gpkg/test_gpkg_stream.py | 93
-rw-r--r-- lib/portage/tests/lafilefixer/meson.build | 9
-rw-r--r-- lib/portage/tests/lafilefixer/test_lafilefixer.py | 4
-rw-r--r-- lib/portage/tests/lazyimport/meson.build | 10
-rw-r--r-- lib/portage/tests/lazyimport/test_lazy_import_portage_baseline.py | 4
-rw-r--r-- lib/portage/tests/lint/meson.build | 12
-rw-r--r-- lib/portage/tests/lint/test_compile_modules.py | 2
-rw-r--r-- lib/portage/tests/lint/test_import_modules.py | 2
-rw-r--r-- lib/portage/tests/locks/meson.build | 10
-rw-r--r-- lib/portage/tests/locks/test_asynchronous_lock.py | 31
-rw-r--r-- lib/portage/tests/locks/test_lock_nonblock.py | 55
-rw-r--r-- lib/portage/tests/meson.build | 31
-rw-r--r-- lib/portage/tests/news/meson.build | 9
-rw-r--r-- lib/portage/tests/news/test_NewsItem.py | 443
-rw-r--r-- lib/portage/tests/process/meson.build | 19
-rw-r--r-- lib/portage/tests/process/test_AsyncFunction.py | 85
-rw-r--r-- lib/portage/tests/process/test_ForkProcess.py | 46
-rw-r--r-- lib/portage/tests/process/test_PipeLogger.py | 16
-rw-r--r-- lib/portage/tests/process/test_PopenProcess.py | 17
-rw-r--r-- lib/portage/tests/process/test_PopenProcessBlockingIO.py | 24
-rw-r--r-- lib/portage/tests/process/test_pickle.py | 43
-rw-r--r-- lib/portage/tests/process/test_poll.py | 18
-rw-r--r-- lib/portage/tests/process/test_spawn_fail_e2big.py | 33
-rw-r--r-- lib/portage/tests/process/test_spawn_returnproc.py | 39
-rw-r--r-- lib/portage/tests/process/test_spawn_warn_large_env.py | 46
-rw-r--r-- lib/portage/tests/process/test_unshare_net.py | 33
-rw-r--r-- lib/portage/tests/resolver/ResolverPlayground.py | 267
-rw-r--r-- lib/portage/tests/resolver/binpkg_multi_instance/meson.build | 10
-rw-r--r-- lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py | 50
-rw-r--r-- lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py | 44
-rw-r--r-- lib/portage/tests/resolver/meson.build | 100
-rw-r--r-- lib/portage/tests/resolver/soname/meson.build | 19
-rw-r--r-- lib/portage/tests/resolver/soname/test_autounmask.py | 38
-rw-r--r-- lib/portage/tests/resolver/soname/test_depclean.py | 1
-rw-r--r-- lib/portage/tests/resolver/soname/test_downgrade.py | 85
-rw-r--r-- lib/portage/tests/resolver/soname/test_or_choices.py | 39
-rw-r--r-- lib/portage/tests/resolver/soname/test_reinstall.py | 40
-rw-r--r-- lib/portage/tests/resolver/soname/test_skip_update.py | 71
-rw-r--r-- lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py | 143
-rw-r--r-- lib/portage/tests/resolver/soname/test_slot_conflict_update.py | 38
-rw-r--r-- lib/portage/tests/resolver/soname/test_soname_provided.py | 45
-rw-r--r-- lib/portage/tests/resolver/soname/test_unsatisfiable.py | 40
-rw-r--r-- lib/portage/tests/resolver/soname/test_unsatisfied.py | 40
-rw-r--r-- lib/portage/tests/resolver/test_aggressive_backtrack_downgrade.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_alternatives_gzip.py | 246
-rw-r--r-- lib/portage/tests/resolver/test_autounmask.py | 26
-rw-r--r-- lib/portage/tests/resolver/test_autounmask_binpkg_use.py | 39
-rw-r--r-- lib/portage/tests/resolver/test_autounmask_multilib_use.py | 8
-rw-r--r-- lib/portage/tests/resolver/test_autounmask_parent.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_autounmask_use_breakage.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_autounmask_use_slot_conflict.py | 7
-rw-r--r-- lib/portage/tests/resolver/test_bdeps.py | 44
-rw-r--r-- lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py | 35
-rw-r--r-- lib/portage/tests/resolver/test_broken_deps.py | 76
-rw-r--r-- lib/portage/tests/resolver/test_changed_deps.py | 41
-rw-r--r-- lib/portage/tests/resolver/test_circular_choices.py | 5
-rw-r--r-- lib/portage/tests/resolver/test_circular_choices_rust.py | 3
-rw-r--r-- lib/portage/tests/resolver/test_circular_dependencies.py | 2
-rw-r--r-- lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py | 40
-rw-r--r-- lib/portage/tests/resolver/test_cross_dep_priority.py | 164
-rw-r--r-- lib/portage/tests/resolver/test_depclean.py | 3
-rw-r--r-- lib/portage/tests/resolver/test_depclean_order.py | 118
-rw-r--r-- lib/portage/tests/resolver/test_depclean_slot_unavailable.py | 2
-rw-r--r-- lib/portage/tests/resolver/test_depth.py | 9
-rw-r--r-- lib/portage/tests/resolver/test_disjunctive_depend_order.py | 33
-rw-r--r-- lib/portage/tests/resolver/test_eapi.py | 46
-rw-r--r-- lib/portage/tests/resolver/test_emptytree_reinstall_unsatisfiability.py | 137
-rw-r--r-- lib/portage/tests/resolver/test_imagemagick_graphicsmagick.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_installkernel.py | 93
-rw-r--r-- lib/portage/tests/resolver/test_merge_order.py | 12
-rw-r--r-- lib/portage/tests/resolver/test_multirepo.py | 61
-rw-r--r-- lib/portage/tests/resolver/test_onlydeps_ideps.py | 172
-rw-r--r-- lib/portage/tests/resolver/test_onlydeps_minimal.py | 25
-rw-r--r-- lib/portage/tests/resolver/test_or_choices.py | 10
-rw-r--r-- lib/portage/tests/resolver/test_package_tracker.py | 3
-rw-r--r-- lib/portage/tests/resolver/test_perl_rebuild_bug.py | 121
-rw-r--r-- lib/portage/tests/resolver/test_profile_default_eapi.py | 6
-rw-r--r-- lib/portage/tests/resolver/test_profile_package_set.py | 6
-rw-r--r-- lib/portage/tests/resolver/test_rebuild_ghostscript.py | 160
-rw-r--r-- lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py | 41
-rw-r--r-- lib/portage/tests/resolver/test_required_use.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_runtime_cycle_merge_order.py | 153
-rw-r--r-- lib/portage/tests/resolver/test_simple.py | 33
-rw-r--r-- lib/portage/tests/resolver/test_slot_abi.py | 114
-rw-r--r-- lib/portage/tests/resolver/test_slot_abi_downgrade.py | 78
-rw-r--r-- lib/portage/tests/resolver/test_slot_change_without_revbump.py | 40
-rw-r--r-- lib/portage/tests/resolver/test_slot_collisions.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_blocked_prune.py | 78
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_force_rebuild.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_rebuild.py | 10
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_unsatisfied_deep_deps.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_update.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_conflict_update_virt.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_autounmask.py | 41
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_bdeps.py | 74
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_complete_graph.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_exclusive_slots.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_missed_update.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_rebuild.py | 40
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_required_use.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_reverse_deps.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_runtime_pkg_mask.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_unsatisfied.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_unsolved.py | 42
-rw-r--r-- lib/portage/tests/resolver/test_slot_operator_update_probe_parent_downgrade.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_solve_non_slot_operator_slot_conflicts.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_unnecessary_slot_upgrade.py | 51
-rw-r--r-- lib/portage/tests/resolver/test_update.py | 106
-rw-r--r-- lib/portage/tests/resolver/test_use_dep_defaults.py | 1
-rw-r--r-- lib/portage/tests/resolver/test_useflags.py | 212
-rw-r--r-- lib/portage/tests/resolver/test_virtual_slot.py | 4
-rwxr-xr-x lib/portage/tests/runTests.py | 70
-rw-r--r-- lib/portage/tests/sets/base/meson.build | 10
-rw-r--r-- lib/portage/tests/sets/base/test_internal_package_set.py (renamed from lib/portage/tests/sets/base/testInternalPackageSet.py) | 4
-rw-r--r-- lib/portage/tests/sets/base/test_variable_set.py | 45
-rw-r--r-- lib/portage/tests/sets/files/meson.build | 10
-rw-r--r-- lib/portage/tests/sets/files/test_config_file_set.py (renamed from lib/portage/tests/sets/files/testConfigFileSet.py) | 3
-rw-r--r-- lib/portage/tests/sets/files/test_static_file_set.py (renamed from lib/portage/tests/sets/files/testStaticFileSet.py) | 3
-rw-r--r-- lib/portage/tests/sets/meson.build | 12
-rw-r--r-- lib/portage/tests/sets/shell/meson.build | 9
-rw-r--r-- lib/portage/tests/sets/shell/test_shell.py (renamed from lib/portage/tests/sets/shell/testShell.py) | 7
-rw-r--r-- lib/portage/tests/sync/meson.build | 10
-rw-r--r-- lib/portage/tests/sync/test_sync_local.py | 38
-rw-r--r-- lib/portage/tests/sync/test_sync_zipfile.py | 99
-rw-r--r-- lib/portage/tests/unicode/meson.build | 9
-rw-r--r-- lib/portage/tests/unicode/test_string_format.py | 17
-rw-r--r-- lib/portage/tests/update/meson.build | 11
-rw-r--r-- lib/portage/tests/update/test_move_ent.py | 331
-rw-r--r-- lib/portage/tests/update/test_move_slot_ent.py | 290
-rw-r--r-- lib/portage/tests/update/test_update_dbentry.py | 397
-rw-r--r-- lib/portage/tests/util/dyn_libs/meson.build | 10
-rw-r--r-- lib/portage/tests/util/dyn_libs/test_installed_dynlibs.py | 65
-rw-r--r-- lib/portage/tests/util/eventloop/meson.build | 9
-rw-r--r-- lib/portage/tests/util/eventloop/test_call_soon_fifo.py | 1
-rw-r--r-- lib/portage/tests/util/file_copy/meson.build | 9
-rw-r--r-- lib/portage/tests/util/file_copy/test_copyfile.py | 42
-rw-r--r-- lib/portage/tests/util/futures/asyncio/meson.build | 14
-rw-r--r-- lib/portage/tests/util/futures/asyncio/test_child_watcher.py | 51
-rw-r--r-- lib/portage/tests/util/futures/asyncio/test_pipe_closed.py | 8
-rw-r--r-- lib/portage/tests/util/futures/asyncio/test_subprocess_exec.py | 6
-rw-r--r-- lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py | 2
-rw-r--r-- lib/portage/tests/util/futures/meson.build | 14
-rw-r--r-- lib/portage/tests/util/futures/test_compat_coroutine.py | 211
-rw-r--r-- lib/portage/tests/util/futures/test_done_callback.py | 1
-rw-r--r-- lib/portage/tests/util/futures/test_iter_completed.py | 19
-rw-r--r-- lib/portage/tests/util/futures/test_retry.py | 57
-rw-r--r-- lib/portage/tests/util/meson.build | 31
-rw-r--r-- lib/portage/tests/util/test_checksum.py | 49
-rw-r--r-- lib/portage/tests/util/test_digraph.py | 60
-rw-r--r-- lib/portage/tests/util/test_file_copier.py | 1
-rw-r--r-- lib/portage/tests/util/test_getconfig.py | 6
-rw-r--r-- lib/portage/tests/util/test_install_mask.py | 31
-rw-r--r-- lib/portage/tests/util/test_manifest.py | 34
-rw-r--r-- lib/portage/tests/util/test_mtimedb.py | 362
-rw-r--r-- lib/portage/tests/util/test_normalizedPath.py | 1
-rw-r--r-- lib/portage/tests/util/test_shelve.py | 4
-rw-r--r-- lib/portage/tests/util/test_socks5.py | 95
-rw-r--r-- lib/portage/tests/util/test_stackDicts.py | 2
-rw-r--r-- lib/portage/tests/util/test_stackLists.py | 1
-rw-r--r-- lib/portage/tests/util/test_uniqueArray.py | 3
-rw-r--r-- lib/portage/tests/util/test_varExpand.py | 17
-rw-r--r-- lib/portage/tests/util/test_whirlpool.py | 49
-rw-r--r-- lib/portage/tests/util/test_xattr.py | 2
-rw-r--r-- lib/portage/tests/versions/meson.build | 10
-rw-r--r-- lib/portage/tests/versions/test_cpv_sort_key.py | 1
-rw-r--r-- lib/portage/tests/versions/test_vercmp.py | 11
-rw-r--r-- lib/portage/tests/xpak/meson.build | 9
-rw-r--r-- lib/portage/tests/xpak/test_decodeint.py | 3
-rw-r--r-- lib/portage/update.py | 23
-rw-r--r-- lib/portage/util/ExtractKernelVersion.py | 10
-rw-r--r-- lib/portage/util/__init__.py | 159
-rw-r--r-- lib/portage/util/_async/AsyncFunction.py | 32
-rw-r--r-- lib/portage/util/_async/AsyncScheduler.py | 4
-rw-r--r-- lib/portage/util/_async/AsyncTaskFuture.py | 8
-rw-r--r-- lib/portage/util/_async/BuildLogger.py | 41
-rw-r--r-- lib/portage/util/_async/FileCopier.py | 20
-rw-r--r-- lib/portage/util/_async/FileDigester.py | 72
-rw-r--r-- lib/portage/util/_async/ForkProcess.py | 332
-rw-r--r-- lib/portage/util/_async/PipeLogger.py | 4
-rw-r--r-- lib/portage/util/_async/PipeReaderBlockingIO.py | 23
-rw-r--r-- lib/portage/util/_async/PopenProcess.py | 7
-rw-r--r-- lib/portage/util/_async/SchedulerInterface.py | 13
-rw-r--r-- lib/portage/util/_async/TaskScheduler.py | 1
-rw-r--r-- lib/portage/util/_async/meson.build | 20
-rw-r--r-- lib/portage/util/_ctypes.py | 15
-rw-r--r-- lib/portage/util/_dyn_libs/LinkageMapELF.py | 49
-rw-r--r-- lib/portage/util/_dyn_libs/PreservedLibsRegistry.py | 6
-rw-r--r-- lib/portage/util/_dyn_libs/display_preserved_libs.py | 13
-rw-r--r-- lib/portage/util/_dyn_libs/dyn_libs.py | 65
-rw-r--r-- lib/portage/util/_dyn_libs/meson.build | 14
-rw-r--r-- lib/portage/util/_dyn_libs/soname_deps.py | 4
-rw-r--r-- lib/portage/util/_dyn_libs/soname_deps_qa.py | 9
-rw-r--r-- lib/portage/util/_eventloop/asyncio_event_loop.py | 76
-rw-r--r-- lib/portage/util/_eventloop/meson.build | 9
-rw-r--r-- lib/portage/util/_get_vm_info.py | 2
-rw-r--r-- lib/portage/util/_info_files.py | 11
-rw-r--r-- lib/portage/util/_path.py | 4
-rw-r--r-- lib/portage/util/_pty.py | 27
-rw-r--r-- lib/portage/util/_urlopen.py | 8
-rw-r--r-- lib/portage/util/_xattr.py | 11
-rw-r--r-- lib/portage/util/backoff.py | 4
-rw-r--r-- lib/portage/util/bin_entry_point.py | 18
-rw-r--r-- lib/portage/util/changelog.py | 4
-rw-r--r-- lib/portage/util/compression_probe.py | 9
-rw-r--r-- lib/portage/util/configparser.py | 7
-rw-r--r-- lib/portage/util/cpuinfo.py | 22
-rw-r--r-- lib/portage/util/digraph.py | 12
-rw-r--r-- lib/portage/util/elf/constants.py | 15
-rw-r--r-- lib/portage/util/elf/header.py | 1
-rw-r--r-- lib/portage/util/elf/meson.build | 9
-rw-r--r-- lib/portage/util/endian/meson.build | 8
-rw-r--r-- lib/portage/util/env_update.py | 132
-rw-r--r-- lib/portage/util/file_copy.py | 137
-rw-r--r-- lib/portage/util/file_copy/__init__.py | 37
-rw-r--r-- lib/portage/util/futures/_asyncio/__init__.py | 73
-rw-r--r-- lib/portage/util/futures/_asyncio/meson.build | 8
-rw-r--r-- lib/portage/util/futures/_asyncio/streams.py | 2
-rw-r--r-- lib/portage/util/futures/_sync_decorator.py | 9
-rw-r--r-- lib/portage/util/futures/compat_coroutine.py | 141
-rw-r--r-- lib/portage/util/futures/executor/fork.py | 19
-rw-r--r-- lib/portage/util/futures/executor/meson.build | 8
-rw-r--r-- lib/portage/util/futures/extendedfutures.py | 12
-rw-r--r-- lib/portage/util/futures/iter_completed.py | 3
-rw-r--r-- lib/portage/util/futures/meson.build | 16
-rw-r--r-- lib/portage/util/futures/retry.py | 2
-rw-r--r-- lib/portage/util/futures/unix_events.py | 5
-rw-r--r-- lib/portage/util/hooks.py | 2
-rw-r--r-- lib/portage/util/install_mask.py | 18
-rw-r--r-- lib/portage/util/iterators/MultiIterGroupBy.py | 5
-rw-r--r-- lib/portage/util/iterators/meson.build | 8
-rw-r--r-- lib/portage/util/lafilefixer.py | 8
-rw-r--r-- lib/portage/util/listdir.py | 6
-rw-r--r-- lib/portage/util/locale.py | 94
-rw-r--r-- lib/portage/util/meson.build | 49
-rw-r--r-- lib/portage/util/movefile.py | 94
-rw-r--r-- lib/portage/util/mtimedb.py | 104
-rw-r--r-- lib/portage/util/netlink.py | 2
-rw-r--r-- lib/portage/util/shelve.py | 4
-rw-r--r-- lib/portage/util/socks5.py | 43
-rw-r--r-- lib/portage/util/whirlpool.py | 83
-rw-r--r-- lib/portage/util/writeable_check.py | 16
-rw-r--r-- lib/portage/versions.py | 162
-rw-r--r-- lib/portage/xml/meson.build | 8
-rw-r--r-- lib/portage/xml/metadata.py | 14
-rw-r--r-- lib/portage/xpak.py | 30
499 files changed, 21252 insertions, 8941 deletions
diff --git a/lib/portage/__init__.py b/lib/portage/__init__.py
index 13af8da09..21bf99317 100644
--- a/lib/portage/__init__.py
+++ b/lib/portage/__init__.py
@@ -1,13 +1,13 @@
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# pylint: disable=ungrouped-imports
-VERSION = "HEAD"
-
# ===========================================================================
# START OF IMPORTS -- START OF IMPORTS -- START OF IMPORTS -- START OF IMPORT
# ===========================================================================
+from portage import installation
+
try:
import asyncio
import sys
@@ -16,7 +16,7 @@ try:
if not hasattr(errno, "ESTALE"):
# ESTALE may not be defined on some systems, such as interix.
errno.ESTALE = -1
- import multiprocessing.util
+ import functools
import re
import types
import platform
@@ -48,11 +48,10 @@ except ImportError as e:
sys.stderr.write(
"!!! gone wrong. Here is the information we got for this exception:\n"
)
- sys.stderr.write(" " + str(e) + "\n\n")
+ sys.stderr.write(f" {e}\n\n")
raise
try:
-
import portage.proxy.lazyimport
import portage.proxy as proxy
@@ -93,7 +92,7 @@ try:
+ "doebuild_environment,spawn,spawnebuild",
"portage.package.ebuild.config:autouse,best_from_dict,"
+ "check_config_instance,config",
- "portage.package.ebuild.deprecated_profile_check:" + "deprecated_profile_check",
+ "portage.package.ebuild.deprecated_profile_check:deprecated_profile_check",
"portage.package.ebuild.digestcheck:digestcheck",
"portage.package.ebuild.digestgen:digestgen",
"portage.package.ebuild.fetch:fetch",
@@ -124,6 +123,7 @@ try:
+ "cpv_getkey@getCPFromCPV,endversion_keys,"
+ "suffix_value@endversion,pkgcmp,pkgsplit,vercmp,ververify",
"portage.xpak",
+ "portage.gpkg",
"subprocess",
"time",
)
@@ -183,9 +183,10 @@ except ImportError as e:
"!!! There is a README.RESCUE file that details the steps required to perform\n"
)
sys.stderr.write("!!! a recovery of portage.\n")
- sys.stderr.write(" " + str(e) + "\n\n")
+ sys.stderr.write(f" {e}\n\n")
raise
+utf8_mode = sys.getfilesystemencoding() == "utf-8"
# We use utf_8 encoding everywhere. Previously, we used
# sys.getfilesystemencoding() for the 'merge' encoding, but that had
@@ -259,23 +260,21 @@ class _unicode_func_wrapper:
self._encoding = encoding
def _process_args(self, args, kwargs):
-
encoding = self._encoding
wrapped_args = [
_unicode_encode(x, encoding=encoding, errors="strict") for x in args
]
if kwargs:
- wrapped_kwargs = dict(
- (k, _unicode_encode(v, encoding=encoding, errors="strict"))
+ wrapped_kwargs = {
+ k: _unicode_encode(v, encoding=encoding, errors="strict")
for k, v in kwargs.items()
- )
+ }
else:
wrapped_kwargs = {}
return (wrapped_args, wrapped_kwargs)
def __call__(self, *args, **kwargs):
-
encoding = self._encoding
wrapped_args, wrapped_kwargs = self._process_args(args, kwargs)
@@ -321,6 +320,9 @@ class _unicode_module_wrapper:
object.__setattr__(self, "_cache", cache)
def __getattribute__(self, attr):
+ if utf8_mode:
+ return getattr(object.__getattribute__(self, "_mod"), attr)
+
cache = object.__getattribute__(self, "_cache")
if cache is not None:
result = cache.get(attr)
@@ -347,30 +349,6 @@ class _unicode_module_wrapper:
return result
-class _eintr_func_wrapper:
- """
- Wraps a function and handles EINTR by calling the function as
- many times as necessary (until it returns without raising EINTR).
- """
-
- __slots__ = ("_func",)
-
- def __init__(self, func):
- self._func = func
-
- def __call__(self, *args, **kwargs):
-
- while True:
- try:
- rval = self._func(*args, **kwargs)
- break
- except EnvironmentError as e:
- if e.errno != errno.EINTR:
- raise
-
- return rval
-
-
import os as _os
_os_overrides = {
@@ -378,14 +356,11 @@ _os_overrides = {
id(_os.popen): _os.popen,
id(_os.read): _os.read,
id(_os.system): _os.system,
- id(_os.waitpid): _eintr_func_wrapper(_os.waitpid),
+ id(_os.waitpid): _os.waitpid,
}
-try:
- _os_overrides[id(_os.mkfifo)] = _os.mkfifo
-except AttributeError:
- pass # Jython
+_os_overrides[id(_os.mkfifo)] = _os.mkfifo
if hasattr(_os, "statvfs"):
_os_overrides[id(_os.statvfs)] = _os.statvfs
@@ -408,7 +383,7 @@ try:
_selinux_merge = _unicode_module_wrapper(_selinux, encoding=_encodings["merge"])
except (ImportError, OSError) as e:
if isinstance(e, OSError):
- sys.stderr.write("!!! SELinux not loaded: %s\n" % str(e))
+ sys.stderr.write(f"!!! SELinux not loaded: {e}\n")
del e
_selinux = None
selinux = None
@@ -446,7 +421,7 @@ class _ForkWatcher:
_ForkWatcher.hook(_ForkWatcher)
-multiprocessing.util.register_after_fork(_ForkWatcher, _ForkWatcher.hook)
+os.register_at_fork(after_in_child=functools.partial(_ForkWatcher.hook, _ForkWatcher))
def getpid():
@@ -481,10 +456,10 @@ def _shell_quote(s):
"""
if _shell_quote_re.search(s) is None:
return s
- for letter in '\\"$`':
+ for letter in r"\"$`":
if letter in s:
- s = s.replace(letter, "\\" + letter)
- return '"%s"' % s
+ s = s.replace(letter, rf"\{letter}")
+ return f'"{s}"'
bsd_chflags = None
@@ -497,9 +472,9 @@ if platform.system() in ("FreeBSD",):
def load_mod(name):
- modname = ".".join(name.split(".")[:-1])
- mod = __import__(modname)
components = name.split(".")
+ modname = ".".join(components[:-1])
+ mod = __import__(modname)
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
@@ -533,7 +508,7 @@ def abssymlink(symlink, target=None):
mylink = os.readlink(symlink)
if mylink[0] != "/":
mydir = os.path.dirname(symlink)
- mylink = mydir + "/" + mylink
+ mylink = f"{mydir}/{mylink}"
return os.path.normpath(mylink)
@@ -545,19 +520,22 @@ _deprecated_eapis = frozenset(
"3_pre1",
"3_pre2",
"4_pre1",
- "4-python",
"4-slot-abi",
"5_pre1",
"5_pre2",
- "5-progress",
"6_pre1",
"7_pre1",
]
)
+
+from itertools import chain
+
_supported_eapis = frozenset(
- [str(x) for x in range(portage.const.EAPI + 1)]
- + list(_testing_eapis)
- + list(_deprecated_eapis)
+ chain(
+ (str(x) for x in range(portage.const.EAPI + 1)),
+ _testing_eapis,
+ _deprecated_eapis,
+ )
)
@@ -567,7 +545,6 @@ def _eapi_is_deprecated(eapi):
def eapi_is_supported(eapi):
eapi = str(eapi).strip()
-
return eapi in _supported_eapis
@@ -596,7 +573,7 @@ def _parse_eapi_ebuild_head(f):
def _movefile(src, dest, **kwargs):
"""Calls movefile and raises a PortageException if an error occurs."""
if movefile(src, dest, **kwargs) is None:
- raise portage.exception.PortageException("mv '%s' '%s'" % (src, dest))
+ raise portage.exception.PortageException(f"mv '{src}' '{dest}'")
auxdbkeys = (
@@ -641,6 +618,15 @@ class _trees_dict(dict):
def create_trees(
config_root=None, target_root=None, trees=None, env=None, sysroot=None, eprefix=None
):
+ if utf8_mode:
+ config_root = (
+ os.fsdecode(config_root) if isinstance(config_root, bytes) else config_root
+ )
+ target_root = (
+ os.fsdecode(target_root) if isinstance(target_root, bytes) else target_root
+ )
+ sysroot = os.fsdecode(sysroot) if isinstance(sysroot, bytes) else sysroot
+ eprefix = os.fsdecode(eprefix) if isinstance(eprefix, bytes) else eprefix
if trees is None:
trees = _trees_dict()
@@ -667,12 +653,10 @@ def create_trees(
if settings["ROOT"] == "/" and settings["EPREFIX"] == const.EPREFIX:
trees._running_eroot = trees._target_eroot
else:
-
# When ROOT != "/" we only want overrides from the calling
# environment to apply to the config that's associated
# with ROOT != "/", so pass a nearly empty dict for the env parameter.
- clean_env = {}
- for k in (
+ env_sequence = (
"PATH",
"PORTAGE_GRPNAME",
"PORTAGE_REPOSITORIES",
@@ -686,18 +670,29 @@ def create_trees(
"https_proxy",
"no_proxy",
"__PORTAGE_TEST_HARDLINK_LOCKS",
- ):
- v = settings.get(k)
- if v is not None:
- clean_env[k] = v
+ )
+ env = ((k, settings.get(k)) for k in env_sequence)
+ clean_env = {k: v for k, v in env if v is not None}
+
if depcachedir is not None:
clean_env["PORTAGE_DEPCACHEDIR"] = depcachedir
- settings = config(
+ mysettings = config(
config_root=None, target_root="/", env=clean_env, sysroot="/", eprefix=None
)
- settings.lock()
- trees._running_eroot = settings["EROOT"]
- myroots.append((settings["EROOT"], settings))
+ mysettings.lock()
+ trees._running_eroot = mysettings["EROOT"]
+ myroots.append((mysettings["EROOT"], mysettings))
+
+ if settings["SYSROOT"] != "/" and settings["SYSROOT"] != settings["ROOT"]:
+ mysettings = config(
+ config_root=settings["SYSROOT"],
+ target_root=settings["SYSROOT"],
+ env=clean_env,
+ sysroot=settings["SYSROOT"],
+ eprefix="",
+ )
+ mysettings.lock()
+ myroots.append((mysettings["EROOT"], mysettings))
for myroot, mysettings in myroots:
trees[myroot] = portage.util.LazyItemsDict(trees.get(myroot, {}))
@@ -712,7 +707,7 @@ def create_trees(
return trees
-if VERSION == "HEAD":
+if installation.TYPE == installation.TYPES.SOURCE:
class _LazyVersion(proxy.objectproxy.ObjectProxy):
def _get_target(self):
@@ -725,12 +720,8 @@ if VERSION == "HEAD":
BASH_BINARY,
"-c",
(
- "cd %s ; git describe --match 'portage-*' || exit $? ; "
- + 'if [ -n "`git diff-index --name-only --diff-filter=M HEAD`" ] ; '
- + "then echo modified ; git rev-list --format=%%ct -n 1 HEAD ; fi ; "
- + "exit 0"
- )
- % _shell_quote(PORTAGE_BASE_PATH),
+ f"cd {_shell_quote(PORTAGE_BASE_PATH)} ; git describe --dirty --match 'portage-*' || exit $? ; "
+ ),
]
cmd = [
_unicode_encode(x, encoding=encoding, errors="strict") for x in cmd
@@ -741,37 +732,16 @@ if VERSION == "HEAD":
output = _unicode_decode(proc.communicate()[0], encoding=encoding)
status = proc.wait()
if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
- output_lines = output.splitlines()
- if output_lines:
- version_split = output_lines[0].split("-")
- if len(version_split) > 1:
- VERSION = version_split[1]
- patchlevel = False
- if len(version_split) > 2:
- patchlevel = True
- VERSION = "%s_p%s" % (VERSION, version_split[2])
- if len(output_lines) > 1 and output_lines[1] == "modified":
- head_timestamp = None
- if len(output_lines) > 3:
- try:
- head_timestamp = int(output_lines[3])
- except ValueError:
- pass
- timestamp = int(time.time())
- if (
- head_timestamp is not None
- and timestamp > head_timestamp
- ):
- timestamp = timestamp - head_timestamp
- if not patchlevel:
- VERSION = "%s_p0" % (VERSION,)
- VERSION = "%s_p%d" % (VERSION, timestamp)
- return VERSION
- VERSION = "HEAD"
+ VERSION = output.lstrip("portage-").strip().replace("-g", "+g")
+ else:
+ VERSION = "HEAD"
return VERSION
VERSION = _LazyVersion()
+else:
+ VERSION = "@VERSION@"
+
_legacy_global_var_names = (
"archlist",
"db",
@@ -790,7 +760,6 @@ _legacy_global_var_names = (
def _reset_legacy_globals():
-
global _legacy_globals_constructed
_legacy_globals_constructed = set()
for k in _legacy_global_var_names:
@@ -798,7 +767,6 @@ def _reset_legacy_globals():
class _LegacyGlobalProxy(proxy.objectproxy.ObjectProxy):
-
__slots__ = ("_name",)
def __init__(self, name):
@@ -825,3 +793,4 @@ def _disable_legacy_globals():
global _legacy_global_var_names
for k in _legacy_global_var_names:
globals().pop(k, None)
+ portage.data._initialized_globals.clear()
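Note on the __init__.py hunks above: the pid-cache hook moves from multiprocessing.util.register_after_fork() to the stdlib os.register_at_fork(), whose after_in_child callback runs in every forked child, not just those created through multiprocessing. A minimal, standalone sketch of those semantics, using an illustrative watcher class rather than Portage's actual _ForkWatcher (POSIX-only, since it calls os.fork()):

import os


class _PidWatcher:
    current_pid = os.getpid()

    @classmethod
    def hook(cls):
        # Runs in the child right after fork(); refresh the cached pid.
        cls.current_pid = os.getpid()


# Register the hook for the child side of every fork, as the diff does.
os.register_at_fork(after_in_child=_PidWatcher.hook)

if __name__ == "__main__":
    pid = os.fork()
    if pid == 0:
        # The child sees its own pid, not the parent's stale cached value.
        assert _PidWatcher.current_pid == os.getpid()
        os._exit(0)
    os.waitpid(pid, 0)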
diff --git a/lib/portage/_compat_upgrade/binpkg_compression.py b/lib/portage/_compat_upgrade/binpkg_compression.py
index 58f995485..d643fcaf3 100644
--- a/lib/portage/_compat_upgrade/binpkg_compression.py
+++ b/lib/portage/_compat_upgrade/binpkg_compression.py
@@ -34,7 +34,7 @@ def main():
)
with open(config_path) as f:
content = f.read()
- compat_setting = 'BINPKG_COMPRESS="{}"'.format(COMPAT_BINPKG_COMPRESS)
+ compat_setting = f'BINPKG_COMPRESS="{COMPAT_BINPKG_COMPRESS}"'
portage.output.EOutput().einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -43,7 +43,7 @@ def main():
content = re.sub(
"^BINPKG_COMPRESS=.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
diff --git a/lib/portage/_compat_upgrade/binpkg_format.py b/lib/portage/_compat_upgrade/binpkg_format.py
new file mode 100644
index 000000000..6ad24799c
--- /dev/null
+++ b/lib/portage/_compat_upgrade/binpkg_format.py
@@ -0,0 +1,51 @@
+# Copyright 2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import re
+
+import portage
+from portage import os
+from portage.const import GLOBAL_CONFIG_PATH
+
+COMPAT_BINPKG_FORMAT = "xpak"
+
+
+def main():
+ """
+ If the current installation is still configured to use the old
+ default BINPKG_FORMAT=xpak setting, then patch make.globals
+ inside ${ED} to maintain backward compatibility, ensuring that
+ binary package consumers are not caught off guard. This is
+ intended to be called from the ebuild as follows:
+
+ pkg_preinst() {
+ python_setup
+ env -u BINPKG_FORMAT
+ PYTHONPATH="${D%/}$(python_get_sitedir)${PYTHONPATH:+:${PYTHONPATH}}" \
+ "${PYTHON}" -m portage._compat_upgrade.binpkg_format || die
+ }
+ """
+ if (
+ portage.settings.get("BINPKG_FORMAT", COMPAT_BINPKG_FORMAT)
+ == COMPAT_BINPKG_FORMAT
+ ):
+ config_path = os.path.join(
+ os.environ["ED"], GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals"
+ )
+ with open(config_path) as f:
+ content = f.read()
+ compat_setting = f'BINPKG_FORMAT="{COMPAT_BINPKG_FORMAT}"'
+ portage.output.EOutput().einfo(
+ "Setting make.globals default {} for backward compatibility".format(
+ compat_setting
+ )
+ )
+ content = re.sub(
+ "^BINPKG_FORMAT=.*$", compat_setting, content, flags=re.MULTILINE
+ )
+ with open(config_path, "w") as f:
+ f.write(content)
+
+
+if __name__ == "__main__":
+ main()
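The new _compat_upgrade modules (binpkg_compression, binpkg_format, binpkg_multi_instance, default_locations) all patch make.globals the same way: a line-anchored re.sub() with re.MULTILINE rewrites exactly one VAR=... line in place. A small self-contained sketch of that pattern; the file content below is made up for illustration:

import re

content = 'BINPKG_COMPRESS="zstd"\nBINPKG_FORMAT="gpkg"\n'
compat_setting = 'BINPKG_FORMAT="xpak"'
# With re.MULTILINE, ^ and $ match at each line boundary, so only the
# BINPKG_FORMAT line is replaced; the other settings are untouched.
content = re.sub("^BINPKG_FORMAT=.*$", compat_setting, content, flags=re.MULTILINE)
assert content == 'BINPKG_COMPRESS="zstd"\nBINPKG_FORMAT="xpak"\n'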
diff --git a/lib/portage/_compat_upgrade/binpkg_multi_instance.py b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
index ce504c54b..a5a412666 100644
--- a/lib/portage/_compat_upgrade/binpkg_multi_instance.py
+++ b/lib/portage/_compat_upgrade/binpkg_multi_instance.py
@@ -24,15 +24,13 @@ def main():
"""
if "binpkg-multi-instance" not in portage.settings.features:
portage.output.EOutput().einfo(
- "Setting make.globals default {} for backward compatibility".format(
- COMPAT_FEATURES
- )
+ f"Setting make.globals default {COMPAT_FEATURES} for backward compatibility"
)
config_path = os.path.join(
os.environ["ED"], GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals"
)
- with open(config_path, "at") as f:
- f.write("{}\n".format(COMPAT_FEATURES))
+ with open(config_path, "a") as f:
+ f.write(f"{COMPAT_FEATURES}\n")
if __name__ == "__main__":
diff --git a/lib/portage/_compat_upgrade/default_locations.py b/lib/portage/_compat_upgrade/default_locations.py
index f4a24985b..730a0682a 100644
--- a/lib/portage/_compat_upgrade/default_locations.py
+++ b/lib/portage/_compat_upgrade/default_locations.py
@@ -68,7 +68,7 @@ def main():
with open(config_path) as f:
content = f.read()
if do_distdir:
- compat_setting = 'DISTDIR="{}"'.format(compat_distdir)
+ compat_setting = f'DISTDIR="{compat_distdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -78,7 +78,7 @@ def main():
"^DISTDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
if do_pkgdir:
- compat_setting = 'PKGDIR="{}"'.format(compat_pkgdir)
+ compat_setting = f'PKGDIR="{compat_pkgdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -88,7 +88,7 @@ def main():
"^PKGDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
if do_rpmdir:
- compat_setting = 'RPMDIR="{}"'.format(compat_rpmdir)
+ compat_setting = f'RPMDIR="{compat_rpmdir}"'
out.einfo(
"Setting make.globals default {} for backward compatibility".format(
compat_setting
@@ -97,7 +97,7 @@ def main():
content = re.sub(
"^RPMDIR=.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
if do_main_repo:
@@ -106,7 +106,7 @@ def main():
)
with open(config_path) as f:
content = f.read()
- compat_setting = "location = {}".format(compat_main_repo)
+ compat_setting = f"location = {compat_main_repo}"
out.einfo(
"Setting repos.conf default {} for backward compatibility".format(
compat_setting
@@ -115,7 +115,7 @@ def main():
content = re.sub(
"^location =.*$", compat_setting, content, flags=re.MULTILINE
)
- with open(config_path, "wt") as f:
+ with open(config_path, "w") as f:
f.write(content)
diff --git a/lib/portage/_compat_upgrade/meson.build b/lib/portage/_compat_upgrade/meson.build
new file mode 100644
index 000000000..6db0981b9
--- /dev/null
+++ b/lib/portage/_compat_upgrade/meson.build
@@ -0,0 +1,11 @@
+py.install_sources(
+ [
+ 'binpkg_compression.py',
+ 'binpkg_format.py',
+ 'binpkg_multi_instance.py',
+ 'default_locations.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/_compat_upgrade',
+ pure : not native_extensions
+)
diff --git a/lib/portage/_emirrordist/Config.py b/lib/portage/_emirrordist/Config.py
index 78b7a482a..5b7f51221 100644
--- a/lib/portage/_emirrordist/Config.py
+++ b/lib/portage/_emirrordist/Config.py
@@ -2,7 +2,6 @@
# Distributed under the terms of the GNU General Public License v2
import copy
-import io
import logging
import shelve
import time
@@ -12,6 +11,8 @@ from portage.package.ebuild.fetch import MirrorLayoutConfig
from portage.util import grabdict, grablines
from .ContentDB import ContentDB
+logger = logging.getLogger(__name__)
+
class Config:
def __init__(self, options, portdb, event_loop):
@@ -87,24 +88,20 @@ class Config:
self.layouts = self.layout_conf.get_all_layouts()
def _open_log(self, log_desc, log_path, mode):
-
if log_path is None or getattr(self.options, "dry_run", False):
- log_func = logging.info
+ log_func = logger.info
line_format = "%s: %%s" % log_desc
add_newline = False
if log_path is not None:
- logging.warning(
- "dry-run: %s log " "redirected to logging.info" % log_desc
- )
+ logger.warning(f"dry-run: {log_desc} log redirected to logging.info")
else:
- self._open_files.append(io.open(log_path, mode=mode, encoding="utf_8"))
+ self._open_files.append(open(log_path, mode=mode, encoding="utf_8"))
line_format = "%s\n"
log_func = self._open_files[-1].write
return self._LogFormatter(line_format, log_func)
class _LogFormatter:
-
__slots__ = ("_line_format", "_log_func")
def __init__(self, line_format, log_func):
@@ -137,9 +134,9 @@ class Config:
db = dbshelve.open(db_file, flags=open_flag)
if dry_run:
- logging.warning("dry-run: %s db opened in readonly mode" % db_desc)
+ logger.warning(f"dry-run: {db_desc} db opened in readonly mode")
if not isinstance(db, dict):
- volatile_db = dict((k, db[k]) for k in db)
+ volatile_db = {k: db[k] for k in db}
db.close()
db = volatile_db
else:
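The logger = logging.getLogger(__name__) line added here recurs in ContentDB, DeletionIterator, DeletionTask, and FetchTask below: calls on the root logger become calls on a per-module logger. A minimal stdlib-only sketch of the difference that change makes:

import logging

# Module-level logger: records carry this module's dotted name, so
# consumers can filter or configure portage._emirrordist.* separately
# instead of everything arriving tagged as "root".
logger = logging.getLogger(__name__)

if __name__ == "__main__":
    logging.basicConfig(format="%(name)s: %(message)s", level=logging.DEBUG)
    logger.warning("dry-run: log redirected to logging.info")
    logging.warning("same message via the root logger, tagged 'root'")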
diff --git a/lib/portage/_emirrordist/ContentDB.py b/lib/portage/_emirrordist/ContentDB.py
index 6a5efbe95..622a437aa 100644
--- a/lib/portage/_emirrordist/ContentDB.py
+++ b/lib/portage/_emirrordist/ContentDB.py
@@ -8,6 +8,8 @@ import typing
from portage.package.ebuild.fetch import DistfileName
+logger = logging.getLogger(__name__)
+
class ContentDB:
"""
@@ -36,10 +38,10 @@ class ContentDB:
@param filename: file name with digests attribute
"""
distfile_str = str(filename)
- distfile_key = "filename:{}".format(distfile_str)
+ distfile_key = f"filename:{distfile_str}"
for k, v in filename.digests.items():
if k != "size":
- digest_key = "digest:{}:{}".format(k.upper(), v.lower())
+ digest_key = f"digest:{k.upper()}:{v.lower()}"
try:
digest_files = self._shelve[digest_key]
except KeyError:
@@ -75,7 +77,7 @@ class ContentDB:
@param filename: file name with digests attribute
"""
- distfile_key = "filename:{}".format(filename)
+ distfile_key = f"filename:{filename}"
try:
content_revisions = self._shelve[distfile_key]
except KeyError:
@@ -90,7 +92,7 @@ class ContentDB:
remaining.add(revision_key)
continue
for k, v in revision_key:
- digest_key = "digest:{}:{}".format(k, v)
+ digest_key = f"digest:{k}:{v}"
try:
digest_files = self._shelve[digest_key]
except KeyError:
@@ -110,10 +112,10 @@ class ContentDB:
pass
if remaining:
- logging.debug(("drop '%s' revision(s) from content db") % filename)
+ logger.debug(f"drop '{filename}' revision(s) from content db")
self._shelve[distfile_key] = remaining
else:
- logging.debug(("drop '%s' from content db") % filename)
+ logger.debug(f"drop '{filename}' from content db")
try:
del self._shelve[distfile_key]
except KeyError:
@@ -151,7 +153,7 @@ class ContentDB:
for distfile_str in digest_files:
matched_revisions.setdefault(distfile_str, set())
try:
- content_revisions = self._shelve["filename:{}".format(distfile_str)]
+ content_revisions = self._shelve[f"filename:{distfile_str}"]
except KeyError:
pass
else:
diff --git a/lib/portage/_emirrordist/DeletionIterator.py b/lib/portage/_emirrordist/DeletionIterator.py
index 59abce82f..bed40e935 100644
--- a/lib/portage/_emirrordist/DeletionIterator.py
+++ b/lib/portage/_emirrordist/DeletionIterator.py
@@ -9,6 +9,8 @@ from portage import os
from portage.package.ebuild.fetch import DistfileName
from .DeletionTask import DeletionTask
+logger = logging.getLogger(__name__)
+
class DeletionIterator:
def __init__(self, config):
@@ -25,20 +27,20 @@ class DeletionIterator:
distfiles_set = set()
distfiles_set.update(
(
- filename
- if isinstance(filename, DistfileName)
- else DistfileName(filename)
+ (
+ filename
+ if isinstance(filename, DistfileName)
+ else DistfileName(filename)
+ )
for filename in itertools.chain.from_iterable(
layout.get_filenames(distdir) for layout in self._config.layouts
)
)
if self._config.content_db is None
else itertools.chain.from_iterable(
- (
- self._config.content_db.get_filenames_translate(filename)
- for filename in itertools.chain.from_iterable(
- layout.get_filenames(distdir) for layout in self._config.layouts
- )
+ self._config.content_db.get_filenames_translate(filename)
+ for filename in itertools.chain.from_iterable(
+ layout.get_filenames(distdir) for layout in self._config.layouts
)
)
)
@@ -61,7 +63,7 @@ class DeletionIterator:
break
else:
if exceptions:
- logging.error(
+ logger.error(
"stat failed on '%s' in distfiles: %s\n"
% (filename, "; ".join(str(x) for x in exceptions))
)
@@ -91,7 +93,6 @@ class DeletionIterator:
self._config.scheduled_deletion_count += 1
if deletion_db is None or deletion_delay is None:
-
yield DeletionTask(
background=True,
distfile=filename,
@@ -103,11 +104,10 @@ class DeletionIterator:
deletion_entry = deletion_db.get(filename)
if deletion_entry is None:
- logging.debug("add '%s' to deletion db" % filename)
+ logger.debug(f"add '{filename}' to deletion db")
deletion_db[filename] = start_time
elif deletion_entry + deletion_delay <= start_time:
-
yield DeletionTask(
background=True,
distfile=filename,
@@ -123,4 +123,4 @@ class DeletionIterator:
except KeyError:
pass
else:
- logging.debug("drop '%s' from deletion db" % filename)
+ logger.debug(f"drop '{filename}' from deletion db")
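On the deletion-delay logic above: a distfile is first recorded in the deletion db with the current timestamp, and a DeletionTask is only yielded on a later pass once deletion_entry + deletion_delay <= start_time. A toy sketch of that gate; the delay value and file name are illustrative, and the two branches of the diff are compressed into one setdefault() call:

import time

deletion_db = {}        # filename -> first-seen timestamp
deletion_delay = 3600   # seconds, hypothetical --deletion-delay value

def scheduled_for_deletion(filename, start_time=None):
    start_time = int(start_time if start_time is not None else time.time())
    # First sighting records the timestamp; later sightings compare it.
    deletion_entry = deletion_db.setdefault(filename, start_time)
    return deletion_entry + deletion_delay <= start_time

assert not scheduled_for_deletion("foo.tar.gz", start_time=1000)
assert scheduled_for_deletion("foo.tar.gz", start_time=1000 + 3600)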
diff --git a/lib/portage/_emirrordist/DeletionTask.py b/lib/portage/_emirrordist/DeletionTask.py
index 7066e57a7..2dd97237b 100644
--- a/lib/portage/_emirrordist/DeletionTask.py
+++ b/lib/portage/_emirrordist/DeletionTask.py
@@ -9,28 +9,27 @@ from portage.package.ebuild.fetch import ContentHashLayout
from portage.util._async.FileCopier import FileCopier
from _emerge.CompositeTask import CompositeTask
+logger = logging.getLogger(__name__)
-class DeletionTask(CompositeTask):
+class DeletionTask(CompositeTask):
__slots__ = ("distfile", "distfile_path", "config")
def _start(self):
if self.config.options.recycle_dir is not None:
recycle_path = os.path.join(self.config.options.recycle_dir, self.distfile)
if self.config.options.dry_run:
- logging.info(
- ("dry-run: move '%s' from " "distfiles to recycle") % self.distfile
+ logger.info(
+ f"dry-run: move '{self.distfile}' from distfiles to recycle"
)
else:
- logging.debug(
- ("move '%s' from " "distfiles to recycle") % self.distfile
- )
+ logger.debug(f"move '{self.distfile}' from distfiles to recycle")
try:
# note: distfile_path can be a symlink here
os.rename(os.path.realpath(self.distfile_path), recycle_path)
except OSError as e:
if e.errno != errno.EXDEV:
- logging.error(
+ logger.error(
("rename %s from distfiles to " "recycle failed: %s")
% (self.distfile, e)
)
@@ -52,16 +51,14 @@ class DeletionTask(CompositeTask):
success = True
if self.config.options.dry_run:
- logging.info(("dry-run: delete '%s' from " "distfiles") % self.distfile)
+ logger.info(f"dry-run: delete '{self.distfile}' from distfiles")
else:
- logging.debug(("delete '%s' from " "distfiles") % self.distfile)
+ logger.debug(f"delete '{self.distfile}' from distfiles")
try:
os.unlink(self.distfile_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- logging.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
- )
+ logger.error(f"{self.distfile} unlink failed in distfiles: {e}")
success = False
if success:
@@ -72,7 +69,6 @@ class DeletionTask(CompositeTask):
self._async_wait()
def _recycle_copier_exit(self, copier):
-
self._assert_current(copier)
if self._was_cancelled():
self.wait()
@@ -80,18 +76,15 @@ class DeletionTask(CompositeTask):
success = True
if copier.returncode == os.EX_OK:
-
try:
os.unlink(copier.src_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- logging.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
- )
+ logger.error(f"{self.distfile} unlink failed in distfiles: {e}")
success = False
else:
- logging.error(
+ logger.error(
("%s copy from distfiles " "to recycle failed: %s")
% (self.distfile, copier.future.exception())
)
@@ -109,7 +102,7 @@ class DeletionTask(CompositeTask):
success = True
for layout in self.config.layouts:
if isinstance(layout, ContentHashLayout) and not self.distfile.digests:
- logging.debug(("_delete_links: '%s' has " "no digests") % self.distfile)
+ logger.debug(f"_delete_links: '{self.distfile}' has no digests")
continue
distfile_path = os.path.join(
self.config.options.distfiles, layout.get_path(self.distfile)
@@ -118,9 +111,7 @@ class DeletionTask(CompositeTask):
os.unlink(distfile_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- logging.error(
- "%s unlink failed in distfiles: %s" % (self.distfile, e)
- )
+ logger.error(f"{self.distfile} unlink failed in distfiles: {e}")
success = False
if success:
@@ -130,13 +121,12 @@ class DeletionTask(CompositeTask):
self.returncode = 1
def _success(self):
-
cpv = "unknown"
if self.config.distfiles_db is not None:
cpv = self.config.distfiles_db.get(self.distfile, cpv)
self.config.delete_count += 1
- self.config.log_success("%s\t%s\tremoved" % (cpv, self.distfile))
+ self.config.log_success(f"{cpv}\t{self.distfile}\tremoved")
if self.config.distfiles_db is not None:
try:
@@ -144,7 +134,7 @@ class DeletionTask(CompositeTask):
except KeyError:
pass
else:
- logging.debug(("drop '%s' from " "distfiles db") % self.distfile)
+ logger.debug(f"drop '{self.distfile}' from distfiles db")
if self.config.content_db is not None:
self.config.content_db.remove(self.distfile)
@@ -155,4 +145,4 @@ class DeletionTask(CompositeTask):
except KeyError:
pass
else:
- logging.debug(("drop '%s' from " "deletion db") % self.distfile)
+ logger.debug(f"drop '{self.distfile}' from deletion db")
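The recycle branch in DeletionTask._start() above relies on os.rename() raising EXDEV when the recycle dir sits on a different filesystem, in which case the task falls back to a copy (Portage's async FileCopier, whose exit is handled in _recycle_copier_exit). A compact synchronous sketch of the same pattern, with shutil standing in for FileCopier:

import errno
import os
import shutil

def move_to_recycle(src, dst):
    try:
        # Atomic when src and dst share a filesystem.
        os.rename(src, dst)
    except OSError as e:
        if e.errno != errno.EXDEV:
            raise
        # Cross-device move: copy first, then unlink the original.
        shutil.copy2(src, dst)
        os.unlink(src)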
diff --git a/lib/portage/_emirrordist/FetchIterator.py b/lib/portage/_emirrordist/FetchIterator.py
index 79c460f79..e4fdd092a 100644
--- a/lib/portage/_emirrordist/FetchIterator.py
+++ b/lib/portage/_emirrordist/FetchIterator.py
@@ -1,4 +1,4 @@
-# Copyright 2013-2018 Gentoo Foundation
+# Copyright 2013-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import threading
@@ -14,6 +14,7 @@ from portage.exception import PortageException, PortageKeyError
from portage.package.ebuild.fetch import DistfileName
from portage.util._async.AsyncTaskFuture import AsyncTaskFuture
from portage.util._async.TaskScheduler import TaskScheduler
+from portage.util.futures import asyncio
from portage.util.futures.iter_completed import iter_gather
from .FetchTask import FetchTask
from _emerge.CompositeTask import CompositeTask
@@ -41,11 +42,9 @@ class FetchIterator:
# and in order to reduce latency in case of a signal interrupt.
cp_all = self._config.portdb.cp_all
for category in sorted(self._config.portdb.categories):
- for cp in cp_all(categories=(category,)):
- yield cp
+ yield from cp_all(categories=(category,))
def __iter__(self):
-
portdb = self._config.portdb
get_repo_for_location = portdb.repositories.get_repo_for_location
@@ -54,19 +53,16 @@ class FetchIterator:
hash_filter = None
for cp in self._iter_every_cp():
-
if self._terminated.is_set():
return
for tree in portdb.porttrees:
-
# Reset state so the Manifest is pulled once
# for this cp / tree combination.
repo_config = get_repo_for_location(tree)
digests_future = portdb._event_loop.create_future()
for cpv in portdb.cp_list(cp, mytree=tree):
-
if self._terminated.is_set():
return
@@ -163,7 +159,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
(restrict,) = aux_get_result.result()
except (PortageKeyError, PortageException) as e:
- config.log_failure("%s\t\taux_get exception %s" % (cpv, e))
+ config.log_failure(f"{cpv}\t\taux_get exception {e}")
result.set_result(fetch_tasks)
return
@@ -173,14 +169,14 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
try:
restrict = frozenset(use_reduce(restrict, flat=True, matchnone=True))
except PortageException as e:
- config.log_failure("%s\t\tuse_reduce exception %s" % (cpv, e))
+ config.log_failure(f"{cpv}\t\tuse_reduce exception {e}")
result.set_result(fetch_tasks)
return
try:
uri_map = fetch_map_result.result()
except PortageException as e:
- config.log_failure("%s\t\tgetFetchMap exception %s" % (cpv, e))
+ config.log_failure(f"{cpv}\t\tgetFetchMap exception {e}")
result.set_result(fetch_tasks)
return
@@ -199,7 +195,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
if override_fetch:
uri = uri.partition("+")[2]
- # skip fetch-restricted files unless overriden via fetch+
+ # skip fetch-restricted files unless overridden via fetch+
# or mirror+
if restrict_fetch and not override_fetch:
continue
@@ -235,10 +231,10 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
digests = repo_config.load_manifest(
os.path.join(repo_config.location, cpv.cp)
).getTypeDigests("DIST")
- except (EnvironmentError, PortageException) as e:
+ except (OSError, PortageException) as e:
digests_future.done() or digests_future.set_exception(e)
for filename in new_uri_map:
- config.log_failure("%s\t%s\tManifest exception %s" % (cpv, filename, e))
+ config.log_failure(f"{cpv}\t{filename}\tManifest exception {e}")
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
return
@@ -247,7 +243,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
if not digests:
for filename in new_uri_map:
- config.log_failure("%s\t%s\tdigest entry missing" % (cpv, filename))
+ config.log_failure(f"{cpv}\t{filename}\tdigest entry missing")
config.file_failures[filename] = cpv
result.set_result(fetch_tasks)
return
@@ -255,7 +251,7 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
for filename, uri_tuple in new_uri_map.items():
file_digests = digests.get(filename)
if file_digests is None:
- config.log_failure("%s\t%s\tdigest entry missing" % (cpv, filename))
+ config.log_failure(f"{cpv}\t{filename}\tdigest entry missing")
config.file_failures[filename] = cpv
continue
if filename in config.file_owners:
@@ -281,8 +277,11 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
result.set_result(fetch_tasks)
def future_generator():
- yield config.portdb.async_aux_get(
- cpv, ("RESTRICT",), myrepo=repo_config.name, loop=loop
+ yield asyncio.ensure_future(
+ config.portdb.async_aux_get(
+ cpv, ("RESTRICT",), myrepo=repo_config.name, loop=loop
+ ),
+ loop,
)
yield config.portdb.async_fetch_map(cpv, mytree=repo_config.location, loop=loop)
@@ -297,9 +296,11 @@ def _async_fetch_tasks(config, hash_filter, repo_config, digests_future, cpv, lo
)
gather_result.add_done_callback(aux_get_done)
result.add_done_callback(
- lambda result: gather_result.cancel()
- if result.cancelled() and not gather_result.done()
- else None
+ lambda result: (
+ gather_result.cancel()
+ if result.cancelled() and not gather_result.done()
+ else None
+ )
)
return result
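The future_generator hunk wraps the aux_get awaitable in asyncio.ensure_future(..., loop), scheduling it as a task immediately instead of only when awaited; portage's portage.util.futures.asyncio wrapper accepts the loop positionally. A rough stdlib-only illustration of the scheduling effect (names are invented):

    import asyncio

    async def aux_get(cpv):
        await asyncio.sleep(0.1)
        return ("RESTRICT",)

    async def main():
        # ensure_future() returns a Task that starts running right away,
        # so it can be gathered or cancelled independently of the point
        # where its result is finally awaited.
        task = asyncio.ensure_future(aux_get("cat/pkg-1.0"))
        print(await task)

    asyncio.run(main())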
diff --git a/lib/portage/_emirrordist/FetchTask.py b/lib/portage/_emirrordist/FetchTask.py
index 304837332..013163f83 100644
--- a/lib/portage/_emirrordist/FetchTask.py
+++ b/lib/portage/_emirrordist/FetchTask.py
@@ -17,6 +17,8 @@ from portage.util._async.PipeLogger import PipeLogger
from portage.util._async.PopenProcess import PopenProcess
from _emerge.CompositeTask import CompositeTask
+logger = logging.getLogger(__name__)
+
default_hash_name = portage.const.MANIFEST2_HASH_DEFAULT
# Use --no-check-certificate since Manifest digests should provide
@@ -25,7 +27,6 @@ default_fetchcommand = 'wget -c -v -t 1 --passive-ftp --no-check-certificate --t
class FetchTask(CompositeTask):
-
__slots__ = (
"distfile",
"digests",
@@ -46,7 +47,6 @@ class FetchTask(CompositeTask):
)
def _start(self):
-
if (
self.config.options.fetch_log_dir is not None
and not self.config.options.dry_run
@@ -68,11 +68,11 @@ class FetchTask(CompositeTask):
self.config.content_db.add(self.distfile)
if not self._have_needed_digests():
- msg = "incomplete digests: %s" % " ".join(self.digests)
+ msg = f"incomplete digests: {' '.join(self.digests)}"
self.scheduler.output(
msg, background=self.background, log_path=self._log_path
)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self._async_wait()
@@ -87,11 +87,11 @@ class FetchTask(CompositeTask):
st = os.stat(distfile_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "%s stat failed in %s: %s" % (self.distfile, "distfiles", e)
+ msg = f"{self.distfile} stat failed in distfiles: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
else:
break
@@ -100,7 +100,7 @@ class FetchTask(CompositeTask):
if not size_ok:
if self.config.options.dry_run:
if st is not None:
- logging.info(
+ logger.info(
("dry-run: delete '%s' with " "wrong size from distfiles")
% (self.distfile,)
)
@@ -115,14 +115,13 @@ class FetchTask(CompositeTask):
)
if self._unlink_file(unlink_path, "distfiles"):
if st is not None:
- logging.debug(
+ logger.debug(
("delete '%s' with " "wrong size from distfiles")
% (self.distfile,)
)
else:
self.config.log_failure(
- "%s\t%s\t%s"
- % (self.cpv, self.distfile, "unlink failed in distfiles")
+ f"{self.cpv}\t{self.distfile}\tunlink failed in distfiles"
)
unlink_success = False
if not unlink_success:
@@ -167,22 +166,20 @@ class FetchTask(CompositeTask):
pass
if self.config.options.recycle_dir is not None:
-
recycle_file = os.path.join(self.config.options.recycle_dir, self.distfile)
if self.config.options.dry_run:
if os.path.exists(recycle_file):
- logging.info("dry-run: delete '%s' from recycle" % (self.distfile,))
+ logger.info(f"dry-run: delete '{self.distfile}' from recycle")
else:
try:
os.unlink(recycle_file)
except OSError:
pass
else:
- logging.debug("delete '%s' from recycle" % (self.distfile,))
+ logger.debug(f"delete '{self.distfile}' from recycle")
def _distfiles_digester_exit(self, digester):
-
self._assert_current(digester)
if self._was_cancelled():
self.wait()
@@ -193,10 +190,10 @@ class FetchTask(CompositeTask):
# is a bad situation which normally does not occur, so
# skip this file and report it, in order to draw attention
# from the administrator.
- msg = "%s distfiles digester failed unexpectedly" % (self.distfile,)
+ msg = f"{self.distfile} distfiles digester failed unexpectedly"
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
- logging.error(msg)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ logger.error(msg)
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.wait()
return
@@ -213,7 +210,6 @@ class FetchTask(CompositeTask):
_mirror_info = collections.namedtuple("_mirror_info", "name location")
def _start_fetch(self):
-
self._previously_added = False
self._fs_mirror_stack = []
if self.config.options.distfiles_local is not None:
@@ -242,7 +238,6 @@ class FetchTask(CompositeTask):
@staticmethod
def _mirror_iterator(uri, mirrors_dict):
-
slash_index = uri.find("/", 9)
if slash_index != -1:
mirror_name = uri[9:slash_index].strip("/")
@@ -269,7 +264,7 @@ class FetchTask(CompositeTask):
else:
msg = "no fetchable uris"
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = os.EX_OK
self.wait()
@@ -277,7 +272,6 @@ class FetchTask(CompositeTask):
def _next_uri(self):
remaining_tries = self.config.options.tries - len(self._tried_uris)
if remaining_tries > 0:
-
if remaining_tries <= self.config.options.tries // 2:
while self._primaryuri_stack:
uri = self._primaryuri_stack.pop()
@@ -308,11 +302,11 @@ class FetchTask(CompositeTask):
st = os.stat(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "%s stat failed in %s: %s" % (self.distfile, mirror_info.name, e)
+ msg = f"{self.distfile} stat failed in {mirror_info.name}: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
else:
size_ok = st.st_size == self.digests["size"]
self._current_stat = st
@@ -332,7 +326,6 @@ class FetchTask(CompositeTask):
self._try_next_mirror()
def _fs_mirror_digester_exit(self, digester):
-
self._assert_current(digester)
if self._was_cancelled():
self.wait()
@@ -340,16 +333,13 @@ class FetchTask(CompositeTask):
current_mirror = self._current_mirror
if digester.returncode != os.EX_OK:
- msg = "%s %s digester failed unexpectedly" % (
- self.distfile,
- current_mirror.name,
- )
+ msg = f"{self.distfile} {current_mirror.name} digester failed unexpectedly"
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
- logging.error(msg)
+ logger.error(msg)
else:
bad_digest = self._find_bad_digest(digester.digests)
if bad_digest is not None:
- msg = "%s %s has bad %s digest: expected %s, got %s" % (
+ msg = "{} {} has bad {} digest: expected {}, got {}".format(
self.distfile,
current_mirror.name,
bad_digest,
@@ -359,18 +349,18 @@ class FetchTask(CompositeTask):
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
elif self.config.options.dry_run:
# Report success without actually touching any files
if self._same_device(
current_mirror.location, self.config.options.distfiles
):
- logging.info(
+ logger.info(
("dry-run: hardlink '%s' from %s " "to distfiles")
% (self.distfile, current_mirror.name)
)
else:
- logging.info(
+ logger.info(
"dry-run: copy '%s' from %s to distfiles"
% (self.distfile, current_mirror.name)
)
@@ -385,9 +375,9 @@ class FetchTask(CompositeTask):
self.config.layouts[0].get_path(self.distfile),
)
if self._hardlink_atomic(
- src, dest, "%s to %s" % (current_mirror.name, "distfiles")
+ src, dest, f"{current_mirror.name} to distfiles"
):
- logging.debug(
+ logger.debug(
"hardlink '%s' from %s to distfiles"
% (self.distfile, current_mirror.name)
)
@@ -410,7 +400,6 @@ class FetchTask(CompositeTask):
self._try_next_mirror()
def _fs_mirror_copier_exit(self, copier):
-
self._assert_current(copier)
if self._was_cancelled():
self.wait()
@@ -418,17 +407,16 @@ class FetchTask(CompositeTask):
current_mirror = self._current_mirror
if copier.returncode != os.EX_OK:
- msg = "%s %s copy failed unexpectedly: %s" % (
+ msg = "{} {} copy failed unexpectedly: {}".format(
self.distfile,
current_mirror.name,
copier.future.exception(),
)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
- logging.error(msg)
+ logger.error(msg)
else:
-
- logging.debug(
- "copy '%s' from %s to distfiles" % (self.distfile, current_mirror.name)
+ logger.debug(
+ f"copy '{self.distfile}' from {current_mirror.name} to distfiles"
)
# Apply the timestamp from the source file, but
@@ -439,7 +427,7 @@ class FetchTask(CompositeTask):
ns=(self._current_stat.st_mtime_ns, self._current_stat.st_mtime_ns),
)
except OSError as e:
- msg = "%s %s utime failed unexpectedly: %s" % (
+ msg = "{} {} utime failed unexpectedly: {}".format(
self.distfile,
current_mirror.name,
e,
@@ -447,7 +435,7 @@ class FetchTask(CompositeTask):
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
self._success()
self.returncode = os.EX_OK
@@ -457,10 +445,9 @@ class FetchTask(CompositeTask):
self._try_next_mirror()
def _fetch_uri(self, uri):
-
if self.config.options.dry_run:
# Simply report success.
- logging.info("dry-run: fetch '%s' from '%s'" % (self.distfile, uri))
+ logger.info(f"dry-run: fetch '{self.distfile}' from '{uri}'")
self._success()
self.returncode = os.EX_OK
self._async_wait()
@@ -473,7 +460,7 @@ class FetchTask(CompositeTask):
self._fetch_tmp_dir_info = "distfiles"
distdir = self.config.options.distfiles
- tmp_basename = self.distfile + "._emirrordist_fetch_.%s" % portage.getpid()
+ tmp_basename = self.distfile + f"._emirrordist_fetch_.{portage.getpid()}"
variables = {"DISTDIR": distdir, "URI": uri, "FILE": tmp_basename}
@@ -511,7 +498,6 @@ class FetchTask(CompositeTask):
self._start_task(fetcher, self._fetcher_exit)
def _fetcher_exit(self, fetcher):
-
self._assert_current(fetcher)
if self._was_cancelled():
self.wait()
@@ -531,23 +517,22 @@ class FetchTask(CompositeTask):
self._try_next_mirror()
def _fetch_digester_exit(self, digester):
-
self._assert_current(digester)
if self._was_cancelled():
self.wait()
return
if digester.returncode != os.EX_OK:
- msg = "%s %s digester failed unexpectedly" % (
+ msg = "{} {} digester failed unexpectedly".format(
self.distfile,
self._fetch_tmp_dir_info,
)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
- logging.error(msg)
+ logger.error(msg)
else:
bad_digest = self._find_bad_digest(digester.digests)
if bad_digest is not None:
- msg = "%s has bad %s digest: expected %s, got %s" % (
+ msg = "{} has bad {} digest: expected {}, got {}".format(
self.distfile,
bad_digest,
self.digests[bad_digest],
@@ -586,7 +571,6 @@ class FetchTask(CompositeTask):
self._try_next_mirror()
def _fetch_copier_exit(self, copier):
-
self._assert_current(copier)
try:
@@ -602,14 +586,14 @@ class FetchTask(CompositeTask):
self._make_layout_links()
else:
# out of space?
- msg = "%s %s copy failed unexpectedly: %s" % (
+ msg = "{} {} copy failed unexpectedly: {}".format(
self.distfile,
self._fetch_tmp_dir_info,
copier.future.exception(),
)
self.scheduler.output(msg + "\n", background=True, log_path=self._log_path)
- logging.error(msg)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ logger.error(msg)
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
self.wait()
@@ -634,7 +618,7 @@ class FetchTask(CompositeTask):
if not self._hardlink_atomic(
src_path,
link_path,
- "%s -> %s" % (link_path, src_path),
+ f"{link_path} -> {src_path}",
self.config.options.symlinks,
):
success = False
@@ -647,7 +631,7 @@ class FetchTask(CompositeTask):
msg = "failed to create distfiles layout {}".format(
"symlink" if self.config.options.symlinks else "hardlink"
)
- self.config.log_failure("%s\t%s\t%s" % (self.cpv, self.distfile, msg))
+ self.config.log_failure(f"{self.cpv}\t{self.distfile}\t{msg}")
self.config.file_failures[self.distfile] = self.cpv
self.returncode = 1
@@ -658,11 +642,11 @@ class FetchTask(CompositeTask):
os.unlink(file_path)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- msg = "unlink '%s' failed in %s: %s" % (self.distfile, dir_info, e)
+ msg = f"unlink '{self.distfile}' failed in {dir_info}: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
return False
return True
@@ -698,10 +682,9 @@ class FetchTask(CompositeTask):
return st1.st_dev == st2.st_dev
def _hardlink_atomic(self, src, dest, dir_info, symlink=False):
-
head, tail = os.path.split(dest)
hardlink_tmp = os.path.join(
- head, ".%s._mirrordist_hardlink_.%s" % (tail, portage.getpid())
+ head, f".{tail}._mirrordist_hardlink_.{portage.getpid()}"
)
try:
@@ -712,29 +695,21 @@ class FetchTask(CompositeTask):
os.link(src, hardlink_tmp)
except OSError as e:
if e.errno != errno.EXDEV:
- msg = "hardlink %s from %s failed: %s" % (
- self.distfile,
- dir_info,
- e,
- )
+ msg = f"hardlink {self.distfile} from {dir_info} failed: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
return False
try:
os.rename(hardlink_tmp, dest)
except OSError as e:
- msg = "hardlink rename '%s' from %s failed: %s" % (
- self.distfile,
- dir_info,
- e,
- )
+ msg = f"hardlink rename '{self.distfile}' from {dir_info} failed: {e}"
self.scheduler.output(
msg + "\n", background=True, log_path=self._log_path
)
- logging.error(msg)
+ logger.error(msg)
return False
finally:
try:
diff --git a/lib/portage/_emirrordist/MirrorDistTask.py b/lib/portage/_emirrordist/MirrorDistTask.py
index 28164e645..984b9fbee 100644
--- a/lib/portage/_emirrordist/MirrorDistTask.py
+++ b/lib/portage/_emirrordist/MirrorDistTask.py
@@ -4,11 +4,7 @@
import errno
import logging
import time
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
+import threading
import portage
from portage import os
@@ -17,9 +13,10 @@ from _emerge.CompositeTask import CompositeTask
from .FetchIterator import FetchIterator
from .DeletionIterator import DeletionIterator
+logger = logging.getLogger(__name__)
class MirrorDistTask(CompositeTask):
__slots__ = ("_config", "_fetch_iterator", "_term_rlock", "_term_callback_handle")
def __init__(self, config):
@@ -40,7 +37,6 @@ class MirrorDistTask(CompositeTask):
self._start_task(fetch, self._fetch_exit)
def _fetch_exit(self, fetch):
-
self._assert_current(fetch)
if self._was_cancelled():
self._async_wait()
@@ -59,7 +55,6 @@ class MirrorDistTask(CompositeTask):
self._post_deletion()
def _deletion_exit(self, deletion):
-
self._assert_current(deletion)
if self._was_cancelled():
self._async_wait()
@@ -68,7 +63,6 @@ class MirrorDistTask(CompositeTask):
self._post_deletion()
def _post_deletion(self):
-
if self._config.options.recycle_db is not None:
self._update_recycle_db()
@@ -82,7 +76,6 @@ class MirrorDistTask(CompositeTask):
self._async_wait()
def _update_recycle_db(self):
-
start_time = self._config.start_time
recycle_dir = self._config.options.recycle_dir
recycle_db = self._config.recycle_db
@@ -92,21 +85,18 @@ class MirrorDistTask(CompositeTask):
recycle_db_cache = dict(recycle_db.items())
for filename in os.listdir(recycle_dir):
-
recycle_file = os.path.join(recycle_dir, filename)
try:
st = os.stat(recycle_file)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- logging.error(
- ("stat failed for '%s' in " "recycle: %s") % (filename, e)
- )
+ logger.error(f"stat failed for '{filename}' in recycle: {e}")
continue
value = recycle_db_cache.pop(filename, None)
if value is None:
- logging.debug(("add '%s' to " "recycle db") % filename)
+ logger.debug(f"add '{filename}' to recycle db")
recycle_db[filename] = (st.st_size, start_time)
else:
r_size, r_time = value
@@ -114,29 +104,24 @@ class MirrorDistTask(CompositeTask):
recycle_db[filename] = (st.st_size, start_time)
elif r_time + r_deletion_delay < start_time:
if self._config.options.dry_run:
- logging.info(
- ("dry-run: delete '%s' from " "recycle") % filename
- )
- logging.info(("drop '%s' from " "recycle db") % filename)
+ logger.info(f"dry-run: delete '{filename}' from recycle")
+ logger.info(f"drop '{filename}' from recycle db")
else:
try:
os.unlink(recycle_file)
except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
- logging.error(
- ("delete '%s' from " "recycle failed: %s")
- % (filename, e)
+ logger.error(
+ f"delete '{filename}' from recycle failed: {e}"
)
else:
- logging.debug(("delete '%s' from " "recycle") % filename)
+ logger.debug(f"delete '{filename}' from recycle")
try:
del recycle_db[filename]
except KeyError:
pass
else:
- logging.debug(
- ("drop '%s' from " "recycle db") % filename
- )
+ logger.debug(f"drop '{filename}' from recycle db")
# Existing files were popped from recycle_db_cache,
# so any remaining entries are for files that no
@@ -147,10 +132,9 @@ class MirrorDistTask(CompositeTask):
except KeyError:
pass
else:
- logging.debug(("drop non-existent '%s' from " "recycle db") % filename)
+ logger.debug(f"drop non-existent '{filename}' from recycle db")
def _scheduled_deletion_log(self):
-
start_time = self._config.start_time
dry_run = self._config.options.dry_run
deletion_delay = self._config.options.deletion_delay
@@ -169,7 +153,7 @@ class MirrorDistTask(CompositeTask):
date_files.append(filename)
if dry_run:
- logging.warning(
+ logger.warning(
"dry-run: scheduled-deletions log "
"will be summarized via logging.info"
)
@@ -178,16 +162,15 @@ class MirrorDistTask(CompositeTask):
for date in sorted(date_map):
date_files = date_map[date]
if dry_run:
- logging.info(
- ("dry-run: scheduled deletions for %s: %s files")
- % (date, len(date_files))
+ logger.info(
+ f"dry-run: scheduled deletions for {date}: {len(date_files)} files"
)
- lines.append("%s\n" % date)
+ lines.append(f"{date}\n")
for filename in date_files:
cpv = "unknown"
if distfiles_db is not None:
cpv = distfiles_db.get(filename, cpv)
- lines.append("\t%s\t%s\n" % (filename, cpv))
+ lines.append(f"\t{filename}\t{cpv}\n")
if not dry_run:
portage.util.write_atomic(
@@ -202,12 +185,12 @@ class MirrorDistTask(CompositeTask):
added_file_count = self._config.added_file_count
added_byte_count = self._config.added_byte_count
- logging.info("finished in %i seconds" % elapsed_time)
- logging.info("failed to fetch %i files" % fail_count)
- logging.info("deleted %i files" % delete_count)
- logging.info("deletion of %i files scheduled" % scheduled_deletion_count)
- logging.info("added %i files" % added_file_count)
- logging.info("added %i bytes total" % added_byte_count)
+ logger.info("finished in %i seconds" % elapsed_time)
+ logger.info("failed to fetch %i files" % fail_count)
+ logger.info("deleted %i files" % delete_count)
+ logger.info("deletion of %i files scheduled" % scheduled_deletion_count)
+ logger.info("added %i files" % added_file_count)
+ logger.info("added %i bytes total" % added_byte_count)
def _cleanup(self):
"""
@@ -250,4 +233,4 @@ class MirrorDistTask(CompositeTask):
Override _async_wait to call self._cleanup().
"""
self._cleanup()
- super(MirrorDistTask, self)._async_wait()
+ super()._async_wait()
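For context, _scheduled_deletion_log above buckets filenames by their scheduled deletion date before writing the log. The grouping idiom in isolation (timestamps and delay are invented):

    import time

    deletion_delay = 7 * 24 * 3600  # example: delete after one week
    pending = {"foo-1.0.tar.gz": 1700000000, "bar-2.0.tar.gz": 1700000500}

    date_map = {}
    for filename, start in pending.items():
        date = time.strftime("%Y-%m-%d", time.gmtime(start + deletion_delay))
        date_map.setdefault(date, []).append(filename)

    for date in sorted(date_map):
        print(f"scheduled deletions for {date}: {len(date_map[date])} files")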
diff --git a/lib/portage/_emirrordist/main.py b/lib/portage/_emirrordist/main.py
index f6a4f2e43..989665ab8 100644
--- a/lib/portage/_emirrordist/main.py
+++ b/lib/portage/_emirrordist/main.py
@@ -29,7 +29,7 @@ common_options = (
"longopt": "--verbose",
"shortopt": "-v",
"help": "display extra information on stderr "
- "(multiple occurences increase verbosity)",
+ "(multiple occurrences increase verbosity)",
"action": "count",
"default": 0,
},
@@ -232,19 +232,18 @@ def parse_args(args):
def emirrordist_main(args):
-
# The calling environment is ignored, so the program is
# completely controlled by commandline arguments.
env = {}
if not sys.stdout.isatty():
portage.output.nocolor()
- env["NOCOLOR"] = "true"
+ env["NO_COLOR"] = "true"
parser, options, args = parse_args(args)
if options.version:
- sys.stdout.write("Portage %s\n" % portage.VERSION)
+ sys.stdout.write(f"Portage {portage.VERSION}\n")
return os.EX_OK
config_root = options.config_root
@@ -275,7 +274,7 @@ def emirrordist_main(args):
repo_path = settings.repositories.treemap.get(options.repo)
if repo_path is None:
- parser.error("Unable to locate repository named '%s'" % (options.repo,))
+ parser.error(f"Unable to locate repository named '{options.repo}'")
if options.jobs is not None:
options.jobs = int(options.jobs)
@@ -335,9 +334,7 @@ def emirrordist_main(args):
os.path.isdir(options.temp_dir)
and os.access(options.temp_dir, os.W_OK | os.X_OK)
):
- parser.error(
- ("--temp-dir '%s' is not a " "writable directory") % options.temp_dir
- )
+ parser.error(f"--temp-dir '{options.temp_dir}' is not a writable directory")
if options.distfiles is not None:
options.distfiles = normalize_path(os.path.abspath(options.distfiles))
@@ -347,7 +344,7 @@ def emirrordist_main(args):
and os.access(options.distfiles, os.W_OK | os.X_OK)
):
parser.error(
- ("--distfiles '%s' is not a " "writable directory") % options.distfiles
+ f"--distfiles '{options.distfiles}' is not a writable directory"
)
else:
parser.error("missing required --distfiles parameter")
@@ -393,8 +390,7 @@ def emirrordist_main(args):
and os.access(options.recycle_dir, os.W_OK | os.X_OK)
):
parser.error(
- ("--recycle-dir '%s' is not a " "writable directory")
- % options.recycle_dir
+ f"--recycle-dir '{options.recycle_dir}' is not a writable directory"
)
if options.recycle_db is not None:
@@ -413,8 +409,7 @@ def emirrordist_main(args):
and os.access(options.fetch_log_dir, os.W_OK | os.X_OK)
):
parser.error(
- ("--fetch-log-dir '%s' is not a " "writable directory")
- % options.fetch_log_dir
+ f"--fetch-log-dir '{options.fetch_log_dir}' is not a writable directory"
)
if options.whitelist_from:
@@ -422,7 +417,7 @@ def emirrordist_main(args):
for x in options.whitelist_from:
path = normalize_path(os.path.abspath(x))
if not os.access(path, os.R_OK):
- parser.error("--whitelist-from '%s' is not readable" % x)
+ parser.error(f"--whitelist-from '{x}' is not readable")
if os.path.isfile(path):
normalized_paths.append(path)
elif os.path.isdir(path):
@@ -435,7 +430,7 @@ def emirrordist_main(args):
normalized_paths.append(file)
else:
parser.error(
- "--whitelist-from '%s' is not a regular file or a directory" % x
+ f"--whitelist-from '{x}' is not a regular file or a directory"
)
options.whitelist_from = normalized_paths
@@ -459,7 +454,6 @@ def emirrordist_main(args):
l.setLevel(l.getEffectiveLevel() - 10 * options.verbose)
with Config(options, portdb, SchedulerInterface(global_event_loop())) as config:
-
if not options.mirror:
parser.error("No action specified")
diff --git a/lib/portage/_emirrordist/meson.build b/lib/portage/_emirrordist/meson.build
new file mode 100644
index 000000000..4ac3a0842
--- /dev/null
+++ b/lib/portage/_emirrordist/meson.build
@@ -0,0 +1,15 @@
+py.install_sources(
+ [
+ 'Config.py',
+ 'ContentDB.py',
+ 'DeletionIterator.py',
+ 'DeletionTask.py',
+ 'FetchIterator.py',
+ 'FetchTask.py',
+ 'MirrorDistTask.py',
+ 'main.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/_emirrordist',
+ pure : not native_extensions
+)
diff --git a/lib/portage/_global_updates.py b/lib/portage/_global_updates.py
index 80728fb43..f7997fc37 100644
--- a/lib/portage/_global_updates.py
+++ b/lib/portage/_global_updates.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2020 Gentoo Authors
+# Copyright 2010-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import stat
@@ -38,9 +38,14 @@ def _global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
if secpass < 2 or "SANDBOX_ACTIVE" in os.environ or len(trees) != 1:
return False
- return _do_global_updates(
- trees, prev_mtimes, quiet=quiet, if_mtime_changed=if_mtime_changed
- )
+ vardb = trees[trees._running_eroot]["vartree"].dbapi
+ vardb.lock()
+ try:
+ return _do_global_updates(
+ trees, prev_mtimes, quiet=quiet, if_mtime_changed=if_mtime_changed
+ )
+ finally:
+ vardb.unlock()
def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
@@ -89,7 +94,7 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
if not update_notice_printed:
update_notice_printed = True
writemsg_stdout("\n")
- writemsg_stdout(colorize("GOOD", _("Performing Global Updates\n")))
+ writemsg_stdout(colorize("GOOD", "Performing Global Updates\n"))
writemsg_stdout(
_(
"(Could take a couple of minutes if you have a lot of binary packages.)\n"
@@ -97,40 +102,37 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
)
if not quiet:
writemsg_stdout(
- _(
- " %s='update pass' %s='binary update' "
- "%s='/var/db update' %s='/var/db move'\n"
- " %s='/var/db SLOT move' %s='binary move' "
- "%s='binary SLOT move'\n %s='update /etc/portage/package.*'\n"
- )
- % (
- bold("."),
- bold("*"),
- bold("#"),
- bold("@"),
- bold("s"),
- bold("%"),
- bold("S"),
- bold("p"),
+ " ".join(
+ (
+ "",
+ f"{bold('.')}='update pass'",
+ f"{bold('*')}='binary update'",
+ f"{bold('#')}='/var/db update'",
+ f"{bold('@')}='/var/db move'\n",
+ f"{bold('s')}='/var/db SLOT move'",
+ f"{bold('%')}='binary move'",
+ f"{bold('S')}='binary SLOT move'\n",
+ f"{bold('p')}='update /etc/portage/package.*'\n",
+ )
)
)
valid_updates, errors = parse_updates(mycontent)
myupd.extend(valid_updates)
if not quiet:
writemsg_stdout(bold(mykey))
- writemsg_stdout(len(valid_updates) * "." + "\n")
+ writemsg_stdout(f"{len(valid_updates) * '.'}\n")
if len(errors) == 0:
# Update our internal mtime since we
# processed all of our directives.
timestamps[mykey] = mystat[stat.ST_MTIME]
else:
for msg in errors:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
if myupd:
retupd = True
if retupd:
- if os.access(bindb.bintree.pkgdir, os.W_OK):
+ if bindb.writable:
# Call binarytree.populate(), since we want to make sure it's
# only populated with local packages here (getbinpkgs=0).
bindb.bintree.populate()
@@ -198,7 +200,7 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
if world_modified:
world_list.sort()
- write_atomic(world_file, "".join("%s\n" % (x,) for x in world_list))
+ write_atomic(world_file, "".join(f"{x}\n" for x in world_list))
if world_warnings:
# XXX: print warning that we've updated world entries
# and the old name still matches something (from an overlay)?
@@ -239,44 +241,29 @@ def _do_global_updates(trees, prev_mtimes, quiet=False, if_mtime_changed=True):
# until after _all_ of the above updates have
# been processed because the mtimedb will
# automatically commit when killed by ctrl C.
- for mykey, mtime in timestamps.items():
- prev_mtimes[mykey] = mtime
+ prev_mtimes.update(timestamps)
- do_upgrade_packagesmessage = False
# We gotta do the brute force updates for these now.
- if True:
+ def onUpdate(_maxval, curval):
+ if curval > 0:
+ writemsg_stdout("#")
- def onUpdate(_maxval, curval):
- if curval > 0:
- writemsg_stdout("#")
+ if quiet:
+ onUpdate = None
- if quiet:
- onUpdate = None
- vardb.update_ents(repo_map, onUpdate=onUpdate)
- if bindb:
+ vardb.update_ents(repo_map, onUpdate=onUpdate)
- def onUpdate(_maxval, curval):
- if curval > 0:
- writemsg_stdout("*")
+ if bindb:
- if quiet:
- onUpdate = None
- bindb.update_ents(repo_map, onUpdate=onUpdate)
- else:
- do_upgrade_packagesmessage = 1
+ def onUpdate(_maxval, curval):
+ if curval > 0:
+ writemsg_stdout("*")
+
+ bindb.update_ents(repo_map, onUpdate=onUpdate)
# Update progress above is indicated by characters written to stdout so
# we print a couple new lines here to separate the progress output from
# what follows.
writemsg_stdout("\n\n")
- if do_upgrade_packagesmessage and bindb and bindb.cpv_all():
- writemsg_stdout(
- _(
- " ** Skipping packages. Run 'fixpackages' or set it in FEATURES to fix the tbz2's in the packages directory.\n"
- )
- )
- writemsg_stdout(bold(_("Note: This can take a very long time.")))
- writemsg_stdout("\n")
-
return retupd
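The new wrapper in _global_updates takes the vardb lock around the whole update pass and releases it in a finally block, so an exception mid-update cannot leak the lock. The same shape in isolation (stand-in lock object, not the portage API):

    class FakeDb:
        def lock(self):
            print("locked")

        def unlock(self):
            print("unlocked")

    def do_global_updates(vardb):
        vardb.lock()
        try:
            # ... perform updates while holding the lock ...
            return True
        finally:
            # Runs on normal return and on exceptions alike.
            vardb.unlock()

    do_global_updates(FakeDb())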
diff --git a/lib/portage/_selinux.py b/lib/portage/_selinux.py
index d05d6b8e7..5ae1b4e71 100644
--- a/lib/portage/_selinux.py
+++ b/lib/portage/_selinux.py
@@ -1,4 +1,4 @@
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
@@ -6,6 +6,7 @@
import os
import shutil
import warnings
+from functools import partial
try:
import selinux
@@ -81,7 +82,7 @@ def settype(newtype):
ret[2] = newtype
return ":".join(ret)
except IndexError:
- warnings.warn("Invalid SELinux context: %s" % getcontext())
+ warnings.warn(f"Invalid SELinux context: {getcontext()}")
return None
@@ -98,7 +99,7 @@ def setexec(ctx="\n"):
if selinux.security_getenforce() == 1:
raise OSError(msg)
else:
- portage.writemsg("!!! %s\n" % msg, noiselevel=-1)
+ portage.writemsg(f"!!! {msg}\n", noiselevel=-1)
if rc < 0:
if selinux.security_getenforce() == 1:
@@ -134,14 +135,12 @@ class spawn_wrapper:
def __call__(self, *args, **kwargs):
if self._con is not None:
- pre_exec = kwargs.get("pre_exec")
-
- def _pre_exec():
- if pre_exec is not None:
- pre_exec()
- setexec(self._con)
-
- kwargs["pre_exec"] = _pre_exec
+ pre_exec = partial(setexec, self._con)
+ kwargs["pre_exec"] = (
+ portage.process._chain_pre_exec_fns(pre_exec, kwargs["pre_exec"])
+ if kwargs.get("pre_exec")
+ else pre_exec
+ )
return self._spawn_func(*args, **kwargs)
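The spawn wrapper now builds its SELinux pre-exec hook with functools.partial and, when the caller supplied its own pre_exec, chains the two via portage.process._chain_pre_exec_fns. A stand-in for that chaining helper, to show the intended call order (the real helper lives in portage.process; this version is illustrative):

    from functools import partial

    def chain_pre_exec_fns(*fns):
        # Collapse several zero-argument hooks into one, preserving order.
        def chained():
            for fn in fns:
                fn()
        return chained

    def setexec(ctx):
        print(f"setexec({ctx!r})")

    def caller_hook():
        print("caller pre_exec")

    pre_exec = partial(setexec, "system_u:system_r:portage_t")
    chain_pre_exec_fns(pre_exec, caller_hook)()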
diff --git a/lib/portage/_sets/ProfilePackageSet.py b/lib/portage/_sets/ProfilePackageSet.py
index 1b5c6eae7..8ef7a5609 100644
--- a/lib/portage/_sets/ProfilePackageSet.py
+++ b/lib/portage/_sets/ProfilePackageSet.py
@@ -11,9 +11,7 @@ class ProfilePackageSet(PackageSet):
_operations = ["merge"]
def __init__(self, profiles, debug=False):
- super(ProfilePackageSet, self).__init__(
- allow_repo=any(allow_profile_repo_deps(y) for y in profiles)
- )
+ super().__init__(allow_repo=any(allow_profile_repo_deps(y) for y in profiles))
self._profiles = profiles
if profiles:
desc_profile = profiles[-1]
@@ -22,7 +20,7 @@ class ProfilePackageSet(PackageSet):
description = desc_profile.location
else:
description = None
- self.description = "Profile packages for profile %s" % description
+ self.description = f"Profile packages for profile {description}"
def load(self):
self._setAtoms(
diff --git a/lib/portage/_sets/__init__.py b/lib/portage/_sets/__init__.py
index 15f942b10..295a1e353 100644
--- a/lib/portage/_sets/__init__.py
+++ b/lib/portage/_sets/__init__.py
@@ -9,9 +9,6 @@ __all__ = [
"load_default_config",
]
-import io
-import logging
-import sys
import portage
from portage import os
from portage import load_mod
@@ -181,7 +178,7 @@ class SetConfig:
import random
while setname in parser.sections():
- setname = "%08d" % random.randint(0, 10 ** 10)
+ setname = "%08d" % random.randint(0, 10**10)
parser.add_section(setname)
for k, v in options.items():
@@ -342,7 +339,6 @@ class SetConfig:
def load_default_config(settings, trees):
-
if not _ENABLE_SET_CONFIG:
return SetConfig(None, settings, trees)
@@ -354,21 +350,33 @@ def load_default_config(settings, trees):
vcs_dirs = [_unicode_encode(x, encoding=_encodings["fs"]) for x in VCS_DIRS]
def _getfiles():
- for path, dirs, files in os.walk(os.path.join(global_config_path, "sets")):
- for d in dirs:
- if d in vcs_dirs or d.startswith(b".") or d.endswith(b"~"):
- dirs.remove(d)
- for f in files:
- if not f.startswith(b".") and not f.endswith(b"~"):
- yield os.path.join(path, f)
-
- dbapi = trees["porttree"].dbapi
- for repo in dbapi.getRepositories():
- path = dbapi.getRepositoryPath(repo)
- yield os.path.join(path, "sets.conf")
-
- yield os.path.join(
- settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "sets.conf"
- )
+ sets_config_paths = [
+ os.path.join(global_config_path, "sets"),
+ *(
+ os.path.join(repo.location, "sets.conf")
+ for repo in trees["porttree"].dbapi.repositories
+ ),
+ os.path.join(settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "sets.conf"),
+ ]
+
+ dot = "."
+ tilde = "~"
+ if not portage.utf8_mode:
+ dot = _unicode_encode(dot)
+ tilde = _unicode_encode(tilde)
+
+ for sets_config_path in sets_config_paths:
+ if os.path.isdir(sets_config_path):
+ for path, dirs, files in os.walk(sets_config_path):
+ dirs.sort()
+ files.sort()
+ for d in dirs:
+ if d in vcs_dirs or d.startswith(dot) or d.endswith(tilde):
+ dirs.remove(d)
+ for f in files:
+ if not f.startswith(dot) and not f.endswith(tilde):
+ yield os.path.join(path, f)
+ elif os.path.isfile(sets_config_path):
+ yield sets_config_path
return SetConfig(_getfiles(), settings, trees)
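The rewritten _getfiles prunes VCS and hidden directories during os.walk; mutating dirs in place is what stops the walk from descending into them, and the sort calls make traversal order deterministic. A minimal sketch (path is invented); note it iterates over a copy, dirs[:], since removing from the live list while iterating would skip the entry after each removal:

    import os

    VCS_DIRS = {".git", ".svn", "CVS"}

    def config_files(root):
        for path, dirs, files in os.walk(root):
            dirs.sort()
            files.sort()
            for d in dirs[:]:
                if d in VCS_DIRS or d.startswith(".") or d.endswith("~"):
                    dirs.remove(d)  # prunes descent into that directory
            for f in files:
                if not f.startswith(".") and not f.endswith("~"):
                    yield os.path.join(path, f)

    for f in config_files("/usr/share/portage/config/sets"):
        print(f)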
diff --git a/lib/portage/_sets/base.py b/lib/portage/_sets/base.py
index 8e540f3c8..537ad4510 100644
--- a/lib/portage/_sets/base.py
+++ b/lib/portage/_sets/base.py
@@ -34,10 +34,8 @@ class PackageSet:
def __iter__(self):
self._load()
- for x in self._atoms:
- yield x
- for x in self._nonatoms:
- yield x
+ yield from self._atoms
+ yield from self._nonatoms
def __bool__(self):
self._load()
@@ -160,9 +158,7 @@ class PackageSet:
class EditablePackageSet(PackageSet):
def __init__(self, allow_wildcard=False, allow_repo=False):
- super(EditablePackageSet, self).__init__(
- allow_wildcard=allow_wildcard, allow_repo=allow_repo
- )
+ super().__init__(allow_wildcard=allow_wildcard, allow_repo=allow_repo)
def update(self, atoms):
self._load()
@@ -223,10 +219,8 @@ class InternalPackageSet(EditablePackageSet):
functions default to allow_repo=False, which is sufficient to ensure
that repo atoms are prohibited when necessary.
"""
- super(InternalPackageSet, self).__init__(
- allow_wildcard=allow_wildcard, allow_repo=allow_repo
- )
- if initial_atoms != None:
+ super().__init__(allow_wildcard=allow_wildcard, allow_repo=allow_repo)
+ if initial_atoms is not None:
self.update(initial_atoms)
def clear(self):
@@ -242,7 +236,7 @@ class InternalPackageSet(EditablePackageSet):
class DummyPackageSet(PackageSet):
def __init__(self, atoms=None):
- super(DummyPackageSet, self).__init__()
+ super().__init__()
if atoms:
self._setAtoms(atoms)
diff --git a/lib/portage/_sets/dbapi.py b/lib/portage/_sets/dbapi.py
index 4a837522f..9c5b979a9 100644
--- a/lib/portage/_sets/dbapi.py
+++ b/lib/portage/_sets/dbapi.py
@@ -33,7 +33,7 @@ class EverythingSet(PackageSet):
_filter = None
def __init__(self, vdbapi, **kwargs):
- super(EverythingSet, self).__init__()
+ super().__init__()
self._db = vdbapi
def load(self):
@@ -47,7 +47,7 @@ class EverythingSet(PackageSet):
# SLOT installed, in order to avoid the possibility
# of unwanted upgrades as reported in bug #338959.
pkg = pkg_str(cpv, None)
- atom = Atom("%s:%s" % (pkg.cp, pkg.slot))
+ atom = Atom(f"{pkg.cp}:{pkg.slot}")
if self._filter:
if self._filter(atom):
myatoms.append(atom)
@@ -63,7 +63,6 @@ class EverythingSet(PackageSet):
class OwnerSet(PackageSet):
-
_operations = ["merge", "unmerge"]
description = (
@@ -71,7 +70,7 @@ class OwnerSet(PackageSet):
)
def __init__(self, vardb=None, exclude_files=None, files=None):
- super(OwnerSet, self).__init__()
+ super().__init__()
self._db = vardb
self._exclude_files = exclude_files
self._files = files
@@ -105,7 +104,7 @@ class OwnerSet(PackageSet):
if not exclude_paths:
for link, p in vardb._owners.iter_owners(paths):
pkg = pkg_str(link.mycpv, None)
- rValue.add("%s:%s" % (pkg.cp, pkg.slot))
+ rValue.add(f"{pkg.cp}:{pkg.slot}")
else:
all_paths = set()
all_paths.update(paths)
@@ -113,7 +112,7 @@ class OwnerSet(PackageSet):
exclude_atoms = set()
for link, p in vardb._owners.iter_owners(all_paths):
pkg = pkg_str(link.mycpv, None)
- atom = "%s:%s" % (pkg.cp, pkg.slot)
+ atom = f"{pkg.cp}:{pkg.slot}"
rValue.add(atom)
# Returned paths are relative to ROOT and do not have
# a leading slash.
@@ -145,7 +144,6 @@ class OwnerSet(PackageSet):
class VariableSet(EverythingSet):
-
_operations = ["merge", "unmerge"]
description = (
@@ -156,7 +154,7 @@ class VariableSet(EverythingSet):
def __init__(
self, vardb, metadatadb=None, variable=None, includes=None, excludes=None
):
- super(VariableSet, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
self._variable = variable
self._includes = includes
@@ -167,15 +165,32 @@ class VariableSet(EverythingSet):
if not ebuild:
return False
(values,) = self._metadatadb.aux_get(ebuild, [self._variable])
- values = values.split()
- if self._includes and not self._includes.intersection(values):
+ values_list = values.split()
+
+ if "DEPEND" in self._variable:
+ include_atoms = []
+ for include in self._includes:
+ include_atoms.append(Atom(include))
+
+ for x in use_reduce(values, token_class=Atom, flat=True):
+ if not isinstance(x, Atom):
+ continue
+
+ for include_atom in include_atoms:
+ if include_atom.match(x):
+ return True
+
return False
- if self._excludes and self._excludes.intersection(values):
+
+ if self._includes and not self._includes.intersection(values_list):
return False
+
+ if self._excludes and self._excludes.intersection(values_list):
+ return False
+
return True
def singleBuilder(cls, options, settings, trees):
-
variable = options.get("variable")
if variable is None:
raise SetConfigError(_("missing required attribute: 'variable'"))
@@ -204,7 +219,6 @@ class VariableSet(EverythingSet):
class SubslotChangedSet(PackageSet):
-
_operations = ["merge", "unmerge"]
description = (
@@ -214,7 +228,7 @@ class SubslotChangedSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(SubslotChangedSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
@@ -225,7 +239,7 @@ class SubslotChangedSet(PackageSet):
cp_list = self._vardb.cp_list
for cp in self._vardb.cp_all():
for pkg in cp_list(cp):
- slot_atom = "%s:%s" % (pkg.cp, pkg.slot)
+ slot_atom = f"{pkg.cp}:{pkg.slot}"
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -241,7 +255,6 @@ class SubslotChangedSet(PackageSet):
class DowngradeSet(PackageSet):
-
_operations = ["merge", "unmerge"]
description = (
@@ -251,7 +264,7 @@ class DowngradeSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(DowngradeSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
@@ -264,7 +277,7 @@ class DowngradeSet(PackageSet):
for cp in self._vardb.cp_all():
for cpv in cp_list(cp):
pkg = pkg_str(cpv, None)
- slot_atom = "%s:%s" % (pkg.cp, pkg.slot)
+ slot_atom = f"{pkg.cp}:{pkg.slot}"
ebuild = xmatch(xmatch_level, slot_atom)
if not ebuild:
continue
@@ -280,7 +293,6 @@ class DowngradeSet(PackageSet):
class UnavailableSet(EverythingSet):
-
_operations = ["unmerge"]
description = (
@@ -290,14 +302,13 @@ class UnavailableSet(EverythingSet):
)
def __init__(self, vardb, metadatadb=None):
- super(UnavailableSet, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
def _filter(self, atom):
return not self._metadatadb.match(atom)
def singleBuilder(cls, options, settings, trees):
-
metadatadb = options.get("metadata-source", "porttree")
if not metadatadb in trees:
raise SetConfigError(
@@ -310,7 +321,6 @@ class UnavailableSet(EverythingSet):
class UnavailableBinaries(EverythingSet):
-
_operations = (
"merge",
"unmerge",
@@ -323,7 +333,7 @@ class UnavailableBinaries(EverythingSet):
)
def __init__(self, vardb, metadatadb=None):
- super(UnavailableBinaries, self).__init__(vardb)
+ super().__init__(vardb)
self._metadatadb = metadatadb
def _filter(self, atom):
@@ -334,7 +344,6 @@ class UnavailableBinaries(EverythingSet):
return not self._metadatadb.cpv_exists(inst_cpv)
def singleBuilder(cls, options, settings, trees):
-
metadatadb = options.get("metadata-source", "bintree")
if not metadatadb in trees:
raise SetConfigError(
@@ -350,7 +359,7 @@ class CategorySet(PackageSet):
_operations = ["merge", "unmerge"]
def __init__(self, category, dbapi, only_visible=True):
- super(CategorySet, self).__init__()
+ super().__init__()
self._db = dbapi
self._category = category
self._check = only_visible
@@ -358,7 +367,7 @@ class CategorySet(PackageSet):
s = "visible"
else:
s = "all"
- self.description = "Package set containing %s packages of category %s" % (
+ self.description = "Package set containing {} packages of category {}".format(
s,
self._category,
)
@@ -438,12 +447,11 @@ class AgeSet(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, mode="older", age=7):
- super(AgeSet, self).__init__(vardb)
+ super().__init__(vardb)
self._mode = mode
self._age = age
def _filter(self, atom):
-
cpv = self._db.match(atom)[0]
try:
(date,) = self._db.aux_get(cpv, self._aux_keys)
@@ -477,12 +485,11 @@ class DateSet(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, date, mode="older"):
- super(DateSet, self).__init__(vardb)
+ super().__init__(vardb)
self._mode = mode
self._date = date
def _filter(self, atom):
-
cpv = self._db.match(atom)[0]
try:
(date,) = self._db.aux_get(cpv, self._aux_keys)
@@ -572,7 +579,7 @@ class RebuiltBinaries(EverythingSet):
_aux_keys = ("BUILD_TIME",)
def __init__(self, vardb, bindb=None):
- super(RebuiltBinaries, self).__init__(vardb, bindb=bindb)
+ super().__init__(vardb, bindb=bindb)
self._bindb = bindb
def _filter(self, atom):
@@ -591,7 +598,6 @@ class RebuiltBinaries(EverythingSet):
class ChangedDepsSet(PackageSet):
-
_operations = ["merge", "unmerge"]
description = (
@@ -601,7 +607,7 @@ class ChangedDepsSet(PackageSet):
)
def __init__(self, portdb=None, vardb=None):
- super(ChangedDepsSet, self).__init__()
+ super().__init__()
self._portdb = portdb
self._vardb = vardb
@@ -655,7 +661,7 @@ class ChangedDepsSet(PackageSet):
# if dependencies don't match, trigger the rebuild.
if vdbvars != pdbvars:
- atoms.append("=%s" % cpv)
+ atoms.append(f"={cpv}")
self._setAtoms(atoms)
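For *DEPEND variables, the new VariableSet._filter parses the raw value into Atom tokens with use_reduce and matches the configured includes as atoms rather than as whitespace-separated words, so an include like dev-libs/openssl can match an entry such as >=dev-libs/openssl-1.1:0=. A reduced sketch using the same parsing call (this version compares only the category/package part, whereas the hunk's Atom.match also honors version and slot constraints; assumes portage is importable):

    from portage.dep import Atom, use_reduce

    depend = ">=dev-libs/openssl-1.1:0= dev-lang/python"
    include = Atom("dev-libs/openssl")

    for token in use_reduce(depend, token_class=Atom, flat=True):
        # use_reduce can also yield operator strings such as "||",
        # so only real Atom instances are considered.
        if isinstance(token, Atom) and token.cp == include.cp:
            print(f"{include} matches {token}")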
diff --git a/lib/portage/_sets/files.py b/lib/portage/_sets/files.py
index 30fc80bd4..1b9cc6016 100644
--- a/lib/portage/_sets/files.py
+++ b/lib/portage/_sets/files.py
@@ -1,10 +1,11 @@
-# Copyright 2007-2020 Gentoo Authors
+# Copyright 2007-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import errno
import re
from itertools import chain
+import portage
from portage import os
from portage import _encodings
from portage import _unicode_decode
@@ -35,10 +36,10 @@ class StaticFileSet(EditablePackageSet):
_repopath_sub = re.compile(r"\$\{repository:(?P<reponame>.+)\}")
def __init__(self, filename, greedy=False, dbapi=None):
- super(StaticFileSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._filename = filename
self._mtime = None
- self.description = "Package set loaded from file %s" % self._filename
+ self.description = f"Package set loaded from file {self._filename}"
self.loader = ItemFileLoader(self._filename, self._validate)
if greedy and not dbapi:
self.errors.append(
@@ -56,18 +57,18 @@ class StaticFileSet(EditablePackageSet):
value = []
for line in metadata:
line = line.strip()
- if len(line) == 0 and key != None:
+ if len(line) == 0 and key is not None:
setattr(self, key, " ".join(value))
key = None
- elif line[-1] == ":" and key == None:
+ elif line[-1] == ":" and key is None:
key = line[:-1].lower()
value = []
- elif key != None:
+ elif key is not None:
value.append(line)
else:
pass
else:
- if key != None:
+ if key is not None:
setattr(self, key, " ".join(value))
def _validate(self, atom):
@@ -76,15 +77,13 @@ class StaticFileSet(EditablePackageSet):
def write(self):
write_atomic(
self._filename,
- "".join(
- "%s\n" % (atom,) for atom in sorted(chain(self._atoms, self._nonatoms))
- ),
+ "".join(f"{atom}\n" for atom in sorted(chain(self._atoms, self._nonatoms))),
)
def load(self):
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -92,7 +91,7 @@ class StaticFileSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -103,7 +102,7 @@ class StaticFileSet(EditablePackageSet):
matches = self.dbapi.match(a)
for cpv in matches:
pkg = self.dbapi._pkg_str(cpv, None)
- atoms.append("%s:%s" % (pkg.cp, pkg.slot))
+ atoms.append(f"{pkg.cp}:{pkg.slot}")
# In addition to any installed slots, also try to pull
# in the latest new slot that may be available.
atoms.append(a)
@@ -178,6 +177,14 @@ class StaticFileSet(EditablePackageSet):
directory = normalize_path(directory)
for parent, dirs, files in os.walk(directory):
+ if portage.utf8_mode:
+ dirs_orig = dirs
+ omit_dir = lambda d: dirs_orig.remove(os.fsdecode(d))
+ parent = os.fsencode(parent)
+ dirs = [os.fsencode(value) for value in dirs]
+ files = [os.fsencode(value) for value in files]
+ else:
+ omit_dir = lambda d: dirs.remove(d)
try:
parent = _unicode_decode(
parent, encoding=_encodings["fs"], errors="strict"
@@ -186,7 +193,7 @@ class StaticFileSet(EditablePackageSet):
continue
for d in dirs[:]:
if d in vcs_dirs or d.startswith(b".") or d.endswith(b"~"):
- dirs.remove(d)
+ omit_dir(d)
for filename in files:
try:
filename = _unicode_decode(
@@ -213,9 +220,9 @@ class StaticFileSet(EditablePackageSet):
class ConfigFileSet(PackageSet):
def __init__(self, filename):
- super(ConfigFileSet, self).__init__()
+ super().__init__()
self._filename = filename
- self.description = "Package set generated from %s" % self._filename
+ self.description = f"Package set generated from {self._filename}"
self.loader = KeyListFileLoader(self._filename, ValidAtomValidator)
def load(self):
@@ -250,7 +257,7 @@ class WorldSelectedSet(EditablePackageSet):
description = "Set of packages and subsets that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._pkgset = WorldSelectedPackagesSet(eroot)
self._setset = WorldSelectedSetsSet(eroot)
@@ -288,7 +295,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
description = "Set of packages that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedPackagesSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._lock = None
self._filename = os.path.join(eroot, WORLD_FILE)
self.loader = ItemFileLoader(self._filename, self._validate)
@@ -298,14 +305,14 @@ class WorldSelectedPackagesSet(EditablePackageSet):
return ValidAtomValidator(atom, allow_repo=True)
def write(self):
- write_atomic(self._filename, "".join(sorted("%s\n" % x for x in self._atoms)))
+ write_atomic(self._filename, "".join(sorted(f"{x}\n" for x in self._atoms)))
def load(self):
atoms = []
atoms_changed = False
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -313,7 +320,7 @@ class WorldSelectedPackagesSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -384,7 +391,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
description = "Set of sets that were directly installed by the user"
def __init__(self, eroot):
- super(WorldSelectedSetsSet, self).__init__(allow_repo=True)
+ super().__init__(allow_repo=True)
self._lock = None
self._filename = os.path.join(eroot, WORLD_SETS_FILE)
self.loader = ItemFileLoader(self._filename, self._validate)
@@ -394,15 +401,13 @@ class WorldSelectedSetsSet(EditablePackageSet):
return setname.startswith(SETPREFIX)
def write(self):
- write_atomic(
- self._filename, "".join(sorted("%s\n" % x for x in self._nonatoms))
- )
+ write_atomic(self._filename, "".join(sorted(f"{x}\n" for x in self._nonatoms)))
def load(self):
atoms_changed = False
try:
mtime = os.stat(self._filename).st_mtime
- except (OSError, IOError):
+ except OSError:
mtime = None
if not self._loaded or self._mtime != mtime:
try:
@@ -410,7 +415,7 @@ class WorldSelectedSetsSet(EditablePackageSet):
for fname in errors:
for e in errors[fname]:
self.errors.append(fname + ": " + e)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
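The utf8_mode branch in StaticFileSet re-encodes the os.walk results to bytes so the existing byte-level prefix and suffix checks keep working, while omit_dir decodes back before mutating the original dirs list. The round trip relies on the os.fsencode/os.fsdecode pair:

    import os

    name = "package.mask~"
    encoded = os.fsencode(name)           # str -> bytes, filesystem encoding
    assert encoded.endswith(b"~")         # byte-level checks now apply
    assert os.fsdecode(encoded) == name   # lossless inverse
    print(encoded)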
diff --git a/lib/portage/_sets/libs.py b/lib/portage/_sets/libs.py
index 683cc0823..860844235 100644
--- a/lib/portage/_sets/libs.py
+++ b/lib/portage/_sets/libs.py
@@ -12,7 +12,7 @@ class LibraryConsumerSet(PackageSet):
_operations = ["merge", "unmerge"]
def __init__(self, vardbapi, debug=False):
- super(LibraryConsumerSet, self).__init__()
+ super().__init__()
self.dbapi = vardbapi
self.debug = debug
@@ -28,12 +28,11 @@ class LibraryConsumerSet(PackageSet):
# without replacement.
pass
else:
- rValue.add("%s:%s" % (pkg.cp, pkg.slot))
+ rValue.add(f"{pkg.cp}:{pkg.slot}")
return rValue
class LibraryFileConsumerSet(LibraryConsumerSet):
-
"""
Note: This does not detect libtool archive (*.la) files that consume the
specified files (revdep-rebuild is able to detect them).
@@ -45,7 +44,7 @@ class LibraryFileConsumerSet(LibraryConsumerSet):
)
def __init__(self, vardbapi, files, **kargs):
- super(LibraryFileConsumerSet, self).__init__(vardbapi, **kargs)
+ super().__init__(vardbapi, **kargs)
self.files = files
def load(self):
diff --git a/lib/portage/_sets/meson.build b/lib/portage/_sets/meson.build
new file mode 100644
index 000000000..9f5696505
--- /dev/null
+++ b/lib/portage/_sets/meson.build
@@ -0,0 +1,15 @@
+py.install_sources(
+ [
+ 'ProfilePackageSet.py',
+ 'base.py',
+ 'dbapi.py',
+ 'files.py',
+ 'libs.py',
+ 'profiles.py',
+ 'security.py',
+ 'shell.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/_sets',
+ pure : not native_extensions
+)
diff --git a/lib/portage/_sets/profiles.py b/lib/portage/_sets/profiles.py
index 289a93218..4731a0add 100644
--- a/lib/portage/_sets/profiles.py
+++ b/lib/portage/_sets/profiles.py
@@ -17,9 +17,7 @@ class PackagesSystemSet(PackageSet):
_operations = ["merge"]
def __init__(self, profiles, debug=False):
- super(PackagesSystemSet, self).__init__(
- allow_repo=any(allow_profile_repo_deps(x) for x in profiles)
- )
+ super().__init__(allow_repo=any(allow_profile_repo_deps(x) for x in profiles))
self._profiles = profiles
self._debug = debug
if profiles:
@@ -29,13 +27,13 @@ class PackagesSystemSet(PackageSet):
description = desc_profile.location
else:
description = None
- self.description = "System packages for profile %s" % description
+ self.description = f"System packages for profile {description}"
def load(self):
debug = self._debug
if debug:
writemsg_level(
- "\nPackagesSystemSet: profiles: %s\n" % (self._profiles,),
+ f"\nPackagesSystemSet: profiles: {self._profiles}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -54,7 +52,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: raw packages: %s\n" % (mylist,),
+ f"\nPackagesSystemSet: raw packages: {mylist}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -63,7 +61,7 @@ class PackagesSystemSet(PackageSet):
if debug:
writemsg_level(
- "\nPackagesSystemSet: stacked packages: %s\n" % (mylist,),
+ f"\nPackagesSystemSet: stacked packages: {mylist}\n",
level=logging.DEBUG,
noiselevel=-1,
)
diff --git a/lib/portage/_sets/security.py b/lib/portage/_sets/security.py
index 5e8bc89e3..a5b2e6aec 100644
--- a/lib/portage/_sets/security.py
+++ b/lib/portage/_sets/security.py
@@ -16,7 +16,7 @@ class SecuritySet(PackageSet):
description = "package set that includes all packages possibly affected by a GLSA"
def __init__(self, settings, vardbapi, portdbapi, least_change=True):
- super(SecuritySet, self).__init__()
+ super().__init__()
self._settings = settings
self._vardbapi = vardbapi
self._portdbapi = portdbapi
@@ -49,7 +49,7 @@ class SecuritySet(PackageSet):
for atom in atomlist[:]:
cpv = self._portdbapi.xmatch("match-all", atom)[0]
pkg = self._portdbapi._pkg_str(cpv, None)
- cps = "%s:%s" % (pkg.cp, pkg.slot)
+ cps = f"{pkg.cp}:{pkg.slot}"
if not cps in mydict:
mydict[cps] = (atom, cpv)
else:
diff --git a/lib/portage/_sets/shell.py b/lib/portage/_sets/shell.py
index 249e1fb05..467a84efd 100644
--- a/lib/portage/_sets/shell.py
+++ b/lib/portage/_sets/shell.py
@@ -23,15 +23,15 @@ class CommandOutputSet(PackageSet):
Args:
name: A string that identifies the set.
command: A string or sequence identifying the command to run
- (see the subprocess.Popen documentaion for the format)
+ (see the subprocess.Popen documentation for the format)
"""
_operations = ["merge", "unmerge"]
def __init__(self, command):
- super(CommandOutputSet, self).__init__()
+ super().__init__()
self._command = command
- self.description = "Package set generated from output of '%s'" % self._command
+ self.description = f"Package set generated from output of '{self._command}'"
def load(self):
pipe = subprocess.Popen(self._command, stdout=subprocess.PIPE, shell=True)
diff --git a/lib/portage/binpkg.py b/lib/portage/binpkg.py
new file mode 100644
index 000000000..a48e09bdb
--- /dev/null
+++ b/lib/portage/binpkg.py
@@ -0,0 +1,76 @@
+# Copyright 2001-2020 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import tarfile
+from portage import os
+from portage.const import SUPPORTED_XPAK_EXTENSIONS, SUPPORTED_GPKG_EXTENSIONS
+from portage.exception import InvalidBinaryPackageFormat
+from portage.output import colorize
+from portage.util import writemsg
+
+
+def get_binpkg_format(binpkg_path, check_file=False, remote=False):
+ if binpkg_path.endswith(SUPPORTED_XPAK_EXTENSIONS):
+ file_ext_format = "xpak"
+ elif binpkg_path.endswith(SUPPORTED_GPKG_EXTENSIONS):
+ file_ext_format = "gpkg"
+ else:
+ file_ext_format = None
+
+ if remote:
+ if file_ext_format is not None:
+ return file_ext_format
+ else:
+ raise InvalidBinaryPackageFormat(
+ f"Unsupported binary package format from '{binpkg_path}'"
+ )
+
+ if file_ext_format is not None and not check_file:
+ return file_ext_format
+
+ try:
+ with open(binpkg_path, "rb") as binpkg_file:
+ header = binpkg_file.read(100)
+ if b"/gpkg-1\x00" in header:
+ file_format = "gpkg"
+ else:
+ binpkg_file.seek(-16, 2)
+ tail = binpkg_file.read(16)
+ if (tail[0:8] == b"XPAKSTOP") and (tail[12:16] == b"STOP"):
+ file_format = "xpak"
+ else:
+ file_format = None
+
+ # check if wrong order gpkg
+ if file_format is None:
+ try:
+ with tarfile.open(binpkg_path) as gpkg_tar:
+ if "gpkg-1" in (os.path.basename(f) for f in gpkg_tar.getnames()):
+ file_format = "gpkg"
+ except tarfile.TarError:
+ pass
+
+ except Exception as err:
+ # Many different exceptions can occur here, so we have to catch all of them.
+ file_format = None
+ writemsg(
+ colorize("ERR", f"Error reading binpkg '{binpkg_path}': {err}\n"),
+ )
+ raise InvalidBinaryPackageFormat(f"Error reading binpkg '{binpkg_path}': {err}")
+
+ if file_format is None:
+ raise InvalidBinaryPackageFormat(
+ f"Unsupported binary package format from '{binpkg_path}'"
+ )
+
+ if (file_ext_format is not None) and (file_ext_format != file_format):
+ writemsg(
+ colorize(
+ "WARN",
+ "File {} binpkg format mismatch, actual format: {}\n".format(
+ binpkg_path, file_format
+ ),
+ )
+ )
+
+ return file_format
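A rough usage sketch for get_binpkg_format() above; the path is hypothetical,
and check_file=True cross-checks the extension against the file contents:

    from portage.binpkg import get_binpkg_format
    from portage.exception import InvalidBinaryPackageFormat

    try:
        fmt = get_binpkg_format(
            "/var/cache/binpkgs/app-misc/foo-1.0-1.gpkg.tar", check_file=True
        )
    except InvalidBinaryPackageFormat:
        fmt = None
    print(fmt)  # "gpkg" or "xpak" when detection succeeds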
diff --git a/lib/portage/binrepo/config.py b/lib/portage/binrepo/config.py
index 8024f24d7..5601a2e00 100644
--- a/lib/portage/binrepo/config.py
+++ b/lib/portage/binrepo/config.py
@@ -29,7 +29,7 @@ class BinRepoConfig:
def info_string(self):
"""
- Returns a formatted string containing informations about the repository.
+ Returns a formatted string containing information about the repository.
Used by emerge --info.
"""
indent = " " * 4
diff --git a/lib/portage/binrepo/meson.build b/lib/portage/binrepo/meson.build
new file mode 100644
index 000000000..4577808b4
--- /dev/null
+++ b/lib/portage/binrepo/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'config.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/binrepo',
+ pure : not native_extensions
+)
diff --git a/lib/portage/cache/anydbm.py b/lib/portage/cache/anydbm.py
index ce0077e3f..ad7042ae4 100644
--- a/lib/portage/cache/anydbm.py
+++ b/lib/portage/cache/anydbm.py
@@ -1,4 +1,4 @@
-# Copyright 2005-2020 Gentoo Authors
+# Copyright 2005-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# Author(s): Brian Harring (ferringb@gentoo.org)
@@ -18,7 +18,6 @@ from portage.cache import cache_errors
class database(fs_template.FsBased):
-
validation_chf = "md5"
chf_types = ("md5", "mtime")
@@ -27,7 +26,7 @@ class database(fs_template.FsBased):
serialize_eclasses = False
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
default_db = config.get("dbtype", "anydbm")
if not default_db.startswith("."):
@@ -50,12 +49,12 @@ class database(fs_template.FsBased):
try:
self._ensure_dirs()
self._ensure_dirs(self._db_path)
- except (OSError, IOError) as e:
+ except OSError as e:
raise cache_errors.InitializationError(self.__class__, e)
# try again if failed
try:
- if self.__db == None:
+ if self.__db is None:
# dbm.open() will not work with bytes in python-3.1:
# TypeError: can't concat bytes to str
if gdbm is None:
@@ -68,6 +67,21 @@ class database(fs_template.FsBased):
raise cache_errors.InitializationError(self.__class__, e)
self._ensure_access(self._db_path)
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ # These attributes are not picklable, so they are automatically
+ # regenerated after unpickling.
+ state["_database__db"] = None
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ mode = "w"
+ if dbm.whichdb(self._db_path) in ("dbm.gnu", "gdbm"):
+ # Allow multiple concurrent writers (see bug #53607).
+ mode += "u"
+ self.__db = dbm.open(self._db_path, mode, self._perms)
+
def iteritems(self):
# dbm doesn't implement items()
for k in self.__db.keys():
@@ -90,7 +104,7 @@ class database(fs_template.FsBased):
return cpv in self.__db
def __del__(self):
- if "__db" in self.__dict__ and self.__db != None:
+ if "__db" in self.__dict__ and self.__db is not None:
self.__db.sync()
self.__db.close()
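The __getstate__/__setstate__ pair added above drops the open dbm handle
(stored under the name-mangled key "_database__db") before pickling and
reopens it afterwards. A minimal round-trip sketch, with hypothetical
constructor arguments and an already existing, writable dbm cache:

    import pickle
    from portage.cache.anydbm import database

    db = database("/var/cache/edb/dep", "gentoo", ("DESCRIPTION", "SLOT"))
    clone = pickle.loads(pickle.dumps(db))  # __setstate__ reopens the dbm file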
diff --git a/lib/portage/cache/cache_errors.py b/lib/portage/cache/cache_errors.py
index 080029734..581c4eed3 100644
--- a/lib/portage/cache/cache_errors.py
+++ b/lib/portage/cache/cache_errors.py
@@ -12,10 +12,7 @@ class InitializationError(CacheError):
self.error, self.class_name = error, class_name
def __str__(self):
- return "Creation of instance %s failed due to %s" % (
- self.class_name,
- str(self.error),
- )
+ return f"Creation of instance {self.class_name} failed due to {str(self.error)}"
class CacheCorruption(CacheError):
@@ -23,7 +20,7 @@ class CacheCorruption(CacheError):
self.key, self.ex = key, ex
def __str__(self):
- return "%s is corrupt: %s" % (self.key, str(self.ex))
+ return f"{self.key} is corrupt: {str(self.ex)}"
class GeneralCacheCorruption(CacheError):
@@ -31,17 +28,17 @@ class GeneralCacheCorruption(CacheError):
self.ex = ex
def __str__(self):
- return "corruption detected: %s" % str(self.ex)
+ return f"corruption detected: {str(self.ex)}"
class InvalidRestriction(CacheError):
def __init__(self, key, restriction, exception=None):
- if exception == None:
+ if exception is None:
exception = ""
self.key, self.restriction, self.ex = key, restriction, exception
def __str__(self):
- return "%s:%s is not valid: %s" % (self.key, self.restriction, str(self.ex))
+ return f"{self.key}:{self.restriction} is not valid: {str(self.ex)}"
class ReadOnlyRestriction(CacheError):
@@ -67,14 +64,14 @@ class StatCollision(CacheError):
self.size = size
def __str__(self):
- return "%s has stat collision with size %s and mtime %s" % (
+ return "{} has stat collision with size {} and mtime {}".format(
self.key,
self.size,
self.mtime,
)
def __repr__(self):
- return "portage.cache.cache_errors.StatCollision(%s)" % (
+ return "portage.cache.cache_errors.StatCollision({})".format(
", ".join(
(repr(self.key), repr(self.filename), repr(self.mtime), repr(self.size))
),
diff --git a/lib/portage/cache/ebuild_xattr.py b/lib/portage/cache/ebuild_xattr.py
index 587466589..86fd5dbf0 100644
--- a/lib/portage/cache/ebuild_xattr.py
+++ b/lib/portage/cache/ebuild_xattr.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
# Copyright: 2009-2020 Gentoo Authors
# Author(s): Petteri Räty (betelgeuse@gentoo.org)
# License: GPL2
@@ -21,11 +20,10 @@ class NoValueException(Exception):
class database(fs_template.FsBased):
-
autocommits = True
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self.portdir = self.label
self.ns = xattr.NS_USER + ".gentoo.cache"
self.keys = set(self._known_keys)
@@ -57,7 +55,7 @@ class database(fs_template.FsBased):
while True:
self.__set(path, "test_max", s)
s += hundred
- except IOError as e:
+ except OSError as e:
# ext based give wrong errno
# https://bugzilla.kernel.org/show_bug.cgi?id=12793
if e.errno in (errno.E2BIG, errno.ENOSPC):
@@ -67,7 +65,7 @@ class database(fs_template.FsBased):
try:
self.__remove(path, "test_max")
- except IOError as e:
+ except OSError as e:
if e.errno != errno.ENODATA:
raise
@@ -88,7 +86,7 @@ class database(fs_template.FsBased):
def __get(self, path, key, default=None):
try:
return xattr.get(path, key, namespace=self.ns)
- except IOError as e:
+ except OSError as e:
if not default is None and errno.ENODATA == e.errno:
return default
raise NoValueException()
@@ -135,7 +133,7 @@ class database(fs_template.FsBased):
parts += 1
# Only the first entry carries the number of parts
- self.__set(path, key, "%s:%s" % (parts, s[0:max_len]))
+ self.__set(path, key, f"{parts}:{s[0:max_len]}")
# Write out the rest
for i in range(1, parts):
@@ -143,7 +141,7 @@ class database(fs_template.FsBased):
val = s[start : start + max_len]
self.__set(path, key + str(i), val)
else:
- self.__set(path, key, "%s:%s" % (1, s))
+ self.__set(path, key, f"1:{s}")
def _delitem(self, cpv):
pass # Will be gone with the ebuild
@@ -152,7 +150,6 @@ class database(fs_template.FsBased):
return os.path.exists(self.__get_path(cpv))
def __iter__(self):
-
for root, dirs, files in os.walk(self.portdir):
for file in files:
try:
@@ -166,4 +163,4 @@ class database(fs_template.FsBased):
pn_pv = file[:-7]
path = os.path.join(root, file)
if self.__has_cache(path):
- yield "%s/%s/%s" % (cat, os.path.basename(root), file[:-7])
+ yield f"{cat}/{os.path.basename(root)}/{file[:-7]}"
diff --git a/lib/portage/cache/flat_hash.py b/lib/portage/cache/flat_hash.py
index d3f4dad4c..cc0536276 100644
--- a/lib/portage/cache/flat_hash.py
+++ b/lib/portage/cache/flat_hash.py
@@ -5,7 +5,6 @@
from portage.cache import fs_template
from portage.cache import cache_errors
import errno
-import io
import stat
import tempfile
import os as _os
@@ -17,17 +16,16 @@ from portage.versions import _pkg_str
class database(fs_template.FsBased):
-
autocommits = True
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self.location = os.path.join(
self.location, self.label.lstrip(os.path.sep).rstrip(os.path.sep)
)
write_keys = set(self._known_keys)
write_keys.add("_eclasses_")
- write_keys.add("_%s_" % (self.validation_chf,))
+ write_keys.add(f"_{self.validation_chf}_")
self._write_keys = sorted(write_keys)
if not self.readonly and not os.path.exists(self.location):
self._ensure_dirs()
@@ -36,9 +34,8 @@ class database(fs_template.FsBased):
# Don't use os.path.join, for better performance.
fp = self.location + _os.sep + cpv
try:
- with io.open(
+ with open(
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as myf:
@@ -51,7 +48,7 @@ class database(fs_template.FsBased):
# that uses mtime mangling.
d["_mtime_"] = _os.fstat(myf.fileno())[stat.ST_MTIME]
return d
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise cache_errors.CacheCorruption(cpv, e)
raise KeyError(cpv, e)
@@ -66,17 +63,17 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
try:
fd, fp = tempfile.mkstemp(dir=self.location)
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
- with io.open(
+ with open(
fd, mode="w", encoding=_encodings["repo.content"], errors="backslashreplace"
) as myf:
for k in self._write_keys:
v = values.get(k)
if not v:
continue
- myf.write("%s=%s\n" % (k, v))
+ myf.write(f"{k}={v}\n")
self._ensure_access(fp)
@@ -85,7 +82,7 @@ class database(fs_template.FsBased):
new_fp = os.path.join(self.location, cpv)
try:
os.rename(fp, new_fp)
- except EnvironmentError as e:
+ except OSError as e:
success = False
try:
if errno.ENOENT == e.errno:
@@ -93,7 +90,7 @@ class database(fs_template.FsBased):
self._ensure_dirs(cpv)
os.rename(fp, new_fp)
success = True
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
else:
raise cache_errors.CacheCorruption(cpv, e)
@@ -150,7 +147,6 @@ class database(fs_template.FsBased):
class md5_database(database):
-
validation_chf = "md5"
store_eclass_paths = False
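The _setitem() path above uses the classic write-to-temp-then-rename idiom:
each entry is written to a mkstemp file in the cache directory and renamed
over the final path, so readers never observe a half-written entry. A generic
sketch of the same idiom (paths hypothetical; os.rename() is atomic only
within a single filesystem):

    import os
    import tempfile

    def atomic_write(path, text):
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path))
        with open(fd, "w") as f:  # open() adopts the fd and closes it
            f.write(text)
        os.rename(tmp, path)  # atomic replace on the same filesystem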
diff --git a/lib/portage/cache/fs_template.py b/lib/portage/cache/fs_template.py
index a3f803740..738bb5417 100644
--- a/lib/portage/cache/fs_template.py
+++ b/lib/portage/cache/fs_template.py
@@ -21,7 +21,6 @@ class FsBased(template.database):
attempt to ensure files have the specified owners/perms"""
def __init__(self, *args, **config):
-
for x, y in (("gid", -1), ("perms", 0o644)):
if x in config:
# Since Python 3.4, chown requires int type (no proxies).
@@ -29,7 +28,7 @@ class FsBased(template.database):
del config[x]
else:
setattr(self, "_" + x, y)
- super(FsBased, self).__init__(*args, **config)
+ super().__init__(*args, **config)
if self.label.startswith(os.path.sep):
# normpath.
@@ -43,7 +42,7 @@ class FsBased(template.database):
if mtime != -1:
mtime = int(mtime)
os.utime(path, (mtime, mtime))
- except (PortageException, EnvironmentError):
+ except (PortageException, OSError):
return False
return True
@@ -87,4 +86,4 @@ def gen_label(base, label):
label = label.strip('"').strip("'")
label = os.path.join(*(label.rstrip(os.path.sep).split(os.path.sep)))
tail = os.path.split(label)[1]
- return "%s-%X" % (tail, abs(label.__hash__()))
+ return f"{tail}-{abs(label.__hash__()):X}"
diff --git a/lib/portage/cache/index/IndexStreamIterator.py b/lib/portage/cache/index/IndexStreamIterator.py
index 78d7f0e5f..616aca0e3 100644
--- a/lib/portage/cache/index/IndexStreamIterator.py
+++ b/lib/portage/cache/index/IndexStreamIterator.py
@@ -4,20 +4,16 @@
class IndexStreamIterator:
def __init__(self, f, parser):
-
self.parser = parser
self._file = f
def close(self):
-
if self._file is not None:
self._file.close()
self._file = None
def __iter__(self):
-
try:
-
for line in self._file:
node = self.parser(line)
if node is not None:
diff --git a/lib/portage/cache/index/meson.build b/lib/portage/cache/index/meson.build
new file mode 100644
index 000000000..fdc427953
--- /dev/null
+++ b/lib/portage/cache/index/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'IndexStreamIterator.py',
+ 'pkg_desc_index.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/cache/index',
+ pure : not native_extensions
+)
diff --git a/lib/portage/cache/index/pkg_desc_index.py b/lib/portage/cache/index/pkg_desc_index.py
index be81b9bb9..9c63c2559 100644
--- a/lib/portage/cache/index/pkg_desc_index.py
+++ b/lib/portage/cache/index/pkg_desc_index.py
@@ -33,11 +33,10 @@ class pkg_node(str):
def pkg_desc_index_line_format(cp, pkgs, desc):
- return "%s %s: %s\n" % (cp, " ".join(_pkg_str(cpv).version for cpv in pkgs), desc)
+ return f"{cp} {' '.join(_pkg_str(cpv).version for cpv in pkgs)}: {desc}\n"
def pkg_desc_index_line_read(line, repo=None):
-
try:
pkgs, desc = line.split(":", 1)
except ValueError:
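The index line format produced above is "CP VER VER...: DESC". A quick sketch
with hypothetical package data:

    from portage.cache.index.pkg_desc_index import pkg_desc_index_line_format

    line = pkg_desc_index_line_format(
        "app-misc/foo", ("app-misc/foo-1.0", "app-misc/foo-1.1"), "Example package"
    )
    # line == "app-misc/foo 1.0 1.1: Example package\n"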
diff --git a/lib/portage/cache/mappings.py b/lib/portage/cache/mappings.py
index c0f2147a9..469f3dbc4 100644
--- a/lib/portage/cache/mappings.py
+++ b/lib/portage/cache/mappings.py
@@ -1,4 +1,4 @@
-# Copyright: 2005-2020 Gentoo Authors
+# Copyright: 2005-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# Author(s): Brian Harring (ferringb@gentoo.org)
@@ -69,7 +69,7 @@ class Mapping:
class MutableMapping(Mapping):
"""
- A mutable vesion of the Mapping class.
+ A mutable version of the Mapping class.
"""
__slots__ = ()
@@ -146,7 +146,6 @@ class UserDict(MutableMapping):
__slots__ = ("data",)
def __init__(self, *args, **kwargs):
-
self.data = {}
if len(args) > 1:
@@ -250,7 +249,7 @@ class LazyLoad(Mapping):
def __getitem__(self, key):
if key in self.d:
return self.d[key]
- if self.pull != None:
+ if self.pull is not None:
self.d.update(self.pull())
self.pull = None
return self.d[key]
@@ -264,7 +263,7 @@ class LazyLoad(Mapping):
def __contains__(self, key):
if key in self.d:
return True
- if self.pull != None:
+ if self.pull is not None:
self.d.update(self.pull())
self.pull = None
return key in self.d
@@ -272,6 +271,175 @@ class LazyLoad(Mapping):
keys = __iter__
+class _SlotDict:
+ """
+ Base class for classes returned from slot_dict_class.
+ """
+
+ _prefix = ""
+ allowed_keys = frozenset()
+ __slots__ = ("__weakref__",)
+
+ def __init__(self, *args, **kwargs):
+ if len(args) > 1:
+ raise TypeError(
+ "expected at most 1 positional argument, got " + repr(len(args))
+ )
+
+ if args:
+ self.update(args[0])
+
+ if kwargs:
+ self.update(kwargs)
+
+ def __reduce__(self):
+ return _PickledSlotDict, (
+ self._prefix,
+ self.allowed_keys,
+ dict(self),
+ )
+
+ def __eq__(self, other):
+ return dict(self) == dict(other)
+
+ def __iter__(self):
+ for k, v in self.iteritems():
+ yield k
+
+ def __len__(self):
+ l = 0
+ for i in self.iteritems():
+ l += 1
+ return l
+
+ def iteritems(self):
+ prefix = self._prefix
+ for k in self.allowed_keys:
+ try:
+ yield (k, getattr(self, prefix + k))
+ except AttributeError:
+ pass
+
+ def itervalues(self):
+ for k, v in self.iteritems():
+ yield v
+
+ def __delitem__(self, k):
+ try:
+ delattr(self, self._prefix + k)
+ except AttributeError:
+ raise KeyError(k)
+
+ def __setitem__(self, k, v):
+ setattr(self, self._prefix + k, v)
+
+ def setdefault(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
+
+ def update(self, *args, **kwargs):
+ if len(args) > 1:
+ raise TypeError(
+ "expected at most 1 positional argument, got " + repr(len(args))
+ )
+ other = None
+ if args:
+ other = args[0]
+ if other is None:
+ pass
+ elif hasattr(other, "iteritems"):
+ # Use getattr to avoid interference from 2to3.
+ for k, v in getattr(other, "iteritems")():
+ self[k] = v
+ elif hasattr(other, "items"):
+ # Use getattr to avoid interference from 2to3.
+ for k, v in getattr(other, "items")():
+ self[k] = v
+ elif hasattr(other, "keys"):
+ for k in other.keys():
+ self[k] = other[k]
+ else:
+ for k, v in other:
+ self[k] = v
+ if kwargs:
+ self.update(kwargs)
+
+ def __getitem__(self, k):
+ try:
+ return getattr(self, self._prefix + k)
+ except AttributeError:
+ raise KeyError(k)
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __contains__(self, k):
+ return hasattr(self, self._prefix + k)
+
+ def pop(self, key, *args):
+ if len(args) > 1:
+ raise TypeError(
+ "pop expected at most 2 arguments, got " + repr(1 + len(args))
+ )
+ try:
+ value = self[key]
+ except KeyError:
+ if args:
+ return args[0]
+ raise
+ del self[key]
+ return value
+
+ def popitem(self):
+ try:
+ k, v = next(self.iteritems())
+ except StopIteration:
+ raise KeyError("container is empty")
+ del self[k]
+ return (k, v)
+
+ def copy(self):
+ c = self.__class__()
+ c.update(self)
+ return c
+
+ def clear(self):
+ for k in self.allowed_keys:
+ try:
+ delattr(self, self._prefix + k)
+ except AttributeError:
+ pass
+
+ def __str__(self):
+ return str(dict(self.iteritems()))
+
+ def __repr__(self):
+ return repr(dict(self.iteritems()))
+
+ items = iteritems
+ keys = __iter__
+ values = itervalues
+
+
+class _PickledSlotDict(_SlotDict):
+ """
+ Since LocalSlotDict instances are not directly picklable, this
+ class exists as a way to express pickled LocalSlotDict instances,
+ using a plain __dict__ instead of custom __slots__.
+ """
+
+ def __init__(self, prefix, allowed_keys, *args, **kwargs):
+ self._prefix = prefix
+ self.allowed_keys = allowed_keys
+ super().__init__(*args, **kwargs)
+
+
_slot_dict_classes = weakref.WeakValueDictionary()
@@ -294,152 +462,15 @@ def slot_dict_class(keys, prefix="_val_"):
keys_set = keys
else:
keys_set = frozenset(keys)
- v = _slot_dict_classes.get((keys_set, prefix))
+ cache_key = (keys_set, prefix)
+ v = _slot_dict_classes.get(cache_key)
if v is None:
- class SlotDict:
-
+ class LocalSlotDict(_SlotDict):
allowed_keys = keys_set
_prefix = prefix
- __slots__ = ("__weakref__",) + tuple(prefix + k for k in allowed_keys)
-
- def __init__(self, *args, **kwargs):
-
- if len(args) > 1:
- raise TypeError(
- "expected at most 1 positional argument, got " + repr(len(args))
- )
-
- if args:
- self.update(args[0])
-
- if kwargs:
- self.update(kwargs)
-
- def __iter__(self):
- for k, v in self.iteritems():
- yield k
-
- def __len__(self):
- l = 0
- for i in self.iteritems():
- l += 1
- return l
-
- def iteritems(self):
- prefix = self._prefix
- for k in self.allowed_keys:
- try:
- yield (k, getattr(self, prefix + k))
- except AttributeError:
- pass
-
- def itervalues(self):
- for k, v in self.iteritems():
- yield v
-
- def __delitem__(self, k):
- try:
- delattr(self, self._prefix + k)
- except AttributeError:
- raise KeyError(k)
-
- def __setitem__(self, k, v):
- setattr(self, self._prefix + k, v)
-
- def setdefault(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
-
- def update(self, *args, **kwargs):
- if len(args) > 1:
- raise TypeError(
- "expected at most 1 positional argument, got " + repr(len(args))
- )
- other = None
- if args:
- other = args[0]
- if other is None:
- pass
- elif hasattr(other, "iteritems"):
- # Use getattr to avoid interference from 2to3.
- for k, v in getattr(other, "iteritems")():
- self[k] = v
- elif hasattr(other, "items"):
- # Use getattr to avoid interference from 2to3.
- for k, v in getattr(other, "items")():
- self[k] = v
- elif hasattr(other, "keys"):
- for k in other.keys():
- self[k] = other[k]
- else:
- for k, v in other:
- self[k] = v
- if kwargs:
- self.update(kwargs)
-
- def __getitem__(self, k):
- try:
- return getattr(self, self._prefix + k)
- except AttributeError:
- raise KeyError(k)
-
- def get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
- def __contains__(self, k):
- return hasattr(self, self._prefix + k)
-
- def pop(self, key, *args):
- if len(args) > 1:
- raise TypeError(
- "pop expected at most 2 arguments, got " + repr(1 + len(args))
- )
- try:
- value = self[key]
- except KeyError:
- if args:
- return args[0]
- raise
- del self[key]
- return value
-
- def popitem(self):
- try:
- k, v = self.iteritems().next()
- except StopIteration:
- raise KeyError("container is empty")
- del self[k]
- return (k, v)
-
- def copy(self):
- c = self.__class__()
- c.update(self)
- return c
-
- def clear(self):
- for k in self.allowed_keys:
- try:
- delattr(self, self._prefix + k)
- except AttributeError:
- pass
-
- def __str__(self):
- return str(dict(self.iteritems()))
-
- def __repr__(self):
- return repr(dict(self.iteritems()))
-
- items = iteritems
- keys = __iter__
- values = itervalues
-
- v = SlotDict
- _slot_dict_classes[v.allowed_keys] = v
+ __slots__ = tuple(prefix + k for k in allowed_keys)
+
+ v = LocalSlotDict
+ _slot_dict_classes[cache_key] = v
return v
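With the refactoring above, slot_dict_class() still returns one cached class
per (keys, prefix) pair, but instances now pickle through __reduce__ into
_PickledSlotDict. A small sketch (the keys are hypothetical):

    import pickle
    from portage.cache.mappings import slot_dict_class

    SlotDict = slot_dict_class(("cp", "slot"))
    d = SlotDict(cp="app-misc/foo", slot="0")
    clone = pickle.loads(pickle.dumps(d))  # a _PickledSlotDict
    assert clone == d  # __eq__ compares dict contents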
diff --git a/lib/portage/cache/meson.build b/lib/portage/cache/meson.build
new file mode 100644
index 000000000..5ebeda6bb
--- /dev/null
+++ b/lib/portage/cache/meson.build
@@ -0,0 +1,20 @@
+py.install_sources(
+ [
+ 'anydbm.py',
+ 'cache_errors.py',
+ 'ebuild_xattr.py',
+ 'flat_hash.py',
+ 'fs_template.py',
+ 'mappings.py',
+ 'metadata.py',
+ 'sqlite.py',
+ 'sql_template.py',
+ 'template.py',
+ 'volatile.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/cache',
+ pure : not native_extensions
+)
+
+subdir('index')
diff --git a/lib/portage/cache/metadata.py b/lib/portage/cache/metadata.py
index 02d8385e0..791ad8344 100644
--- a/lib/portage/cache/metadata.py
+++ b/lib/portage/cache/metadata.py
@@ -53,7 +53,7 @@ class database(flat_hash.database):
def __init__(self, location, *args, **config):
loc = location
- super(database, self).__init__(location, *args, **config)
+ super().__init__(location, *args, **config)
self.location = os.path.join(loc, "metadata", "cache")
self.ec = None
self.raise_stat_collision = False
@@ -83,9 +83,9 @@ class database(flat_hash.database):
getter = attrgetter(self.validation_chf)
try:
ec_data = self.ec.get_eclass_data(d["INHERITED"].split())
- d["_eclasses_"] = dict(
- (k, (v.eclass_dir, getter(v))) for k, v in ec_data.items()
- )
+ d["_eclasses_"] = {
+ k: (v.eclass_dir, getter(v)) for k, v in ec_data.items()
+ }
except KeyError as e:
# INHERITED contains a non-existent eclass.
raise cache_errors.CacheCorruption(cpv, e)
@@ -120,7 +120,7 @@ class database(flat_hash.database):
_unicode_encode(new_fp, encoding=_encodings["fs"], errors="strict"),
"rb",
)
- except EnvironmentError:
+ except OSError:
pass
else:
try:
@@ -129,7 +129,7 @@ class database(flat_hash.database):
existing_content = f.read()
finally:
f.close()
- except EnvironmentError:
+ except OSError:
pass
else:
existing_mtime = existing_st[stat.ST_MTIME]
@@ -156,7 +156,7 @@ class database(flat_hash.database):
myf = open(
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"), "wb"
)
- except EnvironmentError as e:
+ except OSError as e:
if errno.ENOENT == e.errno:
try:
self._ensure_dirs(cpv)
@@ -164,7 +164,7 @@ class database(flat_hash.database):
_unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
"wb",
)
- except EnvironmentError as e:
+ except OSError as e:
raise cache_errors.CacheCorruption(cpv, e)
else:
raise cache_errors.CacheCorruption(cpv, e)
@@ -177,9 +177,9 @@ class database(flat_hash.database):
try:
os.rename(fp, new_fp)
- except EnvironmentError as e:
+ except OSError as e:
try:
os.unlink(fp)
- except EnvironmentError:
+ except OSError:
pass
raise cache_errors.CacheCorruption(cpv, e)
diff --git a/lib/portage/cache/sql_template.py b/lib/portage/cache/sql_template.py
index 99cd41a34..ec058ab15 100644
--- a/lib/portage/cache/sql_template.py
+++ b/lib/portage/cache/sql_template.py
@@ -13,7 +13,7 @@ class SQLDatabase(template.database):
_BaseError must be an exception class that all Exceptions thrown from the derived RDBMS are derived
from.
- SCHEMA_INSERT_CPV_INTO_PACKAGE should be modified dependant on the RDBMS, as should SCHEMA_PACKAGE_CREATE-
+ SCHEMA_INSERT_CPV_INTO_PACKAGE should be modified dependent on the RDBMS, as should SCHEMA_PACKAGE_CREATE-
basically you need to deal with creation of a unique pkgid. If the dbapi2 rdbms class has a method of
recovering that id, then modify _insert_cpv to remove the extra select.
@@ -27,7 +27,7 @@ class SQLDatabase(template.database):
pkgid INTEGER PRIMARY KEY, label VARCHAR(255), cpv VARCHAR(255), UNIQUE(label, cpv))"
% SCHEMA_PACKAGE_NAME
)
- SCHEMA_PACKAGE_DROP = "DROP TABLE %s" % SCHEMA_PACKAGE_NAME
+ SCHEMA_PACKAGE_DROP = f"DROP TABLE {SCHEMA_PACKAGE_NAME}"
SCHEMA_VALUES_NAME = "values_cache"
SCHEMA_VALUES_CREATE = (
@@ -35,7 +35,7 @@ class SQLDatabase(template.database):
key varchar(255), value text, UNIQUE(pkgid, key))"
% (SCHEMA_VALUES_NAME, SCHEMA_PACKAGE_NAME)
)
- SCHEMA_VALUES_DROP = "DROP TABLE %s" % SCHEMA_VALUES_NAME
+ SCHEMA_VALUES_DROP = f"DROP TABLE {SCHEMA_VALUES_NAME}"
SCHEMA_INSERT_CPV_INTO_PACKAGE = (
"INSERT INTO %s (label, cpv) VALUES(%%s, %%s)" % SCHEMA_PACKAGE_NAME
)
@@ -53,7 +53,7 @@ class SQLDatabase(template.database):
"""initialize the instance.
derived classes shouldn't need to override this"""
- super(SQLDatabase, self).__init__(location, label, auxdbkeys, *args, **config)
+ super().__init__(location, label, auxdbkeys, *args, **config)
config.setdefault("host", "127.0.0.1")
config.setdefault("autocommit", self.autocommits)
@@ -69,14 +69,14 @@ class SQLDatabase(template.database):
def _initdb_con(self, config):
"""ensure needed tables are in place.
- If the derived class needs a different set of table creation commands, overload the approriate
+ If the derived class needs a different set of table creation commands, overload the appropriate
SCHEMA_ attributes. If it needs additional execution beyond, override"""
self._dbconnect(config)
if not self._table_exists(self.SCHEMA_PACKAGE_NAME):
if self.readonly:
raise cache_errors.ReadOnlyRestriction(
- "table %s doesn't exist" % self.SCHEMA_PACKAGE_NAME
+ f"table {self.SCHEMA_PACKAGE_NAME} doesn't exist"
)
try:
self.con.execute(self.SCHEMA_PACKAGE_CREATE)
@@ -86,7 +86,7 @@ class SQLDatabase(template.database):
if not self._table_exists(self.SCHEMA_VALUES_NAME):
if self.readonly:
raise cache_errors.ReadOnlyRestriction(
- "table %s doesn't exist" % self.SCHEMA_VALUES_NAME
+ f"table {self.SCHEMA_VALUES_NAME} doesn't exist"
)
try:
self.con.execute(self.SCHEMA_VALUES_CREATE)
@@ -122,7 +122,7 @@ class SQLDatabase(template.database):
if len(rows) == 0:
raise KeyError(cpv)
- vals = dict([(k, "") for k in self._known_keys])
+ vals = {k: "" for k in self._known_keys}
vals.update(dict(rows))
return vals
@@ -152,12 +152,11 @@ class SQLDatabase(template.database):
def __del__(self):
# just to be safe.
- if "db" in self.__dict__ and self.db != None:
+ if "db" in self.__dict__ and self.db is not None:
self.commit()
self.db.close()
def _setitem(self, cpv, values):
-
try:
# insert.
try:
@@ -255,8 +254,7 @@ class SQLDatabase(template.database):
try:
self.con.execute(
- "SELECT cpv FROM %s WHERE label=%s"
- % (self.SCHEMA_PACKAGE_NAME, self.label)
+ f"SELECT cpv FROM {self.SCHEMA_PACKAGE_NAME} WHERE label={self.label}"
)
except self._BaseError as e:
raise cache_errors.GeneralCacheCorruption(e)
@@ -278,7 +276,7 @@ class SQLDatabase(template.database):
l = []
for x, y, v in self.con.fetchall():
if oldcpv != x:
- if oldcpv != None:
+ if oldcpv is not None:
d = dict(l)
if "_eclasses_" in d:
d["_eclasses_"] = reconstruct_eclasses(oldcpv, d["_eclasses_"])
@@ -288,7 +286,7 @@ class SQLDatabase(template.database):
l.clear()
oldcpv = x
l.append((y, v))
- if oldcpv != None:
+ if oldcpv is not None:
d = dict(l)
if "_eclasses_" in d:
d["_eclasses_"] = reconstruct_eclasses(oldcpv, d["_eclasses_"])
@@ -309,7 +307,7 @@ class SQLDatabase(template.database):
v = v.replace("%", "\\%")
v = v.replace(".*", "%")
query_list.append(
- "(key=%s AND value LIKE %s)" % (self._sfilter(k), self._sfilter(v))
+ f"(key={self._sfilter(k)} AND value LIKE {self._sfilter(v)})"
)
if len(query_list):
diff --git a/lib/portage/cache/sqlite.py b/lib/portage/cache/sqlite.py
index 23a775e65..77dc8bc41 100644
--- a/lib/portage/cache/sqlite.py
+++ b/lib/portage/cache/sqlite.py
@@ -1,4 +1,4 @@
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import collections
@@ -14,7 +14,6 @@ from portage.localization import _
class database(fs_template.FsBased):
-
validation_chf = "md5"
chf_types = ("md5", "mtime")
@@ -30,11 +29,11 @@ class database(fs_template.FsBased):
)
def __init__(self, *args, **config):
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self._import_sqlite()
self._allowed_keys = ["_eclasses_"]
self._allowed_keys.extend(self._known_keys)
- self._allowed_keys.extend("_%s_" % k for k in self.chf_types)
+ self._allowed_keys.extend(f"_{k}_" for k in self.chf_types)
self._allowed_keys_set = frozenset(self._allowed_keys)
self._allowed_keys = sorted(self._allowed_keys_set)
@@ -54,6 +53,19 @@ class database(fs_template.FsBased):
self._config = config
self._db_connection_info = None
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ # These attributes are not picklable, so they are automatically
+ # regenerated after unpickling.
+ state["_db_module"] = None
+ state["_db_error"] = None
+ state["_db_connection_info"] = None
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self._import_sqlite()
+
def _import_sqlite(self):
# sqlite3 is optional with >=python-2.5
try:
@@ -108,11 +120,11 @@ class database(fs_template.FsBased):
connection, cursor, portage.getpid()
)
self._db_cursor.execute(
- "PRAGMA encoding = %s" % self._db_escape_string("UTF-8")
+ f"PRAGMA encoding = {self._db_escape_string('UTF-8')}"
)
if not self.readonly and not self._ensure_access(self._dbpath):
raise cache_errors.InitializationError(
- self.__class__, "can't ensure perms on %s" % self._dbpath
+ self.__class__, f"can't ensure perms on {self._dbpath}"
)
self._db_init_cache_size(config["cache_bytes"])
self._db_init_synchronous(config["synchronous"])
@@ -136,12 +148,10 @@ class database(fs_template.FsBased):
"%s INTEGER PRIMARY KEY AUTOINCREMENT"
% self._db_table["packages"]["package_id"]
)
- table_parameters.append("%s TEXT" % self._db_table["packages"]["package_key"])
+ table_parameters.append(f"{self._db_table['packages']['package_key']} TEXT")
for k in self._allowed_keys:
- table_parameters.append("%s TEXT" % k)
- table_parameters.append(
- "UNIQUE(%s)" % self._db_table["packages"]["package_key"]
- )
+ table_parameters.append(f"{k} TEXT")
+ table_parameters.append(f"UNIQUE({self._db_table['packages']['package_key']})")
create_statement.append(",".join(table_parameters))
create_statement.append(")")
@@ -163,13 +173,13 @@ class database(fs_template.FsBased):
)
else:
writemsg(_("sqlite: dropping old table: %s\n") % v["table_name"])
- cursor.execute("DROP TABLE %s" % v["table_name"])
+ cursor.execute(f"DROP TABLE {v['table_name']}")
cursor.execute(v["create"])
else:
cursor.execute(v["create"])
def _db_table_exists(self, table_name):
- """return true/false dependant on a tbl existing"""
+ """return true/false dependent on a tbl existing"""
cursor = self._db_cursor
cursor.execute(
'SELECT name FROM sqlite_master WHERE type="table" AND name=%s'
@@ -178,7 +188,7 @@ class database(fs_template.FsBased):
return len(cursor.fetchall()) == 1
def _db_table_get_create(self, table_name):
- """return true/false dependant on a tbl existing"""
+ """return true/false dependent on a tbl existing"""
cursor = self._db_cursor
cursor.execute(
"SELECT sql FROM sqlite_master WHERE name=%s"
@@ -202,7 +212,7 @@ class database(fs_template.FsBased):
if m is None:
return False, missing_keys
- unique_constraints = set([self._db_table["packages"]["package_key"]])
+ unique_constraints = {self._db_table["packages"]["package_key"]}
missing_keys = set(self._allowed_keys)
unique_re = re.compile(r"^\s*UNIQUE\s*\(\s*(\w*)\s*\)\s*$")
column_re = re.compile(r"^\s*(\w*)\s*TEXT\s*$")
@@ -289,7 +299,7 @@ class database(fs_template.FsBased):
def _setitem(self, cpv, values):
update_statement = []
update_statement.append(
- "REPLACE INTO %s" % self._db_table["packages"]["table_name"]
+ f"REPLACE INTO {self._db_table['packages']['table_name']}"
)
update_statement.append("(")
update_statement.append(
@@ -309,7 +319,7 @@ class database(fs_template.FsBased):
s = " ".join(update_statement)
cursor.execute(s)
except self._db_error as e:
- writemsg("%s: %s\n" % (cpv, str(e)))
+ writemsg(f"{cpv}: {str(e)}\n")
raise
def commit(self):
diff --git a/lib/portage/cache/template.py b/lib/portage/cache/template.py
index 3677dfa74..9e69e368f 100644
--- a/lib/portage/cache/template.py
+++ b/lib/portage/cache/template.py
@@ -11,7 +11,7 @@ import operator
class database:
# this is for metadata/cache transfer.
- # basically flags the cache needs be updated when transfered cache to cache.
+ # basically flags that the cache needs to be updated when transferring cache to cache.
# leave this.
complete_eclass_entries = True
@@ -32,7 +32,7 @@ class database:
def __getitem__(self, cpv):
"""set a cpv to values
- This shouldn't be overriden in derived classes since it handles the __eclasses__ conversion.
+ This shouldn't be overridden in derived classes since it handles the __eclasses__ conversion.
that said, if the class handles it, they can override it."""
if self.updates > self.sync_rate:
self.commit()
@@ -46,7 +46,7 @@ class database:
if self.serialize_eclasses and "_eclasses_" in d:
for chf_type in chf_types:
- if "_%s_" % chf_type not in d:
+ if f"_{chf_type}_" not in d:
# Skip the reconstruct_eclasses call, since it's
# a waste of time if it contains a different chf_type
# than the current one. In the past, it was possible
@@ -77,7 +77,7 @@ class database:
# those that egencache uses to avoid redundant writes.
d.pop("INHERITED", None)
- mtime_required = not any(d.get("_%s_" % x) for x in chf_types if x != "mtime")
+ mtime_required = not any(d.get(f"_{x}_") for x in chf_types if x != "mtime")
mtime = d.get("_mtime_")
if not mtime:
@@ -89,14 +89,14 @@ class database:
mtime = int(mtime)
except ValueError:
raise cache_errors.CacheCorruption(
- cpv, "_mtime_ conversion to int failed: %s" % (mtime,)
+ cpv, f"_mtime_ conversion to int failed: {mtime}"
)
d["_mtime_"] = mtime
return d
def _getitem(self, cpv):
"""get cpv's values.
- override this in derived classess"""
+ override this in derived classes"""
raise NotImplementedError
@staticmethod
@@ -111,16 +111,17 @@ class database:
return extern_ec_dict
chf_getter = operator.attrgetter(chf_type)
if paths:
- intern_ec_dict = dict(
- (k, (v.eclass_dir, chf_getter(v))) for k, v in extern_ec_dict.items()
- )
+ intern_ec_dict = {
+ k: (v.eclass_dir, chf_getter(v)) for k, v in extern_ec_dict.items()
+ }
else:
- intern_ec_dict = dict((k, chf_getter(v)) for k, v in extern_ec_dict.items())
+ intern_ec_dict = {k: chf_getter(v) for k, v in extern_ec_dict.items()}
return intern_ec_dict
def __setitem__(self, cpv, values):
"""set a cpv to values
- This shouldn't be overriden in derived classes since it handles the readonly checks"""
+ This shouldn't be overridden in derived classes since it handles the readonly checks
+ """
if self.readonly:
raise cache_errors.ReadOnlyRestriction()
d = None
@@ -156,7 +157,8 @@ class database:
def __delitem__(self, cpv):
"""delete a key from the cache.
- This shouldn't be overriden in derived classes since it handles the readonly checks"""
+ This shouldn't be overridden in derived classes since it handles the readonly checks
+ """
if self.readonly:
raise cache_errors.ReadOnlyRestriction()
if not self.autocommits:
@@ -240,7 +242,7 @@ class database:
return False
def _validate_entry(self, chf_type, entry, ebuild_hash, eclass_db):
- hash_key = "_%s_" % chf_type
+ hash_key = f"_{chf_type}_"
try:
entry_hash = entry[hash_key]
except KeyError:
@@ -311,12 +313,11 @@ def serialize_eclasses(eclass_dict, chf_type="mtime", paths=True):
getter = operator.attrgetter(chf_type)
if paths:
return "\t".join(
- "%s\t%s\t%s" % (k, v.eclass_dir, getter(v))
+ f"{k}\t{v.eclass_dir}\t{getter(v)}"
for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
return "\t".join(
- "%s\t%s" % (k, getter(v))
- for k, v in sorted(eclass_dict.items(), key=_keysorter)
+ f"{k}\t{getter(v)}" for k, v in sorted(eclass_dict.items(), key=_keysorter)
)
@@ -349,11 +350,11 @@ def reconstruct_eclasses(cpv, eclass_string, chf_type="mtime", paths=True):
if paths:
if len(eclasses) % 3 != 0:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
elif len(eclasses) % 2 != 0:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
d = {}
try:
@@ -367,11 +368,11 @@ def reconstruct_eclasses(cpv, eclass_string, chf_type="mtime", paths=True):
d[name] = converter(val)
except IndexError:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ was of invalid len %i" % len(eclasses)
+ cpv, f"_eclasses_ was of invalid len {len(eclasses)}"
)
except ValueError:
raise cache_errors.CacheCorruption(
- cpv, "_eclasses_ not valid for chf_type {}".format(chf_type)
+ cpv, f"_eclasses_ not valid for chf_type {chf_type}"
)
del eclasses
return d
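A round-trip sketch for the two helpers above. The namedtuple stands in for
the real eclass cache entry, which exposes eclass_dir plus one attribute per
chf type (mtime here); the paths and values are hypothetical:

    from collections import namedtuple
    from portage.cache.template import serialize_eclasses, reconstruct_eclasses

    Entry = namedtuple("Entry", ("eclass_dir", "mtime"))
    ecs = {"eutils": Entry("/var/db/repos/gentoo/eclass", 1700000000)}
    s = serialize_eclasses(ecs, chf_type="mtime", paths=True)
    d = reconstruct_eclasses("app-misc/foo-1.0", s, chf_type="mtime", paths=True)
    # d maps "eutils" back to its eclass_dir and integer mtime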
diff --git a/lib/portage/cache/volatile.py b/lib/portage/cache/volatile.py
index 67afd20e7..6c0473b32 100644
--- a/lib/portage/cache/volatile.py
+++ b/lib/portage/cache/volatile.py
@@ -6,7 +6,6 @@ from portage.cache import template
class database(template.database):
-
autocommits = True
serialize_eclasses = False
store_eclass_paths = False
@@ -14,7 +13,7 @@ class database(template.database):
def __init__(self, *args, **config):
config.pop("gid", None)
config.pop("perms", None)
- super(database, self).__init__(*args, **config)
+ super().__init__(*args, **config)
self._data = {}
self._delitem = self._data.__delitem__
diff --git a/lib/portage/checksum.py b/lib/portage/checksum.py
index c19a50df9..b10643476 100644
--- a/lib/portage/checksum.py
+++ b/lib/portage/checksum.py
@@ -26,11 +26,11 @@ from portage.localization import _
# SHA256: hashlib
# SHA512: hashlib
# RMD160: hashlib, pycrypto, mhash
-# WHIRLPOOL: hashlib, mhash, bundled
-# BLAKE2B (512): hashlib (3.6+), pyblake2, pycrypto
-# BLAKE2S (512): hashlib (3.6+), pyblake2, pycrypto
-# SHA3_256: hashlib (3.6+), pysha3, pycrypto
-# SHA3_512: hashlib (3.6+), pysha3, pycrypto
+# WHIRLPOOL: hashlib, bundled (C), bundled (Python)
+# BLAKE2B (512): hashlib
+# BLAKE2S (512): hashlib
+# SHA3_256: hashlib
+# SHA3_512: hashlib
# Dict of all available hash functions
@@ -43,8 +43,8 @@ def _open_file(filename):
return open(
_unicode_encode(filename, encoding=_encodings["fs"], errors="strict"), "rb"
)
- except IOError as e:
- func_call = "open('%s')" % _unicode_decode(filename)
+ except OSError as e:
+ func_call = f"open('{_unicode_decode(filename)}')"
if e.errno == errno.EPERM:
raise portage.exception.OperationNotPermitted(func_call)
elif e.errno == errno.EACCES:
@@ -56,7 +56,6 @@ def _open_file(filename):
class _generate_hash_function:
-
__slots__ = ("_hashobject",)
def __init__(self, hashtype, hashobject, origin="unknown"):
@@ -100,10 +99,15 @@ class _generate_hash_function:
# Define hash functions, try to use the best module available. Preferred
# modules should go first, latter ones should check if the hashes aren't
# already defined.
-
-
-# Use hashlib from python-2.5 if available and prefer it over pycrypto and internal fallbacks.
+# Use hashlib if available and prefer it over pycrypto and internal fallbacks.
+#
# Need special handling for RMD160/WHIRLPOOL as they may not always be provided by hashlib.
+# We keep fallbacks for RMD160/WHIRLPOOL for now as newer OpenSSLs don't expose them
+# by default.
+# See also
+# - https://github.com/python/cpython/issues/91257
+# - https://github.com/python/cpython/issues/92876
+# - https://bugs.gentoo.org/846389
_generate_hash_function("MD5", hashlib.md5, origin="hashlib")
_generate_hash_function("SHA1", hashlib.sha1, origin="hashlib")
_generate_hash_function("SHA256", hashlib.sha256, origin="hashlib")
@@ -111,7 +115,6 @@ _generate_hash_function("SHA512", hashlib.sha512, origin="hashlib")
for local_name, hash_name in (
("RMD160", "ripemd160"),
("WHIRLPOOL", "whirlpool"),
- # available since Python 3.6
("BLAKE2B", "blake2b"),
("BLAKE2S", "blake2s"),
("SHA3_256", "sha3_256"),
@@ -127,76 +130,6 @@ for local_name, hash_name in (
)
-# Support using pyblake2 as fallback for python<3.6
-if "BLAKE2B" not in hashfunc_map or "BLAKE2S" not in hashfunc_map:
- try:
- import pyblake2
-
- _generate_hash_function("BLAKE2B", pyblake2.blake2b, origin="pyblake2")
- _generate_hash_function("BLAKE2S", pyblake2.blake2s, origin="pyblake2")
- except ImportError:
- pass
-
-
-# Support using pysha3 as fallback for python<3.6
-if "SHA3_256" not in hashfunc_map or "SHA3_512" not in hashfunc_map:
- try:
- import sha3
-
- _generate_hash_function("SHA3_256", sha3.sha3_256, origin="pysha3")
- _generate_hash_function("SHA3_512", sha3.sha3_512, origin="pysha3")
- except ImportError:
- pass
-
-
-# Support pygcrypt as fallback using optimized routines from libgcrypt
-# (GnuPG).
-gcrypt_algos = frozenset(
- ("RMD160", "WHIRLPOOL", "SHA3_256", "SHA3_512", "STREEBOG256", "STREEBOG512")
-)
-# Note: currently disabled due to resource exhaustion bugs in pygcrypt.
-# Please do not reenable until upstream has a fix.
-# https://bugs.gentoo.org/615620
-if False:
- # if gcrypt_algos.difference(hashfunc_map):
- try:
- import binascii
- import pygcrypt.hashcontext
-
- class GCryptHashWrapper:
- def __init__(self, algo):
- self._obj = pygcrypt.hashcontext.HashContext(algo=algo, secure=False)
-
- def update(self, data):
- self._obj.write(data)
-
- def hexdigest(self):
- return binascii.b2a_hex(self._obj.read()).decode()
-
- name_mapping = {
- "RMD160": "ripemd160",
- "WHIRLPOOL": "whirlpool",
- "SHA3_256": "sha3-256",
- "SHA3_512": "sha3-512",
- "STREEBOG256": "stribog256",
- "STREEBOG512": "stribog512",
- }
-
- for local_name, gcry_name in name_mapping.items():
- try:
- pygcrypt.hashcontext.HashContext(algo=gcry_name)
- except Exception: # yes, it throws Exception...
- pass
- else:
- _generate_hash_function(
- local_name,
- functools.partial(GCryptHashWrapper, gcry_name),
- origin="pygcrypt",
- )
- except ImportError:
- pass
-
-
# Use pycrypto when available, prefer it over the internal fallbacks
# Check for 'new' attributes, since they can be missing if the module
# is broken somehow.
@@ -208,112 +141,38 @@ if "RMD160" not in hashfunc_map:
if rmd160hash_ is not None:
_generate_hash_function("RMD160", rmd160hash_, origin="pycrypto")
except ImportError:
- pass
-
-# The following hashes were added in pycryptodome (pycrypto fork)
-if "BLAKE2B" not in hashfunc_map:
- try:
- from Crypto.Hash import BLAKE2b
-
- blake2bhash_ = getattr(BLAKE2b, "new", None)
- if blake2bhash_ is not None:
- _generate_hash_function(
- "BLAKE2B",
- functools.partial(blake2bhash_, digest_bytes=64),
- origin="pycrypto",
- )
- except ImportError:
- pass
-
-if "BLAKE2S" not in hashfunc_map:
- try:
- from Crypto.Hash import BLAKE2s
-
- blake2shash_ = getattr(BLAKE2s, "new", None)
- if blake2shash_ is not None:
- _generate_hash_function(
- "BLAKE2S",
- functools.partial(blake2shash_, digest_bytes=32),
- origin="pycrypto",
- )
- except ImportError:
- pass
-
-if "SHA3_256" not in hashfunc_map:
- try:
- from Crypto.Hash import SHA3_256
-
- sha3_256hash_ = getattr(SHA3_256, "new", None)
- if sha3_256hash_ is not None:
- _generate_hash_function("SHA3_256", sha3_256hash_, origin="pycrypto")
- except ImportError:
- pass
-
-if "SHA3_512" not in hashfunc_map:
- try:
- from Crypto.Hash import SHA3_512
-
- sha3_512hash_ = getattr(SHA3_512, "new", None)
- if sha3_512hash_ is not None:
- _generate_hash_function("SHA3_512", sha3_512hash_, origin="pycrypto")
- except ImportError:
- pass
-
-
-# Try to use mhash if available
-# mhash causes GIL presently, so it gets less priority than hashlib and
-# pycrypto. However, it might be the only accelerated implementation of
-# WHIRLPOOL available.
-if "RMD160" not in hashfunc_map or "WHIRLPOOL" not in hashfunc_map:
- try:
- import mhash
-
- for local_name, hash_name in (
- ("RMD160", "RIPEMD160"),
- ("WHIRLPOOL", "WHIRLPOOL"),
- ):
- if local_name not in hashfunc_map and hasattr(
- mhash, "MHASH_%s" % hash_name
- ):
- _generate_hash_function(
- local_name,
- functools.partial(
- mhash.MHASH, getattr(mhash, "MHASH_%s" % hash_name)
- ),
- origin="mhash",
- )
- except ImportError:
- pass
-
-
-# Support pygost as fallback streebog provider
-# It's mostly provided as a reference implementation; it's pure Python,
-# slow and reads all data to memory (i.e. doesn't hash on update()...)
-if "STREEBOG256" not in hashfunc_map or "STREEBOG512" not in hashfunc_map:
- try:
- import pygost.gost34112012
-
- _generate_hash_function(
- "STREEBOG256",
- functools.partial(pygost.gost34112012.GOST34112012, digest_size=32),
- origin="pygost",
- )
- _generate_hash_function(
- "STREEBOG512",
- functools.partial(pygost.gost34112012.GOST34112012, digest_size=64),
- origin="pygost",
- )
- except ImportError:
- pass
+ # Try to use mhash if available
+ # mhash does not release the GIL, so it gets less priority than hashlib and
+ # pycrypto. However, it might be the only accelerated implementation of
+ # RMD160 available here.
+ try:
+ import mhash
+
+ for local_name, hash_name in (("RMD160", "RIPEMD160"),):
+ if local_name not in hashfunc_map and hasattr(
+ mhash, f"MHASH_{hash_name}"
+ ):
+ _generate_hash_function(
+ local_name,
+ functools.partial(
+ mhash.MHASH, getattr(mhash, f"MHASH_{hash_name}")
+ ),
+ origin="mhash",
+ )
+ except ImportError:
+ pass
_whirlpool_unaccelerated = False
if "WHIRLPOOL" not in hashfunc_map:
# Bundled WHIRLPOOL implementation
- _whirlpool_unaccelerated = True
- from portage.util.whirlpool import new as _new_whirlpool
+ from portage.util.whirlpool import CWhirlpool, PyWhirlpool
- _generate_hash_function("WHIRLPOOL", _new_whirlpool, origin="bundled")
+ if CWhirlpool.is_available:
+ _generate_hash_function("WHIRLPOOL", CWhirlpool, origin="bundled-c")
+ else:
+ _whirlpool_unaccelerated = True
+ _generate_hash_function("WHIRLPOOL", PyWhirlpool, origin="bundled-py")
# There is only one implementation for size
@@ -339,16 +198,13 @@ if os.path.exists(PRELINK_BINARY):
proc.communicate()
status = proc.wait()
if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
- prelink_capable = 1
+ prelink_capable = True
del cmd, proc, status
def is_prelinkable_elf(filename):
- f = _open_file(filename)
- try:
+ with _open_file(filename) as f:
magic = f.read(17)
- finally:
- f.close()
return (
len(magic) == 17
and magic.startswith(b"\x7fELF")
@@ -367,9 +223,7 @@ def _perform_md5_merge(x, **kwargs):
def perform_all(x, calc_prelink=0):
- mydict = {}
- for k in hashfunc_keys:
- mydict[k] = perform_checksum(x, k, calc_prelink)[0]
+ mydict = {k: perform_checksum(x, k, calc_prelink)[0] for k in hashfunc_keys}
return mydict
@@ -427,9 +281,8 @@ class _hash_filter:
for token in self._tokens:
if token in matches:
return True
- if token[:1] == "-":
- if token[1:] in matches:
- return False
+ if token[:1] == "-" and token[1:] in matches:
+ return False
return False
@@ -459,11 +312,11 @@ def _apply_hash_filter(digests, hash_filter):
break
if modified:
- digests = dict(
- (k, v)
- for (k, v) in digests.items()
+ digests = {
+ k: v
+ for k, v in digests.items()
if k == "size" or k in verifiable_hash_types
- )
+ }
return digests
@@ -525,12 +378,11 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
if mydict[x] != myhash:
if strict:
raise portage.exception.DigestException(
- ("Failed to verify '$(file)s' on " + "checksum type '%(type)s'")
- % {"file": filename, "type": x}
+ f"Failed to verify '{filename}' on checksum type '{x}'"
)
else:
file_is_ok = False
- reason = (("Failed on %s verification" % x), myhash, mydict[x])
+ reason = (f"Failed on {x} verification", myhash, mydict[x])
break
return file_is_ok, reason
@@ -578,11 +430,10 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0):
try:
if hashname not in hashfunc_keys:
raise portage.exception.DigestException(
- hashname
- + " hash function not available (needs dev-python/pycrypto)"
+ f"{hashname} hash function not available (needs dev-python/pycrypto)"
)
myhash, mysize = hashfunc_map[hashname].checksum_file(myfilename)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
raise portage.exception.FileNotFound(myfilename)
elif e.errno == portage.exception.PermissionDenied.errno:
@@ -618,8 +469,7 @@ def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
for x in hashes:
if x not in hashfunc_keys:
raise portage.exception.DigestException(
- x
- + " hash function not available (needs dev-python/pycrypto or >=dev-lang/python-2.5)"
+ f"{x} hash function not available (needs dev-python/pycrypto)"
)
rVal[x] = perform_checksum(filename, x, calc_prelink)[0]
return rVal
@@ -638,6 +488,6 @@ def checksum_str(data, hashname="MD5"):
"""
if hashname not in hashfunc_keys:
raise portage.exception.DigestException(
- hashname + " hash function not available (needs dev-python/pycrypto)"
+ f"{hashname} hash function not available (needs dev-python/pycrypto)"
)
return hashfunc_map[hashname].checksum_str(data)
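After the cleanup above, hashlib provides nearly all digests, with the bundled
C/Python Whirlpool and the pycrypto/mhash RMD160 paths as the remaining
fallbacks. A quick sketch of the public entry points; which names end up
registered depends on the local hashlib build:

    from portage.checksum import checksum_str, get_valid_checksum_keys

    print(sorted(get_valid_checksum_keys()))  # e.g. BLAKE2B, MD5, SHA256, ...
    print(checksum_str(b"hello", "SHA256"))   # hex digest string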
diff --git a/lib/portage/const.py b/lib/portage/const.py
index abe0ef6c6..2154213b7 100644
--- a/lib/portage/const.py
+++ b/lib/portage/const.py
@@ -1,9 +1,11 @@
# portage: Constants
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import os
+from portage import installation
+
# ===========================================================================
# START OF CONSTANTS -- START OF CONSTANTS -- START OF CONSTANTS -- START OF
# ===========================================================================
@@ -28,56 +30,84 @@ import os
# variables used with config_root (these need to be relative)
USER_CONFIG_PATH = "etc/portage"
-BINREPOS_CONF_FILE = USER_CONFIG_PATH + "/binrepos.conf"
-MAKE_CONF_FILE = USER_CONFIG_PATH + "/make.conf"
-MODULES_FILE_PATH = USER_CONFIG_PATH + "/modules"
-CUSTOM_PROFILE_PATH = USER_CONFIG_PATH + "/profile"
-USER_VIRTUALS_FILE = USER_CONFIG_PATH + "/virtuals"
-EBUILD_SH_ENV_FILE = USER_CONFIG_PATH + "/bashrc"
-EBUILD_SH_ENV_DIR = USER_CONFIG_PATH + "/env"
-CUSTOM_MIRRORS_FILE = USER_CONFIG_PATH + "/mirrors"
-COLOR_MAP_FILE = USER_CONFIG_PATH + "/color.map"
-PROFILE_PATH = USER_CONFIG_PATH + "/make.profile"
-MAKE_DEFAULTS_FILE = PROFILE_PATH + "/make.defaults" # FIXME: not used
-DEPRECATED_PROFILE_FILE = PROFILE_PATH + "/deprecated"
+BINREPOS_CONF_FILE = f"{USER_CONFIG_PATH}/binrepos.conf"
+MAKE_CONF_FILE = f"{USER_CONFIG_PATH}/make.conf"
+MODULES_FILE_PATH = f"{USER_CONFIG_PATH}/modules"
+CUSTOM_PROFILE_PATH = f"{USER_CONFIG_PATH}/profile"
+USER_VIRTUALS_FILE = f"{USER_CONFIG_PATH}/virtuals"
+EBUILD_SH_ENV_FILE = f"{USER_CONFIG_PATH}/bashrc"
+EBUILD_SH_ENV_DIR = f"{USER_CONFIG_PATH}/env"
+CUSTOM_MIRRORS_FILE = f"{USER_CONFIG_PATH}/mirrors"
+COLOR_MAP_FILE = f"{USER_CONFIG_PATH}/color.map"
+PROFILE_PATH = f"{USER_CONFIG_PATH}/make.profile"
+MAKE_DEFAULTS_FILE = f"{PROFILE_PATH}/make.defaults" # FIXME: not used
+DEPRECATED_PROFILE_FILE = f"{PROFILE_PATH}/deprecated"
# variables used with targetroot (these need to be absolute, but not
# have a leading '/' since they are used directly with os.path.join on EROOT)
VDB_PATH = "var/db/pkg"
CACHE_PATH = "var/cache/edb"
PRIVATE_PATH = "var/lib/portage"
-WORLD_FILE = PRIVATE_PATH + "/world"
-WORLD_SETS_FILE = PRIVATE_PATH + "/world_sets"
-CONFIG_MEMORY_FILE = PRIVATE_PATH + "/config"
+WORLD_FILE = f"{PRIVATE_PATH}/world"
+WORLD_SETS_FILE = f"{PRIVATE_PATH}/world_sets"
+CONFIG_MEMORY_FILE = f"{PRIVATE_PATH}/config"
NEWS_LIB_PATH = "var/lib/gentoo"
# these variables get EPREFIX prepended automagically when they are
# translated into their lowercase variants
-DEPCACHE_PATH = "/var/cache/edb/dep"
-GLOBAL_CONFIG_PATH = "/usr/share/portage/config"
+DEPCACHE_PATH = f"/{CACHE_PATH}/dep"
+
+if installation.TYPE == installation.TYPES.MODULE:
+ GLOBAL_CONFIG_PATH = "/share/portage/config"
+else:
+ GLOBAL_CONFIG_PATH = "/usr/share/portage/config"
# these variables are not used with target_root or config_root
# NOTE: Use realpath(__file__) so that python module symlinks in site-packages
# are followed back to the real location of the whole portage installation.
-# NOTE: Please keep PORTAGE_BASE_PATH in one line to help substitutions.
-# fmt:off
-PORTAGE_BASE_PATH = os.path.join(os.sep, os.sep.join(os.path.realpath(__file__.rstrip("co")).split(os.sep)[:-3]))
-# fmt:on
-PORTAGE_BIN_PATH = PORTAGE_BASE_PATH + "/bin"
+if installation.TYPE == installation.TYPES.SYSTEM:
+ PORTAGE_BASE_PATH = """@PORTAGE_BASE_PATH@"""
+elif installation.TYPE == installation.TYPES.MODULE:
+ PORTAGE_BASE_PATH = os.path.join(
+ os.path.realpath(__import__("sys").prefix), "lib/portage"
+ )
+else:
+ PORTAGE_BASE_PATH = os.path.join(
+ os.sep, *os.path.realpath(__file__).split(os.sep)[:-3]
+ )
+
+if installation.TYPE == installation.TYPES.SYSTEM:
+ PORTAGE_BIN_PATH = """@PORTAGE_BIN_PATH@"""
+else:
+ PORTAGE_BIN_PATH = f"{PORTAGE_BASE_PATH}/bin"
+
+# The EPREFIX for the current install is hardcoded here, but access to this
+# constant should be minimal, in favor of access via the EPREFIX setting of
+# a config instance (since it's possible to construct a config instance with
+# a different EPREFIX). Therefore, the EPREFIX constant should *NOT* be used
+# in the definition of any other constants within this file.
+if installation.TYPE == installation.TYPES.SYSTEM:
+ EPREFIX = BINARY_PREFIX = "@EPREFIX@"
+elif installation.TYPE == installation.TYPES.MODULE:
+ EPREFIX = __import__("sys").prefix
+ BINARY_PREFIX = ""
+else:
+ EPREFIX = BINARY_PREFIX = ""
+
PORTAGE_PYM_PATH = os.path.realpath(os.path.join(__file__, "../.."))
-LOCALE_DATA_PATH = PORTAGE_BASE_PATH + "/locale" # FIXME: not used
-EBUILD_SH_BINARY = PORTAGE_BIN_PATH + "/ebuild.sh"
-MISC_SH_BINARY = PORTAGE_BIN_PATH + "/misc-functions.sh"
-SANDBOX_BINARY = "/usr/bin/sandbox"
-FAKEROOT_BINARY = "/usr/bin/fakeroot"
-BASH_BINARY = "/bin/bash"
-MOVE_BINARY = "/bin/mv"
-PRELINK_BINARY = "/usr/sbin/prelink"
+LOCALE_DATA_PATH = f"{PORTAGE_BASE_PATH}/locale" # FIXME: not used
+EBUILD_SH_BINARY = f"{PORTAGE_BIN_PATH}/ebuild.sh"
+MISC_SH_BINARY = f"{PORTAGE_BIN_PATH}/misc-functions.sh"
+SANDBOX_BINARY = f"{BINARY_PREFIX}/usr/bin/sandbox"
+FAKEROOT_BINARY = f"{BINARY_PREFIX}/usr/bin/fakeroot"
+BASH_BINARY = f"{BINARY_PREFIX}/bin/bash"
+MOVE_BINARY = f"{BINARY_PREFIX}/bin/mv"
+PRELINK_BINARY = f"{BINARY_PREFIX}/usr/sbin/prelink"
INVALID_ENV_FILE = "/etc/spork/is/not/valid/profile.env"
MERGING_IDENTIFIER = "-MERGING-"
REPO_NAME_FILE = "repo_name"
-REPO_NAME_LOC = "profiles" + "/" + REPO_NAME_FILE
+REPO_NAME_LOC = f"profiles/{REPO_NAME_FILE}"
PORTAGE_PACKAGE_ATOM = "sys-apps/portage"
LIBC_PACKAGE_ATOM = "virtual/libc"
@@ -124,18 +154,21 @@ EBUILD_PHASES = (
"other",
)
SUPPORTED_FEATURES = frozenset(
- [
+ (
"assume-digests",
"binpkg-docompress",
"binpkg-dostrip",
+ "binpkg-ignore-signature",
"binpkg-logs",
"binpkg-multi-instance",
+ "binpkg-request-signature",
+ "binpkg-signing",
"buildpkg",
+ "buildpkg-live",
"buildsyspkg",
"candy",
"case-insensitive-fs",
"ccache",
- "cgroup",
"chflags",
"clean-logs",
"collision-protect",
@@ -143,6 +176,7 @@ SUPPORTED_FEATURES = frozenset(
"compressdebug",
"compress-index",
"config-protect-if-modified",
+ "dedupdebug",
"digest",
"distcc",
"distlocks",
@@ -152,8 +186,8 @@ SUPPORTED_FEATURES = frozenset(
"fakeroot",
"fixlafiles",
"force-mirror",
- "force-prefix",
"getbinpkg",
+ "gpg-keepalive",
"icecream",
"installsources",
"ipc-sandbox",
@@ -161,6 +195,7 @@ SUPPORTED_FEATURES = frozenset(
"keepwork",
"lmirror",
"merge-sync",
+ "merge-wait",
"metadata-transfer",
"mirror",
"mount-sandbox",
@@ -209,28 +244,22 @@ SUPPORTED_FEATURES = frozenset(
"userpriv",
"usersandbox",
"usersync",
+ "warn-on-large-env",
"webrsync-gpg",
"xattr",
- ]
+ )
)
EAPI = 8
HASHING_BLOCKSIZE = 32768
-MANIFEST2_HASH_DEFAULTS = frozenset(["BLAKE2B", "SHA512"])
+MANIFEST2_HASH_DEFAULTS = frozenset(("BLAKE2B", "SHA512"))
MANIFEST2_HASH_DEFAULT = "BLAKE2B"
MANIFEST2_IDENTIFIERS = ("AUX", "MISC", "DIST", "EBUILD")
-# The EPREFIX for the current install is hardcoded here, but access to this
-# constant should be minimal, in favor of access via the EPREFIX setting of
-# a config instance (since it's possible to contruct a config instance with
-# a different EPREFIX). Therefore, the EPREFIX constant should *NOT* be used
-# in the definition of any other constants within this file.
-EPREFIX = ""
-
-# pick up EPREFIX from the environment if set
+# Redefine EPREFIX from the environment if set
if "PORTAGE_OVERRIDE_EPREFIX" in os.environ:
EPREFIX = os.environ["PORTAGE_OVERRIDE_EPREFIX"]
if EPREFIX:
@@ -242,7 +271,7 @@ VCS_DIRS = ("CVS", "RCS", "SCCS", ".bzr", ".git", ".hg", ".svn")
# List of known live eclasses. Keep it in sync with cnf/sets/portage.conf
LIVE_ECLASSES = frozenset(
- [
+ (
"bzr",
"cvs",
"darcs",
@@ -251,11 +280,15 @@ LIVE_ECLASSES = frozenset(
"golang-vcs",
"mercurial",
"subversion",
- ]
+ )
)
SUPPORTED_BINPKG_FORMATS = ("tar", "rpm")
+
+SUPPORTED_GENTOO_BINPKG_FORMATS = ("xpak", "gpkg")
+
SUPPORTED_XPAK_EXTENSIONS = (".tbz2", ".xpak")
+SUPPORTED_GPKG_EXTENSIONS = (".gpkg.tar",)
# Time formats used in various places like metadata.chk.
TIMESTAMP_FORMAT = "%a, %d %b %Y %H:%M:%S +0000" # to be used with time.gmtime()
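
The const.py hunks above split the path and prefix constants into three cases keyed on installation.TYPE: a system install bakes in build-time substitutions such as @EPREFIX@, a module install derives paths from sys.prefix, and a plain source checkout leaves the prefixes empty. A minimal sketch of how the prefixed tool paths and the environment override then resolve, with a hypothetical empty BINARY_PREFIX standing in for portage's real installation module:

    import os

    # Stand-in: empty for a source checkout, "@EPREFIX@"-substituted
    # for a system install (assumption for illustration).
    BINARY_PREFIX = ""

    # Redefine EPREFIX from the environment if set, as const.py does.
    EPREFIX = os.environ.get("PORTAGE_OVERRIDE_EPREFIX", "")

    BASH_BINARY = f"{BINARY_PREFIX}/bin/bash"
    MOVE_BINARY = f"{BINARY_PREFIX}/bin/mv"
    print(BASH_BINARY, MOVE_BINARY)  # "/bin/bash /bin/mv" when unprefixed
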
diff --git a/lib/portage/cvstree.py b/lib/portage/cvstree.py
index 38652db33..96d028d2b 100644
--- a/lib/portage/cvstree.py
+++ b/lib/portage/cvstree.py
@@ -2,7 +2,6 @@
# Copyright 1998-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import io
import re
import stat
import time
@@ -48,17 +47,16 @@ def isadded(entries, path):
filename = os.path.basename(path)
try:
- myfile = io.open(
+ myfile = open(
_unicode_encode(
os.path.join(basedir, "CVS", "Entries"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
- except IOError:
+ except OSError:
return 0
mylines = myfile.readlines()
myfile.close()
@@ -107,8 +105,7 @@ def findoption(entries, pattern, recursive=0, basedir=""):
if recursive:
for mydir, mydata in entries["dirs"].items():
- for x in findoption(mydata, pattern, recursive, basedir + mydir):
- yield x
+ yield from findoption(mydata, pattern, recursive, basedir + mydir)
def findchanged(entries, recursive=0, basedir=""):
@@ -235,9 +232,8 @@ def getentries(mydir, recursive=0):
if not os.path.exists(mydir):
return entries
try:
- myfile = io.open(
+ myfile = open(
_unicode_encode(myfn, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
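
The cvstree.py changes are behavior-preserving cleanups: in Python 3 the builtin open() is the same object as io.open(), and IOError has been an alias of OSError since Python 3.3, so catching OSError is equivalent. A quick check (CPython assumed):

    import io

    assert open is io.open     # builtin open() is io.open() in Python 3
    assert IOError is OSError  # alias since Python 3.3

    try:
        open("/nonexistent/CVS/Entries", encoding="utf-8")
    except OSError as e:
        print(e.errno)         # errno.ENOENT
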
diff --git a/lib/portage/data.py b/lib/portage/data.py
index 09a4dd079..4b9b74c7b 100644
--- a/lib/portage/data.py
+++ b/lib/portage/data.py
@@ -8,6 +8,7 @@ import platform
import pwd
import portage
+from portage.localization import _
portage.proxy.lazyimport.lazyimport(
globals(),
@@ -16,14 +17,11 @@ portage.proxy.lazyimport.lazyimport(
"portage.util.path:first_existing",
"subprocess",
)
-from portage.localization import _
ostype = platform.system()
-userland = None
+userland = "GNU"
if ostype == "DragonFly" or ostype.endswith("BSD"):
userland = "BSD"
-else:
- userland = "GNU"
lchown = getattr(os, "lchown", None)
@@ -78,17 +76,17 @@ def _target_root():
# Handle either empty or unset ROOT.
root = os.sep
root = portage.util.normalize_path(root)
- return root.rstrip(os.sep) + os.sep
+ return f"{root.rstrip(os.sep)}{os.sep}"
def portage_group_warning():
warn_prefix = colorize("BAD", "*** WARNING *** ")
- mylines = [
+ mylines = (
"For security reasons, only system administrators should be",
"allowed in the portage group. Untrusted users or processes",
"can potentially exploit the portage group for attacks such as",
"local privilege escalation.",
- ]
+ )
for x in mylines:
writemsg(warn_prefix, noiselevel=-1)
writemsg(x, noiselevel=-1)
@@ -140,7 +138,6 @@ def _get_global(k):
return globals()[k]
if k == "secpass":
-
unprivileged = False
if hasattr(portage, "settings"):
unprivileged = "unprivileged" in portage.settings.features
@@ -158,15 +155,12 @@ def _get_global(k):
unprivileged = _unprivileged_mode(eroot_or_parent, eroot_st)
v = 0
- if uid == 0:
- v = 2
- elif unprivileged:
+ if uid == 0 or unprivileged:
v = 2
elif _get_global("portage_gid") in os.getgroups():
v = 1
elif k in ("portage_gid", "portage_uid"):
-
# Discover the uid and gid of the portage user/group
keyerror = False
try:
@@ -221,7 +215,7 @@ def _get_global(k):
return portage_gid
if k == "portage_uid":
return portage_uid
- raise AssertionError("unknown name: %s" % k)
+ raise AssertionError(f"unknown name: {k}")
elif k == "userpriv_groups":
v = [_get_global("portage_gid")]
@@ -229,41 +223,43 @@ def _get_global(k):
# Get a list of group IDs for the portage user. Do not use
# grp.getgrall() since it is known to trigger spurious
# SIGPIPE problems with nss_ldap.
- cmd = ["id", "-G", _portage_username]
-
encoding = portage._encodings["content"]
- cmd = [
+ cmd = (
portage._unicode_encode(x, encoding=encoding, errors="strict")
- for x in cmd
- ]
+ for x in ("id", "-G", _portage_username)
+ )
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
myoutput = proc.communicate()[0]
status = proc.wait()
if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
- for x in portage._unicode_decode(
- myoutput, encoding=encoding, errors="strict"
- ).split():
+
+ def check(x):
try:
- v.append(int(x))
+ return int(x)
except ValueError:
- pass
- v = sorted(set(v))
+ return None
+
+ unicode_decode = portage._unicode_decode(
+ myoutput, encoding=encoding, errors="strict"
+ )
+ checked_v = (check(x) for x in unicode_decode.split())
+ filtered_v = (x for x in checked_v if x)
+ v = sorted(set(filtered_v))
# Avoid instantiating portage.settings when the desired
# variable is set in os.environ.
elif k in ("_portage_grpname", "_portage_username"):
- v = None
+ v = "portage"
+ env_key = "PORTAGE_USERNAME"
if k == "_portage_grpname":
env_key = "PORTAGE_GRPNAME"
- else:
- env_key = "PORTAGE_USERNAME"
if env_key in os.environ:
v = os.environ[env_key]
elif hasattr(portage, "settings"):
- v = portage.settings.get(env_key)
+ v = portage.settings.get(env_key, v)
else:
# The config class has equivalent code, but we also need to
# do it here if _disable_legacy_globals() has been called.
@@ -290,11 +286,8 @@ def _get_global(k):
pass
else:
v = pwd_struct.pw_name
-
- if v is None:
- v = "portage"
else:
- raise AssertionError("unknown name: %s" % k)
+ raise AssertionError(f"unknown name: {k}")
globals()[k] = v
_initialized_globals.add(k)
@@ -302,7 +295,6 @@ def _get_global(k):
class _GlobalProxy(portage.proxy.objectproxy.ObjectProxy):
-
__slots__ = ("_name",)
def __init__(self, name):
@@ -335,7 +327,6 @@ def _init(settings):
"_portage_grpname" not in _initialized_globals
and "_portage_username" not in _initialized_globals
):
-
# Prevents "TypeError: expected string" errors
# from grp.getgrnam() with PyPy
native_string = platform.python_implementation() == "PyPy"
@@ -354,9 +345,7 @@ def _init(settings):
if "secpass" not in _initialized_globals:
v = 0
- if uid == 0:
- v = 2
- elif "unprivileged" in settings.features:
+ if uid == 0 or "unprivileged" in settings.features:
v = 2
elif portage_gid in os.getgroups():
v = 1
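
The secpass hunks in data.py merge the uid == 0 and "unprivileged" branches, since both assign the same privilege level. A standalone restatement of the collapsed conditional (the function name is illustrative, not portage API):

    import os

    def compute_secpass(uid, portage_gid, unprivileged=False):
        # Root and "unprivileged" mode both grant full privileges (2);
        # membership in the portage group grants partial privileges (1).
        if uid == 0 or unprivileged:
            return 2
        if portage_gid in os.getgroups():
            return 1
        return 0

    print(compute_secpass(os.getuid(), 250))
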
diff --git a/lib/portage/dbapi/IndexedPortdb.py b/lib/portage/dbapi/IndexedPortdb.py
index 013806b57..6f73d9d60 100644
--- a/lib/portage/dbapi/IndexedPortdb.py
+++ b/lib/portage/dbapi/IndexedPortdb.py
@@ -2,7 +2,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import functools
import operator
import os
@@ -44,7 +43,6 @@ class IndexedPortdb:
)
def __init__(self, portdb):
-
self._portdb = portdb
for k in self._copy_attrs:
@@ -55,7 +53,6 @@ class IndexedPortdb:
self._unindexed_cp_map = None
def _init_index(self):
-
cp_map = {}
desc_cache = {}
self._desc_cache = desc_cache
@@ -77,8 +74,8 @@ class IndexedPortdb:
f = None
for filename in filenames:
try:
- f = io.open(filename, encoding=_encodings["repo.content"])
- except IOError as e:
+ f = open(filename, encoding=_encodings["repo.content"])
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
@@ -119,7 +116,6 @@ class IndexedPortdb:
cp_group_iter = MultiIterGroupBy(streams, key=operator.attrgetter("cp"))
for cp_group in cp_group_iter:
-
new_cp = None
cp_list = cp_map.get(cp_group[0].cp)
if cp_list is None:
diff --git a/lib/portage/dbapi/_ContentsCaseSensitivityManager.py b/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
index f9cefecf6..63862ee4a 100644
--- a/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
+++ b/lib/portage/dbapi/_ContentsCaseSensitivityManager.py
@@ -67,10 +67,10 @@ class ContentsCaseSensitivityManager:
"""
Initialize data structures for case-insensitive support.
"""
- self._contents_insensitive = dict(
- (k.lower(), v) for k, v in self.getcontents().items()
- )
- self._reverse_key_map = dict((k.lower(), k) for k in self.getcontents())
+ self._contents_insensitive = {
+ k.lower(): v for k, v in self.getcontents().items()
+ }
+ self._reverse_key_map = {k.lower(): k for k in self.getcontents()}
def _keys_case_insensitive(self):
if self._contents_insensitive is None:
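
The dict() calls become dict comprehensions with no change in behavior. The two mappings cooperate on case-insensitive filesystems: one holds the values keyed by the lowercased path, the other recovers the original spelling. A small sketch with made-up contents:

    contents = {"/usr/Bin/Foo": ("obj",), "/etc/Conf": ("cfg",)}

    contents_insensitive = {k.lower(): v for k, v in contents.items()}
    reverse_key_map = {k.lower(): k for k in contents}

    key = "/usr/bin/foo"
    print(contents_insensitive[key])  # ('obj',)
    print(reverse_key_map[key])       # '/usr/Bin/Foo'
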
diff --git a/lib/portage/dbapi/_MergeProcess.py b/lib/portage/dbapi/_MergeProcess.py
index db3f3b105..d9ab2b47a 100644
--- a/lib/portage/dbapi/_MergeProcess.py
+++ b/lib/portage/dbapi/_MergeProcess.py
@@ -1,15 +1,19 @@
-# Copyright 2010-2020 Gentoo Authors
+# Copyright 2010-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
import io
+import multiprocessing
import platform
import fcntl
import portage
from portage import os, _unicode_decode
-from portage.util._ctypes import find_library
+from portage.package.ebuild._ipc.QueryCommand import QueryCommand
+from portage.util._ctypes import load_libc
import portage.elog.messages
from portage.util._async.ForkProcess import ForkProcess
+from portage.util import no_color
class MergeProcess(ForkProcess):
@@ -38,6 +42,7 @@ class MergeProcess(ForkProcess):
"_dblink",
"_elog_keys",
"_locked_vdb",
+ "_mtime_reader",
)
def _start(self):
@@ -48,7 +53,7 @@ class MergeProcess(ForkProcess):
# since closing of file descriptors in the subprocess
# can prevent access to open database connections such
# as that used by the sqlite metadata cache module.
- cpv = "%s/%s" % (self.mycat, self.mypkg)
+ cpv = f"{self.mycat}/{self.mypkg}"
settings = self.settings
if cpv != settings.mycpv or "EAPI" not in settings.configdict["pkg"]:
settings.reload()
@@ -59,7 +64,7 @@ class MergeProcess(ForkProcess):
# process, so that it's only done once rather than
# for each child process.
if platform.system() == "Linux" and "merge-sync" in settings.features:
- find_library("c")
+ load_libc()
# Inherit stdin by default, so that the pdb SIGUSR1
# handler is usable for the subprocess.
@@ -70,7 +75,7 @@ class MergeProcess(ForkProcess):
self.fd_pipes.setdefault(0, portage._get_stdin().fileno())
self.log_filter_file = self.settings.get("PORTAGE_LOG_FILTER_FILE_CMD")
- super(MergeProcess, self)._start()
+ super()._start()
def _lock_vdb(self):
"""
@@ -92,7 +97,7 @@ class MergeProcess(ForkProcess):
self._locked_vdb = False
def _elog_output_handler(self):
- output = self._read_buf(self._elog_reader_fd)
+ output = self._read_buf(self._elog_reader_fd.fileno())
if output:
lines = _unicode_decode(output).split("\n")
if len(lines) == 1:
@@ -108,25 +113,42 @@ class MergeProcess(ForkProcess):
reporter(msg, phase=phase, key=key, out=out)
elif output is not None: # EIO/POLLHUP
- self.scheduler.remove_reader(self._elog_reader_fd)
- os.close(self._elog_reader_fd)
+ self.scheduler.remove_reader(self._elog_reader_fd.fileno())
+ self._elog_reader_fd.close()
self._elog_reader_fd = None
return False
+ def _mtime_handler(self):
+ if self._mtime_reader is not None:
+ try:
+ mtimes = self._mtime_reader.recv()
+ except EOFError:
+ self.scheduler.remove_reader(self._mtime_reader.fileno())
+ self._mtime_reader.close()
+ self._mtime_reader = None
+ else:
+ if self.prev_mtimes is not None:
+ self.prev_mtimes.clear()
+ self.prev_mtimes.update(mtimes)
+
def _spawn(self, args, fd_pipes, **kwargs):
"""
Extend the superclass _spawn method to perform some pre-fork and
post-fork actions.
"""
- elog_reader_fd, elog_writer_fd = os.pipe()
+ elog_reader_fd, elog_writer_fd = multiprocessing.Pipe(duplex=False)
fcntl.fcntl(
- elog_reader_fd,
+ elog_reader_fd.fileno(),
fcntl.F_SETFL,
- fcntl.fcntl(elog_reader_fd, fcntl.F_GETFL) | os.O_NONBLOCK,
+ fcntl.fcntl(elog_reader_fd.fileno(), fcntl.F_GETFL) | os.O_NONBLOCK,
)
+ mtime_reader, mtime_writer = multiprocessing.Pipe(duplex=False)
+ self.scheduler.add_reader(mtime_reader.fileno(), self._mtime_handler)
+ self._mtime_reader = mtime_reader
+
blockers = None
if self.blockers is not None:
# Query blockers in the main process, since closing
@@ -142,9 +164,9 @@ class MergeProcess(ForkProcess):
vartree=self.vartree,
blockers=blockers,
pipe=elog_writer_fd,
+ mtime_pipe=mtime_writer,
)
- fd_pipes[elog_writer_fd] = elog_writer_fd
- self.scheduler.add_reader(elog_reader_fd, self._elog_output_handler)
+ self.scheduler.add_reader(elog_reader_fd.fileno(), self._elog_output_handler)
# If a concurrent emerge process tries to install a package
# in the same SLOT as this one at the same time, there is an
@@ -158,8 +180,34 @@ class MergeProcess(ForkProcess):
self._dblink = mylink
self._elog_reader_fd = elog_reader_fd
- pids = super(MergeProcess, self)._spawn(args, fd_pipes, **kwargs)
- os.close(elog_writer_fd)
+
+ # Since the entire QueryCommand._db is not required, only pass
+ # in tree types that QueryCommand specifically requires.
+ child_db = {}
+ parent_db = portage.db if QueryCommand._db is None else QueryCommand._db
+ for root in parent_db:
+ child_db[root] = {}
+ for tree_type in ("vartree", "porttree"):
+ child_db[root][tree_type] = parent_db[root][tree_type]
+
+ self.target = functools.partial(
+ self._target,
+ self._counter,
+ self._dblink,
+ self.infloc,
+ self.mydbapi,
+ self.myebuild,
+ self.pkgloc,
+ self.prev_mtimes,
+ self.settings,
+ self.unmerge,
+ self.vartree.dbapi,
+ child_db,
+ )
+
+ pids = super()._spawn(args, fd_pipes, **kwargs)
+ elog_writer_fd.close()
+ mtime_writer.close()
self._buf = ""
self._elog_keys = set()
# Discard messages which will be collected by the subprocess,
@@ -174,15 +222,27 @@ class MergeProcess(ForkProcess):
return pids
- def _run(self):
- os.close(self._elog_reader_fd)
- counter = self._counter
- mylink = self._dblink
-
- portage.output.havecolor = self.settings.get("NOCOLOR") not in ("yes", "true")
-
+ @staticmethod
+ def _target(
+ counter,
+ mylink,
+ infloc,
+ mydbapi,
+ myebuild,
+ pkgloc,
+ prev_mtimes,
+ settings,
+ unmerge,
+ vardb,
+ db,
+ ):
+ if QueryCommand._db is None:
+ # Initialize QueryCommand._db for AbstractEbuildProcess/EbuildIpcDaemon
+ # when not using the multiprocessing fork start method.
+ QueryCommand._db = db
+ portage.output.havecolor = not no_color(settings)
# Avoid wasteful updates of the vdb cache.
- self.vartree.dbapi._flush_cache_enabled = False
+ vardb._flush_cache_enabled = False
# In this subprocess we don't want PORTAGE_BACKGROUND to
# suppress stdout/stderr output since they are pipes. We
@@ -190,21 +250,21 @@ class MergeProcess(ForkProcess):
# already be opened by the parent process, so we set the
# "subprocess" value for use in conditional logging code
# involving PORTAGE_LOG_FILE.
- if not self.unmerge:
+ if not unmerge:
# unmerge phases have separate logs
- if self.settings.get("PORTAGE_BACKGROUND") == "1":
- self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "1"
+ if settings.get("PORTAGE_BACKGROUND") == "1":
+ settings["PORTAGE_BACKGROUND_UNMERGE"] = "1"
else:
- self.settings["PORTAGE_BACKGROUND_UNMERGE"] = "0"
- self.settings.backup_changes("PORTAGE_BACKGROUND_UNMERGE")
- self.settings["PORTAGE_BACKGROUND"] = "subprocess"
- self.settings.backup_changes("PORTAGE_BACKGROUND")
+ settings["PORTAGE_BACKGROUND_UNMERGE"] = "0"
+ settings.backup_changes("PORTAGE_BACKGROUND_UNMERGE")
+ settings["PORTAGE_BACKGROUND"] = "subprocess"
+ settings.backup_changes("PORTAGE_BACKGROUND")
rval = 1
- if self.unmerge:
+ if unmerge:
if not mylink.exists():
rval = os.EX_OK
- elif mylink.unmerge(ldpath_mtimes=self.prev_mtimes) == os.EX_OK:
+ elif mylink.unmerge(ldpath_mtimes=prev_mtimes) == os.EX_OK:
mylink.lockdb()
try:
mylink.delete()
@@ -213,11 +273,11 @@ class MergeProcess(ForkProcess):
rval = os.EX_OK
else:
rval = mylink.merge(
- self.pkgloc,
- self.infloc,
- myebuild=self.myebuild,
- mydbapi=self.mydbapi,
- prev_mtimes=self.prev_mtimes,
+ pkgloc,
+ infloc,
+ myebuild=myebuild,
+ mydbapi=mydbapi,
+ prev_mtimes=prev_mtimes,
counter=counter,
)
return rval
@@ -232,7 +292,7 @@ class MergeProcess(ForkProcess):
):
self.postinst_failure = True
self.returncode = os.EX_OK
- super(MergeProcess, self)._proc_join_done(proc, future)
+ super()._proc_join_done(proc, future)
def _unregister(self):
"""
@@ -249,8 +309,8 @@ class MergeProcess(ForkProcess):
self._unlock_vdb()
if self._elog_reader_fd is not None:
- self.scheduler.remove_reader(self._elog_reader_fd)
- os.close(self._elog_reader_fd)
+ self.scheduler.remove_reader(self._elog_reader_fd.fileno())
+ self._elog_reader_fd.close()
self._elog_reader_fd = None
if self._elog_keys is not None:
for key in self._elog_keys:
@@ -259,4 +319,4 @@ class MergeProcess(ForkProcess):
)
self._elog_keys = None
- super(MergeProcess, self)._unregister()
+ super()._unregister()
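
The elog and mtime channels in MergeProcess now use multiprocessing.Pipe() connection objects instead of raw os.pipe() descriptors; the parent still puts the reader end into non-blocking mode so the event loop can poll its fileno(). A reduced sketch of that wiring, with both ends kept in one process rather than across a fork:

    import fcntl
    import multiprocessing
    import os

    reader, writer = multiprocessing.Pipe(duplex=False)
    flags = fcntl.fcntl(reader.fileno(), fcntl.F_GETFL)
    fcntl.fcntl(reader.fileno(), fcntl.F_SETFL, flags | os.O_NONBLOCK)

    writer.send({"/lib64": 1700000000})  # e.g. ldpath mtimes from a child
    print(reader.recv())                 # {'/lib64': 1700000000}
    writer.close()
    reader.close()
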
diff --git a/lib/portage/dbapi/_SyncfsProcess.py b/lib/portage/dbapi/_SyncfsProcess.py
index 6aa04fa58..300ae5398 100644
--- a/lib/portage/dbapi/_SyncfsProcess.py
+++ b/lib/portage/dbapi/_SyncfsProcess.py
@@ -1,8 +1,10 @@
-# Copyright 2012 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
+
from portage import os
-from portage.util._ctypes import find_library, LoadLibrary
+from portage.util._ctypes import load_libc
from portage.util._async.ForkProcess import ForkProcess
@@ -16,27 +18,24 @@ class SyncfsProcess(ForkProcess):
__slots__ = ("paths",)
+ def _start(self):
+ self.target = functools.partial(self._target, self._get_syncfs, self.paths)
+ super()._start()
+
@staticmethod
def _get_syncfs():
-
- filename = find_library("c")
- if filename is not None:
- library = LoadLibrary(filename)
- if library is not None:
- try:
- return library.syncfs
- except AttributeError:
- pass
-
+ (libc, _) = load_libc()
+ if libc is not None:
+ return getattr(libc, "syncfs", None)
return None
- def _run(self):
-
+ @staticmethod
+ def _target(get_syncfs, paths):
syncfs_failed = False
- syncfs = self._get_syncfs()
+ syncfs = get_syncfs()
if syncfs is not None:
- for path in self.paths:
+ for path in paths:
try:
fd = os.open(path, os.O_RDONLY)
except OSError:
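
load_libc() centralizes what the removed find_library()/LoadLibrary() dance did here. Roughly equivalent standalone code using ctypes directly (Linux assumed; the syncfs symbol is absent on other platforms and on very old glibc, hence the guards):

    import ctypes
    import ctypes.util
    import os

    libc_name = ctypes.util.find_library("c")
    libc = ctypes.CDLL(libc_name) if libc_name else None
    syncfs = getattr(libc, "syncfs", None) if libc is not None else None

    if syncfs is not None:
        fd = os.open("/tmp", os.O_RDONLY)
        try:
            syncfs(fd)  # flush the filesystem containing /tmp
        finally:
            os.close(fd)
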
diff --git a/lib/portage/dbapi/_VdbMetadataDelta.py b/lib/portage/dbapi/_VdbMetadataDelta.py
index ebf5fe7cf..f9528fc4f 100644
--- a/lib/portage/dbapi/_VdbMetadataDelta.py
+++ b/lib/portage/dbapi/_VdbMetadataDelta.py
@@ -2,7 +2,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import json
import os
@@ -12,7 +11,6 @@ from portage.versions import cpv_getkey
class VdbMetadataDelta:
-
_format_version = "1"
def __init__(self, vardb):
@@ -32,21 +30,19 @@ class VdbMetadataDelta:
)
def load(self):
-
if not os.path.exists(self._vardb._aux_cache_filename):
# If the primary cache doesn't exist yet, then
# we can't record a delta against it.
return None
try:
- with io.open(
+ with open(
self._vardb._cache_delta_filename,
- "r",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
cache_obj = json.load(f)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
except (SystemExit, KeyboardInterrupt):
@@ -98,7 +94,6 @@ class VdbMetadataDelta:
return None
def recordEvent(self, event, cpv, slot, counter):
-
self._vardb.lock()
try:
deltas_obj = self.load()
@@ -113,7 +108,7 @@ class VdbMetadataDelta:
"package": cpv.cp,
"version": cpv.version,
"slot": slot,
- "counter": "%s" % counter,
+ "counter": f"{counter}",
}
deltas_obj["deltas"].append(delta_node)
diff --git a/lib/portage/dbapi/__init__.py b/lib/portage/dbapi/__init__.py
index 717ab95d5..9105227c7 100644
--- a/lib/portage/dbapi/__init__.py
+++ b/lib/portage/dbapi/__init__.py
@@ -1,10 +1,14 @@
-# Copyright 1998-2020 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["dbapi"]
import functools
+import logging
import re
+import sys
+from typing import Any, Dict, List, Optional, Tuple
+from collections.abc import Sequence
import portage
@@ -22,14 +26,18 @@ from portage.const import MERGING_IDENTIFIER
from portage import os
from portage import auxdbkeys
from portage.eapi import _get_eapi_attrs
-from portage.exception import InvalidData
+from portage.exception import (
+ CorruptionKeyError,
+ InvalidBinaryPackageFormat,
+ InvalidData,
+)
from portage.localization import _
from _emerge.Package import Package
class dbapi:
_category_re = re.compile(r"^\w[-.+\w]*$", re.UNICODE)
- _categories = None
+ _categories: Optional[tuple[str, ...]] = None
_use_mutable = False
_known_keys = frozenset(auxdbkeys)
_pkg_str_aux_keys = ("EAPI", "KEYWORDS", "SLOT", "repository")
@@ -38,7 +46,7 @@ class dbapi:
pass
@property
- def categories(self):
+ def categories(self) -> tuple[str, ...]:
"""
Use self.cp_all() to generate a category list. Mutable instances
can delete the self._categories attribute in cases when the cached
@@ -46,17 +54,17 @@ class dbapi:
"""
if self._categories is not None:
return self._categories
- self._categories = tuple(sorted(set(catsplit(x)[0] for x in self.cp_all())))
+ self._categories = tuple(sorted({catsplit(x)[0] for x in self.cp_all()}))
return self._categories
def close_caches(self):
pass
- def cp_list(self, cp, use_cache=1):
+ def cp_list(self, cp: str, use_cache: int = 1) -> Any:
raise NotImplementedError(self)
@staticmethod
- def _cmp_cpv(cpv1, cpv2):
+ def _cmp_cpv(cpv1, cpv2) -> int:
result = vercmp(cpv1.version, cpv2.version)
if result == 0 and cpv1.build_time is not None and cpv2.build_time is not None:
result = (cpv1.build_time > cpv2.build_time) - (
@@ -65,7 +73,7 @@ class dbapi:
return result
@staticmethod
- def _cpv_sort_ascending(cpv_list):
+ def _cpv_sort_ascending(cpv_list: Sequence[Any]) -> None:
"""
Use this to sort self.cp_list() results in ascending
order. It sorts in place and returns None.
@@ -76,7 +84,7 @@ class dbapi:
# dict to map strings back to their original values.
cpv_list.sort(key=cmp_sort_key(dbapi._cmp_cpv))
- def cpv_all(self):
+ def cpv_all(self) -> list[str]:
"""Return all CPVs in the db
Args:
None
@@ -93,16 +101,18 @@ class dbapi:
cpv_list.extend(self.cp_list(cp))
return cpv_list
- def cp_all(self, sort=False):
+ def cp_all(self, sort: bool = False) -> list[str]:
"""Implement this in a child class
Args
sort - return sorted results
Returns:
A list of strings 1 per CP in the datastore
"""
- return NotImplementedError
+ raise NotImplementedError
- def aux_get(self, mycpv, mylist, myrepo=None):
+ def aux_get(
+ self, mycpv: str, mylist: str, myrepo: Optional[str] = None
+ ) -> list[str]:
"""Return the metadata keys in mylist for mycpv
Args:
mycpv - "sys-apps/foo-1.0"
@@ -114,7 +124,7 @@ class dbapi:
"""
raise NotImplementedError
- def aux_update(self, cpv, metadata_updates):
+ def aux_update(self, cpv: str, metadata_updates: dict[str, Any]) -> None:
"""
Args:
cpv - "sys-apps/foo-1.0"
@@ -124,7 +134,7 @@ class dbapi:
"""
raise NotImplementedError
- def match(self, origdep, use_cache=1):
+ def match(self, origdep: str, use_cache: int = 1):
"""Given a dependency, try to find packages that match
Args:
origdep - Depend atom
@@ -138,7 +148,7 @@ class dbapi:
self._iter_match(mydep, self.cp_list(mydep.cp, use_cache=use_cache))
)
- def _iter_match(self, atom, cpv_iter):
+ def _iter_match(self, atom: str, cpv_iter):
cpv_iter = iter(match_from_list(atom, cpv_iter))
if atom.repo:
cpv_iter = self._iter_match_repo(atom, cpv_iter)
@@ -150,7 +160,7 @@ class dbapi:
def _pkg_str(self, cpv, repo):
"""
- This is used to contruct _pkg_str instances on-demand during
+ This is used to construct _pkg_str instances on-demand during
matching. If cpv is a _pkg_str instance with slot attribute,
then simply return it. Otherwise, fetch metadata and construct
a _pkg_str instance. This may raise KeyError or InvalidData.
@@ -219,17 +229,9 @@ class dbapi:
yield cpv
- def _repoman_iuse_implicit_cnstr(self, pkg, metadata):
- """
- In repoman's version of _iuse_implicit_cnstr, account for modifications
- of the self.settings reference between calls.
- """
- eapi_attrs = _get_eapi_attrs(metadata["EAPI"])
- if eapi_attrs.iuse_effective:
- iuse_implicit_match = lambda flag: self.settings._iuse_effective_match(flag)
- else:
- iuse_implicit_match = lambda flag: self.settings._iuse_implicit_match(flag)
- return iuse_implicit_match
+ @staticmethod
+ def _iuse_implicit_built(iuse_implicit_match, use, flag):
+ return iuse_implicit_match(flag) or flag in use
def _iuse_implicit_cnstr(self, pkg, metadata):
"""
@@ -265,25 +267,25 @@ class dbapi:
# This behavior is only used for EAPIs that support IUSE_EFFECTIVE,
# since built USE settings for earlier EAPIs may contain a large
# number of irrelevant flags.
- prof_iuse = iuse_implicit_match
- enabled = frozenset(metadata["USE"].split()).__contains__
- iuse_implicit_match = lambda flag: prof_iuse(flag) or enabled(flag)
+ iuse_implicit_match = functools.partial(
+ self._iuse_implicit_built,
+ iuse_implicit_match,
+ frozenset(metadata["USE"].split()),
+ )
return iuse_implicit_match
def _match_use(self, atom, pkg, metadata, ignore_profile=False):
iuse_implicit_match = self._iuse_implicit_cnstr(pkg, metadata)
- usealiases = self.settings._use_manager.getUseAliases(pkg)
iuse = Package._iuse(
None,
metadata["IUSE"].split(),
iuse_implicit_match,
- usealiases,
metadata["EAPI"],
)
for x in atom.unevaluated_atom.use.required:
- if iuse.get_real_flag(x) is None:
+ if iuse.get_flag(x) is None:
return False
if atom.use is None:
@@ -297,18 +299,16 @@ class dbapi:
# with implicit IUSE, in order to avoid potential
# inconsistencies in USE dep matching (see bug #453400).
use = frozenset(
- x for x in metadata["USE"].split() if iuse.get_real_flag(x) is not None
+ x for x in metadata["USE"].split() if iuse.get_flag(x) is not None
)
missing_enabled = frozenset(
- x for x in atom.use.missing_enabled if iuse.get_real_flag(x) is None
+ x for x in atom.use.missing_enabled if iuse.get_flag(x) is None
)
missing_disabled = frozenset(
- x for x in atom.use.missing_disabled if iuse.get_real_flag(x) is None
- )
- enabled = frozenset((iuse.get_real_flag(x) or x) for x in atom.use.enabled)
- disabled = frozenset(
- (iuse.get_real_flag(x) or x) for x in atom.use.disabled
+ x for x in atom.use.missing_disabled if iuse.get_flag(x) is None
)
+ enabled = frozenset((iuse.get_flag(x) or x) for x in atom.use.enabled)
+ disabled = frozenset((iuse.get_flag(x) or x) for x in atom.use.disabled)
if enabled:
if any(x in enabled for x in missing_disabled):
@@ -333,7 +333,7 @@ class dbapi:
pkg, stable=self.settings._parent_stable
)
if any(
- x in usemask and iuse.get_real_flag(x) is not None
+ x in usemask and iuse.get_flag(x) is not None
for x in atom.use.enabled
):
return False
@@ -342,9 +342,7 @@ class dbapi:
pkg, stable=self.settings._parent_stable
)
if any(
- x in useforce
- and x not in usemask
- and iuse.get_real_flag(x) is not None
+ x in useforce and x not in usemask and iuse.get_flag(x) is not None
for x in atom.use.disabled
):
return False
@@ -352,15 +350,13 @@ class dbapi:
# Check unsatisfied use-default deps
if atom.use.enabled:
missing_disabled = frozenset(
- x
- for x in atom.use.missing_disabled
- if iuse.get_real_flag(x) is None
+ x for x in atom.use.missing_disabled if iuse.get_flag(x) is None
)
if any(x in atom.use.enabled for x in missing_disabled):
return False
if atom.use.disabled:
missing_enabled = frozenset(
- x for x in atom.use.missing_enabled if iuse.get_real_flag(x) is None
+ x for x in atom.use.missing_enabled if iuse.get_flag(x) is None
)
if any(x in atom.use.disabled for x in missing_enabled):
return False
@@ -371,11 +367,11 @@ class dbapi:
if "/" + MERGING_IDENTIFIER in mypath:
if os.path.exists(mypath):
writemsg(
- colorize("BAD", _("INCOMPLETE MERGE:")) + " %s\n" % mypath,
+ colorize("BAD", _("INCOMPLETE MERGE:")) + f" {mypath}\n",
noiselevel=-1,
)
else:
- writemsg("!!! Invalid db entry: %s\n" % mypath, noiselevel=-1)
+ writemsg(f"!!! Invalid db entry: {mypath}\n", noiselevel=-1)
def update_ents(self, updates, onProgress=None, onUpdate=None):
"""
@@ -412,7 +408,7 @@ class dbapi:
pkg = _pkg_str(cpv, metadata=metadata, settings=self.settings)
except InvalidData:
continue
- metadata = dict((k, metadata[k]) for k in update_keys)
+ metadata = {k: metadata[k] for k in update_keys}
if repo_dict is None:
updates_list = updates
else:
@@ -431,7 +427,12 @@ class dbapi:
updates_list, metadata, parent=pkg
)
if metadata_updates:
- aux_update(cpv, metadata_updates)
+ try:
+ aux_update(cpv, metadata_updates)
+ except (InvalidBinaryPackageFormat, CorruptionKeyError) as e:
+ logging.warning(
+ f"{e.__class__.__name__}: {e}", exc_info=sys.exc_info()
+ )
if onUpdate:
onUpdate(maxval, i + 1)
if onProgress:
@@ -474,7 +475,11 @@ class dbapi:
and mycpv.sub_slot
and mycpv.sub_slot not in (mycpv.slot, newslot)
):
- newslot = "%s/%s" % (newslot, mycpv.sub_slot)
+ newslot = f"{newslot}/{mycpv.sub_slot}"
mydata = {"SLOT": newslot + "\n"}
- self.aux_update(mycpv, mydata)
+ try:
+ self.aux_update(mycpv, mydata)
+ except CorruptionKeyError as e:
+ logging.warning(f"{e.__class__.__name__}: {e}", exc_info=sys.exc_info())
+ continue
return moves
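
Several lambdas in dbapi/__init__.py become functools.partial objects built from module-level functions. Unlike a lambda or closure, such a partial can be pickled, which matters when these callables cross process boundaries. A toy demonstration; prefix_match is a hypothetical stand-in for the real profile matcher:

    import functools
    import pickle

    def iuse_implicit_built(iuse_implicit_match, use, flag):
        return iuse_implicit_match(flag) or flag in use

    def prefix_match(flag):
        return flag.startswith("userland_")

    matcher = functools.partial(
        iuse_implicit_built, prefix_match, frozenset({"ssl", "ipv6"})
    )
    assert matcher("ssl") and matcher("userland_GNU") and not matcher("X")
    pickle.dumps(matcher)  # an equivalent lambda would raise PicklingError
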
diff --git a/lib/portage/dbapi/_similar_name_search.py b/lib/portage/dbapi/_similar_name_search.py
index 0af3e8070..3ee174072 100644
--- a/lib/portage/dbapi/_similar_name_search.py
+++ b/lib/portage/dbapi/_similar_name_search.py
@@ -7,7 +7,6 @@ from portage.versions import catsplit
def similar_name_search(dbs, atom):
-
cp_lower = atom.cp.lower()
cat, pkg = catsplit(cp_lower)
if cat == "null":
diff --git a/lib/portage/dbapi/bintree.py b/lib/portage/dbapi/bintree.py
index 777fc4918..7bc1f60f6 100644
--- a/lib/portage/dbapi/bintree.py
+++ b/lib/portage/dbapi/bintree.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["bindbapi", "binarytree"]
@@ -26,20 +26,33 @@ portage.proxy.lazyimport.lazyimport(
from portage.binrepo.config import BinRepoConfigLoader
from portage.cache.mappings import slot_dict_class
-from portage.const import BINREPOS_CONF_FILE, CACHE_PATH, SUPPORTED_XPAK_EXTENSIONS
+from portage.const import (
+ BINREPOS_CONF_FILE,
+ CACHE_PATH,
+ SUPPORTED_XPAK_EXTENSIONS,
+ SUPPORTED_GPKG_EXTENSIONS,
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
+)
from portage.dbapi.virtual import fakedbapi
from portage.dep import Atom, use_reduce, paren_enclose
from portage.exception import (
AlarmSignal,
+ CorruptionKeyError,
InvalidPackageName,
+ InvalidBinaryPackageFormat,
ParseError,
PortageException,
+ PortagePackageException,
+ SignatureException,
)
from portage.localization import _
+from portage.output import colorize
from portage.package.ebuild.profile_iuse import iter_iuse_vars
+from portage.util import ensure_dirs
from portage.util.file_copy import copyfile
from portage.util.futures import asyncio
from portage.util.futures.executor.fork import ForkExecutor
+from portage.binpkg import get_binpkg_format
from portage import _movefile
from portage import os
from portage import _encodings
@@ -49,6 +62,7 @@ from portage import _unicode_encode
import codecs
import errno
import io
+import re
import stat
import subprocess
import tempfile
@@ -58,6 +72,7 @@ import traceback
import warnings
from gzip import GzipFile
from itertools import chain
+from pathlib import PurePath
from urllib.parse import urlparse
@@ -71,9 +86,12 @@ class bindbapi(fakedbapi):
_known_keys = frozenset(
list(fakedbapi._known_keys) + ["CHOST", "repository", "USE"]
)
+ # Must include keys used to create _pkg_str attributes used in
+ # the fakedbapi _instance_key_multi_instance method.
_pkg_str_aux_keys = fakedbapi._pkg_str_aux_keys + (
"BUILD_ID",
"BUILD_TIME",
+ "SIZE",
"_mtime_",
)
@@ -91,35 +109,54 @@ class bindbapi(fakedbapi):
self.bintree = mybintree
self.move_ent = mybintree.move_ent
# Selectively cache metadata in order to optimize dep matching.
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "BUILD_ID",
- "BUILD_TIME",
- "CHOST",
- "DEFINED_PHASES",
- "DEPEND",
- "EAPI",
- "IDEPEND",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "MD5",
- "PDEPEND",
- "PROPERTIES",
- "PROVIDES",
- "RDEPEND",
- "repository",
- "REQUIRES",
- "RESTRICT",
- "SIZE",
- "SLOT",
- "USE",
- "_mtime_",
- ]
- )
- self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "BUILD_ID",
+ "BUILD_TIME",
+ "CHOST",
+ "DEFINED_PHASES",
+ "DEPEND",
+ "EAPI",
+ "IDEPEND",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "MD5",
+ "PDEPEND",
+ "PROPERTIES",
+ "PROVIDES",
+ "RDEPEND",
+ "repository",
+ "REQUIRES",
+ "RESTRICT",
+ "SIZE",
+ "SLOT",
+ "USE",
+ "_mtime_",
+ }
self._aux_cache = {}
+ self._aux_cache_slot_dict_cache = None
+
+ @property
+ def _aux_cache_slot_dict(self):
+ if self._aux_cache_slot_dict_cache is None:
+ self._aux_cache_slot_dict_cache = slot_dict_class(self._aux_cache_keys)
+ return self._aux_cache_slot_dict_cache
+
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ # These attributes are not picklable, so they are automatically
+ # regenerated after unpickling.
+ state["_aux_cache_slot_dict_cache"] = None
+ state["_instance_key"] = None
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ if self._multi_instance:
+ self._instance_key = self._instance_key_multi_instance
+ else:
+ self._instance_key = self._instance_key_cpv
@property
def writable(self):
@@ -170,28 +207,41 @@ class bindbapi(fakedbapi):
return add_pkg._db.aux_get(add_pkg, wants)
if not self.bintree._remotepkgs or not self.bintree.isremote(mycpv):
try:
- tbz2_path = self.bintree._pkg_paths[instance_key]
+ binpkg_path = self.bintree._pkg_paths[instance_key]
except KeyError:
raise KeyError(mycpv)
- tbz2_path = os.path.join(self.bintree.pkgdir, tbz2_path)
+ binpkg_path = os.path.join(self.bintree.pkgdir, binpkg_path)
try:
- st = os.lstat(tbz2_path)
- except OSError:
- raise KeyError(mycpv)
- metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
+ st = os.stat(binpkg_path)
+ except OSError as oe:
+ raise CorruptionKeyError(mycpv) from oe
+
+ binpkg_format = get_binpkg_format(binpkg_path)
+ if binpkg_format == "xpak":
+ metadata_bytes = portage.xpak.tbz2(binpkg_path).get_data()
+ decode_metadata_name = False
+ elif binpkg_format == "gpkg":
+ metadata_bytes = portage.gpkg.gpkg(
+ self.settings, mycpv, binpkg_path
+ ).get_metadata()
+ decode_metadata_name = True
def getitem(k):
if k == "_mtime_":
return str(st[stat.ST_MTIME])
if k == "SIZE":
return str(st.st_size)
- v = metadata_bytes.get(
- _unicode_encode(
- k,
- encoding=_encodings["repo.content"],
- errors="backslashreplace",
- )
- )
+ else:
+ if decode_metadata_name:
+ v = metadata_bytes.get(k)
+ else:
+ v = metadata_bytes.get(
+ _unicode_encode(
+ k,
+ encoding=_encodings["repo.content"],
+ errors="backslashreplace",
+ )
+ )
if v is not None:
v = _unicode_decode(
v, encoding=_encodings["repo.content"], errors="replace"
@@ -202,6 +252,7 @@ class bindbapi(fakedbapi):
getitem = self.cpvdict[instance_key].get
mydata = {}
mykeys = wants
+
for x in mykeys:
myval = getitem(x)
# myval is None if the key doesn't exist
@@ -217,7 +268,6 @@ class bindbapi(fakedbapi):
def aux_update(self, cpv, values):
if not self.bintree.populated:
self.bintree.populate()
- build_id = None
try:
build_id = cpv.build_id
except AttributeError:
@@ -230,16 +280,49 @@ class bindbapi(fakedbapi):
cpv = self._instance_key(cpv, support_string=True)[0]
build_id = cpv.build_id
- tbz2path = self.bintree.getname(cpv)
- if not os.path.exists(tbz2path):
- raise KeyError(cpv)
- mytbz2 = portage.xpak.tbz2(tbz2path)
- mydata = mytbz2.get_data()
+ cpv_str = str(cpv)
+ if build_id is not None:
+ cpv_str += f"-{build_id}"
- for k, v in values.items():
- k = _unicode_encode(
- k, encoding=_encodings["repo.content"], errors="backslashreplace"
+ binpkg_path = self.bintree.getname(cpv)
+ try:
+ os.stat(binpkg_path)
+ except OSError as oe:
+ raise CorruptionKeyError(cpv) from oe
+
+ binpkg_format = get_binpkg_format(binpkg_path)
+ if binpkg_format == "xpak":
+ mytbz2 = portage.xpak.tbz2(binpkg_path)
+ mydata = mytbz2.get_data()
+ encoding_key = True
+ elif binpkg_format == "gpkg":
+ mybinpkg = portage.gpkg.gpkg(self.settings, cpv_str, binpkg_path)
+ try:
+ mydata = mybinpkg.get_metadata()
+ signature_exist = mybinpkg.signature_exist
+ except SignatureException:
+ signature_exist = True
+ if signature_exist:
+ writemsg(
+ colorize(
+ "WARN",
+ f"Binpkg update ignored for signed package: {binpkg_path}, "
+ "the file will be removed.\n",
+ )
+ )
+ self.bintree.remove(cpv)
+ return
+ encoding_key = False
+ else:
+ raise InvalidBinaryPackageFormat(
+ f"Unknown binary package format {binpkg_path}"
)
+
+ for k, v in values.items():
+ if encoding_key:
+ k = _unicode_encode(
+ k, encoding=_encodings["repo.content"], errors="backslashreplace"
+ )
v = _unicode_encode(
v, encoding=_encodings["repo.content"], errors="backslashreplace"
)
@@ -248,7 +331,15 @@ class bindbapi(fakedbapi):
for k, v in list(mydata.items()):
if not v:
del mydata[k]
- mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
+ if binpkg_format == "xpak":
+ mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
+ elif binpkg_format == "gpkg":
+ mybinpkg.update_metadata(mydata)
+ else:
+ raise InvalidBinaryPackageFormat(
+ f"Unknown binary package format {binpkg_path}"
+ )
+
# inject will clear stale caches via cpv_inject.
self.bintree.inject(cpv)
@@ -271,12 +362,24 @@ class bindbapi(fakedbapi):
if add_pkg is not None:
await add_pkg._db.unpack_metadata(pkg, dest_dir, loop=loop)
else:
- tbz2_file = self.bintree.getname(cpv)
- await loop.run_in_executor(
- ForkExecutor(loop=loop),
- portage.xpak.tbz2(tbz2_file).unpackinfo,
- dest_dir,
- )
+ binpkg_file = self.bintree.getname(cpv)
+ binpkg_format = get_binpkg_format(binpkg_file)
+ if binpkg_format == "xpak":
+ await loop.run_in_executor(
+ ForkExecutor(loop=loop),
+ portage.xpak.tbz2(binpkg_file).unpackinfo,
+ dest_dir,
+ )
+ elif binpkg_format == "gpkg":
+ await loop.run_in_executor(
+ ForkExecutor(loop=loop),
+ portage.gpkg.gpkg(self.settings, cpv, binpkg_file).unpack_metadata,
+ dest_dir,
+ )
+ else:
+ raise InvalidBinaryPackageFormat(
+ f"Unknown binary package format {binpkg_file}"
+ )
async def unpack_contents(self, pkg, dest_dir, loop=None):
"""
@@ -297,23 +400,31 @@ class bindbapi(fakedbapi):
pkg_path = self.bintree.getname(cpv)
if pkg_path is not None:
+ binpkg_format = get_binpkg_format(pkg_path)
+ if binpkg_format == "xpak":
+ extractor = BinpkgExtractorAsync(
+ background=settings.get("PORTAGE_BACKGROUND") == "1",
+ env=settings.environ(),
+ features=settings.features,
+ image_dir=dest_dir,
+ pkg=cpv,
+ pkg_path=pkg_path,
+ logfile=settings.get("PORTAGE_LOG_FILE"),
+ scheduler=SchedulerInterface(loop),
+ )
- extractor = BinpkgExtractorAsync(
- background=settings.get("PORTAGE_BACKGROUND") == "1",
- env=settings.environ(),
- features=settings.features,
- image_dir=dest_dir,
- pkg=cpv,
- pkg_path=pkg_path,
- logfile=settings.get("PORTAGE_LOG_FILE"),
- scheduler=SchedulerInterface(loop),
- )
-
- extractor.start()
- await extractor.async_wait()
- if extractor.returncode != os.EX_OK:
- raise PortageException("Error Extracting '{}'".format(pkg_path))
-
+ extractor.start()
+ await extractor.async_wait()
+ if extractor.returncode != os.EX_OK:
+ raise PortageException(f"Error Extracting '{pkg_path}'")
+ elif binpkg_format == "gpkg":
+ await loop.run_in_executor(
+ ForkExecutor(loop=loop),
+ portage.gpkg.gpkg(self.settings, cpv, pkg_path).decompress,
+ dest_dir,
+ )
+ else:
+ raise portage.exception.InvalidBinaryPackageFormat(pkg_path)
else:
instance_key = self._instance_key(cpv)
add_pkg = self.bintree._additional_pkgs.get(instance_key)
@@ -357,7 +468,7 @@ class bindbapi(fakedbapi):
except KeyError:
raise portage.exception.MissingSignature("SIZE")
except ValueError:
- raise portage.exception.InvalidSignature("SIZE: %s" % metadata["SIZE"])
+ raise portage.exception.InvalidSignature(f"SIZE: {metadata['SIZE']}")
else:
filesdict[os.path.basename(self.bintree.getname(pkg))] = size
@@ -374,7 +485,6 @@ class binarytree:
virtual=DeprecationWarning,
settings=None,
):
-
if pkgdir is None:
raise TypeError("pkgdir parameter is required")
@@ -400,138 +510,135 @@ class binarytree:
stacklevel=2,
)
- if True:
- self.pkgdir = normalize_path(pkgdir)
- # NOTE: Event if binpkg-multi-instance is disabled, it's
- # still possible to access a PKGDIR which uses the
- # binpkg-multi-instance layout (or mixed layout).
- self._multi_instance = "binpkg-multi-instance" in settings.features
- if self._multi_instance:
- self._allocate_filename = self._allocate_filename_multi
- self.dbapi = bindbapi(self, settings=settings)
- self.update_ents = self.dbapi.update_ents
- self.move_slot_ent = self.dbapi.move_slot_ent
- self.populated = 0
- self.tree = {}
- self._binrepos_conf = None
- self._remote_has_index = False
- self._remotepkgs = None # remote metadata indexed by cpv
- self._additional_pkgs = {}
- self.invalids = []
- self.settings = settings
- self._pkg_paths = {}
- self._populating = False
- self._all_directory = os.path.isdir(os.path.join(self.pkgdir, "All"))
- self._pkgindex_version = 0
- self._pkgindex_hashes = ["MD5", "SHA1"]
- self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
- self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
- self._pkgindex_keys.update(["CPV", "SIZE"])
- self._pkgindex_aux_keys = [
- "BASE_URI",
- "BDEPEND",
- "BUILD_ID",
- "BUILD_TIME",
- "CHOST",
- "DEFINED_PHASES",
- "DEPEND",
- "DESCRIPTION",
- "EAPI",
- "FETCHCOMMAND",
- "IDEPEND",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "PDEPEND",
- "PKGINDEX_URI",
- "PROPERTIES",
- "PROVIDES",
- "RDEPEND",
- "repository",
- "REQUIRES",
- "RESTRICT",
- "RESUMECOMMAND",
- "SIZE",
- "SLOT",
- "USE",
- ]
- self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
- self._pkgindex_use_evaluated_keys = (
- "BDEPEND",
- "DEPEND",
- "IDEPEND",
- "LICENSE",
- "RDEPEND",
- "PDEPEND",
- "PROPERTIES",
- "RESTRICT",
- )
- self._pkgindex_header = None
- self._pkgindex_header_keys = set(
- [
- "ACCEPT_KEYWORDS",
- "ACCEPT_LICENSE",
- "ACCEPT_PROPERTIES",
- "ACCEPT_RESTRICT",
- "CBUILD",
- "CONFIG_PROTECT",
- "CONFIG_PROTECT_MASK",
- "FEATURES",
- "GENTOO_MIRRORS",
- "INSTALL_MASK",
- "IUSE_IMPLICIT",
- "USE",
- "USE_EXPAND",
- "USE_EXPAND_HIDDEN",
- "USE_EXPAND_IMPLICIT",
- "USE_EXPAND_UNPREFIXED",
- ]
- )
- self._pkgindex_default_pkg_data = {
- "BDEPEND": "",
- "BUILD_ID": "",
- "BUILD_TIME": "",
- "DEFINED_PHASES": "",
- "DEPEND": "",
- "EAPI": "0",
- "IDEPEND": "",
- "IUSE": "",
- "KEYWORDS": "",
- "LICENSE": "",
- "PATH": "",
- "PDEPEND": "",
- "PROPERTIES": "",
- "PROVIDES": "",
- "RDEPEND": "",
- "REQUIRES": "",
- "RESTRICT": "",
- "SLOT": "0",
- "USE": "",
- }
- self._pkgindex_inherited_keys = ["CHOST", "repository"]
-
- # Populate the header with appropriate defaults.
- self._pkgindex_default_header_data = {
- "CHOST": self.settings.get("CHOST", ""),
- "repository": "",
- }
-
- self._pkgindex_translated_keys = (
- ("DESCRIPTION", "DESC"),
- ("_mtime_", "MTIME"),
- ("repository", "REPO"),
- )
+ self.pkgdir = normalize_path(pkgdir)
+ # NOTE: Even if binpkg-multi-instance is disabled, it's
+ # still possible to access a PKGDIR which uses the
+ # binpkg-multi-instance layout (or mixed layout).
+ self._multi_instance = "binpkg-multi-instance" in settings.features
+ if self._multi_instance:
+ self._allocate_filename = self._allocate_filename_multi
+ self.dbapi = bindbapi(self, settings=settings)
+ self.update_ents = self.dbapi.update_ents
+ self.move_slot_ent = self.dbapi.move_slot_ent
+ self.populated = 0
+ self.tree = {}
+ self._binrepos_conf = None
+ self._remote_has_index = False
+ self._remotepkgs = None # remote metadata indexed by cpv
+ self._additional_pkgs = {}
+ self.invalids = []
+ self.invalid_paths: dict[str, list[str]] = {}
+ self.settings = settings
+ self._pkg_paths = {}
+ self._populating = False
+ self._all_directory = os.path.isdir(os.path.join(self.pkgdir, "All"))
+ self._pkgindex_version = 0
+ self._pkgindex_hashes = ["MD5", "SHA1"]
+ self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
+ self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
+ self._pkgindex_keys.update(["CPV", "SIZE"])
+ self._pkgindex_aux_keys = [
+ "BASE_URI",
+ "BDEPEND",
+ "BUILD_ID",
+ "BUILD_TIME",
+ "CHOST",
+ "DEFINED_PHASES",
+ "DEPEND",
+ "DESCRIPTION",
+ "EAPI",
+ "FETCHCOMMAND",
+ "IDEPEND",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "PKGINDEX_URI",
+ "PROPERTIES",
+ "PROVIDES",
+ "RDEPEND",
+ "repository",
+ "REQUIRES",
+ "RESTRICT",
+ "RESUMECOMMAND",
+ "SIZE",
+ "SLOT",
+ "USE",
+ ]
+ self._pkgindex_use_evaluated_keys = (
+ "BDEPEND",
+ "DEPEND",
+ "IDEPEND",
+ "LICENSE",
+ "RDEPEND",
+ "PDEPEND",
+ "PROPERTIES",
+ "RESTRICT",
+ )
+ self._pkgindex_header = None
+ self._pkgindex_header_keys = {
+ "ACCEPT_KEYWORDS",
+ "ACCEPT_LICENSE",
+ "ACCEPT_PROPERTIES",
+ "ACCEPT_RESTRICT",
+ "CBUILD",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "FEATURES",
+ "GENTOO_MIRRORS",
+ "INSTALL_MASK",
+ "IUSE_IMPLICIT",
+ "USE",
+ "USE_EXPAND",
+ "USE_EXPAND_HIDDEN",
+ "USE_EXPAND_IMPLICIT",
+ "USE_EXPAND_UNPREFIXED",
+ }
+ self._pkgindex_default_pkg_data = {
+ "BDEPEND": "",
+ "BUILD_ID": "",
+ "BUILD_TIME": "",
+ "DEFINED_PHASES": "",
+ "DEPEND": "",
+ "EAPI": "0",
+ "IDEPEND": "",
+ "IUSE": "",
+ "KEYWORDS": "",
+ "LICENSE": "",
+ "PATH": "",
+ "PDEPEND": "",
+ "PROPERTIES": "",
+ "PROVIDES": "",
+ "RDEPEND": "",
+ "REQUIRES": "",
+ "RESTRICT": "",
+ "SLOT": "0",
+ "USE": "",
+ }
+ self._pkgindex_inherited_keys = ["CHOST", "repository"]
+
+ # Populate the header with appropriate defaults.
+ self._pkgindex_default_header_data = {
+ "CHOST": self.settings.get("CHOST", ""),
+ "repository": "",
+ }
+
+ self._pkgindex_translated_keys = (
+ ("DESCRIPTION", "DESC"),
+ ("_mtime_", "MTIME"),
+ ("repository", "REPO"),
+ )
- self._pkgindex_allowed_pkg_keys = set(
- chain(
- self._pkgindex_keys,
- self._pkgindex_aux_keys,
- self._pkgindex_hashes,
- self._pkgindex_default_pkg_data,
- self._pkgindex_inherited_keys,
- chain(*self._pkgindex_translated_keys),
- )
+ self._pkgindex_allowed_pkg_keys = set(
+ chain(
+ self._pkgindex_keys,
+ self._pkgindex_aux_keys,
+ self._pkgindex_hashes,
+ self._pkgindex_default_pkg_data,
+ self._pkgindex_inherited_keys,
+ chain(*self._pkgindex_translated_keys),
)
+ )
@property
def root(self):
@@ -579,7 +686,7 @@ class binarytree:
# If this update has already been applied to the same
# package build then silently continue.
applied = False
- for maybe_applied in self.dbapi.match("={}".format(mynewcpv)):
+ for maybe_applied in self.dbapi.match(f"={mynewcpv}"):
if maybe_applied.build_time == mycpv.build_time:
applied = True
break
@@ -594,24 +701,66 @@ class binarytree:
writemsg("!!! " + mycpv + " -> " + mynewcpv + "\n", noiselevel=-1)
continue
- tbz2path = self.getname(mycpv)
- if os.path.exists(tbz2path) and not os.access(tbz2path, os.W_OK):
+ binpkg_path = self.getname(mycpv)
+ try:
+ os.stat(binpkg_path)
+ except FileNotFoundError:
+ writemsg(_("!!! File not found: %s\n") % binpkg_path, noiselevel=-1)
+ continue
+ except OSError as oe:
+ writemsg(
+ _("!!! File os error (path %s): %s\n") % (binpkg_path, oe),
+ noiselevel=-1,
+ )
+ continue
+ if not os.access(binpkg_path, os.W_OK):
writemsg(
_("!!! Cannot update readonly binary: %s\n") % mycpv, noiselevel=-1
)
continue
+ binpkg_format = get_binpkg_format(binpkg_path)
+ if binpkg_format == "xpak":
+ mytbz2 = portage.xpak.tbz2(binpkg_path)
+ mydata = mytbz2.get_data()
+ decode_metadata_name = False
+ elif binpkg_format == "gpkg":
+ mybinpkg = portage.gpkg.gpkg(self.settings, mycpv, binpkg_path)
+ try:
+ mydata = mybinpkg.get_metadata()
+ signature_exist = mybinpkg.signature_exist
+ except SignatureException:
+ signature_exist = True
+ if signature_exist:
+ writemsg(
+ colorize(
+ "WARN",
+ f"Binpkg update ignored for signed package: {binpkg_path}\n",
+ )
+ )
+ continue
+ decode_metadata_name = True
+ else:
+ continue
+
moves += 1
- mytbz2 = portage.xpak.tbz2(tbz2path)
- mydata = mytbz2.get_data()
+
updated_items = update_dbentries([mylist], mydata, parent=mycpv)
mydata.update(updated_items)
- mydata[b"PF"] = _unicode_encode(
- mynewpkg + "\n", encoding=_encodings["repo.content"]
- )
- mydata[b"CATEGORY"] = _unicode_encode(
- mynewcat + "\n", encoding=_encodings["repo.content"]
- )
+ if decode_metadata_name:
+ mydata["PF"] = _unicode_encode(
+ mynewpkg + "\n", encoding=_encodings["repo.content"]
+ )
+ mydata["CATEGORY"] = _unicode_encode(
+ mynewcat + "\n", encoding=_encodings["repo.content"]
+ )
+ else:
+ mydata[b"PF"] = _unicode_encode(
+ mynewpkg + "\n", encoding=_encodings["repo.content"]
+ )
+ mydata[b"CATEGORY"] = _unicode_encode(
+ mynewcat + "\n", encoding=_encodings["repo.content"]
+ )
if mynewpkg != myoldpkg:
ebuild_data = mydata.pop(
_unicode_encode(
@@ -628,7 +777,7 @@ class binarytree:
metadata = self.dbapi._aux_cache_slot_dict()
for k in self.dbapi._aux_cache_keys:
- v = mydata.get(_unicode_encode(k))
+ v = mydata.get(k if decode_metadata_name else _unicode_encode(k))
if v is not None:
v = _unicode_decode(v)
metadata[k] = " ".join(v.split())
@@ -638,15 +787,26 @@ class binarytree:
# assuming that it will be deleted by eclean-pkg when its
# time comes.
mynewcpv = _pkg_str(mynewcpv, metadata=metadata, db=self.dbapi)
- update_path = self.getname(mynewcpv, allocate_new=True) + ".partial"
+ allocated_pkg_path = self.getname(mynewcpv, allocate_new=True)
+ update_path = allocated_pkg_path + ".partial"
self._ensure_dir(os.path.dirname(update_path))
update_path_lock = None
try:
update_path_lock = lockfile(update_path, wantnewlockfile=True)
- copyfile(tbz2path, update_path)
- mytbz2 = portage.xpak.tbz2(update_path)
- mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
- self.inject(mynewcpv, filename=update_path)
+ copyfile(binpkg_path, update_path)
+ if binpkg_format == "xpak":
+ mytbz2 = portage.xpak.tbz2(update_path)
+ mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
+ elif binpkg_format == "gpkg":
+ mybinpkg = portage.gpkg.gpkg(self.settings, mycpv, update_path)
+ mybinpkg.update_metadata(mydata, new_basename=mynewcpv)
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
+ self.inject(
+ mynewcpv,
+ current_pkg_path=update_path,
+ allocated_pkg_path=allocated_pkg_path,
+ )
finally:
if update_path_lock is not None:
try:
@@ -680,11 +840,18 @@ class binarytree:
return
pkgdir_gid = pkgdir_st.st_gid
pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
- try:
- ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
- except PortageException:
- if not os.path.isdir(path):
- raise
+
+ components = []
+ for component in PurePath(path).relative_to(self.pkgdir).parts:
+ components.append(component)
+ component_path = os.path.join(self.pkgdir, *components)
+ try:
+ ensure_dirs(
+ component_path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0
+ )
+ except PortageException:
+ if not os.path.isdir(component_path):
+ raise
def _file_permissions(self, path):
try:
@@ -701,7 +868,15 @@ class binarytree:
except PortageException:
pass
- def populate(self, getbinpkgs=False, getbinpkg_refresh=True, add_repos=()):
+ def populate(
+ self,
+ getbinpkgs=False,
+ getbinpkg_refresh=False,
+ add_repos=(),
+ force_reindex=False,
+ invalid_errors=True,
+ pretend=False,
+ ):
"""
Populates the binarytree with package metadata.
@@ -714,6 +889,10 @@ class binarytree:
@type add_repos: sequence
"""
+ # TODO: Should we return here if we're --pretend? On the one hand,
+ # people might not want --pretend to affect state. On the other hand,
+ # it makes --pretend pretty useless with --getbinpkg as your index will
+ # be stale.
if self._populating:
return
@@ -731,6 +910,8 @@ class binarytree:
try:
update_pkgindex = self._populate_local(
reindex="pkgdir-index-trusted" not in self.settings.features
+ or force_reindex,
+ invalid_errors=invalid_errors,
)
if update_pkgindex and self.dbapi.writable:
@@ -760,20 +941,22 @@ class binarytree:
if not self._binrepos_conf:
writemsg(
_(
- "!!! %s is missing (or PORTAGE_BINHOST is unset), but use is requested.\n"
- )
- % (config_path,),
+ f"!!! {config_path} is missing (or PORTAGE_BINHOST is unset), "
+ "but use is requested.\n"
+ ),
noiselevel=-1,
)
else:
- self._populate_remote(getbinpkg_refresh=getbinpkg_refresh)
+ self._populate_remote(
+ getbinpkg_refresh=getbinpkg_refresh, pretend=pretend
+ )
finally:
self._populating = False
self.populated = True
- def _populate_local(self, reindex=True):
+ def _populate_local(self, reindex=True, invalid_errors=True):
"""
Populates the binarytree with local package metadata.
@@ -788,6 +971,14 @@ class binarytree:
# the Packages file will not be needlessly re-written due to
# missing digests.
minimum_keys = self._pkgindex_keys.difference(self._pkgindex_hashes)
+
+ if "binpkg-request-signature" in self.settings.features:
+ gpkg_only = True
+ else:
+ gpkg_only = False
+
+ gpkg_only_warned = False
+
if True:
pkg_paths = {}
self._pkg_paths = pkg_paths
@@ -810,7 +1001,19 @@ class binarytree:
metadata[_instance_key(cpv)] = d
path = d.get("PATH")
if not path:
- path = cpv + ".tbz2"
+ if gpkg_only:
+ if not gpkg_only_warned:
+ writemsg(
+ colorize(
+ "WARN",
+ "Local XPAK packages are ignored due to 'binpkg-request-signature'.\n",
+ ),
+ noiselevel=-1,
+ )
+ gpkg_only_warned = True
+ continue
+ else:
+ path = cpv + ".tbz2"
if reindex:
basename = os.path.basename(path)
@@ -835,8 +1038,23 @@ class binarytree:
)
except UnicodeDecodeError:
continue
- if not myfile.endswith(SUPPORTED_XPAK_EXTENSIONS):
+ if not myfile.endswith(
+ SUPPORTED_XPAK_EXTENSIONS + SUPPORTED_GPKG_EXTENSIONS
+ ):
+ continue
+
+ if myfile.endswith(SUPPORTED_XPAK_EXTENSIONS) and gpkg_only:
+ if not gpkg_only_warned:
+ writemsg(
+ colorize(
+ "WARN",
+ "Local XPAK packages are ignored due to 'binpkg-request-signature'.\n",
+ ),
+ noiselevel=-1,
+ )
+ gpkg_only_warned = True
continue
+
mypath = os.path.join(mydir, myfile)
full_path = os.path.join(self.pkgdir, mypath)
s = os.lstat(full_path)
@@ -846,9 +1064,9 @@ class binarytree:
# Validate data from the package index and try to avoid
# reading the xpak if possible.
+ match = None
possibilities = basename_index.get(myfile)
if possibilities:
- match = None
for d in possibilities:
try:
if int(d["_mtime_"]) != s[stat.ST_MTIME]:
@@ -869,43 +1087,76 @@ class binarytree:
pkg_paths[instance_key] = mypath
# update the path if the package has been moved
oldpath = d.get("PATH")
- if oldpath and oldpath != mypath:
- update_pkgindex = True
- # Omit PATH if it is the default path for
- # the current Packages format version.
- if mypath != mycpv + ".tbz2":
+ if oldpath != mypath:
d["PATH"] = mypath
- if not oldpath:
- update_pkgindex = True
- else:
- d.pop("PATH", None)
- if oldpath:
- update_pkgindex = True
+ update_pkgindex = True
self.dbapi.cpv_inject(mycpv)
continue
if not os.access(full_path, os.R_OK):
- writemsg(
- _("!!! Permission denied to read " "binary package: '%s'\n")
- % full_path,
- noiselevel=-1,
- )
+ if invalid_errors:
+ writemsg(
+ _(
+ "!!! Permission denied to read "
+ "binary package: '%s'\n"
+ )
+ % full_path,
+ noiselevel=-1,
+ )
self.invalids.append(myfile[:-5])
+ self.invalid_paths[myfile] = [full_path]
+ continue
+
+ try:
+ binpkg_format = get_binpkg_format(myfile)
+ except InvalidBinaryPackageFormat:
+ self.invalids.append(myfile[:-5])
+ self.invalid_paths[myfile[:-5]] = [full_path]
+ continue
+
+ if gpkg_only:
+ if binpkg_format != "gpkg":
+ if not gpkg_only_warned:
+ writemsg(
+ colorize(
+ "WARN",
+ "Local XPAK packages are ignored due to 'binpkg-request-signature'.\n",
+ ),
+ noiselevel=-1,
+ )
+ gpkg_only_warned = True
+ continue
+ else:
+ binpkg_format = "gpkg"
+
+ for ext in SUPPORTED_XPAK_EXTENSIONS + SUPPORTED_GPKG_EXTENSIONS:
+ if myfile.endswith(ext):
+ mypkg = myfile[: -len(ext)]
+ break
+ try:
+ pkg_metadata = self._read_metadata(
+ full_path,
+ s,
+ keys=chain(self.dbapi._aux_cache_keys, ("PF", "CATEGORY")),
+ binpkg_format=binpkg_format,
+ )
+ except (PortagePackageException, SignatureException) as e:
+ if invalid_errors:
+ writemsg(
+ f"!!! Invalid binary package: '{full_path}', {e}\n",
+ noiselevel=-1,
+ )
+ self.invalid_paths[mypkg] = [full_path]
continue
- pkg_metadata = self._read_metadata(
- full_path,
- s,
- keys=chain(self.dbapi._aux_cache_keys, ("PF", "CATEGORY")),
- )
mycat = pkg_metadata.get("CATEGORY", "")
mypf = pkg_metadata.get("PF", "")
slot = pkg_metadata.get("SLOT", "")
- mypkg = myfile[:-5]
if not mycat or not mypf or not slot:
# old-style or corrupt package
- writemsg(
- _("\n!!! Invalid binary package: '%s'\n") % full_path,
- noiselevel=-1,
- )
+ if invalid_errors:
+ writemsg(
+ _("\n!!! Invalid binary package: '%s'\n") % full_path,
+ noiselevel=-1,
+ )
missing_keys = []
if not mycat:
missing_keys.append("CATEGORY")
@@ -913,22 +1164,25 @@ class binarytree:
missing_keys.append("PF")
if not slot:
missing_keys.append("SLOT")
- msg = []
- if missing_keys:
- missing_keys.sort()
+ if invalid_errors:
+ msg = []
+ if missing_keys:
+ missing_keys.sort()
+ msg.append(
+ _("Missing metadata key(s): %s.")
+ % ", ".join(missing_keys)
+ )
+ if invalid_errors:
msg.append(
- _("Missing metadata key(s): %s.")
- % ", ".join(missing_keys)
- )
- msg.append(
- _(
- " This binary package is not "
- "recoverable and should be deleted."
+ _(
+ " This binary package is not "
+ "recoverable and should be deleted."
+ )
)
- )
- for line in textwrap.wrap("".join(msg), 72):
- writemsg("!!! %s\n" % line, noiselevel=-1)
+ for line in textwrap.wrap("".join(msg), 72):
+ writemsg(f"!!! {line}\n", noiselevel=-1)
self.invalids.append(mypkg)
+ self.invalid_paths[mypkg] = [full_path]
continue
multi_instance = False
@@ -939,13 +1193,23 @@ class binarytree:
build_id = self._parse_build_id(myfile)
if build_id < 1:
invalid_name = True
- elif myfile != "%s-%s.xpak" % (mypf, build_id):
+ elif myfile != f"{mypf}-{build_id}.xpak":
invalid_name = True
else:
mypkg = mypkg[: -len(str(build_id)) - 1]
+ elif myfile.endswith(".gpkg.tar"):
+ build_id = self._parse_build_id(myfile)
+ if build_id > 0:
+ multi_instance = True
+ if myfile != f"{mypf}-{build_id}.gpkg.tar":
+ invalid_name = True
+ else:
+ mypkg = mypkg[: -len(str(build_id)) - 1]
+ else:
+ if myfile != f"{mypf}.gpkg.tar":
+ invalid_name = True
elif myfile != mypf + ".tbz2":
invalid_name = True
-
if invalid_name:
writemsg(
_("\n!!! Binary package name is " "invalid: '%s'\n")
@@ -968,7 +1232,7 @@ class binarytree:
build_id = None
if multi_instance:
- name_split = catpkgsplit("%s/%s" % (mycat, mypf))
+ name_split = catpkgsplit(f"{mycat}/{mypf}")
if (
name_split is None
or tuple(catsplit(mydir)) != name_split[:2]
@@ -1042,11 +1306,7 @@ class binarytree:
self.dbapi.cpv_remove(mycpv)
del pkg_paths[_instance_key(mycpv)]
- # record location if it's non-default
- if mypath != mycpv + ".tbz2":
- d["PATH"] = mypath
- else:
- d.pop("PATH", None)
+ d["PATH"] = mypath
metadata[_instance_key(mycpv)] = d
if reindex:
@@ -1064,29 +1324,49 @@ class binarytree:
return pkgindex if update_pkgindex else None
- def _populate_remote(self, getbinpkg_refresh=True):
+ def _run_trust_helper(self):
+ portage_trust_helper = self.settings.get("PORTAGE_TRUST_HELPER", "")
+ if portage_trust_helper == "":
+ return
+ try:
+ ret = subprocess.run(portage_trust_helper)
+ except FileNotFoundError:
+ writemsg(
+ _(
+ "\n!!! Portage trust helper %s for binary packages not found\n!!! Continuing, but did you install app-portage/getuto?\n"
+ )
+ % portage_trust_helper,
+ noiselevel=-1,
+ )
+ return
+ ret.check_returncode()
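A standalone sketch of the invocation pattern _run_trust_helper uses above; the helper path is an assumption (the warning text only suggests app-portage/getuto):

    import subprocess

    def run_trust_helper(helper):
        if not helper:
            return
        try:
            # A bare string runs that program with no arguments.
            ret = subprocess.run(helper)
        except FileNotFoundError:
            print(f"trust helper {helper} not found")
            return
        ret.check_returncode()  # raises CalledProcessError on non-zero exit

    run_trust_helper("/usr/bin/getuto")  # hypothetical install path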
+ def _populate_remote(self, getbinpkg_refresh=True, pretend=False):
self._remote_has_index = False
self._remotepkgs = {}
+
+ if "binpkg-request-signature" in self.settings.features:
+            # This is somewhat broken: we *should* run the trust helper always
+            # when binary packages are involved, not only when we refuse unsigned

+ # ones. (If the keys have expired we end up refusing signed but
+ # technically invalid packages...)
+ if not pretend and self.dbapi.writable:
+ self._run_trust_helper()
+ gpkg_only = True
+ else:
+ gpkg_only = False
+
# Order by descending priority.
for repo in reversed(list(self._binrepos_conf.values())):
base_url = repo.sync_uri
parsed_url = urlparse(base_url)
- host = parsed_url.netloc
+ host = parsed_url.hostname or ""
port = parsed_url.port
- user = None
- passwd = None
- user_passwd = ""
- if "@" in host:
- user, host = host.split("@", 1)
- user_passwd = user + "@"
- if ":" in user:
- user, passwd = user.split(":", 1)
-
- if port is not None:
- port_str = ":%s" % (port,)
- if host.endswith(port_str):
- host = host[: -len(port_str)]
+ user = parsed_url.username
+ passwd = parsed_url.password
+ user_passwd = user + "@" if user else ""
+ gpkg_only_warned = False
+
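The switch from manual "@"/":" splitting to urlparse attributes is worth illustrating: hostname strips both credentials and the port, so the old suffix-trimming code becomes unnecessary. A minimal demonstration:

    from urllib.parse import urlparse

    u = urlparse("ftp://user:secret@mirror.example.org:2121/packages")
    print(u.hostname)  # 'mirror.example.org' -- no credentials, no port
    print(u.port)      # 2121
    print(u.username)  # 'user'
    print(u.password)  # 'secret'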
pkgindex_file = os.path.join(
self.settings["EROOT"],
CACHE_PATH,
@@ -1097,11 +1377,10 @@ class binarytree:
)
pkgindex = self._new_pkgindex()
try:
- f = io.open(
+ f = open(
_unicode_encode(
pkgindex_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
@@ -1109,7 +1388,7 @@ class binarytree:
pkgindex.read(f)
finally:
f.close()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
local_timestamp = pkgindex.header.get("TIMESTAMP", None)
@@ -1152,16 +1431,19 @@ class binarytree:
# Don't use urlopen for https, unless
# PEP 476 is supported (bug #469888).
- if repo.fetchcommand is None and (
- parsed_url.scheme not in ("https",) or _have_pep_476()
- ):
+ if (
+ repo.fetchcommand is None or parsed_url.scheme in ("", "file")
+ ) and (parsed_url.scheme not in ("https",) or _have_pep_476()):
try:
- f = _urlopen(
- url, if_modified_since=local_timestamp, proxies=proxies
- )
- if hasattr(f, "headers") and f.headers.get("timestamp", ""):
- remote_timestamp = f.headers.get("timestamp")
- except IOError as err:
+ if parsed_url.scheme in ("", "file"):
+ f = open(f"{parsed_url.path.rstrip('/')}/Packages", "rb")
+ else:
+ f = _urlopen(
+ url, if_modified_since=local_timestamp, proxies=proxies
+ )
+ if hasattr(f, "headers") and f.headers.get("timestamp", ""):
+ remote_timestamp = f.headers.get("timestamp")
+ except OSError as err:
if (
hasattr(err, "code") and err.code == 304
): # not modified (since local_timestamp)
@@ -1177,11 +1459,10 @@ class binarytree:
raise
except ValueError:
raise ParseError(
- "Invalid Portage BINHOST value '%s'" % url.lstrip()
+ f"Invalid Portage BINHOST value '{url.lstrip()}'"
)
if f is None:
-
path = parsed_url.path.rstrip("/") + "/Packages"
if repo.fetchcommand is None and parsed_url.scheme == "ssh":
@@ -1190,7 +1471,7 @@ class binarytree:
# matches that of the cached Packages file.
ssh_args = ["ssh"]
if port is not None:
- ssh_args.append("-p%s" % (port,))
+ ssh_args.append(f"-p{port}")
# NOTE: shlex evaluates embedded quotes
ssh_args.extend(
portage.util.shlex_split(
@@ -1211,7 +1492,7 @@ class binarytree:
if not fcmd:
fcmd = self.settings.get("FETCHCOMMAND")
if not fcmd:
- raise EnvironmentError("FETCHCOMMAND is unset")
+ raise OSError("FETCHCOMMAND is unset")
else:
fcmd = repo.fetchcommand
@@ -1234,7 +1515,7 @@ class binarytree:
fcmd=fcmd, fcmd_vars=fcmd_vars
)
if not success:
- raise EnvironmentError("%s failed" % (setting,))
+ raise OSError(f"{setting} failed")
f = open(tmp_filename, "rb")
f_dec = codecs.iterdecode(
@@ -1295,7 +1576,7 @@ class binarytree:
+ "\n"
)
rmt_idx = pkgindex
- except EnvironmentError as e:
+ except OSError as e:
# This includes URLError which is raised for SSL
# certificate errors when PEP 476 is supported.
writemsg(
@@ -1309,7 +1590,7 @@ class binarytree:
error_msg = str(e)
except UnicodeDecodeError as uerror:
error_msg = str(uerror.object, encoding="utf_8", errors="replace")
- writemsg("!!! %s\n\n" % error_msg)
+ writemsg(f"!!! {error_msg}\n\n")
del e
pkgindex = None
if proc is not None:
@@ -1330,7 +1611,7 @@ class binarytree:
f = atomic_ofstream(pkgindex_file)
pkgindex.write(f)
f.close()
- except (IOError, PortageException):
+ except (OSError, PortageException):
if os.access(os.path.dirname(pkgindex_file), os.W_OK):
raise
# The current user doesn't have permission to cache the
@@ -1346,6 +1627,32 @@ class binarytree:
if self.dbapi.cpv_exists(cpv):
continue
+ if gpkg_only:
+ try:
+ binpkg_format = get_binpkg_format(
+ d.get("PATH"), remote=True
+ )
+ except InvalidBinaryPackageFormat as e:
+ writemsg(
+ colorize(
+ "WARN",
+ f"{e}\n",
+ ),
+ noiselevel=-1,
+ )
+ continue
+ if binpkg_format != "gpkg":
+ if not gpkg_only_warned:
+ writemsg(
+ colorize(
+ "WARN",
+ f"Remote XPAK packages in '{remote_base_uri}' are ignored due to 'binpkg-request-signature'.\n",
+ ),
+ noiselevel=-1,
+ )
+ gpkg_only_warned = True
+ continue
+
d["CPV"] = cpv
d["BASE_URI"] = remote_base_uri
d["PKGINDEX_URI"] = url
@@ -1378,13 +1685,17 @@ class binarytree:
self._additional_pkgs[instance_key] = pkg
self.dbapi.cpv_inject(pkg)
- def inject(self, cpv, filename=None):
+ def inject(self, cpv, current_pkg_path=None, allocated_pkg_path=None):
"""Add a freshly built package to the database. This updates
$PKGDIR/Packages with the new package metadata (including MD5).
@param cpv: The cpv of the new package to inject
@type cpv: string
- @param filename: File path of the package to inject, or None if it's
- already in the location returned by getname()
+        @param current_pkg_path: File path of the package to inject,
+                or None if it's already in the location returned by getname()
+        @type current_pkg_path: string
+        @param allocated_pkg_path: File path of the package that was newly
+                allocated, or None if it's not allocated.
+        @type allocated_pkg_path: string
@rtype: _pkg_str or None
@return: A _pkg_str instance on success, or None on failure.
@@ -1392,10 +1703,10 @@ class binarytree:
mycat, mypkg = catsplit(cpv)
if not self.populated:
self.populate()
- if filename is None:
+ if current_pkg_path is None:
full_path = self.getname(cpv)
else:
- full_path = filename
+ full_path = current_pkg_path
try:
s = os.stat(full_path)
except OSError as e:
@@ -1403,11 +1714,29 @@ class binarytree:
raise
del e
writemsg(
- _("!!! Binary package does not exist: '%s'\n") % full_path,
+ f"!!! Binary package does not exist: '{full_path}'\n",
+ noiselevel=-1,
+ )
+ return
+
+ try:
+ metadata = self._read_metadata(full_path, s)
+ except (PortagePackageException, SignatureException) as e:
+ writemsg(
+ f"!!! Invalid binary package: '{full_path}', {e}\n",
+ noiselevel=-1,
+ )
+ return
+
+ try:
+ binpkg_format = get_binpkg_format(full_path)
+ except InvalidBinaryPackageFormat as e:
+ writemsg(
+                f"!!! Invalid binary package: '{full_path}', {e}\n",
noiselevel=-1,
)
return
- metadata = self._read_metadata(full_path, s)
+
invalid_depend = False
try:
self._eval_use_flags(cpv, metadata)
@@ -1442,34 +1771,19 @@ class binarytree:
try:
os.makedirs(self.pkgdir, exist_ok=True)
pkgindex_lock = lockfile(self._pkgindex_file, wantnewlockfile=1)
- if filename is not None:
- new_filename = self.getname(cpv, allocate_new=True)
+ if current_pkg_path is not None:
+ if allocated_pkg_path is not None:
+ new_path = allocated_pkg_path
+ else:
+ new_path = self.getname(cpv, allocate_new=True)
try:
- samefile = os.path.samefile(filename, new_filename)
+ samefile = os.path.samefile(current_pkg_path, new_path)
except OSError:
samefile = False
if not samefile:
- self._ensure_dir(os.path.dirname(new_filename))
- _movefile(filename, new_filename, mysettings=self.settings)
- full_path = new_filename
-
- basename = os.path.basename(full_path)
- pf = catsplit(cpv)[1]
- if build_id is None and not fetched and basename.endswith(".xpak"):
- # Apply the newly assigned BUILD_ID. This is intended
- # to occur only for locally built packages. If the
- # package was fetched, we want to preserve its
- # attributes, so that we can later distinguish that it
- # is identical to its remote counterpart.
- build_id = self._parse_build_id(basename)
- metadata["BUILD_ID"] = str(build_id)
- cpv = _pkg_str(
- cpv, metadata=metadata, settings=self.settings, db=self.dbapi
- )
- binpkg = portage.xpak.tbz2(full_path)
- binary_data = binpkg.get_data()
- binary_data[b"BUILD_ID"] = _unicode_encode(metadata["BUILD_ID"])
- binpkg.recompose_mem(portage.xpak.xpak_mem(binary_data))
+ self._ensure_dir(os.path.dirname(new_path))
+ _movefile(current_pkg_path, new_path, mysettings=self.settings)
+ full_path = new_path
self._file_permissions(full_path)
pkgindex = self._load_pkgindex()
@@ -1490,7 +1804,58 @@ class binarytree:
return cpv
- def _read_metadata(self, filename, st, keys=None):
+ def remove(self, cpv: portage.versions._pkg_str) -> None:
+ """
+ Remove a package instance and update internal state including
+ the package index. This will raise a KeyError if cpv is not
+ found in the internal state. It will display a warning message
+ if the package file was not found on disk, since it could have
+ been removed by another process before this method could
+ acquire a lock.
+
+ @param cpv: The cpv of the existing package to remove
+ @type cpv: portage.versions._pkg_str
+ @rtype: None
+ @return: None
+ @raise KeyError: If cpv does not exist in the internal state
+ """
+ if not self.populated:
+ self.populate()
+ os.makedirs(self.pkgdir, exist_ok=True)
+ pkgindex_lock = lockfile(self._pkgindex_file, wantnewlockfile=1)
+ try:
+ # Will raise KeyError if the package is not found.
+ instance_key = self.dbapi._instance_key(cpv)
+ pkg_path = self.getname(cpv)
+ self.dbapi.cpv_remove(cpv)
+ self._pkg_paths.pop(instance_key, None)
+ if self._remotepkgs is not None:
+ self._remotepkgs.pop(instance_key, None)
+ pkgindex = self._load_pkgindex()
+ if not self._pkgindex_version_supported(pkgindex):
+ pkgindex = self._new_pkgindex()
+
+ path = pkg_path[len(self.pkgdir) + 1 :]
+ for i in range(len(pkgindex.packages) - 1, -1, -1):
+ d = pkgindex.packages[i]
+ if cpv == d.get("CPV"):
+ if path == d.get("PATH", ""):
+ del pkgindex.packages[i]
+
+ self._pkgindex_write(pkgindex)
+ try:
+ os.remove(pkg_path)
+ except OSError as err:
+ writemsg(
+ colorize(
+ "WARN",
+ f"Failed to remove package: {pkg_path} {str(err)}",
+ )
+ )
+ finally:
+ unlockfile(pkgindex_lock)
+
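A hedged sketch of driving the new remove() method; the tree lookup follows common portage usage, the cpv selection is purely illustrative, and it assumes bindbapi.cpv_all() yields the _pkg_str instances the signature expects:

    import portage

    bintree = portage.db[portage.settings["EROOT"]]["bintree"]
    bintree.populate()
    for cpv in bintree.dbapi.cpv_all():
        bintree.remove(cpv)  # raises KeyError if cpv is not in the index
        break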
+ def _read_metadata(self, filename, st, keys=None, binpkg_format=None):
"""
Read metadata from a binary package. The returned metadata
dictionary will contain empty strings for any values that
@@ -1511,14 +1876,35 @@ class binarytree:
metadata = self.dbapi._aux_cache_slot_dict()
else:
metadata = {}
- binary_metadata = portage.xpak.tbz2(filename).get_data()
+
+        # xpak returns keys as bytes, gpkg returns keys as str
+ decode_metadata_name = True
+
+ if not binpkg_format:
+ binpkg_format = get_binpkg_format(filename)
+ if binpkg_format == "xpak":
+ binpkg_metadata = portage.xpak.tbz2(filename).get_data()
+ elif binpkg_format == "gpkg":
+ binpkg_metadata = portage.gpkg.gpkg(
+ self.settings, None, filename
+ ).get_metadata()
+ decode_metadata_name = False
+ else:
+ raise InvalidBinaryPackageFormat(
+ f"Unrecognized binary package format in '{filename}'"
+ )
+
for k in keys:
if k == "_mtime_":
metadata[k] = str(st[stat.ST_MTIME])
elif k == "SIZE":
metadata[k] = str(st.st_size)
else:
- v = binary_metadata.get(_unicode_encode(k))
+ if decode_metadata_name:
+ v = binpkg_metadata.get(_unicode_encode(k))
+ else:
+                    # gpkg metadata keys are already str
+ v = binpkg_metadata.get(k)
if v is None:
if k == "EAPI":
metadata[k] = "0"
@@ -1527,6 +1913,7 @@ class binarytree:
else:
v = _unicode_decode(v)
metadata[k] = " ".join(v.split())
+
return metadata
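The bytes-vs-str asymmetry handled above can be shown in isolation. This is a sketch of the lookup logic only, not the xpak/gpkg API:

    xpak_metadata = {b"EAPI": b"8"}  # xpak: bytes keys and values
    gpkg_metadata = {"EAPI": "8"}    # gpkg: str keys and values

    def lookup(metadata, key, decode_metadata_name):
        if decode_metadata_name:
            return metadata.get(key.encode("utf-8"))
        return metadata.get(key)

    assert lookup(xpak_metadata, "EAPI", True) == b"8"
    assert lookup(gpkg_metadata, "EAPI", False) == "8"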
def _inject_file(self, pkgindex, cpv, filename):
@@ -1618,9 +2005,8 @@ class binarytree:
d["SIZE"] = str(st.st_size)
rel_path = pkg_path[len(self.pkgdir) + 1 :]
- # record location if it's non-default
- if rel_path != cpv + ".tbz2":
- d["PATH"] = rel_path
+ # Always record location
+ d["PATH"] = rel_path
return d
@@ -1757,7 +2143,7 @@ class binarytree:
deps = use_reduce(deps, uselist=use, token_class=token_class)
deps = paren_enclose(deps)
except portage.exception.InvalidDependString as e:
- writemsg("%s: %s\n" % (k, e), noiselevel=-1)
+ writemsg(f"{k}: {e}\n", noiselevel=-1)
raise
metadata[k] = deps
@@ -1773,18 +2159,23 @@ class binarytree:
if not self.populated:
self.populate()
writemsg("\n\n", 1)
- writemsg("mydep: %s\n" % mydep, 1)
+ writemsg(f"mydep: {mydep}\n", 1)
mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
- writemsg("mydep: %s\n" % mydep, 1)
+ writemsg(f"mydep: {mydep}\n", 1)
mykey = dep_getkey(mydep)
- writemsg("mykey: %s\n" % mykey, 1)
+ writemsg(f"mykey: {mykey}\n", 1)
mymatch = best(match_from_list(mydep, self.dbapi.cp_list(mykey)))
- writemsg("mymatch: %s\n" % mymatch, 1)
+ writemsg(f"mymatch: {mymatch}\n", 1)
if mymatch is None:
return ""
return mymatch
- def getname(self, cpv, allocate_new=None):
+ def getname(self, cpv, allocate_new=None, remote_binpkg_format=None):
+ return self.getname_build_id(
+ cpv, allocate_new=allocate_new, remote_binpkg_format=remote_binpkg_format
+ )[0]
+
+ def getname_build_id(self, cpv, allocate_new=None, remote_binpkg_format=None):
"""Returns a file location for this package.
If cpv has both build_time and build_id attributes, then the
path to the specific corresponding instance is returned.
@@ -1801,8 +2192,9 @@ class binarytree:
cpv = _pkg_str(cpv)
filename = None
+ build_id = None
if allocate_new:
- filename = self._allocate_filename(cpv)
+ filename, build_id = self._allocate_filename(cpv, remote_binpkg_format)
elif self._is_specific_instance(cpv):
instance_key = self.dbapi._instance_key(cpv)
path = self._pkg_paths.get(instance_key)
@@ -1819,16 +2211,33 @@ class binarytree:
if filename is not None:
filename = os.path.join(self.pkgdir, filename)
elif instance_key in self._additional_pkgs:
- return None
+ return (None, None)
if filename is None:
- if self._multi_instance:
- pf = catsplit(cpv)[1]
- filename = "%s-%s.xpak" % (os.path.join(self.pkgdir, cpv.cp, pf), "1")
+ binpkg_format = self.settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+
+ if not binpkg_format:
+ raise InvalidBinaryPackageFormat(
+ "Unable to determine the binpkg format."
+ )
+ elif binpkg_format == "xpak":
+ if self._multi_instance:
+ pf = catsplit(cpv)[1]
+ filename = f"{os.path.join(self.pkgdir, cpv.cp, pf)}-1.xpak"
+ else:
+ filename = os.path.join(self.pkgdir, cpv + ".tbz2")
+ elif binpkg_format == "gpkg":
+ if self._multi_instance:
+ pf = catsplit(cpv)[1]
+ filename = f"{os.path.join(self.pkgdir, cpv.cp, pf)}-1.gpkg.tar"
+ else:
+ filename = os.path.join(self.pkgdir, cpv + ".gpkg.tar")
else:
- filename = os.path.join(self.pkgdir, cpv + ".tbz2")
+ raise InvalidBinaryPackageFormat(f"{binpkg_format}")
- return filename
+ return (filename, build_id)
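The fallback naming above yields four layouts depending on format and instance mode. A self-contained sketch (not the portage API) that mirrors the branch structure:

    import os

    def default_binpkg_path(pkgdir, cpv, cp, pf, binpkg_format, multi_instance):
        if binpkg_format == "xpak":
            if multi_instance:
                return os.path.join(pkgdir, cp, f"{pf}-1.xpak")
            return os.path.join(pkgdir, f"{cpv}.tbz2")
        if binpkg_format == "gpkg":
            if multi_instance:
                return os.path.join(pkgdir, cp, f"{pf}-1.gpkg.tar")
            return os.path.join(pkgdir, f"{cpv}.gpkg.tar")
        raise ValueError(binpkg_format)

    print(default_binpkg_path("/var/cache/binpkgs", "app-misc/foo-1.0",
                              "app-misc/foo", "foo-1.0", "gpkg", True))
    # -> /var/cache/binpkgs/app-misc/foo/foo-1.0-1.gpkg.tar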
def _is_specific_instance(self, cpv):
specific = True
@@ -1849,10 +2258,60 @@ class binarytree:
max_build_id = x.build_id
return max_build_id
- def _allocate_filename(self, cpv):
- return os.path.join(self.pkgdir, cpv + ".tbz2")
+ def _allocate_filename(self, cpv, remote_binpkg_format=None):
+ if remote_binpkg_format is None:
+ try:
+ binpkg_format = get_binpkg_format(cpv._metadata["PATH"])
+ except (AttributeError, KeyError):
+ binpkg_format = self.settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+ else:
+ binpkg_format = remote_binpkg_format
+
+ # Do not create a new placeholder to avoid overwriting existing binpkgs.
+ if binpkg_format == "xpak":
+ return (os.path.join(self.pkgdir, cpv + ".tbz2"), None)
+ elif binpkg_format == "gpkg":
+ return (os.path.join(self.pkgdir, cpv + ".gpkg.tar"), None)
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
+
+ def _allocate_filename_multi(self, cpv, remote_binpkg_format=None):
+ if remote_binpkg_format is None:
+ try:
+ binpkg_format = get_binpkg_format(cpv._metadata["PATH"])
+ except (AttributeError, KeyError):
+ binpkg_format = self.settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+ else:
+ binpkg_format = remote_binpkg_format
+
+ if binpkg_format == "xpak":
+ binpkg_suffix = "xpak"
+ elif binpkg_format == "gpkg":
+ binpkg_suffix = "gpkg.tar"
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
- def _allocate_filename_multi(self, cpv):
+        # If the preferred path is available then return it. This
+        # prevents unnecessary build_id increments triggered when the
+        # _max_build_id method counts remote build ids.
+ pf = catsplit(cpv)[1]
+ if getattr(cpv, "build_id", False):
+ preferred_path = f"{os.path.join(self.pkgdir, cpv.cp, pf)}-{cpv.build_id}.{binpkg_suffix}"
+ if not os.path.exists(preferred_path):
+ try:
+ # Avoid races
+ ensure_dirs(os.path.dirname(preferred_path))
+ with open(preferred_path, "x") as f:
+ pass
+ except FileExistsError:
+ pass
+ else:
+ return (preferred_path, cpv.build_id)
# First, get the max build_id found when _populate was
# called.
@@ -1861,27 +2320,39 @@ class binarytree:
# A new package may have been added concurrently since the
# last _populate call, so use increment build_id until
# we locate an unused id.
- pf = catsplit(cpv)[1]
build_id = max_build_id + 1
while True:
- filename = "%s-%s.xpak" % (os.path.join(self.pkgdir, cpv.cp, pf), build_id)
+ filename = (
+ f"{os.path.join(self.pkgdir, cpv.cp, pf)}-{build_id}.{binpkg_suffix}"
+ )
if os.path.exists(filename):
build_id += 1
else:
- return filename
+ try:
+ # Avoid races
+ ensure_dirs(os.path.dirname(filename))
+ with open(filename, "x") as f:
+ pass
+ except FileExistsError:
+ build_id += 1
+ continue
+ return (filename, build_id)
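The open(..., "x") placeholder used above is the standard exclusive-create pattern for avoiding allocation races; distilled into a standalone helper:

    import os

    def reserve(path_for, start_id):
        build_id = start_id
        while True:
            path = path_for(build_id)
            if os.path.exists(path):
                build_id += 1
                continue
            os.makedirs(os.path.dirname(path), exist_ok=True)
            try:
                # "x" fails atomically if another process creates the
                # file between the exists() check and this open().
                with open(path, "x"):
                    pass
            except FileExistsError:
                build_id += 1
                continue
            return path, build_id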
@staticmethod
def _parse_build_id(filename):
build_id = -1
- suffixlen = len(".xpak")
- hyphen = filename.rfind("-", 0, -(suffixlen + 1))
- if hyphen != -1:
- build_id = filename[hyphen + 1 : -suffixlen]
- try:
- build_id = int(build_id)
- except ValueError:
- pass
+ if filename.endswith(SUPPORTED_XPAK_EXTENSIONS):
+ suffixlen = len(".xpak")
+ elif filename.endswith(SUPPORTED_GPKG_EXTENSIONS):
+ suffixlen = len(".gpkg.tar")
+ else:
+ raise InvalidBinaryPackageFormat(filename)
+
+ filename = filename[:-suffixlen]
+ if re.match(r".*-[\w.]*\d+[\w.]*-\d+$", filename):
+ build_id = int(filename.split("-")[-1])
+
return build_id
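The rewritten parser can be exercised standalone; a sketch reproducing its logic on sample names (hypothetical filenames, and a ValueError standing in for InvalidBinaryPackageFormat):

    import re

    def parse_build_id(name):
        for suffix in (".xpak", ".gpkg.tar"):
            if name.endswith(suffix):
                stem = name[: -len(suffix)]
                break
        else:
            raise ValueError(name)
        if re.match(r".*-[\w.]*\d+[\w.]*-\d+$", stem):
            return int(stem.split("-")[-1])
        return -1

    assert parse_build_id("foo-1.0-3.gpkg.tar") == 3
    assert parse_build_id("foo-1.0.xpak") == -1  # no build id present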
def isremote(self, pkgname):
@@ -1956,15 +2427,14 @@ class binarytree:
def _load_pkgindex(self):
pkgindex = self._new_pkgindex()
try:
- f = io.open(
+ f = open(
_unicode_encode(
self._pkgindex_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
- except EnvironmentError:
+ except OSError:
pass
else:
try:
@@ -1974,7 +2444,6 @@ class binarytree:
return pkgindex
def _get_digests(self, pkg):
-
try:
cpv = pkg.cpv
except AttributeError:
diff --git a/lib/portage/dbapi/meson.build b/lib/portage/dbapi/meson.build
new file mode 100644
index 000000000..6b6a94c47
--- /dev/null
+++ b/lib/portage/dbapi/meson.build
@@ -0,0 +1,22 @@
+py.install_sources(
+ [
+ 'DummyTree.py',
+ 'IndexedPortdb.py',
+ 'IndexedVardb.py',
+ 'bintree.py',
+ 'cpv_expand.py',
+ 'dep_expand.py',
+ 'porttree.py',
+ 'vartree.py',
+ 'virtual.py',
+ '_ContentsCaseSensitivityManager.py',
+ '_MergeProcess.py',
+ '_SyncfsProcess.py',
+ '_VdbMetadataDelta.py',
+ '_expand_new_virt.py',
+ '_similar_name_search.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/dbapi',
+ pure : not native_extensions
+)
diff --git a/lib/portage/dbapi/porttree.py b/lib/portage/dbapi/porttree.py
index 93f3fee2f..de6aa5c82 100644
--- a/lib/portage/dbapi/porttree.py
+++ b/lib/portage/dbapi/porttree.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["close_portdbapi_caches", "FetchlistDict", "portagetree", "portdbapi"]
@@ -41,7 +41,9 @@ from portage.util.futures import asyncio
from portage.util.futures.iter_completed import iter_gather
from _emerge.EbuildMetadataPhase import EbuildMetadataPhase
+import contextlib
import os as _os
+import threading
import traceback
import warnings
import errno
@@ -49,6 +51,8 @@ import functools
import collections
from collections import OrderedDict
+from collections.abc import Sequence
+from typing import Optional, Union
from urllib.parse import urlparse
@@ -104,7 +108,6 @@ class _dummy_list(list):
class _better_cache:
-
"""
The purpose of better_cache is to locate catpkgs in repositories using ``os.listdir()`` as much as possible, which
is less expensive IO-wise than exhaustively doing a stat on each repo for a particular catpkg. better_cache stores a
@@ -161,7 +164,7 @@ class _better_cache:
continue
for p in pkg_list:
try:
- atom = Atom("%s/%s" % (cat, p))
+ atom = Atom(f"{cat}/{p}")
except InvalidAtom:
continue
if atom != atom.cp:
@@ -238,6 +241,7 @@ class portdbapi(dbapi):
# this purpose because doebuild makes many changes to the config
# instance that is passed in.
self.doebuild_settings = config(clone=self.settings)
+ self._doebuild_settings_lock = asyncio.Lock()
self.depcachedir = os.path.realpath(self.settings.depcachedir)
if os.environ.get("SANDBOX_ON") == "1":
@@ -312,7 +316,7 @@ class portdbapi(dbapi):
x,
self._known_keys,
readonly=True,
- **cache_kwargs
+ **cache_kwargs,
)
except CacheError:
pass
@@ -332,49 +336,58 @@ class portdbapi(dbapi):
if cache is not None:
self._pregen_auxdb[x] = cache
# Selectively cache metadata in order to optimize dep matching.
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "DEPEND",
- "EAPI",
- "IDEPEND",
- "INHERITED",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "PDEPEND",
- "PROPERTIES",
- "RDEPEND",
- "repository",
- "RESTRICT",
- "SLOT",
- "DEFINED_PHASES",
- "REQUIRED_USE",
- ]
- )
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "DEPEND",
+ "EAPI",
+ "IDEPEND",
+ "INHERITED",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "PROPERTIES",
+ "RDEPEND",
+ "repository",
+ "RESTRICT",
+ "SLOT",
+ "DEFINED_PHASES",
+ "REQUIRED_USE",
+ }
self._aux_cache = {}
self._better_cache = None
self._broken_ebuilds = set()
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ # These attributes are not picklable, so they are automatically
+ # regenerated after unpickling.
+ state["_doebuild_settings_lock"] = None
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self._doebuild_settings_lock = asyncio.Lock()
+
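The __getstate__/__setstate__ pair implements the usual drop-and-regenerate pattern for unpicklable members; a generic sketch of the same idea:

    import pickle

    class HasLock:
        def __init__(self):
            self.lock = object()  # stand-in for an unpicklable lock

        def __getstate__(self):
            state = self.__dict__.copy()
            state["lock"] = None  # drop the unpicklable member
            return state

        def __setstate__(self, state):
            self.__dict__.update(state)
            self.lock = object()  # regenerate after unpickling

    obj = pickle.loads(pickle.dumps(HasLock()))
    assert obj.lock is not None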
def _set_porttrees(self, porttrees):
"""
- Consumers, such as repoman and emirrordist, may modify the porttrees
- attribute in order to modify the effective set of repositories for
- all portdbapi operations.
+ Consumers, such as emirrordist, may modify the porttrees attribute in
+ order to modify the effective set of repositories for all portdbapi
+ operations.
@param porttrees: list of repo locations, in ascending order by
repo priority
@type porttrees: list
"""
+ self._porttrees = tuple(porttrees)
self._porttrees_repos = portage.OrderedDict(
(repo.name, repo)
for repo in (
self.repositories.get_repo_for_location(location)
- for location in porttrees
+ for location in self._porttrees
)
)
- self._porttrees = tuple(porttrees)
def _get_porttrees(self):
return self._porttrees
@@ -437,7 +450,9 @@ class portdbapi(dbapi):
return license_path
return None
- def findname(self, mycpv, mytree=None, myrepo=None):
+ def findname(
+ self, mycpv: str, mytree: Optional[str] = None, myrepo: Optional[str] = None
+ ) -> str:
return self.findname2(mycpv, mytree, myrepo)[0]
def getRepositoryPath(self, repository_id):
@@ -496,7 +511,12 @@ class portdbapi(dbapi):
"""
return self.settings.repositories.ignored_repos
- def findname2(self, mycpv, mytree=None, myrepo=None):
+ def findname2(
+ self,
+ mycpv: str,
+ mytree: Optional[str] = None,
+ myrepo: Optional[str] = None,
+ ) -> Union[tuple[None, int], tuple[str, str], tuple[str, None]]:
"""
Returns the location of the CPV, and what overlay it was in.
Searches overlays first, then PORTDIR; this allows us to return the first
@@ -543,7 +563,7 @@ class portdbapi(dbapi):
continue
mytrees.append(repo.location)
- # For optimal performace in this hot spot, we do manual unicode
+ # For optimal performance in this hot spot, we do manual unicode
# handling here instead of using the wrapped os module.
encoding = _encodings["fs"]
errors = "strict"
@@ -572,11 +592,10 @@ class portdbapi(dbapi):
return (None, 0)
def _write_cache(self, cpv, repo_path, metadata, ebuild_hash):
-
try:
cache = self.auxdb[repo_path]
chf = cache.validation_chf
- metadata["_%s_" % chf] = getattr(ebuild_hash, chf)
+ metadata[f"_{chf}_"] = getattr(ebuild_hash, chf)
except CacheError:
# Normally this shouldn't happen, so we'll show
# a traceback for debugging purposes.
@@ -602,7 +621,7 @@ class portdbapi(dbapi):
_("!!! aux_get(): ebuild for " "'%s' does not exist at:\n") % (cpv,),
noiselevel=-1,
)
- writemsg("!!! %s\n" % ebuild_path, noiselevel=-1)
+ writemsg(f"!!! {ebuild_path}\n", noiselevel=-1)
raise PortageKeyError(cpv)
# Pull pre-generated metadata from the metadata/cache/
@@ -646,8 +665,14 @@ class portdbapi(dbapi):
return (metadata, ebuild_hash)
- def aux_get(self, mycpv, mylist, mytree=None, myrepo=None):
- "stub code for returning auxilliary db information, such as SLOT, DEPEND, etc."
+ def aux_get(
+ self,
+ mycpv: str,
+ mylist: Sequence[str],
+ mytree: Optional[str] = None,
+ myrepo: Optional[str] = None,
+ ) -> list[str]:
+ "stub code for returning auxiliary db information, such as SLOT, DEPEND, etc."
'input: "sys-apps/foo-1.0",["SLOT","DEPEND","HOMEPAGE"]'
'return: ["0",">=sys-libs/bar-1.0","http://www.foo.com"] or raise PortageKeyError if error'
# For external API consumers, self._event_loop returns a new event
@@ -658,7 +683,7 @@ class portdbapi(dbapi):
self.async_aux_get(mycpv, mylist, mytree=mytree, myrepo=myrepo, loop=loop)
)
- def async_aux_get(self, mycpv, mylist, mytree=None, myrepo=None, loop=None):
+ async def async_aux_get(self, mycpv, mylist, mytree=None, myrepo=None, loop=None):
"""
Asynchronous form of aux_get.
@@ -683,13 +708,11 @@ class portdbapi(dbapi):
# Callers of this method certainly want the same event loop to
# be used for all calls.
loop = asyncio._wrap_loop(loop)
- future = loop.create_future()
cache_me = False
if myrepo is not None:
mytree = self.treemap.get(myrepo)
if mytree is None:
- future.set_exception(PortageKeyError(myrepo))
- return future
+ raise PortageKeyError(myrepo)
if (
mytree is not None
@@ -708,16 +731,14 @@ class portdbapi(dbapi):
):
aux_cache = self._aux_cache.get(mycpv)
if aux_cache is not None:
- future.set_result([aux_cache.get(x, "") for x in mylist])
- return future
+ return [aux_cache.get(x, "") for x in mylist]
cache_me = True
try:
cat, pkg = mycpv.split("/", 1)
except ValueError:
# Missing slash. Can't find ebuild so raise PortageKeyError.
- future.set_exception(PortageKeyError(mycpv))
- return future
+ raise PortageKeyError(mycpv)
myebuild, mylocation = self.findname2(mycpv, mytree)
@@ -726,12 +747,12 @@ class portdbapi(dbapi):
"!!! aux_get(): %s\n" % _("ebuild not found for '%s'") % mycpv,
noiselevel=1,
)
- future.set_exception(PortageKeyError(mycpv))
- return future
+ raise PortageKeyError(mycpv)
mydata, ebuild_hash = self._pull_valid_cache(mycpv, myebuild, mylocation)
if mydata is not None:
+ future = loop.create_future()
self._aux_get_return(
future,
mycpv,
@@ -743,37 +764,71 @@ class portdbapi(dbapi):
cache_me,
None,
)
- return future
+ return future.result()
if myebuild in self._broken_ebuilds:
- future.set_exception(PortageKeyError(mycpv))
- return future
-
- proc = EbuildMetadataPhase(
- cpv=mycpv,
- ebuild_hash=ebuild_hash,
- portdb=self,
- repo_path=mylocation,
- scheduler=loop,
- settings=self.doebuild_settings,
- )
+ raise PortageKeyError(mycpv)
- proc.addExitListener(
- functools.partial(
- self._aux_get_return,
- future,
- mycpv,
- mylist,
- myebuild,
- ebuild_hash,
- mydata,
- mylocation,
- cache_me,
- )
- )
- future.add_done_callback(functools.partial(self._aux_get_cancel, proc))
- proc.start()
- return future
+ proc = None
+ deallocate_config = None
+ async with contextlib.AsyncExitStack() as stack:
+ try:
+ if (
+ threading.current_thread() is threading.main_thread()
+ and loop._loop is asyncio._safe_loop()._loop
+ ):
+ # In this case use self._doebuild_settings_lock to manage concurrency.
+ deallocate_config = loop.create_future()
+ await stack.enter_async_context(self._doebuild_settings_lock)
+ settings = self.doebuild_settings
+ else:
+ if portage._internal_caller:
+ raise AssertionError(
+ f"async_aux_get called from thread {threading.current_thread()} with loop {loop}"
+ )
+ # Clone a config instance since we do not have a thread-safe config pool.
+ settings = portage.config(clone=self.settings)
+
+ proc = EbuildMetadataPhase(
+ cpv=mycpv,
+ ebuild_hash=ebuild_hash,
+ portdb=self,
+ repo_path=mylocation,
+ scheduler=loop,
+ settings=settings,
+ deallocate_config=deallocate_config,
+ )
+
+ future = loop.create_future()
+ proc.addExitListener(
+ functools.partial(
+ self._aux_get_return,
+ future,
+ mycpv,
+ mylist,
+ myebuild,
+ ebuild_hash,
+ mydata,
+ mylocation,
+ cache_me,
+ )
+ )
+ future.add_done_callback(functools.partial(self._aux_get_cancel, proc))
+ proc.start()
+
+ finally:
+ # Wait for deallocate_config before releasing
+ # self._doebuild_settings_lock if needed.
+ if deallocate_config is not None:
+ if proc is None or not proc.isAlive():
+                        # Nothing will fulfill deallocate_config now,
+                        # so cancel it unless it is already done.
+                        deallocate_config.done() or deallocate_config.cancel()
+ else:
+ await deallocate_config
+
+ # After deallocate_config is done, release self._doebuild_settings_lock
+ # by leaving the stack context, and wait for proc to finish and
+ # trigger a call to self._aux_get_return.
+ return await future
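Since async_aux_get is now a true coroutine, callers await it (or wrap it with ensure_future, as getFetchMap does below). A hedged usage sketch; the tree lookup and cpv are illustrative:

    import portage
    from portage.util.futures import asyncio

    portdb = portage.db[portage.settings["EROOT"]]["porttree"].dbapi
    loop = asyncio._safe_loop()
    slot, depend = loop.run_until_complete(
        portdb.async_aux_get("sys-apps/portage-3.0.50", ["SLOT", "DEPEND"])
    )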
@staticmethod
def _aux_get_cancel(proc, future):
@@ -878,7 +933,7 @@ class portdbapi(dbapi):
)
)
else:
- result.set_exception(future.exception())
+ result.set_exception(aux_get_future.exception())
return
eapi, myuris = aux_get_future.result()
@@ -888,7 +943,7 @@ class portdbapi(dbapi):
# since callers already handle it.
result.set_exception(
portage.exception.InvalidDependString(
- "getFetchMap(): '%s' has unsupported EAPI: '%s'" % (mypkg, eapi)
+ f"getFetchMap(): '{mypkg}' has unsupported EAPI: '{eapi}'"
)
)
return
@@ -902,8 +957,9 @@ class portdbapi(dbapi):
except Exception as e:
result.set_exception(e)
- aux_get_future = self.async_aux_get(
- mypkg, ["EAPI", "SRC_URI"], mytree=mytree, loop=loop
+ aux_get_future = asyncio.ensure_future(
+ self.async_aux_get(mypkg, ["EAPI", "SRC_URI"], mytree=mytree, loop=loop),
+ loop,
)
result.add_done_callback(
lambda result: aux_get_future.cancel() if result.cancelled() else None
@@ -912,7 +968,7 @@ class portdbapi(dbapi):
return result
def getfetchsizes(self, mypkg, useflags=None, debug=0, myrepo=None):
- # returns a filename:size dictionnary of remaining downloads
+ # returns a filename:size dictionary of remaining downloads
myebuild, mytree = self.findname2(mypkg, myrepo=myrepo)
if myebuild is None:
raise AssertionError(_("ebuild not found for '%s'") % mypkg)
@@ -1065,7 +1121,7 @@ class portdbapi(dbapi):
oroot + "/" + x, EmptyOnError=1, ignorecvs=1, dirsonly=1
):
try:
- atom = Atom("%s/%s" % (x, y))
+ atom = Atom(f"{x}/{y}")
except InvalidAtom:
continue
if atom != atom.cp:
@@ -1203,12 +1259,12 @@ class portdbapi(dbapi):
def xmatch(
self,
- level,
- origdep,
- mydep=DeprecationWarning,
- mykey=DeprecationWarning,
- mylist=DeprecationWarning,
- ):
+ level: str,
+ origdep: str,
+ mydep: type[DeprecationWarning] = DeprecationWarning,
+ mykey: type[DeprecationWarning] = DeprecationWarning,
+ mylist: type[DeprecationWarning] = DeprecationWarning,
+ ) -> Union[Sequence[str], str]:
"""
Caching match function.
@@ -1373,7 +1429,7 @@ class portdbapi(dbapi):
myval = ""
else:
- raise AssertionError("Invalid level argument: '%s'" % level)
+ raise AssertionError(f"Invalid level argument: '{level}'")
if self.frozen:
xcache_this_level = self.xcache.get(level)
@@ -1384,7 +1440,7 @@ class portdbapi(dbapi):
return myval
- def match(self, mydep, use_cache=1):
+ def match(self, mydep: str, use_cache: int = 1) -> Union[Sequence[str], str]:
return self.xmatch("match-visible", mydep)
def gvisible(self, mylist):
@@ -1436,10 +1492,10 @@ class portdbapi(dbapi):
continue
except PortageException as e:
writemsg(
- "!!! Error: aux_get('%s', %s)\n" % (mycpv, aux_keys),
+ f"!!! Error: aux_get('{mycpv}', {aux_keys})\n",
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
del e
continue
@@ -1705,10 +1761,7 @@ def _async_manifest_fetchlist(
return
if e is None:
result.set_result(
- dict(
- (k, list(v.result()))
- for k, v in zip(cpv_list, gather_result.result())
- )
+ {k: list(v.result()) for k, v in zip(cpv_list, gather_result.result())}
)
else:
result.set_exception(e)
@@ -1734,7 +1787,6 @@ def _async_manifest_fetchlist(
def _parse_uri_map(cpv, metadata, use=None):
-
myuris = use_reduce(
metadata.get("SRC_URI", ""),
uselist=use,
diff --git a/lib/portage/dbapi/vartree.py b/lib/portage/dbapi/vartree.py
index 8ffb23b1c..c6b45ba42 100644
--- a/lib/portage/dbapi/vartree.py
+++ b/lib/portage/dbapi/vartree.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["vardbapi", "vartree", "dblink"] + ["write_contents", "tar_contents"]
@@ -32,7 +32,7 @@ portage.proxy.lazyimport.lazyimport(
"portage.util.env_update:env_update",
"portage.util.install_mask:install_mask_dir,InstallMask,_raise_exc",
"portage.util.listdir:dircache,listdir",
- "portage.util.movefile:movefile",
+ "portage.util.movefile:movefile,_cmpxattr",
"portage.util.path:first_existing,iter_parents",
"portage.util.writeable_check:get_ro_checker",
"portage.util._xattr:xattr",
@@ -43,6 +43,7 @@ portage.proxy.lazyimport.lazyimport(
"portage.util._eventloop.global_event_loop:global_event_loop",
"portage.versions:best,catpkgsplit,catsplit,cpv_getkey,vercmp,"
+ "_get_slot_re,_pkgsplit@pkgsplit,_pkg_str,_unknown_repo",
+ "portage.gpkg",
"subprocess",
"tarfile",
)
@@ -54,13 +55,16 @@ from portage.const import (
PORTAGE_PACKAGE_ATOM,
PRIVATE_PATH,
VDB_PATH,
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
)
from portage.dbapi import dbapi
from portage.exception import (
CommandNotFound,
+ CorruptionKeyError,
InvalidData,
InvalidLocation,
InvalidPackageName,
+ InvalidBinaryPackageFormat,
FileNotFound,
PermissionDenied,
UnsupportedAPIException,
@@ -90,6 +94,7 @@ from ._ContentsCaseSensitivityManager import ContentsCaseSensitivityManager
import argparse
import errno
+import filecmp
import fnmatch
import functools
import gc
@@ -97,6 +102,7 @@ import grp
import io
from itertools import chain
import logging
+import multiprocessing
import os as _os
import operator
import pickle
@@ -111,7 +117,6 @@ import warnings
class vardbapi(dbapi):
-
_excluded_dirs = ["CVS", "lost+found"]
_excluded_dirs = [re.escape(x) for x in _excluded_dirs]
_excluded_dirs = re.compile(
@@ -188,33 +193,31 @@ class vardbapi(dbapi):
if vartree is None:
vartree = portage.db[settings["EROOT"]]["vartree"]
self.vartree = vartree
- self._aux_cache_keys = set(
- [
- "BDEPEND",
- "BUILD_TIME",
- "CHOST",
- "COUNTER",
- "DEPEND",
- "DESCRIPTION",
- "EAPI",
- "HOMEPAGE",
- "BUILD_ID",
- "IDEPEND",
- "IUSE",
- "KEYWORDS",
- "LICENSE",
- "PDEPEND",
- "PROPERTIES",
- "RDEPEND",
- "repository",
- "RESTRICT",
- "SLOT",
- "USE",
- "DEFINED_PHASES",
- "PROVIDES",
- "REQUIRES",
- ]
- )
+ self._aux_cache_keys = {
+ "BDEPEND",
+ "BUILD_TIME",
+ "CHOST",
+ "COUNTER",
+ "DEPEND",
+ "DESCRIPTION",
+ "EAPI",
+ "HOMEPAGE",
+ "BUILD_ID",
+ "IDEPEND",
+ "IUSE",
+ "KEYWORDS",
+ "LICENSE",
+ "PDEPEND",
+ "PROPERTIES",
+ "RDEPEND",
+ "repository",
+ "RESTRICT",
+ "SLOT",
+ "USE",
+ "DEFINED_PHASES",
+ "PROVIDES",
+ "REQUIRES",
+ }
self._aux_cache_obj = None
self._aux_cache_filename = os.path.join(
self._eroot, CACHE_PATH, "vdb_metadata.pickle"
@@ -338,7 +341,7 @@ class vardbapi(dbapi):
"""
lock, counter = self._slot_locks.get(slot_atom, (None, 0))
if lock is None:
- lock_path = self.getpath("%s:%s" % (slot_atom.cp, slot_atom.slot))
+ lock_path = self.getpath(f"{slot_atom.cp}:{slot_atom.slot}")
ensure_dirs(os.path.dirname(lock_path))
lock = lockfile(lock_path, wantnewlockfile=True)
self._slot_locks[slot_atom] = (lock, counter + 1)
@@ -394,7 +397,7 @@ class vardbapi(dbapi):
def cpv_inject(self, mycpv):
"injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
ensure_dirs(self.getpath(mycpv))
- counter = self.counter_tick(mycpv=mycpv)
+ counter = self.counter_tick()
# write local package counter so that emerge clean does the right thing
write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
@@ -457,7 +460,7 @@ class vardbapi(dbapi):
os.path.join(newpath, old_pf + ".ebuild"),
os.path.join(newpath, new_pf + ".ebuild"),
)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -481,7 +484,7 @@ class vardbapi(dbapi):
cat_dir = self.getpath(mysplit[0])
try:
dir_list = os.listdir(cat_dir)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(cat_dir)
del e
@@ -497,7 +500,7 @@ class vardbapi(dbapi):
continue
if len(mysplit) > 1:
if ps[0] == mysplit[1]:
- cpv = "%s/%s" % (mysplit[0], x)
+ cpv = f"{mysplit[0]}/{x}"
metadata = dict(
zip(
self._aux_cache_keys,
@@ -538,7 +541,7 @@ class vardbapi(dbapi):
return [
x for x in os.listdir(p) if os.path.isdir(os.path.join(p, x))
]
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(p)
del e
@@ -631,7 +634,7 @@ class vardbapi(dbapi):
)
try:
curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime_ns
- except (IOError, OSError):
+ except OSError:
curmtime = 0
if mycat not in self.matchcache or self.mtdircache[mycat] != curmtime:
@@ -665,7 +668,6 @@ class vardbapi(dbapi):
or not os.path.exists(self._cache_delta_filename)
)
):
-
ensure_dirs(os.path.dirname(self._aux_cache_filename))
self._owners.populate() # index any unindexed contents
@@ -702,7 +704,7 @@ class vardbapi(dbapi):
self._aux_cache_filename, encoding=_encodings["fs"], errors="strict"
),
mode="rb",
- **open_kwargs
+ **open_kwargs,
) as f:
mypickle = pickle.Unpickler(f)
try:
@@ -794,7 +796,6 @@ class vardbapi(dbapi):
pull_me = cache_these.union(wants)
mydata = {"_mtime_": mydir_mtime}
cache_valid = False
- cache_incomplete = False
cache_mtime = None
metadata = None
if pkg_data is not None:
@@ -873,18 +874,17 @@ class vardbapi(dbapi):
results[x] = st[stat.ST_MTIME]
continue
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(mydir, x),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
myd = f.read()
- except IOError:
+ except OSError:
if (
x not in self._aux_cache_keys
and self._aux_cache_keys_re.match(x) is None
@@ -938,7 +938,7 @@ class vardbapi(dbapi):
args = bunzip2_cmd + ["-c", env_file]
try:
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
raise portage.exception.CommandNotFound(args[0])
@@ -991,8 +991,10 @@ class vardbapi(dbapi):
def aux_update(self, cpv, values):
mylink = self._dblink(cpv)
- if not mylink.exists():
- raise KeyError(cpv)
+ try:
+ os.stat(mylink.dbdir)
+ except OSError as oe:
+ raise CorruptionKeyError(cpv) from oe
self._bump_mtime(cpv)
self._clear_pkg_cache(mylink)
for k, v in values.items():
@@ -1001,10 +1003,17 @@ class vardbapi(dbapi):
else:
try:
os.unlink(os.path.join(self.getpath(cpv), k))
- except EnvironmentError:
+ except OSError:
pass
self._bump_mtime(cpv)
+ @staticmethod
+ def _async_copy(dbdir, dest_dir):
+ for parent, dirs, files in os.walk(dbdir, onerror=_raise_exc):
+ for key in files:
+ shutil.copy(os.path.join(parent, key), os.path.join(dest_dir, key))
+ break
+
async def unpack_metadata(self, pkg, dest_dir, loop=None):
"""
Unpack package metadata to a directory. This method is a coroutine.
@@ -1020,14 +1029,9 @@ class vardbapi(dbapi):
else:
cpv = pkg.mycpv
dbdir = self.getpath(cpv)
-
- def async_copy():
- for parent, dirs, files in os.walk(dbdir, onerror=_raise_exc):
- for key in files:
- shutil.copy(os.path.join(parent, key), os.path.join(dest_dir, key))
- break
-
- await loop.run_in_executor(ForkExecutor(loop=loop), async_copy)
+ await loop.run_in_executor(
+ ForkExecutor(loop=loop), self._async_copy, dbdir, dest_dir
+ )
async def unpack_contents(
self,
@@ -1080,42 +1084,58 @@ class vardbapi(dbapi):
# Method parameters may override QUICKPKG_DEFAULT_OPTS.
opts_list = portage.util.shlex_split(settings.get("QUICKPKG_DEFAULT_OPTS", ""))
if include_config is not None:
- opts_list.append(
- "--include-config={}".format("y" if include_config else "n")
- )
+ opts_list.append(f"--include-config={'y' if include_config else 'n'}")
if include_unmodified_config is not None:
opts_list.append(
"--include-unmodified-config={}".format(
"y" if include_unmodified_config else "n"
)
)
-
opts, args = parser.parse_known_args(opts_list)
- tar_cmd = ("tar", "-x", "--xattrs", "--xattrs-include=*", "-C", dest_dir)
- pr, pw = os.pipe()
- proc = await asyncio.create_subprocess_exec(*tar_cmd, stdin=pr)
- os.close(pr)
- with os.fdopen(pw, "wb", 0) as pw_file:
+ binpkg_format = settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+ if binpkg_format == "xpak":
+ tar_cmd = ("tar", "-x", "--xattrs", "--xattrs-include=*", "-C", dest_dir)
+ pr, pw = multiprocessing.Pipe(duplex=False)
+ proc = await asyncio.create_subprocess_exec(*tar_cmd, stdin=pr)
+ pr.close()
+ excluded_config_files = await loop.run_in_executor(
+ ForkExecutor(loop=loop),
+ functools.partial(
+ self._dblink(cpv).quickpkg,
+ pw,
+ include_config=opts.include_config == "y",
+ include_unmodified_config=opts.include_unmodified_config == "y",
+ ),
+ )
+ await proc.wait()
+ if proc.returncode != os.EX_OK:
+ raise PortageException(f"command failed: {tar_cmd}")
+ elif binpkg_format == "gpkg":
+ gpkg_tmp_fd, gpkg_tmp = tempfile.mkstemp(suffix=".gpkg.tar")
+ os.close(gpkg_tmp_fd)
excluded_config_files = await loop.run_in_executor(
ForkExecutor(loop=loop),
functools.partial(
self._dblink(cpv).quickpkg,
- pw_file,
+ gpkg_tmp,
include_config=opts.include_config == "y",
include_unmodified_config=opts.include_unmodified_config == "y",
),
)
- await proc.wait()
- if proc.returncode != os.EX_OK:
- raise PortageException("command failed: {}".format(tar_cmd))
+ portage.gpkg.gpkg(settings, cpv, gpkg_tmp).decompress(dest_dir)
+ os.remove(gpkg_tmp)
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
if excluded_config_files:
log_lines = [
_(
"Config files excluded by QUICKPKG_DEFAULT_OPTS (see quickpkg(1) man page):"
)
- ] + ["\t{}".format(name) for name in excluded_config_files]
+ ] + [f"\t{name}" for name in excluded_config_files]
out = io.StringIO()
for line in log_lines:
portage.elog.messages.ewarn(line, phase="install", key=cpv, out=out)
@@ -1125,13 +1145,10 @@ class vardbapi(dbapi):
log_path=settings.get("PORTAGE_LOG_FILE"),
)
- def counter_tick(self, myroot=None, mycpv=None):
- """
- @param myroot: ignored, self._eroot is used instead
- """
- return self.counter_tick_core(incrementing=1, mycpv=mycpv)
+ def counter_tick(self) -> int:
+ return self.counter_tick_core(incrementing=1)
- def get_counter_tick_core(self, myroot=None, mycpv=None):
+ def get_counter_tick_core(self) -> int:
"""
Use this method to retrieve the counter instead
of having to trust the value of a global counter
@@ -1149,17 +1166,13 @@ class vardbapi(dbapi):
it also corresponds to the total number of
installation actions that have occurred in
the history of this package database.
-
- @param myroot: ignored, self._eroot is used instead
"""
- del myroot
counter = -1
try:
- with io.open(
+ with open(
_unicode_encode(
self._counter_path, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -1170,8 +1183,8 @@ class vardbapi(dbapi):
_("!!! COUNTER file is corrupt: '%s'\n") % self._counter_path,
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
- except EnvironmentError as e:
+ writemsg(f"!!! {e}\n", noiselevel=-1)
+ except OSError as e:
# Silently allow ENOENT since files under
# /var/cache/ are allowed to disappear.
if e.errno != errno.ENOENT:
@@ -1179,7 +1192,7 @@ class vardbapi(dbapi):
_("!!! Unable to read COUNTER file: '%s'\n") % self._counter_path,
noiselevel=-1,
)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
del e
if self._cached_counter == counter:
@@ -1204,7 +1217,7 @@ class vardbapi(dbapi):
return max_counter + 1
- def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
+ def counter_tick_core(self, incrementing: int = 1) -> int:
"""
This method will grab the next COUNTER value and record it back
to the global file. Note that every package install must have
@@ -1212,13 +1225,8 @@ class vardbapi(dbapi):
into the same SLOT and in that case it's important that both
packages have different COUNTER metadata.
- @param myroot: ignored, self._eroot is used instead
- @param mycpv: ignored
- @rtype: int
@return: new counter value
"""
- myroot = None
- mycpv = None
self.lock()
try:
counter = self.get_counter_tick_core() - 1
@@ -1291,16 +1299,15 @@ class vardbapi(dbapi):
needed_filename = os.path.join(pkg.dbdir, LinkageMap._needed_aux_key)
new_needed = None
try:
- with io.open(
+ with open(
_unicode_encode(
needed_filename, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
needed_lines = f.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
@@ -1312,9 +1319,7 @@ class vardbapi(dbapi):
try:
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
- writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
- )
+ writemsg_level(f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1)
continue
filename = os.path.join(root, entry.filename.lstrip(os.sep))
@@ -1516,7 +1521,6 @@ class vardbapi(dbapi):
return x
while path_iter:
-
path = path_iter.pop()
if case_insensitive:
path = path.lower()
@@ -1572,8 +1576,7 @@ class vardbapi(dbapi):
del owners[:]
dblink_cache.clear()
gc.collect()
- for x in self._iter_owners_low_mem(path_iter):
- yield x
+ yield from self._iter_owners_low_mem(path_iter)
return
else:
for cpv, p in owners:
@@ -1581,7 +1584,7 @@ class vardbapi(dbapi):
def _iter_owners_low_mem(self, path_list):
"""
- This implemention will make a short-lived dblink instance (and
+ This implementation will make a short-lived dblink instance (and
parse CONTENTS) for every single installed package. This is
slower but uses less memory than the method which uses the
basename cache.
@@ -1627,8 +1630,7 @@ class vardbapi(dbapi):
search_future = event_loop.create_future()
event_loop.call_soon(search_pkg, cpv, search_future)
event_loop.run_until_complete(search_future)
- for result in search_future.result():
- yield result
+ yield from search_future.result()
class vartree:
@@ -1637,7 +1639,6 @@ class vartree:
def __init__(
self, root=None, virtual=DeprecationWarning, categories=None, settings=None
):
-
if settings is None:
settings = portage.settings
@@ -1784,6 +1785,7 @@ class dblink:
blockers=None,
scheduler=None,
pipe=None,
+ mtime_pipe=None,
):
"""
Creates a DBlink object for a given CPV.
@@ -1840,6 +1842,7 @@ class dblink:
self._device_path_map = {}
self._hardlink_merge_map = {}
self._hash_key = (self._eroot, self.mycpv)
+ self._mtime_pipe = mtime_pipe
self._protect_obj = None
self._pipe = pipe
self._postinst_failure = False
@@ -1857,7 +1860,6 @@ class dblink:
return isinstance(other, dblink) and self._hash_key == other._hash_key
def _get_protect_obj(self):
-
if self._protect_obj is None:
self._protect_obj = ConfigProtect(
self._eroot,
@@ -1912,7 +1914,7 @@ class dblink:
(slot,) = db.aux_get(self.mycpv, ["SLOT"])
slot = slot.partition("/")[0]
- slot_atoms.append(portage.dep.Atom("%s:%s" % (self.mycpv.cp, slot)))
+ slot_atoms.append(portage.dep.Atom(f"{self.mycpv.cp}:{slot}"))
for blocker in self._blockers or []:
slot_atoms.append(blocker.slot_atom)
@@ -2007,16 +2009,15 @@ class dblink:
contents_file = os.path.join(self.dbdir, "CONTENTS")
pkgfiles = {}
try:
- with io.open(
+ with open(
_unicode_encode(
contents_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
mylines = f.readlines()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -2107,7 +2108,11 @@ class dblink:
return pkgfiles
def quickpkg(
- self, output_file, include_config=False, include_unmodified_config=False
+ self,
+ output_file,
+ metadata=None,
+ include_config=False,
+ include_unmodified_config=False,
):
"""
Create a tar file appropriate for use by quickpkg.
@@ -2130,6 +2135,9 @@ class dblink:
contents = self.getcontents()
excluded_config_files = []
protect = None
+ binpkg_format = settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
if not include_config:
confprot = ConfigProtect(
@@ -2152,16 +2160,26 @@ class dblink:
excluded_config_files.append(filename)
return True
- # The tarfile module will write pax headers holding the
- # xattrs only if PAX_FORMAT is specified here.
- with tarfile.open(
- fileobj=output_file,
- mode="w|",
- format=tarfile.PAX_FORMAT if xattrs else tarfile.DEFAULT_FORMAT,
- ) as tar:
- tar_contents(
- contents, settings["ROOT"], tar, protect=protect, xattrs=xattrs
- )
+ if binpkg_format == "xpak":
+ # The tarfile module will write pax headers holding the
+ # xattrs only if PAX_FORMAT is specified here.
+ with tarfile.open(
+ fileobj=(
+ output_file
+ if hasattr(output_file, "write")
+ else open(output_file.fileno(), mode="wb", closefd=False)
+ ),
+ mode="w|",
+ format=tarfile.PAX_FORMAT if xattrs else tarfile.DEFAULT_FORMAT,
+ ) as tar:
+ tar_contents(
+ contents, settings["ROOT"], tar, protect=protect, xattrs=xattrs
+ )
+ elif binpkg_format == "gpkg":
+ gpkg_file = portage.gpkg.gpkg(settings, cpv, output_file)
+ gpkg_file._quickpkg(contents, metadata, settings["ROOT"], protect=protect)
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
return excluded_config_files
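With the format switch above, quickpkg() now either streams a tar (xpak) or hands a path to portage.gpkg. A hedged sketch assuming BINPKG_FORMAT=gpkg is set; the category, package, and output path are illustrative:

    import portage
    from portage.dbapi.vartree import dblink

    settings = portage.settings
    vartree = portage.db[settings["EROOT"]]["vartree"]
    link = dblink("app-misc", "foo-1.0", settings=settings,
                  treetype="vartree", vartree=vartree)
    excluded = link.quickpkg("/tmp/foo-1.0.gpkg.tar", include_config=False)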
@@ -2321,7 +2339,7 @@ class dblink:
if others_in_slot is None:
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)
+ f"{portage.cpv_getkey(self.mycpv)}:{slot}"
)
others_in_slot = []
for cur_cpv in slot_matches:
@@ -2421,9 +2439,7 @@ class dblink:
level=logging.ERROR,
noiselevel=-1,
)
- showMessage(
- "%s\n" % (eapi_unsupported,), level=logging.ERROR, noiselevel=-1
- )
+ showMessage(f"{eapi_unsupported}\n", level=logging.ERROR, noiselevel=-1)
elif os.path.isfile(myebuildpath):
phase = EbuildPhase(
background=background,
@@ -2545,7 +2561,6 @@ class dblink:
scheduler.run_until_complete(builddir_lock.async_unlock())
if log_path is not None:
-
if not failures and "unmerge-logs" not in self.settings.features:
try:
os.unlink(log_path)
@@ -2573,14 +2588,17 @@ class dblink:
else:
self.settings.pop("PORTAGE_LOG_FILE", None)
- env_update(
- target_root=self.settings["ROOT"],
- prev_mtimes=ldpath_mtimes,
- contents=contents,
- env=self.settings,
- writemsg_level=self._display_merge,
- vardbapi=self.vartree.dbapi,
- )
+ # If we didn't unmerge anything, don't bother updating env.
+ if contents:
+ env_update(
+ target_root=self.settings["ROOT"],
+ prev_mtimes=ldpath_mtimes,
+ contents=contents,
+ env=self.settings,
+ writemsg_level=self._display_merge,
+ vardbapi=self.vartree.dbapi,
+ )
+ self._send_mtimes(ldpath_mtimes)
unmerge_with_replacement = preserve_paths is not None
if not unmerge_with_replacement:
@@ -2617,9 +2635,7 @@ class dblink:
)
def _show_unmerge(self, zing, desc, file_type, file_name):
- self._display_merge(
- "%s %s %s %s\n" % (zing, desc.ljust(8), file_type, file_name)
- )
+ self._display_merge(f"{zing} {desc.ljust(8)} {file_type} {file_name}\n")
def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
"""
@@ -2641,15 +2657,11 @@ class dblink:
ignored_unlink_errnos = self._ignored_unlink_errnos
ignored_rmdir_errnos = self._ignored_rmdir_errnos
- if not pkgfiles:
- showMessage(_("No package files given... Grabbing a set.\n"))
- pkgfiles = self.getcontents()
-
if others_in_slot is None:
others_in_slot = []
slot = self.vartree.dbapi._pkg_str(self.mycpv, None).slot
slot_matches = self.vartree.dbapi.match(
- "%s:%s" % (portage.cpv_getkey(self.mycpv), slot)
+ f"{portage.cpv_getkey(self.mycpv)}:{slot}"
)
for cur_cpv in slot_matches:
if cur_cpv == self.mycpv:
@@ -2672,6 +2684,7 @@ class dblink:
unmerge_orphans = "unmerge-orphans" in self.settings.features
calc_prelink = "prelink-checksums" in self.settings.features
+ pkgfiles = pkgfiles if pkgfiles else self.getcontents()
if pkgfiles:
self.updateprotect()
mykeys = list(pkgfiles)
@@ -2712,10 +2725,9 @@ class dblink:
# administrative and pkg_postinst stuff.
self._eerror(
"postrm",
- ["Could not chmod or unlink '%s': %s" % (file_name, ose)],
+ [f"Could not chmod or unlink '{file_name}': {ose}"],
)
else:
-
# Even though the file no longer exists, we log it
# here so that _unmerge_dirs can see that we've
# removed a file from this device, and will record
@@ -2763,7 +2775,6 @@ class dblink:
infodirs_inodes.add((statobj.st_dev, statobj.st_ino))
for i, objkey in enumerate(mykeys):
-
obj = normalize_path(objkey)
if os is _os_merge:
try:
@@ -2879,7 +2890,7 @@ class dblink:
):
try:
unlink(obj, lstatobj)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -2915,7 +2926,6 @@ class dblink:
and stat.S_ISDIR(statobj.st_mode)
and obj.startswith(real_root)
):
-
relative_path = obj[real_root_len:]
try:
target_dir_contents = os.listdir(obj)
@@ -2968,7 +2978,7 @@ class dblink:
try:
unlink(obj, lstatobj)
show_unmerge("<<<", "", file_type, obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -2992,7 +3002,7 @@ class dblink:
continue
try:
unlink(obj, lstatobj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3036,7 +3046,7 @@ class dblink:
flat_list.update(*protected_symlinks.values())
flat_list = sorted(flat_list)
for f in flat_list:
- lines.append("\t%s" % (os.path.join(real_root, f.lstrip(os.sep))))
+ lines.append(f"\t{os.path.join(real_root, f.lstrip(os.sep))}")
lines.append("")
self._elog("elog", "postrm", lines)
@@ -3058,7 +3068,6 @@ class dblink:
unlink,
os,
):
-
real_root = self.settings["ROOT"]
show_unmerge = self._show_unmerge
ignored_unlink_errnos = self._ignored_unlink_errnos
@@ -3081,7 +3090,7 @@ class dblink:
msg.append("")
for f in flat_list:
- msg.append("\t%s" % os.path.join(real_root, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(real_root, f.lstrip(os.path.sep))}")
msg.append("")
msg.append("Use the UNINSTALL_IGNORE variable to exempt specific symlinks")
@@ -3133,7 +3142,7 @@ class dblink:
try:
unlink(obj, os.lstat(obj))
show_unmerge("<<<", "", "sym", obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3148,7 +3157,6 @@ class dblink:
def _unmerge_dirs(
self, dirs, infodirs_inodes, protected_symlinks, unmerge_desc, unlink, os
):
-
show_unmerge = self._show_unmerge
infodir_cleanup = self._infodir_cleanup
ignored_unlink_errnos = self._ignored_unlink_errnos
@@ -3184,7 +3192,7 @@ class dblink:
if stat.S_ISREG(lstatobj.st_mode):
unlink(child, lstatobj)
show_unmerge("<<<", "", "obj", child)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3218,7 +3226,7 @@ class dblink:
self._merged_path(os.path.realpath(parent_name), parent_stat)
show_unmerge("<<<", "", "dir", obj)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno not in ignored_rmdir_errnos:
raise
if e.errno != errno.ENOENT:
@@ -3250,7 +3258,7 @@ class dblink:
try:
unlink(obj, os.lstat(obj))
show_unmerge("<<<", "", "sym", obj)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno not in ignored_unlink_errnos:
raise
del e
@@ -3367,7 +3375,6 @@ class dblink:
if self.getcontents():
basename = os_filename_arg.path.basename(destfile)
if self._contents_basenames is None:
-
try:
for x in self._contents.keys():
_unicode_encode(
@@ -3387,9 +3394,9 @@ class dblink:
else:
os = portage.os
- self._contents_basenames = set(
+ self._contents_basenames = {
os.path.basename(x) for x in self._contents.keys()
- )
+ }
if basename not in self._contents_basenames:
# This is a shortcut that, in most cases, allows us to
# eliminate this package as an owner without the need
@@ -3401,13 +3408,12 @@ class dblink:
parent_path = os_filename_arg.path.dirname(destfile)
try:
parent_stat = os_filename_arg.stat(parent_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
return False
if self._contents_inodes is None:
-
if os is _os_merge:
try:
for x in self._contents.keys():
@@ -3534,7 +3540,6 @@ class dblink:
provider_nodes = set()
# Create provider nodes and add them to the graph.
for f_abs in old_contents:
-
if os is _os_merge:
try:
_unicode_encode(
@@ -3858,7 +3863,6 @@ class dblink:
self.vartree.dbapi._plib_registry.pruneNonExisting()
def _collision_protect(self, srcroot, destroot, mypkglist, file_list, symlink_list):
-
os = _os_merge
real_relative_paths = {}
@@ -3942,7 +3946,7 @@ class dblink:
try:
dest_lstat = os.lstat(dest_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
del e
continue
@@ -3957,14 +3961,14 @@ class dblink:
try:
dest_lstat = os.lstat(parent_path)
break
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOTDIR:
raise
del e
if not dest_lstat:
raise AssertionError(
"unable to find non-directory "
- + "parent for '%s'" % dest_path
+ + f"parent for '{dest_path}'"
)
dest_path = parent_path
f = os.path.sep + dest_path[len(destroot) :]
@@ -4086,7 +4090,6 @@ class dblink:
inode_map = {}
real_paths = set()
for i, path in enumerate(file_paths):
-
if os is _os_merge:
try:
_unicode_encode(path, encoding=_encodings["merge"], errors="strict")
@@ -4134,7 +4137,7 @@ class dblink:
msg.append("")
for path_list in suspicious_hardlinks:
for path, s in path_list:
- msg.append("\t%s" % path)
+ msg.append(f"\t{path}")
msg.append("")
msg.append(
_("See the Gentoo Security Handbook " "guide for advice on how to proceed.")
@@ -4147,6 +4150,9 @@ class dblink:
def _eqawarn(self, phase, lines):
self._elog("eqawarn", phase, lines)
+ def _ewarn(self, phase, lines):
+ self._elog("ewarn", phase, lines)
+
def _eerror(self, phase, lines):
self._elog("eerror", phase, lines)
@@ -4201,11 +4207,17 @@ class dblink:
if str_buffer:
str_buffer = _unicode_encode("".join(str_buffer))
while str_buffer:
- str_buffer = str_buffer[os.write(self._pipe, str_buffer) :]
+ str_buffer = str_buffer[os.write(self._pipe.fileno(), str_buffer) :]
def _emerge_log(self, msg):
emergelog(False, msg)
+ def _send_mtimes(self, mtimes):
+ if self._mtime_pipe is None:
+ return
+
+ self._mtime_pipe.send(mtimes)
+
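A minimal sketch of the pattern _send_mtimes relies on, assuming _mtime_pipe is the write end of a multiprocessing.Pipe handed to the dblink by its parent merge process (the names below are illustrative, not part of the patch):

    import multiprocessing

    def child(mtime_conn):
        # The merging child reports updated ldpath mtimes back to its
        # parent instead of mutating shared state directly.
        mtimes = {"/usr/lib64": 1700000000}
        if mtime_conn is not None:
            mtime_conn.send(mtimes)
        mtime_conn.close()

    if __name__ == "__main__":
        recv_conn, send_conn = multiprocessing.Pipe(duplex=False)
        proc = multiprocessing.Process(target=child, args=(send_conn,))
        proc.start()
        send_conn.close()  # parent keeps only the read end
        print(recv_conn.recv())  # {'/usr/lib64': 1700000000}
        proc.join()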
def treewalk(
self,
srcroot,
@@ -4294,18 +4306,17 @@ class dblink:
slot = ""
for var_name in ("CHOST", "SLOT"):
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(inforoot, var_name),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
val = f.readline().strip()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -4342,6 +4353,9 @@ class dblink:
],
)
+ def ewarn(lines):
+ self._ewarn("preinst", lines)
+
def eerror(lines):
self._eerror("preinst", lines)
@@ -4353,7 +4367,7 @@ class dblink:
# Use _pkg_str to discard the sub-slot part if necessary.
slot = _pkg_str(self.mycpv, slot=slot).slot
cp = self.mysplit[0]
- slot_atom = "%s:%s" % (cp, slot)
+ slot_atom = f"{cp}:{slot}"
self.lockdb()
try:
@@ -4434,18 +4448,17 @@ class dblink:
phase.start()
phase.wait()
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(inforoot, "INSTALL_MASK"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
install_mask = InstallMask(f.read())
- except EnvironmentError:
+ except OSError:
install_mask = None
if install_mask:
@@ -4466,7 +4479,6 @@ class dblink:
eprefix_len = len(self.settings["EPREFIX"])
while True:
-
unicode_error = False
eagain_error = False
@@ -4490,6 +4502,10 @@ class dblink:
eagain_error = True
break
+ if portage.utf8_mode:
+ parent = os.fsencode(parent)
+ dirs = [os.fsencode(value) for value in dirs]
+ files = [os.fsencode(value) for value in files]
try:
parent = _unicode_decode(
parent, encoding=_encodings["merge"], errors="strict"
@@ -4654,7 +4670,7 @@ class dblink:
for blocker in self._blockers or []:
blocker = self.vartree.dbapi._dblink(blocker.cpv)
# It may have been unmerged before lock(s)
- # were aquired.
+ # were acquired.
if blocker.exists():
blockers.append(blocker)
@@ -4682,7 +4698,7 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for f in rofilesystems:
- msg.append("\t%s" % f)
+ msg.append(f"\t{f}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4711,15 +4727,15 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for k, v in sorted(internal_collisions.items(), key=operator.itemgetter(0)):
- msg.append("\t%s" % os.path.join(destroot, k.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, k.lstrip(os.path.sep))}")
for (file1, file2), differences in sorted(v.items()):
msg.append(
- "\t\t%s" % os.path.join(destroot, file1.lstrip(os.path.sep))
+ f"\t\t{os.path.join(destroot, file1.lstrip(os.path.sep))}"
)
msg.append(
- "\t\t%s" % os.path.join(destroot, file2.lstrip(os.path.sep))
+ f"\t\t{os.path.join(destroot, file2.lstrip(os.path.sep))}"
)
- msg.append("\t\t\tDifferences: %s" % ", ".join(differences))
+ msg.append(f"\t\t\tDifferences: {', '.join(differences)}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4748,7 +4764,7 @@ class dblink:
msg = textwrap.wrap(msg, 70)
msg.append("")
for f in symlink_collisions:
- msg.append("\t%s" % os.path.join(destroot, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}")
msg.append("")
self._elog("eerror", "preinst", msg)
@@ -4801,7 +4817,7 @@ class dblink:
msg.append("")
for f in collisions:
- msg.append("\t%s" % os.path.join(destroot, f.lstrip(os.path.sep)))
+ msg.append(f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}")
eerror(msg)
@@ -4830,26 +4846,28 @@ class dblink:
for pkg in owners:
pkg = self.vartree.dbapi._pkg_str(pkg.mycpv, None)
- pkg_info_str = "%s%s%s" % (pkg, _slot_separator, pkg.slot)
+ pkg_info_str = f"{pkg}{_slot_separator}{pkg.slot}"
if pkg.repo != _unknown_repo:
- pkg_info_str += "%s%s" % (_repo_separator, pkg.repo)
+ pkg_info_str += f"{_repo_separator}{pkg.repo}"
pkg_info_strs[pkg] = pkg_info_str
finally:
self.unlockdb()
+ collision_message_type = ewarn
+ if collision_protect or (protect_owned and owners):
+ collision_message_type = eerror
+
for pkg, owned_files in owners.items():
msg = []
msg.append(pkg_info_strs[pkg.mycpv])
for f in sorted(owned_files):
- msg.append(
- "\t%s" % os.path.join(destroot, f.lstrip(os.path.sep))
- )
+ msg.append(f"\t{os.path.join(destroot, f.lstrip(os.path.sep))}")
msg.append("")
- eerror(msg)
+ collision_message_type(msg)
if not owners:
- eerror(
+ collision_message_type(
[_("None of the installed" " packages claim the file(s)."), ""]
)
@@ -4889,10 +4907,12 @@ class dblink:
" If necessary, refer to your elog "
"messages for the whole content of the above message."
)
- eerror(wrap(msg, 70))
if abort:
+ eerror(wrap(msg, 70))
return 1
+ else:
+ ewarn(wrap(msg, 70))
# The merge process may move files out of the image directory,
# which causes invalidation of the .installed flag.
@@ -4920,7 +4940,7 @@ class dblink:
rval = self._pre_merge_backup(self._installed_instance, downgrade)
if rval != os.EX_OK:
showMessage(
- _("!!! FAILED preinst: ") + "quickpkg: %s\n" % rval,
+ _("!!! FAILED preinst: ") + f"quickpkg: {rval}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -4955,8 +4975,8 @@ class dblink:
# write local package counter for recording
if counter is None:
- counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
- with io.open(
+ counter = self.vartree.dbapi.counter_tick()
+ with open(
_unicode_encode(
os.path.join(self.dbtmpdir, "COUNTER"),
encoding=_encodings["fs"],
@@ -4966,7 +4986,7 @@ class dblink:
encoding=_encodings["repo.content"],
errors="backslashreplace",
) as f:
- f.write("%s" % counter)
+ f.write(f"{counter}")
self.updateprotect()
@@ -5051,21 +5071,18 @@ class dblink:
emerge_log = self._emerge_log
- # If we have any preserved libraries then autoclean
- # is forced so that preserve-libs logic doesn't have
+ # We always autoclean now for the current package case, for simplicity.
+ # If it were conditional, we would still have to force it whenever there
+ # are preserved libs, so that the preserve-libs logic doesn't have
# to account for the additional complexity of the
# AUTOCLEAN=no mode.
- autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes" or preserve_paths
-
- if autoclean:
- emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
+ emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
others_in_slot.append(self) # self has just been merged
for dblnk in list(others_in_slot):
if dblnk is self:
continue
- if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
- continue
+
showMessage(_(">>> Safely unmerging already-installed instance...\n"))
emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
others_in_slot.remove(dblnk) # dblnk will unmerge itself now
@@ -5095,17 +5112,6 @@ class dblink:
self.unlockdb()
showMessage(_(">>> Original instance of package unmerged safely.\n"))
- if len(others_in_slot) > 1:
- showMessage(
- colorize("WARN", _("WARNING:"))
- + _(
- " AUTOCLEAN is disabled. This can cause serious"
- " problems due to overlapping packages.\n"
- ),
- level=logging.WARN,
- noiselevel=-1,
- )
-
# We hold both directory locks.
self.dbdir = self.dbpkgdir
self.lockdb()
@@ -5125,14 +5131,17 @@ class dblink:
self._clear_contents_cache()
contents = self.getcontents()
destroot_len = len(destroot) - 1
- self.lockdb()
- try:
- for blocker in blockers:
- self.vartree.dbapi.removeFromContents(
- blocker, iter(contents), relative_paths=False
- )
- finally:
- self.unlockdb()
+
+ # Avoid lock contention if we aren't going to do any work.
+ if blockers:
+ self.lockdb()
+ try:
+ for blocker in blockers:
+ self.vartree.dbapi.removeFromContents(
+ blocker, iter(contents), relative_paths=False
+ )
+ finally:
+ self.unlockdb()
plib_registry = self.vartree.dbapi._plib_registry
if plib_registry:
@@ -5237,15 +5246,18 @@ class dblink:
],
)
- # update environment settings, library paths. DO NOT change symlinks.
- env_update(
- target_root=self.settings["ROOT"],
- prev_mtimes=prev_mtimes,
- contents=contents,
- env=self.settings,
- writemsg_level=self._display_merge,
- vardbapi=self.vartree.dbapi,
- )
+ # Update environment settings, library paths. DO NOT change symlinks.
+ # Only do this if we actually installed something.
+ if contents:
+ env_update(
+ target_root=self.settings["ROOT"],
+ prev_mtimes=prev_mtimes,
+ contents=contents,
+ env=self.settings,
+ writemsg_level=self._display_merge,
+ vardbapi=self.vartree.dbapi,
+ )
+ self._send_mtimes(prev_mtimes)
# For gcc upgrades, preserved libs have to be removed after the
# the library path has been updated.
@@ -5275,18 +5287,22 @@ class dblink:
return backup_p
def _merge_contents(self, srcroot, destroot, cfgfiledict):
-
cfgfiledict_orig = cfgfiledict.copy()
# open CONTENTS file (possibly overwriting old one) for recording
# Use atomic_ofstream for automatic coercion of raw bytes to
# unicode, in order to prevent TypeError when writing raw bytes
# to TextIOWrapper with python2.
+ contents_tmp_path = os.path.join(self.dbtmpdir, "CONTENTS")
outfile = atomic_ofstream(
- _unicode_encode(
- os.path.join(self.dbtmpdir, "CONTENTS"),
- encoding=_encodings["fs"],
- errors="strict",
+ (
+ contents_tmp_path
+ if portage.utf8_mode
+ else _unicode_encode(
+ contents_tmp_path,
+ encoding=_encodings["fs"],
+ errors="strict",
+ )
),
mode="w",
encoding=_encodings["repo.content"],
@@ -5386,7 +5402,7 @@ class dblink:
@param secondhand: A set of items to merge in pass two (usually
or symlinks that point to non-existing files that may get merged later)
@type secondhand: List
- @param stufftomerge: Either a diretory to merge, or a list of items.
+ @param stufftomerge: Either a directory to merge, or a list of items.
@type stufftomerge: String or List
@param cfgfiledict: { File:mtime } mapping for config_protected files
@type cfgfiledict: Dictionary
@@ -5426,7 +5442,6 @@ class dblink:
mergelist = stufftomerge[:]
while mergelist:
-
relative_path = mergelist.pop()
mysrc = join(srcroot, relative_path)
mydest = join(destroot, relative_path)
@@ -5532,6 +5547,8 @@ class dblink:
destmd5,
mydest_link,
)
+ if protected and moveme:
+ mydmode = None
zing = "!!!"
if not moveme:
@@ -5564,17 +5581,18 @@ class dblink:
msg.append(
_("Installation of a symlink is blocked by a directory:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(
_("This symlink will be merged with a different name:")
)
- msg.append(" '%s'" % newdest)
+ msg.append(f" '{newdest}'")
msg.append("")
self._eerror("preinst", msg)
mydest = newdest
+ mydmode = None
# if secondhand is None it means we're operating in "force" mode and should not create a second hand.
- if (secondhand != None) and (not os.path.exists(myrealto)):
+ if (secondhand is not None) and (not os.path.exists(myrealto)):
# either the target directory doesn't exist yet or the target file doesn't exist -- or
# the target is a broken symlink. We will add this file to our "second hand" and merge
# it later.
@@ -5597,7 +5615,7 @@ class dblink:
except OSError:
pass
- if mymtime != None:
+ if mymtime is not None:
# Use lexists, since if the target happens to be a broken
# symlink then that should trigger an independent warning.
if not (
@@ -5614,7 +5632,7 @@ class dblink:
],
)
- showMessage("%s %s -> %s\n" % (zing, mydest, myto))
+ showMessage(f"{zing} {mydest} -> {myto}\n")
outfile.write(
self._format_contents_line(
node_type="sym",
@@ -5630,14 +5648,14 @@ class dblink:
noiselevel=-1,
)
showMessage(
- "!!! %s -> %s\n" % (mydest, myto),
+ f"!!! {mydest} -> {myto}\n",
level=logging.ERROR,
noiselevel=-1,
)
return 1
elif stat.S_ISDIR(mymode):
# we are merging a directory
- if mydmode != None:
+ if mydmode is not None:
# destination exists
if bsd_chflags:
@@ -5679,7 +5697,7 @@ class dblink:
stat.S_ISLNK(mydmode) and os.path.isdir(mydest)
):
# a symlink to an existing directory will work for us; keep it:
- showMessage("--- %s/\n" % mydest)
+ showMessage(f"--- {mydest}/\n")
if bsd_chflags:
bsd_chflags.lchflags(mydest, dflags)
else:
@@ -5690,9 +5708,9 @@ class dblink:
msg.append(
_("Installation of a directory is blocked by a file:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(_("This file will be renamed to a different name:"))
- msg.append(" '%s'" % backup_dest)
+ msg.append(f" '{backup_dest}'")
msg.append("")
self._eerror("preinst", msg)
if (
@@ -5732,7 +5750,7 @@ class dblink:
bsd_chflags.lchflags(mydest, dflags)
os.chmod(mydest, mystat[0])
os.chown(mydest, mystat[4], mystat[5])
- showMessage(">>> %s/\n" % mydest)
+ showMessage(f">>> {mydest}/\n")
else:
try:
# destination doesn't exist
@@ -5753,7 +5771,7 @@ class dblink:
del e
os.chmod(mydest, mystat[0])
os.chown(mydest, mystat[4], mystat[5])
- showMessage(">>> %s/\n" % mydest)
+ showMessage(f">>> {mydest}/\n")
try:
self._merged_path(mydest, os.lstat(mydest))
@@ -5779,45 +5797,56 @@ class dblink:
msg.append(
_("Installation of a regular file is blocked by a directory:")
)
- msg.append(" '%s'" % mydest)
+ msg.append(f" '{mydest}'")
msg.append(_("This file will be merged with a different name:"))
- msg.append(" '%s'" % newdest)
+ msg.append(f" '{newdest}'")
msg.append("")
self._eerror("preinst", msg)
mydest = newdest
+ mydmode = None
# whether config protection or not, we merge the new file the
# same way. Unless moveme=0 (blocking directory)
if moveme:
- # Create hardlinks only for source files that already exist
- # as hardlinks (having identical st_dev and st_ino).
- hardlink_key = (mystat.st_dev, mystat.st_ino)
-
- hardlink_candidates = self._hardlink_merge_map.get(hardlink_key)
- if hardlink_candidates is None:
- hardlink_candidates = []
- self._hardlink_merge_map[hardlink_key] = hardlink_candidates
-
- mymtime = movefile(
- mysrc,
- mydest,
- newmtime=thismtime,
- sstat=mystat,
- mysettings=self.settings,
- hardlink_candidates=hardlink_candidates,
- encoding=_encodings["merge"],
- )
- if mymtime is None:
- return 1
- hardlink_candidates.append(mydest)
- zing = ">>>"
+ # only replace the existing file if it differs, see #722270
+ if self._needs_move(mysrc, mydest, mymode, mydmode):
+ # Create hardlinks only for source files that already exist
+ # as hardlinks (having identical st_dev and st_ino).
+ hardlink_key = (mystat.st_dev, mystat.st_ino)
+
+ hardlink_candidates = self._hardlink_merge_map.get(hardlink_key)
+ if hardlink_candidates is None:
+ hardlink_candidates = []
+ self._hardlink_merge_map[hardlink_key] = hardlink_candidates
+
+ mymtime = movefile(
+ mysrc,
+ mydest,
+ newmtime=thismtime,
+ sstat=mystat,
+ mysettings=self.settings,
+ hardlink_candidates=hardlink_candidates,
+ encoding=_encodings["merge"],
+ )
+ if mymtime is None:
+ return 1
+ hardlink_candidates.append(mydest)
+ zing = ">>>"
+ else:
+ mymtime = thismtime if thismtime is not None else mystat.st_mtime_ns
+ try:
+ os.utime(mydest, ns=(mymtime, mymtime))
+ except OSError:
+ # utime can fail here with EPERM
+ pass
+ zing = "==="
try:
self._merged_path(mydest, os.lstat(mydest))
except OSError:
pass
- if mymtime != None:
+ if mymtime is not None:
outfile.write(
self._format_contents_line(
node_type="obj",
@@ -5826,7 +5855,7 @@ class dblink:
mtime_ns=mymtime,
)
)
- showMessage("%s %s\n" % (zing, mydest))
+ showMessage(f"{zing} {mydest}\n")
else:
# we are merging a fifo or device node
zing = "!!!"
@@ -5874,7 +5903,6 @@ class dblink:
dest_md5,
dest_link,
):
-
move_me = True
protected = True
force = False
@@ -5940,10 +5968,10 @@ class dblink:
if md5_digest is not None:
fields.append(md5_digest)
elif symlink_target is not None:
- fields.append("-> {}".format(symlink_target))
+ fields.append(f"-> {symlink_target}")
if mtime_ns is not None:
fields.append(str(mtime_ns // 1000000000))
- return "{}\n".format(" ".join(fields))
+ return f"{' '.join(fields)}\n"
def _merged_path(self, path, lstatobj, exists=True):
previous_path = self._device_path_map.get(lstatobj.st_dev)
@@ -5971,7 +5999,6 @@ class dblink:
returncode = None
if platform.system() == "Linux":
-
paths = []
for path in self._device_path_map.values():
if path is not False:
@@ -5987,7 +6014,7 @@ class dblink:
if returncode is None or returncode != os.EX_OK:
try:
proc = subprocess.Popen(["sync"])
- except EnvironmentError:
+ except OSError:
pass
else:
proc.wait()
@@ -6031,7 +6058,6 @@ class dblink:
# fail-clean is enabled, and the success/die hooks have
# already been called by EbuildPhase.
if os.path.isdir(self.settings["PORTAGE_BUILDDIR"]):
-
if retval == os.EX_OK:
phase = "success_hooks"
else:
@@ -6047,8 +6073,11 @@ class dblink:
ebuild_phase.wait()
self._elog_process()
- if "noclean" not in self.settings.features and (
- retval == os.EX_OK or "fail-clean" in self.settings.features
+ # Keep the build dir around if postinst fails (bug #704866)
+ if (
+ not self._postinst_failure
+ and "noclean" not in self.settings.features
+ and (retval == os.EX_OK or "fail-clean" in self.settings.features)
):
if myebuild is None:
myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
@@ -6084,13 +6113,12 @@ class dblink:
"returns contents of a file with whitespace converted to spaces"
if not os.path.exists(self.dbdir + "/" + name):
return ""
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, name),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6103,13 +6131,12 @@ class dblink:
def getfile(self, fname):
if not os.path.exists(self.dbdir + "/" + fname):
return ""
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, fname),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6127,13 +6154,12 @@ class dblink:
def getelements(self, ename):
if not os.path.exists(self.dbdir + "/" + ename):
return []
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, ename),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -6145,7 +6171,7 @@ class dblink:
return myreturn
def setelements(self, mylist, ename):
- with io.open(
+ with open(
_unicode_encode(
os.path.join(self.dbdir, ename),
encoding=_encodings["fs"],
@@ -6156,14 +6182,13 @@ class dblink:
errors="backslashreplace",
) as f:
for x in mylist:
- f.write("%s\n" % x)
+ f.write(f"{x}\n")
def isregular(self):
"Is this a regular package (does it have a CATEGORY file? A dblink can be virtual *and* regular)"
return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
def _pre_merge_backup(self, backup_dblink, downgrade):
-
if "unmerge-backup" in self.settings.features or (
downgrade and "downgrade-backup" in self.settings.features
):
@@ -6172,7 +6197,6 @@ class dblink:
return os.EX_OK
def _pre_unmerge_backup(self, background):
-
if "unmerge-backup" in self.settings.features:
logfile = None
if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
@@ -6182,7 +6206,6 @@ class dblink:
return os.EX_OK
def _quickpkg_dblink(self, backup_dblink, background, logfile):
-
build_time = backup_dblink.getfile("BUILD_TIME")
try:
build_time = int(build_time.strip())
@@ -6192,13 +6215,12 @@ class dblink:
trees = QueryCommand.get_db()[self.settings["EROOT"]]
bintree = trees["bintree"]
- for binpkg in reversed(bintree.dbapi.match("={}".format(backup_dblink.mycpv))):
+ for binpkg in reversed(bintree.dbapi.match(f"={backup_dblink.mycpv}")):
if binpkg.build_time == build_time:
return os.EX_OK
self.lockdb()
try:
-
if not backup_dblink.exists():
# It got unmerged by a concurrent process.
return os.EX_OK
@@ -6232,7 +6254,7 @@ class dblink:
args=[
portage._python_interpreter,
quickpkg_binary,
- "=%s" % (backup_dblink.mycpv,),
+ f"={backup_dblink.mycpv}",
],
background=background,
env=env,
@@ -6246,6 +6268,39 @@ class dblink:
finally:
self.unlockdb()
+ def _needs_move(self, mysrc, mydest, mymode, mydmode):
+ """
+ Checks whether the given file at |mysrc| needs to be moved to |mydest| or if
+ they are identical.
+
+ Takes file mode and extended attributes into account.
+ Should only be used for regular files.
+ """
+ if mydmode is None or not stat.S_ISREG(mydmode) or mymode != mydmode:
+ return True
+
+ src_bytes = _unicode_encode(mysrc, encoding=_encodings["fs"], errors="strict")
+ dest_bytes = _unicode_encode(mydest, encoding=_encodings["fs"], errors="strict")
+
+ if "xattr" in self.settings.features:
+ excluded_xattrs = self.settings.get("PORTAGE_XATTR_EXCLUDE", "")
+ if not _cmpxattr(src_bytes, dest_bytes, exclude=excluded_xattrs):
+ return True
+
+ try:
+ files_equal = filecmp.cmp(src_bytes, dest_bytes, shallow=False)
+ except Exception as e:
+ writemsg(
+ _(
+ "Exception '%s' happened when comparing files %s and %s, will replace the latter\n"
+ )
+ % (e, mysrc, mydest),
+ noiselevel=-1,
+ )
+ return True
+
+ return not files_equal
+
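A self-contained sketch of the comparison _needs_move performs, minus the xattr check (the file names are placeholders):

    import filecmp
    import os
    import stat

    def needs_move(src, dest):
        # Replace dest only if it differs from src in type, mode, or content.
        try:
            dest_mode = os.lstat(dest).st_mode
        except FileNotFoundError:
            return True
        src_mode = os.lstat(src).st_mode
        if not stat.S_ISREG(dest_mode) or src_mode != dest_mode:
            return True
        # shallow=False forces a byte-for-byte comparison.
        return not filecmp.cmp(src, dest, shallow=False)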
def merge(
mycat,
@@ -6359,12 +6414,12 @@ def write_contents(contents, root, f):
relative_filename = filename[root_len:]
if entry_type == "obj":
entry_type, mtime, md5sum = entry_data
- line = "%s %s %s %s\n" % (entry_type, relative_filename, md5sum, mtime)
+ line = f"{entry_type} {relative_filename} {md5sum} {mtime}\n"
elif entry_type == "sym":
entry_type, mtime, link = entry_data
- line = "%s %s -> %s %s\n" % (entry_type, relative_filename, link, mtime)
+ line = f"{entry_type} {relative_filename} -> {link} {mtime}\n"
else: # dir, dev, fif
- line = "%s %s\n" % (entry_type, relative_filename)
+ line = f"{entry_type} {relative_filename}\n"
f.write(line)
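For reference, the three entry shapes written above look like this in a CONTENTS file (paths and values illustrative):

    obj /usr/bin/foo d41d8cd98f00b204e9800998ecf8427e 1700000000
    sym /usr/lib/libbar.so -> libbar.so.1 1700000000
    dir /usr/share/doc/foo-1.0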
@@ -6412,7 +6467,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None, xattrs=Fals
if path.startswith(root):
arcname = "./" + path[len(root) :]
else:
- raise ValueError("invalid root argument: '%s'" % root)
+ raise ValueError(f"invalid root argument: '{root}'")
live_path = path
if (
"dir" == contents_type
@@ -6495,9 +6550,9 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None, xattrs=Fals
# Compatible with GNU tar, which saves the xattrs
# under the SCHILY.xattr namespace.
for k in xattr.list(path_bytes):
- tarinfo.pax_headers[
- "SCHILY.xattr." + _unicode_decode(k)
- ] = _unicode_decode(xattr.get(path_bytes, _unicode_encode(k)))
+ tarinfo.pax_headers["SCHILY.xattr." + _unicode_decode(k)] = (
+ _unicode_decode(xattr.get(path_bytes, _unicode_encode(k)))
+ )
with open(path_bytes, "rb") as f:
tar.addfile(tarinfo, f)
diff --git a/lib/portage/dbapi/virtual.py b/lib/portage/dbapi/virtual.py
index bba45f47d..8e1f14041 100644
--- a/lib/portage/dbapi/virtual.py
+++ b/lib/portage/dbapi/virtual.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2020 Gentoo Authors
+# Copyright 1998-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.dbapi import dbapi
@@ -212,22 +212,3 @@ class fakedbapi(dbapi):
if metadata is None:
raise KeyError(cpv)
metadata.update(values)
-
-
-class testdbapi:
- """A dbapi instance with completely fake functions to get by hitting disk
- TODO(antarus):
- This class really needs to be rewritten to have better stubs; but these work for now.
- The dbapi classes themselves need unit tests...and that will be a lot of work.
- """
-
- def __init__(self):
- self.cpvs = {}
-
- def f(*args, **kwargs):
- return True
-
- fake_api = dir(dbapi)
- for call in fake_api:
- if not hasattr(self, call):
- setattr(self, call, f)
diff --git a/lib/portage/debug.py b/lib/portage/debug.py
index 59aae437e..ee2dc13e7 100644
--- a/lib/portage/debug.py
+++ b/lib/portage/debug.py
@@ -1,13 +1,9 @@
-# Copyright 1999-2014 Gentoo Foundation
+# Copyright 1999-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import os
import sys
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
+import threading
import portage.const
from portage.util import writemsg
@@ -69,12 +65,12 @@ class trace_handler:
my_repr = repr(arg)
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
- return "value=%s " % my_repr
+ return f"value={my_repr} "
if "exception" == event:
my_repr = repr(arg[1])
if len(my_repr) > self.max_repr_length:
my_repr = "'omitted'"
- return "type=%s value=%s " % (arg[0], my_repr)
+ return f"type={arg[0]} value={my_repr} "
return ""
diff --git a/lib/portage/dep/__init__.py b/lib/portage/dep/__init__.py
index 3b3577025..a4a5bf26b 100644
--- a/lib/portage/dep/__init__.py
+++ b/lib/portage/dep/__init__.py
@@ -1,7 +1,8 @@
-# deps.py -- Portage dependency resolution functions
-# Copyright 2003-2021 Gentoo Authors
+# Copyright 2003-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+"""deps.py -- Portage dependency resolution functions"""
+
__all__ = [
"Atom",
"best_match_to_list",
@@ -29,7 +30,6 @@ __all__ = [
]
import re
-import sys
import warnings
from functools import lru_cache
@@ -57,14 +57,17 @@ from portage.versions import (
ververify,
)
import portage.cache.mappings
+from typing import TYPE_CHECKING
+if TYPE_CHECKING:
+ import _emerge.Package
# \w is [a-zA-Z0-9_]
# PMS 3.1.3: A slot name may contain any of the characters [A-Za-z0-9+_.-].
# It must not begin with a hyphen or a dot.
_slot_separator = ":"
-# loosly match SLOT, which may have an optional ABI part
+# loosely match SLOT, which may have an optional ABI part
_slot_loose = r"([\w+./*=-]+)"
_use = r"\[.*\]"
@@ -80,7 +83,7 @@ _extended_cat = r"[\w+*][\w+.*-]*"
_slot_dep_re_cache = {}
-def _get_slot_dep_re(eapi_attrs):
+def _get_slot_dep_re(eapi_attrs: portage.eapi._eapi_attrs) -> re.Pattern:
cache_key = eapi_attrs.slot_operator
slot_re = _slot_dep_re_cache.get(cache_key)
if slot_re is not None:
@@ -97,7 +100,11 @@ def _get_slot_dep_re(eapi_attrs):
return slot_re
-def _match_slot(atom, pkg):
+def _match_slot(atom, pkg) -> bool:
+ """
+ @type atom: portage.dep.Atom
+ @type pkg: _emerge.Package.Package
+ """
if pkg.slot == atom.slot:
if not atom.sub_slot:
return True
@@ -106,23 +113,18 @@ def _match_slot(atom, pkg):
return False
-_atom_re_cache = {}
+_atom_re = None
-def _get_atom_re(eapi_attrs):
- cache_key = eapi_attrs.dots_in_PN
- atom_re = _atom_re_cache.get(cache_key)
- if atom_re is not None:
- return atom_re
+def _get_atom_re(eapi_attrs: portage.eapi._eapi_attrs) -> re.Pattern:
+ global _atom_re
+ if _atom_re is not None:
+ return _atom_re
- if eapi_attrs.dots_in_PN:
- cp_re = _cp["dots_allowed_in_PN"]
- cpv_re = _cpv["dots_allowed_in_PN"]
- else:
- cp_re = _cp["dots_disallowed_in_PN"]
- cpv_re = _cpv["dots_disallowed_in_PN"]
+ cp_re = _cp
+ cpv_re = _cpv
- atom_re = re.compile(
+ _atom_re = re.compile(
"^(?P<without_use>(?:"
+ "(?P<op>"
+ _op
@@ -144,26 +146,20 @@ def _get_atom_re(eapi_attrs):
+ ")?$",
re.VERBOSE | re.UNICODE,
)
-
- _atom_re_cache[cache_key] = atom_re
- return atom_re
+ return _atom_re
-_atom_wildcard_re_cache = {}
+_atom_wildcard_re = None
def _get_atom_wildcard_re(eapi_attrs):
- cache_key = eapi_attrs.dots_in_PN
- atom_re = _atom_wildcard_re_cache.get(cache_key)
- if atom_re is not None:
- return atom_re
+ global _atom_wildcard_re
+ if _atom_wildcard_re is not None:
+ return _atom_wildcard_re
- if eapi_attrs.dots_in_PN:
- pkg_re = r"[\w+*][\w+.*-]*?"
- else:
- pkg_re = r"[\w+*][\w+*-]*?"
+ pkg_re = r"[\w+*][\w+*-]*?"
- atom_re = re.compile(
+ _atom_wildcard_re = re.compile(
r"((?P<simple>("
+ _extended_cat
+ r")/("
@@ -185,12 +181,10 @@ def _get_atom_wildcard_re(eapi_attrs):
+ r"))?$",
re.UNICODE,
)
+ return _atom_wildcard_re
- _atom_wildcard_re_cache[cache_key] = atom_re
- return atom_re
-
-_usedep_re_cache = {}
+_usedep_re = None
def _get_usedep_re(eapi_attrs):
@@ -201,27 +195,19 @@ def _get_usedep_re(eapi_attrs):
@return: A regular expression object that matches valid USE deps for the
given eapi.
"""
- cache_key = eapi_attrs.dots_in_use_flags
- usedep_re = _usedep_re_cache.get(cache_key)
- if usedep_re is not None:
- return usedep_re
-
- if eapi_attrs.dots_in_use_flags:
- _flag_re = r"[A-Za-z0-9][A-Za-z0-9+_@.-]*"
- else:
- _flag_re = r"[A-Za-z0-9][A-Za-z0-9+_@-]*"
+ global _usedep_re
+ if _usedep_re is not None:
+ return _usedep_re
- usedep_re = re.compile(
+ _usedep_re = re.compile(
r"^(?P<prefix>[!-]?)(?P<flag>"
- + _flag_re
+ + r"[A-Za-z0-9][A-Za-z0-9+_@-]*"
+ r")(?P<default>(\(\+\)|\(\-\))?)(?P<suffix>[?=]?)$"
)
+ return _usedep_re
- _usedep_re_cache[cache_key] = usedep_re
- return usedep_re
-
-_useflag_re_cache = {}
+_useflag_re = None
def _get_useflag_re(eapi):
@@ -234,21 +220,12 @@ def _get_useflag_re(eapi):
@return: A regular expression object that matches valid USE flags for the
given eapi.
"""
- eapi_attrs = _get_eapi_attrs(eapi)
- cache_key = eapi_attrs.dots_in_use_flags
- useflag_re = _useflag_re_cache.get(cache_key)
- if useflag_re is not None:
- return useflag_re
-
- if eapi_attrs.dots_in_use_flags:
- flag_re = r"[A-Za-z0-9][A-Za-z0-9+_@.-]*"
- else:
- flag_re = r"[A-Za-z0-9][A-Za-z0-9+_@-]*"
-
- useflag_re = re.compile(r"^" + flag_re + r"$")
+ global _useflag_re
+ if _useflag_re is not None:
+ return _useflag_re
- _useflag_re_cache[cache_key] = useflag_re
- return useflag_re
+ _useflag_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9+_@-]*$")
+ return _useflag_re
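The refactor above replaces the per-EAPI regex caches with lazily built module-level singletons, since the dots_in_PN and dots_in_use_flags EAPI attributes no longer vary between supported EAPIs. The pattern, in isolation:

    import re

    _useflag_re = None

    def get_useflag_re():
        # Build the pattern once, on first use; later calls reuse it.
        global _useflag_re
        if _useflag_re is None:
            _useflag_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9+_@-]*$")
        return _useflag_re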
def cpvequal(cpv1, cpv2):
@@ -492,9 +469,9 @@ def paren_enclose(mylist, unevaluated_atom=False, opconvert=False):
for x in mylist:
if isinstance(x, list):
if opconvert and x and x[0] == "||":
- mystrparts.append("%s ( %s )" % (x[0], paren_enclose(x[1:])))
+ mystrparts.append(f"{x[0]} ( {paren_enclose(x[1:])} )")
else:
- mystrparts.append("( %s )" % paren_enclose(x))
+ mystrparts.append(f"( {paren_enclose(x)} )")
else:
if unevaluated_atom:
x = getattr(x, "unevaluated_atom", x)
@@ -1055,7 +1032,6 @@ def flatten(mylist):
class _use_dep:
-
__slots__ = (
"_eapi_attrs",
"conditional",
@@ -1101,7 +1077,6 @@ class _use_dep:
conditional=None,
required=None,
):
-
self._eapi_attrs = eapi_attrs
if enabled_flags is not None:
@@ -1194,10 +1169,10 @@ class _use_dep:
def __str__(self):
if not self.tokens:
return ""
- return "[%s]" % (",".join(self.tokens),)
+ return f"[{','.join(self.tokens)}]"
def __repr__(self):
- return "portage.dep._use_dep(%s)" % repr(self.tokens)
+ return f"portage.dep._use_dep({repr(self.tokens)})"
def evaluate_conditionals(self, use):
"""
@@ -1465,7 +1440,6 @@ class _use_dep:
class Atom(str):
-
"""
For compatibility with existing atom string manipulation code, this
class emulates most of the str methods that are useful with atoms.
@@ -1489,17 +1463,7 @@ class Atom(str):
def __init__(self, forbid_overlap=False):
self.overlap = self._overlap(forbid=forbid_overlap)
- def __new__(
- cls,
- s,
- unevaluated_atom=None,
- allow_wildcard=False,
- allow_repo=None,
- _use=None,
- eapi=None,
- is_valid_flag=None,
- allow_build_id=None,
- ):
+ def __new__(cls, s, *args, **kwargs):
return str.__new__(cls, s)
def __init__(
@@ -1542,8 +1506,8 @@ class Atom(str):
allow_build_id = True
blocker_prefix = ""
- if "!" == s[:1]:
- blocker = self._blocker(forbid_overlap=("!" == s[1:2]))
+ if s[:1] == "!":
+ blocker = self._blocker(forbid_overlap=s[1:2] == "!")
if blocker.overlap.forbid:
blocker_prefix = s[:2]
s = s[2:]
@@ -1710,13 +1674,7 @@ class Atom(str):
if eapi is not None:
if not isinstance(eapi, str):
raise TypeError(
- "expected eapi argument of "
- + "%s, got %s: %s"
- % (
- str,
- type(eapi),
- eapi,
- )
+ "expected eapi argument of " + f"{str}, got {type(eapi)}: {eapi}"
)
if self.slot and not eapi_attrs.slot_deps:
raise InvalidAtom(
@@ -1768,7 +1726,7 @@ class Atom(str):
)
@property
- def slot_operator_built(self):
+ def slot_operator_built(self) -> bool:
"""
Returns True if slot_operator == "=" and sub_slot is not None.
NOTE: foo/bar:2= is unbuilt and returns False, whereas foo/bar:2/2=
@@ -1777,7 +1735,7 @@ class Atom(str):
return self.slot_operator == "=" and self.sub_slot is not None
@property
- def without_repo(self):
+ def without_repo(self) -> "Atom":
if self.repo is None:
return self
return Atom(
@@ -1785,7 +1743,7 @@ class Atom(str):
)
@property
- def without_slot(self):
+ def without_slot(self) -> "Atom":
if self.slot is None and self.slot_operator is None:
return self
atom = remove_slot(self)
@@ -1795,14 +1753,14 @@ class Atom(str):
atom += str(self.use)
return Atom(atom, allow_repo=True, allow_wildcard=True)
- def with_repo(self, repo):
+ def with_repo(self, repo) -> "Atom":
atom = remove_slot(self)
if self.slot is not None or self.slot_operator is not None:
atom += _slot_separator
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
atom += _repo_separator + repo
@@ -1810,7 +1768,7 @@ class Atom(str):
atom += str(self.use)
return Atom(atom, allow_repo=True, allow_wildcard=True)
- def with_slot(self, slot):
+ def with_slot(self, slot) -> "Atom":
atom = remove_slot(self) + _slot_separator + slot
if self.repo is not None:
atom += _repo_separator + self.repo
@@ -1823,7 +1781,7 @@ class Atom(str):
"Atom instances are immutable", self.__class__, name, value
)
- def intersects(self, other):
+ def intersects(self, other: "Atom") -> bool:
"""
Atoms with different cpv, operator or use attributes cause this method
to return False even though there may actually be some intersection.
@@ -1835,7 +1793,7 @@ class Atom(str):
False otherwise.
"""
if not isinstance(other, Atom):
- raise TypeError("expected %s, got %s" % (Atom, type(other)))
+ raise TypeError(f"expected {Atom}, got {type(other)}")
if self == other:
return True
@@ -1853,7 +1811,7 @@ class Atom(str):
return False
- def evaluate_conditionals(self, use):
+ def evaluate_conditionals(self, use: set) -> "Atom":
"""
Create an atom instance with any USE conditionals evaluated.
@param use: The set of enabled USE flags
@@ -1869,7 +1827,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use.evaluate_conditionals(use)
@@ -1881,7 +1839,9 @@ class Atom(str):
_use=use_dep,
)
- def violated_conditionals(self, other_use, is_valid_flag, parent_use=None):
+ def violated_conditionals(
+ self, other_use: set, is_valid_flag: callable, parent_use=None
+ ) -> "Atom":
"""
Create an atom instance with any USE conditional removed, that is
satisfied by other_use.
@@ -1902,7 +1862,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use.violated_conditionals(other_use, is_valid_flag, parent_use)
@@ -1923,7 +1883,7 @@ class Atom(str):
if self.slot is not None:
atom += self.slot
if self.sub_slot is not None:
- atom += "/%s" % self.sub_slot
+ atom += f"/{self.sub_slot}"
if self.slot_operator is not None:
atom += self.slot_operator
use_dep = self.use._eval_qa_conditionals(use_mask, use_force)
@@ -1944,7 +1904,7 @@ class Atom(str):
memo[id(self)] = self
return self
- def match(self, pkg):
+ def match(self, pkg: "_emerge.Package"):
"""
Check if the given package instance matches this atom.
@@ -1997,17 +1957,13 @@ class ExtendedAtomDict(portage.cache.mappings.MutableMapping):
return result
def __iter__(self):
- for k in self._normal:
- yield k
- for k in self._extended:
- yield k
+ yield from self._normal
+ yield from self._extended
def iteritems(self):
try:
- for item in self._normal.items():
- yield item
- for item in self._extended.items():
- yield item
+ yield from self._normal.items()
+ yield from self._extended.items()
except AttributeError:
pass # FEATURES=python-trace
@@ -2028,7 +1984,6 @@ class ExtendedAtomDict(portage.cache.mappings.MutableMapping):
return self._normal.setdefault(cp, default)
def __getitem__(self, cp):
-
if not isinstance(cp, str):
raise KeyError(cp)
@@ -2546,7 +2501,6 @@ def match_from_list(mydep, candidate_list):
mylist = []
if mydep.extended_syntax:
-
for x in candidate_list:
cp = getattr(x, "cp", None)
if cp is None:
@@ -2561,7 +2515,6 @@ def match_from_list(mydep, candidate_list):
mylist.append(x)
if mylist and mydep.operator == "=*":
-
candidate_list = mylist
mylist = []
# Currently, only \*\w+\* is supported.
@@ -2872,7 +2825,6 @@ def get_required_use_flags(required_use, eapi=None):
class _RequiredUseLeaf:
-
__slots__ = ("_satisfied", "_token")
def __init__(self, token, satisfied):
@@ -2884,7 +2836,6 @@ class _RequiredUseLeaf:
class _RequiredUseBranch:
-
__slots__ = ("_children", "_operator", "_parent", "_satisfied")
def __init__(self, operator=None, parent=None):
@@ -2897,7 +2848,6 @@ class _RequiredUseBranch:
return self._satisfied
def tounicode(self):
-
include_parens = self._parent is not None
tokens = []
if self._operator is not None:
@@ -2908,7 +2858,7 @@ class _RequiredUseBranch:
complex_nesting = False
node = self
- while node != None and not complex_nesting:
+ while node is not None and not complex_nesting:
if node._operator in ("||", "^^", "??"):
complex_nesting = True
else:
@@ -3239,53 +3189,3 @@ def extract_affecting_use(mystr, atom, eapi=None):
raise InvalidDependString(_("malformed syntax: '%s'") % mystr)
return affecting_use
-
-
-def extract_unpack_dependencies(src_uri, unpackers):
- """
- Return unpack dependencies string for given SRC_URI string.
-
- @param src_uri: SRC_URI string
- @type src_uri: String
- @param unpackers: Dictionary mapping archive suffixes to dependency strings
- @type unpackers: Dictionary
- @rtype: String
- @return: Dependency string specifying packages required to unpack archives.
- """
- src_uri = src_uri.split()
-
- depend = []
- for i in range(len(src_uri)):
- if src_uri[i][-1] == "?" or src_uri[i] in ("(", ")"):
- depend.append(src_uri[i])
- elif (i + 1 < len(src_uri) and src_uri[i + 1] == "->") or src_uri[i] == "->":
- continue
- else:
- for suffix in sorted(unpackers, key=lambda x: len(x), reverse=True):
- suffix = suffix.lower()
- if src_uri[i].lower().endswith(suffix):
- depend.append(unpackers[suffix])
- break
-
- while True:
- cleaned_depend = depend[:]
- for i in range(len(cleaned_depend)):
- if cleaned_depend[i] is None:
- continue
- elif cleaned_depend[i] == "(" and cleaned_depend[i + 1] == ")":
- cleaned_depend[i] = None
- cleaned_depend[i + 1] = None
- elif (
- cleaned_depend[i][-1] == "?"
- and cleaned_depend[i + 1] == "("
- and cleaned_depend[i + 2] == ")"
- ):
- cleaned_depend[i] = None
- cleaned_depend[i + 1] = None
- cleaned_depend[i + 2] = None
- if depend == cleaned_depend:
- break
- else:
- depend = [x for x in cleaned_depend if x is not None]
-
- return " ".join(depend)
diff --git a/lib/portage/dep/_dnf.py b/lib/portage/dep/_dnf.py
index c83efed8a..d9ee10bf2 100644
--- a/lib/portage/dep/_dnf.py
+++ b/lib/portage/dep/_dnf.py
@@ -24,7 +24,7 @@ def dnf_convert(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (dep_struct,)
+ ), f"Normalization error, nested conjunction found in {dep_struct}"
if any(isinstance(element, list) for element in x):
x_dnf = ["||"]
for element in x[1:]:
@@ -34,10 +34,10 @@ def dnf_convert(dep_struct):
# must be a conjunction.
assert (
element
- ), "Normalization error, empty conjunction found in %s" % (x,)
+ ), f"Normalization error, empty conjunction found in {x}"
assert (
element[0] != "||"
- ), "Normalization error, nested disjunction found in %s" % (x,)
+ ), f"Normalization error, nested disjunction found in {x}"
element = dnf_convert(element)
if contains_disjunction(element):
assert (
@@ -89,9 +89,7 @@ def contains_disjunction(dep_struct):
is_disjunction = dep_struct and dep_struct[0] == "||"
for x in dep_struct:
if isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in %s" % (
- dep_struct,
- )
+ assert x, f"Normalization error, empty conjunction found in {dep_struct}"
if x[0] == "||":
return True
if is_disjunction and contains_disjunction(x):
diff --git a/lib/portage/dep/_slot_operator.py b/lib/portage/dep/_slot_operator.py
index bdaf5f328..d3f506450 100644
--- a/lib/portage/dep/_slot_operator.py
+++ b/lib/portage/dep/_slot_operator.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2018 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.dep import Atom, paren_enclose, use_reduce
@@ -41,8 +41,7 @@ def find_built_slot_operator_atoms(pkg):
def _find_built_slot_operator(dep_struct):
for x in dep_struct:
if isinstance(x, list):
- for atom in _find_built_slot_operator(x):
- yield atom
+ yield from _find_built_slot_operator(x)
elif isinstance(x, Atom) and x.slot_operator_built:
yield x
@@ -59,7 +58,6 @@ def ignore_built_slot_operator_deps(dep_struct):
def evaluate_slot_operator_equal_deps(settings, use, trees):
-
metadata = settings.configdict["pkg"]
eapi = metadata["EAPI"]
eapi_attrs = _get_eapi_attrs(eapi)
@@ -93,6 +91,7 @@ def _eval_deps(dep_struct, vardbs):
# and B installed should record subslot on A only since the package is
# supposed to link against that anyway, and we have no guarantee that B
# has matching ABI.
+ # See bug #455904, bug #489458, bug #586238.
for i, x in enumerate(dep_struct):
if isinstance(x, list):
@@ -103,14 +102,15 @@ def _eval_deps(dep_struct, vardbs):
if best_version:
best_version = best_version[-1]
try:
- best_version = vardb._pkg_str(best_version, None)
+ best_version = (
+ best_version
+ if hasattr(best_version, "slot")
+ else vardb._pkg_str(best_version, None)
+ )
except (KeyError, InvalidData):
pass
else:
- slot_part = "%s/%s=" % (
- best_version.slot,
- best_version.sub_slot,
- )
+ slot_part = f"{best_version.slot}/{best_version.sub_slot}="
x = x.with_slot(slot_part)
dep_struct[i] = x
break
diff --git a/lib/portage/dep/dep_check.py b/lib/portage/dep/dep_check.py
index 9fccda08b..c361ee59e 100644
--- a/lib/portage/dep/dep_check.py
+++ b/lib/portage/dep/dep_check.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2020 Gentoo Authors
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["dep_check", "dep_eval", "dep_wordreduce", "dep_zapdeps"]
@@ -31,7 +31,7 @@ def _expand_new_virtuals(
trees=None,
use_mask=None,
use_force=None,
- **kwargs
+ **kwargs,
):
"""
In order to solve bug #141118, recursively expand new-style virtuals so
@@ -45,7 +45,7 @@ def _expand_new_virtuals(
expanded to || ( highest match ... lowest match ).
The result is normalized in the same way as use_reduce, having a top-level
- conjuction, and no redundant nested lists.
+ conjunction, and no redundant nested lists.
"""
newsplit = []
mytrees = trees[myroot]
@@ -78,15 +78,15 @@ def _expand_new_virtuals(
newsplit.append(x)
continue
elif isinstance(x, list):
- assert x, "Normalization error, empty conjunction found in %s" % (mysplit,)
+ assert x, f"Normalization error, empty conjunction found in {mysplit}"
if is_disjunction:
assert (
x[0] != "||"
- ), "Normalization error, nested disjunction found in %s" % (mysplit,)
+ ), f"Normalization error, nested disjunction found in {mysplit}"
else:
assert (
x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (mysplit,)
+ ), f"Normalization error, nested conjunction found in {mysplit}"
x_exp = _expand_new_virtuals(
x,
edebug,
@@ -96,7 +96,7 @@ def _expand_new_virtuals(
trees=trees,
use_mask=use_mask,
use_force=use_force,
- **kwargs
+ **kwargs,
)
if is_disjunction:
if len(x_exp) == 1:
@@ -107,9 +107,7 @@ def _expand_new_virtuals(
# must be a disjunction.
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (
- x_exp,
- )
+ ), f"Normalization error, nested conjunction found in {x_exp}"
newsplit.extend(x[1:])
else:
newsplit.append(x)
@@ -243,7 +241,7 @@ def _expand_new_virtuals(
mysettings,
myroot=myroot,
trees=trees,
- **pkg_kwargs
+ **pkg_kwargs,
)
finally:
# Restore previous EAPI after recursion.
@@ -253,7 +251,7 @@ def _expand_new_virtuals(
del mytrees["virt_parent"]
if not mycheck[0]:
- raise ParseError("%s: %s '%s'" % (pkg, mycheck[1], depstring))
+ raise ParseError(f"{pkg}: {mycheck[1]} '{depstring}'")
# Replace the original atom "x" with "virt_atom" which refers
# to the specific version of the virtual whose deps we're
@@ -346,7 +344,7 @@ def dep_zapdeps(
"""
if trees is None:
trees = portage.db
- writemsg("ZapDeps -- %s\n" % (use_binaries), 2)
+ writemsg(f"ZapDeps -- {use_binaries}\n", 2)
if not reduced or unreduced == ["||"] or dep_eval(reduced):
return []
@@ -429,7 +427,7 @@ def dep_zapdeps(
# Sort the deps into installed, not installed but already
# in the graph and other, not installed and not in the graph
- # and other, with values of [[required_atom], availablility]
+ # and other, with values of [[required_atom], availability]
for x, satisfied in zip(deps, satisfieds):
if isinstance(x, list):
atoms = dep_zapdeps(
@@ -472,7 +470,7 @@ def dep_zapdeps(
avail_pkg = [replacing]
if avail_pkg:
avail_pkg = avail_pkg[-1] # highest (ascending order)
- avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
+ avail_slot = Atom(f"{atom.cp}:{avail_pkg.slot}")
if not avail_pkg:
all_available = False
all_use_satisfied = False
@@ -527,7 +525,7 @@ def dep_zapdeps(
avail_pkg_use = avail_pkg_use[-1]
if avail_pkg_use != avail_pkg:
avail_pkg = avail_pkg_use
- avail_slot = Atom("%s:%s" % (atom.cp, avail_pkg.slot))
+ avail_slot = Atom(f"{atom.cp}:{avail_pkg.slot}")
if not replacing and downgrade_probe is not None and graph is not None:
highest_in_slot = mydbapi_match_pkgs(avail_slot)
@@ -602,7 +600,7 @@ def dep_zapdeps(
# If any version of a package is already in the graph then we
# assume that it is preferred over other possible packages choices.
all_installed = True
- for atom in set(Atom(atom.cp) for atom in atoms if not atom.blocker):
+ for atom in {Atom(atom.cp) for atom in atoms if not atom.blocker}:
# New-style virtuals have zero cost to install.
if not vardb.match(atom) and not atom.startswith("virtual/"):
all_installed = False
@@ -745,7 +743,7 @@ def dep_zapdeps(
if minimize_slots:
# Prefer choices having fewer new slots. When used with DNF form,
- # this can eliminate unecessary packages that depclean would
+ # this can eliminate unnecessary packages that depclean would
# ultimately eliminate (see bug 632026). Only use this behavior
# when deemed necessary by the caller, since this will discard the
# order specified in the ebuild, and the preferences specified
@@ -906,7 +904,7 @@ def dep_check(
eapi=eapi,
)
except InvalidDependString as e:
- return [0, "%s" % (e,)]
+ return [0, f"{e}"]
if mysplit == []:
# dependencies were reduced to nothing
@@ -931,7 +929,7 @@ def dep_check(
trees=trees,
)
except ParseError as e:
- return [0, "%s" % (e,)]
+ return [0, f"{e}"]
dnf = False
if mysettings.local_config: # if not repoman
@@ -944,8 +942,8 @@ def dep_check(
return [0, _("Invalid token")]
writemsg("\n\n\n", 1)
- writemsg("mysplit: %s\n" % (mysplit), 1)
- writemsg("mysplit2: %s\n" % (mysplit2), 1)
+ writemsg(f"mysplit: {mysplit}\n", 1)
+ writemsg(f"mysplit2: {mysplit2}\n", 1)
selected_atoms = dep_zapdeps(
mysplit,
@@ -965,7 +963,8 @@ def _overlap_dnf(dep_struct):
order to minimize the number of packages chosen to satisfy cases like
"|| ( foo bar ) || ( bar baz )" as in bug #632026. Non-overlapping
groups are excluded from the conversion, since DNF leads to exponential
- explosion of the formula.
+ explosion of the formula. Duplicate || groups are eliminated since
+ DNF expansion of duplicates is nonsensical (bug #891137).
When dep_struct does not contain any overlapping groups, no DNF
conversion will be performed, and dep_struct will be returned as-is.
@@ -988,7 +987,7 @@ def _overlap_dnf(dep_struct):
if isinstance(x, list):
assert (
x and x[0] == "||"
- ), "Normalization error, nested conjunction found in %s" % (dep_struct,)
+ ), f"Normalization error, nested conjunction found in {dep_struct}"
order_map[id(x)] = i
prev_cp = None
for atom in _iter_flatten(x):
@@ -1023,7 +1022,17 @@ def _overlap_dnf(dep_struct):
if len(disjunctions) > 1:
overlap = True
# convert overlapping disjunctions to DNF
- result.extend(_dnf_convert(sorted(disjunctions.values(), key=order_key)))
+ dedup_set = set()
+ unique_disjunctions = []
+ for x in sorted(disjunctions.values(), key=order_key):
+ dep_repr = portage.dep.paren_enclose(x, opconvert=True)
+ if dep_repr not in dedup_set:
+ dedup_set.add(dep_repr)
+ unique_disjunctions.append(x)
+ if len(unique_disjunctions) > 1:
+ result.extend(_dnf_convert(unique_disjunctions))
+ else:
+ result.extend(unique_disjunctions)
else:
# pass through non-overlapping disjunctions
result.append(disjunctions.popitem()[1])
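The deduplication above keys each || group on its serialized form, dropping duplicates while preserving order. The same idea in isolation, with canon standing in for paren_enclose:

    def unique(items, canon):
        # Keep the first occurrence of each canonical form, in order.
        seen = set()
        out = []
        for x in items:
            key = canon(x)
            if key not in seen:
                seen.add(key)
                out.append(x)
        return out

    groups = [["||", "a", "b"], ["||", "a", "b"], ["||", "b", "c"]]
    assert unique(groups, repr) == [["||", "a", "b"], ["||", "b", "c"]]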
@@ -1080,7 +1089,7 @@ def dep_wordreduce(mydeplist, mysettings, mydbapi, mode, use_cache=1):
mydep = x
else:
mydep = mydbapi.match(deplist[mypos], use_cache=use_cache)
- if mydep != None:
+ if mydep is not None:
tmp = len(mydep) >= 1
if deplist[mypos][0] == "!":
tmp = False
diff --git a/lib/portage/dep/libc.py b/lib/portage/dep/libc.py
new file mode 100644
index 000000000..db88432cb
--- /dev/null
+++ b/lib/portage/dep/libc.py
@@ -0,0 +1,83 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.dep import Atom
+from portage.const import LIBC_PACKAGE_ATOM
+from portage.dbapi._expand_new_virt import expand_new_virt
+
+import portage.dbapi.porttree
+
+
+def find_libc_deps(portdb: portage.dbapi.porttree.dbapi, realized: bool = False):
+ """Finds libc package for a ROOT via portdb.
+
+ Parameters
+ ----------
+ portdb : dbapi
+ dbapi instance for portdb (for installed packages).
+ realized : bool
+ Request the installed packages satisfying LIBC_PACKAGE_ATOM rather than the raw atoms of the virtual.
+
+ Returns
+ -------
+ set
+ Set of libc packages (or raw atoms if realized is False).
+ """
+
+ libc_pkgs = set()
+
+ for atom in expand_new_virt(
+ portdb,
+ LIBC_PACKAGE_ATOM,
+ ):
+ if atom.blocker:
+ continue
+
+ if not realized:
+ # Just the raw packages were requested (whatever satisfies the virtual)
+ libc_pkgs.add(atom)
+ continue
+
+ # This will give us something like sys-libs/glibc:2.2, but we want to know
+ # what installed atom actually satisfies that.
+ try:
+ libc_pkgs.add(portdb.match(atom)[0])
+ except IndexError:
+ continue
+
+ return libc_pkgs
+
+
+def strip_libc_deps(dep_struct: list, libc_deps: set):
+ """Strip libc dependency out of a given dependency strucutre.
+
+ Parameters
+ ----------
+ dep_struct: list
+ List of package dependencies (atoms).
+
+ libc_deps: set
+ Set of dependencies satisfying LIBC_PACKAGE_ATOM to be
+ stripped out of any dependencies.
+
+ Returns
+ -------
+ None
+ dep_struct is modified in place, with any matching libc deps removed.
+ """
+ # We're going to just grab the libc provider for ROOT and
+ # strip out any dep for the purposes of --changed-deps.
+ # We can't go off versions, even though it'd be more precise
+ # (see below), because we'd end up with FPs and unnecessary
+ # --changed-deps results far too often.
+ #
+ # This penalizes a bit the case where someone adds a
+ # minimum (or maximum) version of libc explicitly in an ebuild
+ # without a new revision, but that's extremely rare, and doesn't
+ # feel like it changes the balance for what we prefer here.
+
+ for i, x in reversed(list(enumerate(dep_struct))):
+ # We only need to bother if x is an Atom because we know the deps
+ # we inject are simple & flat.
+ if isinstance(x, Atom) and any(x.cp == libc_dep.cp for libc_dep in libc_deps):
+ del dep_struct[i]
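strip_libc_deps iterates in reverse because deleting by index while walking forward would shift the positions of unvisited items. A minimal demonstration:

    deps = ["sys-libs/glibc", "dev-libs/openssl", "sys-libs/glibc"]

    for i, x in reversed(list(enumerate(deps))):
        if x == "sys-libs/glibc":
            del deps[i]

    assert deps == ["dev-libs/openssl"]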
diff --git a/lib/portage/dep/meson.build b/lib/portage/dep/meson.build
new file mode 100644
index 000000000..d2379d8cb
--- /dev/null
+++ b/lib/portage/dep/meson.build
@@ -0,0 +1,13 @@
+py.install_sources(
+ [
+ 'dep_check.py',
+ 'libc.py',
+ '_dnf.py',
+ '_slot_operator.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/dep',
+ pure : not native_extensions
+)
+
+subdir('soname')
diff --git a/lib/portage/dep/soname/SonameAtom.py b/lib/portage/dep/soname/SonameAtom.py
index 2308b31fd..421ab3f5a 100644
--- a/lib/portage/dep/soname/SonameAtom.py
+++ b/lib/portage/dep/soname/SonameAtom.py
@@ -3,7 +3,6 @@
class SonameAtom:
-
__slots__ = ("multilib_category", "soname", "_hash_key", "_hash_value")
    # Distinguishes package atoms from other atom types
@@ -21,11 +20,15 @@ class SonameAtom:
)
def __getstate__(self):
- return dict((k, getattr(self, k)) for k in self.__slots__)
+ state = {k: getattr(self, k) for k in self.__slots__}
+ # hash() results are not meaningfully picklable.
+ state["_hash_value"] = None
+ return state
def __setstate__(self, state):
for k, v in state.items():
object.__setattr__(self, k, v)
+ object.__setattr__(self, "_hash_value", hash(self._hash_key))
def __hash__(self):
return self._hash_value
@@ -43,14 +46,10 @@ class SonameAtom:
return True
def __repr__(self):
- return "%s('%s', '%s')" % (
- self.__class__.__name__,
- self.multilib_category,
- self.soname,
- )
+ return f"{self.__class__.__name__}('{self.multilib_category}', '{self.soname}')"
def __str__(self):
- return "%s: %s" % (self.multilib_category, self.soname)
+ return f"{self.multilib_category}: {self.soname}"
def match(self, pkg):
"""
diff --git a/lib/portage/dep/soname/meson.build b/lib/portage/dep/soname/meson.build
new file mode 100644
index 000000000..7db7a547e
--- /dev/null
+++ b/lib/portage/dep/soname/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'SonameAtom.py',
+ 'multilib_category.py',
+ 'parse.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/dep/soname',
+ pure : not native_extensions
+)
diff --git a/lib/portage/dep/soname/multilib_category.py b/lib/portage/dep/soname/multilib_category.py
index ab855d339..baca439fd 100644
--- a/lib/portage/dep/soname/multilib_category.py
+++ b/lib/portage/dep/soname/multilib_category.py
@@ -11,6 +11,7 @@
# arm_{32,64}
# hppa_{32,64}
# ia_{32,64}
+# loong_{ilp32s,ilp32f,ilp32d,lp64s,lp64f,lp64d}
# m68k_{32,64}
# mips_{eabi32,eabi64,n32,n64,o32,o64}
# ppc_{32,64}
@@ -37,15 +38,29 @@ from portage.util.elf.constants import (
EF_MIPS_ABI2,
EF_RISCV_FLOAT_ABI_DOUBLE,
EF_RISCV_RVC,
+ EF_LOONGARCH_ABI_LP64_SOFT_FLOAT,
+ EF_LOONGARCH_ABI_LP64_SINGLE_FLOAT,
+ EF_LOONGARCH_ABI_LP64_DOUBLE_FLOAT,
+ EF_LOONGARCH_ABI_ILP32_SOFT_FLOAT,
+ EF_LOONGARCH_ABI_ILP32_SINGLE_FLOAT,
+ EF_LOONGARCH_ABI_ILP32_DOUBLE_FLOAT,
+ EF_LOONGARCH_ABI_MASK,
ELFCLASS32,
ELFCLASS64,
EM_386,
EM_68K,
EM_AARCH64,
EM_ALPHA,
+ EM_AMDGPU,
+ EM_ARC,
+ EM_ARC_COMPACT,
+ EM_ARC_COMPACT2,
+ EM_ARC_COMPACT3,
+ EM_ARC_COMPACT3_64,
EM_ARM,
EM_ALTERA_NIOS2,
EM_IA_64,
+ EM_LOONGARCH,
EM_MIPS,
EM_PARISC,
EM_PPC,
@@ -68,9 +83,16 @@ _machine_prefix_map = {
EM_68K: "m68k",
EM_AARCH64: "arm",
EM_ALPHA: "alpha",
+ EM_AMDGPU: "amdgpu",
EM_ALTERA_NIOS2: "nios2",
+ EM_ARC: "arc",
+ EM_ARC_COMPACT: "arc",
+ EM_ARC_COMPACT2: "arc",
+ EM_ARC_COMPACT3: "arc",
+ EM_ARC_COMPACT3_64: "arc",
EM_ARM: "arm",
EM_IA_64: "ia64",
+ EM_LOONGARCH: "loong",
EM_MIPS: "mips",
EM_PARISC: "hppa",
EM_PPC: "ppc",
@@ -84,6 +106,15 @@ _machine_prefix_map = {
EM_X86_64: "x86",
}
+_loong_abi_map = {
+ EF_LOONGARCH_ABI_LP64_SOFT_FLOAT: "lp64s",
+ EF_LOONGARCH_ABI_LP64_SINGLE_FLOAT: "lp64f",
+ EF_LOONGARCH_ABI_LP64_DOUBLE_FLOAT: "lp64d",
+ EF_LOONGARCH_ABI_ILP32_SOFT_FLOAT: "ilp32s",
+ EF_LOONGARCH_ABI_ILP32_SINGLE_FLOAT: "ilp32f",
+ EF_LOONGARCH_ABI_ILP32_DOUBLE_FLOAT: "ilp32d",
+}
+
_mips_abi_map = {
E_MIPS_ABI_EABI32: "eabi32",
E_MIPS_ABI_EABI64: "eabi64",
@@ -92,8 +123,12 @@ _mips_abi_map = {
}
-def _compute_suffix_mips(elf_header):
+def _compute_suffix_loong(elf_header):
+ loong_abi = elf_header.e_flags & EF_LOONGARCH_ABI_MASK
+ return _loong_abi_map.get(loong_abi)
+
+def _compute_suffix_mips(elf_header):
name = None
mips_abi = elf_header.e_flags & EF_MIPS_ABI
@@ -136,6 +171,7 @@ def _compute_suffix_riscv(elf_header):
_specialized_funcs = {
+ "loong": _compute_suffix_loong,
"mips": _compute_suffix_mips,
"riscv": _compute_suffix_riscv,
}
@@ -153,7 +189,6 @@ def compute_multilib_category(elf_header):
"""
category = None
if elf_header.e_machine is not None:
-
prefix = _machine_prefix_map.get(elf_header.e_machine)
specialized_func = _specialized_funcs.get(prefix)
suffix = None
@@ -171,6 +206,6 @@ def compute_multilib_category(elf_header):
if prefix is None or suffix is None:
category = None
else:
- category = "%s_%s" % (prefix, suffix)
+ category = f"{prefix}_{suffix}"
return category
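How the new loong branch resolves a category end to end: the machine prefix comes from e_machine, and the suffix from the float-ABI bits of e_flags. A sketch with a stand-in header object; the SimpleNamespace mimics the parsed ELF header, with field names taken from the code above.

    from types import SimpleNamespace
    from portage.dep.soname.multilib_category import compute_multilib_category
    from portage.util.elf.constants import (
        EF_LOONGARCH_ABI_LP64_DOUBLE_FLOAT,
        ELFCLASS64,
        EM_LOONGARCH,
    )

    header = SimpleNamespace(
        e_machine=EM_LOONGARCH,
        # e_flags & EF_LOONGARCH_ABI_MASK selects the float ABI suffix
        e_flags=EF_LOONGARCH_ABI_LP64_DOUBLE_FLOAT,
        ei_class=ELFCLASS64,
    )
    assert compute_multilib_category(header) == "loong_lp64d"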
diff --git a/lib/portage/dispatch_conf.py b/lib/portage/dispatch_conf.py
index fa9780937..2597cf9b4 100644
--- a/lib/portage/dispatch_conf.py
+++ b/lib/portage/dispatch_conf.py
@@ -6,7 +6,6 @@
# written by Jeremy Wohl (http://igmus.org)
import errno
-import io
import functools
import stat
import subprocess
@@ -25,9 +24,6 @@ RCS_BRANCH = "1.1.1"
RCS_LOCK = "rcs -ko -M -l"
RCS_PUT = 'ci -t-"Archived config file." -m"dispatch-conf update."'
RCS_GET = "co"
-RCS_MERGE = "rcsmerge -p -r" + RCS_BRANCH + " '%s' > '%s'"
-
-DIFF3_MERGE = "diff3 -mE '%s' '%s' '%s' > '%s'"
_ARCHIVE_ROTATE_MAX = 9
@@ -40,10 +36,10 @@ def diffstatusoutput(cmd, file1, file2):
# raise a UnicodeDecodeError which makes the output inaccessible.
args = shlex_split(cmd % (file1, file2))
- args = [portage._unicode_encode(x, errors="strict") for x in args]
+ args = (portage._unicode_encode(x, errors="strict") for x in args)
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = portage._unicode_decode(proc.communicate()[0])
- if output and output[-1] == "\n":
+ if output and output.endswith("\n"):
# getstatusoutput strips one newline
output = output[:-1]
return (proc.wait(), output)
@@ -75,21 +71,19 @@ def diff_mixed(func, file1, file2):
if tempdir is None:
tempdir = tempfile.mkdtemp()
- diff_files[i] = os.path.join(tempdir, "%d" % i)
+ diff_files[i] = os.path.join(tempdir, f"{i}")
if st is None:
content = "/dev/null\n"
elif stat.S_ISLNK(st.st_mode):
link_dest = os.readlink(files[i])
- content = "SYM: %s -> %s\n" % (file1, link_dest)
+ content = f"SYM: {file1} -> {link_dest}\n"
elif stat.S_ISDIR(st.st_mode):
- content = "DIR: %s\n" % (file1,)
+ content = f"DIR: {file1}\n"
elif stat.S_ISFIFO(st.st_mode):
- content = "FIF: %s\n" % (file1,)
+ content = f"FIF: {file1}\n"
else:
- content = "DEV: %s\n" % (file1,)
- with io.open(
- diff_files[i], mode="w", encoding=_encodings["stdio"]
- ) as f:
+ content = f"DEV: {file1}\n"
+ with open(diff_files[i], mode="w", encoding=_encodings["stdio"]) as f:
f.write(content)
return func(diff_files[0], diff_files[1])
@@ -124,10 +118,7 @@ def read_config(mandatory_opts):
loader = KeyValuePairFileLoader(config_path, None)
opts, _errors = loader.load()
if not opts:
- print(
- _("dispatch-conf: Error reading {}; fatal").format(config_path),
- file=sys.stderr,
- )
+ print(_(f"dispatch-conf: Error reading {config_path}; fatal"), file=sys.stderr)
sys.exit(1)
# Handle quote removal here, since KeyValuePairFileLoader doesn't do that.
@@ -143,9 +134,8 @@ def read_config(mandatory_opts):
else:
print(
_(
- 'dispatch-conf: Missing option "%s" in /etc/dispatch-conf.conf; fatal'
- )
- % (key,),
+ f'dispatch-conf: Missing option "{key}" in /etc/dispatch-conf.conf; fatal'
+ ),
file=sys.stderr,
)
@@ -160,8 +150,9 @@ def read_config(mandatory_opts):
os.chmod(opts["archive-dir"], 0o700)
elif not os.path.isdir(opts["archive-dir"]):
print(
- _("dispatch-conf: Config archive dir [%s] must exist; fatal")
- % (opts["archive-dir"],),
+ _(
+ rf"""dispatch-conf: Config archive dir [{opts["archive-dir"]}] must exist; fatal"""
+ ),
file=sys.stderr,
)
sys.exit(1)
@@ -193,13 +184,9 @@ def _archive_copy(src_st, src_path, dest_path):
os.symlink(os.readlink(src_path), dest_path)
else:
shutil.copy2(src_path, dest_path)
- except EnvironmentError as e:
+ except OSError as e:
portage.util.writemsg(
- _(
- "dispatch-conf: Error copying %(src_path)s to "
- "%(dest_path)s: %(reason)s\n"
- )
- % {"src_path": src_path, "dest_path": dest_path, "reason": e},
+ f"dispatch-conf: Error copying {src_path} to {dest_path}: {e}\n",
noiselevel=-1,
)
@@ -226,9 +213,9 @@ def rcs_archive(archive, curconf, newconf, mrgconf):
):
_archive_copy(curconf_st, curconf, archive)
- if os.path.lexists(archive + ",v"):
- os.system(RCS_LOCK + " " + archive)
- os.system(RCS_PUT + " " + archive)
+ if os.path.lexists(f"{archive},v"):
+ os.system(f"{RCS_LOCK} {archive}")
+ os.system(f"{RCS_PUT} {archive}")
ret = 0
mystat = None
@@ -241,20 +228,24 @@ def rcs_archive(archive, curconf, newconf, mrgconf):
if mystat is not None and (
stat.S_ISREG(mystat.st_mode) or stat.S_ISLNK(mystat.st_mode)
):
- os.system(RCS_GET + " -r" + RCS_BRANCH + " " + archive)
+ os.system(f"{RCS_GET} -r{RCS_BRANCH} {archive}")
has_branch = os.path.lexists(archive)
if has_branch:
- os.rename(archive, archive + ".dist")
+ os.rename(archive, f"{archive}.dist")
_archive_copy(mystat, newconf, archive)
- if has_branch:
- if mrgconf and os.path.isfile(archive) and os.path.isfile(mrgconf):
- # This puts the results of the merge into mrgconf.
- ret = os.system(RCS_MERGE % (archive, mrgconf))
- os.chmod(mrgconf, mystat.st_mode)
- os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
- os.rename(archive, archive + ".dist.new")
+ if (
+ has_branch
+ and mrgconf
+ and os.path.isfile(archive)
+ and os.path.isfile(mrgconf)
+ ):
+ # This puts the results of the merge into mrgconf.
+ ret = os.system(f"rcsmerge -p -r{RCS_BRANCH} '{archive}' > '{mrgconf}'")
+ os.chmod(mrgconf, mystat.st_mode)
+ os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
+ os.rename(archive, f"{archive}.dist.new")
return ret
@@ -273,8 +264,7 @@ def _file_archive_rotate(archive):
for max_suf, max_st, max_path in (
(suf, os.lstat(path), path)
for suf, path in (
- (suf, "%s.%s" % (archive, suf))
- for suf in range(1, _ARCHIVE_ROTATE_MAX + 1)
+ (suf, f"{archive}.{suf}") for suf in range(1, _ARCHIVE_ROTATE_MAX + 1)
)
):
pass
@@ -290,7 +280,7 @@ def _file_archive_rotate(archive):
# Removing a directory might destroy something important,
# so rename it instead.
head, tail = os.path.split(archive)
- placeholder = tempfile.NamedTemporaryFile(prefix="%s." % tail, dir=head)
+ placeholder = tempfile.NamedTemporaryFile(prefix=f"{tail}.", dir=head)
placeholder.close()
os.rename(max_path, placeholder.name)
else:
@@ -300,9 +290,9 @@ def _file_archive_rotate(archive):
max_suf -= 1
for suf in range(max_suf + 1, 1, -1):
- os.rename("%s.%s" % (archive, suf - 1), "%s.%s" % (archive, suf))
+ os.rename(f"{archive}.{suf - 1}", f"{archive}.{suf}")
- os.rename(archive, "%s.1" % (archive,))
+ os.rename(archive, f"{archive}.1")
def _file_archive_ensure_dir(parent_dir):
@@ -372,7 +362,7 @@ def file_archive(archive, curconf, newconf, mrgconf):
stat.S_ISREG(mystat.st_mode) or stat.S_ISLNK(mystat.st_mode)
):
# Save off new config file in the archive dir with .dist.new suffix
- newconf_archive = archive + ".dist.new"
+ newconf_archive = f"{archive}.dist.new"
if os.path.isdir(newconf_archive) and not os.path.islink(newconf_archive):
_file_archive_rotate(newconf_archive)
_archive_copy(mystat, newconf, newconf_archive)
@@ -382,11 +372,11 @@ def file_archive(archive, curconf, newconf, mrgconf):
mrgconf
and os.path.isfile(curconf)
and os.path.isfile(newconf)
- and os.path.isfile(archive + ".dist")
+ and os.path.isfile(f"{archive}.dist")
):
# This puts the results of the merge into mrgconf.
ret = os.system(
- DIFF3_MERGE % (curconf, archive + ".dist", newconf, mrgconf)
+ f"diff3 -mE '{curconf}' '{archive}.dist' '{newconf}' > '{mrgconf}'"
)
os.chmod(mrgconf, mystat.st_mode)
os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
@@ -397,24 +387,24 @@ def file_archive(archive, curconf, newconf, mrgconf):
def rcs_archive_post_process(archive):
"""Check in the archive file with the .dist.new suffix on the branch
and remove the one with the .dist suffix."""
- os.rename(archive + ".dist.new", archive)
- if os.path.lexists(archive + ".dist"):
+ os.rename(f"{archive}.dist.new", archive)
+ if os.path.lexists(f"{archive}.dist"):
# Commit the last-distributed version onto the branch.
- os.system(RCS_LOCK + RCS_BRANCH + " " + archive)
- os.system(RCS_PUT + " -r" + RCS_BRANCH + " " + archive)
- os.unlink(archive + ".dist")
+ os.system(f"{RCS_LOCK}{RCS_BRANCH} {archive}")
+ os.system(f"{RCS_PUT} -r{RCS_BRANCH} {archive}")
+ os.unlink(f"{archive}.dist")
else:
# Forcefully commit the last-distributed version onto the branch.
- os.system(RCS_PUT + " -f -r" + RCS_BRANCH + " " + archive)
+ os.system(f"{RCS_PUT} -f -r{RCS_BRANCH} {archive}")
def file_archive_post_process(archive):
"""Rename the archive file with the .dist.new suffix to a .dist suffix"""
- if os.path.lexists(archive + ".dist.new"):
- dest = "%s.dist" % archive
+ if os.path.lexists(f"{archive}.dist.new"):
+ dest = f"{archive}.dist"
if os.path.isdir(dest) and not os.path.islink(dest):
_file_archive_rotate(dest)
- os.rename(archive + ".dist.new", dest)
+ os.rename(f"{archive}.dist.new", dest)
def perform_conf_update_hooks(kind, conf):
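The rotation scheme those f-strings spell out is easiest to see in isolation. Below is a toy mimic of _file_archive_rotate's naming; it skips the real function's directory-placeholder and error handling, so treat it as illustration only.

    import os

    def rotate(archive: str, rotate_max: int = 9) -> None:
        # Shift archive.N up to archive.N+1, newest last, then move
        # the live file to archive.1, mirroring f"{archive}.{suf}".
        max_suf = 0
        for suf in range(1, rotate_max + 1):
            if os.path.lexists(f"{archive}.{suf}"):
                max_suf = suf
        # Capping at rotate_max - 1 lets the oldest copy be clobbered.
        for suf in range(min(max_suf, rotate_max - 1), 0, -1):
            os.rename(f"{archive}.{suf}", f"{archive}.{suf + 1}")
        os.rename(archive, f"{archive}.1")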
diff --git a/lib/portage/eapi.py b/lib/portage/eapi.py
index adee87d00..2c1701870 100644
--- a/lib/portage/eapi.py
+++ b/lib/portage/eapi.py
@@ -2,361 +2,215 @@
# Distributed under the terms of the GNU General Public License v2
import collections
-import operator
-import types
+from functools import lru_cache
+from typing import Optional
from portage import eapi_is_supported
-def eapi_has_iuse_defaults(eapi):
- return eapi != "0"
+def eapi_has_iuse_defaults(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).iuse_defaults
-def eapi_has_iuse_effective(eapi):
- return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
+def eapi_has_iuse_effective(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).iuse_effective
-def eapi_has_slot_deps(eapi):
- return eapi != "0"
+def eapi_has_slot_deps(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).slot_deps
-def eapi_has_slot_operator(eapi):
- return eapi not in ("0", "1", "2", "3", "4", "4-python")
+def eapi_has_slot_operator(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).slot_operator
-def eapi_has_src_uri_arrows(eapi):
- return eapi not in ("0", "1")
+def eapi_has_src_uri_arrows(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).src_uri_arrows
-def eapi_has_selective_src_uri_restriction(eapi):
- return eapi not in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- "7",
- )
+def eapi_has_selective_src_uri_restriction(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).selective_src_uri_restriction
-def eapi_has_use_deps(eapi):
- return eapi not in ("0", "1")
+def eapi_has_use_deps(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).use_deps
-def eapi_has_strong_blocks(eapi):
- return eapi not in ("0", "1")
+def eapi_has_strong_blocks(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).strong_blocks
-def eapi_has_src_prepare_and_src_configure(eapi):
+def eapi_has_src_prepare_and_src_configure(eapi: str) -> bool:
return eapi not in ("0", "1")
-def eapi_supports_prefix(eapi):
- return eapi not in ("0", "1", "2")
+def eapi_supports_prefix(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).prefix
-def eapi_exports_AA(eapi):
- return eapi in ("0", "1", "2", "3")
+def eapi_exports_AA(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_AA
-def eapi_exports_KV(eapi):
- return eapi in ("0", "1", "2", "3")
+def eapi_exports_KV(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_KV
-def eapi_exports_merge_type(eapi):
- return eapi not in ("0", "1", "2", "3")
+def eapi_exports_merge_type(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_merge_type
-def eapi_exports_replace_vars(eapi):
- return eapi not in ("0", "1", "2", "3")
+def eapi_exports_replace_vars(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_replace_vars
-def eapi_exports_EBUILD_PHASE_FUNC(eapi):
- return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
+def eapi_exports_EBUILD_PHASE_FUNC(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_EBUILD_PHASE_FUNC
-def eapi_exports_PORTDIR(eapi):
- return eapi in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+def eapi_exports_PORTDIR(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_PORTDIR
-def eapi_exports_ECLASSDIR(eapi):
- return eapi in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
-
+def eapi_exports_ECLASSDIR(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).exports_ECLASSDIR
-def eapi_exports_REPOSITORY(eapi):
- return eapi in ("4-python", "5-progress")
-
-def eapi_has_pkg_pretend(eapi):
+def eapi_has_pkg_pretend(eapi: str) -> bool:
return eapi not in ("0", "1", "2", "3")
-def eapi_has_implicit_rdepend(eapi):
+def eapi_has_implicit_rdepend(eapi: str) -> bool:
return eapi in ("0", "1", "2", "3")
-def eapi_has_dosed_dohard(eapi):
+def eapi_has_dosed_dohard(eapi: str) -> bool:
return eapi in ("0", "1", "2", "3")
-def eapi_has_required_use(eapi):
- return eapi not in ("0", "1", "2", "3")
-
+def eapi_has_required_use(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).required_use
-def eapi_has_required_use_at_most_one_of(eapi):
- return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
+def eapi_has_required_use_at_most_one_of(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).required_use_at_most_one_of
-def eapi_has_use_dep_defaults(eapi):
- return eapi not in ("0", "1", "2", "3")
+def eapi_has_use_dep_defaults(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).use_dep_defaults
-def eapi_requires_posixish_locale(eapi):
- return eapi not in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- )
+def eapi_requires_posixish_locale(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).posixish_locale
-def eapi_has_repo_deps(eapi):
- return eapi in ("4-python", "5-progress")
+def eapi_has_repo_deps(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).repo_deps
-def eapi_allows_dots_in_PN(eapi):
- return eapi in ("4-python", "5-progress")
+def eapi_supports_stable_use_forcing_and_masking(eapi: str) -> bool:
+ return eapi not in ("0", "1", "2", "3", "4", "4-slot-abi")
-def eapi_allows_dots_in_use_flags(eapi):
- return eapi in ("4-python", "5-progress")
-
-def eapi_supports_stable_use_forcing_and_masking(eapi):
- return eapi not in ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
-
-
-def eapi_allows_directories_on_profile_level_and_repository_level(eapi):
+def eapi_allows_directories_on_profile_level_and_repository_level(eapi: str) -> bool:
return eapi not in ("0", "1", "2", "3", "4", "4-slot-abi", "5", "6")
-def eapi_has_use_aliases(eapi):
- return eapi in ("4-python", "5-progress")
+def eapi_allows_package_provided(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).allows_package_provided
-def eapi_has_automatic_unpack_dependencies(eapi):
- return eapi in ("5-progress",)
+def eapi_has_bdepend(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).bdepend
-def eapi_allows_package_provided(eapi):
- return eapi in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+def eapi_has_idepend(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).idepend
-def eapi_has_bdepend(eapi):
- return eapi not in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+def eapi_empty_groups_always_true(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).empty_groups_always_true
-def eapi_has_idepend(eapi):
- return eapi not in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- "7",
- )
+def eapi_path_variables_end_with_trailing_slash(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).path_variables_end_with_trailing_slash
-def eapi_empty_groups_always_true(eapi):
- return eapi in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+def eapi_has_broot(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).broot
-def eapi_path_variables_end_with_trailing_slash(eapi):
- return eapi in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+def eapi_has_sysroot(eapi: str) -> bool:
+ return _get_eapi_attrs(eapi).sysroot
-def eapi_has_broot(eapi):
- return eapi not in (
- "0",
- "1",
- "2",
- "3",
- "4",
- "4-python",
- "4-slot-abi",
- "5",
- "5-progress",
- "6",
- )
+_eapi_attrs = collections.namedtuple(
+ "_eapi_attrs",
+ (
+ "allows_package_provided",
+ "bdepend",
+ "broot",
+ "exports_AA",
+ "exports_EBUILD_PHASE_FUNC",
+ "exports_ECLASSDIR",
+ "exports_KV",
+ "exports_merge_type",
+ "exports_PORTDIR",
+ "exports_replace_vars",
+ "feature_flag_test",
+ "idepend",
+ "iuse_defaults",
+ "iuse_effective",
+ "posixish_locale",
+ "path_variables_end_with_trailing_slash",
+ "prefix",
+ "repo_deps",
+ "required_use",
+ "required_use_at_most_one_of",
+ "selective_src_uri_restriction",
+ "slot_operator",
+ "slot_deps",
+ "src_uri_arrows",
+ "strong_blocks",
+ "use_deps",
+ "use_dep_defaults",
+ "empty_groups_always_true",
+ "sysroot",
+ ),
+)
-def eapi_has_sysroot(eapi):
- return eapi not in (
+class Eapi:
+ ALL_EAPIS = (
"0",
"1",
"2",
"3",
"4",
- "4-python",
"4-slot-abi",
"5",
- "5-progress",
"6",
+ "7",
+ "8",
)
+ _eapi_val: int = -1
-_eapi_attrs = collections.namedtuple(
- "_eapi_attrs",
- "allows_package_provided "
- "bdepend "
- "broot "
- "dots_in_PN dots_in_use_flags "
- "exports_AA "
- "exports_EBUILD_PHASE_FUNC "
- "exports_ECLASSDIR "
- "exports_KV "
- "exports_merge_type "
- "exports_PORTDIR "
- "exports_replace_vars "
- "feature_flag_test "
- "idepend iuse_defaults iuse_effective posixish_locale "
- "path_variables_end_with_trailing_slash "
- "prefix "
- "repo_deps required_use required_use_at_most_one_of "
- "selective_src_uri_restriction slot_operator slot_deps "
- "src_uri_arrows strong_blocks use_deps use_dep_defaults "
- "empty_groups_always_true sysroot",
-)
-
-
-_eapi_attr_func_prefixes = (
- "eapi_allows_",
- "eapi_has_",
- "eapi_requires_",
- "eapi_supports_",
- "eapi_",
-)
+ def __init__(self, eapi_string: str):
+        if eapi_string not in self.ALL_EAPIS:
+ raise ValueError(f"'{eapi_string}' not recognized as a valid EAPI")
+ self._eapi_val = int(eapi_string.partition("-")[0])
-def _eapi_func_decorator(func, attr_getter):
- def wrapper(eapi):
- return attr_getter(_get_eapi_attrs(eapi))
+ def __ge__(self, other: "Eapi") -> bool:
+ return self._eapi_val >= other._eapi_val
- wrapper.func = func
- wrapper.__doc__ = func.__doc__
- return wrapper
+ def __le__(self, other: "Eapi") -> bool:
+ return self._eapi_val <= other._eapi_val
-def _decorate_eapi_funcs():
- """
- Decorate eapi_* functions so that they use _get_eapi_attrs(eapi)
- to cache results.
- """
- decorated = {}
- for k, v in globals().items():
- if not (
- isinstance(v, types.FunctionType) and k.startswith(_eapi_attr_func_prefixes)
- ):
- continue
- for prefix in _eapi_attr_func_prefixes:
- if k.startswith(prefix):
- attr_name = k[len(prefix) :]
- if hasattr(_eapi_attrs, attr_name):
- decorated[k] = _eapi_func_decorator(
- v, operator.attrgetter(attr_name)
- )
- break
- globals().update(decorated)
-
-
-_decorate_eapi_funcs()
-
-
-_eapi_attrs_cache = {}
-
-
-def _get_eapi_attrs(eapi):
+@lru_cache(32)
+def _get_eapi_attrs(eapi_str: Optional[str]) -> _eapi_attrs:
"""
When eapi is None then validation is not as strict, since we want the
same to work for multiple EAPIs that may have slightly different rules.
@@ -364,59 +218,68 @@ def _get_eapi_attrs(eapi):
be helpful for handling of corrupt EAPI metadata in essential functions
such as pkgsplit.
"""
- eapi_attrs = _eapi_attrs_cache.get(eapi)
- if eapi_attrs is not None:
- return eapi_attrs
-
- orig_eapi = eapi
- if eapi is not None and not eapi_is_supported(eapi):
- eapi = None
-
- eapi_attrs = _eapi_attrs(
- allows_package_provided=(
- eapi is None or eapi_allows_package_provided.func(eapi)
- ),
- bdepend=(eapi is not None and eapi_has_bdepend.func(eapi)),
- broot=(eapi is None or eapi_has_broot.func(eapi)),
- dots_in_PN=(eapi is None or eapi_allows_dots_in_PN.func(eapi)),
- dots_in_use_flags=(eapi is None or eapi_allows_dots_in_use_flags.func(eapi)),
- empty_groups_always_true=(
- eapi is not None and eapi_empty_groups_always_true.func(eapi)
- ),
- exports_AA=(eapi is not None and eapi_exports_AA.func(eapi)),
- exports_EBUILD_PHASE_FUNC=(
- eapi is None or eapi_exports_EBUILD_PHASE_FUNC.func(eapi)
- ),
- exports_ECLASSDIR=(eapi is not None and eapi_exports_ECLASSDIR.func(eapi)),
- exports_KV=(eapi is not None and eapi_exports_KV.func(eapi)),
- exports_merge_type=(eapi is None or eapi_exports_merge_type.func(eapi)),
- exports_PORTDIR=(eapi is None or eapi_exports_PORTDIR.func(eapi)),
- exports_replace_vars=(eapi is None or eapi_exports_replace_vars.func(eapi)),
- feature_flag_test=False,
- idepend=(eapi is not None and eapi_has_idepend.func(eapi)),
- iuse_defaults=(eapi is None or eapi_has_iuse_defaults.func(eapi)),
- iuse_effective=(eapi is not None and eapi_has_iuse_effective.func(eapi)),
- path_variables_end_with_trailing_slash=(
- eapi is not None and eapi_path_variables_end_with_trailing_slash.func(eapi)
- ),
- posixish_locale=(eapi is not None and eapi_requires_posixish_locale.func(eapi)),
- prefix=(eapi is None or eapi_supports_prefix.func(eapi)),
- repo_deps=(eapi is None or eapi_has_repo_deps.func(eapi)),
- required_use=(eapi is None or eapi_has_required_use.func(eapi)),
- required_use_at_most_one_of=(
- eapi is None or eapi_has_required_use_at_most_one_of.func(eapi)
- ),
- selective_src_uri_restriction=(
- eapi is None or eapi_has_selective_src_uri_restriction.func(eapi)
- ),
- slot_deps=(eapi is None or eapi_has_slot_deps.func(eapi)),
- slot_operator=(eapi is None or eapi_has_slot_operator.func(eapi)),
- src_uri_arrows=(eapi is None or eapi_has_src_uri_arrows.func(eapi)),
- strong_blocks=(eapi is None or eapi_has_strong_blocks.func(eapi)),
- sysroot=(eapi is None or eapi_has_sysroot.func(eapi)),
- use_deps=(eapi is None or eapi_has_use_deps.func(eapi)),
- use_dep_defaults=(eapi is None or eapi_has_use_dep_defaults.func(eapi)),
- )
-
- _eapi_attrs_cache[orig_eapi] = eapi_attrs
- return eapi_attrs
+ if eapi_str is None or not eapi_is_supported(eapi_str):
+ return _eapi_attrs(
+ allows_package_provided=True,
+ bdepend=False,
+ broot=True,
+ empty_groups_always_true=False,
+ exports_AA=False,
+ exports_EBUILD_PHASE_FUNC=True,
+ exports_ECLASSDIR=False,
+ exports_KV=False,
+ exports_merge_type=True,
+ exports_PORTDIR=True,
+ exports_replace_vars=True,
+ feature_flag_test=False,
+ idepend=False,
+ iuse_defaults=True,
+ iuse_effective=False,
+ path_variables_end_with_trailing_slash=False,
+ posixish_locale=False,
+ prefix=True,
+ repo_deps=True,
+ required_use=True,
+ required_use_at_most_one_of=True,
+ selective_src_uri_restriction=True,
+ slot_deps=True,
+ slot_operator=True,
+ src_uri_arrows=True,
+ strong_blocks=True,
+ sysroot=True,
+ use_deps=True,
+ use_dep_defaults=True,
+ )
+ else:
+ eapi = Eapi(eapi_str)
+ return _eapi_attrs(
+ allows_package_provided=eapi <= Eapi("6"),
+ bdepend=eapi >= Eapi("7"),
+ broot=eapi >= Eapi("7"),
+ empty_groups_always_true=eapi <= Eapi("6"),
+ exports_AA=eapi <= Eapi("3"),
+ exports_EBUILD_PHASE_FUNC=eapi >= Eapi("5"),
+ exports_ECLASSDIR=eapi <= Eapi("6"),
+ exports_KV=eapi <= Eapi("3"),
+ exports_merge_type=eapi >= Eapi("4"),
+ exports_PORTDIR=eapi <= Eapi("6"),
+ exports_replace_vars=eapi >= Eapi("4"),
+ feature_flag_test=False,
+ idepend=eapi >= Eapi("8"),
+ iuse_defaults=eapi >= Eapi("1"),
+ iuse_effective=eapi >= Eapi("5"),
+ path_variables_end_with_trailing_slash=eapi <= Eapi("6"),
+ posixish_locale=eapi >= Eapi("6"),
+ prefix=eapi >= Eapi("3"),
+ repo_deps=False,
+ required_use=eapi >= Eapi("4"),
+ required_use_at_most_one_of=eapi >= Eapi("5"),
+ selective_src_uri_restriction=eapi >= Eapi("8"),
+ slot_deps=eapi >= Eapi("1"),
+ slot_operator=eapi >= Eapi("5"),
+ src_uri_arrows=eapi >= Eapi("2"),
+ strong_blocks=eapi >= Eapi("2"),
+ sysroot=eapi >= Eapi("7"),
+ use_deps=eapi >= Eapi("2"),
+ use_dep_defaults=eapi >= Eapi("4"),
+ )
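The net effect of this rewrite: feature checks become version bounds on an ordered Eapi value instead of hand-maintained tuples. A quick sketch of the resulting behavior, following the attribute table above:

    from portage.eapi import Eapi, _get_eapi_attrs, eapi_has_bdepend, eapi_has_idepend

    assert Eapi("8") >= Eapi("7")
    # Suffixed EAPIs compare by their numeric base: "4-slot-abi" acts as 4.
    assert Eapi("4-slot-abi") <= Eapi("4")
    assert eapi_has_bdepend("7") and not eapi_has_bdepend("6")
    assert eapi_has_idepend("8") and not eapi_has_idepend("7")
    # None or unsupported EAPI strings fall back to the permissive defaults.
    assert _get_eapi_attrs(None).allows_package_provided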
diff --git a/lib/portage/eclass_cache.py b/lib/portage/eclass_cache.py
index c89b70922..c4c783168 100644
--- a/lib/portage/eclass_cache.py
+++ b/lib/portage/eclass_cache.py
@@ -47,7 +47,7 @@ class hashed_path:
return val
def __repr__(self):
- return "<portage.eclass_cache.hashed_path('%s')>" % (self.location,)
+ return f"<portage.eclass_cache.hashed_path('{self.location}')>"
class cache:
@@ -98,7 +98,7 @@ class cache:
that have the same name.
"""
if not isinstance(other, self.__class__):
- raise TypeError("expected type %s, got %s" % (self.__class__, type(other)))
+ raise TypeError(f"expected type {self.__class__}, got {type(other)}")
self.porttrees = self.porttrees + other.porttrees
self.eclasses.update(other.eclasses)
self._eclass_locations.update(other._eclass_locations)
@@ -110,7 +110,7 @@ class cache:
master_eclasses = {}
eclass_len = len(".eclass")
ignored_listdir_errnos = (errno.ENOENT, errno.ENOTDIR)
- for x in [normalize_path(os.path.join(y, "eclass")) for y in self.porttrees]:
+ for x in (normalize_path(os.path.join(y, "eclass")) for y in self.porttrees):
try:
eclass_filenames = os.listdir(x)
except OSError as e:
@@ -169,10 +169,7 @@ class cache:
return d
def get_eclass_data(self, inherits):
- ec_dict = {}
- for x in inherits:
- ec_dict[x] = self.eclasses[x]
-
+ ec_dict = {x: self.eclasses[x] for x in inherits}
return ec_dict
@property
diff --git a/lib/portage/elog/__init__.py b/lib/portage/elog/__init__.py
index 48e2a39de..2d835ddb1 100644
--- a/lib/portage/elog/__init__.py
+++ b/lib/portage/elog/__init__.py
@@ -2,7 +2,6 @@
# Copyright 2006-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import sys
import portage
@@ -59,7 +58,7 @@ def _combine_logentries(logentries):
for msgtype, msgcontent in logentries[phase]:
if previous_type != msgtype:
previous_type = msgtype
- rValue.append("%s: %s" % (msgtype, phase))
+ rValue.append(f"{msgtype}: {phase}")
if isinstance(msgcontent, str):
rValue.append(msgcontent.rstrip("\n"))
else:
@@ -200,8 +199,8 @@ def elog_process(cpv, mysettings, phasefilter=None):
)
% str(s)
)
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
except AlarmSignal:
- writemsg("Timeout in elog_process for system '%s'\n" % s, noiselevel=-1)
+ writemsg(f"Timeout in elog_process for system '{s}'\n", noiselevel=-1)
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
diff --git a/lib/portage/elog/meson.build b/lib/portage/elog/meson.build
new file mode 100644
index 000000000..c76b5f017
--- /dev/null
+++ b/lib/portage/elog/meson.build
@@ -0,0 +1,16 @@
+py.install_sources(
+ [
+ 'filtering.py',
+ 'messages.py',
+ 'mod_custom.py',
+ 'mod_echo.py',
+ 'mod_mail.py',
+ 'mod_mail_summary.py',
+ 'mod_save.py',
+ 'mod_save_summary.py',
+ 'mod_syslog.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/elog',
+ pure : not native_extensions
+)
diff --git a/lib/portage/elog/messages.py b/lib/portage/elog/messages.py
index 6a67a45de..3ec9dd80e 100644
--- a/lib/portage/elog/messages.py
+++ b/lib/portage/elog/messages.py
@@ -17,7 +17,6 @@ from portage import _encodings
from portage import _unicode_encode
from portage import _unicode_decode
-import io
import sys
_log_levels = frozenset(
@@ -57,9 +56,8 @@ def collect_ebuild_messages(path):
logentries[msgfunction] = []
lastmsgtype = None
msgcontent = []
- f = io.open(
+ f = open(
_unicode_encode(filename, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
diff --git a/lib/portage/elog/mod_custom.py b/lib/portage/elog/mod_custom.py
index e0ae77e10..a3e199bcb 100644
--- a/lib/portage/elog/mod_custom.py
+++ b/lib/portage/elog/mod_custom.py
@@ -1,10 +1,33 @@
# elog/mod_custom.py - elog dispatch module
-# Copyright 2006-2020 Gentoo Authors
+# Copyright 2006-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import types
+
+import portage
import portage.elog.mod_save
import portage.exception
import portage.process
+from portage.util.futures import asyncio
+
+# Since elog_process is typically called while the event loop is
+# running, hold references to spawned processes and wait for them
+# asynchronously, ultimately waiting for them if necessary when
+# the AsyncioEventLoop _close_main method calls _async_finalize
+# via portage.process.run_coroutine_exitfuncs().
+_proc_refs = None
+
+
+def _get_procs() -> list[tuple[portage.process.MultiprocessingProcess, asyncio.Future]]:
+ """
+ Return list of (proc, asyncio.ensure_future(proc.wait())) which is not
+ inherited from the parent after fork.
+ """
+ global _proc_refs
+ if _proc_refs is None or _proc_refs.pid != portage.getpid():
+ _proc_refs = types.SimpleNamespace(pid=portage.getpid(), procs=[])
+ portage.process.atexit_register(_async_finalize)
+ return _proc_refs.procs
def process(mysettings, key, logentries, fulltext):
@@ -18,8 +41,50 @@ def process(mysettings, key, logentries, fulltext):
mylogcmd = mysettings["PORTAGE_ELOG_COMMAND"]
mylogcmd = mylogcmd.replace("${LOGFILE}", elogfilename)
mylogcmd = mylogcmd.replace("${PACKAGE}", key)
- retval = portage.process.spawn_bash(mylogcmd)
- if retval != 0:
+ loop = asyncio.get_event_loop()
+ proc = portage.process.spawn_bash(mylogcmd, returnproc=True)
+ procs = _get_procs()
+ procs.append((proc, asyncio.ensure_future(proc.wait(), loop=loop)))
+ for index, (proc, waiter) in reversed(list(enumerate(procs))):
+ if not waiter.done():
+ continue
+ del procs[index]
+ if waiter.result() != 0:
+ raise portage.exception.PortageException(
+ f"!!! PORTAGE_ELOG_COMMAND failed with exitcode {waiter.result()}"
+ )
+
+
+async def _async_finalize():
+ """
+ Async finalize is preferred, since we can wait for process exit status.
+ """
+ procs = _get_procs()
+ while procs:
+ proc, waiter = procs.pop()
+ if (await waiter) != 0:
+ raise portage.exception.PortageException(
+ f"!!! PORTAGE_ELOG_COMMAND failed with exitcode {waiter.result()}"
+ )
+
+
+def finalize():
+ """
+ NOTE: This raises PortageException if there are any processes
+ still running, so it's better to use _async_finalize instead
+ (invoked via portage.process.run_coroutine_exitfuncs() in
+ the AsyncioEventLoop _close_main method).
+ """
+ procs = _get_procs()
+ while procs:
+ proc, waiter = procs.pop()
+ if not waiter.done():
+ waiter.cancel()
+ proc.terminate()
+ raise portage.exception.PortageException(
+ f"!!! PORTAGE_ELOG_COMMAND was killed after it was found running in the background (pid {proc.pid})"
+ )
+ elif waiter.result() != 0:
raise portage.exception.PortageException(
- "!!! PORTAGE_ELOG_COMMAND failed with exitcode %d" % retval
+ f"!!! PORTAGE_ELOG_COMMAND failed with exitcode {waiter.result()}"
)
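The bookkeeping pattern here (spawn, keep (proc, waiter) pairs, reap finished entries opportunistically, drain the rest at finalize) can be sketched with stock asyncio, independent of portage.process; the names and the command are hypothetical.

    import asyncio

    procs = []  # list of (Process, Task) pairs, like _get_procs()

    async def spawn(cmd: str) -> None:
        proc = await asyncio.create_subprocess_shell(cmd)
        procs.append((proc, asyncio.ensure_future(proc.wait())))
        # Opportunistically reap finished entries, newest first.
        for i, (_, waiter) in reversed(list(enumerate(procs))):
            if waiter.done():
                del procs[i]
                if waiter.result() != 0:
                    raise RuntimeError(f"exit code {waiter.result()}")

    async def finalize() -> None:
        # Drain remaining waiters, like _async_finalize().
        while procs:
            _, waiter = procs.pop()
            if await waiter != 0:
                raise RuntimeError("command failed")

    async def main() -> None:
        await spawn("true")
        await finalize()

    asyncio.run(main())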
diff --git a/lib/portage/elog/mod_echo.py b/lib/portage/elog/mod_echo.py
index a63939988..1043edf20 100644
--- a/lib/portage/elog/mod_echo.py
+++ b/lib/portage/elog/mod_echo.py
@@ -22,7 +22,12 @@ def process(mysettings, key, logentries, fulltext):
and "PORTAGE_LOG_FILE" in mysettings
):
logfile = mysettings["PORTAGE_LOG_FILE"]
- _items.append((mysettings["ROOT"], key, logentries, logfile))
+
+ try:
+ binary = mysettings.configdict["pkg"]["MERGE_TYPE"] == "binary"
+ except KeyError:
+ binary = False
+ _items.append((mysettings["ROOT"], key, logentries, logfile, binary))
def finalize():
@@ -42,14 +47,17 @@ def finalize():
def _finalize():
global _items
printer = EOutput()
- for root, key, logentries, logfile in _items:
+ for root, key, logentries, logfile, binary in _items:
+ color = "PKG_BINARY_MERGE" if binary else "INFORM"
+
print()
+
if root == "/":
- printer.einfo(_("Messages for package %s:") % colorize("INFORM", key))
+ printer.einfo(_("Messages for package %s:") % colorize(color, key))
else:
printer.einfo(
_("Messages for package %(pkg)s merged to %(root)s:")
- % {"pkg": colorize("INFORM", key), "root": root}
+ % {"pkg": colorize(color, key), "root": root}
)
if logfile is not None:
printer.einfo(_("Log file: %s") % colorize("INFORM", logfile))
diff --git a/lib/portage/elog/mod_mail.py b/lib/portage/elog/mod_mail.py
index 22083ac5c..47293afc9 100644
--- a/lib/portage/elog/mod_mail.py
+++ b/lib/portage/elog/mod_mail.py
@@ -41,4 +41,4 @@ def process(mysettings, key, logentries, fulltext):
try:
portage.mail.send_mail(mysettings, mymessage)
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
diff --git a/lib/portage/elog/mod_mail_summary.py b/lib/portage/elog/mod_mail_summary.py
index a695290ef..cd356911b 100644
--- a/lib/portage/elog/mod_mail_summary.py
+++ b/lib/portage/elog/mod_mail_summary.py
@@ -79,7 +79,7 @@ def _finalize(mysettings, items):
"process %(pid)d on host %(host)s:\n"
) % {"pid": portage.getpid(), "host": socket.getfqdn()}
for key in items:
- mybody += "- %s\n" % key
+ mybody += f"- {key}\n"
mymessage = portage.mail.create_message(
myfrom, myrecipient, mysubject, mybody, attachments=list(items.values())
@@ -97,6 +97,6 @@ def _finalize(mysettings, items):
"Timeout in finalize() for elog system 'mail_summary'\n", noiselevel=-1
)
except PortageException as e:
- writemsg("%s\n" % (e,), noiselevel=-1)
+ writemsg(f"{e}\n", noiselevel=-1)
return
diff --git a/lib/portage/elog/mod_save.py b/lib/portage/elog/mod_save.py
index aedfd0a38..126aded6e 100644
--- a/lib/portage/elog/mod_save.py
+++ b/lib/portage/elog/mod_save.py
@@ -3,7 +3,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import time
import portage
from portage import os
@@ -16,12 +15,11 @@ from portage.util import apply_permissions, ensure_dirs, normalize_path
def process(mysettings, key, logentries, fulltext):
-
if mysettings.get("PORTAGE_LOGDIR"):
logdir = normalize_path(mysettings["PORTAGE_LOGDIR"])
else:
logdir = os.path.join(
- os.sep, mysettings["EPREFIX"].lstrip(os.sep), "var", "log", "portage"
+ os.sep, mysettings["BROOT"].lstrip(os.sep), "var", "log", "portage"
)
if not os.path.isdir(logdir):
@@ -57,15 +55,15 @@ def process(mysettings, key, logentries, fulltext):
_ensure_log_subdirs(logdir, log_subdir)
try:
- with io.open(
+ with open(
_unicode_encode(elogfilename, encoding=_encodings["fs"], errors="strict"),
mode="w",
encoding=_encodings["content"],
errors="backslashreplace",
) as elogfile:
elogfile.write(_unicode_decode(fulltext))
- except IOError as e:
- func_call = "open('%s', 'w')" % elogfilename
+ except OSError as e:
+ func_call = f"open('{elogfilename}', 'w')"
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
elif e.errno == errno.EPERM:
diff --git a/lib/portage/elog/mod_save_summary.py b/lib/portage/elog/mod_save_summary.py
index 939198fdc..d2d10f475 100644
--- a/lib/portage/elog/mod_save_summary.py
+++ b/lib/portage/elog/mod_save_summary.py
@@ -3,7 +3,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import time
import portage
from portage import os
@@ -21,7 +20,7 @@ def process(mysettings, key, logentries, fulltext):
logdir = normalize_path(mysettings["PORTAGE_LOGDIR"])
else:
logdir = os.path.join(
- os.sep, mysettings["EPREFIX"].lstrip(os.sep), "var", "log", "portage"
+ os.sep, mysettings["BROOT"].lstrip(os.sep), "var", "log", "portage"
)
if not os.path.isdir(logdir):
@@ -41,14 +40,14 @@ def process(mysettings, key, logentries, fulltext):
# TODO: Locking
elogfilename = elogdir + "/summary.log"
try:
- elogfile = io.open(
+ elogfile = open(
_unicode_encode(elogfilename, encoding=_encodings["fs"], errors="strict"),
mode="a",
encoding=_encodings["content"],
errors="backslashreplace",
)
- except IOError as e:
- func_call = "open('%s', 'a')" % elogfilename
+ except OSError as e:
+ func_call = f"open('{elogfilename}', 'a')"
if e.errno == errno.EACCES:
raise portage.exception.PermissionDenied(func_call)
elif e.errno == errno.EPERM:
diff --git a/lib/portage/elog/mod_syslog.py b/lib/portage/elog/mod_syslog.py
index e34bd3a92..b2e2583b4 100644
--- a/lib/portage/elog/mod_syslog.py
+++ b/lib/portage/elog/mod_syslog.py
@@ -27,6 +27,6 @@ def process(mysettings, key, logentries, fulltext):
if isinstance(msgcontent, str):
msgcontent = [msgcontent]
for line in msgcontent:
- line = "%s: %s: %s" % (key, phase, line)
+ line = f"{key}: {phase}: {line}"
syslog.syslog(_pri[msgtype], line.rstrip("\n"))
syslog.closelog()
diff --git a/lib/portage/emaint/main.py b/lib/portage/emaint/main.py
index 778b0d145..ad6eea359 100644
--- a/lib/portage/emaint/main.py
+++ b/lib/portage/emaint/main.py
@@ -62,7 +62,7 @@ class OptionItem:
def usage(module_controller):
- _usage = "usage: emaint [options] COMMAND"
+ _usage = "emaint [options] COMMAND"
desc = (
"The emaint program provides an interface to system health "
@@ -72,30 +72,30 @@ def usage(module_controller):
_usage += "\n\n"
for line in textwrap.wrap(desc, 65):
- _usage += "%s\n" % line
+ _usage += f"{line}\n"
_usage += "\nCommands:\n"
- _usage += " %s" % "all".ljust(15) + "Perform all supported commands\n"
+ _usage += f" {'all'.ljust(15)}" + "Perform all supported commands\n"
textwrap.subsequent_indent = " ".ljust(17)
for mod in module_controller.module_names:
desc = textwrap.wrap(module_controller.get_description(mod), 65)
- _usage += " %s%s\n" % (mod.ljust(15), desc[0])
+ _usage += f" {mod.ljust(15)}{desc[0]}\n"
for d in desc[1:]:
- _usage += " %s%s\n" % (" ".ljust(15), d)
+ _usage += f" {' '.ljust(15)}{d}\n"
return _usage
def module_opts(module_controller, module):
- _usage = " %s module options:\n" % module
+ _usage = f" {module} module options:\n"
opts = module_controller.get_func_descriptions(module)
if opts == {}:
opts = DEFAULT_OPTIONS
for opt in sorted(opts):
optd = opts[opt]
if "short" in optd:
- opto = " %s, %s" % (optd["short"], optd["long"])
+ opto = f" {optd['short']}, {optd['long']}"
else:
- opto = " %s" % (optd["long"],)
- _usage += "%s %s\n" % (opto.ljust(15), optd["help"])
+ opto = f" {optd['long']}"
+ _usage += f"{opto.ljust(15)} {optd['help']}\n"
_usage += "\n"
return _usage
@@ -136,7 +136,7 @@ class TaskHandler:
"module_output": self.module_output,
# pass in a copy of the options so a module can not pollute or change
# them for other tasks if there is more to do.
- "options": options.copy(),
+ "options": options.copy() if options else None,
}
returncode, msgs = getattr(inst, func)(**kwargs)
returncodes.append(returncode)
@@ -159,7 +159,6 @@ def print_results(results):
def emaint_main(myargv):
-
# Similar to emerge, emaint needs a default umask so that created
# files (such as the world file) have sane permissions.
os.umask(0o22)
@@ -195,7 +194,7 @@ def emaint_main(myargv):
if len(args) != 1:
parser.error("Incorrect number of arguments")
if args[0] not in module_names:
- parser.error("%s target is not a known target" % args[0])
+ parser.error(f"{args[0]} target is not a known target")
check_opt = None
func = status = long_action = None
@@ -205,18 +204,22 @@ def emaint_main(myargv):
check_opt = opt
if opt.status and getattr(options, opt.target, False):
if long_action is not None:
- parser.error(
- "--%s and %s are exclusive options" % (long_action, opt.long)
- )
+ parser.error(f"--{long_action} and {opt.long} are exclusive options")
status = opt.status
func = opt.func
long_action = opt.long.lstrip("-")
if long_action is None:
- # print("DEBUG: long_action is None: setting to 'check'")
- long_action = "check"
- func = check_opt.func
- status = check_opt.status
+ if args[0] == "sync":
+ # print("DEBUG: long_action is None: setting to 'auto'")
+ long_action = "auto"
+ func = "auto_sync"
+ status = "Syncing %s"
+ else:
+ # print("DEBUG: long_action is None: setting to 'check'")
+ long_action = "check"
+ func = check_opt.func
+ status = check_opt.status
if args[0] == "all":
tasks = []
@@ -228,8 +231,7 @@ def emaint_main(myargv):
tasks = [module_controller.get_class(args[0])]
else:
portage.util.writemsg(
- "\nERROR: module '%s' does not have option '--%s'\n\n"
- % (args[0], long_action),
+ f"\nERROR: module '{args[0]}' does not have option '--{long_action}'\n\n",
noiselevel=-1,
)
portage.util.writemsg(module_opts(module_controller, args[0]), noiselevel=-1)
diff --git a/lib/portage/emaint/meson.build b/lib/portage/emaint/meson.build
new file mode 100644
index 000000000..caca9861f
--- /dev/null
+++ b/lib/portage/emaint/meson.build
@@ -0,0 +1,11 @@
+py.install_sources(
+ [
+ 'defaults.py',
+ 'main.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint',
+ pure : not native_extensions
+)
+
+subdir('modules')
diff --git a/lib/portage/emaint/modules/binhost/binhost.py b/lib/portage/emaint/modules/binhost/binhost.py
index 9d924f7e1..ece686591 100644
--- a/lib/portage/emaint/modules/binhost/binhost.py
+++ b/lib/portage/emaint/modules/binhost/binhost.py
@@ -11,7 +11,6 @@ from portage.versions import _pkg_str
class BinhostHandler:
-
short_desc = "Generate a metadata index for binary packages"
@staticmethod
@@ -26,7 +25,6 @@ class BinhostHandler:
self._pkgindex = self._bintree._load_pkgindex()
def _need_update(self, cpv, data):
-
if "MD5" not in data:
return True
@@ -86,9 +84,9 @@ class BinhostHandler:
missing.append(cpv)
if onProgress:
onProgress(maxval, i + 1)
- errors = ["'%s' is not in Packages" % cpv for cpv in missing]
+ errors = [f"'{cpv}' is not in Packages" for cpv in missing]
for cpv in stale:
- errors.append("'%s' is not in the repository" % cpv)
+ errors.append(f"'{cpv}' is not in the repository")
if errors:
return (False, errors)
return (True, None)
@@ -158,7 +156,7 @@ class BinhostHandler:
bintree._eval_use_flags(cpv, d)
except portage.exception.InvalidDependString:
writemsg(
- "!!! Invalid binary package: '%s'\n" % bintree.getname(cpv),
+ f"!!! Invalid binary package: '{bintree.getname(cpv)}'\n",
noiselevel=-1,
)
else:
diff --git a/lib/portage/emaint/modules/binhost/meson.build b/lib/portage/emaint/modules/binhost/meson.build
new file mode 100644
index 000000000..457737014
--- /dev/null
+++ b/lib/portage/emaint/modules/binhost/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'binhost.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/binhost',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/config/config.py b/lib/portage/emaint/modules/config/config.py
index 1129e9d91..5239b139c 100644
--- a/lib/portage/emaint/modules/config/config.py
+++ b/lib/portage/emaint/modules/config/config.py
@@ -8,7 +8,6 @@ from portage.util import grabdict, writedict
class CleanConfig:
-
short_desc = "Discard any no longer installed configs from emerge's tracker list"
@staticmethod
@@ -33,7 +32,7 @@ class CleanConfig:
keys = sorted(configs)
for config in keys:
if not os.path.exists(config):
- messages.append(" %s" % config)
+ messages.append(f" {config}")
if onProgress:
onProgress(maxval, i + 1)
i += 1
@@ -61,7 +60,7 @@ class CleanConfig:
if not os.path.exists(full_path):
modified = True
configs.pop(config)
- messages.append(" %s" % config)
+ messages.append(f" {config}")
if onProgress:
onProgress(maxval, i + 1)
i += 1
diff --git a/lib/portage/emaint/modules/config/meson.build b/lib/portage/emaint/modules/config/meson.build
new file mode 100644
index 000000000..d346ee7a3
--- /dev/null
+++ b/lib/portage/emaint/modules/config/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'config.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/config',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/logs/logs.py b/lib/portage/emaint/modules/logs/logs.py
index 26ffcd368..fefbe6edb 100644
--- a/lib/portage/emaint/modules/logs/logs.py
+++ b/lib/portage/emaint/modules/logs/logs.py
@@ -15,7 +15,6 @@ ERROR_MESSAGES = {
class CleanLogs:
-
short_desc = "Clean PORTAGE_LOGDIR logs"
@staticmethod
@@ -63,7 +62,7 @@ class CleanLogs:
clean_cmd.remove("-mtime")
clean_cmd.pop(i)
else:
- clean_cmd[clean_cmd.index("-mtime") + 1] = "+%s" % str(num_of_days)
+ clean_cmd[clean_cmd.index("-mtime") + 1] = f"+{str(num_of_days)}"
if pretend:
if "-delete" in clean_cmd:
clean_cmd.remove("-delete")
@@ -98,7 +97,7 @@ class CleanLogs:
if rval in ERROR_MESSAGES:
msg.append(ERROR_MESSAGES[rval])
else:
- msg.append("PORTAGE_LOGDIR_CLEAN command returned %s" % rval)
+ msg.append(f"PORTAGE_LOGDIR_CLEAN command returned {rval}")
msg.append(
"See the make.conf(5) man page for "
"PORTAGE_LOGDIR_CLEAN usage instructions."
diff --git a/lib/portage/emaint/modules/logs/meson.build b/lib/portage/emaint/modules/logs/meson.build
new file mode 100644
index 000000000..be1685cf3
--- /dev/null
+++ b/lib/portage/emaint/modules/logs/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'logs.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/logs',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/merges/merges.py b/lib/portage/emaint/modules/merges/merges.py
index b607da23b..dec97f83e 100644
--- a/lib/portage/emaint/modules/merges/merges.py
+++ b/lib/portage/emaint/modules/merges/merges.py
@@ -22,7 +22,9 @@ class TrackingFile:
@param tracking_path: file path used to keep track of failed merges
@type tracking_path: String
"""
- self._tracking_path = _unicode_encode(tracking_path)
+ self._tracking_path = (
+ tracking_path if portage.utf8_mode else _unicode_encode(tracking_path)
+ )
def save(self, failed_pkgs):
"""
@@ -32,7 +34,7 @@ class TrackingFile:
@type failed_pkgs: dict
"""
tracking_path = self._tracking_path
- lines = ["%s %s" % (pkg, mtime) for pkg, mtime in failed_pkgs.items()]
+ lines = [f"{pkg} {mtime}" for pkg, mtime in failed_pkgs.items()]
portage.util.write_atomic(tracking_path, "\n".join(lines))
def load(self):
@@ -46,7 +48,7 @@ class TrackingFile:
if not self.exists():
return {}
failed_pkgs = {}
- with open(tracking_path, "r") as tracking_file:
+ with open(tracking_path) as tracking_file:
for failed_merge in tracking_file:
pkg, mtime = failed_merge.strip().split()
failed_pkgs[pkg] = mtime
@@ -68,7 +70,7 @@ class TrackingFile:
def __iter__(self):
"""
- Provide an interator over failed merges.
+ Provide an iterator over failed merges.
@return: iterator of packages that failed to merge
"""
@@ -163,14 +165,14 @@ class MergesHandler:
portdb = portage.db[portage.root]["porttree"].dbapi
for failed_pkg in failed_pkgs:
# validate pkg name
- pkg_name = "%s" % failed_pkg.replace(MERGING_IDENTIFIER, "")
- pkg_atom = "=%s" % pkg_name
+ pkg_name = f"{failed_pkg.replace(MERGING_IDENTIFIER, '')}"
+ pkg_atom = f"={pkg_name}"
if not isvalidatom(pkg_atom):
- pkg_invalid_entries.add("'%s' is an invalid package atom." % pkg_atom)
+ pkg_invalid_entries.add(f"'{pkg_atom}' is an invalid package atom.")
if not portdb.cpv_exists(pkg_name):
pkg_invalid_entries.add(
- "'%s' does not exist in the ebuild repository." % pkg_name
+ f"'{pkg_name}' does not exist in the ebuild repository."
)
pkg_atoms.add(pkg_atom)
@@ -218,9 +220,9 @@ class MergesHandler:
if output:
results.append(output)
if proc.returncode != os.EX_OK:
- emerge_status = "Failed to emerge '%s'" % (" ".join(pkg_atoms))
+ emerge_status = f"Failed to emerge '{' '.join(pkg_atoms)}'"
else:
- emerge_status = "Successfully emerged '%s'" % (" ".join(pkg_atoms))
+ emerge_status = f"Successfully emerged '{' '.join(pkg_atoms)}'"
results.append(emerge_status)
return results
@@ -231,7 +233,7 @@ class MergesHandler:
errors = []
for pkg, mtime in failed_pkgs.items():
mtime_str = time.ctime(int(mtime))
- errors.append("'%s' failed to merge on '%s'" % (pkg, mtime_str))
+ errors.append(f"'{pkg}' failed to merge on '{mtime_str}'")
if errors:
return (False, errors)
return (True, None)
@@ -251,8 +253,8 @@ class MergesHandler:
try:
self._tracking_file.save(failed_pkgs)
- except IOError as ex:
- errors = ["Unable to save failed merges to tracking file: %s\n" % str(ex)]
+ except OSError as ex:
+ errors = [f"Unable to save failed merges to tracking file: {str(ex)}\n"]
errors.append(", ".join(sorted(failed_pkgs)))
return (False, errors)
self._remove_failed_dirs(failed_pkgs)
@@ -261,13 +263,13 @@ class MergesHandler:
)
# list any new failed merges
for pkg in sorted(self._scan()):
- results.append("'%s' still found as a failed merge." % pkg)
+ results.append(f"'{pkg}' still found as a failed merge.")
# reload config and remove successful packages from tracking file
portage._reset_legacy_globals()
vardb = portage.db[portage.root]["vartree"].dbapi
still_failed_pkgs = {}
for pkg, mtime in failed_pkgs.items():
- pkg_name = "%s" % pkg.replace(MERGING_IDENTIFIER, "")
+ pkg_name = f"{pkg.replace(MERGING_IDENTIFIER, '')}"
if not vardb.cpv_exists(pkg_name):
still_failed_pkgs[pkg] = mtime
self._tracking_file.save(still_failed_pkgs)
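As the save/load pair above implies, the tracking file is just whitespace-separated "pkg mtime" lines. A round-trip sketch, with a hypothetical path:

    from portage.emaint.modules.merges.merges import TrackingFile

    tf = TrackingFile("/tmp/failed-merges")  # hypothetical location
    tf.save({"app-misc/foo-1.0": "1700000000"})
    assert tf.load() == {"app-misc/foo-1.0": "1700000000"}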
diff --git a/lib/portage/emaint/modules/merges/meson.build b/lib/portage/emaint/modules/merges/meson.build
new file mode 100644
index 000000000..b20748219
--- /dev/null
+++ b/lib/portage/emaint/modules/merges/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'merges.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/merges',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/meson.build b/lib/portage/emaint/modules/meson.build
new file mode 100644
index 000000000..48f4f77d8
--- /dev/null
+++ b/lib/portage/emaint/modules/meson.build
@@ -0,0 +1,16 @@
+py.install_sources(
+ [
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules',
+ pure : not native_extensions
+)
+
+subdir('binhost')
+subdir('config')
+subdir('logs')
+subdir('merges')
+subdir('move')
+subdir('resume')
+subdir('sync')
+subdir('world')
diff --git a/lib/portage/emaint/modules/move/meson.build b/lib/portage/emaint/modules/move/meson.build
new file mode 100644
index 000000000..e3930d0b8
--- /dev/null
+++ b/lib/portage/emaint/modules/move/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'move.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/move',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/move/move.py b/lib/portage/emaint/modules/move/move.py
index 305cc2e81..5a56fca1d 100644
--- a/lib/portage/emaint/modules/move/move.py
+++ b/lib/portage/emaint/modules/move/move.py
@@ -82,12 +82,12 @@ class MoveHandler:
# If this update has already been applied to the same
# package build then silently continue.
for maybe_applied in match(
- "={}".format(cpv.replace(cpv.cp, str(newcp), 1))
+ f"={cpv.replace(cpv.cp, str(newcp), 1)}"
):
if maybe_applied.build_time == build_time:
break
else:
- errors.append("'%s' moved to '%s'" % (cpv, newcp))
+ errors.append(f"'{cpv}' moved to '{newcp}'")
elif update_cmd[0] == "slotmove":
pkg, origslot, newslot = update_cmd[1:]
atom = pkg.with_slot(origslot)
@@ -98,8 +98,7 @@ class MoveHandler:
continue
if repo_match(cpv.repo):
errors.append(
- "'%s' slot moved from '%s' to '%s'"
- % (cpv, origslot, newslot)
+ f"'{cpv}' slot moved from '{origslot}' to '{newslot}'"
)
if onProgress:
onProgress(0, 0)
@@ -121,7 +120,7 @@ class MoveHandler:
pkg = _pkg_str(cpv, metadata=metadata, settings=settings)
except InvalidData:
continue
- metadata = dict((k, metadata[k]) for k in self._update_keys)
+ metadata = {k: metadata[k] for k in self._update_keys}
try:
updates = allupdates[pkg.repo]
except KeyError:
@@ -133,7 +132,7 @@ class MoveHandler:
continue
metadata_updates = portage.update_dbentries(updates, metadata, parent=pkg)
if metadata_updates:
- errors.append("'%s' has outdated metadata" % cpv)
+ errors.append(f"'{cpv}' has outdated metadata")
if onProgress:
onProgress(maxval, i + 1)
@@ -178,7 +177,6 @@ class MoveHandler:
class MoveInstalled(MoveHandler):
-
short_desc = "Perform package move updates for installed packages"
@staticmethod
@@ -193,7 +191,6 @@ class MoveInstalled(MoveHandler):
class MoveBinary(MoveHandler):
-
short_desc = "Perform package move updates for binary packages"
@staticmethod
diff --git a/lib/portage/emaint/modules/resume/meson.build b/lib/portage/emaint/modules/resume/meson.build
new file mode 100644
index 000000000..71a14fb6f
--- /dev/null
+++ b/lib/portage/emaint/modules/resume/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'resume.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/resume',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/resume/resume.py b/lib/portage/emaint/modules/resume/resume.py
index 1203519c0..0e3da3886 100644
--- a/lib/portage/emaint/modules/resume/resume.py
+++ b/lib/portage/emaint/modules/resume/resume.py
@@ -5,7 +5,6 @@ import portage
class CleanResume:
-
short_desc = "Discard emerge --resume merge lists"
@staticmethod
@@ -26,15 +25,13 @@ class CleanResume:
if d is None:
continue
if not isinstance(d, dict):
- messages.append("unrecognized resume list: '%s'" % k)
+ messages.append(f"unrecognized resume list: '{k}'")
continue
mergelist = d.get("mergelist")
if mergelist is None or not hasattr(mergelist, "__len__"):
- messages.append("unrecognized resume list: '%s'" % k)
+ messages.append(f"unrecognized resume list: '{k}'")
continue
- messages.append(
- "resume list '%s' contains %d packages" % (k, len(mergelist))
- )
+ messages.append(f"resume list '{k}' contains {len(mergelist)} packages")
finally:
if onProgress:
onProgress(maxval, i + 1)
diff --git a/lib/portage/emaint/modules/sync/meson.build b/lib/portage/emaint/modules/sync/meson.build
new file mode 100644
index 000000000..03eb81829
--- /dev/null
+++ b/lib/portage/emaint/modules/sync/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'sync.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/sync',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/sync/sync.py b/lib/portage/emaint/modules/sync/sync.py
index 50a37ec61..ea009d16c 100644
--- a/lib/portage/emaint/modules/sync/sync.py
+++ b/lib/portage/emaint/modules/sync/sync.py
@@ -30,7 +30,6 @@ warn = create_color_func("WARN")
class SyncRepos:
-
short_desc = "Check repos.conf settings and/or sync repositories"
@staticmethod
@@ -127,7 +126,7 @@ class SyncRepos:
"""Internal search, matches up the repo name or alias in repos.
@param repos: list of repo names or aliases to match
- @param avalable: list of repo objects to search
+ @param available: list of repo objects to search
@return: list of repo objects that match
"""
selected = []
@@ -359,8 +358,7 @@ class SyncRepos:
messages = []
for rval in rvals:
messages.append(
- "Action: %s for repo: %s, returned code = %s"
- % (action, rval[0], rval[1])
+ f"Action: {action} for repo: {rval[0]}, returned code = {rval[1]}"
)
return messages
@@ -427,7 +425,7 @@ class SyncScheduler(AsyncScheduler):
self._update_leaf_nodes()
if hooks_enabled:
self._hooks_repos.add(repo)
- super(SyncScheduler, self)._task_exit(self)
+ super()._task_exit(self)
def _master_hooks(self, repo_name):
"""
diff --git a/lib/portage/emaint/modules/world/meson.build b/lib/portage/emaint/modules/world/meson.build
new file mode 100644
index 000000000..934276c9c
--- /dev/null
+++ b/lib/portage/emaint/modules/world/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'world.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/emaint/modules/world',
+ pure : not native_extensions
+)
diff --git a/lib/portage/emaint/modules/world/world.py b/lib/portage/emaint/modules/world/world.py
index b0ddf427e..57746bf3d 100644
--- a/lib/portage/emaint/modules/world/world.py
+++ b/lib/portage/emaint/modules/world/world.py
@@ -6,7 +6,6 @@ from portage import os
class WorldHandler:
-
short_desc = "Fix problems in the world file"
@staticmethod
@@ -64,8 +63,8 @@ class WorldHandler:
self._check_world(onProgress)
errors = []
if self.found:
- errors += ["'%s' is not a valid atom" % x for x in self.invalid]
- errors += ["'%s' is not installed" % x for x in self.not_installed]
+ errors += [f"'{x}' is not a valid atom" for x in self.invalid]
+ errors += [f"'{x}' is not installed" for x in self.not_installed]
else:
errors.append(self.world_file + " could not be opened for reading")
if errors:
@@ -86,9 +85,7 @@ class WorldHandler:
try:
world_set.replace(self.okay)
except portage.exception.PortageException:
- errors.append(
- "%s could not be opened for writing" % self.world_file
- )
+ errors.append(f"{self.world_file} could not be opened for writing")
if errors:
return (False, errors)
return (True, None)
diff --git a/lib/portage/env/config.py b/lib/portage/env/config.py
index af996ab13..353580251 100644
--- a/lib/portage/env/config.py
+++ b/lib/portage/env/config.py
@@ -74,9 +74,7 @@ class PackageKeywordsFile(ConfigLoaderKlass):
default_loader = KeyListFileLoader
def __init__(self, filename):
- super(PackageKeywordsFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PackageUseFile(ConfigLoaderKlass):
@@ -87,16 +85,14 @@ class PackageUseFile(ConfigLoaderKlass):
default_loader = KeyListFileLoader
def __init__(self, filename):
- super(PackageUseFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PackageMaskFile(ConfigLoaderKlass):
"""
A class that implements a file-based package.mask
- Entires in package.mask are of the form:
+ Entries in package.mask are of the form:
atom1
atom2
or optionally
@@ -107,9 +103,7 @@ class PackageMaskFile(ConfigLoaderKlass):
default_loader = ItemFileLoader
def __init__(self, filename):
- super(PackageMaskFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
class PortageModulesFile(ConfigLoaderKlass):
@@ -120,6 +114,4 @@ class PortageModulesFile(ConfigLoaderKlass):
default_loader = KeyValuePairFileLoader
def __init__(self, filename):
- super(PortageModulesFile, self).__init__(
- self.default_loader(filename, validator=None)
- )
+ super().__init__(self.default_loader(filename, validator=None))
diff --git a/lib/portage/env/loaders.py b/lib/portage/env/loaders.py
index 284c311a1..81386fd32 100644
--- a/lib/portage/env/loaders.py
+++ b/lib/portage/env/loaders.py
@@ -3,7 +3,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import stat
import portage
@@ -31,7 +30,7 @@ class LoaderError(Exception):
self.error_msg = error_msg
def __str__(self):
- return "Failed while loading resource: %s, error was: %s" % (
+ return "Failed while loading resource: {}, error was: {}".format(
self.resource,
self.error_msg,
)
@@ -159,14 +158,13 @@ class FileLoader(DataLoader):
func = self.lineParser
for fn in RecursiveFileLoader(self.fname):
try:
- with io.open(
+ with open(
_unicode_encode(fn, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
lines = f.readlines()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.EACCES:
writemsg(_("Permission denied: '%s'\n") % fn, noiselevel=-1)
del e
diff --git a/lib/portage/env/meson.build b/lib/portage/env/meson.build
new file mode 100644
index 000000000..ed3615251
--- /dev/null
+++ b/lib/portage/env/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'config.py',
+ 'loaders.py',
+ 'validators.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/env',
+ pure : not native_extensions
+)
diff --git a/lib/portage/exception.py b/lib/portage/exception.py
index ec8ea1980..7b48aa919 100644
--- a/lib/portage/exception.py
+++ b/lib/portage/exception.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2020 Gentoo Authors
+# Copyright 1998-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import signal
@@ -30,6 +30,10 @@ class CorruptionError(PortageException):
"""Corruption indication"""
+class CorruptionKeyError(CorruptionError, PortageKeyError):
+    """KeyError raised when corruption is detected (cause should be accessible as __cause__)"""
+
+
class InvalidDependString(PortageException):
"""An invalid depend string has been encountered"""
@@ -159,7 +163,7 @@ class AmbiguousPackageName(ValueError, PortageException):
def __init__(self, *args, **kwargs):
self.args = args
- super(AmbiguousPackageName, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def __str__(self):
return ValueError.__str__(self)
@@ -181,6 +185,26 @@ class InvalidPackageName(PortagePackageException):
"""Malformed package name"""
+class InvalidBinaryPackageFormat(PortagePackageException):
+ """Invalid Binary Package Format"""
+
+
+class InvalidCompressionMethod(PortagePackageException):
+ """Invalid or unsupported compression method"""
+
+
+class CompressorNotFound(PortagePackageException):
+ """A required compressor binary was not available or executable"""
+
+
+class CompressorOperationFailed(PortagePackageException):
+    """An error occurred during an external compressor operation"""
+
+
+class SignedPackage(PortagePackageException):
+ """Unable to update a signed package"""
+
+
class InvalidAtom(PortagePackageException):
"""Malformed atom spec"""
@@ -201,10 +225,10 @@ class UnsupportedAPIException(PortagePackageException):
eapi = str(eapi)
eapi = eapi.lstrip("-")
msg = _(
- "Unable to do any operations on '%(cpv)s', since "
+ f"Unable to do any operations on '{self.cpv}', since "
"its EAPI is higher than this portage version's. Please upgrade"
- " to a portage version that supports EAPI '%(eapi)s'."
- ) % {"cpv": self.cpv, "eapi": eapi}
+ f" to a portage version that supports EAPI '{eapi}'."
+ )
return _unicode_decode(msg, encoding=_encodings["content"], errors="replace")
@@ -216,6 +240,10 @@ class DigestException(SignatureException):
"""A problem exists in the digest"""
+class GPGException(SignatureException):
+ """GPG operation failed"""
+
+
class MissingSignature(SignatureException):
"""Signature was not present in the checked file"""
diff --git a/lib/portage/getbinpkg.py b/lib/portage/getbinpkg.py
index 6aa8f1de1..fca44f052 100644
--- a/lib/portage/getbinpkg.py
+++ b/lib/portage/getbinpkg.py
@@ -21,11 +21,11 @@ import tempfile
import base64
import warnings
-_all_errors = [NotImplementedError, ValueError, socket.error]
-
from html.parser import HTMLParser as html_parser_HTMLParser
from urllib.parse import unquote as urllib_parse_unquote
+_all_errors = [NotImplementedError, ValueError, socket.error]
+
try:
import ftplib
except ImportError as e:
@@ -43,11 +43,8 @@ except ImportError as e:
else:
_all_errors.append(http_client_error)
-_all_errors = tuple(_all_errors)
-
def make_metadata_dict(data):
-
warnings.warn(
"portage.getbinpkg.make_metadata_dict() is deprecated",
DeprecationWarning,
@@ -55,20 +52,24 @@ def make_metadata_dict(data):
)
myid, _myglob = data
-
- mydict = {}
- for k_bytes in portage.xpak.getindex_mem(myid):
- k = _unicode_decode(
- k_bytes, encoding=_encodings["repo.content"], errors="replace"
+ metadata = (
+ (
+ k_bytes,
+ _unicode_decode(
+ k_bytes, encoding=_encodings["repo.content"], errors="replace"
+ ),
)
- if k not in _all_metadata_keys and k != "CATEGORY":
- continue
- v = _unicode_decode(
+ for k_bytes in portage.xpak.getindex_mem(myid)
+ )
+ mydict = {
+ k: _unicode_decode(
portage.xpak.getitem(data, k_bytes),
encoding=_encodings["repo.content"],
errors="replace",
)
- mydict[k] = v
+ for k_bytes, k in metadata
+ if k in _all_metadata_keys or k == "CATEGORY"
+ }
return mydict
@@ -78,7 +79,6 @@ class ParseLinks(html_parser_HTMLParser):
page and provide suffix and prefix limitors"""
def __init__(self):
-
warnings.warn(
"portage.getbinpkg.ParseLinks is deprecated",
DeprecationWarning,
@@ -92,19 +92,15 @@ class ParseLinks(html_parser_HTMLParser):
return self.PL_anchors
def get_anchors_by_prefix(self, prefix):
- newlist = []
- for x in self.PL_anchors:
- if x.startswith(prefix):
- if x not in newlist:
- newlist.append(x[:])
+		# dict.fromkeys deduplicates while preserving insertion order
+		newlist = list(
+			dict.fromkeys(x for x in self.PL_anchors if x.startswith(prefix))
+		)
return newlist
def get_anchors_by_suffix(self, suffix):
- newlist = []
- for x in self.PL_anchors:
- if x.endswith(suffix):
- if x not in newlist:
- newlist.append(x[:])
+		newlist = list(
+			dict.fromkeys(x for x in self.PL_anchors if x.endswith(suffix))
+		)
return newlist
def handle_endtag(self, tag):
@@ -112,10 +108,12 @@ class ParseLinks(html_parser_HTMLParser):
def handle_starttag(self, tag, attrs):
if tag == "a":
- for x in attrs:
- if x[0] == "href":
- if x[1] not in self.PL_anchors:
- self.PL_anchors.append(urllib_parse_unquote(x[1]))
+			myanchors = (
+				urllib_parse_unquote(x[1])
+				for x in attrs
+				if x[0] == "href" and x[1] not in self.PL_anchors
+			)
+			self.PL_anchors.extend(myanchors)
def create_conn(baseurl, conn=None):
@@ -134,16 +132,13 @@ def create_conn(baseurl, conn=None):
raise ValueError(
_("Provided URI does not " "contain protocol identifier. '%s'") % baseurl
)
- protocol, url_parts = parts
+ protocol, url = parts
del parts
- url_parts = url_parts.split("/")
- host = url_parts[0]
- if len(url_parts) < 2:
- address = "/"
- else:
- address = "/" + "/".join(url_parts[1:])
- del url_parts
+ url_split = url.split("/", 1)
+ host = url_split[0]
+	# A URI without a path component splits into a single element
+	address = f"/{url_split[1]}" if len(url_split) == 2 else "/"
+ del url, url_split
userpass_host = host.split("@", 1)
if len(userpass_host) == 1:
@@ -154,13 +149,14 @@ def create_conn(baseurl, conn=None):
userpass = userpass_host[0].split(":")
del userpass_host
- if len(userpass) > 2:
+ userpass_size = len(userpass)
+ if userpass_size > 2:
raise ValueError(_("Unable to interpret username/password provided."))
- elif len(userpass) == 2:
- username = userpass[0]
+
+ username = userpass[0]
+ if userpass_size == 2:
password = userpass[1]
- elif len(userpass) == 1:
- username = userpass[0]
+ elif userpass_size == 1:
password = None
del userpass
@@ -172,12 +168,10 @@ def create_conn(baseurl, conn=None):
except AttributeError:
# Python 2
encodebytes = base64.encodestring
- http_headers = {
- b"Authorization": "Basic %s"
- % encodebytes(_unicode_encode("%s:%s" % (username, password))).replace(
- b"\012", b""
- ),
- }
+	auth_bytes = encodebytes(_unicode_encode(f"{username}:{password}")).replace(
+		b"\012", b""
+	)
+	http_headers = {b"Authorization": b"Basic " + auth_bytes}
if not conn:
if protocol == "https":
@@ -204,9 +198,10 @@ def create_conn(baseurl, conn=None):
conn.login(username, password)
else:
sys.stderr.write(
- colorize("WARN", _(" * No password provided for username"))
- + " '%s'" % (username,)
- + "\n\n"
+ colorize(
+ "WARN",
+ _(f" * No password provided for username '{username}'\n\n"),
+ )
)
conn.login(username)
conn.set_pasv(passive)
@@ -238,22 +233,18 @@ def make_ftp_request(conn, address, rest=None, dest=None):
)
try:
-
if dest:
fstart_pos = dest.tell()
conn.voidcmd("TYPE I")
fsize = conn.size(address)
- if (rest != None) and (rest < 0):
+ retr_address = f"RETR {address}"
+		# A negative rest is an offset from the end of the file
+		if rest is not None and rest < 0:
rest = fsize + int(rest)
-			if rest < 0:
-				rest = 0
-
-		if rest != None:
-			mysocket = conn.transfercmd("RETR %s" % str(address), rest)
+			rest = max(rest, 0)
+
+		if rest is not None:
+			mysocket = conn.transfercmd(retr_address, rest)
else:
- mysocket = conn.transfercmd("RETR %s" % str(address))
+ mysocket = conn.transfercmd(retr_address)
mydata = ""
while 1:
@@ -262,14 +253,13 @@ def make_ftp_request(conn, address, rest=None, dest=None):
if dest:
dest.write(somedata)
else:
- mydata = mydata + somedata
+					mydata += somedata
else:
break
+ data_size = len(mydata)
if dest:
data_size = fstart_pos - dest.tell()
- else:
- data_size = len(mydata)
mysocket.close()
conn.voidresp()
@@ -294,7 +284,7 @@ def make_http_request(conn, address, _params={}, headers={}, dest=None):
rc = 0
response = None
- while (rc == 0) or (rc == 301) or (rc == 302):
+ while rc in (0, 301, 302):
try:
if rc != 0:
conn = create_conn(address)[0]
@@ -302,38 +292,33 @@ def make_http_request(conn, address, _params={}, headers={}, dest=None):
except SystemExit as e:
raise
except Exception as e:
- return None, None, "Server request failed: %s" % str(e)
+ return None, None, f"Server request failed: {e}"
response = conn.getresponse()
rc = response.status
# 301 means that the page address is wrong.
- if (rc == 301) or (rc == 302):
- ignored_data = response.read()
- del ignored_data
+ if rc in (301, 302):
+			# The redirect response body is read and discarded on purpose.
+ _ = response.read()
for x in str(response.msg).split("\n"):
parts = x.split(": ", 1)
if parts[0] == "Location":
if rc == 301:
sys.stderr.write(
- colorize("BAD", _("Location has moved: "))
- + str(parts[1])
- + "\n"
+ f"{colorize('BAD', _('Location has moved: '))}{parts[1]}\n"
)
if rc == 302:
sys.stderr.write(
- colorize("BAD", _("Location has temporarily moved: "))
- + str(parts[1])
- + "\n"
+ f"{colorize('BAD', _('Location has temporarily moved: '))}{parts[1]}\n"
)
address = parts[1]
break
- if (rc != 200) and (rc != 206):
+ if rc not in (200, 206):
return (
None,
rc,
- "Server did not respond successfully (%s: %s)"
- % (str(response.status), str(response.reason)),
+ f"Server did not respond successfully ({response.status}: {response.reason})",
)
if dest:
@@ -344,7 +329,6 @@ def make_http_request(conn, address, _params={}, headers={}, dest=None):
def match_in_array(array, prefix="", suffix="", match_both=1, allow_overlap=0):
-
warnings.warn(
"portage.getbinpkg.match_in_array() is deprecated",
DeprecationWarning,
@@ -358,26 +342,28 @@ def match_in_array(array, prefix="", suffix="", match_both=1, allow_overlap=0):
for x in array:
add_p = 0
- if prefix and (len(x) >= len(prefix)) and (x[: len(prefix)] == prefix):
+ x_size = len(x)
+ prefix_size = len(prefix)
+ if prefix and x_size >= prefix_size and x[:prefix_size] == prefix:
add_p = 1
if match_both:
if prefix and not add_p: # Require both, but don't have first one.
continue
- else:
- if add_p: # Only need one, and we have it.
- myarray.append(x[:])
- continue
+ elif add_p: # Only need one, and we have it.
+ myarray.append(x[:])
+ continue
+ suffix_size = len(suffix)
if not allow_overlap: # Not allow to overlap prefix and suffix
- if len(x) >= (len(prefix) + len(suffix)):
+ if x_size >= (prefix_size + suffix_size):
pass
else:
continue # Too short to match.
else:
pass # Do whatever... We're overlapping.
- if suffix and (len(x) >= len(suffix)) and (x[-len(suffix) :] == suffix):
+		if suffix and x_size >= suffix_size and x[-suffix_size:] == suffix:
myarray.append(x) # It matches
else:
continue # Doesn't match.
@@ -396,9 +382,8 @@ def dir_get_list(baseurl, conn=None):
stacklevel=2,
)
- if not conn:
- keepconnection = 0
- else:
+ keepconnection = 0
+ if conn:
keepconnection = 1
conn, protocol, address, params, headers = create_conn(baseurl, conn)
@@ -408,7 +393,7 @@ def dir_get_list(baseurl, conn=None):
if not address.endswith("/"):
# http servers can return a 400 error here
# if the address doesn't end with a slash.
- address += "/"
+ address = f"{address}/"
page, rc, msg = make_http_request(conn, address, params, headers)
if page:
@@ -422,7 +407,7 @@ def dir_get_list(baseurl, conn=None):
raise portage.exception.PortageException(
_("Unable to get listing: %s %s") % (rc, msg)
)
- elif protocol in ["ftp"]:
+ elif protocol == "ftp":
if address[-1] == "/":
olddir = conn.pwd()
conn.cwd(address)
@@ -453,15 +438,14 @@ def file_get_metadata(baseurl, conn=None, chunk_size=3000):
stacklevel=2,
)
-	if not conn:
-		keepconnection = 0
-	else:
+	keepconnection = 0
+	if conn:
 		keepconnection = 1
conn, protocol, address, params, headers = create_conn(baseurl, conn)
if protocol in ["http", "https"]:
- headers["Range"] = "bytes=-%s" % str(chunk_size)
+ headers["Range"] = f"bytes=-{chunk_size}"
data, _x, _x = make_http_request(conn, address, params, headers)
elif protocol in ["ftp"]:
data, _x, _x = make_ftp_request(conn, address, -chunk_size)
@@ -473,7 +457,7 @@ def file_get_metadata(baseurl, conn=None, chunk_size=3000):
finally:
f.close()
else:
- raise TypeError(_("Unknown protocol. '%s'") % protocol)
+ raise TypeError(_(f"Unknown protocol. '{protocol}'"))
if data:
xpaksize = portage.xpak.decodeint(data[-8:-4])
@@ -505,7 +489,6 @@ def file_get(
URI should be in the form <proto>://[user[:pass]@]<site>[:port]<path>"""
if not fcmd:
-
warnings.warn(
"Use of portage.getbinpkg.file_get() without the fcmd "
"parameter is deprecated",
@@ -523,27 +506,26 @@ def file_get(
if "DISTDIR" not in variables:
if dest is None:
raise portage.exception.MissingParameter(
- _("%s is missing required '%s' key") % ("fcmd_vars", "DISTDIR")
+ _("fcmd_vars is missing required 'DISTDIR' key")
)
variables["DISTDIR"] = dest
if "URI" not in variables:
if baseurl is None:
raise portage.exception.MissingParameter(
- _("%s is missing required '%s' key") % ("fcmd_vars", "URI")
+ _("fcmd_vars is missing required 'URI' key")
)
variables["URI"] = baseurl
if "FILE" not in variables:
- if filename is None:
+ if not filename:
filename = os.path.basename(variables["URI"])
variables["FILE"] = filename
from portage.util import varexpand
from portage.process import spawn
- myfetch = portage.util.shlex_split(fcmd)
- myfetch = [varexpand(x, mydict=variables) for x in myfetch]
+ myfetch = [varexpand(x, mydict=variables) for x in portage.util.shlex_split(fcmd)]
fd_pipes = {
0: portage._get_stdin().fileno(),
1: sys.__stdout__.fileno(),
@@ -569,14 +551,13 @@ def file_get_lib(baseurl, dest, conn=None):
stacklevel=2,
)
- if not conn:
- keepconnection = 0
- else:
+ keepconnection = 0
+ if conn:
keepconnection = 1
conn, protocol, address, params, headers = create_conn(baseurl, conn)
- sys.stderr.write("Fetching '" + str(os.path.basename(address)) + "'\n")
+ sys.stderr.write(f"Fetching '{os.path.basename(address)}'\n")
if protocol in ["http", "https"]:
data, rc, _msg = make_http_request(conn, address, params, headers, dest=dest)
elif protocol in ["ftp"]:
@@ -612,30 +593,28 @@ def file_get_lib(baseurl, dest, conn=None):
def dir_get_metadata(
baseurl, conn=None, chunk_size=3000, verbose=1, usingcache=1, makepickle=None
):
-
warnings.warn(
"portage.getbinpkg.dir_get_metadata() is deprecated",
DeprecationWarning,
stacklevel=2,
)
-	if not conn:
-		keepconnection = 0
-	else:
+	keepconnection = 0
+	if conn:
 		keepconnection = 1
cache_path = "/var/cache/edb"
metadatafilename = os.path.join(cache_path, "remote_metadata.pickle")
- if makepickle is None:
+ if not makepickle:
makepickle = "/var/cache/edb/metadata.idx.most_recent"
try:
conn = create_conn(baseurl, conn)[0]
- except _all_errors as e:
+ except tuple(_all_errors) as e:
# ftplib.FTP(host) can raise errors like this:
# socket.error: (111, 'Connection refused')
- sys.stderr.write("!!! %s\n" % (e,))
+ sys.stderr.write(f"!!! {e}\n")
return {}
out = sys.stdout
@@ -673,7 +652,7 @@ def dir_get_metadata(
if not os.access(cache_path, os.W_OK):
sys.stderr.write(_("!!! Unable to write binary metadata to disk!\n"))
- sys.stderr.write(_("!!! Permission denied: '%s'\n") % cache_path)
+ sys.stderr.write(_(f"!!! Permission denied: '{cache_path}'\n"))
return metadata[baseurl]["data"]
import portage.exception
@@ -681,10 +660,8 @@ def dir_get_metadata(
try:
filelist = dir_get_list(baseurl, conn)
except portage.exception.PortageException as e:
- sys.stderr.write(
- _("!!! Error connecting to '%s'.\n") % _hide_url_passwd(baseurl)
- )
- sys.stderr.write("!!! %s\n" % str(e))
+ sys.stderr.write(_(f"!!! Error connecting to '{_hide_url_passwd(baseurl)}'.\n"))
+ sys.stderr.write(f"!!! {e}\n")
del e
return metadata[baseurl]["data"]
tbz2list = match_in_array(filelist, suffix=".tbz2")
@@ -709,7 +686,7 @@ def dir_get_metadata(
mytempfile.seek(0)
data = mytempfile.read()
except ValueError as e:
- sys.stderr.write("--- %s\n" % str(e))
+				sys.stderr.write(f"--- {e}\n")
if trynum < 3:
sys.stderr.write(_("Retrying...\n"))
sys.stderr.flush()
@@ -743,10 +720,8 @@ def dir_get_metadata(
except SystemExit as e:
raise
except Exception as e:
- sys.stderr.write(
- _("!!! Failed to read data from index: ") + str(mfile) + "\n"
- )
- sys.stderr.write("!!! %s" % str(e))
+ sys.stderr.write(f"!!! Failed to read data from index: {mfile}\n")
+ sys.stderr.write(f"!!! {e}")
sys.stderr.flush()
try:
metadatafile = open(
@@ -761,7 +736,7 @@ def dir_get_metadata(
raise
except Exception as e:
sys.stderr.write(_("!!! Failed to write binary metadata to disk!\n"))
- sys.stderr.write("!!! %s\n" % str(e))
+ sys.stderr.write(f"!!! {e}\n")
sys.stderr.flush()
break
# We may have metadata... now we run through the tbz2 list and check.
@@ -784,10 +759,14 @@ def dir_get_metadata(
def display(self):
self.out.write(
- "\r"
- + colorize("WARN", _("cache miss: '") + str(self.misses) + "'")
- + " --- "
- + colorize("GOOD", _("cache hit: '") + str(self.hits) + "'")
+ "".join(
+ (
+ "\r",
+ colorize("WARN", _(f"cache miss: '{self.misses}'")),
+ " --- ",
+ colorize("GOOD", _(f"cache hit: '{self.hits}'")),
+ )
+ )
)
self.out.flush()
@@ -829,9 +808,7 @@ def dir_get_metadata(
metadata[baseurl]["data"][x] = make_metadata_dict(myid)
elif verbose:
sys.stderr.write(
- colorize("BAD", _("!!! Failed to retrieve metadata on: "))
- + str(x)
- + "\n"
+ f"{colorize('BAD', _('!!! Failed to retrieve metadata on: '))}{x}\n"
)
sys.stderr.flush()
else:
@@ -900,9 +877,8 @@ class PackageIndex:
inherited_keys=None,
translated_keys=None,
):
-
self._pkg_slot_dict = None
- if allowed_pkg_keys is not None:
+ if allowed_pkg_keys:
self._pkg_slot_dict = slot_dict_class(allowed_pkg_keys)
self._default_header_data = default_header_data
@@ -912,7 +888,7 @@ class PackageIndex:
self._read_translation_map = {}
if translated_keys:
self._write_translation_map.update(translated_keys)
- self._read_translation_map.update(((y, x) for (x, y) in translated_keys))
+ self._read_translation_map.update((y, x) for (x, y) in translated_keys)
self.header = {}
if self._default_header_data:
self.header.update(self._default_header_data)
@@ -920,11 +896,9 @@ class PackageIndex:
self.modified = True
def _readpkgindex(self, pkgfile, pkg_entry=True):
-
+ d = {}
allowed_keys = None
- if self._pkg_slot_dict is None or not pkg_entry:
- d = {}
- else:
+ if self._pkg_slot_dict and pkg_entry:
d = self._pkg_slot_dict()
allowed_keys = d.allowed_keys
@@ -946,7 +920,7 @@ class PackageIndex:
def _writepkgindex(self, pkgfile, items):
for k, v in items:
- pkgfile.write("%s: %s\n" % (self._write_translation_map.get(k, k), v))
+ pkgfile.write(f"{self._write_translation_map.get(k, k)}: {v}\n")
pkgfile.write("\n")
def read(self, pkgfile):
@@ -970,7 +944,7 @@ class PackageIndex:
if self._inherited_keys:
for k in self._inherited_keys:
v = self.header.get(k)
- if v is not None:
+ if v:
d.setdefault(k, v)
self.packages.append(d)
@@ -988,7 +962,7 @@ class PackageIndex:
if self._inherited_keys:
for k in self._inherited_keys:
v = self.header.get(k)
- if v is not None and v == metadata.get(k):
+ if v and v == metadata.get(k):
del metadata[k]
if self._default_pkg_data:
for k, v in self._default_pkg_data.items():
@@ -997,5 +971,5 @@ class PackageIndex:
keys = list(metadata)
keys.sort()
self._writepkgindex(
- pkgfile, [(k, metadata[k]) for k in keys if metadata[k]]
+ pkgfile, ((k, metadata[k]) for k in keys if metadata[k])
)
diff --git a/lib/portage/glsa.py b/lib/portage/glsa.py
index 19f226db1..648159ad8 100644
--- a/lib/portage/glsa.py
+++ b/lib/portage/glsa.py
@@ -1,4 +1,4 @@
-# Copyright 2003-2020 Gentoo Authors
+# Copyright 2003-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import codecs
@@ -10,7 +10,6 @@ import xml.dom.minidom
from functools import reduce
-import io
from io import StringIO
from portage import _encodings, _unicode_decode, _unicode_encode
@@ -38,6 +37,8 @@ opMapping = {
}
NEWLINE_ESCAPE = "!;\\n" # some random string to mark newlines that should be preserved
SPACE_ESCAPE = "!;_" # some random string to mark spaces that should be preserved
+# See PMS 3.1.7 "Keyword names"
+ARCH_REGEX = re.compile(r"^\*$|^[-_a-z0-9 ]+$")
def get_applied_glsas(settings):
@@ -76,26 +77,29 @@ def wrap(text, width, caption=""):
words = text.split()
indentLevel = len(caption) + 1
- for w in words:
- if line != "" and line[-1] == "\n":
- rValue += line
+ for word in words:
+ if line and line[-1] == "\n":
+ rValue = f"{rValue}{line}"
line = " " * indentLevel
- if len(line) + len(w.replace(NEWLINE_ESCAPE, "")) + 1 > width:
- rValue += line + "\n"
- line = " " * indentLevel + w.replace(NEWLINE_ESCAPE, "\n")
- elif w.find(NEWLINE_ESCAPE) >= 0:
+ if len(line) + len(word.replace(NEWLINE_ESCAPE, "")) + 1 > width:
+ rValue = f"{rValue}{line}\n"
+ escaped_word = word.replace(NEWLINE_ESCAPE, "\n")
+ line = f"{' ' * indentLevel}{escaped_word}"
+ elif word.find(NEWLINE_ESCAPE) >= 0:
+ escaped_word = word.replace(NEWLINE_ESCAPE, "\n")
+ whitespace = ""
if len(line.strip()) > 0:
- rValue += line + " " + w.replace(NEWLINE_ESCAPE, "\n")
- else:
- rValue += line + w.replace(NEWLINE_ESCAPE, "\n")
+ whitespace = " "
+ rValue = f"{rValue}{line}{whitespace}{escaped_word}"
line = " " * indentLevel
else:
+ whitespace = ""
if len(line.strip()) > 0:
- line += " " + w
- else:
- line += w
+ whitespace = " "
+ line = f"{line}{whitespace}{word}"
if len(line) > 0:
- rValue += line.replace(NEWLINE_ESCAPE, "\n")
+ escaped_line = line.replace(NEWLINE_ESCAPE, "\n")
+ rValue = f"{rValue}{escaped_line}"
rValue = rValue.replace(SPACE_ESCAPE, " ")
return rValue
@@ -112,25 +116,29 @@ def get_glsa_list(myconfig):
@rtype: List of Strings
@return: a list of GLSA IDs in this repository
"""
- rValue = []
+ repository = os.path.join(myconfig["PORTDIR"], "metadata", "glsa")
if "GLSA_DIR" in myconfig:
repository = myconfig["GLSA_DIR"]
- else:
- repository = os.path.join(myconfig["PORTDIR"], "metadata", "glsa")
if not os.access(repository, os.R_OK):
return []
dirlist = os.listdir(repository)
prefix = "glsa-"
+ prefix_size = len(prefix)
suffix = ".xml"
+ suffix_size = len(suffix)
- for f in dirlist:
+ def check(value):
try:
- if f[: len(prefix)] == prefix and f[-1 * len(suffix) :] == suffix:
- rValue.append(f[len(prefix) : -1 * len(suffix)])
+ if value[:prefix_size] == prefix and value[-suffix_size:] == suffix:
+ return value[prefix_size:-suffix_size]
except IndexError:
- pass
+ return None
+ return None
+
+ checked_dirlist = (check(f) for f in dirlist)
+ rValue = [f for f in checked_dirlist if f]
return rValue
@@ -143,7 +151,7 @@ def getListElements(listnode):
@rtype: List of Strings
@return: a list that contains the value of the <li> elements
"""
- if not listnode.nodeName in ["ul", "ol"]:
+	if listnode.nodeName not in ("ul", "ol"):
raise GlsaFormatException("Invalid function call: listnode is not <ul> or <ol>")
rValue = [
getText(li, format="strip")
@@ -182,9 +190,9 @@ def getText(node, format, textfd=None): # pylint: disable=redefined-builtin
returnNone = False
else:
returnNone = True
- if format in ["strip", "keep"]:
- if node.nodeName in ["uri", "mail"]:
- textfd.write(node.childNodes[0].data + ": " + node.getAttribute("link"))
+ if format in ("strip", "keep"):
+ if node.nodeName in ("uri", "mail"):
+			textfd.write(f"{node.childNodes[0].data}: {node.getAttribute('link')}")
else:
for subnode in node.childNodes:
if subnode.nodeName == "#text":
@@ -197,20 +205,18 @@ def getText(node, format, textfd=None): # pylint: disable=redefined-builtin
for p_subnode in subnode.childNodes:
if p_subnode.nodeName == "#text":
textfd.write(p_subnode.data.strip())
- elif p_subnode.nodeName in ["uri", "mail"]:
+ elif p_subnode.nodeName in ("uri", "mail"):
textfd.write(p_subnode.childNodes[0].data)
textfd.write(" ( " + p_subnode.getAttribute("link") + " )")
textfd.write(NEWLINE_ESCAPE)
elif subnode.nodeName == "ul":
for li in getListElements(subnode):
- textfd.write("-" + SPACE_ESCAPE + li + NEWLINE_ESCAPE + " ")
+ textfd.write(f"-{SPACE_ESCAPE}{li}{NEWLINE_ESCAPE} ")
elif subnode.nodeName == "ol":
i = 0
for li in getListElements(subnode):
i = i + 1
- textfd.write(
- str(i) + "." + SPACE_ESCAPE + li + NEWLINE_ESCAPE + " "
- )
+ textfd.write(f"{i}.{SPACE_ESCAPE}{li}{NEWLINE_ESCAPE} ")
elif subnode.nodeName == "code":
textfd.write(
getText(subnode, format="keep")
@@ -262,19 +268,22 @@ def makeAtom(pkgname, versionNode):
@rtype: String
@return: the portage atom
"""
- rValue = (
- opMapping[versionNode.getAttribute("range")]
- + pkgname
- + "-"
- + getText(versionNode, format="strip")
- )
+ rangetype = versionNode.getAttribute("range")
+ if rangetype in opMapping:
+ op = opMapping[rangetype]
+ else:
+ raise GlsaFormatException(
+ _(f"Invalid range found for '{pkgname}': {rangetype}")
+ )
+ version = getText(versionNode, format="strip")
+ rValue = f"{op}{pkgname}-{version}"
try:
slot = versionNode.getAttribute("slot").strip()
except KeyError:
pass
else:
if slot and slot != "*":
- rValue += _slot_separator + slot
+ rValue = f"{rValue}{_slot_separator}{slot}"
return str(rValue)
@@ -289,16 +298,20 @@ def makeVersion(versionNode):
@rtype: String
@return: the version string
"""
- rValue = opMapping[versionNode.getAttribute("range")] + getText(
- versionNode, format="strip"
- )
+ rangetype = versionNode.getAttribute("range")
+ if rangetype in opMapping:
+ op = opMapping[rangetype]
+ else:
+ raise GlsaFormatException(_(f"Invalid range found: {rangetype}"))
+ version = getText(versionNode, format="strip")
+ rValue = f"{op}{version}"
try:
slot = versionNode.getAttribute("slot").strip()
except KeyError:
pass
else:
if slot and slot != "*":
- rValue += _slot_separator + slot
+ rValue = f"{rValue}{_slot_separator}{slot}"
return rValue
@@ -421,9 +434,9 @@ def getMinUpgrade(vulnerableList, unaffectedList, portdbapi, vardbapi, minimize=
and portdbapi._pkg_str(c, None).slot
== vardbapi._pkg_str(vuln, None).slot
):
- update = c_pv[0] + "/" + c_pv[1] + "-" + c_pv[2]
+ update = f"{c_pv[0]}/{c_pv[1]}-{c_pv[2]}"
if c_pv[3] != "r0": # we don't like -r0 for display
- update += "-" + c_pv[3]
+ update = f"{update}-{c_pv[3]}"
update = portdbapi._pkg_str(update, None)
vuln_update.append([vuln, update])
@@ -466,7 +479,7 @@ def format_date(datestr):
class GlsaTypeException(Exception):
def __init__(self, doctype):
- Exception.__init__(self, "wrong DOCTYPE: %s" % doctype)
+ Exception.__init__(self, f"wrong DOCTYPE: {doctype}")
class GlsaFormatException(Exception):
@@ -509,7 +522,7 @@ class Glsa:
self.type = "file"
else:
raise GlsaArgumentException(
- _("Given ID %s isn't a valid GLSA ID or filename.") % myid
+ _(f"Given ID {myid} isn't a valid GLSA ID or filename.")
)
self.nr = myid
self.config = myconfig
@@ -526,13 +539,13 @@ class Glsa:
@return: None
"""
if "GLSA_DIR" in self.config:
- repository = "file://" + self.config["GLSA_DIR"] + "/"
+ repository = f"file://{self.config['GLSA_DIR']}/"
else:
- repository = "file://" + self.config["PORTDIR"] + "/metadata/glsa/"
+ repository = f"file://{self.config['PORTDIR']}/metadata/glsa/"
if self.type == "file":
- myurl = "file://" + self.nr
+ myurl = f"file://{self.nr}"
else:
- myurl = repository + "glsa-%s.xml" % str(self.nr)
+ myurl = f"{repository}glsa-{self.nr}.xml"
f = urllib_request_urlopen(myurl)
try:
@@ -563,10 +576,9 @@ class Glsa:
myroot = self.DOM.getElementsByTagName("glsa")[0]
if self.type == "id" and myroot.getAttribute("id") != self.nr:
raise GlsaFormatException(
- _("filename and internal id don't match:")
- + myroot.getAttribute("id")
- + " != "
- + self.nr
+ _(
+ f"filename and internal id don't match: {myroot.getAttribute('id')} != {self.nr}"
+ )
)
# the simple (single, required, top-level, #PCDATA) tags first
@@ -585,17 +597,14 @@ class Glsa:
count = revisedEl.getAttribute("count")
if not count:
raise GlsaFormatException(
- "Count attribute is missing or blank in GLSA: "
- + myroot.getAttribute("id")
+ f"Count attribute is missing or blank in GLSA: {myroot.getAttribute('id')}"
)
try:
self.count = int(count)
except ValueError:
raise GlsaFormatException(
- "Revision attribute in GLSA: "
- + myroot.getAttribute("id")
- + " is not an integer"
+ f"Revision attribute in GLSA: {myroot.getAttribute('id')} is not an integer"
)
self.revised = format_date(self.revised)
@@ -645,9 +654,9 @@ class Glsa:
try:
name = portage.dep.Atom(name)
except portage.exception.InvalidAtom:
- raise GlsaFormatException(_("invalid package name: %s") % name)
+ raise GlsaFormatException(_(f"invalid package name: {name}"))
if name != name.cp:
- raise GlsaFormatException(_("invalid package name: %s") % name)
+ raise GlsaFormatException(_(f"invalid package name: {name}"))
name = name.cp
if name not in self.packages:
self.packages[name] = []
@@ -683,58 +692,51 @@ class Glsa:
outstream = getattr(outstream, "buffer", outstream)
outstream = codecs.getwriter(encoding)(outstream)
width = 76
- outstream.write(("GLSA %s: \n%s" % (self.nr, self.title)).center(width) + "\n")
- outstream.write((width * "=") + "\n")
- outstream.write(
- wrap(self.synopsis, width, caption=_("Synopsis: ")) + "\n"
- )
- outstream.write(_("Announced on: %s\n") % self.announced)
- outstream.write(
- _("Last revised on: %s : %02d\n\n") % (self.revised, self.count)
+ buffer = "\n".join(
+ (
+ f"GLSA {self.nr}: ",
+ f"{self.title}".center(width),
+ "=" * width,
+ wrap(self.synopsis, width, caption=_("Synopsis: ")),
+ _(f"Announced on: {self.announced}"),
+			_(f"Last revised on: {self.revised} : {self.count:02d}\n"),
+ )
)
+ outstream.write(buffer)
if self.glsatype == "ebuild":
for k in self.packages:
pkg = self.packages[k]
for path in pkg:
vul_vers = ", ".join(path["vul_vers"])
unaff_vers = ", ".join(path["unaff_vers"])
- outstream.write(_("Affected package: %s\n") % k)
+ outstream.write(_(f"Affected package: {k}\n"))
outstream.write(_("Affected archs: "))
if path["arch"] == "*":
outstream.write(_("All\n"))
else:
- outstream.write("%s\n" % path["arch"])
- outstream.write(_("Vulnerable: %s\n") % vul_vers)
- outstream.write(_("Unaffected: %s\n\n") % unaff_vers)
+ outstream.write(f"{path['arch']}\n")
+ outstream.write(_(f"Vulnerable: {vul_vers}\n"))
+ outstream.write(_(f"Unaffected: {unaff_vers}\n\n"))
elif self.glsatype == "infrastructure":
pass
if len(self.bugs) > 0:
- outstream.write(_("\nRelated bugs: "))
- outstream.write(", ".join(self.bugs))
- outstream.write("\n")
+ outstream.write(_(f"\nRelated bugs: {', '.join(self.bugs)}\n"))
if self.background:
- outstream.write(
- "\n" + wrap(self.background, width, caption=_("Background: "))
- )
- outstream.write(
- "\n" + wrap(self.description, width, caption=_("Description: "))
- )
- outstream.write(
- "\n" + wrap(self.impact_text, width, caption=_("Impact: "))
- )
- outstream.write(
- "\n" + wrap(self.workaround, width, caption=_("Workaround: "))
- )
- outstream.write(
- "\n" + wrap(self.resolution, width, caption=_("Resolution: "))
- )
+ bg = wrap(self.background, width, caption=_("Background: "))
+ outstream.write(f"\n{bg}")
myreferences = " ".join(
r.replace(" ", SPACE_ESCAPE) + NEWLINE_ESCAPE for r in self.references
)
- outstream.write(
- "\n" + wrap(myreferences, width, caption=_("References: "))
+ buffer = "\n".join(
+ (
+ wrap(self.description, width, caption=_("Description: ")),
+ wrap(self.impact_text, width, caption=_("Impact: ")),
+ wrap(self.workaround, width, caption=_("Workaround: ")),
+ wrap(self.resolution, width, caption=_("Resolution: ")),
+ wrap(myreferences, width, caption=_("References: ")),
+ )
)
- outstream.write("\n")
+ outstream.write(f"\n{buffer}\n")
def isVulnerable(self):
"""
@@ -749,7 +751,13 @@ class Glsa:
for k in self.packages:
pkg = self.packages[k]
for path in pkg:
- if path["arch"] == "*" or self.config["ARCH"] in path["arch"].split():
+ if not ARCH_REGEX.match(path["arch"]):
+ raise GlsaFormatException(
+ f"Unrecognized arch list in {self.nr} (wrong delimiter?): {path['arch']}"
+ )
+
+ arches = path["arch"].split()
+ if path["arch"] == "*" or self.config["ARCH"] in arches:
for v in path["vul_atoms"]:
rValue = rValue or (
len(match(v, self.vardbapi)) > 0
@@ -787,7 +795,7 @@ class Glsa:
@return: None
"""
if not self.isInjected():
- checkfile = io.open(
+ checkfile = open(
_unicode_encode(
os.path.join(self.config["EROOT"], PRIVATE_PATH, "glsa_injected"),
encoding=_encodings["fs"],
@@ -813,11 +821,11 @@ class Glsa:
@return: list of package-versions that have to be merged
"""
return list(
- set(
+ {
update
for (vuln, update) in self.getAffectionTable(least_change)
if update
- )
+ }
)
def getAffectionTable(self, least_change=True):
diff --git a/lib/portage/gpg.py b/lib/portage/gpg.py
new file mode 100644
index 000000000..d8a4cfcfc
--- /dev/null
+++ b/lib/portage/gpg.py
@@ -0,0 +1,108 @@
+# Copyright 2001-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import subprocess
+import sys
+import threading
+
+from portage import os
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
+from portage.exception import GPGException
+from portage.output import colorize
+from portage.util import shlex_split, varexpand, writemsg, writemsg_stdout
+
+
+class GPG:
+ """
+    Unlock GPG; must be called directly from the main program to get the correct TTY.
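+
+    A minimal usage sketch (assumes a configured portage settings object with
+    the BINPKG_GPG_SIGNING_* variables set):
+
+        gpg = GPG(portage.settings)
+        gpg.unlock()  # may prompt for the passphrase on the controlling TTY
+        # ... sign binary packages ...
+        gpg.stop()  # stop the keepalive thread, if one was started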
+ """
+
+ def __init__(self, settings):
+ """
+        Portage settings are needed to run the GPG unlock command.
+ """
+ self.settings = settings
+ self.thread = None
+ self._terminated = None
+ self.GPG_signing_base_command = self.settings.get(
+ "BINPKG_GPG_SIGNING_BASE_COMMAND"
+ )
+ self.digest_algo = self.settings.get("BINPKG_GPG_SIGNING_DIGEST")
+ self.signing_gpg_home = self.settings.get("BINPKG_GPG_SIGNING_GPG_HOME")
+ self.signing_gpg_key = self.settings.get("BINPKG_GPG_SIGNING_KEY")
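+        # The base command carries a [PORTAGE_CONFIG] placeholder; replacing it
+        # with a sign-to-stdout run over /dev/null makes gpg-agent prompt for
+        # and cache the passphrase, which is the actual "unlock".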
+ self.GPG_unlock_command = self.GPG_signing_base_command.replace(
+ "[PORTAGE_CONFIG]",
+ f"--homedir {self.signing_gpg_home} "
+ f"--digest-algo {self.digest_algo} "
+ f"--local-user {self.signing_gpg_key} "
+ "--output - /dev/null",
+ )
+
+ if "gpg-keepalive" in self.settings.features:
+ self.keepalive = True
+ else:
+ self.keepalive = False
+
+ def unlock(self):
+ """
+ Set GPG_TTY and run GPG unlock command.
+ If gpg-keepalive is set, start keepalive thread.
+ """
+ if self.GPG_unlock_command and (
+ self.settings.get("BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0])
+ == "gpkg"
+ ):
+ try:
+ os.environ["GPG_TTY"] = os.ttyname(sys.stdout.fileno())
+ except OSError as e:
+                # When run with no input/output tty, this will fail.
+                # However, if the passphrase is supplied by other means,
+                # GPG does not need to prompt for it, so the failure can
+                # be ignored.
+ writemsg(f"{colorize('WARN', str(e))}\n")
+
+ cmd = shlex_split(varexpand(self.GPG_unlock_command, mydict=self.settings))
+ return_code = subprocess.Popen(cmd, stdout=subprocess.DEVNULL).wait()
+
+ if return_code == os.EX_OK:
+ writemsg_stdout(f"{colorize('GOOD', 'unlocked')}\n")
+ sys.stdout.flush()
+ else:
+ raise GPGException("GPG unlock failed")
+
+ if self.keepalive:
+ self.GPG_unlock_command = shlex_split(
+ varexpand(self.GPG_unlock_command, mydict=self.settings)
+ )
+ self._terminated = threading.Event()
+ self.thread = threading.Thread(target=self.gpg_keepalive, daemon=True)
+ self.thread.start()
+
+ def stop(self):
+ """
+ Stop keepalive thread.
+ """
+ if self.thread is not None:
+ self._terminated.set()
+
+ def gpg_keepalive(self):
+ """
+        Call the GPG unlock command every 5 minutes to keep the
+        passphrase from expiring.
+ """
+ count = 0
+ while not self._terminated.is_set():
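+            # Wait in 60-second slices so that stop() can interrupt promptly;
+            # after five slices (about 5 minutes) fall through and re-run GPG.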
+ if count < 5:
+ if self._terminated.wait(60):
+ break
+ count += 1
+ continue
+ else:
+ count = 0
+
+ proc = subprocess.Popen(
+ self.GPG_unlock_command,
+ stdin=subprocess.DEVNULL,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.STDOUT,
+ )
+ if proc.wait() != os.EX_OK and not self._terminated.is_set():
+ raise GPGException("GPG keepalive failed")
diff --git a/lib/portage/gpkg.py b/lib/portage/gpkg.py
new file mode 100644
index 000000000..2b957d58c
--- /dev/null
+++ b/lib/portage/gpkg.py
@@ -0,0 +1,2130 @@
+# Copyright 2001-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import tarfile
+import traceback
+import io
+import threading
+import subprocess
+import errno
+import pwd
+import grp
+import stat
+import sys
+import tempfile
+from copy import copy
+from datetime import datetime
+
+import portage
+from portage import checksum
+from portage import os
+from portage import shutil
+from portage import normalize_path
+from portage import _encodings
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage.binpkg import get_binpkg_format
+from portage.exception import (
+ FileNotFound,
+ InvalidBinaryPackageFormat,
+ InvalidCompressionMethod,
+ CompressorNotFound,
+ CompressorOperationFailed,
+ CommandNotFound,
+ GPGException,
+ DigestException,
+ MissingSignature,
+ InvalidSignature,
+ SignedPackage,
+)
+from portage.output import colorize, EOutput
+from portage.util._urlopen import urlopen
+from portage.util import writemsg
+from portage.util import shlex_split, varexpand
+from portage.util.compression_probe import _compressors
+from portage.util.cpuinfo import makeopts_to_job_count
+from portage.process import find_binary
+from portage.const import MANIFEST2_HASH_DEFAULTS, HASHING_BLOCKSIZE
+
+
+class tar_stream_writer:
+ """
+    One-pass helper class that returns a file-like object
+    for creating a file inside a tar container.
+
+    It allows streaming a new file into a tar archive
+    without knowing the file size in advance.
+
+    Data can optionally be piped through an external program,
+    so the helper transparently stores compressed data.
+
+    With an optional checksum helper, it can also create the
+    corresponding checksum and GPG signature.
+
+ Example:
+
+ writer = tar_stream_writer(
+        file_tarinfo, # the file tarinfo that needs to be added
+ container, # the outer container tarfile object
+ tarfile.USTAR_FORMAT, # the outer container format
+ ["gzip"], # compression command
+ checksum_helper # checksum helper
+ )
+
+ writer.write(data)
+ writer.close()
+ """
+
+ def __init__(
+ self,
+ tarinfo,
+ container,
+ tar_format,
+ cmd=None,
+ checksum_helper=None,
+ uid=None,
+ gid=None,
+ ):
+ """
+        tarinfo          # the tarinfo of the file that needs to be added
+        container        # the outer container tarfile object
+        tar_format       # the outer container format, used to create the tar header
+        cmd              # compression command in subprocess.Popen format
+        checksum_helper  # checksum helper
+        uid              # drop root user to uid
+        gid              # drop root group to gid
+ """
+ self.checksum_helper = checksum_helper
+ self.cmd = cmd
+ self.closed = False
+        self.container = container
+        self.error = False
+ self.killed = False
+ self.tar_format = tar_format
+ self.tarinfo = tarinfo
+ self.uid = uid
+ self.gid = gid
+
+ # Record container end position
+ self.container.fileobj.seek(0, io.SEEK_END)
+ self.begin_position = self.container.fileobj.tell()
+ self.end_position = 0
+ self.file_size = 0
+
+ # Write tar header without size
+ tar_header = self.tarinfo.tobuf(
+ self.tar_format, self.container.encoding, self.container.errors
+ )
+ self.header_size = len(tar_header)
+ self.container.fileobj.write(tar_header)
+ self.container.fileobj.flush()
+ self.container.offset += self.header_size
+
+ # Start external compressor if needed
+ if cmd is None:
+ self.proc = None
+ else:
+ self.proc = subprocess.Popen(
+ cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ user=self.uid,
+ group=self.gid,
+ )
+
+ self.read_thread = threading.Thread(
+ target=self._cmd_read_thread, name="tar_stream_cmd_read", daemon=True
+ )
+ self.read_thread.start()
+
+ def __del__(self):
+ self.close()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.close()
+
+ def kill(self):
+ """
+        Kill the external program if an error occurred in Python.
+ """
+ if self.proc is not None:
+ self.killed = True
+ self.proc.kill()
+ try:
+ self.proc.stdin.close()
+ except BrokenPipeError:
+ traceback.print_exc()
+ self.close()
+
+ def _cmd_read_thread(self):
+ """
+        Runs in a thread to avoid blocking.
+        Reads stdout from the external compressor, then writes it to the
+        file in the container, and to the checksum helper if needed.
+ """
+ while True:
+ try:
+ buffer = self.proc.stdout.read(HASHING_BLOCKSIZE)
+ if not buffer:
+ self.proc.stdout.close()
+ return
+            except BrokenPipeError:
+                self.proc.stdout.close()
+                if self.killed:
+                    # Do not raise an error if killed by portage
+                    return
+                writemsg(colorize("BAD", f"GPKG subprocess failed: {self.cmd} \n"))
+                raise CompressorOperationFailed("PIPE broken")
+ try:
+ self.container.fileobj.write(buffer)
+ except OSError as err:
+ self.error = True
+ self.kill()
+ raise CompressorOperationFailed(str(err))
+ if self.checksum_helper:
+ self.checksum_helper.update(buffer)
+
+ def write(self, data):
+ """
+ Write data to tarfile or external compressor stdin
+ """
+ if self.closed:
+ raise OSError("writer closed")
+
+ if self.proc:
+ # Write to external program
+ try:
+ self.proc.stdin.write(data)
+ except BrokenPipeError:
+ self.error = True
+ writemsg(colorize("BAD", f"GPKG subprocess failed: {self.cmd} \n"))
+ raise CompressorOperationFailed("PIPE broken")
+ else:
+ # Write to container
+ self.container.fileobj.write(data)
+ if self.checksum_helper:
+ self.checksum_helper.update(data)
+
+ def close(self):
+ """
+ Update the new file tar header when close
+ """
+ if self.closed:
+ return
+
+ # Wait compressor exit
+ if self.proc is not None:
+ self.proc.stdin.close()
+ if self.proc.wait() != os.EX_OK:
+ if not (self.killed or self.error):
+ raise CompressorOperationFailed("compression failed")
+            if (
+                self.read_thread.is_alive()
+                and threading.current_thread() is not self.read_thread
+            ):
+                self.read_thread.join()
+
+ # Get container end position and calculate file size
+ self.container.fileobj.seek(0, io.SEEK_END)
+ self.end_position = self.container.fileobj.tell()
+ self.file_size = self.end_position - self.begin_position - self.header_size
+ self.tarinfo.size = self.file_size
+
+ # Tar block is 512, need padding \0
+ _, remainder = divmod(self.file_size, 512)
+ if remainder > 0:
+ padding_size = 512 - remainder
+ self.container.fileobj.write(b"\0" * padding_size)
+ self.container.offset += padding_size
+ self.container.fileobj.flush()
+
+ # Update tar header
+ tar_header = self.tarinfo.tobuf(
+ self.tar_format, self.container.encoding, self.container.errors
+ )
+ self.container.fileobj.seek(self.begin_position)
+ self.container.fileobj.write(tar_header)
+ self.container.fileobj.seek(0, io.SEEK_END)
+ self.container.fileobj.flush()
+ self.container.offset = self.container.fileobj.tell()
+ self.closed = True
+
+ # Add tarinfo to tarfile
+ self.container.members.append(self.tarinfo)
+
+ if self.checksum_helper:
+ self.checksum_helper.finish()
+
+
+class tar_stream_reader:
+ """
+    Helper class that returns a file-like object
+    for reading a file inside a tar container.
+
+    It allows transparently streaming and reading a compressed
+    file inside a tar archive.
+
+    Compressed data can optionally be piped through an external
+    program, returning the uncompressed data.
+
+ reader = tar_stream_reader(
+ fileobj, # the fileobj from tarfile.extractfile(f)
+ ["gzip", "-d"], # decompression command
+ )
+
+ reader.read()
+ reader.close()
+ """
+
+ def __init__(self, fileobj, cmd=None, uid=None, gid=None):
+ """
+        fileobj should be a file-like object that has read().
+        cmd is an optional external decompressor command.
+ """
+ self.closed = False
+ self.cmd = cmd
+ self.fileobj = fileobj
+ self.killed = False
+ self.uid = uid
+ self.gid = gid
+
+ if cmd is None:
+ self.read_io = fileobj
+ self.proc = None
+ else:
+ # Start external decompressor
+ self.proc = subprocess.Popen(
+ cmd,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ user=self.uid,
+ group=self.gid,
+ )
+ self.read_io = self.proc.stdout
+ # Start stdin block writing thread
+ self.thread = threading.Thread(
+ target=self._write_thread, name="tar_stream_stdin_writer", daemon=True
+ )
+ self.thread.start()
+
+ def __del__(self):
+ try:
+ self.close()
+ except CompressorOperationFailed:
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ try:
+ self.close()
+ except CompressorOperationFailed:
+ pass
+
+ def _write_thread(self):
+ """
+        Writer thread that feeds the decompressor, avoiding
+        blocking on a full pipe buffer.
+ """
+ try:
+ while True:
+ buffer = self.fileobj.read(HASHING_BLOCKSIZE)
+ if buffer:
+ try:
+ self.proc.stdin.write(buffer)
+ except ValueError:
+ if self.killed:
+ return
+ else:
+ raise
+ else:
+ self.proc.stdin.flush()
+ self.proc.stdin.close()
+ break
+ except BrokenPipeError:
+ if self.killed is False:
+ writemsg(colorize("BAD", f"GPKG subprocess failed: {self.cmd} \n"))
+ raise CompressorOperationFailed("PIPE broken")
+
+ def kill(self):
+        Kill the external program if an error occurred in Python.
+ kill external program if any error happened in python
+ """
+ if self.proc is not None:
+ self.killed = True
+ self.proc.kill()
+ try:
+ self.proc.stdin.close()
+ except BrokenPipeError:
+ traceback.print_exc()
+ self.close()
+
+ def read(self, bufsize=-1):
+ """
+        Return data from the decompressor's stdout.
+ """
+ if self.closed:
+            raise OSError("reader closed")
+ else:
+ return self.read_io.read(bufsize)
+
+ def close(self):
+ """
+        Wait for the external program to complete and clean up.
+ """
+ if self.closed:
+ return
+
+ self.closed = True
+
+ if self.proc is not None:
+ self.thread.join()
+ try:
+ if self.proc.wait() != os.EX_OK:
+ if not self.killed:
+                        writemsg(colorize("BAD", "GPKG external program failed.\n"))
+ raise CompressorOperationFailed("decompression failed")
+ finally:
+ self.proc.stdout.close()
+
+
+class checksum_helper:
+ """
+    Generate checksums, and generate and verify GPG signatures.
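+
+    A minimal signing sketch (assumes settings with the BINPKG_GPG_SIGNING_*
+    variables configured; hash names come from MANIFEST2_HASH_DEFAULTS):
+
+        helper = checksum_helper(settings, gpg_operation=checksum_helper.SIGNING)
+        helper.update(b"data")
+        helper.finish()
+        signature = helper.gpg_output
+        digest = helper.libs["SHA512"].hexdigest()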
+ """
+
+ SIGNING = 0
+ VERIFY = 1
+
+ def __init__(self, settings, gpg_operation=None, detached=True, signature=None):
+ """
+        settings         # portage settings
+        gpg_operation    # either SIGNING or VERIFY
+        detached         # whether the signature is detached (default True)
+        signature        # GPG signature string, used for GPG verify only
+ """
+ self.settings = settings
+ self.gpg_operation = gpg_operation
+ self.gpg_proc = None
+ self.gpg_result = None
+ self.gpg_output = None
+ self.finished = False
+ self.sign_file_path = None
+
+ if (gpg_operation == checksum_helper.VERIFY) and (os.getuid() == 0):
+ try:
+ drop_user = self.settings.get("GPG_VERIFY_USER_DROP", "nobody")
+ if drop_user == "":
+ self.uid = None
+ else:
+ self.uid = pwd.getpwnam(drop_user).pw_uid
+ except KeyError:
+ writemsg(colorize("BAD", f"!!! Failed to find user {drop_user}.\n"))
+ raise
+
+ try:
+ drop_group = self.settings.get("GPG_VERIFY_GROUP_DROP", "nogroup")
+ if drop_group == "":
+ self.gid = None
+ else:
+ self.gid = grp.getgrnam(drop_group).gr_gid
+ except KeyError:
+ writemsg(colorize("BAD", f"!!! Failed to find group {drop_group}.\n"))
+ raise
+ else:
+ self.uid = None
+ self.gid = None
+
+ # Initialize the hash libs
+ self.libs = {}
+ for hash_name in MANIFEST2_HASH_DEFAULTS:
+ self.libs[hash_name] = checksum.hashfunc_map[hash_name]._hashobject()
+
+ # GPG
+ env = self.settings.environ()
+ if self.gpg_operation == checksum_helper.SIGNING:
+ gpg_signing_base_command = self.settings.get(
+ "BINPKG_GPG_SIGNING_BASE_COMMAND"
+ )
+ digest_algo = self.settings.get("BINPKG_GPG_SIGNING_DIGEST")
+ gpg_home = self.settings.get("BINPKG_GPG_SIGNING_GPG_HOME")
+ gpg_key = self.settings.get("BINPKG_GPG_SIGNING_KEY")
+
+ if detached:
+ gpg_detached = "--detach-sig"
+ else:
+ gpg_detached = "--clear-sign"
+
+ if gpg_signing_base_command:
+ gpg_signing_command = gpg_signing_base_command.replace(
+ "[PORTAGE_CONFIG]",
+ f"--homedir {gpg_home} "
+ f"--digest-algo {digest_algo} "
+ f"--local-user {gpg_key} "
+ f"{gpg_detached} "
+ "--batch --no-tty",
+ )
+
+ gpg_signing_command = shlex_split(
+ varexpand(gpg_signing_command, mydict=self.settings)
+ )
+ gpg_signing_command = [x for x in gpg_signing_command if x != ""]
+ try:
+ env["GPG_TTY"] = os.ttyname(sys.stdout.fileno())
+ except OSError:
+ pass
+ else:
+ raise CommandNotFound("GPG signing command is not set")
+
+ self.gpg_proc = subprocess.Popen(
+ gpg_signing_command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env,
+ )
+
+ elif self.gpg_operation == checksum_helper.VERIFY:
+            if signature is None and detached:
+ raise MissingSignature("No signature provided")
+
+ gpg_verify_base_command = self.settings.get(
+ "BINPKG_GPG_VERIFY_BASE_COMMAND"
+ )
+ gpg_home = self.settings.get("BINPKG_GPG_VERIFY_GPG_HOME")
+
+ if not gpg_verify_base_command:
+ raise CommandNotFound("GPG verify command is not set")
+
+ gpg_verify_command = gpg_verify_base_command.replace(
+ "[PORTAGE_CONFIG]", f"--homedir {gpg_home} "
+ )
+
+ if detached:
+ self.sign_file_fd, self.sign_file_path = tempfile.mkstemp(
+ ".sig", "portage-sign-"
+ )
+
+ gpg_verify_command = gpg_verify_command.replace(
+ "[SIGNATURE]", f"{self.sign_file_path} -"
+ )
+
+ # Create signature file and allow everyone read
+ with open(self.sign_file_fd, "wb") as sign:
+ sign.write(signature)
+ os.chmod(self.sign_file_path, 0o644)
+ else:
+ gpg_verify_command = gpg_verify_command.replace(
+ "[SIGNATURE]", "--output - -"
+ )
+
+ gpg_verify_command = shlex_split(
+ varexpand(gpg_verify_command, mydict=self.settings)
+ )
+ gpg_verify_command = [x for x in gpg_verify_command if x != ""]
+
+ self.gpg_proc = subprocess.Popen(
+ gpg_verify_command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env,
+ user=self.uid,
+ group=self.gid,
+ )
+
+ def __del__(self):
+ self.finish()
+
+ def _check_gpg_status(self, gpg_status):
+ """
+ Check GPG status log for extra info.
+ GPG will return OK even if the signature owner is not trusted.
+ """
+ good_signature = False
+ trust_signature = False
+
+ for l in gpg_status.splitlines():
+ if l.startswith("[GNUPG:] GOODSIG"):
+ good_signature = True
+
+ if l.startswith("[GNUPG:] TRUST_ULTIMATE") or l.startswith(
+ "[GNUPG:] TRUST_FULLY"
+ ):
+ trust_signature = True
+
+ if (not good_signature) or (not trust_signature):
+ writemsg(colorize("BAD", f"!!!\n{self.gpg_result.decode()}"))
+ raise InvalidSignature("GPG verify failed")
+
+ def update(self, data):
+ """
+ Write data to hash libs and GPG stdin.
+ """
+ for c in self.libs:
+ self.libs[c].update(data)
+
+ if self.gpg_proc is not None:
+ self.gpg_proc.stdin.write(data)
+
+ def finish(self):
+ """
+        Signal EOF to GPG, collect the results, then clean up.
+ """
+ if self.finished:
+ return
+
+ if self.gpg_proc is not None:
+ # Tell GPG EOF
+ self.gpg_proc.stdin.close()
+
+ return_code = self.gpg_proc.wait()
+
+ if self.sign_file_path:
+ os.remove(self.sign_file_path)
+
+ self.finished = True
+
+ self.gpg_result = self.gpg_proc.stderr.read()
+ self.gpg_output = self.gpg_proc.stdout.read()
+ self.gpg_proc.stdout.close()
+ self.gpg_proc.stderr.close()
+
+ if return_code == os.EX_OK:
+ if self.gpg_operation == checksum_helper.VERIFY:
+ self._check_gpg_status(self.gpg_result.decode())
+ else:
+ writemsg(colorize("BAD", f"!!!\n{self.gpg_result.decode()}"))
+ if self.gpg_operation == checksum_helper.SIGNING:
+ writemsg(colorize("BAD", self.gpg_output.decode()))
+ raise GPGException("GPG signing failed")
+ elif self.gpg_operation == checksum_helper.VERIFY:
+ raise InvalidSignature("GPG verify failed")
+
+
+class tar_safe_extract:
+ """
+ A safer version of tar extractall that performs sanity checks.
+ Note that this does not solve all security problems.
+ """
+
+ def __init__(self, tar: tarfile.TarFile, prefix: str = ""):
+ """
+ tar: an opened TarFile that is ready to be read.
+ prefix: an optional prefix naming the inner directory that should be
+ considered the root directory, e.g. "metadata" or "image".
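+
+ Minimal usage sketch (the archive path is hypothetical):
+ with tarfile.open("/tmp/example.gpkg.tar") as tar:
+ tar_safe_extract(tar, "image").extractall("/tmp/dest")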
+ """
+ self.tar = tar
+ self.prefix = prefix
+ self.closed = False
+ self.file_list = []
+
+ def extractall(self, dest_dir: str):
+ """
+ Extract all files to a temporary directory inside dest_dir, and move
+ them to dest_dir after the sanity checks pass.
+ """
+ if self.closed:
+ raise OSError("Tar file is closed.")
+ temp_dir = tempfile.TemporaryDirectory(dir=dest_dir)
+ try:
+ while True:
+ member = self.tar.next()
+ if member is None:
+ break
+ if (member.name in self.file_list) or (
+ os.path.join(".", member.name) in self.file_list
+ ):
+ writemsg(
+ colorize(
+ "BAD", f"Danger: duplicate files detected: {member.name}\n"
+ )
+ )
+ raise ValueError("Duplicate files detected.")
+ if member.name.startswith("/"):
+ writemsg(
+ colorize(
+ "BAD", f"Danger: absolute path detected: {member.name}\n"
+ )
+ )
+ raise ValueError("Absolute path detected.")
+ if member.name.startswith("../") or ("/../" in member.name):
+ writemsg(
+ colorize(
+ "BAD", f"Danger: path traversal detected: {member.name}\n"
+ )
+ )
+ raise ValueError("Path traversal detected.")
+ if member.isdev():
+ writemsg(
+ colorize(
+ "BAD", f"Danger: device file detected: {member.name}\n"
+ )
+ )
+ raise ValueError("Device file detected.")
+ if member.islnk() and (member.linkname not in self.file_list):
+ writemsg(
+ colorize(
+ "BAD", f"Danger: hardlink escape detected: {member.name}\n"
+ )
+ )
+ raise ValueError("Hardlink escape detected.")
+
+ self.file_list.append(member.name)
+ self.tar.extract(member, path=temp_dir.name)
+
+ data_dir = os.path.join(temp_dir.name, self.prefix)
+ for file in os.listdir(data_dir):
+ shutil.move(os.path.join(data_dir, file), os.path.join(dest_dir, file))
+ finally:
+ temp_dir.cleanup()
+ self.closed = True
+
+
+class gpkg:
+ """
+ Gentoo binary package
+ https://www.gentoo.org/glep/glep-0078.html
+ """
+
+ def __init__(self, settings, basename=None, gpkg_file=None):
+ """
+ The gpkg class handles all gpkg operations for one package.
+ basename is the package basename.
+ gpkg_file should be an existing file path for reading, or the path
+ where a new file will be created.
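+
+ Illustrative construction (basename, path, and settings are assumptions):
+ package = gpkg(settings, "app-misc/hello-1.0", "/tmp/hello-1.0.gpkg.tar")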
+ """
+ self.settings = settings
+ self.gpkg_version = "gpkg-1"
+ if gpkg_file is None:
+ self.gpkg_file = None
+ else:
+ self.gpkg_file = _unicode_decode(
+ gpkg_file, encoding=_encodings["fs"], errors="strict"
+ )
+
+ if basename is None:
+ self.basename = None
+ else:
+ self.basename = basename.split("/", maxsplit=1)[-1]
+
+ self.checksums = []
+ self.manifest_old = []
+ self.signature_exist = None
+ self.prefix = None
+
+ # Compression is the compression algorithm; if set to None,
+ # no compression will be used.
+ self.compression = self.settings.get("BINPKG_COMPRESS", None)
+ if self.compression in ["", "none"]:
+ self.compression = None
+
+ # create_signature controls whether a signature is created for the package.
+ if "binpkg-signing" in self.settings.features:
+ self.create_signature = True
+ else:
+ self.create_signature = False
+
+ # request_signature controls whether signature files are mandatory.
+ # If set to true, any missing signature file will cause processing to be rejected.
+ if "binpkg-request-signature" in self.settings.features:
+ self.request_signature = True
+ else:
+ self.request_signature = False
+
+ # verify_signature controls whether the package signature is verified.
+ # In rare cases the user may want to ignore the signature,
+ # e.g. for a package with an expired signature.
+ if "binpkg-ignore-signature" in self.settings.features:
+ self.verify_signature = False
+ else:
+ self.verify_signature = True
+
+ self.ext_list = {
+ "gzip": ".gz",
+ "bzip2": ".bz2",
+ "lz4": ".lz4",
+ "lzip": ".lz",
+ "lzop": ".lzo",
+ "xz": ".xz",
+ "zstd": ".zst",
+ }
+
+ def unpack_metadata(self, dest_dir=None):
+ """
+ Unpack metadata to dest_dir.
+ If dest_dir is None, return the files and their contents in a dict.
+ The dict keys will be UTF-8 strings, not bytes.
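+
+ Usage sketch (the destination path is hypothetical):
+ metadata = package.unpack_metadata() # dict of file name -> bytes
+ package.unpack_metadata("/tmp/metadata") # extract to a directory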
+ """
+ self._verify_binpkg(metadata_only=True)
+
+ with tarfile.open(self.gpkg_file, "r") as container:
+ metadata_tarinfo, metadata_comp = self._get_inner_tarinfo(
+ container, "metadata"
+ )
+
+ with tar_stream_reader(
+ container.extractfile(metadata_tarinfo),
+ self._get_decompression_cmd(metadata_comp),
+ ) as metadata_reader:
+ metadata_tar = io.BytesIO(metadata_reader.read())
+
+ with tarfile.open(mode="r:", fileobj=metadata_tar) as metadata:
+ if dest_dir is None:
+ metadata_ = {
+ os.path.relpath(k.name, "metadata"): metadata.extractfile(
+ k
+ ).read()
+ for k in metadata.getmembers()
+ }
+ else:
+ metadata_safe = tar_safe_extract(metadata, "metadata")
+ metadata_safe.extractall(dest_dir)
+ metadata_ = True
+ metadata_tar.close()
+ return metadata_
+
+ def get_metadata(self, want=None):
+ """
+ Get package metadata.
+ If want is a list, return all wanted key-value pairs in a dict.
+ If want is a str, return the value of the wanted key.
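+
+ Illustrative calls (the key names assume standard ebuild metadata):
+ pf = package.get_metadata("PF")
+ some = package.get_metadata(["CATEGORY", "PF"])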
+ """
+ if want is None:
+ return self.unpack_metadata()
+ elif isinstance(want, str):
+ metadata = self.unpack_metadata()
+ metadata_want = metadata.get(want, None)
+ return metadata_want
+ else:
+ metadata = self.unpack_metadata()
+ metadata_want = {k: metadata.get(k, None) for k in want}
+ return metadata_want
+
+ def get_metadata_url(self, url, want=None):
+ """
+ Return the requested metadata from the gpkg at the given url.
+ By default, return all metadata.
+ Use 'want' to get specific names from the metadata.
+ This method only supports the correct package format.
+ A wrong file order or an incorrect basename is considered invalid,
+ to reduce potential attacks.
+ The signature is only checked if the signature file is the next file.
+ The Manifest is ignored since it is at the end of the package.
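+
+ Usage sketch (the URL is hypothetical):
+ metadata = package.get_metadata_url("https://example.org/hello-1.0.gpkg.tar")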
+ """
+ # The initial download size for the file head
+ init_size = 51200
+
+ # Load remote container
+ container_file = io.BytesIO(
+ urlopen(url, headers={"Range": "bytes=0-" + str(init_size)}).read()
+ )
+
+ # Check gpkg and metadata
+ with tarfile.open(mode="r", fileobj=container_file) as container:
+ if self.gpkg_version not in (
+ os.path.basename(f) for f in container.getnames()
+ ):
+ raise InvalidBinaryPackageFormat("Invalid gpkg file.")
+
+ metadata_tarinfo, metadata_comp = self._get_inner_tarinfo(
+ container, "metadata"
+ )
+
+ # Extra 10240 bytes for signature
+ end_size = metadata_tarinfo.offset_data + metadata_tarinfo.size + 10240
+ _, remainder = divmod(end_size, 512)
+ end_size += 512 - remainder
+
+ # Download more data if needed
+ if end_size > 10000000:
+ raise InvalidBinaryPackageFormat("metadata too large " + str(end_size))
+ if end_size > init_size:
+ container_file.seek(0, io.SEEK_END)
+ container_file.write(
+ urlopen(
+ url,
+ headers={"Range": f"bytes={init_size + 1}-{end_size}"},
+ ).read()
+ )
+
+ container_file.seek(0)
+
+ # Reload and process full metadata
+ with tarfile.open(mode="r", fileobj=container_file) as container:
+ metadata_tarinfo, metadata_comp = self._get_inner_tarinfo(
+ container, "metadata"
+ )
+
+ # Verify metadata file signature if needed
+ # binpkg-ignore-signature can override this.
+ signature_filename = metadata_tarinfo.name + ".sig"
+ if signature_filename in container.getnames():
+ if self.request_signature and self.verify_signature:
+ metadata_signature = container.extractfile(
+ signature_filename
+ ).read()
+ checksum_info = checksum_helper(
+ self.settings,
+ gpg_operation=checksum_helper.VERIFY,
+ signature=metadata_signature,
+ )
+ checksum_info.update(container.extractfile(metadata_tarinfo).read())
+ checksum_info.finish()
+
+ # Load metadata
+ with tar_stream_reader(
+ container.extractfile(metadata_tarinfo),
+ self._get_decompression_cmd(metadata_comp),
+ ) as metadata_reader:
+ metadata_file = io.BytesIO(metadata_reader.read())
+
+ with tarfile.open(mode="r:", fileobj=metadata_file) as metadata:
+ if want is None:
+ metadata_ = {
+ os.path.relpath(k.name, "metadata"): metadata.extractfile(
+ k
+ ).read()
+ for k in metadata.getmembers()
+ }
+ else:
+ metadata_ = {
+ os.path.relpath(k.name, "metadata"): metadata.extractfile(
+ k
+ ).read()
+ for k in metadata.getmembers()
+ if k in want
+ }
+ metadata_file.close()
+ container_file.close()
+ return metadata_
+
+ def compress(self, root_dir, metadata, clean=False):
+ """
+ Use the initialized configuration to create a new gpkg file from root_dir.
+ Any existing file will be overwritten.
+ metadata is a dict; the keys are the file names and the values are
+ the file contents.
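+
+ Usage sketch (paths and metadata values are hypothetical):
+ package.compress("/tmp/image", {"CATEGORY": "app-misc\n", "PF": "hello-1.0\n"})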
+ """
+
+ root_dir = normalize_path(
+ _unicode_decode(root_dir, encoding=_encodings["fs"], errors="strict")
+ )
+
+ # Get pre image info
+ container_tar_format, image_tar_format = self._get_tar_format_from_stats(
+ *self._check_pre_image_files(root_dir)
+ )
+
+ # Long CPV
+ if len(self.basename) >= 154:
+ container_tar_format = tarfile.GNU_FORMAT
+
+ # gpkg container
+ container = tarfile.TarFile(
+ name=self.gpkg_file, mode="w", format=container_tar_format
+ )
+
+ # gpkg version
+ gpkg_version_file = tarfile.TarInfo(
+ os.path.join(self.basename, self.gpkg_version)
+ )
+ gpkg_version_file.mtime = datetime.now().timestamp()
+ container.addfile(gpkg_version_file)
+ checksum_info = checksum_helper(self.settings)
+ checksum_info.finish()
+ self._record_checksum(checksum_info, gpkg_version_file)
+
+ compression_cmd = self._get_compression_cmd()
+
+ # metadata
+ self._add_metadata(container, metadata, compression_cmd)
+
+ # image
+ if self.create_signature:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.SIGNING
+ )
+ else:
+ checksum_info = checksum_helper(self.settings)
+
+ image_tarinfo = self._create_tarinfo("image")
+ image_tarinfo.mtime = datetime.now().timestamp()
+ with tar_stream_writer(
+ image_tarinfo, container, image_tar_format, compression_cmd, checksum_info
+ ) as image_writer:
+ with tarfile.open(
+ mode="w|", fileobj=image_writer, format=image_tar_format
+ ) as image_tar:
+ image_tar.add(root_dir, "image", recursive=True)
+
+ image_tarinfo = container.getmember(image_tarinfo.name)
+ self._record_checksum(checksum_info, image_tarinfo)
+
+ if self.create_signature:
+ self._add_signature(checksum_info, image_tarinfo, container)
+
+ self._add_manifest(container)
+
+ # Check if all directories are the same in the container
+ prefix = os.path.commonpath(container.getnames())
+ if not prefix:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch in {self.gpkg_file}"
+ )
+
+ container.close()
+
+ def decompress(self, decompress_dir):
+ """
+ Decompress the current gpkg to decompress_dir.
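+
+ Usage sketch (the path is hypothetical):
+ package.decompress("/tmp/hello-image")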
+ """
+ decompress_dir = normalize_path(
+ _unicode_decode(decompress_dir, encoding=_encodings["fs"], errors="strict")
+ )
+
+ self._verify_binpkg()
+ os.makedirs(decompress_dir, mode=0o755, exist_ok=True)
+
+ with tarfile.open(self.gpkg_file, "r") as container:
+ image_tarinfo, image_comp = self._get_inner_tarinfo(container, "image")
+
+ with tar_stream_reader(
+ container.extractfile(image_tarinfo),
+ self._get_decompression_cmd(image_comp),
+ ) as image_tar:
+ with tarfile.open(mode="r|", fileobj=image_tar) as image:
+ try:
+ image_safe = tar_safe_extract(image, "image")
+ image_safe.extractall(decompress_dir)
+ image_tar.close()
+ except Exception as ex:
+ writemsg(colorize("BAD", "!!!Extract failed.\n"))
+ raise
+ finally:
+ if not image_tar.closed:
+ image_tar.kill()
+
+ def update_metadata(self, metadata, new_basename=None, force=False):
+ """
+ Update metadata in the gpkg file.
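+
+ Usage sketch (the values are hypothetical):
+ package.update_metadata({"PF": "hello-1.0-r1\n"}, new_basename="app-misc/hello-1.0-r1")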
+ """
+ self._verify_binpkg()
+ self.checksums = []
+ if self.signature_exist and not force:
+ raise SignedPackage("Cannot update a signed gpkg file")
+
+ if new_basename is None:
+ if self.basename:
+ new_basename = self.basename
+ elif self.prefix:
+ new_basename = self.prefix
+ else:
+ raise InvalidBinaryPackageFormat("No basename or prefix specified")
+ else:
+ new_basename = new_basename.split("/", maxsplit=1)[-1]
+ self.basename = new_basename
+
+ with open(self.gpkg_file, "rb") as container:
+ container_tar_format = self._get_tar_format(container)
+ if container_tar_format is None:
+ raise InvalidBinaryPackageFormat("Cannot identify tar format")
+
+ # container
+ tmp_gpkg_file_name = f"{self.gpkg_file}.{os.getpid()}"
+ with tarfile.TarFile(
+ name=tmp_gpkg_file_name, mode="w", format=container_tar_format
+ ) as container:
+ # gpkg version
+ gpkg_version_file = tarfile.TarInfo(
+ os.path.join(new_basename, self.gpkg_version)
+ )
+ gpkg_version_file.mtime = datetime.now().timestamp()
+ container.addfile(gpkg_version_file)
+ checksum_info = checksum_helper(self.settings)
+ checksum_info.finish()
+ self._record_checksum(checksum_info, gpkg_version_file)
+
+ compression_cmd = self._get_compression_cmd()
+
+ # metadata
+ self._add_metadata(container, metadata, compression_cmd)
+
+ # Reuse the remaining files
+ with tarfile.open(self.gpkg_file, "r") as container_old:
+ manifest_old = self.manifest_old.copy()
+
+ for m in manifest_old:
+ file_name_old = m[1]
+ if os.path.basename(file_name_old) == self.gpkg_version:
+ continue
+ if os.path.basename(file_name_old).startswith("metadata"):
+ continue
+ old_data_tarinfo = container_old.getmember(
+ os.path.join(self.prefix, file_name_old)
+ )
+ new_data_tarinfo = copy(old_data_tarinfo)
+ new_file_path = list(os.path.split(new_data_tarinfo.name))
+ new_file_path[0] = new_basename
+ new_data_tarinfo.name = os.path.join(*new_file_path)
+ container.addfile(
+ new_data_tarinfo, container_old.extractfile(old_data_tarinfo)
+ )
+ self.checksums.append(m)
+
+ self._add_manifest(container)
+
+ # Check if all directories are the same in the container
+ prefix = os.path.commonpath(container.getnames())
+ if not prefix:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch in {self.gpkg_file}; files: "
+ f"{container.getnames()}"
+ )
+
+ shutil.move(tmp_gpkg_file_name, self.gpkg_file)
+
+ def update_signature(self, keep_current_signature=False):
+ """
+ Add / update signature in the gpkg file.
+ If keep_current_signature is True, keep the current signature; otherwise, re-sign it.
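+
+ Usage sketch:
+ package.update_signature(keep_current_signature=False)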
+ """
+ self.create_signature = True
+ self._verify_binpkg()
+ self.checksums = []
+
+ with open(self.gpkg_file, "rb") as container:
+ container_tar_format = self._get_tar_format(container)
+ if container_tar_format is None:
+ raise InvalidBinaryPackageFormat("Cannot identify tar format")
+
+ # container
+ tmp_gpkg_file_name = f"{self.gpkg_file}.{os.getpid()}"
+ with tarfile.TarFile(
+ name=tmp_gpkg_file_name, mode="w", format=container_tar_format
+ ) as container:
+ # gpkg version
+ gpkg_version_file = tarfile.TarInfo(
+ os.path.join(self.prefix, self.gpkg_version)
+ )
+ gpkg_version_file.mtime = datetime.now().timestamp()
+ container.addfile(gpkg_version_file)
+ checksum_info = checksum_helper(self.settings)
+ checksum_info.finish()
+ self._record_checksum(checksum_info, gpkg_version_file)
+
+ # Reuse the remaining files
+ with tarfile.open(self.gpkg_file, "r") as container_old:
+ manifest_old = self.manifest_old.copy()
+ file_list_old = [f[1] for f in manifest_old]
+
+ for m in manifest_old:
+ file_name_old = m[1]
+ if os.path.basename(file_name_old) == self.gpkg_version:
+ continue
+ if os.path.basename(file_name_old).endswith(".sig"):
+ continue
+ old_data_tarinfo = container_old.getmember(
+ os.path.join(self.prefix, file_name_old)
+ )
+ new_data_tarinfo = copy(old_data_tarinfo)
+
+ container.addfile(
+ new_data_tarinfo, container_old.extractfile(old_data_tarinfo)
+ )
+ self.checksums.append(m)
+
+ # Check if the signature file exists and reuse it, or create a new one.
+ if keep_current_signature and (
+ file_name_old + ".sig" in file_list_old
+ ):
+ old_data_sign_tarinfo = container_old.getmember(
+ file_name_old + ".sig"
+ )
+ new_data_sign_tarinfo = copy(old_data_sign_tarinfo)
+ container.addfile(
+ new_data_sign_tarinfo,
+ container_old.extractfile(old_data_sign_tarinfo),
+ )
+ for manifest_sign in manifest_old:
+ if manifest_sign[1] == file_name_old + ".sig":
+ self.checksums.append(manifest_sign)
+ break
+ else:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.SIGNING
+ )
+ checksum_info.update(
+ container_old.extractfile(old_data_tarinfo).read()
+ )
+ checksum_info.finish()
+ self._add_signature(checksum_info, new_data_tarinfo, container)
+
+ self._add_manifest(container)
+
+ # Check if all directories are the same in the container
+ prefix = os.path.commonpath(container.getnames())
+ if not prefix:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch in {self.gpkg_file}"
+ )
+
+ shutil.move(tmp_gpkg_file_name, self.gpkg_file)
+
+ def _add_metadata(self, container, metadata, compression_cmd):
+ """
+ Add metadata to the container.
+ """
+ if metadata is None:
+ metadata = {}
+ metadata_tarinfo = self._create_tarinfo("metadata")
+ metadata_tarinfo.mtime = datetime.now().timestamp()
+
+ if self.create_signature:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.SIGNING
+ )
+ else:
+ checksum_info = checksum_helper(self.settings)
+
+ with tar_stream_writer(
+ metadata_tarinfo,
+ container,
+ tarfile.USTAR_FORMAT,
+ compression_cmd,
+ checksum_info,
+ ) as metadata_writer:
+ with tarfile.open(
+ mode="w|", fileobj=metadata_writer, format=tarfile.USTAR_FORMAT
+ ) as metadata_tar:
+ for m in metadata:
+ m_info = tarfile.TarInfo(os.path.join("metadata", m))
+ m_info.mtime = datetime.now().timestamp()
+
+ if isinstance(metadata[m], bytes):
+ m_data = io.BytesIO(metadata[m])
+ else:
+ m_data = io.BytesIO(metadata[m].encode("UTF-8"))
+
+ m_data.seek(0, io.SEEK_END)
+ m_info.size = m_data.tell()
+ m_data.seek(0)
+ metadata_tar.addfile(m_info, m_data)
+ m_data.close()
+
+ metadata_tarinfo = container.getmember(metadata_tarinfo.name)
+ self._record_checksum(checksum_info, metadata_tarinfo)
+
+ if self.create_signature:
+ self._add_signature(checksum_info, metadata_tarinfo, container)
+
+ def _quickpkg(self, contents, metadata, root_dir, protect=None):
+ """
+ Similar to compress, but for quickpkg.
+ Compresses the given files into the image relative to root,
+ ignoring all other files.
+ """
+ eout = EOutput()
+
+ protect_file = io.BytesIO(
+ b"# empty file because --include-config=n when `quickpkg` was used\n"
+ )
+ protect_file.seek(0, io.SEEK_END)
+ protect_file_size = protect_file.tell()
+
+ root_dir = normalize_path(
+ _unicode_decode(root_dir, encoding=_encodings["fs"], errors="strict")
+ )
+
+ # Get pre image info
+ container_tar_format, image_tar_format = self._get_tar_format_from_stats(
+ *self._check_pre_quickpkg_files(contents, root_dir, ignore_missing=True)
+ )
+
+ # Long CPV
+ if len(self.basename) >= 154:
+ container_tar_format = tarfile.GNU_FORMAT
+
+ # GPKG container
+ container = tarfile.TarFile(
+ name=self.gpkg_file, mode="w", format=container_tar_format
+ )
+
+ # GPKG version
+ gpkg_version_file = tarfile.TarInfo(
+ os.path.join(self.basename, self.gpkg_version)
+ )
+ gpkg_version_file.mtime = datetime.now().timestamp()
+ container.addfile(gpkg_version_file)
+ checksum_info = checksum_helper(self.settings)
+ checksum_info.finish()
+ self._record_checksum(checksum_info, gpkg_version_file)
+
+ compression_cmd = self._get_compression_cmd()
+ # Metadata
+ self._add_metadata(container, metadata, compression_cmd)
+
+ # Image
+ if self.create_signature:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.SIGNING
+ )
+ else:
+ checksum_info = checksum_helper(self.settings)
+
+ paths = sorted(contents)
+ image_tarinfo = self._create_tarinfo("image")
+ image_tarinfo.mtime = datetime.now().timestamp()
+ with tar_stream_writer(
+ image_tarinfo, container, image_tar_format, compression_cmd, checksum_info
+ ) as image_writer:
+ with tarfile.open(
+ mode="w|", fileobj=image_writer, format=image_tar_format
+ ) as image_tar:
+ if len(paths) == 0:
+ tarinfo = image_tar.tarinfo("image")
+ tarinfo.type = tarfile.DIRTYPE
+ tarinfo.size = 0
+ tarinfo.mode = 0o755
+ image_tar.addfile(tarinfo)
+
+ for path in paths:
+ try:
+ lst = os.lstat(path)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ eout.ewarn(f'Missing file from local system: "{path}"')
+ del e
+ continue
+ contents_type = contents[path][0]
+ if path.startswith(root_dir):
+ arcname = "image/" + path[len(root_dir) :]
+ else:
+ raise ValueError(f"invalid root argument: '{root_dir}'")
+ live_path = path
+ if (
+ "dir" == contents_type
+ and not stat.S_ISDIR(lst.st_mode)
+ and os.path.isdir(live_path)
+ ):
+ # Even though this was a directory in the original ${D}, it exists
+ # as a symlink to a directory in the live filesystem. It must be
+ # recorded as a real directory in the tar file to ensure that tar
+ # can properly extract its children.
+ live_path = os.path.realpath(live_path)
+ lst = os.lstat(live_path)
+
+ # Since os.lstat() inside TarFile.gettarinfo() can trigger a
+ # UnicodeEncodeError when python has something other than utf_8
+ # return from sys.getfilesystemencoding() (as in bug #388773),
+ # we implement the needed functionality here, using the result
+ # of our successful lstat call. An alternative to this would be
+ # to pass in the fileobj argument to TarFile.gettarinfo(), so
+ # that it could use fstat instead of lstat. However, that would
+ # have the unwanted effect of dereferencing symlinks.
+
+ tarinfo = image_tar.tarinfo(arcname)
+ tarinfo.mode = lst.st_mode
+ tarinfo.uid = lst.st_uid
+ tarinfo.gid = lst.st_gid
+ tarinfo.size = 0
+ tarinfo.mtime = lst.st_mtime
+ tarinfo.linkname = ""
+ if stat.S_ISREG(lst.st_mode):
+ inode = (lst.st_ino, lst.st_dev)
+ if (
+ lst.st_nlink > 1
+ and inode in image_tar.inodes
+ and arcname != image_tar.inodes[inode]
+ ):
+ tarinfo.type = tarfile.LNKTYPE
+ tarinfo.linkname = image_tar.inodes[inode]
+ else:
+ image_tar.inodes[inode] = arcname
+ tarinfo.type = tarfile.REGTYPE
+ tarinfo.size = lst.st_size
+ elif stat.S_ISDIR(lst.st_mode):
+ tarinfo.type = tarfile.DIRTYPE
+ elif stat.S_ISLNK(lst.st_mode):
+ tarinfo.type = tarfile.SYMTYPE
+ tarinfo.linkname = os.readlink(live_path)
+ else:
+ continue
+ try:
+ tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+ except KeyError:
+ pass
+ try:
+ tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+ except KeyError:
+ pass
+
+ if stat.S_ISREG(lst.st_mode):
+ if protect and protect(path):
+ protect_file.seek(0)
+ tarinfo.size = protect_file_size
+ image_tar.addfile(tarinfo, protect_file)
+ else:
+ path_bytes = _unicode_encode(
+ path, encoding=_encodings["fs"], errors="strict"
+ )
+
+ with open(path_bytes, "rb") as f:
+ image_tar.addfile(tarinfo, f)
+
+ else:
+ image_tar.addfile(tarinfo)
+
+ image_tarinfo = container.getmember(image_tarinfo.name)
+ self._record_checksum(checksum_info, image_tarinfo)
+
+ if self.create_signature:
+ self._add_signature(checksum_info, image_tarinfo, container)
+
+ self._add_manifest(container)
+
+ # Check if all directories are the same in the container
+ prefix = os.path.commonpath(container.getnames())
+ if not prefix:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch in {self.gpkg_file}"
+ )
+
+ container.close()
+
+ def _record_checksum(self, checksum_info, tarinfo):
+ """
+ Record checksum result for the given file.
+ Replace the old checksum if one already exists.
+ """
+
+ # Remove prefix directory from the filename
+ file_name = os.path.basename(tarinfo.name)
+
+ # Rebuild the list instead of removing entries while iterating over it
+ self.checksums = [c for c in self.checksums if c[1] != file_name]
+
+ checksum_record = ["DATA", file_name, str(tarinfo.size)]
+
+ for c in checksum_info.libs:
+ checksum_record.append(c)
+ checksum_record.append(checksum_info.libs[c].hexdigest())
+
+ self.checksums.append(checksum_record)
+
+ def _add_manifest(self, container):
+ """
+ Add Manifest to the container based on current checksums.
+ Create a GPG signature if needed.
+ """
+ manifest = io.BytesIO()
+
+ for m in self.checksums:
+ manifest.write((" ".join(m) + "\n").encode("UTF-8"))
+
+ if self.create_signature:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.SIGNING, detached=False
+ )
+ checksum_info.update(manifest.getvalue())
+ checksum_info.finish()
+ manifest.seek(0)
+ manifest.write(checksum_info.gpg_output)
+
+ if self.basename is not None:
+ basename = self.basename
+ elif self.prefix is not None:
+ basename = self.prefix
+ else:
+ raise InvalidBinaryPackageFormat("No basename or prefix specified")
+
+ manifest_tarinfo = tarfile.TarInfo(os.path.join(basename, "Manifest"))
+ manifest_tarinfo.size = manifest.tell()
+ manifest_tarinfo.mtime = datetime.now().timestamp()
+ manifest.seek(0)
+ container.addfile(manifest_tarinfo, manifest)
+ manifest.close()
+
+ def _load_manifest(self, manifest_string):
+ """
+ Check, load, and return the manifest as a list of per-file records.
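+
+ Each record follows the layout written by _record_checksum, e.g.
+ (sizes and hash values are illustrative):
+ DATA image.tar.zst 1234 BLAKE2B <hex digest> SHA512 <hex digest>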
+ """
+ manifest = []
+ manifest_filenames = []
+
+ for manifest_record in manifest_string.splitlines():
+ if manifest_record == "":
+ continue
+ manifest_record = manifest_record.strip().split()
+
+ if manifest_record[0] != "DATA":
+ raise DigestException("invalied Manifest")
+
+ if manifest_record[1] in manifest_filenames:
+ raise DigestException("Manifest duplicate file exists")
+
+ try:
+ int(manifest_record[2])
+ except ValueError:
+ raise DigestException("Manifest invalied file size")
+
+ manifest.append(manifest_record)
+ manifest_filenames.append(manifest_record[1])
+
+ return manifest
+
+ def _add_signature(self, checksum_info, tarinfo, container, manifest=True):
+ """
+ Add GPG signature for the given tarinfo file.
+ manifest: add to manifest
+ """
+ if checksum_info.gpg_output is None:
+ raise GPGException("GPG signature is not exists")
+
+ signature = io.BytesIO(checksum_info.gpg_output)
+ signature_tarinfo = tarfile.TarInfo(f"{tarinfo.name}.sig")
+ signature_tarinfo.size = len(signature.getvalue())
+ signature_tarinfo.mtime = datetime.now().timestamp()
+ container.addfile(signature_tarinfo, signature)
+
+ if manifest:
+ signature_checksum_info = checksum_helper(self.settings)
+ signature.seek(0)
+ signature_checksum_info.update(signature.read())
+ signature_checksum_info.finish()
+ self._record_checksum(signature_checksum_info, signature_tarinfo)
+
+ signature.close()
+
+ def _verify_binpkg(self, metadata_only=False):
+ """
+ Verify current GPKG file.
+ """
+ # Check file path
+ if self.gpkg_file is None:
+ raise FileNotFound("no gpkg file provided")
+
+ # Check that it is a regular file
+ if not os.path.isfile(self.gpkg_file):
+ raise FileNotFound(f"File not found {self.gpkg_file}")
+
+ # Check that it is a tar file
+ with open(self.gpkg_file, "rb") as container:
+ container_tar_format = self._get_tar_format(container)
+ if container_tar_format is None:
+ get_binpkg_format(self.gpkg_file, check_file=True)
+ raise InvalidBinaryPackageFormat(
+ f"Cannot identify tar format: {self.gpkg_file}"
+ )
+
+ # Check container
+ with tarfile.open(self.gpkg_file, "r") as container:
+ try:
+ container_files = container.getnames()
+ except tarfile.ReadError:
+ get_binpkg_format(self.gpkg_file, check_file=True)
+ raise InvalidBinaryPackageFormat(
+ f"Cannot read tar file: {self.gpkg_file}"
+ )
+
+ # Check if the gpkg version file exists anywhere in the container
+ if self.gpkg_version not in (os.path.basename(f) for f in container_files):
+ get_binpkg_format(self.gpkg_file, check_file=True)
+ raise InvalidBinaryPackageFormat(f"Invalid gpkg file: {self.gpkg_file}")
+
+ # Check that every entry sits exactly one directory level deep
+ for f in container_files:
+ if f.startswith("/"):
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch '{f}' in {self.gpkg_file}"
+ )
+ if f.count("/") != 1:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch '{f}' in {self.gpkg_file}"
+ )
+
+ # Check if all directories are the same in the container
+ prefix = os.path.commonpath(container_files)
+ if not prefix:
+ raise InvalidBinaryPackageFormat(
+ f"gpkg file structure mismatch in {self.gpkg_file}, {str(container_files)}"
+ )
+
+ gpkg_version_file = os.path.join(prefix, self.gpkg_version)
+
+ # If any signature exists, we assume all files have signatures.
+ if any(f.endswith(".sig") for f in container_files):
+ signature_exist = True
+ else:
+ signature_exist = False
+
+ # Check if all files are unique to avoid same name attack
+ container_files_unique = []
+ for f in container_files:
+ if f in container_files_unique:
+ raise InvalidBinaryPackageFormat(
+ f"Duplicate file {f} exist, potential attack?"
+ )
+ container_files_unique.append(f)
+
+ del container_files_unique
+
+ # Add all files to check list
+ unverified_files = container_files.copy()
+
+ # Check Manifest file
+ manifest_filename = os.path.join(prefix, "Manifest")
+ if manifest_filename not in unverified_files:
+ raise MissingSignature(f"Manifest not found: {self.gpkg_file}")
+
+ manifest_file = container.extractfile(manifest_filename)
+ manifest_data = manifest_file.read()
+ manifest_file.close()
+
+ if b"-----BEGIN PGP SIGNATURE-----" in manifest_data:
+ signature_exist = True
+
+ # Check Manifest signature if needed.
+ # binpkg-ignore-signature can override this.
+ if self.request_signature or signature_exist:
+ checksum_info = checksum_helper(
+ self.settings, gpg_operation=checksum_helper.VERIFY, detached=False
+ )
+
+ try:
+ checksum_info.update(manifest_data)
+ checksum_info.finish()
+ except (InvalidSignature, MissingSignature):
+ if self.verify_signature:
+ raise
+
+ manifest_data = checksum_info.gpg_output
+ unverified_files.remove(manifest_filename)
+ else:
+ unverified_files.remove(manifest_filename)
+
+ # Load manifest and create manifest check list
+ manifest = self._load_manifest(manifest_data.decode("UTF-8"))
+ unverified_manifest = manifest.copy()
+
+ # Check all remaining files
+ for f in unverified_files.copy():
+ if f.endswith(".sig"):
+ f_signature = None
+ else:
+ f_signature = f + ".sig"
+
+ # Find current file manifest record
+ manifest_record = None
+ for m in manifest:
+ if m[1] == os.path.basename(f):
+ manifest_record = m
+
+ if manifest_record is None:
+ raise DigestException(f"{f} checksum not found in {self.gpkg_file}")
+
+ if int(manifest_record[2]) != int(container.getmember(f).size):
+ raise DigestException(
+ f"{f} file size mismatched in {self.gpkg_file}"
+ )
+
+ # Ignore image file and signature if not needed
+ if os.path.basename(f).startswith("image") and metadata_only:
+ unverified_files.remove(f)
+ unverified_manifest.remove(manifest_record)
+ continue
+
+ # Verify current file signature if needed
+ # binpkg-ignore-signature can override this.
+ if (
+ (self.request_signature or signature_exist)
+ and self.verify_signature
+ and f_signature
+ ):
+ if f_signature in unverified_files:
+ signature_file = container.extractfile(f_signature)
+ signature = signature_file.read()
+ signature_file.close()
+ checksum_info = checksum_helper(
+ self.settings,
+ gpg_operation=checksum_helper.VERIFY,
+ signature=signature,
+ )
+ elif f == gpkg_version_file:
+ # gpkg version file is not signed
+ checksum_info = checksum_helper(self.settings)
+ else:
+ raise MissingSignature(
+ f"{f} signature not found in {self.gpkg_file}"
+ )
+ else:
+ checksum_info = checksum_helper(self.settings)
+
+ # Verify current file checksum
+ f_io = container.extractfile(f)
+ while True:
+ buffer = f_io.read(HASHING_BLOCKSIZE)
+ if buffer:
+ checksum_info.update(buffer)
+ else:
+ checksum_info.finish()
+ break
+ f_io.close()
+
+ # At least one supported checksum must be checked
+ verified_hash_count = 0
+ for c in checksum_info.libs:
+ try:
+ if (
+ checksum_info.libs[c].hexdigest().lower()
+ == manifest_record[manifest_record.index(c) + 1].lower()
+ ):
+ verified_hash_count += 1
+ else:
+ raise DigestException(
+ f"{f} checksum mismatched in {self.gpkg_file}"
+ )
+ except ValueError:
+ # Checksum method not present in the manifest record
+ pass
+
+ if verified_hash_count < 1:
+ raise DigestException(
+ f"{f} no supported checksum found in {self.gpkg_file}"
+ )
+
+ # Current file verified
+ unverified_files.remove(f)
+ unverified_manifest.remove(manifest_record)
+
+ # Check if any file IN Manifest but NOT IN binary package
+ if len(unverified_manifest) != 0:
+ raise DigestException(
+ f"Missing files: {str(unverified_manifest)} in {self.gpkg_file}"
+ )
+
+ # Check if any file NOT IN Manifest but IN binary package
+ if len(unverified_files) != 0:
+ raise DigestException(
+ f"Unknown files exists: {str(unverified_files)} in {self.gpkg_file}"
+ )
+
+ # Save current Manifest for other operations.
+ self.manifest_old = manifest.copy()
+ self.signature_exist = signature_exist
+ self.prefix = prefix
+
+ def _generate_metadata_from_dir(self, metadata_dir):
+ """
+ Read all files in metadata_dir and return them as a dict.
+ """
+ metadata = {}
+ metadata_dir = normalize_path(
+ _unicode_decode(metadata_dir, encoding=_encodings["fs"], errors="strict")
+ )
+ for parent, dirs, files in os.walk(metadata_dir):
+ for f in files:
+ try:
+ f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
+ except UnicodeDecodeError:
+ continue
+ with open(os.path.join(parent, f), "rb") as metafile:
+ metadata[f] = metafile.read()
+ return metadata
+
+ def _get_binary_cmd(self, compression, mode):
+ """
+ Get the command list from portage and try to match the compressor.
+ """
+ if compression not in _compressors:
+ raise InvalidCompressionMethod(compression)
+
+ compressor = _compressors[compression]
+ if mode not in compressor:
+ raise InvalidCompressionMethod(f"{compression}: {mode}")
+
+ if mode == "compress" and (
+ self.settings.get(f"BINPKG_COMPRESS_FLAGS_{compression.upper()}", None)
+ is not None
+ ):
+ compressor["compress"] = compressor["compress"].replace(
+ "${BINPKG_COMPRESS_FLAGS}",
+ f"${{BINPKG_COMPRESS_FLAGS_{compression.upper()}}}",
+ )
+
+ cmd = compressor[mode].replace(
+ "{JOBS}", str(makeopts_to_job_count(self.settings.get("MAKEOPTS", "1")))
+ )
+ cmd = shlex_split(varexpand(cmd, mydict=self.settings))
+
+ # Filter empty elements that make Popen fail
+ cmd = [x for x in cmd if x != ""]
+
+ if (not cmd) and ((mode + "_alt") in compressor):
+ cmd = shlex_split(
+ varexpand(compressor[mode + "_alt"], mydict=self.settings)
+ )
+ cmd = [x for x in cmd if x != ""]
+
+ if not cmd:
+ raise CompressorNotFound(compression)
+ if not find_binary(cmd[0]):
+ raise CompressorNotFound(cmd[0])
+
+ return cmd
+
+ def _get_compression_cmd(self, compression=None):
+ """
+ return compression command for Popen
+ """
+ if compression is None:
+ compression = self.compression
+ if compression is None:
+ return None
+ else:
+ return self._get_binary_cmd(compression, "compress")
+
+ def _get_decompression_cmd(self, compression=None):
+ """
+ return decompression command for Popen
+ """
+ if compression is None:
+ compression = self.compression
+ if compression is None:
+ return None
+ else:
+ return self._get_binary_cmd(compression, "decompress")
+
+ def _get_tar_format(self, fileobj):
+ """
+ Try to detect the tar format version.
+ """
+ old_position = fileobj.tell()
+ fileobj.seek(0x101)
+ magic = fileobj.read(8)
+ fileobj.seek(0x9C)
+ typeflag = fileobj.read(1)
+ fileobj.seek(old_position)
+
+ if magic == b"ustar \x00":
+ return tarfile.GNU_FORMAT
+ elif magic == b"ustar\x0000":
+ if typeflag == b"x" or typeflag == b"g":
+ return tarfile.PAX_FORMAT
+ else:
+ return tarfile.USTAR_FORMAT
+
+ return None
+
+ def _get_tar_format_from_stats(
+ self,
+ image_max_prefix_length,
+ image_max_name_length,
+ image_max_linkname_length,
+ image_max_file_size,
+ image_total_size,
+ ):
+ """
+ Choose the corresponding tar format according to
+ the image information
+ """
+ # Max possible size in UStar is 8 GiB (8589934591 bytes)
+ # stored in 11 octets
+ # Use 8000000000, just in case we need to add something extra
+
+ # If the total size > 8 GiB, the container needs the GNU tar format
+ if image_total_size < 8000000000:
+ container_tar_format = tarfile.USTAR_FORMAT
+ else:
+ container_tar_format = tarfile.GNU_FORMAT
+
+ # If the image has at least one file > 8 GiB, it needs the GNU tar format
+ if image_max_file_size < 8000000000:
+ image_tar_format = tarfile.USTAR_FORMAT
+ else:
+ image_tar_format = tarfile.GNU_FORMAT
+
+ # UStar supports a max prefix length of 155, file name of 100, and
+ # link name of 100, each ending with \x00. If any is exceeded, fall
+ # back to the GNU format.
+ if image_max_prefix_length >= 155:
+ image_tar_format = tarfile.GNU_FORMAT
+
+ if image_max_name_length >= 100:
+ image_tar_format = tarfile.GNU_FORMAT
+
+ if image_max_linkname_length >= 100:
+ image_tar_format = tarfile.GNU_FORMAT
+ return container_tar_format, image_tar_format
+
+ def _check_pre_image_files(self, root_dir, image_prefix="image"):
+ """
+ Check the pre-image file sizes and paths; return the longest
+ path length, the largest single file size, and the total file size.
+ """
+ image_prefix_length = len(image_prefix) + 1
+ root_dir = os.path.join(
+ normalize_path(
+ _unicode_decode(root_dir, encoding=_encodings["fs"], errors="strict")
+ ),
+ "",
+ )
+ root_dir_length = len(
+ _unicode_encode(root_dir, encoding=_encodings["fs"], errors="strict")
+ )
+
+ image_max_prefix_length = 0
+ image_max_name_length = 0
+ image_max_link_length = 0
+ image_max_file_size = 0
+ image_total_size = 0
+
+ for parent, dirs, files in os.walk(root_dir):
+ if portage.utf8_mode:
+ parent = os.fsencode(parent)
+ dirs = [os.fsencode(value) for value in dirs]
+ files = [os.fsencode(value) for value in files]
+
+ parent = _unicode_decode(parent, encoding=_encodings["fs"], errors="strict")
+ for d in dirs:
+ try:
+ d = _unicode_decode(d, encoding=_encodings["fs"], errors="strict")
+ except UnicodeDecodeError as err:
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
+ raise
+
+ d = os.path.join(parent, d)
+ prefix_length = (
+ len(_unicode_encode(d, encoding=_encodings["fs"], errors="strict"))
+ - root_dir_length
+ + image_prefix_length
+ )
+
+ if os.path.islink(d):
+ path_link = os.readlink(d)
+ path_link_length = len(
+ _unicode_encode(
+ path_link, encoding=_encodings["fs"], errors="strict"
+ )
+ )
+ image_max_link_length = max(image_max_link_length, path_link_length)
+
+ image_max_prefix_length = max(image_max_prefix_length, prefix_length)
+
+ for f in files:
+ try:
+ f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
+ except UnicodeDecodeError as err:
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
+ raise
+
+ filename_length = len(
+ _unicode_encode(f, encoding=_encodings["fs"], errors="strict")
+ )
+ image_max_name_length = max(image_max_name_length, filename_length)
+
+ f = os.path.join(parent, f)
+ path_length = (
+ len(_unicode_encode(f, encoding=_encodings["fs"], errors="strict"))
+ - root_dir_length
+ + image_prefix_length
+ )
+
+ file_stat = os.lstat(f)
+
+ if os.path.islink(f):
+ path_link = os.readlink(f)
+ path_link_length = len(
+ os.fsencode(path_link)
+ if portage.utf8_mode
+ else _unicode_encode(
+ path_link, encoding=_encodings["fs"], errors="strict"
+ )
+ )
+ elif file_stat.st_nlink > 1:
+ # Hardlink exists
+ path_link_length = path_length
+ else:
+ path_link_length = 0
+
+ image_max_link_length = max(image_max_link_length, path_link_length)
+
+ try:
+ file_size = os.path.getsize(f)
+ except FileNotFoundError:
+ # Ignore a missing file if it is a symlink to a non-existent file
+ if os.path.islink(f):
+ continue
+ else:
+ raise
+ image_total_size += file_size
+ image_max_file_size = max(image_max_file_size, file_size)
+
+ return (
+ image_max_prefix_length,
+ image_max_name_length,
+ image_max_link_length,
+ image_max_file_size,
+ image_total_size,
+ )
+
+ def _check_pre_quickpkg_files(
+ self, contents, root, image_prefix="image", ignore_missing=False
+ ):
+ """
+ Check the pre-quickpkg file sizes and paths; return the longest
+ path length, the largest single file size, and the total file size.
+ """
+ image_prefix_length = len(image_prefix) + 1
+ root_dir = os.path.join(
+ normalize_path(
+ _unicode_decode(root, encoding=_encodings["fs"], errors="strict")
+ ),
+ "",
+ )
+ root_dir_length = len(
+ _unicode_encode(root_dir, encoding=_encodings["fs"], errors="strict")
+ )
+
+ image_max_prefix_length = 0
+ image_max_name_length = 0
+ image_max_link_length = 0
+ image_max_file_size = 0
+ image_total_size = 0
+
+ paths = list(contents)
+ for path in paths:
+ try:
+ path = _unicode_decode(path, encoding=_encodings["fs"], errors="strict")
+ except UnicodeDecodeError as err:
+ writemsg(colorize("BAD", f"\n*** {err}\n\n"), noiselevel=-1)
+ raise
+
+ d, f = os.path.split(path)
+
+ prefix_length = (
+ len(_unicode_encode(d, encoding=_encodings["fs"], errors="strict"))
+ - root_dir_length
+ + image_prefix_length
+ )
+ image_max_prefix_length = max(image_max_prefix_length, prefix_length)
+
+ filename_length = len(
+ _unicode_encode(f, encoding=_encodings["fs"], errors="strict")
+ )
+ image_max_name_length = max(image_max_name_length, filename_length)
+
+ path_length = (
+ len(_unicode_encode(path, encoding=_encodings["fs"], errors="strict"))
+ - root_dir_length
+ + image_prefix_length
+ )
+
+ if not os.path.exists(path):
+ if ignore_missing:
+ continue
+ else:
+ raise FileNotFound(path)
+
+ file_stat = os.lstat(path)
+
+ if os.path.islink(path):
+ path_link = os.readlink(path)
+ path_link_length = len(
+ _unicode_encode(
+ path_link, encoding=_encodings["fs"], errors="strict"
+ )
+ )
+ elif file_stat.st_nlink > 1:
+ # Hardlink exists
+ path_link_length = path_length
+ else:
+ path_link_length = 0
+
+ image_max_link_length = max(image_max_link_length, path_link_length)
+
+ if os.path.isfile(path):
+ try:
+ file_size = os.path.getsize(path)
+ except FileNotFoundError:
+ # Ignore a missing file if it is a symlink to a non-existent file
+ if os.path.islink(path):
+ continue
+ else:
+ raise
+ image_total_size += file_size
+ image_max_file_size = max(image_max_file_size, file_size)
+
+ return (
+ image_max_prefix_length,
+ image_max_name_length,
+ image_max_link_length,
+ image_max_file_size,
+ image_total_size,
+ )
+
+ def _create_tarinfo(self, file_name):
+ """
+ Create new tarinfo for the new file
+ """
+ if self.compression is None:
+ ext = ""
+ elif self.compression in self.ext_list:
+ ext = self.ext_list[self.compression]
+ else:
+ raise InvalidCompressionMethod(self.compression)
+
+ if self.basename:
+ basename = self.basename
+ elif self.prefix:
+ basename = self.prefix
+ else:
+ raise InvalidBinaryPackageFormat("No basename or prefix specified")
+ data_tarinfo = tarfile.TarInfo(os.path.join(basename, file_name + ".tar" + ext))
+ return data_tarinfo
+
+ def _extract_filename_compression(self, file_name):
+ """
+ Extract the file basename and compression method
+ """
+ file_name = os.path.basename(file_name)
+ if file_name.endswith(".tar"):
+ return file_name[:-4], None
+
+ for compression in self.ext_list:
+ if file_name.endswith(".tar" + self.ext_list[compression]):
+ return (
+ file_name[: -len(".tar" + self.ext_list[compression])],
+ compression,
+ )
+
+ raise InvalidCompressionMethod(file_name)
+
+ def _get_inner_tarinfo(self, tar, file_name):
+ """
+ Get the inner tarinfo from the given container.
+ Try to get file_name under the correct basename first;
+ if that fails, try any file that has the same name as file_name
+ and return the first match.
+ """
+ if self.gpkg_version not in (os.path.basename(f) for f in tar.getnames()):
+ raise InvalidBinaryPackageFormat(f"Invalid gpkg file")
+
+ if self.basename and self.prefix and not self.prefix.startswith(self.basename):
+ writemsg(
+ colorize("WARN", f"Package basename mismatched, using {self.prefix}\n")
+ )
+
+ all_files = tar.getmembers()
+ for f in all_files:
+ try:
+ f_name, f_comp = self._extract_filename_compression(f.name)
+ except InvalidCompressionMethod:
+ continue
+
+ if f_name == file_name:
+ return f, f_comp
+
+ # Not found
+ raise FileNotFound(f"File Not found: {file_name}")
diff --git a/lib/portage/installation.py b/lib/portage/installation.py
new file mode 100644
index 000000000..53396834c
--- /dev/null
+++ b/lib/portage/installation.py
@@ -0,0 +1,21 @@
+# portage: Installation
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from enum import Enum
+
+TYPES = Enum(
+ "InstallationType",
+ [
+ "SOURCE", # Portage is not installed, but running from its source tree.
+ "MODULE", # Portage is installed solely as a Python module.
+ "SYSTEM", # Portage is fully installed to the system, possibly prefixed.
+ ],
+)
+
+if "@INSTALL_TYPE@" == "MODULE":
+ TYPE = TYPES.MODULE
+elif "@INSTALL_TYPE@" == "SYSTEM":
+ TYPE = TYPES.SYSTEM
+else:
+ TYPE = TYPES.SOURCE
diff --git a/lib/portage/localization.py b/lib/portage/localization.py
index 9df71d62d..b9c9e90ec 100644
--- a/lib/portage/localization.py
+++ b/lib/portage/localization.py
@@ -28,10 +28,7 @@ def localization_example():
a_value = "value.of.a"
b_value = 123
c_value = [1, 2, 3, 4]
- print(
- _("A: %(a)s -- B: %(b)s -- C: %(c)s")
- % {"a": a_value, "b": b_value, "c": c_value}
- )
+ print(_(f"A: {a_value} -- B: {b_value} -- C: {c_value}"))
def localized_size(num_bytes):
@@ -47,4 +44,5 @@ def localized_size(num_bytes):
except UnicodeDecodeError:
# failure to decode locale data
formatted_num = str(num_kib)
- return _unicode_decode(formatted_num, encoding=_encodings["stdio"]) + " KiB"
+ unicode_num = _unicode_decode(formatted_num, encoding=_encodings["stdio"])
+ return f"{unicode_num} KiB"
diff --git a/lib/portage/locks.py b/lib/portage/locks.py
index 67541a84d..ee40451b1 100644
--- a/lib/portage/locks.py
+++ b/lib/portage/locks.py
@@ -1,7 +1,10 @@
-# portage: Lock management code
-# Copyright 2004-2021 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+"""
+Portage: Lock management code
+"""
+
__all__ = [
"lockdir",
"unlockdir",
@@ -76,6 +79,11 @@ class _lock_manager:
del _open_inodes[self.inode_key]
+def _lockf_test_lock_fn(path, fd, flags):
+ fcntl.lockf(fd, flags)
+ return functools.partial(unlockfile, (path, fd, flags, fcntl.lockf))
+
+
def _get_lock_fn():
"""
Returns fcntl.lockf if proven to work, and otherwise returns fcntl.flock.
@@ -85,10 +93,7 @@ def _get_lock_fn():
if _lock_fn is not None:
return _lock_fn
- if _test_lock_fn(
- lambda path, fd, flags: fcntl.lockf(fd, flags)
- and functools.partial(unlockfile, (path, fd, flags, fcntl.lockf))
- ):
+ if _test_lock_fn(_lockf_test_lock_fn):
_lock_fn = fcntl.lockf
return _lock_fn
@@ -100,29 +105,26 @@ def _get_lock_fn():
def _test_lock_fn(
lock_fn: typing.Callable[[str, int, int], typing.Callable[[], None]]
) -> bool:
- def _test_lock(fd, lock_path):
- os.close(fd)
- try:
- with open(lock_path, "a") as f:
- lock_fn(lock_path, f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
- except (TryAgain, EnvironmentError) as e:
- if isinstance(e, TryAgain) or e.errno == errno.EAGAIN:
- # Parent process holds lock, as expected.
- sys.exit(0)
-
- # Something went wrong.
- sys.exit(1)
-
fd, lock_path = tempfile.mkstemp()
unlock_fn = None
try:
try:
unlock_fn = lock_fn(lock_path, fd, fcntl.LOCK_EX)
- except (TryAgain, EnvironmentError):
+ except (TryAgain, OSError):
pass
else:
_lock_manager(fd, os.fstat(fd), lock_path)
- proc = multiprocessing.Process(target=_test_lock, args=(fd, lock_path))
+ proc = multiprocessing.Process(
+ target=_subprocess_test_lock,
+ args=(
+ # Since file descriptors are not inherited unless the fork start
+ # method is used, the subprocess should only try to close an
+ # inherited file descriptor for the fork start method.
+ fd if multiprocessing.get_start_method() == "fork" else None,
+ lock_fn,
+ lock_path,
+ ),
+ )
proc.start()
proc.join()
if proc.exitcode == os.EX_OK:
@@ -138,6 +140,21 @@ def _test_lock_fn(
return False
+def _subprocess_test_lock(fd, lock_fn, lock_path):
+ if fd is not None:
+ os.close(fd)
+ try:
+ with open(lock_path, "a") as f:
+ lock_fn(lock_path, f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except (TryAgain, OSError) as e:
+ if isinstance(e, TryAgain) or e.errno == errno.EAGAIN:
+ # Parent process holds lock, as expected.
+ sys.exit(0)
+
+ # Something went wrong.
+ sys.exit(1)
+
+
def _close_fds():
"""
This is intended to be called after a fork, in order to close file
@@ -294,12 +311,12 @@ def _lockfile_iteration(
# try for a non-blocking lock, if it's held, throw a message
# we're waiting on lockfile and use a blocking attempt.
- locking_method = portage._eintr_func_wrapper(_get_lock_fn())
+ locking_method = _get_lock_fn()
try:
if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
- raise IOError(errno.ENOSYS, "Function not implemented")
+ raise OSError(errno.ENOSYS, "Function not implemented")
locking_method(myfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
- except IOError as e:
+ except OSError as e:
if not hasattr(e, "errno"):
raise
if e.errno in (errno.EACCES, errno.EAGAIN, errno.ENOLCK):
@@ -325,7 +342,7 @@ def _lockfile_iteration(
while True:
try:
locking_method(myfd, fcntl.LOCK_EX)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == errno.ENOLCK:
# This is known to occur on Solaris NFS (see
# bug #462694). Assume that the error is due
@@ -342,9 +359,7 @@ def _lockfile_iteration(
_("Error while waiting " "to lock '%s'")
% lockfilename
)
- writemsg(
- "\n!!! %s: %s\n" % (context_desc, e), noiselevel=-1
- )
+ writemsg(f"\n!!! {context_desc}: {e}\n", noiselevel=-1)
time.sleep(_HARDLINK_POLL_LATENCY)
continue
@@ -494,7 +509,7 @@ def _fstat_nlink(fd):
"""
try:
return os.fstat(fd).st_nlink
- except EnvironmentError as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
# Some filesystems such as CIFS return
# ENOENT which means st_nlink == 0.
@@ -503,7 +518,6 @@ def _fstat_nlink(fd):
def unlockfile(mytuple):
-
# XXX: Compatability hack.
if len(mytuple) == 3:
lockfilename, myfd, unlinkfile = mytuple
@@ -532,7 +546,7 @@ def unlockfile(mytuple):
except OSError:
if isinstance(lockfilename, str):
_open_fds[myfd].close()
- raise IOError(_("Failed to unlock file '%s'\n") % lockfilename)
+ raise OSError(_("Failed to unlock file '%s'\n") % lockfilename)
try:
# This sleep call was added to allow other processes that are
@@ -625,7 +639,7 @@ def hardlink_lockfile(
if e.errno in (errno.ENOENT, errno.ESTALE):
pass
else:
- func_call = "unlink('%s')" % myhardlock
+ func_call = f"unlink('{myhardlock}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
@@ -638,7 +652,7 @@ def hardlink_lockfile(
try:
myfd = os.open(lockfilename, os.O_CREAT | os.O_RDWR, 0o660)
except OSError as e:
- func_call = "open('%s')" % lockfilename
+ func_call = f"open('{lockfilename}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
@@ -686,7 +700,7 @@ def hardlink_lockfile(
try:
os.link(lockfilename, myhardlock)
except OSError as e:
- func_call = "link('%s', '%s')" % (lockfilename, myhardlock)
+ func_call = f"link('{lockfilename}', '{myhardlock}')"
if e.errno == OperationNotPermitted.errno:
raise OperationNotPermitted(func_call)
elif e.errno == PermissionDenied.errno:
diff --git a/lib/portage/mail.py b/lib/portage/mail.py
index aa2617b42..20b139ad3 100644
--- a/lib/portage/mail.py
+++ b/lib/portage/mail.py
@@ -10,7 +10,6 @@
# 'smtlib' module imports the 'email' module, that's imported
# locally as well.
-import socket
import sys
from portage import os
@@ -36,13 +35,12 @@ def TextMessage(_text):
def create_message(sender, recipient, subject, body, attachments=None):
-
from email.header import Header
from email.mime.base import MIMEBase as BaseMessage
from email.mime.multipart import MIMEMultipart as MultipartMessage
from email.utils import formatdate
- if attachments == None:
+ if attachments is None:
mymessage = TextMessage(body)
else:
mymessage = MultipartMessage()
@@ -54,7 +52,7 @@ def create_message(sender, recipient, subject, body, attachments=None):
mymessage.attach(TextMessage(x))
else:
raise portage.exception.PortageException(
- _("Can't handle type of attachment: %s") % type(x)
+ _(f"Can't handle type of attachment: {type(x)}")
)
mymessage.set_unixfrom(sender)
@@ -77,7 +75,6 @@ def create_message(sender, recipient, subject, body, attachments=None):
def send_mail(mysettings, message):
-
import smtplib
mymailhost = "localhost"
@@ -117,14 +114,13 @@ def send_mail(mysettings, message):
# user wants to use a sendmail binary instead of smtp
if mymailhost[0] == os.sep and os.path.exists(mymailhost):
- fd = os.popen(mymailhost + " -f " + myfrom + " " + myrecipient, "w")
+ fd = os.popen(f"{mymailhost } -f {myfrom} {myrecipient}", "w")
fd.write(_force_ascii_if_necessary(message.as_string()))
- if fd.close() != None:
+ if fd.close() is not None:
sys.stderr.write(
_(
- "!!! %s returned with a non-zero exit code. This generally indicates an error.\n"
+ f"!!! {mymailhost} returned with a non-zero exit code. This generally indicates an error.\n"
)
- % mymailhost
)
else:
try:
@@ -149,12 +145,11 @@ def send_mail(mysettings, message):
myconn.quit()
except smtplib.SMTPException as e:
raise portage.exception.PortageException(
- _("!!! An error occurred while trying to send logmail:\n") + str(e)
+ _(f"!!! An error occurred while trying to send logmail:\n{e}")
)
- except socket.error as e:
+ except OSError as e:
raise portage.exception.PortageException(
_(
- "!!! A network error occurred while trying to send logmail:\n%s\nSure you configured PORTAGE_ELOG_MAILURI correctly?"
+ f"!!! A network error occurred while trying to send logmail:\n{e}\nSure you configured PORTAGE_ELOG_MAILURI correctly?"
)
- % str(e)
)
diff --git a/lib/portage/manifest.py b/lib/portage/manifest.py
index 0b4fad76c..4384f647c 100644
--- a/lib/portage/manifest.py
+++ b/lib/portage/manifest.py
@@ -2,7 +2,7 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
+import itertools
import logging
import re
import stat
@@ -82,13 +82,13 @@ def parseManifest2(line):
if not isinstance(line, str):
line = " ".join(line)
myentry = None
- match = _manifest_re.match(line)
- if match is not None:
- tokens = match.group(3).split()
+ matched = _manifest_re.match(line)
+ if matched:
+ tokens = matched.group(3).split()
hashes = dict(zip(tokens[1::2], tokens[2::2]))
hashes["size"] = int(tokens[0])
myentry = Manifest2Entry(
- type=match.group(1), name=match.group(2), hashes=hashes
+ type=matched.group(1), name=matched.group(2), hashes=hashes
)
return myentry
@@ -107,19 +107,16 @@ class Manifest2Entry(ManifestEntry):
myhashkeys = list(self.hashes)
myhashkeys.remove("size")
myhashkeys.sort()
- for h in myhashkeys:
- myline += " " + h + " " + str(self.hashes[h])
- return myline
+ with_hashes = " ".join(f"{h} {self.hashes[h]}" for h in myhashkeys)
+ return f"{myline} {with_hashes}"
def __eq__(self, other):
- if (
- not isinstance(other, Manifest2Entry)
- or self.type != other.type
- or self.name != other.name
- or self.hashes != other.hashes
- ):
- return False
- return True
+ return (
+ isinstance(other, Manifest2Entry)
+ and self.type == other.type
+ and self.name == other.name
+ and self.hashes == other.hashes
+ )
def __ne__(self, other):
return not self.__eq__(other)
@@ -162,7 +159,6 @@ class Manifest:
find_invalid_path_char = _find_invalid_path_char
self._find_invalid_path_char = find_invalid_path_char
self.pkgdir = _unicode_decode(pkgdir).rstrip(os.sep) + os.sep
- self.fhashdict = {}
self.hashes = set()
self.required_hashes = set()
@@ -182,14 +178,19 @@ class Manifest:
self.required_hashes.update(required_hashes)
self.required_hashes.intersection_update(self.hashes)
- for t in MANIFEST2_IDENTIFIERS:
- self.fhashdict[t] = {}
+ self.fhashdict = {t: {} for t in MANIFEST2_IDENTIFIERS}
+
if not from_scratch:
- self._read()
- if fetchlist_dict != None:
- self.fetchlist_dict = fetchlist_dict
- else:
- self.fetchlist_dict = {}
+ # Parse Manifest file for this instance
+ try:
+ self._readManifest(self.getFullname(), myhashdict=self.fhashdict)
+ except FileNotFound:
+ pass
+
+ self.fetchlist_dict = {}
+ if fetchlist_dict:
+ self.fetchlist_dict.update(fetchlist_dict)
+
self.distdir = distdir
self.thin = thin
if thin:
@@ -206,9 +207,9 @@ class Manifest:
def getDigests(self):
"""Compability function for old digest/manifest code, returns dict of filename:{hashfunction:hashvalue}"""
- rval = {}
- for t in MANIFEST2_IDENTIFIERS:
- rval.update(self.fhashdict[t])
+ rval = {
+ k: v for t in MANIFEST2_IDENTIFIERS for k, v in self.fhashdict[t].items()
+ }
return rval
def getTypeDigests(self, ftype):
@@ -219,9 +220,8 @@ class Manifest:
"""Parse a manifest. If myhashdict is given then data will be added too it.
Otherwise, a new dict will be created and returned."""
try:
- with io.open(
+ with open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -229,19 +229,12 @@ class Manifest:
myhashdict = {}
self._parseDigests(f, myhashdict=myhashdict, **kwargs)
return myhashdict
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise FileNotFound(file_path)
else:
raise
- def _read(self):
- """Parse Manifest file for this instance"""
- try:
- self._readManifest(self.getFullname(), myhashdict=self.fhashdict)
- except FileNotFound:
- pass
-
def _parseManifestLines(self, mylines):
"""Parse manifest lines and return a list of manifest entries."""
for myline in mylines:
@@ -269,40 +262,49 @@ class Manifest:
def _getDigestData(self, distlist):
"""create a hash dict for a specific list of files"""
- myhashdict = {}
- for myname in distlist:
- for mytype in self.fhashdict:
- if myname in self.fhashdict[mytype]:
- myhashdict.setdefault(mytype, {})
- myhashdict[mytype].setdefault(myname, {})
- myhashdict[mytype][myname].update(self.fhashdict[mytype][myname])
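+ # Accumulate per type with setdefault: several distfiles can share a type,
+ # so later entries must not overwrite earlier ones.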
+ myhashdict = {}
+ for myname in distlist:
+ for mytype in self.fhashdict:
+ if myname in self.fhashdict[mytype]:
+ myhashdict.setdefault(mytype, {})[myname] = dict(
+ self.fhashdict[mytype][myname]
+ )
return myhashdict
def _createManifestEntries(self):
- valid_hashes = set(get_valid_checksum_keys())
- valid_hashes.add("size")
- mytypes = list(self.fhashdict)
- mytypes.sort()
- for t in mytypes:
- myfiles = list(self.fhashdict[t])
- myfiles.sort()
- for f in myfiles:
- myentry = Manifest2Entry(
- type=t, name=f, hashes=self.fhashdict[t][f].copy()
+ valid_hashes = set(itertools.chain(get_valid_checksum_keys(), ("size",)))
+ mytypes = sorted(self.fhashdict)
+ for mytype in mytypes:
+ myfiles = sorted(self.fhashdict[mytype])
+ for myfile in myfiles:
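+ # Emit only recognized hash types (plus "size") for each manifest entry.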
+ kept_hashes = set(self.fhashdict[mytype][myfile]).intersection(
+ valid_hashes
+ )
+ yield Manifest2Entry(
+ type=mytype,
+ name=myfile,
+ hashes={
+ kept: self.fhashdict[mytype][myfile][kept]
+ for kept in kept_hashes
+ },
+ )
- for h in list(myentry.hashes):
- if h not in valid_hashes:
- del myentry.hashes[h]
- yield myentry
def checkIntegrity(self):
- for t in self.fhashdict:
- for f in self.fhashdict[t]:
- diff = self.required_hashes.difference(set(self.fhashdict[t][f]))
- if diff:
- raise MissingParameter(
- _("Missing %s checksum(s): %s %s") % (" ".join(diff), t, f)
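+ # Lazily build (missing_required_hashes, type, filename) triples and
+ # fail on the first incomplete entry.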
+ manifest_data = (
+ (
+ self.required_hashes.difference(set(self.fhashdict[mytype][myfile])),
+ mytype,
+ myfile,
+ )
+ for mytype in self.fhashdict
+ for myfile in self.fhashdict[mytype]
+ )
+ for needed_hashes, its_type, its_file in manifest_data:
+ if needed_hashes:
+ raise MissingParameter(
+ _(
+ f"Missing {' '.join(needed_hashes)} checksum(s): {its_type} {its_file}"
)
+ )
def write(self, sign=False, force=False):
"""Write Manifest instance to disk, optionally signing it. Returns
@@ -315,30 +317,27 @@ class Manifest:
try:
myentries = list(self._createManifestEntries())
update_manifest = True
- preserved_stats = {}
- preserved_stats[self.pkgdir.rstrip(os.sep)] = os.stat(self.pkgdir)
+ preserved_stats = {self.pkgdir.rstrip(os.sep): os.stat(self.pkgdir)}
if myentries and not force:
try:
- f = io.open(
+ with open(
_unicode_encode(
self.getFullname(),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
- )
- oldentries = list(self._parseManifestLines(f))
- preserved_stats[self.getFullname()] = os.fstat(f.fileno())
- f.close()
- if len(oldentries) == len(myentries):
- update_manifest = False
- for i in range(len(oldentries)):
- if oldentries[i] != myentries[i]:
- update_manifest = True
- break
- except (IOError, OSError) as e:
+ ) as f:
+ oldentries = list(self._parseManifestLines(f))
+ preserved_stats[self.getFullname()] = os.fstat(f.fileno())
+ if len(oldentries) == len(myentries):
+ update_manifest = False
+ for oldentry, myentry in zip(oldentries, myentries):
+ if oldentry != myentry:
+ update_manifest = True
+ break
+ except OSError as e:
if e.errno == errno.ENOENT:
pass
else:
@@ -352,7 +351,7 @@ class Manifest:
# non-empty for all currently known use cases.
write_atomic(
self.getFullname(),
- "".join("%s\n" % str(myentry) for myentry in myentries),
+ "".join(f"{myentry}\n" for myentry in myentries),
)
self._apply_max_mtime(preserved_stats, myentries)
rval = True
@@ -368,7 +367,7 @@ class Manifest:
if sign:
self.sign()
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno == errno.EACCES:
raise PermissionDenied(str(e))
raise
@@ -394,30 +393,30 @@ class Manifest:
# it always rounds down. Note that stat_result.st_mtime will round
# up from 0.999999999 to 1.0 when precision is lost during conversion
# from nanosecond resolution to float.
- max_mtime = None
- _update_max = (
- lambda st: max_mtime
- if max_mtime is not None and max_mtime > st[stat.ST_MTIME]
- else st[stat.ST_MTIME]
- )
- _stat = (
- lambda path: preserved_stats[path]
- if path in preserved_stats
- else os.stat(path)
- )
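+ # Helper: return the newest mtime seen so far; max_mtime stays None
+ # until the first stat result has been folded in.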
+ def _update_max(max_mtime, st):
+ stat_mtime = st[stat.ST_MTIME]
+ if max_mtime is None:
+ return stat_mtime
+ return max(max_mtime, stat_mtime)
+
+ def _stat(path):
+ if path in preserved_stats:
+ return preserved_stats[path]
+ else:
+ return os.stat(path)
+
+ max_mtime = None
for stat_result in preserved_stats.values():
- max_mtime = _update_max(stat_result)
+ max_mtime = _update_max(max_mtime, stat_result)
for entry in entries:
if entry.type == "DIST":
continue
- abs_path = (
- os.path.join(self.pkgdir, "files", entry.name)
- if entry.type == "AUX"
- else os.path.join(self.pkgdir, entry.name)
- )
- max_mtime = _update_max(_stat(abs_path))
+ files = ""
+ if entry.type == "AUX":
+ files = "files"
+ abs_path = os.path.join(self.pkgdir, files, entry.name)
+ max_mtime = _update_max(max_mtime, _stat(abs_path))
if not self.thin:
# Account for changes to all relevant nested directories.
@@ -434,7 +433,7 @@ class Manifest:
# report such problems).
pass
else:
- max_mtime = _update_max(_stat(parent_dir))
+ max_mtime = _update_max(max_mtime, _stat(parent_dir))
if max_mtime is not None:
for path in preserved_stats:
@@ -447,8 +446,7 @@ class Manifest:
# unless this repo is being prepared for distribution
# via rsync.
writemsg_level(
- "!!! utime('%s', (%s, %s)): %s\n"
- % (path, max_mtime, max_mtime, e),
+ f"!!! utime('{path}', ({max_mtime}, {max_mtime})): {e}\n",
level=logging.WARNING,
noiselevel=-1,
)
@@ -465,18 +463,19 @@ class Manifest:
"""Add entry to Manifest optionally using hashdict to avoid recalculation of hashes"""
if ftype == "AUX" and not fname.startswith("files/"):
fname = os.path.join("files", fname)
- if not os.path.exists(self.pkgdir + fname) and not ignoreMissing:
+ if not os.path.exists(f"{self.pkgdir}{fname}") and not ignoreMissing:
raise FileNotFound(fname)
- if not ftype in MANIFEST2_IDENTIFIERS:
+ if ftype not in MANIFEST2_IDENTIFIERS:
raise InvalidDataType(ftype)
- if ftype == "AUX" and fname.startswith("files"):
+
+ if ftype == "AUX" and fname.startswith("files"):
fname = fname[6:]
self.fhashdict[ftype][fname] = {}
- if hashdict != None:
+ if hashdict is not None:
self.fhashdict[ftype][fname].update(hashdict)
if self.required_hashes.difference(set(self.fhashdict[ftype][fname])):
- self.updateFileHashes(
- ftype, fname, checkExisting=False, ignoreMissing=ignoreMissing
+ self.updateAllFileHashes(
+ ftype, [fname], checkExisting=False, ignoreMissing=ignoreMissing
)
def removeFile(self, ftype, fname):
@@ -489,17 +488,15 @@ class Manifest:
def findFile(self, fname):
"""Return entrytype of the given file if present in Manifest or None if not present"""
- for t in MANIFEST2_IDENTIFIERS:
- if fname in self.fhashdict[t]:
- return t
- return None
+ found_entries = (t for t in MANIFEST2_IDENTIFIERS if fname in self.fhashdict[t])
+ return next(found_entries, None)
def create(
self,
checkExisting=False,
assumeDistHashesSometimes=False,
assumeDistHashesAlways=False,
- requiredDistfiles=[],
+ requiredDistfiles=None,
):
"""Recreate this Manifest from scratch. This will not use any
existing checksums unless assumeDistHashesSometimes or
@@ -513,10 +510,9 @@ class Manifest:
return
if checkExisting:
self.checkAllHashes()
+ distfilehashes = {}
if assumeDistHashesSometimes or assumeDistHashesAlways:
- distfilehashes = self.fhashdict["DIST"]
- else:
- distfilehashes = {}
+ distfilehashes.update(self.fhashdict["DIST"])
self.__init__(
self.pkgdir,
distdir=self.distdir,
@@ -530,18 +526,19 @@ class Manifest:
find_invalid_path_char=self._find_invalid_path_char,
strict_misc_digests=self.strict_misc_digests,
)
- pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
- cat = self._pkgdir_category()
- pkgdir = self.pkgdir
+ update_pkgdir = self._update_thick_pkgdir
if self.thin:
- cpvlist = self._update_thin_pkgdir(cat, pn, pkgdir)
- else:
- cpvlist = self._update_thick_pkgdir(cat, pn, pkgdir)
+ update_pkgdir = self._update_thin_pkgdir
- distlist = set()
- for cpv in cpvlist:
- distlist.update(self._getCpvDistfiles(cpv))
+ cpvlist = update_pkgdir(
+ self._pkgdir_category(),
+ os.path.basename(self.pkgdir.rstrip(os.path.sep)),
+ self.pkgdir,
+ )
+ distlist = {
+ distfile for cpv in cpvlist for distfile in self._getCpvDistfiles(cpv)
+ }
if requiredDistfiles is None:
# This allows us to force removal of stale digests for the
@@ -551,9 +548,7 @@ class Manifest:
# repoman passes in an empty list, which implies that all distfiles
# are required.
requiredDistfiles = distlist.copy()
- required_hash_types = set()
- required_hash_types.add("size")
- required_hash_types.update(self.required_hashes)
+ required_hash_types = set(itertools.chain(self.required_hashes, ("size",)))
for f in distlist:
fname = os.path.join(self.distdir, f)
mystat = None
@@ -590,41 +585,44 @@ class Manifest:
return None
pf = filename[:-7]
ps = portage.versions._pkgsplit(pf)
- cpv = "%s/%s" % (cat, pf)
+ cpv = f"{cat}/{pf}"
if not ps:
- raise PortagePackageException(_("Invalid package name: '%s'") % cpv)
+ raise PortagePackageException(_(f"Invalid package name: '{cpv}'"))
if ps[0] != pn:
raise PortagePackageException(
- _("Package name does not " "match directory name: '%s'") % cpv
+ _(f"Package name does not match directory name: '{cpv}'")
)
return cpv
def _update_thin_pkgdir(self, cat, pn, pkgdir):
- for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
- break
- cpvlist = []
- for f in pkgdir_files:
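+ # Only the top-level listing is needed: next() takes the first
+ # (dirpath, dirnames, filenames) triple yielded by os.walk.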
+ _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, []))
+
+ def _process_for_cpv(filename):
try:
- f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
+ filename = _unicode_decode(
+ filename, encoding=_encodings["fs"], errors="strict"
+ )
except UnicodeDecodeError:
- continue
- if f[:1] == ".":
- continue
- pf = self._is_cpv(cat, pn, f)
+ return None
+ if filename.startswith("."):
+ return None
+ pf = self._is_cpv(cat, pn, filename)
if pf is not None:
- cpvlist.append(pf)
+ return pf
+
+ processed = (_process_for_cpv(filename) for filename in pkgdir_files)
+ cpvlist = [pf for pf in processed if pf]
return cpvlist
def _update_thick_pkgdir(self, cat, pn, pkgdir):
+ _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, []))
cpvlist = []
- for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
- break
for f in pkgdir_files:
try:
f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
except UnicodeDecodeError:
continue
- if f[:1] == ".":
+ if f.startswith("."):
continue
pf = self._is_cpv(cat, pn, f)
if pf is not None:
@@ -635,12 +633,12 @@ class Manifest:
else:
continue
self.fhashdict[mytype][f] = perform_multiple_checksums(
- self.pkgdir + f, self.hashes
+ f"{self.pkgdir}{f}", self.hashes
)
recursive_files = []
pkgdir = self.pkgdir
- cut_len = len(os.path.join(pkgdir, "files") + os.sep)
+ cut_len = len(os.path.join(pkgdir, f"files{os.sep}"))
for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
for f in files:
try:
@@ -662,12 +660,12 @@ class Manifest:
def _getAbsname(self, ftype, fname):
if ftype == "DIST":
- absname = os.path.join(self.distdir, fname)
+ abspath = (self.distdir, fname)
elif ftype == "AUX":
- absname = os.path.join(self.pkgdir, "files", fname)
+ abspath = (self.pkgdir, "files", fname)
else:
- absname = os.path.join(self.pkgdir, fname)
- return absname
+ abspath = (self.pkgdir, fname)
+ return os.path.join(*abspath)
def checkAllHashes(self, ignoreMissingFiles=False):
for t in MANIFEST2_IDENTIFIERS:
@@ -693,7 +691,7 @@ class Manifest:
except FileNotFound as e:
if not ignoreMissing:
raise
- return False, _("File Not Found: '%s'") % str(e)
+ return False, _(f"File Not Found: '{e}'")
def checkCpvHashes(
self, cpv, checkDistfiles=True, onlyDistfiles=False, checkMiscfiles=False
@@ -704,7 +702,7 @@ class Manifest:
self.checkTypeHashes("AUX", ignoreMissingFiles=False)
if checkMiscfiles:
self.checkTypeHashes("MISC", ignoreMissingFiles=False)
- ebuildname = "%s.ebuild" % self._catsplit(cpv)[1]
+ ebuildname = f"{self._catsplit(cpv)[1]}.ebuild"
self.checkFileHashes("EBUILD", ebuildname, ignoreMissing=False)
if checkDistfiles or onlyDistfiles:
for f in self._getCpvDistfiles(cpv):
@@ -715,68 +713,75 @@ class Manifest:
return self.fetchlist_dict[cpv]
def getDistfilesSize(self, fetchlist):
- total_bytes = 0
- for f in fetchlist:
- total_bytes += int(self.fhashdict["DIST"][f]["size"])
+ total_bytes = sum(int(self.fhashdict["DIST"][f]["size"]) for f in fetchlist)
return total_bytes
- def updateFileHashes(
- self, ftype, fname, checkExisting=True, ignoreMissing=True, reuseExisting=False
+ def updateAllFileHashes(
+ self, ftype, fnames, checkExisting=True, ignoreMissing=True, reuseExisting=False
):
- """Regenerate hashes for the given file"""
- if checkExisting:
- self.checkFileHashes(ftype, fname, ignoreMissing=ignoreMissing)
- if not ignoreMissing and fname not in self.fhashdict[ftype]:
- raise FileNotInManifestException(fname)
- if fname not in self.fhashdict[ftype]:
- self.fhashdict[ftype][fname] = {}
- myhashkeys = list(self.hashes)
- if reuseExisting:
- for k in [h for h in self.fhashdict[ftype][fname] if h in myhashkeys]:
- myhashkeys.remove(k)
- myhashes = perform_multiple_checksums(
- self._getAbsname(ftype, fname), myhashkeys
- )
- self.fhashdict[ftype][fname].update(myhashes)
+ """Regenerate hashes from a list of files"""
+ for fname in fnames:
+ if checkExisting:
+ self.checkFileHashes(ftype, fname, ignoreMissing=ignoreMissing)
+ if not ignoreMissing and fname not in self.fhashdict[ftype]:
+ raise FileNotInManifestException(fname)
+ if fname not in self.fhashdict[ftype]:
+ self.fhashdict[ftype][fname] = {}
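+ # With reuseExisting, compute only the hash types not already
+ # recorded for this file.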
+ myhashkeys = self.hashes
+ if reuseExisting:
+ myhashkeys = myhashkeys.difference(self.fhashdict[ftype][fname])
+ myhashes = perform_multiple_checksums(
+ self._getAbsname(ftype, fname), myhashkeys
+ )
+ self.fhashdict[ftype][fname].update(myhashes)
- def updateTypeHashes(self, idtype, checkExisting=False, ignoreMissingFiles=True):
- """Regenerate all hashes for all files of the given type"""
- for fname in self.fhashdict[idtype]:
- self.updateFileHashes(idtype, fname, checkExisting)
+ def updateAllTypeHashes(
+ self, idtypes, checkExisting=False, ignoreMissingFiles=True
+ ):
+ """Regenerate all hashes for all files from a list of types"""
+ for idtype in idtypes:
+ self.updateAllFileHashes(
+ ftype=idtype, fnames=self.fhashdict[idtype], checkExisting=checkExisting
+ )
def updateAllHashes(self, checkExisting=False, ignoreMissingFiles=True):
"""Regenerate all hashes for all files in this Manifest."""
- for idtype in MANIFEST2_IDENTIFIERS:
- self.updateTypeHashes(
- idtype,
- checkExisting=checkExisting,
- ignoreMissingFiles=ignoreMissingFiles,
- )
+ self.updateAllTypeHashes(
+ idtypes=MANIFEST2_IDENTIFIERS,
+ checkExisting=checkExisting,
+ ignoreMissingFiles=ignoreMissingFiles,
+ )
def updateCpvHashes(self, cpv, ignoreMissingFiles=True):
"""Regenerate all hashes associated to the given cpv (includes all AUX and MISC
files)."""
- self.updateTypeHashes("AUX", ignoreMissingFiles=ignoreMissingFiles)
- self.updateTypeHashes("MISC", ignoreMissingFiles=ignoreMissingFiles)
- ebuildname = "%s.ebuild" % self._catsplit(cpv)[1]
- self.updateFileHashes(
- "EBUILD", ebuildname, ignoreMissingFiles=ignoreMissingFiles
+ self.updateAllTypeHashes(
+ idtypes=("AUX", "MISC"),
+ ignoreMissingFiles=ignoreMissingFiles,
+ )
+ self.updateAllFileHashes(
+ ftype="EBUILD",
+ fnames=(f"{self._catsplit(cpv)[1]}.ebuild",),
+ ignoreMissing=ignoreMissingFiles,
+ )
+ self.updateAllFileHashes(
+ ftype="DIST",
+ fnames=self._getCpvDistfiles(cpv),
+ ignoreMissing=ignoreMissingFiles,
)
- for f in self._getCpvDistfiles(cpv):
- self.updateFileHashes("DIST", f, ignoreMissingFiles=ignoreMissingFiles)
def updateHashesGuessType(self, fname, *args, **kwargs):
"""Regenerate hashes for the given file (guesses the type and then
- calls updateFileHashes)."""
+ calls updateAllFileHashes)."""
mytype = self.guessType(fname)
- if mytype == "AUX":
- fname = fname[len("files" + os.sep) :]
- elif mytype is None:
+ if mytype is None:
return
+ elif mytype == "AUX":
+ fname = fname[len(f"files{os.sep}") :]
myrealtype = self.findFile(fname)
if myrealtype is not None:
mytype = myrealtype
- return self.updateFileHashes(mytype, fname, *args, **kwargs)
+ return self.updateAllFileHashes(mytype, (fname,), *args, **kwargs)
def getFileData(self, ftype, fname, key):
"""Return the value of a specific (type,filename,key) triple, mainly useful
@@ -785,28 +790,25 @@ class Manifest:
def getVersions(self):
"""Returns a list of manifest versions present in the manifest file."""
- rVal = []
mfname = self.getFullname()
if not os.path.exists(mfname):
- return rVal
- myfile = io.open(
+ return []
+ with open(
_unicode_encode(mfname, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
- )
- lines = myfile.readlines()
- myfile.close()
- for l in lines:
- mysplit = l.split()
- if (
- len(mysplit) > 4
- and mysplit[0] in MANIFEST2_IDENTIFIERS
- and ((len(mysplit) - 3) % 2) == 0
- and not 2 in rVal
- ):
- rVal.append(2)
- return rVal
+ ) as myfile:
+ line_splits = (line.split() for line in myfile.readlines())
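+ # A Manifest2 entry line has the shape "TYPE name size [HASH value]...":
+ # at least five tokens, with hash name/value pairs after the size field.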
+ validation = (
+ True
+ for line_split in line_splits
+ if len(line_split) > 4
+ and line_split[0] in MANIFEST2_IDENTIFIERS
+ and (len(line_split) - 3) % 2 == 0
+ )
+ if any(validation):
+ return [2]
+ return []
def _catsplit(self, pkg_key):
"""Split a category and package, returning a list of [cat, pkg].
diff --git a/lib/portage/meson.build b/lib/portage/meson.build
new file mode 100644
index 000000000..06dde8ca7
--- /dev/null
+++ b/lib/portage/meson.build
@@ -0,0 +1,74 @@
+const_py = configure_file(
+ input : 'const.py',
+ output : 'const.py',
+ configuration : conf_data
+)
+
+installation_py = configure_file(
+ input : 'installation.py',
+ output : 'installation.py',
+ configuration : conf_data
+)
+
+__init__py = configure_file(
+ input : '__init__.py',
+ output : '__init__.py',
+ configuration : conf_data
+)
+
+py.install_sources(
+ [
+ 'binpkg.py',
+ 'checksum.py',
+ const_py,
+ 'cvstree.py',
+ 'data.py',
+ 'debug.py',
+ 'dispatch_conf.py',
+ 'eapi.py',
+ 'eclass_cache.py',
+ 'exception.py',
+ 'getbinpkg.py',
+ 'glsa.py',
+ 'gpg.py',
+ 'gpkg.py',
+ installation_py,
+ 'localization.py',
+ 'locks.py',
+ 'mail.py',
+ 'manifest.py',
+ 'metadata.py',
+ 'module.py',
+ 'news.py',
+ 'output.py',
+ 'process.py',
+ 'progress.py',
+ 'update.py',
+ 'versions.py',
+ 'xpak.py',
+ '_global_updates.py',
+ '_legacy_globals.py',
+ '_selinux.py',
+ __init__py,
+ ],
+ subdir : 'portage',
+ pure : not native_extensions
+)
+
+subdir('binrepo')
+subdir('cache')
+subdir('dbapi')
+subdir('dep')
+subdir('elog')
+subdir('emaint')
+subdir('env')
+subdir('package')
+subdir('proxy')
+subdir('repository')
+subdir('sync')
+subdir('tests')
+subdir('util')
+subdir('xml')
+subdir('_compat_upgrade')
+subdir('_emirrordist')
+subdir('_sets')
diff --git a/lib/portage/metadata.py b/lib/portage/metadata.py
index 0bd2bcce4..869c10bb3 100644
--- a/lib/portage/metadata.py
+++ b/lib/portage/metadata.py
@@ -38,12 +38,11 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
"/var",
]:
print(
- "!!! PORTAGE_DEPCACHEDIR IS SET TO A PRIMARY "
- + "ROOT DIRECTORY ON YOUR SYSTEM.",
- file=sys.stderr,
- )
- print(
- "!!! This is ALMOST CERTAINLY NOT what you want: '%s'" % cachedir,
+ (
+ "!!! PORTAGE_DEPCACHEDIR IS SET TO A PRIMARY "
+ "ROOT DIRECTORY ON YOUR SYSTEM.\n"
+ f"!!! This is ALMOST CERTAINLY NOT what you want: '{cachedir}'"
+ ),
file=sys.stderr,
)
sys.exit(73)
@@ -76,8 +75,6 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
eclass_db.update_eclasses()
porttrees_data.append(TreeData(portdb.auxdb[path], eclass_db, path, src_db))
- porttrees = [tree_data.path for tree_data in porttrees_data]
-
quiet = (
settings.get("TERM") == "dumb" or "--quiet" in myopts or not sys.stdout.isatty()
)
@@ -101,7 +98,7 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
# Temporarily override portdb.porttrees so portdb.cp_all()
# will only return the relevant subset.
portdb_porttrees = portdb.porttrees
- portdb.porttrees = porttrees
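+ # Materialize a list: portdb.cp_all() may traverse porttrees more than once.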
+ portdb.porttrees = [tree_data.path for tree_data in porttrees_data]
try:
cp_all = portdb.cp_all()
finally:
@@ -120,10 +117,9 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
for cp in cp_all:
for tree_data in porttrees_data:
-
src_chf = tree_data.src_db.validation_chf
dest_chf = tree_data.dest_db.validation_chf
- dest_chf_key = "_%s_" % dest_chf
+ dest_chf_key = f"_{dest_chf}_"
dest_chf_getter = operator.attrgetter(dest_chf)
for cpv in portdb.cp_list(cp, mytree=tree_data.path):
@@ -191,11 +187,11 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
# We don't want to skip the write unless we're really
# sure that the existing cache is identical, so don't
# trust _mtime_ and _eclasses_ alone.
- for k in auxdbkeys:
- if dest.get(k, "") != src.get(k, ""):
- dest = None
- break
-
+ cache_differs = any(
+ dest.get(k, "") != src.get(k, "") for k in auxdbkeys
+ )
+ if cache_differs:
+ dest = None
if dest is not None:
# The existing data is valid and identical,
# so there's no need to overwrite it.
@@ -219,8 +215,10 @@ def action_metadata(settings, portdb, myopts, porttrees=None):
dead_nodes = set(tree_data.dest_db)
except CacheError as e:
writemsg_level(
- "Error listing cache entries for "
- + "'%s': %s, continuing...\n" % (tree_data.path, e),
+ (
+ "Error listing cache entries for "
+ f"'{tree_data.path}': {e}, continuing...\n"
+ ),
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/module.py b/lib/portage/module.py
index a30d509ee..cd9e2a7e1 100644
--- a/lib/portage/module.py
+++ b/lib/portage/module.py
@@ -53,12 +53,13 @@ class Module:
kid["module_name"] = ".".join([mod_name, kid["sourcefile"]])
except KeyError:
kid["module_name"] = ".".join([mod_name, self.name])
- msg = (
- "%s module's module_spec is old, missing attribute: "
- "'sourcefile'. Backward compatibility may be "
- "removed in the future.\nFile: %s\n"
+ writemsg(
+ _(
+ f"{self.name} module's module_spec is old, missing attribute: "
+ "'sourcefile'. Backward compatibility may be "
+ f"removed in the future.\nFile: {self._module.__file__}\n"
+ )
)
- writemsg(_(msg) % (self.name, self._module.__file__))
kid["is_imported"] = False
self.kids[kidname] = kid
self.kids_names.append(kidname)
@@ -67,8 +68,8 @@ class Module:
def get_class(self, name):
if not name or name not in self.kids_names:
raise InvalidModuleName(
- "Module name '%s' is invalid or not" % name
- + "part of the module '%s'" % self.name
+ f"Module name '{name}' is invalid or not "
+ f"part of the module '{self.name}'"
)
kid = self.kids[name]
if kid["is_imported"]:
@@ -107,19 +108,27 @@ class Modules:
@rtype: dictionary of module_plugins
"""
module_dir = self._module_path
- importables = []
names = os.listdir(module_dir)
- for entry in names:
- # skip any __init__ or __pycache__ files or directories
- if entry.startswith("__"):
- continue
+
+ def _a_real_module(entry):
try:
# test for statinfo to ensure it is a real module
# it will bail if it errors
os.lstat(os.path.join(module_dir, entry, "__init__.py"))
- importables.append(entry)
- except EnvironmentError:
- pass
+ except OSError:
+ return False
+ return True
+
+ # The importables list cannot be a generator.
+ # If it were a generator, self.parents.extend() would consume it,
+ # leaving nothing for the following for loop to iterate over.
+ importables = [
+ entry
+ for entry in names
+ if not entry.startswith("__") and _a_real_module(entry)
+ ]
+ self.parents.extend(importables)
+
kids = {}
for entry in importables:
new_module = Module(entry, self._namepath)
@@ -128,7 +137,6 @@ class Modules:
kid = new_module.kids[module_name]
kid["parent"] = new_module
kids[kid["name"]] = kid
- self.parents.append(entry)
return kids
def get_module_names(self):
@@ -149,9 +157,7 @@ class Modules:
if modname and modname in self.module_names:
mod = self._modules[modname]["parent"].get_class(modname)
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not" % modname + "found"
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return mod
def get_description(self, modname):
@@ -165,9 +171,7 @@ class Modules:
if modname and modname in self.module_names:
mod = self._modules[modname]["description"]
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not" % modname + "found"
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return mod
def get_functions(self, modname):
@@ -181,9 +185,7 @@ class Modules:
if modname and modname in self.module_names:
mod = self._modules[modname]["functions"]
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not" % modname + "found"
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return mod
def get_func_descriptions(self, modname):
@@ -197,9 +199,7 @@ class Modules:
if modname and modname in self.module_names:
desc = self._modules[modname]["func_desc"]
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not" % modname + "found"
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return desc
def get_opt_descriptions(self, modname):
@@ -213,9 +213,7 @@ class Modules:
if modname and modname in self.module_names:
desc = self._modules[modname].get("opt_desc")
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not found" % modname
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return desc
def get_spec(self, modname, var):
@@ -231,22 +229,14 @@ class Modules:
if modname and modname in self.module_names:
value = self._modules[modname].get(var, None)
else:
- raise InvalidModuleName(
- "Module name '%s' is invalid or not found" % modname
- )
+ raise InvalidModuleName(f"Module name '{modname}' is invalid or not found")
return value
def _check_compat(self, module):
if self.compat_versions:
if not module.module_spec["version"] in self.compat_versions:
raise ModuleVersionError(
- "Error loading '%s' plugin module: %s, version: %s\n"
+ f"Error loading '{self._namepath}' plugin module: {module.module_spec['name']}, version: {module.module_spec['version']}\n"
"Module is not compatible with the current application version\n"
- "Compatible module API versions are: %s"
- % (
- self._namepath,
- module.module_spec["name"],
- module.module_spec["version"],
- self.compat_versions,
- )
+ f"Compatible module API versions are: {self.compat_versions}"
)
diff --git a/lib/portage/news.py b/lib/portage/news.py
index ce61f8490..b7f10e610 100644
--- a/lib/portage/news.py
+++ b/lib/portage/news.py
@@ -14,12 +14,17 @@ __all__ = [
]
from collections import OrderedDict
-
+from typing import TYPE_CHECKING, Any, Optional
+from re import Pattern, Match
import fnmatch
-import io
import logging
import os as _os
import re
+
+if TYPE_CHECKING:
+ import portage.dbapi.porttree
+ import portage.dbapi.vartree
+ import portage.package.ebuild.config
+
from portage import os
from portage import _encodings
from portage import _unicode_decode
@@ -59,7 +64,14 @@ class NewsManager:
"""
- def __init__(self, portdb, vardb, news_path, unread_path, language_id="en"):
+ def __init__(
+ self,
+ portdb: "portage.dbapi.porttree.portdbapi",
+ vardb: "portage.dbapi.vartree.vardbapi",
+ news_path: str,
+ unread_path: str,
+ language_id: str = "en",
+ ) -> None:
self.news_path = news_path
self.unread_path = unread_path
self.language_id = language_id
@@ -77,11 +89,11 @@ class NewsManager:
self._dir_mode = 0o0074
self._mode_mask = 0o0000
- portdir = portdb.repositories.mainRepoLocation()
- profiles_base = None
+ portdir: Optional[str] = portdb.repositories.mainRepoLocation()
+ profiles_base: Optional[str] = None
if portdir is not None:
- profiles_base = os.path.join(portdir, "profiles") + os.path.sep
- profile_path = None
+ profiles_base = os.path.join(portdir, ("profiles" + os.path.sep))
+ profile_path: Optional[str] = None
if profiles_base is not None and portdb.settings.profile_path:
profile_path = normalize_path(
os.path.realpath(portdb.settings.profile_path)
@@ -90,19 +102,19 @@ class NewsManager:
profile_path = profile_path[len(profiles_base) :]
self._profile_path = profile_path
- def _unread_filename(self, repoid):
- return os.path.join(self.unread_path, "news-%s.unread" % repoid)
+ def _unread_filename(self, repoid: str) -> str:
+ return os.path.join(self.unread_path, f"news-{repoid}.unread")
- def _skip_filename(self, repoid):
- return os.path.join(self.unread_path, "news-%s.skip" % repoid)
+ def _skip_filename(self, repoid: str) -> str:
+ return os.path.join(self.unread_path, f"news-{repoid}.skip")
- def _news_dir(self, repoid):
- repo_path = self.portdb.getRepositoryPath(repoid)
+ def _news_dir(self, repoid: str) -> str:
+ repo_path: Optional[str] = self.portdb.getRepositoryPath(repoid)
if repo_path is None:
- raise AssertionError(_("Invalid repoID: %s") % repoid)
+ raise AssertionError(_(f"Invalid repoID: {repoid}"))
return os.path.join(repo_path, self.news_path)
- def updateItems(self, repoid):
+ def updateItems(self, repoid: str) -> None:
"""
Figure out which news items from NEWS_PATH are both unread and relevant to
the user (according to the GLEP 42 standards of relevancy). Then add these
@@ -125,23 +137,23 @@ class NewsManager:
if not os.access(self.unread_path, os.W_OK):
return
- news_dir = self._news_dir(repoid)
+ news_dir: str = self._news_dir(repoid)
try:
- news = _os.listdir(
+ news: list[str] = _os.listdir(
_unicode_encode(news_dir, encoding=_encodings["fs"], errors="strict")
)
except OSError:
return
- skip_filename = self._skip_filename(repoid)
- unread_filename = self._unread_filename(repoid)
- unread_lock = lockfile(unread_filename, wantnewlockfile=1)
+ skip_filename: str = self._skip_filename(repoid)
+ unread_filename: str = self._unread_filename(repoid)
+ unread_lock: Optional[tuple] = lockfile(unread_filename, wantnewlockfile=1)
try:
try:
- unread = set(grabfile(unread_filename))
- unread_orig = unread.copy()
- skip = set(grabfile(skip_filename))
- skip_orig = skip.copy()
+ unread: set[str | tuple[str, str]] = set(grabfile(unread_filename))
+ unread_orig: set[str | tuple[str, str]] = unread.copy()
+ skip: set[str | tuple[str, str]] = set(grabfile(skip_filename))
+ skip_orig: set[str | tuple[str, str]] = skip.copy()
except PermissionDenied:
return
@@ -164,7 +176,7 @@ class NewsManager:
if itemid in skip:
continue
filename = os.path.join(
- news_dir, itemid, itemid + "." + self.language_id + ".txt"
+ news_dir, itemid, f"{itemid}.{self.language_id}.txt"
)
if not os.path.isfile(filename):
continue
@@ -178,9 +190,7 @@ class NewsManager:
skip.add(item.name)
if unread != unread_orig:
- write_atomic(
- unread_filename, "".join("%s\n" % x for x in sorted(unread))
- )
+ write_atomic(unread_filename, "".join(f"{x}\n" for x in sorted(unread)))
apply_secpass_permissions(
unread_filename,
uid=self._uid,
@@ -190,7 +200,7 @@ class NewsManager:
)
if skip != skip_orig:
- write_atomic(skip_filename, "".join("%s\n" % x for x in sorted(skip)))
+ write_atomic(skip_filename, "".join(f"{x}\n" for x in sorted(skip)))
apply_secpass_permissions(
skip_filename,
uid=self._uid,
@@ -202,7 +212,7 @@ class NewsManager:
finally:
unlockfile(unread_lock)
- def getUnreadItems(self, repoid, update=False):
+ def getUnreadItems(self, repoid: str, update: bool = False) -> int:
"""
Determine if there are unread relevant items in news.repoid.unread.
If there are unread items return their number.
@@ -214,7 +224,7 @@ class NewsManager:
self.updateItems(repoid)
unread_filename = self._unread_filename(repoid)
- unread_lock = None
+ unread_lock: Optional[tuple] = None
try:
unread_lock = lockfile(unread_filename, wantnewlockfile=1)
except (
@@ -234,11 +244,11 @@ class NewsManager:
unlockfile(unread_lock)
-_formatRE = re.compile(r"News-Item-Format:\s*([^\s]*)\s*$")
-_installedRE = re.compile("Display-If-Installed:(.*)\n")
-_profileRE = re.compile("Display-If-Profile:(.*)\n")
-_keywordRE = re.compile("Display-If-Keyword:(.*)\n")
-_valid_profile_RE = re.compile(r"^[^*]+(/\*)?$")
+_formatRE: Pattern[str] = re.compile(r"News-Item-Format:\s*([^\s]*)\s*$")
+_installedRE: Pattern[str] = re.compile("Display-If-Installed:(.*)\n")
+_profileRE: Pattern[str] = re.compile("Display-If-Profile:(.*)\n")
+_keywordRE: Pattern[str] = re.compile("Display-If-Keyword:(.*)\n")
+_valid_profile_RE: Pattern[str] = re.compile(r"^[^*]+(/\*)?$")
class NewsItem:
@@ -252,7 +262,7 @@ class NewsItem:
Creation of a news item involves passing in the path to the particular news item.
"""
- def __init__(self, path, name):
+ def __init__(self, path: str, name: str):
"""
For a given news item we only want it if its path is a file.
"""
@@ -261,7 +271,12 @@ class NewsItem:
self._parsed = False
self._valid = True
- def isRelevant(self, vardb, config, profile):
+ def isRelevant(
+ self,
+ vardb: "portage.dbapi.vartree.vardbapi",
+ config: "portage.package.ebuild.config.config",
+ profile: Optional[str],
+ ) -> bool:
"""
This function takes a dict of keyword arguments; one should pass in any
objects need to do to lookups (like what keywords we are on, what profile,
@@ -280,40 +295,40 @@ class NewsItem:
if not len(self.restrictions):
return True
- kwargs = {"vardb": vardb, "config": config, "profile": profile}
+ kwargs: dict[str, Any] = {"vardb": vardb, "config": config, "profile": profile}
- all_match = True
+ all_match: bool = True
for values in self.restrictions.values():
- any_match = False
- for restriction in values:
- if restriction.checkRestriction(**kwargs):
- any_match = True
+ matches = [restriction.checkRestriction(**kwargs) for restriction in values]
+ any_match = any(matches)
+
+ # If, for a single restriction, we didn't match anything, then we obviously
+ # didn't match everything, so just bail out.
if not any_match:
all_match = False
+ break
return all_match
- def isValid(self):
+ def isValid(self) -> bool:
if not self._parsed:
self.parse()
return self._valid
- def parse(self):
- f = io.open(
+ def parse(self) -> None:
+ with open(
_unicode_encode(self.path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
- )
- lines = f.readlines()
- f.close()
+ ) as f:
+ lines = f.readlines()
self.restrictions = {}
invalids = []
- news_format = None
+ news_format: Optional[str] = None
# Look for News-Item-Format
for i, line in enumerate(lines):
- format_match = _formatRE.match(line)
+ format_match: Optional[Match[str]] = _formatRE.match(line)
if format_match is not None:
news_format = format_match.group(1)
if fnmatch.fnmatch(news_format, "[12].*"):
@@ -348,12 +363,12 @@ class NewsItem:
if invalids:
self._valid = False
- msg = []
- msg.append(_("Invalid news item: %s") % (self.path,))
- for lineno, line in invalids:
- msg.append(_(" line %d: %s") % (lineno, line))
+ msg = [
+ _(f"Invalid news item: {self.path}"),
+ *(_(f" line {lineno}: {line}") for lineno, line in invalids),
+ ]
writemsg_level(
- "".join("!!! %s\n" % x for x in msg), level=logging.ERROR, noiselevel=-1
+ "".join(f"!!! {x}\n" for x in msg), level=logging.ERROR, noiselevel=-1
)
self._parsed = True
@@ -368,7 +383,7 @@ class DisplayRestriction:
are met, then it is displayed
"""
- def isValid(self):
+ def isValid(self) -> bool:
return True
def checkRestriction(self, **kwargs):
@@ -385,16 +400,15 @@ class DisplayProfileRestriction(DisplayRestriction):
self.profile = profile
self.format = news_format
- def isValid(self):
- if fnmatch.fnmatch(self.format, "1.*") and "*" in self.profile:
- return False
- if fnmatch.fnmatch(self.format, "2.*") and not _valid_profile_RE.match(
- self.profile
- ):
- return False
- return True
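+ # Format 1.x profiles may not contain wildcards at all; format 2.x
+ # allows only a trailing "/*".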
+ def isValid(self) -> bool:
+ return (
+ not fnmatch.fnmatch(self.format, "1.*") or "*" not in self.profile
+ ) and (
+ not fnmatch.fnmatch(self.format, "2.*")
+ or _valid_profile_RE.match(self.profile) is not None
+ )
- def checkRestriction(self, **kwargs):
+ def checkRestriction(self, **kwargs) -> bool:
if fnmatch.fnmatch(self.format, "2.*") and self.profile.endswith("/*"):
return kwargs["profile"].startswith(self.profile[:-1])
return kwargs["profile"] == self.profile
@@ -410,10 +424,8 @@ class DisplayKeywordRestriction(DisplayRestriction):
self.keyword = keyword
self.format = news_format
- def checkRestriction(self, **kwargs):
- if kwargs["config"].get("ARCH", "") == self.keyword:
- return True
- return False
+ def checkRestriction(self, **kwargs) -> bool:
+ return kwargs["config"].get("ARCH", "") == self.keyword
class DisplayInstalledRestriction(DisplayRestriction):
@@ -426,21 +438,23 @@ class DisplayInstalledRestriction(DisplayRestriction):
self.atom = atom
self.format = news_format
- def isValid(self):
+ def isValid(self) -> bool:
if fnmatch.fnmatch(self.format, "1.*"):
return isvalidatom(self.atom, eapi="0")
if fnmatch.fnmatch(self.format, "2.*"):
return isvalidatom(self.atom, eapi="5")
return isvalidatom(self.atom)
- def checkRestriction(self, **kwargs):
- vdb = kwargs["vardb"]
- if vdb.match(self.atom):
- return True
- return False
+ def checkRestriction(self, **kwargs) -> bool:
+ return bool(kwargs["vardb"].match(self.atom))
-def count_unread_news(portdb, vardb, repos=None, update=True):
+def count_unread_news(
+ portdb: "portage.dbapi.porttree.portdbapi",
+ vardb: "portage.dbapi.vartree.vardbapi",
+ repos: Optional[list[Any]] = None,
+ update: bool = True,
+) -> dict[str, int]:
"""
Returns a dictionary mapping repos to integer counts of unread news items.
By default, this will scan all repos and check for new items that have
@@ -473,7 +487,7 @@ def count_unread_news(portdb, vardb, repos=None, update=True):
# NOTE: The NewsManager typically handles permission errors by
# returning silently, so PermissionDenied won't necessarily be
# raised even if we do trigger a permission error above.
- msg = "Permission denied: '%s'\n" % (e,)
+ msg = f"Permission denied: '{e}'\n"
if msg in permission_msgs:
pass
else:
@@ -486,21 +500,23 @@ def count_unread_news(portdb, vardb, repos=None, update=True):
return news_counts
-def display_news_notifications(news_counts):
+def display_news_notifications(news_counts: dict[Any, int]) -> None:
"""
Display a notification for unread news items, using a dictionary mapping
repos to integer counts, like that returned from count_unread_news().
+
+ @param news_counts: mapping of repos to integer counts of unread news items
+ @type news_counts: dict
"""
- newsReaderDisplay = False
+ news_reader_display = False
for repo, count in news_counts.items():
if count > 0:
- if not newsReaderDisplay:
- newsReaderDisplay = True
+ if not news_reader_display:
+ news_reader_display = True
print()
print(colorize("WARN", " * IMPORTANT:"), end=" ")
- print("%s news items need reading for repository '%s'." % (count, repo))
+ print(f"{count} news items need reading for repository '{repo}'.")
- if newsReaderDisplay:
+ if news_reader_display:
print(colorize("WARN", " *"), end=" ")
- print("Use " + colorize("GOOD", "eselect news read") + " to view new items.")
- print()
+ print(f"Use {colorize('GOOD', 'eselect news read')} to view new items.\n")
diff --git a/lib/portage/output.py b/lib/portage/output.py
index 42f487f8a..4408705c4 100644
--- a/lib/portage/output.py
+++ b/lib/portage/output.py
@@ -1,19 +1,22 @@
-# Copyright 1998-2021 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__docformat__ = "epytext"
import errno
-import io
+import itertools
import re
import subprocess
import sys
+from typing import Optional
import portage
portage.proxy.lazyimport.lazyimport(
globals(),
+ "portage.process:spawn",
"portage.util:writemsg",
+ "portage.util.futures:asyncio",
)
import portage.util.formatter as formatter
@@ -34,56 +37,50 @@ from portage.localization import _
havecolor = 1
dotitles = 1
-_styles = {}
"""Maps style class to tuple of attribute names."""
+_styles = {}
-codes = {}
"""Maps attribute name to ansi code."""
esc_seq = "\x1b["
-codes["normal"] = esc_seq + "0m"
-codes["reset"] = esc_seq + "39;49;00m"
-
-codes["bold"] = esc_seq + "01m"
-codes["faint"] = esc_seq + "02m"
-codes["standout"] = esc_seq + "03m"
-codes["underline"] = esc_seq + "04m"
-codes["blink"] = esc_seq + "05m"
-codes["overline"] = esc_seq + "06m"
-codes["reverse"] = esc_seq + "07m"
-codes["invisible"] = esc_seq + "08m"
-
-codes["no-attr"] = esc_seq + "22m"
-codes["no-standout"] = esc_seq + "23m"
-codes["no-underline"] = esc_seq + "24m"
-codes["no-blink"] = esc_seq + "25m"
-codes["no-overline"] = esc_seq + "26m"
-codes["no-reverse"] = esc_seq + "27m"
-
-codes["bg_black"] = esc_seq + "40m"
-codes["bg_darkred"] = esc_seq + "41m"
-codes["bg_darkgreen"] = esc_seq + "42m"
-codes["bg_brown"] = esc_seq + "43m"
-codes["bg_darkblue"] = esc_seq + "44m"
-codes["bg_purple"] = esc_seq + "45m"
-codes["bg_teal"] = esc_seq + "46m"
-codes["bg_lightgray"] = esc_seq + "47m"
-codes["bg_default"] = esc_seq + "49m"
-codes["bg_darkyellow"] = codes["bg_brown"]
+codes = {
+ "normal": f"{esc_seq}0m",
+ "reset": f"{esc_seq}39;49;00m",
+ "bold": f"{esc_seq}01m",
+ "faint": f"{esc_seq}02m",
+ "standout": f"{esc_seq}03m",
+ "underline": f"{esc_seq}04m",
+ "blink": f"{esc_seq}05m",
+ "overline": f"{esc_seq}06m",
+ "reverse": f"{esc_seq}07m",
+ "invisible": f"{esc_seq}08m",
+ "no-attr": f"{esc_seq}22m",
+ "no-standout": f"{esc_seq}23m",
+ "no-underline": f"{esc_seq}24m",
+ "no-blink": f"{esc_seq}25m",
+ "no-overline": f"{esc_seq}26m",
+ "no-reverse": f"{esc_seq}27m",
+ "bg_black": f"{esc_seq}40m",
+ "bg_darkred": f"{esc_seq}41m",
+ "bg_darkgreen": f"{esc_seq}42m",
+ "bg_brown": f"{esc_seq}43m",
+ "bg_darkblue": f"{esc_seq}44m",
+ "bg_purple": f"{esc_seq}45m",
+ "bg_teal": f"{esc_seq}46m",
+ "bg_lightgray": f"{esc_seq}47m",
+ "bg_default": f"{esc_seq}49m",
+ "bg_darkyellow": f"{esc_seq}43m",
+}
def color(fg, bg="default", attr=["normal"]):
- mystr = codes[fg]
- for x in [bg] + attr:
- mystr += codes[x]
- return mystr
+ myansicodechain = itertools.chain((codes[fg],), (codes[x] for x in [bg, *attr]))
+ return "".join(myansicodechain)
-ansi_codes = []
-for x in range(30, 38):
- ansi_codes.append("%im" % x)
- ansi_codes.append("%i;01m" % x)
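+# Foreground color codes 30-37, each in a plain and a bold (";01") variant.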
+ansi_codes = [y for x in range(30, 38) for y in (f"{x}m", f"{x};01m")]
+
rgb_ansi_colors = [
"0x000000",
@@ -139,7 +136,7 @@ codes["0xAAAA00"] = codes["brown"]
codes["darkyellow"] = codes["0xAAAA00"]
-# Colors from /etc/init.d/functions.sh
+# Colors from /lib/gentoo/functions.sh
_styles["BAD"] = ("red",)
_styles["BRACKET"] = ("blue",)
_styles["ERR"] = ("red",)
@@ -157,7 +154,7 @@ _styles["UNMERGE_WARN"] = ("red",)
_styles["SECURITY_WARN"] = ("red",)
_styles["MERGE_LIST_PROGRESS"] = ("yellow",)
_styles["PKG_BLOCKER"] = ("red",)
-_styles["PKG_BLOCKER_SATISFIED"] = ("darkblue",)
+_styles["PKG_BLOCKER_SATISFIED"] = ("teal",)
_styles["PKG_MERGE"] = ("darkgreen",)
_styles["PKG_MERGE_SYSTEM"] = ("darkgreen",)
_styles["PKG_MERGE_WORLD"] = ("green",)
@@ -165,8 +162,8 @@ _styles["PKG_BINARY_MERGE"] = ("purple",)
_styles["PKG_BINARY_MERGE_SYSTEM"] = ("purple",)
_styles["PKG_BINARY_MERGE_WORLD"] = ("fuchsia",)
_styles["PKG_UNINSTALL"] = ("red",)
-_styles["PKG_NOMERGE"] = ("darkblue",)
-_styles["PKG_NOMERGE_SYSTEM"] = ("darkblue",)
+_styles["PKG_NOMERGE"] = ("teal",)
+_styles["PKG_NOMERGE_SYSTEM"] = ("teal",)
_styles["PKG_NOMERGE_WORLD"] = ("blue",)
_styles["PROMPT_CHOICE_DEFAULT"] = ("green",)
_styles["PROMPT_CHOICE_OTHER"] = ("red",)
@@ -193,9 +190,8 @@ def _parse_color_map(config_root="/", onerror=None):
return token
try:
- with io.open(
+ with open(
_unicode_encode(myfile, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
@@ -256,7 +252,7 @@ def _parse_color_map(config_root="/", onerror=None):
_styles[k] = tuple(code_list)
elif k in codes:
codes[k] = "".join(code_list)
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno == errno.ENOENT:
raise FileNotFound(myfile)
elif e.errno == errno.EACCES:
@@ -270,7 +266,7 @@ def nc_len(mystr):
_legal_terms_re = re.compile(
- r"^(xterm|xterm-color|Eterm|aterm|rxvt|screen|kterm|rxvt-unicode|gnome|interix|tmux|st-256color|alacritty|konsole)"
+ r"^(xterm|xterm-color|Eterm|aterm|rxvt|screen|kterm|rxvt-unicode|gnome|interix|tmux|st-256color|alacritty|konsole|foot)"
)
_disable_xtermTitle = None
_max_xtermTitle_len = 253
@@ -291,7 +287,7 @@ def xtermTitle(mystr, raw=False):
if len(mystr) > _max_xtermTitle_len:
mystr = mystr[:_max_xtermTitle_len]
if not raw:
- mystr = "\x1b]0;%s\x07" % mystr
+ mystr = f"\x1b]0;{mystr}\x07"
# avoid potential UnicodeEncodeError
mystr = _unicode_encode(
@@ -341,7 +337,7 @@ def xtermTitleReset():
home = os.environ.get("HOME", "")
if home != "" and pwd.startswith(home):
pwd = "~" + pwd[len(home) :]
- default_xterm_title = "\x1b]0;%s@%s:%s\x07" % (
+ default_xterm_title = "\x1b]0;{}@{}:{}\x07".format(
os.environ.get("LOGNAME", ""),
os.environ.get("HOSTNAME", "").split(".", 1)[0],
pwd,
@@ -393,7 +389,7 @@ def colormap():
"QAWARN",
"WARN",
):
- mycolors.append("PORTAGE_COLOR_{}=$'{}'".format(c, style_to_ansi_code(c)))
+ mycolors.append(f"PORTAGE_COLOR_{c}=$'{style_to_ansi_code(c)}'")
return "\n".join(mycolors)
@@ -539,7 +535,7 @@ def get_term_size(fd=None):
try:
proc = subprocess.Popen(["stty", "size"], stdout=subprocess.PIPE, stderr=fd)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
# stty command not found
@@ -559,18 +555,29 @@ def get_term_size(fd=None):
return (0, 0)
-def set_term_size(lines, columns, fd):
+def set_term_size(lines: int, columns: int, fd: int) -> Optional[asyncio.Future]:
"""
Set the number of lines and columns for the tty that is connected to fd.
For portability, this simply calls `stty rows $lines columns $columns`.
+
+ If spawn succeeds and the event loop is running then an instance of
+ asyncio.Future is returned and the caller should wait for it in order
+ to prevent possible error messages like this:
+
+ [ERROR] Task was destroyed but it is pending!
"""
- from portage.process import spawn
cmd = ["stty", "rows", str(lines), "columns", str(columns)]
try:
- spawn(cmd, env=os.environ, fd_pipes={0: fd})
+ proc = spawn(cmd, env=os.environ, fd_pipes={0: fd}, returnproc=True)
except CommandNotFound:
writemsg(_("portage: stty: command not found\n"), noiselevel=-1)
+ else:
+ loop = asyncio.get_event_loop()
+ if loop.is_running():
+ return asyncio.ensure_future(proc.wait(), loop=loop)
+ else:
+ loop.run_until_complete(proc.wait())
class EOutput:
@@ -813,12 +820,12 @@ class ProgressBar:
self._set_desc()
def _set_desc(self):
- self._desc = "%s%s" % (
- "%s: " % self._title if self._title else "",
- "%s" % self._label if self._label else "",
+ self._desc = "{}{}".format(
+ f"{self._title}: " if self._title else "",
+ f"{self._label}" if self._label else "",
)
if len(self._desc) > self._desc_max_length: # truncate if too long
- self._desc = "%s..." % self._desc[: self._desc_max_length - 3]
+ self._desc = f"{self._desc[:self._desc_max_length - 3]}..."
if len(self._desc):
self._desc = self._desc.ljust(self._desc_max_length)
@@ -905,7 +912,7 @@ class TermProgressBar(ProgressBar):
position = 0.5
self._position = position
bar_width = int(offset * max_bar_width)
- image = "%s%s%s" % (
+ image = "{}{}{}".format(
self._desc,
_percent,
"["
@@ -919,7 +926,7 @@ class TermProgressBar(ProgressBar):
percentage = 100 * curval // maxval
max_bar_width = bar_space - 1
_percent = ("%d%% " % percentage).rjust(percentage_str_width)
- image = "%s%s" % (self._desc, _percent)
+ image = f"{self._desc}{_percent}"
if cols < min_columns:
return image
@@ -963,7 +970,7 @@ def _init(config_root="/"):
try:
_parse_color_map(
config_root=config_root,
- onerror=lambda e: writemsg("%s\n" % str(e), noiselevel=-1),
+ onerror=lambda e: writemsg(f"{str(e)}\n", noiselevel=-1),
)
except FileNotFound:
pass
@@ -971,12 +978,11 @@ def _init(config_root="/"):
writemsg(_("Permission denied: '%s'\n") % str(e), noiselevel=-1)
del e
except PortageException as e:
- writemsg("%s\n" % str(e), noiselevel=-1)
+ writemsg(f"{str(e)}\n", noiselevel=-1)
del e
class _LazyInitColorMap(portage.proxy.objectproxy.ObjectProxy):
-
__slots__ = ("_attr",)
def __init__(self, attr):
diff --git a/lib/portage/package/ebuild/_config/LicenseManager.py b/lib/portage/package/ebuild/_config/LicenseManager.py
index c28cc89bf..90f7742e3 100644
--- a/lib/portage/package/ebuild/_config/LicenseManager.py
+++ b/lib/portage/package/ebuild/_config/LicenseManager.py
@@ -15,7 +15,6 @@ from portage.package.ebuild._config.helper import ordered_by_atom_specificity
class LicenseManager:
def __init__(self, license_group_locations, abs_user_config, user_config=True):
-
self._accept_license_str = None
self._accept_license = None
self._license_groups = {}
diff --git a/lib/portage/package/ebuild/_config/LocationsManager.py b/lib/portage/package/ebuild/_config/LocationsManager.py
index d65aac609..6c54b8056 100644
--- a/lib/portage/package/ebuild/_config/LocationsManager.py
+++ b/lib/portage/package/ebuild/_config/LocationsManager.py
@@ -3,7 +3,6 @@
__all__ = ("LocationsManager",)
-import io
import warnings
import portage
@@ -93,16 +92,12 @@ class LocationsManager:
+ os.sep
)
- self.esysroot = self.sysroot.rstrip(os.sep) + self.eprefix + os.sep
-
# TODO: Set this via the constructor using
# PORTAGE_OVERRIDE_EPREFIX.
self.broot = portage.const.EPREFIX
def load_profiles(self, repositories, known_repository_paths):
- known_repository_paths = set(
- os.path.realpath(x) for x in known_repository_paths
- )
+ known_repository_paths = {os.path.realpath(x) for x in known_repository_paths}
known_repos = []
for x in known_repository_paths:
@@ -165,7 +160,7 @@ class LocationsManager:
_("!!! Unable to parse profile: '%s'\n") % self.profile_path,
noiselevel=-1,
)
- writemsg("!!! ParseError: %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! ParseError: {str(e)}\n", noiselevel=-1)
self.profiles = []
self.profiles_complex = []
@@ -226,14 +221,13 @@ class LocationsManager:
eapi = eapi or "0"
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(eapi_file, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
eapi = f.readline().strip()
- except IOError:
+ except OSError:
pass
else:
if not eapi_is_supported(eapi):
@@ -389,13 +383,13 @@ class LocationsManager:
+ os.path.sep
)
- if self.sysroot != "/" and self.sysroot != self.target_root:
+ if self.sysroot != "/" and self.target_root == "/":
writemsg(
_(
"!!! Error: SYSROOT (currently %s) must "
- "equal / or ROOT (currently %s).\n"
+ "be set to / when ROOT is /.\n"
)
- % (self.sysroot, self.target_root),
+ % self.sysroot,
noiselevel=-1,
)
raise InvalidLocation(self.sysroot)
@@ -405,6 +399,15 @@ class LocationsManager:
self.eroot = self.target_root.rstrip(os.sep) + self.eprefix + os.sep
+ # In a cross-prefix scenario where SYSROOT=/ and ROOT=/, assume we want
+ # ESYSROOT to point to the target prefix.
+ if self.sysroot == self.target_root:
+ self.esysroot = self.sysroot.rstrip(os.sep) + self.eprefix + os.sep
+ elif self.sysroot == "/":
+ self.esysroot = self.broot + os.sep
+ else:
+ self.esysroot = self.sysroot
+
self.global_config_path = GLOBAL_CONFIG_PATH
if portage.const.EPREFIX:
self.global_config_path = os.path.join(
diff --git a/lib/portage/package/ebuild/_config/UseManager.py b/lib/portage/package/ebuild/_config/UseManager.py
index a0fb0254a..3827ba27a 100644
--- a/lib/portage/package/ebuild/_config/UseManager.py
+++ b/lib/portage/package/ebuild/_config/UseManager.py
@@ -6,24 +6,19 @@ __all__ = ("UseManager",)
from _emerge.Package import Package
from portage import os
from portage.dep import (
- Atom,
dep_getrepo,
dep_getslot,
ExtendedAtomDict,
remove_slot,
_get_useflag_re,
- _repo_separator,
)
from portage.eapi import (
- eapi_has_use_aliases,
eapi_supports_stable_use_forcing_and_masking,
)
-from portage.exception import InvalidAtom
from portage.localization import _
from portage.repository.config import allow_profile_repo_deps
from portage.util import (
grabfile,
- grabdict,
grabdict_package,
read_corresponding_eapi_file,
stack_lists,
@@ -46,12 +41,10 @@ class UseManager:
# use.stable.mask _repo_usestablemask_dict
# use.force _repo_useforce_dict
# use.stable.force _repo_usestableforce_dict
- # use.aliases _repo_usealiases_dict
# package.use.mask _repo_pusemask_dict
# package.use.stable.mask _repo_pusestablemask_dict
# package.use.force _repo_puseforce_dict
# package.use.stable.force _repo_pusestableforce_dict
- # package.use.aliases _repo_pusealiases_dict
# --------------------------------
# profiles
# --------------------------------
@@ -158,11 +151,6 @@ class UseManager:
"package.use", abs_user_config, user_config
)
- self._repo_usealiases_dict = self._parse_repository_usealiases(repositories)
- self._repo_pusealiases_dict = self._parse_repository_packageusealiases(
- repositories
- )
-
self.repositories = repositories
def _parse_file_to_tuple(
@@ -409,111 +397,6 @@ class UseManager:
for profile in locations
)
- def _parse_repository_usealiases(self, repositories):
- ret = {}
- for repo in repositories.repos_with_profiles():
- file_name = os.path.join(repo.location, "profiles", "use.aliases")
- eapi = read_corresponding_eapi_file(file_name, default=repo.eapi)
- useflag_re = _get_useflag_re(eapi)
- raw_file_dict = grabdict(file_name, recursive=True)
- file_dict = {}
- for real_flag, aliases in raw_file_dict.items():
- if useflag_re.match(real_flag) is None:
- writemsg(
- _("--- Invalid real USE flag in '%s': '%s'\n")
- % (file_name, real_flag),
- noiselevel=-1,
- )
- else:
- for alias in aliases:
- if useflag_re.match(alias) is None:
- writemsg(
- _(
- "--- Invalid USE flag alias for '%s' real USE flag in '%s': '%s'\n"
- )
- % (real_flag, file_name, alias),
- noiselevel=-1,
- )
- else:
- if any(
- alias in v
- for k, v in file_dict.items()
- if k != real_flag
- ):
- writemsg(
- _("--- Duplicated USE flag alias in '%s': '%s'\n")
- % (file_name, alias),
- noiselevel=-1,
- )
- else:
- file_dict.setdefault(real_flag, []).append(alias)
- ret[repo.name] = file_dict
- return ret
-
- def _parse_repository_packageusealiases(self, repositories):
- ret = {}
- for repo in repositories.repos_with_profiles():
- file_name = os.path.join(repo.location, "profiles", "package.use.aliases")
- eapi = read_corresponding_eapi_file(file_name, default=repo.eapi)
- useflag_re = _get_useflag_re(eapi)
- lines = grabfile(file_name, recursive=True)
- file_dict = {}
- for line in lines:
- elements = line.split()
- atom = elements[0]
- try:
- atom = Atom(atom, eapi=eapi)
- except InvalidAtom:
- writemsg(_("--- Invalid atom in '%s': '%s'\n") % (file_name, atom))
- continue
- if len(elements) == 1:
- writemsg(
- _("--- Missing real USE flag for '%s' in '%s'\n")
- % (atom, file_name),
- noiselevel=-1,
- )
- continue
- real_flag = elements[1]
- if useflag_re.match(real_flag) is None:
- writemsg(
- _("--- Invalid real USE flag for '%s' in '%s': '%s'\n")
- % (atom, file_name, real_flag),
- noiselevel=-1,
- )
- else:
- for alias in elements[2:]:
- if useflag_re.match(alias) is None:
- writemsg(
- _(
- "--- Invalid USE flag alias for '%s' real USE flag for '%s' in '%s': '%s'\n"
- )
- % (real_flag, atom, file_name, alias),
- noiselevel=-1,
- )
- else:
- # Duplicated USE flag aliases in entries for different atoms
- # matching the same package version are detected in getUseAliases().
- if any(
- alias in v
- for k, v in file_dict.get(atom.cp, {})
- .get(atom, {})
- .items()
- if k != real_flag
- ):
- writemsg(
- _(
- "--- Duplicated USE flag alias for '%s' in '%s': '%s'\n"
- )
- % (atom, file_name, alias),
- noiselevel=-1,
- )
- else:
- file_dict.setdefault(atom.cp, {}).setdefault(
- atom, {}
- ).setdefault(real_flag, []).append(alias)
- ret[repo.name] = file_dict
- return ret
-
def _isStable(self, pkg):
if self._user_config:
try:
@@ -650,65 +533,6 @@ class UseManager:
return frozenset(stack_lists(useforce, incremental=True))
- def getUseAliases(self, pkg):
- if hasattr(pkg, "eapi") and not eapi_has_use_aliases(pkg.eapi):
- return {}
-
- cp = getattr(pkg, "cp", None)
- if cp is None:
- slot = dep_getslot(pkg)
- repo = dep_getrepo(pkg)
- pkg = _pkg_str(remove_slot(pkg), slot=slot, repo=repo)
- cp = pkg.cp
-
- usealiases = {}
-
- if hasattr(pkg, "repo") and pkg.repo != Package.UNKNOWN_REPO:
- repos = []
- try:
- repos.extend(repo.name for repo in self.repositories[pkg.repo].masters)
- except KeyError:
- pass
- repos.append(pkg.repo)
- for repo in repos:
- usealiases_dict = self._repo_usealiases_dict.get(repo, {})
- for real_flag, aliases in usealiases_dict.items():
- for alias in aliases:
- if any(
- alias in v for k, v in usealiases.items() if k != real_flag
- ):
- writemsg(
- _("--- Duplicated USE flag alias for '%s%s%s': '%s'\n")
- % (pkg.cpv, _repo_separator, pkg.repo, alias),
- noiselevel=-1,
- )
- else:
- usealiases.setdefault(real_flag, []).append(alias)
- cp_usealiases_dict = self._repo_pusealiases_dict.get(repo, {}).get(cp)
- if cp_usealiases_dict:
- usealiases_dict_list = ordered_by_atom_specificity(
- cp_usealiases_dict, pkg
- )
- for usealiases_dict in usealiases_dict_list:
- for real_flag, aliases in usealiases_dict.items():
- for alias in aliases:
- if any(
- alias in v
- for k, v in usealiases.items()
- if k != real_flag
- ):
- writemsg(
- _(
- "--- Duplicated USE flag alias for '%s%s%s': '%s'\n"
- )
- % (pkg.cpv, _repo_separator, pkg.repo, alias),
- noiselevel=-1,
- )
- else:
- usealiases.setdefault(real_flag, []).append(alias)
-
- return usealiases
-
def getPUSE(self, pkg):
cp = getattr(pkg, "cp", None)
if cp is None:
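
The duplicate-alias guard that recurs throughout the removed parsers above reduces to one idiom: reject an alias if any other real flag already claims it. A minimal standalone sketch of that check (illustrative names, not portage API):

    def add_alias(file_dict, real_flag, alias):
        # Reject the alias if some *other* real flag already owns it.
        if any(alias in v for k, v in file_dict.items() if k != real_flag):
            return False
        file_dict.setdefault(real_flag, []).append(alias)
        return True

    aliases = {}
    add_alias(aliases, "ssl", "openssl")  # True: first claim wins
    add_alias(aliases, "tls", "openssl")  # False: "openssl" is taken by "ssl"
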
diff --git a/lib/portage/package/ebuild/_config/VirtualsManager.py b/lib/portage/package/ebuild/_config/VirtualsManager.py
index a5473a94f..b0acf3c7d 100644
--- a/lib/portage/package/ebuild/_config/VirtualsManager.py
+++ b/lib/portage/package/ebuild/_config/VirtualsManager.py
@@ -107,11 +107,11 @@ class VirtualsManager:
memo[id(self)] = result
# immutable attributes (internal policy ensures lack of mutation)
- # _treeVirtuals is initilised by _populate_treeVirtuals().
+ # _treeVirtuals is initialised by _populate_treeVirtuals().
# Before that it's 'None'.
result._treeVirtuals = self._treeVirtuals
memo[id(self._treeVirtuals)] = self._treeVirtuals
- # _dirVirtuals is initilised by __init__.
+ # _dirVirtuals is initialised by __init__.
result._dirVirtuals = self._dirVirtuals
memo[id(self._dirVirtuals)] = self._dirVirtuals
diff --git a/lib/portage/package/ebuild/_config/env_var_validation.py b/lib/portage/package/ebuild/_config/env_var_validation.py
index 82fddca03..e00070926 100644
--- a/lib/portage/package/ebuild/_config/env_var_validation.py
+++ b/lib/portage/package/ebuild/_config/env_var_validation.py
@@ -8,7 +8,7 @@ from portage.util import shlex_split
def validate_cmd_var(v):
"""
- Validate an evironment variable value to see if it
+ Validate an environment variable value to see if it
contains an executable command as the first token.
returns (valid, token_list) where 'valid' is boolean and 'token_list'
is the (possibly empty) list of tokens split by shlex.
diff --git a/lib/portage/package/ebuild/_config/helper.py b/lib/portage/package/ebuild/_config/helper.py
index 275d32cfe..1081b4da3 100644
--- a/lib/portage/package/ebuild/_config/helper.py
+++ b/lib/portage/package/ebuild/_config/helper.py
@@ -50,7 +50,7 @@ def ordered_by_atom_specificity(cpdict, pkg, repo=None):
def prune_incremental(split):
"""
Prune off any parts of an incremental variable that are
- made irrelevant by the latest occuring * or -*. This
+ made irrelevant by the latest occurring * or -*. This
could be more aggressive but that might be confusing
and the point is just to reduce noise a bit.
"""
diff --git a/lib/portage/package/ebuild/_config/meson.build b/lib/portage/package/ebuild/_config/meson.build
new file mode 100644
index 000000000..e12d82e50
--- /dev/null
+++ b/lib/portage/package/ebuild/_config/meson.build
@@ -0,0 +1,17 @@
+py.install_sources(
+ [
+ 'KeywordsManager.py',
+ 'LicenseManager.py',
+ 'LocationsManager.py',
+ 'MaskManager.py',
+ 'UseManager.py',
+ 'VirtualsManager.py',
+ 'env_var_validation.py',
+ 'features_set.py',
+ 'helper.py',
+ 'special_env_vars.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/package/ebuild/_config',
+ pure : not native_extensions
+)
diff --git a/lib/portage/package/ebuild/_config/special_env_vars.py b/lib/portage/package/ebuild/_config/special_env_vars.py
index 06ae3aa39..1a66192c9 100644
--- a/lib/portage/package/ebuild/_config/special_env_vars.py
+++ b/lib/portage/package/ebuild/_config/special_env_vars.py
@@ -72,8 +72,6 @@ env_blacklist = frozenset(
)
)
-environ_whitelist = []
-
# Whitelisted variables are always allowed to enter the ebuild
# environment. Generally, this only includes special portage
# variables. Ebuilds can unset variables that are not whitelisted
@@ -82,274 +80,263 @@ environ_whitelist = []
# important to set our special BASH_ENV variable in the ebuild
# environment in order to prevent sandbox from sourcing /etc/profile
# in its bashrc (causing major leakage).
-environ_whitelist += [
- "ACCEPT_LICENSE",
- "BASH_ENV",
- "BASH_FUNC____in_portage_iuse%%",
- "BROOT",
- "BUILD_PREFIX",
- "COLUMNS",
- "D",
- "DISTDIR",
- "DOC_SYMLINKS_DIR",
- "EAPI",
- "EBUILD",
- "EBUILD_FORCE_TEST",
- "EBUILD_PHASE",
- "EBUILD_PHASE_FUNC",
- "ECLASSDIR",
- "ECLASS_DEPTH",
- "ED",
- "EMERGE_FROM",
- "ENV_UNSET",
- "EPREFIX",
- "EROOT",
- "ESYSROOT",
- "FEATURES",
- "FILESDIR",
- "HOME",
- "MERGE_TYPE",
- "NOCOLOR",
- "PATH",
- "PKGDIR",
- "PKGUSE",
- "PKG_LOGDIR",
- "PKG_TMPDIR",
- "PORTAGE_ACTUAL_DISTDIR",
- "PORTAGE_ARCHLIST",
- "PORTAGE_BASHRC_FILES",
- "PORTAGE_BASHRC",
- "PM_EBUILD_HOOK_DIR",
- "PORTAGE_BINPKG_FILE",
- "PORTAGE_BINPKG_TAR_OPTS",
- "PORTAGE_BINPKG_TMPFILE",
- "PORTAGE_BIN_PATH",
- "PORTAGE_BUILDDIR",
- "PORTAGE_BUILD_GROUP",
- "PORTAGE_BUILD_USER",
- "PORTAGE_BUNZIP2_COMMAND",
- "PORTAGE_BZIP2_COMMAND",
- "PORTAGE_COLORMAP",
- "PORTAGE_COMPRESS",
- "PORTAGE_COMPRESSION_COMMAND",
- "PORTAGE_COMPRESS_EXCLUDE_SUFFIXES",
- "PORTAGE_CONFIGROOT",
- "PORTAGE_DEBUG",
- "PORTAGE_DEPCACHEDIR",
- "PORTAGE_DOHTML_UNWARNED_SKIPPED_EXTENSIONS",
- "PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES",
- "PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES",
- "PORTAGE_EBUILD_EXIT_FILE",
- "PORTAGE_FEATURES",
- "PORTAGE_GID",
- "PORTAGE_GRPNAME",
- "PORTAGE_INTERNAL_CALLER",
- "PORTAGE_INST_GID",
- "PORTAGE_INST_UID",
- "PORTAGE_IPC_DAEMON",
- "PORTAGE_IUSE",
- "PORTAGE_ECLASS_LOCATIONS",
- "PORTAGE_LOG_FILE",
- "PORTAGE_OVERRIDE_EPREFIX",
- "PORTAGE_PIPE_FD",
- "PORTAGE_PROPERTIES",
- "PORTAGE_PYM_PATH",
- "PORTAGE_PYTHON",
- "PORTAGE_PYTHONPATH",
- "PORTAGE_QUIET",
- "PORTAGE_REPO_NAME",
- "PORTAGE_REPOSITORIES",
- "PORTAGE_RESTRICT",
- "PORTAGE_SIGPIPE_STATUS",
- "PORTAGE_SOCKS5_PROXY",
- "PORTAGE_TMPDIR",
- "PORTAGE_UPDATE_ENV",
- "PORTAGE_USERNAME",
- "PORTAGE_VERBOSE",
- "PORTAGE_WORKDIR_MODE",
- "PORTAGE_XATTR_EXCLUDE",
- "PORTDIR",
- "PORTDIR_OVERLAY",
- "PREROOTPATH",
- "PYTHONDONTWRITEBYTECODE",
- "REPLACING_VERSIONS",
- "REPLACED_BY_VERSION",
- "ROOT",
- "ROOTPATH",
- "SANDBOX_LOG",
- "SYSROOT",
- "T",
- "TMP",
- "TMPDIR",
- "USE_EXPAND",
- "USE_ORDER",
- "WORKDIR",
- "XARGS",
- "__PORTAGE_TEST_HARDLINK_LOCKS",
-]
-
-# user config variables
-environ_whitelist += ["DOC_SYMLINKS_DIR", "INSTALL_MASK", "PKG_INSTALL_MASK"]
-
-environ_whitelist += ["A", "AA", "CATEGORY", "P", "PF", "PN", "PR", "PV", "PVR"]
-
-# misc variables inherited from the calling environment
-environ_whitelist += [
- "COLORTERM",
- "DISPLAY",
- "EDITOR",
- "LESS",
- "LESSOPEN",
- "LOGNAME",
- "LS_COLORS",
- "PAGER",
- "TERM",
- "TERMCAP",
- "USER",
- "ftp_proxy",
- "http_proxy",
- "no_proxy",
-]
-
-# tempdir settings
-environ_whitelist += [
- "TMPDIR",
- "TEMP",
- "TMP",
-]
-
-# localization settings
-environ_whitelist += [
- "LANG",
- "LC_COLLATE",
- "LC_CTYPE",
- "LC_MESSAGES",
- "LC_MONETARY",
- "LC_NUMERIC",
- "LC_TIME",
- "LC_PAPER",
- "LC_ALL",
-]
-
-# other variables inherited from the calling environment
-environ_whitelist += [
- "CVS_RSH",
- "ECHANGELOG_USER",
- "GPG_AGENT_INFO",
- "SSH_AGENT_PID",
- "SSH_AUTH_SOCK",
- "STY",
- "WINDOW",
- "XAUTHORITY",
-]
-
-environ_whitelist = frozenset(environ_whitelist)
+environ_whitelist = frozenset(
+ (
+ "A",
+ "AA",
+ "ACCEPT_LICENSE",
+ "BASH_ENV",
+ "BASH_FUNC____in_portage_iuse%%",
+ "BINPKG_FORMAT",
+ "BROOT",
+ "BUILD_ID",
+ "BUILD_PREFIX",
+ "CATEGORY",
+ "COLUMNS",
+ "D",
+ "DISTDIR",
+ "DOC_SYMLINKS_DIR",
+ "EAPI",
+ "EBUILD",
+ "EBUILD_FORCE_TEST",
+ "EBUILD_PHASE",
+ "EBUILD_PHASE_FUNC",
+ "ECLASSDIR",
+ "ECLASS_DEPTH",
+ "ED",
+ "EMERGE_FROM",
+ "ENV_UNSET",
+ "EPREFIX",
+ "EROOT",
+ "ESYSROOT",
+ "FEATURES",
+ "FILESDIR",
+ "HOME",
+ "MERGE_TYPE",
+ "NOCOLOR",
+ "NO_COLOR",
+ "P",
+ "PATH",
+ "PF",
+ "PKGDIR",
+ "PKGUSE",
+ "PKG_LOGDIR",
+ "PKG_TMPDIR",
+ "PM_EBUILD_HOOK_DIR",
+ "PN",
+ "PORTAGE_ACTUAL_DISTDIR",
+ "PORTAGE_ARCHLIST",
+ "PORTAGE_BASHRC_FILES",
+ "PORTAGE_BASHRC",
+ "PORTAGE_BINPKG_FILE",
+ "PORTAGE_BINPKG_TAR_OPTS",
+ "PORTAGE_BINPKG_TMPFILE",
+ "PORTAGE_BIN_PATH",
+ "PORTAGE_BUILDDIR",
+ "PORTAGE_BUILD_GROUP",
+ "PORTAGE_BUILD_USER",
+ "PORTAGE_BUNZIP2_COMMAND",
+ "PORTAGE_BZIP2_COMMAND",
+ "PORTAGE_COLORMAP",
+ "PORTAGE_COMPRESS",
+ "PORTAGE_COMPRESSION_COMMAND",
+ "PORTAGE_COMPRESS_EXCLUDE_SUFFIXES",
+ "PORTAGE_CONFIGROOT",
+ "PORTAGE_DEBUG",
+ "PORTAGE_DEPCACHEDIR",
+ "PORTAGE_DOHTML_UNWARNED_SKIPPED_EXTENSIONS",
+ "PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES",
+ "PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES",
+ "PORTAGE_EBUILD_EXIT_FILE",
+ "PORTAGE_FEATURES",
+ "PORTAGE_GID",
+ "PORTAGE_GRPNAME",
+ "PORTAGE_INTERNAL_CALLER",
+ "PORTAGE_INST_GID",
+ "PORTAGE_INST_UID",
+ "PORTAGE_IPC_DAEMON",
+ "PORTAGE_IUSE",
+ "PORTAGE_ECLASS_LOCATIONS",
+ "PORTAGE_LOG_FILE",
+ "PORTAGE_OVERRIDE_EPREFIX",
+ "PORTAGE_PIPE_FD",
+ "PORTAGE_PROPERTIES",
+ "PORTAGE_PYM_PATH",
+ "PORTAGE_PYTHON",
+ "PORTAGE_PYTHONPATH",
+ "PORTAGE_QUIET",
+ "PORTAGE_REPO_REVISIONS",
+ "PORTAGE_REPO_NAME",
+ "PORTAGE_REPOSITORIES",
+ "PORTAGE_RESTRICT",
+ "PORTAGE_SIGPIPE_STATUS",
+ "PORTAGE_SOCKS5_PROXY",
+ "PORTAGE_TMPDIR",
+ "PORTAGE_UPDATE_ENV",
+ "PORTAGE_USERNAME",
+ "PORTAGE_VERBOSE",
+ "PORTAGE_WORKDIR_MODE",
+ "PORTAGE_XATTR_EXCLUDE",
+ "PORTDIR",
+ "PORTDIR_OVERLAY",
+ "PR",
+ "PREROOTPATH",
+ "PV",
+ "PVR",
+ "PYTHONDONTWRITEBYTECODE",
+ "REPLACING_VERSIONS",
+ "REPLACED_BY_VERSION",
+ "ROOT",
+ "ROOTPATH",
+ "SANDBOX_LOG",
+ "SYSROOT",
+ "T",
+ "TMP",
+ "TMPDIR",
+ "USE_EXPAND",
+ "USE_ORDER",
+ "WORKDIR",
+ "XARGS",
+ "__PORTAGE_TEST_HARDLINK_LOCKS",
+ # user config variables
+ "DOC_SYMLINKS_DIR",
+ "INSTALL_MASK",
+ "PKG_INSTALL_MASK",
+ # misc variables inherited from the calling environment
+ "COLORTERM",
+ "DISPLAY",
+ "EDITOR",
+ "LESS",
+ "LESSOPEN",
+ "LOGNAME",
+ "LS_COLORS",
+ "PAGER",
+ "TERM",
+ "TERMCAP",
+ "USER",
+ "ftp_proxy",
+ "http_proxy",
+ "https_proxy",
+ "no_proxy",
+ # tempdir settings
+ "TMPDIR",
+ "TEMP",
+ "TMP",
+ # localization settings
+ "LANG",
+ "LC_COLLATE",
+ "LC_CTYPE",
+ "LC_MESSAGES",
+ "LC_MONETARY",
+ "LC_NUMERIC",
+ "LC_TIME",
+ "LC_PAPER",
+ "LC_ALL",
+ # other variables inherited from the calling environment
+ "CVS_RSH",
+ "ECHANGELOG_USER",
+ "GPG_AGENT_INFO",
+ "SSH_AGENT_PID",
+ "SSH_AUTH_SOCK",
+ "STY",
+ "WINDOW",
+ "XAUTHORITY",
+ )
+)
environ_whitelist_re = re.compile(r"^(CCACHE_|DISTCC_).*")
# Filter selected variables in the config.environ() method so that
# they don't needlessly propagate down into the ebuild environment.
-environ_filter = []
-
# Exclude anything that could be extremely long here (like SRC_URI)
# since that could cause execve() calls to fail with E2BIG errors. For
# example, see bug #262647.
-environ_filter += [
- "DEPEND",
- "RDEPEND",
- "PDEPEND",
- "SRC_URI",
- "BDEPEND",
- "IDEPEND",
-]
-
-# misc variables inherited from the calling environment
-environ_filter += [
- "INFOPATH",
- "MANPATH",
- "USER",
-]
-
-# variables that break bash
-environ_filter += [
- "HISTFILE",
- "POSIXLY_CORRECT",
-]
-
-# portage config variables and variables set directly by portage
-environ_filter += [
- "ACCEPT_CHOSTS",
- "ACCEPT_KEYWORDS",
- "ACCEPT_PROPERTIES",
- "ACCEPT_RESTRICT",
- "AUTOCLEAN",
- "BINPKG_COMPRESS",
- "BINPKG_COMPRESS_FLAGS",
- "CLEAN_DELAY",
- "COLLISION_IGNORE",
- "CONFIG_PROTECT",
- "CONFIG_PROTECT_MASK",
- "EGENCACHE_DEFAULT_OPTS",
- "EMERGE_DEFAULT_OPTS",
- "EMERGE_LOG_DIR",
- "EMERGE_WARNING_DELAY",
- "FETCHCOMMAND",
- "FETCHCOMMAND_FTP",
- "FETCHCOMMAND_HTTP",
- "FETCHCOMMAND_HTTPS",
- "FETCHCOMMAND_RSYNC",
- "FETCHCOMMAND_SFTP",
- "GENTOO_MIRRORS",
- "NOCONFMEM",
- "O",
- "PORTAGE_BACKGROUND",
- "PORTAGE_BACKGROUND_UNMERGE",
- "PORTAGE_BINHOST",
- "PORTAGE_BINPKG_FORMAT",
- "PORTAGE_BUILDDIR_LOCKED",
- "PORTAGE_CHECKSUM_FILTER",
- "PORTAGE_ELOG_CLASSES",
- "PORTAGE_ELOG_MAILFROM",
- "PORTAGE_ELOG_MAILSUBJECT",
- "PORTAGE_ELOG_MAILURI",
- "PORTAGE_ELOG_SYSTEM",
- "PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS",
- "PORTAGE_FETCH_RESUME_MIN_SIZE",
- "PORTAGE_GPG_DIR",
- "PORTAGE_GPG_KEY",
- "PORTAGE_GPG_SIGNING_COMMAND",
- "PORTAGE_IONICE_COMMAND",
- "PORTAGE_PACKAGE_EMPTY_ABORT",
- "PORTAGE_REPO_DUPLICATE_WARN",
- "PORTAGE_RO_DISTDIRS",
- "PORTAGE_RSYNC_EXTRA_OPTS",
- "PORTAGE_RSYNC_OPTS",
- "PORTAGE_RSYNC_RETRIES",
- "PORTAGE_SSH_OPTS",
- "PORTAGE_SYNC_STALE",
- "PORTAGE_USE",
- "PORTAGE_LOG_FILTER_FILE_CMD",
- "PORTAGE_LOGDIR",
- "PORTAGE_LOGDIR_CLEAN",
- "QUICKPKG_DEFAULT_OPTS",
- "REPOMAN_DEFAULT_OPTS",
- "RESUMECOMMAND",
- "RESUMECOMMAND_FTP",
- "RESUMECOMMAND_HTTP",
- "RESUMECOMMAND_HTTPS",
- "RESUMECOMMAND_RSYNC",
- "RESUMECOMMAND_SFTP",
- "SIGNED_OFF_BY",
- "UNINSTALL_IGNORE",
- "USE_EXPAND_HIDDEN",
- "USE_ORDER",
- "__PORTAGE_HELPER",
-]
-
-# No longer supported variables
-environ_filter += ["SYNC"]
-
-environ_filter = frozenset(environ_filter)
+environ_filter = frozenset(
+ (
+ "DEPEND",
+ "RDEPEND",
+ "PDEPEND",
+ "SRC_URI",
+ "BDEPEND",
+ "IDEPEND",
+ # misc variables inherited from the calling environment
+ "INFOPATH",
+ "MANPATH",
+ "USER",
+ # variables that break bash
+ "HISTFILE",
+ "POSIXLY_CORRECT",
+ # portage config variables and variables set directly by portage
+ "ACCEPT_CHOSTS",
+ "ACCEPT_KEYWORDS",
+ "ACCEPT_PROPERTIES",
+ "ACCEPT_RESTRICT",
+ "AUTOCLEAN",
+ "BINPKG_COMPRESS",
+ "BINPKG_COMPRESS_FLAGS",
+ "CLEAN_DELAY",
+ "COLLISION_IGNORE",
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "EGENCACHE_DEFAULT_OPTS",
+ "EMERGE_DEFAULT_OPTS",
+ "EMERGE_LOG_DIR",
+ "EMERGE_WARNING_DELAY",
+ "FETCHCOMMAND",
+ "FETCHCOMMAND_FTP",
+ "FETCHCOMMAND_HTTP",
+ "FETCHCOMMAND_HTTPS",
+ "FETCHCOMMAND_RSYNC",
+ "FETCHCOMMAND_SFTP",
+ "GENTOO_MIRRORS",
+ "NOCONFMEM",
+ "O",
+ "PORTAGE_BACKGROUND",
+ "PORTAGE_BACKGROUND_UNMERGE",
+ "PORTAGE_BINHOST",
+ "PORTAGE_BINPKG_FORMAT",
+ "PORTAGE_BUILDDIR_LOCKED",
+ "PORTAGE_CHECKSUM_FILTER",
+ "PORTAGE_ELOG_CLASSES",
+ "PORTAGE_ELOG_MAILFROM",
+ "PORTAGE_ELOG_MAILSUBJECT",
+ "PORTAGE_ELOG_MAILURI",
+ "PORTAGE_ELOG_SYSTEM",
+ "PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS",
+ "PORTAGE_FETCH_RESUME_MIN_SIZE",
+ "PORTAGE_GPG_DIR",
+ "PORTAGE_GPG_KEY",
+ "PORTAGE_GPG_SIGNING_COMMAND",
+ "PORTAGE_IONICE_COMMAND",
+ "PORTAGE_PACKAGE_EMPTY_ABORT",
+ "PORTAGE_REPO_DUPLICATE_WARN",
+ "PORTAGE_RO_DISTDIRS",
+ "PORTAGE_RSYNC_EXTRA_OPTS",
+ "PORTAGE_RSYNC_OPTS",
+ "PORTAGE_RSYNC_RETRIES",
+ "PORTAGE_SSH_OPTS",
+ "PORTAGE_SYNC_STALE",
+ "PORTAGE_TRUST_HELPER",
+ "PORTAGE_USE",
+ "PORTAGE_LOG_FILTER_FILE_CMD",
+ "PORTAGE_LOGDIR",
+ "PORTAGE_LOGDIR_CLEAN",
+ "QUICKPKG_DEFAULT_OPTS",
+ "REPOMAN_DEFAULT_OPTS",
+ "RESUMECOMMAND",
+ "RESUMECOMMAND_FTP",
+ "RESUMECOMMAND_HTTP",
+ "RESUMECOMMAND_HTTPS",
+ "RESUMECOMMAND_RSYNC",
+ "RESUMECOMMAND_SFTP",
+ "UNINSTALL_IGNORE",
+ "USE_EXPAND_HIDDEN",
+ "USE_ORDER",
+ "__PORTAGE_HELPER",
+ # No longer supported variables
+ "SYNC",
+ )
+)
# Variables that are not allowed to have per-repo or per-package
# settings.
@@ -375,4 +362,5 @@ validate_commands = (
case_insensitive_vars = (
"AUTOCLEAN",
"NOCOLOR",
+ "NO_COLOR",
)
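
For orientation, these frozensets act as gatekeepers on the calling environment; a minimal sketch of that filtering step (an assumption about usage, not the actual config.environ() code):

    import os
    import re

    environ_whitelist = frozenset(("PATH", "HOME", "TERM"))  # abridged
    environ_whitelist_re = re.compile(r"^(CCACHE_|DISTCC_).*")

    def filtered_environ(env=os.environ):
        # Keep only variables allowed to enter the ebuild environment.
        return {
            k: v
            for k, v in env.items()
            if k in environ_whitelist or environ_whitelist_re.match(k)
        }
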
diff --git a/lib/portage/package/ebuild/_config/unpack_dependencies.py b/lib/portage/package/ebuild/_config/unpack_dependencies.py
deleted file mode 100644
index 58b2fb16e..000000000
--- a/lib/portage/package/ebuild/_config/unpack_dependencies.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2012 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-from portage import os, _supported_eapis
-from portage.dep import use_reduce
-from portage.eapi import eapi_has_automatic_unpack_dependencies
-from portage.exception import InvalidDependString
-from portage.localization import _
-from portage.util import grabfile, writemsg
-
-
-def load_unpack_dependencies_configuration(repositories):
- repo_dict = {}
- for repo in repositories.repos_with_profiles():
- for eapi in _supported_eapis:
- if eapi_has_automatic_unpack_dependencies(eapi):
- file_name = os.path.join(
- repo.location, "profiles", "unpack_dependencies", eapi
- )
- lines = grabfile(file_name, recursive=True)
- for line in lines:
- elements = line.split()
- suffix = elements[0].lower()
- if len(elements) == 1:
- writemsg(
- _(
- "--- Missing unpack dependencies for '%s' suffix in '%s'\n"
- )
- % (suffix, file_name)
- )
- depend = " ".join(elements[1:])
- try:
- use_reduce(depend, eapi=eapi)
- except InvalidDependString as e:
- writemsg(
- _(
- "--- Invalid unpack dependencies for '%s' suffix in '%s': '%s'\n"
- % (suffix, file_name, e)
- )
- )
- else:
- repo_dict.setdefault(repo.name, {}).setdefault(eapi, {})[
- suffix
- ] = depend
-
- ret = {}
- for repo in repositories.repos_with_profiles():
- for repo_name in [x.name for x in repo.masters] + [repo.name]:
- for eapi in repo_dict.get(repo_name, {}):
- for suffix, depend in (
- repo_dict.get(repo_name, {}).get(eapi, {}).items()
- ):
- ret.setdefault(repo.name, {}).setdefault(eapi, {})[suffix] = depend
-
- return ret
diff --git a/lib/portage/package/ebuild/_ipc/ExitCommand.py b/lib/portage/package/ebuild/_ipc/ExitCommand.py
index be419e9b7..b7c970353 100644
--- a/lib/portage/package/ebuild/_ipc/ExitCommand.py
+++ b/lib/portage/package/ebuild/_ipc/ExitCommand.py
@@ -5,7 +5,6 @@ from portage.package.ebuild._ipc.IpcCommand import IpcCommand
class ExitCommand(IpcCommand):
-
__slots__ = (
"exitcode",
"reply_hook",
@@ -17,7 +16,6 @@ class ExitCommand(IpcCommand):
self.exitcode = None
def __call__(self, argv):
-
if self.exitcode is not None:
# Ignore all but the first call, since if die is called
# then we certainly want to honor that exitcode, even
diff --git a/lib/portage/package/ebuild/_ipc/IpcCommand.py b/lib/portage/package/ebuild/_ipc/IpcCommand.py
index 2c4b9d8be..763cdb30b 100644
--- a/lib/portage/package/ebuild/_ipc/IpcCommand.py
+++ b/lib/portage/package/ebuild/_ipc/IpcCommand.py
@@ -3,7 +3,6 @@
class IpcCommand:
-
__slots__ = ()
def __call__(self, argv):
diff --git a/lib/portage/package/ebuild/_ipc/QueryCommand.py b/lib/portage/package/ebuild/_ipc/QueryCommand.py
index f8f464516..faf1baa0a 100644
--- a/lib/portage/package/ebuild/_ipc/QueryCommand.py
+++ b/lib/portage/package/ebuild/_ipc/QueryCommand.py
@@ -10,12 +10,11 @@ from portage.eapi import eapi_has_repo_deps
from portage.elog import messages as elog_messages
from portage.exception import InvalidAtom
from portage.package.ebuild._ipc.IpcCommand import IpcCommand
-from portage.util import normalize_path
+from portage.util import normalize_path, no_color
from portage.versions import best
class QueryCommand(IpcCommand):
-
__slots__ = (
"phase",
"settings",
@@ -53,7 +52,7 @@ class QueryCommand(IpcCommand):
root = normalize_path(root or os.sep).rstrip(os.sep) + os.sep
if root not in db:
- return ("", "%s: Invalid ROOT: %s\n" % (cmd, root), 3)
+ return ("", f"{cmd}: Invalid ROOT: {root}\n", 3)
portdb = db[root]["porttree"].dbapi
vardb = db[root]["vartree"].dbapi
@@ -63,12 +62,12 @@ class QueryCommand(IpcCommand):
try:
atom = Atom(args[0], allow_repo=allow_repo)
except InvalidAtom:
- return ("", "%s: Invalid atom: %s\n" % (cmd, args[0]), 2)
+ return ("", f"{cmd}: Invalid atom: {args[0]}\n", 2)
try:
atom = Atom(args[0], allow_repo=allow_repo, eapi=eapi)
except InvalidAtom as e:
- warnings.append("QA Notice: %s: %s" % (cmd, e))
+ warnings.append(f"QA Notice: {cmd}: {e}")
use = self.settings.get("PORTAGE_BUILT_USE")
if use is None:
@@ -88,7 +87,7 @@ class QueryCommand(IpcCommand):
return ("", warnings_str, returncode)
if cmd == "best_version":
m = best(vardb.match(atom))
- return ("%s\n" % m, warnings_str, 0)
+ return (f"{m}\n", warnings_str, 0)
if cmd in (
"master_repositories",
"repository_path",
@@ -98,7 +97,7 @@ class QueryCommand(IpcCommand):
):
repo = _repo_name_re.match(args[0])
if repo is None:
- return ("", "%s: Invalid repository: %s\n" % (cmd, args[0]), 2)
+ return ("", f"{cmd}: Invalid repository: {args[0]}\n", 2)
try:
repo = portdb.repositories[args[0]]
except KeyError:
@@ -106,15 +105,15 @@ class QueryCommand(IpcCommand):
if cmd == "master_repositories":
return (
- "%s\n" % " ".join(x.name for x in repo.masters),
+ f"{' '.join(x.name for x in repo.masters)}\n",
warnings_str,
0,
)
if cmd == "repository_path":
- return ("%s\n" % repo.location, warnings_str, 0)
+ return (f"{repo.location}\n", warnings_str, 0)
if cmd == "available_eclasses":
return (
- "%s\n" % " ".join(sorted(repo.eclass_db.eclasses)),
+ f"{' '.join(sorted(repo.eclass_db.eclasses))}\n",
warnings_str,
0,
)
@@ -123,7 +122,7 @@ class QueryCommand(IpcCommand):
eclass = repo.eclass_db.eclasses[args[1]]
except KeyError:
return ("", warnings_str, 1)
- return ("%s\n" % eclass.location, warnings_str, 0)
+ return (f"{eclass.location}\n", warnings_str, 0)
if cmd == "license_path":
paths = reversed(
[
@@ -133,9 +132,9 @@ class QueryCommand(IpcCommand):
)
for path in paths:
if os.path.exists(path):
- return ("%s\n" % path, warnings_str, 0)
+ return (f"{path}\n", warnings_str, 0)
return ("", warnings_str, 1)
- return ("", "Invalid command: %s\n" % cmd, 3)
+ return ("", f"Invalid command: {cmd}\n", 3)
def _elog(self, elog_funcname, lines):
"""
@@ -150,9 +149,7 @@ class QueryCommand(IpcCommand):
elog_func = getattr(elog_messages, elog_funcname)
global_havecolor = portage.output.havecolor
try:
- portage.output.havecolor = self.settings.get(
- "NOCOLOR", "false"
- ).lower() in ("no", "false")
+ portage.output.havecolor = not no_color(self.settings)
for line in lines:
elog_func(line, phase=phase, key=self.settings.mycpv, out=out)
finally:
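
The switch to no_color() centralizes the color test that was previously inlined; a plausible equivalent of the helper's contract, mirroring the expression this hunk replaces (a sketch, with NO_COLOR support assumed):

    def no_color(settings):
        # NO_COLOR convention: any non-empty value disables color.
        if settings.get("NO_COLOR"):
            return True
        # Historical portage toggle, inverted from the old havecolor test.
        return settings.get("NOCOLOR", "false").lower() not in ("no", "false")
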
diff --git a/lib/portage/package/ebuild/_ipc/meson.build b/lib/portage/package/ebuild/_ipc/meson.build
new file mode 100644
index 000000000..18c0ac1cf
--- /dev/null
+++ b/lib/portage/package/ebuild/_ipc/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'ExitCommand.py',
+ 'IpcCommand.py',
+ 'QueryCommand.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/package/ebuild/_ipc',
+ pure : not native_extensions
+)
diff --git a/lib/portage/package/ebuild/_metadata_invalid.py b/lib/portage/package/ebuild/_metadata_invalid.py
index b42adbcf2..310b21e9e 100644
--- a/lib/portage/package/ebuild/_metadata_invalid.py
+++ b/lib/portage/package/ebuild/_metadata_invalid.py
@@ -10,7 +10,6 @@ from portage.elog.messages import eerror
def eapi_invalid(self, cpv, repo_name, settings, eapi_var, eapi_parsed, eapi_lineno):
-
msg = []
msg.extend(
textwrap.wrap(
@@ -25,9 +24,9 @@ def eapi_invalid(self, cpv, repo_name, settings, eapi_var, eapi_parsed, eapi_lin
if not eapi_parsed:
# None means the assignment was not found, while an
- # empty string indicates an (invalid) empty assingment.
+ # empty string indicates an (invalid) empty assignment.
msg.append(
- "\tvalid EAPI assignment must" " occur on or before line: %s" % eapi_lineno
+ f"\tvalid EAPI assignment must occur on or before line: {eapi_lineno}"
)
else:
msg.append(
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
index 7bf5dd141..ec2d5bdfc 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestProcess.py
@@ -1,6 +1,8 @@
-# Copyright 2012 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
+
import portage
from portage import os
from portage.exception import FileNotFound, PermissionDenied, PortagePackageException
@@ -9,16 +11,29 @@ from portage.util._async.ForkProcess import ForkProcess
class ManifestProcess(ForkProcess):
-
__slots__ = ("cp", "distdir", "fetchlist_dict", "repo_config")
MODIFIED = 16
- def _run(self):
- mf = self.repo_config.load_manifest(
- os.path.join(self.repo_config.location, self.cp),
+ def _start(self):
+ self.target = functools.partial(
+ self._target,
+ self.cp,
self.distdir,
- fetchlist_dict=self.fetchlist_dict,
+ self.fetchlist_dict,
+ self.repo_config,
+ )
+ super()._start()
+
+ @staticmethod
+ def _target(cp, distdir, fetchlist_dict, repo_config):
+ """
+ TODO: Make all arguments picklable for the multiprocessing spawn start method.
+ """
+ mf = repo_config.load_manifest(
+ os.path.join(repo_config.location, cp),
+ distdir,
+ fetchlist_dict=fetchlist_dict,
)
try:
@@ -31,22 +46,18 @@ class ManifestProcess(ForkProcess):
return 1
except PortagePackageException as e:
- portage.writemsg(("!!! %s\n") % (e,), noiselevel=-1)
+ portage.writemsg(f"!!! {e}\n", noiselevel=-1)
return 1
try:
modified = mf.write(sign=False)
except PermissionDenied as e:
portage.writemsg(
- "!!! %s: %s\n"
- % (
- _("Permission Denied"),
- e,
- ),
+ f"!!! {_('Permission Denied')}: {e}\n",
noiselevel=-1,
)
return 1
else:
if modified:
- return self.MODIFIED
+ return ManifestProcess.MODIFIED
return os.EX_OK
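
The _start()/_target() split above is the standard recipe for making a task compatible with the multiprocessing "spawn" start method: bind all state into a functools.partial over a picklable staticmethod rather than running a bound method in the child. Stripped of portage specifics, the pattern looks like this (illustrative sketch):

    import functools
    import multiprocessing

    class Task:
        def __init__(self, cp):
            self.cp = cp

        def start(self):
            # functools.partial over a staticmethod keeps the payload
            # picklable, which the "spawn" start method requires.
            proc = multiprocessing.Process(
                target=functools.partial(self._target, self.cp)
            )
            proc.start()
            proc.join()
            return proc.exitcode

        @staticmethod
        def _target(cp):
            print(f"processing {cp}")

    if __name__ == "__main__":
        Task("dev-lang/python").start()
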
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
index 4599e2d50..da7529277 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestScheduler.py
@@ -18,9 +18,8 @@ class ManifestScheduler(AsyncScheduler):
gpg_cmd=None,
gpg_vars=None,
force_sign_key=None,
- **kwargs
+ **kwargs,
):
-
AsyncScheduler.__init__(self, **kwargs)
self._portdb = portdb
@@ -41,8 +40,7 @@ class ManifestScheduler(AsyncScheduler):
# and in order to reduce latency in case of a signal interrupt.
cp_all = self._portdb.cp_all
for category in sorted(self._portdb.categories):
- for cp in cp_all(categories=(category,)):
- yield cp
+ yield from cp_all(categories=(category,))
def _iter_tasks(self):
portdb = self._portdb
@@ -94,7 +92,6 @@ class ManifestScheduler(AsyncScheduler):
)
def _task_exit(self, task):
-
if task.returncode != os.EX_OK:
if not self._terminated_tasks:
portage.writemsg(
diff --git a/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py b/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
index df279dab6..87aa46de5 100644
--- a/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
+++ b/lib/portage/package/ebuild/_parallel_manifest/ManifestTask.py
@@ -21,7 +21,6 @@ from .ManifestProcess import ManifestProcess
class ManifestTask(CompositeTask):
-
__slots__ = (
"cp",
"distdir",
@@ -233,7 +232,7 @@ class ManifestTask(CompositeTask):
"rb",
) as f:
return self._PGP_HEADER not in f.readline()
- except IOError as e:
+ except OSError as e:
if e.errno in (errno.ENOENT, errno.ESTALE):
return False
raise
diff --git a/lib/portage/package/ebuild/_parallel_manifest/meson.build b/lib/portage/package/ebuild/_parallel_manifest/meson.build
new file mode 100644
index 000000000..14b3fa659
--- /dev/null
+++ b/lib/portage/package/ebuild/_parallel_manifest/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'ManifestProcess.py',
+ 'ManifestScheduler.py',
+ 'ManifestTask.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/package/ebuild/_parallel_manifest',
+ pure : not native_extensions
+)
diff --git a/lib/portage/package/ebuild/config.py b/lib/portage/package/ebuild/config.py
index b4d6862a3..67fd1bb18 100644
--- a/lib/portage/package/ebuild/config.py
+++ b/lib/portage/package/ebuild/config.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2021 Gentoo Authors
+# Copyright 2010-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = [
@@ -29,7 +29,6 @@ portage.proxy.lazyimport.lazyimport(
"portage.dbapi.vartree:vartree",
"portage.package.ebuild.doebuild:_phase_func_map",
"portage.util.compression_probe:_compressors",
- "portage.util.locale:check_locale,split_LC_ALL",
)
from portage import bsd_chflags, load_mod, os, selinux, _unicode_decode
from portage.const import (
@@ -41,6 +40,7 @@ from portage.const import (
PORTAGE_BASE_PATH,
PRIVATE_PATH,
PROFILE_PATH,
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
USER_CONFIG_PATH,
USER_VIRTUALS_FILE,
)
@@ -104,9 +104,6 @@ from portage.package.ebuild._config.helper import (
ordered_by_atom_specificity,
prune_incremental,
)
-from portage.package.ebuild._config.unpack_dependencies import (
- load_unpack_dependencies_configuration,
-)
_feature_flags_cache = {}
@@ -135,8 +132,7 @@ def autouse(myvartree, use_cache=1, mysettings=None):
def check_config_instance(test):
if not isinstance(test, config):
raise TypeError(
- "Invalid type for config object: %s (should be %s)"
- % (test.__class__, config)
+ f"Invalid type for config object: {test.__class__} (should be {config})"
)
@@ -148,7 +144,7 @@ def best_from_dict(key, top_dict, key_order, EmptyOnError=1, FullCopy=1, AllowEm
return top_dict[x][key]
if EmptyOnError:
return ""
- raise KeyError("Key not found in list; '%s'" % key)
+ raise KeyError(f"Key not found in list; '{key}'")
def _lazy_iuse_regex(iuse_implicit):
@@ -160,7 +156,7 @@ def _lazy_iuse_regex(iuse_implicit):
# Escape anything except ".*" which is supposed to pass through from
# _get_implicit_iuse().
regex = sorted(re.escape(x) for x in iuse_implicit)
- regex = "^(%s)$" % "|".join(regex)
+ regex = f"^({'|'.join(regex)})$"
regex = regex.replace("\\.\\*", ".*")
return regex
@@ -168,7 +164,7 @@ def _lazy_iuse_regex(iuse_implicit):
class _iuse_implicit_match_cache:
def __init__(self, settings):
self._iuse_implicit_re = re.compile(
- "^(%s)$" % "|".join(settings._get_implicit_iuse())
+ f"^({'|'.join(settings._get_implicit_iuse())})$"
)
self._cache = {}
@@ -202,7 +198,6 @@ class config:
_deprecated_keys = {
"PORTAGE_LOGDIR": "PORT_LOGDIR",
"PORTAGE_LOGDIR_CLEAN": "PORT_LOGDIR_CLEAN",
- "SIGNED_OFF_BY": "DCO_SIGNED_OFF_BY",
}
_setcpv_aux_keys = (
@@ -334,7 +329,6 @@ class config:
self.profiles = clone.profiles
self.packages = clone.packages
self.repositories = clone.repositories
- self.unpack_dependencies = clone.unpack_dependencies
self._default_features_use = clone._default_features_use
self._iuse_effective = clone._iuse_effective
self._iuse_implicit_match = clone._iuse_implicit_match
@@ -428,7 +422,6 @@ class config:
eprefix = locations_manager.eprefix
config_root = locations_manager.config_root
sysroot = locations_manager.sysroot
- esysroot = locations_manager.esysroot
broot = locations_manager.broot
abs_user_config = locations_manager.abs_user_config
make_conf_paths = [
@@ -471,6 +464,7 @@ class config:
locations_manager.set_root_override(make_conf.get("ROOT"))
target_root = locations_manager.target_root
eroot = locations_manager.eroot
+ esysroot = locations_manager.esysroot
self.global_config_path = locations_manager.global_config_path
# The expand_map is used for variable substitution
@@ -489,14 +483,7 @@ class config:
# interaction with the calling environment that might
# lead to unexpected results.
- env_d = (
- getconfig(
- os.path.join(eroot, "etc", "profile.env"),
- tolerant=tolerant,
- expand=False,
- )
- or {}
- )
+ env_d = self._get_env_d(broot=broot, eroot=eroot, tolerant=tolerant)
expand_map = env_d.copy()
self._expand_map = expand_map
@@ -563,9 +550,7 @@ class config:
user_auxdbmodule is not None
and user_auxdbmodule in self._module_aliases
):
- warnings.warn(
- "'%s' is deprecated: %s" % (user_auxdbmodule, modules_file)
- )
+ warnings.warn(f"'{user_auxdbmodule}' is deprecated: {modules_file}")
self.modules["default"] = {
"portdbapi.auxdbmodule": "portage.cache.flat_hash.mtime_md5_database",
@@ -598,9 +583,9 @@ class config:
env = os.environ
# Avoid potential UnicodeDecodeError exceptions later.
- env_unicode = dict(
- (_unicode_decode(k), _unicode_decode(v)) for k, v in env.items()
- )
+ env_unicode = {
+ _unicode_decode(k): _unicode_decode(v) for k, v in env.items()
+ }
self.backupenv = env_unicode
@@ -716,7 +701,7 @@ class config:
)
for x in profiles_complex
]
- except EnvironmentError as e:
+ except OSError as e:
_raise_exc(e)
self.packages = tuple(stack_lists(packages_list, incremental=1))
@@ -729,10 +714,6 @@ class config:
x = Atom(x.lstrip("*"))
self.prevmaskdict.setdefault(x.cp, []).append(x)
- self.unpack_dependencies = load_unpack_dependencies_configuration(
- self.repositories
- )
-
mygcfg = {}
if profiles_complex:
mygcfg_dlists = []
@@ -884,10 +865,10 @@ class config:
# Initialize all USE related variables we track ourselves.
self.usemask = self._use_manager.getUseMask()
self.useforce = self._use_manager.getUseForce()
- self.configdict["conf"][
- "USE"
- ] = self._use_manager.extract_global_USE_changes(
- self.configdict["conf"].get("USE", "")
+ self.configdict["conf"]["USE"] = (
+ self._use_manager.extract_global_USE_changes(
+ self.configdict["conf"].get("USE", "")
+ )
)
# Read license_groups and optionally license_groups and package.license from user config
@@ -897,10 +878,10 @@ class config:
user_config=local_config,
)
# Extract '*/*' entries from package.license
- self.configdict["conf"][
- "ACCEPT_LICENSE"
- ] = self._license_manager.extract_global_changes(
- self.configdict["conf"].get("ACCEPT_LICENSE", "")
+ self.configdict["conf"]["ACCEPT_LICENSE"] = (
+ self._license_manager.extract_global_changes(
+ self.configdict["conf"].get("ACCEPT_LICENSE", "")
+ )
)
# profile.bashrc
@@ -1086,9 +1067,9 @@ class config:
# reasonable defaults; this is important as without USE_ORDER,
# USE will always be "" (nothing set)!
if "USE_ORDER" not in self:
- self[
- "USE_ORDER"
- ] = "env:pkg:conf:defaults:pkginternal:features:repo:env.d"
+ self["USE_ORDER"] = (
+ "env:pkg:conf:defaults:pkginternal:features:repo:env.d"
+ )
self.backup_changes("USE_ORDER")
if "CBUILD" not in self and "CHOST" in self:
@@ -1120,7 +1101,6 @@ class config:
except OSError:
pass
else:
-
if portage.data._unprivileged_mode(eroot_or_parent, eroot_st):
unprivileged = True
@@ -1217,6 +1197,61 @@ class config:
if mycpv:
self.setcpv(mycpv)
+ def _get_env_d(self, broot, eroot, tolerant):
+ broot_only_variables = (
+ "PATH",
+ "PREROOTPATH",
+ "ROOTPATH",
+ )
+ eroot_only_variables = (
+ "CONFIG_PROTECT",
+ "CONFIG_PROTECT_MASK",
+ "INFODIR",
+ "INFOPATH",
+ "MANPATH",
+ "PKG_CONFIG_.*",
+ )
+
+ broot_only_variables_re = re.compile(r"^(%s)$" % "|".join(broot_only_variables))
+ eroot_only_variables_re = re.compile(r"^(%s)$" % "|".join(eroot_only_variables))
+
+ broot_env_d_path = os.path.join(broot or "/", "etc", "profile.env")
+ eroot_env_d_path = os.path.join(eroot or "/", "etc", "profile.env")
+
+ if (
+ os.path.exists(broot_env_d_path)
+ and os.path.exists(eroot_env_d_path)
+ and os.path.samefile(broot_env_d_path, eroot_env_d_path)
+ ):
+ broot_env_d = (
+ getconfig(broot_env_d_path, tolerant=tolerant, expand=False) or {}
+ )
+ eroot_env_d = broot_env_d
+ else:
+ broot_env_d = (
+ getconfig(broot_env_d_path, tolerant=tolerant, expand=False) or {}
+ )
+ eroot_env_d = (
+ getconfig(eroot_env_d_path, tolerant=tolerant, expand=False) or {}
+ )
+
+ env_d = {}
+
+ for k in broot_env_d.keys() | eroot_env_d.keys():
+ if broot_only_variables_re.match(k):
+ if k in broot_env_d:
+ env_d[k] = broot_env_d[k]
+ elif eroot_only_variables_re.match(k):
+ if k in eroot_env_d:
+ env_d[k] = eroot_env_d[k]
+ else:
+ if k in eroot_env_d:
+ env_d[k] = eroot_env_d[k]
+ elif k in broot_env_d:
+ env_d[k] = broot_env_d[k]
+
+ return env_d
+
def _init_iuse(self):
self._iuse_effective = self._calc_iuse_effective()
self._iuse_implicit_match = _iuse_implicit_match_cache(self)
@@ -1297,7 +1332,7 @@ class config:
_("!!! Directory initialization failed: '%s'\n") % mydir,
noiselevel=-1,
)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
@property
def _keywords_manager(self):
@@ -1513,6 +1548,15 @@ class config:
noiselevel=-1,
)
+ binpkg_format = self.get("BINPKG_FORMAT")
+ if binpkg_format:
+ if binpkg_format not in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ writemsg(
+ "!!! BINPKG_FORMAT contains invalid or "
+ "unsupported format: %s" % binpkg_format,
+ noiselevel=-1,
+ )
+
binpkg_compression = self.get("BINPKG_COMPRESS")
if binpkg_compression:
try:
@@ -1520,10 +1564,21 @@ class config:
except KeyError as e:
writemsg(
"!!! BINPKG_COMPRESS contains invalid or "
- "unsupported compression method: %s" % e.args[0],
+ "unsupported compression method: %s\n" % e.args[0],
noiselevel=-1,
)
else:
+ if (
+ self.get(
+ f"BINPKG_COMPRESS_FLAGS_{binpkg_compression.upper()}", None
+ )
+ is not None
+ ):
+ compression["compress"] = compression["compress"].replace(
+ "${BINPKG_COMPRESS_FLAGS}",
+ f"${{BINPKG_COMPRESS_FLAGS_{binpkg_compression.upper()}}}",
+ )
+
try:
compression_binary = shlex_split(
portage.util.varexpand(compression["compress"], mydict=self)
@@ -1531,7 +1586,7 @@ class config:
except IndexError as e:
writemsg(
"!!! BINPKG_COMPRESS contains invalid or "
- "unsupported compression method: %s" % e.args[0],
+ "unsupported compression method: %s\n" % e.args[0],
noiselevel=-1,
)
else:
@@ -1539,7 +1594,7 @@ class config:
missing_package = compression["package"]
writemsg(
"!!! BINPKG_COMPRESS unsupported %s. "
- "Missing package: %s"
+ "Missing package: %s\n"
% (binpkg_compression, missing_package),
noiselevel=-1,
)
@@ -1614,7 +1669,6 @@ class config:
self.regenerate()
class _lazy_vars:
-
__slots__ = ("built_use", "settings", "values")
def __init__(self, built_use, settings):
@@ -1634,14 +1688,14 @@ class config:
if use is None:
use = frozenset(settings["PORTAGE_USE"].split())
- values[
- "ACCEPT_LICENSE"
- ] = settings._license_manager.get_prunned_accept_license(
- settings.mycpv,
- use,
- settings.get("LICENSE", ""),
- settings.get("SLOT"),
- settings.get("PORTAGE_REPO_NAME"),
+ values["ACCEPT_LICENSE"] = (
+ settings._license_manager.get_prunned_accept_license(
+ settings.mycpv,
+ use,
+ settings.get("LICENSE", ""),
+ settings.get("SLOT"),
+ settings.get("PORTAGE_REPO_NAME"),
+ )
)
values["PORTAGE_PROPERTIES"] = self._flatten("PROPERTIES", use, settings)
values["PORTAGE_RESTRICT"] = self._flatten("RESTRICT", use, settings)
@@ -1684,14 +1738,14 @@ class config:
def __getitem__(self, key):
prefix = key.lower() + "_"
prefix_len = len(prefix)
- expand_flags = set(
+ expand_flags = {
x[prefix_len:] for x in self._use if x[:prefix_len] == prefix
- )
+ }
var_split = self._use_expand_dict.get(key, "").split()
# Preserve the order of var_split because it can matter for things
# like LINGUAS.
var_split = [x for x in var_split if x in expand_flags]
- var_split.extend(expand_flags.difference(var_split))
+ var_split.extend(sorted(expand_flags.difference(var_split)))
has_wildcard = "*" in expand_flags
if has_wildcard:
var_split = [x for x in var_split if x != "*"]
@@ -2059,6 +2113,9 @@ class config:
"test" in restrict
and not "all" in allow_test
and not ("test_network" in properties and "network" in allow_test)
+ and not (
+ "test_privileged" in properties and "privileged" in allow_test
+ )
)
if restrict_test and "test" in self.features:
@@ -2122,7 +2179,7 @@ class config:
"fi; "
"[[ -n ${___PORTAGE_IUSE_HASH[$1]} ]]; "
"}"
- ) % " ".join('["%s"]=1' % x for x in portage_iuse)
+ ) % " ".join(f'["{x}"]=1' for x in portage_iuse)
else:
portage_iuse = self._get_implicit_iuse()
portage_iuse.update(explicit_iuse)
@@ -2149,7 +2206,9 @@ class config:
# "test" is in IUSE and USE=test is masked, so execution
# of src_test() probably is not reliable. Therefore,
# temporarily disable FEATURES=test just for this package.
- self["FEATURES"] = " ".join(x for x in self.features if x != "test")
+ self["FEATURES"] = " ".join(
+ x for x in sorted(self.features) if x != "test"
+ )
# Allow _* flags from USE_EXPAND wildcards to pass through here.
use.difference_update(
@@ -2164,7 +2223,7 @@ class config:
# Use the calculated USE flags to regenerate the USE_EXPAND flags so
# that they are consistent. For optimal performance, use slice
# comparison instead of startswith().
- use_expand_split = set(x.lower() for x in self.get("USE_EXPAND", "").split())
+ use_expand_split = {x.lower() for x in self.get("USE_EXPAND", "").split()}
lazy_use_expand = self._lazy_use_expand(
self,
unfiltered_use,
@@ -2175,7 +2234,7 @@ class config:
self._use_expand_dict,
)
- use_expand_iuses = dict((k, set()) for k in use_expand_split)
+ use_expand_iuses = {k: set() for k in use_expand_split}
for x in portage_iuse:
x_split = x.split("_")
if len(x_split) == 1:
@@ -2240,7 +2299,7 @@ class config:
if k in protected_keys or k in non_user_variables:
writemsg(
"!!! Illegal variable "
- + "'%s' assigned in '%s'\n" % (k, penvfile),
+ + f"'{k}' assigned in '{penvfile}'\n",
noiselevel=-1,
)
elif k in incrementals:
@@ -2639,14 +2698,13 @@ class config:
def reload(self):
"""Reload things like /etc/profile.env that can change during runtime."""
- env_d_filename = os.path.join(self["EROOT"], "etc", "profile.env")
self.configdict["env.d"].clear()
- env_d = getconfig(env_d_filename, tolerant=self._tolerant, expand=False)
- if env_d:
- # env_d will be None if profile.env doesn't exist.
- for k in self._env_d_blacklist:
- env_d.pop(k, None)
- self.configdict["env.d"].update(env_d)
+ env_d = self._get_env_d(
+ broot=self["BROOT"], eroot=self["EROOT"], tolerant=self._tolerant
+ )
+ for k in self._env_d_blacklist:
+ env_d.pop(k, None)
+ self.configdict["env.d"].update(env_d)
def regenerate(self, useonly=0, use_cache=None):
"""
@@ -2739,10 +2797,8 @@ class config:
myflags = set()
for mykey, incremental_list in increment_lists.items():
-
myflags.clear()
for mysplit in incremental_list:
-
for x in mysplit:
if x == "-*":
# "-*" is a special "minus" var that means "unset all settings".
@@ -2787,7 +2843,7 @@ class config:
use_expand_unprefixed = self.get("USE_EXPAND_UNPREFIXED", "").split()
- # In order to best accomodate the long-standing practice of
+ # In order to best accommodate the long-standing practice of
# setting default USE_EXPAND variables in the profile's
# make.defaults, we translate these variables into their
# equivalent USE flags so that useful incremental behavior
@@ -2842,7 +2898,6 @@ class config:
iuse = [x.lstrip("+-") for x in iuse.split()]
myflags = set()
for curdb in self.uvlist:
-
for k in use_expand_unprefixed:
v = curdb.get(k)
if v is None:
@@ -2991,9 +3046,9 @@ class config:
for k in use_expand:
prefix = k.lower() + "_"
prefix_len = len(prefix)
- expand_flags = set(
+ expand_flags = {
x[prefix_len:] for x in myflags if x[:prefix_len] == prefix
- )
+ }
var_split = use_expand_dict.get(k, "").split()
var_split = [x for x in var_split if x in expand_flags]
var_split.extend(sorted(expand_flags.difference(var_split)))
@@ -3098,7 +3153,6 @@ class config:
return ""
def _getitem(self, mykey):
-
if mykey in self._constant_keys:
# These two point to temporary values when
# portage plans to update itself.
@@ -3124,7 +3178,7 @@ class config:
return ":".join(value)
if mykey == "PORTAGE_GID":
- return "%s" % portage_gid
+ return f"{portage_gid}"
for d in self.lookuplist:
try:
@@ -3197,8 +3251,7 @@ class config:
"set a value; will be thrown away at reset() time"
if not isinstance(myvalue, str):
raise ValueError(
- "Invalid type being used as a value: '%s': '%s'"
- % (str(mykey), str(myvalue))
+ f"Invalid type being used as a value: '{str(mykey)}': '{str(myvalue)}'"
)
# Avoid potential UnicodeDecodeError exceptions later.
@@ -3289,17 +3342,12 @@ class config:
if not (src_like_phase and eapi_attrs.sysroot):
mydict.pop("ESYSROOT", None)
- if not (src_like_phase and eapi_attrs.broot):
+ if not eapi_attrs.broot:
mydict.pop("BROOT", None)
- # Prefix variables are supported beginning with EAPI 3, or when
- # force-prefix is in FEATURES, since older EAPIs would otherwise be
- # useless with prefix configurations. This brings compatibility with
- # the prefix branch of portage, which also supports EPREFIX for all
- # EAPIs (for obvious reasons).
if phase == "depend" or (
- "force-prefix" not in self.features
- and eapi is not None
+ # Prefix variables are supported beginning with EAPI 3.
+ eapi is not None
and not eapi_supports_prefix(eapi)
):
mydict.pop("ED", None)
@@ -3307,16 +3355,12 @@ class config:
mydict.pop("EROOT", None)
mydict.pop("ESYSROOT", None)
- if (
- phase
- not in (
- "pretend",
- "setup",
- "preinst",
- "postinst",
- )
- or not eapi_exports_replace_vars(eapi)
- ):
+ if phase not in (
+ "pretend",
+ "setup",
+ "preinst",
+ "postinst",
+ ) or not eapi_exports_replace_vars(eapi):
mydict.pop("REPLACING_VERSIONS", None)
if phase not in ("prerm", "postrm") or not eapi_exports_replace_vars(eapi):
@@ -3328,20 +3372,17 @@ class config:
mydict["EBUILD_PHASE_FUNC"] = phase_func
if eapi_attrs.posixish_locale:
- split_LC_ALL(mydict)
- mydict["LC_COLLATE"] = "C"
- # check_locale() returns None when check can not be executed.
- if check_locale(silent=True, env=mydict) is False:
- # try another locale
- for l in ("C.UTF-8", "en_US.UTF-8", "en_GB.UTF-8", "C"):
- mydict["LC_CTYPE"] = l
- if check_locale(silent=True, env=mydict):
- # TODO: output the following only once
- # writemsg(_("!!! LC_CTYPE unsupported, using %s instead\n")
- # % mydict["LC_CTYPE"])
- break
- else:
- raise AssertionError("C locale did not pass the test!")
+ if mydict.get("LC_ALL"):
+ # Sometimes this method is called for processes
+ # that are not ebuild phases, so only raise
+ # AssertionError for actual ebuild phases.
+ if phase and phase not in ("clean", "cleanrm", "fetch"):
+ raise AssertionError(
+ f"LC_ALL={mydict['LC_ALL']} for posixish locale. It seems that split_LC_ALL was not called for phase {phase}?"
+ )
+ elif "LC_ALL" in mydict:
+ # Delete placeholder from split_LC_ALL.
+ del mydict["LC_ALL"]
if not eapi_attrs.exports_PORTDIR:
mydict.pop("PORTDIR", None)
diff --git a/lib/portage/package/ebuild/deprecated_profile_check.py b/lib/portage/package/ebuild/deprecated_profile_check.py
index 19bea1903..ce6476928 100644
--- a/lib/portage/package/ebuild/deprecated_profile_check.py
+++ b/lib/portage/package/ebuild/deprecated_profile_check.py
@@ -3,7 +3,6 @@
__all__ = ["deprecated_profile_check"]
-import io
import portage
from portage import os, _encodings, _unicode_encode
@@ -39,11 +38,10 @@ def deprecated_profile_check(settings=None):
if not os.access(deprecated_profile_file, os.R_OK):
return
- with io.open(
+ with open(
_unicode_encode(
deprecated_profile_file, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
diff --git a/lib/portage/package/ebuild/digestcheck.py b/lib/portage/package/ebuild/digestcheck.py
index 3fe64550c..cbd57fb58 100644
--- a/lib/portage/package/ebuild/digestcheck.py
+++ b/lib/portage/package/ebuild/digestcheck.py
@@ -80,7 +80,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
except DigestException as e:
eout.eend(1)
writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
- writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
+ writemsg(f"!!! {e.value[0]}\n", noiselevel=-1)
writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
diff --git a/lib/portage/package/ebuild/digestgen.py b/lib/portage/package/ebuild/digestgen.py
index 3a3c92a3a..36d979fff 100644
--- a/lib/portage/package/ebuild/digestgen.py
+++ b/lib/portage/package/ebuild/digestgen.py
@@ -56,7 +56,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
for myfile in fetchlist_dict[cpv]:
distfiles_map.setdefault(myfile, []).append(cpv)
except InvalidDependString as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
del e
return 0
mytree = os.path.dirname(os.path.dirname(mysettings["O"]))
@@ -171,7 +171,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
# digest does not match.
cmd = colorize(
"INFORM",
- "ebuild --force %s manifest" % os.path.basename(myebuild),
+ f"ebuild --force {os.path.basename(myebuild)} manifest",
)
writemsg(
(
@@ -181,7 +181,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
)
% myfile
)
- + "!!! %s\n" % cmd,
+ + f"!!! {cmd}\n",
noiselevel=-1,
)
return 0
@@ -199,7 +199,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
)
return 0
except PortagePackageException as e:
- writemsg(("!!! %s\n") % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return 0
try:
mf.write(sign=False)
@@ -227,7 +227,7 @@ def digestgen(myarchives=None, mysettings=None, myportdb=None):
pv = pkg_key.split("/")[1]
for filename in auto_assumed:
if filename in fetchlist:
- writemsg_stdout(" %s::%s\n" % (pv, filename))
+ writemsg_stdout(f" {pv}::{filename}\n")
return 1
finally:
portage._doebuild_manifest_exempt_depend -= 1
diff --git a/lib/portage/package/ebuild/doebuild.py b/lib/portage/package/ebuild/doebuild.py
index 9650a8444..6691db4e9 100644
--- a/lib/portage/package/ebuild/doebuild.py
+++ b/lib/portage/package/ebuild/doebuild.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2021 Gentoo Authors
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["doebuild", "doebuild_environment", "spawn", "spawnebuild"]
@@ -7,7 +7,6 @@ import grp
import gzip
import errno
import fnmatch
-import io
from itertools import chain
import logging
import os as _os
@@ -20,6 +19,7 @@ import sys
import tempfile
from textwrap import wrap
import time
+from typing import Union
import warnings
import zlib
@@ -43,6 +43,7 @@ portage.proxy.lazyimport.lazyimport(
"portage.util._async.SchedulerInterface:SchedulerInterface",
"portage.util._eventloop.global_event_loop:global_event_loop",
"portage.util.ExtractKernelVersion:ExtractKernelVersion",
+ "_emerge.EbuildPhase:_setup_locale",
)
from portage import (
@@ -66,6 +67,7 @@ from portage.const import (
INVALID_ENV_FILE,
MISC_SH_BINARY,
PORTAGE_PYM_PACKAGES,
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
)
from portage.data import portage_gid, portage_uid, secpass, uid, userpriv_groups
from portage.dbapi.porttree import _parse_uri_map
@@ -76,11 +78,11 @@ from portage.dep import (
paren_enclose,
use_reduce,
)
+from portage.dep.libc import find_libc_deps
from portage.eapi import (
eapi_exports_KV,
eapi_exports_merge_type,
eapi_exports_replace_vars,
- eapi_exports_REPOSITORY,
eapi_has_required_use,
eapi_has_src_prepare_and_src_configure,
eapi_has_pkg_pretend,
@@ -111,14 +113,15 @@ from portage.util import (
writemsg_stdout,
write_atomic,
)
-from portage.util.cpuinfo import get_cpu_count
+from portage.util.cpuinfo import get_cpu_count, makeopts_to_job_count
from portage.util.lafilefixer import rewrite_lafile
from portage.util.compression_probe import _compressors
from portage.util.futures import asyncio
from portage.util.futures.executor.fork import ForkExecutor
from portage.util.path import first_existing
from portage.util.socks5 import get_socks5_proxy
-from portage.versions import _pkgsplit
+from portage.util._dyn_libs.dyn_libs import check_dyn_libs_inconsistent
+from portage.versions import _pkgsplit, pkgcmp
from _emerge.BinpkgEnvExtractor import BinpkgEnvExtractor
from _emerge.EbuildBuildDir import EbuildBuildDir
from _emerge.EbuildPhase import EbuildPhase
@@ -126,7 +129,6 @@ from _emerge.EbuildSpawnProcess import EbuildSpawnProcess
from _emerge.Package import Package
from _emerge.RootConfig import RootConfig
-
_unsandboxed_phases = frozenset(
[
"clean",
@@ -210,6 +212,7 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
kwargs["pidns"] = (
"pid-sandbox" in settings.features and phase not in _global_pid_phases
)
+ kwargs["warn_on_large_env"] = "warn-on-large-env" in settings.features
if phase == "depend":
kwargs["droppriv"] = "userpriv" in settings.features
@@ -228,7 +231,7 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
else:
ebuild_sh_arg = phase
- cmd = "%s %s" % (
+ cmd = "{} {}".format(
_shell_quote(
os.path.join(
settings["PORTAGE_BIN_PATH"], os.path.basename(EBUILD_SH_BINARY)
@@ -237,6 +240,9 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
ebuild_sh_arg,
)
+ if phase == "test" and "test_privileged" in settings["PORTAGE_PROPERTIES"].split():
+ kwargs["droppriv"] = False
+
settings["EBUILD_PHASE"] = phase
try:
return spawn(cmd, settings, **kwargs)
@@ -245,17 +251,23 @@ def _doebuild_spawn(phase, settings, actionmap=None, **kwargs):
def _spawn_phase(
- phase, settings, actionmap=None, returnpid=False, logfile=None, **kwargs
+ phase,
+ settings,
+ actionmap=None,
+ returnpid=False,
+ returnproc=False,
+ logfile=None,
+ **kwargs,
):
-
- if returnpid:
+ if returnproc or returnpid:
return _doebuild_spawn(
phase,
settings,
actionmap=actionmap,
returnpid=returnpid,
+ returnproc=returnproc,
logfile=logfile,
- **kwargs
+ **kwargs,
)
# The logfile argument is unused here, since EbuildPhase uses
@@ -266,7 +278,7 @@ def _spawn_phase(
phase=phase,
scheduler=SchedulerInterface(asyncio._safe_loop()),
settings=settings,
- **kwargs
+ **kwargs,
)
ebuild_phase.start()
@@ -285,20 +297,8 @@ def _doebuild_path(settings, eapi=None):
if portage_bin_path[0] != portage.const.PORTAGE_BIN_PATH:
# Add a fallback path for restarting failed builds (bug 547086)
portage_bin_path.append(portage.const.PORTAGE_BIN_PATH)
- prerootpath = [x for x in settings.get("PREROOTPATH", "").split(":") if x]
- rootpath = [x for x in settings.get("ROOTPATH", "").split(":") if x]
- rootpath_set = frozenset(rootpath)
- overrides = [
- x for x in settings.get("__PORTAGE_TEST_PATH_OVERRIDE", "").split(":") if x
- ]
- prefixes = []
- # settings["EPREFIX"] should take priority over portage.const.EPREFIX
- if portage.const.EPREFIX != settings["EPREFIX"] and settings["ROOT"] == os.sep:
- prefixes.append(settings["EPREFIX"])
- prefixes.append(portage.const.EPREFIX)
-
- path = overrides
+ path = [x for x in settings.get("__PORTAGE_TEST_PATH_OVERRIDE", "").split(":") if x]
if "xattr" in settings.features:
for x in portage_bin_path:
@@ -318,24 +318,20 @@ def _doebuild_path(settings, eapi=None):
for x in portage_bin_path:
path.append(os.path.join(x, "ebuild-helpers"))
- path.extend(prerootpath)
-
- for prefix in prefixes:
- prefix = prefix if prefix else "/"
- for x in (
- "usr/local/sbin",
- "usr/local/bin",
- "usr/sbin",
- "usr/bin",
- "sbin",
- "bin",
- ):
- # Respect order defined in ROOTPATH
- x_abs = os.path.join(prefix, x)
- if x_abs not in rootpath_set:
- path.append(x_abs)
- path.extend(rootpath)
+ # If PATH is set in env.d, ignore PATH from the calling environment.
+ # This allows packages to update our PATH as they get installed.
+ if "PATH" in settings.configdict["env.d"]:
+ settings.configdict["env"].pop("PATH", None)
+
+ if "PATH" in settings:
+ pathset = set(path)
+ for p in settings["PATH"].split(":"):
+ # Avoid duplicate entries.
+ if p not in pathset:
+ path.append(p)
+ pathset.add(p)
+
settings["PATH"] = ":".join(path)
@@ -460,15 +456,11 @@ def doebuild_environment(
mysettings["PN"] = mysplit[0]
mysettings["PV"] = mysplit[1]
mysettings["PR"] = mysplit[2]
+ mysettings["PVR"] = mypv[len(mysplit[0]) + 1 :]
if noiselimit < 0:
mysettings["PORTAGE_QUIET"] = "1"
- if mysplit[2] == "r0":
- mysettings["PVR"] = mysplit[1]
- else:
- mysettings["PVR"] = mysplit[1] + "-" + mysplit[2]
-
# All temporary directories should be subdirectories of
# $PORTAGE_TMPDIR/portage, since it's common for /tmp and /var/tmp
# to be mounted with the "noexec" option (see bug #346899).
@@ -493,7 +485,7 @@ def doebuild_environment(
mysettings["SANDBOX_LOG"] = os.path.join(mysettings["T"], "sandbox.log")
mysettings["FILESDIR"] = os.path.join(settings["PORTAGE_BUILDDIR"], "files")
- # Prefix forward compatability
+ # Prefix forward compatibility
eprefix_lstrip = mysettings["EPREFIX"].lstrip(os.sep)
mysettings["ED"] = (
os.path.join(mysettings["D"], eprefix_lstrip).rstrip(os.sep) + os.sep
@@ -538,14 +530,6 @@ def doebuild_environment(
if not eapi_is_supported(eapi):
raise UnsupportedAPIException(mycpv, eapi)
- if (
- eapi_exports_REPOSITORY(eapi)
- and "PORTAGE_REPO_NAME" in mysettings.configdict["pkg"]
- ):
- mysettings.configdict["pkg"]["REPOSITORY"] = mysettings.configdict["pkg"][
- "PORTAGE_REPO_NAME"
- ]
-
if mydo != "depend":
if hasattr(mydbapi, "getFetchMap") and (
"A" not in mysettings.configdict["pkg"]
@@ -618,6 +602,10 @@ def doebuild_environment(
nproc = get_cpu_count()
if nproc:
mysettings["MAKEOPTS"] = "-j%d" % (nproc)
+ if "GNUMAKEFLAGS" not in mysettings and "MAKEFLAGS" not in mysettings:
+ mysettings["GNUMAKEFLAGS"] = (
+ f"--load-average {nproc} --output-sync=line"
+ )
if not eapi_exports_KV(eapi):
# Discard KV for EAPIs that don't support it. Cached KV is restored
@@ -649,27 +637,52 @@ def doebuild_environment(
mysettings["KV"] = ""
mysettings.backup_changes("KV")
+ binpkg_format = mysettings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+ if binpkg_format not in portage.const.SUPPORTED_GENTOO_BINPKG_FORMATS:
+ writemsg(
+ "!!! BINPKG_FORMAT contains invalid or "
+ "unsupported format: %s" % binpkg_format,
+ noiselevel=-1,
+ )
+ binpkg_format = "xpak"
+ mysettings["BINPKG_FORMAT"] = binpkg_format
+
binpkg_compression = mysettings.get("BINPKG_COMPRESS", "bzip2")
try:
compression = _compressors[binpkg_compression]
except KeyError as e:
if binpkg_compression:
writemsg(
- "Warning: Invalid or unsupported compression method: %s\n"
- % e.args[0]
+ f"Warning: Invalid or unsupported compression method: {e.args[0]}\n"
)
else:
# Empty BINPKG_COMPRESS disables compression.
mysettings["PORTAGE_COMPRESSION_COMMAND"] = "cat"
else:
+ if (
+ settings.get(
+ f"BINPKG_COMPRESS_FLAGS_{binpkg_compression.upper()}", None
+ )
+ is not None
+ ):
+ compression["compress"] = compression["compress"].replace(
+ "${BINPKG_COMPRESS_FLAGS}",
+ f"${{BINPKG_COMPRESS_FLAGS_{binpkg_compression.upper()}}}",
+ )
+
try:
+ compression_binary = compression["compress"].replace(
+ "{JOBS}",
+ str(makeopts_to_job_count(mysettings.get("MAKEOPTS", "1"))),
+ )
compression_binary = shlex_split(
- varexpand(compression["compress"], mydict=settings)
+ varexpand(compression_binary, mydict=settings)
)[0]
except IndexError as e:
writemsg(
- "Warning: Invalid or unsupported compression method: %s\n"
- % e.args[0]
+ f"Warning: Invalid or unsupported compression method: {e.args[0]}\n"
)
else:
if find_binary(compression_binary) is None:
@@ -679,9 +692,13 @@ def doebuild_environment(
% (binpkg_compression, missing_package)
)
else:
+ compression_binary = compression["compress"].replace(
+ "{JOBS}",
+ str(makeopts_to_job_count(mysettings.get("MAKEOPTS", "1"))),
+ )
cmd = [
varexpand(x, mydict=settings)
- for x in shlex_split(compression["compress"])
+ for x in shlex_split(compression_binary)
]
# Filter empty elements
cmd = [x for x in cmd if x != ""]
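
Two rewrites happen above before the compressor is spawned: when an algorithm-specific `BINPKG_COMPRESS_FLAGS_<ALGO>` variable is set, it replaces the generic `${BINPKG_COMPRESS_FLAGS}` reference, and the literal `{JOBS}` token becomes the MAKEOPTS-derived job count. A standalone sketch of the same substitution chain (`string.Template` stands in for portage's varexpand):

```python
from string import Template

def build_compress_cmd(spec, settings, binpkg_compression, jobs):
    # Prefer per-algorithm flags, e.g. BINPKG_COMPRESS_FLAGS_ZSTD.
    algo_var = f"BINPKG_COMPRESS_FLAGS_{binpkg_compression.upper()}"
    if settings.get(algo_var) is not None:
        spec = spec.replace("${BINPKG_COMPRESS_FLAGS}", f"${{{algo_var}}}")
    # Substitute the parallel job count before variable expansion.
    spec = spec.replace("{JOBS}", str(jobs))
    return Template(spec).safe_substitute(settings).split()

settings = {"BINPKG_COMPRESS_FLAGS_ZSTD": "-19"}
print(build_compress_cmd("zstd ${BINPKG_COMPRESS_FLAGS} -T{JOBS}", settings, "zstd", 8))
# -> ['zstd', '-19', '-T8']
```
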
@@ -720,7 +737,8 @@ def doebuild(
prev_mtimes=None,
fd_pipes=None,
returnpid=False,
-):
+ returnproc=False,
+) -> Union[int, portage.process.MultiprocessingProcess, list[int]]:
"""
Wrapper function that invokes specific ebuild phases through the spawning
of ebuild.sh
@@ -757,9 +775,15 @@ def doebuild(
for example.
@type fd_pipes: Dictionary
@param returnpid: Return a list of process IDs for a successful spawn, or
- an integer value if spawn is unsuccessful. NOTE: This requires the
- caller clean up all returned PIDs.
+ an integer value if spawn is unsuccessful. This parameter is supported
+        only when mydo is "depend". NOTE: This requires the caller clean
+ up all returned PIDs.
@type returnpid: Boolean
+ @param returnproc: Return a MultiprocessingProcess instance for a successful spawn, or
+ an integer value if spawn is unsuccessful. This parameter is supported
+        only when mydo is "depend". NOTE: This requires the caller to
+ asynchronously wait for the MultiprocessingProcess instance.
+ @type returnproc: Boolean
@rtype: Boolean
@return:
1. 0 for success
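
For callers, the practical difference is that returnproc=True hands back a single awaitable process object instead of a PID list. A hypothetical driver for the only supported phase, "depend", assuming an event loop is already running and settings/dbapi were prepared as usual:

```python
import portage

async def gather_depend_metadata(ebuild_path, settings, db):
    result = portage.doebuild(
        ebuild_path,
        "depend",
        settings=settings,
        mydbapi=db,
        tree="porttree",
        returnproc=True,
    )
    if isinstance(result, int):
        return result  # spawn failed; an exit code was returned instead
    # The caller must asynchronously reap the MultiprocessingProcess.
    return await result.wait()
```
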
@@ -852,7 +876,7 @@ def doebuild(
if mydo not in validcommands:
validcommands.sort()
writemsg(
- "!!! doebuild: '%s' is not one of the following valid commands:" % mydo,
+ f"!!! doebuild: '{mydo}' is not one of the following valid commands:",
noiselevel=-1,
)
for vcount in range(len(validcommands)):
@@ -862,26 +886,32 @@ def doebuild(
writemsg("\n", noiselevel=-1)
return 1
- if returnpid and mydo != "depend":
+ if (returnproc or returnpid) and mydo != "depend":
# This case is not supported, since it bypasses the EbuildPhase class
# which implements important functionality (including post phase hooks
# and IPC for things like best/has_version and die).
+ if returnproc:
+ raise NotImplementedError(f"returnproc not implemented for phase {mydo}")
warnings.warn(
"portage.doebuild() called "
"with returnpid parameter enabled. This usage will "
"not be supported in the future.",
- DeprecationWarning,
+ UserWarning,
stacklevel=2,
)
+ elif returnpid:
+ warnings.warn(
+ "The portage.doebuild() returnpid parameter is deprecated and replaced by returnproc",
+ UserWarning,
+ stacklevel=1,
+ )
if mydo == "fetchall":
fetchall = 1
mydo = "fetch"
if mydo not in clean_phases and not os.path.exists(myebuild):
- writemsg(
- "!!! doebuild: %s not found for %s\n" % (myebuild, mydo), noiselevel=-1
- )
+ writemsg(f"!!! doebuild: {myebuild} not found for {mydo}\n", noiselevel=-1)
return 1
global _doebuild_manifest_cache
@@ -951,7 +981,7 @@ def doebuild(
except DigestException as e:
out = portage.output.EOutput()
out.eerror(_("Digest verification failed:"))
- out.eerror("%s" % e.value[0])
+ out.eerror(f"{e.value[0]}")
out.eerror(_("Reason: %s") % e.value[1])
out.eerror(_("Got: %s") % e.value[2])
out.eerror(_("Expected: %s") % e.value[3])
@@ -962,7 +992,6 @@ def doebuild(
return 1
if mf is not _doebuild_manifest_cache and not mf.allow_missing:
-
# Make sure that all of the ebuilds are
# actually listed in the Manifest.
for f in os.listdir(pkgdir):
@@ -1006,6 +1035,13 @@ def doebuild(
myebuild, mydo, myroot, mysettings, debug, use_cache, mydbapi
)
+    # For returnproc or returnpid, assume that the event loop is running,
+    # so we can't run the event loop to call _setup_locale here; we have
+    # to assume the caller took care of it (otherwise config.environ()
+    # will raise AssertionError).
+ if not (returnproc or returnpid):
+ asyncio.run(_setup_locale(mysettings))
+
if mydo in clean_phases:
builddir_lock = None
if not returnpid and "PORTAGE_BUILDDIR_LOCKED" not in mysettings:
@@ -1025,14 +1061,17 @@ def doebuild(
# get possible slot information from the deps file
if mydo == "depend":
- if not returnpid:
- raise TypeError("returnpid must be True for depend phase")
+ if not (returnproc or returnpid):
+ raise TypeError("returnproc or returnpid must be True for depend phase")
return _spawn_phase(
- mydo, mysettings, fd_pipes=fd_pipes, returnpid=returnpid
+ mydo,
+ mysettings,
+ fd_pipes=fd_pipes,
+ returnpid=returnpid,
+ returnproc=returnproc,
)
if mydo == "nofetch":
-
if returnpid:
writemsg(
"!!! doebuild: %s\n"
@@ -1045,7 +1084,6 @@ def doebuild(
)
if tree == "porttree":
-
if not returnpid:
# Validate dependency metadata here to ensure that ebuilds with
# invalid data are never installed via the ebuild command. Skip
@@ -1121,7 +1159,7 @@ def doebuild(
newstuff = True
else:
for x in alist:
- writemsg_stdout(">>> Checking %s's mtime...\n" % x)
+ writemsg_stdout(f">>> Checking {x}'s mtime...\n")
try:
x_st = os.stat(os.path.join(mysettings["DISTDIR"], x))
except OSError:
@@ -1224,12 +1262,12 @@ def doebuild(
else:
vardb = vartree.dbapi
cpv = mysettings.mycpv
- cpv_slot = "%s%s%s" % (cpv.cp, portage.dep._slot_separator, cpv.slot)
+ cpv_slot = f"{cpv.cp}{portage.dep._slot_separator}{cpv.slot}"
mysettings["REPLACING_VERSIONS"] = " ".join(
- set(
+ {
portage.versions.cpv_getversion(match)
for match in vardb.match(cpv_slot) + vardb.match("=" + cpv)
- )
+ }
)
# if any of these are being called, handle them -- running them out of
@@ -1276,7 +1314,6 @@ def doebuild(
)
)
if need_distfiles:
-
src_uri = mysettings.configdict["pkg"].get("SRC_URI")
if src_uri is None:
(src_uri,) = mydbapi.aux_get(
@@ -1293,7 +1330,7 @@ def doebuild(
alist = _parse_uri_map(mysettings.mycpv, metadata, use=use)
aalist = _parse_uri_map(mysettings.mycpv, metadata)
except InvalidDependString as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(_("!!! Invalid SRC_URI for '%s'.\n") % mycpv, noiselevel=-1)
del e
return 1
@@ -1307,47 +1344,21 @@ def doebuild(
if mf is not None:
dist_digests = mf.getTypeDigests("DIST")
- def _fetch_subprocess(fetchme, mysettings, listonly, dist_digests):
- # For userfetch, drop privileges for the entire fetch call, in
- # order to handle DISTDIR on NFS with root_squash for bug 601252.
- if _want_userfetch(mysettings):
- _drop_privs_userfetch(mysettings)
-
- return fetch(
- fetchme,
- mysettings,
- listonly=listonly,
- fetchonly=fetchonly,
- allow_missing_digests=False,
- digests=dist_digests,
- )
-
loop = asyncio._safe_loop()
- if loop.is_running():
- # Called by EbuildFetchonly for emerge --pretend --fetchonly.
- success = fetch(
+ success = loop.run_until_complete(
+ loop.run_in_executor(
+ ForkExecutor(loop=loop),
+ _fetch_subprocess,
fetchme,
mysettings,
- listonly=listonly,
- fetchonly=fetchonly,
- allow_missing_digests=False,
- digests=dist_digests,
- )
- else:
- success = loop.run_until_complete(
- loop.run_in_executor(
- ForkExecutor(loop=loop),
- _fetch_subprocess,
- fetchme,
- mysettings,
- listonly,
- dist_digests,
- )
+ listonly,
+ dist_digests,
+ fetchonly,
)
+ )
if not success:
# Since listonly mode is called by emerge --pretend in an
- # asynchronous context, spawn_nofetch would trigger event loop
- # recursion here, therefore delegate execution of pkg_nofetch
+ # asynchronous context, execution of pkg_nofetch is delegated
# to the caller (bug 657360).
if not listonly:
spawn_nofetch(
@@ -1442,7 +1453,7 @@ def doebuild(
)
portage.util.ensure_dirs(parent_dir)
if not os.access(parent_dir, os.W_OK):
- raise PermissionDenied("access('%s', os.W_OK)" % parent_dir)
+ raise PermissionDenied(f"access('{parent_dir}', os.W_OK)")
retval = spawnebuild(
mydo,
actionmap,
@@ -1459,19 +1470,20 @@ def doebuild(
if retval == os.EX_OK:
if mydo == "package" and bintree is not None:
pkg = bintree.inject(
- mysettings.mycpv, filename=mysettings["PORTAGE_BINPKG_TMPFILE"]
+ mysettings.mycpv,
+ current_pkg_path=mysettings["PORTAGE_BINPKG_TMPFILE"],
)
if pkg is not None:
infoloc = os.path.join(
mysettings["PORTAGE_BUILDDIR"], "build-info"
)
build_info = {
- "BINPKGMD5": "%s\n" % pkg._metadata["MD5"],
+ "BINPKGMD5": f"{pkg._metadata['MD5']}\n",
}
if pkg.build_id is not None:
- build_info["BUILD_ID"] = "%s\n" % pkg.build_id
+ build_info["BUILD_ID"] = f"{pkg.build_id}\n"
for k, v in build_info.items():
- with io.open(
+ with open(
_unicode_encode(
os.path.join(infoloc, k),
encoding=_encodings["fs"],
@@ -1569,7 +1581,6 @@ def doebuild(
return retval
finally:
-
if builddir_lock is not None:
builddir_lock.scheduler.run_until_complete(builddir_lock.async_unlock())
if tmpdir:
@@ -1591,6 +1602,22 @@ def doebuild(
portage._doebuild_manifest_exempt_depend -= 1
+def _fetch_subprocess(fetchme, mysettings, listonly, dist_digests, fetchonly):
+ # For userfetch, drop privileges for the entire fetch call, in
+ # order to handle DISTDIR on NFS with root_squash for bug 601252.
+ if _want_userfetch(mysettings):
+ _drop_privs_userfetch(mysettings)
+
+ return fetch(
+ fetchme,
+ mysettings,
+ listonly=listonly,
+ fetchonly=fetchonly,
+ allow_missing_digests=False,
+ digests=dist_digests,
+ )
+
+
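
Hoisting `_fetch_subprocess` to module level makes it picklable, and the fetch now always runs in a forked worker via `ForkExecutor` rather than branching on whether a loop is already running. The executor pattern in isolation, with a stand-in task since `fetch()` needs a full portage config:

```python
import os

from portage.util.futures import asyncio
from portage.util.futures.executor.fork import ForkExecutor

def _work(n):
    # Runs in a forked child; privileged setup (like the userfetch
    # privilege drop above) would happen here.
    return os.getpid(), n * n

loop = asyncio._safe_loop()
pid, result = loop.run_until_complete(
    loop.run_in_executor(ForkExecutor(loop=loop), _work, 7)
)
print(f"child {pid} computed {result}")
```
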
def _check_temp_dir(settings):
if "PORTAGE_TMPDIR" not in settings or not os.path.isdir(
settings["PORTAGE_TMPDIR"]
@@ -1612,7 +1639,22 @@ def _check_temp_dir(settings):
# for those people.
checkdir = first_existing(os.path.join(settings["PORTAGE_TMPDIR"], "portage"))
- if not os.access(checkdir, os.W_OK):
+ try:
+ with tempfile.NamedTemporaryFile(prefix="exectest-", dir=checkdir) as fd:
+ os.chmod(fd.name, 0o755)
+ if not os.access(fd.name, os.X_OK):
+ writemsg(
+ _(
+ "Can not execute files in %s\n"
+ "Likely cause is that you've mounted it with one of the\n"
+ "following mount options: 'noexec', 'user', 'users'\n\n"
+ "Please make sure that portage can execute files in this directory.\n"
+ )
+ % checkdir,
+ noiselevel=-1,
+ )
+ return 1
+ except PermissionError:
writemsg(
_(
"%s is not writable.\n"
@@ -1623,21 +1665,6 @@ def _check_temp_dir(settings):
)
return 1
- with tempfile.NamedTemporaryFile(prefix="exectest-", dir=checkdir) as fd:
- os.chmod(fd.name, 0o755)
- if not os.access(fd.name, os.X_OK):
- writemsg(
- _(
- "Can not execute files in %s\n"
- "Likely cause is that you've mounted it with one of the\n"
- "following mount options: 'noexec', 'user', 'users'\n\n"
- "Please make sure that portage can execute files in this directory.\n"
- )
- % checkdir,
- noiselevel=-1,
- )
- return 1
-
return os.EX_OK
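
The reordering above folds both checks into one probe: a `PermissionError` while creating the test file means the directory is unwritable, while a file that exists but fails `os.access(..., os.X_OK)` points at a `noexec`-style mount. The probe on its own:

```python
import os
import tempfile

def probe_tmpdir(path):
    """Classify a tmpdir candidate as 'ok', 'noexec', or 'unwritable'."""
    try:
        with tempfile.NamedTemporaryFile(prefix="exectest-", dir=path) as fd:
            os.chmod(fd.name, 0o755)
            if not os.access(fd.name, os.X_OK):
                # Created but not executable: likely a noexec mount.
                return "noexec"
    except PermissionError:
        return "unwritable"
    return "ok"

print(probe_tmpdir(tempfile.gettempdir()))
```
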
@@ -1783,10 +1810,17 @@ def _spawn_actionmap(settings):
def _validate_deps(mysettings, myroot, mydo, mydbapi):
-
- invalid_dep_exempt_phases = set(["clean", "cleanrm", "help", "prerm", "postrm"])
+ invalid_dep_exempt_phases = {"clean", "cleanrm", "help", "prerm", "postrm"}
all_keys = set(Package.metadata_keys)
all_keys.add("SRC_URI")
+ # Since configdict["pkg"]["USE"] may contain package.use settings
+ # from config.setcpv, it is inappropriate to use here (bug 675748),
+ # so discard it. This is only an issue because configdict["pkg"] is
+ # a sub-optimal place to extract metadata from. This issue does not
+ # necessarily indicate a flaw in the Package constructor, since
+ # passing in precalculated USE can be valid for things like
+ # autounmask USE changes.
+ all_keys.discard("USE")
all_keys = tuple(all_keys)
metadata = mysettings.configdict["pkg"]
if all(k in metadata for k in ("PORTAGE_REPO_NAME", "SRC_URI")):
@@ -1812,6 +1846,10 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
root_config = RootConfig(mysettings, {"porttree": FakeTree(mydbapi)}, None)
+ # A USE calculation from setcpv should always be available here because
+ # mysettings.mycpv is not None, so use it to prevent redundant setcpv calls.
+ metadata["USE"] = mysettings["PORTAGE_USE"]
+
pkg = Package(
built=False,
cpv=mysettings.mycpv,
@@ -1824,7 +1862,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
if pkg.invalid:
for k, v in pkg.invalid.items():
for msg in v:
- msgs.append(" %s\n" % (msg,))
+ msgs.append(f" {msg}\n")
if msgs:
portage.util.writemsg_level(
@@ -1858,7 +1896,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
),
noiselevel=-1,
)
- writemsg(" %s\n" % reduced_noise, noiselevel=-1)
+ writemsg(f" {reduced_noise}\n", noiselevel=-1)
normalized_required_use = " ".join(pkg._metadata["REQUIRED_USE"].split())
if reduced_noise != normalized_required_use:
writemsg(
@@ -1870,7 +1908,7 @@ def _validate_deps(mysettings, myroot, mydo, mydbapi):
noiselevel=-1,
)
writemsg(
- " %s\n" % human_readable_required_use(normalized_required_use),
+ f" {human_readable_required_use(normalized_required_use)}\n",
noiselevel=-1,
)
writemsg("\n", noiselevel=-1)
@@ -1895,7 +1933,7 @@ def spawn(
ipc=True,
mountns=False,
pidns=False,
- **keywords
+ **keywords,
):
"""
Spawn a subprocess with extra portage-specific options.
@@ -1904,7 +1942,7 @@ def spawn(
    Sandbox: Sandbox means the spawned process will be limited in its ability to
read and write files (normally this means it is restricted to ${D}/)
SElinux Sandbox: Enables sandboxing on SElinux
- Reduced Privileges: Drops privilages such that the process runs as portage:portage
+ Reduced Privileges: Drops privileges such that the process runs as portage:portage
instead of as root.
Notes: os.system cannot be used because it messes with signal handling. Instead we
@@ -2067,9 +2105,9 @@ def spawn(
free = True
if mysettings.mycpv is not None:
- keywords["opt_name"] = "[%s]" % mysettings.mycpv
+ keywords["opt_name"] = f"[{mysettings.mycpv}]"
else:
- keywords["opt_name"] = "[%s/%s]" % (
+ keywords["opt_name"] = "[{}/{}]".format(
mysettings.get("CATEGORY", ""),
mysettings.get("PF", ""),
)
@@ -2094,7 +2132,7 @@ def spawn(
mysettings.configdict["env"]["LOGNAME"] = logname
try:
- if keywords.get("returnpid"):
+ if keywords.get("returnpid") or keywords.get("returnproc"):
return spawn_func(mystring, env=mysettings.environ(), **keywords)
proc = EbuildSpawnProcess(
@@ -2103,7 +2141,7 @@ def spawn(
scheduler=SchedulerInterface(asyncio._safe_loop()),
spawn_func=spawn_func,
settings=mysettings,
- **keywords
+ **keywords,
)
proc.start()
@@ -2133,7 +2171,6 @@ def spawnebuild(
fd_pipes=None,
returnpid=False,
):
-
if returnpid:
warnings.warn(
"portage.spawnebuild() called "
@@ -2171,7 +2208,7 @@ def spawnebuild(
if not (mydo == "install" and "noauto" in mysettings.features):
check_file = os.path.join(
- mysettings["PORTAGE_BUILDDIR"], ".%sed" % mydo.rstrip("e")
+ mysettings["PORTAGE_BUILDDIR"], f".{mydo.rstrip('e')}ed"
)
if os.path.exists(check_file):
writemsg_stdout(
@@ -2262,7 +2299,7 @@ def _check_build_log(mysettings, out=None):
_unicode_encode(logfile, encoding=_encodings["fs"], errors="strict"),
mode="rb",
)
- except EnvironmentError:
+ except OSError:
return
f_real = None
@@ -2271,11 +2308,11 @@ def _check_build_log(mysettings, out=None):
f = gzip.GzipFile(filename="", mode="rb", fileobj=f)
am_maintainer_mode = []
- bash_command_not_found = []
+ command_not_found = []
bash_command_not_found_re = re.compile(
r"(.*): line (\d*): (.*): command not found$"
)
- command_not_found_exclude_re = re.compile(r"/configure: line ")
+ dash_command_not_found_re = re.compile(r"(.*): (\d+): (.*): not found$")
helper_missing_file = []
helper_missing_file_re = re.compile(r"^!!! (do|new).*: .* does not exist$")
@@ -2286,7 +2323,7 @@ def _check_build_log(mysettings, out=None):
qa_configure_opts = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"], "build-info", "QA_CONFIGURE_OPTIONS"
@@ -2294,27 +2331,26 @@ def _check_build_log(mysettings, out=None):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as qa_configure_opts_f:
qa_configure_opts = qa_configure_opts_f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
qa_configure_opts = qa_configure_opts.split()
if qa_configure_opts:
if len(qa_configure_opts) > 1:
- qa_configure_opts = "|".join("(%s)" % x for x in qa_configure_opts)
- qa_configure_opts = "^(%s)$" % qa_configure_opts
+ qa_configure_opts = "|".join(f"({x})" for x in qa_configure_opts)
+ qa_configure_opts = f"^({qa_configure_opts})$"
else:
- qa_configure_opts = "^%s$" % qa_configure_opts[0]
+ qa_configure_opts = f"^{qa_configure_opts[0]}$"
qa_configure_opts = re.compile(qa_configure_opts)
qa_am_maintainer_mode = []
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"],
@@ -2324,23 +2360,22 @@ def _check_build_log(mysettings, out=None):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as qa_am_maintainer_mode_f:
qa_am_maintainer_mode = [
x for x in qa_am_maintainer_mode_f.read().splitlines() if x
]
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
if qa_am_maintainer_mode:
if len(qa_am_maintainer_mode) > 1:
- qa_am_maintainer_mode = "|".join("(%s)" % x for x in qa_am_maintainer_mode)
- qa_am_maintainer_mode = "^(%s)$" % qa_am_maintainer_mode
+ qa_am_maintainer_mode = "|".join(f"({x})" for x in qa_am_maintainer_mode)
+ qa_am_maintainer_mode = f"^({qa_am_maintainer_mode})$"
else:
- qa_am_maintainer_mode = "^%s$" % qa_am_maintainer_mode[0]
+ qa_am_maintainer_mode = f"^{qa_am_maintainer_mode[0]}$"
qa_am_maintainer_mode = re.compile(qa_am_maintainer_mode)
# Exclude output from dev-libs/yaz-3.0.47 which looks like this:
@@ -2359,7 +2394,10 @@ def _check_build_log(mysettings, out=None):
setuptools_warn = set()
setuptools_warn_re = re.compile(r".*\/setuptools\/.*: .*Warning: (.*)")
# skip useless version normalization warnings
- setuptools_warn_ignore_re = [re.compile(r"Normalizing .*")]
+ setuptools_warn_ignore_re = [
+ re.compile(r"Normalizing .*"),
+ re.compile(r"setup.py install is deprecated"),
+ ]
def _eerror(lines):
for line in lines:
@@ -2378,11 +2416,11 @@ def _check_build_log(mysettings, out=None):
):
am_maintainer_mode.append(line.rstrip("\n"))
- if (
- bash_command_not_found_re.match(line) is not None
- and command_not_found_exclude_re.search(line) is None
- ):
- bash_command_not_found.append(line.rstrip("\n"))
+ if bash_command_not_found_re.match(line) is not None:
+ command_not_found.append(line.rstrip("\n"))
+
+ if dash_command_not_found_re.match(line) is not None:
+ command_not_found.append(line.rstrip("\n"))
if helper_missing_file_re.match(line) is not None:
helper_missing_file.append(line.rstrip("\n"))
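
The scan above now recognizes both bash and dash diagnostics, since the two shells format the message differently. A quick demonstration of the two patterns against representative log lines (the sample strings are illustrative):

```python
import re

bash_command_not_found_re = re.compile(r"(.*): line (\d*): (.*): command not found$")
dash_command_not_found_re = re.compile(r"(.*): (\d+): (.*): not found$")

samples = [
    "./configure: line 42: pkg-config: command not found",  # bash wording
    "./configure: 42: pkg-config: not found",               # dash wording
]
for line in samples:
    hit = bash_command_not_found_re.match(line) or dash_command_not_found_re.match(line)
    print(f"{'match' if hit else 'no match':8} {line}")
```
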
@@ -2408,8 +2446,8 @@ def _check_build_log(mysettings, out=None):
except (EOFError, zlib.error) as e:
_eerror(
[
- "portage encountered a zlib error: '%s'" % (e,),
- "while reading the log file: '%s'" % logfile,
+ f"portage encountered a zlib error: '{e}'",
+ f"while reading the log file: '{logfile}'",
]
)
finally:
@@ -2444,10 +2482,10 @@ def _check_build_log(mysettings, out=None):
)
_eqawarn(msg)
- if bash_command_not_found:
+ if command_not_found:
msg = [_("QA Notice: command not found:")]
msg.append("")
- msg.extend("\t" + line for line in bash_command_not_found)
+ msg.extend("\t" + line for line in command_not_found)
_eqawarn(msg)
if helper_missing_file:
@@ -2459,7 +2497,7 @@ def _check_build_log(mysettings, out=None):
if configure_opts_warn:
msg = [_("QA Notice: Unrecognized configure options:")]
msg.append("")
- msg.extend("\t%s" % x for x in configure_opts_warn)
+ msg.extend(f"\t{x}" for x in configure_opts_warn)
_eqawarn(msg)
if make_jobserver:
@@ -2488,8 +2526,8 @@ def _post_src_install_write_metadata(settings):
"""
eapi_attrs = _get_eapi_attrs(settings.configdict["pkg"]["EAPI"])
-
build_info_dir = os.path.join(settings["PORTAGE_BUILDDIR"], "build-info")
+ metadata_buffer = {}
metadata_keys = ["IUSE"]
if eapi_attrs.iuse_effective:
@@ -2498,14 +2536,14 @@ def _post_src_install_write_metadata(settings):
for k in metadata_keys:
v = settings.configdict["pkg"].get(k)
if v is not None:
- write_atomic(os.path.join(build_info_dir, k), v + "\n")
+ metadata_buffer[k] = v
for k in ("CHOST",):
v = settings.get(k)
if v is not None:
- write_atomic(os.path.join(build_info_dir, k), v + "\n")
+ metadata_buffer[k] = v
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "BUILD_TIME"),
encoding=_encodings["fs"],
@@ -2515,7 +2553,7 @@ def _post_src_install_write_metadata(settings):
encoding=_encodings["repo.content"],
errors="strict",
) as f:
- f.write("%.0f\n" % (time.time(),))
+ f.write(f"{time.time():.0f}\n")
use = frozenset(settings["PORTAGE_USE"].split())
for k in _vdb_use_conditional_keys:
@@ -2543,17 +2581,7 @@ def _post_src_install_write_metadata(settings):
except OSError:
pass
continue
- with io.open(
- _unicode_encode(
- os.path.join(build_info_dir, k),
- encoding=_encodings["fs"],
- errors="strict",
- ),
- mode="w",
- encoding=_encodings["repo.content"],
- errors="strict",
- ) as f:
- f.write("%s\n" % v)
+ metadata_buffer[k] = v
if eapi_attrs.slot_operator:
deps = evaluate_slot_operator_equal_deps(settings, use, QueryCommand.get_db())
@@ -2565,17 +2593,20 @@ def _post_src_install_write_metadata(settings):
except OSError:
pass
continue
- with io.open(
- _unicode_encode(
- os.path.join(build_info_dir, k),
- encoding=_encodings["fs"],
- errors="strict",
- ),
- mode="w",
- encoding=_encodings["repo.content"],
+
+ metadata_buffer[k] = v
+
+ for k, v in metadata_buffer.items():
+ with open(
+ _unicode_encode(
+ os.path.join(build_info_dir, k),
+ encoding=_encodings["fs"],
errors="strict",
- ) as f:
- f.write("%s\n" % v)
+ ),
+ mode="w",
+ encoding=_encodings["repo.content"],
+ ) as f:
+ f.write(f"{v}\n")
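
The refactor above collects values in `metadata_buffer` and writes every build-info file in one loop at the end, rather than opening files at each collection site. A minimal sketch of the buffer-then-flush shape, writing to a temporary directory:

```python
import os
import tempfile

def write_build_info(build_info_dir, values):
    # Collect first ...
    metadata_buffer = {k: v for k, v in values.items() if v is not None}
    # ... then flush once: one newline-terminated file per key.
    for k, v in metadata_buffer.items():
        with open(os.path.join(build_info_dir, k), "w", encoding="utf-8") as f:
            f.write(f"{v}\n")

with tempfile.TemporaryDirectory() as d:
    write_build_info(d, {"IUSE": "foo bar", "CHOST": "x86_64-pc-linux-gnu", "KV": None})
    print(sorted(os.listdir(d)))  # ['CHOST', 'IUSE']
```
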
def _preinst_bsdflags(mysettings):
@@ -2595,8 +2626,7 @@ def _preinst_bsdflags(mysettings):
% (_shell_quote(mysettings["D"]),)
)
os.system(
- "chflags -R nosunlnk,nouunlnk %s 2>/dev/null"
- % (_shell_quote(mysettings["D"]),)
+ f"chflags -R nosunlnk,nouunlnk {_shell_quote(mysettings['D'])} 2>/dev/null"
)
@@ -2639,7 +2669,7 @@ def _post_src_install_uid_fix(mysettings, out):
qa_desktop_file = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(
mysettings["PORTAGE_BUILDDIR"], "build-info", "QA_DESKTOP_FILE"
@@ -2647,26 +2677,24 @@ def _post_src_install_uid_fix(mysettings, out):
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
qa_desktop_file = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
qa_desktop_file = qa_desktop_file.split()
if qa_desktop_file:
if len(qa_desktop_file) > 1:
- qa_desktop_file = "|".join("(%s)" % x for x in qa_desktop_file)
- qa_desktop_file = "^(%s)$" % qa_desktop_file
+ qa_desktop_file = "|".join(f"({x})" for x in qa_desktop_file)
+ qa_desktop_file = f"^({qa_desktop_file})$"
else:
- qa_desktop_file = "^%s$" % qa_desktop_file[0]
+ qa_desktop_file = f"^{qa_desktop_file[0]}$"
qa_desktop_file = re.compile(qa_desktop_file)
while True:
-
unicode_error = False
size = 0
counted_inodes = set()
@@ -2675,6 +2703,10 @@ def _post_src_install_uid_fix(mysettings, out):
desktopfile_errors = []
for parent, dirs, files in os.walk(destdir):
+ if portage.utf8_mode:
+ parent = os.fsencode(parent)
+ dirs = [os.fsencode(value) for value in dirs]
+ files = [os.fsencode(value) for value in files]
try:
parent = _unicode_decode(
parent, encoding=_encodings["merge"], errors="strict"
@@ -2731,7 +2763,6 @@ def _post_src_install_uid_fix(mysettings, out):
is not None
)
):
-
desktop_validate = validate_desktop_entry(fpath)
if desktop_validate:
desktopfile_errors.extend(desktop_validate)
@@ -2763,7 +2794,7 @@ def _post_src_install_uid_fix(mysettings, out):
" %s is not a valid libtool archive, skipping\n"
% fpath[len(destdir) :]
)
- qa_msg = "QA Notice: invalid .la file found: %s, %s" % (
+ qa_msg = "QA Notice: invalid .la file found: {}, {}".format(
fpath[len(destdir) :],
e,
)
@@ -2775,13 +2806,17 @@ def _post_src_install_uid_fix(mysettings, out):
if not fixlafiles_announced:
fixlafiles_announced = True
writemsg("Fixing .la files\n", fd=out)
- writemsg(" %s\n" % fpath[len(destdir) :], fd=out)
+ writemsg(f" {fpath[len(destdir):]}\n", fd=out)
# write_atomic succeeds even in some cases in which
# a normal write might fail due to file permission
# settings on some operating systems such as HP-UX
write_atomic(
- _unicode_encode(
- fpath, encoding=_encodings["merge"], errors="strict"
+ (
+ fpath
+ if portage.utf8_mode
+ else _unicode_encode(
+ fpath, encoding=_encodings["merge"], errors="strict"
+ )
),
new_contents,
mode="wb",
@@ -2825,7 +2860,7 @@ def _post_src_install_uid_fix(mysettings, out):
build_info_dir = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info")
- f = io.open(
+ f = open(
_unicode_encode(
os.path.join(build_info_dir, "SIZE"),
encoding=_encodings["fs"],
@@ -2855,6 +2890,48 @@ def _reapply_bsdflags_to_image(mysettings):
)
+def _inject_libc_dep(build_info_dir, mysettings):
+ #
+ # We could skip this for non-binpkgs but there doesn't seem to be much
+ # value in that, as users shouldn't downgrade libc anyway.
+ injected_libc_depstring = []
+ for libc_realized_atom in find_libc_deps(
+ QueryCommand.get_db()[mysettings["EROOT"]]["vartree"].dbapi, True
+ ):
+ if pkgcmp(mysettings.mycpv, libc_realized_atom) is not None:
+ # We don't want to inject deps on ourselves (libc)
+ injected_libc_depstring = []
+ break
+
+ injected_libc_depstring.append(f">={libc_realized_atom}")
+
+ rdepend_file = os.path.join(build_info_dir, "RDEPEND")
+ # Slurp the existing contents because we need to mangle it a bit
+ # It'll look something like (if it exists):
+ # ```
+ # app-misc/foo dev-libs/bar
+ # <newline>
+    # ```
+ rdepend = None
+ if os.path.exists(rdepend_file):
+ with open(rdepend_file, encoding="utf-8") as f:
+ rdepend = f.readlines()
+ rdepend = "\n".join(rdepend).strip()
+
+ # For RDEPEND, we want an implicit dependency on >=${PROVIDER_OF_LIBC}
+ # to avoid runtime breakage when merging binpkgs, see bug #753500.
+ #
+ if injected_libc_depstring:
+ if rdepend:
+ rdepend += f" {' '.join(injected_libc_depstring).strip()}"
+ else:
+ # The package doesn't have an RDEPEND, so make one up.
+ rdepend = " ".join(injected_libc_depstring)
+
+ with open(rdepend_file, "w", encoding="utf-8") as f:
+ f.write(f"{rdepend}\n")
+
+
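
The net effect of `_inject_libc_dep` is simple string surgery on the recorded RDEPEND: append `>=` atoms for the realized libc providers unless the package itself is the libc provider. A toy rendering of that mangling step (the atoms are made up; the real ones come from `find_libc_deps`):

```python
def inject_libc(rdepend, libc_atoms, package_is_libc=False):
    # Never inject a dependency on ourselves (libc).
    injected = [] if package_is_libc else [f">={atom}" for atom in libc_atoms]
    if not injected:
        return rdepend
    return f"{rdepend} {' '.join(injected)}".strip() if rdepend else " ".join(injected)

print(inject_libc("app-misc/foo dev-libs/bar", ["sys-libs/glibc-2.38-r10"]))
# -> app-misc/foo dev-libs/bar >=sys-libs/glibc-2.38-r10
```
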
def _post_src_install_soname_symlinks(mysettings, out):
"""
Check that libraries in $D have corresponding soname symlinks.
@@ -2864,22 +2941,20 @@ def _post_src_install_soname_symlinks(mysettings, out):
"""
image_dir = mysettings["D"]
- needed_filename = os.path.join(
- mysettings["PORTAGE_BUILDDIR"], "build-info", "NEEDED.ELF.2"
- )
+ build_info_dir = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info")
+ needed_filename = os.path.join(build_info_dir, "NEEDED.ELF.2")
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
needed_filename, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
lines = f.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
return
@@ -2887,21 +2962,25 @@ def _post_src_install_soname_symlinks(mysettings, out):
if f is not None:
f.close()
+ # We do RDEPEND mangling here instead of the natural location
+ # in _post_src_install_write_metadata because NEEDED hasn't been
+ # written yet at that point.
+ _inject_libc_dep(build_info_dir, mysettings)
+
metadata = {}
for k in ("QA_PREBUILT", "QA_SONAME_NO_SYMLINK"):
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info", k),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
v = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
else:
@@ -2919,10 +2998,10 @@ def _post_src_install_soname_symlinks(mysettings, out):
qa_soname_no_symlink = metadata.get("QA_SONAME_NO_SYMLINK", "").split()
if qa_soname_no_symlink:
if len(qa_soname_no_symlink) > 1:
- qa_soname_no_symlink = "|".join("(%s)" % x for x in qa_soname_no_symlink)
- qa_soname_no_symlink = "^(%s)$" % qa_soname_no_symlink
+ qa_soname_no_symlink = "|".join(f"({x})" for x in qa_soname_no_symlink)
+ qa_soname_no_symlink = f"^({qa_soname_no_symlink})$"
else:
- qa_soname_no_symlink = "^%s$" % qa_soname_no_symlink[0]
+ qa_soname_no_symlink = f"^{qa_soname_no_symlink[0]}$"
qa_soname_no_symlink = re.compile(qa_soname_no_symlink)
libpaths = set(portage.util.getlibpaths(mysettings["ROOT"], env=mysettings))
@@ -2962,35 +3041,33 @@ def _post_src_install_soname_symlinks(mysettings, out):
build_info_dir = os.path.join(mysettings["PORTAGE_BUILDDIR"], "build-info")
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "PROVIDES_EXCLUDE"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
provides_exclude = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
provides_exclude = ""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "REQUIRES_EXCLUDE"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
requires_exclude = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
requires_exclude = ""
@@ -3013,7 +3090,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
entry = NeededEntry.parse(needed_filename, l)
except InvalidData as e:
portage.util.writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
+ f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1
)
continue
@@ -3067,7 +3144,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
needed_file.close()
if soname_deps.requires is not None:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "REQUIRES"),
encoding=_encodings["fs"],
@@ -3080,7 +3157,7 @@ def _post_src_install_soname_symlinks(mysettings, out):
f.write(soname_deps.requires)
if soname_deps.provides is not None:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(build_info_dir, "PROVIDES"),
encoding=_encodings["fs"],
@@ -3091,11 +3168,19 @@ def _post_src_install_soname_symlinks(mysettings, out):
errors="strict",
) as f:
f.write(soname_deps.provides)
+ else:
+ if check_dyn_libs_inconsistent(image_dir, soname_deps.provides):
+ eerror(
+ "Error! Installing dynamic libraries (.so) with blank PROVIDES!",
+ phase="install",
+ key=mysettings.mycpv,
+ out=out,
+ )
if unrecognized_elf_files:
qa_msg = ["QA Notice: Unrecognized ELF file(s):"]
qa_msg.append("")
- qa_msg.extend("\t%s" % str(entry).rstrip() for entry in unrecognized_elf_files)
+ qa_msg.extend(f"\t{str(entry).rstrip()}" for entry in unrecognized_elf_files)
qa_msg.append("")
for line in qa_msg:
eqawarn(line, key=mysettings.mycpv, out=out)
diff --git a/lib/portage/package/ebuild/fetch.py b/lib/portage/package/ebuild/fetch.py
index 8c64362c2..bfa0c2b27 100644
--- a/lib/portage/package/ebuild/fetch.py
+++ b/lib/portage/package/ebuild/fetch.py
@@ -6,7 +6,6 @@ __all__ = ["fetch"]
import errno
import functools
import glob
-import io
import itertools
import json
import logging
@@ -135,7 +134,6 @@ def _spawn_fetch(settings, args, **kwargs):
# wget pollute stderr (if portage detects a problem then it
    # can send its own message to stderr).
if "fd_pipes" not in kwargs:
-
kwargs["fd_pipes"] = {
0: portage._get_stdin().fileno(),
1: sys.__stdout__.fileno(),
@@ -234,7 +232,7 @@ def _ensure_distdir(settings, distdir):
if "FAKED_MODE" in settings:
# When inside fakeroot, directories with portage's gid appear
# to have root's gid. Therefore, use root's gid instead of
- # portage's gid to avoid spurrious permissions adjustments
+ # portage's gid to avoid spurious permissions adjustments
# when inside fakeroot.
dir_gid = 0
@@ -242,20 +240,15 @@ def _ensure_distdir(settings, distdir):
userpriv = portage.data.secpass >= 2 and "userpriv" in settings.features
write_test_file = os.path.join(distdir, ".__portage_test_write__")
- try:
- st = os.stat(distdir)
- except OSError:
- st = None
-
- if st is not None and stat.S_ISDIR(st.st_mode):
- if not (userfetch or userpriv):
- return
- if _userpriv_test_write_file(settings, write_test_file):
- return
+ if _userpriv_test_write_file(settings, write_test_file):
+ return
_userpriv_test_write_file_cache.pop(write_test_file, None)
+
+ already_exists = os.path.isdir(distdir)
+
if ensure_dirs(distdir, gid=dir_gid, mode=dirmode, mask=modemask):
- if st is None:
+ if not already_exists:
# The directory has just been created
# and therefore it must be empty.
return
@@ -371,9 +364,7 @@ def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
if hash_filter is not None:
digests = _apply_hash_filter(digests, hash_filter)
if _check_digests(filename, digests, show_errors=show_errors):
- eout.ebegin(
- "%s %s ;-)" % (os.path.basename(filename), " ".join(sorted(digests)))
- )
+ eout.ebegin(f"{os.path.basename(filename)} {' '.join(sorted(digests))} ;-)")
eout.eend(0)
else:
return (False, st)
@@ -579,7 +570,7 @@ class ContentHashLayout(FilenameHashLayout):
to a digest value for self.algo, and which can be compared to
other DistfileName instances with their digests_equal method.
"""
- for filename in super(ContentHashLayout, self).get_filenames(distdir):
+ for filename in super().get_filenames(distdir):
yield DistfileName(filename, digests=dict([(self.algo, filename)]))
@staticmethod
@@ -674,7 +665,7 @@ class MirrorLayoutConfig:
ret = []
for val in self.structure:
if not self.validate_structure(val):
- raise ValueError("Unsupported structure: {}".format(val))
+ raise ValueError(f"Unsupported structure: {val}")
if val[0] == "flat":
ret.append(FlatLayout(*val[1:]))
elif val[0] == "filename-hash":
@@ -702,9 +693,9 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
cache = {}
if cache_path is not None:
try:
- with open(cache_path, "r") as f:
+ with open(cache_path) as f:
cache = json.load(f)
- except (IOError, ValueError):
+ except (OSError, ValueError):
pass
ts, data = cache.get(mirror_url, (0, None))
@@ -712,7 +703,7 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
if ts >= time.time() - 86400:
mirror_conf.deserialize(data)
else:
- tmpfile = ".layout.conf.%s" % urlparse(mirror_url).hostname
+ tmpfile = f".layout.conf.{urlparse(mirror_url).hostname}"
try:
if mirror_url[:1] == "/":
tmpfile = os.path.join(mirror_url, "layout.conf")
@@ -726,8 +717,8 @@ def get_mirror_url(mirror_url, filename, mysettings, cache_path=None):
tmpfile = os.path.join(mysettings["DISTDIR"], tmpfile)
mirror_conf.read_from_file(tmpfile)
else:
- raise IOError()
- except (ConfigParserError, IOError, UnicodeDecodeError):
+ raise OSError()
+ except (ConfigParserError, OSError, UnicodeDecodeError):
pass
else:
cache[mirror_url] = (time.time(), mirror_conf.serialize())
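
For context, the surrounding logic caches each mirror's parsed layout.conf in a JSON file keyed by URL, with a one-day TTL (`ts >= time.time() - 86400`). The same TTL-cache skeleton in isolation (the cache path and loader are placeholders):

```python
import json
import time

TTL = 86400  # one day, as in the fetch code

def cached_fetch(cache_path, key, load):
    try:
        with open(cache_path) as f:
            cache = json.load(f)
    except (OSError, ValueError):
        cache = {}
    ts, data = cache.get(key, (0, None))
    if ts < time.time() - TTL:
        data = load(key)  # refresh stale or missing entries
        cache[key] = (time.time(), data)
        with open(cache_path, "w") as f:
            json.dump(cache, f)
    return data

print(cached_fetch("/tmp/layout-cache.json", "https://example.org", lambda k: {"structure": []}))
```
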
@@ -989,7 +980,7 @@ def fetch(
]
restrict_fetch = "fetch" in restrict
- force_mirror = "force-mirror" in features and not restrict_mirror
+ force_mirror = "force-mirror" in features and not restrict_mirror and try_mirrors
file_uri_tuples = []
# Check for 'items' attribute since OrderedDict is not a dict.
@@ -1092,7 +1083,7 @@ def fetch(
writemsg(_("!!! No known mirror by the name: %s\n") % (mirrorname))
else:
writemsg(_("Invalid mirror definition in SRC_URI:\n"), noiselevel=-1)
- writemsg(" %s\n" % (myuri), noiselevel=-1)
+ writemsg(f" {myuri}\n", noiselevel=-1)
else:
if (restrict_fetch and not override_fetch) or force_mirror:
# Only fetch from specific mirrors is allowed.
@@ -1131,7 +1122,7 @@ def fetch(
_ensure_distdir(mysettings, mysettings["DISTDIR"])
except PortageException as e:
if not os.path.isdir(mysettings["DISTDIR"]):
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Directory Not Found: DISTDIR='%s'\n")
% mysettings["DISTDIR"],
@@ -1217,7 +1208,7 @@ def fetch(
vfs_stat = os.statvfs(mysettings["DISTDIR"])
except OSError as e:
writemsg_level(
- "!!! statvfs('%s'): %s\n" % (mysettings["DISTDIR"], e),
+ f"!!! statvfs('{mysettings['DISTDIR']}'): {e}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -1234,7 +1225,6 @@ def fetch(
if (size - mysize + vfs_stat.f_bsize) >= (
vfs_stat.f_bsize * vfs_stat.f_bavail
):
-
if (size - mysize + vfs_stat.f_bsize) >= (
vfs_stat.f_bsize * vfs_stat.f_bfree
):
@@ -1248,7 +1238,6 @@ def fetch(
has_space = False
if distdir_writable and use_locks:
-
lock_kwargs = {}
if fetchonly:
lock_kwargs["flags"] = os.O_NONBLOCK
@@ -1267,7 +1256,6 @@ def fetch(
continue
try:
if not listonly:
-
eout = EOutput()
eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
match, mystat = _check_distfile(
@@ -1444,7 +1432,7 @@ def fetch(
shutil.copyfile(mirror_file, download_path)
writemsg(_("Local mirror has file: %s\n") % myfile)
break
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
del e
@@ -1482,13 +1470,14 @@ def fetch(
if distdir_writable:
try:
os.unlink(download_path)
- except EnvironmentError:
+ except OSError:
pass
elif not orig_digests:
- # We don't have a digest, but the file exists. We must
- # assume that it is fully downloaded.
+ # We don't have a digest, and the temporary file exists.
if not force:
- continue
+ # Try to resume this download when full
+ # download has not been explicitly forced.
+ fetched = 1
else:
if (
mydigests[myfile].get("size") is not None
@@ -1502,7 +1491,7 @@ def fetch(
):
eout = EOutput()
eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
- eout.ebegin("%s size ;-)" % (myfile,))
+ eout.ebegin(f"{myfile} size ;-)")
eout.eend(0)
continue
else:
@@ -1553,9 +1542,7 @@ def fetch(
if digests:
digests = list(digests)
digests.sort()
- eout.ebegin(
- "%s %s ;-)" % (myfile, " ".join(digests))
- )
+ eout.ebegin(f"{myfile} {' '.join(digests)} ;-)")
eout.eend(0)
continue # fetch any remaining files
@@ -1575,6 +1562,7 @@ def fetch(
tried_locations.add(loc)
if listonly:
writemsg_stdout(loc + " ", noiselevel=-1)
+ fetched = 2
continue
# allow different fetchcommands per protocol
protocol = loc[0 : loc.find("://")]
@@ -1734,7 +1722,7 @@ def fetch(
try:
variables["DIGESTS"] = " ".join(
[
- "%s:%s" % (k.lower(), v)
+ f"{k.lower()}:{v}"
for k, v in mydigests[myfile].items()
if k != "size"
]
@@ -1752,7 +1740,6 @@ def fetch(
myret = -1
try:
-
myret = _spawn_fetch(mysettings, myfetch)
finally:
@@ -1783,7 +1770,7 @@ def fetch(
os.unlink(download_path)
fetched = 0
continue
- except EnvironmentError:
+ except OSError:
pass
if mydigests is not None and myfile in mydigests:
@@ -1795,7 +1782,6 @@ def fetch(
del e
fetched = 0
else:
-
if stat.S_ISDIR(mystat.st_mode):
# This can happen if FETCHCOMMAND erroneously
# contains wget's -P option where it should
@@ -1849,13 +1835,12 @@ def fetch(
"<title>.*(not found|404).*</title>",
re.I | re.M,
)
- with io.open(
+ with open(
_unicode_encode(
download_path,
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as f:
@@ -1869,7 +1854,7 @@ def fetch(
)
fetched = 0
continue
- except (IOError, OSError):
+ except OSError:
pass
fetched = 1
continue
@@ -1946,8 +1931,7 @@ def fetch(
)
if digests:
eout.ebegin(
- "%s %s ;-)"
- % (myfile, " ".join(sorted(digests)))
+ f"{myfile} {' '.join(sorted(digests))} ;-)"
)
eout.eend(0)
fetched = 2
@@ -1960,7 +1944,7 @@ def fetch(
)
fetched = 2
break
- elif mydigests != None:
+ elif mydigests is not None:
writemsg(
_("No digest file available and download failed.\n\n"),
noiselevel=-1,
diff --git a/lib/portage/package/ebuild/getmaskingstatus.py b/lib/portage/package/ebuild/getmaskingstatus.py
index b47dd8c50..f4f3e91b3 100644
--- a/lib/portage/package/ebuild/getmaskingstatus.py
+++ b/lib/portage/package/ebuild/getmaskingstatus.py
@@ -12,7 +12,6 @@ from portage.versions import _pkg_str
class _UnmaskHint:
-
__slots__ = ("key", "value")
def __init__(self, key, value):
@@ -21,7 +20,6 @@ class _UnmaskHint:
class _MaskReason:
-
__slots__ = ("category", "message", "unmask_hint")
def __init__(self, category, message, unmask_hint=None):
@@ -43,7 +41,6 @@ def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
-
metadata = None
installed = False
if not isinstance(mycpv, str):
@@ -90,9 +87,9 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
properties = metadata["PROPERTIES"]
restrict = metadata["RESTRICT"]
if not eapi_is_supported(eapi):
- return [_MaskReason("EAPI", "EAPI %s" % eapi)]
+ return [_MaskReason("EAPI", f"EAPI {eapi}")]
if _eapi_is_deprecated(eapi) and not installed:
- return [_MaskReason("EAPI", "EAPI %s" % eapi)]
+ return [_MaskReason("EAPI", f"EAPI {eapi}")]
egroups = settings.configdict["backupenv"].get("ACCEPT_KEYWORDS", "").split()
global_accept_keywords = settings.get("ACCEPT_KEYWORDS", "")
pgroups = global_accept_keywords.split()
@@ -149,7 +146,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
try:
missing_licenses = settings._getMissingLicenses(mycpv, metadata)
if missing_licenses:
- allowed_tokens = set(["||", "(", ")"])
+ allowed_tokens = {"||", "(", ")"}
allowed_tokens.update(missing_licenses)
license_split = licenses.split()
license_split = [x for x in license_split if x in allowed_tokens]
@@ -168,7 +165,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
try:
missing_properties = settings._getMissingProperties(mycpv, metadata)
if missing_properties:
- allowed_tokens = set(["||", "(", ")"])
+ allowed_tokens = {"||", "(", ")"}
allowed_tokens.update(missing_properties)
properties_split = properties.split()
properties_split = [x for x in properties_split if x in allowed_tokens]
@@ -185,7 +182,7 @@ def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
msg.append("in RESTRICT")
rValue.append(_MaskReason("RESTRICT", " ".join(msg)))
except InvalidDependString as e:
- rValue.append(_MaskReason("invalid", "RESTRICT: %s" % (e,)))
+ rValue.append(_MaskReason("invalid", f"RESTRICT: {e}"))
# Only show KEYWORDS masks for installed packages
# if they're not masked for any other reason.
diff --git a/lib/portage/package/ebuild/meson.build b/lib/portage/package/ebuild/meson.build
new file mode 100644
index 000000000..69fb4f588
--- /dev/null
+++ b/lib/portage/package/ebuild/meson.build
@@ -0,0 +1,23 @@
+py.install_sources(
+ [
+ 'config.py',
+ 'deprecated_profile_check.py',
+ 'digestcheck.py',
+ 'digestgen.py',
+ 'doebuild.py',
+ 'fetch.py',
+ 'getmaskingreason.py',
+ 'getmaskingstatus.py',
+ 'prepare_build_dirs.py',
+ 'profile_iuse.py',
+ '_metadata_invalid.py',
+ '_spawn_nofetch.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/package/ebuild',
+ pure : not native_extensions
+)
+
+subdir('_config')
+subdir('_ipc')
+subdir('_parallel_manifest')
diff --git a/lib/portage/package/ebuild/prepare_build_dirs.py b/lib/portage/package/ebuild/prepare_build_dirs.py
index 659198905..9471179aa 100644
--- a/lib/portage/package/ebuild/prepare_build_dirs.py
+++ b/lib/portage/package/ebuild/prepare_build_dirs.py
@@ -54,7 +54,7 @@ def prepare_build_dirs(myroot=None, settings=None, cleanup=False):
if errno.ENOENT == oe.errno:
pass
elif errno.EPERM == oe.errno:
- writemsg("%s\n" % oe, noiselevel=-1)
+ writemsg(f"{oe}\n", noiselevel=-1)
writemsg(
_("Operation Not Permitted: rmtree('%s')\n") % clean_dir,
noiselevel=-1,
@@ -72,7 +72,7 @@ def prepare_build_dirs(myroot=None, settings=None, cleanup=False):
if errno.EEXIST == oe.errno:
pass
elif errno.EPERM == oe.errno:
- writemsg("%s\n" % oe, noiselevel=-1)
+ writemsg(f"{oe}\n", noiselevel=-1)
writemsg(
_("Operation Not Permitted: makedirs('%s')\n") % dir_path,
noiselevel=-1,
@@ -102,6 +102,15 @@ def prepare_build_dirs(myroot=None, settings=None, cleanup=False):
apply_secpass_permissions(
mysettings[dir_key], uid=portage_uid, gid=portage_gid
)
+ # The setgid bit prevents a lockfile group permission race for bug #468990.
+ ipc_kwargs = {}
+ if portage.data.secpass >= 1:
+ ipc_kwargs["gid"] = portage_gid
+ ipc_kwargs["mode"] = 0o2770
+ ensure_dirs(
+ os.path.join(mysettings["PORTAGE_BUILDDIR"], ".ipc"),
+ **ipc_kwargs,
+ )
except PermissionDenied as e:
writemsg(_("Permission Denied: %s\n") % str(e), noiselevel=-1)
return 1
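
The `.ipc` directory above is created mode `0o2770`: the portage group gets full access, and the setgid bit makes new files inherit that group, closing the lockfile group-permission race from bug #468990. A short illustration of what those mode bits mean:

```python
import stat

mode = 0o2770
print(stat.filemode(mode | stat.S_IFDIR))  # drwxrws--- : note the setgid 's'
assert mode & stat.S_ISGID        # files created inside inherit the group
assert mode & stat.S_IRWXG        # group has full access
assert not mode & stat.S_IRWXO    # others have none
```
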
@@ -142,7 +151,7 @@ def _adjust_perms_msg(settings, msg):
mode="ab",
)
log_file_real = log_file
- except IOError:
+ except OSError:
def write(msg):
pass
@@ -165,7 +174,6 @@ def _adjust_perms_msg(settings, msg):
def _prepare_features_dirs(mysettings):
-
# Use default ABI libdir in accordance with bug #355283.
libdir = None
default_abi = mysettings.get("DEFAULT_ABI")
@@ -228,11 +236,9 @@ def _prepare_features_dirs(mysettings):
except OSError:
continue
if subdir_st.st_gid != portage_gid or (
- (
- stat.S_ISDIR(subdir_st.st_mode)
- and not dirmode
- == (stat.S_IMODE(subdir_st.st_mode) & dirmode)
- )
+ stat.S_ISDIR(subdir_st.st_mode)
+ and not dirmode
+ == (stat.S_IMODE(subdir_st.st_mode) & dirmode)
):
droppriv_fix = True
break
@@ -284,7 +290,7 @@ def _prepare_features_dirs(mysettings):
except PortageException as e:
failure = True
- writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"\n!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Failed resetting perms on %s='%s'\n")
% (kwargs["basedir_var"], basedir),
@@ -308,7 +314,7 @@ def _prepare_workdir(mysettings):
else:
raise ValueError()
if parsed_mode & 0o7777 != parsed_mode:
- raise ValueError("Invalid file mode: %s" % mode)
+ raise ValueError(f"Invalid file mode: {mode}")
else:
workdir_mode = parsed_mode
except KeyError as e:
@@ -317,7 +323,7 @@ def _prepare_workdir(mysettings):
)
except ValueError as e:
if len(str(e)) > 0:
- writemsg("%s\n" % e)
+ writemsg(f"{e}\n")
writemsg(
_("!!! Unable to parse PORTAGE_WORKDIR_MODE='%s', using %s.\n")
% (mysettings["PORTAGE_WORKDIR_MODE"], oct(workdir_mode))
@@ -355,7 +361,7 @@ def _prepare_workdir(mysettings):
mode=0o2770,
)
except PortageException as e:
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! Permission issues with PORTAGE_LOGDIR='%s'\n")
% mysettings["PORTAGE_LOGDIR"],
@@ -387,7 +393,7 @@ def _prepare_workdir(mysettings):
log_subdir = os.path.join(logdir, "build", mysettings["CATEGORY"])
mysettings["PORTAGE_LOG_FILE"] = os.path.join(
log_subdir,
- "%s:%s.log%s" % (mysettings["PF"], logid_time, compress_log_ext),
+ f"{mysettings['PF']}:{logid_time}.log{compress_log_ext}",
)
else:
log_subdir = logdir
@@ -408,16 +414,17 @@ def _prepare_workdir(mysettings):
try:
_ensure_log_subdirs(logdir, log_subdir)
except PortageException as e:
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
if os.access(log_subdir, os.W_OK):
logdir_subdir_ok = True
else:
writemsg(
- "!!! %s: %s\n" % (_("Permission Denied"), log_subdir), noiselevel=-1
+ f"!!! {_('Permission Denied')}: {log_subdir}\n",
+ noiselevel=-1,
)
- tmpdir_log_path = os.path.join(mysettings["T"], "build.log%s" % compress_log_ext)
+ tmpdir_log_path = os.path.join(mysettings["T"], f"build.log{compress_log_ext}")
if not logdir_subdir_ok:
# NOTE: When sesandbox is enabled, the local SELinux security policies
# may not allow output to be piped out of the sesandbox domain. The
diff --git a/lib/portage/package/meson.build b/lib/portage/package/meson.build
new file mode 100644
index 000000000..a42019acd
--- /dev/null
+++ b/lib/portage/package/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ '__init__.py',
+ ],
+ subdir : 'portage/package',
+ pure : not native_extensions
+)
+
+subdir('ebuild')
diff --git a/lib/portage/process.py b/lib/portage/process.py
index e1bd2314e..1bc0c507c 100644
--- a/lib/portage/process.py
+++ b/lib/portage/process.py
@@ -1,11 +1,13 @@
# portage.py -- core Portage functionality
-# Copyright 1998-2020 Gentoo Authors
+# Copyright 1998-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import atexit
import errno
import fcntl
+import io
+import logging
import multiprocessing
import platform
import signal
@@ -14,6 +16,11 @@ import subprocess
import sys
import traceback
import os as _os
+import warnings
+
+from dataclasses import dataclass
+from functools import lru_cache, partial
+from typing import Any, Optional, Callable, Union
from portage import os
from portage import _encodings
@@ -22,12 +29,16 @@ import portage
portage.proxy.lazyimport.lazyimport(
globals(),
- "portage.util:dump_traceback,writemsg",
+ "portage.util._async.ForkProcess:ForkProcess",
+ "portage.util._eventloop.global_event_loop:global_event_loop",
+ "portage.util.futures:asyncio",
+ "portage.util:dump_traceback,writemsg,writemsg_level",
)
from portage.const import BASH_BINARY, SANDBOX_BINARY, FAKEROOT_BINARY
from portage.exception import CommandNotFound
-from portage.util._ctypes import find_library, LoadLibrary, ctypes
+from portage.proxy.objectproxy import ObjectProxy
+from portage.util._ctypes import load_libc, LoadLibrary, ctypes
try:
from portage.util.netlink import RtNetlink
@@ -84,18 +95,14 @@ if _fd_dir is not None:
raise
return range(max_fd_limit)
-
-elif os.path.isdir("/proc/%s/fd" % portage.getpid()):
+elif os.path.isdir(f"/proc/{portage.getpid()}/fd"):
# In order for this function to work in forked subprocesses,
# os.getpid() must be called from inside the function.
def get_open_fds():
return (
- int(fd)
- for fd in os.listdir("/proc/%s/fd" % portage.getpid())
- if fd.isdigit()
+ int(fd) for fd in os.listdir(f"/proc/{portage.getpid()}/fd") if fd.isdigit()
)
-
else:
def get_open_fds():
@@ -117,7 +124,6 @@ def sanitize_fds():
not be inherited by child processes.
"""
if _set_inheritable is not None:
-
whitelist = frozenset(
[
portage._get_stdin().fileno(),
@@ -221,6 +227,34 @@ def run_exitfuncs():
raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
+async def run_coroutine_exitfuncs():
+ """
+ This is the same as run_exitfuncs but it uses asyncio.iscoroutinefunction
+ to check which functions to run. It is called by the AsyncioEventLoop
+ _close_main method just before the loop is closed.
+ """
+ tasks = []
+ for index, (func, targs, kargs) in reversed(list(enumerate(_exithandlers))):
+ if asyncio.iscoroutinefunction(func):
+ del _exithandlers[index]
+ tasks.append(asyncio.ensure_future(func(*targs, **kargs)))
+ tracebacks = []
+ exc_info = None
+ for task in tasks:
+ try:
+ await task
+ except Exception:
+ file = io.StringIO()
+ traceback.print_exc(file=file)
+ tracebacks.append(file.getvalue())
+ exc_info = sys.exc_info()
+ if len(tracebacks) > 1:
+ for tb in tracebacks[:-1]:
+ print(tb, file=sys.stderr, flush=True)
+ if exc_info is not None:
+ raise exc_info[1].with_traceback(exc_info[2])
+
+
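
Coroutine exit handlers are gathered into tasks and awaited; every failure's traceback is printed, and the last exception is re-raised. A small usage sketch, assuming handlers are registered through this module's atexit_register, which feeds the same `_exithandlers` list consulted above:

```python
from portage import process
from portage.util.futures import asyncio

async def flush_caches():
    # Hypothetical async cleanup hook.
    print("flushing caches before the loop closes")

process.atexit_register(flush_caches)

loop = asyncio._safe_loop()
# Normally awaited by AsyncioEventLoop._close_main; driven manually here.
loop.run_until_complete(process.run_coroutine_exitfuncs())
```
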
atexit.register(run_exitfuncs)
# It used to be necessary for API consumers to remove pids from spawned_pids,
@@ -246,12 +280,211 @@ def cleanup():
pass
+@dataclass(frozen=True)
+class EnvStats:
+ env_size: int
+ env_largest_name: str
+ env_largest_size: int
+
+
+def calc_env_stats(env) -> EnvStats:
+ @lru_cache(1024)
+ def encoded_length(s):
+ return len(os.fsencode(s))
+
+ env_size = 0
+ env_largest_name = None
+ env_largest_size = 0
+ for env_name, env_value in env.items():
+ env_name_size = encoded_length(env_name)
+ env_value_size = encoded_length(env_value)
+ # Add two for '=' and the terminating null byte.
+ total_size = env_name_size + env_value_size + 2
+ if total_size > env_largest_size:
+ env_largest_name = env_name
+ env_largest_size = total_size
+ env_size += total_size
+
+ return EnvStats(env_size, env_largest_name, env_largest_size)
+
+
+env_too_large_warnings = 0
+
+
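
`calc_env_stats` sizes the environment as it will occupy the child's environment block: encoded name, `=`, encoded value, and a terminating NUL per entry. It can be exercised directly:

```python
from portage.process import calc_env_stats

stats = calc_env_stats({"PATH": "/usr/bin:/bin", "LANG": "C.UTF-8"})
# PATH: 4 + 13 + 2 = 19 bytes; LANG: 4 + 7 + 2 = 13 bytes
print(stats.env_size, stats.env_largest_name, stats.env_largest_size)
# -> 32 PATH 19
```
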
+class AbstractProcess:
+ def send_signal(self, sig):
+ """Send a signal to the process."""
+ if self.returncode is not None:
+ # Skip signalling a process that we know has already died.
+ return
+
+ try:
+ os.kill(self.pid, sig)
+ except ProcessLookupError:
+ # Suppress the race condition error; bpo-40550.
+ pass
+
+
+class Process(AbstractProcess):
+ """
+ An object that wraps OS processes which do not have an
+ associated multiprocessing.Process instance. Ultimately,
+ we need to stop using os.fork() to create these processes
+ because it is unsafe for threaded processes as discussed
+ in https://github.com/python/cpython/issues/84559.
+
+ Note that if subprocess.Popen is used without pass_fds
+ or preexec_fn parameters, then it avoids using os.fork()
+ by instead using posix_spawn. This approach is not used
+ by spawn because it needs to execute python code prior
+ to exec, so it instead uses multiprocessing.Process,
+ which only uses os.fork() when the multiprocessing start
+ method is fork.
+ """
+
+ def __init__(self, pid: int):
+ self.pid = pid
+ self.returncode = None
+ self._exit_waiters = []
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} {self.pid}>"
+
+ async def wait(self):
+ """
+ Wait for the child process to terminate.
+
+ Set and return the returncode attribute.
+ """
+ if self.returncode is not None:
+ return self.returncode
+
+ loop = global_event_loop()
+ if not self._exit_waiters:
+ loop._asyncio_child_watcher.add_child_handler(self.pid, self._child_handler)
+ waiter = loop.create_future()
+ self._exit_waiters.append(waiter)
+ return await waiter
+
+ def _child_handler(self, pid, returncode):
+ if pid != self.pid:
+ raise AssertionError(f"expected pid {self.pid}, got {pid}")
+ self.returncode = returncode
+
+ for waiter in self._exit_waiters:
+ if not waiter.cancelled():
+ waiter.set_result(returncode)
+ self._exit_waiters = None
+
+ def terminate(self):
+ """Terminate the process with SIGTERM"""
+ self.send_signal(signal.SIGTERM)
+
+ def kill(self):
+ """Kill the process with SIGKILL"""
+ self.send_signal(signal.SIGKILL)
+
+
+class MultiprocessingProcess(AbstractProcess):
+ """
+ An object that wraps OS processes created by multiprocessing.Process.
+ """
+
+ # Number of seconds between poll attempts for process exit status
+ # (after the sentinel has become ready).
+ _proc_join_interval = 0.1
+
+ def __init__(self, proc: multiprocessing.Process):
+ self._proc = proc
+ self.pid = proc.pid
+ self.returncode = None
+ self._exit_waiters = []
+
+ def __repr__(self):
+ return f"<{self.__class__.__name__} {self.pid}>"
+
+ async def wait(self):
+ """
+ Wait for the child process to terminate.
+
+ Set and return the returncode attribute.
+ """
+ if self.returncode is not None:
+ return self.returncode
+
+ loop = global_event_loop()
+ if not self._exit_waiters:
+ asyncio.ensure_future(self._proc_join(), loop=loop).add_done_callback(
+ self._proc_join_done
+ )
+ waiter = loop.create_future()
+ self._exit_waiters.append(waiter)
+ return await waiter
+
+ async def _proc_join(self):
+ loop = global_event_loop()
+ sentinel_reader = loop.create_future()
+ proc = self._proc
+ loop.add_reader(
+ proc.sentinel,
+ lambda: sentinel_reader.done() or sentinel_reader.set_result(None),
+ )
+ try:
+ await sentinel_reader
+ finally:
+ # If multiprocessing.Process supports the close method, then
+ # access to proc.sentinel will raise ValueError if the
+ # sentinel has been closed. In this case it's not safe to call
+ # remove_reader, since the file descriptor may have been closed
+ # and then reallocated to a concurrent coroutine. When the
+ # close method is not supported, proc.sentinel remains open
+ # until proc's finalizer is called.
+ try:
+ loop.remove_reader(proc.sentinel)
+ except ValueError:
+ pass
+
+ # Now that proc.sentinel is ready, poll until process exit
+ # status has become available.
+ while True:
+ proc.join(0)
+ if proc.exitcode is not None:
+ break
+ await asyncio.sleep(self._proc_join_interval, loop=loop)
+
+ def _proc_join_done(self, future):
+ # The join task should never be cancelled, so let it raise
+ # asyncio.CancelledError here if that somehow happens.
+ future.result()
+
+ self.returncode = self._proc.exitcode
+ if hasattr(self._proc, "close"):
+ self._proc.close()
+ self._proc = None
+
+ for waiter in self._exit_waiters:
+ if not waiter.cancelled():
+ waiter.set_result(self.returncode)
+ self._exit_waiters = None
+
+ def terminate(self):
+ """Terminate the process with SIGTERM"""
+ if self._proc is not None:
+ self._proc.terminate()
+
+ def kill(self):
+ """Kill the process with SIGKILL"""
+ if self._proc is not None:
+ self._proc.kill()
+
+
def spawn(
mycommand,
env=None,
opt_name=None,
fd_pipes=None,
returnpid=False,
+ returnproc=False,
uid=None,
gid=None,
groups=None,
@@ -265,8 +498,8 @@ def spawn(
unshare_ipc=False,
unshare_mount=False,
unshare_pid=False,
- cgroup=None,
-):
+ warn_on_large_env=False,
+) -> Union[int, MultiprocessingProcess, list[int]]:
"""
Spawns a given command.
@@ -284,6 +517,9 @@ def spawn(
@param returnpid: Return the Process IDs for a successful spawn.
NOTE: This requires the caller clean up all the PIDs, otherwise spawn will clean them.
@type returnpid: Boolean
+ @param returnproc: Return a MultiprocessingProcess instance (conflicts with logfile parameter).
+ NOTE: This requires the caller to asynchronously wait for the MultiprocessingProcess instance.
+ @type returnproc: Boolean
@param uid: User ID to spawn as; useful for dropping privileges
@type uid: Integer
@param gid: Group ID to spawn as; useful for dropping privileges
@@ -313,19 +549,37 @@ def spawn(
@type unshare_mount: Boolean
@param unshare_pid: If True, PID ns will be unshared from the spawned process
@type unshare_pid: Boolean
- @param cgroup: CGroup path to bind the process to
- @type cgroup: String
logfile requires stdout and stderr to be assigned to this process (i.e. not
pointed somewhere else).
"""
+ if logfile and returnproc:
+ raise ValueError(
+ "logfile parameter conflicts with returnproc (use fd_pipes instead)"
+ )
+
# mycommand is either a str or a list
if isinstance(mycommand, str):
mycommand = mycommand.split()
env = os.environ if env is None else env
+ # Sometimes os.environ can fail to pickle as shown in bug 923750
+ # comment 4, so copy it to a dict.
+ env = env if isinstance(env, dict) else dict(env)
+
+ env_stats = None
+ if warn_on_large_env:
+ env_stats = calc_env_stats(env)
+
+ global env_too_large_warnings
+ if env_stats.env_size > 1024 * 96 and env_too_large_warnings < 3:
+ env_too_large_warnings += 1
+ writemsg_level(
+ f"WARNING: New process environment is large, executing {mycommand} may fail. Size: {env_stats.env_size} bytes. Largest environment variable: {env_stats.env_largest_name} ({env_stats.env_largest_size} bytes)",
+ logging.WARNING,
+ )
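
    calc_env_stats and env_too_large_warnings are defined elsewhere in this
    file and not shown in this hunk; a minimal sketch consistent with the
    fields used above (a sketch only, not the actual implementation) might be:

        from collections import namedtuple

        EnvStats = namedtuple(
            "EnvStats", ("env_size", "env_largest_name", "env_largest_size")
        )

        env_too_large_warnings = 0

        def calc_env_stats(env) -> EnvStats:
            # Approximate what execve() must copy for each variable:
            # "NAME=value" plus the '=' and a trailing NUL.
            env_size = 0
            env_largest_name = None
            env_largest_size = 0
            for name, value in env.items():
                size = len(name) + len(value) + 2
                env_size += size
                if size > env_largest_size:
                    env_largest_name, env_largest_size = name, size
            return EnvStats(env_size, env_largest_name, env_largest_size)
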
# If an absolute path to an executable file isn't given
# search for it unless we've been told not to.
@@ -362,10 +616,10 @@ def spawn(
# Create a tee process, giving it our stdout and stderr
# as well as the read end of the pipe.
- mypids.extend(
+ mypids.append(
spawn(
("tee", "-i", "-a", logfile),
- returnpid=True,
+ returnproc=True,
fd_pipes={0: pr, 1: fd_pipes[1], 2: fd_pipes[2]},
)
)
@@ -410,57 +664,40 @@ def spawn(
# fork, so that the result is cached in the main process.
bool(groups)
- parent_pid = portage.getpid()
- pid = None
- try:
- pid = os.fork()
-
- if pid == 0:
- portage._ForkWatcher.hook(portage._ForkWatcher)
- try:
- _exec(
- binary,
- mycommand,
- opt_name,
- fd_pipes,
- env,
- gid,
- groups,
- uid,
- umask,
- cwd,
- pre_exec,
- close_fds,
- unshare_net,
- unshare_ipc,
- unshare_mount,
- unshare_pid,
- unshare_flags,
- cgroup,
- )
- except SystemExit:
- raise
- except Exception as e:
- # We need to catch _any_ exception so that it doesn't
- # propagate out of this function and cause exiting
- # with anything other than os._exit()
- writemsg("%s:\n %s\n" % (e, " ".join(mycommand)), noiselevel=-1)
- traceback.print_exc()
- sys.stderr.flush()
-
- finally:
- # Don't used portage.getpid() here, due to a race with the above
- # portage._ForkWatcher cache update.
- if pid == 0 or (pid is None and _os.getpid() != parent_pid):
- # Call os._exit() from a finally block in order
- # to suppress any finally blocks from earlier
- # in the call stack (see bug #345289). This
- # finally block has to be setup before the fork
- # in order to avoid a race condition.
- os._exit(1)
-
- if not isinstance(pid, int):
- raise AssertionError("fork returned non-integer: %s" % (repr(pid),))
+ start_func = _start_proc if returnproc or not returnpid else _start_fork
+
+ pid = start_func(
+ _exec_wrapper,
+ args=(
+ binary,
+ mycommand,
+ opt_name,
+ fd_pipes,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ close_fds,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ unshare_flags,
+ env_stats,
+ ),
+ fd_pipes=fd_pipes,
+ close_fds=close_fds,
+ )
+
+ if returnproc:
+ # _start_proc returns a MultiprocessingProcess instance.
+ return pid
+
+ if returnpid and not isinstance(pid, int):
+ raise AssertionError(f"fork returned non-integer: {repr(pid)}")
# Add the pid to our local and the global pid lists.
mypids.append(pid)
@@ -473,36 +710,39 @@ def spawn(
# If the caller wants to handle cleaning up the processes, we tell
# it about all processes that were created.
if returnpid:
+ warnings.warn(
+ "The portage.process.spawn returnpid parameter is deprecated and replaced by returnproc",
+ UserWarning,
+ stacklevel=1,
+ )
return mypids
+ loop = global_event_loop()
+
# Otherwise we clean them up.
while mypids:
-
# Pull the last reader in the pipe chain. If all processes
# in the pipe are well behaved, it will die when the process
# it is reading from dies.
pid = mypids.pop(0)
# and wait for it.
- retval = os.waitpid(pid, 0)[1]
+ retval = loop.run_until_complete(pid.wait())
if retval:
# If it failed, kill off anything else that
# isn't dead yet.
for pid in mypids:
- # With waitpid and WNOHANG, only check the
- # first element of the tuple since the second
- # element may vary (bug #337465).
- if os.waitpid(pid, os.WNOHANG)[0] == 0:
- os.kill(pid, signal.SIGTERM)
- os.waitpid(pid, 0)
-
- # If it got a signal, return the signal that was sent.
- if retval & 0xFF:
- return (retval & 0xFF) << 8
+ waiter = asyncio.ensure_future(pid.wait(), loop)
+ try:
+ loop.run_until_complete(
+ asyncio.wait_for(asyncio.shield(waiter), 0.001)
+ )
+ except (TimeoutError, asyncio.TimeoutError):
+ pid.terminate()
+ loop.run_until_complete(waiter)
- # Otherwise, return its exit code.
- return retval >> 8
+ return retval
# Everything succeeded
return 0
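
    For callers migrating off the deprecated returnpid parameter, a minimal
    usage sketch of the new interface (hypothetical command; names taken
    from this hunk):

        proc = spawn(["true"], returnproc=True)
        exitcode = global_event_loop().run_until_complete(proc.wait())
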
@@ -531,7 +771,7 @@ def _has_ipv6():
# [Errno 99] Cannot assign requested address.
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.bind(("::1", 0))
- except EnvironmentError:
+ except OSError:
__has_ipv6 = False
else:
__has_ipv6 = True
@@ -574,12 +814,77 @@ def _configure_loopback_interface():
rtnl.add_address(ifindex, socket.AF_INET, "10.0.0.1", 8)
if _has_ipv6():
rtnl.add_address(ifindex, socket.AF_INET6, "fd::1", 8)
- except EnvironmentError as e:
+ except OSError as e:
writemsg(
- "Unable to configure loopback interface: %s\n" % e.strerror, noiselevel=-1
+ f"Unable to configure loopback interface: {e.strerror}\n", noiselevel=-1
)
+def _exec_wrapper(
+ binary,
+ mycommand,
+ opt_name,
+ fd_pipes,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ close_fds,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ unshare_flags,
+ env_stats,
+):
+ """
+ Calls _exec with the given args and handles any raised Exception.
+ The intention is for _exec_wrapper and _exec to be reusable with
+ other process cloning implementations besides _start_fork.
+ """
+ try:
+ _exec(
+ binary,
+ mycommand,
+ opt_name,
+ fd_pipes,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ close_fds,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ unshare_flags,
+ )
+ except Exception as e:
+ if isinstance(e, OSError) and e.errno == errno.E2BIG:
+ # If exec() failed with E2BIG, then this is
+ # potentially because the environment variables
+ # grew too large. The following will gather some
+ # stats about the environment and print a
+ # diagnostic message to help identify the
+ # culprit. See also
+ # - https://bugs.gentoo.org/721088
+ # - https://bugs.gentoo.org/830187
+ if not env_stats:
+ env_stats = calc_env_stats(env)
+
+ writemsg(
+ f"ERROR: Executing {mycommand} failed with E2BIG. Child process environment size: {env_stats.env_size} bytes. Largest environment variable: {env_stats.env_largest_name} ({env_stats.env_largest_size} bytes)\n"
+ )
+ writemsg(f"{e}:\n {' '.join(mycommand)}\n", noiselevel=-1)
+ raise
+
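
    The E2BIG ceiling comes from the kernel's combined argv-plus-environment
    limit; a quick way to inspect the bound on Linux (a rough upper bound
    only; per-string limits can be stricter):

        import os

        # SC_ARG_MAX bounds the total size of argv plus the environment
        # passed to execve(); exceeding it yields E2BIG.
        print(os.sysconf("SC_ARG_MAX"))
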
+
def _exec(
binary,
mycommand,
@@ -598,7 +903,6 @@ def _exec(
unshare_mount,
unshare_pid,
unshare_flags,
- cgroup,
):
"""
Execute a given binary with options
@@ -636,8 +940,6 @@ def _exec(
@type unshare_pid: Boolean
@param unshare_flags: Flags for the unshare(2) function
@type unshare_flags: Integer
- @param cgroup: CGroup path to bind the process to
- @type cgroup: String
@rtype: None
@return: Never returns (calls os.execve)
"""
@@ -683,167 +985,202 @@ def _exec(
# the parent process (see bug #289486).
signal.signal(signal.SIGQUIT, signal.SIG_DFL)
- _setup_pipes(fd_pipes, close_fds=close_fds, inheritable=True)
+ # Unshare (while still uid==0)
+ have_unshare = False
+ libc = None
+ if unshare_net or unshare_ipc or unshare_mount or unshare_pid:
+ (libc, _) = load_libc()
+ if libc is not None:
+ have_unshare = hasattr(libc, "unshare")
- # Add to cgroup
- # it's better to do it from the child since we can guarantee
- # it is done before we start forking children
- if cgroup:
- with open(os.path.join(cgroup, "cgroup.procs"), "a") as f:
- f.write("%d\n" % portage.getpid())
+ if not have_unshare:
+ # unshare() may not be supported by libc
+ unshare_net = False
+ unshare_ipc = False
+ unshare_mount = False
+ unshare_pid = False
- # Unshare (while still uid==0)
if unshare_net or unshare_ipc or unshare_mount or unshare_pid:
- filename = find_library("c")
- if filename is not None:
- libc = LoadLibrary(filename)
- if libc is not None:
- try:
- # Since a failed unshare call could corrupt process
- # state, first validate that the call can succeed.
- # The parent process should call _unshare_validate
- # before it forks, so that all child processes can
- # reuse _unshare_validate results that have been
- # cached by the parent process.
- errno_value = _unshare_validate(unshare_flags)
- if errno_value == 0 and libc.unshare(unshare_flags) != 0:
- errno_value = ctypes.get_errno()
- if errno_value != 0:
-
- involved_features = []
- if unshare_ipc:
- involved_features.append("ipc-sandbox")
- if unshare_mount:
- involved_features.append("mount-sandbox")
- if unshare_net:
- involved_features.append("network-sandbox")
- if unshare_pid:
- involved_features.append("pid-sandbox")
-
- writemsg(
- 'Unable to unshare: %s (for FEATURES="%s")\n'
- % (
- errno.errorcode.get(errno_value, "?"),
- " ".join(involved_features),
- ),
- noiselevel=-1,
- )
- else:
- if unshare_pid:
- main_child_pid = os.fork()
- if main_child_pid == 0:
- # The portage.getpid() cache may need to be updated here,
- # in case the pre_exec function invokes portage APIs.
- portage._ForkWatcher.hook(portage._ForkWatcher)
- # pid namespace requires us to become init
- binary, myargs = (
- portage._python_interpreter,
- [
- portage._python_interpreter,
- os.path.join(portage._bin_path, "pid-ns-init"),
- _unicode_encode(
- "" if uid is None else str(uid)
- ),
- _unicode_encode(
- "" if gid is None else str(gid)
- ),
- _unicode_encode(
- ""
- if groups is None
- else ",".join(
- str(group) for group in groups
- )
- ),
- _unicode_encode(
- "" if umask is None else str(umask)
- ),
- _unicode_encode(
- ",".join(str(fd) for fd in fd_pipes)
- ),
- binary,
- ]
- + myargs,
- )
- uid = None
- gid = None
- groups = None
- umask = None
- else:
- # Execute a supervisor process which will forward
- # signals to init and forward exit status to the
- # parent process. The supervisor process runs in
- # the global pid namespace, so skip /proc remount
- # and other setup that's intended only for the
- # init process.
- binary, myargs = portage._python_interpreter, [
- portage._python_interpreter,
- os.path.join(portage._bin_path, "pid-ns-init"),
- str(main_child_pid),
- ]
-
- os.execve(binary, myargs, env)
-
- if unshare_mount:
- # mark the whole filesystem as slave to avoid
- # mounts escaping the namespace
- s = subprocess.Popen(["mount", "--make-rslave", "/"])
- mount_ret = s.wait()
- if mount_ret != 0:
- # TODO: should it be fatal maybe?
- writemsg(
- "Unable to mark mounts slave: %d\n" % (mount_ret,),
- noiselevel=-1,
- )
- if unshare_pid:
- # we need at least /proc being slave
- s = subprocess.Popen(["mount", "--make-slave", "/proc"])
- mount_ret = s.wait()
- if mount_ret != 0:
- # can't proceed with shared /proc
- writemsg(
- "Unable to mark /proc slave: %d\n" % (mount_ret,),
- noiselevel=-1,
- )
- os._exit(1)
- # mount new /proc for our namespace
- s = subprocess.Popen(
- ["mount", "-n", "-t", "proc", "proc", "/proc"]
- )
- mount_ret = s.wait()
- if mount_ret != 0:
- writemsg(
- "Unable to mount new /proc: %d\n" % (mount_ret,),
- noiselevel=-1,
- )
- os._exit(1)
- if unshare_net:
- # use 'localhost' to avoid hostname resolution problems
- try:
- # pypy3 does not implement socket.sethostname()
- new_hostname = b"localhost"
- if hasattr(socket, "sethostname"):
- socket.sethostname(new_hostname)
- else:
- if (
- libc.sethostname(
- new_hostname, len(new_hostname)
- )
- != 0
- ):
- errno_value = ctypes.get_errno()
- raise OSError(
- errno_value, os.strerror(errno_value)
- )
- except Exception as e:
- writemsg(
- 'Unable to set hostname: %s (for FEATURES="network-sandbox")\n'
- % (e,),
- noiselevel=-1,
- )
- _configure_loopback_interface()
- except AttributeError:
- # unshare() not supported by libc
- pass
+ # Since a failed unshare call could corrupt process
+ # state, first validate that the call can succeed.
+ # The parent process should call _unshare_validate
+ # before it forks, so that all child processes can
+ # reuse _unshare_validate results that have been
+ # cached by the parent process.
+ errno_value = _unshare_validate(unshare_flags)
+ if errno_value == 0 and libc.unshare(unshare_flags) != 0:
+ errno_value = ctypes.get_errno()
+ if errno_value != 0:
+ involved_features = []
+ if unshare_ipc:
+ involved_features.append("ipc-sandbox")
+ if unshare_mount:
+ involved_features.append("mount-sandbox")
+ if unshare_net:
+ involved_features.append("network-sandbox")
+ if unshare_pid:
+ involved_features.append("pid-sandbox")
+
+ writemsg(
+ 'Unable to unshare: %s (for FEATURES="%s")\n'
+ % (
+ errno.errorcode.get(errno_value, "?"),
+ " ".join(involved_features),
+ ),
+ noiselevel=-1,
+ )
+
+ unshare_net = False
+ unshare_ipc = False
+ unshare_mount = False
+ unshare_pid = False
+
+ if unshare_pid:
+ # pid namespace requires us to become init
+ binary, myargs = (
+ portage._python_interpreter,
+ [
+ portage._python_interpreter,
+ os.path.join(portage._bin_path, "pid-ns-init"),
+ _unicode_encode("" if uid is None else str(uid)),
+ _unicode_encode("" if gid is None else str(gid)),
+ _unicode_encode(
+ "" if groups is None else ",".join(str(group) for group in groups)
+ ),
+ _unicode_encode("" if umask is None else str(umask)),
+ _unicode_encode(",".join(str(fd) for fd in fd_pipes)),
+ binary,
+ ]
+ + myargs,
+ )
+ uid = None
+ gid = None
+ groups = None
+ umask = None
+
+ # Use _start_fork for os.fork() error handling, ensuring
+ # that if exec fails then the child process will display
+ # a traceback before it exits via os._exit to suppress any
+ # finally blocks from parent's call stack (bug 345289).
+ main_child_pid = _start_fork(
+ _exec2,
+ args=(
+ binary,
+ myargs,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ libc,
+ ),
+ fd_pipes=None,
+ close_fds=False,
+ )
+
+ # Execute a supervisor process which will forward
+ # signals to init and forward exit status to the
+ # parent process. The supervisor process runs in
+ # the global pid namespace, so skip /proc remount
+ # and other setup that's intended only for the
+ # init process.
+ binary, myargs = portage._python_interpreter, [
+ portage._python_interpreter,
+ os.path.join(portage._bin_path, "pid-ns-init"),
+ str(main_child_pid),
+ ]
+
+ os.execve(binary, myargs, env)
+
+ # Reachable only if unshare_pid is False.
+ _exec2(
+ binary,
+ myargs,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ libc,
+ )
+
+
+def _exec2(
+ binary,
+ myargs,
+ env,
+ gid,
+ groups,
+ uid,
+ umask,
+ cwd,
+ pre_exec,
+ unshare_net,
+ unshare_ipc,
+ unshare_mount,
+ unshare_pid,
+ libc,
+):
+ if unshare_mount:
+ # mark the whole filesystem as slave to avoid
+ # mounts escaping the namespace
+ s = subprocess.Popen(["mount", "--make-rslave", "/"])
+ mount_ret = s.wait()
+ if mount_ret != 0:
+ # TODO: should it be fatal maybe?
+ writemsg(
+ "Unable to mark mounts slave: %d\n" % (mount_ret,),
+ noiselevel=-1,
+ )
+ if unshare_pid:
+ # we need at least /proc being slave
+ s = subprocess.Popen(["mount", "--make-slave", "/proc"])
+ mount_ret = s.wait()
+ if mount_ret != 0:
+ # can't proceed with shared /proc
+ writemsg(
+ "Unable to mark /proc slave: %d\n" % (mount_ret,),
+ noiselevel=-1,
+ )
+ os._exit(1)
+ # mount new /proc for our namespace
+ s = subprocess.Popen(["mount", "-n", "-t", "proc", "proc", "/proc"])
+ mount_ret = s.wait()
+ if mount_ret != 0:
+ writemsg(
+ "Unable to mount new /proc: %d\n" % (mount_ret,),
+ noiselevel=-1,
+ )
+ os._exit(1)
+ if unshare_net:
+ # use 'localhost' to avoid hostname resolution problems
+ try:
+ # pypy3 does not implement socket.sethostname()
+ new_hostname = b"localhost"
+ if hasattr(socket, "sethostname"):
+ socket.sethostname(new_hostname)
+ else:
+ if libc.sethostname(new_hostname, len(new_hostname)) != 0:
+ errno_value = ctypes.get_errno()
+ raise OSError(errno_value, os.strerror(errno_value))
+ except Exception as e:
+ writemsg(
+ f'Unable to set hostname: {e} (for FEATURES="network-sandbox")\n',
+ noiselevel=-1,
+ )
+ _configure_loopback_interface()
# Set requested process permissions.
if gid:
@@ -900,11 +1237,9 @@ class _unshare_validator:
@rtype: int
@returns: errno value, or 0 if no error occurred.
"""
- filename = find_library("c")
- if filename is None:
- return errno.ENOTSUP
-
- libc = LoadLibrary(filename)
+ # This ctypes library lookup caches the result for use in the
+ # subprocess when the multiprocessing start method is fork.
+ (libc, filename) = load_libc()
if libc is None:
return errno.ENOTSUP
@@ -912,7 +1247,7 @@ class _unshare_validator:
proc = multiprocessing.Process(
target=cls._run_subproc,
- args=(subproc_pipe, cls._validate_subproc, (libc.unshare, flags)),
+ args=(subproc_pipe, cls._validate_subproc, (filename, flags)),
)
proc.start()
subproc_pipe.close()
@@ -941,7 +1276,7 @@ class _unshare_validator:
subproc_pipe.close()
@staticmethod
- def _validate_subproc(unshare, flags):
+ def _validate_subproc(filename, flags):
"""
Perform validation. Calls to this method must be isolated in a
subprocess, since the unshare function is called for purposes of
@@ -954,7 +1289,10 @@ class _unshare_validator:
@rtype: int
@returns: errno value, or 0 if no error occurred.
"""
- return 0 if unshare(flags) == 0 else ctypes.get_errno()
+ # Since ctypes objects are not picklable for the multiprocessing
+ # spawn start method, acquire them here.
+ libc = LoadLibrary(filename)
+ return 0 if libc.unshare(flags) == 0 else ctypes.get_errno()
_unshare_validate = _unshare_validator()
@@ -996,7 +1334,7 @@ def _setup_pipes(fd_pipes, close_fds=True, inheritable=None):
actually does nothing in this case), which avoids possible
interference.
"""
-
+ fd_pipes = {} if fd_pipes is None else fd_pipes
reverse_map = {}
# To protect from cases where direct assignment could
# clobber needed fds ({1:2, 2:1}) we create a reverse map
@@ -1014,7 +1352,6 @@ def _setup_pipes(fd_pipes, close_fds=True, inheritable=None):
# explicitly requested for it to remain open by adding
# it to the keys of fd_pipes.
while reverse_map:
-
oldfd, newfds = reverse_map.popitem()
old_fdflags = None
@@ -1040,7 +1377,6 @@ def _setup_pipes(fd_pipes, close_fds=True, inheritable=None):
fcntl.fcntl(newfd, fcntl.F_SETFD, old_fdflags)
if _set_inheritable is not None:
-
inheritable_state = None
if not (old_fdflags is None or _FD_CLOEXEC is None):
inheritable_state = not bool(old_fdflags & _FD_CLOEXEC)
@@ -1072,6 +1408,155 @@ def _setup_pipes(fd_pipes, close_fds=True, inheritable=None):
pass
+def _start_fork(
+ target: Callable[..., None],
+ args: Optional[tuple[Any, ...]] = (),
+ kwargs: Optional[dict[str, Any]] = {},
+ fd_pipes: Optional[dict[int, int]] = None,
+ close_fds: Optional[bool] = True,
+) -> int:
+ """
+ Execute the target function in a fork. The fd_pipes and
+ close_fds parameters are handled in the fork, before the target
+ function is called. The args and kwargs parameters are passed
+ as positional and keyword arguments for the target function.
+
+ The target, args, and kwargs parameters are intended to
+ be equivalent to the corresponding multiprocessing.Process
+ constructor parameters.
+
+ Ultimately, the intention is for spawn to support other
+ process cloning implementations besides _start_fork, since
+ fork is unsafe for threaded processes as discussed in
+ https://github.com/python/cpython/issues/84559.
+ """
+ parent_pid = portage.getpid()
+ pid = None
+ try:
+ pid = os.fork()
+
+ if pid == 0:
+ try:
+ _setup_pipes(fd_pipes, close_fds=close_fds, inheritable=True)
+ target(*args, **kwargs)
+ except Exception:
+ # We need to catch _any_ exception and display it since the child
+ # process must unconditionally exit via os._exit() if exec fails.
+ traceback.print_exc()
+ sys.stderr.flush()
+ finally:
+ # Don't use portage.getpid() here, in case there is a race
+ # with getpid cache invalidation via the _ForkWatcher hook.
+ if pid == 0 or (pid is None and _os.getpid() != parent_pid):
+ # Call os._exit() from a finally block in order
+ # to suppress any finally blocks from earlier
+ # in the call stack (see bug #345289). This
+ # finally block has to be setup before the fork
+ # in order to avoid a race condition.
+ os._exit(1)
+ return pid
+
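
    A usage sketch for _start_fork with a target that execs (hypothetical
    command; mirrors how spawn wires it up above):

        pid = _start_fork(
            os.execvp,                    # replaces the child image on success
            args=("true", ["true"]),      # hypothetical command
            fd_pipes={0: 0, 1: 1, 2: 2},  # keep the standard streams
        )
        os.waitpid(pid, 0)
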
+
+class _chain_pre_exec_fns:
+ """
+ Wraps a target function to call pre_exec functions just before
+ the original target function.
+ """
+
+ def __init__(self, target, *args):
+ self._target = target
+ self._pre_exec_fns = args
+
+ def __call__(self, *args, **kwargs):
+ for pre_exec in self._pre_exec_fns:
+ pre_exec()
+ return self._target(*args, **kwargs)
+
+
+def _setup_pipes_after_fork(fd_pipes):
+ for fd in set(fd_pipes.values()):
+ os.set_inheritable(fd, True)
+ _setup_pipes(fd_pipes, close_fds=False, inheritable=True)
+
+
+def _start_proc(
+ target: Callable[..., None],
+ args: Optional[tuple[Any, ...]] = (),
+ kwargs: Optional[dict[str, Any]] = {},
+ fd_pipes: Optional[dict[int, int]] = None,
+ close_fds: Optional[bool] = False,
+) -> MultiprocessingProcess:
+ """
+ Execute the target function using multiprocessing.Process.
+ If the close_fds parameter is True then NotImplementedError
+ is raised, since it is risky to forcefully close file
+ descriptors that have references (bug 374335), and PEP 446
+ should ensure that any relevant file descriptors are
+ non-inheritable and therefore automatically closed on exec.
+ """
+ if close_fds:
+ raise NotImplementedError(
+ "close_fds is not supported (since file descriptors are non-inheritable by default for exec)"
+ )
+
+ # Manage fd_pipes inheritance for spawn/exec (bug 923755),
+ # which ForkProcess does not handle because its target
+ # function does not necessarily exec.
+ if fd_pipes and multiprocessing.get_start_method() == "fork":
+ target = _chain_pre_exec_fns(target, partial(_setup_pipes_after_fork, fd_pipes))
+ fd_pipes = None
+
+ proc = ForkProcess(
+ scheduler=global_event_loop(),
+ target=target,
+ args=args,
+ kwargs=kwargs,
+ fd_pipes=fd_pipes,
+ create_pipe=False, # Pipe creation is delegated to the caller (see bug 923750).
+ )
+ proc.start()
+
+ # ForkProcess conveniently holds a MultiprocessingProcess
+ # instance that is suitable to return here, but use _GCProtector
+ # to protect the ForkProcess instance from being garbage collected
+ # and triggering messages like this (bug 925456):
+ # [ERROR] Task was destroyed but it is pending!
+ return _GCProtector(proc._proc, proc.async_wait)
+
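
    A minimal sketch of calling _start_proc directly (the returned proxy
    behaves like the MultiprocessingProcess wrapper above; assumes the
    target is picklable when the start method is "spawn"):

        proc = _start_proc(print, args=("hello from the child",))
        exitcode = global_event_loop().run_until_complete(proc.wait())
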
+
+class _GCProtector(ObjectProxy):
+ """
+ Proxy a target object, and also hold a reference to something
+ extra in order to protect it from garbage collection. Override
+ the wait method to first call target's wait method and then
+ wait for extra (a coroutine function) before returning the result.
+ """
+
+ __slots__ = ("_extra", "_target")
+
+ def __init__(self, target, extra):
+ super().__init__()
+ object.__setattr__(self, "_target", target)
+ object.__setattr__(self, "_extra", extra)
+
+ def _get_target(self):
+ return object.__getattribute__(self, "_target")
+
+ def __getattribute__(self, attr):
+ if attr == "wait":
+ return object.__getattribute__(self, attr)
+ return getattr(object.__getattribute__(self, "_target"), attr)
+
+ async def wait(self):
+ """
+ Wrap the target's wait method to also wait for an extra
+ coroutine function.
+ """
+ result = await object.__getattribute__(self, "_target").wait()
+ await object.__getattribute__(self, "_extra")()
+ return result
+
+
def find_binary(binary):
"""
Given a binary name, find the binary in PATH
diff --git a/lib/portage/proxy/lazyimport.py b/lib/portage/proxy/lazyimport.py
index c04251f29..c6a934ae9 100644
--- a/lib/portage/proxy/lazyimport.py
+++ b/lib/portage/proxy/lazyimport.py
@@ -1,15 +1,11 @@
-# Copyright 2009-2020 Gentoo Authors
+# Copyright 2009-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["lazyimport"]
import sys
import types
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
+import threading
from portage.proxy.objectproxy import ObjectProxy
@@ -89,7 +85,6 @@ def _unregister_module_proxy(name):
class _LazyImport(ObjectProxy):
-
__slots__ = ("_scope", "_alias", "_name", "_target")
def __init__(self, scope, alias, name):
@@ -116,7 +111,6 @@ class _LazyImport(ObjectProxy):
class _LazyImportFrom(_LazyImport):
-
__slots__ = ("_attr_name",)
def __init__(self, scope, name, attr_name, alias):
@@ -136,7 +130,7 @@ class _LazyImportFrom(_LazyImport):
except AttributeError:
# Try to import it as a submodule
try:
- __import__("%s.%s" % (name, attr_name))
+ __import__(f"{name}.{attr_name}")
except ImportError:
pass
# If it's a submodule, this will succeed. Otherwise, it may
diff --git a/lib/portage/proxy/meson.build b/lib/portage/proxy/meson.build
new file mode 100644
index 000000000..0dae6ed2f
--- /dev/null
+++ b/lib/portage/proxy/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'lazyimport.py',
+ 'objectproxy.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/proxy',
+ pure : not native_extensions
+)
diff --git a/lib/portage/proxy/objectproxy.py b/lib/portage/proxy/objectproxy.py
index 7cdc6f68d..f36464e19 100644
--- a/lib/portage/proxy/objectproxy.py
+++ b/lib/portage/proxy/objectproxy.py
@@ -6,7 +6,6 @@ __all__ = ["ObjectProxy"]
class ObjectProxy:
-
"""
Object that acts as a proxy to another object, forwarding
attribute accesses and method calls. This can be useful
diff --git a/lib/portage/repository/config.py b/lib/portage/repository/config.py
index 0b591d94f..a5b904dc6 100644
--- a/lib/portage/repository/config.py
+++ b/lib/portage/repository/config.py
@@ -9,6 +9,7 @@ import re
import typing
import portage
+from pathlib import Path
from portage import eclass_cache, os
from portage.checksum import get_valid_checksum_keys
from portage.const import PORTAGE_BASE_PATH, REPO_NAME_LOC, USER_CONFIG_PATH
@@ -160,6 +161,7 @@ class RepoConfig:
"thin_manifest",
"update_changelog",
"user_location",
+ "volatile",
"_eapis_banned",
"_eapis_deprecated",
"_masters_orig",
@@ -330,10 +332,45 @@ class RepoConfig:
self.name = name
if portage._sync_mode:
missing = False
-
elif name == "DEFAULT":
missing = False
+ volatile = repo_opts.get("volatile")
+ # If volatile is explicitly set, go with it.
+ if volatile is not None:
+ self.volatile = volatile in ("true", "yes")
+ else:
+ # If it's unset, we default to no (i.e. the repository is not volatile),
+ # but with a heuristic for cases where treating the repository as
+ # non-volatile is unlikely to be safe (i.e. it is likely to contain
+ # custom user changes).
+ try:
+ # If the repository doesn't exist, we can't check its ownership,
+ # so err on the safe side.
+ if missing or not self.location:
+ self.volatile = True
+ # On Prefix, you can't rely on ownership as a proxy for
+ # user-owned because the user typically owns everything.
+ # But we can't tell whether we're on Prefix here, so use
+ # whether we're under /var/db/repos instead.
+ elif not self.location.startswith("/var/db/repos"):
+ self.volatile = True
+ # If the owner of the repository isn't root or Portage, it's
+ # an indication the user may expect to be able to safely make
+ # changes in the directory, so default to volatile.
+ elif Path(self.location).owner() not in ("root", "portage"):
+ self.volatile = True
+ else:
+ self.volatile = False
+ except Exception:
+ # There's too many conditions here to refine the exception list:
+ # - We lack permissions to poke at the directory (PermissionError)
+ # - Its UID doesn't actually exist and the repository
+ # won't be synced by the user (KeyError).
+ # - The directory doesn't exist (FileNotFoundError)
+ # - Probably many others.
+ # So, just fail safe.
+ self.volatile = True
+
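
    Users who want to opt out of this heuristic can set the key explicitly;
    a hypothetical repos.conf stanza (paths and names illustrative only):

        [localrepo]
        location = /home/user/localrepo
        sync-type = git
        volatile = true
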
self.eapi = None
self.missing_repo_name = missing
# sign_commit is disabled by default, since it requires Git >=1.7.9,
@@ -531,16 +568,15 @@ class RepoConfig:
repo_name_path = os.path.join(repo_path, REPO_NAME_LOC)
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
repo_name_path, encoding=_encodings["fs"], errors="strict"
),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
return f.readline().strip(), False
- except EnvironmentError:
+ except OSError:
return "x-" + os.path.basename(repo_path), True
finally:
if f is not None:
@@ -548,7 +584,7 @@ class RepoConfig:
def info_string(self):
"""
- Returns a formatted string containing informations about the repository.
+ Returns a formatted string containing information about the repository.
Used by emerge --info.
"""
indent = " " * 4
@@ -582,6 +618,8 @@ class RepoConfig:
repo_msg.append(
indent + "eclass-overrides: " + " ".join(self.eclass_overrides)
)
+ if self.volatile is not None:
+ repo_msg.append(indent + "volatile: " + str(self.volatile))
for o, v in self.module_specific_options.items():
if v is not None:
repo_msg.append(indent + o + ": " + v)
@@ -589,16 +627,18 @@ class RepoConfig:
return "\n".join(repo_msg)
def __repr__(self):
- return "<portage.repository.config.RepoConfig(name=%r, location=%r)>" % (
- self.name,
- _unicode_decode(self.location),
+ return (
+ "<portage.repository.config.RepoConfig(name={!r}, location={!r})>".format(
+ self.name,
+ _unicode_decode(self.location),
+ )
)
def __str__(self):
d = {}
for k in self.__slots__:
d[k] = getattr(self, k, None)
- return "%s" % (d,)
+ return f"{d}"
class RepoConfigLoader:
@@ -689,8 +729,16 @@ class RepoConfigLoader:
"sync_umask",
"sync_uri",
"sync_user",
+ "volatile",
):
v = getattr(repos_conf_opts, k, None)
+
+ # If PORTDIR_OVERLAY is set, we have to require volatile,
+ # because it'll break changes e.g. with ebuild(1) or
+ # development in a local repository with the same repo_name.
+ if k == "volatile" and portdir_overlay:
+ v = True
+
if v is not None:
setattr(repo, k, v)
@@ -721,7 +769,6 @@ class RepoConfigLoader:
prepos[repo.name] = repo
else:
-
if not portage._sync_mode:
writemsg(
_("!!! Invalid PORTDIR_OVERLAY (not a dir): '%s'\n") % ov,
@@ -1143,7 +1190,7 @@ class RepoConfigLoader:
def _check_locations(self):
"""Check if repositories location are correct and show a warning message if not"""
- for (name, r) in self.prepos.items():
+ for name, r in self.prepos.items():
if name != "DEFAULT":
if r.location is None:
writemsg(
@@ -1215,6 +1262,7 @@ class RepoConfigLoader:
"sync_allow_hardlinks",
"sync_openpgp_key_refresh",
"sync_rcu",
+ "volatile",
)
str_or_int_keys = (
"auto_sync",
@@ -1252,33 +1300,33 @@ class RepoConfigLoader:
):
if repo_name != repo.name:
continue
- config_string += "\n[%s]\n" % repo_name
+ config_string += f"\n[{repo_name}]\n"
for key in sorted(keys):
if key == "main_repo" and repo_name != "DEFAULT":
continue
if getattr(repo, key) is not None:
if key in bool_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
"true" if getattr(repo, key) else "false",
)
elif key in str_or_int_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
getattr(repo, key),
)
elif key in str_tuple_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
" ".join(getattr(repo, key)),
)
elif key in repo_config_tuple_keys:
- config_string += "%s = %s\n" % (
+ config_string += "{} = {}\n".format(
key.replace("_", "-"),
" ".join(x.name for x in getattr(repo, key)),
)
for o, v in repo.module_specific_options.items():
- config_string += "%s = %s\n" % (o, v)
+ config_string += f"{o} = {v}\n"
return config_string.lstrip("\n")
@@ -1331,7 +1379,7 @@ def parse_layout_conf(repo_location, repo_name=None):
data = {}
- # None indicates abscence of a masters setting, which later code uses
+ # None indicates absence of a masters setting, which later code uses
# to trigger a backward compatibility fallback that sets an implicit
# master. In order to avoid this fallback behavior, layout.conf can
# explicitly set masters to an empty value, which will result in an
diff --git a/lib/portage/repository/meson.build b/lib/portage/repository/meson.build
new file mode 100644
index 000000000..ecc71a6fa
--- /dev/null
+++ b/lib/portage/repository/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'config.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/repository',
+ pure : not native_extensions
+)
+
+subdir('storage')
diff --git a/lib/portage/repository/storage/hardlink_quarantine.py b/lib/portage/repository/storage/hardlink_quarantine.py
index ad9e64bcc..3fb1396fa 100644
--- a/lib/portage/repository/storage/hardlink_quarantine.py
+++ b/lib/portage/repository/storage/hardlink_quarantine.py
@@ -39,7 +39,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
"""
Run cmd and raise RepoStorageException on failure.
- @param cmd: command to executre
+ @param cmd: command to execute
@type cmd: list
"""
p = SpawnProcess(
@@ -48,7 +48,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
p.start()
if await p.async_wait() != os.EX_OK:
raise RepoStorageException(
- "command exited with status {}: {}".format(p.returncode, " ".join(cmd))
+ f"command exited with status {p.returncode}: {' '.join(cmd)}"
)
async def init_update(self):
@@ -70,7 +70,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
"--exclude=/lost+found",
"--exclude=/packages",
"--exclude",
- "/{}".format(os.path.basename(update_location)),
+ f"/{os.path.basename(update_location)}",
self._user_location + "/",
update_location + "/",
]
@@ -99,7 +99,7 @@ class HardlinkQuarantineRepoStorage(RepoStorageInterface):
"--exclude=/lost+found",
"--exclude=/packages",
"--exclude",
- "/{}".format(os.path.basename(update_location)),
+ f"/{os.path.basename(update_location)}",
update_location + "/",
self._user_location + "/",
]
diff --git a/lib/portage/repository/storage/hardlink_rcu.py b/lib/portage/repository/storage/hardlink_rcu.py
index 4fd87a24b..6f464c8d0 100644
--- a/lib/portage/repository/storage/hardlink_rcu.py
+++ b/lib/portage/repository/storage/hardlink_rcu.py
@@ -19,13 +19,13 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
Enable read-copy-update (RCU) behavior for sync operations. The
current latest immutable version of a repository will be
referenced by a symlink found where the repository would normally
- be located. Repository consumers should resolve the cannonical
+ be located. Repository consumers should resolve the canonical
path of this symlink before attempting to access the repository,
and all operations should be read-only, since the repository
is considered immutable. Updates occur by atomic replacement
of the symlink, which causes new consumers to use the new
immutable version, while any earlier consumers continue to use
- the cannonical path that was resolved earlier.
+ the canonical path that was resolved earlier.
Performance is better than HardlinkQuarantineRepoStorage,
since commit involves atomic replacement of a symlink. Since
@@ -112,7 +112,7 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
"""
Run cmd and raise RepoStorageException on failure.
- @param cmd: command to executre
+ @param cmd: command to execute
@type cmd: list
@param privileged: run with maximum privileges
@type privileged: bool
@@ -125,7 +125,7 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
p.start()
if await p.async_wait() != os.EX_OK:
raise RepoStorageException(
- "command exited with status {}: {}".format(p.returncode, " ".join(cmd))
+ f"command exited with status {p.returncode}: {' '.join(cmd)}"
)
async def init_update(self):
@@ -216,7 +216,7 @@ class HardlinkRcuRepoStorage(RepoStorageInterface):
os.unlink(new_symlink)
except OSError:
pass
- os.symlink("snapshots/{}".format(new_id), new_symlink)
+ os.symlink(f"snapshots/{new_id}", new_symlink)
# If SyncManager.pre_sync creates an empty directory where
# self._latest_symlink is supposed to be (which is normal if
diff --git a/lib/portage/repository/storage/meson.build b/lib/portage/repository/storage/meson.build
new file mode 100644
index 000000000..1845f9f51
--- /dev/null
+++ b/lib/portage/repository/storage/meson.build
@@ -0,0 +1,11 @@
+py.install_sources(
+ [
+ 'hardlink_quarantine.py',
+ 'hardlink_rcu.py',
+ 'inplace.py',
+ 'interface.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/repository/storage',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/controller.py b/lib/portage/sync/controller.py
index 987aa5481..da593e1a8 100644
--- a/lib/portage/sync/controller.py
+++ b/lib/portage/sync/controller.py
@@ -89,13 +89,20 @@ class SyncManager:
# files have sane permissions.
os.umask(0o22)
- self.module_controller = portage.sync.module_controller
- self.module_names = self.module_controller.module_names
self.hooks = {}
for _dir in ["repo.postsync.d", "postsync.d"]:
hooks = get_hooks_from_dir(_dir, prefix=self.settings["PORTAGE_CONFIGROOT"])
self.hooks[_dir] = hooks
+ @property
+ def module_controller(self):
+ # Not stored as local attribute because it's not picklable.
+ return portage.sync.module_controller
+
+ @property
+ def module_names(self):
+ return self.module_controller.module_names
+
def __getattr__(self, name):
if name == "async":
warnings.warn(
@@ -137,7 +144,7 @@ class SyncManager:
if repo.sync_type in self.module_names:
tasks = [self.module_controller.get_class(repo.sync_type)]
else:
- msg = "\n%s: Sync module '%s' is not an installed/known type'\n" % (
+ msg = "\n{}: Sync module '{}' is not an installed/known type'\n".format(
bad("ERROR"),
repo.sync_type,
)
@@ -176,7 +183,7 @@ class SyncManager:
self.exitcode = exitcode
self.updatecache_flg = updatecache_flg
if exitcode == 0:
- msg = "=== Sync completed for %s" % self.repo.name
+ msg = f"=== Sync completed for {self.repo.name}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n")
if self.callback:
@@ -190,7 +197,7 @@ class SyncManager:
_hooks = self.hooks["postsync.d"]
for filepath in _hooks:
writemsg_level(
- "Spawning post_sync hook: %s\n" % (_unicode_decode(_hooks[filepath])),
+ f"Spawning post_sync hook: {_unicode_decode(_hooks[filepath])}\n",
level=logging.ERROR,
noiselevel=4,
)
@@ -212,7 +219,7 @@ class SyncManager:
return succeeded
def pre_sync(self, repo):
- msg = ">>> Syncing repository '%s' into '%s'..." % (repo.name, repo.location)
+ msg = f">>> Syncing repository '{repo.name}' into '{repo.location}'..."
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n")
try:
@@ -248,7 +255,7 @@ class SyncManager:
pw = pwd.getpwuid(int(username))
except (ValueError, KeyError):
writemsg(
- "!!! User '%s' invalid or does not exist\n" % username,
+ f"!!! User '{username}' invalid or does not exist\n",
noiselevel=-1,
)
return (logname, user, group, home)
@@ -266,7 +273,7 @@ class SyncManager:
pw = grp.getgrgid(int(groupname))
except (ValueError, KeyError):
writemsg(
- "!!! Group '%s' invalid or does not exist\n" % groupname,
+ f"!!! Group '{groupname}' invalid or does not exist\n",
noiselevel=-1,
)
return (logname, user, group, home)
@@ -364,7 +371,6 @@ class SyncManager:
if updatecache_flg and os.path.exists(
os.path.join(repo.location, "metadata", "md5-cache")
):
-
# Only update cache for repo.location since that's
# the only one that's been synced here.
action_metadata(
diff --git a/lib/portage/sync/meson.build b/lib/portage/sync/meson.build
new file mode 100644
index 000000000..a39f1e3cf
--- /dev/null
+++ b/lib/portage/sync/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ 'config_checks.py',
+ 'controller.py',
+ 'getaddrinfo_validate.py',
+ 'old_tree_timestamp.py',
+ 'syncbase.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync',
+ pure : not native_extensions
+)
+
+subdir('modules')
diff --git a/lib/portage/sync/modules/cvs/cvs.py b/lib/portage/sync/modules/cvs/cvs.py
index 722f54ab4..e2e3a38a8 100644
--- a/lib/portage/sync/modules/cvs/cvs.py
+++ b/lib/portage/sync/modules/cvs/cvs.py
@@ -41,7 +41,7 @@ class CVSSync(NewBase):
self.repo.module_specific_options["sync-cvs-repo"]
),
),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
!= os.EX_OK
):
@@ -64,7 +64,7 @@ class CVSSync(NewBase):
exitcode = portage.process.spawn_bash(
"cd %s; exec cvs -z0 -q update -dP"
% (portage._shell_quote(self.repo.location),),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
msg = "!!! cvs update error; exiting."
diff --git a/lib/portage/sync/modules/cvs/meson.build b/lib/portage/sync/modules/cvs/meson.build
new file mode 100644
index 000000000..cdf54e9bd
--- /dev/null
+++ b/lib/portage/sync/modules/cvs/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'cvs.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/cvs',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/git/__init__.py b/lib/portage/sync/modules/git/__init__.py
index ef32a9da0..121494215 100644
--- a/lib/portage/sync/modules/git/__init__.py
+++ b/lib/portage/sync/modules/git/__init__.py
@@ -77,6 +77,7 @@ module_spec = {
"sync-git-pull-env",
"sync-git-pull-extra-opts",
"sync-git-verify-commit-signature",
+ "sync-git-verify-max-age-days",
),
}
},
diff --git a/lib/portage/sync/modules/git/git.py b/lib/portage/sync/modules/git/git.py
index 98670e1f9..8fdbf97de 100644
--- a/lib/portage/sync/modules/git/git.py
+++ b/lib/portage/sync/modules/git/git.py
@@ -1,15 +1,17 @@
-# Copyright 2005-2020 Gentoo Authors
+# Copyright 2005-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import io
import logging
+import re
import subprocess
+import datetime
import portage
from portage import os
from portage.util import writemsg_level, shlex_split
from portage.util.futures import asyncio
from portage.output import create_color_func, EOutput
+from portage.const import TIMESTAMP_FORMAT
good = create_color_func("GOOD")
bad = create_color_func("BAD")
@@ -35,11 +37,11 @@ class GitSync(NewBase):
def __init__(self):
NewBase.__init__(self, "git", portage.const.GIT_PACKAGE_ATOM)
- def exists(self, **kwargs):
+ def exists(self, **kwargs) -> bool:
"""Tests whether the repo actually exists"""
return os.path.exists(os.path.join(self.repo.location, ".git"))
- def new(self, **kwargs):
+ def new(self, **kwargs) -> tuple[int, bool]:
"""Do the initial clone of the repository"""
if kwargs:
self._kwargs(kwargs)
@@ -49,9 +51,9 @@ class GitSync(NewBase):
if not os.path.exists(self.repo.location):
os.makedirs(self.repo.location)
self.logger(
- self.xterm_titles, "Created new directory %s" % self.repo.location
+ self.xterm_titles, f"Created new directory {self.repo.location}"
)
- except IOError:
+ except OSError:
return (1, False)
sync_uri = self.repo.sync_uri
@@ -61,22 +63,22 @@ class GitSync(NewBase):
git_cmd_opts = ""
if self.repo.module_specific_options.get("sync-git-env"):
shlexed_env = shlex_split(self.repo.module_specific_options["sync-git-env"])
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-git-clone-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-git-clone-env"]
)
- clone_env = dict(
- (k, v)
+ clone_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(clone_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -84,18 +86,15 @@ class GitSync(NewBase):
if self.repo.clone_depth is not None:
if self.repo.clone_depth != 0:
git_cmd_opts += " --depth %d" % self.repo.clone_depth
- elif self.repo.sync_depth is not None:
- if self.repo.sync_depth != 0:
- git_cmd_opts += " --depth %d" % self.repo.sync_depth
else:
# default
git_cmd_opts += " --depth 1"
if self.repo.module_specific_options.get("sync-git-clone-extra-opts"):
git_cmd_opts += (
- " %s" % self.repo.module_specific_options["sync-git-clone-extra-opts"]
+ f" {self.repo.module_specific_options['sync-git-clone-extra-opts']}"
)
- git_cmd = "%s clone%s %s ." % (
+ git_cmd = "{} clone{} {} .".format(
self.bin_command,
git_cmd_opts,
portage._shell_quote(sync_uri),
@@ -103,19 +102,38 @@ class GitSync(NewBase):
writemsg_level(git_cmd + "\n")
exitcode = portage.process.spawn_bash(
- "cd %s ; exec %s" % (portage._shell_quote(self.repo.location), git_cmd),
- **self.spawn_kwargs
+ f"cd {portage._shell_quote(self.repo.location)} ; exec {git_cmd}",
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git clone error in %s" % self.repo.location
+ msg = f"!!! git clone error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
+
+ self.add_safe_directory()
+
if not self.verify_head():
return (1, False)
+
return (os.EX_OK, True)
- def update(self):
+ def _gen_ceiling_string(self, path: str) -> str:
+ """
+ Iteratively generate a colon delimited string of all of the
+ given path's parents, for use with GIT_CEILING_DIRECTORIES
+ """
+ directories = []
+
+ while True:
+ if path == "/":
+ break
+ path = os.path.dirname(path)
+ directories.append(path)
+
+ return ":".join(directories)
+
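
    For example (illustrative path), the helper walks every parent up to
    the root so git discovery cannot escape the repository:

        # Sketch of the resulting value for an illustrative location:
        assert (
            _gen_ceiling_string("/var/db/repos/gentoo")
            == "/var/db/repos:/var/db:/var:/"
        )
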
+ def update(self) -> tuple[int, bool]:
"""Update existing git repository, and ignore the syncuri. We are
going to trust the user and assume that the user is in the branch
that he/she wants updated. We'll let the user manage branches with
@@ -123,33 +141,128 @@ class GitSync(NewBase):
"""
if not self.has_bin:
return (1, False)
+
+ opts = self.options.get("emerge_config").opts
+
git_cmd_opts = ""
quiet = self.settings.get("PORTAGE_QUIET") == "1"
+ verbose = "--verbose" in opts
+
+ # We don't want to operate with a .git outside of the given
+ # repo in any circumstances.
+ self.spawn_kwargs["env"].update(
+ {"GIT_CEILING_DIRECTORIES": self._gen_ceiling_string(self.repo.location)}
+ )
+
+ self.add_safe_directory()
+
if self.repo.module_specific_options.get("sync-git-env"):
shlexed_env = shlex_split(self.repo.module_specific_options["sync-git-env"])
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-git-pull-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-git-pull-env"]
)
- pull_env = dict(
- (k, v)
+ pull_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(pull_env)
- if self.settings.get("PORTAGE_QUIET") == "1":
+ if quiet:
git_cmd_opts += " --quiet"
+ elif verbose:
+ git_cmd_opts += " --verbose"
+
+ # The logic here is a bit delicate. We need to balance two things:
+ # 1. Having a robust sync mechanism which works unattended.
+ # 2. Allowing users to have the flexibility they might expect when using
+ # a git repository in repos.conf for syncing.
+ #
+ # For sync-type=git repositories, we've seen a problem in the wild
+ # where shallow clones end up "breaking themselves" especially when
+ # the origin is behind a CDN. 'git pull' might return state X,
+ # but on a subsequent pull, return state X-1. git will then (sometimes)
+ # leave orphaned untracked files in the repository. On a subsequent pull,
+ # when state >= X is returned where those files exist in the origin,
+ # git then refuses to write over them and aborts to avoid clobbering
+ # local work.
+ #
+ # To mitigate this, Portage will aggressively clobber any changes
+ # in the local directory, as its priority is to keep syncing working,
+ # by running 'git clean' and 'git reset --hard'.
+ #
+ # Portage performs this clobbering if:
+ # 1. sync-type=git
+ # 2.
+ # - volatile=no (explicitly set to no), OR
+ # - volatile is unset AND the repository owner is either root or portage
+ # 3. Portage is syncing the repository (rather than e.g. auto-sync=no
+ # and never running 'emaint sync -r foo')
+ #
+ # Portage will not clobber if:
+ # 1. volatile=yes (explicitly set in the config), OR
+ # 2. volatile is unset and the repository owner is neither root nor
+ # portage.
+ #
+ # 'volatile' refers to whether the repository is volatile and may
+ # only be safely changed by Portage itself, i.e. whether Portage
+ # should expect the user to change it or not.
+ #
+ # - volatile=yes:
+ # The repository is volatile and may be changed at any time by the user.
+ # Portage will not perform destructive operations on the repository.
+ # - volatile=no
+ # The repository is not volatile. Only Portage may modify the
+ # repository. User changes may be lost.
+ # Portage may perform destructive operations on the repository
+ # to keep sync working.
+ #
+ # References:
+ # bug #887025
+ # bug #824782
+ # https://archives.gentoo.org/gentoo-dev/message/f58a97027252458ad0a44090a2602897
+
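
    Restated as a compressed sketch (not the actual control flow, which is
    spread across this function; names hypothetical):

        def _may_clobber(volatile, owner):
            # An explicit volatile setting in repos.conf wins; otherwise
            # fall back to the ownership heuristic described above.
            if volatile is not None:
                return not volatile
            return owner in ("root", "portage")
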
+ # Default: Perform shallow updates (but only if the target is
+ # already a shallow repository).
+ sync_depth = 1
+ if self.repo.sync_depth is not None:
+ sync_depth = self.repo.sync_depth
+ else:
+ if self.repo.volatile:
+ # If sync-depth is not explicitly set by the user,
+ # then check if the target repository is already a
+ # shallow one. And do not perform a shallow update if
+ # the target repository is not shallow.
+ is_shallow_cmd = ["git", "rev-parse", "--is-shallow-repository"]
+ is_shallow_res = portage._unicode_decode(
+ subprocess.check_output(
+ is_shallow_cmd,
+ cwd=portage._unicode_encode(self.repo.location),
+ )
+ ).rstrip("\n")
+ if is_shallow_res == "false":
+ sync_depth = 0
+ else:
+ # If the repository is marked as non-volatile, we assume
+ # it's fine for Portage to do what it wishes to it.
+ sync_depth = 1
+
+ shallow = False
+ if sync_depth > 0:
+ git_cmd_opts += f" --depth {sync_depth}"
+ shallow = True
+
if self.repo.module_specific_options.get("sync-git-pull-extra-opts"):
git_cmd_opts += (
- " %s" % self.repo.module_specific_options["sync-git-pull-extra-opts"]
+ f" {self.repo.module_specific_options['sync-git-pull-extra-opts']}"
)
try:
@@ -166,15 +279,12 @@ class GitSync(NewBase):
)
).rstrip("\n")
except subprocess.CalledProcessError as e:
- msg = "!!! git rev-parse error in %s" % self.repo.location
+ msg = f"!!! git rev-parse error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (e.returncode, False)
- shallow = self.repo.sync_depth is not None and self.repo.sync_depth != 0
if shallow:
- git_cmd_opts += " --depth %d" % self.repo.sync_depth
-
# For shallow fetch, unreachable objects may need to be pruned
# manually, in order to prevent automatic git gc calls from
# eventually failing (see bug 599008).
@@ -184,21 +294,49 @@ class GitSync(NewBase):
exitcode = portage.process.spawn(
gc_cmd,
cwd=portage._unicode_encode(self.repo.location),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git gc error in %s" % self.repo.location
+ msg = f"!!! git gc error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
- git_cmd = "%s fetch %s%s" % (
- self.bin_command,
- remote_branch.partition("/")[0],
- git_cmd_opts,
- )
+ git_remote = remote_branch.partition("/")[0]
- writemsg_level(git_cmd + "\n")
+ if not self.repo.volatile:
+ git_get_remote_url_cmd = ["git", "ls-remote", "--get-url", git_remote]
+ git_remote_url = portage._unicode_decode(
+ subprocess.check_output(
+ git_get_remote_url_cmd,
+ cwd=portage._unicode_encode(self.repo.location),
+ )
+ ).strip()
+ if git_remote_url != self.repo.sync_uri:
+ git_set_remote_url_cmd = [
+ "git",
+ "remote",
+ "set-url",
+ git_remote,
+ self.repo.sync_uri,
+ ]
+ exitcode = portage.process.spawn(
+ git_set_remote_url_cmd,
+ cwd=portage._unicode_encode(self.repo.location),
+ **self.spawn_kwargs,
+ )
+ if exitcode != os.EX_OK:
+ msg = f"!!! could not update git remote {git_remote}'s url to {self.repo.sync_uri}"
+ self.logger(self.xterm_titles, msg)
+ writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
+ return (exitcode, False)
+ elif not quiet:
+ writemsg_level(" ".join(git_set_remote_url_cmd) + "\n")
+
+ git_cmd = f"{self.bin_command} fetch {git_remote}{git_cmd_opts}"
+
+ if not quiet:
+ writemsg_level(git_cmd + "\n")
rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]
previous_rev = subprocess.check_output(
@@ -206,39 +344,92 @@ class GitSync(NewBase):
)
exitcode = portage.process.spawn_bash(
- "cd %s ; exec %s" % (portage._shell_quote(self.repo.location), git_cmd),
- **self.spawn_kwargs
+ f"cd {portage._shell_quote(self.repo.location)} ; exec {git_cmd}",
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git fetch error in %s" % self.repo.location
+ msg = f"!!! git fetch error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
- if not self.verify_head(revision="refs/remotes/%s" % remote_branch):
+ if not self.verify_head(revision=f"refs/remotes/{remote_branch}"):
return (1, False)
- if shallow:
+ if not self.repo.volatile:
+ # Clean up the repo before trying to sync to upstream.
+ # - Only done for volatile=false repositories to avoid losing
+ # data.
+ # - This is needed to avoid orphaned files preventing further syncs
+ # on shallow clones.
+ clean_cmd = [self.bin_command, "clean", "--force", "-d", "-x"]
+
+ if quiet:
+ clean_cmd.append("--quiet")
+
+ exitcode = portage.process.spawn(
+ clean_cmd,
+ cwd=portage._unicode_encode(self.repo.location),
+ **self.spawn_kwargs,
+ )
+
+ if exitcode != os.EX_OK:
+ msg = f"!!! git clean error in {self.repo.location}"
+ self.logger(self.xterm_titles, msg)
+ writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
+ return (exitcode, False)
+
+ # `git diff --quiet` returns 0 on a clean tree and 1 otherwise
+ is_clean = (
+ portage.process.spawn(
+ f"{self.bin_command} diff --quiet",
+ cwd=portage._unicode_encode(self.repo.location),
+ **self.spawn_kwargs,
+ )
+ == 0
+ )
+
+ if not is_clean and not self.repo.volatile:
+ # If the repo isn't clean, clobber any changes for parity
+ # with rsync. Only do this for non-volatile repositories.
+ merge_cmd = [self.bin_command, "reset", "--hard"]
+ elif shallow:
# Since the default merge strategy typically fails when
# the depth is not unlimited, `git reset --merge`.
merge_cmd = [self.bin_command, "reset", "--merge"]
else:
merge_cmd = [self.bin_command, "merge"]
- merge_cmd.append("refs/remotes/%s" % remote_branch)
+
+ merge_cmd.append(f"refs/remotes/{remote_branch}")
if quiet:
merge_cmd.append("--quiet")
+
+ if not quiet:
+ writemsg_level(" ".join(merge_cmd) + "\n")
+
exitcode = portage.process.spawn(
merge_cmd,
cwd=portage._unicode_encode(self.repo.location),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! git merge error in %s" % self.repo.location
- self.logger(self.xterm_titles, msg)
- writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
- return (exitcode, False)
+ if not self.repo.volatile:
+ # HACK - sometimes merging results in a tree diverged from
+ # upstream, so try to hack around it
+ # https://stackoverflow.com/questions/41075972/how-to-update-a-git-shallow-clone/41081908#41081908
+ exitcode = portage.process.spawn(
+ f"{self.bin_command} reset --hard refs/remotes/{remote_branch}",
+ cwd=portage._unicode_encode(self.repo.location),
+ **self.spawn_kwargs,
+ )
+
+ if exitcode != os.EX_OK:
+ msg = f"!!! git merge error in {self.repo.location}"
+ self.logger(self.xterm_titles, msg)
+ writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
+ return (exitcode, False)
current_rev = subprocess.check_output(
rev_cmd, cwd=portage._unicode_encode(self.repo.location)
@@ -246,7 +437,53 @@ class GitSync(NewBase):
return (os.EX_OK, current_rev != previous_rev)
- def verify_head(self, revision="-1"):
+ def verify_head(self, revision="-1") -> bool:
+ max_age_days = self.repo.module_specific_options.get(
+ "sync-git-verify-max-age-days", ""
+ )
+ if max_age_days:
+ try:
+ max_age_days = int(max_age_days)
+ if max_age_days <= 0:
+ raise ValueError(max_age_days)
+ except ValueError:
+ writemsg_level(
+ f"!!! sync-git-max-age-days must be a positive non-zero integer: {max_age_days}\n",
+ level=logging.ERROR,
+ noiselevel=-1,
+ )
+ return False
+ show_timestamp_chk_file_cmd = [
+ self.bin_command,
+ "show",
+ f"{revision}:metadata/timestamp.chk",
+ ]
+ try:
+ timestamp_chk = portage._unicode_decode(
+ subprocess.check_output(
+ show_timestamp_chk_file_cmd,
+ cwd=portage._unicode_encode(self.repo.location),
+ )
+ ).strip()
+ except subprocess.CalledProcessError as e:
+ writemsg_level(
+ f"!!! {show_timestamp_chk_file_cmd} failed with {e.returncode}",
+ level=logging.ERROR,
+ noiselevel=-1,
+ )
+ return False
+ timestamp = datetime.datetime.strptime(timestamp_chk, TIMESTAMP_FORMAT)
+ max_timestamp_age = datetime.datetime.now() - datetime.timedelta(
+ days=max_age_days
+ )
+ if timestamp < max_timestamp_age:
+ writemsg_level(
+ f"!!! timestamp (from timestamp.chk) {timestamp} is older than max age {max_timestamp_age}\n",
+ level=logging.ERROR,
+ noiselevel=-1,
+ )
+ return False
+
if self.repo.module_specific_options.get(
"sync-git-verify-commit-signature", "false"
).lower() not in ("true", "yes"):
@@ -260,21 +497,30 @@ class GitSync(NewBase):
)
return False
- openpgp_env = self._get_openpgp_env(self.repo.sync_openpgp_key_path)
+ opts = self.options.get("emerge_config").opts
+ debug = "--debug" in opts
+ quiet = self.settings.get("PORTAGE_QUIET") == "1"
+ verbose = "--verbose" in opts
+
+ openpgp_env = self._get_openpgp_env(self.repo.sync_openpgp_key_path, debug)
+
+ if debug:
+ old_level = logging.getLogger().getEffectiveLevel()
+ logging.getLogger().setLevel(logging.DEBUG)
+ logging.getLogger("gemato").setLevel(logging.DEBUG)
try:
out = EOutput()
env = None
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
+ out.einfo(f"Using keys from {self.repo.sync_openpgp_key_path}")
+ with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
except (GematoException, asyncio.TimeoutError) as e:
writemsg_level(
- "!!! Verification impossible due to keyring problem:\n%s\n"
- % (e,),
+ f"!!! Verification impossible due to keyring problem:\n{e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -283,45 +529,77 @@ class GitSync(NewBase):
env = os.environ.copy()
env["GNUPGHOME"] = openpgp_env.home
- rev_cmd = [self.bin_command, "log", "-n1", "--pretty=format:%G?", revision]
+ rev_cmd = [
+ self.bin_command,
+ "-c",
+ "log.showsignature=0",
+ "log",
+ "-n1",
+ "--pretty=format:%G?%n%GF",
+ revision,
+ ]
try:
- status = portage._unicode_decode(
+ lines = portage._unicode_decode(
subprocess.check_output(
rev_cmd,
cwd=portage._unicode_encode(self.repo.location),
env=env,
)
- ).strip()
+ ).splitlines()
except subprocess.CalledProcessError:
return False
+ status = lines[0].strip()
+ # the %GF line may be missing or empty (e.g. when there is no signature)
+ signing_key = lines[1].strip() if len(lines) > 1 else "(unknown)"
+
if status == "G": # good signature is good
- out.einfo("Trusted signature found on top commit")
+ if not quiet:
+ message = "Trusted signature found on top commit"
+ if verbose:
+ message += (
+ f" (git revision: {revision}, signing key: {signing_key})"
+ )
+ out.einfo(message)
return True
if status == "U": # untrusted
- out.ewarn("Top commit signature is valid but not trusted")
+ out.ewarn(
+ f"Top commit signature is valid but not trusted (git revision: {revision}, signing key: {signing_key})"
+ )
return True
if status == "B":
- expl = "bad signature"
+ expl = (
+ f"bad signature using key {signing_key} on git revision {revision}"
+ )
elif status == "X":
- expl = "expired signature"
+ expl = f"expired signature using key {signing_key} on git revision {revision}"
elif status == "Y":
- expl = "expired key"
+ expl = f"expired key using key {signing_key} on git revision {revision}"
elif status == "R":
- expl = "revoked key"
+ expl = f"revoked key using key {signing_key} on git revision {revision}"
elif status == "E":
expl = "unable to verify signature (missing key?)"
elif status == "N":
expl = "no signature"
else:
expl = "unknown issue"
- out.eerror("No valid signature found: %s" % (expl,))
+ out.eerror(f"No valid signature found: {expl}")
+
+ if debug:
+ writemsg_level(
+ f"!!! Got following output from gpg: {status}\n",
+ level=logging.DEBUG,
+ noiselevel=-1,
+ )
+
return False
finally:
if openpgp_env is not None:
openpgp_env.close()
+ if debug:
+ logging.getLogger().setLevel(old_level)
- def retrieve_head(self, **kwargs):
+ def retrieve_head(self, **kwargs) -> tuple[int, bool]:
"""Get information about the head commit"""
if kwargs:
self._kwargs(kwargs)
@@ -341,3 +619,31 @@ class GitSync(NewBase):
except subprocess.CalledProcessError:
ret = (1, False)
return ret
+
+ def add_safe_directory(self) -> bool:
+ # Add safe.directory to system gitconfig if not already configured.
+ # Workaround for bug #838271 and bug #838223.
+ location_escaped = re.escape(self.repo.location)
+ result = subprocess.run(
+ [
+ self.bin_command,
+ "config",
+ "--get",
+ "safe.directory",
+ f"^{location_escaped}$",
+ ],
+ stdout=subprocess.DEVNULL,
+ )
+ if result.returncode == 1:
+ result = subprocess.run(
+ [
+ self.bin_command,
+ "config",
+ "--system",
+ "--add",
+ "safe.directory",
+ self.repo.location,
+ ],
+ stdout=subprocess.DEVNULL,
+ )
+ return result.returncode == 0
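
The exit-code convention that add_safe_directory() relies on can be checked in
isolation: `git config --get <name> [value-pattern]` exits 0 when a matching
entry exists, 1 when none does, and greater than 1 on usage errors. A minimal
sketch of the same probe outside of portage (the repository path is
hypothetical):

    import re
    import subprocess

    def is_safe_directory(location: str) -> bool:
        # Exit status 0 means a matching safe.directory entry exists,
        # 1 means it does not (values above 1 indicate a config error).
        result = subprocess.run(
            [
                "git",
                "config",
                "--get",
                "safe.directory",
                f"^{re.escape(location)}$",
            ],
            stdout=subprocess.DEVNULL,
        )
        return result.returncode == 0

    # is_safe_directory("/var/db/repos/gentoo") -> True or False
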
diff --git a/lib/portage/sync/modules/git/meson.build b/lib/portage/sync/modules/git/meson.build
new file mode 100644
index 000000000..fb683c53e
--- /dev/null
+++ b/lib/portage/sync/modules/git/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'git.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/git',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/mercurial/mercurial.py b/lib/portage/sync/modules/mercurial/mercurial.py
index 486b4fdd6..6a3016a1f 100644
--- a/lib/portage/sync/modules/mercurial/mercurial.py
+++ b/lib/portage/sync/modules/mercurial/mercurial.py
@@ -35,9 +35,9 @@ class MercurialSync(NewBase):
if not os.path.exists(self.repo.location):
os.makedirs(self.repo.location)
self.logger(
- self.xterm_titles, "Created new directory %s" % self.repo.location
+ self.xterm_titles, f"Created new directory {self.repo.location}"
)
- except IOError:
+ except OSError:
return (1, False)
sync_uri = self.repo.sync_uri
@@ -49,22 +49,22 @@ class MercurialSync(NewBase):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-env"]
)
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-mercurial-clone-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-clone-env"]
)
- clone_env = dict(
- (k, v)
+ clone_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(clone_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -74,7 +74,7 @@ class MercurialSync(NewBase):
" %s"
% self.repo.module_specific_options["sync-mercurial-clone-extra-opts"]
)
- hg_cmd = "%s clone%s %s ." % (
+ hg_cmd = "{} clone{} {} .".format(
self.bin_command,
hg_cmd_opts,
portage._shell_quote(sync_uri),
@@ -84,10 +84,10 @@ class MercurialSync(NewBase):
exitcode = portage.process.spawn(
shlex_split(hg_cmd),
cwd=portage._unicode_encode(self.repo.location),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! hg clone error in %s" % self.repo.location
+ msg = f"!!! hg clone error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -105,22 +105,22 @@ class MercurialSync(NewBase):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-env"]
)
- env = dict(
- (k, v)
+ env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(env)
if self.repo.module_specific_options.get("sync-mercurial-pull-env"):
shlexed_env = shlex_split(
self.repo.module_specific_options["sync-mercurial-pull-env"]
)
- pull_env = dict(
- (k, v)
+ pull_env = {
+ k: v
for k, _, v in (assignment.partition("=") for assignment in shlexed_env)
if k
- )
+ }
self.spawn_kwargs["env"].update(pull_env)
if self.settings.get("PORTAGE_QUIET") == "1":
@@ -130,7 +130,7 @@ class MercurialSync(NewBase):
" %s"
% self.repo.module_specific_options["sync-mercurial-pull-extra-opts"]
)
- hg_cmd = "%s pull -u%s" % (self.bin_command, hg_cmd_opts)
+ hg_cmd = f"{self.bin_command} pull -u{hg_cmd_opts}"
writemsg_level(hg_cmd + "\n")
rev_cmd = [self.bin_command, "id", "--id", "--rev", "tip"]
@@ -141,10 +141,10 @@ class MercurialSync(NewBase):
exitcode = portage.process.spawn(
shlex_split(hg_cmd),
cwd=portage._unicode_encode(self.repo.location),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
- msg = "!!! hg pull error in %s" % self.repo.location
+ msg = f"!!! hg pull error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
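
The dict comprehensions introduced for sync-mercurial-env above are a compact
"VAR=value" parser built on str.partition(). A standalone sketch of the same
idiom using the stdlib (the sample input is illustrative):

    import shlex

    raw = 'HGPLAIN=1 EDITOR="vi -e" LONESOME'
    env = {
        k: v
        for k, _, v in (assignment.partition("=") for assignment in shlex.split(raw))
        if k
    }
    # A token without "=" keeps its key with an empty value, while a
    # malformed "=value" token (empty key) is dropped by the `if k` guard.
    print(env)  # {'HGPLAIN': '1', 'EDITOR': 'vi -e', 'LONESOME': ''}
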
diff --git a/lib/portage/sync/modules/mercurial/meson.build b/lib/portage/sync/modules/mercurial/meson.build
new file mode 100644
index 000000000..4e4897ed3
--- /dev/null
+++ b/lib/portage/sync/modules/mercurial/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'mercurial.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/mercurial',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/meson.build b/lib/portage/sync/modules/meson.build
new file mode 100644
index 000000000..fab2878e9
--- /dev/null
+++ b/lib/portage/sync/modules/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules',
+ pure : not native_extensions
+)
+
+subdir('cvs')
+subdir('git')
+subdir('mercurial')
+subdir('rsync')
+subdir('svn')
+subdir('webrsync')
diff --git a/lib/portage/sync/modules/rsync/meson.build b/lib/portage/sync/modules/rsync/meson.build
new file mode 100644
index 000000000..ab95e7cfc
--- /dev/null
+++ b/lib/portage/sync/modules/rsync/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'rsync.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/rsync',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/rsync/rsync.py b/lib/portage/sync/modules/rsync/rsync.py
index 5f4cf1aeb..5d442d262 100644
--- a/lib/portage/sync/modules/rsync/rsync.py
+++ b/lib/portage/sync/modules/rsync/rsync.py
@@ -1,8 +1,7 @@
-# Copyright 1999-2020 Gentoo Authors
+# Copyright 1999-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import datetime
-import io
import logging
import random
import re
@@ -148,11 +147,16 @@ class RsyncSync(NewBase):
else:
self.max_age = 0
+ debug = "--debug" in opts
+ if debug:
+ old_level = logging.getLogger().getEffectiveLevel()
+ logging.getLogger().setLevel(logging.DEBUG)
+
openpgp_env = None
if self.verify_metamanifest and gemato is not None:
# Use isolated environment if key is specified,
# system environment otherwise
- openpgp_env = self._get_openpgp_env(self.repo.sync_openpgp_key_path)
+ openpgp_env = self._get_openpgp_env(self.repo.sync_openpgp_key_path, debug)
try:
# Load and update the keyring early. If it fails, then verification
@@ -160,8 +164,8 @@ class RsyncSync(NewBase):
# so we may as well bail out before actual rsync happens.
if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
+ out.einfo(f"Using keys from {self.repo.sync_openpgp_key_path}")
+ with open(self.repo.sync_openpgp_key_path, "rb") as f:
openpgp_env.import_key(f)
self._refresh_keys(openpgp_env)
except (GematoException, asyncio.TimeoutError) as e:
@@ -225,7 +229,7 @@ class RsyncSync(NewBase):
)[1:5]
except ValueError:
writemsg_level(
- "!!! sync-uri is invalid: %s\n" % syncuri,
+ f"!!! sync-uri is invalid: {syncuri}\n",
noiselevel=-1,
level=logging.ERROR,
)
@@ -264,7 +268,7 @@ class RsyncSync(NewBase):
getaddrinfo_host, None, family, socket.SOCK_STREAM
)
)
- except socket.error as e:
+ except OSError as e:
writemsg_level(
"!!! getaddrinfo failed for '%s': %s\n"
% (_unicode_decode(hostname), str(e)),
@@ -273,7 +277,6 @@ class RsyncSync(NewBase):
)
if addrinfos:
-
AF_INET = socket.AF_INET
AF_INET6 = None
if socket.has_ipv6:
@@ -284,10 +287,10 @@ class RsyncSync(NewBase):
for addrinfo in addrinfos:
if addrinfo[0] == AF_INET:
- ips_v4.append("%s" % addrinfo[4][0])
+ ips_v4.append(f"{addrinfo[4][0]}")
elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
# IPv6 addresses need to be enclosed in square brackets
- ips_v6.append("[%s]" % addrinfo[4][0])
+ ips_v6.append(f"[{addrinfo[4][0]}]")
random.shuffle(ips_v4)
random.shuffle(ips_v6)
@@ -334,7 +337,7 @@ class RsyncSync(NewBase):
dosyncuri = uris.pop()
elif maxretries < 0 or retries > maxretries:
writemsg(
- "!!! Exhausted addresses for %s\n" % _unicode_decode(hostname),
+ f"!!! Exhausted addresses for {_unicode_decode(hostname)}\n",
noiselevel=-1,
)
return (1, False)
@@ -446,30 +449,28 @@ class RsyncSync(NewBase):
out.ewarn(
"You may want to try using another mirror and/or reporting this one:"
)
- out.ewarn(" %s" % (dosyncuri,))
+ out.ewarn(f" {dosyncuri}")
out.ewarn("")
out.quiet = quiet
- out.einfo("Manifest timestamp: %s UTC" % (ts.ts,))
+ out.einfo(f"Manifest timestamp: {ts.ts} UTC")
out.einfo("Valid OpenPGP signature found:")
out.einfo(
"- primary key: %s"
% (m.openpgp_signature.primary_key_fingerprint)
)
- out.einfo("- subkey: %s" % (m.openpgp_signature.fingerprint))
- out.einfo(
- "- timestamp: %s UTC" % (m.openpgp_signature.timestamp)
- )
+ out.einfo(f"- subkey: {m.openpgp_signature.fingerprint}")
+ out.einfo(f"- timestamp: {m.openpgp_signature.timestamp} UTC")
# if nothing has changed, skip the actual Manifest
# verification
if not local_state_unchanged:
- out.ebegin("Verifying %s" % (download_dir,))
+ out.ebegin(f"Verifying {download_dir}")
m.assert_directory_verifies()
out.eend(0)
except GematoException as e:
writemsg_level(
- "!!! Manifest verification failed:\n%s\n" % (e,),
+ f"!!! Manifest verification failed:\n{e}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -488,6 +489,8 @@ class RsyncSync(NewBase):
self.repo_storage.abort_update()
if openpgp_env is not None:
openpgp_env.close()
+ if debug:
+ logging.getLogger().setLevel(old_level)
def _process_exitcode(self, exitcode, syncuri, out, maxretries):
if exitcode == 0:
@@ -495,7 +498,7 @@ class RsyncSync(NewBase):
elif exitcode == SERVER_OUT_OF_DATE:
exitcode = 1
elif exitcode == EXCEEDED_MAX_RETRIES:
- sys.stderr.write(">>> Exceeded PORTAGE_RSYNC_RETRIES: %s\n" % maxretries)
+ sys.stderr.write(f">>> Exceeded PORTAGE_RSYNC_RETRIES: {maxretries}\n")
exitcode = 1
elif exitcode > 0:
msg = []
@@ -507,7 +510,7 @@ class RsyncSync(NewBase):
"that sync-uri attribute for repository '%s' is proper."
% self.repo.name
)
- msg.append("sync-uri: '%s'" % self.repo.sync_uri)
+ msg.append(f"sync-uri: '{self.repo.sync_uri}'")
elif exitcode == 11:
msg.append("Rsync has reported that there is a File IO error. Normally")
msg.append(
@@ -518,7 +521,7 @@ class RsyncSync(NewBase):
% self.repo.name
)
msg.append("and try again after the problem has been fixed.")
- msg.append("Location of repository: '%s'" % self.repo.location)
+ msg.append(f"Location of repository: '{self.repo.location}'")
elif exitcode == 20:
msg.append("Rsync was killed before it finished.")
else:
@@ -546,9 +549,9 @@ class RsyncSync(NewBase):
os.makedirs(self.repo.location)
self.logger(
self.xterm_titles,
- "Created New Directory %s " % self.repo.location,
+ f"Created New Directory {self.repo.location} ",
)
- except IOError:
+ except OSError:
return (1, False)
return self.update()
@@ -582,7 +585,7 @@ class RsyncSync(NewBase):
"--force", # Force deletion on non-empty dirs
"--whole-file", # Don't do block transfers, only entire files
"--delete", # Delete files that aren't in the master tree
- "--stats", # Show final statistics about what was transfered
+ "--stats", # Show final statistics about what was transferred
"--human-readable",
"--timeout=" + str(self.timeout), # IO timeout if not done in X seconds
"--exclude=/distfiles", # Exclude distfiles from consideration
@@ -604,16 +607,16 @@ class RsyncSync(NewBase):
portage.writemsg(
yellow("WARNING:")
+ " adding required option "
- + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
+ + f"{opt} not included in PORTAGE_RSYNC_OPTS\n"
)
rsync_opts.append(opt)
for exclude in ("distfiles", "local", "packages"):
- opt = "--exclude=/%s" % exclude
+ opt = f"--exclude=/{exclude}"
if opt not in rsync_opts:
portage.writemsg(
yellow("WARNING:")
- + " adding required option %s not included in " % opt
+ + f" adding required option {opt} not included in "
+ "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n"
)
rsync_opts.append(opt)
@@ -634,7 +637,7 @@ class RsyncSync(NewBase):
portage.writemsg(
yellow("WARNING:")
+ " adding required option "
- + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
+ + f"{opt} not included in PORTAGE_RSYNC_OPTS\n"
)
rsync_opts.append(opt)
return rsync_opts
@@ -705,48 +708,47 @@ class RsyncSync(NewBase):
command.append(syncuri.rstrip("/") + "/metadata/timestamp.chk")
command.append(tmpservertimestampfile)
content = None
- pids = []
+ proc = None
+ proc_waiter = None
+ loop = asyncio.get_event_loop()
try:
# Timeout here in case the server is unresponsive. The
# --timeout rsync option doesn't apply to the initial
# connection attempt.
try:
- if self.rsync_initial_timeout:
- portage.exception.AlarmSignal.register(self.rsync_initial_timeout)
-
- pids.extend(
- portage.process.spawn(command, returnpid=True, **self.spawn_kwargs)
+ proc = portage.process.spawn(
+ command, returnproc=True, **self.spawn_kwargs
)
- exitcode = os.waitpid(pids[0], 0)[1]
+ proc_waiter = asyncio.ensure_future(proc.wait(), loop)
+ future = (
+ asyncio.wait_for(
+ asyncio.shield(proc_waiter), self.rsync_initial_timeout
+ )
+ if self.rsync_initial_timeout
+ else proc_waiter
+ )
+ exitcode = loop.run_until_complete(future)
if self.usersync_uid is not None:
portage.util.apply_permissions(
tmpservertimestampfile, uid=os.getuid()
)
content = portage.grabfile(tmpservertimestampfile)
finally:
- if self.rsync_initial_timeout:
- portage.exception.AlarmSignal.unregister()
try:
os.unlink(tmpservertimestampfile)
except OSError:
pass
- except portage.exception.AlarmSignal:
+ except (TimeoutError, asyncio.TimeoutError):
# timed out
print("timed out")
# If the waiter is still pending after the timeout, terminate
# the process and reap it before reporting rsync's timeout code.
- if pids and os.waitpid(pids[0], os.WNOHANG)[0] == 0:
- os.kill(pids[0], signal.SIGTERM)
- os.waitpid(pids[0], 0)
+ if proc_waiter and not proc_waiter.done():
+ proc.terminate()
+ loop.run_until_complete(proc_waiter)
# This is the same code rsync uses for timeout.
exitcode = 30
- else:
- if exitcode != os.EX_OK:
- if exitcode & 0xFF:
- exitcode = (exitcode & 0xFF) << 8
- else:
- exitcode = exitcode >> 8
if content:
try:
@@ -755,7 +757,6 @@ class RsyncSync(NewBase):
)
except (OverflowError, ValueError):
pass
- del command, pids, content
if exitcode == os.EX_OK:
if (servertimestamp != 0) and (servertimestamp == timestamp):
@@ -774,20 +775,18 @@ class RsyncSync(NewBase):
)
print(">>>")
print(
- ">>> In order to force sync, remove '%s'."
- % self.servertimestampfile
+ f">>> In order to force sync, remove '{self.servertimestampfile}'."
)
print(">>>")
print()
elif (servertimestamp != 0) and (servertimestamp < timestamp):
- self.logger(self.xterm_titles, ">>> Server out of date: %s" % syncuri)
+ self.logger(self.xterm_titles, f">>> Server out of date: {syncuri}")
print()
print(">>>")
- print(">>> SERVER OUT OF DATE: %s" % syncuri)
+ print(f">>> SERVER OUT OF DATE: {syncuri}")
print(">>>")
print(
- ">>> In order to force sync, remove '%s'."
- % self.servertimestampfile
+ f">>> In order to force sync, remove '{self.servertimestampfile}'."
)
print(">>>")
print()
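
The timeout rework above drops AlarmSignal in favor of asyncio primitives:
wait_for() cancels the awaitable it is given when the timeout expires, so the
process waiter is wrapped in shield() to keep it alive for the subsequent
terminate-and-reap path. A minimal sketch of the same pattern with a plain
asyncio subprocess (command and timeout are illustrative):

    import asyncio

    async def run_with_timeout(cmd, timeout):
        proc = await asyncio.create_subprocess_exec(*cmd)
        waiter = asyncio.ensure_future(proc.wait())
        try:
            # shield() protects `waiter` itself from cancellation when
            # wait_for() times out; only the shield wrapper is cancelled.
            return await asyncio.wait_for(asyncio.shield(waiter), timeout)
        except (TimeoutError, asyncio.TimeoutError):
            proc.terminate()
            return await waiter  # reap the child; rsync reports code 30 here

    # asyncio.run(run_with_timeout(["sleep", "60"], 5.0))
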
diff --git a/lib/portage/sync/modules/svn/meson.build b/lib/portage/sync/modules/svn/meson.build
new file mode 100644
index 000000000..0b1b86f3e
--- /dev/null
+++ b/lib/portage/sync/modules/svn/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'svn.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/svn',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/svn/svn.py b/lib/portage/sync/modules/svn/svn.py
index 788e394cd..ec4bdb006 100644
--- a/lib/portage/sync/modules/svn/svn.py
+++ b/lib/portage/sync/modules/svn/svn.py
@@ -36,7 +36,7 @@ class SVNSync(NewBase):
portage._shell_quote(self.repo.location),
portage._shell_quote(svn_root),
),
- **self.spawn_kwargs
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
msg = "!!! svn checkout error; exiting."
@@ -59,8 +59,8 @@ class SVNSync(NewBase):
# svn update
exitcode = portage.process.spawn_bash(
- "cd %s; exec svn update" % (portage._shell_quote(self.repo.location),),
- **self.spawn_kwargs
+ f"cd {portage._shell_quote(self.repo.location)}; exec svn update",
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
msg = "!!! svn update error; exiting."
@@ -77,8 +77,8 @@ class SVNSync(NewBase):
@rtype: (int, bool)
"""
exitcode = portage.process.spawn_bash(
- "cd %s; exec svn upgrade" % (portage._shell_quote(self.repo.location),),
- **self.spawn_kwargs
+ f"cd {portage._shell_quote(self.repo.location)}; exec svn upgrade",
+ **self.spawn_kwargs,
)
if exitcode != os.EX_OK:
msg = "!!! svn upgrade error; exiting."
diff --git a/lib/portage/sync/modules/webrsync/__init__.py b/lib/portage/sync/modules/webrsync/__init__.py
index bc0cdf43c..534a1d562 100644
--- a/lib/portage/sync/modules/webrsync/__init__.py
+++ b/lib/portage/sync/modules/webrsync/__init__.py
@@ -13,8 +13,8 @@ from portage.sync.config_checks import CheckSyncConfig
DEFAULT_CLASS = "WebRsync"
-AVAILABLE_CLASSES = ["WebRsync", "PyWebsync"]
-options = {"1": "WebRsync", "2": "PyWebsync"}
+AVAILABLE_CLASSES = ["WebRsync", "PyWebRsync"]
+options = {"1": "WebRsync", "2": "PyWebRsync"}
config_class = DEFAULT_CLASS
diff --git a/lib/portage/sync/modules/webrsync/meson.build b/lib/portage/sync/modules/webrsync/meson.build
new file mode 100644
index 000000000..d3e42cbf1
--- /dev/null
+++ b/lib/portage/sync/modules/webrsync/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'webrsync.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/sync/modules/webrsync',
+ pure : not native_extensions
+)
diff --git a/lib/portage/sync/modules/webrsync/webrsync.py b/lib/portage/sync/modules/webrsync/webrsync.py
index 0e2f63472..ca0416fa4 100644
--- a/lib/portage/sync/modules/webrsync/webrsync.py
+++ b/lib/portage/sync/modules/webrsync/webrsync.py
@@ -1,28 +1,33 @@
"""WebRsync module for portage"""
-import io
import logging
import portage
from portage import os
from portage.util import writemsg_level
-from portage.util.futures import asyncio
from portage.output import create_color_func
+from portage.sync.syncbase import SyncBase
good = create_color_func("GOOD")
bad = create_color_func("BAD")
warn = create_color_func("WARN")
-from portage.sync.syncbase import SyncBase
try:
- from gemato.exceptions import GematoException
import gemato.openpgp
except ImportError:
gemato = None
class WebRsync(SyncBase):
- """WebRSync sync class"""
+ """WebRSync sync class
+
+ This class implements syncing via calls to an external binary, either:
+ - emerge-delta-webrsync (if sync-webrsync-delta is set), or
+ - emerge-webrsync
+
+ It wraps them and performs PGP verification via gemato if
+ sync-webrsync-verify-signature is set.
+ """
short_desc = "Perform sync operations on webrsync based repositories"
@@ -46,7 +51,7 @@ class WebRsync(SyncBase):
self.bin_command = portage.process.find_binary(self._bin_command)
self.bin_pkg = ">=app-portage/emerge-delta-webrsync-3.7.5"
- return super(WebRsync, self).has_bin
+ return super().has_bin
def sync(self, **kwargs):
"""Sync the repository"""
@@ -67,7 +72,6 @@ class WebRsync(SyncBase):
if self.repo.module_specific_options.get(
"sync-webrsync-verify-signature", "false"
).lower() in ("true", "yes"):
-
if not self.repo.sync_openpgp_key_path:
writemsg_level(
"!!! sync-openpgp-key-path is not set\n",
@@ -93,24 +97,13 @@ class WebRsync(SyncBase):
)
return (1, False)
- openpgp_env = self._get_openpgp_env(self.repo.sync_openpgp_key_path)
-
- out = portage.output.EOutput(quiet=quiet)
- try:
- out.einfo("Using keys from %s" % (self.repo.sync_openpgp_key_path,))
- with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
- openpgp_env.import_key(f)
- self._refresh_keys(openpgp_env)
- self.spawn_kwargs["env"]["PORTAGE_GPG_DIR"] = openpgp_env.home
- self.spawn_kwargs["env"]["PORTAGE_TEMP_GPG_DIR"] = openpgp_env.home
- except (GematoException, asyncio.TimeoutError) as e:
- writemsg_level(
- "!!! Verification impossible due to keyring problem:\n%s\n"
- % (e,),
- level=logging.ERROR,
- noiselevel=-1,
- )
- return (1, False)
+ self.spawn_kwargs["env"]["PORTAGE_SYNC_WEBRSYNC_GPG"] = "1"
+ self.spawn_kwargs["env"][
+ "PORTAGE_GPG_KEY"
+ ] = self.repo.sync_openpgp_key_path
+ self.spawn_kwargs["env"][
+ "PORTAGE_GPG_KEY_SERVER"
+ ] = self.repo.sync_openpgp_keyserver
webrsync_cmd = [self.bin_command]
if verbose:
@@ -125,7 +118,7 @@ class WebRsync(SyncBase):
exitcode = portage.process.spawn(webrsync_cmd, **self.spawn_kwargs)
if exitcode != os.EX_OK:
- msg = "!!! emerge-webrsync error in %s" % self.repo.location
+ msg = f"!!! emerge-webrsync error in {self.repo.location}"
self.logger(self.xterm_titles, msg)
writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
return (exitcode, False)
@@ -136,7 +129,12 @@ class WebRsync(SyncBase):
class PyWebRsync(SyncBase):
- """WebRSync sync class"""
+ """PyWebRsync sync class
+
+ TODO: Implement the sync parts from the emerge-webrsync external
+ binary to avoid split logic for various components, which
+ is how we ended up with bug #597800.
+ """
short_desc = "Perform sync operations on webrsync based repositories"
@@ -149,4 +147,6 @@ class PyWebRsync(SyncBase):
def sync(self, **kwargs):
"""Sync the repository"""
- pass
+ raise NotImplementedError(
+ "Python impl. of webrsync backend is not yet implemented"
+ )
diff --git a/lib/portage/sync/modules/zipfile/__init__.py b/lib/portage/sync/modules/zipfile/__init__.py
new file mode 100644
index 000000000..e44833088
--- /dev/null
+++ b/lib/portage/sync/modules/zipfile/__init__.py
@@ -0,0 +1,34 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2024 Alexey Gladkov <gladkov.alexey@gmail.com>
+
+doc = """Zipfile plug-in module for portage.
+Performs an HTTP download of a portage snapshot and unpacks it to the repo
+location."""
+__doc__ = doc[:]
+
+
+import os
+
+from portage.sync.config_checks import CheckSyncConfig
+
+
+module_spec = {
+ "name": "zipfile",
+ "description": doc,
+ "provides": {
+ "zipfile-module": {
+ "name": "zipfile",
+ "sourcefile": "zipfile",
+ "class": "ZipFile",
+ "description": doc,
+ "functions": ["sync", "retrieve_head"],
+ "func_desc": {
+ "sync": "Performs an archived http download of the "
+ + "repository, then unpacks it.",
+ "retrieve_head": "Returns the checksum of the unpacked archive.",
+ },
+ "validate_config": CheckSyncConfig,
+ "module_specific_options": (),
+ },
+ },
+}
diff --git a/lib/portage/sync/modules/zipfile/zipfile.py b/lib/portage/sync/modules/zipfile/zipfile.py
new file mode 100644
index 000000000..edfb5aa68
--- /dev/null
+++ b/lib/portage/sync/modules/zipfile/zipfile.py
@@ -0,0 +1,143 @@
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2024 Alexey Gladkov <gladkov.alexey@gmail.com>
+
+import os
+import os.path
+import logging
+import zipfile
+import shutil
+import tempfile
+import urllib.error
+import urllib.request
+
+import portage
+from portage.util import writemsg_level, writemsg_stdout
+from portage.sync.syncbase import SyncBase
+
+
+class ZipFile(SyncBase):
+ """ZipFile sync module"""
+
+ short_desc = "Perform sync operations on GitHub repositories"
+
+ @staticmethod
+ def name():
+ return "ZipFile"
+
+ def __init__(self):
+ SyncBase.__init__(self, "emerge", ">=sys-apps/portage-2.3")
+
+ def retrieve_head(self, **kwargs):
+ """Get information about the checksum of the unpacked archive"""
+ if kwargs:
+ self._kwargs(kwargs)
+ info = portage.grabdict(os.path.join(self.repo.location, ".info"))
+ if "etag" in info:
+ return (os.EX_OK, info["etag"][0])
+ return (1, False)
+
+ def _do_cmp(self, f1, f2):
+ bufsize = 8 * 1024
+ while True:
+ b1 = f1.read(bufsize)
+ b2 = f2.read(bufsize)
+ if b1 != b2:
+ return False
+ if not b1:
+ return True
+
+ def sync(self, **kwargs):
+ """Sync the repository"""
+ if kwargs:
+ self._kwargs(kwargs)
+
+ req = urllib.request.Request(url=self.repo.sync_uri)
+
+ info = portage.grabdict(os.path.join(self.repo.location, ".info"))
+ if "etag" in info:
+ req.add_header("If-None-Match", info["etag"][0])
+
+ try:
+ with urllib.request.urlopen(req) as response:
+ with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
+ shutil.copyfileobj(response, tmp_file)
+
+ zip_file = tmp_file.name
+ etag = response.headers.get("etag")
+
+ except urllib.error.HTTPError as resp:
+ if resp.code == 304:
+ writemsg_stdout(">>> The repository has not changed.\n", noiselevel=-1)
+ return (os.EX_OK, False)
+
+ writemsg_level(
+ f"!!! Unable to obtain zip archive: {resp}\n",
+ noiselevel=-1,
+ level=logging.ERROR,
+ )
+ return (1, False)
+
+ if not zipfile.is_zipfile(zip_file):
+ msg = "!!! file is not a zip archive."
+ self.logger(self.xterm_titles, msg)
+ writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
+
+ os.unlink(zip_file)
+
+ return (1, False)
+
+ # Drop previous tree
+ tempdir = tempfile.mkdtemp(prefix=".temp", dir=self.repo.location)
+ tmpname = os.path.basename(tempdir)
+
+ for name in os.listdir(self.repo.location):
+ if name != tmpname:
+ os.rename(
+ os.path.join(self.repo.location, name),
+ os.path.join(tempdir, name),
+ )
+
+ with zipfile.ZipFile(zip_file) as archive:
+ strip_comp = 0
+
+ for f in archive.namelist():
+ f = os.path.normpath(f)
+ if os.path.basename(f) == "profiles":
+ strip_comp = f.count("/")
+ break
+
+ for n in archive.infolist():
+ p = os.path.normpath(n.filename)
+
+ if os.path.isabs(p):
+ continue
+
+ parts = p.split("/")
+ dstpath = os.path.join(self.repo.location, *parts[strip_comp:])
+
+ if n.is_dir():
+ os.makedirs(dstpath, mode=0o755, exist_ok=True)
+ continue
+
+ with archive.open(n) as srcfile:
+ prvpath = os.path.join(tempdir, *parts[strip_comp:])
+
+ if os.path.exists(prvpath):
+ with open(prvpath, "rb") as prvfile:
+ if self._do_cmp(prvfile, srcfile):
+ os.rename(prvpath, dstpath)
+ continue
+ srcfile.seek(0)
+
+ with open(dstpath, "wb") as dstfile:
+ shutil.copyfileobj(srcfile, dstfile)
+
+ # Drop previous tree
+ shutil.rmtree(tempdir)
+
+ with open(os.path.join(self.repo.location, ".info"), "w") as infofile:
+ if etag:
+ infofile.write(f"etag {etag}\n")
+
+ os.unlink(zip_file)
+
+ return (os.EX_OK, True)
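
The .info bookkeeping in ZipFile.sync() is standard HTTP conditional-request
logic: replay the stored ETag via If-None-Match and treat a 304 response as
"unchanged". The same exchange in isolation (the URL is hypothetical):

    import urllib.error
    import urllib.request

    def fetch_if_changed(url, etag=None):
        req = urllib.request.Request(url=url)
        if etag:
            req.add_header("If-None-Match", etag)
        try:
            with urllib.request.urlopen(req) as response:
                return response.read(), response.headers.get("etag")
        except urllib.error.HTTPError as resp:
            if resp.code == 304:
                return None, etag  # not modified; keep the old validator
            raise

    # body, etag = fetch_if_changed("https://example.org/snapshot.zip")
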
diff --git a/lib/portage/sync/old_tree_timestamp.py b/lib/portage/sync/old_tree_timestamp.py
index 3558a25ad..950878ce5 100644
--- a/lib/portage/sync/old_tree_timestamp.py
+++ b/lib/portage/sync/old_tree_timestamp.py
@@ -82,7 +82,7 @@ def old_tree_timestamp_warn(portdir, settings):
warnsync = float(settings.get(var_name, default_warnsync))
except ValueError:
writemsg_level(
- "!!! %s contains non-numeric value: %s\n" % (var_name, settings[var_name]),
+ f"!!! {var_name} contains non-numeric value: {settings[var_name]}\n",
level=logging.ERROR,
noiselevel=-1,
)
@@ -94,7 +94,7 @@ def old_tree_timestamp_warn(portdir, settings):
if (unixtime - 86400 * warnsync) > lastsync:
out = EOutput()
if have_english_locale():
- out.ewarn("Last emerge --sync was %s ago." % whenago(unixtime - lastsync))
+ out.ewarn(f"Last emerge --sync was {whenago(unixtime - lastsync)} ago.")
else:
out.ewarn(
_("Last emerge --sync was %s.")
diff --git a/lib/portage/sync/syncbase.py b/lib/portage/sync/syncbase.py
index 94c873e1f..c3a07da7d 100644
--- a/lib/portage/sync/syncbase.py
+++ b/lib/portage/sync/syncbase.py
@@ -1,4 +1,4 @@
-# Copyright 2014-2020 Gentoo Authors
+# Copyright 2014-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
"""
@@ -55,25 +55,24 @@ class SyncBase:
@property
def has_bin(self):
- """Checks for existance of the external binary, and also
+ """Checks for existence of the external binary, and also
checks for storage driver configuration problems.
MUST only be called after _kwargs() has set the logger
"""
if self.bin_command is None:
msg = [
- "Command not found: %s" % self._bin_command,
- 'Type "emerge %s" to enable %s support.'
- % (self.bin_pkg, self._bin_command),
+ f"Command not found: {self._bin_command}",
+ f'Type "emerge {self.bin_pkg}" to enable {self._bin_command} support.',
]
for l in msg:
- writemsg_level("!!! %s\n" % l, level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"!!! {l}\n", level=logging.ERROR, noiselevel=-1)
return False
try:
self.repo_storage
except RepoStorageException as e:
- writemsg_level("!!! %s\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"!!! {e}\n", level=logging.ERROR, noiselevel=-1)
return False
return True
@@ -178,7 +177,7 @@ class SyncBase:
try:
retry_count = int(self.repo.sync_openpgp_key_refresh_retry_count)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-count: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-count: {e}")
else:
if retry_count <= 0:
return None
@@ -191,9 +190,7 @@ class SyncBase:
self.repo.sync_openpgp_key_refresh_retry_overall_timeout
)
except Exception as e:
- errors.append(
- "sync-openpgp-key-refresh-retry-overall-timeout: {}".format(e)
- )
+ errors.append(f"sync-openpgp-key-refresh-retry-overall-timeout: {e}")
else:
if retry_overall_timeout < 0:
errors.append(
@@ -213,7 +210,7 @@ class SyncBase:
self.repo.sync_openpgp_key_refresh_retry_delay_mult
)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-delay-mult: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-delay-mult: {e}")
else:
if retry_delay_mult <= 0:
errors.append(
@@ -229,7 +226,7 @@ class SyncBase:
self.repo.sync_openpgp_key_refresh_retry_delay_exp_base
)
except Exception as e:
- errors.append("sync-openpgp-key-refresh-retry-delay-exp: {}".format(e))
+ errors.append(f"sync-openpgp-key-refresh-retry-delay-exp: {e}")
else:
if retry_delay_exp_base <= 0:
errors.append(
@@ -243,11 +240,11 @@ class SyncBase:
lines.append("!!! Retry disabled for openpgp key refresh:")
lines.append("")
for msg in errors:
- lines.append(" {}".format(msg))
+ lines.append(f" {msg}")
lines.append("")
for line in lines:
- writemsg_level("{}\n".format(line), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"{line}\n", level=logging.ERROR, noiselevel=-1)
return None
@@ -293,11 +290,9 @@ class SyncBase:
out.ebegin(
"Refreshing keys from keyserver{}".format(
- (
- ""
- if self.repo.sync_openpgp_keyserver is None
- else " " + self.repo.sync_openpgp_keyserver
- )
+ ""
+ if self.repo.sync_openpgp_keyserver is None
+ else " " + self.repo.sync_openpgp_keyserver
)
)
retry_decorator = self._key_refresh_retry_decorator()
@@ -317,7 +312,7 @@ class SyncBase:
keyserver=self.repo.sync_openpgp_keyserver
)
except Exception as e:
- writemsg_level("%s\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"{e}\n", level=logging.ERROR, noiselevel=-1)
raise # retry
# The ThreadPoolExecutor that asyncio uses by default
@@ -333,18 +328,24 @@ class SyncBase:
loop.run_until_complete(decorated_func())
out.eend(0)
- def _get_openpgp_env(self, openpgp_key_path=None):
+ def _get_openpgp_env(self, openpgp_key_path=None, debug=False):
if gemato is not None:
# Override global proxy setting with one provided in emerge configuration
if "http_proxy" in self.spawn_kwargs["env"]:
proxy = self.spawn_kwargs["env"]["http_proxy"]
+ elif "https_proxy" in self.spawn_kwargs["env"]:
+ proxy = self.spawn_kwargs["env"]["https_proxy"]
else:
proxy = None
if openpgp_key_path:
- openpgp_env = gemato.openpgp.OpenPGPEnvironment(proxy=proxy)
+ openpgp_env = gemato.openpgp.OpenPGPEnvironment(
+ proxy=proxy, debug=debug
+ )
else:
- openpgp_env = gemato.openpgp.OpenPGPSystemEnvironment(proxy=proxy)
+ openpgp_env = gemato.openpgp.OpenPGPSystemEnvironment(
+ proxy=proxy, debug=debug
+ )
return openpgp_env
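
The sync-openpgp-key-refresh-retry-* options validated in syncbase.py describe
an exponential backoff schedule: roughly delay-mult * delay-exp-base**attempt
before each retry, bounded by the overall timeout. A sketch of such a schedule
(the helper and its option values are illustrative, not portage defaults):

    def refresh_delays(retry_count, delay_mult, delay_exp_base, overall_timeout):
        # Yield the delay before each retry until either the retry budget
        # or the overall timeout would be exceeded.
        total = 0
        for attempt in range(retry_count):
            delay = delay_mult * delay_exp_base**attempt
            if overall_timeout and total + delay > overall_timeout:
                break
            total += delay
            yield delay

    print(list(refresh_delays(5, 2, 2, 60)))  # [2, 4, 8, 16] (32 would exceed the 60s budget)
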
diff --git a/lib/portage/tests/.gnupg/openpgp-revocs.d/06B3A311BD775C280D22A9305D90EA06352177F6.rev b/lib/portage/tests/.gnupg/openpgp-revocs.d/06B3A311BD775C280D22A9305D90EA06352177F6.rev
new file mode 100644
index 000000000..a6752fd30
--- /dev/null
+++ b/lib/portage/tests/.gnupg/openpgp-revocs.d/06B3A311BD775C280D22A9305D90EA06352177F6.rev
@@ -0,0 +1,37 @@
+This is a revocation certificate for the OpenPGP key:
+
+pub rsa4096 2020-07-14 [S]
+ 06B3A311BD775C280D22A9305D90EA06352177F6
+uid Gentoo Portage Test Trusted Key (Test Only, Do NOT Trust!!!) (Gentoo Test Key) <test@example.org>
+
+A revocation certificate is a kind of "kill switch" to publicly
+declare that a key shall not anymore be used. It is not possible
+to retract such a revocation certificate once it has been published.
+
+Use it to revoke this key in case of a compromise or loss of
+the secret key. However, if the secret key is still accessible,
+it is better to generate a new revocation certificate and give
+a reason for the revocation. For details see the description of
+the gpg command "--generate-revocation" in the GnuPG manual.
+
+To avoid an accidental use of this file, a colon has been inserted
+before the 5 dashes below. Remove this colon with a text editor
+before importing and publishing this revocation certificate.
+
+:-----BEGIN PGP PUBLIC KEY BLOCK-----
+Comment: This is a revocation certificate
+
+iQI2BCABCAAgFiEEBrOjEb13XCgNIqkwXZDqBjUhd/YFAl8OFTwCHQAACgkQXZDq
+BjUhd/aXCA/+OgzosMDaDe5DNwkSi2yKdC2X18v8JcaYnXBUR93nXA0LVN7iVWkR
+WEH3NuVspQZ5vK+3AHTKabqZFC/buA5oQOH01Ncd4lQISfOOhFiBn5DIPX31BVT0
+iPmVkcxHAD4031ptP4oat6EFclT13SRchtlnAO04JofeHnzQIw3SozQGzXpAA1g4
+BogQ0HWA88HzuEYYE+e/yzZL4D496X1DTaXksg0Py5c4SS6u5pND6lcUtAGxAwa9
+sJFPs+coeURaRV99CrJfdh4u2OkvINTfrKOS6NFBQq6HVH5mLsRXZlcE4Oo4d+fN
+XoPrTZnRUqpJADUdjHFvO/lr0fArJTS5IQCVBNFeCMlvgmUPeKWJ1r6Uiwe/UHor
+9OP/tK97EqpsaXmHbo0jOUkn5iiUwy784+JBSSu/Q2NxqcBr74aaRdfxvs62dmv7
+droCDQi3ebqTdnlDSaeCIWHyVlSroOhZ+ZETVy193K1X7VXFX3hYKiJ3G8QZwy3e
+AlsVGjIHWfC+K+enIn+uwSUvOWPN3upK8kqMRuXvAOppFCE4sTqNbxUnHHXaqo/r
+s1q6zVsWVILBk97BHlJph2IaqhV7iIgPU97/r4U/BT11VqDFdVSHcXcs4PDNs5vh
+6qttaDiyDqZjwMr+0iDoouHxFpqY8e+3M2gycUgGr2XV6ML0pXE6BqA=
+=nIjC
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/lib/portage/tests/.gnupg/openpgp-revocs.d/8DEDA2CDED49C8809287B89D8812797DDF1DD192.rev b/lib/portage/tests/.gnupg/openpgp-revocs.d/8DEDA2CDED49C8809287B89D8812797DDF1DD192.rev
new file mode 100644
index 000000000..456e0aa50
--- /dev/null
+++ b/lib/portage/tests/.gnupg/openpgp-revocs.d/8DEDA2CDED49C8809287B89D8812797DDF1DD192.rev
@@ -0,0 +1,37 @@
+This is a revocation certificate for the OpenPGP key:
+
+pub rsa4096 2020-07-14 [S]
+ 8DEDA2CDED49C8809287B89D8812797DDF1DD192
+uid Gentoo Portage Test Untrusted Key (Test Only, Do NOT Trust!!!) (Gentoo Test Key) <test@example.org>
+
+A revocation certificate is a kind of "kill switch" to publicly
+declare that a key shall not anymore be used. It is not possible
+to retract such a revocation certificate once it has been published.
+
+Use it to revoke this key in case of a compromise or loss of
+the secret key. However, if the secret key is still accessible,
+it is better to generate a new revocation certificate and give
+a reason for the revocation. For details see the description of
+the gpg command "--generate-revocation" in the GnuPG manual.
+
+To avoid an accidental use of this file, a colon has been inserted
+before the 5 dashes below. Remove this colon with a text editor
+before importing and publishing this revocation certificate.
+
+:-----BEGIN PGP PUBLIC KEY BLOCK-----
+Comment: This is a revocation certificate
+
+iQI2BCABCAAgFiEEje2ize1JyICSh7idiBJ5fd8d0ZIFAl8OFXUCHQAACgkQiBJ5
+fd8d0ZKdwxAAhmkC0V+OLyOU9PCV6ogD9/3b3nVqNIreoc+gxHTLmEvxiMSItqmq
+DkcW9RJKAduA/HiLZQ8Yzxw+ldC6kuWqYEjNpSM54VDkrgOePi8W1bVDTCoSp7bo
+0JOG4frieqIxA6lhAA2UppH7EPRXoODPLYqooNxWAs3xxVrR6eGAb5l8NXzrymvN
+acFfOZ0s5FgADQskQHWVq6TaJn9DrcZxd+b+plSwPYDXqzTChKQ5jw7uMAPUvDkG
+JUWgoKiKSrK64bslUq8aEDEZQ4uxjyEi6G0vO/wPL/ysGhS7KkPgCZsEfNjWjajb
+jAsdvl1raoHxK/O7llMNr9uRAZtC56pJ//SRDc3kylZrkAo0RNoXQFowT739HWei
+2UkCFDfz488VKKrOI8TzTyUvLFEo14ZAXGg1wdHaGnbYMzxpKjP15alOFo6fKIcS
+Kz1f/Mab4wf4Sg0XAjQ9pnai1/U9ZF3/NSnRtYgJkLCrIEtRLrgSHJsLDPxjCfGV
+jWszAbIk167aA0yKsSmuwkpc5bZqqBaTo904r857fxyt5Les6SOHsV7iNXt7F+am
+03Y6u6m2eROba7M67l115vTyYcw5EZVp5j0nI81PXsC9X2DD1ci5xrNmPyEeupC4
+7y7mcGbUYPJAJHJ0kHG4ZYLnNMl42ZYr1ssEeasDwUsLWgVqvx9RkKI=
+=kVUQ
+-----END PGP PUBLIC KEY BLOCK-----
diff --git a/lib/portage/tests/.gnupg/private-keys-v1.d/273B030399E7BEA66A9AD42216DE7CA17BA5D42E.key b/lib/portage/tests/.gnupg/private-keys-v1.d/273B030399E7BEA66A9AD42216DE7CA17BA5D42E.key
new file mode 100644
index 000000000..0bd1026ad
--- /dev/null
+++ b/lib/portage/tests/.gnupg/private-keys-v1.d/273B030399E7BEA66A9AD42216DE7CA17BA5D42E.key
Binary files differ
diff --git a/lib/portage/tests/.gnupg/private-keys-v1.d/C99796FB85B0C3DF03314A11B5850C51167D6282.key b/lib/portage/tests/.gnupg/private-keys-v1.d/C99796FB85B0C3DF03314A11B5850C51167D6282.key
new file mode 100644
index 000000000..8e29ef43c
--- /dev/null
+++ b/lib/portage/tests/.gnupg/private-keys-v1.d/C99796FB85B0C3DF03314A11B5850C51167D6282.key
Binary files differ
diff --git a/lib/portage/tests/.gnupg/pubring.kbx b/lib/portage/tests/.gnupg/pubring.kbx
new file mode 100644
index 000000000..f6367f83b
--- /dev/null
+++ b/lib/portage/tests/.gnupg/pubring.kbx
Binary files differ
diff --git a/lib/portage/tests/.gnupg/trustdb.gpg b/lib/portage/tests/.gnupg/trustdb.gpg
new file mode 100644
index 000000000..db5b1023b
--- /dev/null
+++ b/lib/portage/tests/.gnupg/trustdb.gpg
Binary files differ
diff --git a/lib/portage/tests/__init__.py b/lib/portage/tests/__init__.py
index 02d9c4932..23dd366d8 100644
--- a/lib/portage/tests/__init__.py
+++ b/lib/portage/tests/__init__.py
@@ -1,8 +1,9 @@
# tests/__init__.py -- Portage Unit Test functionality
-# Copyright 2006-2021 Gentoo Authors
+# Copyright 2006-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import argparse
+import multiprocessing
import sys
import time
import unittest
@@ -12,8 +13,10 @@ from unittest.runner import TextTestResult as _TextTestResult
import portage
from portage import os
+from portage.util import no_color
from portage import _encodings
from portage import _unicode_decode
+from portage.output import colorize
from portage.proxy.objectproxy import ObjectProxy
@@ -62,151 +65,6 @@ def cnf_sbindir():
return os.path.join(portage.const.EPREFIX or "/", "usr", "sbin")
-def main():
- suite = unittest.TestSuite()
- basedir = Path(__file__).resolve().parent
-
- argv0 = Path(sys.argv[0])
-
- usage = "usage: %s [options] [tests to run]" % argv0.name
- parser = argparse.ArgumentParser(usage=usage)
- parser.add_argument(
- "-l", "--list", help="list all tests", action="store_true", dest="list_tests"
- )
- parser.add_argument("tests", nargs="*", type=Path)
- options = parser.parse_args(args=sys.argv)
-
- if (
- os.environ.get("NOCOLOR") in ("yes", "true")
- or os.environ.get("TERM") == "dumb"
- or not sys.stdout.isatty()
- ):
- portage.output.nocolor()
-
- if options.list_tests:
- testdir = argv0.parent
- for mydir in getTestDirs(basedir):
- testsubdir = mydir.name
- for name in getTestNames(mydir):
- print("%s/%s/%s.py" % (testdir, testsubdir, name))
- return os.EX_OK
-
- if len(options.tests) > 1:
- suite.addTests(getTestFromCommandLine(options.tests[1:], basedir))
- else:
- for mydir in getTestDirs(basedir):
- suite.addTests(getTests(mydir, basedir))
-
- result = TextTestRunner(verbosity=2).run(suite)
- if not result.wasSuccessful():
- return 1
- return os.EX_OK
-
-
-def my_import(name):
- mod = __import__(name)
- components = name.split(".")
- for comp in components[1:]:
- mod = getattr(mod, comp)
- return mod
-
-
-def getTestFromCommandLine(args, base_path):
- result = []
- for arg in args:
- realpath = arg.resolve()
- path = realpath.parent
- f = realpath.relative_to(path)
-
- if not f.name.startswith("test") or not f.suffix == ".py":
- raise Exception("Invalid argument: '%s'" % arg)
-
- mymodule = f.stem
- result.extend(getTestsFromFiles(path, base_path, [mymodule]))
- return result
-
-
-def getTestDirs(base_path):
- TEST_FILE = "__test__.py"
- testDirs = []
-
- # the os.walk help mentions relative paths as being quirky
- # I was tired of adding dirs to the list, so now we add __test__.py
- # to each dir we want tested.
- for testFile in base_path.rglob(TEST_FILE):
- testDirs.append(testFile.parent)
-
- testDirs.sort()
- return testDirs
-
-
-def getTestNames(path):
- files = path.glob("*")
- files = [f.stem for f in files if f.name.startswith("test") and f.suffix == ".py"]
- files.sort()
- return files
-
-
-def getTestsFromFiles(path, base_path, files):
- parent_path = path.relative_to(base_path)
- parent_module = ".".join(("portage", "tests") + parent_path.parts)
- result = []
- for mymodule in files:
- # Make the trailing / a . for module importing
- modname = ".".join((parent_module, mymodule))
- mod = my_import(modname)
- result.append(unittest.TestLoader().loadTestsFromModule(mod))
- return result
-
-
-def getTests(path, base_path):
- """
-
- path is the path to a given subdir ( 'portage/' for example)
- This does a simple filter on files in that dir to give us modules
- to import
-
- """
- return getTestsFromFiles(path, base_path, getTestNames(path))
-
-
-class TextTestResult(_TextTestResult):
- """
- We need a subclass of unittest.runner.TextTestResult to handle tests with TODO
-
- This just adds an addTodo method that can be used to add tests
- that are marked TODO; these can be displayed later
- by the test runner.
- """
-
- def __init__(self, stream, descriptions, verbosity):
- super(TextTestResult, self).__init__(stream, descriptions, verbosity)
- self.todoed = []
- self.portage_skipped = []
-
- def addTodo(self, test, info):
- self.todoed.append((test, info))
- if self.showAll:
- self.stream.writeln("TODO")
- elif self.dots:
- self.stream.write(".")
-
- def addPortageSkip(self, test, info):
- self.portage_skipped.append((test, info))
- if self.showAll:
- self.stream.writeln("SKIP")
- elif self.dots:
- self.stream.write(".")
-
- def printErrors(self):
- if self.dots or self.showAll:
- self.stream.writeln()
- self.printErrorList("ERROR", self.errors)
- self.printErrorList("FAIL", self.failures)
- self.printErrorList("TODO", self.todoed)
- self.printErrorList("SKIP", self.portage_skipped)
-
-
class TestCase(unittest.TestCase):
"""
We need a way to mark a unit test as "ok to fail"
@@ -217,68 +75,19 @@ class TestCase(unittest.TestCase):
def __init__(self, *pargs, **kwargs):
unittest.TestCase.__init__(self, *pargs, **kwargs)
- self.todo = False
- self.portage_skip = None
self.cnf_path = cnf_path
self.cnf_etc_path = cnf_etc_path
self.bindir = cnf_bindir
self.sbindir = cnf_sbindir
- def defaultTestResult(self):
- return TextTestResult()
-
- def run(self, result=None):
- if result is None:
- result = self.defaultTestResult()
- result.startTest(self)
- testMethod = getattr(self, self._testMethodName)
- try:
- ok = False
- try:
- try:
- self.setUp()
- except KeyboardInterrupt:
- raise
- except unittest.SkipTest:
- raise
- except Exception:
- result.addError(self, sys.exc_info())
- return
-
- testMethod()
- ok = True
- except unittest.SkipTest as e:
- result.addPortageSkip(self, "%s: SKIP: %s" % (testMethod, str(e)))
- except self.failureException:
- if self.portage_skip is not None:
- if self.portage_skip is True:
- result.addPortageSkip(self, "%s: SKIP" % testMethod)
- else:
- result.addPortageSkip(
- self, "%s: SKIP: %s" % (testMethod, self.portage_skip)
- )
- elif self.todo:
- result.addTodo(self, "%s: TODO" % testMethod)
- else:
- result.addFailure(self, sys.exc_info())
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- result.addError(self, sys.exc_info())
-
- try:
- self.tearDown()
- except SystemExit:
- raise
- except KeyboardInterrupt:
- raise
- except:
- result.addError(self, sys.exc_info())
- ok = False
- if ok:
- result.addSuccess(self)
- finally:
- result.stopTest(self)
+ def setUp(self):
+ """
+ Set up the multiprocessing start method if needed. It needs to be
+ done relatively late in order to work with the pytest-xdist
+ plugin due to execnet usage.
+ """
+ if os.environ.get("PORTAGE_MULTIPROCESSING_START_METHOD") == "spawn":
+ multiprocessing.set_start_method("spawn", force=True)
def assertRaisesMsg(self, msg, excClass, callableObj, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
@@ -297,53 +106,13 @@ class TestCase(unittest.TestCase):
excName = excClass.__name__
else:
excName = str(excClass)
- raise self.failureException("%s not raised: %s" % (excName, msg))
+ raise self.failureException(f"{excName} not raised: {msg}")
def assertNotExists(self, path):
"""Make sure |path| does not exist"""
path = Path(path)
if path.exists():
- raise self.failureException("path exists when it should not: %s" % path)
-
-
-class TextTestRunner(unittest.TextTestRunner):
- """
- We subclass unittest.TextTestRunner to output SKIP for tests that fail but are skippable
- """
-
- def _makeResult(self):
- return TextTestResult(self.stream, self.descriptions, self.verbosity)
-
- def run(self, test):
- """
- Run the given test case or test suite.
- """
- result = self._makeResult()
- startTime = time.time()
- test(result)
- stopTime = time.time()
- timeTaken = stopTime - startTime
- result.printErrors()
- self.stream.writeln(result.separator2)
- run = result.testsRun
- self.stream.writeln(
- "Ran %d test%s in %.3fs" % (run, run != 1 and "s" or "", timeTaken)
- )
- self.stream.writeln()
- if not result.wasSuccessful():
- self.stream.write("FAILED (")
- failed = len(result.failures)
- errored = len(result.errors)
- if failed:
- self.stream.write("failures=%d" % failed)
- if errored:
- if failed:
- self.stream.write(", ")
- self.stream.write("errors=%d" % errored)
- self.stream.writeln(")")
- else:
- self.stream.writeln("OK")
- return result
+ raise self.failureException(f"path exists when it should not: {path}")
test_cps = ["sys-apps/portage", "virtual/portage"]
diff --git a/lib/portage/tests/bin/meson.build b/lib/portage/tests/bin/meson.build
new file mode 100644
index 000000000..519972f0a
--- /dev/null
+++ b/lib/portage/tests/bin/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ 'setup_env.py',
+ 'test_dobin.py',
+ 'test_dodir.py',
+ 'test_doins.py',
+ 'test_eapi7_ver_funcs.py',
+ 'test_filter_bash_env.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/bin',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/bin/setup_env.py b/lib/portage/tests/bin/setup_env.py
index faef118b0..5787f8768 100644
--- a/lib/portage/tests/bin/setup_env.py
+++ b/lib/portage/tests/bin/setup_env.py
@@ -1,5 +1,5 @@
# setup_env.py -- Make sure bin subdir has sane env for testing
-# Copyright 2007-2013 Gentoo Foundation
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import tempfile
@@ -78,10 +78,7 @@ def portage_func(func, args, exit_status=0):
f = open("/dev/null", "wb")
fd_pipes = {0: 0, 1: f.fileno(), 2: f.fileno()}
- def pre_exec():
- os.chdir(env["S"])
-
- spawn([func] + args.split(), env=env, fd_pipes=fd_pipes, pre_exec=pre_exec)
+ spawn([func] + args.split(), env=env, fd_pipes=fd_pipes, cwd=env["S"])
f.close()
diff --git a/lib/portage/tests/bin/test_doins.py b/lib/portage/tests/bin/test_doins.py
index cb6b3a9dc..75a93ef9b 100644
--- a/lib/portage/tests/bin/test_doins.py
+++ b/lib/portage/tests/bin/test_doins.py
@@ -16,7 +16,7 @@ exists_in_D = setup_env.exists_in_D
class DoIns(setup_env.BinTestCase):
def testDoIns(self):
- """Tests the most basic senario."""
+ """Tests the most basic scenario."""
self.init()
try:
env = setup_env.env
@@ -98,8 +98,8 @@ class DoIns(setup_env.BinTestCase):
pass
uid = os.lstat(os.path.join(env["S"], "test")).st_uid
pw = pwd.getpwuid(uid)
- # Similary to testDoInsOptionUid, use user name.
- env["INSOPTIONS"] = "-o %s" % pw.pw_name
+ # Similarly to testDoInsOptionUid, use user name.
+ env["INSOPTIONS"] = f"-o {pw.pw_name}"
doins("test")
st = os.lstat(env["D"] + "/test")
if st.st_uid != uid:
@@ -115,7 +115,7 @@ class DoIns(setup_env.BinTestCase):
with open(os.path.join(env["S"], "test"), "w"):
pass
gid = os.lstat(os.path.join(env["S"], "test")).st_gid
- # Similary to testDoInsOptionUid, use gid.
+ # Similarly to testDoInsOptionUid, use gid.
env["INSOPTIONS"] = "-g %d" % gid
doins("test")
st = os.lstat(env["D"] + "/test")
@@ -133,8 +133,8 @@ class DoIns(setup_env.BinTestCase):
pass
gid = os.lstat(os.path.join(env["S"], "test")).st_gid
gr = grp.getgrgid(gid)
- # Similary to testDoInsOptionUid, use group name.
- env["INSOPTIONS"] = "-g %s" % gr.gr_name
+ # Similarly to testDoInsOptionUid, use group name.
+ env["INSOPTIONS"] = f"-g {gr.gr_name}"
doins("test")
st = os.lstat(env["D"] + "/test")
if st.st_gid != gid:
diff --git a/lib/portage/tests/bin/test_eapi7_ver_funcs.py b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
index a01901e27..0483a35b3 100644
--- a/lib/portage/tests/bin/test_eapi7_ver_funcs.py
+++ b/lib/portage/tests/bin/test_eapi7_ver_funcs.py
@@ -14,9 +14,9 @@ class TestEAPI7VerFuncs(TestCase):
Test that commands in test_cases produce expected output.
"""
with tempfile.NamedTemporaryFile("w") as test_script:
- test_script.write('source "%s"/eapi7-ver-funcs.sh\n' % (PORTAGE_BIN_PATH,))
+ test_script.write(f'source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh\n')
for cmd, exp in test_cases:
- test_script.write("%s\n" % (cmd,))
+ test_script.write(f"{cmd}\n")
test_script.flush()
s = subprocess.Popen(
@@ -29,18 +29,16 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
- self.assertEqual(
- result, exp, "%s -> %s; expected: %s" % (cmd, result, exp)
- )
+ self.assertEqual(result, exp, f"{cmd} -> {result}; expected: {exp}")
def _test_return(self, test_cases):
"""
Test that commands in test_cases give appropriate exit codes.
"""
with tempfile.NamedTemporaryFile("w+") as test_script:
- test_script.write('source "%s"/eapi7-ver-funcs.sh\n' % (PORTAGE_BIN_PATH,))
+ test_script.write(f'source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh\n')
for cmd, exp in test_cases:
- test_script.write("%s; echo $?\n" % (cmd,))
+ test_script.write(f"{cmd}; echo $?\n")
test_script.flush()
s = subprocess.Popen(
@@ -53,9 +51,7 @@ class TestEAPI7VerFuncs(TestCase):
for test_case, result in zip(test_cases, sout.decode().splitlines()):
cmd, exp = test_case
- self.assertEqual(
- result, exp, "%s -> %s; expected: %s" % (cmd, result, exp)
- )
+ self.assertEqual(result, exp, f"{cmd} -> {result}; expected: {exp}")
def _test_fail(self, test_cases):
"""
@@ -63,13 +59,10 @@ class TestEAPI7VerFuncs(TestCase):
"""
for cmd in test_cases:
- test = """
-source "%s"/eapi7-ver-funcs.sh
-die() { exit 1; }
-%s""" % (
- PORTAGE_BIN_PATH,
- cmd,
- )
+ test = f"""
+source "{PORTAGE_BIN_PATH}"/eapi7-ver-funcs.sh
+die() {{ exit 1; }}
+{cmd}"""
s = subprocess.Popen(
["bash", "-c", test], stdout=subprocess.PIPE, stderr=subprocess.PIPE
@@ -78,8 +71,7 @@ die() { exit 1; }
self.assertEqual(
s.returncode,
1,
- '"%s" did not fail; output: %s; %s)'
- % (cmd, sout.decode(), serr.decode()),
+ f'"{cmd}" did not fail; output: {sout.decode()}; {serr.decode()})',
)
def test_ver_cut(self):
diff --git a/lib/portage/tests/bin/test_filter_bash_env.py b/lib/portage/tests/bin/test_filter_bash_env.py
index 7f0bdf52f..9040a5fef 100644
--- a/lib/portage/tests/bin/test_filter_bash_env.py
+++ b/lib/portage/tests/bin/test_filter_bash_env.py
@@ -1,4 +1,4 @@
-# Copyright 2018 Gentoo Foundation
+# Copyright 2018-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import difflib
@@ -12,11 +12,10 @@ from portage.tests import TestCase
class TestFilterBashEnv(TestCase):
def testTestFilterBashEnv(self):
-
test_cases = (
(
"RDEPEND BASH.* _EPATCH_ECLASS",
- br"""declare -ir BASHPID="28997"
+ rb"""declare -ir BASHPID="28997"
declare -rx A="portage-2.3.24.tar.bz2"
declare -- DESKTOP_DATABASE_DIR="/usr/share/applications"
declare PDEPEND="
@@ -34,12 +33,10 @@ declare -- _EUTILS_ECLASS="1"
declare -- f
get_libdir ()
{
- local CONF_LIBDIR;
- if [ -n "${CONF_LIBDIR_OVERRIDE}" ]; then
- echo ${CONF_LIBDIR_OVERRIDE};
- else
- get_abi_LIBDIR;
- fi
+ local libdir_var="LIBDIR_${ABI}";
+ local libdir="lib";
+ [[ -n ${ABI} && -n ${!libdir_var} ]] && libdir=${!libdir_var};
+ echo "${libdir}"
}
make_wrapper ()
{
@@ -53,7 +50,7 @@ use_if_iuse ()
use $1
}
""",
- br"""declare -x A="portage-2.3.24.tar.bz2"
+ rb"""declare -x A="portage-2.3.24.tar.bz2"
declare -- DESKTOP_DATABASE_DIR="/usr/share/applications"
declare PDEPEND="
!build? (
@@ -66,12 +63,10 @@ declare -- _EUTILS_ECLASS="1"
declare -- f
get_libdir ()
{
- local CONF_LIBDIR;
- if [ -n "${CONF_LIBDIR_OVERRIDE}" ]; then
- echo ${CONF_LIBDIR_OVERRIDE};
- else
- get_abi_LIBDIR;
- fi
+ local libdir_var="LIBDIR_${ABI}";
+ local libdir="lib";
+ [[ -n ${ABI} && -n ${!libdir_var} ]] && libdir=${!libdir_var};
+ echo "${libdir}"
}
make_wrapper ()
{
diff --git a/lib/portage/tests/conftest.py b/lib/portage/tests/conftest.py
new file mode 100644
index 000000000..76bdaa381
--- /dev/null
+++ b/lib/portage/tests/conftest.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright 2006-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import grp
+import os
+import os.path as osp
+import pwd
+import signal
+import tempfile
+import shutil
+import sys
+
+import pytest
+
+import portage
+from portage.util._eventloop.global_event_loop import global_event_loop
+from portage.const import PORTAGE_BIN_PATH
+
+
+def debug_signal(signum, frame):
+ import pdb
+
+ pdb.set_trace()
+
+
+signal.signal(signal.SIGUSR1, debug_signal)
+
+
+@pytest.fixture(autouse=True, scope="session")
+def prepare_environment():
+ # Pretend that the current user's uid/gid are the 'portage' uid/gid,
+ # so things go smoothly regardless of the current user and global
+ # user/group configuration.
+ os.environ["PORTAGE_USERNAME"] = pwd.getpwuid(os.getuid()).pw_name
+ os.environ["PORTAGE_GRPNAME"] = grp.getgrgid(os.getgid()).gr_name
+
+ # Insert our parent dir so we can do shiny import "tests"
+ # This line courtesy of Marienz and Pkgcore ;)
+ sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.realpath(__file__)))))
+
+ portage._internal_caller = True
+
+ # Ensure that we don't instantiate portage.settings, so that tests should
+ # work the same regardless of global configuration file state/existence.
+ portage._disable_legacy_globals()
+
+ if portage.util.no_color(os.environ):
+ portage.output.nocolor()
+
+ # import portage.tests as tests
+
+ path = os.environ.get("PATH", "").split(":")
+ path = [x for x in path if x]
+
+ insert_bin_path = True
+ try:
+ insert_bin_path = not path or not os.path.samefile(path[0], PORTAGE_BIN_PATH)
+ except OSError:
+ pass
+
+ if insert_bin_path:
+ path.insert(0, PORTAGE_BIN_PATH)
+ os.environ["PATH"] = ":".join(path)
+
+ try:
+ # Copy GPG test keys to temporary directory
+ gpg_path = tempfile.mkdtemp(prefix="gpg_")
+
+ shutil.copytree(
+ os.path.join(os.path.dirname(os.path.realpath(__file__)), ".gnupg"),
+ gpg_path,
+ dirs_exist_ok=True,
+ )
+
+ os.chmod(gpg_path, 0o700)
+ os.environ["PORTAGE_GNUPGHOME"] = gpg_path
+
+ yield
+
+ finally:
+ global_event_loop().close()
+ shutil.rmtree(gpg_path, ignore_errors=True)
+
+
+# if __name__ == "__main__":
+# try:
+# sys.exit(tests.main())
+# finally:
+# global_event_loop().close()
+# shutil.rmtree(gpg_path, ignore_errors=True)
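The conftest above folds all session setup and teardown into a single autouse fixture. The skeleton of that pattern, as a sketch with a hypothetical temporary HOME in place of the GPG home:

import os
import shutil
import tempfile

import pytest


@pytest.fixture(autouse=True, scope="session")
def temp_home():
    # Runs once before the first test; everything after the yield
    # runs once after the last test, even when tests fail.
    home = tempfile.mkdtemp(prefix="home_")
    orig = os.environ.get("HOME")
    os.environ["HOME"] = home
    try:
        yield home
    finally:
        if orig is None:
            os.environ.pop("HOME", None)
        else:
            os.environ["HOME"] = orig
        shutil.rmtree(home, ignore_errors=True)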
diff --git a/lib/portage/tests/dbapi/meson.build b/lib/portage/tests/dbapi/meson.build
new file mode 100644
index 000000000..bbfb7f97a
--- /dev/null
+++ b/lib/portage/tests/dbapi/meson.build
@@ -0,0 +1,12 @@
+py.install_sources(
+ [
+ 'test_auxdb.py',
+ 'test_bintree.py',
+ 'test_fakedbapi.py',
+ 'test_portdb_cache.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/dbapi',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/dbapi/test_auxdb.py b/lib/portage/tests/dbapi/test_auxdb.py
index 41ca4936b..aac6ce361 100644
--- a/lib/portage/tests/dbapi/test_auxdb.py
+++ b/lib/portage/tests/dbapi/test_auxdb.py
@@ -1,6 +1,9 @@
-# Copyright 2020-2021 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
+import multiprocessing
+
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import ResolverPlayground
from portage.util.futures import asyncio
@@ -13,7 +16,7 @@ class AuxdbTestCase(TestCase):
from portage.cache.anydbm import database
except ImportError:
self.skipTest("dbm import failed")
- self._test_mod("portage.cache.anydbm.database", multiproc=False)
+ self._test_mod("portage.cache.anydbm.database", multiproc=False, picklable=True)
def test_flat_hash_md5(self):
self._test_mod("portage.cache.flat_hash.md5_database")
@@ -26,9 +29,9 @@ class AuxdbTestCase(TestCase):
import sqlite3
except ImportError:
self.skipTest("sqlite3 import failed")
- self._test_mod("portage.cache.sqlite.database")
+ self._test_mod("portage.cache.sqlite.database", picklable=True)
- def _test_mod(self, auxdbmodule, multiproc=True):
+ def _test_mod(self, auxdbmodule, multiproc=True, picklable=True):
ebuilds = {
"cat/A-1": {
"EAPI": "7",
@@ -48,7 +51,7 @@ class AuxdbTestCase(TestCase):
"foo": ("inherit bar",),
"bar": (
"EXPORT_FUNCTIONS src_prepare",
- 'DEPEND="{}"'.format(eclass_depend),
+ f'DEPEND="{eclass_depend}"',
"bar_src_prepare() { default; }",
),
}
@@ -56,60 +59,92 @@ class AuxdbTestCase(TestCase):
playground = ResolverPlayground(
ebuilds=ebuilds,
eclasses=eclasses,
- user_config={"modules": ("portdbapi.auxdbmodule = %s" % auxdbmodule,)},
+ user_config={"modules": (f"portdbapi.auxdbmodule = {auxdbmodule}",)},
)
- portdb = playground.trees[playground.eroot]["porttree"].dbapi
+ try:
+ portdb = playground.trees[playground.eroot]["porttree"].dbapi
+ metadata_keys = ["DEFINED_PHASES", "DEPEND", "EAPI", "INHERITED"]
+
+ test_func = functools.partial(
+ self._run_test_mod_async, ebuilds, metadata_keys, portdb
+ )
+
+ results = test_func()
+
+ self._compare_results(
+ ebuilds, eclass_defined_phases, eclass_depend, ebuild_inherited, results
+ )
- def test_func():
loop = asyncio._wrap_loop()
- return loop.run_until_complete(
- self._test_mod_async(
+            picklable_or_fork = picklable or multiprocessing.get_start_method() == "fork"
+ if picklable_or_fork:
+ results = loop.run_until_complete(
+ loop.run_in_executor(ForkExecutor(), test_func)
+ )
+
+ self._compare_results(
ebuilds,
- ebuild_inherited,
eclass_defined_phases,
eclass_depend,
- portdb,
+ ebuild_inherited,
+ results,
)
- )
- self.assertTrue(test_func())
+ auxdb = portdb.auxdb[portdb.getRepositoryPath("test_repo")]
+ cpv = next(iter(ebuilds))
- loop = asyncio._wrap_loop()
- self.assertTrue(
- loop.run_until_complete(loop.run_in_executor(ForkExecutor(), test_func))
- )
-
- auxdb = portdb.auxdb[portdb.getRepositoryPath("test_repo")]
- cpv = next(iter(ebuilds))
-
- def modify_auxdb():
- metadata = auxdb[cpv]
- metadata["RESTRICT"] = "test"
- try:
- del metadata["_eclasses_"]
- except KeyError:
- pass
- auxdb[cpv] = metadata
+ modify_auxdb = functools.partial(self._modify_auxdb, auxdb, cpv)
- if multiproc:
- loop.run_until_complete(loop.run_in_executor(ForkExecutor(), modify_auxdb))
- else:
- modify_auxdb()
+ if multiproc and picklable_or_fork:
+ loop.run_until_complete(
+ loop.run_in_executor(ForkExecutor(), modify_auxdb)
+ )
+ else:
+ modify_auxdb()
- self.assertEqual(auxdb[cpv]["RESTRICT"], "test")
+ self.assertEqual(auxdb[cpv]["RESTRICT"], "test")
+ finally:
+ playground.cleanup()
- async def _test_mod_async(
- self, ebuilds, ebuild_inherited, eclass_defined_phases, eclass_depend, portdb
+ def _compare_results(
+ self, ebuilds, eclass_defined_phases, eclass_depend, ebuild_inherited, results
):
+ for cpv, metadata in ebuilds.items():
+ self.assertEqual(results[cpv]["DEFINED_PHASES"], eclass_defined_phases)
+ self.assertEqual(results[cpv]["DEPEND"], eclass_depend)
+ self.assertEqual(results[cpv]["EAPI"], metadata["EAPI"])
+ self.assertEqual(
+ frozenset(results[cpv]["INHERITED"].split()), ebuild_inherited
+ )
+ @staticmethod
+ def _run_test_mod_async(ebuilds, metadata_keys, portdb):
+ loop = asyncio._wrap_loop()
+ return loop.run_until_complete(
+ AuxdbTestCase._test_mod_async(
+ ebuilds,
+ metadata_keys,
+ portdb,
+ )
+ )
+
+ @staticmethod
+ async def _test_mod_async(ebuilds, metadata_keys, portdb):
+ results = {}
for cpv, metadata in ebuilds.items():
- defined_phases, depend, eapi, inherited = await portdb.async_aux_get(
- cpv, ["DEFINED_PHASES", "DEPEND", "EAPI", "INHERITED"]
+ results[cpv] = dict(
+ zip(metadata_keys, await portdb.async_aux_get(cpv, metadata_keys))
)
- self.assertEqual(defined_phases, eclass_defined_phases)
- self.assertEqual(depend, eclass_depend)
- self.assertEqual(eapi, metadata["EAPI"])
- self.assertEqual(frozenset(inherited.split()), ebuild_inherited)
- return True
+ return results
+
+ @staticmethod
+ def _modify_auxdb(auxdb, cpv):
+ metadata = auxdb[cpv]
+ metadata["RESTRICT"] = "test"
+ try:
+ del metadata["_eclasses_"]
+ except KeyError:
+ pass
+ auxdb[cpv] = metadata
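The move to module-level staticmethods and functools.partial is what keeps test_func and modify_auxdb picklable when multiprocessing uses the "spawn" start method; only "fork" tolerates locally defined callables. A stdlib-only sketch of the constraint (query and the pool are illustrative, not portage APIs):

import functools
import multiprocessing


def query(db, key):
    # Module-level functions pickle cleanly; closures and functions
    # defined inside a test method do not under "spawn".
    return db.get(key)


if __name__ == "__main__":
    ctx = multiprocessing.get_context("spawn")
    task = functools.partial(query, {"a": 1}, "a")
    with ctx.Pool(1) as pool:
        assert pool.apply(task) == 1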
diff --git a/lib/portage/tests/dbapi/test_bintree.py b/lib/portage/tests/dbapi/test_bintree.py
new file mode 100644
index 000000000..91ac338a0
--- /dev/null
+++ b/lib/portage/tests/dbapi/test_bintree.py
@@ -0,0 +1,231 @@
+# Copyright 2022-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from unittest.mock import MagicMock, patch, call
+import os
+import tempfile
+
+from portage.tests import TestCase
+
+from portage.dbapi.bintree import binarytree
+from portage.localization import _
+from portage.const import BINREPOS_CONF_FILE
+
+
+class BinarytreeTestCase(TestCase):
+ @classmethod
+ def setUpClass(cls):
+ """
+ Create a temporary TMPDIR which prevents test
+ methods of this class from leaving behind an empty
+ /tmp/Packages file if TMPDIR is initially unset.
+ """
+ cls._orig_tmpdir = os.environ.get("TMPDIR")
+ cls._tmpdir = tempfile.TemporaryDirectory()
+ os.environ["TMPDIR"] = cls._tmpdir.name
+
+ @classmethod
+ def tearDownClass(cls):
+ cls._tmpdir.cleanup()
+ if cls._orig_tmpdir is None:
+ os.environ.pop("TMPDIR", None)
+ else:
+ os.environ["TMPDIR"] = cls._orig_tmpdir
+ del cls._orig_tmpdir, cls._tmpdir
+
+ def test_required_init_params(self):
+ with self.assertRaises(TypeError) as cm:
+ binarytree()
+ self.assertEqual(str(cm.exception), "pkgdir parameter is required")
+ with self.assertRaises(TypeError) as cm:
+ binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"))
+ self.assertEqual(str(cm.exception), "settings parameter is required")
+
+ def test_init_with_legacy_params_warns(self):
+ with self.assertWarns(DeprecationWarning):
+ binarytree(
+ _unused=None, pkgdir=os.getenv("TMPDIR", "/tmp"), settings=MagicMock()
+ )
+ with self.assertWarns(DeprecationWarning):
+ binarytree(
+ virtual=None, pkgdir=os.getenv("TMPDIR", "/tmp"), settings=MagicMock()
+ )
+
+ def test_instance_has_required_attrs(self):
+        # Quite a smoke test. What would be a better testing strategy?
+ # Not sure yet...
+ required_attrs_no_multi_instance = {
+ "pkgdir",
+ "_multi_instance",
+ "dbapi",
+ "update_ents",
+ "move_slot_ent",
+ "populated",
+ "tree",
+ "_binrepos_conf",
+ "_remote_has_index",
+ "_remotepkgs",
+ "_additional_pkgs",
+ "invalids",
+ "invalid_paths",
+ "settings",
+ "_pkg_paths",
+ "_populating",
+ "_all_directory",
+ "_pkgindex_version",
+ "_pkgindex_hashes",
+ "_pkgindex_file",
+ "_pkgindex_keys",
+ "_pkgindex_aux_keys",
+ "_pkgindex_use_evaluated_keys",
+ "_pkgindex_header",
+ "_pkgindex_header_keys",
+ "_pkgindex_default_pkg_data",
+ "_pkgindex_inherited_keys",
+ "_pkgindex_default_header_data",
+ "_pkgindex_translated_keys",
+ "_pkgindex_allowed_pkg_keys",
+ }
+ no_multi_instance_settings = MagicMock()
+ no_multi_instance_settings.features = ""
+ no_multi_instance_bt = binarytree(
+ pkgdir=os.getenv("TMPDIR", "/tmp"), settings=no_multi_instance_settings
+ )
+ multi_instance_settings = MagicMock()
+ multi_instance_settings.features = "binpkg-multi-instance"
+ multi_instance_bt = binarytree(
+ pkgdir=os.getenv("TMPDIR", "/tmp"), settings=multi_instance_settings
+ )
+ for attr in required_attrs_no_multi_instance:
+ getattr(no_multi_instance_bt, attr)
+ getattr(multi_instance_bt, attr)
+ # The next attribute is the difference between multi instance
+ # and no multi instance:
+ getattr(multi_instance_bt, "_allocate_filename")
+
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+    def test_populate_without_updates_repos_nor_getbinpkgs(self, ppopulate_local):
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=MagicMock())
+ ppopulate_local.return_value = {}
+ bt.populate()
+ ppopulate_local.assert_called_once_with(reindex=True, invalid_errors=True)
+ self.assertFalse(bt._populating)
+ self.assertTrue(bt.populated)
+
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+ def test_populate_calls_twice_populate_local_if_updates(self, ppopulate_local):
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=MagicMock())
+ bt.populate()
+ self.assertIn(
+ call(reindex=True, invalid_errors=True), ppopulate_local.mock_calls
+ )
+ self.assertIn(call(), ppopulate_local.mock_calls)
+ self.assertEqual(ppopulate_local.call_count, 2)
+
+ @patch("portage.dbapi.bintree.binarytree._populate_additional")
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+ def test_populate_with_repos(self, ppopulate_local, ppopulate_additional):
+ repos = ("one", "two")
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=MagicMock())
+ bt.populate(add_repos=repos)
+ ppopulate_additional.assert_called_once_with(repos)
+
+ @patch("portage.dbapi.bintree.BinRepoConfigLoader")
+ @patch("portage.dbapi.bintree.binarytree._populate_remote")
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+ def test_populate_with_getbinpkgs(
+ self, ppopulate_local, ppopulate_remote, pBinRepoConfigLoader
+ ):
+ refresh = "something"
+ settings = MagicMock()
+ settings.__getitem__.return_value = "/some/path"
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=settings)
+ bt.populate(getbinpkgs=True, getbinpkg_refresh=refresh)
+ ppopulate_remote.assert_called_once_with(
+ getbinpkg_refresh=refresh, pretend=False
+ )
+
+ @patch("portage.dbapi.bintree.writemsg")
+ @patch("portage.dbapi.bintree.BinRepoConfigLoader")
+ @patch("portage.dbapi.bintree.binarytree._populate_remote")
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+ def test_populate_with_getbinpkgs_and_not_BinRepoConfigLoader(
+ self, ppopulate_local, ppopulate_remote, pBinRepoConfigLoader, pwritemsg
+ ):
+ refresh = "something"
+ settings = MagicMock()
+ portage_root = "/some/path"
+ settings.__getitem__.return_value = portage_root
+ pBinRepoConfigLoader.return_value = None
+ conf_file = os.path.join(portage_root, BINREPOS_CONF_FILE)
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=settings)
+ bt.populate(getbinpkgs=True, getbinpkg_refresh=refresh)
+ ppopulate_remote.assert_not_called()
+ pwritemsg.assert_called_once_with(
+ _(
+ f"!!! {conf_file} is missing (or PORTAGE_BINHOST is unset)"
+ ", but use is requested.\n"
+ ),
+ noiselevel=-1,
+ )
+
+ @patch("portage.dbapi.bintree.BinRepoConfigLoader")
+ @patch("portage.dbapi.bintree.binarytree._populate_remote")
+ @patch("portage.dbapi.bintree.binarytree._populate_local")
+ def test_default_getbinpkg_refresh_in_populate(
+ self, ppopulate_local, ppopulate_remote, pBinRepoConfigLoader
+ ):
+ """Bug #864259
+ This test fixes the bug. It requires that
+ ``_emerge.actions.run_action`` calls ``binarytree.populate``
+ explicitly with ``getbinpkg_refresh=True``
+ """
+ settings = MagicMock()
+ settings.__getitem__.return_value = "/some/path"
+ bt = binarytree(pkgdir=os.getenv("TMPDIR", "/tmp"), settings=settings)
+ bt.populate(getbinpkgs=True)
+ ppopulate_remote.assert_called_once_with(getbinpkg_refresh=False, pretend=False)
+
+ @patch("portage.dbapi.bintree.BinRepoConfigLoader")
+ @patch("portage.dbapi.bintree.binarytree._run_trust_helper")
+ def test_default_getbinpkg_refresh_in_populate_trusthelper(
+ self, run_trust_helper, pBinRepoConfigLoader
+ ):
+ """
+ Test for bug #915842.
+
+ Verify that we call the trust helper in non-pretend mode.
+ """
+ settings = MagicMock()
+ settings.features = ["binpkg-request-signature"]
+ settings.__getitem__.return_value = "/some/path"
+
+ d = tempfile.TemporaryDirectory()
+ try:
+ bt = binarytree(pkgdir=d.name, settings=settings)
+ bt.populate(getbinpkgs=True, pretend=False)
+ run_trust_helper.assert_called_once()
+ finally:
+ d.cleanup()
+
+ @patch("portage.dbapi.bintree.BinRepoConfigLoader")
+ @patch("portage.dbapi.bintree.binarytree._run_trust_helper")
+ def test_default_getbinpkg_refresh_in_populate_trusthelper_pretend(
+ self, run_trust_helper, pBinRepoConfigLoader
+ ):
+ """
+ Test for bug #915842.
+
+ Verify we do not call the trust helper in pretend mode.
+ """
+ settings = MagicMock()
+ settings.features = ["binpkg-request-signature"]
+ settings.__getitem__.return_value = "/some/path"
+
+ d = tempfile.TemporaryDirectory()
+ try:
+ bt = binarytree(pkgdir=d.name, settings=settings)
+ bt.populate(getbinpkgs=True, pretend=True)
+ run_trust_helper.assert_not_called()
+ finally:
+ d.cleanup()
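These binarytree tests lean on unittest.mock: patch a method on the class, drive the code under test, then assert on the recorded calls. The same mechanics in a standalone sketch (Greeter is hypothetical):

from unittest.mock import call, patch


class Greeter:
    def fetch(self, name):
        raise RuntimeError("expensive call we never want in tests")

    def greet(self, name):
        return f"hello {self.fetch(name)}"


@patch.object(Greeter, "fetch")
def test_greet(mock_fetch):
    mock_fetch.return_value = "world"
    assert Greeter().greet("world") == "hello world"
    # The same assertion helpers used above:
    mock_fetch.assert_called_once_with("world")
    assert call("world") in mock_fetch.mock_calls


test_greet()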
diff --git a/lib/portage/tests/dbapi/test_fakedbapi.py b/lib/portage/tests/dbapi/test_fakedbapi.py
index 08bffbe02..33ebbe30a 100644
--- a/lib/portage/tests/dbapi/test_fakedbapi.py
+++ b/lib/portage/tests/dbapi/test_fakedbapi.py
@@ -101,7 +101,7 @@ class TestFakedbapi(TestCase):
self.assertEqual(
fakedb.match(atom),
expected_result,
- "fakedb.match('%s') = %s != %s" % (atom, result, expected_result),
+ f"fakedb.match('{atom}') = {result} != {expected_result}",
)
finally:
shutil.rmtree(tempdir)
diff --git a/lib/portage/tests/dbapi/test_portdb_cache.py b/lib/portage/tests/dbapi/test_portdb_cache.py
index ad97d82ba..c24a4f209 100644
--- a/lib/portage/tests/dbapi/test_portdb_cache.py
+++ b/lib/portage/tests/dbapi/test_portdb_cache.py
@@ -1,6 +1,7 @@
-# Copyright 2012-2018 Gentoo Foundation
+# Copyright 2012-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import shutil
import subprocess
import sys
import textwrap
@@ -23,9 +24,21 @@ class PortdbCacheTestCase(TestCase):
"dev-libs/A-2": {},
"sys-apps/B-1": {},
"sys-apps/B-2": {},
+ "sys-apps/C-1": {
+ "EAPI": 8,
+ "MISC_CONTENT": "inherit bar foo baz",
+ },
}
- playground = ResolverPlayground(ebuilds=ebuilds, debug=debug)
+ # The convoluted structure here is to test accumulation
+ # of IDEPEND across eclasses (bug #870295).
+ eclasses = {
+ "foo": ("inherit bar",),
+ "bar": ("IDEPEND=dev-libs/A",),
+ "baz": ("IDEPEND=",),
+ }
+
+ playground = ResolverPlayground(ebuilds=ebuilds, eclasses=eclasses, debug=debug)
settings = playground.settings
eprefix = settings["EPREFIX"]
test_repo_location = settings.repositories["test_repo"].location
@@ -40,7 +53,7 @@ class PortdbCacheTestCase(TestCase):
portage_python,
"-b",
"-Wd",
- os.path.join(self.bindir, "egencache"),
+ os.path.join(str(self.bindir), "egencache"),
"--update-manifests",
"--sign-manifests=n",
"--repo",
@@ -51,6 +64,7 @@ class PortdbCacheTestCase(TestCase):
python_cmd = (portage_python, "-b", "-Wd", "-c")
test_commands = (
+ (lambda: shutil.rmtree(md5_cache_dir) or True,),
(lambda: not os.path.exists(pms_cache_dir),),
(lambda: not os.path.exists(md5_cache_dir),),
python_cmd
@@ -161,9 +175,20 @@ class PortdbCacheTestCase(TestCase):
"""
),
),
+ (portage_python, "-b", "-Wd", "-Wi::DeprecationWarning", "-c")
+ + (
+ textwrap.dedent(
+ """
+ import os, sys, portage
+ location = portage.portdb.repositories['test_repo'].location
+ if not portage.portdb._pregen_auxdb[location]["sys-apps/C-1"]['IDEPEND']:
+ sys.exit(1)
+ """
+ ),
+ ),
# Test auto-detection and preference for md5-cache when both
# cache formats are available but layout.conf is absent.
- (BASH_BINARY, "-c", "rm %s" % portage._shell_quote(layout_conf_path)),
+ (BASH_BINARY, "-c", f"rm {portage._shell_quote(layout_conf_path)}"),
python_cmd
+ (
textwrap.dedent(
@@ -200,7 +225,7 @@ class PortdbCacheTestCase(TestCase):
pythonpath = PORTAGE_PYM_PATH + pythonpath
env = {
- "PATH": os.environ.get("PATH", ""),
+ "PATH": settings["PATH"],
"PORTAGE_OVERRIDE_EPREFIX": eprefix,
"PORTAGE_PYTHON": portage_python,
"PORTAGE_REPOSITORIES": settings.repositories.config_string(),
@@ -229,9 +254,8 @@ class PortdbCacheTestCase(TestCase):
stdout = subprocess.PIPE
for i, args in enumerate(test_commands):
-
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
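The test_commands loop dispatches on the first tuple element: a callable runs in-process, anything else is an argv for subprocess. That dispatch, reduced to a sketch (the commands are placeholders):

import subprocess
import sys

commands = (
    (lambda: True,),
    (sys.executable, "-c", "import sys; sys.exit(0)"),
)

for i, args in enumerate(commands):
    if callable(args[0]):
        assert args[0](), f"callable at index {i} failed"
        continue
    proc = subprocess.run(args)
    assert proc.returncode == 0, f"command at index {i} failed"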
diff --git a/lib/portage/tests/dep/meson.build b/lib/portage/tests/dep/meson.build
new file mode 100644
index 000000000..7350f7775
--- /dev/null
+++ b/lib/portage/tests/dep/meson.build
@@ -0,0 +1,29 @@
+py.install_sources(
+ [
+ 'test_atom.py',
+ 'test_check_required_use.py',
+ 'test_extended_atom_dict.py',
+ 'test_extract_affecting_use.py',
+ 'test_standalone.py',
+ 'test_best_match_to_list.py',
+ 'test_dep_getcpv.py',
+ 'test_dep_getrepo.py',
+ 'test_dep_getslot.py',
+ 'test_dep_getusedeps.py',
+ 'test_dnf_convert.py',
+ 'test_get_operator.py',
+ 'test_get_required_use_flags.py',
+ 'test_isjustname.py',
+ 'test_isvalidatom.py',
+ 'test_libc.py',
+ 'test_match_from_list.py',
+ 'test_overlap_dnf.py',
+ 'test_paren_reduce.py',
+ 'test_soname_atom_pickle.py',
+ 'test_use_reduce.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/dep',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/dep/testAtom.py b/lib/portage/tests/dep/test_atom.py
index 33139d83a..b7d8bee8b 100644
--- a/lib/portage/tests/dep/testAtom.py
+++ b/lib/portage/tests/dep/test_atom.py
@@ -8,7 +8,6 @@ from portage.exception import InvalidAtom
class TestAtom(TestCase):
def testAtom(self):
-
tests = (
(
"=sys-apps/portage-2.1-r1:0[doc,a=,!b=,c?,!d?,-e]",
@@ -218,23 +217,23 @@ class TestAtom(TestCase):
self.assertEqual(
op,
a.operator,
- msg="Atom('%s').operator = %s == '%s'" % (atom, a.operator, op),
- )
- self.assertEqual(
- cp, a.cp, msg="Atom('%s').cp = %s == '%s'" % (atom, a.cp, cp)
+ msg=f"Atom('{atom}').operator = {a.operator} == '{op}'",
)
+ self.assertEqual(cp, a.cp, msg=f"Atom('{atom}').cp = {a.cp} == '{cp}'")
if ver is not None:
- cpv = "%s-%s" % (cp, ver)
+ cpv = f"{cp}-{ver}"
else:
cpv = cp
+ self.assertEqual(cpv, a.cpv, msg=f"Atom('{atom}').cpv = {a.cpv} == '{cpv}'")
self.assertEqual(
- cpv, a.cpv, msg="Atom('%s').cpv = %s == '%s'" % (atom, a.cpv, cpv)
- )
- self.assertEqual(
- slot, a.slot, msg="Atom('%s').slot = %s == '%s'" % (atom, a.slot, slot)
+ slot,
+ a.slot,
+ msg=f"Atom('{atom}').slot = {a.slot} == '{slot}'",
)
self.assertEqual(
- repo, a.repo, msg="Atom('%s').repo == %s == '%s'" % (atom, a.repo, repo)
+ repo,
+ a.repo,
+ msg=f"Atom('{atom}').repo == {a.repo} == '{repo}'",
)
if a.use:
@@ -244,7 +243,7 @@ class TestAtom(TestCase):
self.assertEqual(
use,
returned_use,
- msg="Atom('%s').use = %s == '%s'" % (atom, returned_use, use),
+ msg=f"Atom('{atom}').use = {returned_use} == '{use}'",
)
for atom, allow_wildcard, allow_repo in tests_xfail:
@@ -302,7 +301,7 @@ class TestAtom(TestCase):
self.assertEqual(
v,
getattr(a, k),
- msg="Atom('%s').%s = %s == '%s'" % (atom, k, getattr(a, k), v),
+ msg=f"Atom('{atom}').{k} = {getattr(a, k)} == '{v}'",
)
def test_intersects(self):
@@ -323,7 +322,7 @@ class TestAtom(TestCase):
self.assertEqual(
Atom(atom).intersects(Atom(other)),
expected_result,
- "%s and %s should intersect: %s" % (atom, other, expected_result),
+ f"{atom} and {other} should intersect: {expected_result}",
)
def test_violated_conditionals(self):
@@ -553,22 +552,28 @@ class TestAtom(TestCase):
other_use, validator.is_valid_flag, parent_use
)
if parent_use is None:
- fail_msg = "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s" % (
- atom,
- " ".join(other_use),
- " ".join(iuse),
- "None",
- str(violated_atom),
- expected_violated_atom,
+ fail_msg = (
+ "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s"
+ % (
+ atom,
+ " ".join(other_use),
+ " ".join(iuse),
+ "None",
+ str(violated_atom),
+ expected_violated_atom,
+ )
)
else:
- fail_msg = "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s" % (
- atom,
- " ".join(other_use),
- " ".join(iuse),
- " ".join(parent_use),
- str(violated_atom),
- expected_violated_atom,
+ fail_msg = (
+ "Atom: %s, other_use: %s, iuse: %s, parent_use: %s, got: %s, expected: %s"
+ % (
+ atom,
+ " ".join(other_use),
+ " ".join(iuse),
+ " ".join(parent_use),
+ str(violated_atom),
+ expected_violated_atom,
+ )
)
self.assertEqual(str(violated_atom), expected_violated_atom, fail_msg)
diff --git a/lib/portage/tests/dep/testCheckRequiredUse.py b/lib/portage/tests/dep/test_check_required_use.py
index cbb1a608a..cbb1a608a 100644
--- a/lib/portage/tests/dep/testCheckRequiredUse.py
+++ b/lib/portage/tests/dep/test_check_required_use.py
diff --git a/lib/portage/tests/dep/test_dep_getcpv.py b/lib/portage/tests/dep/test_dep_getcpv.py
index c8e5d3231..a057b59be 100644
--- a/lib/portage/tests/dep/test_dep_getcpv.py
+++ b/lib/portage/tests/dep/test_dep_getcpv.py
@@ -10,7 +10,6 @@ class DepGetCPV(TestCase):
"""A simple testcase for isvalidatom"""
def testDepGetCPV(self):
-
prefix_ops = ["<", ">", "=", "~", "<=", ">=", "!=", "!<", "!>", "!~"]
bad_prefix_ops = [">~", "<~", "~>", "~<"]
diff --git a/lib/portage/tests/dep/test_dep_getrepo.py b/lib/portage/tests/dep/test_dep_getrepo.py
index 2acec6119..745a1d80c 100644
--- a/lib/portage/tests/dep/test_dep_getrepo.py
+++ b/lib/portage/tests/dep/test_dep_getrepo.py
@@ -9,7 +9,6 @@ class DepGetRepo(TestCase):
"""A simple testcase for isvalidatom"""
def testDepGetRepo(self):
-
repo_char = "::"
repos = ("a", "repo-name", "repo_name", "repo123", None)
cpvs = ["sys-apps/portage"]
diff --git a/lib/portage/tests/dep/test_dep_getslot.py b/lib/portage/tests/dep/test_dep_getslot.py
index 804d8880c..0a19822cd 100644
--- a/lib/portage/tests/dep/test_dep_getslot.py
+++ b/lib/portage/tests/dep/test_dep_getslot.py
@@ -10,7 +10,6 @@ class DepGetSlot(TestCase):
"""A simple testcase for isvalidatom"""
def testDepGetSlot(self):
-
slot_char = ":"
slots = ("a", "1.2", "1", "IloveVapier", None)
cpvs = ["sys-apps/portage"]
diff --git a/lib/portage/tests/dep/test_dep_getusedeps.py b/lib/portage/tests/dep/test_dep_getusedeps.py
index 3a4ada7fa..02f654a81 100644
--- a/lib/portage/tests/dep/test_dep_getusedeps.py
+++ b/lib/portage/tests/dep/test_dep_getusedeps.py
@@ -12,7 +12,6 @@ class DepGetUseDeps(TestCase):
"""A simple testcase for dep_getusedeps"""
def testDepGetUseDeps(self):
-
for mycpv in test_cps:
for version in test_versions:
for slot in test_slots:
@@ -23,7 +22,7 @@ class DepGetUseDeps(TestCase):
if slot:
cpv += ":" + slot
if isinstance(use, tuple):
- cpv += "[%s]" % (",".join(use),)
+ cpv += f"[{','.join(use)}]"
self.assertEqual(dep_getusedeps(cpv), use)
else:
if len(use):
diff --git a/lib/portage/tests/dep/test_dnf_convert.py b/lib/portage/tests/dep/test_dnf_convert.py
index 434ee5aa0..1a1789592 100644
--- a/lib/portage/tests/dep/test_dnf_convert.py
+++ b/lib/portage/tests/dep/test_dnf_convert.py
@@ -8,7 +8,6 @@ from portage.dep._dnf import dnf_convert
class DNFConvertTestCase(TestCase):
def testDNFConvert(self):
-
test_cases = (
(
"|| ( A B ) || ( C D )",
diff --git a/lib/portage/tests/dep/testExtendedAtomDict.py b/lib/portage/tests/dep/test_extended_atom_dict.py
index 7c177b927..7c177b927 100644
--- a/lib/portage/tests/dep/testExtendedAtomDict.py
+++ b/lib/portage/tests/dep/test_extended_atom_dict.py
diff --git a/lib/portage/tests/dep/testExtractAffectingUSE.py b/lib/portage/tests/dep/test_extract_affecting_use.py
index 8c93ad973..8c93ad973 100644
--- a/lib/portage/tests/dep/testExtractAffectingUSE.py
+++ b/lib/portage/tests/dep/test_extract_affecting_use.py
diff --git a/lib/portage/tests/dep/test_get_operator.py b/lib/portage/tests/dep/test_get_operator.py
index 7815961a0..62fa2ad21 100644
--- a/lib/portage/tests/dep/test_get_operator.py
+++ b/lib/portage/tests/dep/test_get_operator.py
@@ -8,7 +8,6 @@ from portage.dep import get_operator
class GetOperator(TestCase):
def testGetOperator(self):
-
# get_operator does not validate operators
tests = [
("~", "~"),
@@ -30,7 +29,7 @@ class GetOperator(TestCase):
self.assertEqual(
result,
test[1],
- msg="get_operator(%s) != %s" % (test[0] + atom, test[1]),
+ msg=f"get_operator({test[0] + atom}) != {test[1]}",
)
result = get_operator("sys-apps/portage")
diff --git a/lib/portage/tests/dep/test_get_required_use_flags.py b/lib/portage/tests/dep/test_get_required_use_flags.py
index f9c39d530..b7aea0d18 100644
--- a/lib/portage/tests/dep/test_get_required_use_flags.py
+++ b/lib/portage/tests/dep/test_get_required_use_flags.py
@@ -41,7 +41,7 @@ class TestCheckRequiredUse(TestCase):
for required_use in test_cases_xfail:
self.assertRaisesMsg(
- "REQUIRED_USE: '%s'" % (required_use,),
+ f"REQUIRED_USE: '{required_use}'",
InvalidDependString,
get_required_use_flags,
required_use,
diff --git a/lib/portage/tests/dep/test_isjustname.py b/lib/portage/tests/dep/test_isjustname.py
index 3ee0c86c3..2b3828e00 100644
--- a/lib/portage/tests/dep/test_isjustname.py
+++ b/lib/portage/tests/dep/test_isjustname.py
@@ -8,7 +8,6 @@ from portage.dep import isjustname
class IsJustName(TestCase):
def testIsJustName(self):
-
cats = ("", "sys-apps/", "foo/", "virtual/")
pkgs = ("portage", "paludis", "pkgcore", "notARealPkg")
vers = ("", "-2.0-r3", "-1.0_pre2", "-3.1b")
@@ -19,10 +18,10 @@ class IsJustName(TestCase):
if len(ver):
self.assertFalse(
isjustname(cat + pkg + ver),
- msg="isjustname(%s) is True!" % (cat + pkg + ver),
+ msg=f"isjustname({cat + pkg + ver}) is True!",
)
else:
self.assertTrue(
isjustname(cat + pkg + ver),
- msg="isjustname(%s) is False!" % (cat + pkg + ver),
+ msg=f"isjustname({cat + pkg + ver}) is False!",
)
diff --git a/lib/portage/tests/dep/test_isvalidatom.py b/lib/portage/tests/dep/test_isvalidatom.py
index ddd2b58db..34900dbbc 100644
--- a/lib/portage/tests/dep/test_isvalidatom.py
+++ b/lib/portage/tests/dep/test_isvalidatom.py
@@ -25,7 +25,6 @@ class IsValidAtomTestCase:
class IsValidAtom(TestCase):
def testIsValidAtom(self):
-
test_cases = (
IsValidAtomTestCase("sys-apps/portage", True),
IsValidAtomTestCase("=sys-apps/portage-2.1", True),
@@ -185,9 +184,6 @@ class IsValidAtom(TestCase):
"sys-apps/portage::repo", False, allow_repo=None, eapi="5"
),
IsValidAtomTestCase(
- "sys-apps/portage::repo", True, allow_repo=None, eapi="5-progress"
- ),
- IsValidAtomTestCase(
"sys-apps/portage::repo", False, allow_repo=None, eapi="7"
),
# If allow_repo is not None, it should not be overwritten by eapi
@@ -196,9 +192,6 @@ class IsValidAtom(TestCase):
"sys-apps/portage::repo", False, allow_repo=False, eapi="5"
),
IsValidAtomTestCase(
- "sys-apps/portage::repo", False, allow_repo=False, eapi="5-progress"
- ),
- IsValidAtomTestCase(
"sys-apps/portage::repo", False, allow_repo=False, eapi="7"
),
IsValidAtomTestCase("sys-apps/portage::repo", True, allow_repo=True),
@@ -206,9 +199,6 @@ class IsValidAtom(TestCase):
"sys-apps/portage::repo", True, allow_repo=True, eapi="5"
),
IsValidAtomTestCase(
- "sys-apps/portage::repo", True, allow_repo=True, eapi="5-progress"
- ),
- IsValidAtomTestCase(
"sys-apps/portage::repo", True, allow_repo=True, eapi="7"
),
IsValidAtomTestCase("virtual/ffmpeg:0/53", True),
@@ -239,5 +229,5 @@ class IsValidAtom(TestCase):
)
),
test_case.expected,
- msg="isvalidatom(%s) != %s" % (test_case.atom, test_case.expected),
+ msg=f"isvalidatom({test_case.atom}) != {test_case.expected}",
)
diff --git a/lib/portage/tests/dep/test_libc.py b/lib/portage/tests/dep/test_libc.py
new file mode 100644
index 000000000..6ea96d720
--- /dev/null
+++ b/lib/portage/tests/dep/test_libc.py
@@ -0,0 +1,81 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.dep import Atom
+from portage.dep.libc import strip_libc_deps
+from portage.tests import TestCase
+
+
+class LibcUtilStripDeps(TestCase):
+ def testStripSimpleDeps(self):
+ """
+ Test that we strip a basic libc dependency out and return
+ a list of dependencies without it in there.
+ """
+
+ libc_dep = [Atom("=sys-libs/glibc-2.38")]
+
+ original_deps = (
+ [
+ Atom("=sys-libs/glibc-2.38"),
+ Atom("=app-misc/foo-1.2.3"),
+ ],
+ [
+ Atom("=sys-libs/glibc-2.38"),
+ ],
+ [
+ Atom("=app-misc/foo-1.2.3"),
+ Atom("=app-misc/bar-1.2.3"),
+ ],
+ )
+
+ for deplist in original_deps:
+ strip_libc_deps(deplist, libc_dep)
+
+            self.assertFalse(
+                any(libc in deplist for libc in libc_dep),
+                "Stripped deplist contains a libc candidate",
+            )
+
+ def testStripComplexRealizedDeps(self):
+ """
+ Test that we strip pathological libc dependencies out and return
+ a list of dependencies without it in there.
+ """
+
+ # This shouldn't really happen for a 'realized' dependency, but
+ # we shouldn't crash if it happens anyway.
+ libc_dep = [Atom("=sys-libs/glibc-2.38*[p]")]
+
+ original_deps = (
+ [
+ Atom("=sys-libs/glibc-2.38[x]"),
+ Atom("=app-misc/foo-1.2.3"),
+ ],
+ [
+ Atom("=sys-libs/glibc-2.38[p]"),
+ ],
+ [
+ Atom("=app-misc/foo-1.2.3"),
+ Atom("=app-misc/bar-1.2.3"),
+ ],
+ )
+
+ for deplist in original_deps:
+ strip_libc_deps(deplist, libc_dep)
+
+            self.assertFalse(
+                any(libc in deplist for libc in libc_dep),
+                "Stripped deplist contains a libc candidate",
+            )
+
+ def testStripNonRealizedDeps(self):
+ """
+ Check that we strip non-realized libc deps.
+ """
+
+ libc_dep = [Atom("sys-libs/glibc:2.2=")]
+ original_deps = [Atom(">=sys-libs/glibc-2.38-r7")]
+
+ strip_libc_deps(original_deps, libc_dep)
+ self.assertFalse(original_deps, "(g)libc dep was not stripped")
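strip_libc_deps mutates its first argument in place, which is why each test asserts on the list after the call rather than on a return value. A usage sketch under that assumption:

from portage.dep import Atom
from portage.dep.libc import strip_libc_deps

deps = [Atom("=sys-libs/glibc-2.38"), Atom("=app-misc/foo-1.2.3")]
libc_deps = [Atom("=sys-libs/glibc-2.38")]

strip_libc_deps(deps, libc_deps)  # modifies deps in place
assert all(libc not in deps for libc in libc_deps)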
diff --git a/lib/portage/tests/dep/test_match_from_list.py b/lib/portage/tests/dep/test_match_from_list.py
index 7d5257719..c1241ab1d 100644
--- a/lib/portage/tests/dep/test_match_from_list.py
+++ b/lib/portage/tests/dep/test_match_from_list.py
@@ -16,7 +16,7 @@ class Package:
self.cp = atom.cp
slot = atom.slot
if atom.sub_slot:
- slot = "%s/%s" % (slot, atom.sub_slot)
+ slot = f"{slot}/{atom.sub_slot}"
if not slot:
slot = "0"
self.cpv = _pkg_str(atom.cpv, slot=slot, repo=atom.repo)
diff --git a/lib/portage/tests/dep/test_overlap_dnf.py b/lib/portage/tests/dep/test_overlap_dnf.py
index a468fab38..7fd1cfe7d 100644
--- a/lib/portage/tests/dep/test_overlap_dnf.py
+++ b/lib/portage/tests/dep/test_overlap_dnf.py
@@ -1,4 +1,4 @@
-# Copyright 2017 Gentoo Foundation
+# Copyright 2017-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -8,7 +8,6 @@ from portage.dep.dep_check import _overlap_dnf
class OverlapDNFTestCase(TestCase):
def testOverlapDNF(self):
-
test_cases = (
(
"|| ( cat/A cat/B ) cat/E || ( cat/C cat/D )",
@@ -47,3 +46,50 @@ class OverlapDNFTestCase(TestCase):
_overlap_dnf(use_reduce(dep_str, token_class=Atom, opconvert=True)),
result,
)
+
+
+class DuplicateOverlapDNFTestCase(TestCase):
+ def testDuplicateOverlapDNF(self):
+ """
+        Demonstrate deduplication of any-of blocks, preventing the
+        unnecessary DNF expansion caused by duplicates, as in bug 891137.
+ """
+ test_cases = (
+ ("|| ( cat/A cat/B ) || ( cat/A cat/B )", [["||", "cat/A", "cat/B"]]),
+ (
+ "|| ( cat/A cat/B ) cat/E || ( cat/C cat/D ) || ( cat/A cat/B )",
+ ["cat/E", ["||", "cat/A", "cat/B"], ["||", "cat/C", "cat/D"]],
+ ),
+ (
+ "|| ( cat/A cat/B ) cat/D || ( cat/B cat/C ) || ( cat/A cat/B )",
+ [
+ "cat/D",
+ [
+ "||",
+ ["cat/A", "cat/B"],
+ ["cat/A", "cat/C"],
+ ["cat/B", "cat/B"],
+ ["cat/B", "cat/C"],
+ ],
+ ],
+ ),
+ (
+ "|| ( cat/A cat/B ) || ( cat/C cat/D ) || ( ( cat/B cat/E ) cat/F ) || ( cat/A cat/B )",
+ [
+ [
+ "||",
+ ["cat/A", "cat/B", "cat/E"],
+ ["cat/A", "cat/F"],
+ ["cat/B", "cat/B", "cat/E"],
+ ["cat/B", "cat/F"],
+ ],
+ ["||", "cat/C", "cat/D"],
+ ],
+ ),
+ )
+
+ for dep_str, result in test_cases:
+ self.assertEqual(
+ _overlap_dnf(use_reduce(dep_str, token_class=Atom, opconvert=True)),
+ result,
+ )
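The deduplication under test can be pictured without portage's dep machinery: identical any-of groups add nothing to the DNF product, so dropping duplicates first bounds the expansion. A rough sketch with plain lists standing in for parsed dependencies:

from itertools import product


def dnf(any_of_groups):
    # Deduplicate the any-of groups first (order preserved); without
    # this, product() over the duplicate below would yield four pairs.
    unique = list(dict.fromkeys(tuple(g) for g in any_of_groups))
    return [list(combo) for combo in product(*unique)]


assert dnf([["A", "B"], ["A", "B"]]) == [["A"], ["B"]]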
diff --git a/lib/portage/tests/dep/test_paren_reduce.py b/lib/portage/tests/dep/test_paren_reduce.py
index b73ca6d2d..b8d780fbd 100644
--- a/lib/portage/tests/dep/test_paren_reduce.py
+++ b/lib/portage/tests/dep/test_paren_reduce.py
@@ -8,7 +8,6 @@ from portage.exception import InvalidDependString
class TestParenReduce(TestCase):
def testParenReduce(self):
-
test_cases = (
("A", ["A"]),
("( A )", ["A"]),
diff --git a/lib/portage/tests/dep/test_soname_atom_pickle.py b/lib/portage/tests/dep/test_soname_atom_pickle.py
index 086f82312..a7951755d 100644
--- a/lib/portage/tests/dep/test_soname_atom_pickle.py
+++ b/lib/portage/tests/dep/test_soname_atom_pickle.py
@@ -9,7 +9,6 @@ from portage.util.futures.executor.fork import ForkExecutor
class TestSonameAtomPickle(TestCase):
-
_ALL_PROVIDES = frozenset([SonameAtom("x86_64", "libc.so.6")])
def test_soname_atom_pickle(self):
diff --git a/lib/portage/tests/dep/testStandalone.py b/lib/portage/tests/dep/test_standalone.py
index fa8ffc99c..3b6cb12d7 100644
--- a/lib/portage/tests/dep/testStandalone.py
+++ b/lib/portage/tests/dep/test_standalone.py
@@ -10,7 +10,6 @@ class TestStandalone(TestCase):
"""Test some small functions portage.dep"""
def testCPVequal(self):
-
test_cases = (
("sys-apps/portage-2.1", "sys-apps/portage-2.1", True),
("sys-apps/portage-2.1", "sys-apps/portage-2.0", False),
@@ -32,12 +31,12 @@ class TestStandalone(TestCase):
self.assertEqual(
cpvequal(cpv1, cpv2),
expected_result,
- "cpvequal('%s', '%s') != %s" % (cpv1, cpv2, expected_result),
+ f"cpvequal('{cpv1}', '{cpv2}') != {expected_result}",
)
for cpv1, cpv2 in test_cases_xfail:
self.assertRaisesMsg(
- "cpvequal(%s, %s)" % (cpv1, cpv2),
+ f"cpvequal({cpv1}, {cpv2})",
PortageException,
cpvequal,
cpv1,
diff --git a/lib/portage/tests/dep/test_use_reduce.py b/lib/portage/tests/dep/test_use_reduce.py
index c4a24e8c6..81b659a94 100644
--- a/lib/portage/tests/dep/test_use_reduce.py
+++ b/lib/portage/tests/dep/test_use_reduce.py
@@ -54,7 +54,7 @@ class UseReduceTestCase:
subset=self.subset,
)
except InvalidDependString as e:
- raise InvalidDependString("%s: %s" % (e, self.deparray))
+ raise InvalidDependString(f"{e}: {self.deparray}")
class UseReduce(TestCase):
@@ -65,7 +65,6 @@ class UseReduce(TestCase):
return False
def testUseReduce(self):
-
EAPI_WITH_SRC_URI_ARROWS = "2"
EAPI_WITHOUT_SRC_URI_ARROWS = "0"
diff --git a/lib/portage/tests/ebuild/meson.build b/lib/portage/tests/ebuild/meson.build
new file mode 100644
index 000000000..0c4407c70
--- /dev/null
+++ b/lib/portage/tests/ebuild/meson.build
@@ -0,0 +1,17 @@
+py.install_sources(
+ [
+ 'test_array_fromfile_eof.py',
+ 'test_config.py',
+ 'test_doebuild_fd_pipes.py',
+ 'test_doebuild_spawn.py',
+ 'test_fetch.py',
+ 'test_ipc_daemon.py',
+ 'test_shell_quote.py',
+ 'test_spawn.py',
+ 'test_use_expand_incremental.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/ebuild',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/ebuild/test_array_fromfile_eof.py b/lib/portage/tests/ebuild/test_array_fromfile_eof.py
index 2054eee42..282a613be 100644
--- a/lib/portage/tests/ebuild/test_array_fromfile_eof.py
+++ b/lib/portage/tests/ebuild/test_array_fromfile_eof.py
@@ -27,7 +27,7 @@ class ArrayFromfileEofTestCase(TestCase):
a = array.array("B")
try:
a.fromfile(f, len(input_bytes) + 1)
- except (EOFError, IOError):
+ except (EOFError, OSError):
# python-3.0 lost data here
eof = True
diff --git a/lib/portage/tests/ebuild/test_config.py b/lib/portage/tests/ebuild/test_config.py
index d123d9abb..743b30bfb 100644
--- a/lib/portage/tests/ebuild/test_config.py
+++ b/lib/portage/tests/ebuild/test_config.py
@@ -1,7 +1,6 @@
# Copyright 2010-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import io
import tempfile
import portage
@@ -84,7 +83,6 @@ class ConfigTestCase(TestCase):
playground.cleanup()
def testLicenseManager(self):
-
user_config = {
"package.license": (
"dev-libs/* TEST",
@@ -112,7 +110,7 @@ class ConfigTestCase(TestCase):
self.assertEqual(lic_man._accept_license_str, None)
self.assertEqual(lic_man._accept_license, None)
self.assertEqual(lic_man._license_groups, {"EULA": frozenset(["TEST"])})
- self.assertEqual(lic_man._undef_lic_groups, set(["TEST"]))
+ self.assertEqual(lic_man._undef_lic_groups, {"TEST"})
self.assertEqual(lic_man.extract_global_changes(), "TEST TEST2")
self.assertEqual(lic_man.extract_global_changes(), "")
@@ -196,7 +194,6 @@ class ConfigTestCase(TestCase):
playground.cleanup()
def testPackageMaskOrder(self):
-
ebuilds = {
"dev-libs/A-1": {},
"dev-libs/B-1": {},
@@ -263,7 +260,6 @@ class ConfigTestCase(TestCase):
playground.cleanup()
def testManifest(self):
-
distfiles = {
"B-2.tar.bz2": b"binary\0content",
"C-2.zip": b"binary\0content",
@@ -370,7 +366,7 @@ class ConfigTestCase(TestCase):
user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
os.makedirs(user_config_dir)
- with io.open(
+ with open(
os.path.join(user_config_dir, "package.env"),
mode="w",
encoding=_encodings["content"],
@@ -381,7 +377,7 @@ class ConfigTestCase(TestCase):
env_dir = os.path.join(user_config_dir, "env")
os.makedirs(env_dir)
for k, v in env_files.items():
- with io.open(
+ with open(
os.path.join(env_dir, k), mode="w", encoding=_encodings["content"]
) as f:
for line in v:
diff --git a/lib/portage/tests/ebuild/test_doebuild_fd_pipes.py b/lib/portage/tests/ebuild/test_doebuild_fd_pipes.py
index e0c75c872..445fcf6c4 100644
--- a/lib/portage/tests/ebuild/test_doebuild_fd_pipes.py
+++ b/lib/portage/tests/ebuild/test_doebuild_fd_pipes.py
@@ -1,6 +1,8 @@
-# Copyright 2013-2016 Gentoo Foundation
+# Copyright 2013-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import multiprocessing
+
import portage
from portage import os
from portage.tests import TestCase
@@ -12,15 +14,9 @@ from _emerge.Package import Package
from _emerge.PipeReader import PipeReader
-class DoebuildProcess(ForkProcess):
-
- __slots__ = ("doebuild_kwargs", "doebuild_pargs")
-
- def _run(self):
- return portage.doebuild(*self.doebuild_pargs, **self.doebuild_kwargs)
-
-
class DoebuildFdPipesTestCase(TestCase):
+ output_fd = 200
+
def testDoebuild(self):
"""
Invoke portage.doebuild() with the fd_pipes parameter, and
@@ -29,22 +25,24 @@ class DoebuildFdPipesTestCase(TestCase):
supported for API consumers (see bug #475812).
"""
- output_fd = 200
+ output_fd = self.output_fd
ebuild_body = ["S=${WORKDIR}"]
- for phase_func in (
- "pkg_info",
- "pkg_nofetch",
- "pkg_pretend",
- "pkg_setup",
- "src_unpack",
- "src_prepare",
- "src_configure",
- "src_compile",
- "src_test",
- "src_install",
+ for phase_func, default in (
+ ("pkg_info", False),
+ ("pkg_nofetch", False),
+ ("pkg_pretend", False),
+ ("pkg_setup", False),
+ ("pkg_config", False),
+ ("src_unpack", False),
+ ("src_prepare", True),
+ ("src_configure", False),
+ ("src_compile", False),
+ ("src_test", False),
+ ("src_install", False),
):
ebuild_body.append(
- ("%s() { echo ${EBUILD_PHASE}" " 1>&%s; }") % (phase_func, output_fd)
+ ("%s() { %secho ${EBUILD_PHASE}" " 1>&%s; }")
+ % (phase_func, "default; " if default else "", output_fd)
)
ebuild_body.append("")
@@ -52,11 +50,24 @@ class DoebuildFdPipesTestCase(TestCase):
ebuilds = {
"app-misct/foo-1": {
- "EAPI": "5",
+ "EAPI": "8",
+ "IUSE": "+foo +bar",
+ "REQUIRED_USE": "|| ( foo bar )",
"MISC_CONTENT": ebuild_body,
}
}
+ # Populate configdict["pkg"]["USE"] with something arbitrary in order
+ # to try and trigger bug 675748 in doebuild _validate_deps.
+ arbitrary_package_use = "baz"
+
+ user_config = {
+ # In order to trigger bug 675748, package.env must be non-empty,
+ # but the referenced env file can be empty.
+ "package.env": (f"app-misct/foo {os.devnull}",),
+ "package.use": (f"app-misct/foo {arbitrary_package_use}",),
+ }
+
# Override things that may be unavailable, or may have portability
# issues when running tests in exotic environments.
# prepstrip - bug #447810 (bash read builtin EINTR problem)
@@ -65,7 +76,7 @@ class DoebuildFdPipesTestCase(TestCase):
self.assertEqual(true_binary is None, False, "true command not found")
dev_null = open(os.devnull, "wb")
- playground = ResolverPlayground(ebuilds=ebuilds)
+ playground = ResolverPlayground(ebuilds=ebuilds, user_config=user_config)
try:
QueryCommand._db = playground.trees
root_config = playground.trees[playground.eroot]["root_config"]
@@ -107,39 +118,50 @@ class DoebuildFdPipesTestCase(TestCase):
type_name="ebuild",
)
settings.setcpv(pkg)
- ebuildpath = portdb.findname(cpv)
- self.assertNotEqual(ebuildpath, None)
-
- for phase in (
- "info",
- "nofetch",
- "pretend",
- "setup",
- "unpack",
- "prepare",
- "configure",
- "compile",
- "test",
- "install",
- "qmerge",
- "clean",
- "merge",
+
+ # Demonstrate that settings.configdict["pkg"]["USE"] contains our arbitrary
+ # package.use setting in order to trigger bug 675748.
+ self.assertEqual(settings.configdict["pkg"]["USE"], arbitrary_package_use)
+
+ # Try to trigger the config.environ() split_LC_ALL assertion for bug 925863.
+ settings["LC_ALL"] = "C"
+
+ source_ebuildpath = portdb.findname(cpv)
+ self.assertNotEqual(source_ebuildpath, None)
+
+ for phase, tree, ebuildpath in (
+ ("info", "porttree", source_ebuildpath),
+ ("nofetch", "porttree", source_ebuildpath),
+ ("pretend", "porttree", source_ebuildpath),
+ ("setup", "porttree", source_ebuildpath),
+ ("unpack", "porttree", source_ebuildpath),
+ ("prepare", "porttree", source_ebuildpath),
+ ("configure", "porttree", source_ebuildpath),
+ ("compile", "porttree", source_ebuildpath),
+ ("test", "porttree", source_ebuildpath),
+ ("install", "porttree", source_ebuildpath),
+ ("qmerge", "porttree", source_ebuildpath),
+ ("clean", "porttree", source_ebuildpath),
+ ("merge", "porttree", source_ebuildpath),
+ ("clean", "porttree", source_ebuildpath),
+ ("config", "vartree", root_config.trees["vartree"].dbapi.findname(cpv)),
):
+ if ebuildpath is not source_ebuildpath:
+ self.assertNotEqual(ebuildpath, None)
- pr, pw = os.pipe()
+ pr, pw = multiprocessing.Pipe(duplex=False)
- producer = DoebuildProcess(
- doebuild_pargs=(ebuildpath, phase),
- doebuild_kwargs={
+ producer = ForkProcess(
+ target=self._doebuild,
+ fd_pipes={
+ 1: dev_null.fileno(),
+ },
+ args=(QueryCommand._db, pw, ebuildpath, phase),
+ kwargs={
"settings": settings,
- "mydbapi": portdb,
- "tree": "porttree",
+ "mydbapi": root_config.trees[tree].dbapi,
+ "tree": tree,
"vartree": root_config.trees["vartree"],
- "fd_pipes": {
- 1: dev_null.fileno(),
- 2: dev_null.fileno(),
- output_fd: pw,
- },
"prev_mtimes": {},
},
)
@@ -152,7 +174,7 @@ class DoebuildFdPipesTestCase(TestCase):
task_scheduler.start()
finally:
# PipeReader closes pr
- os.close(pw)
+ pw.close()
task_scheduler.wait()
output = portage._unicode_decode(consumer.getvalue()).rstrip("\n")
@@ -169,3 +191,11 @@ class DoebuildFdPipesTestCase(TestCase):
dev_null.close()
playground.cleanup()
QueryCommand._db = None
+
+ @staticmethod
+ def _doebuild(db, pw, *args, **kwargs):
+ QueryCommand._db = db
+ kwargs["fd_pipes"] = {
+ DoebuildFdPipesTestCase.output_fd: pw.fileno(),
+ }
+ return portage.doebuild(*args, **kwargs)
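The switch from os.pipe() to multiprocessing.Pipe(duplex=False) hands the child a connection object whose fileno() can be wired into fd_pipes, and which survives both the fork and spawn start methods. The shape of that handoff, as a stdlib-only sketch:

import multiprocessing
import os


def child(pw):
    # Write raw bytes through the inherited connection's descriptor,
    # just as the ebuild phase writes to output_fd.
    os.write(pw.fileno(), b"phase done\n")
    pw.close()


if __name__ == "__main__":
    pr, pw = multiprocessing.Pipe(duplex=False)
    proc = multiprocessing.Process(target=child, args=(pw,))
    proc.start()
    pw.close()  # the parent keeps only the read end
    print(os.read(pr.fileno(), 32))
    proc.join()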
diff --git a/lib/portage/tests/ebuild/test_doebuild_spawn.py b/lib/portage/tests/ebuild/test_doebuild_spawn.py
index ef0ae5847..cac844f8f 100644
--- a/lib/portage/tests/ebuild/test_doebuild_spawn.py
+++ b/lib/portage/tests/ebuild/test_doebuild_spawn.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2015 Gentoo Foundation
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import textwrap
@@ -28,7 +28,6 @@ class DoebuildSpawnTestCase(TestCase):
"""
def testDoebuildSpawn(self):
-
ebuild_body = textwrap.dedent(
"""
pkg_nofetch() { : ; }
@@ -81,13 +80,14 @@ class DoebuildSpawnTestCase(TestCase):
settings["T"] = os.path.join(settings["PORTAGE_BUILDDIR"], "temp")
for x in ("PORTAGE_BUILDDIR", "HOME", "T"):
os.makedirs(settings[x])
+ os.makedirs(os.path.join(settings["PORTAGE_BUILDDIR"], ".ipc"))
# Create a fake environment, to pretend as if the ebuild
# has been sourced already.
open(os.path.join(settings["T"], "environment"), "wb").close()
scheduler = SchedulerInterface(global_event_loop())
+ self.assertTrue(scheduler._loop is global_event_loop()._loop)
for phase in ("_internal_test",):
-
# Test EbuildSpawnProcess by calling doebuild.spawn() with
# returnpid=False. This case is no longer used by portage
# internals since EbuildPhase is used instead and that passes
diff --git a/lib/portage/tests/ebuild/test_fetch.py b/lib/portage/tests/ebuild/test_fetch.py
index cfca0d4e1..4812eb430 100644
--- a/lib/portage/tests/ebuild/test_fetch.py
+++ b/lib/portage/tests/ebuild/test_fetch.py
@@ -1,4 +1,4 @@
-# Copyright 2019-2021 Gentoo Authors
+# Copyright 2019-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import functools
@@ -36,7 +36,6 @@ from _emerge.Package import Package
class EbuildFetchTestCase(TestCase):
def testEbuildFetch(self):
-
user_config = {
"make.conf": ('GENTOO_MIRRORS="{scheme}://{host}:{port}"',),
}
@@ -71,7 +70,6 @@ class EbuildFetchTestCase(TestCase):
user_config_subst = user_config.copy()
for configname, configdata in user_config.items():
-
configdata_sub = []
for line in configdata:
configdata_sub.append(
@@ -135,7 +133,7 @@ class EbuildFetchTestCase(TestCase):
fetch_bin = portage.process.find_binary(fetchcommand[0])
if fetch_bin is None:
self.skipTest(
- "FETCHCOMMAND not found: {}".format(playground.settings["FETCHCOMMAND"])
+ f"FETCHCOMMAND not found: {playground.settings['FETCHCOMMAND']}"
)
eubin = os.path.join(playground.eprefix, "usr", "bin")
os.symlink(fetch_bin, os.path.join(eubin, os.path.basename(fetch_bin)))
@@ -143,9 +141,7 @@ class EbuildFetchTestCase(TestCase):
resume_bin = portage.process.find_binary(resumecommand[0])
if resume_bin is None:
self.skipTest(
- "RESUMECOMMAND not found: {}".format(
- playground.settings["RESUMECOMMAND"]
- )
+ f"RESUMECOMMAND not found: {playground.settings['RESUMECOMMAND']}"
)
if resume_bin != fetch_bin:
os.symlink(resume_bin, os.path.join(eubin, os.path.basename(resume_bin)))
@@ -162,7 +158,7 @@ class EbuildFetchTestCase(TestCase):
for layout_lines in mirror_layouts:
settings = config(clone=playground.settings)
- layout_data = "".join("{}\n".format(line) for line in layout_lines)
+ layout_data = "".join(f"{line}\n" for line in layout_lines)
mirror_conf = MirrorLayoutConfig()
mirror_conf.read_from_file(io.StringIO(layout_data))
layouts = mirror_conf.get_all_layouts()
@@ -171,10 +167,10 @@ class EbuildFetchTestCase(TestCase):
for k, v in orig_distfiles.items():
filename = DistfileName(
k,
- digests=dict(
- (algo, checksum_str(v, hashname=algo))
+ digests={
+ algo: checksum_str(v, hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
distfiles[filename] = v
@@ -182,11 +178,11 @@ class EbuildFetchTestCase(TestCase):
for layout in layouts:
content["/distfiles/" + layout.get_path(filename)] = v
# upstream path
- content["/distfiles/{}.txt".format(k)] = v
+ content[f"/distfiles/{k}.txt"] = v
shutil.rmtree(settings["DISTDIR"])
os.makedirs(settings["DISTDIR"])
- with open(os.path.join(settings["DISTDIR"], "layout.conf"), "wt") as f:
+ with open(os.path.join(settings["DISTDIR"], "layout.conf"), "w") as f:
f.write(layout_data)
if any(isinstance(layout, ContentHashLayout) for layout in layouts):
@@ -203,11 +199,7 @@ class EbuildFetchTestCase(TestCase):
# Demonstrate that fetch preserves a stale file in DISTDIR when no digests are given.
foo_uri = {
- "foo": (
- "{scheme}://{host}:{port}/distfiles/foo".format(
- scheme=scheme, host=host, port=server.server_port
- ),
- )
+ "foo": (f"{scheme}://{host}:{server.server_port}/distfiles/foo",)
}
foo_path = os.path.join(settings["DISTDIR"], "foo")
foo_stale_content = b"stale content\n"
@@ -254,9 +246,11 @@ class EbuildFetchTestCase(TestCase):
"""
% orig_fetchcommand.replace("${FILE}", "${FILE}.__download__")
)
- settings["FETCHCOMMAND"] = '"%s" "%s" "${URI}" "${DISTDIR}" "${FILE}"' % (
- BASH_BINARY,
- temp_fetchcommand,
+ settings["FETCHCOMMAND"] = (
+ '"{}" "{}" "${{URI}}" "${{DISTDIR}}" "${{FILE}}"'.format(
+ BASH_BINARY,
+ temp_fetchcommand,
+ )
)
settings.features.add("skiprocheck")
settings.features.remove("distlocks")
@@ -284,7 +278,7 @@ class EbuildFetchTestCase(TestCase):
portage._python_interpreter,
"-b",
"-Wd",
- os.path.join(self.bindir, "emirrordist"),
+ os.path.join(str(self.bindir), "emirrordist"),
"--distfiles",
settings["DISTDIR"],
"--config-root",
@@ -341,16 +335,7 @@ class EbuildFetchTestCase(TestCase):
)
)
- # Tests only work with one ebuild at a time, so the config
- # pool only needs a single config instance.
- class config_pool:
- @staticmethod
- def allocate():
- return settings
-
- @staticmethod
- def deallocate(settings):
- pass
+ config_pool = config_pool_cls(settings)
def async_fetch(pkg, ebuild_path):
fetcher = EbuildFetcher(
@@ -455,7 +440,7 @@ class EbuildFetchTestCase(TestCase):
self.assertEqual(f.read(), distfiles[k])
# Test PORTAGE_RO_DISTDIRS
- settings["PORTAGE_RO_DISTDIRS"] = '"{}"'.format(ro_distdir)
+ settings["PORTAGE_RO_DISTDIRS"] = f'"{ro_distdir}"'
orig_fetchcommand = settings["FETCHCOMMAND"]
orig_resumecommand = settings["RESUMECOMMAND"]
try:
@@ -578,13 +563,13 @@ class EbuildFetchTestCase(TestCase):
emdisopts, portdb, asyncio.get_event_loop()
) as emdisconf:
# Copy revisions from bar to foo.
- for revision_key in emdisconf.content_db["filename:{}".format("bar")]:
+ for revision_key in emdisconf.content_db["filename:bar"]:
emdisconf.content_db.add(
DistfileName("foo", digests=dict(revision_key))
)
# Copy revisions from foo to bar.
- for revision_key in emdisconf.content_db["filename:{}".format("foo")]:
+ for revision_key in emdisconf.content_db["filename:foo"]:
emdisconf.content_db.add(
DistfileName("bar", digests=dict(revision_key))
)
@@ -592,12 +577,12 @@ class EbuildFetchTestCase(TestCase):
content_db_state = dict(emdisconf.content_db.items())
self.assertEqual(content_db_state, dict(emdisconf.content_db.items()))
self.assertEqual(
- [
+ {
k[len("filename:") :]
for k in content_db_state
if k.startswith("filename:")
- ],
- ["bar", "foo"],
+ },
+ {"bar", "foo"},
)
self.assertEqual(
content_db_state["filename:foo"], content_db_state["filename:bar"]
@@ -630,12 +615,12 @@ class EbuildFetchTestCase(TestCase):
emdisconf.content_db.remove(filename)
# foo should still have a content revision corresponding to bar's content.
self.assertEqual(
- [
+ {
k[len("filename:") :]
for k in emdisconf.content_db
if k.startswith("filename:")
- ],
- ["bar", "foo"],
+ },
+ {"bar", "foo"},
)
self.assertEqual(len(emdisconf.content_db["filename:foo"]), 1)
self.assertEqual(
@@ -744,10 +729,10 @@ class EbuildFetchTestCase(TestCase):
filename = DistfileName(
"foo-1.tar.gz",
- digests=dict(
- (algo, checksum_str(b"", hashname=algo))
+ digests={
+ algo: checksum_str(b"", hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
# Raise KeyError for a hash algorithm SHA1 which is not in MANIFEST2_HASH_DEFAULTS.
@@ -851,10 +836,10 @@ class EbuildFetchTestCase(TestCase):
def test_filename_hash_layout_get_filenames(self):
filename = DistfileName(
"foo-1.tar.gz",
- digests=dict(
- (algo, checksum_str(b"", hashname=algo))
+ digests={
+ algo: checksum_str(b"", hashname=algo)
for algo in MANIFEST2_HASH_DEFAULTS
- ),
+ },
)
layouts = (
FlatLayout(),
@@ -886,3 +871,16 @@ class EbuildFetchTestCase(TestCase):
self.assertEqual(filename_result, str(filename))
finally:
shutil.rmtree(distdir)
+
+
+# Tests only work with one ebuild at a time, so the config
+# pool only needs a single config instance.
+class config_pool_cls:
+ def __init__(self, settings):
+ self._settings = settings
+
+ def allocate(self):
+ return self._settings
+
+ def deallocate(self, settings):
+ pass
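Hoisting config_pool out of the test method matters because instances of locally defined classes cannot be pickled for a spawned child process, while module-level classes can. A small demonstration (both pool classes are hypothetical):

import pickle


class ModulePool:
    def allocate(self):
        return "settings"


def make_local_pool():
    class LocalPool:
        def allocate(self):
            return "settings"

    return LocalPool()


pickle.dumps(ModulePool())  # fine

try:
    pickle.dumps(make_local_pool())
except (pickle.PicklingError, AttributeError) as exc:
    print("local class is not picklable:", exc)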
diff --git a/lib/portage/tests/ebuild/test_ipc_daemon.py b/lib/portage/tests/ebuild/test_ipc_daemon.py
index e20b6fff1..b8777fe94 100644
--- a/lib/portage/tests/ebuild/test_ipc_daemon.py
+++ b/lib/portage/tests/ebuild/test_ipc_daemon.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2016 Gentoo Foundation
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import tempfile
@@ -21,20 +21,7 @@ from _emerge.EbuildBuildDir import EbuildBuildDir
from _emerge.EbuildIpcDaemon import EbuildIpcDaemon
-class SleepProcess(ForkProcess):
- """
- Emulate the sleep command, in order to ensure a consistent
- return code when it is killed by SIGTERM (see bug #437180).
- """
-
- __slots__ = ("seconds",)
-
- def _run(self):
- time.sleep(self.seconds)
-
-
class IpcDaemonTestCase(TestCase):
-
_SCHEDULE_TIMEOUT = 40 # seconds
def testIpcDaemon(self):
@@ -66,10 +53,10 @@ class IpcDaemonTestCase(TestCase):
build_dir = EbuildBuildDir(scheduler=event_loop, settings=env)
event_loop.run_until_complete(build_dir.async_lock())
- ensure_dirs(env["PORTAGE_BUILDDIR"])
+ ensure_dirs(os.path.join(env["PORTAGE_BUILDDIR"], ".ipc"))
- input_fifo = os.path.join(env["PORTAGE_BUILDDIR"], ".ipc_in")
- output_fifo = os.path.join(env["PORTAGE_BUILDDIR"], ".ipc_out")
+ input_fifo = os.path.join(env["PORTAGE_BUILDDIR"], ".ipc", "in")
+ output_fifo = os.path.join(env["PORTAGE_BUILDDIR"], ".ipc", "out")
os.mkfifo(input_fifo)
os.mkfifo(output_fifo)
@@ -90,6 +77,7 @@ class IpcDaemonTestCase(TestCase):
task_scheduler = TaskScheduler(
iter([daemon, proc]), max_jobs=2, event_loop=event_loop
)
+ self.assertTrue(task_scheduler._loop is event_loop._loop)
self.received_command = False
@@ -125,7 +113,9 @@ class IpcDaemonTestCase(TestCase):
daemon = EbuildIpcDaemon(
commands=commands, input_fifo=input_fifo, output_fifo=output_fifo
)
- proc = SleepProcess(seconds=sleep_time_s)
+ # Emulate the sleep command, in order to ensure a consistent
+ # return code when it is killed by SIGTERM (see bug #437180).
+ proc = ForkProcess(target=time.sleep, args=(sleep_time_s,))
task_scheduler = TaskScheduler(
iter([daemon, proc]), max_jobs=2, event_loop=event_loop
)
@@ -171,8 +161,13 @@ class IpcDaemonTestCase(TestCase):
)
task_scheduler.addExitListener(self._exit_callback)
- try:
+ async def start_task_scheduler():
+ # This fails unless the event loop is running, since it needs
+ # the loop to setup a ChildWatcher.
task_scheduler.start()
+
+ try:
+ event_loop.run_until_complete(start_task_scheduler())
event_loop.run_until_complete(self._run_done)
event_loop.run_until_complete(task_scheduler.async_wait())
finally:
diff --git a/lib/portage/tests/ebuild/test_shell_quote.py b/lib/portage/tests/ebuild/test_shell_quote.py
index 7c9cb6428..885ff34b9 100644
--- a/lib/portage/tests/ebuild/test_shell_quote.py
+++ b/lib/portage/tests/ebuild/test_shell_quote.py
@@ -101,6 +101,6 @@ class ShellQuoteTestCase(TestCase):
("?abcxyz?", '"?abcxyz?"'),
]
- for (data, expected_result) in test_data:
+ for data, expected_result in test_data:
result = _shell_quote(data)
self.assertEqual(result, expected_result)
diff --git a/lib/portage/tests/ebuild/test_spawn.py b/lib/portage/tests/ebuild/test_spawn.py
index ad8e121db..c73171f04 100644
--- a/lib/portage/tests/ebuild/test_spawn.py
+++ b/lib/portage/tests/ebuild/test_spawn.py
@@ -2,7 +2,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import tempfile
import portage
from portage import os
@@ -23,7 +22,7 @@ class SpawnTestCase(TestCase):
null_fd = os.open("/dev/null", os.O_RDWR)
test_string = 2 * "blah blah blah\n"
proc = SpawnProcess(
- args=[BASH_BINARY, "-c", "echo -n '%s'" % test_string],
+ args=[BASH_BINARY, "-c", f"echo -n '{test_string}'"],
env={},
fd_pipes={0: portage._get_stdin().fileno(), 1: null_fd, 2: null_fd},
scheduler=global_event_loop(),
@@ -32,9 +31,8 @@ class SpawnTestCase(TestCase):
proc.start()
os.close(null_fd)
self.assertEqual(proc.wait(), os.EX_OK)
- f = io.open(
+ f = open(
_unicode_encode(logfile, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="strict",
)
@@ -42,14 +40,14 @@ class SpawnTestCase(TestCase):
f.close()
# When logging passes through a pty, this comparison will fail
# unless the oflag terminal attributes have the termios.OPOST
- # bit disabled. Otherwise, tranformations such as \n -> \r\n
+ # bit disabled. Otherwise, transformations such as \n -> \r\n
# may occur.
self.assertEqual(test_string, log_content)
finally:
if logfile:
try:
os.unlink(logfile)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
diff --git a/lib/portage/tests/ebuild/test_use_expand_incremental.py b/lib/portage/tests/ebuild/test_use_expand_incremental.py
index 23c8d17b3..a4d24425f 100644
--- a/lib/portage/tests/ebuild/test_use_expand_incremental.py
+++ b/lib/portage/tests/ebuild/test_use_expand_incremental.py
@@ -1,7 +1,6 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-import io
from portage import os, _encodings
from portage.dep import Atom
@@ -13,7 +12,6 @@ from portage.util import ensure_dirs
class UseExpandIncrementalTestCase(TestCase):
def testUseExpandIncremental(self):
-
profiles = (
(
"base",
@@ -100,13 +98,13 @@ class UseExpandIncrementalTestCase(TestCase):
prof_path = os.path.join(profile_root, p)
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
@@ -122,7 +120,7 @@ class UseExpandIncrementalTestCase(TestCase):
settings.setcpv(pkg)
expected = frozenset(expected_use)
got = frozenset(settings["PORTAGE_USE"].split())
- self.assertEqual(got, expected, "%s != %s" % (got, expected))
+ self.assertEqual(got, expected, f"{got} != {expected}")
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/conftest.py b/lib/portage/tests/emerge/conftest.py
new file mode 100644
index 000000000..356e09879
--- /dev/null
+++ b/lib/portage/tests/emerge/conftest.py
@@ -0,0 +1,858 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import argparse
+from typing import Optional, Callable # , Self
+
+from portage.const import (
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
+ BASH_BINARY,
+ BINREPOS_CONF_FILE,
+)
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.cache.mappings import Mapping
+from portage.tests.util.test_socks5 import AsyncHTTPServer
+from portage import os
+from portage import shutil
+from portage.util.futures import asyncio
+from portage.tests import cnf_bindir, cnf_sbindir
+from portage.process import find_binary
+from portage.util import find_updated_config_files, shlex_split
+import portage
+
+import pytest
+
+
+_INSTALL_SOMETHING = """
+S="${WORKDIR}"
+
+pkg_pretend() {
+ einfo "called pkg_pretend for $CATEGORY/$PF"
+}
+
+src_install() {
+ einfo "installing something..."
+ insinto /usr/lib/${P}
+ echo "blah blah blah" > "${T}"/regular-file
+ doins "${T}"/regular-file
+ dosym regular-file /usr/lib/${P}/symlink || die
+
+ # Test CONFIG_PROTECT
+ insinto /etc
+ newins "${T}"/regular-file ${PN}-${SLOT%/*}
+
+ # Test code for bug #381629, using a copyright symbol encoded with latin-1.
+ # We use $(printf "\\xa9") rather than $'\\xa9', since printf apparently
+ # works in any case, while $'\\xa9' transforms to \\xef\\xbf\\xbd under
+ # some conditions. TODO: Find out why it transforms to \\xef\\xbf\\xbd when
+ # running tests for Python 3.2 (even though it's bash that is ultimately
+ # responsible for performing the transformation).
+ local latin_1_dir=/usr/lib/${P}/latin-1-$(printf "\\xa9")-directory
+ insinto "${latin_1_dir}"
+ echo "blah blah blah" > "${T}"/latin-1-$(printf "\\xa9")-regular-file || die
+ doins "${T}"/latin-1-$(printf "\\xa9")-regular-file
+ dosym latin-1-$(printf "\\xa9")-regular-file ${latin_1_dir}/latin-1-$(printf "\\xa9")-symlink || die
+
+ call_has_and_best_version
+}
+
+pkg_config() {
+ einfo "called pkg_config for $CATEGORY/$PF"
+}
+
+pkg_info() {
+ einfo "called pkg_info for $CATEGORY/$PF"
+}
+
+pkg_preinst() {
+ if ! ___eapi_best_version_and_has_version_support_-b_-d_-r; then
+ # The BROOT variable is unset during pkg_* phases for EAPI 7,
+ # therefore best/has_version -b is expected to fail if we attempt
+ # to call it for EAPI 7 here.
+ call_has_and_best_version
+ fi
+}
+
+call_has_and_best_version() {
+ local root_arg
+ if ___eapi_best_version_and_has_version_support_-b_-d_-r; then
+ root_arg="-b"
+ else
+ root_arg="--host-root"
+ fi
+ einfo "called ${EBUILD_PHASE_FUNC} for $CATEGORY/$PF"
+ einfo "EPREFIX=${EPREFIX}"
+ einfo "PORTAGE_OVERRIDE_EPREFIX=${PORTAGE_OVERRIDE_EPREFIX}"
+ einfo "ROOT=${ROOT}"
+ einfo "EROOT=${EROOT}"
+ einfo "SYSROOT=${SYSROOT}"
+ einfo "ESYSROOT=${ESYSROOT}"
+ einfo "BROOT=${BROOT}"
+ # Test that has_version and best_version work correctly with
+ # prefix (involves internal ROOT -> EROOT calculation in order
+ # to support ROOT override via the environment with EAPIs 3
+ # and later which support prefix).
+ if has_version $CATEGORY/$PN:$SLOT ; then
+ einfo "has_version detects an installed instance of $CATEGORY/$PN:$SLOT"
+ einfo "best_version reports that the installed instance is $(best_version $CATEGORY/$PN:$SLOT)"
+ else
+ einfo "has_version does not detect an installed instance of $CATEGORY/$PN:$SLOT"
+ fi
+ if [[ ${EPREFIX} != ${PORTAGE_OVERRIDE_EPREFIX} ]] ; then
+ if has_version ${root_arg} $CATEGORY/$PN:$SLOT ; then
+ einfo "has_version ${root_arg} detects an installed instance of $CATEGORY/$PN:$SLOT"
+ einfo "best_version ${root_arg} reports that the installed instance is $(best_version ${root_arg} $CATEGORY/$PN:$SLOT)"
+ else
+ einfo "has_version ${root_arg} does not detect an installed instance of $CATEGORY/$PN:$SLOT"
+ fi
+ fi
+}
+
+"""
+
+_AVAILABLE_EBUILDS = {
+ "dev-libs/A-1": {
+ "EAPI": "5",
+ "IUSE": "+flag",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "MISC_CONTENT": _INSTALL_SOMETHING,
+ "RDEPEND": "flag? ( dev-libs/B[flag] )",
+ },
+ "dev-libs/B-1": {
+ "EAPI": "5",
+ "IUSE": "+flag",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "MISC_CONTENT": _INSTALL_SOMETHING,
+ },
+ "dev-libs/C-1": {
+ "EAPI": "7",
+ "KEYWORDS": "~x86",
+ "RDEPEND": "dev-libs/D[flag]",
+ "MISC_CONTENT": _INSTALL_SOMETHING,
+ },
+ "dev-libs/D-1": {
+ "EAPI": "7",
+ "KEYWORDS": "~x86",
+ "IUSE": "flag",
+ "MISC_CONTENT": _INSTALL_SOMETHING,
+ },
+ "virtual/foo-0": {
+ "EAPI": "5",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ },
+}
+
+_INSTALLED_EBUILDS = {
+ "dev-libs/A-1": {
+ "EAPI": "5",
+ "IUSE": "+flag",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "RDEPEND": "flag? ( dev-libs/B[flag] )",
+ "USE": "flag",
+ },
+ "dev-libs/B-1": {
+ "EAPI": "5",
+ "IUSE": "+flag",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "USE": "flag",
+ },
+ "dev-libs/depclean-me-1": {
+ "EAPI": "5",
+ "IUSE": "",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "USE": "",
+ },
+ "app-misc/depclean-me-1": {
+ "EAPI": "5",
+ "IUSE": "",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "RDEPEND": "dev-libs/depclean-me",
+ "USE": "",
+ },
+}
+
+
+_BASELINE_COMMAND_SEQUENCE = [
+ "emerge -1 dev-libs/A -v dev-libs/B",
+ "emerge with quickpkg direct",
+ "env-update",
+ "portageq envvar",
+ "etc-update",
+ "dispatch-conf",
+ "emerge --version",
+ "emerge --info",
+ "emerge --info --verbose",
+ "emerge --list-sets",
+ "emerge --check-news",
+ "emerge --regen/--metadata",
+ "misc package operations",
+ "binhost emerge",
+]
+
+PORTAGE_PYTHON = portage._python_interpreter
+NOOP = lambda: ...
+
+
+class PortageCommand:
+ """A class that represents a baseline test case command,
+ including handling of environment and one-use arguments.
+ """
+
+ command = None
+ name = None
+
+ def __init__(
+ self,
+ *args: tuple[str],
+ env_mod: Optional[dict[str, str]] = None,
+ preparation: Optional[Callable[[], None]] = None,
+ post_command: Optional[Callable[[], None]] = None,
+ ) -> None:
+ self.args = args
+ self.env_mod = env_mod
+ self.preparation = preparation
+ self.post_command = post_command
+
+ def __iter__(self):
+ """To be able to call a function with ``*command`` as argument."""
+ yield self
+
+ @property
+ def env(self) -> dict[str, str]:
+ """This property returns the environment intended to be used
+ with the current test command, including possible modifications.
+ """
+ try:
+ base_environment = self.base_environment
+ except AttributeError:
+ base_environment = {}
+ else:
+ base_environment = base_environment.copy()
+ if self.env_mod:
+ base_environment.update(self.env_mod)
+ return base_environment
+
+ def __call__(self): # -> Self:
+ if self.preparation:
+ self.preparation()
+ try:
+ tuple_command = self.command + self.args
+ except TypeError:
+ # In case self.command is a string:
+ tuple_command = (self.command,) + self.args
+ return tuple_command
+
+ def __bool__(self) -> bool:
+ return bool(self.command)
+
+ def check_command_result(self) -> None:
+ if self.post_command:
+ self.post_command()
+
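+# A usage sketch (hypothetical values; the ``Emerge`` subclass is
+# defined below): wrapping a command with a one-use environment
+# modification and a preparation callback:
+#
+#     cmd = Emerge("--pretend", "dev-libs/A",
+#                  env_mod={"FEATURES": "metadata-transfer"},
+#                  preparation=lambda: None)
+#     argv = cmd()    # the full command tuple, ready for subprocess
+#     env = cmd.env   # base environment plus the env_mod entries
+#     cmd.check_command_result()
+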
+
+class PortageCommandSequence:
+ def __init__(self, *commands):
+ self.commands = commands
+
+ def __iter__(self):
+ yield from self.commands
+
+
+class Emerge(PortageCommand):
+ name = "emerge"
+ command = (PORTAGE_PYTHON, "-b", "-Wd", os.path.join(str(cnf_bindir), name))
+
+
+class Noop(PortageCommand):
+ name = "No-op"
+
+
+class EnvUpdate(PortageCommand):
+ name = "env-update"
+ command = (PORTAGE_PYTHON, "-b", "-Wd", os.path.join(str(cnf_sbindir), name))
+
+
+class DispatchConf(PortageCommand):
+ name = "dispatch-conf"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_sbindir), name),
+ )
+
+
+class Ebuild(PortageCommand):
+ name = "ebuild"
+ command = (PORTAGE_PYTHON, "-b", "-Wd", os.path.join(str(cnf_bindir), name))
+
+
+class Egencache(PortageCommand):
+ name = "egencache"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_bindir), name),
+ )
+
+
+class Emaint(PortageCommand):
+ name = "emaint"
+ command = (PORTAGE_PYTHON, "-b", "-Wd", os.path.join(str(cnf_sbindir), name))
+
+
+class EtcUpdate(PortageCommand):
+ name = "etc-update"
+ command = (BASH_BINARY, os.path.join(str(cnf_sbindir), name))
+
+
+class Fixpackages(PortageCommand):
+ name = "fixpackages"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_sbindir), name),
+ )
+
+
+class Portageq(PortageCommand):
+ name = "portageq"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_bindir), name),
+ )
+
+
+class Quickpkg(PortageCommand):
+ name = "quickpkg"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_bindir), name),
+ )
+
+
+class Regenworld(PortageCommand):
+ name = "regenworld"
+ command = (
+ PORTAGE_PYTHON,
+ "-b",
+ "-Wd",
+ os.path.join(str(cnf_sbindir), name),
+ )
+
+
+def pytest_generate_tests(metafunc):
+ if "baseline_command" in metafunc.fixturenames:
+ metafunc.parametrize(
+ "baseline_command", _BASELINE_COMMAND_SEQUENCE, indirect=True
+ )
+
+
+def _have_python_xml():
+ try:
+ __import__("xml.etree.ElementTree")
+ __import__("xml.parsers.expat").parsers.expat.ExpatError
+ except (AttributeError, ImportError):
+ return False
+ return True
+
+
+def _check_foo_file(pkgdir, filename, must_exist) -> None:
+ assert (
+ os.path.exists(os.path.join(pkgdir, "virtual", "foo", filename)) == must_exist
+ )
+
+
+def _check_number_of_protected_files(must_have, eroot, config_protect) -> None:
+ assert must_have == len(
+ list(find_updated_config_files(eroot, shlex_split(config_protect)))
+ )
+
+
+class BinhostContentMap(Mapping):
+ def __init__(self, remote_path, local_path):
+ self._remote_path = remote_path
+ self._local_path = local_path
+
+ def __getitem__(self, request_path):
+ safe_path = os.path.normpath(request_path)
+ if not safe_path.startswith(self._remote_path + "/"):
+ raise KeyError(request_path)
+ local_path = os.path.join(
+ self._local_path, safe_path[len(self._remote_path) + 1 :]
+ )
+ try:
+ with open(local_path, "rb") as f:
+ return f.read()
+ except OSError:
+ raise KeyError(request_path)
+
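+# Illustrative lookup (hypothetical paths): a request for
+# "/binhost/Packages" maps to "<local_path>/Packages" and returns the
+# file's bytes; anything outside the remote prefix raises KeyError:
+#
+#     content_map = BinhostContentMap("/binhost", "/tmp/pkgdir")
+#     data = content_map["/binhost/Packages"]
+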
+
+@pytest.fixture(scope="module")
+def async_loop():
+ yield asyncio._wrap_loop()
+
+
+@pytest.fixture(params=SUPPORTED_GENTOO_BINPKG_FORMATS, scope="function")
+def playground(request, tmp_path_factory):
+ """Fixture that provides instances of ``ResolverPlayground``
+ each one with one supported value for ``BINPKG_FORMAT``."""
+ binpkg_format = request.param
+ playground = ResolverPlayground(
+ ebuilds=_AVAILABLE_EBUILDS,
+ installed=_INSTALLED_EBUILDS,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ eprefix=str(tmp_path_factory.mktemp("eprefix", numbered=True)),
+ )
+ yield playground
+ playground.cleanup()
+
+
+@pytest.fixture()
+def binhost(playground, async_loop):
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ binhost_dir = os.path.join(eprefix, "binhost")
+ binhost_address = "127.0.0.1"
+ binhost_remote_path = "/binhost"
+ binhost_server = AsyncHTTPServer(
+ binhost_address, BinhostContentMap(binhost_remote_path, binhost_dir), async_loop
+ ).__enter__()
+ binhost_uri = "http://{address}:{port}{path}".format(
+ address=binhost_address,
+ port=binhost_server.server_port,
+ path=binhost_remote_path,
+ )
+ yield {"server": binhost_server, "uri": binhost_uri, "dir": binhost_dir}
+ binhost_server.__exit__(None, None, None)
+
+
+@pytest.fixture()
+def _generate_all_baseline_commands(playground, binhost):
+ """This fixture generates all the commands that
+ ``test_portage_baseline`` will use.
+
+ But don't use this fixture directly; instead, use the
+ ``baseline_command`` fixture. That improves performance a bit due to
+ pytest's caching of fixtures.
+
+ .. note::
+
+ To add a new command, define it in the local ``test_commands``
+ dict, if not yet defined, and add its key at the correct position
+ in the ``_BASELINE_COMMAND_SEQUENCE`` list.
+ """
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ eroot = settings["EROOT"]
+ trees = playground.trees
+ pkgdir = playground.pkgdir
+ portdb = trees[eroot]["porttree"].dbapi
+ test_repo_location = settings.repositories["test_repo"].location
+ var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
+ cachedir = os.path.join(var_cache_edb, "dep")
+ cachedir_pregen = os.path.join(test_repo_location, "metadata", "md5-cache")
+
+ rm_binary = find_binary("rm")
+ assert rm_binary is not None, "rm command not found"
+ rm_cmd = (rm_binary,)
+
+ egencache_extra_args = []
+ if _have_python_xml():
+ egencache_extra_args.append("--update-use-local-desc")
+
+ test_ebuild = portdb.findname("dev-libs/A-1")
+ assert test_ebuild is not None
+
+ cross_prefix = os.path.join(eprefix, "cross_prefix")
+ cross_root = os.path.join(eprefix, "cross_root")
+ cross_eroot = os.path.join(cross_root, eprefix.lstrip(os.sep))
+
+ binpkg_format = settings.get("BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0])
+ assert binpkg_format in ("xpak", "gpkg")
+ if binpkg_format == "xpak":
+ foo_filename = "foo-0-1.xpak"
+ elif binpkg_format == "gpkg":
+ foo_filename = "foo-0-1.gpkg.tar"
+
+ test_commands = {}
+
+ if hasattr(argparse.ArgumentParser, "parse_intermixed_args"):
+ parse_intermixed_command = Emerge(
+ "--oneshot",
+ "dev-libs/A",
+ "-v",
+ "dev-libs/A",
+ )
+ else:
+ parse_intermixed_command = Noop()
+ test_commands["emerge -1 dev-libs/A -v dev-libs/B"] = parse_intermixed_command
+
+ quickpkg_direct_seq = [
+ Emerge(
+ "--usepkgonly",
+ "--root",
+ cross_root,
+ "--quickpkg-direct=y",
+ "--quickpkg-direct-root",
+ "/",
+ "dev-libs/A",
+ ),
+ # The command below (v) depends on the one above (^).
+ Emerge(
+ "--usepkgonly",
+ "--quickpkg-direct=y",
+ "--quickpkg-direct-root",
+ cross_root,
+ "dev-libs/A",
+ ),
+ ]
+ test_commands["emerge with quickpkg direct"] = PortageCommandSequence(
+ *quickpkg_direct_seq
+ )
+
+ test_commands["env-update"] = EnvUpdate()
+ test_commands["portageq envvar"] = Portageq(
+ "envvar",
+ "-v",
+ "CONFIG_PROTECT",
+ "EROOT",
+ "PORTAGE_CONFIGROOT",
+ "PORTAGE_TMPDIR",
+ "USERLAND",
+ )
+ test_commands["etc-update"] = EtcUpdate()
+ test_commands["dispatch-conf"] = DispatchConf()
+ test_commands["emerge --version"] = Emerge("--version")
+ test_commands["emerge --info"] = Emerge("--info")
+ test_commands["emerge --info --verbose"] = Emerge("--info", "--verbose")
+ test_commands["emerge --list-sets"] = Emerge("--list-sets")
+ test_commands["emerge --check-news"] = Emerge("--check-news")
+
+ def _rm_cachedir():
+ shutil.rmtree(cachedir)
+
+ def _rm_cachedir_and_pregen():
+ _rm_cachedir()
+ shutil.rmtree(cachedir_pregen)
+
+ regen_seq = [
+ Emerge("--regen", preparation=_rm_cachedir_and_pregen),
+ Emerge(
+ "--regen",
+ env_mod={"FEATURES": "metadata-transfer"},
+ preparation=_rm_cachedir,
+ ),
+ Egencache(
+ "--repo",
+ "test_repo",
+ "--repositories-configuration",
+ playground.settings.repositories.config_string(),
+ "--update",
+ *egencache_extra_args,
+ preparation=_rm_cachedir,
+ ),
+ Emerge("--metadata", env_mod={"FEATURES": "metadata-transfer"}),
+ Emerge(
+ "--metadata",
+ env_mod={"FEATURES": "metadata-transfer"},
+ preparation=_rm_cachedir,
+ ),
+ Emerge("--metadata"),
+ Emerge("--oneshot", "virtual/foo", preparation=_rm_cachedir),
+ Emerge(
+ "--unmerge",
+ "virtual/foo",
+ env_mod={"FEATURES": "unmerge-backup"},
+ preparation=lambda: _check_foo_file(pkgdir, foo_filename, must_exist=False),
+ ),
+ Emerge(
+ "--pretend",
+ "dev-libs/A",
+ preparation=lambda: _check_foo_file(pkgdir, foo_filename, must_exist=True),
+ ),
+ ]
+ test_commands["emerge --regen/--metadata"] = PortageCommandSequence(*regen_seq)
+
+ abcd_seq = [
+ Ebuild(
+ test_ebuild,
+ "manifest",
+ "clean",
+ "package",
+ "merge",
+ ),
+ Emerge(
+ "--pretend",
+ "--tree",
+ "--complete-graph",
+ "dev-libs/A",
+ ),
+ Emerge("-p", "dev-libs/B"),
+ Emerge(
+ "-p",
+ "--newrepo",
+ "dev-libs/B",
+ ),
+ Emerge("-B", "dev-libs/B"),
+ Emerge(
+ "--oneshot",
+ "--usepkg",
+ "dev-libs/B",
+ ),
+ # trigger clean prior to pkg_pretend as in bug #390711
+ Ebuild(test_ebuild, "unpack"),
+ Emerge("--oneshot", "dev-libs/A"),
+ Emerge("--noreplace", "dev-libs/A"),
+ Emerge(
+ "--config",
+ "dev-libs/A",
+ ),
+ Emerge(
+ "--info",
+ "dev-libs/A",
+ "dev-libs/B",
+ ),
+ Emerge(
+ "--pretend",
+ "--depclean",
+ "--verbose",
+ "dev-libs/B",
+ ),
+ Emerge("--pretend", "--depclean"),
+ Emerge(
+ "--depclean",
+ ),
+ # Test bug #523684, where a file renamed or removed by the
+ # admin forces replacement files to be merged with config
+ # protection.
+ Quickpkg(
+ "--include-config",
+ "y",
+ "dev-libs/A",
+ post_command=lambda: _check_number_of_protected_files(
+ 0, eroot, settings["CONFIG_PROTECT"]
+ ),
+ ),
+ Emerge("--noreplace", "dev-libs/A"),
+ Emerge(
+ "--usepkgonly",
+ "dev-libs/A",
+ preparation=lambda: os.unlink(os.path.join(eprefix, "etc", "A-0")),
+ post_command=lambda: _check_number_of_protected_files(
+ 1, eroot, settings["CONFIG_PROTECT"]
+ ),
+ ),
+ Emaint("--check", "all"),
+ Emaint("--fix", "all"),
+ Fixpackages(),
+ Regenworld(),
+ Portageq(
+ "match",
+ eroot,
+ "dev-libs/A",
+ ),
+ Portageq(
+ "best_visible",
+ eroot,
+ "dev-libs/A",
+ ),
+ Portageq(
+ "best_visible",
+ eroot,
+ "binary",
+ "dev-libs/A",
+ ),
+ Portageq(
+ "contents",
+ eroot,
+ "dev-libs/A-1",
+ ),
+ Portageq(
+ "metadata",
+ eroot,
+ "ebuild",
+ "dev-libs/A-1",
+ "EAPI",
+ "IUSE",
+ "RDEPEND",
+ ),
+ Portageq(
+ "metadata",
+ eroot,
+ "binary",
+ "dev-libs/A-1",
+ "EAPI",
+ "USE",
+ "RDEPEND",
+ ),
+ Portageq(
+ "metadata",
+ eroot,
+ "installed",
+ "dev-libs/A-1",
+ "EAPI",
+ "USE",
+ "RDEPEND",
+ ),
+ Portageq(
+ "owners",
+ eroot,
+ eroot + "usr",
+ ),
+ Emerge("-p", eroot + "usr"),
+ Emerge(
+ "-p",
+ "--unmerge",
+ "-q",
+ eroot + "usr",
+ ),
+ Emerge(
+ "--unmerge",
+ "--quiet",
+ "dev-libs/A",
+ ),
+ Emerge(
+ "-C",
+ "--quiet",
+ "dev-libs/B",
+ ),
+ # autounmask:
+ # If EMERGE_DEFAULT_OPTS contains --autounmask=n, then --autounmask
+ # must be specified with --autounmask-continue.
+ Emerge(
+ "--autounmask",
+ "--autounmask-continue",
+ "dev-libs/C",
+ env_mod={"EMERGE_DEFAULT_OPTS": "--autounmask=n"},
+ ),
+ # Verify that the above --autounmask-continue command caused
+ # USE=flag to be applied correctly to dev-libs/D.
+ Portageq(
+ "match",
+ eroot,
+ "dev-libs/D[flag]",
+ ),
+ ]
+ test_commands["misc package operations"] = PortageCommandSequence(*abcd_seq)
+
+ cross_prefix_seq = [
+ # Test cross-prefix usage, including chpathtool for binpkgs.
+ # EAPI 7
+ Emerge("dev-libs/C", env_mod={"EPREFIX": cross_prefix}),
+ Portageq(
+ "has_version", cross_prefix, "dev-libs/C", env_mod={"EPREFIX": cross_prefix}
+ ),
+ Portageq(
+ "has_version", cross_prefix, "dev-libs/D", env_mod={"EPREFIX": cross_prefix}
+ ),
+ Emerge("dev-libs/D", env_mod={"ROOT": cross_root}),
+ Portageq(
+ "has_version",
+ cross_eroot,
+ "dev-libs/D",
+ ),
+ # EAPI 5
+ Emerge("--usepkgonly", "dev-libs/A", env_mod={"EPREFIX": cross_prefix}),
+ Portageq(
+ "has_version", cross_prefix, "dev-libs/A", env_mod={"EPREFIX": cross_prefix}
+ ),
+ Portageq(
+ "has_version", cross_prefix, "dev-libs/B", env_mod={"EPREFIX": cross_prefix}
+ ),
+ Emerge("-C", "--quiet", "dev-libs/B", env_mod={"EPREFIX": cross_prefix}),
+ Emerge("-C", "--quiet", "dev-libs/A", env_mod={"EPREFIX": cross_prefix}),
+ Emerge("dev-libs/A", env_mod={"EPREFIX": cross_prefix}),
+ # Test ROOT support
+ Emerge("dev-libs/B", env_mod={"ROOT": cross_root}),
+ Portageq(
+ "has_version",
+ cross_eroot,
+ "dev-libs/B",
+ ),
+ ]
+ test_commands["misc operations with eprefix"] = PortageCommandSequence(
+ *cross_prefix_seq
+ )
+
+ # Test binhost support if FETCHCOMMAND is available.
+ binrepos_conf_file = os.path.join(os.sep, eprefix, BINREPOS_CONF_FILE)
+ binhost_uri = binhost["uri"]
+ binhost_dir = binhost["dir"]
+ with open(binrepos_conf_file, "w") as f:
+ f.write("[test-binhost]\n")
+ f.write(f"sync-uri = {binhost_uri}\n")
+ fetchcommand = portage.util.shlex_split(settings["FETCHCOMMAND"])
+ fetch_bin = portage.process.find_binary(fetchcommand[0])
+
+ if fetch_bin is None:
+ test_commands["binhost emerge"] = Noop()
+ else:
+ # The next emerge call was added to split this test off from the rest:
+ make_package = Emerge("-e", "--buildpkg", "dev-libs/A")
+ getbinpkgonly = Emerge(
+ "-e",
+ "--getbinpkgonly",
+ "dev-libs/A",
+ preparation=lambda: os.rename(pkgdir, binhost_dir),
+ )
+
+ # Remove binrepos.conf and test PORTAGE_BINHOST.
+ def _rm_pkgdir_and_rm_binrepos_conf_file():
+ shutil.rmtree(pkgdir)
+ os.unlink(binrepos_conf_file)
+
+ getbinpkgonly_fetchonly = Emerge(
+ "-fe",
+ "--getbinpkgonly",
+ "dev-libs/A",
+ env_mod={"PORTAGE_BINHOST": binhost_uri},
+ preparation=_rm_pkgdir_and_rm_binrepos_conf_file,
+ )
+
+ # Test bug 920537 binrepos.conf with local file src-uri.
+ def _rm_pkgdir_and_create_binrepos_conf_with_file_uri():
+ shutil.rmtree(pkgdir)
+ with open(binrepos_conf_file, "w") as f:
+ f.write("[test-binhost]\n")
+ f.write(f"sync-uri = file://{binhost_dir}\n")
+
+ getbinpkgonly_file_uri = Emerge(
+ "-fe",
+ "--getbinpkgonly",
+ "dev-libs/A",
+ preparation=_rm_pkgdir_and_create_binrepos_conf_with_file_uri,
+ )
+
+ fetch_sequence = PortageCommandSequence(
+ make_package, getbinpkgonly, getbinpkgonly_fetchonly, getbinpkgonly_file_uri
+ )
+ test_commands["binhost emerge"] = fetch_sequence
+ yield test_commands
+
+
+@pytest.fixture()
+def baseline_command(request, _generate_all_baseline_commands):
+ """A fixture that provides the commands to perform a baseline
+ functional test of portage. It uses another fixture, namely
+ ``_generate_all_baseline_commands``.
+ Pytest caches the fixtures, so there is a small performance
+ improvement if the commands are generated only once.
+ """
+ return _generate_all_baseline_commands[request.param]
diff --git a/lib/portage/tests/emerge/meson.build b/lib/portage/tests/emerge/meson.build
new file mode 100644
index 000000000..0e0a41974
--- /dev/null
+++ b/lib/portage/tests/emerge/meson.build
@@ -0,0 +1,16 @@
+py.install_sources(
+ [
+ 'test_actions.py',
+ 'test_binpkg_fetch.py',
+ 'test_config_protect.py',
+ 'test_emerge_blocker_file_collision.py',
+ 'test_emerge_slot_abi.py',
+ 'test_global_updates.py',
+ 'test_baseline.py',
+ 'test_libc_dep_inject.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/emerge',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/emerge/test_actions.py b/lib/portage/tests/emerge/test_actions.py
new file mode 100644
index 000000000..cdc087a8e
--- /dev/null
+++ b/lib/portage/tests/emerge/test_actions.py
@@ -0,0 +1,68 @@
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from unittest.mock import MagicMock, patch
+
+from _emerge.actions import get_libc_version, run_action
+
+from portage.const import LIBC_PACKAGE_ATOM
+from portage.dbapi.virtual import fakedbapi
+from portage.dep import Atom
+from portage.tests import TestCase
+
+
+class RunActionTestCase(TestCase):
+ """This class' purpose is to encompass UTs for ``actions.run_action``.
+ Since that function is extremely long (at least on Sep. 2022;
+ hopefully the situation gets better with the time), the tests in this
+ ``TestCase`` contain plenty of mocks/patches.
+ Hopefully, with time and effort, the ``run_action`` function (and others
+ in the module) are refactored to make testing easier and more robust.
+
+ A side effect of the mocking approach is a strong dependency on the
+ details of the implementation. That can be improved if functions
+ are smaller and do a well defined small set of tasks. Another call to
+ refactoring...
+ If the implementation changes, the mocks can be adjusted to play its
+ role.
+ """
+
+ @patch("_emerge.actions.profile_check")
+ @patch("_emerge.actions.adjust_configs")
+ @patch("_emerge.actions.apply_priorities")
+ def test_binary_trees_populate_called(self, papply, padjust, profile_check):
+ """Ensure that ``binarytree.populate`` API is correctly used.
+ The point of this test is to ensure that the ``populate`` method
+ is called as expected: since it is the first time that ``populate``
+ is called, it must use ``getbinpkg_refresh=True``.
+ """
+ config = MagicMock()
+ config.action = None
+ config.opts = {"--quiet": True, "--usepkg": True, "--package-moves": "n"}
+ bt = MagicMock()
+ tree = {"bintree": bt}
+ trees = {"first": tree}
+ config.trees = trees
+
+ run_action(config)
+
+ bt.populate.assert_called_once_with(
+ getbinpkgs=False, getbinpkg_refresh=True, pretend=False
+ )
+
+ def testGetSystemLibc(self):
+ """
+ Check that get_libc_version extracts the right version string
+ from the provider of LIBC_PACKAGE_ATOM for emerge --info and friends.
+ """
+ settings = MagicMock()
+
+ settings.getvirtuals.return_value = {
+ LIBC_PACKAGE_ATOM: [Atom("=sys-libs/musl-1.2.3")]
+ }
+ settings.__getitem__.return_value = {}
+
+ vardb = fakedbapi(settings)
+ vardb.cpv_inject("sys-libs/musl-1.2.3", {"SLOT": "0"})
+
+ self.assertEqual(get_libc_version(vardb), ["musl-1.2.3"])
diff --git a/lib/portage/tests/emerge/test_baseline.py b/lib/portage/tests/emerge/test_baseline.py
new file mode 100644
index 000000000..eb4a3372d
--- /dev/null
+++ b/lib/portage/tests/emerge/test_baseline.py
@@ -0,0 +1,221 @@
+# Copyright 2011-2021, 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+"""This module defines a baseline for portage's functionality.
+
+Multiple portage commands are executed in a sequence in a playground
+(see the ``baseline_command`` fixture in ``conftest.py``).
+
+All the commands are triggered from the ``test_portage_baseline`` test.
+That test is marked with::
+
+ @pytest.mark.ft
+
+so that it can be selected with that marker, i.e.::
+
+ pytest -m ft
+
+``ft`` stands for *functional test*, since that's what this is: a
+functional (end-to-end) test that exercises some of portage's
+functionality.
+
+The test also works with pytest-xdist, e.g.::
+
+ pytest -m ft -n 8
+
+"""
+
+import subprocess
+
+import pytest
+
+import portage
+from portage import os
+from portage.const import (
+ PORTAGE_PYM_PATH,
+ USER_CONFIG_PATH,
+)
+from portage.process import find_binary
+from portage.tests import cnf_etc_path
+from portage.util import ensure_dirs
+from portage.util.futures import asyncio
+
+
+_METADATA_XML_FILES = (
+ (
+ "dev-libs/A",
+ {
+ "flags": "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
+ },
+ ),
+ (
+ "dev-libs/B",
+ {
+ "flags": "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
+ },
+ ),
+)
+
+_1Q_2010_UPDATE = """
+slotmove =app-doc/pms-3 2 3
+move dev-util/git dev-vcs/git
+"""
+
+
+@pytest.mark.ft
+def test_portage_baseline(async_loop, playground, binhost, baseline_command):
+ async_loop.run_until_complete(
+ asyncio.ensure_future(
+ _async_test_baseline(
+ playground,
+ binhost,
+ baseline_command,
+ ),
+ loop=async_loop,
+ )
+ )
+
+
+async def _async_test_baseline(playground, binhost, commands):
+ debug = playground.debug
+ settings = playground.settings
+ trees = playground.trees
+ eprefix = settings["EPREFIX"]
+
+ test_repo_location = settings.repositories["test_repo"].location
+ var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
+ cachedir = os.path.join(var_cache_edb, "dep")
+ cachedir_pregen = os.path.join(test_repo_location, "metadata", "md5-cache")
+
+ cross_prefix = os.path.join(eprefix, "cross_prefix")
+ cross_root = os.path.join(eprefix, "cross_root")
+ cross_eroot = os.path.join(cross_root, eprefix.lstrip(os.sep))
+
+ distdir = playground.distdir
+ pkgdir = playground.pkgdir
+ fake_bin = os.path.join(eprefix, "bin")
+ portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
+ profile_path = settings.profile_path
+ user_config_dir = os.path.join(os.sep, eprefix, USER_CONFIG_PATH)
+
+ path = settings.get("PATH")
+ if path is not None and not path.strip():
+ path = None
+ if path is None:
+ path = ""
+ else:
+ path = ":" + path
+ path = fake_bin + path
+
+ pythonpath = os.environ.get("PYTHONPATH")
+ if pythonpath is not None and not pythonpath.strip():
+ pythonpath = None
+ if pythonpath is not None and pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
+ pass
+ else:
+ if pythonpath is None:
+ pythonpath = ""
+ else:
+ pythonpath = ":" + pythonpath
+ pythonpath = PORTAGE_PYM_PATH + pythonpath
+
+ env = {
+ "PORTAGE_OVERRIDE_EPREFIX": eprefix,
+ "CLEAN_DELAY": "0",
+ "DISTDIR": distdir,
+ "EMERGE_WARNING_DELAY": "0",
+ "INFODIR": "",
+ "INFOPATH": "",
+ "PATH": path,
+ "PKGDIR": pkgdir,
+ "PORTAGE_INST_GID": str(os.getgid()), # str(portage.data.portage_gid),
+ "PORTAGE_INST_UID": str(os.getuid()), # str(portage.data.portage_uid),
+ "PORTAGE_PYTHON": portage._python_interpreter,
+ "PORTAGE_REPOSITORIES": settings.repositories.config_string(),
+ "PORTAGE_TMPDIR": portage_tmpdir,
+ "PORTAGE_LOGDIR": portage_tmpdir,
+ "PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
+ "PYTHONPATH": pythonpath,
+ "__PORTAGE_TEST_PATH_OVERRIDE": fake_bin,
+ }
+
+ if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
+ env["__PORTAGE_TEST_HARDLINK_LOCKS"] = os.environ[
+ "__PORTAGE_TEST_HARDLINK_LOCKS"
+ ]
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+ dirs = [
+ cachedir,
+ cachedir_pregen,
+ cross_eroot,
+ cross_prefix,
+ distdir,
+ fake_bin,
+ portage_tmpdir,
+ updates_dir,
+ user_config_dir,
+ var_cache_edb,
+ ]
+ etc_symlinks = ("dispatch-conf.conf", "etc-update.conf")
+ # Override things that may be unavailable, or may have portability
+ # issues when running tests in exotic environments.
+ # prepstrip - bug #447810 (bash read builtin EINTR problem)
+ true_symlinks = ["find", "prepstrip", "sed", "scanelf"]
+ true_binary = find_binary("true")
+ assert true_binary is not None, "true command not found"
+
+ for d in dirs:
+ ensure_dirs(d)
+ for x in true_symlinks:
+ try:
+ os.symlink(true_binary, os.path.join(fake_bin, x))
+ except FileExistsError:
+ pass
+ for x in etc_symlinks:
+ try:
+ os.symlink(
+ os.path.join(str(cnf_etc_path), x), os.path.join(eprefix, "etc", x)
+ )
+ except FileExistsError:
+ pass
+ with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
+ f.write(b"100")
+ # non-empty system set keeps --depclean quiet
+ with open(os.path.join(profile_path, "packages"), "w") as f:
+ f.write("*dev-libs/token-system-pkg")
+ for cp, xml_data in _METADATA_XML_FILES:
+ with open(os.path.join(test_repo_location, cp, "metadata.xml"), "w") as f:
+ f.write(playground.metadata_xml_template % xml_data)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(_1Q_2010_UPDATE)
+ if debug:
+ # The subprocess inherits both stdout and stderr, for
+ # debugging purposes.
+ stdout = None
+ else:
+ # The subprocess inherits stderr so that any warnings
+ # triggered by python -Wd will be visible.
+ stdout = subprocess.PIPE
+
+ for command in commands:
+ if command:
+ command.base_environment = env
+
+ proc = await asyncio.create_subprocess_exec(
+ *command(), env=command.env, stderr=None, stdout=stdout
+ )
+
+ if debug:
+ await proc.wait()
+ else:
+ output, _err = await proc.communicate()
+ await proc.wait()
+ if proc.returncode != os.EX_OK:
+ portage.writemsg(output)
+
+ real_command = command.name
+ args = command.args
+ assert (
+ os.EX_OK == proc.returncode
+ ), f"'{real_command}' failed with args '{args}'"
+ command.check_command_result()
diff --git a/lib/portage/tests/emerge/test_binpkg_fetch.py b/lib/portage/tests/emerge/test_binpkg_fetch.py
new file mode 100644
index 000000000..731711bad
--- /dev/null
+++ b/lib/portage/tests/emerge/test_binpkg_fetch.py
@@ -0,0 +1,226 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import shutil
+import subprocess
+import sys
+import tempfile
+
+import portage
+from portage import _unicode_decode, os
+from portage.const import (
+ PORTAGE_PYM_PATH,
+ USER_CONFIG_PATH,
+)
+from portage.process import find_binary
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.util import ensure_dirs
+
+
+class BinpkgFetchTestCase(TestCase):
+ def testLocalFilePkgSyncUpdate(self):
+ """
+ Check handling of local file:// sync-uri and unnecessary BUILD_ID
+ increments (bug #921208).
+ """
+ debug = False
+
+ ebuilds = {
+ "dev-libs/A-1::local": {
+ "EAPI": "7",
+ "SLOT": "0",
+ },
+ }
+
+ playground = ResolverPlayground(ebuilds=ebuilds, debug=debug)
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ eroot = settings["EROOT"]
+ trees = playground.trees
+ bindb = trees[eroot]["bintree"].dbapi
+ var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
+ user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
+
+ portage_python = portage._python_interpreter
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
+
+ tmppkgdir = tempfile.TemporaryDirectory()
+ tmppkgdir_suffix = os.path.join(tmppkgdir.name, "binpkg")
+
+ test_commands = (
+ # Create a trivial binpkg first.
+ emerge_cmd
+ + (
+ "--oneshot",
+ "--verbose",
+ "--buildpkg",
+ "dev-libs/A",
+ ),
+ # Copy to a new PKGDIR which we'll use as PORTAGE_BINHOST then delete the old PKGDIR.
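+ # The trailing "or True" makes these setup lambdas return a truthy
+ # value, since run_commands() below asserts on the callable's result
+ # (os.unlink, for one, returns None).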
+ (
+ (
+ lambda: shutil.copytree(bindb.bintree.pkgdir, tmppkgdir_suffix)
+ or True,
+ )
+ ),
+ (
+ (
+ lambda: os.unlink(
+ os.path.join(
+ bindb.bintree.pkgdir, "dev-libs", "A", "A-1-1.gpkg.tar"
+ )
+ )
+ or True,
+ )
+ ),
+ )
+ test_commands_nonfatal = (
+ # This should succeed if we've correctly saved the binpkg as
+ # A-1-1.gpkg.tar (not A-1-2.gpkg.tar) and then try to unpack the
+ # right filename. We defer checking the exit code to get a better
+ # error if the binpkg was downloaded under the wrong filename.
+ emerge_cmd
+ + (
+ "--oneshot",
+ "--verbose",
+ "--getbinpkgonly",
+ "dev-libs/A",
+ ),
+ )
+ test_commands_final = (
+ # Check whether the downloaded binpkg in PKGDIR has the correct
+ # filename (-1) or an unnecessarily-incremented one (-2).
+ (
+ lambda: os.path.exists(
+ os.path.join(
+ bindb.bintree.pkgdir, "dev-libs", "A", "A-1-1.gpkg.tar"
+ )
+ ),
+ ),
+ )
+
+ fake_bin = os.path.join(eprefix, "bin")
+ portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
+
+ path = settings.get("PATH")
+ if path is not None and not path.strip():
+ path = None
+ if path is None:
+ path = ""
+ else:
+ path = ":" + path
+ path = fake_bin + path
+
+ pythonpath = os.environ.get("PYTHONPATH")
+ if pythonpath is not None and not pythonpath.strip():
+ pythonpath = None
+ if pythonpath is not None and pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
+ pass
+ else:
+ if pythonpath is None:
+ pythonpath = ""
+ else:
+ pythonpath = ":" + pythonpath
+ pythonpath = PORTAGE_PYM_PATH + pythonpath
+
+ env = {
+ "PORTAGE_OVERRIDE_EPREFIX": eprefix,
+ "PATH": path,
+ "PORTAGE_PYTHON": portage_python,
+ "PORTAGE_REPOSITORIES": settings.repositories.config_string(),
+ "PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
+ "PYTHONPATH": pythonpath,
+ "PORTAGE_INST_GID": str(os.getgid()),
+ "PORTAGE_INST_UID": str(os.getuid()),
+ "FEATURES": "-pkgdir-index-trusted",
+ }
+
+ dirs = [
+ playground.distdir,
+ fake_bin,
+ portage_tmpdir,
+ user_config_dir,
+ var_cache_edb,
+ ]
+
+ true_symlinks = ["chown", "chgrp"]
+
+ needed_binaries = {
+ "true": (find_binary("true"), True),
+ }
+
+ def run_commands(test_commands, require_success=True):
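+ # Each entry is either a 1-tuple holding a callable (a setup or
+ # check step), or an argv tuple that may be prefixed with a dict
+ # of environment overrides.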
+ all_successful = True
+
+ for i, args in enumerate(test_commands):
+ if hasattr(args[0], "__call__"):
+ if require_success:
+ self.assertTrue(args[0](), f"callable at index {i} failed")
+ continue
+
+ if isinstance(args[0], dict):
+ local_env = env.copy()
+ local_env.update(args[0])
+ args = args[1:]
+ else:
+ local_env = env
+
+ local_env["PORTAGE_BINHOST"] = f"file:///{tmppkgdir_suffix}"
+ proc = subprocess.Popen(args, env=local_env, stdout=stdout)
+
+ if debug:
+ proc.wait()
+ else:
+ output = proc.stdout.readlines()
+ proc.wait()
+ proc.stdout.close()
+ if proc.returncode != os.EX_OK:
+ for line in output:
+ sys.stderr.write(_unicode_decode(line))
+
+ if all_successful and proc.returncode != os.EX_OK:
+ all_successful = False
+
+ if require_success:
+ self.assertEqual(
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
+ )
+
+ return all_successful
+
+ try:
+ for d in dirs:
+ ensure_dirs(d)
+ for x in true_symlinks:
+ os.symlink(needed_binaries["true"][0], os.path.join(fake_bin, x))
+
+ with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
+ f.write(b"100")
+
+ if debug:
+ # The subprocess inherits both stdout and stderr, for
+ # debugging purposes.
+ stdout = None
+ else:
+ # The subprocess inherits stderr so that any warnings
+ # triggered by python -Wd will be visible.
+ stdout = subprocess.PIPE
+
+ run_commands(test_commands)
+ deferred_success = run_commands(test_commands_nonfatal, False)
+ run_commands(test_commands_final)
+
+ # Check the return value of test_commands_nonfatal later on so
+ # we can get a better error message from test_commands_final
+ # if possible.
+ self.assertTrue(deferred_success, f"{test_commands_nonfatal} failed")
+ finally:
+ playground.debug = False
+ playground.cleanup()
+ tmppkgdir.cleanup()
diff --git a/lib/portage/tests/emerge/test_config_protect.py b/lib/portage/tests/emerge/test_config_protect.py
index b60d0c495..e04fc1a92 100644
--- a/lib/portage/tests/emerge/test_config_protect.py
+++ b/lib/portage/tests/emerge/test_config_protect.py
@@ -1,7 +1,6 @@
-# Copyright 2014-2015 Gentoo Foundation
+# Copyright 2014-2015, 2023 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-import io
from functools import partial
import shutil
import stat
@@ -113,10 +112,15 @@ src_install() {
portage_python,
"-b",
"-Wd",
- os.path.join(self.sbindir, "dispatch-conf"),
+ os.path.join(str(self.sbindir), "dispatch-conf"),
)
- emerge_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "emerge"))
- etc_update_cmd = (BASH_BINARY, os.path.join(self.sbindir, "etc-update"))
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
+ etc_update_cmd = (BASH_BINARY, os.path.join(str(self.sbindir), "etc-update"))
etc_update_auto = etc_update_cmd + (
"--automode",
"-5",
@@ -129,7 +133,7 @@ src_install() {
path = os.path.join(dir_path, name)
st = os.lstat(path)
if stat.S_ISREG(st.st_mode):
- with io.open(path, mode="a", encoding=_encodings["stdio"]) as f:
+ with open(path, mode="a", encoding=_encodings["stdio"]) as f:
f.write("modified at %d\n" % time.time())
elif stat.S_ISLNK(st.st_mode):
old_dest = os.readlink(path)
@@ -187,7 +191,7 @@ src_install() {
fake_bin = os.path.join(eprefix, "bin")
portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
- path = os.environ.get("PATH")
+ path = settings.get("PATH")
if path is not None and not path.strip():
path = None
if path is None:
@@ -218,8 +222,8 @@ src_install() {
"INFODIR": "",
"INFOPATH": "",
"PATH": path,
- "PORTAGE_INST_GID": str(portage.data.portage_gid),
- "PORTAGE_INST_UID": str(portage.data.portage_uid),
+ "PORTAGE_INST_GID": str(os.getgid()), # str(portage.data.portage_gid),
+ "PORTAGE_INST_UID": str(os.getuid()), # str(portage.data.portage_uid),
"PORTAGE_PYTHON": portage_python,
"PORTAGE_REPOSITORIES": settings.repositories.config_string(),
"PORTAGE_TMPDIR": portage_tmpdir,
@@ -248,7 +252,8 @@ src_install() {
os.symlink(true_binary, os.path.join(fake_bin, x))
for x in etc_symlinks:
os.symlink(
- os.path.join(self.cnf_etc_path, x), os.path.join(eprefix, "etc", x)
+ os.path.join(str(self.cnf_etc_path), x),
+ os.path.join(eprefix, "etc", x),
)
with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
f.write(b"100")
@@ -263,7 +268,6 @@ src_install() {
stdout = subprocess.PIPE
for args in test_commands:
-
if hasattr(args, "__call__"):
args()
continue
@@ -288,7 +292,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
index 785bf50cb..1eb7da79f 100644
--- a/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
+++ b/lib/portage/tests/emerge/test_emerge_blocker_file_collision.py
@@ -1,4 +1,4 @@
-# Copyright 2016 Gentoo Foundation
+# Copyright 2016, 2023 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import subprocess
@@ -16,7 +16,6 @@ from portage.util import ensure_dirs
class BlockerFileCollisionEmergeTestCase(TestCase):
def testBlockerFileCollision(self):
-
debug = False
install_something = """
@@ -51,7 +50,12 @@ src_install() {
user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
portage_python = portage._python_interpreter
- emerge_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "emerge"))
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
file_collision = os.path.join(eroot, "usr/lib/file-collision")
@@ -94,7 +98,7 @@ src_install() {
portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
profile_path = settings.profile_path
- path = os.environ.get("PATH")
+ path = settings.get("PATH")
if path is not None and not path.strip():
path = None
if path is None:
@@ -122,6 +126,8 @@ src_install() {
"PORTAGE_REPOSITORIES": settings.repositories.config_string(),
"PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
"PYTHONPATH": pythonpath,
+ "PORTAGE_INST_GID": str(os.getgid()),
+ "PORTAGE_INST_UID": str(os.getuid()),
}
if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
@@ -160,9 +166,8 @@ src_install() {
stdout = subprocess.PIPE
for i, args in enumerate(test_commands):
-
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
if isinstance(args[0], dict):
@@ -185,7 +190,7 @@ src_install() {
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.debug = False
diff --git a/lib/portage/tests/emerge/test_emerge_slot_abi.py b/lib/portage/tests/emerge/test_emerge_slot_abi.py
index 3c3a8b582..c1a8fe894 100644
--- a/lib/portage/tests/emerge/test_emerge_slot_abi.py
+++ b/lib/portage/tests/emerge/test_emerge_slot_abi.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2019 Gentoo Authors
+# Copyright 2012-2019, 2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import subprocess
@@ -16,7 +16,6 @@ from portage.util import ensure_dirs
class SlotAbiEmergeTestCase(TestCase):
def testSlotAbiEmerge(self):
-
debug = False
ebuilds = {
@@ -55,8 +54,18 @@ class SlotAbiEmergeTestCase(TestCase):
package_mask_path = os.path.join(user_config_dir, "package.mask")
portage_python = portage._python_interpreter
- ebuild_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "ebuild"))
- emerge_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "emerge"))
+ ebuild_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "ebuild"),
+ )
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
test_ebuild = portdb.findname("dev-libs/dbus-glib-0.98")
self.assertFalse(test_ebuild is None)
@@ -102,7 +111,7 @@ class SlotAbiEmergeTestCase(TestCase):
portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
profile_path = settings.profile_path
- path = os.environ.get("PATH")
+ path = settings.get("PATH")
if path is not None and not path.strip():
path = None
if path is None:
@@ -130,6 +139,8 @@ class SlotAbiEmergeTestCase(TestCase):
"PORTAGE_REPOSITORIES": settings.repositories.config_string(),
"PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
"PYTHONPATH": pythonpath,
+ "PORTAGE_INST_GID": str(os.getgid()),
+ "PORTAGE_INST_UID": str(os.getuid()),
}
if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
@@ -162,9 +173,8 @@ class SlotAbiEmergeTestCase(TestCase):
stdout = subprocess.PIPE
for i, args in enumerate(test_commands):
-
if hasattr(args[0], "__call__"):
- self.assertTrue(args[0](), "callable at index %s failed" % (i,))
+ self.assertTrue(args[0](), f"callable at index {i} failed")
continue
proc = subprocess.Popen(args, env=env, stdout=stdout)
@@ -180,7 +190,7 @@ class SlotAbiEmergeTestCase(TestCase):
sys.stderr.write(_unicode_decode(line))
self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
)
finally:
playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_libc_dep_inject.py b/lib/portage/tests/emerge/test_libc_dep_inject.py
new file mode 100644
index 000000000..933affcd7
--- /dev/null
+++ b/lib/portage/tests/emerge/test_libc_dep_inject.py
@@ -0,0 +1,552 @@
+# Copyright 2016-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import subprocess
+import sys
+import textwrap
+
+import portage
+from portage import os
+from portage import _unicode_decode
+from portage.const import PORTAGE_PYM_PATH, USER_CONFIG_PATH
+from portage.process import find_binary
+from portage.tests import TestCase
+from portage.util import ensure_dirs
+
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class LibcDepInjectEmergeTestCase(TestCase):
+ def testLibcDepInjection(self):
+ """
+ Test whether the implicit libc dependency injection (bug #913628)
+ is correctly added for only ebuilds installing an ELF binary.
+
+ Based on BlockerFileCollisionEmergeTestCase.
+ """
+ debug = False
+
+ install_elf = textwrap.dedent(
+ """
+ S="${WORKDIR}"
+
+ src_install() {
+ insinto /usr/bin
+ # We need an ELF binary for the injection to trigger, so
+ # use ${BASH} given we know it must be around for running ebuilds.
+ cp "${BASH}" "${ED}"/usr/bin/${PN} || die
+ }
+ """
+ )
+
+ ebuilds = {
+ "sys-libs/glibc-2.38": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "virtual/libc-1": {
+ "EAPI": "8",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "dev-libs/A-1": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ },
+ "dev-libs/C-1": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "dev-libs/D-1": {
+ "EAPI": "8",
+ },
+ "dev-libs/E-1": {
+ "EAPI": "8",
+ "RDEPEND": ">=dev-libs/D-1",
+ "MISC_CONTENT": install_elf,
+ },
+ }
+
+ world = ("dev-libs/A",)
+
+ playground = ResolverPlayground(ebuilds=ebuilds, world=world, debug=debug)
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ eroot = settings["EROOT"]
+ var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
+ user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
+
+ portage_python = portage._python_interpreter
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
+
+ test_commands = (
+ # If we install a package with an ELF but no libc provider is installed,
+ # make sure we don't inject anything (we don't want to have some bare RDEPEND with
+ # literally "[]").
+ emerge_cmd
+ + (
+ "--oneshot",
+ "dev-libs/C",
+ ),
+ (
+ lambda: not portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "dev-libs", "C-1", "RDEPEND"
+ )
+ ),
+ ),
+ # (We need sys-libs/glibc pulled in and virtual/libc installed)
+ emerge_cmd
+ + (
+ "--oneshot",
+ "virtual/libc",
+ ),
+ # A package NOT installing an ELF binary shouldn't have an injected libc dep
+ # Let's check the virtual/libc one as we already have to merge it to pull in
+ # sys-libs/glibc, but we'll do a better check after too.
+ (
+ lambda: ">=sys-libs/glibc-2.38\n"
+ not in portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "virtual", "libc-1", "RDEPEND"
+ )
+ ),
+ ),
+ # A package NOT installing an ELF binary shouldn't have an injected libc dep
+ emerge_cmd
+ + (
+ "--oneshot",
+ "dev-libs/B",
+ ),
+ (
+ lambda: not portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "dev-libs", "B-1", "RDEPEND"
+ )
+ ),
+ ),
+ # A package installing an ELF binary should have an injected libc dep
+ emerge_cmd
+ + (
+ "--oneshot",
+ "dev-libs/A",
+ ),
+ (lambda: os.path.exists(os.path.join(eroot, "usr/bin/A")),),
+ (
+ lambda: ">=sys-libs/glibc-2.38\n"
+ in portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "dev-libs", "A-1", "RDEPEND"
+ )
+ ),
+ ),
+ # Install glibc again because earlier, no libc was installed, so the injection
+ # wouldn't have fired even if the "are we libc?" check was broken.
+ emerge_cmd
+ + (
+ "--oneshot",
+ "sys-libs/glibc",
+ ),
+ # We don't want the libc (sys-libs/glibc is the provider here) to have an injected dep on itself
+ (
+ lambda: ">=sys-libs/glibc-2.38\n"
+ not in portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "sys-libs", "glibc-2.38", "RDEPEND"
+ )
+ ),
+ ),
+ # Make sure we append to, not clobber, RDEPEND
+ emerge_cmd
+ + (
+ "--oneshot",
+ "dev-libs/E",
+ ),
+ (
+ lambda: [">=dev-libs/D-1 >=sys-libs/glibc-2.38\n"]
+ == portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "dev-libs", "E-1", "RDEPEND"
+ )
+ ),
+ ),
+ )
+
+ fake_bin = os.path.join(eprefix, "bin")
+ portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
+ profile_path = settings.profile_path
+
+ path = settings.get("PATH")
+ if path is not None and not path.strip():
+ path = None
+ if path is None:
+ path = ""
+ else:
+ path = ":" + path
+ path = fake_bin + path
+
+ pythonpath = os.environ.get("PYTHONPATH")
+ if pythonpath is not None and not pythonpath.strip():
+ pythonpath = None
+ if pythonpath is not None and pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
+ pass
+ else:
+ if pythonpath is None:
+ pythonpath = ""
+ else:
+ pythonpath = ":" + pythonpath
+ pythonpath = PORTAGE_PYM_PATH + pythonpath
+
+ env = {
+ "PORTAGE_OVERRIDE_EPREFIX": eprefix,
+ "PATH": path,
+ "PORTAGE_PYTHON": portage_python,
+ "PORTAGE_REPOSITORIES": settings.repositories.config_string(),
+ "PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
+ "PYTHONPATH": pythonpath,
+ "PORTAGE_INST_GID": str(os.getgid()),
+ "PORTAGE_INST_UID": str(os.getuid()),
+ "FEATURES": "-qa-unresolved-soname-deps -preserve-libs -merge-sync",
+ }
+
+ dirs = [
+ playground.distdir,
+ fake_bin,
+ portage_tmpdir,
+ user_config_dir,
+ var_cache_edb,
+ ]
+
+ true_symlinks = ["chown", "chgrp"]
+
+ # We don't want to make pax-utils a hard-requirement for tests,
+ # so if it's not found, skip the test rather than FAIL it.
+ needed_binaries = {
+ "true": (find_binary("true"), True),
+ "scanelf": (find_binary("scanelf"), False),
+ "find": (find_binary("find"), True),
+ }
+
+ for name, (path, mandatory) in needed_binaries.items():
+ found = path is not None
+
+ if not found:
+ if mandatory:
+ self.assertIsNotNone(path, f"command {name} not found")
+ else:
+ self.skipTest(f"{name} not found")
+
+ try:
+ for d in dirs:
+ ensure_dirs(d)
+ for x in true_symlinks:
+ os.symlink(needed_binaries["true"][0], os.path.join(fake_bin, x))
+
+ # We need scanelf, find for the ELF parts (creating NEEDED)
+ os.symlink(needed_binaries["scanelf"][0], os.path.join(fake_bin, "scanelf"))
+ os.symlink(needed_binaries["find"][0], os.path.join(fake_bin, "find"))
+
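+ # Seed the vardb COUNTER file for the fake install tree.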
+ with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
+ f.write(b"100")
+ with open(os.path.join(profile_path, "packages"), "w") as f:
+ f.write("*virtual/libc")
+
+ if debug:
+ # The subprocess inherits both stdout and stderr, for
+ # debugging purposes.
+ stdout = None
+ else:
+ # The subprocess inherits stderr so that any warnings
+ # triggered by python -Wd will be visible.
+ stdout = subprocess.PIPE
+
+ for i, args in enumerate(test_commands):
+ if hasattr(args[0], "__call__"):
+ self.assertTrue(args[0](), f"callable at index {i} failed")
+ continue
+
+ if isinstance(args[0], dict):
+ local_env = env.copy()
+ local_env.update(args[0])
+ args = args[1:]
+ else:
+ local_env = env
+
+ proc = subprocess.Popen(args, env=local_env, stdout=stdout)
+
+ if debug:
+ proc.wait()
+ else:
+ output = proc.stdout.readlines()
+ proc.wait()
+ proc.stdout.close()
+ if proc.returncode != os.EX_OK:
+ for line in output:
+ sys.stderr.write(_unicode_decode(line))
+
+ self.assertEqual(
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
+ )
+
+ # Check that dev-libs/A doesn't get re-emerged via --changed-deps
+ # after injecting the libc dep. We want to suppress the injected
+ # dep in the changed-deps comparisons.
+ k = ResolverPlaygroundTestCase(
+ ["@world"],
+ options={
+ "--changed-deps": True,
+ "--deep": True,
+ "--update": True,
+ "--verbose": True,
+ },
+ success=True,
+ mergelist=[],
+ )
+ playground.run_TestCase(k)
+ self.assertEqual(k.test_success, True, k.fail_msg)
+ finally:
+ playground.debug = False
+ playground.cleanup()
+
+ def testBinpkgLibcDepInjection(self):
+ """
+ Test whether the implicit libc dependency injection (bug #913628)
+ correctly forces an upgrade to a newer glibc before merging a binpkg
+ built against it.
+
+ Based on BlockerFileCollisionEmergeTestCase.
+ """
+ debug = False
+
+ install_elf = textwrap.dedent(
+ """
+ S="${WORKDIR}"
+
+ src_install() {
+ insinto /usr/bin
+ # We need an ELF binary for the injection to trigger, so
+ # use ${BASH} given we know it must be around for running ebuilds.
+ cp "${BASH}" "${ED}"/usr/bin/${PN} || die
+ }
+ """
+ )
+
+ ebuilds = {
+ "sys-libs/glibc-2.37": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "sys-libs/glibc-2.38": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "virtual/libc-1": {
+ "EAPI": "8",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "dev-libs/A-1": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ },
+ "dev-libs/C-1": {
+ "EAPI": "8",
+ "MISC_CONTENT": install_elf,
+ },
+ }
+
+ playground = ResolverPlayground(ebuilds=ebuilds, debug=debug)
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ eroot = settings["EROOT"]
+ var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
+ user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
+
+ portage_python = portage._python_interpreter
+ emerge_cmd = (
+ portage_python,
+ "-b",
+ "-Wd",
+ os.path.join(str(self.bindir), "emerge"),
+ )
+
+ test_commands = (
+ # We need sys-libs/glibc pulled in and virtual/libc installed first
+ emerge_cmd
+ + (
+ "--oneshot",
+ "virtual/libc",
+ ),
+ # A package installing an ELF binary should have an injected libc dep
+ emerge_cmd
+ + (
+ "--oneshot",
+ "dev-libs/A",
+ ),
+ (lambda: os.path.exists(os.path.join(eroot, "usr/bin/A")),),
+ (
+ lambda: ">=sys-libs/glibc-2.38\n"
+ in portage.util.grablines(
+ os.path.join(
+ eprefix, "var", "db", "pkg", "dev-libs", "A-1", "RDEPEND"
+ )
+ ),
+ ),
+ # Downgrade glibc to a version (2.37) older than the version
+ # that dev-libs/A's binpkg was built against (2.38). Below,
+ # we check that it pulls in a newer glibc via a ResolverPlayground
+ # testcase.
+ emerge_cmd
+ + (
+ "--oneshot",
+ "--nodeps",
+ "<sys-libs/glibc-2.38",
+ ),
+ )
+
+ fake_bin = os.path.join(eprefix, "bin")
+ portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
+ profile_path = settings.profile_path
+
+ path = settings.get("PATH")
+ if path is not None and not path.strip():
+ path = None
+ if path is None:
+ path = ""
+ else:
+ path = ":" + path
+ path = fake_bin + path
+
+ pythonpath = os.environ.get("PYTHONPATH")
+ if pythonpath is not None and not pythonpath.strip():
+ pythonpath = None
+ if pythonpath is not None and pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
+ pass
+ else:
+ if pythonpath is None:
+ pythonpath = ""
+ else:
+ pythonpath = ":" + pythonpath
+ pythonpath = PORTAGE_PYM_PATH + pythonpath
+
+ env = {
+ "PORTAGE_OVERRIDE_EPREFIX": eprefix,
+ "PATH": path,
+ "PORTAGE_PYTHON": portage_python,
+ "PORTAGE_REPOSITORIES": settings.repositories.config_string(),
+ "PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
+ "PYTHONPATH": pythonpath,
+ "PORTAGE_INST_GID": str(os.getgid()),
+ "PORTAGE_INST_UID": str(os.getuid()),
+ "FEATURES": "buildpkg",
+ }
+
+ dirs = [
+ playground.distdir,
+ fake_bin,
+ portage_tmpdir,
+ user_config_dir,
+ var_cache_edb,
+ ]
+
+ true_symlinks = ["chown", "chgrp"]
+
+ # We don't want to make pax-utils a hard requirement for tests,
+ # so if it's not found, skip the test rather than fail it.
+ needed_binaries = {
+ "true": (find_binary("true"), True),
+ "scanelf": (find_binary("scanelf"), False),
+ "find": (find_binary("find"), True),
+ }
+
+ for name, (binary_path, mandatory) in needed_binaries.items():
+ found = binary_path is not None
+
+ if not found:
+ if mandatory:
+ self.assertIsNotNone(binary_path, f"command {name} not found")
+ else:
+ self.skipTest(f"{name} not found")
+
+ try:
+ for d in dirs:
+ ensure_dirs(d)
+ for x in true_symlinks:
+ os.symlink(needed_binaries["true"][0], os.path.join(fake_bin, x))
+
+ # We need scanelf and find for the ELF handling (generating NEEDED entries)
+ os.symlink(needed_binaries["scanelf"][0], os.path.join(fake_bin, "scanelf"))
+ os.symlink(needed_binaries["find"][0], os.path.join(fake_bin, "find"))
+
+ with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
+ f.write(b"100")
+ with open(os.path.join(profile_path, "packages"), "w") as f:
+ f.write("*virtual/libc")
+
+ if debug:
+ # The subprocess inherits both stdout and stderr, for
+ # debugging purposes.
+ stdout = None
+ else:
+ # The subprocess inherits stderr so that any warnings
+ # triggered by python -Wd will be visible.
+ stdout = subprocess.PIPE
+
+ for i, args in enumerate(test_commands):
+ if hasattr(args[0], "__call__"):
+ self.assertTrue(args[0](), f"callable at index {i} failed")
+ continue
+
+ if isinstance(args[0], dict):
+ local_env = env.copy()
+ local_env.update(args[0])
+ args = args[1:]
+ else:
+ local_env = env
+
+ proc = subprocess.Popen(args, env=local_env, stdout=stdout)
+
+ if debug:
+ proc.wait()
+ else:
+ output = proc.stdout.readlines()
+ proc.wait()
+ proc.stdout.close()
+ if proc.returncode != os.EX_OK:
+ for line in output:
+ sys.stderr.write(_unicode_decode(line))
+
+ self.assertEqual(
+ os.EX_OK, proc.returncode, f"emerge failed with args {args}"
+ )
+
+ # Now check that glibc, which we downgraded earlier, gets upgraded
+ # back to the version the dev-libs/A binpkg was built against (2.38)
+ # before that binpkg is merged.
+ k = ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ options={
+ "--usepkgonly": True,
+ "--verbose": True,
+ },
+ success=True,
+ mergelist=["[binary]sys-libs/glibc-2.38-1", "[binary]dev-libs/A-1-1"],
+ )
+ playground.run_TestCase(k)
+ self.assertEqual(k.test_success, True, k.fail_msg)
+
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/emerge/test_simple.py b/lib/portage/tests/emerge/test_simple.py
deleted file mode 100644
index 3a8bf3764..000000000
--- a/lib/portage/tests/emerge/test_simple.py
+++ /dev/null
@@ -1,704 +0,0 @@
-# Copyright 2011-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-import argparse
-import subprocess
-
-import portage
-from portage import shutil, os
-from portage.const import (
- BASH_BINARY,
- BINREPOS_CONF_FILE,
- PORTAGE_PYM_PATH,
- USER_CONFIG_PATH,
-)
-from portage.cache.mappings import Mapping
-from portage.process import find_binary
-from portage.tests import TestCase
-from portage.tests.resolver.ResolverPlayground import ResolverPlayground
-from portage.tests.util.test_socks5 import AsyncHTTPServer
-from portage.util import ensure_dirs, find_updated_config_files, shlex_split
-from portage.util.futures import asyncio
-
-
-class BinhostContentMap(Mapping):
- def __init__(self, remote_path, local_path):
- self._remote_path = remote_path
- self._local_path = local_path
-
- def __getitem__(self, request_path):
- safe_path = os.path.normpath(request_path)
- if not safe_path.startswith(self._remote_path + "/"):
- raise KeyError(request_path)
- local_path = os.path.join(
- self._local_path, safe_path[len(self._remote_path) + 1 :]
- )
- try:
- with open(local_path, "rb") as f:
- return f.read()
- except EnvironmentError:
- raise KeyError(request_path)
-
-
-class SimpleEmergeTestCase(TestCase):
- def _have_python_xml(self):
- try:
- __import__("xml.etree.ElementTree")
- __import__("xml.parsers.expat").parsers.expat.ExpatError
- except (AttributeError, ImportError):
- return False
- return True
-
- def testSimple(self):
-
- debug = False
-
- install_something = """
-S="${WORKDIR}"
-
-pkg_pretend() {
- einfo "called pkg_pretend for $CATEGORY/$PF"
-}
-
-src_install() {
- einfo "installing something..."
- insinto /usr/lib/${P}
- echo "blah blah blah" > "${T}"/regular-file
- doins "${T}"/regular-file
- dosym regular-file /usr/lib/${P}/symlink || die
-
- # Test CONFIG_PROTECT
- insinto /etc
- newins "${T}"/regular-file ${PN}-${SLOT%/*}
-
- # Test code for bug #381629, using a copyright symbol encoded with latin-1.
- # We use $(printf "\\xa9") rather than $'\\xa9', since printf apparently
- # works in any case, while $'\\xa9' transforms to \\xef\\xbf\\xbd under
- # some conditions. TODO: Find out why it transforms to \\xef\\xbf\\xbd when
- # running tests for Python 3.2 (even though it's bash that is ultimately
- # responsible for performing the transformation).
- local latin_1_dir=/usr/lib/${P}/latin-1-$(printf "\\xa9")-directory
- insinto "${latin_1_dir}"
- echo "blah blah blah" > "${T}"/latin-1-$(printf "\\xa9")-regular-file || die
- doins "${T}"/latin-1-$(printf "\\xa9")-regular-file
- dosym latin-1-$(printf "\\xa9")-regular-file ${latin_1_dir}/latin-1-$(printf "\\xa9")-symlink || die
-
- call_has_and_best_version
-}
-
-pkg_config() {
- einfo "called pkg_config for $CATEGORY/$PF"
-}
-
-pkg_info() {
- einfo "called pkg_info for $CATEGORY/$PF"
-}
-
-pkg_preinst() {
- if ! ___eapi_best_version_and_has_version_support_-b_-d_-r; then
- # The BROOT variable is unset during pkg_* phases for EAPI 7,
- # therefore best/has_version -b is expected to fail if we attempt
- # to call it for EAPI 7 here.
- call_has_and_best_version
- fi
-}
-
-call_has_and_best_version() {
- local root_arg
- if ___eapi_best_version_and_has_version_support_-b_-d_-r; then
- root_arg="-b"
- else
- root_arg="--host-root"
- fi
- einfo "called ${EBUILD_PHASE_FUNC} for $CATEGORY/$PF"
- einfo "EPREFIX=${EPREFIX}"
- einfo "PORTAGE_OVERRIDE_EPREFIX=${PORTAGE_OVERRIDE_EPREFIX}"
- einfo "ROOT=${ROOT}"
- einfo "EROOT=${EROOT}"
- einfo "SYSROOT=${SYSROOT}"
- einfo "ESYSROOT=${ESYSROOT}"
- einfo "BROOT=${BROOT}"
- # Test that has_version and best_version work correctly with
- # prefix (involves internal ROOT -> EROOT calculation in order
- # to support ROOT override via the environment with EAPIs 3
- # and later which support prefix).
- if has_version $CATEGORY/$PN:$SLOT ; then
- einfo "has_version detects an installed instance of $CATEGORY/$PN:$SLOT"
- einfo "best_version reports that the installed instance is $(best_version $CATEGORY/$PN:$SLOT)"
- else
- einfo "has_version does not detect an installed instance of $CATEGORY/$PN:$SLOT"
- fi
- if [[ ${EPREFIX} != ${PORTAGE_OVERRIDE_EPREFIX} ]] ; then
- if has_version ${root_arg} $CATEGORY/$PN:$SLOT ; then
- einfo "has_version ${root_arg} detects an installed instance of $CATEGORY/$PN:$SLOT"
- einfo "best_version ${root_arg} reports that the installed instance is $(best_version ${root_arg} $CATEGORY/$PN:$SLOT)"
- else
- einfo "has_version ${root_arg} does not detect an installed instance of $CATEGORY/$PN:$SLOT"
- fi
- fi
-}
-
-"""
-
- ebuilds = {
- "dev-libs/A-1": {
- "EAPI": "5",
- "IUSE": "+flag",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "MISC_CONTENT": install_something,
- "RDEPEND": "flag? ( dev-libs/B[flag] )",
- },
- "dev-libs/B-1": {
- "EAPI": "5",
- "IUSE": "+flag",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "MISC_CONTENT": install_something,
- },
- "dev-libs/C-1": {
- "EAPI": "7",
- "KEYWORDS": "~x86",
- "RDEPEND": "dev-libs/D[flag]",
- "MISC_CONTENT": install_something,
- },
- "dev-libs/D-1": {
- "EAPI": "7",
- "KEYWORDS": "~x86",
- "IUSE": "flag",
- "MISC_CONTENT": install_something,
- },
- "virtual/foo-0": {
- "EAPI": "5",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- },
- }
-
- installed = {
- "dev-libs/A-1": {
- "EAPI": "5",
- "IUSE": "+flag",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "RDEPEND": "flag? ( dev-libs/B[flag] )",
- "USE": "flag",
- },
- "dev-libs/B-1": {
- "EAPI": "5",
- "IUSE": "+flag",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "USE": "flag",
- },
- "dev-libs/depclean-me-1": {
- "EAPI": "5",
- "IUSE": "",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "USE": "",
- },
- "app-misc/depclean-me-1": {
- "EAPI": "5",
- "IUSE": "",
- "KEYWORDS": "x86",
- "LICENSE": "GPL-2",
- "RDEPEND": "dev-libs/depclean-me",
- "USE": "",
- },
- }
-
- metadata_xml_files = (
- (
- "dev-libs/A",
- {
- "flags": "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
- },
- ),
- (
- "dev-libs/B",
- {
- "flags": "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
- },
- ),
- )
-
- playground = ResolverPlayground(
- ebuilds=ebuilds, installed=installed, debug=debug
- )
-
- loop = asyncio._wrap_loop()
- loop.run_until_complete(
- asyncio.ensure_future(
- self._async_test_simple(playground, metadata_xml_files, loop=loop),
- loop=loop,
- )
- )
-
- async def _async_test_simple(self, playground, metadata_xml_files, loop):
-
- debug = playground.debug
- settings = playground.settings
- eprefix = settings["EPREFIX"]
- eroot = settings["EROOT"]
- trees = playground.trees
- portdb = trees[eroot]["porttree"].dbapi
- test_repo_location = settings.repositories["test_repo"].location
- var_cache_edb = os.path.join(eprefix, "var", "cache", "edb")
- cachedir = os.path.join(var_cache_edb, "dep")
- cachedir_pregen = os.path.join(test_repo_location, "metadata", "md5-cache")
-
- portage_python = portage._python_interpreter
- dispatch_conf_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.sbindir, "dispatch-conf"),
- )
- ebuild_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "ebuild"))
- egencache_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.bindir, "egencache"),
- "--repo",
- "test_repo",
- "--repositories-configuration",
- settings.repositories.config_string(),
- )
- emerge_cmd = (portage_python, "-b", "-Wd", os.path.join(self.bindir, "emerge"))
- emaint_cmd = (portage_python, "-b", "-Wd", os.path.join(self.sbindir, "emaint"))
- env_update_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.sbindir, "env-update"),
- )
- etc_update_cmd = (BASH_BINARY, os.path.join(self.sbindir, "etc-update"))
- fixpackages_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.sbindir, "fixpackages"),
- )
- portageq_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.bindir, "portageq"),
- )
- quickpkg_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.bindir, "quickpkg"),
- )
- regenworld_cmd = (
- portage_python,
- "-b",
- "-Wd",
- os.path.join(self.sbindir, "regenworld"),
- )
-
- rm_binary = find_binary("rm")
- self.assertEqual(rm_binary is None, False, "rm command not found")
- rm_cmd = (rm_binary,)
-
- egencache_extra_args = []
- if self._have_python_xml():
- egencache_extra_args.append("--update-use-local-desc")
-
- test_ebuild = portdb.findname("dev-libs/A-1")
- self.assertFalse(test_ebuild is None)
-
- cross_prefix = os.path.join(eprefix, "cross_prefix")
- cross_root = os.path.join(eprefix, "cross_root")
- cross_eroot = os.path.join(cross_root, eprefix.lstrip(os.sep))
-
- binhost_dir = os.path.join(eprefix, "binhost")
- binhost_address = "127.0.0.1"
- binhost_remote_path = "/binhost"
- binhost_server = AsyncHTTPServer(
- binhost_address, BinhostContentMap(binhost_remote_path, binhost_dir), loop
- ).__enter__()
- binhost_uri = "http://{address}:{port}{path}".format(
- address=binhost_address,
- port=binhost_server.server_port,
- path=binhost_remote_path,
- )
-
- test_commands = ()
-
- if hasattr(argparse.ArgumentParser, "parse_intermixed_args"):
- test_commands += (
- emerge_cmd + ("--oneshot", "dev-libs/A", "-v", "dev-libs/A"),
- )
-
- test_commands += (
- emerge_cmd
- + (
- "--usepkgonly",
- "--root",
- cross_root,
- "--quickpkg-direct=y",
- "--quickpkg-direct-root",
- "/",
- "dev-libs/A",
- ),
- emerge_cmd
- + (
- "--usepkgonly",
- "--quickpkg-direct=y",
- "--quickpkg-direct-root",
- cross_root,
- "dev-libs/A",
- ),
- env_update_cmd,
- portageq_cmd
- + (
- "envvar",
- "-v",
- "CONFIG_PROTECT",
- "EROOT",
- "PORTAGE_CONFIGROOT",
- "PORTAGE_TMPDIR",
- "USERLAND",
- ),
- etc_update_cmd,
- dispatch_conf_cmd,
- emerge_cmd + ("--version",),
- emerge_cmd + ("--info",),
- emerge_cmd + ("--info", "--verbose"),
- emerge_cmd + ("--list-sets",),
- emerge_cmd + ("--check-news",),
- rm_cmd + ("-rf", cachedir),
- rm_cmd + ("-rf", cachedir_pregen),
- emerge_cmd + ("--regen",),
- rm_cmd + ("-rf", cachedir),
- ({"FEATURES": "metadata-transfer"},) + emerge_cmd + ("--regen",),
- rm_cmd + ("-rf", cachedir),
- ({"FEATURES": "metadata-transfer"},) + emerge_cmd + ("--regen",),
- rm_cmd + ("-rf", cachedir),
- egencache_cmd + ("--update",) + tuple(egencache_extra_args),
- ({"FEATURES": "metadata-transfer"},) + emerge_cmd + ("--metadata",),
- rm_cmd + ("-rf", cachedir),
- ({"FEATURES": "metadata-transfer"},) + emerge_cmd + ("--metadata",),
- emerge_cmd + ("--metadata",),
- rm_cmd + ("-rf", cachedir),
- emerge_cmd + ("--oneshot", "virtual/foo"),
- lambda: self.assertFalse(
- os.path.exists(os.path.join(pkgdir, "virtual", "foo", "foo-0-1.xpak"))
- ),
- ({"FEATURES": "unmerge-backup"},)
- + emerge_cmd
- + ("--unmerge", "virtual/foo"),
- lambda: self.assertTrue(
- os.path.exists(os.path.join(pkgdir, "virtual", "foo", "foo-0-1.xpak"))
- ),
- emerge_cmd + ("--pretend", "dev-libs/A"),
- ebuild_cmd + (test_ebuild, "manifest", "clean", "package", "merge"),
- emerge_cmd + ("--pretend", "--tree", "--complete-graph", "dev-libs/A"),
- emerge_cmd + ("-p", "dev-libs/B"),
- emerge_cmd + ("-p", "--newrepo", "dev-libs/B"),
- emerge_cmd
- + (
- "-B",
- "dev-libs/B",
- ),
- emerge_cmd
- + (
- "--oneshot",
- "--usepkg",
- "dev-libs/B",
- ),
- # trigger clean prior to pkg_pretend as in bug #390711
- ebuild_cmd + (test_ebuild, "unpack"),
- emerge_cmd
- + (
- "--oneshot",
- "dev-libs/A",
- ),
- emerge_cmd
- + (
- "--noreplace",
- "dev-libs/A",
- ),
- emerge_cmd
- + (
- "--config",
- "dev-libs/A",
- ),
- emerge_cmd + ("--info", "dev-libs/A", "dev-libs/B"),
- emerge_cmd + ("--pretend", "--depclean", "--verbose", "dev-libs/B"),
- emerge_cmd
- + (
- "--pretend",
- "--depclean",
- ),
- emerge_cmd + ("--depclean",),
- quickpkg_cmd
- + (
- "--include-config",
- "y",
- "dev-libs/A",
- ),
- # Test bug #523684, where a file renamed or removed by the
- # admin forces replacement files to be merged with config
- # protection.
- lambda: self.assertEqual(
- 0,
- len(
- list(
- find_updated_config_files(
- eroot, shlex_split(settings["CONFIG_PROTECT"])
- )
- )
- ),
- ),
- lambda: os.unlink(os.path.join(eprefix, "etc", "A-0")),
- emerge_cmd + ("--usepkgonly", "dev-libs/A"),
- lambda: self.assertEqual(
- 1,
- len(
- list(
- find_updated_config_files(
- eroot, shlex_split(settings["CONFIG_PROTECT"])
- )
- )
- ),
- ),
- emaint_cmd + ("--check", "all"),
- emaint_cmd + ("--fix", "all"),
- fixpackages_cmd,
- regenworld_cmd,
- portageq_cmd + ("match", eroot, "dev-libs/A"),
- portageq_cmd + ("best_visible", eroot, "dev-libs/A"),
- portageq_cmd + ("best_visible", eroot, "binary", "dev-libs/A"),
- portageq_cmd + ("contents", eroot, "dev-libs/A-1"),
- portageq_cmd
- + ("metadata", eroot, "ebuild", "dev-libs/A-1", "EAPI", "IUSE", "RDEPEND"),
- portageq_cmd
- + ("metadata", eroot, "binary", "dev-libs/A-1", "EAPI", "USE", "RDEPEND"),
- portageq_cmd
- + (
- "metadata",
- eroot,
- "installed",
- "dev-libs/A-1",
- "EAPI",
- "USE",
- "RDEPEND",
- ),
- portageq_cmd + ("owners", eroot, eroot + "usr"),
- emerge_cmd + ("-p", eroot + "usr"),
- emerge_cmd + ("-p", "--unmerge", "-q", eroot + "usr"),
- emerge_cmd + ("--unmerge", "--quiet", "dev-libs/A"),
- emerge_cmd + ("-C", "--quiet", "dev-libs/B"),
- # If EMERGE_DEFAULT_OPTS contains --autounmask=n, then --autounmask
- # must be specified with --autounmask-continue.
- ({"EMERGE_DEFAULT_OPTS": "--autounmask=n"},)
- + emerge_cmd
- + (
- "--autounmask",
- "--autounmask-continue",
- "dev-libs/C",
- ),
- # Verify that the above --autounmask-continue command caused
- # USE=flag to be applied correctly to dev-libs/D.
- portageq_cmd + ("match", eroot, "dev-libs/D[flag]"),
- # Test cross-prefix usage, including chpathtool for binpkgs.
- # EAPI 7
- ({"EPREFIX": cross_prefix},) + emerge_cmd + ("dev-libs/C",),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/C"),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/D"),
- ({"ROOT": cross_root},) + emerge_cmd + ("dev-libs/D",),
- portageq_cmd + ("has_version", cross_eroot, "dev-libs/D"),
- # EAPI 5
- ({"EPREFIX": cross_prefix},) + emerge_cmd + ("--usepkgonly", "dev-libs/A"),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/A"),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/B"),
- ({"EPREFIX": cross_prefix},) + emerge_cmd + ("-C", "--quiet", "dev-libs/B"),
- ({"EPREFIX": cross_prefix},) + emerge_cmd + ("-C", "--quiet", "dev-libs/A"),
- ({"EPREFIX": cross_prefix},) + emerge_cmd + ("dev-libs/A",),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/A"),
- ({"EPREFIX": cross_prefix},)
- + portageq_cmd
- + ("has_version", cross_prefix, "dev-libs/B"),
- # Test ROOT support
- ({"ROOT": cross_root},) + emerge_cmd + ("dev-libs/B",),
- portageq_cmd + ("has_version", cross_eroot, "dev-libs/B"),
- )
-
- # Test binhost support if FETCHCOMMAND is available.
- binrepos_conf_file = os.path.join(os.sep, eprefix, BINREPOS_CONF_FILE)
- with open(binrepos_conf_file, "wt") as f:
- f.write("[test-binhost]\n")
- f.write("sync-uri = {}\n".format(binhost_uri))
- fetchcommand = portage.util.shlex_split(playground.settings["FETCHCOMMAND"])
- fetch_bin = portage.process.find_binary(fetchcommand[0])
- if fetch_bin is not None:
- test_commands = test_commands + (
- lambda: os.rename(pkgdir, binhost_dir),
- emerge_cmd + ("-e", "--getbinpkgonly", "dev-libs/A"),
- lambda: shutil.rmtree(pkgdir),
- lambda: os.rename(binhost_dir, pkgdir),
- # Remove binrepos.conf and test PORTAGE_BINHOST.
- lambda: os.unlink(binrepos_conf_file),
- lambda: os.rename(pkgdir, binhost_dir),
- ({"PORTAGE_BINHOST": binhost_uri},)
- + emerge_cmd
- + ("-fe", "--getbinpkgonly", "dev-libs/A"),
- lambda: shutil.rmtree(pkgdir),
- lambda: os.rename(binhost_dir, pkgdir),
- )
-
- distdir = playground.distdir
- pkgdir = playground.pkgdir
- fake_bin = os.path.join(eprefix, "bin")
- portage_tmpdir = os.path.join(eprefix, "var", "tmp", "portage")
- profile_path = settings.profile_path
- user_config_dir = os.path.join(os.sep, eprefix, USER_CONFIG_PATH)
-
- path = os.environ.get("PATH")
- if path is not None and not path.strip():
- path = None
- if path is None:
- path = ""
- else:
- path = ":" + path
- path = fake_bin + path
-
- pythonpath = os.environ.get("PYTHONPATH")
- if pythonpath is not None and not pythonpath.strip():
- pythonpath = None
- if pythonpath is not None and pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
- pass
- else:
- if pythonpath is None:
- pythonpath = ""
- else:
- pythonpath = ":" + pythonpath
- pythonpath = PORTAGE_PYM_PATH + pythonpath
-
- env = {
- "PORTAGE_OVERRIDE_EPREFIX": eprefix,
- "CLEAN_DELAY": "0",
- "DISTDIR": distdir,
- "EMERGE_WARNING_DELAY": "0",
- "INFODIR": "",
- "INFOPATH": "",
- "PATH": path,
- "PKGDIR": pkgdir,
- "PORTAGE_INST_GID": str(portage.data.portage_gid),
- "PORTAGE_INST_UID": str(portage.data.portage_uid),
- "PORTAGE_PYTHON": portage_python,
- "PORTAGE_REPOSITORIES": settings.repositories.config_string(),
- "PORTAGE_TMPDIR": portage_tmpdir,
- "PORTAGE_LOGDIR": portage_tmpdir,
- "PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
- "PYTHONPATH": pythonpath,
- "__PORTAGE_TEST_PATH_OVERRIDE": fake_bin,
- }
-
- if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
- env["__PORTAGE_TEST_HARDLINK_LOCKS"] = os.environ[
- "__PORTAGE_TEST_HARDLINK_LOCKS"
- ]
-
- updates_dir = os.path.join(test_repo_location, "profiles", "updates")
- dirs = [
- cachedir,
- cachedir_pregen,
- cross_eroot,
- cross_prefix,
- distdir,
- fake_bin,
- portage_tmpdir,
- updates_dir,
- user_config_dir,
- var_cache_edb,
- ]
- etc_symlinks = ("dispatch-conf.conf", "etc-update.conf")
- # Override things that may be unavailable, or may have portability
- # issues when running tests in exotic environments.
- # prepstrip - bug #447810 (bash read builtin EINTR problem)
- true_symlinks = ["find", "prepstrip", "sed", "scanelf"]
- true_binary = find_binary("true")
- self.assertEqual(true_binary is None, False, "true command not found")
- try:
- for d in dirs:
- ensure_dirs(d)
- for x in true_symlinks:
- os.symlink(true_binary, os.path.join(fake_bin, x))
- for x in etc_symlinks:
- os.symlink(
- os.path.join(self.cnf_etc_path, x), os.path.join(eprefix, "etc", x)
- )
- with open(os.path.join(var_cache_edb, "counter"), "wb") as f:
- f.write(b"100")
- # non-empty system set keeps --depclean quiet
- with open(os.path.join(profile_path, "packages"), "w") as f:
- f.write("*dev-libs/token-system-pkg")
- for cp, xml_data in metadata_xml_files:
- with open(
- os.path.join(test_repo_location, cp, "metadata.xml"), "w"
- ) as f:
- f.write(playground.metadata_xml_template % xml_data)
- with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
- f.write(
- """
-slotmove =app-doc/pms-3 2 3
-move dev-util/git dev-vcs/git
-"""
- )
-
- if debug:
- # The subprocess inherits both stdout and stderr, for
- # debugging purposes.
- stdout = None
- else:
- # The subprocess inherits stderr so that any warnings
- # triggered by python -Wd will be visible.
- stdout = subprocess.PIPE
-
- for args in test_commands:
-
- if hasattr(args, "__call__"):
- args()
- continue
-
- if isinstance(args[0], dict):
- local_env = env.copy()
- local_env.update(args[0])
- args = args[1:]
- else:
- local_env = env
-
- proc = await asyncio.create_subprocess_exec(
- *args, env=local_env, stderr=None, stdout=stdout
- )
-
- if debug:
- await proc.wait()
- else:
- output, _err = await proc.communicate()
- await proc.wait()
- if proc.returncode != os.EX_OK:
- portage.writemsg(output)
-
- self.assertEqual(
- os.EX_OK, proc.returncode, "emerge failed with args %s" % (args,)
- )
- finally:
- binhost_server.__exit__(None, None, None)
- playground.cleanup()
diff --git a/lib/portage/tests/env/config/meson.build b/lib/portage/tests/env/config/meson.build
new file mode 100644
index 000000000..191430216
--- /dev/null
+++ b/lib/portage/tests/env/config/meson.build
@@ -0,0 +1,12 @@
+py.install_sources(
+ [
+ 'test_PackageKeywordsFile.py',
+ 'test_PackageMaskFile.py',
+ 'test_PackageUseFile.py',
+ 'test_PortageModulesFile.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/env/config',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/env/config/test_PackageKeywordsFile.py b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
index e4a1fcaaf..f1f541102 100644
--- a/lib/portage/tests/env/config/test_PackageKeywordsFile.py
+++ b/lib/portage/tests/env/config/test_PackageKeywordsFile.py
@@ -9,7 +9,6 @@ from tempfile import mkstemp
class PackageKeywordsFileTestCase(TestCase):
-
cpv = ["sys-apps/portage"]
keywords = ["~x86", "amd64", "-mips"]
@@ -34,7 +33,7 @@ class PackageKeywordsFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for c in self.cpv:
- f.write("%s %s\n" % (c, " ".join(self.keywords)))
+ f.write(f"{c} {' '.join(self.keywords)}\n")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PackageUseFile.py b/lib/portage/tests/env/config/test_PackageUseFile.py
index e8773e4c3..d68397f64 100644
--- a/lib/portage/tests/env/config/test_PackageUseFile.py
+++ b/lib/portage/tests/env/config/test_PackageUseFile.py
@@ -9,7 +9,6 @@ from tempfile import mkstemp
class PackageUseFileTestCase(TestCase):
-
cpv = "sys-apps/portage"
useflags = ["cdrom", "far", "boo", "flag", "blat"]
@@ -30,7 +29,7 @@ class PackageUseFileTestCase(TestCase):
def BuildFile(self):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
- f.write("%s %s" % (self.cpv, " ".join(self.useflags)))
+ f.write(f"{self.cpv} {' '.join(self.useflags)}")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/config/test_PortageModulesFile.py b/lib/portage/tests/env/config/test_PortageModulesFile.py
index 3ca6aa240..bca86e0e6 100644
--- a/lib/portage/tests/env/config/test_PortageModulesFile.py
+++ b/lib/portage/tests/env/config/test_PortageModulesFile.py
@@ -1,4 +1,4 @@
-# Copyright 2006-2009 Gentoo Foundation
+# Copyright 2006-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage import os
@@ -8,12 +8,12 @@ from tempfile import mkstemp
class PortageModulesFileTestCase(TestCase):
-
keys = ["foo.bar", "baz", "bob", "extra_key"]
invalid_keys = ["", ""]
modules = ["spanky", "zmedico", "antarus", "ricer", "5", "6"]
def setUp(self):
+ super().setUp()
self.items = {}
for k, v in zip(self.keys + self.invalid_keys, self.modules):
self.items[k] = v
@@ -32,7 +32,7 @@ class PortageModulesFileTestCase(TestCase):
fd, self.fname = mkstemp()
f = os.fdopen(fd, "w")
for k, v in self.items.items():
- f.write("%s=%s\n" % (k, v))
+ f.write(f"{k}={v}\n")
f.close()
def NukeFile(self):
diff --git a/lib/portage/tests/env/meson.build b/lib/portage/tests/env/meson.build
new file mode 100644
index 000000000..e26216199
--- /dev/null
+++ b/lib/portage/tests/env/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/env',
+ pure : not native_extensions
+)
+
+subdir('config')
diff --git a/lib/portage/tests/glsa/meson.build b/lib/portage/tests/glsa/meson.build
new file mode 100644
index 000000000..72817041a
--- /dev/null
+++ b/lib/portage/tests/glsa/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_security_set.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/glsa',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/glsa/test_security_set.py b/lib/portage/tests/glsa/test_security_set.py
index 68265913e..1206d9f80 100644
--- a/lib/portage/tests/glsa/test_security_set.py
+++ b/lib/portage/tests/glsa/test_security_set.py
@@ -1,7 +1,6 @@
-# Copyright 2013-2014 Gentoo Foundation
+# Copyright 2013-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import io
import portage
from portage import os, _encodings
@@ -11,9 +10,10 @@ from portage.tests.resolver.ResolverPlayground import (
ResolverPlaygroundTestCase,
)
+from portage.glsa import GlsaFormatException
-class SecuritySetTestCase(TestCase):
+class SecuritySetTestCase(TestCase):
glsa_template = """\
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet href="/xsl/glsa.xsl" type="text/xsl"?>
@@ -29,9 +29,9 @@ class SecuritySetTestCase(TestCase):
<bug>55555</bug>
<access>remote</access>
<affected>
- <package name="%(cp)s" auto="yes" arch="*">
- <unaffected range="ge">%(unaffected_version)s</unaffected>
- <vulnerable range="lt">%(unaffected_version)s</vulnerable>
+ <package name="%(cp)s" auto="yes" arch="%(arch)s">
+ <unaffected range="%(unaffected_range)s">%(unaffected_version)s</unaffected>
+ <vulnerable range="%(affected_range)s">%(affected_version)s</vulnerable>
</package>
</affected>
<background>
@@ -64,15 +64,18 @@ class SecuritySetTestCase(TestCase):
__import__("xml.etree.ElementTree")
__import__("xml.parsers.expat").parsers.expat.ExpatError
except (AttributeError, ImportError):
- return "python is missing xml support"
+ self.skipTest("python is missing xml support")
- def testSecuritySet(self):
+ def write_glsa_test_case(self, glsa_dir, glsa):
+ with open(
+ os.path.join(glsa_dir, "glsa-" + glsa["glsa_id"] + ".xml"),
+ encoding=_encodings["repo.content"],
+ mode="w",
+ ) as f:
+ f.write(self.glsa_template % glsa)
- skip_reason = self._must_skip()
- if skip_reason:
- self.portage_skip = skip_reason
- self.assertFalse(True, skip_reason)
- return
+ def testSecuritySet(self):
+ self._must_skip()
ebuilds = {
"cat/A-vulnerable-2.2": {"KEYWORDS": "x86"},
@@ -89,19 +92,31 @@ class SecuritySetTestCase(TestCase):
"glsa_id": "201301-01",
"pkgname": "A-vulnerable",
"cp": "cat/A-vulnerable",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
"unaffected_version": "2.2",
+ "affected_version": "2.2",
+ "arch": "*",
},
{
"glsa_id": "201301-02",
"pkgname": "B-not-vulnerable",
"cp": "cat/B-not-vulnerable",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
"unaffected_version": "4.4",
+ "affected_version": "4.4",
+ "arch": "*",
},
{
"glsa_id": "201301-03",
"pkgname": "NotInstalled",
"cp": "cat/NotInstalled",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
"unaffected_version": "3.5",
+ "affected_version": "3.5",
+ "arch": "*",
},
)
@@ -121,22 +136,110 @@ class SecuritySetTestCase(TestCase):
)
try:
-
portdb = playground.trees[playground.eroot]["porttree"].dbapi
glsa_dir = os.path.join(
portdb.repositories["test_repo"].location, "metadata", "glsa"
)
portage.util.ensure_dirs(glsa_dir)
for glsa in glsas:
- with io.open(
- os.path.join(glsa_dir, "glsa-" + glsa["glsa_id"] + ".xml"),
- encoding=_encodings["repo.content"],
- mode="w",
- ) as f:
- f.write(self.glsa_template % glsa)
+ self.write_glsa_test_case(glsa_dir, glsa)
for test_case in test_cases:
playground.run_TestCase(test_case)
self.assertEqual(test_case.test_success, True, test_case.fail_msg)
finally:
playground.cleanup()
+
+ def testStatelessSecuritySet(self):
+ # Tests which don't rely on the GLSA being fixed. This allows
+ # testing the format parsing with a bit more flexibility (no
+ # need to keep inventing packages).
+
+ self._must_skip()
+
+ ebuilds = {
+ "cat/A-vulnerable-2.2": {"KEYWORDS": "x86"},
+ "cat/B-not-vulnerable-4.5": {"KEYWORDS": "x86"},
+ }
+
+ installed = {
+ "cat/A-vulnerable-2.1": {"KEYWORDS": "x86"},
+ "cat/B-not-vulnerable-4.4": {"KEYWORDS": "x86"},
+ }
+
+ glsas = (
+ {
+ "glsa_id": "201301-04",
+ "pkgname": "A-vulnerable",
+ "cp": "cat/A-vulnerable",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
+ "unaffected_version": "2.2",
+ "affected_version": "2.2",
+ # Use an invalid delimiter (comma)
+ "arch": "amd64,sparc",
+ },
+ {
+ "glsa_id": "201301-05",
+ "pkgname": "A-vulnerable",
+ "cp": "cat/A-vulnerable",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
+ "unaffected_version": "2.2",
+ "affected_version": "2.2",
+ # Use an invalid arch (~arch)
+ "arch": "~amd64",
+ },
+ {
+ "glsa_id": "201301-06",
+ "pkgname": "A-vulnerable",
+ "cp": "cat/A-vulnerable",
+ "unaffected_range": "ge",
+ "affected_range": "lt",
+ "unaffected_version": "2.2",
+ "affected_version": "2.2",
+ # Two valid arches followed by an invalid one
+ "arch": "amd64 sparc $$$$",
+ },
+ {
+ "glsa_id": "201301-07",
+ "pkgname": "A-vulnerable",
+ "cp": "cat/A-vulnerable",
+ "unaffected_range": "None",
+ "affected_range": "lt",
+ "unaffected_version": "2.2",
+ "affected_version": "2.2",
+ "arch": "*",
+ },
+ )
+
+ world = ["cat/A"]
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@security"],
+ success=True,
+ mergelist=["cat/A-vulnerable-2.2"],
+ ),
+ )
+
+ # Give each GLSA a clean slate
+ for glsa in glsas:
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, world=world, debug=False
+ )
+
+ try:
+ portdb = playground.trees[playground.eroot]["porttree"].dbapi
+ glsa_dir = os.path.join(
+ portdb.repositories["test_repo"].location, "metadata", "glsa"
+ )
+ portage.util.ensure_dirs(glsa_dir)
+
+ self.write_glsa_test_case(glsa_dir, glsa)
+
+ with self.assertRaises(GlsaFormatException):
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/__init__.py b/lib/portage/tests/gpkg/__init__.py
new file mode 100644
index 000000000..532918b6a
--- /dev/null
+++ b/lib/portage/tests/gpkg/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
diff --git a/lib/portage/tests/gpkg/__test__.py b/lib/portage/tests/gpkg/__test__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/lib/portage/tests/gpkg/__test__.py
diff --git a/lib/portage/tests/gpkg/meson.build b/lib/portage/tests/gpkg/meson.build
new file mode 100644
index 000000000..156319b35
--- /dev/null
+++ b/lib/portage/tests/gpkg/meson.build
@@ -0,0 +1,15 @@
+py.install_sources(
+ [
+ 'test_gpkg_checksum.py',
+ 'test_gpkg_gpg.py',
+ 'test_gpkg_metadata_update.py',
+ 'test_gpkg_metadata_url.py',
+ 'test_gpkg_path.py',
+ 'test_gpkg_size.py',
+ 'test_gpkg_stream.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/gpkg',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/gpkg/test_gpkg_checksum.py b/lib/portage/tests/gpkg/test_gpkg_checksum.py
new file mode 100644
index 000000000..ba80aefa7
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_checksum.py
@@ -0,0 +1,376 @@
+# Copyright 2006-2020 Gentoo Foundation
+# Portage Unit Testing Functionality
+
+import io
+import tarfile
+import tempfile
+from os import urandom
+
+from portage import os
+from portage import shutil
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.gpkg import gpkg
+from portage.exception import (
+ InvalidBinaryPackageFormat,
+ DigestException,
+ MissingSignature,
+)
+
+
+class test_gpkg_checksum_case(TestCase):
+ def test_gpkg_missing_header(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
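+ # Copy every member except the gpkg version header.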
+ if f.name != os.path.join("test", binpkg_1.gpkg_version):
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ InvalidBinaryPackageFormat,
+ binpkg_2.decompress,
+ os.path.join(tmpdir, "test"),
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_missing_manifest(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
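+ # Copy every member except the Manifest.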
+ if f.name != os.path.join("test", "Manifest"):
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ MissingSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_missing_files(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data2"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
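+ # Drop the image tarball while keeping the Manifest that lists it.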
+ if "image.tar" not in f.name:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_extra_files(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ tar_2.addfile(f, tar_1.extractfile(f))
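+ # Smuggle in an extra member that the Manifest does not list.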
+ data_tarinfo = tarfile.TarInfo(os.path.join("test", "data2"))
+ data_tarinfo.size = len(data)
+ data2 = io.BytesIO(data)
+ tar_2.addfile(data_tarinfo, data2)
+ data2.close()
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_incorrect_checksum(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == os.path.join("test", "Manifest"):
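+ # Corrupt the last 16 bytes of the Manifest so its recorded
+ # checksums no longer match.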
+ data = io.BytesIO(tar_1.extractfile(f).read())
+ data_view = data.getbuffer()
+ data_view[-16:] = b"20a6d80ab0320fh9"
+ del data_view
+ tar_2.addfile(f, data)
+ data.close()
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_duplicate_files(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(100)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ tar_2.addfile(f, tar_1.extractfile(f))
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ InvalidBinaryPackageFormat,
+ binpkg_2.decompress,
+ os.path.join(tmpdir, "test"),
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_manifest_duplicate_files(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(100)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == os.path.join("test", "Manifest"):
+ manifest = tar_1.extractfile(f).read()
+ data = io.BytesIO(manifest)
+ # Seek to the end before appending a duplicate copy of the Manifest.
+ data.seek(0, io.SEEK_END)
+ data.write(b"\n")
+ data.write(manifest)
+ f.size = data.tell()
+ data.seek(0)
+ tar_2.addfile(f, data)
+ data.close()
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_different_size_file(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature -gpg-keepalive"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(100)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if "image" in f.name:
+ data = tar_1.extractfile(f).read()
+ data = data + b"1234"
+ f.size = len(data)
+ tar_2.addfile(f, io.BytesIO(data))
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ DigestException,
+ binpkg_2.decompress,
+ os.path.join(tmpdir, "test"),
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_gpg.py b/lib/portage/tests/gpkg/test_gpkg_gpg.py
new file mode 100644
index 000000000..d7eae4a82
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_gpg.py
@@ -0,0 +1,395 @@
+# Copyright 2022-2024 Gentoo Authors
+# Portage Unit Testing Functionality
+
+import io
+import tarfile
+import tempfile
+from os import urandom
+
+from portage import os
+from portage import shutil
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.gpkg import gpkg
+from portage.gpg import GPG
+from portage.exception import MissingSignature, InvalidSignature
+
+
+class test_gpkg_gpg_case(TestCase):
+ def test_gpkg_missing_manifest_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing ' 'binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == os.path.join("test", "Manifest"):
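+ # Strip the PGP armor markers so the Manifest no longer carries
+ # a valid signature.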
+ manifest = tar_1.extractfile(f).read().decode("UTF-8")
+ manifest = manifest.replace(
+ "-----BEGIN PGP SIGNATURE-----", ""
+ )
+ manifest = manifest.replace(
+ "-----END PGP SIGNATURE-----", ""
+ )
+ manifest_data = io.BytesIO(manifest.encode("UTF-8"))
+ manifest_data.seek(0, io.SEEK_END)
+ f.size = manifest_data.tell()
+ manifest_data.seek(0)
+ tar_2.addfile(f, manifest_data)
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+
+ self.assertRaises(
+ InvalidSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_missing_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing ' 'binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
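+ # Drop the detached signature members from the copy.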
+ if f.name.endswith(".sig"):
+ pass
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+ self.assertRaises(
+ MissingSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_ignore_signature(self):
+ gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]
+
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing ' 'binpkg-ignore-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ f'BINPKG_GPG_SIGNING_BASE_COMMAND="flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --batch --no-tty --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]"',
+ 'BINPKG_GPG_SIGNING_DIGEST="SHA512"',
+ f'BINPKG_GPG_SIGNING_GPG_HOME="{gpg_test_path}"',
+ 'BINPKG_GPG_SIGNING_KEY="0x8812797DDF1DD192"',
+ 'BINPKG_GPG_VERIFY_BASE_COMMAND="/usr/bin/gpg --verify --batch --no-tty --yes --no-auto-check-trustdb --status-fd 2 [PORTAGE_CONFIG] [SIGNATURE]"',
+ f'BINPKG_GPG_VERIFY_GPG_HOME="{gpg_test_path}"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
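+ # With binpkg-ignore-signature enabled, decompression is expected to
+ # succeed even though the signing key is untrusted.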
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_2.decompress(os.path.join(tmpdir, "test"))
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_auto_use_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing '
+ '-binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name.endswith(".sig"):
+ pass
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+ self.assertRaises(
+ MissingSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_invalid_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing ' 'binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == os.path.join("test", "Manifest"):
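+ # Substitute a Manifest whose signature was made over different
+ # contents, so verification must fail.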
+ sig = b"""
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA512
+
+DATA test/image.tar.zst 1049649 BLAKE2B 3112adba9c09023962f26d9dcbf8e74107c05220f2f29aa2ce894f8a4104c3bb238f87095df73735befcf1e1f6039fc3abf4defa87e68ce80f33dd01e09c055a SHA512 9f584727f2e20a50a30e0077b94082c8c1f517ebfc9978eb3281887e24458108e73d1a2ce82eb0b59f5df7181597e4b0a297ae68bbfb36763aa052e6bdbf2c59
+DATA test/image.tar.zst.sig 833 BLAKE2B 214724ae4ff9198879c8c960fd8167632e27982c2278bb873f195abe75b75afa1ebed4c37ec696f5f5bc35c3a1184b60e0b50d56695b072b254f730db01eddb5 SHA512 67316187da8bb6b7a5f9dc6a42ed5c7d72c6184483a97f23c0bebd8b187ac9268e0409eb233c935101606768718c99eaa5699037d6a68c2d88c9ed5331a3f73c
+-----BEGIN PGP SIGNATURE-----
+
+iQIzBAEBCgAdFiEEBrOjEb13XCgNIqkwXZDqBjUhd/YFAmFazXEACgkQXZDqBjUh
+d/YFZA//eiXkYAS2NKxim6Ppr1HcZdjU1f6H+zyQzC7OdPkAh7wsVXpSr1aq+giD
+G4tNtI6nsFokpA5CMhDf+ffBofKmFY5plk9zyQHr43N/RS5G6pcb2LHk0mQqgIdB
+EsZRRD75Na4uGDWjuNHRmsasPTsc9qyW7FLckjwUsVmk9foAoiLYYaTsilsEGqXD
+Bl/Z6PaQXvdd8txbcP6dOXfhVT06b+RWcnHI06KQrmFkZjZQh/7bCIeCVwNbXr7d
+Obo8SVzCrQbTONei57AkyuRfnPqBfP61k8rQtcDUmCckQQfyaRwoW2nDIewOPfIH
+xfvM137to2GEI2RR1TpWmGfu3iQzgC71f4svdX9Tyi5N7aFmfud7LZs6/Un3IdVk
+ZH9/AmRzeH6hKllqSv/6WuhjsTNvr0bOzGbskkhqlLga2tml08gHFYOMWRJb/bRz
+N8FZMhHzFoc0hsG8SU9uC+OeW+y5NdqpbRnQwgABmAiKEpgAPnABTsr0HjyxvjY+
+uCUdvMMHvnTxTjNEZ3Q+UQ2VsSoZzPbW9Y4PuM0XxxmTI8htdn4uIhy9dLNPsJmB
+eTE8aov/1uKq9VMsYC8wcx5vLMaR7/O/9XstP+r6PaZwiLlyrKHGexV4O52sj6LC
+qGAN3VUF+8EsdcsV781H0F86PANhyBgEYTGDrnItTGe3/vAPjCo=
+=S/Vn
+-----END PGP SIGNATURE-----
+"""
+ data = io.BytesIO(sig)
+ f.size = len(sig)
+ tar_2.addfile(f, data)
+ data.close()
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+ self.assertRaises(
+ InvalidSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_untrusted_signature(self):
+ gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]
+
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ f'BINPKG_GPG_SIGNING_BASE_COMMAND="flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --batch --no-tty --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]"',
+ 'BINPKG_GPG_SIGNING_DIGEST="SHA512"',
+ f'BINPKG_GPG_SIGNING_GPG_HOME="{gpg_test_path}"',
+ 'BINPKG_GPG_SIGNING_KEY="0x8812797DDF1DD192"',
+ 'BINPKG_GPG_VERIFY_BASE_COMMAND="/usr/bin/gpg --verify --batch --no-tty --yes --no-auto-check-trustdb --status-fd 2 [PORTAGE_CONFIG] [SIGNATURE]"',
+ f'BINPKG_GPG_VERIFY_GPG_HOME="{gpg_test_path}"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
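+ # The make.conf overrides above sign with a key that the verifying
+ # keyring does not trust, so verifying the package we just created
+ # should raise InvalidSignature.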
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ self.assertRaises(
+ InvalidSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_unknown_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'FEATURES="${FEATURES} binpkg-signing binpkg-request-signature"',
+ 'BINPKG_FORMAT="gpkg"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ data = urandom(1048576)
+ with open(os.path.join(orig_full_path, "data"), "wb") as f:
+ f.write(data)
+
+ binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
+ binpkg_1.compress(orig_full_path, {})
+
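+ # Replace the Manifest with a message signed by a key unknown to the
+ # test keyring, so verification should fail with InvalidSignature.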
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == os.path.join("test", "Manifest"):
+ sig = b"""
+-----BEGIN PGP SIGNED MESSAGE-----
+Hash: SHA256
+
+
+DATA test/image.tar.zst 1049649 BLAKE2B 3112adba9c09023962f26d9dcbf8e74107c05220f2f29aa2ce894f8a4104c3bb238f87095df73735befcf1e1f6039fc3abf4defa87e68ce80f33dd01e09c055a SHA512 9f584727f2e20a50a30e0077b94082c8c1f517ebfc9978eb3281887e24458108e73d1a2ce82eb0b59f5df7181597e4b0a297ae68bbfb36763aa052e6bdbf2c59
+DATA test/image.tar.zst.sig 833 BLAKE2B 214724ae4ff9198879c8c960fd8167632e27982c2278bb873f195abe75b75afa1ebed4c37ec696f5f5bc35c3a1184b60e0b50d56695b072b254f730db01eddb5 SHA512 67316187da8bb6b7a5f9dc6a42ed5c7d72c6184483a97f23c0bebd8b187ac9268e0409eb233c935101606768718c99eaa5699037d6a68c2d88c9ed5331a3f73c
+-----BEGIN PGP SIGNATURE-----
+
+iNUEARYIAH0WIQSMe+CQzU+/D/DeMitA3PGOlxUHlQUCYVrQal8UgAAAAAAuAChp
+c3N1ZXItZnByQG5vdGF0aW9ucy5vcGVucGdwLmZpZnRoaG9yc2VtYW4ubmV0OEM3
+QkUwOTBDRDRGQkYwRkYwREUzMjJCNDBEQ0YxOEU5NzE1MDc5NQAKCRBA3PGOlxUH
+lbmTAP4jdhMTW6g550/t0V7XcixqVtBockOTln8hZrZIQrjAJAD/caDkxgz5Xl8C
+EP1pgSXXGtlUnv6akg/wueFJKEr9KQs=
+=edEg
+-----END PGP SIGNATURE-----
+"""
+ data = io.BytesIO(sig)
+ f.size = len(sig)
+ tar_2.addfile(f, data)
+ data.close()
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))
+ self.assertRaises(
+ InvalidSignature, binpkg_2.decompress, os.path.join(tmpdir, "test")
+ )
+
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_metadata_update.py b/lib/portage/tests/gpkg/test_gpkg_metadata_update.py
new file mode 100644
index 000000000..51ad8b404
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_metadata_update.py
@@ -0,0 +1,56 @@
+# Copyright 2006-2020 Gentoo Foundation
+# Portage Unit Testing Functionality
+
+import tempfile
+from os import urandom
+
+from portage import os
+from portage import shutil
+from portage.util._compare_files import compare_files
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.gpkg import gpkg
+
+
+class test_gpkg_metadata_case(TestCase):
+ def test_gpkg_update_metadata(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="gzip"', 'FEATURES="-binpkg-signing"'),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ meta = {"test1": b"1234567890", "test2": b"abcdef"}
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)
+
+ meta_result = test_gpkg.get_metadata()
+ self.assertEqual(meta, meta_result)
+
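+ # update_metadata() replaces the stored metadata outright rather than
+ # merging it, so only meta_new should be returned below.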
+ meta_new = {"test3": b"0987654321", "test4": b"XXXXXXXX"}
+ test_gpkg.update_metadata(meta_new)
+
+ meta_result = test_gpkg.get_metadata()
+ self.assertEqual(meta_new, meta_result)
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig/" + "test"),
+ os.path.join(tmpdir, "test/" + "test"),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_metadata_url.py b/lib/portage/tests/gpkg/test_gpkg_metadata_url.py
new file mode 100644
index 000000000..e9f411127
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_metadata_url.py
@@ -0,0 +1,159 @@
+# Copyright 2022-2024 Gentoo Authors
+# Portage Unit Testing Functionality
+
+import io
+import tarfile
+import tempfile
+from functools import partial
+from os import urandom
+from concurrent.futures import Future
+
+from portage.gpkg import gpkg
+from portage import os
+from portage import shutil
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.exception import InvalidSignature
+from portage.gpg import GPG
+
+
+class test_gpkg_metadata_url_case(TestCase):
+ def httpd(self, directory, httpd_future):
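+ # Serve the given directory on an ephemeral localhost port and hand
+ # the server object back through httpd_future so the caller can read
+ # its server_address.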
+ try:
+ import http.server
+ import socketserver
+ except ImportError:
+ self.skipTest("http.server module not available")
+
+ Handler = partial(http.server.SimpleHTTPRequestHandler, directory=directory)
+
+ with socketserver.TCPServer(("127.0.0.1", 0), Handler) as httpd:
+ httpd_future.set_result(httpd)
+ httpd.serve_forever()
+
+ def start_http_server(self, directory):
+ try:
+ import threading
+ except ImportError:
+ self.skipTest("threading module not available")
+
+ httpd_future = Future()
+ server = threading.Thread(
+ target=self.httpd, args=(directory, httpd_future), daemon=True
+ )
+ server.start()
+ return httpd_future.result()
+
+ def test_gpkg_get_metadata_url(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'BINPKG_COMPRESS="gzip"',
+ 'FEATURES="${FEATURES} -binpkg-signing '
+ '-binpkg-request-signature"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ server = None
+ try:
+ settings = playground.settings
+ server = self.start_http_server(tmpdir)
+
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ meta = {
+ "test1": b"{abcdefghijklmnopqrstuvwxyz, 1234567890}",
+ "test2": urandom(102400),
+ }
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)
+
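+ # get_metadata_url() should return the same metadata dict that was
+ # passed to compress(), fetched back over HTTP.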
+ meta_from_url = test_gpkg.get_metadata_url(
+ "http://{}:{}/test.gpkg.tar".format(*server.server_address)
+ )
+
+ self.assertEqual(meta, meta_from_url)
+ finally:
+ if server is not None:
+ server.shutdown()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_get_metadata_url_unknown_signature(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": (
+ 'BINPKG_COMPRESS="gzip"',
+ 'FEATURES="${FEATURES} binpkg-signing binpkg-request-signature"',
+ ),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+ gpg = None
+ server = None
+ try:
+ settings = playground.settings
+ gpg = GPG(settings)
+ gpg.unlock()
+
+ server = self.start_http_server(tmpdir)
+
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test-1.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ meta = {
+ "test1": b"{abcdefghijklmnopqrstuvwxyz, 1234567890}",
+ "test2": urandom(102400),
+ }
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)
+
+ with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
+ with tarfile.open(
+ os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
+ ) as tar_2:
+ for f in tar_1.getmembers():
+ if f.name == "test/metadata.tar.gz":
+ sig = b"""
+-----BEGIN PGP SIGNATURE-----
+
+iHUEABYIAB0WIQRVhCbPGi/rhGTq4nV+k2dcK9uyIgUCXw4ehAAKCRB+k2dcK9uy
+IkCfAP49AOYjzuQPP0n5P0SGCINnAVEXN7QLQ4PurY/lt7cT2gEAq01stXjFhrz5
+87Koh+ND2r5XfQsz3XeBqbb/BpmbEgo=
+=sc5K
+-----END PGP SIGNATURE-----
+"""
+ data = io.BytesIO(sig)
+ f.size = len(sig)
+ tar_2.addfile(f, data)
+ data.close()
+ else:
+ tar_2.addfile(f, tar_1.extractfile(f))
+
+ test_gpkg = gpkg(settings, "test")
+ self.assertRaises(
+ InvalidSignature,
+ test_gpkg.get_metadata_url,
+ "http://{}:{}/test-2.gpkg.tar".format(*server.server_address),
+ )
+ finally:
+ if gpg is not None:
+ gpg.stop()
+ if server is not None:
+ server.shutdown()
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_path.py b/lib/portage/tests/gpkg/test_gpkg_path.py
new file mode 100644
index 000000000..19451e2e9
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_path.py
@@ -0,0 +1,371 @@
+# Copyright 2022-2024 Gentoo Authors
+# Portage Unit Testing Functionality
+
+import tempfile
+import tarfile
+import io
+from os import urandom
+
+from portage import os
+from portage import shutil
+from portage.util._compare_files import compare_files
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.gpkg import gpkg
+
+
+class test_gpkg_path_case(TestCase):
+ def test_gpkg_short_path(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ path_name = (
+ "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ "mmmmnnnn/oooopppp/qqqqrrrr/sssstttt/"
+ )
+ orig_full_path = os.path.join(tmpdir, "orig/" + path_name)
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
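+ # The tuple returned by _check_pre_image_files() appears to encode
+ # (longest path length, longest file name length, longest link length,
+ # largest file size, total file size); the values asserted here and in
+ # the tests below are consistent with that reading.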
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (95, 4, 0, 1048576, 1048576))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.USTAR_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig/" + path_name + "test"),
+ os.path.join(tmpdir, "test/" + path_name + "test"),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_long_path(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+
+ path_name = (
+ "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ "mmmmnnnn/oooopppp/qqqqrrrr/sssstttt/uuuuvvvv/wwwwxxxx/"
+ "yyyyzzzz/00001111/22223333/44445555/66667777/88889999/"
+ "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ "mmmmnnnn/oooopppp/qqqqrrrr/sssstttt/uuuuvvvv/wwwwxxxx/"
+ "yyyyzzzz/00001111/22223333/44445555/66667777/88889999/"
+ )
+ orig_full_path = os.path.join(tmpdir, "orig/" + path_name)
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (329, 4, 0, 1048576, 1048576))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
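+ # The 324-character directory path cannot be represented in ustar
+ # (100-byte name plus 155-byte prefix), so the image tar is expected
+ # to switch to GNU format.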
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.GNU_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig/" + path_name + "test"),
+ os.path.join(tmpdir, "test/" + path_name + "test"),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_non_ascii_path(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+
+ path_name = "中文测试/日本語テスト/한국어시험/"
+ orig_full_path = os.path.join(tmpdir, "orig/" + path_name)
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (53, 4, 0, 1048576, 1048576))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.USTAR_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig/" + path_name + "test"),
+ os.path.join(tmpdir, "test/" + path_name + "test"),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_symlink_path(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+ os.symlink(
+ "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ "mmmmnnnn/oooopppp/qqqqrrrr/sssstttt/uuuuvvvv/wwwwxxxx/"
+ "yyyyzzzz/00001111/22223333/44445555/66667777/88889999/test",
+ os.path.join(orig_full_path, "a_long_symlink"),
+ )
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (0, 14, 166, 0, 0))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
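+ # The 166-character symlink target exceeds the 100-byte ustar linkname
+ # field, so the image tar must use GNU format.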
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.GNU_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig/", "a_long_symlink"),
+ os.path.join(tmpdir, "test/", "a_long_symlink"),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_long_hardlink_path(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+
+ path_name = (
+ "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ "mmmmnnnn/oooopppp/qqqqrrrr/sssstttt/uuuuvvvv/wwwwxxxx/"
+ )
+ file_name = (
+ "test-A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ "A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ "A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ )
+ orig_full_path = os.path.join(tmpdir, "orig", path_name)
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ os.link(
+ os.path.join(orig_full_path, "test"),
+ os.path.join(orig_full_path, file_name),
+ )
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (113, 158, 272, 1048576, 2097152))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
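+ # The 158-character basename and the even longer hardlink target both
+ # exceed ustar's 100-byte fields, so the image tar must use GNU format.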
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.GNU_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig", path_name, file_name),
+ os.path.join(tmpdir, "test", path_name, file_name),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
+
+ def test_gpkg_long_filename(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="none"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+ path_name = "aaaabbbb/ccccdddd/eeeeffff/gggghhhh/iiiijjjj/kkkkllll/"
+ file_name = (
+ "test1234567890"
+ "A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ "A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ "A-B-C-D-E-F-G-H-I-J-K-L-M-N-O-P-Q-R-S-T-U-V-W-X-Y-Z"
+ )
+
+ orig_full_path = os.path.join(tmpdir, "orig/" + path_name)
+ os.makedirs(orig_full_path)
+ with open(os.path.join(orig_full_path, file_name), "wb") as test_file:
+ test_file.write(urandom(1048576))
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (59, 167, 0, 1048576, 1048576))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.USTAR_FORMAT
+ )
+
+ with tarfile.open(gpkg_file_loc, "r") as container:
+ metadata = io.BytesIO(container.extractfile("test/metadata.tar").read())
+ self.assertEqual(
+ test_gpkg._get_tar_format(metadata), tarfile.USTAR_FORMAT
+ )
+ metadata.close()
+
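+ # The 167-character file name exceeds the 100-byte ustar name field,
+ # so the image tar must use GNU format.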
+ image = io.BytesIO(container.extractfile("test/image.tar").read())
+ self.assertEqual(test_gpkg._get_tar_format(image), tarfile.GNU_FORMAT)
+ image.close()
+
+ test_gpkg.decompress(os.path.join(tmpdir, "test"))
+ r = compare_files(
+ os.path.join(tmpdir, "orig", path_name, file_name),
+ os.path.join(tmpdir, "test", path_name, file_name),
+ skipped_types=("atime", "mtime", "ctime"),
+ )
+ self.assertEqual(r, ())
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_size.py b/lib/portage/tests/gpkg/test_gpkg_size.py
new file mode 100644
index 000000000..9a4ff0574
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_size.py
@@ -0,0 +1,54 @@
+# Copyright 2006-2020 Gentoo Foundation
+# Portage Unit Testing Functionality
+
+import tempfile
+import tarfile
+
+from portage import os, shutil
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.gpkg import gpkg
+
+
+class test_gpkg_large_size_case(TestCase):
+ def test_gpkg_large_size(self):
+ playground = ResolverPlayground(
+ user_config={
+ "make.conf": ('BINPKG_COMPRESS="gzip"',),
+ }
+ )
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ settings = playground.settings
+
+ orig_full_path = os.path.join(tmpdir, "orig/")
+ os.makedirs(orig_full_path)
+ # Check if the filesystem supports sparse files
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.truncate(1048576)
+
+ if os.stat(os.path.join(orig_full_path, "test")).st_blocks != 0:
+ self.skipTest("Filesystem does not support sparse files")
+
+ with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
+ test_file.truncate(10737418240)
+
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ test_gpkg = gpkg(settings, "test", gpkg_file_loc)
+
+ check_result = test_gpkg._check_pre_image_files(
+ os.path.join(tmpdir, "orig")
+ )
+ self.assertEqual(check_result, (0, 4, 0, 10737418240, 10737418240))
+
+ test_gpkg.compress(os.path.join(tmpdir, "orig"), {"meta": "test"})
+
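+ # A 10 GiB member overflows the 12-digit octal size field of ustar
+ # (an 8 GiB limit), so even the outer container must use GNU format.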
+ with open(gpkg_file_loc, "rb") as container:
+ # container
+ self.assertEqual(
+ test_gpkg._get_tar_format(container), tarfile.GNU_FORMAT
+ )
+ finally:
+ shutil.rmtree(tmpdir)
+ playground.cleanup()
diff --git a/lib/portage/tests/gpkg/test_gpkg_stream.py b/lib/portage/tests/gpkg/test_gpkg_stream.py
new file mode 100644
index 000000000..040590e49
--- /dev/null
+++ b/lib/portage/tests/gpkg/test_gpkg_stream.py
@@ -0,0 +1,93 @@
+# Copyright 2006-2020 Gentoo Foundation
+# Portage Unit Testing Functionality
+
+import tempfile
+import io
+import tarfile
+from os import urandom
+
+import portage.gpkg
+from portage import os
+from portage import shutil
+from portage.tests import TestCase
+from portage.exception import CompressorOperationFailed
+
+
+class test_gpkg_stream_case(TestCase):
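+ # tar_stream_reader pipes the wrapped stream through the external
+ # command when one is given; ["cat"] acts as an identity filter, so
+ # the tests can assert that the bytes round-trip unchanged.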
+ def test_gpkg_stream_reader(self):
+ data = urandom(1048576)
+ data_io = io.BytesIO(data)
+ data_io.seek(0)
+ with portage.gpkg.tar_stream_reader(data_io, ["cat"]) as test_reader:
+ data2 = test_reader.read()
+ data_io.close()
+ self.assertEqual(data, data2)
+
+ def test_gpkg_stream_reader_without_cmd(self):
+ data = urandom(1048576)
+ data_io = io.BytesIO(data)
+ data_io.seek(0)
+ with portage.gpkg.tar_stream_reader(data_io) as test_reader:
+ data2 = test_reader.read()
+ data_io.close()
+ self.assertEqual(data, data2)
+
+ def test_gpkg_stream_reader_kill(self):
+ data = urandom(1048576)
+ data_io = io.BytesIO(data)
+ data_io.seek(0)
+ with portage.gpkg.tar_stream_reader(data_io, ["cat"]) as test_reader:
+ try:
+ test_reader.kill()
+ except CompressorOperationFailed:
+ pass
+ data_io.close()
+ self.assertNotEqual(test_reader.proc.poll(), None)
+
+ def test_gpkg_stream_reader_kill_without_cmd(self):
+ data = urandom(1048576)
+ data_io = io.BytesIO(data)
+ data_io.seek(0)
+ with portage.gpkg.tar_stream_reader(data_io) as test_reader:
+ test_reader.kill()
+ data_io.close()
+ self.assertEqual(test_reader.proc, None)
+
+ def test_gpkg_stream_writer(self):
+ tmpdir = tempfile.mkdtemp()
+ try:
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ data = urandom(1048576)
+ with tarfile.open(gpkg_file_loc, "w") as test_tar:
+ test_tarinfo = tarfile.TarInfo("test")
+ with portage.gpkg.tar_stream_writer(
+ test_tarinfo, test_tar, tarfile.USTAR_FORMAT, ["cat"]
+ ) as test_writer:
+ test_writer.write(data)
+
+ with tarfile.open(gpkg_file_loc, "r") as test_tar:
+ test_tarinfo = test_tar.getmember("test")
+ data2 = test_tar.extractfile(test_tarinfo).read()
+ self.assertEqual(data, data2)
+ finally:
+ shutil.rmtree(tmpdir)
+
+ def test_gpkg_stream_writer_without_cmd(self):
+ tmpdir = tempfile.mkdtemp()
+
+ try:
+ gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
+ data = urandom(1048576)
+ with tarfile.open(gpkg_file_loc, "w") as test_tar:
+ test_tarinfo = tarfile.TarInfo("test")
+ with portage.gpkg.tar_stream_writer(
+ test_tarinfo, test_tar, tarfile.USTAR_FORMAT
+ ) as test_writer:
+ test_writer.write(data)
+
+ with tarfile.open(gpkg_file_loc, "r") as test_tar:
+ test_tarinfo = test_tar.getmember("test")
+ data2 = test_tar.extractfile(test_tarinfo).read()
+ self.assertEqual(data, data2)
+ finally:
+ shutil.rmtree(tmpdir)
diff --git a/lib/portage/tests/lafilefixer/meson.build b/lib/portage/tests/lafilefixer/meson.build
new file mode 100644
index 000000000..7a2f9b8b0
--- /dev/null
+++ b/lib/portage/tests/lafilefixer/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_lafilefixer.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/lafilefixer',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/lafilefixer/test_lafilefixer.py b/lib/portage/tests/lafilefixer/test_lafilefixer.py
index d52782fb5..c3bc67701 100644
--- a/lib/portage/tests/lafilefixer/test_lafilefixer.py
+++ b/lib/portage/tests/lafilefixer/test_lafilefixer.py
@@ -34,9 +34,9 @@ class test_lafilefixer(TestCase):
yield b""
# no dependency_libs
yield b"dlname='libfoo.so.1'\n" + b"current=6\n" + b"age=0\n" + b"revision=2\n"
- # borken dependency_libs
+ # broken dependency_libs
yield b"dlname='libfoo.so.1'\n" + b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + b"old_library='libpdf.a'\n" + b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc' \n"
- # borken dependency_libs
+ # broken dependency_libs
yield b"dlname='libfoo.so.1'\n" + b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + b"old_library='libpdf.a'\n" + b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc\n"
# crap in dependency_libs
yield b"dlname='libfoo.so.1'\n" + b"library_names='libfoo.so.1.0.2 libfoo.so.1 libfoo.so'\n" + b"old_library='libpdf.a'\n" + b"dependency_libs=' /usr/lib64/liba.la /usr/lib64/libb.la -lc /-lstdc++'\n"
diff --git a/lib/portage/tests/lazyimport/meson.build b/lib/portage/tests/lazyimport/meson.build
new file mode 100644
index 000000000..b0377dc9a
--- /dev/null
+++ b/lib/portage/tests/lazyimport/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_lazy_import_portage_baseline.py',
+ 'test_preload_portage_submodules.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/lazyimport',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/lazyimport/test_lazy_import_portage_baseline.py b/lib/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
index cf239240c..cbeba37b5 100644
--- a/lib/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
+++ b/lib/portage/tests/lazyimport/test_lazy_import_portage_baseline.py
@@ -13,12 +13,12 @@ from _emerge.SpawnProcess import SpawnProcess
class LazyImportPortageBaselineTestCase(TestCase):
-
- _module_re = re.compile(r"^(portage|repoman|_emerge)\.")
+ _module_re = re.compile(r"^(portage|_emerge)\.")
_baseline_imports = frozenset(
[
"portage.const",
+ "portage.installation",
"portage.localization",
"portage.proxy",
"portage.proxy.lazyimport",
diff --git a/lib/portage/tests/lint/meson.build b/lib/portage/tests/lint/meson.build
new file mode 100644
index 000000000..9a244519d
--- /dev/null
+++ b/lib/portage/tests/lint/meson.build
@@ -0,0 +1,12 @@
+py.install_sources(
+ [
+ 'metadata.py',
+ 'test_bash_syntax.py',
+ 'test_compile_modules.py',
+ 'test_import_modules.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/lint',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/lint/test_compile_modules.py b/lib/portage/tests/lint/test_compile_modules.py
index 7cdaa8bcc..e3941bad5 100644
--- a/lib/portage/tests/lint/test_compile_modules.py
+++ b/lib/portage/tests/lint/test_compile_modules.py
@@ -60,7 +60,7 @@ class CompileModulesTestCase(TestCase):
encoding=_encodings["content"],
errors="replace",
)
- except IOError as e:
+ except OSError as e:
# Some tests create files that are unreadable by the
# user (by design), so ignore EACCES issues.
if e.errno != errno.EACCES:
diff --git a/lib/portage/tests/lint/test_import_modules.py b/lib/portage/tests/lint/test_import_modules.py
index 5522e02d1..d0465f388 100644
--- a/lib/portage/tests/lint/test_import_modules.py
+++ b/lib/portage/tests/lint/test_import_modules.py
@@ -23,7 +23,7 @@ class ImportModulesTestCase(TestCase):
__import__(mod)
except ImportError as e:
if mod not in expected_failures:
- self.assertTrue(False, "failed to import '%s': %s" % (mod, e))
+ self.assertTrue(False, f"failed to import '{mod}': {e}")
del e
def _iter_modules(self, base_dir):
diff --git a/lib/portage/tests/locks/meson.build b/lib/portage/tests/locks/meson.build
new file mode 100644
index 000000000..509f43eaa
--- /dev/null
+++ b/lib/portage/tests/locks/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_asynchronous_lock.py',
+ 'test_lock_nonblock.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/locks',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/locks/test_asynchronous_lock.py b/lib/portage/tests/locks/test_asynchronous_lock.py
index f853e82d7..da371e7c2 100644
--- a/lib/portage/tests/locks/test_asynchronous_lock.py
+++ b/lib/portage/tests/locks/test_asynchronous_lock.py
@@ -1,14 +1,9 @@
-# Copyright 2010-2011 Gentoo Foundation
+# Copyright 2010-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import signal
import tempfile
-try:
- import dummy_threading
-except ImportError:
- dummy_threading = None
-
from portage import os
from portage import shutil
from portage.tests import TestCase
@@ -23,20 +18,16 @@ class AsynchronousLockTestCase(TestCase):
try:
path = os.path.join(tempdir, "lock_me")
for force_async in (True, False):
- for force_dummy in (
- (False,) if dummy_threading is None else (True, False)
- ):
- async_lock = AsynchronousLock(
- path=path,
- scheduler=scheduler,
- _force_async=force_async,
- _force_thread=True,
- _force_dummy=force_dummy,
- )
- async_lock.start()
- self.assertEqual(async_lock.wait(), os.EX_OK)
- self.assertEqual(async_lock.returncode, os.EX_OK)
- scheduler.run_until_complete(async_lock.async_unlock())
+ async_lock = AsynchronousLock(
+ path=path,
+ scheduler=scheduler,
+ _force_async=force_async,
+ _force_thread=True,
+ )
+ async_lock.start()
+ self.assertEqual(async_lock.wait(), os.EX_OK)
+ self.assertEqual(async_lock.returncode, os.EX_OK)
+ scheduler.run_until_complete(async_lock.async_unlock())
async_lock = AsynchronousLock(
path=path,
diff --git a/lib/portage/tests/locks/test_lock_nonblock.py b/lib/portage/tests/locks/test_lock_nonblock.py
index e3f9b4d02..d30dfe113 100644
--- a/lib/portage/tests/locks/test_lock_nonblock.py
+++ b/lib/portage/tests/locks/test_lock_nonblock.py
@@ -1,6 +1,8 @@
-# Copyright 2011-2020 Gentoo Authors
+# Copyright 2011-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import multiprocessing
+import sys
import tempfile
import traceback
@@ -17,38 +19,35 @@ class LockNonblockTestCase(TestCase):
try:
path = os.path.join(tempdir, "lock_me")
lock1 = portage.locks.lockfile(path)
- pid = os.fork()
- if pid == 0:
- portage._ForkWatcher.hook(portage._ForkWatcher)
- portage.locks._close_fds()
- # Disable close_fds since we don't exec
- # (see _setup_pipes docstring).
- portage.process._setup_pipes({0: 0, 1: 1, 2: 2}, close_fds=False)
- rval = 2
- try:
- try:
- lock2 = portage.locks.lockfile(path, flags=os.O_NONBLOCK)
- except portage.exception.TryAgain:
- rval = os.EX_OK
- else:
- rval = 1
- portage.locks.unlockfile(lock2)
- except SystemExit:
- raise
- except:
- traceback.print_exc()
- finally:
- os._exit(rval)
-
- self.assertEqual(pid > 0, True)
- pid, status = os.waitpid(pid, 0)
- self.assertEqual(os.WIFEXITED(status), True)
- self.assertEqual(os.WEXITSTATUS(status), os.EX_OK)
+ proc = multiprocessing.Process(target=self._lock_subprocess, args=(path,))
+ proc.start()
+ self.assertEqual(proc.pid > 0, True)
+ proc.join()
+ self.assertEqual(proc.exitcode, os.EX_OK)
portage.locks.unlockfile(lock1)
finally:
shutil.rmtree(tempdir)
+ @staticmethod
+ def _lock_subprocess(path):
+ portage.locks._close_fds()
+ # Disable close_fds since we don't exec
+ # (see _setup_pipes docstring).
+ portage.process._setup_pipes({0: 0, 1: 1, 2: 2}, close_fds=False)
+ rval = 2
+ try:
+ try:
+ lock2 = portage.locks.lockfile(path, flags=os.O_NONBLOCK)
+ except portage.exception.TryAgain:
+ rval = os.EX_OK
+ else:
+ rval = 1
+ portage.locks.unlockfile(lock2)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(rval)
+
def testLockNonblock(self):
self._testLockNonblock()
diff --git a/lib/portage/tests/meson.build b/lib/portage/tests/meson.build
new file mode 100644
index 000000000..7eb65b6bb
--- /dev/null
+++ b/lib/portage/tests/meson.build
@@ -0,0 +1,31 @@
+py.install_sources(
+ [
+ 'conftest.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/tests',
+ pure : not native_extensions
+)
+
+subdir('bin')
+subdir('dbapi')
+subdir('dep')
+subdir('ebuild')
+subdir('emerge')
+subdir('env')
+subdir('glsa')
+subdir('gpkg')
+subdir('lafilefixer')
+subdir('lazyimport')
+subdir('lint')
+subdir('locks')
+subdir('news')
+subdir('process')
+subdir('resolver')
+subdir('sets')
+subdir('sync')
+subdir('unicode')
+subdir('update')
+subdir('util')
+subdir('versions')
+subdir('xpak')
diff --git a/lib/portage/tests/news/meson.build b/lib/portage/tests/news/meson.build
new file mode 100644
index 000000000..270b8bf13
--- /dev/null
+++ b/lib/portage/tests/news/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_NewsItem.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/news',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/news/test_NewsItem.py b/lib/portage/tests/news/test_NewsItem.py
index 5fca58674..7a8393c51 100644
--- a/lib/portage/tests/news/test_NewsItem.py
+++ b/lib/portage/tests/news/test_NewsItem.py
@@ -1,109 +1,400 @@
# test_NewsItem.py -- Portage Unit Testing Functionality
-# Copyright 2007-2019 Gentoo Authors
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from portage import os
from portage.tests import TestCase
-from portage.news import NewsItem
-from portage.dbapi.virtual import testdbapi
-from tempfile import mkstemp
+from portage.news import NewsItem, NewsManager
+from portage.dbapi.virtual import fakedbapi
-# TODO(antarus) Make newsitem use a loader so we can load using a string instead of a tempfile
+from dataclasses import dataclass
+from string import Template
+from typing import Optional
+from unittest.mock import MagicMock, mock_open, patch
+import textwrap
-class NewsItemTestCase(TestCase):
- """These tests suck: they use your running config instead of making their own"""
+# The specification for news items is GLEP 42 ("Critical News Reporting"):
+# https://www.gentoo.org/glep/glep-0042.html
+
+
+@dataclass
+class FakeNewsItem(NewsItem):
+ title: str
+ author: str
+ content_type: str
+ posted: str
+ revision: int
+ news_item_format: str
+ content: str
+ display_if_installed: Optional[list[str]] = None
+ display_if_profile: Optional[list[str]] = None
+ display_if_keyword: Optional[list[str]] = None
+
+ item_template_header = Template(
+ textwrap.dedent(
+ """
+ Title: ${title}
+ Author: ${author}
+ Content-Type: ${content_type}
+ Posted: ${posted}
+ Revision: ${revision}
+ News-Item-Format: ${news_item_format}
+ """
+ )
+ )
+
+ def __post_init__(self):
+ super().__init__(path="mocked_news", name=self.title)
+
+ def isValid(self):
+ with patch("builtins.open", mock_open(read_data=str(self))):
+ return super().isValid()
+
+ # TODO: Migrate __str__ to NewsItem? NewsItem doesn't actually parse
+ # all fields right now though.
+ def __str__(self) -> str:
+ item = self.item_template_header.substitute(
+ title=self.title,
+ author=self.author,
+ content_type=self.content_type,
+ posted=self.posted,
+ revision=self.revision,
+ news_item_format=self.news_item_format,
+ )
+
+ for package in self.display_if_installed:
+ item += f"Display-If-Installed: {package}\n"
- fakeItem = """
-Title: YourSQL Upgrades from 4.0 to 4.1
-Author: Ciaran McCreesh <ciaranm@gentoo.org>
-Content-Type: text/plain
-Posted: 01-Nov-2005
-Revision: 1
-News-Item-Format: 1.0
-#Display-If-Installed:
-#Display-If-Profile:
-#Display-If-Arch:
+ for profile in self.display_if_profile:
+ item += f"Display-If-Profile: {profile}\n"
-YourSQL databases created using YourSQL version 4.0 are incompatible
-with YourSQL version 4.1 or later. There is no reliable way to
-automate the database format conversion, so action from the system
-administrator is required before an upgrade can take place.
+ for keyword in self.display_if_keyword:
+ item += f"Display-If-Keyword: {keyword}\n"
-Please see the Gentoo YourSQL Upgrade Guide for instructions:
+ item += f"\n{self.content}"
- http://www.gentoo.org/doc/en/yoursql-upgrading.xml
+ return item
-Also see the official YourSQL documentation:
- http://dev.yoursql.com/doc/refman/4.1/en/upgrading-from-4-0.html
+class NewsItemTestCase(TestCase):
+ # Default values for testing
+ placeholders = {
+ "title": "YourSQL Upgrades from 4.0 to 4.1",
+ "author": "Ciaran McCreesh <ciaranm@gentoo.org>",
+ "content_type": "Content-Type: text/plain",
+ "posted": "01-Nov-2005",
+ "revision": 1,
+ "news_item_format": "1.0",
+ "display_if_installed": [],
+ "display_if_profile": [],
+ "display_if_keyword": [],
+ "content": textwrap.dedent(
+ """
+ YourSQL databases created using YourSQL version 4.0 are incompatible
+ with YourSQL version 4.1 or later. There is no reliable way to
+ automate the database format conversion, so action from the system
+ administrator is required before an upgrade can take place.
+
+ Please see the Gentoo YourSQL Upgrade Guide for instructions:
+
+ https://gentoo.org/doc/en/yoursql-upgrading.xml
+
+ Also see the official YourSQL documentation:
+
+ https://dev.example.com/doc/refman/4.1/en/upgrading-from-4-0.html
-After upgrading, you should also recompile any packages which link
-against YourSQL:
+ After upgrading, you should also recompile any packages which link
+ against YourSQL:
- revdep-rebuild --library=libyoursqlclient.so.12
+ revdep-rebuild --library=libyoursqlclient.so.12
-The revdep-rebuild tool is provided by app-portage/gentoolkit.
-"""
+ The revdep-rebuild tool is provided by app-portage/gentoolkit.
+ """
+ ),
+ }
- def setUp(self):
- self.profile = "/var/db/repos/gentoo/profiles/default-linux/x86/2007.0/"
+ def setUp(self) -> None:
+ super().setUp()
+ self.profile_base = "/var/db/repos/gentoo/profiles/default-linux"
+ self.profile = f"{self.profile_base}/x86/2007.0/"
self.keywords = "x86"
- # Use fake/test dbapi to avoid slow tests
- self.vardb = testdbapi()
- # self.vardb.inject_cpv('sys-apps/portage-2.0', { 'SLOT' : 0 })
# Consumers only use ARCH, so avoid portage.settings by using a dict
self.settings = {"ARCH": "x86"}
+ # Use fake/test dbapi to avoid slow tests
+ self.vardb = fakedbapi(self.settings)
- def testDisplayIfProfile(self):
- tmpItem = self.fakeItem[:].replace(
- "#Display-If-Profile:", "Display-If-Profile: %s" % self.profile
- )
+ def _createNewsItem(self, *args) -> FakeNewsItem:
+ # Use our placeholders unless overridden
+ news_args = self.placeholders.copy()
+ # Substitute in what we're given to allow for easily passing
+ # just custom values.
+ news_args.update(*args)
+
+ return FakeNewsItem(**news_args)
- item = self._processItem(tmpItem)
- try:
+ def _checkAndCreateNewsItem(
+ self, news_args: dict, relevant: bool = True, reason: str = ""
+ ) -> FakeNewsItem:
+ return self._checkNewsItem(self._createNewsItem(news_args), relevant, reason)
+
+ def _checkNewsItem(self, item: NewsItem, relevant: bool = True, reason: str = ""):
+ self.assertTrue(item.isValid())
+
+ if relevant:
self.assertTrue(
item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ msg=f"Expected {item} to be relevant, but it was not!",
+ )
+ else:
+ self.assertFalse(
+ item.isRelevant(self.vardb, self.settings, self.profile),
+ msg=f"Expected {item} to be irrelevant, but it was relevant!",
)
- finally:
- os.unlink(item.path)
- def testDisplayIfInstalled(self):
- tmpItem = self.fakeItem[:].replace(
- "#Display-If-Installed:", "Display-If-Installed: %s" % "sys-apps/portage"
- )
+ def testNewsManager(self):
+ vardb = MagicMock()
+ portdb = MagicMock()
+ portdb.repositories.mainRepoLocation = MagicMock(return_value="/tmp/repo")
+ portdb.settings.profile_path = "/tmp/repo/profiles/arch/amd64"
+
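+ # _profile_path should be stored relative to the repository's
+ # profiles/ directory, never as the absolute filesystem path.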
+ news_manager = NewsManager(portdb, vardb, portdb.portdir, portdb.portdir)
+ self.assertEqual(news_manager._profile_path, "arch/amd64")
+ self.assertNotEqual(news_manager._profile_path, "tmp/repo/profiles/arch/amd64")
+
+ def testBasicNewsItem(self):
+ # Simple test with no filter fields (Display-If-*)
+ item = self._createNewsItem()
+ self.assertTrue(item.isValid())
+ self.assertTrue(item.isRelevant(self.vardb, self.settings, self.profile))
- try:
- item = self._processItem(tmpItem)
+ # Does an invalid item fail? ("a" is not a valid package name)
+ item = self._createNewsItem({"display_if_installed": "a"})
+ self.assertFalse(item.isValid())
+
+ def testDisplayIfProfile(self):
+ # We repeat all of these with the full profile path (including repo)
+ # and a relative path, as we've had issues there before.
+ # Note that we can't use _checkNewsItem() here as we override the
+ # profile value passed to isRelevant.
+ for profile_prefix in ("", self.profile_base):
+ # First, just check the simple case of one profile matching ours.
+ item = self._createNewsItem(
+ {"display_if_profile": [profile_prefix + self.profile]}
+ )
+ self.assertTrue(item.isValid())
self.assertTrue(
- item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ item.isRelevant(
+ self.vardb, self.settings, profile_prefix + self.profile
+ ),
+ msg=f"Expected {item} to be relevant, but it was not!",
)
- finally:
- os.unlink(item.path)
- def testDisplayIfKeyword(self):
- tmpItem = self.fakeItem[:].replace(
- "#Display-If-Keyword:", "Display-If-Keyword: %s" % self.keywords
- )
+ # Test the negative case: what if the only profile listed
+ # does *not* match ours?
+ item = self._createNewsItem(
+ {"display_if_profile": [profile_prefix + "profiles/i-do-not-exist"]}
+ )
+ self.assertTrue(item.isValid())
+ self.assertFalse(
+ item.isRelevant(
+ self.vardb, self.settings, profile_prefix + self.profile
+ ),
+ msg=f"Expected {item} to be irrelevant, but it was relevant!",
+ )
- try:
- item = self._processItem(tmpItem)
+ # What if several profiles are listed and we match one of them?
+ item = self._createNewsItem(
+ {
+ "display_if_profile": [
+ profile_prefix + self.profile,
+ profile_prefix + f"{self.profile_base}/amd64/2023.0",
+ ]
+ }
+ )
+ self.assertTrue(item.isValid())
self.assertTrue(
- item.isRelevant(self.vardb, self.settings, self.profile),
- msg="Expected %s to be relevant, but it was not!" % tmpItem,
+ item.isRelevant(
+ self.vardb, self.settings, profile_prefix + self.profile
+ ),
+ msg=f"Expected {item} to be relevant, but it was not!",
)
- finally:
- os.unlink(item.path)
-
- def _processItem(self, item):
- filename = None
- fd, filename = mkstemp()
- f = os.fdopen(fd, "w")
- f.write(item)
- f.close()
- try:
- return NewsItem(filename, 0)
- except TypeError:
- self.fail("Error while processing news item %s" % filename)
+
+ # What if several profiles are listed and we match none of them?
+ item = self._createNewsItem(
+ {
+ "display_if_profile": [
+ profile_prefix + f"{self.profile_base}/x86/2023.0",
+ profile_prefix + f"{self.profile_base}/amd64/2023.0",
+ ]
+ }
+ )
+ self.assertTrue(item.isValid())
+ self.assertFalse(
+ item.isRelevant(
+ self.vardb, self.settings, profile_prefix + self.profile
+ ),
+ msg=f"Expected {item} to be irrelevant, but it was relevant!",
+ )
+
+ def testDisplayIfInstalled(self):
+ self.vardb.cpv_inject("sys-apps/portage-2.0", {"SLOT": "0"})
+
+ self._checkAndCreateNewsItem({"display_if_installed": ["sys-apps/portage"]})
+
+ # Test the negative case: a single Display-If-Installed listing
+ # a package we don't have.
+ self._checkAndCreateNewsItem(
+ {"display_if_installed": ["sys-apps/i-do-not-exist"]}, False
+ )
+
+ # What about several packages and we have none of them installed?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ "dev-util/pkgcheck",
+ "dev-util/pkgdev",
+ "sys-apps/pkgcore",
+ ]
+ },
+ False,
+ )
+
+ # What about several packages and we have one of them installed?
+ self.vardb.cpv_inject("net-misc/openssh-9.2_p1", {"SLOT": "0"})
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ "net-misc/openssh",
+ "net-misc/dropbear",
+ ]
+ }
+ )
+
+ # What about several packages and we have all of them installed?
+ # Note: we already have openssh added from the above test
+ self.vardb.cpv_inject("net-misc/dropbear-2022.83", {"SLOT": "0"})
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ "net-misc/openssh",
+ "net-misc/dropbear",
+ ]
+ }
+ )
+
+ # What if we have a newer version of the listed package which
+ # shouldn't match the constraint?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ "<net-misc/openssh-9.2_p1",
+ ]
+ },
+ False,
+ )
+
+ # What if we have a newer version of the listed package which
+ # should match the constraint?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ ">=net-misc/openssh-9.2_p1",
+ ]
+ }
+ )
+
+ # What if the item lists multiple packages and we have one of
+ # them installed, but not all?
+ # (Note that openssh is already "installed" by this point because
+ # of a previous test.)
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_installed": [
+ ">=net-misc/openssh-9.2_p1",
+ "<net-misc/openssh-9.2_p1",
+ ]
+ }
+ )
+
+ def testDisplayIfKeyword(self):
+ self._checkAndCreateNewsItem({"display_if_keyword": [self.keywords]})
+
+ # Test the negative case: a keyword we don't have set.
+ self._checkAndCreateNewsItem({"display_if_keyword": ["fake-keyword"]}, False)
+
+ # What if several keywords are listed and we match one of them?
+ self._checkAndCreateNewsItem(
+ {"display_if_keyword": [self.keywords, "amd64", "~hppa"]}
+ )
+
+ # What if several keywords are listed and we match none of them?
+ self._checkAndCreateNewsItem({"display_if_keyword": ["amd64", "~hppa"]}, False)
+
+ # What if the ~keyword (testing) keyword is listed but we're keyword (stable)?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": [
+ f"~{self.keywords}",
+ ]
+ },
+ False,
+ )
+
+ # What if the stable keyword is listed but we're ~keyword (testing)?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": [
+ f"{self.keywords}",
+ ]
+ }
+ )
+
+ def testMultipleRestrictions(self):
+ # GLEP 42 specifies an algorithm for how combining restrictions
+ # should work. See https://www.gentoo.org/glep/glep-0042.html#news-item-headers.
+ # Different types of Display-If-* are ANDed, not ORed.
+
+ # What if there's a Display-If-Keyword that matches and a
+ # Display-If-Installed which does too?
+ self.vardb.cpv_inject("sys-apps/portage-2.0", {"SLOT": "0"})
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": [self.keywords],
+ "display_if_installed": ["sys-apps/portage"],
+ }
+ )
+
+ # What if there's a Display-If-Keyword that matches and a
+ # Display-If-Installed which doesn't?
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": [self.keywords],
+ "display_if_installed": ["sys-apps/i-do-not-exist"],
+ },
+ False,
+ )
+
+ # What if there's a Display-If-{Installed,Keyword,Profile} and
+ # they all match?
+ # (Note that sys-apps/portage is already "installed" by this point
+ # because of the above test.)
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": [self.keywords],
+ "display_if_installed": ["sys-apps/portage"],
+ "display_if_profile": [self.profile],
+ }
+ )
+
+ # What if there's a Display-If-{Installed,Keyword,Profile} and
+ # none of them match?
+ # (Note that sys-apps/portage is already "installed" by this point
+ # because of the above test.)
+ self._checkAndCreateNewsItem(
+ {
+ "display_if_keyword": ["i-do-not-exist"],
+ "display_if_installed": ["sys-apps/i-do-not-exist"],
+ "display_if_profile": [self.profile_base + "/i-do-not-exist"],
+ },
+ False,
+ )
diff --git a/lib/portage/tests/process/meson.build b/lib/portage/tests/process/meson.build
new file mode 100644
index 000000000..e2b3c11d3
--- /dev/null
+++ b/lib/portage/tests/process/meson.build
@@ -0,0 +1,19 @@
+py.install_sources(
+ [
+ 'test_AsyncFunction.py',
+ 'test_ForkProcess.py',
+ 'test_PipeLogger.py',
+ 'test_PopenProcessBlockingIO.py',
+ 'test_PopenProcess.py',
+ 'test_pickle.py',
+ 'test_poll.py',
+ 'test_spawn_fail_e2big.py',
+ 'test_spawn_returnproc.py',
+ 'test_spawn_warn_large_env.py',
+ 'test_unshare_net.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/process',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/process/test_AsyncFunction.py b/lib/portage/tests/process/test_AsyncFunction.py
index 81b3f41fb..eb426a5c0 100644
--- a/lib/portage/tests/process/test_AsyncFunction.py
+++ b/lib/portage/tests/process/test_AsyncFunction.py
@@ -1,6 +1,8 @@
-# Copyright 2020-2021 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
+import multiprocessing
import sys
import portage
@@ -15,21 +17,41 @@ from portage.util.futures.unix_events import _set_nonblocking
class AsyncFunctionTestCase(TestCase):
@staticmethod
def _read_from_stdin(pw):
- os.close(pw)
+ if pw is not None:
+ os.close(pw)
return "".join(sys.stdin)
async def _testAsyncFunctionStdin(self, loop):
test_string = "1\n2\n3\n"
- pr, pw = os.pipe()
- fd_pipes = {0: pr}
- reader = AsyncFunction(
- scheduler=loop, fd_pipes=fd_pipes, target=self._read_from_stdin, args=(pw,)
- )
- reader.start()
- os.close(pr)
- _set_nonblocking(pw)
- with open(pw, mode="wb", buffering=0) as pipe_write:
+ pr, pw = multiprocessing.Pipe(duplex=False)
+ stdin_backup = os.dup(portage._get_stdin().fileno())
+ os.dup2(pr.fileno(), portage._get_stdin().fileno())
+ pr.close()
+ try:
+ reader = AsyncFunction(
+ # Should automatically inherit stdin as fd_pipes[0]
+ # when background is False, for things like
+ # emerge --sync --ask (bug 916116).
+ background=False,
+ scheduler=loop,
+ target=self._read_from_stdin,
+ args=(
+ (
+ pw.fileno()
+ if multiprocessing.get_start_method() == "fork"
+ else None
+ ),
+ ),
+ )
+ reader.start()
+ finally:
+ os.dup2(stdin_backup, portage._get_stdin().fileno())
+ os.close(stdin_backup)
+
+ _set_nonblocking(pw.fileno())
+ with open(pw.fileno(), mode="wb", buffering=0, closefd=False) as pipe_write:
await _writer(pipe_write, test_string.encode("utf_8"))
+ pw.close()
self.assertEqual((await reader.async_wait()), os.EX_OK)
self.assertEqual(reader.result, test_string)
@@ -37,18 +59,52 @@ class AsyncFunctionTestCase(TestCase):
loop = asyncio._wrap_loop()
loop.run_until_complete(self._testAsyncFunctionStdin(loop=loop))
- def _test_getpid_fork(self):
+ def testAsyncFunctionStdinSpawn(self):
+ orig_start_method = multiprocessing.get_start_method()
+ if orig_start_method == "spawn":
+ self.skipTest("multiprocessing start method is already spawn")
+ # NOTE: An attempt was made to use multiprocessing.get_context("spawn")
+ # here, but it caused the python process to terminate unexpectedly
+ # during a send_handle call.
+ multiprocessing.set_start_method("spawn", force=True)
+ try:
+ self.testAsyncFunctionStdin()
+ finally:
+ multiprocessing.set_start_method(orig_start_method, force=True)
+
+ @staticmethod
+ def _test_getpid_fork(preexec_fn=None):
"""
Verify that portage.getpid() cache is updated in a forked child process.
"""
+ if preexec_fn is not None:
+ preexec_fn()
loop = asyncio._wrap_loop()
proc = AsyncFunction(scheduler=loop, target=portage.getpid)
proc.start()
proc.wait()
- self.assertEqual(proc.pid, proc.result)
+ return proc.pid == proc.result
def test_getpid_fork(self):
- self._test_getpid_fork()
+ self.assertTrue(self._test_getpid_fork())
+
+ def test_spawn_getpid(self):
+ """
+ Test portage.getpid() with multiprocessing spawn start method.
+ """
+ loop = asyncio._wrap_loop()
+ proc = AsyncFunction(
+ scheduler=loop,
+ target=self._test_getpid_fork,
+ kwargs=dict(
+ preexec_fn=functools.partial(
+ multiprocessing.set_start_method, "spawn", force=True
+ )
+ ),
+ )
+ proc.start()
+ self.assertEqual(proc.wait(), 0)
+ self.assertTrue(proc.result)
def test_getpid_double_fork(self):
"""
@@ -59,3 +115,4 @@ class AsyncFunctionTestCase(TestCase):
proc = AsyncFunction(scheduler=loop, target=self._test_getpid_fork)
proc.start()
self.assertEqual(proc.wait(), 0)
+ self.assertTrue(proc.result)
diff --git a/lib/portage/tests/process/test_ForkProcess.py b/lib/portage/tests/process/test_ForkProcess.py
new file mode 100644
index 000000000..bc0b836f1
--- /dev/null
+++ b/lib/portage/tests/process/test_ForkProcess.py
@@ -0,0 +1,46 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import functools
+import multiprocessing
+import tempfile
+from unittest.mock import patch
+
+from portage import os
+from portage.tests import TestCase
+from portage.util._async.ForkProcess import ForkProcess
+from portage.util.futures import asyncio
+
+
+class ForkProcessTestCase(TestCase):
+ @staticmethod
+ def _test_spawn_logfile(logfile, target):
+ multiprocessing.set_start_method("spawn", force=True)
+ loop = asyncio._wrap_loop()
+ proc = ForkProcess(scheduler=loop, target=target, logfile=logfile)
+ proc.start()
+ return proc.wait()
+
+ def test_spawn_logfile(self):
+ """
+ Test logfile with multiprocessing spawn start method.
+ """
+ test_string = "hello world"
+ with tempfile.NamedTemporaryFile() as logfile:
+ loop = asyncio._wrap_loop()
+ proc = ForkProcess(
+ scheduler=loop,
+ target=self._test_spawn_logfile,
+ args=(logfile.name, functools.partial(print, test_string, end="")),
+ )
+ proc.start()
+ self.assertEqual(proc.wait(), os.EX_OK)
+
+ with open(logfile.name, "rb") as output:
+ self.assertEqual(output.read(), test_string.encode("utf-8"))
+
+ def test_spawn_logfile_no_send_handle(self):
+ with patch(
+ "portage.util._async.ForkProcess.ForkProcess._HAVE_SEND_HANDLE", new=False
+ ):
+ self.test_spawn_logfile()
diff --git a/lib/portage/tests/process/test_PipeLogger.py b/lib/portage/tests/process/test_PipeLogger.py
index 4d9234639..d4b5e6175 100644
--- a/lib/portage/tests/process/test_PipeLogger.py
+++ b/lib/portage/tests/process/test_PipeLogger.py
@@ -59,18 +59,18 @@ class PipeLoggerTestCase(TestCase):
6,
7,
8,
- 2 ** 5,
- 2 ** 10,
- 2 ** 12,
- 2 ** 13,
- 2 ** 14,
- 2 ** 17,
- 2 ** 17 + 1,
+ 2**5,
+ 2**10,
+ 2**12,
+ 2**13,
+ 2**14,
+ 2**17,
+ 2**17 + 1,
):
test_string = x * "a"
output = loop.run_until_complete(
self._testPipeLoggerToPipe(test_string, loop)
)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
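
The powers-of-two string lengths swept by these tests are chosen to straddle kernel pipe buffer boundaries (PIPE_BUF is typically 4 KiB, and the default Linux pipe capacity is 64 KiB), which is where partial reads appear. A bare os.pipe() round trip, assuming a POSIX platform, shows why a reader must accumulate short reads rather than expect the payload in one chunk:

import os

r, w = os.pipe()
payload = b"a" * (2**14)  # 16 KiB: below pipe capacity, above PIPE_BUF

pid = os.fork()
if pid == 0:  # child: write the payload and exit
    os.close(r)
    os.write(w, payload)
    os._exit(0)

os.close(w)
chunks = []
while True:
    chunk = os.read(r, 4096)  # short reads are normal; keep accumulating
    if not chunk:
        break
    chunks.append(chunk)
os.close(r)
os.waitpid(pid, 0)
assert b"".join(chunks) == payload
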
diff --git a/lib/portage/tests/process/test_PopenProcess.py b/lib/portage/tests/process/test_PopenProcess.py
index 4135802cd..164e57ff9 100644
--- a/lib/portage/tests/process/test_PopenProcess.py
+++ b/lib/portage/tests/process/test_PopenProcess.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2013 Gentoo Foundation
+# Copyright 2012-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import subprocess
@@ -14,8 +14,11 @@ from _emerge.PipeReader import PipeReader
class PopenPipeTestCase(TestCase):
"""
- Test PopenProcess, which can be useful for Jython support, since it
- uses the subprocess.Popen instead of os.fork().
+ Test PopenProcess, which was historically useful for Jython support,
+ since it uses subprocess.Popen instead of os.fork().
+
+ Portage does not currently support Jython, but re-introducing support
+ in The Future (TM) may be possible.
"""
_echo_cmd = "echo -n '%s'"
@@ -49,7 +52,6 @@ class PopenPipeTestCase(TestCase):
return consumer.getvalue().decode("ascii", "replace")
def _testPipeLogger(self, test_string):
-
producer = PopenProcess(
proc=subprocess.Popen(
["bash", "-c", self._echo_cmd % test_string],
@@ -61,7 +63,6 @@ class PopenPipeTestCase(TestCase):
fd, log_file_path = tempfile.mkstemp()
try:
-
consumer = PipeLogger(
background=True,
input_fd=producer.proc.stdout,
@@ -86,14 +87,14 @@ class PopenPipeTestCase(TestCase):
return content.decode("ascii", "replace")
def testPopenPipe(self):
- for x in (1, 2, 5, 6, 7, 8, 2 ** 5, 2 ** 10, 2 ** 12, 2 ** 13, 2 ** 14):
+ for x in (1, 2, 5, 6, 7, 8, 2**5, 2**10, 2**12, 2**13, 2**14):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
output = self._testPipeLogger(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
diff --git a/lib/portage/tests/process/test_PopenProcessBlockingIO.py b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
index 3b7208c7e..893692288 100644
--- a/lib/portage/tests/process/test_PopenProcessBlockingIO.py
+++ b/lib/portage/tests/process/test_PopenProcessBlockingIO.py
@@ -1,14 +1,8 @@
-# Copyright 2012 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import subprocess
-try:
- import threading
-except ImportError:
- # dummy_threading will not suffice
- threading = None
-
from portage import os
from portage.tests import TestCase
from portage.util._async.PopenProcess import PopenProcess
@@ -18,10 +12,13 @@ from portage.util._async.PipeReaderBlockingIO import PipeReaderBlockingIO
class PopenPipeBlockingIOTestCase(TestCase):
"""
- Test PopenProcess, which can be useful for Jython support:
+ Test PopenProcess, which was historically useful for Jython support:
* use subprocess.Popen since Jython does not support os.fork()
* use blocking IO with threads, since Jython does not support
fcntl non-blocking IO
+
+ Portage does not currently support Jython, but re-introducing support
+ in The Future (TM) may be possible.
"""
_echo_cmd = "echo -n '%s'"
@@ -55,16 +52,9 @@ class PopenPipeBlockingIOTestCase(TestCase):
return consumer.getvalue().decode("ascii", "replace")
def testPopenPipeBlockingIO(self):
-
- if threading is None:
- skip_reason = "threading disabled"
- self.portage_skip = "threading disabled"
- self.assertFalse(True, skip_reason)
- return
-
- for x in (1, 2, 5, 6, 7, 8, 2 ** 5, 2 ** 10, 2 ** 12, 2 ** 13, 2 ** 14):
+ for x in (1, 2, 5, 6, 7, 8, 2**5, 2**10, 2**12, 2**13, 2**14):
test_string = x * "a"
output = self._testPipeReader(test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string, output, f"x = {x}, len(output) = {len(output)}"
)
diff --git a/lib/portage/tests/process/test_pickle.py b/lib/portage/tests/process/test_pickle.py
new file mode 100644
index 000000000..9b7d9ef42
--- /dev/null
+++ b/lib/portage/tests/process/test_pickle.py
@@ -0,0 +1,43 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import pickle
+
+from portage.tests import TestCase
+from _emerge.Package import _PackageMetadataWrapperBase
+from _emerge.FifoIpcDaemon import FifoIpcDaemon
+
+
+class PickleTestCase(TestCase):
+ def test_PackageMetadataWrapperBase(self):
+ """
+ Verify that instances of slot_dict_class, like
+ PackageMetadataWrapperBase, are picklable for
+ compatibility with the multiprocessing spawn
+ start method.
+ """
+ obj = _PackageMetadataWrapperBase(EAPI="8")
+ self.assertEqual(obj["EAPI"], "8")
+ serialized = pickle.dumps(obj)
+ obj_copy = pickle.loads(serialized)
+ self.assertEqual(len(obj_copy), len(obj))
+ self.assertEqual(obj_copy["EAPI"], obj["EAPI"])
+ self.assertEqual(obj_copy, obj)
+
+ def test_FifoIpcDaemon_files_dict(self):
+ """
+ Verify that FifoIpcDaemon._files_dict instances are picklable for
+ compatibility with the multiprocessing spawn start method.
+ """
+ obj = FifoIpcDaemon._files_dict(
+ (k, "test-value") for k in FifoIpcDaemon._file_names
+ )
+ self.assertEqual(obj["pipe_in"], "test-value")
+ # Attributes of the same name exist because of the slot_dict_class prefix="" argument.
+ self.assertEqual(obj.pipe_in, obj["pipe_in"])
+ serialized = pickle.dumps(obj)
+ obj_copy = pickle.loads(serialized)
+ self.assertEqual(len(obj_copy), len(obj))
+ self.assertEqual(obj_copy["pipe_in"], obj["pipe_in"])
+ self.assertEqual(obj_copy.pipe_in, obj["pipe_in"])
+ self.assertEqual(obj_copy, obj)
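
Background for the test above: classes produced by slot_dict_class are created at runtime, and pickle serializes classes by reference (module plus qualified name), so a dynamically created class is unpicklable unless a reconstructor is wired up. A generic sketch of one way to do that with copyreg — an illustration only, not portage's actual mechanism:

import copyreg
import pickle

def make_slot_class(slots):
    # Dynamically created classes cannot be pickled by reference,
    # because pickle looks them up by module and qualified name.
    return type("DynSlots", (object,), {"__slots__": tuple(slots)})

DynSlots = make_slot_class(("EAPI",))

def _rebuild(slots, state):
    obj = make_slot_class(slots)()
    for key, value in state.items():
        setattr(obj, key, value)
    return obj

def _reduce(obj):
    state = {s: getattr(obj, s) for s in obj.__slots__ if hasattr(obj, s)}
    return _rebuild, (obj.__slots__, state)

copyreg.pickle(DynSlots, _reduce)

obj = DynSlots()
obj.EAPI = "8"
copy = pickle.loads(pickle.dumps(obj))
assert copy.EAPI == "8"
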
diff --git a/lib/portage/tests/process/test_poll.py b/lib/portage/tests/process/test_poll.py
index c523a8d8f..65a9ca1bf 100644
--- a/lib/portage/tests/process/test_poll.py
+++ b/lib/portage/tests/process/test_poll.py
@@ -1,4 +1,4 @@
-# Copyright 1998-2020 Gentoo Authors
+# Copyright 1998-2020, 2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import functools
@@ -7,6 +7,8 @@ import shutil
import socket
import tempfile
+import pytest
+
from portage import os
from portage.tests import TestCase
from portage.util._eventloop.global_event_loop import global_event_loop
@@ -15,7 +17,6 @@ from _emerge.PipeReader import PipeReader
class PipeReaderTestCase(TestCase):
-
_use_array = False
_echo_cmd = "echo -n '%s'"
@@ -29,7 +30,7 @@ class PipeReaderTestCase(TestCase):
def make_pipes():
try:
return pty.openpty(), None
- except EnvironmentError:
+ except OSError:
self.skipTest("pty not available")
self._do_test(make_pipes)
@@ -95,27 +96,28 @@ class PipeReaderTestCase(TestCase):
return consumer.getvalue().decode("ascii", "replace")
def _do_test(self, make_pipes):
- for x in (1, 2, 5, 6, 7, 8, 2 ** 5, 2 ** 10, 2 ** 12, 2 ** 13, 2 ** 14):
+ for x in (1, 2, 5, 6, 7, 8, 2**5, 2**10, 2**12, 2**13, 2**14):
test_string = x * "a"
(read_end, write_end), cleanup = make_pipes()
try:
output = self._testPipeReader(read_end, write_end, test_string)
self.assertEqual(
- test_string, output, "x = %s, len(output) = %s" % (x, len(output))
+ test_string,
+ output,
+ f"x = {x}, len(output) = {len(output)}",
)
finally:
if cleanup is not None:
cleanup()
+@pytest.mark.xfail()  # This test fails intermittently, hence the xfail marker.
class PipeReaderArrayTestCase(PipeReaderTestCase):
-
_use_array = True
# sleep allows reliable triggering of the failure mode on fast computers
_echo_cmd = "sleep 0.1 ; echo -n '%s'"
def __init__(self, *args, **kwargs):
- super(PipeReaderArrayTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
# https://bugs.python.org/issue5380
# https://bugs.pypy.org/issue956
- self.todo = True
diff --git a/lib/portage/tests/process/test_spawn_fail_e2big.py b/lib/portage/tests/process/test_spawn_fail_e2big.py
new file mode 100644
index 000000000..abb1113fe
--- /dev/null
+++ b/lib/portage/tests/process/test_spawn_fail_e2big.py
@@ -0,0 +1,33 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import platform
+import resource
+
+import pytest
+
+import portage.process
+from portage.const import BASH_BINARY
+
+
+@pytest.mark.skipif(platform.system() != "Linux", reason="not Linux")
+def test_spawnE2big(capsys, tmp_path):
+ env = dict()
+ # Kernel MAX_ARG_STRLEN is defined as 32 * PAGE_SIZE
+ max_arg_strlen_bytes = 32 * resource.getpagesize()
+ env["VERY_LARGE_ENV_VAR"] = "X" * max_arg_strlen_bytes
+
+ logfile = tmp_path / "logfile"
+ echo_output = "Should never appear"
+ with capsys.disabled():
+ retval = portage.process.spawn(
+ [BASH_BINARY, "-c", "echo", echo_output], env=env, logfile=logfile
+ )
+
+ with open(logfile) as f:
+ logfile_content = f.read()
+ assert (
+ f"Largest environment variable: VERY_LARGE_ENV_VAR ({max_arg_strlen_bytes + 20} bytes)"
+ in logfile_content
+ )
+ assert retval == 1
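
For context on the limit this test trips: on Linux a single argv or envp string may not exceed MAX_ARG_STRLEN (32 pages), so an oversized environment variable makes execve() fail with E2BIG before the command ever runs. A minimal standalone reproduction with subprocess, assuming Linux and a "true" binary on PATH:

import errno
import os
import resource
import subprocess

max_arg_strlen = 32 * resource.getpagesize()

env = dict(os.environ)
# "NAME=" plus the value pushes one string past MAX_ARG_STRLEN.
env["VERY_LARGE_ENV_VAR"] = "X" * max_arg_strlen

try:
    subprocess.run(["true"], env=env)
except OSError as exc:
    assert exc.errno == errno.E2BIG  # "Argument list too long"
else:
    raise AssertionError("expected E2BIG")
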
diff --git a/lib/portage/tests/process/test_spawn_returnproc.py b/lib/portage/tests/process/test_spawn_returnproc.py
new file mode 100644
index 000000000..8fbf54d0d
--- /dev/null
+++ b/lib/portage/tests/process/test_spawn_returnproc.py
@@ -0,0 +1,39 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import os
+import signal
+
+from portage.process import find_binary, spawn
+from portage.tests import TestCase
+from portage.util._eventloop.global_event_loop import global_event_loop
+
+
+class SpawnReturnProcTestCase(TestCase):
+ def testSpawnReturnProcWait(self):
+ true_binary = find_binary("true")
+ self.assertNotEqual(true_binary, None)
+
+ loop = global_event_loop()
+
+ async def watch_pid():
+ proc = spawn([true_binary], returnproc=True)
+ self.assertEqual(await proc.wait(), os.EX_OK)
+
+ # A second wait should also work.
+ self.assertEqual(await proc.wait(), os.EX_OK)
+
+ loop.run_until_complete(watch_pid())
+
+ def testSpawnReturnProcTerminate(self):
+ sleep_binary = find_binary("sleep")
+ self.assertNotEqual(sleep_binary, None)
+
+ loop = global_event_loop()
+
+ async def watch_pid():
+ proc = spawn([sleep_binary, "9999"], returnproc=True)
+ proc.terminate()
+ self.assertEqual(await proc.wait(), -signal.SIGTERM)
+
+ loop.run_until_complete(watch_pid())
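
The -signal.SIGTERM comparison follows the usual convention that a process killed by a signal reports the negated signal number as its return code; the standard library's asyncio subprocess behaves the same way, as this small POSIX-only sketch shows:

import asyncio
import signal

async def main():
    proc = await asyncio.create_subprocess_exec("sleep", "9999")
    proc.terminate()
    # Death by signal is reported as the negated signal number.
    assert await proc.wait() == -signal.SIGTERM

asyncio.run(main())
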
diff --git a/lib/portage/tests/process/test_spawn_warn_large_env.py b/lib/portage/tests/process/test_spawn_warn_large_env.py
new file mode 100644
index 000000000..185344881
--- /dev/null
+++ b/lib/portage/tests/process/test_spawn_warn_large_env.py
@@ -0,0 +1,46 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import platform
+import tempfile
+
+from pathlib import Path
+
+import portage.process
+
+from portage import shutil
+from portage.tests import TestCase
+
+
+class SpawnWarnLargeEnvTestCase(TestCase):
+ def testSpawnWarnLargeEnv(self):
+ if platform.system() != "Linux":
+ self.skipTest("not Linux")
+
+ env = dict()
+ env["LARGE_ENV_VAR"] = "X" * 1024 * 96
+
+ tmpdir = tempfile.mkdtemp()
+ previous_env_too_large_warnings = portage.process.env_too_large_warnings
+ try:
+ logfile = tmpdir / Path("logfile")
+ echo_output = "This is an echo process with a large env"
+ retval = portage.process.spawn(
+ ["echo", echo_output],
+ env=env,
+ logfile=logfile,
+ warn_on_large_env=True,
+ )
+
+ with open(logfile) as f:
+ logfile_content = f.read()
+ self.assertIn(
+ echo_output,
+ logfile_content,
+ )
+ self.assertTrue(
+ portage.process.env_too_large_warnings > previous_env_too_large_warnings
+ )
+ self.assertEqual(retval, 0)
+ finally:
+ shutil.rmtree(tmpdir)
diff --git a/lib/portage/tests/process/test_unshare_net.py b/lib/portage/tests/process/test_unshare_net.py
index a5372434f..ad3b288ef 100644
--- a/lib/portage/tests/process/test_unshare_net.py
+++ b/lib/portage/tests/process/test_unshare_net.py
@@ -1,16 +1,18 @@
-# Copyright 2019 Gentoo Authors
+# Copyright 2019, 2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import errno
import os
import platform
+import pytest
+
import portage.process
from portage.const import BASH_BINARY
from portage.tests import TestCase
-CLONE_NEWNET  = 0x40000000
+CLONE_NEWNET = 0x40000000
UNSHARE_NET_TEST_SCRIPT = """
ping -c 1 -W 1 127.0.0.1 || exit 1
ping -c 1 -W 1 10.0.0.1 || exit 1
@@ -21,19 +23,24 @@ ping -c 1 -W 1 fd::1 || exit 1
class UnshareNetTestCase(TestCase):
+ def setUp(self):
+ """
+ Initialize ABILITY_TO_UNSHARE in setUp so that _unshare_validate
+ runs under the PORTAGE_MULTIPROCESSING_START_METHOD configured
+ by super().setUp().
+ """
+ super().setUp()
+ self.ABILITY_TO_UNSHARE = portage.process._unshare_validate(CLONE_NEWNET)
+
+ @pytest.mark.skipif(
+ portage.process.find_binary("ping") is None, reason="ping not found"
+ )
+ @pytest.mark.skipif(platform.system() != "Linux", reason="not Linux")
def testUnshareNet(self):
-
- if platform.system() != "Linux":
- self.skipTest("not Linux")
- if portage.process.find_binary("ping") is None:
- self.skipTest("ping not found")
-
- errno_value = portage.process._unshare_validate(CLONE_NEWNET)
- if errno_value != 0:
- self.skipTest(
- "Unable to unshare: %s" % (errno.errorcode.get(errno_value, "?"))
+ if self.ABILITY_TO_UNSHARE != 0:
+ pytest.skip(
+ f"Unable to unshare: {errno.errorcode.get(self.ABILITY_TO_UNSHARE, '?')}"
)
-
env = os.environ.copy()
env["IPV6"] = "1" if portage.process._has_ipv6() else ""
self.assertEqual(
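
The CLONE_NEWNET probe above wraps the unshare(2) syscall; _unshare_validate returns 0 on success or the errno otherwise. A rough standalone analogue via ctypes, assuming Linux, is sketched below. Note that a successful unshare really does move the calling process into a new network namespace, which is presumably why portage ties the probe to the multiprocessing start method configured in setUp rather than running it inline:

import ctypes
import errno

CLONE_NEWNET = 0x40000000

def unshare_net_errno():
    """Return 0 if a new network namespace can be created, else the errno."""
    libc = ctypes.CDLL(None, use_errno=True)
    if libc.unshare(CLONE_NEWNET) != 0:
        return ctypes.get_errno()
    return 0

if __name__ == "__main__":
    err = unshare_net_errno()
    print("ok" if err == 0 else errno.errorcode.get(err, "?"))
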
diff --git a/lib/portage/tests/resolver/ResolverPlayground.py b/lib/portage/tests/resolver/ResolverPlayground.py
index fdd0714e6..f52a98f8d 100644
--- a/lib/portage/tests/resolver/ResolverPlayground.py
+++ b/lib/portage/tests/resolver/ResolverPlayground.py
@@ -1,8 +1,9 @@
-# Copyright 2010-2021 Gentoo Authors
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import bz2
import fnmatch
+import subprocess
import tempfile
import portage
@@ -13,30 +14,31 @@ from portage.const import (
GLOBAL_CONFIG_PATH,
PORTAGE_BIN_PATH,
USER_CONFIG_PATH,
+ SUPPORTED_GENTOO_BINPKG_FORMATS,
)
from portage.process import find_binary
from portage.dep import Atom, _repo_separator
-from portage.package.ebuild.config import config
-from portage.package.ebuild.digestgen import digestgen
+from portage.dbapi.bintree import binarytree
from portage._sets import load_default_config
from portage._sets.base import InternalPackageSet
from portage.tests import cnf_path
from portage.util import ensure_dirs, normalize_path
from portage.versions import catsplit
+from portage.exception import InvalidBinaryPackageFormat
+from portage.gpg import GPG
import _emerge
-from _emerge.actions import _calc_depclean
+from _emerge.actions import _calc_depclean, expand_set_arguments
from _emerge.Blocker import Blocker
from _emerge.create_depgraph_params import create_depgraph_params
from _emerge.DependencyArg import DependencyArg
-from _emerge.depgraph import backtrack_depgraph
+from _emerge.depgraph import (
+ _frozen_depgraph_config,
+ backtrack_depgraph,
+)
+from _emerge.Package import Package
from _emerge.RootConfig import RootConfig
-try:
- from repoman.tests import cnf_path_repoman
-except ImportError:
- cnf_path_repoman = None
-
class ResolverPlayground:
"""
@@ -52,6 +54,7 @@ class ResolverPlayground:
"make.conf",
"modules",
"package.accept_keywords",
+ "package.env",
"package.keywords",
"package.license",
"package.mask",
@@ -60,14 +63,11 @@ class ResolverPlayground:
"packages",
"package.unmask",
"package.use",
- "package.use.aliases",
"package.use.force",
"package.use.mask",
"package.use.stable.force",
"package.use.stable.mask",
"soname.provided",
- "unpack_dependencies",
- "use.aliases",
"use.force",
"use.mask",
"layout.conf",
@@ -134,12 +134,16 @@ class ResolverPlayground:
"""
self.debug = debug
- if eprefix is None:
- self.eprefix = normalize_path(tempfile.mkdtemp())
+ # "if True:" preserves the indentation of the large block below,
+ # keeping this diff minimal.
+ if True:
+ if eprefix is None:
+ self.eprefix = normalize_path(tempfile.mkdtemp())
+ else:
+ self.eprefix = normalize_path(eprefix)
# EPREFIX/bin is used by fake true_binaries. Real binaries go into EPREFIX/usr/bin
eubin = os.path.join(self.eprefix, "usr", "bin")
ensure_dirs(eubin)
+ os.symlink(portage._python_interpreter, os.path.join(eubin, "python"))
for x in self.portage_bin:
os.symlink(os.path.join(PORTAGE_BIN_PATH, x), os.path.join(eubin, x))
@@ -151,6 +155,7 @@ class ResolverPlayground:
essential_binaries = (
"awk",
"basename",
+ "bash",
"bzip2",
"cat",
"chgrp",
@@ -161,6 +166,7 @@ class ResolverPlayground:
"egrep",
"env",
"find",
+ "flock",
"grep",
"head",
"install",
@@ -195,8 +201,6 @@ class ResolverPlayground:
os.symlink(path, os.path.join(eubin, x))
finally:
os.environ["PATH"] = orig_path
- else:
- self.eprefix = normalize_path(eprefix)
# Tests may override portage.const.EPREFIX in order to
# simulate a prefix installation. It's reasonable to do
@@ -225,7 +229,6 @@ class ResolverPlayground:
self._create_distfiles(distfiles)
self._create_ebuilds(ebuilds)
- self._create_binpkgs(binpkgs)
self._create_installed(installed)
self._create_profile(
ebuilds, eclasses, installed, profile, repo_configs, user_config, sets
@@ -234,6 +237,8 @@ class ResolverPlayground:
self.settings, self.trees = self._load_config()
+ self.gpg = None
+ self._create_binpkgs(binpkgs)
self._create_ebuild_manifests(ebuilds)
portage.util.noiselimit = 0
@@ -262,12 +267,12 @@ class ResolverPlayground:
try:
os.makedirs(profile_path)
- except os.error:
+ except OSError:
pass
repo_name_file = os.path.join(profile_path, "repo_name")
with open(repo_name_file, "w") as f:
- f.write("%s\n" % repo)
+ f.write(f"{repo}\n")
return self._repositories[repo]["location"]
@@ -305,42 +310,52 @@ class ResolverPlayground:
ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
try:
os.makedirs(ebuild_dir)
- except os.error:
+ except OSError:
pass
with open(ebuild_path, "w") as f:
if copyright_header is not None:
f.write(copyright_header)
- f.write('EAPI="%s"\n' % eapi)
+ f.write(f'EAPI="{eapi}"\n')
for k, v in metadata.items():
- f.write('%s="%s"\n' % (k, v))
+ f.write(f'{k}="{v}"\n')
if misc_content is not None:
f.write(misc_content)
def _create_ebuild_manifests(self, ebuilds):
- tmpsettings = config(clone=self.settings)
- tmpsettings["PORTAGE_QUIET"] = "1"
- for cpv in ebuilds:
- a = Atom("=" + cpv, allow_repo=True)
- repo = a.repo
- if repo is None:
- repo = "test_repo"
-
- repo_dir = self._get_repo_dir(repo)
- ebuild_dir = os.path.join(repo_dir, a.cp)
- ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
-
- portdb = self.trees[self.eroot]["porttree"].dbapi
- tmpsettings["O"] = ebuild_dir
- if not digestgen(mysettings=tmpsettings, myportdb=portdb):
- raise AssertionError("digest creation failed for %s" % ebuild_path)
+ for repo_name in self._repositories:
+ if repo_name == "DEFAULT":
+ continue
+ egencache_cmd = [
+ "egencache",
+ f"--repo={repo_name}",
+ "--update",
+ "--update-manifests",
+ "--sign-manifests=n",
+ "--strict-manifests=n",
+ f"--repositories-configuration={self.settings['PORTAGE_REPOSITORIES']}",
+ f"--jobs={portage.util.cpuinfo.get_cpu_count()}",
+ ]
+ result = subprocess.run(
+ egencache_cmd,
+ env=self.settings.environ(),
+ )
+ if result.returncode != os.EX_OK:
+ raise AssertionError(f"command failed: {egencache_cmd}")
def _create_binpkgs(self, binpkgs):
- # When using BUILD_ID, there can be mutiple instances for the
+ # When using BUILD_ID, there can be multiple instances for the
# same cpv. Therefore, binpkgs may be an iterable instead of
# a dict.
items = getattr(binpkgs, "items", None)
items = items() if items is not None else binpkgs
+ binpkg_format = self.settings.get(
+ "BINPKG_FORMAT", SUPPORTED_GENTOO_BINPKG_FORMATS[0]
+ )
+ if binpkg_format == "gpkg":
+ if self.gpg is None:
+ self.gpg = GPG(self.settings)
+ self.gpg.unlock()
for cpv, metadata in items:
a = Atom("=" + cpv, allow_repo=True)
repo = a.repo
@@ -356,19 +371,43 @@ class ResolverPlayground:
metadata["repository"] = repo
metadata["CATEGORY"] = cat
metadata["PF"] = pf
+ metadata["BINPKG_FORMAT"] = binpkg_format
repo_dir = self.pkgdir
category_dir = os.path.join(repo_dir, cat)
if "BUILD_ID" in metadata:
- binpkg_path = os.path.join(
- category_dir, pn, "%s-%s.xpak" % (pf, metadata["BUILD_ID"])
- )
+ if binpkg_format == "xpak":
+ binpkg_path = os.path.join(
+ category_dir, pn, f"{pf}-{metadata['BUILD_ID']}.xpak"
+ )
+ elif binpkg_format == "gpkg":
+ binpkg_path = os.path.join(
+ category_dir,
+ pn,
+ f"{pf}-{metadata['BUILD_ID']}.gpkg.tar",
+ )
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
else:
- binpkg_path = os.path.join(category_dir, pf + ".tbz2")
+ if binpkg_format == "xpak":
+ binpkg_path = os.path.join(category_dir, pf + ".tbz2")
+ elif binpkg_format == "gpkg":
+ binpkg_path = os.path.join(category_dir, pf + ".gpkg.tar")
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
ensure_dirs(os.path.dirname(binpkg_path))
- t = portage.xpak.tbz2(binpkg_path)
- t.recompose_mem(portage.xpak.xpak_mem(metadata))
+ if binpkg_format == "xpak":
+ t = portage.xpak.tbz2(binpkg_path)
+ t.recompose_mem(portage.xpak.xpak_mem(metadata))
+ elif binpkg_format == "gpkg":
+ t = portage.gpkg.gpkg(self.settings, a.cpv, binpkg_path)
+ t.compress(os.path.dirname(binpkg_path), metadata)
+ else:
+ raise InvalidBinaryPackageFormat(binpkg_format)
+
+ bintree = binarytree(pkgdir=self.pkgdir, settings=self.settings)
+ bintree.populate(force_reindex=True)
def _create_installed(self, installed):
for cpv in installed:
@@ -380,7 +419,7 @@ class ResolverPlayground:
vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
try:
os.makedirs(vdb_pkg_dir)
- except os.error:
+ except OSError:
pass
metadata = installed[cpv].copy()
@@ -406,13 +445,13 @@ class ResolverPlayground:
metadata["repository"] = repo
for k, v in metadata.items():
with open(os.path.join(vdb_pkg_dir, k), "w") as f:
- f.write("%s\n" % v)
+ f.write(f"{v}\n")
ebuild_path = os.path.join(vdb_pkg_dir, a.cpv.split("/")[1] + ".ebuild")
with open(ebuild_path, "w") as f:
- f.write('EAPI="%s"\n' % metadata.pop("EAPI", "0"))
+ f.write(f"EAPI=\"{metadata.pop('EAPI', '0')}\"\n")
for k, v in metadata.items():
- f.write('%s="%s"\n' % (k, v))
+ f.write(f'{k}="{v}"\n')
env_path = os.path.join(vdb_pkg_dir, "environment.bz2")
with bz2.BZ2File(env_path, mode="w") as f:
@@ -422,12 +461,11 @@ class ResolverPlayground:
def _create_profile(
self, ebuilds, eclasses, installed, profile, repo_configs, user_config, sets
):
-
user_config_dir = os.path.join(self.eroot, USER_CONFIG_PATH)
try:
os.makedirs(user_config_dir)
- except os.error:
+ except OSError:
pass
for repo in self._repositories:
@@ -465,7 +503,7 @@ class ResolverPlayground:
fnmatch.fnmatch(config_file, os.path.join(x, "*"))
for x in self.config_files
):
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
if config_file in ("layout.conf",):
file_name = os.path.join(repo_dir, "metadata", config_file)
@@ -477,7 +515,7 @@ class ResolverPlayground:
os.makedirs(os.path.dirname(file_name))
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Temporarily write empty value of masters until it becomes default.
# TODO: Delete all references to "# use implicit masters" when empty value becomes default.
if config_file == "layout.conf" and not any(
@@ -491,13 +529,11 @@ class ResolverPlayground:
os.makedirs(eclass_dir)
for eclass_name, eclass_content in eclasses.items():
- with open(
- os.path.join(eclass_dir, "{}.eclass".format(eclass_name)), "wt"
- ) as f:
+ with open(os.path.join(eclass_dir, f"{eclass_name}.eclass"), "w") as f:
if isinstance(eclass_content, str):
eclass_content = [eclass_content]
for line in eclass_content:
- f.write("{}\n".format(line))
+ f.write(f"{line}\n")
# Temporarily write empty value of masters until it becomes default.
if not repo_config or "layout.conf" not in repo_config:
@@ -533,32 +569,36 @@ class ResolverPlayground:
if profile:
for config_file, lines in profile.items():
if config_file not in self.config_files:
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
file_name = os.path.join(sub_profile_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Create profile symlink
os.symlink(
sub_profile_dir, os.path.join(user_config_dir, "make.profile")
)
+ gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]
+
make_conf = {
"ACCEPT_KEYWORDS": "x86",
+ "BINPKG_GPG_SIGNING_BASE_COMMAND": f"flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]",
+ "BINPKG_GPG_SIGNING_GPG_HOME": gpg_test_path,
+ "BINPKG_GPG_SIGNING_KEY": "0x5D90EA06352177F6",
+ "BINPKG_GPG_VERIFY_GPG_HOME": gpg_test_path,
"CLEAN_DELAY": "0",
"DISTDIR": self.distdir,
"EMERGE_WARNING_DELAY": "0",
+ "FEATURES": "${FEATURES} binpkg-signing gpg-keepalive",
"PKGDIR": self.pkgdir,
"PORTAGE_INST_GID": str(portage.data.portage_gid),
"PORTAGE_INST_UID": str(portage.data.portage_uid),
"PORTAGE_TMPDIR": os.path.join(self.eroot, "var/tmp"),
}
- if os.environ.get("NOCOLOR"):
- make_conf["NOCOLOR"] = os.environ["NOCOLOR"]
-
# Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
# need to be inherited by ebuild subprocesses.
if "PORTAGE_USERNAME" in os.environ:
@@ -572,6 +612,10 @@ class ResolverPlayground:
if "make.conf" in user_config:
make_conf_lines.extend(user_config["make.conf"])
+ if "BINPKG_FORMAT=gpkg" in user_config["make.conf"]:
+ make_conf_lines.append(
+ 'FEATURES="${FEATURES} binpkg-request-signature"'
+ )
if not portage.process.sandbox_capable or os.environ.get("SANDBOX_ON") == "1":
# avoid problems from nested sandbox instances
@@ -582,19 +626,19 @@ class ResolverPlayground:
for config_file, lines in configs.items():
if config_file not in self.config_files:
- raise ValueError("Unknown config file: '%s'" % config_file)
+ raise ValueError(f"Unknown config file: '{config_file}'")
file_name = os.path.join(user_config_dir, config_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# Create /usr/share/portage/config/make.globals
make_globals_path = os.path.join(
self.eroot, GLOBAL_CONFIG_PATH.lstrip(os.sep), "make.globals"
)
ensure_dirs(os.path.dirname(make_globals_path))
- os.symlink(os.path.join(cnf_path, "make.globals"), make_globals_path)
+ os.symlink(os.path.join(str(cnf_path), "make.globals"), make_globals_path)
# Create /usr/share/portage/config/sets/portage.conf
default_sets_conf_dir = os.path.join(
@@ -603,10 +647,10 @@ class ResolverPlayground:
try:
os.makedirs(default_sets_conf_dir)
- except os.error:
+ except OSError:
pass
- provided_sets_portage_conf = os.path.join(cnf_path, "sets", "portage.conf")
+ provided_sets_portage_conf = os.path.join(str(cnf_path), "sets", "portage.conf")
os.symlink(
provided_sets_portage_conf,
os.path.join(default_sets_conf_dir, "portage.conf"),
@@ -616,19 +660,14 @@ class ResolverPlayground:
try:
os.makedirs(set_config_dir)
- except os.error:
+ except OSError:
pass
for sets_file, lines in sets.items():
file_name = os.path.join(set_config_dir, sets_file)
with open(file_name, "w") as f:
for line in lines:
- f.write("%s\n" % line)
-
- if cnf_path_repoman is not None:
- # Create /usr/share/repoman
- repoman_share_dir = os.path.join(self.eroot, "usr", "share", "repoman")
- os.symlink(cnf_path_repoman, repoman_share_dir)
+ f.write(f"{line}\n")
def _create_world(self, world, world_sets):
# Create /var/lib/portage/world
@@ -640,27 +679,27 @@ class ResolverPlayground:
with open(world_file, "w") as f:
for atom in world:
- f.write("%s\n" % atom)
+ f.write(f"{atom}\n")
with open(world_set_file, "w") as f:
for atom in world_sets:
- f.write("%s\n" % atom)
+ f.write(f"{atom}\n")
def _load_config(self):
-
create_trees_kwargs = {}
if self.target_root != os.sep:
create_trees_kwargs["target_root"] = self.target_root
env = {
+ "PATH": f"{self.eprefix}/usr/sbin:{self.eprefix}/usr/bin:{os.environ['PATH']}",
"PORTAGE_REPOSITORIES": "\n".join(
"[%s]\n%s"
% (
repo_name,
- "\n".join("%s = %s" % (k, v) for k, v in repo_config.items()),
+ "\n".join(f"{k} = {v}" for k, v in repo_config.items()),
)
for repo_name, repo_config in self._repositories.items()
- )
+ ),
}
if self.debug:
@@ -696,12 +735,28 @@ class ResolverPlayground:
global_noiselimit = portage.util.noiselimit
global_emergelog_disable = _emerge.emergelog._disable
try:
-
if not self.debug:
portage.util.noiselimit = -2
_emerge.emergelog._disable = True
- if action in ("depclean", "prune"):
+ # NOTE: frozen_config could be cached and reused if options and params were constant.
+ params_action = (
+ "remove" if action in ("dep_check", "depclean", "prune") else action
+ )
+ params = create_depgraph_params(options, params_action)
+ frozen_config = _frozen_depgraph_config(
+ self.settings, self.trees, options, params, None
+ )
+
+ atoms, retval = expand_set_arguments(
+ atoms, action, self.trees[self.eroot]["root_config"]
+ )
+ if retval != os.EX_OK:
+ raise AssertionError(
+ f"expand_set_arguments failed with retval {retval}"
+ )
+
+ if params_action == "remove":
depclean_result = _calc_depclean(
self.settings,
self.trees,
@@ -710,6 +765,7 @@ class ResolverPlayground:
action,
InternalPackageSet(initial_atoms=atoms, allow_wildcard=True),
None,
+ frozen_config=frozen_config,
)
result = ResolverPlaygroundDepcleanResult(
atoms,
@@ -720,9 +776,15 @@ class ResolverPlayground:
depclean_result.depgraph,
)
else:
- params = create_depgraph_params(options, action)
success, depgraph, favorites = backtrack_depgraph(
- self.settings, self.trees, options, params, action, atoms, None
+ self.settings,
+ self.trees,
+ options,
+ params,
+ action,
+ atoms,
+ None,
+ frozen_config=frozen_config,
)
depgraph._show_merge_list()
depgraph.display_problems()
@@ -742,11 +804,13 @@ class ResolverPlayground:
return
def cleanup(self):
+ if self.gpg is not None:
+ self.gpg.stop()
for eroot in self.trees:
portdb = self.trees[eroot]["porttree"].dbapi
portdb.close_caches()
if self.debug:
- print("\nEROOT=%s" % self.eroot)
+ print(f"\nEROOT={self.eroot}")
else:
shutil.rmtree(self.eroot)
if hasattr(self, "_orig_eprefix"):
@@ -780,7 +844,7 @@ class ResolverPlaygroundTestCase:
checks = dict.fromkeys(result.checks)
for key, value in self._checks.items():
if not key in checks:
- raise KeyError("Not an available check: '%s'" % key)
+ raise KeyError(f"Not an available check: '{key}'")
checks[key] = value
fail_msgs = []
@@ -906,10 +970,11 @@ class ResolverPlaygroundTestCase:
)
and expected is not None
):
- expected = set(expected)
+ # unsatisfied_deps can be a dict for depclean-like actions
+ expected = expected if isinstance(expected, dict) else set(expected)
elif key == "forced_rebuilds" and expected is not None:
- expected = dict((k, set(v)) for k, v in expected.items())
+ expected = {k: set(v) for k, v in expected.items()}
if got != expected:
fail_msgs.append(
@@ -940,21 +1005,20 @@ def _mergelist_str(x, depgraph):
repo_str = _repo_separator + x.repo
build_id_str = ""
if x.type_name == "binary" and x.cpv.build_id is not None:
- build_id_str = "-%s" % x.cpv.build_id
+ build_id_str = f"-{x.cpv.build_id}"
mergelist_str = x.cpv + build_id_str + repo_str
if x.built:
if x.operation == "merge":
desc = x.type_name
else:
desc = x.operation
- mergelist_str = "[%s]%s" % (desc, mergelist_str)
+ mergelist_str = f"[{desc}]{mergelist_str}"
if x.root != depgraph._frozen_config._running_root.root:
mergelist_str += "{targetroot}"
return mergelist_str
class ResolverPlaygroundResult:
-
checks = (
"success",
"mergelist",
@@ -1047,17 +1111,17 @@ class ResolverPlaygroundResult:
)
if self.depgraph._dynamic_config._unsatisfied_deps_for_display:
- self.unsatisfied_deps = set(
+ self.unsatisfied_deps = {
dep_info[0][1]
for dep_info in self.depgraph._dynamic_config._unsatisfied_deps_for_display
- )
+ }
if self.depgraph._forced_rebuilds:
- self.forced_rebuilds = dict(
- (child.cpv, set(parent.cpv for parent in parents))
+ self.forced_rebuilds = {
+ child.cpv: {parent.cpv for parent in parents}
for child_dict in self.depgraph._forced_rebuilds.values()
for child, parents in child_dict.items()
- )
+ }
required_use_unsatisfied = []
for (
@@ -1071,18 +1135,20 @@ class ResolverPlaygroundResult:
class ResolverPlaygroundDepcleanResult:
-
checks = (
"success",
"cleanlist",
"ordered",
"req_pkg_count",
"graph_order",
+ "unsatisfied_deps",
)
optional_checks = (
+ "cleanlist",
"ordered",
"req_pkg_count",
"graph_order",
+ "unsatisfied_deps",
)
def __init__(self, atoms, rval, cleanlist, ordered, req_pkg_count, depgraph):
@@ -1094,3 +1160,10 @@ class ResolverPlaygroundDepcleanResult:
self.graph_order = [
_mergelist_str(node, depgraph) for node in depgraph._dynamic_config.digraph
]
+ self.unsatisfied_deps = {}
+ for dep in depgraph._dynamic_config._initially_unsatisfied_deps:
+ if isinstance(dep.parent, Package):
+ parent_repr = dep.parent.cpv
+ else:
+ parent_repr = dep.parent.arg
+ self.unsatisfied_deps.setdefault(parent_repr, set()).add(dep.atom)
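
The binpkg_format branching in _create_binpkgs above encodes the on-disk naming scheme: multi-instance packages (those with a BUILD_ID) live in a per-package subdirectory with the build id embedded in the file name, while single-instance packages sit directly in the category directory. A hypothetical helper condensing that logic, for illustration only (not a portage API), assuming POSIX path separators:

import os

def binpkg_path(category_dir, pn, pf, binpkg_format, build_id=None):
    if binpkg_format == "xpak":
        ext = "xpak" if build_id is not None else "tbz2"
    elif binpkg_format == "gpkg":
        ext = "gpkg.tar"
    else:
        raise ValueError(f"unsupported format: {binpkg_format}")
    if build_id is not None:
        # Multi-instance layout: <category>/<pn>/<pf>-<build_id>.<ext>
        return os.path.join(category_dir, pn, f"{pf}-{build_id}.{ext}")
    # Single-instance layout: <category>/<pf>.<ext>
    return os.path.join(category_dir, f"{pf}.{ext}")

assert binpkg_path("app-misc", "A", "A-1", "xpak") == "app-misc/A-1.tbz2"
assert binpkg_path("app-misc", "A", "A-1", "gpkg", build_id=2) == "app-misc/A/A-1-2.gpkg.tar"
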
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/meson.build b/lib/portage/tests/resolver/binpkg_multi_instance/meson.build
new file mode 100644
index 000000000..e4913f9da
--- /dev/null
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_build_id_profile_format.py',
+ 'test_rebuilt_binaries.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/resolver/binpkg_multi_instance',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
index b311961d6..ccb1f5b8d 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_build_id_profile_format.py
@@ -1,16 +1,19 @@
# Copyright 2015-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class BuildIdProfileFormatTestCase(TestCase):
def testBuildIdProfileFormat(self):
-
profile = {
"packages": ("=app-misc/A-1-2::test_repo",),
"package.mask": ("<app-misc/A-1::test_repo",),
@@ -139,21 +142,30 @@ class BuildIdProfileFormatTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False,
- binpkgs=binpkgs,
- ebuilds=ebuilds,
- installed=installed,
- repo_configs=repo_configs,
- profile=profile,
- user_config=user_config,
- world=world,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- # playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ _user_config = user_config.copy()
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ repo_configs=repo_configs,
+ profile=profile,
+ user_config=_user_config,
+ world=world,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ # playground.debug = False
+ playground.cleanup()
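
The loop-plus-subTest conversion applied here and throughout the resolver tests below keeps one test method covering every binpkg format while still labeling which format failed, and a failure in one format does not abort the remaining ones. The bare pattern, reduced to standard unittest:

import unittest

SUPPORTED_FORMATS = ("xpak", "gpkg")  # stand-in for SUPPORTED_GENTOO_BINPKG_FORMATS

class FormatMatrixExample(unittest.TestCase):
    def test_each_format(self):
        for binpkg_format in SUPPORTED_FORMATS:
            with self.subTest(binpkg_format=binpkg_format):
                # Each iteration is reported separately; a failure here
                # does not stop the remaining formats from running.
                self.assertIn(binpkg_format, SUPPORTED_FORMATS)

if __name__ == "__main__":
    unittest.main()
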
diff --git a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
index c854604c1..8ce10064f 100644
--- a/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
+++ b/lib/portage/tests/resolver/binpkg_multi_instance/test_rebuilt_binaries.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class RebuiltBinariesCase(TestCase):
def testRebuiltBinaries(self):
-
user_config = {
"make.conf": ('FEATURES="binpkg-multi-instance"',),
}
@@ -99,18 +102,27 @@ class RebuiltBinariesCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False,
- binpkgs=binpkgs,
- installed=installed,
- user_config=user_config,
- world=world,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- # playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ _user_config = user_config.copy()
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=_user_config,
+ world=world,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ # playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/meson.build b/lib/portage/tests/resolver/meson.build
new file mode 100644
index 000000000..8892c7813
--- /dev/null
+++ b/lib/portage/tests/resolver/meson.build
@@ -0,0 +1,100 @@
+py.install_sources(
+ [
+ 'ResolverPlayground.py',
+ 'test_aggressive_backtrack_downgrade.py',
+ 'test_alternatives_gzip.py',
+ 'test_autounmask.py',
+ 'test_autounmask_binpkg_use.py',
+ 'test_autounmask_keep_keywords.py',
+ 'test_autounmask_multilib_use.py',
+ 'test_autounmask_parent.py',
+ 'test_autounmask_use_backtrack.py',
+ 'test_autounmask_use_breakage.py',
+ 'test_autounmask_use_slot_conflict.py',
+ 'test_backtracking.py',
+ 'test_bdeps.py',
+ 'test_binary_pkg_ebuild_visibility.py',
+ 'test_blocker.py',
+ 'test_broken_deps.py',
+ 'test_changed_deps.py',
+ 'test_circular_choices.py',
+ 'test_circular_choices_rust.py',
+ 'test_circular_dependencies.py',
+ 'test_complete_graph.py',
+ 'test_complete_if_new_subslot_without_revbump.py',
+ 'test_cross_dep_priority.py',
+ 'test_depclean.py',
+ 'test_depclean_order.py',
+ 'test_depclean_slot_unavailable.py',
+ 'test_depth.py',
+ 'test_disjunctive_depend_order.py',
+ 'test_eapi.py',
+ 'test_features_test_use.py',
+ 'test_imagemagick_graphicsmagick.py',
+ 'test_installkernel.py',
+ 'test_keywords.py',
+ 'test_merge_order.py',
+ 'test_missing_iuse_and_evaluated_atoms.py',
+ 'test_multirepo.py',
+ 'test_multislot.py',
+ 'test_old_dep_chain_display.py',
+ 'test_onlydeps.py',
+ 'test_onlydeps_circular.py',
+ 'test_onlydeps_ideps.py',
+ 'test_onlydeps_minimal.py',
+ 'test_or_choices.py',
+ 'test_or_downgrade_installed.py',
+ 'test_or_upgrade_installed.py',
+ 'test_output.py',
+ 'test_package_tracker.py',
+ 'test_perl_rebuild_bug.py',
+ 'test_profile_default_eapi.py',
+ 'test_profile_package_set.py',
+ 'test_rebuild.py',
+ 'test_rebuild_ghostscript.py',
+ 'test_regular_slot_change_without_revbump.py',
+ 'test_required_use.py',
+ 'test_runtime_cycle_merge_order.py',
+ 'test_simple.py',
+ 'test_slot_abi.py',
+ 'test_slot_abi_downgrade.py',
+ 'test_slot_change_without_revbump.py',
+ 'test_slot_collisions.py',
+ 'test_slot_conflict_blocked_prune.py',
+ 'test_slot_conflict_force_rebuild.py',
+ 'test_slot_conflict_mask_update.py',
+ 'test_slot_conflict_rebuild.py',
+ 'test_slot_conflict_unsatisfied_deep_deps.py',
+ 'test_slot_conflict_update.py',
+ 'test_slot_conflict_update_virt.py',
+ 'test_slot_operator_autounmask.py',
+ 'test_slot_operator_bdeps.py',
+ 'test_slot_operator_complete_graph.py',
+ 'test_slot_operator_exclusive_slots.py',
+ 'test_slot_operator_missed_update.py',
+ 'test_slot_operator_rebuild.py',
+ 'test_slot_operator_required_use.py',
+ 'test_slot_operator_reverse_deps.py',
+ 'test_slot_operator_runtime_pkg_mask.py',
+ 'test_slot_operator_unsatisfied.py',
+ 'test_slot_operator_unsolved.py',
+ 'test_slot_operator_update_probe_parent_downgrade.py',
+ 'test_solve_non_slot_operator_slot_conflicts.py',
+ 'test_targetroot.py',
+ 'test_unmerge_order.py',
+ 'test_unnecessary_slot_upgrade.py',
+ 'test_update.py',
+ 'test_useflags.py',
+ 'test_use_dep_defaults.py',
+ 'test_virtual_minimize_children.py',
+ 'test_virtual_slot.py',
+ 'test_with_test_deps.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/resolver',
+ pure : not native_extensions
+)
+
+subdir('binpkg_multi_instance')
+subdir('soname')
diff --git a/lib/portage/tests/resolver/soname/meson.build b/lib/portage/tests/resolver/soname/meson.build
new file mode 100644
index 000000000..0c3bac3ff
--- /dev/null
+++ b/lib/portage/tests/resolver/soname/meson.build
@@ -0,0 +1,19 @@
+py.install_sources(
+ [
+ 'test_autounmask.py',
+ 'test_depclean.py',
+ 'test_downgrade.py',
+ 'test_or_choices.py',
+ 'test_reinstall.py',
+ 'test_skip_update.py',
+ 'test_slot_conflict_reinstall.py',
+ 'test_slot_conflict_update.py',
+ 'test_soname_provided.py',
+ 'test_unsatisfiable.py',
+ 'test_unsatisfied.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/resolver/soname',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/resolver/soname/test_autounmask.py b/lib/portage/tests/resolver/soname/test_autounmask.py
index 3ee0be8d0..42ecff31c 100644
--- a/lib/portage/tests/resolver/soname/test_autounmask.py
+++ b/lib/portage/tests/resolver/soname/test_autounmask.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameAutoUnmaskTestCase(TestCase):
def testSonameAutoUnmask(self):
-
binpkgs = {
"dev-libs/icu-49": {
"KEYWORDS": "x86",
@@ -85,13 +88,26 @@ class SonameAutoUnmaskTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_depclean.py b/lib/portage/tests/resolver/soname/test_depclean.py
index 9cf1877a3..e85f4399d 100644
--- a/lib/portage/tests/resolver/soname/test_depclean.py
+++ b/lib/portage/tests/resolver/soname/test_depclean.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SonameDepcleanTestCase(TestCase):
def testSonameDepclean(self):
-
installed = {
"app-misc/A-1": {
"RDEPEND": "dev-libs/B",
diff --git a/lib/portage/tests/resolver/soname/test_downgrade.py b/lib/portage/tests/resolver/soname/test_downgrade.py
index b683745e0..13246b8dd 100644
--- a/lib/portage/tests/resolver/soname/test_downgrade.py
+++ b/lib/portage/tests/resolver/soname/test_downgrade.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameDowngradeTestCase(TestCase):
def testSingleSlot(self):
-
ebuilds = {
"dev-libs/icu-49": {},
"dev-libs/icu-4.8": {},
@@ -125,25 +128,31 @@ class SonameDowngradeTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs,
- ebuilds=ebuilds,
- installed=installed,
- user_config=user_config,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config=user_config,
+ world=world,
+ debug=False,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
def testTwoSlots(self):
-
ebuilds = {
"dev-libs/glib-1.2.10": {"SLOT": "1"},
"dev-libs/glib-2.30.2": {"SLOT": "2"},
@@ -217,19 +226,27 @@ class SonameDowngradeTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- user_config=user_config,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=user_config,
+ world=world,
+ debug=False,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_or_choices.py b/lib/portage/tests/resolver/soname/test_or_choices.py
index c636726f3..31b8ca94d 100644
--- a/lib/portage/tests/resolver/soname/test_or_choices.py
+++ b/lib/portage/tests/resolver/soname/test_or_choices.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameOrChoicesTestCase(TestCase):
def testSonameConflictMissedUpdate(self):
-
binpkgs = {
"dev-lang/ocaml-4.02.1": {
"EAPI": "5",
@@ -83,14 +86,26 @@ class SonameOrChoicesTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False, binpkgs=binpkgs, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_reinstall.py b/lib/portage/tests/resolver/soname/test_reinstall.py
index f4616f9dd..1637561c4 100644
--- a/lib/portage/tests/resolver/soname/test_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_reinstall.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameReinstallTestCase(TestCase):
def testSonameReinstall(self):
-
binpkgs = {
"app-misc/A-1": {
"RDEPEND": "dev-libs/B",
@@ -72,14 +75,27 @@ class SonameReinstallTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False, binpkgs=binpkgs, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_skip_update.py b/lib/portage/tests/resolver/soname/test_skip_update.py
index 336bfac4f..dc48a66f9 100644
--- a/lib/portage/tests/resolver/soname/test_skip_update.py
+++ b/lib/portage/tests/resolver/soname/test_skip_update.py
@@ -1,19 +1,38 @@
-# Copyright 2015 Gentoo Foundation
+# Copyright 2015-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+from unittest.mock import patch
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameSkipUpdateTestCase(TestCase):
- def testSonameSkipUpdate(self):
+ def testSonameSkipUpdateNoPruneRebuilds(self):
+ """
+ Make sure that fewer backtracking runs are required when
+ prune_rebuilds backtracking is disabled, which shows that
+ _eliminate_rebuilds works for the purposes of bug 915494.
+ """
+ with patch(
+ "_emerge.depgraph._dynamic_depgraph_config._ENABLE_PRUNE_REBUILDS",
+ new=False,
+ ):
+ self.testSonameSkipUpdate(backtrack=2)
+ def testSonameSkipUpdate(self, backtrack=3):
binpkgs = {
"app-misc/A-1": {
- "RDEPEND": "dev-libs/B",
+ # Simulate an injected libc dependency, which should not
+ # trigger a reinstall because strip_libc_deps is applied in
+ # the depgraph._eliminate_rebuilds dep comparison.
+ "RDEPEND": "dev-libs/B >=sys-libs/glibc-2.37",
"DEPEND": "dev-libs/B",
"REQUIRES": "x86_32: libB.so.1",
},
@@ -23,6 +42,10 @@ class SonameSkipUpdateTestCase(TestCase):
"dev-libs/B-1": {
"PROVIDES": "x86_32: libB.so.1",
},
+ "sys-libs/glibc-2.37-r7": {
+ "PROVIDES": "x86_32: libc.so.6",
+ },
+ "virtual/libc-1-r1": {"RDEPEND": "sys-libs/glibc"},
}
installed = {
@@ -34,6 +57,12 @@ class SonameSkipUpdateTestCase(TestCase):
"dev-libs/B-1": {
"PROVIDES": "x86_32: libB.so.1",
},
+ "sys-libs/glibc-2.37-r7": {
+ "PROVIDES": "x86_32: libc.so.6",
+ },
+ "virtual/libc-1-r1": {
+ "RDEPEND": "sys-libs/glibc",
+ },
}
world = ("app-misc/A",)
@@ -49,6 +78,7 @@ class SonameSkipUpdateTestCase(TestCase):
"--ignore-soname-deps": "y",
"--update": True,
"--usepkgonly": True,
+ "--backtrack": backtrack,
},
success=True,
mergelist=[
@@ -65,20 +95,33 @@ class SonameSkipUpdateTestCase(TestCase):
"--ignore-soname-deps": "n",
"--update": True,
"--usepkgonly": True,
+ "--backtrack": backtrack,
},
success=True,
mergelist=[],
),
)
- playground = ResolverPlayground(
- debug=False, binpkgs=binpkgs, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
index 39430ae41..8d1faa0be 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_reinstall.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameSlotConflictReinstallTestCase(TestCase):
def testSonameSlotConflictReinstall(self):
-
binpkgs = {
"app-misc/A-1": {
"PROVIDES": "x86_32: libA-1.so",
@@ -80,16 +83,28 @@ class SonameSlotConflictReinstallTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
def testSonameSlotConflictMassRebuild(self):
"""
@@ -124,21 +139,21 @@ class SonameSlotConflictReinstallTestCase(TestCase):
expected_mergelist = ["[binary]app-misc/A-1", "[binary]app-misc/B-2"]
for i in range(5):
- binpkgs["app-misc/C%sC-1" % i] = {
+ binpkgs[f"app-misc/C{i}C-1"] = {
"DEPEND": "app-misc/B",
"RDEPEND": "app-misc/B",
"REQUIRES": "x86_32: libB-2.so",
}
- installed["app-misc/C%sC-1" % i] = {
+ installed[f"app-misc/C{i}C-1"] = {
"DEPEND": "app-misc/B",
"RDEPEND": "app-misc/B",
"REQUIRES": "x86_32: libB-1.so",
}
for x in ("DEPEND", "RDEPEND"):
- binpkgs["app-misc/A-1"][x] += " app-misc/C%sC" % i
+ binpkgs["app-misc/A-1"][x] += f" app-misc/C{i}C"
- expected_mergelist.append("[binary]app-misc/C%sC-1" % i)
+ expected_mergelist.append(f"[binary]app-misc/C{i}C-1")
test_cases = (
ResolverPlaygroundTestCase(
@@ -159,16 +174,29 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world = []
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
def testSonameSlotConflictForgottenChild(self):
"""
@@ -242,16 +270,29 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world = ["app-misc/A"]
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
def testSonameSlotConflictMixedDependencies(self):
"""
@@ -316,13 +357,25 @@ class SonameSlotConflictReinstallTestCase(TestCase):
world = []
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
index 0541a185e..dcedfdc10 100644
--- a/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
+++ b/lib/portage/tests/resolver/soname/test_slot_conflict_update.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameSlotConflictUpdateTestCase(TestCase):
def testSonameSlotConflictUpdate(self):
-
binpkgs = {
"app-text/podofo-0.9.2": {
"RDEPEND": "dev-util/boost-build",
@@ -88,13 +91,26 @@ class SonameSlotConflictUpdateTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, installed=installed, world=world, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_soname_provided.py b/lib/portage/tests/resolver/soname/test_soname_provided.py
index 3cd9f1423..e266fff3e 100644
--- a/lib/portage/tests/resolver/soname/test_soname_provided.py
+++ b/lib/portage/tests/resolver/soname/test_soname_provided.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameProvidedTestCase(TestCase):
def testSonameProvided(self):
-
binpkgs = {
"app-misc/A-1": {
"EAPI": "5",
@@ -62,18 +65,28 @@ class SonameProvidedTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs,
- debug=False,
- profile=profile,
- installed=installed,
- world=world,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ debug=False,
+ profile=profile,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfiable.py b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
index a8d2e10db..1f47ca460 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfiable.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfiable.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameUnsatisfiableTestCase(TestCase):
def testSonameUnsatisfiable(self):
-
binpkgs = {
"app-misc/A-1": {
"EAPI": "5",
@@ -57,14 +60,27 @@ class SonameUnsatisfiableTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, debug=False, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ debug=False,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/soname/test_unsatisfied.py b/lib/portage/tests/resolver/soname/test_unsatisfied.py
index 955d5d75b..cf266c45a 100644
--- a/lib/portage/tests/resolver/soname/test_unsatisfied.py
+++ b/lib/portage/tests/resolver/soname/test_unsatisfied.py
@@ -1,16 +1,19 @@
# Copyright 2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SonameUnsatisfiedTestCase(TestCase):
def testSonameUnsatisfied(self):
-
binpkgs = {
"app-misc/A-1": {
"EAPI": "5",
@@ -70,14 +73,27 @@ class SonameUnsatisfiedTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, debug=False, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ debug=False,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_aggressive_backtrack_downgrade.py b/lib/portage/tests/resolver/test_aggressive_backtrack_downgrade.py
index 5300d1b44..c0f5cda04 100644
--- a/lib/portage/tests/resolver/test_aggressive_backtrack_downgrade.py
+++ b/lib/portage/tests/resolver/test_aggressive_backtrack_downgrade.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class AgressiveBacktrackDowngradeTestCase(TestCase):
def testAgressiveBacktrackDowngrade(self):
-
ebuilds = {
"www-client/firefox-69.0": {
"EAPI": "7",
diff --git a/lib/portage/tests/resolver/test_alternatives_gzip.py b/lib/portage/tests/resolver/test_alternatives_gzip.py
new file mode 100644
index 000000000..e763e8464
--- /dev/null
+++ b/lib/portage/tests/resolver/test_alternatives_gzip.py
@@ -0,0 +1,246 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class AlternativesGzipTestCase(TestCase):
+ def testAlternativesGzip(self):
+ """
+ Test bug 917259, where app-alternatives/gzip is upgraded before
+ its pigz RDEPEND is installed. This is triggered when
+ find_smallest_cycle selects a large cycle and the topological
+ sort produces poor results when leaf_nodes returns
+ app-alternatives/gzip as part of a large group of nodes.
+ This problem was solved by changing the topological sort to
+ increase ignore_priority in order to select a smaller number
+ of leaf nodes at a time.
+ """
+ ebuilds = {
+ "app-alternatives/gzip-1": {
+ "EAPI": "8",
+ "RDEPEND": "reference? ( >=app-arch/gzip-1.12-r3 ) pigz? ( >=app-arch/pigz-2.8[-symlink(-)] )",
+ "IUSE": "reference pigz",
+ "REQUIRED_USE": "^^ ( reference pigz )",
+ },
+ "app-alternatives/gzip-0": {
+ "EAPI": "8",
+ "RDEPEND": "reference? ( >=app-arch/gzip-1.12-r3 ) pigz? ( app-arch/pigz[-symlink(-)] )",
+ "IUSE": "reference pigz",
+ "REQUIRED_USE": "^^ ( reference pigz )",
+ },
+ "app-arch/gzip-1.13": {
+ "EAPI": "8",
+ "RDEPEND": "!app-arch/pigz[symlink(-)]",
+ "PDEPEND": "app-alternatives/gzip",
+ },
+ "app-arch/zstd-1.5.5": {
+ "EAPI": "8",
+ "DEPEND": ">=sys-libs/zlib-1.2.3",
+ "RDEPEND": ">=sys-libs/zlib-1.2.3",
+ },
+ "app-arch/pigz-2.8": {
+ "EAPI": "8",
+ "DEPEND": ">=sys-libs/zlib-1.2.3",
+ "RDEPEND": ">=sys-libs/zlib-1.2.3",
+ "PDEPEND": "app-alternatives/gzip",
+ },
+ "dev-lang/perl-5.36.1-r3": {
+ "EAPI": "8",
+ "BDEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:=",
+ "RDEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:=",
+ "DEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:=",
+ },
+ "dev-libs/libgcrypt-1.10.2": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ "DEPEND": "sys-libs/glibc",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "dev-libs/libpcre2-10.42-r1": {
+ "EAPI": "8",
+ "SLOT": "0/3",
+ "DEPEND": "sys-libs/zlib",
+ "RDEPEND": "sys-libs/zlib",
+ },
+ "sys-apps/locale-gen-2.23-r1": {
+ "EAPI": "8",
+ "RDEPEND": "app-alternatives/gzip",
+ },
+ "sys-apps/systemd-253.6": {
+ "EAPI": "8",
+ "SLOT": "0/2",
+ "BDEPEND": "dev-lang/perl",
+ "DEPEND": ">=sys-apps/util-linux-2.30:= >=dev-libs/libgcrypt-1.4.5:0= virtual/libcrypt:= dev-libs/libpcre2",
+ "RDEPEND": ">=sys-apps/util-linux-2.30:= >=dev-libs/libgcrypt-1.4.5:0= virtual/libcrypt:= dev-libs/libpcre2",
+ },
+ "sys-apps/util-linux-2.38.1-r2": {
+ "EAPI": "8",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ "DEPEND": "virtual/libcrypt:= sys-libs/zlib:= virtual/libudev:= dev-libs/libpcre2:=",
+ "RDEPEND": "sys-apps/systemd sys-libs/zlib:= virtual/libudev:= dev-libs/libpcre2:=",
+ },
+ "sys-devel/automake-1.16.5-r1": {
+ "EAPI": "8",
+ "BDEPEND": "app-alternatives/gzip",
+ "RDEPEND": ">=dev-lang/perl-5.6",
+ },
+ "sys-libs/glibc-2.37-r7": {
+ "EAPI": "8",
+ "BDEPEND": "sys-apps/locale-gen",
+ "IDEPEND": "sys-apps/locale-gen",
+ "RDEPEND": "dev-lang/perl",
+ },
+ "sys-libs/libxcrypt-4.4.36": {
+ "BDEPEND": "dev-lang/perl",
+ "DEPEND": "sys-libs/glibc",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "sys-libs/zlib-1.3-r1": {
+ "EAPI": "8",
+ "SLOT": "0/1",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ },
+ "sys-libs/zlib-1.2.13-r2": {
+ "EAPI": "8",
+ "SLOT": "0/1",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ },
+ "virtual/libcrypt-2-r1": {
+ "EAPI": "8",
+ "SLOT": "0/2",
+ "RDEPEND": "sys-libs/libxcrypt",
+ },
+ "virtual/libudev-251-r2": {
+ "EAPI": "8",
+ "SLOT": "0/1",
+ "RDEPEND": ">=sys-apps/systemd-251:0/2",
+ },
+ }
+
+ installed = {
+ "app-alternatives/gzip-0": {
+ "EAPI": "8",
+ "RDEPEND": "reference? ( >=app-arch/gzip-1.12-r3 ) pigz? ( app-arch/pigz[-symlink(-)] )",
+ "IUSE": "reference pigz",
+ "USE": "reference",
+ },
+ "app-arch/gzip-1.13": {
+ "EAPI": "8",
+ "RDEPEND": "!app-arch/pigz[symlink(-)]",
+ "PDEPEND": "app-alternatives/gzip",
+ },
+ "app-arch/zstd-1.5.5": {
+ "EAPI": "8",
+ "DEPEND": ">=sys-libs/zlib-1.2.3",
+ "RDEPEND": ">=sys-libs/zlib-1.2.3",
+ },
+ "dev-lang/perl-5.36.1-r3": {
+ "EAPI": "8",
+ "BDEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:0/2=",
+ "RDEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:0/2=",
+ "DEPEND": ">=sys-libs/zlib-1.2.12 virtual/libcrypt:0/2=",
+ },
+ "dev-libs/libgcrypt-1.10.2": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ "DEPEND": "sys-libs/glibc",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "dev-libs/libpcre2-10.42-r1": {
+ "EAPI": "8",
+ "SLOT": "0/3",
+ "DEPEND": "sys-libs/zlib",
+ "RDEPEND": "sys-libs/zlib",
+ },
+ "sys-apps/locale-gen-2.23-r1": {
+ "EAPI": "8",
+ "RDEPEND": "app-alternatives/gzip",
+ },
+ "sys-apps/systemd-253.6": {
+ "EAPI": "8",
+ "SLOT": "0/2",
+ "BDEPEND": "dev-lang/perl",
+ "DEPEND": ">=sys-apps/util-linux-2.30:0= >=dev-libs/libgcrypt-1.4.5:0= virtual/libcrypt:0/2= dev-libs/libpcre2",
+ "RDEPEND": ">=sys-apps/util-linux-2.30:0= >=dev-libs/libgcrypt-1.4.5:0= virtual/libcrypt:0/2= dev-libs/libpcre2",
+ },
+ "sys-apps/util-linux-2.38.1-r2": {
+ "EAPI": "8",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ "DEPEND": "virtual/libcrypt:0/2= sys-libs/zlib:0/1= virtual/libudev:0/1= dev-libs/libpcre2:0/3=",
+ "RDEPEND": "sys-apps/systemd sys-libs/zlib:0/1= virtual/libudev:0/1= dev-libs/libpcre2:0/3=",
+ },
+ "sys-devel/automake-1.16.5-r1": {
+ "EAPI": "8",
+ "BDEPEND": "app-alternatives/gzip",
+ "RDEPEND": ">=dev-lang/perl-5.6",
+ },
+ "sys-libs/glibc-2.37-r7": {
+ "EAPI": "8",
+ "BDEPEND": "sys-apps/locale-gen",
+ "IDEPEND": "sys-apps/locale-gen",
+ "RDEPEND": "dev-lang/perl",
+ },
+ "sys-libs/libxcrypt-4.4.36": {
+ "BDEPEND": "dev-lang/perl",
+ "DEPEND": "sys-libs/glibc",
+ "RDEPEND": "sys-libs/glibc",
+ },
+ "sys-libs/zlib-1.2.13-r2": {
+ "EAPI": "8",
+ "SLOT": "0/1",
+ "BDEPEND": ">=sys-devel/automake-1.16.5",
+ },
+ "virtual/libcrypt-2-r1": {
+ "EAPI": "8",
+ "SLOT": "0/2",
+ "RDEPEND": "sys-libs/libxcrypt",
+ },
+ "virtual/libudev-251-r2": {
+ "EAPI": "8",
+ "SLOT": "0/1",
+ "RDEPEND": ">=sys-apps/systemd-251:0/2",
+ },
+ }
+
+ world = [
+ "app-alternatives/gzip",
+ "app-arch/gzip",
+ "app-arch/zstd",
+ "sys-apps/systemd",
+ ]
+
+ user_config = {
+ "package.use": ("app-alternatives/gzip -reference pigz",),
+ }
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["app-alternatives/gzip", "sys-libs/zlib"],
+ success=True,
+ mergelist=[
+ "sys-libs/zlib-1.3-r1",
+ "app-arch/pigz-2.8",
+ "app-alternatives/gzip-1",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config=user_config,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
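The docstring of testAlternativesGzip describes the actual fix: raise ignore_priority stepwise so that only a small batch of leaf nodes is scheduled at a time instead of one large group. A toy illustration of that idea (this is not depgraph's real API; the edge priorities and the leaf_nodes signature are simplified for the sketch):

def leaf_nodes(graph, ignore_priority):
    """Nodes whose outgoing edges all have priority <= ignore_priority."""
    return [
        node
        for node, edges in graph.items()
        if all(priority <= ignore_priority for _dep, priority in edges)
    ]


# priority 0 = soft PDEPEND-like edge, 1 = hard runtime edge
graph = {
    "app-alternatives/gzip": [("app-arch/pigz", 1)],
    "app-arch/pigz": [("app-alternatives/gzip", 0)],  # PDEPEND back-edge
    "sys-libs/zlib": [],
}

# At the lowest threshold only truly free nodes qualify ...
print(leaf_nodes(graph, ignore_priority=-1))  # ['sys-libs/zlib']
# ... and raising the threshold admits app-arch/pigz next, so it merges
# before app-alternatives/gzip rather than in one big ambiguous batch.
print(leaf_nodes(graph, ignore_priority=0))
# ['app-arch/pigz', 'sys-libs/zlib']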
diff --git a/lib/portage/tests/resolver/test_autounmask.py b/lib/portage/tests/resolver/test_autounmask.py
index 4e98e5bfc..f3eb72fa1 100644
--- a/lib/portage/tests/resolver/test_autounmask.py
+++ b/lib/portage/tests/resolver/test_autounmask.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class AutounmaskTestCase(TestCase):
def testAutounmask(self):
-
ebuilds = {
# ebuilds to test use changes
"dev-libs/A-1": {"SLOT": 1, "DEPEND": "dev-libs/B[foo]", "EAPI": 2},
@@ -451,7 +450,6 @@ class AutounmaskTestCase(TestCase):
playground.cleanup()
def testAutounmaskForLicenses(self):
-
ebuilds = {
"dev-libs/A-1": {"LICENSE": "TEST"},
"dev-libs/B-1": {"LICENSE": "TEST", "IUSE": "foo", "KEYWORDS": "~x86"},
@@ -474,7 +472,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask-license": "y"},
success=False,
mergelist=["dev-libs/A-1"],
- license_changes={"dev-libs/A-1": set(["TEST"])},
+ license_changes={"dev-libs/A-1": {"TEST"}},
),
# Test that --autounmask enables --autounmask-license
ResolverPlaygroundTestCase(
@@ -482,7 +480,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-libs/A-1"],
- license_changes={"dev-libs/A-1": set(["TEST"])},
+ license_changes={"dev-libs/A-1": {"TEST"}},
),
# Test that --autounmask-license is not enabled by default
ResolverPlaygroundTestCase(
@@ -510,7 +508,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-libs/B-1", "dev-libs/C-1"],
- license_changes={"dev-libs/B-1": set(["TEST"])},
+ license_changes={"dev-libs/B-1": {"TEST"}},
unstable_keywords=["dev-libs/B-1"],
use_changes={"dev-libs/B-1": {"foo": True}},
),
@@ -521,10 +519,10 @@ class AutounmaskTestCase(TestCase):
success=False,
mergelist=["dev-libs/E-1", "dev-libs/F-1", "dev-libs/D-1"],
license_changes={
- "dev-libs/D-1": set(["TEST"]),
- "dev-libs/E-1": set(["TEST"]),
- "dev-libs/E-2": set(["TEST"]),
- "dev-libs/F-1": set(["TEST"]),
+ "dev-libs/D-1": {"TEST"},
+ "dev-libs/E-1": {"TEST"},
+ "dev-libs/E-2": {"TEST"},
+ "dev-libs/F-1": {"TEST"},
},
),
# Test license only for bug #420847
@@ -533,7 +531,7 @@ class AutounmaskTestCase(TestCase):
options={"--autounmask": True},
success=False,
mergelist=["dev-java/sun-jdk-1.6.0.31"],
- license_changes={"dev-java/sun-jdk-1.6.0.31": set(["TEST"])},
+ license_changes={"dev-java/sun-jdk-1.6.0.31": {"TEST"}},
),
)
@@ -546,7 +544,6 @@ class AutounmaskTestCase(TestCase):
playground.cleanup()
def testAutounmaskAndSets(self):
-
ebuilds = {
# ebuilds to test use changes
"dev-libs/A-1": {},
@@ -663,7 +660,6 @@ class AutounmaskTestCase(TestCase):
playground.cleanup()
def testAutounmask9999(self):
-
ebuilds = {
"dev-libs/A-1": {},
"dev-libs/A-2": {},
@@ -682,15 +678,15 @@ class AutounmaskTestCase(TestCase):
success=False,
options={"--autounmask": True},
mergelist=["dev-libs/A-2", "dev-libs/B-1"],
- needed_p_mask_changes=set(["dev-libs/A-2"]),
+ needed_p_mask_changes={"dev-libs/A-2"},
),
ResolverPlaygroundTestCase(
["dev-libs/C"],
success=False,
options={"--autounmask": True},
mergelist=["dev-libs/A-9999", "dev-libs/C-1"],
- unstable_keywords=set(["dev-libs/A-9999"]),
- needed_p_mask_changes=set(["dev-libs/A-9999"]),
+ unstable_keywords={"dev-libs/A-9999"},
+ needed_p_mask_changes={"dev-libs/A-9999"},
),
)
diff --git a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
index e2164f0b1..53dd18ab9 100644
--- a/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
+++ b/lib/portage/tests/resolver/test_autounmask_binpkg_use.py
@@ -1,11 +1,15 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class AutounmaskBinpkgUseTestCase(TestCase):
@@ -38,7 +42,7 @@ class AutounmaskBinpkgUseTestCase(TestCase):
test_cases = (
# Bug 619626: Test for unnecessary rebuild due
# to rejection of binary packages that would
- # be acceptable after appplication of autounmask
+ # be acceptable after application of autounmask
# USE changes.
ResolverPlaygroundTestCase(
["dev-libs/A"],
@@ -55,13 +59,26 @@ class AutounmaskBinpkgUseTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds, binpkgs=binpkgs, installed=installed, debug=False
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_autounmask_multilib_use.py b/lib/portage/tests/resolver/test_autounmask_multilib_use.py
index 6a76b0d9e..2d3da85c5 100644
--- a/lib/portage/tests/resolver/test_autounmask_multilib_use.py
+++ b/lib/portage/tests/resolver/test_autounmask_multilib_use.py
@@ -1,6 +1,8 @@
-# Copyright 2013 Gentoo Foundation
+# Copyright 2013, 2023 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import pytest
+
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
@@ -9,10 +11,8 @@ from portage.tests.resolver.ResolverPlayground import (
class AutounmaskMultilibUseTestCase(TestCase):
+ @pytest.mark.xfail()
def testAutounmaskMultilibUse(self):
-
- self.todo = True
-
ebuilds = {
"x11-proto/xextproto-7.2.1-r1": {
"EAPI": "5",
diff --git a/lib/portage/tests/resolver/test_autounmask_parent.py b/lib/portage/tests/resolver/test_autounmask_parent.py
index fefaaad7d..0f1d5423b 100644
--- a/lib/portage/tests/resolver/test_autounmask_parent.py
+++ b/lib/portage/tests/resolver/test_autounmask_parent.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class AutounmaskParentTestCase(TestCase):
def testAutounmaskParentUse(self):
-
ebuilds = {
"dev-libs/B-1": {
"EAPI": "5",
diff --git a/lib/portage/tests/resolver/test_autounmask_use_breakage.py b/lib/portage/tests/resolver/test_autounmask_use_breakage.py
index 02c08affa..bbdac22b6 100644
--- a/lib/portage/tests/resolver/test_autounmask_use_breakage.py
+++ b/lib/portage/tests/resolver/test_autounmask_use_breakage.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class AutounmaskUseBreakageTestCase(TestCase):
def testAutounmaskUseBreakage(self):
-
ebuilds = {
"app-misc/A-0": {
"EAPI": "5",
diff --git a/lib/portage/tests/resolver/test_autounmask_use_slot_conflict.py b/lib/portage/tests/resolver/test_autounmask_use_slot_conflict.py
index 5f3e03b56..a0b8c4ae7 100644
--- a/lib/portage/tests/resolver/test_autounmask_use_slot_conflict.py
+++ b/lib/portage/tests/resolver/test_autounmask_use_slot_conflict.py
@@ -1,6 +1,8 @@
-# Copyright 2017-2021 Gentoo Authors
+# Copyright 2017-2021, 2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import pytest
+
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
@@ -9,9 +11,8 @@ from portage.tests.resolver.ResolverPlayground import (
class AutounmaskUseSlotConflictTestCase(TestCase):
+ @pytest.mark.xfail()
def testAutounmaskUseSlotConflict(self):
- self.todo = True
-
ebuilds = {
"sci-libs/K-1": {"IUSE": "+foo", "EAPI": 1},
"sci-libs/L-1": {"DEPEND": "sci-libs/K[-foo]", "EAPI": 2},
diff --git a/lib/portage/tests/resolver/test_bdeps.py b/lib/portage/tests/resolver/test_bdeps.py
index ce1f8e0d5..35098457f 100644
--- a/lib/portage/tests/resolver/test_bdeps.py
+++ b/lib/portage/tests/resolver/test_bdeps.py
@@ -1,16 +1,19 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
+from portage.output import colorize
class BdepsTestCase(TestCase):
def testImageMagickUpdate(self):
-
ebuilds = {
"app-misc/A-1": {
"EAPI": "6",
@@ -183,18 +186,27 @@ class BdepsTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False,
- ebuilds=ebuilds,
- installed=installed,
- binpkgs=binpkgs,
- world=world,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- # Disable debug so that cleanup works.
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ ebuilds=ebuilds,
+ installed=installed,
+ binpkgs=binpkgs,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ # Disable debug so that cleanup works.
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
index 9a6a5417a..ed7f4e4f9 100644
--- a/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
+++ b/lib/portage/tests/resolver/test_binary_pkg_ebuild_visibility.py
@@ -1,16 +1,19 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class BinaryPkgEbuildVisibilityTestCase(TestCase):
def testBinaryPkgEbuildVisibility(self):
-
binpkgs = {
"app-misc/foo-3": {},
"app-misc/foo-2": {},
@@ -124,12 +127,24 @@ class BinaryPkgEbuildVisibilityTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, ebuilds=ebuilds, installed=installed, world=world
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_broken_deps.py b/lib/portage/tests/resolver/test_broken_deps.py
new file mode 100644
index 000000000..8ca7809d3
--- /dev/null
+++ b/lib/portage/tests/resolver/test_broken_deps.py
@@ -0,0 +1,76 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class BrokenDepsTestCase(TestCase):
+ def testBrokenDeps(self):
+ """
+ Test the _calc_depclean "dep_check" action which will eventually
+ be used to check for unsatisfied deps of installed packages
+ for bug 921333.
+ """
+ ebuilds = {
+ "dev-qt/qtcore-5.15.12": {
+ "EAPI": "8",
+ },
+ "dev-qt/qtcore-5.15.11-r1": {
+ "EAPI": "8",
+ },
+ "dev-qt/qtxmlpatterns-5.15.12": {
+ "EAPI": "8",
+ "DEPEND": "=dev-qt/qtcore-5.15.12*",
+ "RDEPEND": "=dev-qt/qtcore-5.15.12*",
+ },
+ "dev-qt/qtxmlpatterns-5.15.11": {
+ "EAPI": "8",
+ "DEPEND": "=dev-qt/qtcore-5.15.11*",
+ "RDEPEND": "=dev-qt/qtcore-5.15.11*",
+ },
+ "kde-frameworks/syntax-highlighting-5.113.0": {
+ "EAPI": "8",
+ "DEPEND": ">=dev-qt/qtxmlpatterns-5.15.9:5",
+ },
+ }
+ installed = {
+ "dev-qt/qtcore-5.15.12": {
+ "EAPI": "8",
+ },
+ "dev-qt/qtxmlpatterns-5.15.11": {
+ "EAPI": "8",
+ "DEPEND": "=dev-qt/qtcore-5.15.11*",
+ "RDEPEND": "=dev-qt/qtcore-5.15.11*",
+ },
+ "kde-frameworks/syntax-highlighting-5.113.0": {
+ "EAPI": "8",
+ "DEPEND": ">=dev-qt/qtxmlpatterns-5.15.9:5",
+ },
+ }
+
+ world = ("kde-frameworks/syntax-highlighting",)
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ [],
+ action="dep_check",
+ success=True,
+ unsatisfied_deps={
+ "dev-qt/qtxmlpatterns-5.15.11": {"=dev-qt/qtcore-5.15.11*"}
+ },
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, world=world
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
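The expected unsatisfied_deps result above encodes a plain version mismatch: the installed dev-qt/qtxmlpatterns-5.15.11 still requires a 5.15.11 qtcore, but only qtcore-5.15.12 remains installed. A toy check of the same condition (not the actual _calc_depclean dep_check code path):

from fnmatch import fnmatch

installed = {"dev-qt/qtcore": "5.15.12", "dev-qt/qtxmlpatterns": "5.15.11"}
requires = {"dev-qt/qtxmlpatterns": [("dev-qt/qtcore", "5.15.11*")]}

unsatisfied = {
    f"{pkg}-{installed[pkg]}": {f"={dep}-{pattern}"}
    for pkg, deps in requires.items()
    for dep, pattern in deps
    if not fnmatch(installed.get(dep, ""), pattern)
}
print(unsatisfied)
# {'dev-qt/qtxmlpatterns-5.15.11': {'=dev-qt/qtcore-5.15.11*'}}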
diff --git a/lib/portage/tests/resolver/test_changed_deps.py b/lib/portage/tests/resolver/test_changed_deps.py
index c3a0e2f87..a14f3d704 100644
--- a/lib/portage/tests/resolver/test_changed_deps.py
+++ b/lib/portage/tests/resolver/test_changed_deps.py
@@ -1,16 +1,19 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class ChangedDepsTestCase(TestCase):
def testChangedDeps(self):
-
ebuilds = {
"app-misc/A-0": {
"DEPEND": "app-misc/B",
@@ -103,16 +106,26 @@ class ChangedDepsTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- debug=False,
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_circular_choices.py b/lib/portage/tests/resolver/test_circular_choices.py
index 34a666d56..e25792341 100644
--- a/lib/portage/tests/resolver/test_circular_choices.py
+++ b/lib/portage/tests/resolver/test_circular_choices.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class CircularJsoncppCmakeBootstrapTestCase(TestCase):
def testCircularJsoncppCmakeBootstrapOrDeps(self):
-
ebuilds = {
"dev-libs/jsoncpp-1.9.2": {
"EAPI": "7",
@@ -80,7 +79,6 @@ class CircularJsoncppCmakeBootstrapTestCase(TestCase):
playground.cleanup()
def testVirtualCmakeBootstrapUseConditional(self):
-
ebuilds = {
"dev-libs/jsoncpp-1.9.2": {"EAPI": "7", "BDEPEND": "virtual/cmake"},
"dev-util/cmake-bootstrap-3.16.2": {
@@ -122,7 +120,6 @@ class CircularJsoncppCmakeBootstrapTestCase(TestCase):
class CircularChoicesTestCase(TestCase):
def testDirectCircularDependency(self):
-
ebuilds = {
"dev-lang/gwydion-dylan-2.4.0": {
"DEPEND": "|| ( dev-lang/gwydion-dylan dev-lang/gwydion-dylan-bin )"
@@ -153,7 +150,6 @@ class CircularChoicesTestCase(TestCase):
class VirtualCircularChoicesTestCase(TestCase):
def testDirectVirtualCircularDependency(self):
-
ebuilds = {
"dev-java/icedtea-6.1.10.3": {"SLOT": "6", "DEPEND": "virtual/jdk"},
"dev-java/icedtea6-bin-1.10.3": {},
@@ -187,7 +183,6 @@ class VirtualCircularChoicesTestCase(TestCase):
class CircularPypyExeTestCase(TestCase):
def testCircularPypyExe(self):
-
ebuilds = {
"dev-python/pypy-7.3.0": {
"EAPI": "7",
diff --git a/lib/portage/tests/resolver/test_circular_choices_rust.py b/lib/portage/tests/resolver/test_circular_choices_rust.py
index 50c6e24f7..0c479ce85 100644
--- a/lib/portage/tests/resolver/test_circular_choices_rust.py
+++ b/lib/portage/tests/resolver/test_circular_choices_rust.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class CircularRustTestCase(TestCase):
def testCircularPypyExe(self):
-
ebuilds = {
"dev-lang/rust-1.47.0-r2": {
"EAPI": "7",
@@ -43,7 +42,7 @@ class CircularRustTestCase(TestCase):
             # when a package would replace its own buildtime dependency.
# This needs to be tested with and without --update, since
# that affects package selection logic significantly,
- # expecially for packages given as arguments.
+ # especially for packages given as arguments.
ResolverPlaygroundTestCase(
["=dev-lang/rust-1.46*"],
mergelist=["dev-lang/rust-1.46.0"],
diff --git a/lib/portage/tests/resolver/test_circular_dependencies.py b/lib/portage/tests/resolver/test_circular_dependencies.py
index ac4c9a6f9..739721080 100644
--- a/lib/portage/tests/resolver/test_circular_dependencies.py
+++ b/lib/portage/tests/resolver/test_circular_dependencies.py
@@ -9,7 +9,6 @@ from portage.tests.resolver.ResolverPlayground import (
class CircularDependencyTestCase(TestCase):
-
# TODO:
# use config change by autounmask
# conflict on parent's parent
@@ -19,7 +18,6 @@ class CircularDependencyTestCase(TestCase):
# play with REQUIRED_USE
def testCircularDependency(self):
-
ebuilds = {
"dev-libs/Z-1": {
"DEPEND": "foo? ( !bar? ( dev-libs/Y ) )",
diff --git a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
index 3a5912606..4bdea55ac 100644
--- a/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
+++ b/lib/portage/tests/resolver/test_complete_if_new_subslot_without_revbump.py
@@ -1,16 +1,19 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class CompeteIfNewSubSlotWithoutRevBumpTestCase(TestCase):
def testCompeteIfNewSubSlotWithoutRevBump(self):
-
ebuilds = {
"media-libs/libpng-1.5.14": {"EAPI": "5", "SLOT": "0"},
"x11-libs/gdk-pixbuf-2.26.5": {
@@ -55,16 +58,25 @@ class CompeteIfNewSubSlotWithoutRevBumpTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_cross_dep_priority.py b/lib/portage/tests/resolver/test_cross_dep_priority.py
new file mode 100644
index 000000000..10f2eb36e
--- /dev/null
+++ b/lib/portage/tests/resolver/test_cross_dep_priority.py
@@ -0,0 +1,164 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import shutil
+import subprocess
+import os
+
+import portage
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class CrossDepPriorityTestCase(TestCase):
+ def testCrossDepPriority(self):
+ """
+ Test bug 919174, where cross-root merge to an empty root
+ failed due to circular dependencies.
+ """
+ ebuilds = {
+ "dev-lang/python-3.11.6": {
+ "EAPI": "8",
+ "DEPEND": "sys-apps/util-linux:=",
+ "RDEPEND": "sys-apps/util-linux:=",
+ },
+ "sys-apps/util-linux-2.38.1-r2": {
+ "EAPI": "8",
+ "DEPEND": "selinux? ( >=sys-libs/libselinux-2.2.2-r4 )",
+ "RDEPEND": "selinux? ( >=sys-libs/libselinux-2.2.2-r4 )",
+ "IUSE": "selinux",
+ },
+ "sys-libs/libselinux-3.5-r1": {
+ "EAPI": "8",
+ "DEPEND": "python? ( dev-lang/python )",
+ "RDEPEND": "python? ( dev-lang/python )",
+ "IUSE": "python",
+ },
+ "dev-libs/gmp-6.3.0": {
+ "EAPI": "8",
+ "SLOT": "0/10.4",
+ "DEPEND": "cxx? ( sys-devel/gcc )",
+ "RDEPEND": "cxx? ( sys-devel/gcc )",
+ "IUSE": "cxx",
+ },
+ "sys-devel/gcc-13.2.1_p20230826": {
+ "EAPI": "8",
+ "DEPEND": ">=dev-libs/gmp-4.3.2:0=",
+ "RDEPEND": ">=dev-libs/gmp-4.3.2:0=",
+ },
+ }
+
+ installed = {
+ "dev-lang/python-3.11.6": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": "sys-apps/util-linux:0/0=",
+ "RDEPEND": "sys-apps/util-linux:0/0=",
+ },
+ "sys-apps/util-linux-2.38.1-r2": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": "selinux? ( >=sys-libs/libselinux-2.2.2-r4 )",
+ "RDEPEND": "selinux? ( >=sys-libs/libselinux-2.2.2-r4 )",
+ "IUSE": "selinux",
+ "USE": "selinux",
+ },
+ "sys-libs/libselinux-3.5-r1": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": "python? ( dev-lang/python )",
+ "RDEPEND": "python? ( dev-lang/python )",
+ "IUSE": "python",
+ "USE": "python",
+ },
+ "dev-libs/gmp-6.3.0": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "SLOT": "0/10.4",
+ "DEPEND": "cxx? ( sys-devel/gcc )",
+ "RDEPEND": "cxx? ( sys-devel/gcc )",
+ "IUSE": "cxx",
+ "USE": "cxx",
+ },
+ "sys-devel/gcc-13.2.1_p20230826": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": ">=dev-libs/gmp-4.3.2:0/10.4=",
+ "RDEPEND": ">=dev-libs/gmp-4.3.2:0/10.4=",
+ },
+ }
+
+ world = [
+ "sys-apps/util-linux",
+ "sys-devel/gcc",
+ ]
+
+ user_config = {
+ "make.conf": ('USE="cxx python selinux"',),
+ }
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--emptytree": True},
+ success=True,
+ mergelist=[
+ "dev-libs/gmp-6.3.0",
+ "sys-devel/gcc-13.2.1_p20230826",
+ "sys-apps/util-linux-2.38.1-r2",
+ "dev-lang/python-3.11.6",
+ "sys-libs/libselinux-3.5-r1",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config=user_config,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+
+ # Since ResolverPlayground does not internally support
+ # cross-root, test with emerge.
+ cross_root = os.path.join(playground.settings["EPREFIX"], "cross_root")
+ world_file = os.path.join(
+ cross_root,
+ playground.settings["EPREFIX"].lstrip(os.sep),
+ portage.const.WORLD_FILE,
+ )
+ os.makedirs(os.path.dirname(world_file))
+ shutil.copy(
+ os.path.join(playground.settings["EPREFIX"], portage.const.WORLD_FILE),
+ world_file,
+ )
+ result = subprocess.run(
+ [
+ "emerge",
+ f"--root={cross_root}",
+ "--pretend",
+ "--verbose",
+ "--usepkgonly",
+ "--quickpkg-direct=y",
+ "@world",
+ ],
+ env=playground.settings.environ(),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+ output = result.stdout.decode(errors="replace")
+ try:
+ self.assertTrue("5 packages (5 new, 5 binaries)" in output)
+ self.assertEqual(result.returncode, os.EX_OK)
+ except Exception:
+ print(output)
+ raise
+ finally:
+ playground.cleanup()
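One subtle detail in testCrossDepPriority is the EPREFIX.lstrip(os.sep) in the world_file path: os.path.join discards every component before an absolute argument, so joining the raw EPREFIX would silently drop cross_root. A short demonstration with hypothetical paths:

import os

cross_root = "/tmp/cross_root"  # hypothetical paths for illustration
eprefix = "/tmp/eprefix"

print(os.path.join(cross_root, eprefix, "var/lib/portage/world"))
# '/tmp/eprefix/var/lib/portage/world' -- cross_root is lost

print(os.path.join(cross_root, eprefix.lstrip(os.sep), "var/lib/portage/world"))
# '/tmp/cross_root/tmp/eprefix/var/lib/portage/world'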
diff --git a/lib/portage/tests/resolver/test_depclean.py b/lib/portage/tests/resolver/test_depclean.py
index 8ff65551d..01dae5f37 100644
--- a/lib/portage/tests/resolver/test_depclean.py
+++ b/lib/portage/tests/resolver/test_depclean.py
@@ -172,7 +172,6 @@ class DepcleanInstalledKeywordMaskedSlotTestCase(TestCase):
class DepcleanWithExcludeTestCase(TestCase):
def testDepcleanWithExclude(self):
-
installed = {
"dev-libs/A-1": {},
"dev-libs/B-1": {"RDEPEND": "dev-libs/A"},
@@ -224,7 +223,6 @@ class DepcleanWithExcludeTestCase(TestCase):
class DepcleanWithExcludeAndSlotsTestCase(TestCase):
def testDepcleanWithExcludeAndSlots(self):
-
installed = {
"dev-libs/Z-1": {"SLOT": 1},
"dev-libs/Z-2": {"SLOT": 2},
@@ -267,7 +265,6 @@ class DepcleanWithExcludeAndSlotsTestCase(TestCase):
class DepcleanAndWildcardsTestCase(TestCase):
def testDepcleanAndWildcards(self):
-
installed = {
"dev-libs/A-1": {"RDEPEND": "dev-libs/B"},
"dev-libs/B-1": {},
diff --git a/lib/portage/tests/resolver/test_depclean_order.py b/lib/portage/tests/resolver/test_depclean_order.py
index a8c334304..36d60d44e 100644
--- a/lib/portage/tests/resolver/test_depclean_order.py
+++ b/lib/portage/tests/resolver/test_depclean_order.py
@@ -1,4 +1,4 @@
-# Copyright 2013 Gentoo Foundation
+# Copyright 2013-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SimpleDepcleanTestCase(TestCase):
def testSimpleDepclean(self):
-
ebuilds = {
"dev-libs/A-1": {
"EAPI": "5",
@@ -58,3 +57,118 @@ class SimpleDepcleanTestCase(TestCase):
self.assertEqual(test_case.test_success, True, test_case.fail_msg)
finally:
playground.cleanup()
+
+ def testIDEPENDDepclean(self):
+ """
+ Test for bug 916135, where a direct circular dependency caused
+ the unmerge order to fail to account for IDEPEND.
+ """
+
+ ebuilds = {
+ "dev-util/A-1": {},
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:=",
+ },
+ "dev-libs/C-1": {},
+ }
+
+ installed = {
+ "dev-util/A-1": {},
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:0/0=",
+ },
+ "dev-libs/C-1": {},
+ }
+
+ world = ("dev-libs/C",)
+
+ test_cases = (
+ # Remove dev-libs/B first because it IDEPENDs on dev-util/A
+ ResolverPlaygroundTestCase(
+ [],
+ options={"--depclean": True},
+ success=True,
+ ordered=True,
+ cleanlist=[
+ "dev-libs/B-1",
+ "dev-util/A-1",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, world=world
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
+
+ def testCircularDepclean(self):
+ """
+ Test for bug 916135, where an indirect circular dependency caused
+ the unmerge order to fail to account for IDEPEND.
+ """
+
+ ebuilds = {
+ "dev-util/A-1": {},
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ "SLOT": "1",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:=",
+ },
+ "dev-libs/B-2": {
+ "EAPI": "8",
+ "SLOT": "2",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:=",
+ },
+ "dev-libs/C-1": {},
+ }
+
+ installed = {
+ "dev-util/A-1": {},
+ "dev-libs/B-1": {
+ "EAPI": "8",
+ "SLOT": "1",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:2/2=",
+ },
+ "dev-libs/B-2": {
+ "EAPI": "8",
+ "SLOT": "2",
+ "IDEPEND": "dev-util/A",
+ "RDEPEND": "dev-libs/B:1/1=",
+ },
+ "dev-libs/C-1": {},
+ }
+
+ world = ("dev-libs/C",)
+
+ test_cases = (
+ # Remove dev-libs/B first because it IDEPENDs on dev-util/A
+ ResolverPlaygroundTestCase(
+ [],
+ options={"--depclean": True},
+ success=True,
+ ordered=True,
+ cleanlist=["dev-libs/B-2", "dev-libs/B-1", "dev-util/A-1"],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, world=world
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
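Both new depclean cases assert the same invariant: a package must be unmerged while the tools it IDEPENDs on are still installed, so consumers come out before their IDEPEND targets even when RDEPEND edges form a cycle. A toy ordering that honors that invariant (not emerge's actual unmerge scheduler):

def unmerge_order(cleanlist, idepend):
    remaining = set(cleanlist)
    order = []
    while remaining:
        for pkg in sorted(remaining):
            # pkg is safe to remove once no other remaining package still
            # needs it (via IDEPEND) for its own unmerge phase.
            if not any(
                pkg in idepend.get(other, ()) for other in remaining - {pkg}
            ):
                order.append(pkg)
                remaining.remove(pkg)
                break
        else:
            raise RuntimeError("unbreakable IDEPEND cycle")
    return order


idepend = {"dev-libs/B-1": {"dev-util/A-1"}, "dev-libs/B-2": {"dev-util/A-1"}}
print(unmerge_order(["dev-util/A-1", "dev-libs/B-1", "dev-libs/B-2"], idepend))
# ['dev-libs/B-1', 'dev-libs/B-2', 'dev-util/A-1']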
diff --git a/lib/portage/tests/resolver/test_depclean_slot_unavailable.py b/lib/portage/tests/resolver/test_depclean_slot_unavailable.py
index f61670cf6..ba4ea0a0e 100644
--- a/lib/portage/tests/resolver/test_depclean_slot_unavailable.py
+++ b/lib/portage/tests/resolver/test_depclean_slot_unavailable.py
@@ -46,7 +46,7 @@ class DepcleanUnavailableSlotTestCase(TestCase):
finally:
playground.cleanup()
- # Now make the newer version availale and verify that
+ # Now make the newer version available and verify that
# the lower version is depcleaned.
ebuilds.update(
{
diff --git a/lib/portage/tests/resolver/test_depth.py b/lib/portage/tests/resolver/test_depth.py
index d166147a8..ab5f8e7ec 100644
--- a/lib/portage/tests/resolver/test_depth.py
+++ b/lib/portage/tests/resolver/test_depth.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2020 Gentoo Authors
+# Copyright 2011-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class ResolverDepthTestCase(TestCase):
def testResolverDepth(self):
-
profile = {
"package.mask": (
# Mask an installed package (for which an update is
@@ -319,6 +318,12 @@ class ResolverDepthTestCase(TestCase):
"sys-fs/udev-164",
],
),
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--emptytree": True, "--exclude": ["dev-libs/B"]},
+ success=True,
+ mergelist=["dev-libs/C-2", "dev-libs/A-2"],
+ ),
)
playground = ResolverPlayground(
diff --git a/lib/portage/tests/resolver/test_disjunctive_depend_order.py b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
index e08a1d845..110259465 100644
--- a/lib/portage/tests/resolver/test_disjunctive_depend_order.py
+++ b/lib/portage/tests/resolver/test_disjunctive_depend_order.py
@@ -1,11 +1,15 @@
# Copyright 2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class DisjunctiveDependOrderTestCase(TestCase):
@@ -71,12 +75,25 @@ class DisjunctiveDependOrderTestCase(TestCase):
),
)
- playground = ResolverPlayground(debug=False, binpkgs=binpkgs, ebuilds=ebuilds)
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ debug=False,
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_eapi.py b/lib/portage/tests/resolver/test_eapi.py
index 908e12f90..32dcb4989 100644
--- a/lib/portage/tests/resolver/test_eapi.py
+++ b/lib/portage/tests/resolver/test_eapi.py
@@ -1,4 +1,4 @@
-# Copyright 2010-2020 Gentoo Authors
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class EAPITestCase(TestCase):
def testEAPI(self):
-
ebuilds = {
# EAPI-1: IUSE-defaults
"dev-libs/A-1.0": {"EAPI": 0, "IUSE": "+foo"},
@@ -186,3 +185,46 @@ class EAPITestCase(TestCase):
self.assertEqual(test_case.test_success, True, test_case.fail_msg)
finally:
playground.cleanup()
+
+ def testBdepend(self):
+ ebuilds = {
+ "dev-libs/A-1.0": {"EAPI": 7},
+ "dev-libs/B-1.0": {"EAPI": 7, "BDEPEND": "dev-libs/A"},
+ }
+
+ # Verify that BDEPEND is considered at all.
+ test_case = ResolverPlaygroundTestCase(
+ ["=dev-libs/B-1.0"],
+ success=True,
+ mergelist=["dev-libs/A-1.0", "dev-libs/B-1.0"],
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds)
+ try:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
+
+ def testIdepend(self):
+ ebuilds = {
+ "dev-libs/A-1.0": {"EAPI": 8},
+ "dev-libs/B-1.0": {"EAPI": 8, "IDEPEND": "dev-libs/A"},
+ }
+
+ test_cases = (
+ # Verify that IDEPEND is considered at all.
+ ResolverPlaygroundTestCase(
+ ["=dev-libs/B-1.0"],
+ success=True,
+ mergelist=["dev-libs/A-1.0", "dev-libs/B-1.0"],
+ ),
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds)
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
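The two new cases pin EAPI 7 and EAPI 8 deliberately: BDEPEND (build-host dependencies) is only recognized from EAPI 7, and IDEPEND (install-time dependencies, e.g. tools needed by pkg_postinst) from EAPI 8. A small sketch of that gating:

DEP_KEY_MIN_EAPI = {"DEPEND": 0, "RDEPEND": 0, "BDEPEND": 7, "IDEPEND": 8}


def dep_keys_for(eapi: int):
    return [key for key, min_eapi in DEP_KEY_MIN_EAPI.items() if eapi >= min_eapi]


print(dep_keys_for(7))  # ['DEPEND', 'RDEPEND', 'BDEPEND']
print(dep_keys_for(8))  # ['DEPEND', 'RDEPEND', 'BDEPEND', 'IDEPEND']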
diff --git a/lib/portage/tests/resolver/test_emptytree_reinstall_unsatisfiability.py b/lib/portage/tests/resolver/test_emptytree_reinstall_unsatisfiability.py
new file mode 100644
index 000000000..fcdc01d7f
--- /dev/null
+++ b/lib/portage/tests/resolver/test_emptytree_reinstall_unsatisfiability.py
@@ -0,0 +1,137 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class EmptytreeReinstallUnsatisfiabilityTestCase(TestCase):
+ def testEmptytreeReinstallUnsatisfiability(self):
+ """
+        Test that emerge fails and complains when an --emptytree reinstall
+        of the package dependency graph is unsatisfiable, even though the
+        already installed packages satisfy the dependency tree.
+
+ See bug #651018 where emerge silently skips package
+ reinstalls because of unsatisfied use flag requirements.
+ """
+ ebuilds = {
+ "dev-libs/A-1": {
+ "DEPEND": "dev-libs/B",
+ "RDEPEND": "dev-libs/B",
+ "EAPI": "2",
+ },
+ "dev-libs/B-1": {
+ "DEPEND": "dev-libs/C[foo]",
+ "RDEPEND": "dev-libs/C[foo]",
+ "EAPI": "2",
+ },
+ "dev-libs/C-1": {
+ "IUSE": "foo",
+ "EAPI": "2",
+ },
+ "dev-libs/X-1": {
+ "DEPEND": "dev-libs/Y[-baz]",
+ "RDEPEND": "dev-libs/Y[-baz]",
+ "EAPI": "2",
+ },
+ "dev-libs/Y-1": {
+ "IUSE": "baz",
+ "EAPI": "2",
+ },
+ "dev-libs/Z-1": {
+ "DEPEND": "dev-libs/W",
+ "RDEPEND": "dev-libs/W",
+ "EAPI": "2",
+ },
+ "dev-libs/W-1": {
+ "EAPI": "2",
+ },
+ }
+
+ installed = {
+ "dev-libs/A-1": {
+ "DEPEND": "dev-libs/B",
+ "RDEPEND": "dev-libs/B",
+ "EAPI": "2",
+ },
+ "dev-libs/B-1": {
+ "DEPEND": "dev-libs/C[foo]",
+ "RDEPEND": "dev-libs/C[foo]",
+ "EAPI": "2",
+ },
+ "dev-libs/C-1": {
+ "IUSE": "foo",
+ "USE": "foo",
+ "EAPI": "2",
+ },
+ "dev-libs/X-1": {
+ "DEPEND": "dev-libs/Y[-baz]",
+ "RDEPEND": "dev-libs/Y[-baz]",
+ "EAPI": "2",
+ },
+ "dev-libs/Y-1": {
+ "IUSE": "baz",
+ "USE": "-baz",
+ "EAPI": "2",
+ },
+ "dev-libs/Z-1": {
+ "DEPEND": "dev-libs/W",
+ "RDEPEND": "dev-libs/W",
+ "EAPI": "2",
+ },
+ "dev-libs/W-1": {
+ "EAPI": "2",
+ },
+ }
+
+ user_config = {
+ "package.use": ("dev-libs/Y baz",),
+ "package.mask": ("dev-libs/W",),
+ }
+
+ world = ["dev-libs/X"]
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ options={"--emptytree": True},
+ success=False,
+ mergelist=["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"],
+ use_changes={"dev-libs/C-1": {"foo": True}},
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ options={"--emptytree": True, "--exclude": ["dev-libs/C"]},
+ success=True,
+ mergelist=["dev-libs/B-1", "dev-libs/A-1"],
+ ),
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--emptytree": True},
+ success=False,
+ mergelist=["dev-libs/Y-1", "dev-libs/X-1"],
+ use_changes={"dev-libs/Y-1": {"baz": False}},
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/Z"],
+ options={"--emptytree": True},
+ success=False,
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config=user_config,
+ world=world,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
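The four cases above pin down the policy bug 651018 asks for: under --emptytree every node of the dependency graph must be re-merged from a satisfiable package, and --exclude shrinks the reinstall set instead of letting emerge silently keep the installed copy. A toy of that policy (not emerge's real resolver logic):

def emptytree_plan(graph_nodes, satisfiable, exclude=()):
    plan, failed = [], []
    for node in graph_nodes:
        if node in exclude:
            continue  # kept as installed, not reinstalled
        (plan if node in satisfiable else failed).append(node)
    return (True, plan) if not failed else (False, failed)


nodes = ["dev-libs/C-1", "dev-libs/B-1", "dev-libs/A-1"]
ok = {"dev-libs/B-1", "dev-libs/A-1"}  # dev-libs/C-1 needs a USE change
print(emptytree_plan(nodes, ok))
# (False, ['dev-libs/C-1'])
print(emptytree_plan(nodes, ok, exclude={"dev-libs/C-1"}))
# (True, ['dev-libs/B-1', 'dev-libs/A-1'])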
diff --git a/lib/portage/tests/resolver/test_imagemagick_graphicsmagick.py b/lib/portage/tests/resolver/test_imagemagick_graphicsmagick.py
index 86a9cf460..bcff03b44 100644
--- a/lib/portage/tests/resolver/test_imagemagick_graphicsmagick.py
+++ b/lib/portage/tests/resolver/test_imagemagick_graphicsmagick.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class ImageMagickGraphicsMagickTestCase(TestCase):
def testImageMagickUpdate(self):
-
ebuilds = {
"media-gfx/imagemagick-6.9.7.0": {
"EAPI": "6",
diff --git a/lib/portage/tests/resolver/test_installkernel.py b/lib/portage/tests/resolver/test_installkernel.py
new file mode 100644
index 000000000..5909b53aa
--- /dev/null
+++ b/lib/portage/tests/resolver/test_installkernel.py
@@ -0,0 +1,93 @@
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class InstallKernelTestCase(TestCase):
+ def testInstallKernel(self):
+ ebuilds = {
+ "sys-kernel/installkernel-systemd-boot-1": {
+ "EAPI": "8",
+ "RDEPEND": "!sys-kernel/installkernel-gentoo",
+ },
+ "sys-kernel/installkernel-gentoo-3": {
+ "EAPI": "8",
+ "RDEPEND": "!sys-kernel/installkernel-systemd-boot",
+ },
+ "sys-kernel/gentoo-kernel-5.15.23": {
+ "EAPI": "8",
+ "PDEPEND": ">=virtual/dist-kernel-5.15.23",
+ "RDEPEND": "|| ( sys-kernel/installkernel-gentoo sys-kernel/installkernel-systemd-boot )",
+ },
+ "sys-kernel/gentoo-kernel-bin-5.15.23": {
+ "EAPI": "8",
+ "PDEPEND": ">=virtual/dist-kernel-5.15.23",
+ "RDEPEND": "|| ( sys-kernel/installkernel-gentoo sys-kernel/installkernel-systemd-boot )",
+ },
+ "virtual/dist-kernel-5.15.23": {
+ "EAPI": "8",
+ "PDEPEND": "|| ( ~sys-kernel/gentoo-kernel-5.15.23 ~sys-kernel/gentoo-kernel-bin-5.15.23 )",
+ },
+ }
+
+ installed = {
+ "sys-kernel/installkernel-gentoo-3": {
+ "EAPI": "8",
+ "RDEPEND": "!sys-kernel/installkernel-systemd-boot",
+ },
+ }
+
+ test_cases = (
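+            # Merging installkernel-systemd-boot must trigger an uninstall of
+            # the installed installkernel-gentoo due to the mutual blockers.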
+ ResolverPlaygroundTestCase(
+ [
+ "sys-kernel/installkernel-systemd-boot",
+ ],
+ ambiguous_merge_order=True,
+ success=True,
+ mergelist=[
+ "sys-kernel/installkernel-systemd-boot-1",
+ "[uninstall]sys-kernel/installkernel-gentoo-3",
+ (
+ "!sys-kernel/installkernel-gentoo",
+ "!sys-kernel/installkernel-systemd-boot",
+ ),
+ ],
+ ),
+ # Test bug 833014, where the calculation failed unless
+        # --update and --deep were specified.
+ ResolverPlaygroundTestCase(
+ [
+ "sys-kernel/installkernel-systemd-boot",
+ "sys-kernel/gentoo-kernel-bin",
+ ],
+ ambiguous_merge_order=True,
+ success=True,
+ mergelist=[
+ "virtual/dist-kernel-5.15.23",
+ "sys-kernel/installkernel-systemd-boot-1",
+ "sys-kernel/gentoo-kernel-bin-5.15.23",
+ "[uninstall]sys-kernel/installkernel-gentoo-3",
+ (
+ "!sys-kernel/installkernel-systemd-boot",
+ "!sys-kernel/installkernel-gentoo",
+ ),
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ debug=False, ebuilds=ebuilds, installed=installed
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_merge_order.py b/lib/portage/tests/resolver/test_merge_order.py
index 940eb3bbb..a6c236a20 100644
--- a/lib/portage/tests/resolver/test_merge_order.py
+++ b/lib/portage/tests/resolver/test_merge_order.py
@@ -1,4 +1,4 @@
-# Copyright 2011-2020 Gentoo Authors
+# Copyright 2011-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -382,7 +382,9 @@ class MergeOrderTestCase(TestCase):
ambiguous_merge_order=True,
# The following merge order assertion reflects optimal order for
# a circular relationship which is DEPEND in one direction and
- # RDEPEND in the other.
+ # RDEPEND in the other. However, it is not respected because
+ # it would result in a temporarily broken RDEPEND, so we instead
+ # rely on satisfied installed build-time dependencies.
merge_order_assertions=(
("app-misc/circ-buildtime-a-1", "app-misc/circ-buildtime-c-1"),
),
@@ -691,8 +693,8 @@ class MergeOrderTestCase(TestCase):
"app-misc/circ-post-runtime-b-1",
"app-misc/some-app-b-1",
"app-misc/circ-runtime-a-1",
- "app-misc/circ-runtime-b-1",
"app-misc/circ-runtime-c-1",
+ "app-misc/circ-runtime-b-1",
"app-misc/some-app-a-1",
"app-misc/blocker-buildtime-unbuilt-a-1",
"[uninstall]app-misc/installed-blocker-a-1",
@@ -702,12 +704,12 @@ class MergeOrderTestCase(TestCase):
"x11-base/xorg-server-1.14.1",
"media-libs/mesa-9.1.3",
"app-misc/circ-buildtime-a-1",
- "app-misc/circ-buildtime-b-1",
"app-misc/circ-buildtime-c-1",
+ "app-misc/circ-buildtime-b-1",
"app-misc/some-app-c-1",
"app-misc/circ-satisfied-a-1",
- "app-misc/circ-satisfied-b-1",
"app-misc/circ-satisfied-c-1",
+ "app-misc/circ-satisfied-b-1",
],
),
)
diff --git a/lib/portage/tests/resolver/test_multirepo.py b/lib/portage/tests/resolver/test_multirepo.py
index 3a8eaa3d6..3ab665e6f 100644
--- a/lib/portage/tests/resolver/test_multirepo.py
+++ b/lib/portage/tests/resolver/test_multirepo.py
@@ -1,11 +1,15 @@
# Copyright 2010-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class MultirepoTestCase(TestCase):
@@ -236,15 +240,28 @@ class MultirepoTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds, binpkgs=binpkgs, installed=installed, sets=sets
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ sets=sets,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
def testMultirepoUserConfig(self):
ebuilds = {
@@ -382,12 +399,20 @@ class MultirepoTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds, installed=installed, user_config=user_config
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, user_config=user_config
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_onlydeps_ideps.py b/lib/portage/tests/resolver/test_onlydeps_ideps.py
new file mode 100644
index 000000000..e34ee2aed
--- /dev/null
+++ b/lib/portage/tests/resolver/test_onlydeps_ideps.py
@@ -0,0 +1,174 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class OnlydepsIdepsTestCase(TestCase):
+ def testOnlydepsIdepsEAPI7(self):
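+        # IDEPEND was introduced in EAPI 8, so for these EAPI 7 ebuilds the
+        # dev-libs/E install-time dependency must always be ignored.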
+ ebuilds = {
+ "dev-libs/A-1": {
+ "EAPI": "7",
+ "DEPEND": "dev-libs/B",
+ "RDEPEND": "dev-libs/C",
+ "PDEPEND": "dev-libs/D",
+ "IDEPEND": "dev-libs/E",
+ },
+ "dev-libs/B-1": {},
+ "dev-libs/C-1": {},
+ "dev-libs/D-1": {},
+ "dev-libs/E-1": {},
+ }
+ ebuilds["dev-libs/F-1"] = ebuilds["dev-libs/A-1"]
+ installed = {}
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={"--onlydeps": True, "--onlydeps-with-rdeps": "y"},
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1")],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={"--onlydeps": True, "--onlydeps-with-rdeps": "n"},
+ mergelist=["dev-libs/B-1"],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "y",
+ },
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1")],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": True,
+ },
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1")],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "n",
+ },
+ mergelist=["dev-libs/B-1"],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, debug=False
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
+
+ def testOnlydepsIdepsEAPI8(self):
+ ebuilds = {
+ "dev-libs/A-1": {
+ "EAPI": "8",
+ "DEPEND": "dev-libs/B",
+ "RDEPEND": "dev-libs/C",
+ "PDEPEND": "dev-libs/D",
+ "IDEPEND": "dev-libs/E",
+ },
+ "dev-libs/B-1": {},
+ "dev-libs/C-1": {},
+ "dev-libs/D-1": {},
+ "dev-libs/E-1": {},
+ }
+ ebuilds["dev-libs/F-1"] = ebuilds["dev-libs/A-1"]
+ installed = {}
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={"--onlydeps": True, "--onlydeps-with-rdeps": "y"},
+ ambiguous_merge_order=True,
+ mergelist=[
+ ("dev-libs/B-1", "dev-libs/C-1", "dev-libs/D-1", "dev-libs/E-1")
+ ],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={"--onlydeps": True, "--onlydeps-with-rdeps": "n"},
+ mergelist=["dev-libs/B-1"],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "y",
+ },
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1", "dev-libs/E-1")],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": True,
+ },
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1", "dev-libs/E-1")],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/F"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "n",
+ },
+ mergelist=["dev-libs/B-1"],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, debug=False
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_onlydeps_minimal.py b/lib/portage/tests/resolver/test_onlydeps_minimal.py
index 0dec40c2e..372c0e5aa 100644
--- a/lib/portage/tests/resolver/test_onlydeps_minimal.py
+++ b/lib/portage/tests/resolver/test_onlydeps_minimal.py
@@ -15,10 +15,12 @@ class OnlydepsMinimalTestCase(TestCase):
"DEPEND": "dev-libs/B",
"RDEPEND": "dev-libs/C",
"PDEPEND": "dev-libs/D",
+ "IDEPEND": "dev-libs/E",
},
"dev-libs/B-1": {},
"dev-libs/C-1": {},
"dev-libs/D-1": {},
+ "dev-libs/E-1": {},
}
installed = {}
@@ -38,6 +40,29 @@ class OnlydepsMinimalTestCase(TestCase):
options={"--onlydeps": True, "--onlydeps-with-rdeps": "n"},
mergelist=["dev-libs/B-1"],
),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "y",
+ },
+ ambiguous_merge_order=True,
+ mergelist=[("dev-libs/B-1",)],
+ ),
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ all_permutations=True,
+ success=True,
+ options={
+ "--onlydeps": True,
+ "--onlydeps-with-rdeps": "n",
+ "--onlydeps-with-ideps": "n",
+ },
+ mergelist=["dev-libs/B-1"],
+ ),
)
playground = ResolverPlayground(
diff --git a/lib/portage/tests/resolver/test_or_choices.py b/lib/portage/tests/resolver/test_or_choices.py
index 922670517..4258a1ab5 100644
--- a/lib/portage/tests/resolver/test_or_choices.py
+++ b/lib/portage/tests/resolver/test_or_choices.py
@@ -1,8 +1,10 @@
-# Copyright 2013-2020 Gentoo Authors
+# Copyright 2013-2020, 2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import itertools
+import pytest
+
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
@@ -75,7 +77,6 @@ class OrChoicesTestCase(TestCase):
playground.cleanup()
def testInitiallyUnsatisfied(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
@@ -113,7 +114,6 @@ class OrChoicesTestCase(TestCase):
playground.cleanup()
def testUseMask(self):
-
profile = {
"use.mask": ("abi_ppc_32",),
}
@@ -151,7 +151,6 @@ class OrChoicesTestCase(TestCase):
playground.cleanup()
def testConflictMissedUpdate(self):
-
ebuilds = {
"dev-lang/ocaml-4.02.1": {
"EAPI": "5",
@@ -640,6 +639,7 @@ class OrChoicesTestCase(TestCase):
class OrChoicesLibpostprocTestCase(TestCase):
+ @pytest.mark.xfail(reason="Irrelevant blocker conflict")
def testOrChoicesLibpostproc(self):
# This test case is expected to fail after the fix for bug 706278,
# since the "undesirable" slot upgrade which triggers a blocker conflict
@@ -649,8 +649,6 @@ class OrChoicesLibpostprocTestCase(TestCase):
# compatible with any available media-video/ffmpeg slot. In order to
# solve this test case, some fancy backtracking (like for bug 382421)
# will be required.
- self.todo = True
-
ebuilds = {
"media-video/ffmpeg-0.10": {"EAPI": "5", "SLOT": "0.10"},
"media-video/ffmpeg-1.2.2": {"EAPI": "5", "SLOT": "0"},
diff --git a/lib/portage/tests/resolver/test_package_tracker.py b/lib/portage/tests/resolver/test_package_tracker.py
index c343589f9..2789af5d9 100644
--- a/lib/portage/tests/resolver/test_package_tracker.py
+++ b/lib/portage/tests/resolver/test_package_tracker.py
@@ -9,7 +9,6 @@ from _emerge.resolver.package_tracker import PackageTracker, PackageTrackerDbapi
class PackageTrackerTestCase(TestCase):
-
FakePackage = collections.namedtuple(
"FakePackage", ["root", "cp", "cpv", "slot", "slot_atom", "version", "repo"]
)
@@ -20,7 +19,7 @@ class PackageTrackerTestCase(TestCase):
def make_pkg(self, root, atom, repo="test_repo"):
atom = Atom(atom)
- slot_atom = Atom("%s:%s" % (atom.cp, atom.slot))
+ slot_atom = Atom(f"{atom.cp}:{atom.slot}")
slot = atom.slot
return self.FakePackage(
diff --git a/lib/portage/tests/resolver/test_perl_rebuild_bug.py b/lib/portage/tests/resolver/test_perl_rebuild_bug.py
new file mode 100644
index 000000000..7e376f396
--- /dev/null
+++ b/lib/portage/tests/resolver/test_perl_rebuild_bug.py
@@ -0,0 +1,123 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class PerlRebuildBugTestCase(TestCase):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def testPerlRebuildBug(self):
+ """
+ The infamous Perl rebuild bug.
+
+ A non-slotted build-time dependency cycle is created by:
+ dev-lang/perl -> sys-libs/zlib -> sys-devel/automake -> dev-lang/perl
+ Everything else depends on this cycle.
+
+        A bug in solving for the smallest cycle causes the slot in the
+        RDEPEND of dev-perl/Locale-gettext to be ignored, so all dependencies
+        other than perl's >=sys-libs/zlib-1.2.12 are satisfied by
+        already-installed packages. Once satisfied packages are ignored,
+        dev-perl/Locale-gettext and sys-devel/automake become leaves of the
+        depgraph and are merged first, so dev-perl/Locale-gettext is built
+        before the slot upgrade of dev-lang/perl.
+ """
+ ebuilds = {
+ "dev-lang/perl-5.36.0-r2": {
+ "EAPI": "5",
+ "DEPEND": ">=sys-libs/zlib-1.2.12",
+ "RDEPEND": ">=sys-libs/zlib-1.2.12",
+ "SLOT": "0/5.36",
+ },
+ "dev-perl/Locale-gettext-1.70.0-r1": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl",
+ "RDEPEND": "dev-lang/perl:=",
+ },
+ "sys-apps/help2man-1.49.3": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl dev-perl/Locale-gettext",
+ "RDEPEND": "dev-lang/perl dev-perl/Locale-gettext",
+ },
+ "sys-devel/automake-1.16.5": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl",
+ "RDEPEND": "dev-lang/perl",
+ },
+ "sys-libs/zlib-1.2.13-r1": {
+ "EAPI": "5",
+ "DEPEND": "sys-devel/automake",
+ },
+ }
+
+ installed = {
+ "dev-lang/perl-5.34.0-r3": {
+ "EAPI": "5",
+ "DEPEND": "sys-libs/zlib",
+ "RDEPEND": "sys-libs/zlib",
+ "SLOT": "0/5.34",
+ },
+ "dev-perl/Locale-gettext-1.70.0-r1": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl",
+ "RDEPEND": "dev-lang/perl:0/5.34=",
+ },
+ "sys-apps/help2man-1.48.5": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl dev-perl/Locale-gettext",
+ "RDEPEND": "dev-lang/perl dev-perl/Locale-gettext",
+ },
+ "sys-devel/automake-1.16.4": {
+ "EAPI": "5",
+ "DEPEND": "dev-lang/perl",
+ "RDEPEND": "dev-lang/perl",
+ },
+ "sys-libs/zlib-1.2.11-r4": {
+ "EAPI": "5",
+ "DEPEND": "sys-devel/automake",
+ },
+ }
+
+ world = ["sys-apps/help2man"]
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--deep": True, "--update": True, "--verbose": True},
+ success=True,
+ ambiguous_merge_order=True,
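+                # dev-lang/perl must be upgraded before dev-perl/Locale-gettext
+                # is rebuilt against perl's new 0/5.36 subslot.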
+ merge_order_assertions=(
+ (
+ "dev-lang/perl-5.36.0-r2",
+ "dev-perl/Locale-gettext-1.70.0-r1",
+ ),
+ ),
+ mergelist=[
+ "sys-devel/automake-1.16.5",
+ "sys-libs/zlib-1.2.13-r1",
+ "dev-lang/perl-5.36.0-r2",
+ "dev-perl/Locale-gettext-1.70.0-r1",
+ "sys-apps/help2man-1.49.3",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_profile_default_eapi.py b/lib/portage/tests/resolver/test_profile_default_eapi.py
index 45b8c41a6..e3dfec47e 100644
--- a/lib/portage/tests/resolver/test_profile_default_eapi.py
+++ b/lib/portage/tests/resolver/test_profile_default_eapi.py
@@ -1,7 +1,6 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-import io
from portage import os, _encodings
from portage.const import USER_CONFIG_PATH
@@ -13,7 +12,6 @@ from portage.util import ensure_dirs
class ProfileDefaultEAPITestCase(TestCase):
def testProfileDefaultEAPI(self):
-
repo_configs = {
"test_repo": {
"layout.conf": (
@@ -108,13 +106,13 @@ class ProfileDefaultEAPITestCase(TestCase):
for prof_path, data in profile_info:
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
diff --git a/lib/portage/tests/resolver/test_profile_package_set.py b/lib/portage/tests/resolver/test_profile_package_set.py
index 6b64dcdae..5f184f08d 100644
--- a/lib/portage/tests/resolver/test_profile_package_set.py
+++ b/lib/portage/tests/resolver/test_profile_package_set.py
@@ -1,7 +1,6 @@
# Copyright 2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
-import io
from portage import os, _encodings
from portage.tests import TestCase
@@ -14,7 +13,6 @@ from portage.util import ensure_dirs
class ProfilePackageSetTestCase(TestCase):
def testProfilePackageSet(self):
-
repo_configs = {
"test_repo": {
"layout.conf": ("profile-formats = profile-set",),
@@ -98,13 +96,13 @@ class ProfilePackageSetTestCase(TestCase):
prof_path = os.path.join(profile_root, p)
ensure_dirs(prof_path)
for k, v in data.items():
- with io.open(
+ with open(
os.path.join(prof_path, k),
mode="w",
encoding=_encodings["repo.content"],
) as f:
for line in v:
- f.write("%s\n" % line)
+ f.write(f"{line}\n")
# The config must be reloaded in order to account
# for the above profile customizations.
diff --git a/lib/portage/tests/resolver/test_rebuild_ghostscript.py b/lib/portage/tests/resolver/test_rebuild_ghostscript.py
new file mode 100644
index 000000000..8ee3349d6
--- /dev/null
+++ b/lib/portage/tests/resolver/test_rebuild_ghostscript.py
@@ -0,0 +1,162 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class RebuildGhostscriptTestCase(TestCase):
+ def testRebuildGhostscript(self):
+ """
+ Test bug 703676, where app-text/libspectre was rebuilt before
+ its app-text/ghostscript-gpl DEPEND.
+ """
+ ebuilds = {
+ "app-text/ghostscript-gpl-10.01.1": {
+ "EAPI": "8",
+ "DEPEND": "gtk? ( x11-libs/gtk+:3 )",
+ "RDEPEND": "gtk? ( x11-libs/gtk+:3 )",
+ "IUSE": "gtk",
+ },
+ "app-text/ghostscript-gpl-10.01.2": {
+ "EAPI": "8",
+ "SLOT": "0/10.01",
+ "DEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "RDEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "IUSE": "dbus gtk",
+ },
+ "app-text/libspectre-0.2.11": {
+ "EAPI": "8",
+ "DEPEND": ">=app-text/ghostscript-gpl-9.53.0:=",
+ "RDEPEND": ">=app-text/ghostscript-gpl-9.53.0:=",
+ },
+ "app-text/libspectre-0.2.12": {
+ "EAPI": "8",
+ "DEPEND": ">=app-text/ghostscript-gpl-9.53.0:=",
+ "RDEPEND": ">=app-text/ghostscript-gpl-9.53.0:=",
+ },
+ "net-dns/avahi-0.8-r7": {
+ "EAPI": "8",
+ "DEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "RDEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "IUSE": "dbus gtk",
+ },
+ "net-print/cups-2.4.6": {
+ "EAPI": "8",
+ "DEPEND": "zeroconf? ( >=net-dns/avahi-0.6.31-r2[dbus] )",
+ "RDEPEND": "zeroconf? ( >=net-dns/avahi-0.6.31-r2[dbus] )",
+ "IUSE": "zeroconf",
+ },
+ "sys-apps/dbus-1.15.6": {
+ "EAPI": "8",
+ },
+ "x11-libs/gtk+-3.24.38": {
+ "EAPI": "8",
+ "SLOT": "3",
+ "DEPEND": "cups? ( >=net-print/cups-2.0 )",
+ "RDEPEND": "cups? ( >=net-print/cups-2.0 )",
+ "IUSE": "cups",
+ },
+ "x11-libs/goffice-0.10.55": {
+ "EAPI": "8",
+ "DEPEND": ">=app-text/libspectre-0.2.6:=",
+ "RDEPEND": ">=app-text/libspectre-0.2.6:=",
+ },
+ }
+
+ installed = {
+ "app-text/ghostscript-gpl-10.01.1": {
+ "EAPI": "8",
+ "DEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "RDEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "IUSE": "dbus gtk",
+ "USE": "dbus gtk",
+ },
+ "app-text/libspectre-0.2.11": {
+ "EAPI": "8",
+ "DEPEND": ">=app-text/ghostscript-gpl-9.53.0:0/10.01=",
+ "RDEPEND": ">=app-text/ghostscript-gpl-9.53.0:0/10.01=",
+ },
+ "net-dns/avahi-0.8-r7": {
+ "EAPI": "8",
+ "DEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "RDEPEND": "dbus? ( sys-apps/dbus ) gtk? ( x11-libs/gtk+:3 )",
+ "IUSE": "dbus gtk",
+ "USE": "dbus gtk",
+ },
+ "net-print/cups-2.4.6": {
+ "EAPI": "8",
+ "DEPEND": "zeroconf? ( >=net-dns/avahi-0.6.31-r2[dbus] )",
+ "RDEPEND": "zeroconf? ( >=net-dns/avahi-0.6.31-r2[dbus] )",
+ "IUSE": "zeroconf",
+ "USE": "zeroconf",
+ },
+ "sys-apps/dbus-1.15.6": {
+ "EAPI": "8",
+ },
+ "x11-libs/gtk+-3.24.38": {
+ "EAPI": "8",
+ "SLOT": "3",
+ "DEPEND": "cups? ( >=net-print/cups-2.0 )",
+ "RDEPEND": "cups? ( >=net-print/cups-2.0 )",
+ "IUSE": "cups",
+ "USE": "cups",
+ },
+ "x11-libs/goffice-0.10.55": {
+ "EAPI": "8",
+ "DEPEND": ">=app-text/libspectre-0.2.6:0=",
+ "RDEPEND": ">=app-text/libspectre-0.2.6:0=",
+ },
+ }
+
+ world = [
+ "x11-libs/goffice",
+ ]
+
+ user_config = {
+ "make.conf": ('USE="cups dbus gtk zeroconf"',),
+ }
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--deep": True, "--update": True},
+ success=True,
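+                # The ghostscript-gpl upgrade changes its subslot, so it must
+                # be merged before the libspectre rebuild that it triggers.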
+ mergelist=[
+ "app-text/ghostscript-gpl-10.01.2",
+ "app-text/libspectre-0.2.12",
+ ],
+ ),
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--emptytree": True},
+ success=True,
+ mergelist=[
+ "sys-apps/dbus-1.15.6",
+ "x11-libs/gtk+-3.24.38",
+ "app-text/ghostscript-gpl-10.01.2",
+ "net-dns/avahi-0.8-r7",
+ "net-print/cups-2.4.6",
+ "app-text/libspectre-0.2.12",
+ "x11-libs/goffice-0.10.55",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config=user_config,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
index bb1ce0e87..8cde38a9e 100644
--- a/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_regular_slot_change_without_revbump.py
@@ -1,16 +1,19 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class RegularSlotChangeWithoutRevBumpTestCase(TestCase):
def testRegularSlotChangeWithoutRevBumpTestCase(self):
-
ebuilds = {
"dev-libs/boost-1.52.0": {"SLOT": "0"},
"app-office/libreoffice-4.0.0.2": {
@@ -42,16 +45,26 @@ class RegularSlotChangeWithoutRevBumpTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_required_use.py b/lib/portage/tests/resolver/test_required_use.py
index 06ab64e9f..79dd0de82 100644
--- a/lib/portage/tests/resolver/test_required_use.py
+++ b/lib/portage/tests/resolver/test_required_use.py
@@ -301,7 +301,6 @@ class RequiredUSETestCase(TestCase):
playground.cleanup()
def testRequiredUseOrDeps(self):
-
ebuilds = {
"dev-libs/A-1": {
"IUSE": "+x +y",
diff --git a/lib/portage/tests/resolver/test_runtime_cycle_merge_order.py b/lib/portage/tests/resolver/test_runtime_cycle_merge_order.py
index 305757ff4..ed329aa09 100644
--- a/lib/portage/tests/resolver/test_runtime_cycle_merge_order.py
+++ b/lib/portage/tests/resolver/test_runtime_cycle_merge_order.py
@@ -1,4 +1,4 @@
-# Copyright 2016 Gentoo Foundation
+# Copyright 2016-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
@@ -56,8 +56,11 @@ class RuntimeCycleMergeOrderTestCase(TestCase):
("app-misc/leaf-b-1", "app-misc/leaf-d-1", "app-misc/leaf-e-1"),
("app-misc/branch-d-1", "app-misc/branch-e-1"),
"app-misc/runtime-c-1",
- ("app-misc/runtime-cycle-c-1", "app-misc/branch-c-1"),
- "app-misc/branch-b-1",
+ (
+ "app-misc/branch-b-1",
+ "app-misc/runtime-cycle-c-1",
+ "app-misc/branch-c-1",
+ ),
("app-misc/runtime-cycle-b-1", "app-misc/plugin-b-1"),
"app-misc/plugins-consumer-1",
],
@@ -71,3 +74,143 @@
self.assertEqual(test_case.test_success, True, test_case.fail_msg)
finally:
playground.cleanup()
+
+ def testBuildtimeRuntimeCycleMergeOrder(self):
+ installed = {
+ "dev-util/cmake-3.26.5-r2": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": "net-misc/curl",
+ "RDEPEND": "net-misc/curl",
+ },
+ "net-dns/c-ares-1.21.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "RDEPEND": "net-dns/c-ares",
+ },
+ "net-misc/curl-8.4.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "DEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ "RDEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ },
+ "net-dns/c-ares-1.21.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ },
+ }
+
+ binpkgs = {
+ "net-misc/curl-8.4.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "IUSE": "http2",
+ "USE": "http2",
+ "DEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ "RDEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ },
+ "dev-util/cmake-3.26.5-r2": {
+ "EAPI": "8",
+ "KEYWORDS": "x86",
+ "DEPEND": "net-misc/curl",
+ "RDEPEND": "net-misc/curl",
+ },
+ }
+
+ ebuilds = {
+ "dev-util/cmake-3.26.5-r2": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "DEPEND": "net-misc/curl",
+ "RDEPEND": "net-misc/curl",
+ },
+ "dev-util/cmake-3.27.8": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "~x86",
+ "DEPEND": "net-misc/curl",
+ "RDEPEND": "net-misc/curl",
+ },
+ "net-dns/c-ares-1.21.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ },
+ "net-libs/nghttp2-1.57.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "BDEPEND": "dev-util/cmake",
+ "RDEPEND": "net-dns/c-ares",
+ },
+ "net-misc/curl-8.4.0": {
+ "EAPI": "8",
+ "SLOT": "0",
+ "KEYWORDS": "x86",
+ "IUSE": "http2",
+ "DEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ "RDEPEND": """
+ net-dns/c-ares
+ http2? ( net-libs/nghttp2:= )
+ """,
+ },
+ }
+
+ world = ("dev-util/cmake",)
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={
+ "--verbose": True,
+ "--update": True,
+ "--deep": True,
+ "--newuse": True,
+ "--usepkg": True,
+ },
+ success=True,
+ # It would also work to punt the dev-util/cmake upgrade
+ # until the end, given it's already installed.
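+                # The binary net-misc/curl package satisfies --usepkg and
+                # avoids a build-time dependency on nghttp2 within the cycle.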
+ mergelist=[
+ "dev-util/cmake-3.27.8",
+ "net-libs/nghttp2-1.57.0",
+ "[binary]net-misc/curl-8.4.0",
+ ],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ world=world,
+ installed=installed,
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ debug=False,
+ user_config={
+ "make.conf": (
+                    'ACCEPT_KEYWORDS="~x86"',
+                    'USE="http2"',
+ ),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_simple.py b/lib/portage/tests/resolver/test_simple.py
index 9bcf446be..3c75d9707 100644
--- a/lib/portage/tests/resolver/test_simple.py
+++ b/lib/portage/tests/resolver/test_simple.py
@@ -1,11 +1,15 @@
# Copyright 2010-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SimpleResolverTestCase(TestCase):
@@ -75,12 +79,23 @@ class SimpleResolverTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds, binpkgs=binpkgs, installed=installed
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_abi.py b/lib/portage/tests/resolver/test_slot_abi.py
index afab001df..d71f47e30 100644
--- a/lib/portage/tests/resolver/test_slot_abi.py
+++ b/lib/portage/tests/resolver/test_slot_abi.py
@@ -1,16 +1,20 @@
# Copyright 2012-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotAbiTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotAbiTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
@@ -118,19 +122,29 @@ class SlotAbiTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
def testWholeSlot(self):
ebuilds = {
@@ -243,19 +257,29 @@ class SlotAbiTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
def testWholeSlotConditional(self):
ebuilds = {
@@ -447,16 +471,26 @@ class SlotAbiTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_abi_downgrade.py b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
index badd31b2d..896215f29 100644
--- a/lib/portage/tests/resolver/test_slot_abi_downgrade.py
+++ b/lib/portage/tests/resolver/test_slot_abi_downgrade.py
@@ -1,16 +1,20 @@
# Copyright 2012-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotAbiDowngradeTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotAbiDowngradeTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
@@ -96,19 +100,29 @@ class SlotAbiDowngradeTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
def testWholeSlotSubSlotMix(self):
ebuilds = {
@@ -197,16 +211,26 @@ class SlotAbiDowngradeTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_change_without_revbump.py b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
index 3dbd4f75e..8b2f043a7 100644
--- a/lib/portage/tests/resolver/test_slot_change_without_revbump.py
+++ b/lib/portage/tests/resolver/test_slot_change_without_revbump.py
@@ -1,16 +1,19 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotChangeWithoutRevBumpTestCase(TestCase):
def testSlotChangeWithoutRevBump(self):
-
ebuilds = {
"app-arch/libarchive-3.1.1": {"EAPI": "5", "SLOT": "0/13"},
"app-arch/libarchive-3.0.4-r1": {"EAPI": "5", "SLOT": "0"},
@@ -71,16 +74,25 @@ class SlotChangeWithoutRevBumpTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_collisions.py b/lib/portage/tests/resolver/test_slot_collisions.py
index dd8b98243..47fa8dc1d 100644
--- a/lib/portage/tests/resolver/test_slot_collisions.py
+++ b/lib/portage/tests/resolver/test_slot_collisions.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotCollisionTestCase(TestCase):
def testSlotCollision(self):
-
ebuilds = {
"dev-libs/A-1": {"PDEPEND": "foo? ( dev-libs/B )", "IUSE": "foo"},
"dev-libs/B-1": {"IUSE": "foo"},
diff --git a/lib/portage/tests/resolver/test_slot_conflict_blocked_prune.py b/lib/portage/tests/resolver/test_slot_conflict_blocked_prune.py
new file mode 100644
index 000000000..14e98cd00
--- /dev/null
+++ b/lib/portage/tests/resolver/test_slot_conflict_blocked_prune.py
@@ -0,0 +1,78 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class SlotConflictBlockedPruneTestCase(TestCase):
+ def testSlotConflictBlockedPrune(self):
+ """
+        Bug 622270: a package (openssl here) is scheduled for a downgrade
+        because unstable keywords are not accepted. A dependent package
+        (rustup here) cannot be rebuilt because it lacks the needed keyword,
+        so the downgrade is cancelled, yet other dependents (xwayland here)
+        are still scheduled for rebuild. This should not happen; such
+        rebuilds should be pruned.
+ """
+ ebuilds = {
+ "x11-base/xwayland-23.1.1": {
+ "EAPI": "5",
+ "RDEPEND": "dev-libs/openssl:=",
+ },
+ "dev-util/rustup-1.25.2": {
+ "EAPI": "5",
+ "RDEPEND": "dev-libs/openssl:0=",
+ "KEYWORDS": "~x86",
+ },
+ "dev-libs/openssl-1.1.1u": {
+ "EAPI": "5",
+ "SLOT": "0/1.1",
+ },
+ "dev-libs/openssl-3.1.1": {
+ "EAPI": "5",
+ "SLOT": "0/3",
+ "KEYWORDS": "~x86",
+ },
+ }
+
+ installed = {
+ "x11-base/xwayland-23.1.1": {
+ "EAPI": "5",
+ "RDEPEND": "dev-libs/openssl:0/3=",
+ },
+ "dev-util/rustup-1.25.2": {
+ "EAPI": "5",
+ "RDEPEND": "dev-libs/openssl:0/3=",
+ "KEYWORDS": "~x86",
+ },
+ "dev-libs/openssl-3.1.1": {
+ "EAPI": "5",
+ "SLOT": "0/3",
+ "KEYWORDS": "~x86",
+ },
+ }
+
+ world = ["x11-base/xwayland", "dev-util/rustup"]
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@world"],
+ options={"--deep": True, "--update": True, "--verbose": True},
+ success=True,
+ mergelist=["x11-base/xwayland-23.1.1"],
+ ),
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, world=world
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_conflict_force_rebuild.py b/lib/portage/tests/resolver/test_slot_conflict_force_rebuild.py
index 52683f18e..bc3bae35e 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_force_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_force_rebuild.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotConflictForceRebuildTestCase(TestCase):
def testSlotConflictForceRebuild(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
diff --git a/lib/portage/tests/resolver/test_slot_conflict_rebuild.py b/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
index a3327d9fa..d1f3b9a88 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_rebuild.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotConflictRebuildTestCase(TestCase):
def testSlotConflictRebuild(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
@@ -100,21 +99,21 @@ class SlotConflictRebuildTestCase(TestCase):
expected_mergelist = ["app-misc/A-1", "app-misc/B-2"]
for i in range(5):
- ebuilds["app-misc/C%sC-1" % i] = {
+ ebuilds[f"app-misc/C{i}C-1"] = {
"EAPI": "5",
"DEPEND": "app-misc/B:=",
"RDEPEND": "app-misc/B:=",
}
- installed["app-misc/C%sC-1" % i] = {
+ installed[f"app-misc/C{i}C-1"] = {
"EAPI": "5",
"DEPEND": "app-misc/B:1/1=",
"RDEPEND": "app-misc/B:1/1=",
}
for x in ("DEPEND", "RDEPEND"):
- ebuilds["app-misc/A-1"][x] += " app-misc/C%sC" % i
+ ebuilds["app-misc/A-1"][x] += f" app-misc/C{i}C"
- expected_mergelist.append("app-misc/C%sC-1" % i)
+ expected_mergelist.append(f"app-misc/C{i}C-1")
test_cases = (
ResolverPlaygroundTestCase(
@@ -392,7 +391,6 @@ class SlotConflictRebuildTestCase(TestCase):
playground.cleanup()
def testSlotConflictRebuildGolang(self):
-
ebuilds = {
"dev-lang/go-1.14.7": {"EAPI": "7", "SLOT": "0/1.14.7"},
"dev-lang/go-1.15": {"EAPI": "7", "SLOT": "0/1.15"},
diff --git a/lib/portage/tests/resolver/test_slot_conflict_unsatisfied_deep_deps.py b/lib/portage/tests/resolver/test_slot_conflict_unsatisfied_deep_deps.py
index b392aaded..233a9bbf4 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_unsatisfied_deep_deps.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_unsatisfied_deep_deps.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotConflictUnsatisfiedDeepDepsTestCase(TestCase):
def testSlotConflictUnsatisfiedDeepDeps(self):
-
ebuilds = {
"dev-libs/A-1": {},
"dev-libs/A-2": {"KEYWORDS": "~x86"},
diff --git a/lib/portage/tests/resolver/test_slot_conflict_update.py b/lib/portage/tests/resolver/test_slot_conflict_update.py
index 79df55250..4bdd40416 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_update.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_update.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotConflictUpdateTestCase(TestCase):
def testSlotConflictUpdate(self):
-
ebuilds = {
"app-text/podofo-0.9.2": {"EAPI": "5", "RDEPEND": "dev-util/boost-build"},
"dev-cpp/libcmis-0.3.1": {"EAPI": "5", "RDEPEND": "dev-libs/boost:="},
diff --git a/lib/portage/tests/resolver/test_slot_conflict_update_virt.py b/lib/portage/tests/resolver/test_slot_conflict_update_virt.py
index 85d9db471..c88fccddb 100644
--- a/lib/portage/tests/resolver/test_slot_conflict_update_virt.py
+++ b/lib/portage/tests/resolver/test_slot_conflict_update_virt.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotConflictUpdateVirtTestCase(TestCase):
def testSlotConflictUpdateVirt(self):
-
ebuilds = {
"dev-db/mysql-connector-c-6.1.11-r2": {"EAPI": "7", "SLOT": "0/18"},
"dev-db/mysql-connector-c-8.0.17-r3": {"EAPI": "7", "SLOT": "0/21"},
diff --git a/lib/portage/tests/resolver/test_slot_operator_autounmask.py b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
index 7d6c3af26..88071fe25 100644
--- a/lib/portage/tests/resolver/test_slot_operator_autounmask.py
+++ b/lib/portage/tests/resolver/test_slot_operator_autounmask.py
@@ -1,16 +1,20 @@
# Copyright 2013-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotOperatorAutoUnmaskTestCase(TestCase):
def __init__(self, *args, **kwargs):
- super(SlotOperatorAutoUnmaskTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSubSlot(self):
ebuilds = {
@@ -109,16 +113,25 @@ class SlotOperatorAutoUnmaskTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_operator_bdeps.py b/lib/portage/tests/resolver/test_slot_operator_bdeps.py
index 0b1f426b7..13d08072d 100644
--- a/lib/portage/tests/resolver/test_slot_operator_bdeps.py
+++ b/lib/portage/tests/resolver/test_slot_operator_bdeps.py
@@ -1,11 +1,13 @@
# Copyright 2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotOperatorBdependTestCase(TestCase):
@@ -88,20 +90,28 @@ class SlotOperatorBdependTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
def testSlotOperatorBdependAfterBreakage(self):
"""
@@ -182,17 +192,25 @@ class SlotOperatorBdependTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.debug = False
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_operator_complete_graph.py b/lib/portage/tests/resolver/test_slot_operator_complete_graph.py
index f0b3050d9..498891cd7 100644
--- a/lib/portage/tests/resolver/test_slot_operator_complete_graph.py
+++ b/lib/portage/tests/resolver/test_slot_operator_complete_graph.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorCompleteGraphTestCase(TestCase):
def testSlotOperatorCompleteGraph(self):
-
ebuilds = {
"app-misc/meta-pkg-2": {
"EAPI": "6",
diff --git a/lib/portage/tests/resolver/test_slot_operator_exclusive_slots.py b/lib/portage/tests/resolver/test_slot_operator_exclusive_slots.py
index f3b7cca12..923e0f788 100644
--- a/lib/portage/tests/resolver/test_slot_operator_exclusive_slots.py
+++ b/lib/portage/tests/resolver/test_slot_operator_exclusive_slots.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorExclusiveSlotsTestCase(TestCase):
def testSlotOperatorExclusiveSlots(self):
-
ebuilds = {
"media-libs/mesa-17.0.1": {
"EAPI": "6",
diff --git a/lib/portage/tests/resolver/test_slot_operator_missed_update.py b/lib/portage/tests/resolver/test_slot_operator_missed_update.py
index 945fa8ea7..a45ebccc9 100644
--- a/lib/portage/tests/resolver/test_slot_operator_missed_update.py
+++ b/lib/portage/tests/resolver/test_slot_operator_missed_update.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class BacktrackMissedUpdateTestCase(TestCase):
def testBacktrackMissedUpdateTestCase(self):
-
ebuilds = {
"dev-lang/python-2.7.18-r2": {
"EAPI": "7",
diff --git a/lib/portage/tests/resolver/test_slot_operator_rebuild.py b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
index 9e2325afb..b0a8641ae 100644
--- a/lib/portage/tests/resolver/test_slot_operator_rebuild.py
+++ b/lib/portage/tests/resolver/test_slot_operator_rebuild.py
@@ -1,16 +1,19 @@
# Copyright 2014-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotOperatorRebuildTestCase(TestCase):
def testSlotOperatorRebuild(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
@@ -72,16 +75,25 @@ class SlotOperatorRebuildTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ world=world,
+ debug=False,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_operator_required_use.py b/lib/portage/tests/resolver/test_slot_operator_required_use.py
index b4fa200ee..6ccd00bc0 100644
--- a/lib/portage/tests/resolver/test_slot_operator_required_use.py
+++ b/lib/portage/tests/resolver/test_slot_operator_required_use.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorRequiredUseTestCase(TestCase):
def testSlotOperatorRequiredUse(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
diff --git a/lib/portage/tests/resolver/test_slot_operator_reverse_deps.py b/lib/portage/tests/resolver/test_slot_operator_reverse_deps.py
index cfa1a1334..012fd7692 100644
--- a/lib/portage/tests/resolver/test_slot_operator_reverse_deps.py
+++ b/lib/portage/tests/resolver/test_slot_operator_reverse_deps.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorReverseDepsTestCase(TestCase):
def testSlotOperatorReverseDeps(self):
-
ebuilds = {
"media-libs/mesa-11.2.2": {
"EAPI": "6",
diff --git a/lib/portage/tests/resolver/test_slot_operator_runtime_pkg_mask.py b/lib/portage/tests/resolver/test_slot_operator_runtime_pkg_mask.py
index 07cc56318..5af3ff3f4 100644
--- a/lib/portage/tests/resolver/test_slot_operator_runtime_pkg_mask.py
+++ b/lib/portage/tests/resolver/test_slot_operator_runtime_pkg_mask.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorRuntimePkgMaskTestCase(TestCase):
def testSlotOperatorRuntimePkgMask(self):
-
ebuilds = {
"app-misc/meta-pkg-2": {
"EAPI": "6",
diff --git a/lib/portage/tests/resolver/test_slot_operator_unsatisfied.py b/lib/portage/tests/resolver/test_slot_operator_unsatisfied.py
index ea1f09099..d7a4b7e06 100644
--- a/lib/portage/tests/resolver/test_slot_operator_unsatisfied.py
+++ b/lib/portage/tests/resolver/test_slot_operator_unsatisfied.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorUnsatisfiedTestCase(TestCase):
def testSlotOperatorUnsatisfied(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2"},
diff --git a/lib/portage/tests/resolver/test_slot_operator_unsolved.py b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
index d43e8367d..2933be7f8 100644
--- a/lib/portage/tests/resolver/test_slot_operator_unsolved.py
+++ b/lib/portage/tests/resolver/test_slot_operator_unsolved.py
@@ -1,11 +1,15 @@
# Copyright 2013 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class SlotOperatorUnsolvedTestCase(TestCase):
@@ -15,7 +19,7 @@ class SlotOperatorUnsolvedTestCase(TestCase):
"""
def __init__(self, *args, **kwargs):
- super(SlotOperatorUnsolvedTestCase, self).__init__(*args, **kwargs)
+ super().__init__(*args, **kwargs)
def testSlotOperatorUnsolved(self):
ebuilds = {
@@ -71,17 +75,25 @@ class SlotOperatorUnsolvedTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- user_config=user_config,
- world=world,
- debug=False,
- )
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
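+ # Copy user_config so the BINPKG_FORMAT entry appended below does not
+ # leak into later subTest iterations.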
+ _user_config = user_config.copy()
+ _user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=_user_config,
+ world=world,
+ debug=False,
+ )
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_slot_operator_update_probe_parent_downgrade.py b/lib/portage/tests/resolver/test_slot_operator_update_probe_parent_downgrade.py
index 26da01a68..975814df3 100644
--- a/lib/portage/tests/resolver/test_slot_operator_update_probe_parent_downgrade.py
+++ b/lib/portage/tests/resolver/test_slot_operator_update_probe_parent_downgrade.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SlotOperatorUpdateProbeParentDowngradeTestCase(TestCase):
def testSlotOperatorUpdateProbeParentDowngrade(self):
-
ebuilds = {
"net-nds/openldap-2.4.40-r3": {
"EAPI": "5",
diff --git a/lib/portage/tests/resolver/test_solve_non_slot_operator_slot_conflicts.py b/lib/portage/tests/resolver/test_solve_non_slot_operator_slot_conflicts.py
index 8307e49ab..9bd548d19 100644
--- a/lib/portage/tests/resolver/test_solve_non_slot_operator_slot_conflicts.py
+++ b/lib/portage/tests/resolver/test_solve_non_slot_operator_slot_conflicts.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class SolveNonSlotOperatorSlotConflictsTestCase(TestCase):
def testSolveNonSlotOperatorSlotConflicts(self):
-
ebuilds = {
"app-misc/A-1": {"EAPI": "5", "SLOT": "0/1", "PDEPEND": "app-misc/B"},
"app-misc/A-2": {"EAPI": "5", "SLOT": "0/2", "PDEPEND": "app-misc/B"},
diff --git a/lib/portage/tests/resolver/test_unnecessary_slot_upgrade.py b/lib/portage/tests/resolver/test_unnecessary_slot_upgrade.py
new file mode 100644
index 000000000..a89ebdb67
--- /dev/null
+++ b/lib/portage/tests/resolver/test_unnecessary_slot_upgrade.py
@@ -0,0 +1,51 @@
+# Copyright 2021 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class UnnecessarySlotUpgradeTestCase(TestCase):
+ def testUnnecessarySlotUpgrade(self):
+ ebuilds = {
+ "app-misc/a-1": {
+ "EAPI": "8",
+ "RDEPEND": "|| ( dev-lang/python:3.10 dev-lang/python:3.9 ) || ( dev-lang/python:3.10 dev-lang/python:3.9 )",
+ },
+ "dev-lang/python-3.9": {"SLOT": "3.9"},
+ "dev-lang/python-3.10": {"SLOT": "3.10"},
+ }
+
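+ # Only the 3.9 slot is installed; the duplicated || dependency above
+ # reproduces the conditions of bug 828136.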
+ installed = {
+ "dev-lang/python-3.9": {"SLOT": "3.9"},
+ }
+
+ test_cases = (
+ # Test bug 828136, where an unnecessary python slot upgrade
+ # was triggered.
+ ResolverPlaygroundTestCase(
+ [
+ "app-misc/a",
+ ],
+ success=True,
+ mergelist=(
+ "dev-lang/python-3.10",
+ "app-misc/a-1",
+ ),
+ ),
+ )
+
+ playground = ResolverPlayground(
+ debug=False, ebuilds=ebuilds, installed=installed
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.debug = False
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_update.py b/lib/portage/tests/resolver/test_update.py
new file mode 100644
index 000000000..e67013f9f
--- /dev/null
+++ b/lib/portage/tests/resolver/test_update.py
@@ -0,0 +1,106 @@
+# Copyright 2022-2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class UpdateIfInstalledTestCase(TestCase):
+ def testUpdateIfInstalledEmerge(self):
+ installed = {
+ "dev-lang/ghc-4": {},
+ "dev-libs/larryware-3": {},
+ "dev-libs/larryware-ng-3": {},
+ "virtual/libc-1": {},
+ }
+
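+ # The ebuild tree mirrors the installed packages, plus newer versions
+ # and one never-installed package (app-misc/cowsay).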
+ ebuilds = installed.copy()
+ ebuilds.update(
+ {
+ "app-misc/cowsay-10": {},
+ "dev-lang/ghc-5": {},
+ "dev-libs/larryware-4": {},
+ "dev-libs/larryware-ng-4": {"RDEPEND": ">=net-libs/moo-1"},
+ "net-libs/moo-1": {},
+ }
+ )
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds, installed=installed, debug=False
+ )
+
+ test_cases = (
+ # We should only try to update ghc when passed ghc and
+ # --update-if-installed. We don't want larryware to appear here,
+ # despite it being eligible for an upgrade otherwise with --update.
+ ResolverPlaygroundTestCase(
+ ["dev-lang/ghc"],
+ mergelist=["dev-lang/ghc-5"],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ # Only try to upgrade ghc even if passed another candidate,
+ # as there's no upgrade due for it. We don't want to
+ # reinstall virtual/libc for the sake of it.
+ ResolverPlaygroundTestCase(
+ ["dev-lang/ghc", "virtual/libc"],
+ mergelist=["dev-lang/ghc-5"],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ # Try to upgrade a package with no new versions available.
+ # This is just checking we still have --update semantics.
+ ResolverPlaygroundTestCase(
+ ["virtual/libc"],
+ mergelist=[],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ # If a new package is given, we want to do nothing.
+ ResolverPlaygroundTestCase(
+ ["app-misc/cowsay"],
+ mergelist=[],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ # If a new package (app-misc/cowsay) is given combined with
+ # a package eligible for an upgrade (dev-libs/larryware),
+ # upgrade just the latter.
+ ResolverPlaygroundTestCase(
+ ["app-misc/cowsay", "dev-libs/larryware"],
+ mergelist=["dev-libs/larryware-4"],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ # Make sure that we can still pull in upgrades as
+ # dependencies (net-libs/moo) of the package we requested
+ # (dev-libs/larryware-ng).
+ ResolverPlaygroundTestCase(
+ ["dev-libs/larryware-ng"],
+ mergelist=["net-libs/moo-1", "dev-libs/larryware-ng-4"],
+ options={
+ "--update-if-installed": True,
+ },
+ success=True,
+ ),
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_use_dep_defaults.py b/lib/portage/tests/resolver/test_use_dep_defaults.py
index 67907c3b9..845bb02b1 100644
--- a/lib/portage/tests/resolver/test_use_dep_defaults.py
+++ b/lib/portage/tests/resolver/test_use_dep_defaults.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class UseDepDefaultsTestCase(TestCase):
def testUseDepDefaults(self):
-
ebuilds = {
"dev-libs/A-1": {
"DEPEND": "dev-libs/B[foo]",
diff --git a/lib/portage/tests/resolver/test_useflags.py b/lib/portage/tests/resolver/test_useflags.py
index 6d74807e5..142a31c7f 100644
--- a/lib/portage/tests/resolver/test_useflags.py
+++ b/lib/portage/tests/resolver/test_useflags.py
@@ -1,11 +1,15 @@
-# Copyright 2014 Gentoo Foundation
+# Copyright 2014-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
+
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import (
ResolverPlayground,
ResolverPlaygroundTestCase,
)
+from portage.output import colorize
class UseFlagsTestCase(TestCase):
@@ -118,15 +122,199 @@ class UseFlagsTestCase(TestCase):
),
)
- playground = ResolverPlayground(
- ebuilds=ebuilds,
- binpkgs=binpkgs,
- installed=installed,
- user_config=user_config,
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] = (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=user_config,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
+
+ def testBlockerBinpkgRespectUse(self):
+ """
+ Test for bug #916336 where we tried to check properties of a blocker
+ object which isn't a Package to be merged.
+ """
+
+ ebuilds = {
+ "dev-libs/A-1": {
+ "EAPI": "7",
+ "IUSE": "abi_x86_32",
+ "RDEPEND": "dev-libs/B",
+ },
+ "dev-libs/B-1": {
+ "EAPI": "7",
+ "IUSE": "abi_x86_32",
+ },
+ "dev-libs/A-2": {
+ "EAPI": "7",
+ "IUSE": "abi_x86_32",
+ "RDEPEND": "!<dev-libs/B-2",
+ },
+ "dev-libs/B-2": {
+ "EAPI": "7",
+ "IUSE": "abi_x86_32",
+ },
+ }
+ installed = {
+ "dev-libs/A-1": {
+ "IUSE": "abi_x86_32",
+ "USE": "abi_x86_32",
+ },
+ "dev-libs/B-1": {
+ "IUSE": "abi_x86_32",
+ "USE": "abi_x86_32",
+ },
+ }
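+ # Mirror every ebuild as a binary package so --usepkg has a candidate
+ # for each version.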
+ binpkgs = ebuilds.copy()
+
+ user_config = {
+ "make.conf": (
+ 'FEATURES="binpkg-multi-instance"',
+ 'USE="abi_x86_32 abi_x86_32"',
+ ),
+ }
+
+ world = ("dev-libs/A",)
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["dev-libs/A"],
+ options={
+ "--verbose": "y",
+ "--update": True,
+ "--deep": True,
+ "--complete-graph": True,
+ "--usepkg": True,
+ "--autounmask": "n",
+ "--autounmask-backtrack": "n",
+ "--autounmask-use": "n",
+ },
+ success=True,
+ mergelist=["dev-libs/A-2", "[uninstall]dev-libs/B-1", "!<dev-libs/B-2"],
+ ),
)
- try:
- for test_case in test_cases:
- playground.run_TestCase(test_case)
- self.assertEqual(test_case.test_success, True, test_case.fail_msg)
- finally:
- playground.cleanup()
+
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=user_config,
+ world=world,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
+
+ def testNoMergeBinpkgRespectUse(self):
+ """
+ Testcase for bug #916614 where an incomplete depgraph may be fed into
+ _show_ignored_binaries_respect_use.
+
+ We use a mix of +/-abi_x86_32 to trigger the binpkg-respect-use notice,
+ and one of the ebuilds queued for reinstall depends on a non-existent
+ package, so the dependency calculation aborts.
+ """
+ ebuilds = {
+ "dev-libs/A-2": {
+ "EAPI": "7",
+ "IUSE": "abi_x86_32",
+ },
+ "dev-libs/B-1": {
+ "IUSE": "abi_x86_32",
+ "RDEPEND": "=dev-libs/A-1",
+ },
+ }
+
+ installed = {
+ "dev-libs/B-1": {
+ "IUSE": "abi_x86_32",
+ "USE": "abi_x86_32",
+ },
+ "dev-libs/A-1": {
+ "IUSE": "abi_x86_32",
+ "USE": "abi_x86_32",
+ },
+ }
+
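+ # The B-1 binpkg was built without abi_x86_32 and carries a newer
+ # BUILD_ID/BUILD_TIME, which triggers the binpkg-respect-use notice.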
+ binpkgs = {
+ "dev-libs/A-2": {
+ "IUSE": "abi_x86_32",
+ "USE": "abi_x86_32",
+ },
+ "dev-libs/B-1": {
+ "IUSE": "abi_x86_32",
+ "USE": "",
+ "BUILD_ID": "2",
+ "BUILD_TIME": "2",
+ },
+ }
+
+ user_config = {
+ "make.conf": (
+ 'FEATURES="binpkg-multi-instance"',
+ 'USE="abi_x86_32 abi_x86_32"',
+ ),
+ }
+
+ world = ("dev-libs/A",)
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ ["@installed"],
+ options={
+ "--verbose": "y",
+ "--emptytree": True,
+ "--usepkg": True,
+ },
+ success=False,
+ mergelist=None,
+ slot_collision_solutions=None,
+ ),
+ )
+
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ user_config["make.conf"] += (f'BINPKG_FORMAT="{binpkg_format}"',)
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ binpkgs=binpkgs,
+ installed=installed,
+ user_config=user_config,
+ world=world,
+ )
+
+ try:
+ for test_case in test_cases:
+ playground.run_TestCase(test_case)
+ self.assertEqual(
+ test_case.test_success, True, test_case.fail_msg
+ )
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/resolver/test_virtual_slot.py b/lib/portage/tests/resolver/test_virtual_slot.py
index d55518fa7..19dc254dd 100644
--- a/lib/portage/tests/resolver/test_virtual_slot.py
+++ b/lib/portage/tests/resolver/test_virtual_slot.py
@@ -10,7 +10,6 @@ from portage.tests.resolver.ResolverPlayground import (
class VirtualSlotResolverTestCase(TestCase):
def testLicenseMaskedVirtualSlotUpdate(self):
-
ebuilds = {
"dev-java/oracle-jdk-bin-1.7.0": {"SLOT": "1.7", "LICENSE": "TEST"},
"dev-java/sun-jdk-1.6.0": {"SLOT": "1.6", "LICENSE": "TEST"},
@@ -60,7 +59,6 @@ class VirtualSlotResolverTestCase(TestCase):
playground.cleanup()
def testVirtualSlotUpdate(self):
-
ebuilds = {
"dev-java/oracle-jdk-bin-1.7.0": {"SLOT": "1.7", "LICENSE": "TEST"},
"dev-java/sun-jdk-1.6.0": {"SLOT": "1.6", "LICENSE": "TEST"},
@@ -118,7 +116,6 @@ class VirtualSlotResolverTestCase(TestCase):
playground.cleanup()
def testVirtualSubslotUpdate(self):
-
ebuilds = {
"virtual/pypy-2.3.1": {
"EAPI": "5",
@@ -190,7 +187,6 @@ class VirtualSlotResolverTestCase(TestCase):
playground.cleanup()
def testVirtualSlotDepclean(self):
-
ebuilds = {
"dev-java/oracle-jdk-bin-1.7.0": {"SLOT": "1.7", "LICENSE": "TEST"},
"dev-java/sun-jdk-1.6.0": {"SLOT": "1.6", "LICENSE": "TEST"},
diff --git a/lib/portage/tests/runTests.py b/lib/portage/tests/runTests.py
deleted file mode 100755
index 85b746092..000000000
--- a/lib/portage/tests/runTests.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python -bWd
-# runTests.py -- Portage Unit Test Functionality
-# Copyright 2006-2020 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-import grp
-import os
-import os.path as osp
-import platform
-import pwd
-import signal
-import sys
-
-
-def debug_signal(signum, frame):
- import pdb
-
- pdb.set_trace()
-
-
-if platform.python_implementation() == "Jython":
- debug_signum = signal.SIGUSR2 # bug #424259
-else:
- debug_signum = signal.SIGUSR1
-
-signal.signal(debug_signum, debug_signal)
-
-# Pretend that the current user's uid/gid are the 'portage' uid/gid,
-# so things go smoothly regardless of the current user and global
-# user/group configuration.
-os.environ["PORTAGE_USERNAME"] = pwd.getpwuid(os.getuid()).pw_name
-os.environ["PORTAGE_GRPNAME"] = grp.getgrgid(os.getgid()).gr_name
-
-# Insert our parent dir so we can do shiny import "tests"
-# This line courtesy of Marienz and Pkgcore ;)
-sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.realpath(__file__)))))
-
-import portage
-
-portage._internal_caller = True
-
-# Ensure that we don't instantiate portage.settings, so that tests should
-# work the same regardless of global configuration file state/existence.
-portage._disable_legacy_globals()
-
-if os.environ.get("NOCOLOR") in ("yes", "true"):
- portage.output.nocolor()
-
-import portage.tests as tests
-from portage.util._eventloop.global_event_loop import global_event_loop
-from portage.const import PORTAGE_BIN_PATH
-
-path = os.environ.get("PATH", "").split(":")
-path = [x for x in path if x]
-
-insert_bin_path = True
-try:
- insert_bin_path = not path or not os.path.samefile(path[0], PORTAGE_BIN_PATH)
-except OSError:
- pass
-
-if insert_bin_path:
- path.insert(0, PORTAGE_BIN_PATH)
- os.environ["PATH"] = ":".join(path)
-
-if __name__ == "__main__":
- try:
- sys.exit(tests.main())
- finally:
- global_event_loop().close()
diff --git a/lib/portage/tests/sets/base/meson.build b/lib/portage/tests/sets/base/meson.build
new file mode 100644
index 000000000..db76ccced
--- /dev/null
+++ b/lib/portage/tests/sets/base/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_internal_package_set.py',
+ 'test_variable_set.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/sets/base',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/sets/base/testInternalPackageSet.py b/lib/portage/tests/sets/base/test_internal_package_set.py
index 0793df3fb..77934cab2 100644
--- a/lib/portage/tests/sets/base/testInternalPackageSet.py
+++ b/lib/portage/tests/sets/base/test_internal_package_set.py
@@ -12,8 +12,8 @@ class InternalPackageSetTestCase(TestCase):
"""Simple Test Case for InternalPackageSet"""
def testInternalPackageSet(self):
- i1_atoms = set(("dev-libs/A", ">=dev-libs/A-1", "dev-libs/B"))
- i2_atoms = set(("dev-libs/A", "dev-libs/*", "dev-libs/C"))
+ i1_atoms = {"dev-libs/A", ">=dev-libs/A-1", "dev-libs/B"}
+ i2_atoms = {"dev-libs/A", "dev-libs/*", "dev-libs/C"}
i1 = InternalPackageSet(initial_atoms=i1_atoms)
i2 = InternalPackageSet(initial_atoms=i2_atoms, allow_wildcard=True)
diff --git a/lib/portage/tests/sets/base/test_variable_set.py b/lib/portage/tests/sets/base/test_variable_set.py
new file mode 100644
index 000000000..60c43a5b8
--- /dev/null
+++ b/lib/portage/tests/sets/base/test_variable_set.py
@@ -0,0 +1,45 @@
+# Copyright 2022-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import (
+ ResolverPlayground,
+ ResolverPlaygroundTestCase,
+)
+
+
+class VariableSetTestCase(TestCase):
+ def testVariableSetEmerge(self):
+ # Use a local set definition: the stock @golang-rebuild set migrated to dev-lang/go (bug 919751).
+ golang_rebuild = "{class=portage.sets.dbapi.VariableSet,variable=BDEPEND,includes=dev-lang/go}"
+
+ ebuilds = {
+ "dev-go/go-pkg-1": {"BDEPEND": "dev-lang/go"},
+ "www-client/firefox-1": {
+ "BDEPEND": "|| ( virtual/rust:0/a virtual/rust:0/b )"
+ },
+ }
+ installed = ebuilds
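+ # @rust-rebuild comes from Portage's stock set configuration, so no
+ # local definition is needed for it here.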
+ playground = ResolverPlayground(ebuilds=ebuilds, installed=installed)
+
+ test_cases = (
+ ResolverPlaygroundTestCase(
+ [f"@golang-rebuild{golang_rebuild}"],
+ mergelist=["dev-go/go-pkg-1"],
+ success=True,
+ ),
+ ResolverPlaygroundTestCase(
+ ["@rust-rebuild"],
+ mergelist=["www-client/firefox-1"],
+ success=True,
+ ),
+ )
+
+ try:
+ for test_case in test_cases:
+ # Create an artificial VariableSet to test against
+ playground.run_TestCase(test_case)
+ self.assertEqual(test_case.test_success, True, test_case.fail_msg)
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/sets/files/meson.build b/lib/portage/tests/sets/files/meson.build
new file mode 100644
index 000000000..240551984
--- /dev/null
+++ b/lib/portage/tests/sets/files/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_config_file_set.py',
+ 'test_static_file_set.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/sets/files',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/sets/files/testConfigFileSet.py b/lib/portage/tests/sets/files/test_config_file_set.py
index 81419df4a..fdb28da7f 100644
--- a/lib/portage/tests/sets/files/testConfigFileSet.py
+++ b/lib/portage/tests/sets/files/test_config_file_set.py
@@ -1,5 +1,5 @@
# testConfigFileSet.py -- Portage Unit Testing Functionality
-# Copyright 2007 Gentoo Foundation
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import tempfile
@@ -13,6 +13,7 @@ class ConfigFileSetTestCase(TestCase):
"""Simple Test Case for ConfigFileSet"""
def setUp(self):
+ super().setUp()
fd, self.testfile = tempfile.mkstemp(
suffix=".testdata", prefix=self.__class__.__name__, text=True
)
diff --git a/lib/portage/tests/sets/files/testStaticFileSet.py b/lib/portage/tests/sets/files/test_static_file_set.py
index a4e6c29c2..e8f51ca20 100644
--- a/lib/portage/tests/sets/files/testStaticFileSet.py
+++ b/lib/portage/tests/sets/files/test_static_file_set.py
@@ -1,5 +1,5 @@
# testStaticFileSet.py -- Portage Unit Testing Functionality
-# Copyright 2007 Gentoo Foundation
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import tempfile
@@ -13,6 +13,7 @@ class StaticFileSetTestCase(TestCase):
"""Simple Test Case for StaticFileSet"""
def setUp(self):
+ super().setUp()
fd, self.testfile = tempfile.mkstemp(
suffix=".testdata", prefix=self.__class__.__name__, text=True
)
diff --git a/lib/portage/tests/sets/meson.build b/lib/portage/tests/sets/meson.build
new file mode 100644
index 000000000..9037105ce
--- /dev/null
+++ b/lib/portage/tests/sets/meson.build
@@ -0,0 +1,12 @@
+py.install_sources(
+ [
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/sets',
+ pure : not native_extensions
+)
+
+subdir('base')
+subdir('files')
+subdir('shell')
diff --git a/lib/portage/tests/sets/shell/meson.build b/lib/portage/tests/sets/shell/meson.build
new file mode 100644
index 000000000..41eef9357
--- /dev/null
+++ b/lib/portage/tests/sets/shell/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_shell.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/sets/shell',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/sets/shell/testShell.py b/lib/portage/tests/sets/shell/test_shell.py
index dcbdafeb8..15f8907df 100644
--- a/lib/portage/tests/sets/shell/testShell.py
+++ b/lib/portage/tests/sets/shell/test_shell.py
@@ -1,5 +1,5 @@
# testCommandOutputSet.py -- Portage Unit Testing Functionality
-# Copyright 2007-2020 Gentoo Authors
+# Copyright 2007-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.process import find_binary
@@ -11,18 +11,17 @@ class CommandOutputSetTestCase(TestCase):
"""Simple Test Case for CommandOutputSet"""
def setUp(self):
- pass
+ super().setUp()
def tearDown(self):
pass
def testCommand(self):
-
params = set(test_cps)
command = find_binary("bash")
command += " -c '"
for a in params:
- command += ' echo -e "%s" ; ' % a
+ command += f' echo -e "{a}" ; '
command += "'"
s = CommandOutputSet(command)
atoms = s.getAtoms()
diff --git a/lib/portage/tests/sync/meson.build b/lib/portage/tests/sync/meson.build
new file mode 100644
index 000000000..8c566080e
--- /dev/null
+++ b/lib/portage/tests/sync/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_sync_local.py',
+ 'test_sync_zipfile.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/sync',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/sync/test_sync_local.py b/lib/portage/tests/sync/test_sync_local.py
index a5fc069c3..aeeb5d0b1 100644
--- a/lib/portage/tests/sync/test_sync_local.py
+++ b/lib/portage/tests/sync/test_sync_local.py
@@ -1,4 +1,4 @@
-# Copyright 2014-2021 Gentoo Authors
+# Copyright 2014-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import datetime
@@ -23,18 +23,13 @@ class SyncLocalTestCase(TestCase):
def _must_skip(self):
if find_binary("rsync") is None:
- return "rsync: command not found"
+ self.skipTest("rsync: command not found")
if find_binary("git") is None:
- return "git: command not found"
+ self.skipTest("git: command not found")
def testSyncLocal(self):
debug = False
-
- skip_reason = self._must_skip()
- if skip_reason:
- self.portage_skip = skip_reason
- self.assertFalse(True, skip_reason)
- return
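+ # skipTest() raises unittest.SkipTest, so this aborts the test early
+ # when rsync or git is unavailable.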
+ self._must_skip()
repos_conf = textwrap.dedent(
"""
@@ -88,13 +83,13 @@ class SyncLocalTestCase(TestCase):
cmds = {}
for cmd in ("emerge", "emaint"):
for bindir in (self.bindir, self.sbindir):
- path = os.path.join(bindir, cmd)
+ path = os.path.join(str(bindir), cmd)
if os.path.exists(path):
cmds[cmd] = (portage._python_interpreter, "-b", "-Wd", path)
break
else:
raise AssertionError(
- "%s binary not found in %s or %s" % (cmd, self.bindir, self.sbindir)
+ f"{cmd} binary not found in {self.bindir} or {self.sbindir}"
)
git_binary = find_binary("git")
@@ -139,7 +134,7 @@ class SyncLocalTestCase(TestCase):
) as f:
f.write(
bump_timestamp.timestamp.strftime(
- "%s\n" % TIMESTAMP_FORMAT,
+ f"{TIMESTAMP_FORMAT}\n",
)
)
@@ -320,10 +315,8 @@ class SyncLocalTestCase(TestCase):
)
def hg_init_global_config():
- with open(os.path.join(homedir, ".hgrc"), "wt") as f:
- f.write(
- "[ui]\nusername = {} <{}>\n".format(committer_name, committer_email)
- )
+ with open(os.path.join(homedir, ".hgrc"), "w") as f:
+ f.write(f"[ui]\nusername = {committer_name} <{committer_email}>\n")
hg_repo_create = (
(repo.location, hg_init_global_config),
@@ -335,7 +328,7 @@ class SyncLocalTestCase(TestCase):
sync_type_mercurial = ((homedir, lambda: repos_set_conf("mercurial")),)
def append_newline(path):
- with open(path, "at") as f:
+ with open(path, "a") as f:
f.write("\n")
upstream_hg_commit = (
@@ -394,7 +387,7 @@ class SyncLocalTestCase(TestCase):
"GENTOO_COMMITTER_NAME": committer_name,
"GENTOO_COMMITTER_EMAIL": committer_email,
"HOME": homedir,
- "PATH": os.environ["PATH"],
+ "PATH": settings["PATH"],
"PORTAGE_GRPNAME": os.environ["PORTAGE_GRPNAME"],
"PORTAGE_USERNAME": os.environ["PORTAGE_USERNAME"],
"PYTHONDONTWRITEBYTECODE": os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
@@ -415,7 +408,7 @@ class SyncLocalTestCase(TestCase):
with open(timestamp_path, "w") as f:
f.write(
bump_timestamp.timestamp.strftime(
- "%s\n" % TIMESTAMP_FORMAT,
+ f"{TIMESTAMP_FORMAT}\n",
)
)
@@ -460,7 +453,6 @@ class SyncLocalTestCase(TestCase):
+ sync_cmds
+ mercurial_tests
):
-
if hasattr(cmd, "__call__"):
cmd()
continue
@@ -481,11 +473,7 @@ class SyncLocalTestCase(TestCase):
self.assertEqual(
os.EX_OK,
proc.returncode,
- "%s failed in %s"
- % (
- cmd,
- cwd,
- ),
+ f"{cmd} failed in {cwd}",
)
finally:
diff --git a/lib/portage/tests/sync/test_sync_zipfile.py b/lib/portage/tests/sync/test_sync_zipfile.py
new file mode 100644
index 000000000..4fbde8a35
--- /dev/null
+++ b/lib/portage/tests/sync/test_sync_zipfile.py
@@ -0,0 +1,99 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import http.server
+import os
+import shutil
+import socketserver
+import subprocess
+import tempfile
+import textwrap
+import threading
+from functools import partial
+
+import portage
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+
+
+class SyncZipfileTestCase(TestCase):
+ def test_sync_zipfile(self):
+ cpv = "dev-libs/A-0"
+ ebuilds = {
+ cpv: {"EAPI": "8"},
+ }
+ etag = "foo"
+
+ server = None
+ playground = None
+ tmpdir = tempfile.mkdtemp()
+ try:
+
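+ # Serve tmpdir over HTTP; the handler attaches a fixed ETag, which the
+ # zipfile sync module later reports as the repository revision.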
+ class Handler(http.server.SimpleHTTPRequestHandler):
+ def end_headers(self):
+ self.send_header("etag", etag)
+ super().end_headers()
+
+ server = socketserver.TCPServer(
+ ("127.0.0.1", 0),
+ partial(Handler, directory=tmpdir),
+ )
+ threading.Thread(target=server.serve_forever, daemon=True).start()
+
+ playground = ResolverPlayground(
+ ebuilds=ebuilds,
+ )
+ settings = playground.settings
+
+ env = settings.environ()
+
+ repos_conf = textwrap.dedent(
+ """
+ [test_repo]
+ location = %(location)s
+ sync-type = zipfile
+ sync-uri = %(sync-uri)s
+ auto-sync = true
+ """
+ )
+
+ repo_location = f"{playground.eprefix}/var/repositories/test_repo"
+
+ env["PORTAGE_REPOSITORIES"] = repos_conf % {
+ "location": repo_location,
+ "sync-uri": "http://{}:{}/test_repo.zip".format(*server.server_address),
+ }
+
+ shutil.make_archive(os.path.join(tmpdir, "test_repo"), "zip", repo_location)
+
+ ebuild = playground.trees[playground.eroot]["porttree"].dbapi.findname(cpv)
+ self.assertTrue(os.path.exists(ebuild))
+ shutil.rmtree(repo_location)
+ self.assertFalse(os.path.exists(ebuild))
+
+ result = subprocess.run(
+ [
+ "emerge",
+ "--sync",
+ ],
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ )
+ output = result.stdout.decode(errors="replace")
+ try:
+ self.assertEqual(result.returncode, os.EX_OK)
+ except Exception:
+ print(output)
+ raise
+
+ repo = settings.repositories["test_repo"]
+ sync_mod = portage.sync.module_controller.get_class("zipfile")
+ status, repo_revision = sync_mod().retrieve_head(options={"repo": repo})
+ self.assertEqual(status, os.EX_OK)
+ self.assertEqual(repo_revision, etag)
+ finally:
+ if server is not None:
+ server.shutdown()
+ shutil.rmtree(tmpdir)
+ if playground is not None:
+ playground.cleanup()
diff --git a/lib/portage/tests/unicode/meson.build b/lib/portage/tests/unicode/meson.build
new file mode 100644
index 000000000..1443c5fd8
--- /dev/null
+++ b/lib/portage/tests/unicode/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_string_format.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/unicode',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/unicode/test_string_format.py b/lib/portage/tests/unicode/test_string_format.py
index 65d3c1905..9878d47fa 100644
--- a/lib/portage/tests/unicode/test_string_format.py
+++ b/lib/portage/tests/unicode/test_string_format.py
@@ -11,7 +11,7 @@ from _emerge.UseFlagDisplay import UseFlagDisplay
class StringFormatTestCase(TestCase):
"""
- Test that string formatting works correctly in the current interpretter,
+ Test that string formatting works correctly in the current interpreter,
which may be either python2 or python3.
"""
@@ -21,37 +21,34 @@ class StringFormatTestCase(TestCase):
)
def testDependencyArg(self):
-
self.assertEqual(_encodings["content"], "utf_8")
for arg_unicode in self.unicode_strings:
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
dependency_arg = DependencyArg(arg=arg_unicode)
- formatted_str = "%s" % (dependency_arg,)
+ formatted_str = f"{dependency_arg}"
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (dependency_arg,)
+ formatted_str = f"{dependency_arg}"
self.assertEqual(formatted_str, arg_unicode)
def testPortageException(self):
-
self.assertEqual(_encodings["content"], "utf_8")
for arg_unicode in self.unicode_strings:
arg_bytes = _unicode_encode(arg_unicode, encoding=_encodings["content"])
e = PortageException(arg_unicode)
- formatted_str = "%s" % (e,)
+ formatted_str = f"{e}"
self.assertEqual(formatted_str, arg_unicode)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (e,)
+ formatted_str = f"{e}"
self.assertEqual(formatted_str, arg_unicode)
def testUseFlagDisplay(self):
-
self.assertEqual(_encodings["content"], "utf_8")
for enabled in (True, False):
@@ -59,9 +56,9 @@ class StringFormatTestCase(TestCase):
for arg_unicode in self.unicode_strings:
e = UseFlagDisplay(arg_unicode, enabled, forced)
- formatted_str = "%s" % (e,)
+ formatted_str = f"{e}"
self.assertEqual(isinstance(formatted_str, str), True)
# Test the __str__ method which returns unicode in python3
- formatted_str = "%s" % (e,)
+ formatted_str = f"{e}"
self.assertEqual(isinstance(formatted_str, str), True)
diff --git a/lib/portage/tests/update/meson.build b/lib/portage/tests/update/meson.build
new file mode 100644
index 000000000..741170e77
--- /dev/null
+++ b/lib/portage/tests/update/meson.build
@@ -0,0 +1,11 @@
+py.install_sources(
+ [
+ 'test_move_ent.py',
+ 'test_move_slot_ent.py',
+ 'test_update_dbentry.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/update',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/update/test_move_ent.py b/lib/portage/tests/update/test_move_ent.py
index ba5add989..0b938dd28 100644
--- a/lib/portage/tests/update/test_move_ent.py
+++ b/lib/portage/tests/update/test_move_ent.py
@@ -1,19 +1,129 @@
-# Copyright 2012-2021 Gentoo Authors
+# Copyright 2012-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
import textwrap
-
+import pytest
import portage
from portage import os
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import ResolverPlayground
from portage.util import ensure_dirs
from portage._global_updates import _do_global_updates
+from portage.output import colorize
class MoveEntTestCase(TestCase):
def testMoveEnt(self):
+ ebuilds = {
+ "dev-libs/A-2::dont_apply_updates": {
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ }
+
+ installed = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "4",
+ },
+ "dev-libs/A-2::dont_apply_updates": {
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ }
+
+ binpkgs = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "4",
+ },
+ "dev-libs/A-2::dont_apply_updates": {
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ }
+
+ updates = textwrap.dedent(
+ """
+ move dev-libs/A dev-libs/A-moved
+ """
+ )
+
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
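+ # Signing is disabled below so both xpak and gpkg test packages can be
+ # built without a configured signing key.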
+ user_config={
+ "make.conf": (
+ f'BINPKG_FORMAT="{binpkg_format}"',
+ 'FEATURES="-binpkg-signing"',
+ ),
+ },
+ )
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # A -> A-moved
+ self.assertRaises(KeyError, vardb.aux_get, "dev-libs/A-1", ["EAPI"])
+ vardb.aux_get("dev-libs/A-moved-1", ["EAPI"])
+ # The original package should still exist because a binary
+ # package move is a copy-on-write operation.
+ bindb.aux_get("dev-libs/A-1", ["EAPI"])
+ bindb.aux_get("dev-libs/A-moved-1", ["EAPI"])
+
+ # dont_apply_updates
+ self.assertRaises(
+ KeyError, vardb.aux_get, "dev-libs/A-moved-2", ["EAPI"]
+ )
+ vardb.aux_get("dev-libs/A-2", ["EAPI"])
+ self.assertRaises(
+ KeyError, bindb.aux_get, "dev-libs/A-moved-2", ["EAPI"]
+ )
+ bindb.aux_get("dev-libs/A-2", ["EAPI"])
+ finally:
+ playground.cleanup()
+
+ def testMoveEntWithSignature(self):
ebuilds = {
"dev-libs/A-2::dont_apply_updates": {
"EAPI": "4",
@@ -47,60 +157,169 @@ class MoveEntTestCase(TestCase):
"""
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, ebuilds=ebuilds, installed=installed
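+ # Only the gpkg format supports binary package signing, hence the
+ # single-format loop.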
+ for binpkg_format in ("gpkg",):
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
+ )
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # A -> A-moved
+ self.assertRaises(KeyError, vardb.aux_get, "dev-libs/A-1", ["EAPI"])
+ vardb.aux_get("dev-libs/A-moved-1", ["EAPI"])
+ # The original package should still exist because a binary
+ # package move is a copy-on-write operation.
+ bindb.aux_get("dev-libs/A-1", ["EAPI"])
+ self.assertRaises(
+ KeyError, bindb.aux_get, "dev-libs/A-moved-1", ["EAPI"]
+ )
+
+ # dont_apply_updates
+ self.assertRaises(
+ KeyError, vardb.aux_get, "dev-libs/A-moved-2", ["EAPI"]
+ )
+ vardb.aux_get("dev-libs/A-2", ["EAPI"])
+ self.assertRaises(
+ KeyError, bindb.aux_get, "dev-libs/A-moved-2", ["EAPI"]
+ )
+ bindb.aux_get("dev-libs/A-2", ["EAPI"])
+
+ finally:
+ playground.cleanup()
+
+ # Ignore "The loop argument is deprecated" since this argument is conditionally
+ # added to asyncio.Lock as needed for compatibility with python 3.9.
+ @pytest.mark.filterwarnings("ignore:The loop argument is deprecated")
+ @pytest.mark.filterwarnings("error")
+ def testMoveEntWithCorruptIndex(self):
+ """
+ Test handling of the Packages index being stale (bug #920828)
+ and gpkg's binpkg-multi-instance handling.
+
+ We expect a UserWarning to be thrown if the gpkg structure is broken,
+ so we promote that to an error.
+ """
+ ebuilds = {
+ "dev-libs/A-moved-1::test_repo": {
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ "dev-libs/B-1::test_repo": {"EAPI": "4", "RDEPEND": "dev-libs/A-moved"},
+ }
+
+ installed = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "4",
+ },
+ "dev-libs/B-1::test_repo": {"EAPI": "4", "RDEPEND": "dev-libs/A"},
+ }
+
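+ # BUILD_ID entries exercise binpkg-multi-instance naming, producing
+ # files like A-1-1.gpkg.tar (removed below to stale the index).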
+ binpkgs = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "4",
+ "BUILD_ID": "1",
+ },
+ "dev-libs/B-1::test_repo": {
+ "EAPI": "4",
+ "BUILD_ID": "1",
+ "RDEPEND": "dev-libs/A",
+ },
+ }
+
+ updates = textwrap.dedent(
+ """
+ move dev-libs/A dev-libs/A-moved
+ """
)
- settings = playground.settings
- trees = playground.trees
- eroot = settings["EROOT"]
- test_repo_location = settings.repositories["test_repo"].location
- portdb = trees[eroot]["porttree"].dbapi
- vardb = trees[eroot]["vartree"].dbapi
- bindb = trees[eroot]["bintree"].dbapi
-
- updates_dir = os.path.join(test_repo_location, "profiles", "updates")
-
- try:
- ensure_dirs(updates_dir)
- with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
- f.write(updates)
-
- # Create an empty updates directory, so that this
- # repo doesn't inherit updates from the main repo.
- ensure_dirs(
- os.path.join(
- portdb.getRepositoryPath("dont_apply_updates"),
- "profiles",
- "updates",
+ for binpkg_format in ("gpkg",):
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config={
+ "make.conf": (
+ f'BINPKG_FORMAT="{binpkg_format}"',
+ f'FEATURES="binpkg-multi-instance pkgdir-index-trusted"',
+ ),
+ },
+ debug=False,
)
- )
-
- global_noiselimit = portage.util.noiselimit
- portage.util.noiselimit = -2
- try:
- _do_global_updates(trees, {})
- finally:
- portage.util.noiselimit = global_noiselimit
-
- # Workaround for cache validation not working
- # correctly when filesystem has timestamp precision
- # of 1 second.
- vardb._clear_cache()
-
- # A -> A-moved
- self.assertRaises(KeyError, vardb.aux_get, "dev-libs/A-1", ["EAPI"])
- vardb.aux_get("dev-libs/A-moved-1", ["EAPI"])
- # The original package should still exist because a binary
- # package move is a copy on write operation.
- bindb.aux_get("dev-libs/A-1", ["EAPI"])
- bindb.aux_get("dev-libs/A-moved-1", ["EAPI"])
-
- # dont_apply_updates
- self.assertRaises(KeyError, vardb.aux_get, "dev-libs/A-moved-2", ["EAPI"])
- vardb.aux_get("dev-libs/A-2", ["EAPI"])
- self.assertRaises(KeyError, bindb.aux_get, "dev-libs/A-moved-2", ["EAPI"])
- bindb.aux_get("dev-libs/A-2", ["EAPI"])
-
- finally:
- playground.cleanup()
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Make the Packages index out-of-date
+ os.remove(
+ os.path.join(
+ bindb.bintree.pkgdir, "dev-libs", "A", "A-1-1.gpkg.tar"
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/update/test_move_slot_ent.py b/lib/portage/tests/update/test_move_slot_ent.py
index 87e38f97e..62b5c3544 100644
--- a/lib/portage/tests/update/test_move_slot_ent.py
+++ b/lib/portage/tests/update/test_move_slot_ent.py
@@ -1,19 +1,21 @@
-# Copyright 2012-2019 Gentoo Authors
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import sys
import textwrap
import portage
from portage import os
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import ResolverPlayground
from portage.util import ensure_dirs
from portage._global_updates import _do_global_updates
+from portage.output import colorize
class MoveSlotEntTestCase(TestCase):
def testMoveSlotEnt(self):
-
ebuilds = {
"dev-libs/A-2::dont_apply_updates": {
"EAPI": "5",
@@ -75,63 +77,233 @@ class MoveSlotEntTestCase(TestCase):
"""
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, ebuilds=ebuilds, installed=installed
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config={
+ "make.conf": (
+ f'BINPKG_FORMAT="{binpkg_format}"',
+ 'FEATURES="-binpkg-signing"',
+ ),
+ },
+ )
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # 0/2.30 -> 2/2.30
+ self.assertEqual(
+ "2/2.30", vardb.aux_get("dev-libs/A-1", ["SLOT"])[0]
+ )
+ self.assertEqual(
+ "2/2.30", bindb.aux_get("dev-libs/A-1", ["SLOT"])[0]
+ )
+
+ # 0 -> 1
+ self.assertEqual("1", vardb.aux_get("dev-libs/B-1", ["SLOT"])[0])
+ self.assertEqual("1", bindb.aux_get("dev-libs/B-1", ["SLOT"])[0])
+
+ # 0/1 -> 1 (equivalent to 1/1)
+ self.assertEqual("1", vardb.aux_get("dev-libs/C-1", ["SLOT"])[0])
+ self.assertEqual("1", bindb.aux_get("dev-libs/C-1", ["SLOT"])[0])
+
+ # dont_apply_updates
+ self.assertEqual(
+ "0/2.30", bindb.aux_get("dev-libs/A-2", ["SLOT"])[0]
+ )
+ self.assertEqual("0", bindb.aux_get("dev-libs/B-2", ["SLOT"])[0])
+ self.assertEqual(
+ "0/2.1", bindb.aux_get("dev-libs/C-2.1", ["SLOT"])[0]
+ )
+
+ finally:
+ playground.cleanup()
+
+ def testMoveSlotEntWithSignature(self):
+ ebuilds = {
+ "dev-libs/A-2::dont_apply_updates": {
+ "EAPI": "5",
+ "SLOT": "0/2.30",
+ },
+ "dev-libs/B-2::dont_apply_updates": {
+ "SLOT": "0",
+ },
+ "dev-libs/C-2.1::dont_apply_updates": {
+ "EAPI": "5",
+ "SLOT": "0/2.1",
+ },
+ }
+
+ installed = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "5",
+ "SLOT": "0/2.30",
+ },
+ "dev-libs/B-1::test_repo": {
+ "SLOT": "0",
+ },
+ "dev-libs/C-1::test_repo": {
+ "EAPI": "5",
+ "SLOT": "0/1",
+ },
+ }
+
+ binpkgs = {
+ "dev-libs/A-1::test_repo": {
+ "EAPI": "5",
+ "SLOT": "0/2.30",
+ },
+ "dev-libs/A-2::dont_apply_updates": {
+ "EAPI": "5",
+ "SLOT": "0/2.30",
+ },
+ "dev-libs/B-1::test_repo": {
+ "SLOT": "0",
+ },
+ "dev-libs/B-2::dont_apply_updates": {
+ "SLOT": "0",
+ },
+ "dev-libs/C-1::test_repo": {
+ "EAPI": "5",
+ "SLOT": "0/1",
+ },
+ "dev-libs/C-2.1::dont_apply_updates": {
+ "EAPI": "5",
+ "SLOT": "0/2.1",
+ },
+ }
+
+ updates = textwrap.dedent(
+ """
+ slotmove dev-libs/A 0 2
+ slotmove dev-libs/B 0 1
+ slotmove dev-libs/C 0 1
+ """
)
- settings = playground.settings
- trees = playground.trees
- eroot = settings["EROOT"]
- test_repo_location = settings.repositories["test_repo"].location
- portdb = trees[eroot]["porttree"].dbapi
- vardb = trees[eroot]["vartree"].dbapi
- bindb = trees[eroot]["bintree"].dbapi
-
- updates_dir = os.path.join(test_repo_location, "profiles", "updates")
-
- try:
- ensure_dirs(updates_dir)
- with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
- f.write(updates)
-
- # Create an empty updates directory, so that this
- # repo doesn't inherit updates from the main repo.
- ensure_dirs(
- os.path.join(
- portdb.getRepositoryPath("dont_apply_updates"),
- "profiles",
- "updates",
+ for binpkg_format in ("gpkg",):
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ user_config={
+ "make.conf": (
+ f'BINPKG_FORMAT="{binpkg_format}"',
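+ # binpkg-signing only works with gpkg, matching the single-format
+ # loop above.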
+ 'FEATURES="binpkg-signing"',
+ ),
+ },
)
- )
-
- global_noiselimit = portage.util.noiselimit
- portage.util.noiselimit = -2
- try:
- _do_global_updates(trees, {})
- finally:
- portage.util.noiselimit = global_noiselimit
-
- # Workaround for cache validation not working
- # correctly when filesystem has timestamp precision
- # of 1 second.
- vardb._clear_cache()
-
- # 0/2.30 -> 2/2.30
- self.assertEqual("2/2.30", vardb.aux_get("dev-libs/A-1", ["SLOT"])[0])
- self.assertEqual("2/2.30", bindb.aux_get("dev-libs/A-1", ["SLOT"])[0])
-
- # 0 -> 1
- self.assertEqual("1", vardb.aux_get("dev-libs/B-1", ["SLOT"])[0])
- self.assertEqual("1", bindb.aux_get("dev-libs/B-1", ["SLOT"])[0])
-
- # 0/1 -> 1 (equivalent to 1/1)
- self.assertEqual("1", vardb.aux_get("dev-libs/C-1", ["SLOT"])[0])
- self.assertEqual("1", bindb.aux_get("dev-libs/C-1", ["SLOT"])[0])
-
- # dont_apply_updates
- self.assertEqual("0/2.30", bindb.aux_get("dev-libs/A-2", ["SLOT"])[0])
- self.assertEqual("0", bindb.aux_get("dev-libs/B-2", ["SLOT"])[0])
- self.assertEqual("0/2.1", bindb.aux_get("dev-libs/C-2.1", ["SLOT"])[0])
-
- finally:
- playground.cleanup()
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # 0/2.30 -> 2/2.30
+ self.assertEqual(
+ "2/2.30", vardb.aux_get("dev-libs/A-1", ["SLOT"])[0]
+ )
+
+ # Stale signed packages removed since a7bbb4fc4d38.
+ self.assertRaises(KeyError, bindb.aux_get, "dev-libs/A-1", ["SLOT"])
+ # self.assertEqual(
+ # "0/2.30", bindb.aux_get("dev-libs/A-1", ["SLOT"])[0]
+ # )
+
+ # 0 -> 1
+ self.assertEqual("1", vardb.aux_get("dev-libs/B-1", ["SLOT"])[0])
+ # Stale signed packages removed since a7bbb4fc4d38.
+ self.assertRaises(KeyError, bindb.aux_get, "dev-libs/B-1", ["SLOT"])
+ # self.assertEqual("0", bindb.aux_get("dev-libs/B-1", ["SLOT"])[0])
+
+ # 0/1 -> 1 (equivalent to 1/1)
+ self.assertEqual("1", vardb.aux_get("dev-libs/C-1", ["SLOT"])[0])
+ # Stale signed packages removed since a7bbb4fc4d38.
+ self.assertRaises(KeyError, bindb.aux_get, "dev-libs/C-1", ["SLOT"])
+ # self.assertEqual("0/1", bindb.aux_get("dev-libs/C-1", ["SLOT"])[0])
+
+ # dont_apply_updates
+ self.assertEqual(
+ "0/2.30", bindb.aux_get("dev-libs/A-2", ["SLOT"])[0]
+ )
+ self.assertEqual("0", bindb.aux_get("dev-libs/B-2", ["SLOT"])[0])
+ self.assertEqual(
+ "0/2.1", bindb.aux_get("dev-libs/C-2.1", ["SLOT"])[0]
+ )
+
+ finally:
+ playground.cleanup()
diff --git a/lib/portage/tests/update/test_update_dbentry.py b/lib/portage/tests/update/test_update_dbentry.py
index bed0f4b7c..3b3f0caae 100644
--- a/lib/portage/tests/update/test_update_dbentry.py
+++ b/lib/portage/tests/update/test_update_dbentry.py
@@ -1,18 +1,23 @@
-# Copyright 2012-2013 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import shutil
+import sys
import re
import textwrap
import portage
from portage import os
+from portage.const import SUPPORTED_GENTOO_BINPKG_FORMATS
from portage.dep import Atom
+from portage.exception import CorruptionKeyError
from portage.tests import TestCase
from portage.tests.resolver.ResolverPlayground import ResolverPlayground
from portage.update import update_dbentry
from portage.util import ensure_dirs
from portage.versions import _pkg_str
from portage._global_updates import _do_global_updates
+from portage.output import colorize
class UpdateDbentryTestCase(TestCase):
@@ -160,7 +165,6 @@ class UpdateDbentryTestCase(TestCase):
self.assertEqual(result, output_str)
def testUpdateDbentryDbapiTestCase(self):
-
ebuilds = {
"dev-libs/A-2::dont_apply_updates": {
"RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
@@ -184,9 +188,14 @@ class UpdateDbentryTestCase(TestCase):
"EAPI": "4",
"SLOT": "2",
},
+ "dev-libs/B-2::test_repo": {
+ "SLOT": "2",
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ },
"dev-libs/B-1::test_repo": {
"RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
- "EAPI": "4-python",
+ "EAPI": "4",
},
"dev-libs/M-1::test_repo": {
"EAPI": "4",
@@ -195,7 +204,7 @@ class UpdateDbentryTestCase(TestCase):
"EAPI": "4",
},
"dev-libs/N-2::test_repo": {
- "EAPI": "4-python",
+ "EAPI": "4",
},
}
@@ -211,7 +220,12 @@ class UpdateDbentryTestCase(TestCase):
},
"dev-libs/B-1::test_repo": {
"RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
- "EAPI": "4-python",
+ "EAPI": "4",
+ },
+ "dev-libs/B-2::test_repo": {
+ "SLOT": "2",
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
},
}
@@ -220,102 +234,289 @@ class UpdateDbentryTestCase(TestCase):
updates = textwrap.dedent(
"""
move dev-libs/M dev-libs/M-moved
- move dev-libs/N dev-libs/N.moved
"""
)
- playground = ResolverPlayground(
- binpkgs=binpkgs, ebuilds=ebuilds, installed=installed, world=world
+ for binpkg_format in SUPPORTED_GENTOO_BINPKG_FORMATS:
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (
+ f'BINPKG_FORMAT="{binpkg_format}"',
+ 'FEATURES="-binpkg-signing"',
+ ),
+ },
+ )
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+ setconfig = trees[eroot]["root_config"].setconfig
+ selected_set = setconfig.getSets()["selected"]
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ # Delete some things in order to trigger CorruptionKeyError during package moves.
+ corruption_atom = Atom("dev-libs/B:2")
+ # Demonstrate initial state.
+ self.assertEqual(bindb.match(corruption_atom), ["dev-libs/B-2"])
+ for cpv in bindb.match(corruption_atom):
+ os.unlink(bindb.bintree.getname(cpv))
+ self.assertRaises(
+ CorruptionKeyError,
+ bindb.aux_update,
+ cpv,
+ {"RDEPEND": "dev-libs/M-moved"},
+ )
+ # Demonstrate corrupt state.
+ self.assertEqual(bindb.match(corruption_atom), ["dev-libs/B-2"])
+
+ # Demonstrate initial state.
+ self.assertEqual(vardb.match(corruption_atom), ["dev-libs/B-2"])
+ for cpv in vardb.match(corruption_atom):
+ shutil.rmtree(vardb.getpath(cpv))
+ self.assertRaises(
+ CorruptionKeyError,
+ vardb.aux_update,
+ cpv,
+ {"RDEPEND": "dev-libs/M-moved"},
+ )
+ # Demonstrate correct state because vardbapi checks the disk.
+ self.assertEqual(vardb.match(corruption_atom), [])
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # M -> M-moved
+ old_pattern = re.compile(r"\bdev-libs/M(\s|$)")
+ rdepend = vardb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+ rdepend = bindb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+ rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+ rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+
+ # dont_apply_updates
+ rdepend = vardb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
+ self.assertTrue("dev-libs/M" in rdepend)
+ self.assertTrue("dev-libs/M-moved" not in rdepend)
+ rdepend = bindb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
+ self.assertTrue("dev-libs/M" in rdepend)
+ self.assertTrue("dev-libs/M-moved" not in rdepend)
+
+ # Demonstrate that match still returns stale results
+ # due to intentional corruption.
+ self.assertEqual(bindb.match(corruption_atom), ["dev-libs/B-2"])
+
+ # Update bintree state so aux_get will properly raise KeyError.
+ for cpv in bindb.match(corruption_atom):
+ # Demonstrate that aux_get returns stale results.
+ self.assertEqual(
+ ["dev-libs/M dev-libs/N dev-libs/P"],
+ bindb.aux_get(cpv, ["RDEPEND"]),
+ )
+ bindb.bintree.remove(cpv)
+ self.assertEqual(bindb.match(corruption_atom), [])
+ self.assertRaises(
+ KeyError, bindb.aux_get, "dev-libs/B-2", ["RDEPEND"]
+ )
+ self.assertRaises(
+ KeyError, vardb.aux_get, "dev-libs/B-2", ["RDEPEND"]
+ )
+
+ selected_set.load()
+ self.assertTrue("dev-libs/M" not in selected_set)
+ self.assertTrue("dev-libs/M-moved" in selected_set)
+
+ finally:
+ playground.cleanup()
+
+ def testUpdateDbentryDbapiTestCaseWithSignature(self):
+ ebuilds = {
+ "dev-libs/A-2::dont_apply_updates": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ "dev-libs/B-2::dont_apply_updates": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ }
+
+ installed = {
+ "dev-libs/A-1::test_repo": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ },
+ "dev-libs/A-2::dont_apply_updates": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ "dev-libs/B-1::test_repo": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ },
+ "dev-libs/M-1::test_repo": {
+ "EAPI": "4",
+ },
+ "dev-libs/N-1::test_repo": {
+ "EAPI": "4",
+ },
+ "dev-libs/N-2::test_repo": {
+ "EAPI": "4",
+ },
+ }
+
+ binpkgs = {
+ "dev-libs/A-1::test_repo": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ },
+ "dev-libs/A-2::dont_apply_updates": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ "SLOT": "2",
+ },
+ "dev-libs/B-1::test_repo": {
+ "RDEPEND": "dev-libs/M dev-libs/N dev-libs/P",
+ "EAPI": "4",
+ },
+ }
+
+ world = ["dev-libs/M", "dev-libs/N"]
+
+ updates = textwrap.dedent(
+ """
+ move dev-libs/M dev-libs/M-moved
+ """
)
- settings = playground.settings
- trees = playground.trees
- eroot = settings["EROOT"]
- test_repo_location = settings.repositories["test_repo"].location
- portdb = trees[eroot]["porttree"].dbapi
- vardb = trees[eroot]["vartree"].dbapi
- bindb = trees[eroot]["bintree"].dbapi
- setconfig = trees[eroot]["root_config"].setconfig
- selected_set = setconfig.getSets()["selected"]
-
- updates_dir = os.path.join(test_repo_location, "profiles", "updates")
-
- try:
- ensure_dirs(updates_dir)
- with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
- f.write(updates)
-
- # Create an empty updates directory, so that this
- # repo doesn't inherit updates from the main repo.
- ensure_dirs(
- os.path.join(
- portdb.getRepositoryPath("dont_apply_updates"),
- "profiles",
- "updates",
+ for binpkg_format in ("gpkg",):
+ with self.subTest(binpkg_format=binpkg_format):
+ print(colorize("HILITE", binpkg_format), end=" ... ")
+ sys.stdout.flush()
+ playground = ResolverPlayground(
+ binpkgs=binpkgs,
+ ebuilds=ebuilds,
+ installed=installed,
+ world=world,
+ user_config={
+ "make.conf": (f'BINPKG_FORMAT="{binpkg_format}"',),
+ },
)
- )
-
- global_noiselimit = portage.util.noiselimit
- portage.util.noiselimit = -2
- try:
- _do_global_updates(trees, {})
- finally:
- portage.util.noiselimit = global_noiselimit
-
- # Workaround for cache validation not working
- # correctly when filesystem has timestamp precision
- # of 1 second.
- vardb._clear_cache()
-
- # M -> M-moved
- old_pattern = re.compile(r"\bdev-libs/M(\s|$)")
- rdepend = vardb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/M-moved" in rdepend)
- rdepend = bindb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/M-moved" in rdepend)
- rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/M-moved" in rdepend)
- rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/M-moved" in rdepend)
-
- # EAPI 4-python/*-progress N -> N.moved
- rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
- old_pattern = re.compile(r"\bdev-libs/N(\s|$)")
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/N.moved" in rdepend)
- rdepend = bindb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
- self.assertTrue(old_pattern.search(rdepend) is None)
- self.assertTrue("dev-libs/N.moved" in rdepend)
- self.assertRaises(KeyError, vardb.aux_get, "dev-libs/N-2", ["EAPI"])
- vardb.aux_get("dev-libs/N.moved-2", ["RDEPEND"])[0]
-
- # EAPI 4 does not allow dots in package names for N -> N.moved
- rdepend = vardb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
- self.assertTrue("dev-libs/N" in rdepend)
- self.assertTrue("dev-libs/N.moved" not in rdepend)
- rdepend = bindb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
- self.assertTrue("dev-libs/N" in rdepend)
- self.assertTrue("dev-libs/N.moved" not in rdepend)
- vardb.aux_get("dev-libs/N-1", ["RDEPEND"])[0]
- self.assertRaises(KeyError, vardb.aux_get, "dev-libs/N.moved-1", ["EAPI"])
-
- # dont_apply_updates
- rdepend = vardb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
- self.assertTrue("dev-libs/M" in rdepend)
- self.assertTrue("dev-libs/M-moved" not in rdepend)
- rdepend = bindb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
- self.assertTrue("dev-libs/M" in rdepend)
- self.assertTrue("dev-libs/M-moved" not in rdepend)
-
- selected_set.load()
- self.assertTrue("dev-libs/M" not in selected_set)
- self.assertTrue("dev-libs/M-moved" in selected_set)
- self.assertTrue("dev-libs/N" not in selected_set)
- self.assertTrue("dev-libs/N.moved" in selected_set)
-
- finally:
- playground.cleanup()
+
+ settings = playground.settings
+ trees = playground.trees
+ eroot = settings["EROOT"]
+ test_repo_location = settings.repositories["test_repo"].location
+ portdb = trees[eroot]["porttree"].dbapi
+ vardb = trees[eroot]["vartree"].dbapi
+ bindb = trees[eroot]["bintree"].dbapi
+ setconfig = trees[eroot]["root_config"].setconfig
+ selected_set = setconfig.getSets()["selected"]
+
+ updates_dir = os.path.join(test_repo_location, "profiles", "updates")
+
+ try:
+ ensure_dirs(updates_dir)
+ with open(os.path.join(updates_dir, "1Q-2010"), "w") as f:
+ f.write(updates)
+
+ # Create an empty updates directory, so that this
+ # repo doesn't inherit updates from the main repo.
+ ensure_dirs(
+ os.path.join(
+ portdb.getRepositoryPath("dont_apply_updates"),
+ "profiles",
+ "updates",
+ )
+ )
+
+ global_noiselimit = portage.util.noiselimit
+ portage.util.noiselimit = -2
+ try:
+ _do_global_updates(trees, {})
+ finally:
+ portage.util.noiselimit = global_noiselimit
+
+ # Workaround for cache validation not working
+ # correctly when filesystem has timestamp precision
+ # of 1 second.
+ vardb._clear_cache()
+
+ # M -> M-moved
+ old_pattern = re.compile(r"\bdev-libs/M(\s|$)")
+ rdepend = vardb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+ # Stale signed packages removed since a7bbb4fc4d38.
+ self.assertRaises(
+ KeyError, bindb.aux_get, "dev-libs/A-1", ["RDEPEND"]
+ )
+ # rdepend = bindb.aux_get("dev-libs/A-1", ["RDEPEND"])[0]
+ # self.assertFalse(old_pattern.search(rdepend) is None)
+ # self.assertFalse("dev-libs/M-moved" in rdepend)
+ rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+ rdepend = vardb.aux_get("dev-libs/B-1", ["RDEPEND"])[0]
+ self.assertTrue(old_pattern.search(rdepend) is None)
+ self.assertTrue("dev-libs/M-moved" in rdepend)
+
+ # dont_apply_updates
+ rdepend = vardb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
+ self.assertTrue("dev-libs/M" in rdepend)
+ self.assertTrue("dev-libs/M-moved" not in rdepend)
+ rdepend = bindb.aux_get("dev-libs/A-2", ["RDEPEND"])[0]
+ self.assertTrue("dev-libs/M" in rdepend)
+ self.assertTrue("dev-libs/M-moved" not in rdepend)
+
+ selected_set.load()
+ self.assertTrue("dev-libs/M" not in selected_set)
+ self.assertTrue("dev-libs/M-moved" in selected_set)
+
+ finally:
+ playground.cleanup()
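Aside: the signed variant asserts KeyError where the unsigned test asserted an updated RDEPEND because rewriting metadata inside a signed gpkg would leave a signature that no longer matches the contents. A hypothetical decision sketch (names invented for illustration, not portage's updater code):

def apply_package_move(binpkg, updated_metadata):
    # A stale signed package cannot be edited in place without the
    # signing key, so it is dropped instead of rewritten (hence the
    # KeyError assertions above).
    if binpkg.is_signed:
        binpkg.remove()
    else:
        binpkg.rewrite(updated_metadata)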
diff --git a/lib/portage/tests/util/dyn_libs/meson.build b/lib/portage/tests/util/dyn_libs/meson.build
new file mode 100644
index 000000000..8f2c919c1
--- /dev/null
+++ b/lib/portage/tests/util/dyn_libs/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_installed_dynlibs.py',
+ 'test_soname_deps.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util/dyn_libs',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/util/dyn_libs/test_installed_dynlibs.py b/lib/portage/tests/util/dyn_libs/test_installed_dynlibs.py
new file mode 100644
index 000000000..421dcf606
--- /dev/null
+++ b/lib/portage/tests/util/dyn_libs/test_installed_dynlibs.py
@@ -0,0 +1,65 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import os
+import tempfile
+
+from portage.const import BASH_BINARY
+from portage.tests import TestCase
+from portage.util import ensure_dirs
+from portage.util._dyn_libs.dyn_libs import installed_dynlibs
+from portage.util.file_copy import copyfile
+
+
+class InstalledDynlibsTestCase(TestCase):
+ def testInstalledDynlibsRegular(self):
+ """
+ Return True for *.so regular files.
+ """
+ with tempfile.TemporaryDirectory() as directory:
+ bash_copy = os.path.join(directory, "lib", "libfoo.so")
+ ensure_dirs(os.path.dirname(bash_copy))
+ copyfile(BASH_BINARY, bash_copy)
+ self.assertTrue(installed_dynlibs(directory))
+
+ def testInstalledDynlibsOnlySymlink(self):
+ """
+ If a *.so symlink is installed but does not point to a regular
+ file inside the top directory, installed_dynlibs should return
+ False (bug 921170).
+ """
+ with tempfile.TemporaryDirectory() as directory:
+ symlink_path = os.path.join(directory, "lib", "libfoo.so")
+ ensure_dirs(os.path.dirname(symlink_path))
+ os.symlink(BASH_BINARY, symlink_path)
+ self.assertFalse(installed_dynlibs(directory))
+
+ def testInstalledDynlibsSymlink(self):
+ """
+ Return True for a *.so symlink pointing to a regular file inside
+ the top directory.
+ """
+ with tempfile.TemporaryDirectory() as directory:
+ bash_copy = os.path.join(directory, BASH_BINARY.lstrip(os.sep))
+ ensure_dirs(os.path.dirname(bash_copy))
+ copyfile(BASH_BINARY, bash_copy)
+ symlink_path = os.path.join(directory, "lib", "libfoo.so")
+ ensure_dirs(os.path.dirname(symlink_path))
+ os.symlink(bash_copy, symlink_path)
+ self.assertTrue(installed_dynlibs(directory))
+
+ def testInstalledDynlibsAbsoluteSymlink(self):
+ """
+ If a *.so symlink target is outside of the top directory,
+ traversal follows the corresponding file inside the top
+ directory if it exists, and otherwise stops following the
+ symlink.
+ """
+ with tempfile.TemporaryDirectory() as directory:
+ bash_copy = os.path.join(directory, BASH_BINARY.lstrip(os.sep))
+ ensure_dirs(os.path.dirname(bash_copy))
+ copyfile(BASH_BINARY, bash_copy)
+ symlink_path = os.path.join(directory, "lib", "libfoo.so")
+ ensure_dirs(os.path.dirname(symlink_path))
+ os.symlink(BASH_BINARY, symlink_path)
+ self.assertTrue(installed_dynlibs(directory))
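Aside: a hedged sketch (hypothetical helper, not portage's actual traversal code) of the remapping rule the last docstring describes: an absolute symlink target such as /bin/bash is first looked up as the corresponding path inside the image directory, and following stops if no such copy exists.

import os

def resolve_inside(top, link_path):
    target = os.readlink(link_path)
    if os.path.isabs(target):
        # Prefer the copy installed inside the image directory.
        candidate = os.path.join(top, target.lstrip(os.sep))
        if os.path.exists(candidate):
            return candidate
        return None  # target escapes the image: stop following
    return os.path.normpath(os.path.join(os.path.dirname(link_path), target))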
diff --git a/lib/portage/tests/util/eventloop/meson.build b/lib/portage/tests/util/eventloop/meson.build
new file mode 100644
index 000000000..51ab26901
--- /dev/null
+++ b/lib/portage/tests/util/eventloop/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_call_soon_fifo.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util/eventloop',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/util/eventloop/test_call_soon_fifo.py b/lib/portage/tests/util/eventloop/test_call_soon_fifo.py
index 9e715559a..f1bd2b545 100644
--- a/lib/portage/tests/util/eventloop/test_call_soon_fifo.py
+++ b/lib/portage/tests/util/eventloop/test_call_soon_fifo.py
@@ -10,7 +10,6 @@ from portage.util._eventloop.global_event_loop import global_event_loop
class CallSoonFifoTestCase(TestCase):
def testCallSoonFifo(self):
-
event_loop = global_event_loop()
inputs = [random.random() for index in range(10)]
outputs = []
diff --git a/lib/portage/tests/util/file_copy/meson.build b/lib/portage/tests/util/file_copy/meson.build
new file mode 100644
index 000000000..91813dddc
--- /dev/null
+++ b/lib/portage/tests/util/file_copy/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_copyfile.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util/file_copy',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/util/file_copy/test_copyfile.py b/lib/portage/tests/util/file_copy/test_copyfile.py
index d7e74f26d..e114e6ae3 100644
--- a/lib/portage/tests/util/file_copy/test_copyfile.py
+++ b/lib/portage/tests/util/file_copy/test_copyfile.py
@@ -1,18 +1,20 @@
-# Copyright 2017 Gentoo Foundation
+# Copyright 2017, 2023 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import shutil
import tempfile
+from unittest.mock import patch
+
+import pytest
from portage import os
from portage.tests import TestCase
from portage.checksum import perform_md5
-from portage.util.file_copy import copyfile
+from portage.util.file_copy import copyfile, _fastcopy
class CopyFileTestCase(TestCase):
def testCopyFile(self):
-
tempdir = tempfile.mkdtemp()
try:
src_path = os.path.join(tempdir, "src")
@@ -31,7 +33,6 @@ class CopyFileTestCase(TestCase):
class CopyFileSparseTestCase(TestCase):
def testCopyFileSparse(self):
-
tempdir = tempfile.mkdtemp()
try:
src_path = os.path.join(tempdir, "src")
@@ -42,26 +43,37 @@ class CopyFileSparseTestCase(TestCase):
# files too big, in case the filesystem doesn't support
# sparse files.
with open(src_path, "wb") as f:
+ f.seek(2**16, os.SEEK_SET)
f.write(content)
- f.seek(2 ** 17, 1)
- f.write(content)
- f.seek(2 ** 18, 1)
+ f.seek(2**17, os.SEEK_SET)
f.write(content)
# Test that sparse blocks are handled correctly at
- # the end of the file (involves seek and truncate).
- f.seek(2 ** 17, 1)
+ # the end of the file.
+ f.truncate(2**18)
- copyfile(src_path, dest_path)
+ fastcopy_success = False
+
+ def mock_fastcopy(src, dst):
+ nonlocal fastcopy_success
+ _fastcopy(src, dst)
+ fastcopy_success = True
+
+ with patch("portage.util.file_copy._fastcopy", new=mock_fastcopy):
+ copyfile(src_path, dest_path)
self.assertEqual(perform_md5(src_path), perform_md5(dest_path))
- # This last part of the test is expected to fail when sparse
- # copy is not implemented, so set the todo flag in order
- # to tolerate failures.
- self.todo = True
+ src_stat = os.stat(src_path)
+ dest_stat = os.stat(dest_path)
+
+ self.assertEqual(src_stat.st_size, dest_stat.st_size)
# If sparse blocks were preserved, then both files should
# consume the same number of blocks.
- self.assertEqual(os.stat(src_path).st_blocks, os.stat(dest_path).st_blocks)
+ # This is expected to fail when sparse copy is not implemented.
+ if src_stat.st_blocks != dest_stat.st_blocks:
+ if fastcopy_success:
+ pytest.fail(reason="sparse copy failed with _fastcopy")
+ pytest.xfail(reason="sparse copy is not implemented")
finally:
shutil.rmtree(tempdir)
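Aside: a minimal sketch of the SEEK_DATA/SEEK_HOLE technique that a sparse-preserving copy relies on (shown for illustration; portage's _fastcopy is implemented separately and may differ). Only data extents are copied, so st_blocks stays comparable, as the test asserts:

import os

def sparse_copy(src_path, dst_path):
    # Requires OS and filesystem support for os.SEEK_DATA/os.SEEK_HOLE.
    with open(src_path, "rb") as fsrc, open(dst_path, "wb") as fdst:
        size = os.fstat(fsrc.fileno()).st_size
        offset = 0
        while offset < size:
            try:
                start = os.lseek(fsrc.fileno(), offset, os.SEEK_DATA)
            except OSError:
                break  # ENXIO: only a hole remains past offset
            end = os.lseek(fsrc.fileno(), start, os.SEEK_HOLE)
            fsrc.seek(start)
            fdst.seek(start)  # seeking past EOF leaves a hole behind
            fdst.write(fsrc.read(end - start))
            offset = end
        fdst.truncate(size)  # recreate any trailing hole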
diff --git a/lib/portage/tests/util/futures/asyncio/meson.build b/lib/portage/tests/util/futures/asyncio/meson.build
new file mode 100644
index 000000000..2de0668d6
--- /dev/null
+++ b/lib/portage/tests/util/futures/asyncio/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ 'test_event_loop_in_fork.py',
+ 'test_pipe_closed.py',
+ 'test_policy_wrapper_recursion.py',
+ 'test_run_until_complete.py',
+ 'test_subprocess_exec.py',
+ 'test_wakeup_fd_sigchld.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util/futures/asyncio',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/util/futures/asyncio/test_child_watcher.py b/lib/portage/tests/util/futures/asyncio/test_child_watcher.py
deleted file mode 100644
index fc536c92c..000000000
--- a/lib/portage/tests/util/futures/asyncio/test_child_watcher.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2018-2021 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-import os
-
-from portage.process import find_binary, spawn
-from portage.tests import TestCase
-from portage.util._eventloop.global_event_loop import global_event_loop
-from portage.util.futures import asyncio
-from portage.util.futures.unix_events import DefaultEventLoopPolicy
-
-
-class ChildWatcherTestCase(TestCase):
- def testChildWatcher(self):
- true_binary = find_binary("true")
- self.assertNotEqual(true_binary, None)
-
- initial_policy = asyncio.get_event_loop_policy()
- if not isinstance(initial_policy, DefaultEventLoopPolicy):
- asyncio.set_event_loop_policy(DefaultEventLoopPolicy())
-
- loop = None
- try:
- try:
- asyncio.set_child_watcher(None)
- except NotImplementedError:
- pass
- else:
- self.assertTrue(False)
-
- args_tuple = ("hello", "world")
-
- loop = asyncio._wrap_loop()
- future = loop.create_future()
-
- def callback(pid, returncode, *args):
- future.set_result((pid, returncode, args))
-
- async def watch_pid():
-
- with asyncio.get_child_watcher() as watcher:
- pids = spawn([true_binary], returnpid=True)
- watcher.add_child_handler(pids[0], callback, *args_tuple)
- self.assertEqual((await future), (pids[0], os.EX_OK, args_tuple))
-
- loop.run_until_complete(watch_pid())
- finally:
- asyncio.set_event_loop_policy(initial_policy)
- if loop not in (None, global_event_loop()):
- loop.close()
- self.assertFalse(global_event_loop().is_closed())
diff --git a/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py b/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
index 972f8863a..50d561df6 100644
--- a/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
+++ b/lib/portage/tests/util/futures/asyncio/test_pipe_closed.py
@@ -25,7 +25,7 @@ class _PipeClosedTestCase:
def test_pty_device(self):
try:
read_end, write_end = pty.openpty()
- except EnvironmentError:
+ except OSError:
self.skipTest("pty not available")
self._do_test(read_end, write_end)
@@ -74,7 +74,7 @@ class ReaderPipeClosedTestCase(_PipeClosedTestCase, TestCase):
loop.run_until_complete(asyncio.sleep(0, loop=loop))
self.assertFalse(reader_callback.called.done())
- # Demonstrate that the callback is called afer the
+ # Demonstrate that the callback is called after the
# other end of the pipe has been closed.
write_end.close()
loop.run_until_complete(reader_callback.called)
@@ -123,7 +123,7 @@ class WriterPipeClosedTestCase(_PipeClosedTestCase, TestCase):
while True:
try:
os.write(write_end.fileno(), 512 * b"0")
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EAGAIN:
raise
break
@@ -138,7 +138,7 @@ class WriterPipeClosedTestCase(_PipeClosedTestCase, TestCase):
self.assertFalse(writer_callback.called.done())
- # Demonstrate that the callback is called afer the
+ # Demonstrate that the callback is called after the
# other end of the pipe has been closed.
read_end.close()
loop.run_until_complete(writer_callback.called)
diff --git a/lib/portage/tests/util/futures/asyncio/test_subprocess_exec.py b/lib/portage/tests/util/futures/asyncio/test_subprocess_exec.py
index d9fcd8a5a..bb284d49f 100644
--- a/lib/portage/tests/util/futures/asyncio/test_subprocess_exec.py
+++ b/lib/portage/tests/util/futures/asyncio/test_subprocess_exec.py
@@ -35,12 +35,11 @@ class SubprocessExecTestCase(TestCase):
def test(loop):
async def test_coroutine():
-
proc = await create_subprocess_exec(
echo_binary,
*args_tuple,
stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT
+ stderr=subprocess.STDOUT,
)
out, err = await proc.communicate()
@@ -110,7 +109,6 @@ class SubprocessExecTestCase(TestCase):
echo_binary = echo_binary.encode()
def test(loop):
-
pr, pw = os.pipe()
cat_proc = loop.run_until_complete(
@@ -157,7 +155,7 @@ class SubprocessExecTestCase(TestCase):
stdin=devnull,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
- loop=loop
+ loop=loop,
)
)
diff --git a/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py b/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
index 7806b56d7..eef7ad5b8 100644
--- a/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
+++ b/lib/portage/tests/util/futures/asyncio/test_wakeup_fd_sigchld.py
@@ -68,7 +68,7 @@ sys.exit(os.EX_OK)
except Exception:
portage.writemsg(
"".join(
- "{}\n".format(line)
+ f"{line}\n"
for line in out.decode(errors="replace").splitlines()[:50]
),
noiselevel=-1,
diff --git a/lib/portage/tests/util/futures/meson.build b/lib/portage/tests/util/futures/meson.build
new file mode 100644
index 000000000..cb7831484
--- /dev/null
+++ b/lib/portage/tests/util/futures/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ 'test_done_callback.py',
+ 'test_done_callback_after_exit.py',
+ 'test_iter_completed.py',
+ 'test_retry.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util/futures',
+ pure : not native_extensions
+)
+
+subdir('asyncio')
diff --git a/lib/portage/tests/util/futures/test_compat_coroutine.py b/lib/portage/tests/util/futures/test_compat_coroutine.py
deleted file mode 100644
index f5ae34397..000000000
--- a/lib/portage/tests/util/futures/test_compat_coroutine.py
+++ /dev/null
@@ -1,211 +0,0 @@
-# Copyright 2018 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-from portage.util.futures import asyncio
-from portage.util.futures.compat_coroutine import (
- coroutine,
- coroutine_return,
-)
-from portage.util.futures._sync_decorator import _sync_decorator, _sync_methods
-from portage.tests import TestCase
-
-
-class CompatCoroutineTestCase(TestCase):
- def test_returning_coroutine(self):
- @coroutine
- def returning_coroutine(loop=None):
- yield asyncio.sleep(0, loop=loop)
- coroutine_return("success")
-
- loop = asyncio.get_event_loop()
- self.assertEqual(
- "success",
- asyncio.get_event_loop().run_until_complete(returning_coroutine(loop=loop)),
- )
-
- def test_raising_coroutine(self):
- class TestException(Exception):
- pass
-
- @coroutine
- def raising_coroutine(loop=None):
- yield asyncio.sleep(0, loop=loop)
- raise TestException("exception")
-
- loop = asyncio.get_event_loop()
- self.assertRaises(
- TestException, loop.run_until_complete, raising_coroutine(loop=loop)
- )
-
- def test_catching_coroutine(self):
- class TestException(Exception):
- pass
-
- @coroutine
- def catching_coroutine(loop=None):
- loop = asyncio._wrap_loop(loop)
- future = loop.create_future()
- loop.call_soon(future.set_exception, TestException("exception"))
- try:
- yield future
- except TestException:
- self.assertTrue(True)
- else:
- self.assertTrue(False)
- coroutine_return("success")
-
- loop = asyncio.get_event_loop()
- self.assertEqual(
- "success", loop.run_until_complete(catching_coroutine(loop=loop))
- )
-
- def test_cancelled_coroutine(self):
- """
- Verify that a coroutine can handle (and reraise) asyncio.CancelledError
- in order to perform any necessary cleanup. Note that the
- asyncio.CancelledError will only be thrown in the coroutine if there's
- an opportunity (yield) before the generator raises StopIteration.
- """
- loop = asyncio.get_event_loop()
- ready_for_exception = loop.create_future()
- exception_in_coroutine = loop.create_future()
-
- @coroutine
- def cancelled_coroutine(loop=None):
- loop = asyncio._wrap_loop(loop)
- while True:
- task = loop.create_future()
- try:
- ready_for_exception.set_result(None)
- yield task
- except BaseException as e:
- # Since python3.8, asyncio.CancelledError inherits
- # from BaseException.
- task.done() or task.cancel()
- exception_in_coroutine.set_exception(e)
- raise
- else:
- exception_in_coroutine.set_result(None)
-
- future = cancelled_coroutine(loop=loop)
- loop.run_until_complete(ready_for_exception)
- future.cancel()
-
- self.assertRaises(asyncio.CancelledError, loop.run_until_complete, future)
-
- self.assertRaises(
- asyncio.CancelledError, loop.run_until_complete, exception_in_coroutine
- )
-
- def test_cancelled_future(self):
- """
- When a coroutine raises CancelledError, the coroutine's
- future is cancelled.
- """
-
- @coroutine
- def cancelled_future_coroutine(loop=None):
- loop = asyncio._wrap_loop(loop)
- while True:
- future = loop.create_future()
- loop.call_soon(future.cancel)
- yield future
-
- loop = asyncio.get_event_loop()
- future = loop.run_until_complete(
- asyncio.wait([cancelled_future_coroutine(loop=loop)], loop=loop)
- )[0].pop()
- self.assertTrue(future.cancelled())
-
- def test_yield_expression_result(self):
- @coroutine
- def yield_expression_coroutine(loop=None):
- for i in range(3):
- x = yield asyncio.sleep(0, result=i, loop=loop)
- self.assertEqual(x, i)
-
- loop = asyncio.get_event_loop()
- loop.run_until_complete(yield_expression_coroutine(loop=loop))
-
- def test_method_coroutine(self):
- class Cubby:
-
- _empty = object()
-
- def __init__(self, loop):
- self._loop = loop
- self._value = self._empty
- self._waiters = []
-
- def _notify(self):
- waiters = self._waiters
- self._waiters = []
- for waiter in waiters:
- waiter.cancelled() or waiter.set_result(None)
-
- def _wait(self):
- waiter = self._loop.create_future()
- self._waiters.append(waiter)
- return waiter
-
- @coroutine
- def read(self, loop=None):
- while self._value is self._empty:
- yield self._wait()
-
- value = self._value
- self._value = self._empty
- self._notify()
- coroutine_return(value)
-
- @coroutine
- def write(self, value, loop=None):
- while self._value is not self._empty:
- yield self._wait()
-
- self._value = value
- self._notify()
-
- @coroutine
- def writer_coroutine(cubby, values, sentinel, loop=None):
- for value in values:
- yield cubby.write(value, loop=loop)
- yield cubby.write(sentinel, loop=loop)
-
- @coroutine
- def reader_coroutine(cubby, sentinel, loop=None):
- results = []
- while True:
- result = yield cubby.read(loop=loop)
- if result == sentinel:
- break
- results.append(result)
- coroutine_return(results)
-
- loop = asyncio.get_event_loop()
- cubby = Cubby(loop)
- values = list(range(3))
- writer = asyncio.ensure_future(
- writer_coroutine(cubby, values, None, loop=loop), loop=loop
- )
- reader = asyncio.ensure_future(
- reader_coroutine(cubby, None, loop=loop), loop=loop
- )
- loop.run_until_complete(asyncio.wait([writer, reader], loop=loop))
-
- self.assertEqual(reader.result(), values)
-
- # Test decoration of coroutine methods and functions for
- # synchronous usage, allowing coroutines to smoothly
- # blend with synchronous code.
- sync_cubby = _sync_methods(cubby, loop=loop)
- sync_reader = _sync_decorator(reader_coroutine, loop=loop)
- writer = asyncio.ensure_future(
- writer_coroutine(cubby, values, None, loop=loop), loop=loop
- )
- self.assertEqual(sync_reader(cubby, None), values)
- self.assertTrue(writer.done())
-
- for i in range(3):
- sync_cubby.write(i)
- self.assertEqual(sync_cubby.read(), i)
diff --git a/lib/portage/tests/util/futures/test_done_callback.py b/lib/portage/tests/util/futures/test_done_callback.py
index 40c9ae95c..395256932 100644
--- a/lib/portage/tests/util/futures/test_done_callback.py
+++ b/lib/portage/tests/util/futures/test_done_callback.py
@@ -7,7 +7,6 @@ from portage.util._eventloop.global_event_loop import global_event_loop
class FutureDoneCallbackTestCase(TestCase):
def testFutureDoneCallback(self):
-
event_loop = global_event_loop()
def done_callback(finished):
diff --git a/lib/portage/tests/util/futures/test_iter_completed.py b/lib/portage/tests/util/futures/test_iter_completed.py
index 4b0241c61..0c549018e 100644
--- a/lib/portage/tests/util/futures/test_iter_completed.py
+++ b/lib/portage/tests/util/futures/test_iter_completed.py
@@ -1,7 +1,11 @@
-# Copyright 2018 Gentoo Foundation
+# Copyright 2018-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import time
+
+import functools
+import pytest
+
from portage.tests import TestCase
from portage.util._async.ForkProcess import ForkProcess
from portage.util._eventloop.global_event_loop import global_event_loop
@@ -16,6 +20,7 @@ class SleepProcess(ForkProcess):
__slots__ = ("future", "seconds")
def _start(self):
+ self.target = functools.partial(time.sleep, self.seconds)
self.addExitListener(self._future_done)
ForkProcess._start(self)
@@ -23,17 +28,12 @@ class SleepProcess(ForkProcess):
if not self.future.cancelled():
self.future.set_result(self.seconds)
- def _run(self):
- time.sleep(self.seconds)
-
class IterCompletedTestCase(TestCase):
+ # Mark this as xfail (non-strict), since we don't want to fail if heavy
+ # system load causes the tasks to finish in an unexpected order.
+ @pytest.mark.xfail(strict=False)
def testIterCompleted(self):
-
- # Mark this as todo, since we don't want to fail if heavy system
- # load causes the tasks to finish in an unexpected order.
- self.todo = True
-
loop = global_event_loop()
tasks = [
SleepProcess(seconds=0.200),
@@ -57,7 +57,6 @@ class IterCompletedTestCase(TestCase):
self.assertEqual(seconds, future.result())
def testAsyncCancel(self):
-
loop = global_event_loop()
input_futures = set()
future_count = 3
diff --git a/lib/portage/tests/util/futures/test_retry.py b/lib/portage/tests/util/futures/test_retry.py
index 8ea832136..2197f6697 100644
--- a/lib/portage/tests/util/futures/test_retry.py
+++ b/lib/portage/tests/util/futures/test_retry.py
@@ -1,18 +1,14 @@
-# Copyright 2018-2021 Gentoo Authors
+# Copyright 2018-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from concurrent.futures import Future, ThreadPoolExecutor
import contextlib
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
+import threading
import weakref
import time
-import portage
from portage.tests import TestCase
from portage.util._eventloop.global_event_loop import global_event_loop
from portage.util.backoff import RandomExponentialBackoff
@@ -176,8 +172,13 @@ class RetryTestCase(TestCase):
asyncio.wait([decorated_func()], loop=loop)
)
self.assertEqual(len(done), 1)
+ cause = done.pop().exception().__cause__
self.assertTrue(
- isinstance(done.pop().exception().__cause__, SucceedNeverException)
+ isinstance(
+ cause,
+ (asyncio.TimeoutError, SucceedNeverException),
+ ),
+ msg=f"Cause was {cause.__class__.__name__}",
)
def testOverallTimeoutWithTimeoutError(self):
@@ -208,7 +209,7 @@ class RetryForkExecutorTestCase(RetryTestCase):
"""
def __init__(self, *pargs, **kwargs):
- super(RetryForkExecutorTestCase, self).__init__(*pargs, **kwargs)
+ super().__init__(*pargs, **kwargs)
self._executor = None
def _setUpExecutor(self):
@@ -220,6 +221,7 @@ class RetryForkExecutorTestCase(RetryTestCase):
self._executor = None
def setUp(self):
+ super().setUp()
self._setUpExecutor()
def tearDown(self):
@@ -227,16 +229,19 @@ class RetryForkExecutorTestCase(RetryTestCase):
@contextlib.contextmanager
def _wrap_coroutine_func(self, coroutine_func):
+ uses_subprocess = isinstance(self._executor, ForkExecutor)
parent_loop = global_event_loop()
- parent_pid = portage.getpid()
pending = weakref.WeakValueDictionary()
# Since ThreadPoolExecutor does not propagate cancellation of a
# parent_future to the underlying coroutine, use kill_switch to
# propagate task cancellation to wrapper, so that HangForever's
# thread returns when retry eventually cancels parent_future.
- def wrapper(kill_switch):
- if portage.getpid() == parent_pid:
+ if uses_subprocess:
+ wrapper = _run_coroutine_in_subprocess(coroutine_func)
+ else:
+
+ def wrapper(kill_switch):
# thread in main process
def done_callback(result):
result.cancelled() or result.exception() or result.result()
@@ -260,22 +265,19 @@ class RetryForkExecutorTestCase(RetryTestCase):
else:
return future.result().result()
- # child process
- loop = global_event_loop()
- try:
- return loop.run_until_complete(coroutine_func())
- finally:
- loop.close()
-
def execute_wrapper():
- kill_switch = threading.Event()
+ # Use kill_switch for threads because they can't be killed
+ # like processes. Do not pass kill_switch to subprocesses
+ # because it is not picklable.
+ kill_switch = None if uses_subprocess else threading.Event()
+ wrapper_args = [kill_switch] if kill_switch else []
parent_future = asyncio.ensure_future(
- parent_loop.run_in_executor(self._executor, wrapper, kill_switch),
+ parent_loop.run_in_executor(self._executor, wrapper, *wrapper_args),
loop=parent_loop,
)
def kill_callback(parent_future):
- if not kill_switch.is_set():
+ if kill_switch is not None and not kill_switch.is_set():
kill_switch.set()
parent_future.add_done_callback(kill_callback)
@@ -296,6 +298,19 @@ class RetryForkExecutorTestCase(RetryTestCase):
future.cancelled() or future.exception() or future.result()
+class _run_coroutine_in_subprocess:
+ def __init__(self, coroutine_func):
+ self._coroutine_func = coroutine_func
+
+ def __call__(self):
+ # child process
+ loop = global_event_loop()
+ try:
+ return loop.run_until_complete(self._coroutine_func())
+ finally:
+ loop.close()
+
+
class RetryThreadExecutorTestCase(RetryForkExecutorTestCase):
def _setUpExecutor(self):
self._executor = ThreadPoolExecutor(max_workers=1)
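Aside: the picklability constraint behind the _run_coroutine_in_subprocess class above, illustrated with the standard library only: ForkExecutor sends the callable to a child process, so it must survive pickling; a module-level class instance with plain state does, while a threading.Event (and any closure capturing one) does not.

import pickle
import threading

class PicklableJob:
    def __init__(self, label):
        self.label = label

    def __call__(self):
        return f"ran {self.label}"

pickle.dumps(PicklableJob("demo"))  # works: plain state, module-level class

try:
    pickle.dumps(threading.Event())  # fails: wraps a _thread.lock
except TypeError as exc:
    print(exc)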
diff --git a/lib/portage/tests/util/meson.build b/lib/portage/tests/util/meson.build
new file mode 100644
index 000000000..010dfa784
--- /dev/null
+++ b/lib/portage/tests/util/meson.build
@@ -0,0 +1,31 @@
+py.install_sources(
+ [
+ 'test_checksum.py',
+ 'test_digraph.py',
+ 'test_file_copier.py',
+ 'test_getconfig.py',
+ 'test_grabdict.py',
+ 'test_install_mask.py',
+ 'test_manifest.py',
+ 'test_mtimedb.py',
+ 'test_normalizedPath.py',
+ 'test_shelve.py',
+ 'test_socks5.py',
+ 'test_stackDictList.py',
+ 'test_stackDicts.py',
+ 'test_stackLists.py',
+ 'test_uniqueArray.py',
+ 'test_varExpand.py',
+ 'test_whirlpool.py',
+ 'test_xattr.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/util',
+ pure : not native_extensions
+)
+
+subdir('dyn_libs')
+subdir('eventloop')
+subdir('file_copy')
+subdir('futures')
diff --git a/lib/portage/tests/util/test_checksum.py b/lib/portage/tests/util/test_checksum.py
index da864ba22..4a63e6fdc 100644
--- a/lib/portage/tests/util/test_checksum.py
+++ b/lib/portage/tests/util/test_checksum.py
@@ -1,9 +1,9 @@
-# Copyright 2011-2017 Gentoo Foundation
+# Copyright 2011-2022 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
-from portage.checksum import checksum_str
+from portage.checksum import checksum_str, _apply_hash_filter
from portage.exception import DigestException
@@ -121,28 +121,25 @@ class ChecksumTestCase(TestCase):
except DigestException:
self.skipTest("SHA3_512 implementation not available")
- def test_streebog256(self):
- try:
- self.assertEqual(
- checksum_str(b"", "STREEBOG256"),
- "3f539a213e97c802cc229d474c6aa32a825a360b2a933a949fd925208d9ce1bb",
- )
- self.assertEqual(
- checksum_str(self.text, "STREEBOG256"),
- "4992f1239c46f15b89e7b83ded4d83fb5966da3692788a4a1a6d118f78c08444",
- )
- except DigestException:
- self.skipTest("STREEBOG256 implementation not available")
- def test_streebog512(self):
- try:
- self.assertEqual(
- checksum_str(b"", "STREEBOG512"),
- "8e945da209aa869f0455928529bcae4679e9873ab707b55315f56ceb98bef0a7362f715528356ee83cda5f2aac4c6ad2ba3a715c1bcd81cb8e9f90bf4c1c1a8a",
- )
- self.assertEqual(
- checksum_str(self.text, "STREEBOG512"),
- "330f5c26437f4e22c0163c72b12e93b8c27202f0750627355bdee43a0e0b253c90fbf0a27adbe5414019ff01ed84b7b240a1da1cbe10fae3adffc39c2d87a51f",
- )
- except DigestException:
- self.skipTest("STREEBOG512 implementation not available")
+class ApplyHashFilterTestCase(TestCase):
+ def test_apply_hash_filter(self):
+ indict = {"MD5": "", "SHA1": "", "SHA256": "", "size": ""}
+
+ self.assertEqual(
+ sorted(_apply_hash_filter(indict, lambda x: True)),
+ ["MD5", "SHA1", "SHA256", "size"],
+ )
+ self.assertEqual(
+ sorted(_apply_hash_filter(indict, lambda x: x == "MD5")), ["MD5", "size"]
+ )
+ self.assertEqual(
+ sorted(_apply_hash_filter(indict, lambda x: x != "MD5")),
+ ["SHA1", "SHA256", "size"],
+ )
+ self.assertEqual(
+ sorted(_apply_hash_filter(indict, lambda x: x == "SHA256")),
+ ["SHA256", "size"],
+ )
+ # this should return size + one of the hashes
+ self.assertEqual(len(list(_apply_hash_filter(indict, lambda x: False))), 2)
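Aside: the semantics those assertions pin down, restated as a sketch (behavior inferred from the test, not portage's implementation): "size" always survives the filter, and if the predicate rejects every hash, one hash is kept anyway so the entry remains verifiable.

def apply_hash_filter(digests, predicate):
    kept = {k: v for k, v in digests.items() if k == "size" or predicate(k)}
    if len(kept) == 1 and "size" in kept:
        # The predicate rejected all hashes: retain an arbitrary fallback.
        name = next(k for k in digests if k != "size")
        kept[name] = digests[name]
    return kept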
diff --git a/lib/portage/tests/util/test_digraph.py b/lib/portage/tests/util/test_digraph.py
index b48948f24..ce162c2b0 100644
--- a/lib/portage/tests/util/test_digraph.py
+++ b/lib/portage/tests/util/test_digraph.py
@@ -118,17 +118,15 @@ class DigraphTest(TestCase):
self.assertEqual(
x.shortest_path("D", "A", ignore_priority=-2), ["D", "C", "B", "A"]
)
- cycles = set(tuple(y) for y in x.get_cycles())
+ cycles = {tuple(y) for y in x.get_cycles()}
self.assertEqual(
cycles,
- set(
- [
- ("D", "C", "B", "A"),
- ("C", "B", "A", "D"),
- ("B", "A", "D", "C"),
- ("A", "D", "C", "B"),
- ]
- ),
+ {
+ ("D", "C", "B", "A"),
+ ("C", "B", "A", "D"),
+ ("B", "A", "D", "C"),
+ ("A", "D", "C", "B"),
+ },
)
x.remove_edge("A", "B")
self.assertEqual(x.get_cycles(), [])
@@ -154,17 +152,15 @@ class DigraphTest(TestCase):
self.assertEqual(x.firstzero(), "B")
self.assertRaises(KeyError, x.remove, "Z")
x.delnode("Z")
- self.assertEqual(set(x), set(["A", "B", "C", "D", "E"]))
+ self.assertEqual(set(x), {"A", "B", "C", "D", "E"})
self.assertEqual(x.get("A"), "A")
self.assertEqual(x.get("A", "default"), "A")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "D", "E"]))
- self.assertEqual(set(x.leaf_nodes()), set(["B", "D", "E"]))
- self.assertEqual(
- set(x.leaf_nodes(ignore_priority=0)), set(["A", "B", "D", "E"])
- )
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "C", "D", "E"})
+ self.assertEqual(set(x.leaf_nodes()), {"B", "D", "E"})
+ self.assertEqual(set(x.leaf_nodes(ignore_priority=0)), {"A", "B", "D", "E"})
self.assertEqual(x.root_nodes(), ["A"])
- self.assertEqual(set(x.root_nodes(ignore_priority=0)), set(["A", "B", "C"]))
- self.assertEqual(set(x.child_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.root_nodes(ignore_priority=0)), {"A", "B", "C"})
+ self.assertEqual(set(x.child_nodes("A")), {"B", "C"})
self.assertEqual(x.child_nodes("A", ignore_priority=2), [])
self.assertEqual(x.parent_nodes("B"), ["A"])
self.assertEqual(x.parent_nodes("B", ignore_priority=-2), ["A"])
@@ -177,12 +173,12 @@ class DigraphTest(TestCase):
self.assertEqual(x.shortest_path("A", "D"), ["A", "C", "D"])
self.assertEqual(x.shortest_path("D", "A"), None)
self.assertEqual(x.shortest_path("A", "D", ignore_priority=2), None)
- cycles = set(tuple(y) for y in x.get_cycles())
+ cycles = {tuple(y) for y in x.get_cycles()}
self.assertEqual(cycles, set())
x.remove("D")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "C", "E"]))
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "C", "E"})
x.remove("C")
- self.assertEqual(set(x.all_nodes()), set(["A", "B", "E"]))
+ self.assertEqual(set(x.all_nodes()), {"A", "B", "E"})
portage.util.noiselimit = -2
x.debug_print()
portage.util.noiselimit = 0
@@ -210,9 +206,9 @@ class DigraphTest(TestCase):
self.assertEqual(x.all_nodes(), ["A", "B", "C"])
self.assertEqual(x.leaf_nodes(), [])
self.assertEqual(x.root_nodes(), [])
- self.assertEqual(set(x.child_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.child_nodes("A")), {"B", "C"})
self.assertEqual(x.child_nodes("A", ignore_priority=0), ["B"])
- self.assertEqual(set(x.parent_nodes("A")), set(["B", "C"]))
+ self.assertEqual(set(x.parent_nodes("A")), {"B", "C"})
self.assertEqual(x.parent_nodes("A", ignore_priority=0), ["C"])
self.assertEqual(x.parent_nodes("A", ignore_priority=1), [])
self.assertEqual(x.hasallzeros(), False)
@@ -223,22 +219,18 @@ class DigraphTest(TestCase):
x.shortest_path("A", "C", ignore_priority=0), ["A", "B", "C"]
)
self.assertEqual(x.shortest_path("C", "A", ignore_priority=0), ["C", "A"])
- cycles = set(frozenset(y) for y in x.get_cycles())
+ cycles = {frozenset(y) for y in x.get_cycles()}
self.assertEqual(
cycles,
- set(
- [
- frozenset(["A", "B"]),
- frozenset(["A", "C"]),
- frozenset(["B", "C"]),
- ]
- ),
+ {
+ frozenset(["A", "B"]),
+ frozenset(["A", "C"]),
+ frozenset(["B", "C"]),
+ },
)
x.remove_edge("A", "B")
- cycles = set(frozenset(y) for y in x.get_cycles())
- self.assertEqual(
- cycles, set([frozenset(["A", "C"]), frozenset(["C", "B"])])
- )
+ cycles = {frozenset(y) for y in x.get_cycles()}
+ self.assertEqual(cycles, {frozenset(["A", "C"]), frozenset(["C", "B"])})
x.difference_update(["C"])
self.assertEqual(x.all_nodes(), ["A", "B"])
portage.util.noiselimit = -2
diff --git a/lib/portage/tests/util/test_file_copier.py b/lib/portage/tests/util/test_file_copier.py
index d2109e1f5..62db37001 100644
--- a/lib/portage/tests/util/test_file_copier.py
+++ b/lib/portage/tests/util/test_file_copier.py
@@ -17,7 +17,6 @@ class FileCopierTestCase(TestCase):
loop = global_event_loop()
tempdir = tempfile.mkdtemp()
try:
-
# regular successful copy
src_path = os.path.join(tempdir, "src")
dest_path = os.path.join(tempdir, "dest")
diff --git a/lib/portage/tests/util/test_getconfig.py b/lib/portage/tests/util/test_getconfig.py
index 14fe145eb..9a2af43e4 100644
--- a/lib/portage/tests/util/test_getconfig.py
+++ b/lib/portage/tests/util/test_getconfig.py
@@ -26,7 +26,7 @@ class GetConfigTestCase(TestCase):
}
def testGetConfig(self):
- make_globals_file = os.path.join(self.cnf_path, "make.globals")
+ make_globals_file = os.path.join(str(self.cnf_path), "make.globals")
d = getconfig(make_globals_file)
for k, v in self._cases.items():
self.assertEqual(d[k], v)
@@ -71,9 +71,9 @@ class GetConfigTestCase(TestCase):
# Format like env_update formats /etc/profile.env.
for k, v in cases.items():
if v.startswith("$") and not v.startswith("${"):
- line = "export %s=$'%s'\n" % (k, v[1:])
+ line = f"export {k}=$'{v[1:]}'\n"
else:
- line = "export %s='%s'\n" % (k, v)
+ line = f"export {k}='{v}'\n"
f.write(_unicode_encode(line))
f.flush()
diff --git a/lib/portage/tests/util/test_install_mask.py b/lib/portage/tests/util/test_install_mask.py
index d9558a857..3927313a3 100644
--- a/lib/portage/tests/util/test_install_mask.py
+++ b/lib/portage/tests/util/test_install_mask.py
@@ -1,8 +1,11 @@
-# Copyright 2018 Gentoo Foundation
+# Copyright 2018-2022 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
+import tempfile
+from portage import os
+from portage import shutil
from portage.tests import TestCase
-from portage.util.install_mask import InstallMask
+from portage.util.install_mask import InstallMask, install_mask_dir
class InstallMaskTestCase(TestCase):
@@ -166,3 +169,27 @@ class InstallMaskTestCase(TestCase):
install_mask_str, path
),
)
+
+ def testSymlinkDir(self):
+ """
+ Test that masked symlinks to directories are removed.
+ """
+ tmp_dir = tempfile.mkdtemp()
+
+ try:
+ base_dir = os.path.join(tmp_dir, "foo")
+ target_dir = os.path.join(tmp_dir, "foo", "bar")
+ link_name = os.path.join(tmp_dir, "foo", "baz")
+
+ os.mkdir(base_dir)
+ os.mkdir(target_dir)
+ os.symlink(target_dir, link_name)
+
+ install_mask = InstallMask("/foo/")
+ install_mask_dir(tmp_dir, install_mask)
+ self.assertFalse(
+ os.path.lexists(link_name), f"failed to remove {link_name}"
+ )
+ self.assertFalse(os.path.lexists(base_dir), f"failed to remove {base_dir}")
+ finally:
+ shutil.rmtree(tmp_dir)
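Aside: the behavior testSymlinkDir pins down, as a sketch (hypothetical helper; the real logic lives in portage.util.install_mask): a masked symlink pointing at a directory must be unlinked as a symlink, never recursed into, otherwise removal would follow the link.

import os

def remove_masked_path(path):
    if os.path.islink(path) or not os.path.isdir(path):
        os.unlink(path)  # files and symlinks, including symlinks to dirs
    else:
        for name in os.listdir(path):
            remove_masked_path(os.path.join(path, name))
        os.rmdir(path)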
diff --git a/lib/portage/tests/util/test_manifest.py b/lib/portage/tests/util/test_manifest.py
new file mode 100644
index 000000000..2d41b9fc9
--- /dev/null
+++ b/lib/portage/tests/util/test_manifest.py
@@ -0,0 +1,34 @@
+# Copyright 2022-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import shutil
+import tempfile
+
+from pathlib import Path
+from portage import Manifest
+from portage.tests import TestCase
+
+
+class ManifestTestCase(TestCase):
+ def test_simple_addFile(self):
+ base_tempdir = tempfile.mkdtemp()
+ tempdir = Path(base_tempdir) / "app-portage" / "diffball"
+ manifest = Manifest(str(tempdir), required_hashes=["SHA512", "BLAKE2B"])
+
+ (tempdir / "files").mkdir(parents=True)
+ (tempdir / "files" / "test.patch").write_text(
+ "Fix the diffball foobar functionality.\n"
+ )
+
+ # Nothing should be in the Manifest yet
+ with self.assertRaises(KeyError):
+ manifest.getFileData("AUX", "test.patch", "SHA512")
+
+ manifest.addFile("AUX", "files/test.patch")
+
+ self.assertEqual(len(manifest.fhashdict["AUX"].keys()), 1)
+ self.assertEqual(
+ manifest.getFileData("AUX", "test.patch", "SHA512"),
+ "e30d069dcf284cbcb2d5685f03ca362469026b469dec4f8655d0c9a2bf317f5d9f68f61855ea403f4959bc0b9c003ae824fb9d6ab2472a739950623523af9da9",
+ )
+ shutil.rmtree(base_tempdir)
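Aside: the long SHA512 literal is simply the digest of the patch file's contents; this standard-library snippet should reproduce it (it recomputes rather than asserts the value):

import hashlib

content = "Fix the diffball foobar functionality.\n"
print(hashlib.sha512(content.encode("utf-8")).hexdigest())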
diff --git a/lib/portage/tests/util/test_mtimedb.py b/lib/portage/tests/util/test_mtimedb.py
new file mode 100644
index 000000000..d80b4f1da
--- /dev/null
+++ b/lib/portage/tests/util/test_mtimedb.py
@@ -0,0 +1,362 @@
+# Copyright 2022 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from unittest.mock import patch, mock_open
+import json
+
+from portage.tests import TestCase
+
+import portage
+from portage.data import portage_gid, uid
+from portage.util.mtimedb import MtimeDB, _MTIMEDBKEYS
+
+
+# Some data for the fixtures:
+
+_ONE_RESUME_LIST_JSON = b"""{
+ "info": {
+ "/usr/share/binutils-data/x86_64-pc-linux-gnu/2.37/info": 1711787325,
+ "/usr/share/gcc-data/x86_64-pc-linux-gnu/11.2.0/info": 1735158257,
+ "/usr/share/info": 1650633847
+ },
+ "ldpath": {
+ "/lib": 1748456830,
+ "/lib64": 1750523381,
+ "/usr/lib": 1750461195,
+ "/usr/lib/llvm/13/lib64": 1747003135,
+ "/usr/lib/rust/lib": 1750461173,
+ "/usr/lib64": 1750881821,
+ "/usr/local/lib": 1711784303,
+ "/usr/local/lib64": 1711784303
+ },
+ "resume": {
+ "favorites": [
+ "@world"
+ ],
+ "mergelist": [
+ [
+ "ebuild",
+ "/",
+ "some-cat/some-package-1.2.3-r4",
+ "merge"
+ ],
+ [
+ "ebuild",
+ "/",
+ "another-cat/another-package-4.3.2-r1",
+ "merge"
+ ]
+ ],
+ "myopts": {
+ "--buildpkg": true,
+ "--deep": true,
+ "--getbinpkg": true,
+ "--keep-going": true,
+ "--newuse": true,
+ "--quiet": true,
+ "--regex-search-auto": "y",
+ "--update": true,
+ "--usepkg": true,
+ "--verbose": true
+ }
+ },
+ "starttime": 0,
+ "updates": {
+ "/var/db/repos/gentoo/profiles/updates/1Q-2022": 1747854791,
+ "/var/db/repos/gentoo/profiles/updates/2Q-2022": 1752846209,
+ "/var/db/repos/gentoo/profiles/updates/4Q-2021": 1742787797
+ },
+ "version": "3.0.30"
+}
+"""
+
+_EMPTY_FILE = b""
+
+_PARTIAL_FILE_JSON = b"""{
+ "ldpath": {
+ "/lib": 1748456830,
+ "/lib64": 1750523381,
+ "/usr/lib": 1750461195,
+ "/usr/lib/llvm/13/lib64": 1747003135,
+ "/usr/lib/rust/lib": 1750461173,
+ "/usr/lib64": 1750881821,
+ "/usr/local/lib": 1711784303,
+ "/usr/local/lib64": 1711784303
+ },
+ "resume": {
+ "favorites": [
+ "@world"
+ ],
+ "mergelist": [
+ [
+ "ebuild",
+ "/",
+ "some-cat/some-package-1.2.3-r4",
+ "merge"
+ ],
+ [
+ "ebuild",
+ "/",
+ "another-cat/another-package-4.3.2-r1",
+ "merge"
+ ]
+ ],
+ "myopts": {
+ "--buildpkg": true,
+ "--deep": true,
+ "--getbinpkg": true,
+ "--keep-going": true,
+ "--newuse": true,
+ "--quiet": true,
+ "--regex-search-auto": "y",
+ "--update": true,
+ "--usepkg": true,
+ "--verbose": true
+ }
+ },
+ "starttime": 0,
+}
+"""
+
+_TWO_RESUME_LISTS_JSON = b"""{
+ "info": {
+ "/usr/share/binutils-data/x86_64-pc-linux-gnu/2.37/info": 1711787325,
+ "/usr/share/gcc-data/x86_64-pc-linux-gnu/11.2.0/info": 1735158257,
+ "/usr/share/info": 1650633847
+ },
+ "ldpath": {
+ "/lib": 1748456830,
+ "/lib64": 1750523381,
+ "/usr/lib": 1750461195,
+ "/usr/lib/llvm/13/lib64": 1747003135,
+ "/usr/lib/rust/lib": 1750461173,
+ "/usr/lib64": 1750881821,
+ },
+ "resume": {
+ "favorites": [
+ "@world"
+ ],
+ "mergelist": [
+ [
+ "ebuild",
+ "/",
+ "some-cat/some-package-1.2.3-r4",
+ "merge"
+ ],
+ [
+ "ebuild",
+ "/",
+ "another-cat/another-package-4.3.2-r1",
+ "merge"
+ ]
+ ],
+ "myopts": {
+ "--buildpkg": true,
+ "--deep": true,
+ "--getbinpkg": true,
+ "--keep-going": true,
+ "--newuse": true,
+ "--quiet": true,
+ "--regex-search-auto": "y",
+ "--update": true,
+ "--usepkg": true,
+ "--verbose": true
+ }
+ },
+ "resume_backup": {
+ "favorites": [
+ "my-cat/cool"
+ ],
+ "mergelist": [
+ [
+ "ebuild",
+ "/",
+ "sys-apps/cool-dep-2.2.2",
+ "merge"
+ ],
+ [
+ "binary",
+ "/",
+ "my-cat/cool-1.5.2",
+ "merge"
+ ]
+ ],
+ "myopts": {
+ "--getbinpkg": true,
+ "--regex-search-auto": "y",
+ "--usepkg": true,
+ }
+ },
+ "starttime": 0,
+ "updates": {
+ "/var/db/repos/gentoo/profiles/updates/1Q-2022": 1747854791,
+ "/var/db/repos/gentoo/profiles/updates/2Q-2022": 1752846209,
+ "/var/db/repos/gentoo/profiles/updates/4Q-2021": 1742787797
+ },
+ "version": "3.0.30"
+}
+"""
+
+
+class MtimeDBTestCase(TestCase):
+ text = b"Unit tests for MtimeDB"
+
+ def test_instances_created_with_only_expected_keys(self):
+ all_fixtures = (
+ _ONE_RESUME_LIST_JSON,
+ _EMPTY_FILE,
+ _PARTIAL_FILE_JSON,
+ _TWO_RESUME_LISTS_JSON,
+ )
+ for contents in all_fixtures:
+ with patch("portage.util.mtimedb.open", mock_open(read_data=contents)):
+ mtimedb = MtimeDB("/path/to/mtimedb")
+ self.assertLessEqual(set(mtimedb.keys()), _MTIMEDBKEYS)
+
+ def test_default_values(self):
+ with patch("portage.util.mtimedb.open", mock_open(read_data=_EMPTY_FILE)):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ self.assertEqual(mtimedb["starttime"], 0)
+ self.assertEqual(mtimedb["version"], "")
+ self.assertEqual(mtimedb["info"], {})
+ self.assertEqual(mtimedb["ldpath"], {})
+ self.assertEqual(mtimedb["updates"], {})
+
+ def test_instances_keep_a_deepcopy_of_clean_data(self):
+ with patch(
+ "portage.util.mtimedb.open", mock_open(read_data=_ONE_RESUME_LIST_JSON)
+ ):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ self.assertEqual(dict(mtimedb), dict(mtimedb._clean_data))
+ self.assertIsNot(mtimedb, mtimedb._clean_data)
+
+ def test_load_data_called_at_instance_creation_time(self):
+ with patch(
+ "portage.util.mtimedb.open", mock_open(read_data=_ONE_RESUME_LIST_JSON)
+ ):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ self.assertEqual(
+ mtimedb["info"],
+ {
+ "/usr/share/binutils-data/x86_64-pc-linux-gnu/2.37/info": (1711787325),
+ "/usr/share/gcc-data/x86_64-pc-linux-gnu/11.2.0/info": (1735158257),
+ "/usr/share/info": 1650633847,
+ },
+ )
+ self.assertEqual(
+ mtimedb["ldpath"],
+ {
+ "/lib": 1748456830,
+ "/lib64": 1750523381,
+ "/usr/lib": 1750461195,
+ "/usr/lib/llvm/13/lib64": 1747003135,
+ "/usr/lib/rust/lib": 1750461173,
+ "/usr/lib64": 1750881821,
+ "/usr/local/lib": 1711784303,
+ "/usr/local/lib64": 1711784303,
+ },
+ )
+ self.assertEqual(
+ mtimedb["resume"],
+ {
+ "favorites": ["@world"],
+ "mergelist": [
+ ["ebuild", "/", "some-cat/some-package-1.2.3-r4", "merge"],
+ ["ebuild", "/", "another-cat/another-package-4.3.2-r1", "merge"],
+ ],
+ "myopts": {
+ "--buildpkg": True,
+ "--deep": True,
+ "--getbinpkg": True,
+ "--keep-going": True,
+ "--newuse": True,
+ "--quiet": True,
+ "--regex-search-auto": "y",
+ "--update": True,
+ "--usepkg": True,
+ "--verbose": True,
+ },
+ },
+ )
+ self.assertEqual(mtimedb["starttime"], 0)
+ self.assertEqual(
+ mtimedb["updates"],
+ {
+ "/var/db/repos/gentoo/profiles/updates/1Q-2022": 1747854791,
+ "/var/db/repos/gentoo/profiles/updates/2Q-2022": 1752846209,
+ "/var/db/repos/gentoo/profiles/updates/4Q-2021": 1742787797,
+ },
+ )
+ self.assertEqual(mtimedb["version"], "3.0.30")
+
+ @patch("portage.util.mtimedb.MtimeDB._MtimeDB__write_to_disk")
+ def test_commit_writes_to_disk_if_needed_and_possible(self, pwrite2disk):
+ with patch("portage.util.mtimedb.open", mock_open(read_data=_EMPTY_FILE)):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ mtimedb.commit()
+ pwrite2disk.assert_not_called()
+ mtimedb["updates"]["/long/path/1Q-2021"] = 1739992409
+ d = {}
+ d.update(mtimedb)
+ mtimedb.commit()
+ pwrite2disk.assert_called_once_with(d)
+
+ @patch("portage.util.mtimedb.MtimeDB._MtimeDB__write_to_disk")
+ def test_commit_does_not_write_to_disk_if_no_file(self, pwrite2disk):
+ with patch("portage.util.mtimedb.open", mock_open(read_data=_EMPTY_FILE)):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ mtimedb["updates"]["/long/path/1Q-2021"] = 1739992409
+ mtimedb.filename = None
+ mtimedb.commit()
+ pwrite2disk.assert_not_called()
+
+ @patch("portage.util.mtimedb.MtimeDB._MtimeDB__write_to_disk")
+ def test_commit_does_not_write_to_disk_if_no_changes(self, pwrite2disk):
+ with patch("portage.util.mtimedb.open", mock_open(read_data=_EMPTY_FILE)):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ mtimedb.commit()
+ pwrite2disk.assert_not_called()
+
+ def test_is_readonly_attribute(self):
+ with patch(
+ "portage.util.mtimedb.open", mock_open(read_data=_ONE_RESUME_LIST_JSON)
+ ):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ self.assertFalse(mtimedb.is_readonly)
+
+ mtimedb.filename = None
+ self.assertTrue(mtimedb.is_readonly)
+
+ mtimedb.filename = "/what/ever/mtimedb"
+ self.assertFalse(mtimedb.is_readonly)
+
+ def test_make_readonly(self):
+ with patch(
+ "portage.util.mtimedb.open", mock_open(read_data=_ONE_RESUME_LIST_JSON)
+ ):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ mtimedb.make_readonly()
+ self.assertTrue(mtimedb.is_readonly)
+ self.assertIs(mtimedb.filename, None)
+
+ @patch("portage.util.mtimedb.apply_secpass_permissions")
+ @patch("portage.util.mtimedb.atomic_ofstream")
+ def test_write_to_disk(self, matomic_ofstream, mapply_perms):
+ with patch(
+ "portage.util.mtimedb.open", mock_open(read_data=_ONE_RESUME_LIST_JSON)
+ ):
+ mtimedb = MtimeDB("/some/path/mtimedb")
+ d = {"z": "zome", "a": "AAA"}
+ encoding = portage._encodings["repo.content"]
+ # The method is private and shouldn't normally be called from
+ # outside, but it is called directly here for the sake of unit testing:
+ mtimedb._MtimeDB__write_to_disk(d)
+ self.assertEqual(d["version"], str(portage.VERSION))
+ matomic_ofstream.return_value.write.assert_called_once_with(
+ json.dumps(d, **mtimedb._json_write_opts).encode(encoding)
+ )
+ mapply_perms.assert_called_once_with(
+ mtimedb.filename, uid=uid, gid=portage_gid, mode=0o644
+ )
+ self.assertEqual(d, mtimedb._clean_data)
+ self.assertIsNot(d, mtimedb._clean_data)
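Aside: a condensed sketch of the commit() contract the three mocked tests above pin down (a hypothetical minimal class, not the real MtimeDB): write only when a backing file exists and the current state differs from the last clean snapshot.

class DirtyTrackingDB(dict):
    def __init__(self, filename, clean_data):
        super().__init__(clean_data)
        self.filename = filename
        self._clean_data = dict(clean_data)

    def commit(self):
        if self.filename is None:  # read-only: nowhere to write
            return
        if dict(self) == self._clean_data:  # unchanged: skip the write
            return
        self._write_to_disk(dict(self))
        self._clean_data = dict(self)  # new baseline after a write

    def _write_to_disk(self, data):
        pass  # serialize atomically, as MtimeDB does with atomic_ofstream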
diff --git a/lib/portage/tests/util/test_normalizedPath.py b/lib/portage/tests/util/test_normalizedPath.py
index 6609e5756..563294050 100644
--- a/lib/portage/tests/util/test_normalizedPath.py
+++ b/lib/portage/tests/util/test_normalizedPath.py
@@ -7,7 +7,6 @@ from portage.tests import TestCase
class NormalizePathTestCase(TestCase):
def testNormalizePath(self):
-
from portage.util import normalize_path
path = "///foo/bar/baz"
diff --git a/lib/portage/tests/util/test_shelve.py b/lib/portage/tests/util/test_shelve.py
index b68e9a043..b1759ed5f 100644
--- a/lib/portage/tests/util/test_shelve.py
+++ b/lib/portage/tests/util/test_shelve.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Gentoo Authors
+# Copyright 2020-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import argparse
@@ -12,7 +12,6 @@ from portage.util.shelve import dump, open_shelve, restore
class ShelveUtilsTestCase(TestCase):
-
TEST_DATA = (
# distfiles_db
{
@@ -45,7 +44,6 @@ class ShelveUtilsTestCase(TestCase):
db.close()
dump(dump_args)
- os.unlink(dump_args.src)
restore_args = argparse.Namespace(
dest=dump_args.src,
src=dump_args.dest,
diff --git a/lib/portage/tests/util/test_socks5.py b/lib/portage/tests/util/test_socks5.py
index 18b8d4db8..a8cd0c46c 100644
--- a/lib/portage/tests/util/test_socks5.py
+++ b/lib/portage/tests/util/test_socks5.py
@@ -1,6 +1,7 @@
-# Copyright 2019-2021 Gentoo Authors
+# Copyright 2019-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import asyncio
import functools
import shutil
import socket
@@ -10,8 +11,9 @@ import time
import portage
from portage.tests import TestCase
-from portage.util._eventloop.global_event_loop import global_event_loop
from portage.util import socks5
+from portage.util.futures.executor.fork import ForkExecutor
+from portage.util._eventloop.global_event_loop import global_event_loop
from portage.const import PORTAGE_BIN_PATH
from http.server import BaseHTTPRequestHandler, HTTPServer
@@ -88,18 +90,20 @@ class AsyncHTTPServerTestCase(TestCase):
if f is not None:
f.close()
- def test_http_server(self):
+ def test_http_server(self):
+ asyncio.run(self._test_http_server())
+
+ async def _test_http_server(self):
host = "127.0.0.1"
content = b"Hello World!\n"
path = "/index.html"
- loop = global_event_loop()
+
+ loop = asyncio.get_running_loop()
for i in range(2):
with AsyncHTTPServer(host, {path: content}, loop) as server:
for j in range(2):
- result = loop.run_until_complete(
- loop.run_in_executor(
- None, self._fetch_directly, host, server.server_port, path
- )
+ result = await loop.run_in_executor(
+ None, self._fetch_directly, host, server.server_port, path
)
self.assertEqual(result, content)
@@ -124,7 +128,7 @@ class _socket_file_wrapper(portage.proxy.objectproxy.ObjectProxy):
def __getattribute__(self, attr):
if attr == "close":
return object.__getattribute__(self, "close")
- return super(_socket_file_wrapper, self).__getattribute__(attr)
+ return super().__getattribute__(attr)
def __enter__(self):
return self
@@ -177,38 +181,83 @@ class Socks5ServerTestCase(TestCase):
return f.read()
def test_socks5_proxy(self):
+ asyncio.run(self._test_socks5_proxy())
- loop = global_event_loop()
+ async def _test_socks5_proxy(self):
+ loop = asyncio.get_running_loop()
host = "127.0.0.1"
content = b"Hello World!"
path = "/index.html"
proxy = None
tempdir = tempfile.mkdtemp()
+ previous_exithandlers = portage.process._exithandlers
try:
+ portage.process._exithandlers = []
with AsyncHTTPServer(host, {path: content}, loop) as server:
-
settings = {
"PORTAGE_TMPDIR": tempdir,
"PORTAGE_BIN_PATH": PORTAGE_BIN_PATH,
}
proxy = socks5.get_socks5_proxy(settings)
- loop.run_until_complete(socks5.proxy.ready())
-
- result = loop.run_until_complete(
- loop.run_in_executor(
- None,
- self._fetch_via_proxy,
- proxy,
- host,
- server.server_port,
- path,
- )
+ await socks5.proxy.ready()
+
+ result = await loop.run_in_executor(
+ None,
+ self._fetch_via_proxy,
+ proxy,
+ host,
+ server.server_port,
+ path,
)
self.assertEqual(result, content)
finally:
- socks5.proxy.stop()
+ try:
+ # Also call run_coroutine_exitfuncs() to exercise atexit hook cleanup.
+ self.assertNotEqual(portage.process._exithandlers, [])
+ await portage.process.run_coroutine_exitfuncs()
+ self.assertEqual(portage.process._exithandlers, [])
+ finally:
+ portage.process._exithandlers = previous_exithandlers
+ shutil.rmtree(tempdir)
+
+
+class Socks5ServerLoopCloseTestCase(TestCase):
+ """
+ For bug 925240, test that the socks5 proxy is automatically
+ terminated when the main event loop is closed, using a subprocess
+ for isolation.
+ """
+
+ def testSocks5ServerLoopClose(self):
+ asyncio.run(self._testSocks5ServerLoopClose())
+
+ async def _testSocks5ServerLoopClose(self):
+ loop = asyncio.get_running_loop()
+ self.assertEqual(
+ await loop.run_in_executor(
+ ForkExecutor(loop=loop), self._testSocks5ServerLoopCloseSubprocess
+ ),
+ True,
+ )
+
+ @staticmethod
+ def _testSocks5ServerLoopCloseSubprocess():
+ loop = global_event_loop()
+ tempdir = tempfile.mkdtemp()
+ try:
+ settings = {
+ "PORTAGE_TMPDIR": tempdir,
+ "PORTAGE_BIN_PATH": PORTAGE_BIN_PATH,
+ }
+
+ socks5.get_socks5_proxy(settings)
+ loop.run_until_complete(socks5.proxy.ready())
+ finally:
+ loop.close()
shutil.rmtree(tempdir)
+
+ return not socks5.proxy.is_running()
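
The pattern adopted throughout this file is worth spelling out: global_event_loop().run_until_complete() gives way to a synchronous test method that delegates to a private coroutine via asyncio.run(), and the coroutine obtains its loop with asyncio.get_running_loop(). A minimal self-contained sketch of the same shape (class and names are illustrative, not part of portage):

import asyncio
import unittest


class ExampleAsyncTestCase(unittest.TestCase):
    def test_upper(self):
        # Synchronous entry point that unittest can collect.
        asyncio.run(self._test_upper())

    async def _test_upper(self):
        loop = asyncio.get_running_loop()
        # Blocking work goes to the default executor, mirroring the
        # run_in_executor() calls in the tests above.
        result = await loop.run_in_executor(None, str.upper, "hello")
        self.assertEqual(result, "HELLO")


if __name__ == "__main__":
    unittest.main()
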
diff --git a/lib/portage/tests/util/test_stackDicts.py b/lib/portage/tests/util/test_stackDicts.py
index fda5d5844..98d2fadfd 100644
--- a/lib/portage/tests/util/test_stackDicts.py
+++ b/lib/portage/tests/util/test_stackDicts.py
@@ -8,7 +8,6 @@ from portage.util import stack_dicts
class StackDictsTestCase(TestCase):
def testStackDictsPass(self):
-
tests = [
([{"a": "b"}, {"b": "c"}], {"a": "b", "b": "c"}, False, [], False),
([{"a": "b"}, {"a": "c"}], {"a": "b c"}, True, [], False),
@@ -22,7 +21,6 @@ class StackDictsTestCase(TestCase):
self.assertEqual(result, test[1])
def testStackDictsFail(self):
-
tests = [
([None, {}], None, False, [], True),
([{"a": "b"}, {"a": "c"}], {"a": "b c"}, False, [], False),
diff --git a/lib/portage/tests/util/test_stackLists.py b/lib/portage/tests/util/test_stackLists.py
index 288685d09..1457c828c 100644
--- a/lib/portage/tests/util/test_stackLists.py
+++ b/lib/portage/tests/util/test_stackLists.py
@@ -8,7 +8,6 @@ from portage.util import stack_lists
class StackListsTestCase(TestCase):
def testStackLists(self):
-
tests = [
([["a", "b", "c"], ["d", "e", "f"]], ["a", "c", "b", "e", "d", "f"], False),
([["a", "x"], ["b", "x"]], ["a", "x", "b"], False),
diff --git a/lib/portage/tests/util/test_uniqueArray.py b/lib/portage/tests/util/test_uniqueArray.py
index c4446c8ea..717a914ca 100644
--- a/lib/portage/tests/util/test_uniqueArray.py
+++ b/lib/portage/tests/util/test_uniqueArray.py
@@ -24,6 +24,5 @@ class UniqueArrayTestCase(TestCase):
number = result.count(item)
self.assertFalse(
number != 1,
- msg=("%s contains %s of %s, " "should be only 1")
- % (result, number, item),
+ msg=f"{result} contains {number} of {item}, should be only 1",
)
diff --git a/lib/portage/tests/util/test_varExpand.py b/lib/portage/tests/util/test_varExpand.py
index b2cbee009..5d68a75af 100644
--- a/lib/portage/tests/util/test_varExpand.py
+++ b/lib/portage/tests/util/test_varExpand.py
@@ -8,10 +8,9 @@ from portage.util import varexpand
class VarExpandTestCase(TestCase):
def testVarExpandPass(self):
-
varDict = {"a": "5", "b": "7", "c": "-5"}
for key in varDict:
- result = varexpand("$%s" % key, varDict)
+ result = varexpand(f"${key}", varDict)
self.assertFalse(
result != varDict[key],
@@ -56,42 +55,36 @@ class VarExpandTestCase(TestCase):
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandDoubleQuotes(self):
-
varDict = {"a": "5"}
tests = [('"${a}"', '"5"')]
for test in tests:
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandSingleQuotes(self):
-
varDict = {"a": "5"}
tests = [("'${a}'", "'${a}'")]
for test in tests:
result = varexpand(test[0], varDict)
self.assertFalse(
result != test[1],
- msg="Got %s != %s from varexpand(%s, %s)"
- % (result, test[1], test[0], varDict),
+ msg=f"Got {result} != {test[1]} from varexpand({test[0]}, {varDict})",
)
def testVarExpandFail(self):
-
varDict = {"a": "5", "b": "7", "c": "15"}
testVars = ["fail"]
for var in testVars:
- result = varexpand("$%s" % var, varDict)
+ result = varexpand(f"${var}", varDict)
self.assertFalse(
len(result),
msg="Got %s == %s, from varexpand(%s, %s)"
diff --git a/lib/portage/tests/util/test_whirlpool.py b/lib/portage/tests/util/test_whirlpool.py
index 554fffc6a..900de4b98 100644
--- a/lib/portage/tests/util/test_whirlpool.py
+++ b/lib/portage/tests/util/test_whirlpool.py
@@ -1,23 +1,42 @@
-# Copyright 2011-2014 Gentoo Foundation
+# Copyright 2011-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import subprocess
+import pytest
-import portage
-from portage import os
-from portage.const import PORTAGE_PYM_PATH
from portage.tests import TestCase
+from portage.util.whirlpool import CWhirlpool, PyWhirlpool
+
+
+CWHIRLPOOL_AVAILABLE = CWhirlpool.is_available
+CWHIRLPOOL_NOT_AVAILABLE_MSG = "C Whirlpool extension is not importable"
class WhirlpoolTestCase(TestCase):
- def testBundledWhirlpool(self):
- # execute the tests bundled with the whirlpool module
- retval = subprocess.call(
- [
- portage._python_interpreter,
- "-b",
- "-Wd",
- os.path.join(PORTAGE_PYM_PATH, "portage/util/whirlpool.py"),
- ]
+ def testBundledWhirlpool(self, cls=PyWhirlpool):
+ self.assertEqual(
+ cls(b"The quick brown fox jumps over the lazy dog").hexdigest(),
+ "b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35",
+ )
+ self.assertEqual(
+ cls(b"The quick brown fox jumps over the lazy eog").hexdigest(),
+ "c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c",
)
- self.assertEqual(retval, os.EX_OK)
+ self.assertEqual(
+ cls(b"").hexdigest(),
+ "19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3",
+ )
+ w = cls()
+ w.update(b"")
+ self.assertEqual(
+ w.hexdigest(),
+ "19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3",
+ )
+
+ @pytest.mark.skipif(
+ not CWHIRLPOOL_AVAILABLE,
+ reason=CWHIRLPOOL_NOT_AVAILABLE_MSG,
+ )
+ def testCWhirlpool(self):
+ if not CWHIRLPOOL_AVAILABLE:
+ self.skipTest(CWHIRLPOOL_NOT_AVAILABLE_MSG)
+ self.testBundledWhirlpool(CWhirlpool)
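
A small usage sketch of the hashlib-style interface the rewritten test exercises, assuming portage.util.whirlpool is importable and that update() accumulates input exactly like hashing the concatenated message in one call:

from portage.util.whirlpool import PyWhirlpool

whole = PyWhirlpool(b"The quick brown fox jumps over the lazy dog").hexdigest()

# Incremental hashing must agree with one-shot hashing.
w = PyWhirlpool()
w.update(b"The quick brown fox ")
w.update(b"jumps over the lazy dog")
assert w.hexdigest() == whole
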
diff --git a/lib/portage/tests/util/test_xattr.py b/lib/portage/tests/util/test_xattr.py
index a10dd194e..59ff7b40f 100644
--- a/lib/portage/tests/util/test_xattr.py
+++ b/lib/portage/tests/util/test_xattr.py
@@ -167,4 +167,4 @@ class StandardTest(TestCase):
"""Make sure the exported API matches"""
for mod in self.MODULES:
for f in self.FUNCS:
- self.assertTrue(hasattr(mod, f), "%s func missing in %s" % (f, mod))
+ self.assertTrue(hasattr(mod, f), f"{f} func missing in {mod}")
diff --git a/lib/portage/tests/versions/meson.build b/lib/portage/tests/versions/meson.build
new file mode 100644
index 000000000..1df4905fd
--- /dev/null
+++ b/lib/portage/tests/versions/meson.build
@@ -0,0 +1,10 @@
+py.install_sources(
+ [
+ 'test_cpv_sort_key.py',
+ 'test_vercmp.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/versions',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/versions/test_cpv_sort_key.py b/lib/portage/tests/versions/test_cpv_sort_key.py
index f0995b1cb..7271f2e3e 100644
--- a/lib/portage/tests/versions/test_cpv_sort_key.py
+++ b/lib/portage/tests/versions/test_cpv_sort_key.py
@@ -7,7 +7,6 @@ from portage.versions import cpv_sort_key
class CpvSortKeyTestCase(TestCase):
def testCpvSortKey(self):
-
tests = [
(
("a/b-2_alpha", "a", "b", "a/b-2", "a/a-1", "a/b-1"),
diff --git a/lib/portage/tests/versions/test_vercmp.py b/lib/portage/tests/versions/test_vercmp.py
index 5b204658e..6aaf1fa5f 100644
--- a/lib/portage/tests/versions/test_vercmp.py
+++ b/lib/portage/tests/versions/test_vercmp.py
@@ -10,7 +10,6 @@ class VerCmpTestCase(TestCase):
"""A simple testCase for portage.versions.vercmp()"""
def testVerCmpGreater(self):
-
tests = [
("6.0", "5.0"),
("5.0", "5"),
@@ -27,7 +26,7 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) <= 0,
- msg="%s < %s? Wrong!" % (test[0], test[1]),
+ msg=f"{test[0]} < {test[1]}? Wrong!",
)
def testVerCmpLess(self):
@@ -57,11 +56,10 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) >= 0,
- msg="%s > %s? Wrong!" % (test[0], test[1]),
+ msg=f"{test[0]} > {test[1]}? Wrong!",
)
def testVerCmpEqual(self):
-
tests = [
("4.0", "4.0"),
("1.0", "1.0"),
@@ -73,11 +71,10 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) != 0,
- msg="%s != %s? Wrong!" % (test[0], test[1]),
+ msg=f"{test[0]} != {test[1]}? Wrong!",
)
def testVerNotEqual(self):
-
tests = [
("1", "2"),
("1.0_alpha", "1.0_pre"),
@@ -96,5 +93,5 @@ class VerCmpTestCase(TestCase):
for test in tests:
self.assertFalse(
vercmp(test[0], test[1]) == 0,
- msg="%s == %s? Wrong!" % (test[0], test[1]),
+ msg=f"{test[0]} == {test[1]}? Wrong!",
)
diff --git a/lib/portage/tests/xpak/meson.build b/lib/portage/tests/xpak/meson.build
new file mode 100644
index 000000000..7ca8d4e4d
--- /dev/null
+++ b/lib/portage/tests/xpak/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'test_decodeint.py',
+ '__init__.py',
+ '__test__.py',
+ ],
+ subdir : 'portage/tests/xpak',
+ pure : not native_extensions
+)
diff --git a/lib/portage/tests/xpak/test_decodeint.py b/lib/portage/tests/xpak/test_decodeint.py
index 25388d866..4d2324191 100644
--- a/lib/portage/tests/xpak/test_decodeint.py
+++ b/lib/portage/tests/xpak/test_decodeint.py
@@ -8,9 +8,8 @@ from portage.xpak import decodeint, encodeint
class testDecodeIntTestCase(TestCase):
def testDecodeInt(self):
-
for n in range(1000):
self.assertEqual(decodeint(encodeint(n)), n)
- for n in (2 ** 32 - 1,):
+ for n in (2**32 - 1,):
self.assertEqual(decodeint(encodeint(n)), n)
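
The round-trip property under test, shown standalone; 2**32 - 1 is the largest value the four-byte xpak integer encoding can represent:

from portage.xpak import decodeint, encodeint

n = 2**32 - 1
assert len(encodeint(n)) == 4  # xpak stores lengths as 32-bit big-endian
assert decodeint(encodeint(n)) == n
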
diff --git a/lib/portage/update.py b/lib/portage/update.py
index 0ccca6e26..0abb92094 100644
--- a/lib/portage/update.py
+++ b/lib/portage/update.py
@@ -2,7 +2,6 @@
# Distributed under the terms of the GNU General Public License v2
import errno
-import io
import re
import stat
import sys
@@ -32,7 +31,6 @@ ignored_dbentries = ("CONTENTS", "environment.bz2")
def update_dbentry(update_cmd, mycontent, eapi=None, parent=None):
-
if parent is not None:
eapi = parent.eapi
@@ -157,9 +155,8 @@ def fixdbentries(update_iter, dbdir, eapi=None, parent=None):
mydata = {}
for myfile in [f for f in os.listdir(dbdir) if f not in ignored_dbentries]:
file_path = os.path.join(dbdir, myfile)
- with io.open(
+ with open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -200,9 +197,8 @@ def grab_updates(updpath, prev_mtimes=None):
if not stat.S_ISREG(mystat.st_mode):
continue
if int(prev_mtimes.get(file_path, -1)) != mystat[stat.ST_MTIME]:
- f = io.open(
+ f = open(
_unicode_encode(file_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
@@ -382,18 +378,17 @@ def update_config_files(
for x in myxfiles:
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(
os.path.join(abs_user_config, x),
encoding=_encodings["fs"],
errors="strict",
),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
file_contents[x] = f.readlines()
- except IOError:
+ except OSError:
continue
finally:
if f is not None:
@@ -428,12 +423,12 @@ def update_config_files(
if match_callback(repo_name, atom, new_atom):
# add a comment with the update command, so
# the user can clearly see what happened
- contents[pos] = "# %s\n" % " ".join(
- "%s" % (x,) for x in update_cmd
+ contents[pos] = "# {}\n".format(
+ " ".join(f"{x}" for x in update_cmd)
)
contents.insert(
pos + 1,
- line.replace("%s" % (atom,), "%s" % (new_atom,), 1),
+ line.replace(f"{atom}", f"{new_atom}", 1),
)
# we've inserted an additional line, so we need to
# skip it when it's reached in the next iteration
@@ -452,10 +447,10 @@ def update_config_files(
try:
write_atomic(updating_file, "".join(file_contents[x]))
except PortageException as e:
- writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"\n!!! {str(e)}\n", noiselevel=-1)
writemsg(
_("!!! An error occurred while updating a config file:")
- + " '%s'\n" % updating_file,
+ + f" '{updating_file}'\n",
noiselevel=-1,
)
continue
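
For context on this whole family of changes: since Python 3 the io.open() name is simply another binding of the open() builtin, and mode="r" is the default, so both edits are behavior-preserving. A quick sketch:

import io
import os

assert io.open is open  # same function since Python 3

# Text-mode read of a bytes path with explicit encoding, as above.
with open(os.fsencode(__file__), encoding="utf-8", errors="replace") as f:
    first_line = f.readline()
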
diff --git a/lib/portage/util/ExtractKernelVersion.py b/lib/portage/util/ExtractKernelVersion.py
index 41a8a6eb0..6a6501b5d 100644
--- a/lib/portage/util/ExtractKernelVersion.py
+++ b/lib/portage/util/ExtractKernelVersion.py
@@ -3,7 +3,6 @@
__all__ = ["ExtractKernelVersion"]
-import io
import logging
from portage import os, _encodings, _unicode_encode
@@ -25,24 +24,19 @@ def ExtractKernelVersion(base_dir):
lines = []
pathname = os.path.join(base_dir, "Makefile")
try:
- f = io.open(
+ f = open(
_unicode_encode(pathname, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
except OSError as details:
return (None, str(details))
- except IOError as details:
- return (None, str(details))
try:
for i in range(4):
lines.append(f.readline())
except OSError as details:
return (None, str(details))
- except IOError as details:
- return (None, str(details))
finally:
f.close()
@@ -81,7 +75,7 @@ def ExtractKernelVersion(base_dir):
for file_path, file_errors in loader_errors.items():
for error_str in file_errors:
writemsg_level(
- "%s: %s\n" % (file_path, error_str),
+ f"{file_path}: {error_str}\n",
level=logging.ERROR,
noiselevel=-1,
)
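
The dropped except IOError clauses were unreachable: since Python 3.3, IOError is a builtin alias of OSError, so the preceding except OSError already catches everything the removed handler could. Demonstrably:

assert IOError is OSError  # alias since Python 3.3

try:
    open("/nonexistent/Makefile")
except OSError as details:
    # FileNotFoundError (an OSError subclass) lands here; a later
    # "except IOError" clause could never run.
    print(details)
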
diff --git a/lib/portage/util/__init__.py b/lib/portage/util/__init__.py
index 5ade7f660..1f8c9e94f 100644
--- a/lib/portage/util/__init__.py
+++ b/lib/portage/util/__init__.py
@@ -1,6 +1,26 @@
-# Copyright 2004-2020 Gentoo Authors
+# Copyright 2004-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+from portage.cache.mappings import UserDict
+from portage.proxy.objectproxy import ObjectProxy
+from portage.localization import _
+from portage.exception import (
+ InvalidAtom,
+ PortageException,
+ FileNotFound,
+ IsADirectory,
+ OperationNotPermitted,
+ ParseError,
+ PermissionDenied,
+ ReadOnlyFileSystem,
+)
+from portage.const import VCS_DIRS
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage import _os_merge
+from portage import _encodings
+from portage import os
+
__all__ = [
"apply_permissions",
"apply_recursive_permissions",
@@ -36,6 +56,7 @@ __all__ = [
"writemsg",
"writemsg_level",
"writemsg_stdout",
+ "no_color",
]
from contextlib import AbstractContextManager
@@ -51,6 +72,7 @@ import string
import sys
import traceback
import glob
+from typing import Optional, TextIO
import portage
@@ -61,31 +83,11 @@ portage.proxy.lazyimport.lazyimport(
"subprocess",
)
-from portage import os
-from portage import _encodings
-from portage import _os_merge
-from portage import _unicode_encode
-from portage import _unicode_decode
-from portage.const import VCS_DIRS
-from portage.exception import (
- InvalidAtom,
- PortageException,
- FileNotFound,
- IsADirectory,
- OperationNotPermitted,
- ParseError,
- PermissionDenied,
- ReadOnlyFileSystem,
-)
-from portage.localization import _
-from portage.proxy.objectproxy import ObjectProxy
-from portage.cache.mappings import UserDict
-
noiselimit = 0
-def initialize_logger(level=logging.WARNING):
+def initialize_logger(level=logging.WARNING) -> None:
"""Sets up basic logging of portage activities
Args:
level: the level to emit messages at ('info', 'debug', 'warning' ...)
@@ -95,8 +97,15 @@ def initialize_logger(level=logging.WARNING):
logging.basicConfig(level=level, format="[%(levelname)-4s] %(message)s")
-def writemsg(mystr, noiselevel=0, fd=None):
- """Prints out warning and debug messages based on the noiselimit setting"""
+def writemsg(mystr: str, noiselevel: int = 0, fd: Optional[TextIO] = None) -> None:
+ """
+ Prints out warning and debug messages based on the noiselimit setting
+
+ Takes three arguments:
+ 1. mystr: the message to write
+ 2. noiselevel: the noise level of the message, compared against the global noiselimit
+ 3. fd: the file object to write the message to
+ """
global noiselimit
if fd is None:
fd = sys.stderr
@@ -116,24 +125,23 @@ def writemsg(mystr, noiselevel=0, fd=None):
fd.flush()
-def writemsg_stdout(mystr, noiselevel=0):
+def writemsg_stdout(mystr: str, noiselevel: int = 0) -> None:
"""Prints messages stdout based on the noiselimit setting"""
writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
-def writemsg_level(msg, level=0, noiselevel=0):
+def writemsg_level(msg: str, level: int = 0, noiselevel: int = 0) -> None:
"""
Show a message for the given level as defined by the logging module
- (default is 0). When level >= logging.WARNING then the message is
+ (default is 0).
+
+ When level >= logging.WARNING then the message is
sent to stderr, otherwise it is sent to stdout. The noiselevel is
passed directly to writemsg().
-
- @type msg: str
- @param msg: a message string, including newline if appropriate
- @type level: int
- @param level: a numeric logging level (see the logging module)
- @type noiselevel: int
- @param noiselevel: passed directly to writemsg
+ Takes three parameters:
+ 1. msg - the message to output
+ 2. level - the numeric logging level (see Python's logging module)
+ 3. noiselevel - portage's noise level, passed directly to writemsg
"""
if level >= logging.WARNING:
fd = sys.stderr
@@ -142,7 +150,7 @@ def writemsg_level(msg, level=0, noiselevel=0):
writemsg(msg, noiselevel=noiselevel, fd=fd)
-def normalize_path(mypath):
+def normalize_path(mypath) -> str:
"""
os.path.normpath("//foo") returns "//foo" instead of "/foo"
We dislike this behavior so we create our own normpath func
@@ -472,9 +480,8 @@ def read_corresponding_eapi_file(filename, default="0"):
eapi = None
try:
- with io.open(
+ with open(
_unicode_encode(eapi_file, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
) as f:
@@ -487,7 +494,7 @@ def read_corresponding_eapi_file(filename, default="0"):
% (eapi_file),
noiselevel=-1,
)
- except IOError:
+ except OSError:
pass
_eapi_cache[eapi_file] = eapi
@@ -574,7 +581,6 @@ def grabfile_package(
eapi=None,
eapi_default="0",
):
-
pkgs = grabfile(
myfilename, compatlevel, recursive=recursive, remember_source_file=True
)
@@ -679,9 +685,8 @@ def grablines(myfilename, recursive=0, remember_source_file=False):
else:
try:
- with io.open(
+ with open(
_unicode_encode(myfilename, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
) as myfile:
@@ -689,7 +694,7 @@ def grablines(myfilename, recursive=0, remember_source_file=False):
mylines = [(line, myfilename) for line in myfile.readlines()]
else:
mylines = myfile.readlines()
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(myfilename)
elif e.errno in (errno.ENOENT, errno.ESTALE):
@@ -708,7 +713,7 @@ def writedict(mydict, myfilename, writekey=True):
lines.append(v + "\n")
else:
for k, v in mydict.items():
- lines.append("%s %s\n" % (k, " ".join(v)))
+ lines.append(f"{k} {' '.join(v)}\n")
write_atomic(myfilename, "".join(lines))
@@ -734,11 +739,11 @@ class _getconfig_shlex(shlex.shlex):
try:
newfile = varexpand(newfile, self.var_expand_map)
return shlex.shlex.sourcehook(self, newfile)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(newfile)
if e.errno not in (errno.ENOENT, errno.ENOTDIR):
- writemsg("open('%s', 'r'): %s\n" % (newfile, e), noiselevel=-1)
+ writemsg(f"open('{newfile}', 'r'): {e}\n", noiselevel=-1)
raise
msg = self.error_leader()
@@ -748,7 +753,7 @@ class _getconfig_shlex(shlex.shlex):
msg += _("%s: No such file or directory") % newfile
if self.__portage_tolerant:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
else:
raise ParseError(msg)
return (newfile, io.StringIO())
@@ -760,7 +765,6 @@ _invalid_var_name_re = re.compile(r"^\d|\W")
def getconfig(
mycfg, tolerant=False, allow_sourcing=False, expand=True, recursive=False
):
-
if isinstance(expand, dict):
# Some existing variable definitions have been
# passed in, for use in substitutions.
@@ -795,16 +799,15 @@ def getconfig(
try:
f = open(
_unicode_encode(mycfg, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
content = f.read()
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mycfg)
if e.errno != errno.ENOENT:
- writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
+ writemsg(f"open('{mycfg}', 'r'): {e}\n", noiselevel=-1)
if e.errno not in (errno.EISDIR,):
raise
return None
@@ -867,7 +870,7 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
elif equ != "=":
@@ -875,7 +878,7 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
val = _unicode_decode(lex.get_token())
@@ -886,14 +889,14 @@ def getconfig(
if not tolerant:
raise ParseError(msg)
else:
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
return mykeys
if _invalid_var_name_re.search(key) is not None:
msg = lex.error_leader() + _("Invalid variable name '%s'") % (key,)
if not tolerant:
raise ParseError(msg)
- writemsg("%s\n" % msg, noiselevel=-1)
+ writemsg(f"{msg}\n", noiselevel=-1)
continue
if expand:
@@ -908,8 +911,8 @@ def getconfig(
except Exception as e:
if isinstance(e, ParseError) or lex is None:
raise
- msg = "%s%s" % (lex.error_leader(), e)
- writemsg("%s\n" % msg, noiselevel=-1)
+ msg = f"{lex.error_leader()}{e}"
+ writemsg(f"{msg}\n", noiselevel=-1)
raise
return mykeys
@@ -1085,7 +1088,7 @@ def dump_traceback(msg, noiselevel=1):
stack = traceback.extract_tb(info[2])
error = str(info[1])
writemsg("\n====================================\n", noiselevel=noiselevel)
- writemsg("%s\n\n" % msg, noiselevel=noiselevel)
+ writemsg(f"{msg}\n\n", noiselevel=noiselevel)
for line in traceback.format_list(stack):
writemsg(line, noiselevel=noiselevel)
if error:
@@ -1128,7 +1131,7 @@ class cmp_sort_key:
def __lt__(self, other):
if other.__class__ is not self.__class__:
raise TypeError(
- "Expected type %s, got %s" % (self.__class__, other.__class__)
+ f"Expected type {self.__class__}, got {other.__class__}"
)
return self._cmp_func(self._obj, other._obj) < 0
@@ -1195,7 +1198,7 @@ def _do_stat(filename, follow_links=True):
return os.stat(filename)
return os.lstat(filename)
except OSError as oe:
- func_call = "stat('%s')" % filename
+ func_call = f"stat('{filename}')"
if oe.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
if oe.errno == errno.EACCES:
@@ -1290,7 +1293,7 @@ def apply_permissions(
os.chmod(filename, new_mode)
modified = True
except OSError as oe:
- func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
+ func_call = f"chmod('{filename}', {oct(new_mode)})"
if oe.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
elif oe.errno == errno.EACCES:
@@ -1412,7 +1415,6 @@ def apply_secpass_permissions(
# Avoid accessing portage.data.secpass when possible, since
# it triggers config loading (undesirable for chmod-lite).
if (uid != -1 or gid != -1) and portage.data.secpass < 2:
-
if uid != -1 and uid != stat_cached.st_uid:
all_applied = False
uid = -1
@@ -1463,11 +1465,11 @@ class atomic_ofstream(AbstractContextManager, ObjectProxy):
tmp_name, encoding=_encodings["fs"], errors="strict"
),
mode=mode,
- **kargs
+ **kargs,
),
)
return
- except IOError as e:
+ except OSError as e:
if canonical_path == filename:
raise
# Ignore this error, since it's irrelevant
@@ -1482,7 +1484,7 @@ class atomic_ofstream(AbstractContextManager, ObjectProxy):
open_func(
_unicode_encode(tmp_name, encoding=_encodings["fs"], errors="strict"),
mode=mode,
- **kargs
+ **kargs,
),
)
@@ -1560,10 +1562,10 @@ def write_atomic(file_path, content, **kwargs):
f = atomic_ofstream(file_path, **kwargs)
f.write(content)
f.close()
- except (IOError, OSError) as e:
+ except OSError as e:
if f:
f.abort()
- func_call = "write_atomic('%s')" % file_path
+ func_call = f"write_atomic('{file_path}')"
if e.errno == errno.EPERM:
raise OperationNotPermitted(func_call)
elif e.errno == errno.EACCES:
@@ -1592,7 +1594,7 @@ def ensure_dirs(dir_path, **kwargs):
os.makedirs(dir_path)
created_dir = True
except OSError as oe:
- func_call = "makedirs('%s')" % dir_path
+ func_call = f"makedirs('{dir_path}')"
if oe.errno in (errno.EEXIST,):
pass
else:
@@ -1625,7 +1627,6 @@ class LazyItemsDict(UserDict):
__slots__ = ("lazy_items",)
def __init__(self, *args, **kwargs):
-
self.lazy_items = {}
UserDict.__init__(self, *args, **kwargs)
@@ -1721,21 +1722,15 @@ class LazyItemsDict(UserDict):
raise TypeError(
"LazyItemsDict "
+ "deepcopy is unsafe with lazy items that are "
- + "not singletons: key=%s value=%s"
- % (
- k,
- lazy_item,
- )
+ + f"not singletons: key={k} value={lazy_item}"
)
UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
return result
class _LazyItem:
-
__slots__ = ("func", "pargs", "kwargs", "singleton")
def __init__(self, func, pargs, kwargs, singleton):
-
if not pargs:
pargs = None
if not kwargs:
@@ -1965,7 +1960,7 @@ def find_updated_config_files(target_root, config_protect):
if stat.S_ISDIR(mymode):
mycommand = (
- "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
+ f"find '{x}' -name '.*' -type d -prune -o -name '._cfg????_*'"
)
else:
mycommand = (
@@ -2005,8 +2000,7 @@ def getlibpaths(root, env=None):
if include_match is not None:
subpath = os.path.join(os.path.dirname(path), include_match.group(1))
for p in glob.glob(subpath):
- for r in read_ld_so_conf(p):
- yield r
+ yield from read_ld_so_conf(p)
else:
yield l
@@ -2020,3 +2014,12 @@ def getlibpaths(root, env=None):
rval.append("/lib")
return [normalize_path(x) for x in rval if x]
+
+
+def no_color(settings: Optional[dict]) -> bool:
+ # In several years (2026+), we can clean up NOCOLOR support and just support NO_COLOR.
+ no_color_env: Optional[str] = settings.get("NO_COLOR")
+ nocolor: str = settings.get("NOCOLOR", "false").lower()
+ if no_color_env is None:
+ return nocolor in ("yes", "true")
+ return bool(no_color_env)
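
Hypothetical usage of the new no_color() helper, with illustrative settings dicts: a set NO_COLOR takes precedence (any non-empty value disables color), otherwise the legacy NOCOLOR yes/true toggle applies:

from portage.util import no_color

assert no_color({"NO_COLOR": "1"}) is True    # NO_COLOR set and non-empty
assert no_color({"NOCOLOR": "true"}) is True  # legacy toggle honored
assert no_color({"NOCOLOR": "no"}) is False
assert no_color({}) is False                  # default: color stays on
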
diff --git a/lib/portage/util/_async/AsyncFunction.py b/lib/portage/util/_async/AsyncFunction.py
index 8c13b3f5b..6f55aba56 100644
--- a/lib/portage/util/_async/AsyncFunction.py
+++ b/lib/portage/util/_async/AsyncFunction.py
@@ -1,6 +1,8 @@
-# Copyright 2015-2020 Gentoo Authors
+# Copyright 2015-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import functools
+import multiprocessing
import pickle
import traceback
@@ -16,34 +18,30 @@ class AsyncFunction(ForkProcess):
"result" attribute after the forked process has exited.
"""
- # NOTE: This class overrides the meaning of the SpawnProcess 'args'
- # attribute, and uses it to hold the positional arguments for the
- # 'target' function.
__slots__ = (
- "kwargs",
"result",
- "target",
"_async_func_reader",
- "_async_func_reader_pw",
)
def _start(self):
- pr, pw = os.pipe()
- self.fd_pipes = {} if self.fd_pipes is None else self.fd_pipes
- self.fd_pipes[pw] = pw
- self._async_func_reader_pw = pw
+ pr, pw = multiprocessing.Pipe(duplex=False)
self._async_func_reader = PipeReader(
input_files={"input": pr}, scheduler=self.scheduler
)
self._async_func_reader.addExitListener(self._async_func_reader_exit)
self._async_func_reader.start()
+ # args and kwargs are passed as additional args by ForkProcess._bootstrap.
+ self.target = functools.partial(self._target_wrapper, pw, self.target)
ForkProcess._start(self)
- os.close(pw)
+ pw.close()
- def _run(self):
+ @staticmethod
+ def _target_wrapper(pw, target, *args, **kwargs):
try:
- result = self.target(*(self.args or []), **(self.kwargs or {}))
- os.write(self._async_func_reader_pw, pickle.dumps(result))
+ result = target(*args, **kwargs)
+ result_bytes = pickle.dumps(result)
+ while result_bytes:
+ result_bytes = result_bytes[os.write(pw.fileno(), result_bytes) :]
except Exception:
traceback.print_exc()
return 1
@@ -56,6 +54,10 @@ class AsyncFunction(ForkProcess):
if self._async_func_reader is None:
ForkProcess._async_waitpid(self)
+ def _async_wait(self):
+ if self._async_func_reader is None:
+ ForkProcess._async_wait(self)
+
def _async_func_reader_exit(self, pipe_reader):
try:
self.result = pickle.loads(pipe_reader.getvalue())
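
The child-side protocol above, shown standalone: the pickled result is written to the raw fd of a simplex multiprocessing.Pipe in a loop, because os.write() may accept fewer bytes than offered, and the parent (PipeReader in AsyncFunction) just reads raw bytes until EOF. A POSIX-oriented sketch within a single process:

import multiprocessing
import os
import pickle

pr, pw = multiprocessing.Pipe(duplex=False)

payload = pickle.dumps({"answer": 42})
buf = payload
while buf:
    # os.write() returns the number of bytes actually written.
    buf = buf[os.write(pw.fileno(), buf) :]
pw.close()

chunks = []
while True:
    chunk = os.read(pr.fileno(), 4096)
    if not chunk:  # EOF once the write end is closed
        break
    chunks.append(chunk)
assert pickle.loads(b"".join(chunks)) == {"answer": 42}
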
diff --git a/lib/portage/util/_async/AsyncScheduler.py b/lib/portage/util/_async/AsyncScheduler.py
index b8ed31f8c..07782b478 100644
--- a/lib/portage/util/_async/AsyncScheduler.py
+++ b/lib/portage/util/_async/AsyncScheduler.py
@@ -94,7 +94,7 @@ class AsyncScheduler(AsynchronousTask, PollScheduler):
self._schedule()
def _cleanup(self):
- super(AsyncScheduler, self)._cleanup()
+ super()._cleanup()
if self._loadavg_check_id is not None:
self._loadavg_check_id.cancel()
self._loadavg_check_id = None
@@ -104,4 +104,4 @@ class AsyncScheduler(AsynchronousTask, PollScheduler):
Override _async_wait to call self._cleanup().
"""
self._cleanup()
- super(AsyncScheduler, self)._async_wait()
+ super()._async_wait()
diff --git a/lib/portage/util/_async/AsyncTaskFuture.py b/lib/portage/util/_async/AsyncTaskFuture.py
index 0cd034c97..4c2f7a571 100644
--- a/lib/portage/util/_async/AsyncTaskFuture.py
+++ b/lib/portage/util/_async/AsyncTaskFuture.py
@@ -1,4 +1,4 @@
-# Copyright 2018-2021 Gentoo Foundation
+# Copyright 2018-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import os
@@ -20,6 +20,12 @@ class AsyncTaskFuture(AsynchronousTask):
self.future = asyncio.ensure_future(self.future, self.scheduler)
self.future.add_done_callback(self._done_callback)
+ def isAlive(self):
+ """
+ Returns True if self.future is an asyncio.Future that is not done.
+ """
+ return isinstance(self.future, asyncio.Future) and not self.future.done()
+
def _cancel(self):
if not self.future.done():
self.future.cancel()
diff --git a/lib/portage/util/_async/BuildLogger.py b/lib/portage/util/_async/BuildLogger.py
index cbed2d811..0cfc90a94 100644
--- a/lib/portage/util/_async/BuildLogger.py
+++ b/lib/portage/util/_async/BuildLogger.py
@@ -1,4 +1,4 @@
-# Copyright 2020-2021 Gentoo Authors
+# Copyright 2020-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import functools
@@ -6,13 +6,46 @@ import subprocess
from _emerge.AsynchronousTask import AsynchronousTask
+import portage
from portage import os
+from portage.proxy.objectproxy import ObjectProxy
from portage.util import shlex_split
from portage.util._async.PipeLogger import PipeLogger
from portage.util._async.PopenProcess import PopenProcess
from portage.util.futures import asyncio
+class _file_close_wrapper(ObjectProxy):
+ """
+ Prevent fd inheritance via fork, ensuring that we can observe
+ EOF on the read end of the pipe (bug 919072).
+ """
+
+ __slots__ = ("_file",)
+
+ def __init__(self, file):
+ ObjectProxy.__init__(self)
+ object.__setattr__(self, "_file", file)
+ portage.locks._open_fds[file.fileno()] = self
+
+ def _get_target(self):
+ return object.__getattribute__(self, "_file")
+
+ def __getattribute__(self, attr):
+ if attr == "close":
+ return object.__getattribute__(self, attr)
+ return getattr(object.__getattribute__(self, "_file"), attr)
+
+ def close(self):
+ file = object.__getattribute__(self, "_file")
+ if not file.closed:
+ # This must only be called if the file is open,
+ # which ensures that file.fileno() does not
+ # collide with an open lock file descriptor.
+ del portage.locks._open_fds[file.fileno()]
+ file.close()
+
+
class BuildLogger(AsynchronousTask):
"""
Write to a log file, with compression support provided by PipeLogger.
@@ -60,14 +93,14 @@ class BuildLogger(AsynchronousTask):
scheduler=self.scheduler,
)
filter_proc.start()
- except EnvironmentError:
+ except OSError:
# Maybe the command is missing or broken somehow...
os.close(filter_input)
os.close(stdin)
os.close(log_input)
os.close(filter_output)
else:
- self._stdin = os.fdopen(stdin, "wb", 0)
+ self._stdin = _file_close_wrapper(os.fdopen(stdin, "wb", 0))
os.close(filter_input)
os.close(filter_output)
@@ -76,7 +109,7 @@ class BuildLogger(AsynchronousTask):
# that is missing or broken somehow, create a pipe that
# logs directly to pipe_logger.
log_input, stdin = os.pipe()
- self._stdin = os.fdopen(stdin, "wb", 0)
+ self._stdin = _file_close_wrapper(os.fdopen(stdin, "wb", 0))
# Set background=True so that pipe_logger does not log to stdout.
pipe_logger = PipeLogger(
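
The wrapper above hooks into portage.locks._open_fds so that forked children close the parent's write end and EOF becomes observable on the read side. The underlying idea, reduced to a sketch with illustrative names (portage's real registry also serves its lock-file bookkeeping):

import os

_open_fds = {}  # fileno -> file object, process-wide

def register(file):
    """Track an open file so a forked child knows to close it."""
    _open_fds[file.fileno()] = file
    return file

def close_in_child():
    """Called in the child after fork(): drop inherited descriptors."""
    for fd in list(_open_fds):
        os.close(fd)
    _open_fds.clear()

def close_in_parent(file):
    """Unregister first, then close, so filenos never alias."""
    if not file.closed:
        del _open_fds[file.fileno()]
        file.close()
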
diff --git a/lib/portage/util/_async/FileCopier.py b/lib/portage/util/_async/FileCopier.py
index 3cd0fe98b..d53ff0859 100644
--- a/lib/portage/util/_async/FileCopier.py
+++ b/lib/portage/util/_async/FileCopier.py
@@ -1,4 +1,4 @@
-# Copyright 2013-2019 Gentoo Authors
+# Copyright 2013-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import os as _os
@@ -20,16 +20,20 @@ class FileCopier(AsyncTaskFuture):
def _start(self):
self.future = asyncio.ensure_future(
- self.scheduler.run_in_executor(ForkExecutor(loop=self.scheduler), self._run)
+ self.scheduler.run_in_executor(
+ ForkExecutor(loop=self.scheduler),
+ self._target,
+ self.src_path,
+ self.dest_path,
+ )
)
- super(FileCopier, self)._start()
+ super()._start()
- def _run(self):
- src_path = _unicode_encode(
- self.src_path, encoding=_encodings["fs"], errors="strict"
- )
+ @staticmethod
+ def _target(src_path, dest_path):
+ src_path = _unicode_encode(src_path, encoding=_encodings["fs"], errors="strict")
dest_path = _unicode_encode(
- self.dest_path, encoding=_encodings["fs"], errors="strict"
+ dest_path, encoding=_encodings["fs"], errors="strict"
)
copyfile(src_path, dest_path)
apply_stat_permissions(dest_path, _os.stat(src_path))
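
The move from an instance _run() to a @staticmethod _target(src_path, dest_path) matters under the multiprocessing "spawn" start method: the target and its arguments are pickled into the child, and a bound method would drag the whole task object (scheduler, futures and all) along with it. A standalone illustration of the shape, with placeholder paths and shutil standing in for portage's own copyfile and permission handling:

import multiprocessing
import shutil

def copy_target(src_path, dest_path):
    # Plain picklable function: no instance state comes along.
    shutil.copyfile(src_path, dest_path)

if __name__ == "__main__":
    multiprocessing.set_start_method("spawn")
    with open("/tmp/copier-src.txt", "w") as f:
        f.write("hello\n")
    proc = multiprocessing.Process(
        target=copy_target, args=("/tmp/copier-src.txt", "/tmp/copier-dest.txt")
    )
    proc.start()
    proc.join()
    assert proc.exitcode == 0
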
diff --git a/lib/portage/util/_async/FileDigester.py b/lib/portage/util/_async/FileDigester.py
index 0d250ec99..6491423ae 100644
--- a/lib/portage/util/_async/FileDigester.py
+++ b/lib/portage/util/_async/FileDigester.py
@@ -1,13 +1,13 @@
-# Copyright 2013 Gentoo Foundation
+# Copyright 2013-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-from portage import os
+import functools
+
from portage.checksum import perform_multiple_checksums
-from portage.util._async.ForkProcess import ForkProcess
-from _emerge.PipeReader import PipeReader
+from portage.util._async.AsyncFunction import AsyncFunction
-class FileDigester(ForkProcess):
+class FileDigester(AsyncFunction):
"""
Asynchronously generate file digests. Pass in file_path and
hash_names, and after successful execution, the digests
@@ -17,65 +17,15 @@ class FileDigester(ForkProcess):
__slots__ = (
"file_path",
- "digests",
"hash_names",
- "_digest_pipe_reader",
- "_digest_pw",
)
def _start(self):
- pr, pw = os.pipe()
- self.fd_pipes = {}
- self.fd_pipes[pw] = pw
- self._digest_pw = pw
- self._digest_pipe_reader = PipeReader(
- input_files={"input": pr}, scheduler=self.scheduler
+ self.target = functools.partial(
+ perform_multiple_checksums, self.file_path, hashes=self.hash_names
)
- self._digest_pipe_reader.addExitListener(self._digest_pipe_reader_exit)
- self._digest_pipe_reader.start()
- ForkProcess._start(self)
- os.close(pw)
-
- def _run(self):
- digests = perform_multiple_checksums(self.file_path, hashes=self.hash_names)
-
- buf = "".join("%s=%s\n" % item for item in digests.items()).encode("utf_8")
-
- while buf:
- buf = buf[os.write(self._digest_pw, buf) :]
-
- return os.EX_OK
-
- def _parse_digests(self, data):
-
- digests = {}
- for line in data.decode("utf_8").splitlines():
- parts = line.split("=", 1)
- if len(parts) == 2:
- digests[parts[0]] = parts[1]
-
- self.digests = digests
-
- def _async_waitpid(self):
- # Ignore this event, since we want to ensure that we
- # exit only after _digest_pipe_reader has reached EOF.
- if self._digest_pipe_reader is None:
- ForkProcess._async_waitpid(self)
-
- def _digest_pipe_reader_exit(self, pipe_reader):
- self._parse_digests(pipe_reader.getvalue())
- self._digest_pipe_reader = None
- if self.pid is None:
- self._unregister()
- self._async_wait()
- else:
- self._async_waitpid()
-
- def _unregister(self):
- ForkProcess._unregister(self)
+ super()._start()
- pipe_reader = self._digest_pipe_reader
- if pipe_reader is not None:
- self._digest_pipe_reader = None
- pipe_reader.removeExitListener(self._digest_pipe_reader_exit)
- pipe_reader.cancel()
+ @property
+ def digests(self):
+ return self.result
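
What _start() now builds, shown standalone: a plain picklable callable whose return value AsyncFunction pickles back to the parent, where it surfaces as .result (aliased to .digests by the property above). The file path and hash tuple here are placeholders:

import functools
from portage.checksum import perform_multiple_checksums

target = functools.partial(
    perform_multiple_checksums, "/path/to/distfile.tar.xz", hashes=("SHA512",)
)
# In the forked child, AsyncFunction calls target() and pickles the
# returned dict of hash name -> digest back over its pipe.
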
diff --git a/lib/portage/util/_async/ForkProcess.py b/lib/portage/util/_async/ForkProcess.py
index e70238705..e6cfdefb8 100644
--- a/lib/portage/util/_async/ForkProcess.py
+++ b/lib/portage/util/_async/ForkProcess.py
@@ -1,129 +1,272 @@
-# Copyright 2012-2021 Gentoo Authors
+# Copyright 2012-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import fcntl
-import functools
import multiprocessing
+import warnings
import signal
import sys
+from typing import Optional
+
import portage
from portage import os
+from portage.cache.mappings import slot_dict_class
from portage.util.futures import asyncio
from _emerge.SpawnProcess import SpawnProcess
class ForkProcess(SpawnProcess):
+ # NOTE: This class overrides the meaning of the SpawnProcess 'args'
+ # attribute, and uses it to hold the positional arguments for the
+ # 'target' function.
+ __slots__ = (
+ "kwargs",
+ "target",
+ "_child_connection",
+ # Duplicate file descriptors for use by _send_fd_pipes background thread.
+ "_fd_pipes",
+ )
+
+ _file_names = ("connection", "slave_fd")
+ _files_dict = slot_dict_class(_file_names, prefix="")
+
+ _HAVE_SEND_HANDLE = getattr(multiprocessing.reduction, "HAVE_SEND_HANDLE", False)
+
+ def _start(self):
+ if multiprocessing.get_start_method() == "fork":
+ # Backward compatibility mode.
+ super()._start()
+ return
+
+ # This mode supports multiprocessing start methods
+ # other than fork. Note that the fd_pipes implementation
+ # uses a thread via run_in_executor, and threads are not
+ # recommended for mixing with the fork start method due
+ # to cpython issue 84559.
+ if self.fd_pipes and not self._HAVE_SEND_HANDLE:
+ raise NotImplementedError(
+ 'fd_pipes only supported with HAVE_SEND_HANDLE or multiprocessing start method "fork"'
+ )
+
+ if self.fd_pipes or self.logfile or not self.background:
+ # Log via multiprocessing.Pipe if necessary.
+ connection, self._child_connection = multiprocessing.Pipe(
+ duplex=self._HAVE_SEND_HANDLE
+ )
+
+ # Handle fd_pipes in _main instead, since file descriptors are
+ # not inherited with the multiprocessing "spawn" start method.
+ # Pass fd_pipes=None to spawn here so that it doesn't leave
+ # a closed stdin duplicate in fd_pipes (that would trigger
+ # "Bad file descriptor" error if we tried to send it via
+ # send_handle).
+ self._proc = self._spawn(self.args, fd_pipes=None)
+
+ self._registered = True
+
+ if self._child_connection is None:
+ self._async_waitpid()
+ else:
+ self._child_connection.close()
+ self.fd_pipes = self.fd_pipes or {}
+ stdout_fd = None
+ if not self.background:
+ self.fd_pipes.setdefault(0, portage._get_stdin().fileno())
+ self.fd_pipes.setdefault(1, sys.__stdout__.fileno())
+ self.fd_pipes.setdefault(2, sys.__stderr__.fileno())
+ if self.create_pipe is not False:
+ stdout_fd = os.dup(self.fd_pipes[1])
+
+ if self._HAVE_SEND_HANDLE:
+ if self.create_pipe is not False:
+ master_fd, slave_fd = self._pipe(self.fd_pipes)
+ self.fd_pipes[1] = slave_fd
+ self.fd_pipes[2] = slave_fd
+ else:
+ if self.logfile:
+ raise NotImplementedError(
+ "logfile conflicts with create_pipe=False"
+ )
+ # When called via process.spawn, SpawnProcess
+ # will have created a pipe earlier, so it would be
+ # redundant to do it here (it could also trigger spawn
+ # recursion via set_term_size as in bug 923750).
+ master_fd = None
+ slave_fd = None
+
+ self._files = self._files_dict(connection=connection, slave_fd=slave_fd)
+
+ # Create duplicate file descriptors in self._fd_pipes
+ # so that the caller is free to manage the lifecycle
+ # of the original fd_pipes.
+ self._fd_pipes = {}
+ fd_map = {}
+ for dest, src in list(self.fd_pipes.items()):
+ if src not in fd_map:
+ src_new = fd_map[src] = os.dup(src)
+ old_fdflags = fcntl.fcntl(src, fcntl.F_GETFD)
+ fcntl.fcntl(src_new, fcntl.F_SETFD, old_fdflags)
+ os.set_inheritable(
+ src_new, not bool(old_fdflags & fcntl.FD_CLOEXEC)
+ )
+ self._fd_pipes[dest] = fd_map[src]
+
+ asyncio.ensure_future(
+ self._proc.wait(), self.scheduler
+ ).add_done_callback(self._close_fd_pipes)
+ else:
+ master_fd = connection
+
+ self._start_main_task(
+ master_fd, log_file_path=self.logfile, stdout_fd=stdout_fd
+ )
+
+ def _close_fd_pipes(self, future):
+ """
+ Cleanup self._fd_pipes if needed, since _send_fd_pipes could
+ have been cancelled.
+ """
+ # future.result() raises asyncio.CancelledError if
+ # future.cancelled(), but that should not happen.
+ future.result()
+ if self._fd_pipes is not None:
+ for fd in set(self._fd_pipes.values()):
+ os.close(fd)
+ self._fd_pipes = None
+
+ @property
+ def _fd_pipes_send_handle(self):
+ """Returns True if we have a connection to implement fd_pipes via send_handle."""
+ return bool(
+ self._HAVE_SEND_HANDLE
+ and self._files
+ and getattr(self._files, "connection", False)
+ )
+
+ def _send_fd_pipes(self):
+ """
+ Communicate with _bootstrap to send fd_pipes via send_handle.
+ This performs blocking IO, intended for invocation via run_in_executor.
+ """
+ fd_list = list(set(self._fd_pipes.values()))
+ try:
+ self._files.connection.send(
+ (self._fd_pipes, fd_list),
+ )
+ for fd in fd_list:
+ multiprocessing.reduction.send_handle(
+ self._files.connection,
+ fd,
+ self.pid,
+ )
+ except BrokenPipeError as e:
+ # This case is triggered by testAsynchronousLockWaitCancel
+ # when the test case terminates the child process while
+ # this thread is still sending the fd_pipes (bug 923852).
+ # Even if the child terminated abnormally, then there is
+ # no harm in suppressing the exception here, since the
+ # child error should have gone to stderr.
+ raise asyncio.CancelledError from e
- __slots__ = ("_proc", "_proc_join_task")
+ # self._fd_pipes contains duplicates that must be closed.
+ for fd in fd_list:
+ os.close(fd)
+ self._fd_pipes = None
- # Number of seconds between poll attempts for process exit status
- # (after the sentinel has become ready).
- _proc_join_interval = 0.1
+ async def _main(self, build_logger, pipe_logger, loop=None):
+ try:
+ if self._fd_pipes_send_handle:
+ await self.scheduler.run_in_executor(
+ None,
+ self._send_fd_pipes,
+ )
+ except asyncio.CancelledError:
+ self._main_cancel(build_logger, pipe_logger)
+ raise
+ finally:
+ if self._files:
+ if hasattr(self._files, "connection"):
+ self._files.connection.close()
+ del self._files.connection
+ if hasattr(self._files, "slave_fd"):
+ if self._files.slave_fd is not None:
+ os.close(self._files.slave_fd)
+ del self._files.slave_fd
+
+ await super()._main(build_logger, pipe_logger, loop=loop)
- def _spawn(self, args, fd_pipes=None, **kwargs):
+ def _spawn(
+ self, args: list[str], fd_pipes: Optional[dict[int, int]] = None, **kwargs
+ ) -> portage.process.MultiprocessingProcess:
"""
Override SpawnProcess._spawn to fork a subprocess that calls
self._run(). This uses multiprocessing.Process in order to leverage
any pre-fork and post-fork interpreter housekeeping that it provides,
promoting a healthy state for the forked interpreter.
"""
+
+ if self.__class__._run is ForkProcess._run:
+ # target replaces the deprecated self._run method
+ target = self.target
+ args = self.args
+ kwargs = self.kwargs
+ else:
+ # _run implementation triggers backward-compatibility mode
+ target = self._run
+ args = None
+ kwargs = None
+ warnings.warn(
+ 'portage.util._async.ForkProcess.ForkProcess._run is deprecated in favor of the "target" parameter',
+ UserWarning,
+ stacklevel=2,
+ )
+
# Since multiprocessing.Process closes sys.__stdin__, create a
# temporary duplicate of fd_pipes[0] so that sys.__stdin__ can
# be restored in the subprocess, in case this is needed for
# things like PROPERTIES=interactive support.
stdin_dup = None
try:
- stdin_fd = fd_pipes.get(0)
+ stdin_fd = fd_pipes.get(0) if fd_pipes else None
if stdin_fd is not None and stdin_fd == portage._get_stdin().fileno():
stdin_dup = os.dup(stdin_fd)
fcntl.fcntl(
stdin_dup, fcntl.F_SETFD, fcntl.fcntl(stdin_fd, fcntl.F_GETFD)
)
fd_pipes[0] = stdin_dup
- self._proc = multiprocessing.Process(
- target=self._bootstrap, args=(fd_pipes,)
+
+ proc = multiprocessing.Process(
+ target=self._bootstrap,
+ args=(
+ self._child_connection,
+ self._HAVE_SEND_HANDLE,
+ fd_pipes,
+ target,
+ args,
+ kwargs,
+ ),
)
- self._proc.start()
+ proc.start()
finally:
if stdin_dup is not None:
os.close(stdin_dup)
- self._proc_join_task = asyncio.ensure_future(
- self._proc_join(self._proc, loop=self.scheduler), loop=self.scheduler
- )
- self._proc_join_task.add_done_callback(
- functools.partial(self._proc_join_done, self._proc)
- )
-
- return [self._proc.pid]
+ return portage.process.MultiprocessingProcess(proc)
def _cancel(self):
if self._proc is None:
- super(ForkProcess, self)._cancel()
+ super()._cancel()
else:
self._proc.terminate()
- def _async_wait(self):
- if self._proc_join_task is None:
- super(ForkProcess, self)._async_wait()
-
- def _async_waitpid(self):
- if self._proc_join_task is None:
- super(ForkProcess, self)._async_waitpid()
-
- async def _proc_join(self, proc, loop=None):
- sentinel_reader = self.scheduler.create_future()
- self.scheduler.add_reader(
- proc.sentinel,
- lambda: sentinel_reader.done() or sentinel_reader.set_result(None),
- )
- try:
- await sentinel_reader
- finally:
- # If multiprocessing.Process supports the close method, then
- # access to proc.sentinel will raise ValueError if the
- # sentinel has been closed. In this case it's not safe to call
- # remove_reader, since the file descriptor may have been closed
- # and then reallocated to a concurrent coroutine. When the
- # close method is not supported, proc.sentinel remains open
- # until proc's finalizer is called.
- try:
- self.scheduler.remove_reader(proc.sentinel)
- except ValueError:
- pass
-
- # Now that proc.sentinel is ready, poll until process exit
- # status has become available.
- while True:
- proc.join(0)
- if proc.exitcode is not None:
- break
- await asyncio.sleep(self._proc_join_interval, loop=loop)
-
- def _proc_join_done(self, proc, future):
- future.cancelled() or future.result()
- self._was_cancelled()
- if self.returncode is None:
- self.returncode = proc.exitcode
-
- self._proc = None
- if hasattr(proc, "close"):
- proc.close()
- self._proc_join_task = None
- self._async_wait()
-
def _unregister(self):
- super(ForkProcess, self)._unregister()
+ super()._unregister()
if self._proc is not None:
- if self._proc.is_alive():
- self._proc.terminate()
- self._proc = None
- if self._proc_join_task is not None:
- self._proc_join_task.cancel()
- self._proc_join_task = None
-
- def _bootstrap(self, fd_pipes):
+ self._proc.terminate()
+
+ @staticmethod
+ def _bootstrap(child_connection, have_send_handle, fd_pipes, target, args, kwargs):
# Use default signal handlers in order to avoid problems
# killing subprocesses as reported in bug #353239.
signal.signal(signal.SIGINT, signal.SIG_DFL)
@@ -140,14 +283,34 @@ class ForkProcess(SpawnProcess):
pass
portage.locks._close_fds()
- # We don't exec, so use close_fds=False
- # (see _setup_pipes docstring).
- portage.process._setup_pipes(fd_pipes, close_fds=False)
+
+ if child_connection is not None:
+ if have_send_handle:
+ fd_pipes, fd_list = child_connection.recv()
+ fd_pipes_map = {}
+ for fd in fd_list:
+ fd_pipes_map[fd] = multiprocessing.reduction.recv_handle(
+ child_connection
+ )
+ child_connection.close()
+ for k, v in list(fd_pipes.items()):
+ fd_pipes[k] = fd_pipes_map[v]
+
+ else:
+ fd_pipes = fd_pipes or {}
+ fd_pipes[sys.stdout.fileno()] = child_connection.fileno()
+ fd_pipes[sys.stderr.fileno()] = child_connection.fileno()
+ fd_pipes[child_connection.fileno()] = child_connection.fileno()
+
+ if fd_pipes:
+ # We don't exec, so use close_fds=False
+ # (see _setup_pipes docstring).
+ portage.process._setup_pipes(fd_pipes, close_fds=False)
# Since multiprocessing.Process closes sys.__stdin__ and
# makes sys.stdin refer to os.devnull, restore it when
# appropriate.
- if 0 in fd_pipes:
+ if fd_pipes and 0 in fd_pipes:
# It's possible that sys.stdin.fileno() is already 0,
# and in that case the above _setup_pipes call will
# have already updated its identity via dup2. Otherwise,
@@ -160,7 +323,10 @@ class ForkProcess(SpawnProcess):
)
sys.__stdin__ = sys.stdin
- sys.exit(self._run())
+ sys.exit(target(*(args or []), **(kwargs or {})))
def _run(self):
+ """
+ Deprecated and replaced with the "target" constructor parameter.
+ """
raise NotImplementedError(self)
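
The mechanism carrying fd_pipes here deserves a standalone sketch: multiprocessing.reduction.send_handle() duplicates an open descriptor into the child over a duplex Pipe (socket-backed on POSIX, hence the HAVE_SEND_HANDLE and duplex=self._HAVE_SEND_HANDLE checks above), which works even when the "spawn" start method prevents ordinary fd inheritance. An illustrative, POSIX-oriented example:

import multiprocessing
import multiprocessing.reduction
import os

def child(conn):
    # Receive a duplicated descriptor from the parent.
    fd = multiprocessing.reduction.recv_handle(conn)
    conn.close()
    os.write(fd, b"hello from the child\n")
    os.close(fd)

if __name__ == "__main__":
    multiprocessing.set_start_method("spawn")
    parent_conn, child_conn = multiprocessing.Pipe(duplex=True)
    proc = multiprocessing.Process(target=child, args=(child_conn,))
    proc.start()
    child_conn.close()
    r, w = os.pipe()
    multiprocessing.reduction.send_handle(parent_conn, w, proc.pid)
    os.close(w)  # the child now holds its own duplicate
    print(os.read(r, 4096).decode(), end="")
    proc.join()
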
diff --git a/lib/portage/util/_async/PipeLogger.py b/lib/portage/util/_async/PipeLogger.py
index 006f915ef..5f3c83227 100644
--- a/lib/portage/util/_async/PipeLogger.py
+++ b/lib/portage/util/_async/PipeLogger.py
@@ -14,10 +14,9 @@ from _emerge.AbstractPollTask import AbstractPollTask
class PipeLogger(AbstractPollTask):
-
"""
This can be used for logging output of a child process,
- optionally outputing to log_file_path and/or stdout_fd. It can
+ optionally outputting to log_file_path and/or stdout_fd. It can
also monitor for EOF on input_fd, which may be used to detect
termination of a child process. If log_file_path ends with
'.gz' then the log file is written with compression.
@@ -31,7 +30,6 @@ class PipeLogger(AbstractPollTask):
)
def _start(self):
-
log_file_path = self.log_file_path
if hasattr(log_file_path, "write"):
self._log_file_nb = True
diff --git a/lib/portage/util/_async/PipeReaderBlockingIO.py b/lib/portage/util/_async/PipeReaderBlockingIO.py
index 74292fb0b..9b454ba4d 100644
--- a/lib/portage/util/_async/PipeReaderBlockingIO.py
+++ b/lib/portage/util/_async/PipeReaderBlockingIO.py
@@ -1,11 +1,7 @@
-# Copyright 2012-2018 Gentoo Foundation
+# Copyright 2012-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-try:
- import threading
-except ImportError:
- # dummy_threading will not suffice
- threading = None
+import threading
from portage import os
from _emerge.AbstractPollTask import AbstractPollTask
@@ -15,10 +11,13 @@ class PipeReaderBlockingIO(AbstractPollTask):
"""
Reads output from one or more files and saves it in memory, for
retrieval via the getvalue() method. This is driven by a thread
- for each input file, in order to support blocking IO. This may
- be useful for using threads to handle blocking IO with Jython,
- since Jython lacks the fcntl module which is needed for
+ for each input file, in order to support blocking IO. This is
+ historically useful for using threads to handle blocking IO with
+ Jython, since Jython lacks the fcntl module which is needed for
non-blocking IO (see http://bugs.jython.org/issue1074).
+
+ Portage does not currently support Jython, but re-introducing
+ support in The Future (TM) may be possible.
"""
__slots__ = ("input_files", "_read_data", "_terminate", "_threads", "_thread_rlock")
@@ -38,11 +37,7 @@ class PipeReaderBlockingIO(AbstractPollTask):
self._threads[f] = t
def _reader_thread(self, f):
- try:
- terminated = self._terminate.is_set
- except AttributeError:
- # Jython 2.7.0a2
- terminated = self._terminate.isSet
+ terminated = self._terminate.is_set
bufsize = self._bufsize
while not terminated():
buf = f.read(bufsize)
diff --git a/lib/portage/util/_async/PopenProcess.py b/lib/portage/util/_async/PopenProcess.py
index 25afad0f4..a0e532e27 100644
--- a/lib/portage/util/_async/PopenProcess.py
+++ b/lib/portage/util/_async/PopenProcess.py
@@ -1,19 +1,18 @@
-# Copyright 2012-2021 Gentoo Authors
+# Copyright 2012-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import portage
from _emerge.SubProcess import SubProcess
class PopenProcess(SubProcess):
-
__slots__ = (
"pipe_reader",
"proc",
)
def _start(self):
-
- self.pid = self.proc.pid
+ self._proc = portage.process.Process(self.proc.pid)
self._registered = True
if self.pipe_reader is None:
diff --git a/lib/portage/util/_async/SchedulerInterface.py b/lib/portage/util/_async/SchedulerInterface.py
index a83e1e015..485958491 100644
--- a/lib/portage/util/_async/SchedulerInterface.py
+++ b/lib/portage/util/_async/SchedulerInterface.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2021 Gentoo Authors
+# Copyright 2012-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import gzip
@@ -12,7 +12,6 @@ from ..SlotObject import SlotObject
class SchedulerInterface(SlotObject):
-
_event_loop_attrs = (
"add_reader",
"add_writer",
@@ -50,6 +49,13 @@ class SchedulerInterface(SlotObject):
for k in self._event_loop_attrs:
setattr(self, k, getattr(event_loop, k))
+ @property
+ def _loop(self):
+ """
+ Returns the real underlying asyncio loop.
+ """
+ return self._event_loop._loop
+
@staticmethod
def _return_false():
return False
@@ -112,13 +118,12 @@ class SchedulerInterface(SlotObject):
mode="ab",
)
f_real = f
- except IOError as e:
+ except OSError as e:
if e.errno not in (errno.ENOENT, errno.ESTALE):
raise
if not msg_shown:
writemsg_level(msg, level=level, noiselevel=noiselevel)
else:
-
if log_path.endswith(".gz"):
# NOTE: The empty filename argument prevents us from
# triggering a bug in python3 which causes GzipFile
diff --git a/lib/portage/util/_async/TaskScheduler.py b/lib/portage/util/_async/TaskScheduler.py
index 09920111e..ec97a84ac 100644
--- a/lib/portage/util/_async/TaskScheduler.py
+++ b/lib/portage/util/_async/TaskScheduler.py
@@ -5,7 +5,6 @@ from .AsyncScheduler import AsyncScheduler
class TaskScheduler(AsyncScheduler):
-
"""
A simple way to handle scheduling of AbstractPollTask instances. Simply
pass a task iterator into the constructor and call start(). Use the
diff --git a/lib/portage/util/_async/meson.build b/lib/portage/util/_async/meson.build
new file mode 100644
index 000000000..66daf7fb5
--- /dev/null
+++ b/lib/portage/util/_async/meson.build
@@ -0,0 +1,20 @@
+py.install_sources(
+ [
+ 'AsyncFunction.py',
+ 'AsyncScheduler.py',
+ 'AsyncTaskFuture.py',
+ 'BuildLogger.py',
+ 'FileCopier.py',
+ 'FileDigester.py',
+ 'ForkProcess.py',
+ 'PipeLogger.py',
+ 'PipeReaderBlockingIO.py',
+ 'PopenProcess.py',
+ 'SchedulerInterface.py',
+ 'TaskScheduler.py',
+ 'run_main_scheduler.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/_async',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/_ctypes.py b/lib/portage/util/_ctypes.py
index e6d1e327c..04e965ba9 100644
--- a/lib/portage/util/_ctypes.py
+++ b/lib/portage/util/_ctypes.py
@@ -48,3 +48,18 @@ def LoadLibrary(name):
_library_handles[name] = handle
return handle
+
+
+def load_libc():
+ """
+ Loads the C standard library, returns a tuple with the CDLL handle and
+ the filename. Returns (None, None) if unavailable.
+ """
+ filename = find_library("c")
+ if filename is None:
+ # find_library fails for musl where there is no soname
+ filename = "libc.so"
+ try:
+ return (LoadLibrary(filename), filename)
+ except OSError:
+ return (None, None)
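
A hypothetical caller of the new load_libc() helper, guarding for the (None, None) fallback and invoking a well-known libc symbol through ctypes:

import ctypes
from portage.util._ctypes import load_libc

libc, libc_name = load_libc()
if libc is not None:
    libc.getpid.restype = ctypes.c_int
    print(f"{libc_name}: pid {libc.getpid()}")
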
diff --git a/lib/portage/util/_dyn_libs/LinkageMapELF.py b/lib/portage/util/_dyn_libs/LinkageMapELF.py
index 22b057973..67ed16ccb 100644
--- a/lib/portage/util/_dyn_libs/LinkageMapELF.py
+++ b/lib/portage/util/_dyn_libs/LinkageMapELF.py
@@ -52,14 +52,12 @@ _approx_multilib_categories = {
class LinkageMapELF:
-
"""Models dynamic linker dependencies."""
_needed_aux_key = "NEEDED.ELF.2"
_soname_map_class = slot_dict_class(("consumers", "providers"), prefix="")
class _obj_properties_class:
-
__slots__ = (
"arch",
"needed",
@@ -108,7 +106,6 @@ class LinkageMapELF:
return key
class _ObjectKey:
-
"""Helper class used as _obj_properties keys for objects."""
__slots__ = ("_key",)
@@ -279,7 +276,7 @@ class LinkageMapELF:
args.extend(os.path.join(root, x.lstrip("." + os.sep)) for x in plibs)
try:
proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
raise CommandNotFound(args[0])
@@ -308,9 +305,7 @@ class LinkageMapELF:
try:
entry = NeededEntry.parse("scanelf", l)
except InvalidData as e:
- writemsg_level(
- "\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1
- )
+ writemsg_level(f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1)
continue
try:
with open(
@@ -322,7 +317,7 @@ class LinkageMapELF:
"rb",
) as f:
elf_header = ELFHeader.read(f)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
# File removed concurrently.
@@ -344,7 +339,7 @@ class LinkageMapELF:
)
out, err = proc.communicate()
proc.wait()
- except EnvironmentError:
+ except OSError:
pass
else:
if b"SB shared object" in out:
@@ -392,7 +387,7 @@ class LinkageMapELF:
try:
entry = NeededEntry.parse(location, l)
except InvalidData as e:
- writemsg_level("\n%s\n\n" % (e,), level=logging.ERROR, noiselevel=-1)
+ writemsg_level(f"\n{e}\n\n", level=logging.ERROR, noiselevel=-1)
continue
# If NEEDED.ELF.2 contains the new multilib category field,
@@ -409,7 +404,7 @@ class LinkageMapELF:
expand = {"ORIGIN": os.path.dirname(entry.filename)}
entry.runpaths = frozenset(
normalize_path(
- varexpand(x, expand, error_leader=lambda: "%s: " % location)
+ varexpand(x, expand, error_leader=lambda: f"{location}: ")
)
for x in entry.runpaths
)
@@ -518,7 +513,6 @@ class LinkageMapELF:
os = _os_merge
class _LibraryCache:
-
"""
Caches properties associated with paths.
@@ -662,15 +656,14 @@ class LinkageMapELF:
if debug:
if not os.path.isfile(lib):
writemsg_level(
- _("Missing library:") + " %s\n" % (lib,),
+ _("Missing library:") + f" {lib}\n",
level=logging.DEBUG,
noiselevel=-1,
)
else:
writemsg_level(
_("Possibly missing symlink:")
- + "%s\n"
- % (os.path.join(os.path.dirname(lib), soname)),
+ + f"{os.path.join(os.path.dirname(lib), soname)}\n",
level=logging.DEBUG,
noiselevel=-1,
)
@@ -719,7 +712,7 @@ class LinkageMapELF:
os = _os_merge
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
basename = os.path.basename(obj)
soname = self._obj_properties[obj_key].soname
return (
@@ -770,10 +763,10 @@ class LinkageMapELF:
else:
obj_key = self._obj_key_cache.get(obj)
if obj_key is None:
- raise KeyError("%s not in object list" % obj)
+ raise KeyError(f"{obj} not in object list")
obj_props = self._obj_properties.get(obj_key)
if obj_props is None:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
if obj_props.owner is None:
return ()
return (obj_props.owner,)
@@ -793,10 +786,10 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
return self._obj_properties[obj_key].soname
if obj not in self._obj_key_cache:
- raise KeyError("%s not in object list" % obj)
+ raise KeyError(f"{obj} not in object list")
return self._obj_properties[self._obj_key_cache[obj]].soname
def findProviders(self, obj):
@@ -831,17 +824,17 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
else:
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
obj_props = self._obj_properties[obj_key]
arch = obj_props.arch
needed = obj_props.needed
path = obj_props.runpaths
- path_keys = set(self._path_key(x) for x in path.union(self._defpath))
+ path_keys = {self._path_key(x) for x in path.union(self._defpath)}
for soname in needed:
rValue[soname] = set()
if arch not in self._libs or soname not in self._libs[arch]:
@@ -910,13 +903,13 @@ class LinkageMapELF:
if isinstance(obj, self._ObjectKey):
obj_key = obj
if obj_key not in self._obj_properties:
- raise KeyError("%s not in object list" % obj_key)
+ raise KeyError(f"{obj_key} not in object list")
objs = self._obj_properties[obj_key].alt_paths
else:
- objs = set([obj])
+ objs = {obj}
obj_key = self._obj_key(obj)
if obj_key not in self._obj_properties:
- raise KeyError("%s (%s) not in object list" % (obj_key, obj))
+ raise KeyError(f"{obj_key} ({obj}) not in object list")
# If there is another version of this lib with the
# same soname and the soname symlink points to that
@@ -949,7 +942,7 @@ class LinkageMapELF:
if arch_map is not None:
soname_node = arch_map.get(soname)
- defpath_keys = set(self._path_key(x) for x in self._defpath)
+ defpath_keys = {self._path_key(x) for x in self._defpath}
satisfied_consumer_keys = set()
if soname_node is not None:
if exclude_providers is not None or not greedy:
@@ -984,7 +977,7 @@ class LinkageMapELF:
if soname_node is not None:
# For each potential consumer, add it to rValue if an object from the
# arguments resides in the consumer's runpath.
- objs_dir_keys = set(self._path_key(os.path.dirname(x)) for x in objs)
+ objs_dir_keys = {self._path_key(os.path.dirname(x)) for x in objs}
for consumer_key in soname_node.consumers:
if consumer_key in satisfied_consumer_keys:
continue
diff --git a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
index fd5c97362..7b7276778 100644
--- a/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
+++ b/lib/portage/util/_dyn_libs/PreservedLibsRegistry.py
@@ -71,7 +71,7 @@ class PreservedLibsRegistry:
"rb",
)
content = f.read()
- except EnvironmentError as e:
+ except OSError as e:
if not hasattr(e, "errno"):
raise
elif e.errno == errno.ENOENT:
@@ -144,10 +144,10 @@ class PreservedLibsRegistry:
else:
pickle.dump(self._data, f, protocol=2)
f.close()
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != PermissionDenied.errno:
writemsg_level(
- "!!! %s %s\n" % (e, self._filename),
+ f"!!! {e} {self._filename}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/_dyn_libs/display_preserved_libs.py b/lib/portage/util/_dyn_libs/display_preserved_libs.py
index e81ca369c..32dcc9428 100644
--- a/lib/portage/util/_dyn_libs/display_preserved_libs.py
+++ b/lib/portage/util/_dyn_libs/display_preserved_libs.py
@@ -8,7 +8,6 @@ from portage.output import colorize
def display_preserved_libs(vardb, verbose=False):
-
MAX_DISPLAY = 3
plibdata = vardb._plib_registry.getPreservedLibs()
@@ -20,12 +19,12 @@ def display_preserved_libs(vardb, verbose=False):
linkmap.rebuild()
except portage.exception.CommandNotFound as e:
portage.util.writemsg_level(
- "!!! Command Not Found: %s\n" % (e,), level=logging.ERROR, noiselevel=-1
+ f"!!! Command Not Found: {e}\n", level=logging.ERROR, noiselevel=-1
)
else:
search_for_owners = set()
for cpv in plibdata:
- internal_plib_keys = set(linkmap._obj_key(f) for f in plibdata[cpv])
+ internal_plib_keys = {linkmap._obj_key(f) for f in plibdata[cpv]}
for f in plibdata[cpv]:
if f in consumer_map:
continue
@@ -54,7 +53,7 @@ def display_preserved_libs(vardb, verbose=False):
all_preserved.update(*plibdata.values())
for cpv in plibdata:
- print(colorize("WARN", ">>>") + " package: %s" % cpv)
+ print(colorize("WARN", ">>>") + f" package: {cpv}")
samefile_map = {}
for f in plibdata[cpv]:
obj_key = linkmap._obj_key(f)
@@ -67,7 +66,7 @@ def display_preserved_libs(vardb, verbose=False):
for alt_paths in samefile_map.values():
alt_paths = sorted(alt_paths)
for p in alt_paths:
- print(colorize("WARN", " * ") + " - %s" % (p,))
+ print(colorize("WARN", " * ") + f" - {p}")
f = alt_paths[0]
consumers = consumer_map.get(f, [])
consumers_non_preserved = [c for c in consumers if c not in all_preserved]
@@ -92,9 +91,7 @@ def display_preserved_libs(vardb, verbose=False):
owners_desc = "preserved"
else:
owners_desc = ", ".join(x.mycpv for x in owners.get(c, []))
- print(
- colorize("WARN", " * ") + " used by %s (%s)" % (c, owners_desc)
- )
+ print(colorize("WARN", " * ") + f" used by {c} ({owners_desc})")
if not verbose and len(consumers) > max_display:
print(
colorize("WARN", " * ")
diff --git a/lib/portage/util/_dyn_libs/dyn_libs.py b/lib/portage/util/_dyn_libs/dyn_libs.py
new file mode 100644
index 000000000..6f8a07d70
--- /dev/null
+++ b/lib/portage/util/_dyn_libs/dyn_libs.py
@@ -0,0 +1,65 @@
+# Copyright 2021-2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import os
+import stat
+
+import portage
+
+
+def installed_dynlibs(directory):
+ """
+ This traverses installed *.so symlinks to check if they point to
+ regular files. If a symlink target is outside of the top directory,
+ traversal follows the corresponding file inside the top directory
+ if it exists, and otherwise stops following the symlink.
+ """
+ directory_prefix = f"{directory.rstrip(os.sep)}{os.sep}"
+ for parent, _dirnames, filenames in os.walk(directory):
+ for filename in filenames:
+ if filename.endswith(".so"):
+ filename_abs = os.path.join(parent, filename)
+ target = filename_abs
+ levels = 0
+ while True:
+ try:
+ st = os.lstat(target)
+ except OSError:
+ break
+ if stat.S_ISREG(st.st_mode):
+ return True
+ elif stat.S_ISLNK(st.st_mode):
+ levels += 1
+ if levels == 40:
+ portage.writemsg(
+ f"too many levels of symbolic links: {filename_abs}\n",
+ noiselevel=-1,
+ )
+ break
+ target = portage.abssymlink(target)
+ if not target.startswith(directory_prefix):
+ # If target is outside the top directory, then follow the
+ # corresponding file inside the top directory if it exists,
+ # and otherwise stop following.
+ target = os.path.join(
+ directory_prefix, target.lstrip(os.sep)
+ )
+ else:
+ break
+ return False
+
+
+def check_dyn_libs_inconsistent(directory, provides):
+ """Checks directory for whether any dynamic libraries were installed and
+ if PROVIDES corresponds."""
+
+ # Let's check if we've got inconsistent results.
+ # If we're installing dynamic libraries (.so files), we should
+ # really have a PROVIDES.
+    # (This complements, at the point of ingestion, the creation
+    # check in doebuild.py.)
+ # Note: we could check a non-empty PROVIDES against the list of .sos,
+ # but this doesn't gain us anything. We're interested in failure
+ # to properly parse the installed files at all, which should really
+ # be a global problem (e.g. bug #811462)
+ return not provides and installed_dynlibs(directory)
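
A hypothetical caller of the new helper; the image path and empty PROVIDES value are illustrative only:

    from portage.util._dyn_libs.dyn_libs import check_dyn_libs_inconsistent

    # True means .so files were installed but PROVIDES is empty,
    # i.e. the metadata is inconsistent (cf. bug #811462).
    if check_dyn_libs_inconsistent("/var/tmp/portage/image", provides=""):
        print("installed dynamic libraries but PROVIDES is empty")
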
diff --git a/lib/portage/util/_dyn_libs/meson.build b/lib/portage/util/_dyn_libs/meson.build
new file mode 100644
index 000000000..f744d2a08
--- /dev/null
+++ b/lib/portage/util/_dyn_libs/meson.build
@@ -0,0 +1,14 @@
+py.install_sources(
+ [
+ 'LinkageMapELF.py',
+ 'NeededEntry.py',
+ 'PreservedLibsRegistry.py',
+ 'display_preserved_libs.py',
+ 'dyn_libs.py',
+ 'soname_deps.py',
+ 'soname_deps_qa.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/_dyn_libs',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/_dyn_libs/soname_deps.py b/lib/portage/util/_dyn_libs/soname_deps.py
index b1d03bad8..7d65209a4 100644
--- a/lib/portage/util/_dyn_libs/soname_deps.py
+++ b/lib/portage/util/_dyn_libs/soname_deps.py
@@ -67,7 +67,7 @@ class SonameDepsProcessor:
if multilib_cat is None:
# This usage is invalid. The caller must ensure that
# the multilib category data is supplied here.
- raise AssertionError("Missing multilib category data: %s" % entry.filename)
+ raise AssertionError(f"Missing multilib category data: {entry.filename}")
self._basename_map.setdefault(os.path.basename(entry.filename), []).append(
entry
@@ -85,7 +85,7 @@ class SonameDepsProcessor:
varexpand(
x,
expand,
- error_leader=lambda: "%s: DT_RUNPATH: " % entry.filename,
+ error_leader=lambda: f"{entry.filename}: DT_RUNPATH: ",
)
)
for x in entry.runpaths
diff --git a/lib/portage/util/_dyn_libs/soname_deps_qa.py b/lib/portage/util/_dyn_libs/soname_deps_qa.py
index 532c7bbab..4c659d1e3 100644
--- a/lib/portage/util/_dyn_libs/soname_deps_qa.py
+++ b/lib/portage/util/_dyn_libs/soname_deps_qa.py
@@ -1,7 +1,6 @@
# Copyright 2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-import io
from portage import (
_encodings,
@@ -60,18 +59,17 @@ def _get_unresolved_soname_deps(metadata_dir, all_provides):
@return: list of tuple(filename, tuple(unresolved sonames))
"""
try:
- with io.open(
+ with open(
_unicode_encode(
os.path.join(metadata_dir, "REQUIRES"),
encoding=_encodings["fs"],
errors="strict",
),
- mode="rt",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
requires = frozenset(parse_soname_deps(f.read()))
- except EnvironmentError:
+ except OSError:
return []
unresolved_by_category = {}
@@ -82,9 +80,8 @@ def _get_unresolved_soname_deps(metadata_dir, all_provides):
)
needed_filename = os.path.join(metadata_dir, "NEEDED.ELF.2")
- with io.open(
+ with open(
_unicode_encode(needed_filename, encoding=_encodings["fs"], errors="strict"),
- mode="rt",
encoding=_encodings["repo.content"],
errors="strict",
) as f:
diff --git a/lib/portage/util/_eventloop/asyncio_event_loop.py b/lib/portage/util/_eventloop/asyncio_event_loop.py
index 88933af9d..821cc7f10 100644
--- a/lib/portage/util/_eventloop/asyncio_event_loop.py
+++ b/lib/portage/util/_eventloop/asyncio_event_loop.py
@@ -1,12 +1,18 @@
-# Copyright 2018-2021 Gentoo Authors
+# Copyright 2018-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
import os
import signal
+import threading
import asyncio as _real_asyncio
from asyncio.events import AbstractEventLoop as _AbstractEventLoop
-from asyncio.unix_events import AbstractChildWatcher as _AbstractChildWatcher
+from asyncio.unix_events import ThreadedChildWatcher
+
+try:
+ from asyncio.unix_events import PidfdChildWatcher
+except ImportError:
+ PidfdChildWatcher = None
import portage
@@ -20,18 +26,14 @@ class AsyncioEventLoop(_AbstractEventLoop):
def __init__(self, loop=None):
loop = loop or _real_asyncio.get_event_loop()
self._loop = loop
- self.run_until_complete = (
- self._run_until_complete
- if portage._internal_caller
- else loop.run_until_complete
- )
+ self.run_until_complete = self._run_until_complete
self.call_soon = loop.call_soon
self.call_soon_threadsafe = loop.call_soon_threadsafe
self.call_later = loop.call_later
self.call_at = loop.call_at
self.is_running = loop.is_running
self.is_closed = loop.is_closed
- self.close = loop.close
+ self.close = self._close
self.create_future = (
loop.create_future
if hasattr(loop, "create_future")
@@ -50,10 +52,31 @@ class AsyncioEventLoop(_AbstractEventLoop):
self.get_debug = loop.get_debug
self._wakeup_fd = -1
self._child_watcher = None
+ # Used to drop recursive calls to _close.
+ self._closing = False
+ # Initialized in _run_until_complete.
+ self._is_main = None
if portage._internal_caller:
loop.set_exception_handler(self._internal_caller_exception_handler)
+ def _close(self):
+ """
+ Before closing the main loop, run portage.process.run_exitfuncs()
+ with the event loop running so that anything attached can clean
+ itself up (like the socks5 ProxyManager for bug 925240).
+ """
+ if not (self._closing or self.is_closed()):
+ self._closing = True
+ if self._is_main:
+ self.run_until_complete(self._close_main())
+ self._loop.close()
+ self._closing = False
+
+ async def _close_main(self):
+ await portage.process.run_coroutine_exitfuncs()
+ portage.process.run_exitfuncs()
+
@staticmethod
def _internal_caller_exception_handler(loop, context):
"""
@@ -91,9 +114,24 @@ class AsyncioEventLoop(_AbstractEventLoop):
@return: the internal event loop's AbstractChildWatcher interface
"""
if self._child_watcher is None:
- self._child_watcher = _ChildWatcherThreadSafetyWrapper(
- self, _real_asyncio.get_child_watcher()
- )
+ pidfd_works = False
+ if PidfdChildWatcher is not None and hasattr(os, "pidfd_open"):
+ try:
+ fd = os.pidfd_open(portage.getpid())
+ except Exception:
+ pass
+ else:
+ os.close(fd)
+ pidfd_works = True
+
+ if pidfd_works:
+ watcher = PidfdChildWatcher()
+ else:
+ watcher = ThreadedChildWatcher()
+
+ watcher.attach_loop(self._loop)
+ self._child_watcher = _ChildWatcherThreadSafetyWrapper(self, watcher)
+
return self._child_watcher
@property
@@ -101,7 +139,7 @@ class AsyncioEventLoop(_AbstractEventLoop):
"""
Portage internals use this as a layer of indirection in cases
where a wrapper around an asyncio.AbstractEventLoop implementation
- is needed for purposes of compatiblity.
+ is needed for purposes of compatibility.
@rtype: asyncio.AbstractEventLoop
@return: the internal event loop's AbstractEventLoop interface
@@ -110,7 +148,7 @@ class AsyncioEventLoop(_AbstractEventLoop):
def _run_until_complete(self, future):
"""
- An implementation of AbstractEventLoop.run_until_complete that supresses
+ An implementation of AbstractEventLoop.run_until_complete that suppresses
spurious error messages like the following reported in bug 655656:
Exception ignored when trying to write to the signal wakeup fd:
@@ -119,6 +157,12 @@ class AsyncioEventLoop(_AbstractEventLoop):
In order to avoid potential interference with API consumers, this
implementation is only used when portage._internal_caller is True.
"""
+ if self._is_main is None:
+ self._is_main = threading.current_thread() is threading.main_thread()
+
+ if not portage._internal_caller:
+ return self._loop.run_until_complete(future)
+
if self._wakeup_fd != -1:
signal.set_wakeup_fd(self._wakeup_fd)
self._wakeup_fd = -1
@@ -135,7 +179,11 @@ class AsyncioEventLoop(_AbstractEventLoop):
pass
-class _ChildWatcherThreadSafetyWrapper(_AbstractChildWatcher):
+class _ChildWatcherThreadSafetyWrapper:
+ """
+    This class provides thread safety when multiple loops run in different threads.
+ """
+
def __init__(self, loop, real_watcher):
self._loop = loop
self._real_watcher = real_watcher
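
The watcher selection above probes pidfd support at runtime instead of trusting hasattr alone, since os.pidfd_open can exist and still fail on older kernels or under a seccomp filter. The probe in isolation:

    import os

    def pidfd_works() -> bool:
        if not hasattr(os, "pidfd_open"):
            return False
        try:
            fd = os.pidfd_open(os.getpid())
        except OSError:
            # e.g. a kernel without pidfd_open(2), or seccomp denial
            return False
        os.close(fd)
        return True
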
diff --git a/lib/portage/util/_eventloop/meson.build b/lib/portage/util/_eventloop/meson.build
new file mode 100644
index 000000000..e4474452b
--- /dev/null
+++ b/lib/portage/util/_eventloop/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'asyncio_event_loop.py',
+ 'global_event_loop.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/_eventloop',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/_get_vm_info.py b/lib/portage/util/_get_vm_info.py
index 9fb7e4ba8..5f82c84ea 100644
--- a/lib/portage/util/_get_vm_info.py
+++ b/lib/portage/util/_get_vm_info.py
@@ -9,7 +9,6 @@ from portage import _unicode_decode
def get_vm_info():
-
vm_info = {}
env = os.environ.copy()
@@ -51,7 +50,6 @@ def get_vm_info():
pass
else:
-
try:
proc = subprocess.Popen(
["sysctl", "-a"],
diff --git a/lib/portage/util/_info_files.py b/lib/portage/util/_info_files.py
index 528b273d9..45d674b9b 100644
--- a/lib/portage/util/_info_files.py
+++ b/lib/portage/util/_info_files.py
@@ -12,7 +12,6 @@ from portage import os
def chk_updated_info_files(root, infodirs, prev_mtimes):
-
if os.path.exists("/usr/bin/install-info"):
out = portage.output.EOutput()
regen_infodirs = []
@@ -36,7 +35,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
if portage.util.noiselimit >= 0:
out.einfo("Regenerating GNU info directory index...")
- dir_extensions = ("", ".gz", ".bz2")
+ dir_extensions = ("", ".gz", ".bz2", ".xz", ".lz", ".lz4", ".zst", ".lzma")
icount = 0
badcount = 0
errmsg = ""
@@ -68,7 +67,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
try:
os.rename(dir_file + ext, dir_file + ext + ".old")
moved_old_dir = True
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -77,7 +76,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
proc = subprocess.Popen(
[
"/usr/bin/install-info",
- "--dir-file=%s" % os.path.join(inforoot, "dir"),
+ f"--dir-file={os.path.join(inforoot, 'dir')}",
os.path.join(inforoot, x),
],
env=dict(os.environ, LANG="C", LANGUAGE="C"),
@@ -114,7 +113,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
for ext in dir_extensions:
try:
os.rename(dir_file + ext + ".old", dir_file + ext)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
@@ -124,7 +123,7 @@ def chk_updated_info_files(root, infodirs, prev_mtimes):
for ext in dir_extensions:
try:
os.unlink(dir_file + ext + ".old")
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
del e
diff --git a/lib/portage/util/_path.py b/lib/portage/util/_path.py
index 82fe95870..d99f15664 100644
--- a/lib/portage/util/_path.py
+++ b/lib/portage/util/_path.py
@@ -12,7 +12,7 @@ def exists_raise_eaccess(path):
os.stat(path)
except OSError as e:
if e.errno == PermissionDenied.errno:
- raise PermissionDenied("stat('%s')" % path)
+ raise PermissionDenied(f"stat('{path}')")
return False
else:
return True
@@ -23,7 +23,7 @@ def isdir_raise_eaccess(path):
st = os.stat(path)
except OSError as e:
if e.errno == PermissionDenied.errno:
- raise PermissionDenied("stat('%s')" % path)
+ raise PermissionDenied(f"stat('{path}')")
return False
else:
return stat.S_ISDIR(st.st_mode)
diff --git a/lib/portage/util/_pty.py b/lib/portage/util/_pty.py
index e58f95e0a..9d090b711 100644
--- a/lib/portage/util/_pty.py
+++ b/lib/portage/util/_pty.py
@@ -1,9 +1,11 @@
-# Copyright 2010-2011 Gentoo Foundation
+# Copyright 2010-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import asyncio
import platform
import pty
import termios
+from typing import Optional, Union
from portage import os
from portage.output import get_term_size, set_term_size
@@ -19,17 +21,21 @@ _disable_openpty = platform.system() in ("SunOS",)
_fbsd_test_pty = platform.system() == "FreeBSD"
-def _create_pty_or_pipe(copy_term_size=None):
+def _create_pty_or_pipe(
+ copy_term_size: Optional[int] = None,
+) -> tuple[Union[asyncio.Future, bool], int, int]:
"""
    Try to create a pty and if that fails then create a normal
- pipe instead.
+    pipe instead. If a Future is returned for pty_ready, the caller
+    should wait for it (the Future comes from set_term_size, which
+    spawns stty).
@param copy_term_size: If a tty file descriptor is given
then the term size will be copied to the pty.
@type copy_term_size: int
@rtype: tuple
- @return: A tuple of (is_pty, master_fd, slave_fd) where
- is_pty is True if a pty was successfully allocated, and
+ @return: A tuple of (pty_ready, master_fd, slave_fd) where
+ pty_ready is asyncio.Future or True if a pty was successfully allocated, and
False if a normal pipe was allocated.
"""
@@ -56,9 +62,9 @@ def _create_pty_or_pipe(copy_term_size=None):
try:
master_fd, slave_fd = pty.openpty()
got_pty = True
- except EnvironmentError as e:
+ except OSError as e:
_disable_openpty = True
- writemsg("openpty failed: '%s'\n" % str(e), noiselevel=-1)
+ writemsg(f"openpty failed: '{str(e)}'\n", noiselevel=-1)
del e
master_fd, slave_fd = os.pipe()
@@ -69,8 +75,11 @@ def _create_pty_or_pipe(copy_term_size=None):
mode[1] &= ~termios.OPOST
termios.tcsetattr(slave_fd, termios.TCSANOW, mode)
+ pty_ready = None
if got_pty and copy_term_size is not None and os.isatty(copy_term_size):
rows, columns = get_term_size()
- set_term_size(rows, columns, slave_fd)
+ pty_ready = set_term_size(rows, columns, slave_fd)
- return (got_pty, master_fd, slave_fd)
+ # The future only exists when got_pty is True, so we can
+ # return the future in lieu of got_pty when it exists.
+ return (got_pty if pty_ready is None else pty_ready, master_fd, slave_fd)
diff --git a/lib/portage/util/_urlopen.py b/lib/portage/util/_urlopen.py
index 70440c3e1..d451a94a8 100644
--- a/lib/portage/util/_urlopen.py
+++ b/lib/portage/util/_urlopen.py
@@ -26,10 +26,10 @@ def have_pep_476():
return hasattr(__import__("ssl"), "_create_unverified_context")
-def urlopen(url, if_modified_since=None, proxies=None):
+def urlopen(url, timeout=10, if_modified_since=None, headers={}, proxies=None):
parse_result = urllib_parse.urlparse(url)
if parse_result.scheme not in ("http", "https"):
- return _urlopen(url)
+ return _urlopen(url, timeout=timeout)
netloc = parse_result.netloc.rpartition("@")[-1]
url = urllib_parse.urlunparse(
@@ -45,6 +45,8 @@ def urlopen(url, if_modified_since=None, proxies=None):
password_manager = urllib_request.HTTPPasswordMgrWithDefaultRealm()
request = urllib_request.Request(url)
request.add_header("User-Agent", "Gentoo Portage")
+ for key in headers:
+ request.add_header(key, headers[key])
if if_modified_since:
request.add_header("If-Modified-Since", _timestamp_to_http(if_modified_since))
if parse_result.username is not None:
@@ -57,7 +59,7 @@ def urlopen(url, if_modified_since=None, proxies=None):
handlers.append(urllib_request.ProxyHandler(proxies))
opener = urllib_request.build_opener(*handlers)
- hdl = opener.open(request)
+ hdl = opener.open(request, timeout=timeout)
if hdl.headers.get("last-modified", ""):
try:
add_header = hdl.headers.add_header
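
A reduced sketch of the new keyword arguments using plain urllib; the URL and header are placeholders, and a None default replaces the (unmutated, therefore harmless) headers={} default above:

    import urllib.request

    def fetch(url, timeout=10, headers=None):
        request = urllib.request.Request(url)
        request.add_header("User-Agent", "Gentoo Portage")
        for key, value in (headers or {}).items():
            request.add_header(key, value)
        # The timeout now applies to the open() call as well.
        return urllib.request.urlopen(request, timeout=timeout)

    # fetch("https://example.org/Packages", headers={"Accept": "*/*"})
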
diff --git a/lib/portage/util/_xattr.py b/lib/portage/util/_xattr.py
index ff0a7d8c5..41b396d0b 100644
--- a/lib/portage/util/_xattr.py
+++ b/lib/portage/util/_xattr.py
@@ -59,7 +59,7 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def get(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["getfattr", "--absolute-names", "-n", name, item]
if nofollow:
cmd += ["-h"]
@@ -75,14 +75,14 @@ class _XattrSystemCommands(_XattrGetAll):
@classmethod
def set(cls, item, name, value, _flags=0, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["setfattr", "-n", name, "-v", value, item]
cls._call(cmd)
@classmethod
def remove(cls, item, name, nofollow=False, namespace=None):
if namespace:
- name = "%s.%s" % (namespace, name)
+ name = f"{namespace}.{name}"
cmd = ["setfattr", "-x", name, item]
if nofollow:
cmd += ["-h"]
@@ -93,12 +93,12 @@ class _XattrSystemCommands(_XattrGetAll):
cmd = ["getfattr", "-d", "--absolute-names", item]
if nofollow:
cmd += ["-h"]
- cmd += ["-m", ("^%s[.]" % namespace) if namespace else "-"]
+ cmd += ["-m", (f"^{namespace}[.]") if namespace else "-"]
proc = cls._call(cmd, stdout=subprocess.PIPE)
ret = []
if namespace:
- namespace = "%s." % namespace
+ namespace = f"{namespace}."
for name, value in cls._parse_output(proc.stdout):
if namespace:
if name.startswith(namespace):
@@ -169,7 +169,6 @@ if hasattr(os, "getxattr"):
def list(item, nofollow=False, namespace=None):
return os.listxattr(item, follow_symlinks=not nofollow)
-
else:
try:
# Maybe we have the xattr module.
diff --git a/lib/portage/util/backoff.py b/lib/portage/util/backoff.py
index d2c78ad76..b5714dfe5 100644
--- a/lib/portage/util/backoff.py
+++ b/lib/portage/util/backoff.py
@@ -40,7 +40,7 @@ class ExponentialBackoff:
@rtype: int
"""
try:
- return min(self._limit, self._multiplier * (self._base ** tries))
+ return min(self._limit, self._multiplier * (self._base**tries))
except OverflowError:
return self._limit
@@ -52,4 +52,4 @@ class RandomExponentialBackoff(ExponentialBackoff):
"""
def __call__(self, tries):
- return random.random() * super(RandomExponentialBackoff, self).__call__(tries)
+ return random.random() * super().__call__(tries)
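
The ** spacing and bare super() are cosmetic; the behavior stays the classic capped exponential backoff with full jitter. The same computation as a self-contained function, with illustrative defaults:

    import random

    def jittered_backoff(tries, multiplier=1, base=2, limit=60):
        try:
            delay = min(limit, multiplier * base**tries)
        except OverflowError:
            delay = limit
        # Full jitter: a uniformly random fraction of the cap.
        return random.random() * delay
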
diff --git a/lib/portage/util/bin_entry_point.py b/lib/portage/util/bin_entry_point.py
index acc16d544..efa8b17b7 100644
--- a/lib/portage/util/bin_entry_point.py
+++ b/lib/portage/util/bin_entry_point.py
@@ -1,9 +1,8 @@
-# Copyright 2021 Gentoo Authors
+# Copyright 2021-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["bin_entry_point"]
-import re
import sys
from portage.const import PORTAGE_BIN_PATH
@@ -18,17 +17,10 @@ def bin_entry_point():
"""
script_path = os.path.join(PORTAGE_BIN_PATH, os.path.basename(sys.argv[0]))
if os.access(script_path, os.X_OK):
- with open(script_path, "rt") as f:
- shebang = f.readline()
- python_match = re.search(r"/python[\d\.]*\s+([^/]*)\s+$", shebang)
- if python_match:
- sys.argv = [
- os.path.join(os.path.dirname(sys.argv[0]), "python"),
- python_match.group(1),
- script_path,
- ] + sys.argv[1:]
- os.execvp(sys.argv[0], sys.argv)
- sys.argv[0] = script_path
+ sys.argv = [
+ sys.executable,
+ script_path,
+ ] + sys.argv[1:]
os.execvp(sys.argv[0], sys.argv)
else:
print("File not found:", script_path, file=sys.stderr)
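
The rewritten entry point no longer parses the installed script's shebang; it simply re-executes the script under whatever interpreter is currently running, via sys.executable. The core pattern:

    import os
    import sys

    def reexec(script_path, argv):
        # Replace this process; execvp only returns on failure
        # (by raising OSError).
        args = [sys.executable, script_path] + list(argv)
        os.execvp(args[0], args)
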
diff --git a/lib/portage/util/changelog.py b/lib/portage/util/changelog.py
index db42de812..bcf90eb99 100644
--- a/lib/portage/util/changelog.py
+++ b/lib/portage/util/changelog.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python -b
+#!/usr/bin/env python
# Copyright 2009-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
@@ -41,7 +41,7 @@ class ChangeLogTypeSort(str):
return second in ("EBUILD", "MISC", "AUX")
if first is None:
return False
- raise ValueError("Unknown file type '%s'" % first)
+ raise ValueError(f"Unknown file type '{first}'")
def __lt__(self, other):
"""
diff --git a/lib/portage/util/compression_probe.py b/lib/portage/util/compression_probe.py
index 423e786bf..0879754b2 100644
--- a/lib/portage/util/compression_probe.py
+++ b/lib/portage/util/compression_probe.py
@@ -37,12 +37,12 @@ _compressors = {
"package": "app-arch/lzop",
},
"xz": {
- "compress": "xz ${BINPKG_COMPRESS_FLAGS}",
- "decompress": "xz -d",
+ "compress": "xz -T{JOBS} --memlimit-compress=50% -q ${BINPKG_COMPRESS_FLAGS}",
+ "decompress": "xz -T{JOBS} -d",
"package": "app-arch/xz-utils",
},
"zstd": {
- "compress": "zstd ${BINPKG_COMPRESS_FLAGS}",
+ "compress": "zstd -T{JOBS} ${BINPKG_COMPRESS_FLAGS}",
# If the compression windowLog was larger than the default of 27,
# then --long=windowLog needs to be passed to the decompressor.
# Therefore, pass a larger --long=31 value to the decompressor
@@ -95,7 +95,7 @@ def compression_probe(f):
_unicode_encode(f, encoding=_encodings["fs"], errors="strict"),
mode="rb",
)
- except IOError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(f)
elif e.errno in (errno.ENOENT, errno.ESTALE):
@@ -111,7 +111,6 @@ def compression_probe(f):
def _compression_probe_file(f):
-
m = _compression_re.match(f.read(_max_compression_re_len))
if m is not None:
for k, v in m.groupdict().items():
diff --git a/lib/portage/util/configparser.py b/lib/portage/util/configparser.py
index 703fad408..be7d87bc0 100644
--- a/lib/portage/util/configparser.py
+++ b/lib/portage/util/configparser.py
@@ -50,13 +50,12 @@ def read_configs(parser, paths):
if isinstance(p, str):
f = None
try:
- f = io.open(
+ f = open(
_unicode_encode(p, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["repo.content"],
errors="replace",
)
- except EnvironmentError:
+ except OSError:
pass
else:
# The 'source' keyword argument is needed since otherwise
@@ -73,5 +72,5 @@ def read_configs(parser, paths):
read_file(p, **kwargs)
else:
raise TypeError(
- "Unsupported type %r of element %r of 'paths' argument" % (type(p), p)
+ f"Unsupported type {type(p)!r} of element {p!r} of 'paths' argument"
)
diff --git a/lib/portage/util/cpuinfo.py b/lib/portage/util/cpuinfo.py
index 3cbc5b650..77529bcda 100644
--- a/lib/portage/util/cpuinfo.py
+++ b/lib/portage/util/cpuinfo.py
@@ -1,7 +1,9 @@
# Copyright 2015-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
-__all__ = ["get_cpu_count"]
+import re
+
+__all__ = ["get_cpu_count", "makeopts_to_job_count"]
# Before you set out to change this function, figure out what you're really
# asking:
@@ -48,3 +50,21 @@ def get_cpu_count():
return multiprocessing.cpu_count()
except (ImportError, NotImplementedError):
return None
+
+
+def makeopts_to_job_count(makeopts):
+ """
+ Parse the job count (-jN) from MAKEOPTS. Python version of
+ bin/isolated-functions.sh's ___makeopts_jobs().
+
+ @return: Number of jobs to run or number of CPUs if none set.
+ """
+ if not makeopts:
+ return get_cpu_count()
+
+ jobs = re.match(r".*(j|--jobs=\s)\s*([0-9]+)", makeopts)
+
+ if not jobs:
+ return get_cpu_count()
+
+ return jobs.groups()[1]
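
A few hypothetical calls showing what the regex above accepts; note that a match yields the string from the capture group, not an int:

    makeopts_to_job_count("-j8 -l9")  # -> "8" (jobs.groups()[1] is a str)
    makeopts_to_job_count("-l9")      # no -jN present -> get_cpu_count()
    makeopts_to_job_count("")         # empty/None -> get_cpu_count()
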
diff --git a/lib/portage/util/digraph.py b/lib/portage/util/digraph.py
index 360d22335..18a42158d 100644
--- a/lib/portage/util/digraph.py
+++ b/lib/portage/util/digraph.py
@@ -312,25 +312,19 @@ class digraph:
writemsg(s, noiselevel=-1)
for node in self.nodes:
- output("%s " % (node,))
+ output(f"{node} ")
if self.nodes[node][0]:
output("depends on\n")
else:
output("(no children)\n")
for child, priorities in self.nodes[node][0].items():
- output(
- " %s (%s)\n"
- % (
- child,
- priorities[-1],
- )
- )
+ output(f" {child} ({priorities[-1]})\n")
def bfs(self, start, ignore_priority=None):
if start not in self:
raise KeyError(start)
- queue, enqueued = deque([(None, start)]), set([start])
+ queue, enqueued = deque([(None, start)]), {start}
while queue:
parent, n = queue.popleft()
yield parent, n
diff --git a/lib/portage/util/elf/constants.py b/lib/portage/util/elf/constants.py
index d86b39483..9216a3535 100644
--- a/lib/portage/util/elf/constants.py
+++ b/lib/portage/util/elf/constants.py
@@ -31,11 +31,18 @@ EM_S390 = 22
EM_ARM = 40
EM_SH = 42
EM_SPARCV9 = 43
+EM_ARC = 45
EM_IA_64 = 50
EM_X86_64 = 62
+EM_ARC_COMPACT = 93
EM_ALTERA_NIOS2 = 113
EM_AARCH64 = 183
+EM_ARC_COMPACT2 = 195
+EM_AMDGPU = 224
EM_RISCV = 243
+EM_ARC_COMPACT3_64 = 253
+EM_ARC_COMPACT3 = 255
+EM_LOONGARCH = 258
EM_ALPHA = 0x9026
E_ENTRY = 24
@@ -52,3 +59,11 @@ EF_RISCV_FLOAT_ABI_SOFT = 0x0000
EF_RISCV_FLOAT_ABI_SINGLE = 0x0002
EF_RISCV_FLOAT_ABI_DOUBLE = 0x0004
EF_RISCV_FLOAT_ABI_QUAD = 0x0006
+
+EF_LOONGARCH_ABI_LP64_SOFT_FLOAT = 0b001
+EF_LOONGARCH_ABI_LP64_SINGLE_FLOAT = 0b010
+EF_LOONGARCH_ABI_LP64_DOUBLE_FLOAT = 0b011
+EF_LOONGARCH_ABI_ILP32_SOFT_FLOAT = 0b101
+EF_LOONGARCH_ABI_ILP32_SINGLE_FLOAT = 0b110
+EF_LOONGARCH_ABI_ILP32_DOUBLE_FLOAT = 0b111
+EF_LOONGARCH_ABI_MASK = 0x07
diff --git a/lib/portage/util/elf/header.py b/lib/portage/util/elf/header.py
index c307fab28..3a01d47f4 100644
--- a/lib/portage/util/elf/header.py
+++ b/lib/portage/util/elf/header.py
@@ -20,7 +20,6 @@ from portage.util.elf.constants import (
class ELFHeader:
-
__slots__ = ("e_flags", "e_machine", "e_type", "ei_class", "ei_data")
@classmethod
diff --git a/lib/portage/util/elf/meson.build b/lib/portage/util/elf/meson.build
new file mode 100644
index 000000000..e12adcfe0
--- /dev/null
+++ b/lib/portage/util/elf/meson.build
@@ -0,0 +1,9 @@
+py.install_sources(
+ [
+ 'constants.py',
+ 'header.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/elf',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/endian/meson.build b/lib/portage/util/endian/meson.build
new file mode 100644
index 000000000..527959f8f
--- /dev/null
+++ b/lib/portage/util/endian/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'decode.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/endian',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/env_update.py b/lib/portage/util/env_update.py
index bb0ebf84c..b19a85325 100644
--- a/lib/portage/util/env_update.py
+++ b/lib/portage/util/env_update.py
@@ -1,12 +1,12 @@
-# Copyright 2010-2020 Gentoo Authors
+# Copyright 2010-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ["env_update"]
import errno
import glob
-import io
import stat
+import subprocess
import time
import portage
@@ -119,25 +119,23 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
fns = templist
del templist
- space_separated = set(["CONFIG_PROTECT", "CONFIG_PROTECT_MASK"])
- colon_separated = set(
- [
- "ADA_INCLUDE_PATH",
- "ADA_OBJECTS_PATH",
- "CLASSPATH",
- "INFODIR",
- "INFOPATH",
- "KDEDIRS",
- "LDPATH",
- "MANPATH",
- "PATH",
- "PKG_CONFIG_PATH",
- "PRELINK_PATH",
- "PRELINK_PATH_MASK",
- "PYTHONPATH",
- "ROOTPATH",
- ]
- )
+ space_separated = {"CONFIG_PROTECT", "CONFIG_PROTECT_MASK"}
+ colon_separated = {
+ "ADA_INCLUDE_PATH",
+ "ADA_OBJECTS_PATH",
+ "CLASSPATH",
+ "INFODIR",
+ "INFOPATH",
+ "KDEDIRS",
+ "LDPATH",
+ "MANPATH",
+ "PATH",
+ "PKG_CONFIG_PATH",
+ "PRELINK_PATH",
+ "PRELINK_PATH_MASK",
+ "PYTHONPATH",
+ "ROOTPATH",
+ }
config_list = []
@@ -146,12 +144,12 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
try:
myconfig = getconfig(file_path, expand=False)
except ParseError as e:
- writemsg("!!! '%s'\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! '{str(e)}'\n", noiselevel=-1)
del e
continue
if myconfig is None:
# broken symlink or file removed by a concurrent process
- writemsg("!!! File Not Found: '%s'\n" % file_path, noiselevel=-1)
+ writemsg(f"!!! File Not Found: '{file_path}'\n", noiselevel=-1)
continue
config_list.append(myconfig)
@@ -195,9 +193,8 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
ldsoconf_path = os.path.join(eroot, "etc", "ld.so.conf")
try:
- myld = io.open(
+ myld = open(
_unicode_encode(ldsoconf_path, encoding=_encodings["fs"], errors="strict"),
- mode="r",
encoding=_encodings["content"],
errors="replace",
)
@@ -209,7 +206,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
if x[:1] == "#":
continue
oldld.append(x[:-1])
- except (IOError, OSError) as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
oldld = None
@@ -219,11 +216,30 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
# ld.so.conf needs updating and ldconfig needs to be run
myfd = atomic_ofstream(ldsoconf_path)
myfd.write("# ld.so.conf autogenerated by env-update; make all changes to\n")
- myfd.write("# contents of /etc/env.d directory\n")
+ myfd.write(f"# contents of {eprefix}/etc/env.d directory.\n")
for x in specials["LDPATH"]:
myfd.write(x + "\n")
myfd.close()
+ if eprefix:
+ # ldconfig needs ld.so.conf paths to be prefixed, but the bfd linker
+ # needs them unprefixed, so write an alternative ld.so.conf file for
+ # the latter. Other linkers do not use these files. See ldelf.c in
+ # binutils for precise bfd behavior, as well as bug #892549.
+ ldsoconf_path = os.path.join(eroot, "usr", "etc", "ld.so.conf")
+ ensure_dirs(os.path.dirname(ldsoconf_path), mode=0o755)
+ myfd = atomic_ofstream(ldsoconf_path)
+ myfd.write(
+ "# ld.so.conf autogenerated by env-update; make all changes to\n"
+ f"# contents of {eprefix}/etc/env.d directory.\n"
+ "# This file is only used by the bfd linker. The paths are not\n"
+ "# prefixed as this is automatically added by the linker.\n"
+ )
+ for x in specials["LDPATH"]:
+ if x.startswith(eprefix + os.path.sep):
+ myfd.write(x[len(eprefix) :] + "\n")
+ myfd.close()
+
potential_lib_dirs = set()
for lib_dir_glob in ("usr/lib*", "lib*"):
x = os.path.join(eroot, lib_dir_glob)
@@ -245,10 +261,10 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
newprelink.write(
"# prelink.conf autogenerated by env-update; make all changes to\n"
)
- newprelink.write("# contents of /etc/env.d directory\n")
+ newprelink.write(f"# contents of {eprefix}/etc/env.d directory\n")
for x in sorted(potential_lib_dirs) + ["bin", "sbin"]:
- newprelink.write("-l /%s\n" % (x,))
+ newprelink.write(f"-l /{x}\n")
prelink_paths = set()
prelink_paths |= set(specials.get("LDPATH", []))
prelink_paths |= set(specials.get("PATH", []))
@@ -269,9 +285,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
plmasked = 1
break
if not plmasked:
- newprelink.write("-h %s\n" % (x,))
+ newprelink.write(f"-h {x}\n")
for x in prelink_path_mask:
- newprelink.write("-b %s\n" % (x,))
+ newprelink.write(f"-b {x}\n")
newprelink.close()
# Migration code path. If /etc/prelink.conf was generated by us, then
@@ -289,9 +305,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
== b"# prelink.conf autogenerated by env-update; make all changes to\n"
):
f = atomic_ofstream(prelink_conf)
- f.write("-c /etc/prelink.conf.d/*.conf\n")
+ f.write(f"-c {eprefix}/etc/prelink.conf.d/*.conf\n")
f.close()
- except IOError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -347,7 +363,7 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
and "CBUILD" in settings
and settings["CHOST"] != settings["CBUILD"]
):
- ldconfig = find_binary("%s-ldconfig" % settings["CHOST"])
+ ldconfig = find_binary(f"{settings['CHOST']}-ldconfig")
else:
ldconfig = os.path.join(eroot, "sbin", "ldconfig")
@@ -364,21 +380,30 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
# an older package installed ON TOP of a newer version will cause ldconfig
# to overwrite the symlinks we just made. -X means no links. After 'clean'
# we can safely create links.
- writemsg_level(
- _(">>> Regenerating %setc/ld.so.cache...\n") % (target_root,)
- )
- os.system("cd / ; %s -X -r '%s'" % (ldconfig, target_root))
+ writemsg_level(_(f">>> Regenerating {eroot}etc/ld.so.cache...\n"))
+ ret = subprocess.run(
+ [ldconfig, "-X", "-r", target_root], cwd="/"
+ ).returncode
elif ostype in ("FreeBSD", "DragonFly"):
writemsg_level(
- _(">>> Regenerating %svar/run/ld-elf.so.hints...\n") % target_root
- )
- os.system(
- (
- "cd / ; %s -elf -i "
- + "-f '%svar/run/ld-elf.so.hints' '%setc/ld.so.conf'"
- )
- % (ldconfig, target_root, target_root)
+ _(f">>> Regenerating {target_root}var/run/ld-elf.so.hints...\n")
)
+ ret = subprocess.run(
+ [
+ ldconfig,
+ "-elf",
+ "-i",
+ "-f",
+ f"{target_root}var/run/ld-elf.so.hints",
+ f"{target_root}etc/ld.so.conf",
+ ],
+ cwd="/",
+ ).returncode
+
+ if ret > 0:
+ writemsg(f"!!! ldconfig failed with exit status {ret}\n", noiselevel=-1)
+ if ret < 0:
+ writemsg(f"!!! ldconfig was killed with signal {-ret}\n", noiselevel=-1)
del specials["LDPATH"]
@@ -386,8 +411,8 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
notice += "# DO NOT EDIT THIS FILE."
penvnotice = notice + " CHANGES TO STARTUP PROFILES\n"
cenvnotice = penvnotice[:]
- penvnotice += "# GO INTO /etc/profile NOT /etc/profile.env\n\n"
- cenvnotice += "# GO INTO /etc/csh.cshrc NOT /etc/csh.env\n\n"
+ penvnotice += f"# GO INTO {eprefix}/etc/profile NOT {eprefix}/etc/profile.env\n\n"
+ cenvnotice += f"# GO INTO {eprefix}/etc/csh.cshrc NOT {eprefix}/etc/csh.env\n\n"
# create /etc/profile.env for bash support
profile_env_path = os.path.join(eroot, "etc", "profile.env")
@@ -399,9 +424,9 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
for k in env_keys:
v = env[k]
if v.startswith("$") and not v.startswith("${"):
- outfile.write("export %s=$'%s'\n" % (k, v[1:]))
+ outfile.write(f"export {k}=$'{v[1:]}'\n")
else:
- outfile.write("export %s='%s'\n" % (k, v))
+ outfile.write(f"export {k}='{v}'\n")
# Create the systemd user environment configuration file
# /etc/environment.d/10-gentoo-env.conf with the
@@ -417,11 +442,6 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
systemd_gentoo_env.write(senvnotice)
for env_key in env_keys:
- # Skip PATH since this makes it impossible to use
- # "systemctl --user import-environment PATH".
- if env_key == "PATH":
- continue
-
env_key_value = env[env_key]
# Skip variables with the empty string
@@ -442,5 +462,5 @@ def _env_update(makelinks, target_root, prev_mtimes, contents, env, writemsg_lev
outfile = atomic_ofstream(os.path.join(eroot, "etc", "csh.env"))
outfile.write(cenvnotice)
for x in env_keys:
- outfile.write("setenv %s '%s'\n" % (x, env[x]))
+ outfile.write(f"setenv {x} '{env[x]}'\n")
outfile.close()
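
The ldconfig invocations above move from os.system() with interpolated strings to subprocess.run() with an argument list, which removes the shell and its quoting of target_root. The shape of the change, with a placeholder root:

    import subprocess

    # cwd="/" replaces the former `cd / ;` prefix; no shell is involved,
    # so target_root cannot split the command apart.
    ret = subprocess.run(
        ["ldconfig", "-X", "-r", "/some/target/root"], cwd="/"
    ).returncode
    # returncode is negative when the process died from a signal.
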
diff --git a/lib/portage/util/file_copy.py b/lib/portage/util/file_copy.py
new file mode 100644
index 000000000..e3926d8ef
--- /dev/null
+++ b/lib/portage/util/file_copy.py
@@ -0,0 +1,137 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+import errno
+import fcntl
+import logging
+import os
+import platform
+import shutil
+import sys
+
+
+logger = logging.getLogger(__name__)
+
+# Added in Python 3.12
+FICLONE = getattr(fcntl, "FICLONE", 0x40049409)
+
+# Unavailable in PyPy
+SEEK_DATA = getattr(os, "SEEK_DATA", 3)
+SEEK_HOLE = getattr(os, "SEEK_HOLE", 4)
+
+
+def _get_chunks(src):
+ try:
+ offset_hole = 0
+ while True:
+ try:
+ # Find the next bit of data
+ offset_data = os.lseek(src, offset_hole, SEEK_DATA)
+ except OSError as e:
+ # Re-raise for unexpected errno values
+ if e.errno not in (errno.EINVAL, errno.ENXIO):
+ raise
+
+ offset_end = os.lseek(src, 0, os.SEEK_END)
+
+ if e.errno == errno.ENXIO:
+ # End of file
+ if offset_end > offset_hole:
+ # Hole at end of file
+ yield (offset_end, 0)
+ else:
+ # SEEK_DATA failed with EINVAL, return the whole file
+ yield (0, offset_end)
+
+ break
+ else:
+ offset_hole = os.lseek(src, offset_data, SEEK_HOLE)
+ yield (offset_data, offset_hole - offset_data)
+
+ except OSError:
+ logger.warning("_get_chunks failed unexpectedly", exc_info=sys.exc_info())
+ raise
+
+
+def _do_copy_file_range(src, dst, offset, count):
+ while count > 0:
+ # count must fit in ssize_t
+ c = min(count, sys.maxsize)
+ written = os.copy_file_range(src, dst, c, offset, offset)
+ if written == 0:
+ # https://bugs.gentoo.org/828844
+ raise OSError(errno.EOPNOTSUPP, os.strerror(errno.EOPNOTSUPP))
+ offset += written
+ count -= written
+
+
+def _do_sendfile(src, dst, offset, count):
+ os.lseek(dst, offset, os.SEEK_SET)
+ while count > 0:
+ # count must fit in ssize_t
+ c = min(count, sys.maxsize)
+ written = os.sendfile(dst, src, offset, c)
+ offset += written
+ count -= written
+
+
+def _fastcopy(src, dst):
+ with (
+ open(src, "rb", buffering=0) as srcf,
+ open(dst, "wb", buffering=0) as dstf,
+ ):
+ srcfd = srcf.fileno()
+ dstfd = dstf.fileno()
+
+ if platform.system() == "Linux":
+ try:
+ fcntl.ioctl(dstfd, FICLONE, srcfd)
+ return
+ except OSError:
+ pass
+
+ try_cfr = hasattr(os, "copy_file_range")
+
+ for offset, count in _get_chunks(srcfd):
+ if count == 0:
+ os.ftruncate(dstfd, offset)
+ else:
+ if try_cfr:
+ try:
+ _do_copy_file_range(srcfd, dstfd, offset, count)
+ continue
+ except OSError as e:
+ try_cfr = False
+ if e.errno not in (errno.EXDEV, errno.ENOSYS, errno.EOPNOTSUPP):
+ logger.warning(
+ "_do_copy_file_range failed unexpectedly",
+ exc_info=sys.exc_info(),
+ )
+ try:
+ _do_sendfile(srcfd, dstfd, offset, count)
+ except OSError:
+ logger.warning(
+ "_do_sendfile failed unexpectedly", exc_info=sys.exc_info()
+ )
+ raise
+
+
+def copyfile(src, dst):
+ """
+ Copy the contents (no metadata) of the file named src to a file
+ named dst.
+
+ If possible, copying is done within the kernel, and uses
+ "copy acceleration" techniques (such as reflinks). This also
+ supports sparse files.
+
+ @param src: path of source file
+ @type src: str
+ @param dst: path of destination file
+ @type dst: str
+ """
+
+ try:
+ _fastcopy(src, dst)
+ except OSError:
+ shutil.copyfile(src, dst)
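
The pure-Python copy path tries, in order: an FICLONE reflink ioctl, copy_file_range() per data chunk, then sendfile(), before falling back to shutil.copyfile. The reflink attempt reduced to a sketch (the fallback constant is the Linux ioctl value; fcntl.FICLONE itself exists from Python 3.12):

    import fcntl

    FICLONE = getattr(fcntl, "FICLONE", 0x40049409)

    def try_reflink(src_fd: int, dst_fd: int) -> bool:
        # Clones extents on reflink-capable filesystems (btrfs, XFS);
        # fails cleanly elsewhere, e.g. with EOPNOTSUPP or EXDEV.
        try:
            fcntl.ioctl(dst_fd, FICLONE, src_fd)
            return True
        except OSError:
            return False
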
diff --git a/lib/portage/util/file_copy/__init__.py b/lib/portage/util/file_copy/__init__.py
deleted file mode 100644
index 451f57d5c..000000000
--- a/lib/portage/util/file_copy/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Copyright 2017 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-import os
-import shutil
-import tempfile
-
-try:
- from portage.util.file_copy.reflink_linux import file_copy as _file_copy
-except ImportError:
- _file_copy = None
-
-
-def _optimized_copyfile(src, dst):
- """
- Copy the contents (no metadata) of the file named src to a file
- named dst.
-
- If possible, copying is done within the kernel, and uses
- "copy acceleration" techniques (such as reflinks). This also
- supports sparse files.
-
- @param src: path of source file
- @type src: str
- @param dst: path of destination file
- @type dst: str
- """
- with open(src, "rb", buffering=0) as src_file, open(
- dst, "wb", buffering=0
- ) as dst_file:
- _file_copy(src_file.fileno(), dst_file.fileno())
-
-
-if _file_copy is None:
- copyfile = shutil.copyfile
-else:
- copyfile = _optimized_copyfile
diff --git a/lib/portage/util/futures/_asyncio/__init__.py b/lib/portage/util/futures/_asyncio/__init__.py
index ccf800c66..e377a9cdd 100644
--- a/lib/portage/util/futures/_asyncio/__init__.py
+++ b/lib/portage/util/futures/_asyncio/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2018-2021 Gentoo Authors
+# Copyright 2018-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = (
@@ -9,18 +9,22 @@ __all__ = (
"CancelledError",
"Future",
"InvalidStateError",
+ "Lock",
"TimeoutError",
"get_child_watcher",
"get_event_loop",
"set_child_watcher",
"get_event_loop_policy",
"set_event_loop_policy",
+ "run",
+ "shield",
"sleep",
"Task",
"wait",
+ "wait_for",
)
-import subprocess
+import sys
import types
import weakref
@@ -34,20 +38,20 @@ from asyncio import (
FIRST_EXCEPTION,
Future,
InvalidStateError,
+ iscoroutinefunction,
+ Lock as _Lock,
+ shield,
TimeoutError,
+ wait_for,
)
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
+import threading
import portage
portage.proxy.lazyimport.lazyimport(
globals(),
"portage.util.futures.unix_events:_PortageEventLoopPolicy",
- "portage.util.futures:compat_coroutine@_compat_coroutine",
)
from portage.util._eventloop.asyncio_event_loop import (
AsyncioEventLoop as _AsyncioEventLoop,
@@ -106,6 +110,14 @@ def set_child_watcher(watcher):
return get_event_loop_policy().set_child_watcher(watcher)
+# Emulate run since it's the preferred python API.
+def run(coro):
+ return _safe_loop().run_until_complete(coro)
+
+
+run.__doc__ = _real_asyncio.run.__doc__
+
+
def create_subprocess_exec(*args, **kwargs):
"""
Create a subprocess.
@@ -146,17 +158,18 @@ def wait(futures, loop=None, timeout=None, return_when=ALL_COMPLETED):
return _real_asyncio.wait(futures, timeout=timeout, return_when=return_when)
-def iscoroutinefunction(func):
+class Lock(_Lock):
"""
- Return True if func is a decorated coroutine function,
- supporting both asyncio.coroutine and compat_coroutine since
- their behavior is identical for all practical purposes.
+    Inject the loop parameter on Python 3.9 or earlier in order to avoid
+ "got Future <Future pending> attached to a different loop" errors.
"""
- if _compat_coroutine._iscoroutinefunction(func):
- return True
- if _real_asyncio.iscoroutinefunction(func):
- return True
- return False
+
+ def __init__(self, **kwargs):
+ if sys.version_info >= (3, 10):
+ kwargs.pop("loop", None)
+ elif "loop" not in kwargs:
+ kwargs["loop"] = _safe_loop()._loop
+ super().__init__(**kwargs)
class Task(Future):
@@ -299,13 +312,37 @@ def _safe_loop():
def _get_running_loop():
+ """
+ This calls the real asyncio get_running_loop() and wraps that with
+ portage's internal AsyncioEventLoop wrapper. If there is no running
+ asyncio event loop but portage has a reference to another running
+ loop in this thread, then use that instead.
+
+ This behavior enables portage internals to use the real asyncio.run
+ while remaining compatible with internal code that does not use the
+ real asyncio.run.
+ """
+ try:
+ _loop = _real_asyncio.get_running_loop()
+ except RuntimeError:
+ _loop = None
+
with _thread_weakrefs.lock:
if _thread_weakrefs.pid == portage.getpid():
try:
loop = _thread_weakrefs.loops[threading.get_ident()]
except KeyError:
- return None
- return loop if loop.is_running() else None
+ pass
+ else:
+ if _loop is loop._loop:
+ return loop
+ elif _loop is None:
+ return loop if loop.is_running() else None
+
+    # If _loop is not None here, it means it was probably a temporary
+ # loop created by asyncio.run, so we don't try to cache it, and
+ # just return a temporary wrapper.
+ return None if _loop is None else _AsyncioEventLoop(loop=_loop)
def _thread_weakrefs_atexit():
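
The Lock subclass above gates the loop keyword, which asyncio removed in Python 3.10. The same gate in isolation against the stock asyncio.Lock (the portage version additionally injects its own wrapped loop on older interpreters):

    import asyncio
    import sys

    class CompatLock(asyncio.Lock):
        def __init__(self, **kwargs):
            if sys.version_info >= (3, 10):
                # The loop parameter no longer exists; drop it.
                kwargs.pop("loop", None)
            super().__init__(**kwargs)
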
diff --git a/lib/portage/util/futures/_asyncio/meson.build b/lib/portage/util/futures/_asyncio/meson.build
new file mode 100644
index 000000000..fa0bc4a86
--- /dev/null
+++ b/lib/portage/util/futures/_asyncio/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'streams.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/futures/_asyncio',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/futures/_asyncio/streams.py b/lib/portage/util/futures/_asyncio/streams.py
index 95a4244a6..6b902975c 100644
--- a/lib/portage/util/futures/_asyncio/streams.py
+++ b/lib/portage/util/futures/_asyncio/streams.py
@@ -76,7 +76,7 @@ async def _writer(output_file, content, loop=DeprecationWarning):
while content:
try:
content = content[os.write(fd, content) :]
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EAGAIN:
raise
waiter = loop.create_future()
diff --git a/lib/portage/util/futures/_sync_decorator.py b/lib/portage/util/futures/_sync_decorator.py
index d4df26a9b..436e7c346 100644
--- a/lib/portage/util/futures/_sync_decorator.py
+++ b/lib/portage/util/futures/_sync_decorator.py
@@ -39,14 +39,15 @@ def _sync_methods(obj, loop=None):
loop = asyncio._wrap_loop(loop)
return _ObjectAttrWrapper(
obj,
- lambda attr: _sync_decorator(attr, loop=loop)
- if asyncio.iscoroutinefunction(attr)
- else attr,
+ lambda attr: (
+ _sync_decorator(attr, loop=loop)
+ if asyncio.iscoroutinefunction(attr)
+ else attr
+ ),
)
class _ObjectAttrWrapper(portage.proxy.objectproxy.ObjectProxy):
-
__slots__ = ("_obj", "_attr_wrapper")
def __init__(self, obj, attr_wrapper):
diff --git a/lib/portage/util/futures/compat_coroutine.py b/lib/portage/util/futures/compat_coroutine.py
deleted file mode 100644
index c7e436343..000000000
--- a/lib/portage/util/futures/compat_coroutine.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright 2018-2021 Gentoo Foundation
-# Distributed under the terms of the GNU General Public License v2
-
-import functools
-
-import portage
-
-portage.proxy.lazyimport.lazyimport(
- globals(),
- "portage.util.futures:asyncio",
-)
-
-# A marker for iscoroutinefunction.
-_is_coroutine = object()
-
-
-def _iscoroutinefunction(func):
- """
- Return True if func is a decorated coroutine function
- created with the coroutine decorator for this module.
- """
- return getattr(func, "_is_coroutine", None) is _is_coroutine
-
-
-def coroutine(generator_func):
- """
- A decorator for a generator function that behaves as coroutine function.
- The generator should yield a Future instance in order to wait for it,
- and the result becomes the result of the current yield-expression,
- via the PEP 342 generator send() method.
-
- The decorated function returns a Future which is done when the generator
- is exhausted. The generator can return a value via the coroutine_return
- function.
-
- @param generator_func: A generator function that yields Futures, and
- will receive the result of each Future as the result of the
- corresponding yield-expression.
- @type generator_func: function
- @rtype: function
- @return: A function which calls the given generator function and
- returns a Future that is done when the generator is exhausted.
- """
- # Note that functools.partial does not work for decoration of
- # methods, since it doesn't implement the descriptor protocol.
- # This problem is solve by defining a wrapper function.
- @functools.wraps(generator_func)
- def wrapped(*args, **kwargs):
- return _generator_future(generator_func, *args, **kwargs)
-
- wrapped._is_coroutine = _is_coroutine
- return wrapped
-
-
-def coroutine_return(result=None):
- """
- Terminate the current coroutine and set the result of the associated
- Future.
-
- @param result: of the current coroutine's Future
- @type object
- """
- raise _CoroutineReturnValue(result)
-
-
-def _generator_future(generator_func, *args, **kwargs):
- """
- Call generator_func with the given arguments, and return a Future
- that is done when the resulting generation is exhausted. If a
- keyword argument named 'loop' is given, then it is used instead of
- the default event loop.
- """
- loop = kwargs.get("loop")
- loop = asyncio._wrap_loop(loop)
- result = loop.create_future()
- _GeneratorTask(generator_func(*args, **kwargs), result, loop=loop)
- return result
-
-
-class _CoroutineReturnValue(Exception):
- def __init__(self, result):
- self.result = result
-
-
-class _GeneratorTask:
- """
- Asynchronously executes the generator to completion, waiting for
- the result of each Future that it yields, and sending the result
- to the generator.
- """
-
- def __init__(self, generator, result, loop):
- self._generator = generator
- self._result = result
- self._current_task = None
- self._loop = loop
- result.add_done_callback(self._cancel_callback)
- loop.call_soon(self._next)
-
- def _cancel_callback(self, result):
- if result.cancelled() and self._current_task is not None:
- # The done callback for self._current_task invokes
- # _next in either case here.
- self._current_task.done() or self._current_task.cancel()
-
- def _next(self, previous=None):
- self._current_task = None
- if self._result.cancelled():
- if previous is not None:
- # Consume exceptions, in order to avoid triggering
- # the event loop's exception handler.
- previous.cancelled() or previous.exception()
-
- # This will throw asyncio.CancelledError in the coroutine if
- # there's an opportunity (yield) before the generator raises
- # StopIteration.
- previous = self._result
- try:
- if previous is None:
- future = next(self._generator)
- elif previous.cancelled():
- future = self._generator.throw(asyncio.CancelledError())
- elif previous.exception() is None:
- future = self._generator.send(previous.result())
- else:
- future = self._generator.throw(previous.exception())
-
- except asyncio.CancelledError:
- self._result.cancel()
- except _CoroutineReturnValue as e:
- if not self._result.cancelled():
- self._result.set_result(e.result)
- except StopIteration:
- if not self._result.cancelled():
- self._result.set_result(None)
- except Exception as e:
- if not self._result.cancelled():
- self._result.set_exception(e)
- else:
- self._current_task = asyncio.ensure_future(future, loop=self._loop)
- self._current_task.add_done_callback(self._next)
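The deleted module implemented coroutines as PEP 342 generators that yield Futures. A minimal sketch (illustrative only, not part of this commit) contrasting the removed decorator style with the native async/await form that replaces it throughout the tree:

import asyncio

# Old style, via the removed compat_coroutine module (names as in the
# deleted file above):
#
#     @coroutine
#     def fetch():
#         result = yield some_future   # wait for a Future
#         coroutine_return(result)     # set the wrapping Future's result
#
# Native replacement: 'await' subsumes both the yield-wait and
# coroutine_return(), and a plain 'return' sets the result.

async def fetch():
    return await asyncio.sleep(0, result="done")

print(asyncio.run(fetch()))  # -> done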
diff --git a/lib/portage/util/futures/executor/fork.py b/lib/portage/util/futures/executor/fork.py
index 0c3342944..1e3d01072 100644
--- a/lib/portage/util/futures/executor/fork.py
+++ b/lib/portage/util/futures/executor/fork.py
@@ -1,4 +1,4 @@
-# Copyright 2018 Gentoo Foundation
+# Copyright 2018-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = ("ForkExecutor",)
@@ -6,7 +6,6 @@ __all__ = ("ForkExecutor",)
import collections
import functools
import os
-import sys
import traceback
from portage.util._async.AsyncFunction import AsyncFunction
@@ -41,7 +40,9 @@ class ForkExecutor:
"""
future = self._loop.create_future()
proc = AsyncFunction(
- target=functools.partial(self._guarded_fn_call, fn, args, kwargs)
+ target=functools.partial(self._guarded_fn_call, fn, args, kwargs),
+ # Directly inherit stdio streams and run in the foreground with no log.
+ create_pipe=False,
)
self._submit_queue.append((future, proc))
self._schedule()
@@ -92,9 +93,7 @@ class ForkExecutor:
# distinguish between kill and crash
future.set_exception(
Exception(
- "pid {} crashed or killed, exitcode {}".format(
- proc.pid, proc.returncode
- )
+ f"pid {proc.pid} crashed or killed, exitcode {proc.returncode}"
)
)
@@ -123,7 +122,7 @@ class _ExceptionWithTraceback:
tb = traceback.format_exception(type(exc), exc, exc.__traceback__)
tb = "".join(tb)
self.exc = exc
- self.tb = '\n"""\n%s"""' % tb
+ self.tb = f'\n"""\n{tb}"""'
def __reduce__(self):
return _rebuild_exc, (self.exc, self.tb)
@@ -140,9 +139,3 @@ class _RemoteTraceback(Exception):
def _rebuild_exc(exc, tb):
exc.__cause__ = _RemoteTraceback(tb)
return exc
-
-
-if sys.version_info < (3,):
- # Python 2 does not support exception chaining, so
- # don't bother to preserve the traceback.
- _ExceptionWithTraceback = lambda exc: exc
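A hedged usage sketch of the submit() path patched above, assuming the surface shown in the hunks (submit() returns a Future bound to the executor's loop; shutdown(wait=True) drains pending jobs). With create_pipe=False the forked child now inherits stdio directly:

from portage.util.futures import asyncio as portage_asyncio
from portage.util.futures.executor.fork import ForkExecutor

def square(x):
    # Runs in a forked child via AsyncFunction.
    return x * x

loop = portage_asyncio._wrap_loop()
executor = ForkExecutor(loop=loop)
future = executor.submit(square, 7)
print(loop.run_until_complete(future))  # -> 49
executor.shutdown(wait=True)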
diff --git a/lib/portage/util/futures/executor/meson.build b/lib/portage/util/futures/executor/meson.build
new file mode 100644
index 000000000..fdd7c06f9
--- /dev/null
+++ b/lib/portage/util/futures/executor/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'fork.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/futures/executor',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/futures/extendedfutures.py b/lib/portage/util/futures/extendedfutures.py
index c23feafb5..b772698b9 100644
--- a/lib/portage/util/futures/extendedfutures.py
+++ b/lib/portage/util/futures/extendedfutures.py
@@ -37,7 +37,7 @@ class ExtendedFuture(Future):
set.
"""
self.default_result = default_result
- super(ExtendedFuture, self).__init__()
+ super().__init__()
self.set = self.set_result
def set_result(self, data, ignore_InvalidState=False):
@@ -48,14 +48,14 @@ class ExtendedFuture(Future):
"""
if ignore_InvalidState:
try:
- super(ExtendedFuture, self).set_result(data)
+ super().set_result(data)
except InvalidStateError:
pass
else:
- super(ExtendedFuture, self).set_result(data)
+ super().set_result(data)
def get(self, default=UNSET_CONST.result()):
- """Convienience function to wrap result() but adds an optional
+ """Convenience function to wrap result() but adds an optional
default value to return rather than raise an InvalidStateError
@param default: Optional override for the classwide default_result
@@ -77,12 +77,12 @@ class ExtendedFuture(Future):
def exception(self):
try:
- return super(ExtendedFuture, self).exception(timeout=0)
+ return super().exception(timeout=0)
except concurrent.futures.TimeoutError:
raise InvalidStateError
def result(self):
try:
- return super(ExtendedFuture, self).result(timeout=0)
+ return super().result(timeout=0)
except concurrent.futures.TimeoutError:
raise InvalidStateError
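A minimal usage sketch of the surface touched above: get() falls back to the stored default instead of raising InvalidStateError the way result() does on an unset future (the default_result constructor argument is taken from the __init__ hunk above):

from portage.util.futures.extendedfutures import ExtendedFuture

f = ExtendedFuture(default_result="fallback")
print(f.get())       # -> fallback (nothing set yet)
f.set("computed")    # alias for set_result(), bound in __init__ above
print(f.get())       # -> computed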
diff --git a/lib/portage/util/futures/iter_completed.py b/lib/portage/util/futures/iter_completed.py
index f4b4e5e0b..5ee0b48c7 100644
--- a/lib/portage/util/futures/iter_completed.py
+++ b/lib/portage/util/futures/iter_completed.py
@@ -34,8 +34,7 @@ def iter_completed(futures, max_jobs=None, max_load=None, loop=None):
for future_done_set in async_iter_completed(
futures, max_jobs=max_jobs, max_load=max_load, loop=loop
):
- for future in loop.run_until_complete(future_done_set):
- yield future
+ yield from loop.run_until_complete(future_done_set)
def async_iter_completed(futures, max_jobs=None, max_load=None, loop=None):
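A short sketch of iter_completed() after the `yield from` cleanup above: it drives the loop and yields each future as it completes. Passing coroutine objects is assumed to be accepted here (they are wrapped into futures internally):

import asyncio

from portage.util.futures import asyncio as portage_asyncio
from portage.util.futures.iter_completed import iter_completed

async def work(n):
    await asyncio.sleep(0)
    return n * n

loop = portage_asyncio._wrap_loop()
for future in iter_completed((work(n) for n in range(3)),
                             max_jobs=2, loop=loop):
    print(future.result())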
diff --git a/lib/portage/util/futures/meson.build b/lib/portage/util/futures/meson.build
new file mode 100644
index 000000000..d561fa312
--- /dev/null
+++ b/lib/portage/util/futures/meson.build
@@ -0,0 +1,16 @@
+py.install_sources(
+ [
+ 'extendedfutures.py',
+ 'futures.py',
+ 'iter_completed.py',
+ 'retry.py',
+ 'unix_events.py',
+ '_sync_decorator.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/futures',
+ pure : not native_extensions
+)
+
+subdir('executor')
+subdir('_asyncio')
diff --git a/lib/portage/util/futures/retry.py b/lib/portage/util/futures/retry.py
index 496bfb562..a8a3ad4fb 100644
--- a/lib/portage/util/futures/retry.py
+++ b/lib/portage/util/futures/retry.py
@@ -84,7 +84,7 @@ def _retry(
reraise,
func,
*args,
- **kwargs
+ **kwargs,
):
"""
Retry coroutine, used to implement retry decorator.
diff --git a/lib/portage/util/futures/unix_events.py b/lib/portage/util/futures/unix_events.py
index f2f01f0c6..374497010 100644
--- a/lib/portage/util/futures/unix_events.py
+++ b/lib/portage/util/futures/unix_events.py
@@ -23,7 +23,6 @@ if hasattr(os, "set_blocking"):
def _set_nonblocking(fd):
os.set_blocking(fd, False)
-
else:
def _set_nonblocking(fd):
@@ -71,11 +70,11 @@ class _AsyncioEventLoopPolicy(_PortageEventLoopPolicy):
def get_event_loop(self):
self._check_recursion()
- return super(_AsyncioEventLoopPolicy, self).get_event_loop()
+ return super().get_event_loop()
def get_child_watcher(self):
self._check_recursion()
- return super(_AsyncioEventLoopPolicy, self).get_child_watcher()
+ return super().get_child_watcher()
DefaultEventLoopPolicy = _AsyncioEventLoopPolicy
diff --git a/lib/portage/util/hooks.py b/lib/portage/util/hooks.py
index 204ad4122..cbb15f123 100644
--- a/lib/portage/util/hooks.py
+++ b/lib/portage/util/hooks.py
@@ -46,7 +46,7 @@ def perform_hooks(rel_directory, *argv, prefix="/"):
if retval != portage.os.EX_OK:
writemsg_level(
- " %s Spawn failed for: %s, %s\n" % (bad("*"), name, filepath),
+ f" {bad('*')} Spawn failed for: {name}, {filepath}\n",
level=logging.ERROR,
noiselevel=-1,
)
diff --git a/lib/portage/util/install_mask.py b/lib/portage/util/install_mask.py
index 2b65fc230..638c150ff 100644
--- a/lib/portage/util/install_mask.py
+++ b/lib/portage/util/install_mask.py
@@ -171,22 +171,26 @@ def install_mask_dir(base_dir, install_mask, onerror=None):
dir_stack = []
# Remove masked files.
- for parent, dirs, files in os.walk(base_dir, onerror=onerror):
+ todo = [base_dir]
+ while todo:
+ parent = todo.pop()
try:
parent = _unicode_decode(parent, errors="strict")
except UnicodeDecodeError:
continue
+
dir_stack.append(parent)
- for fname in files:
+ for entry in os.scandir(parent):
try:
- fname = _unicode_decode(fname, errors="strict")
+ abs_path = _unicode_decode(entry.path, errors="strict")
except UnicodeDecodeError:
continue
- abs_path = os.path.join(parent, fname)
- relative_path = abs_path[base_dir_len:]
- if install_mask.match(relative_path):
+
+ if entry.is_dir(follow_symlinks=False):
+ todo.append(entry.path)
+ elif install_mask.match(abs_path[base_dir_len:]):
try:
- os.unlink(abs_path)
+ os.unlink(entry.path)
except OSError as e:
onerror(e)
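The hunk above trades os.walk() for an explicit stack over os.scandir() so that masked files are unlinked as they are encountered. A standalone sketch of the same traversal pattern, with match() standing in for InstallMask.match():

import os

def iter_masked(base_dir, match):
    base_len = len(base_dir)
    todo = [base_dir]
    while todo:
        parent = todo.pop()
        for entry in os.scandir(parent):
            if entry.is_dir(follow_symlinks=False):
                todo.append(entry.path)  # descend into it later
            elif match(entry.path[base_len:]):
                yield entry.path  # relative path matched the mask

for path in iter_masked("/tmp", lambda rel: rel.endswith(".pyc")):
    print(path)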
diff --git a/lib/portage/util/iterators/MultiIterGroupBy.py b/lib/portage/util/iterators/MultiIterGroupBy.py
index 241b98d57..509cbf44c 100644
--- a/lib/portage/util/iterators/MultiIterGroupBy.py
+++ b/lib/portage/util/iterators/MultiIterGroupBy.py
@@ -20,7 +20,6 @@ class MultiIterGroupBy:
self._key = key
def __iter__(self):
-
trackers = []
for iterator in self._iterators:
trackers.append(_IteratorTracker(iterator))
@@ -34,9 +33,7 @@ class MultiIterGroupBy:
min_progress = None
while trackers:
-
for tracker in trackers:
-
if tracker.current is not None and tracker.current != min_progress:
# The trackers are sorted by progress, so the
# remaining trackers are guaranteed to have
@@ -80,11 +77,9 @@ class MultiIterGroupBy:
class _IteratorTracker:
-
__slots__ = ("current", "iterator")
def __init__(self, iterator):
-
self.iterator = iterator
self.current = None
diff --git a/lib/portage/util/iterators/meson.build b/lib/portage/util/iterators/meson.build
new file mode 100644
index 000000000..d9a7d68e7
--- /dev/null
+++ b/lib/portage/util/iterators/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'MultiIterGroupBy.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util/iterators',
+ pure : not native_extensions
+)
diff --git a/lib/portage/util/lafilefixer.py b/lib/portage/util/lafilefixer.py
index 59df30b4b..870c6d97b 100644
--- a/lib/portage/util/lafilefixer.py
+++ b/lib/portage/util/lafilefixer.py
@@ -20,7 +20,7 @@ from portage.exception import InvalidData
# are ignored by libtool (last one wins), but we treat it as error (like
# lafilefixer does).
# What it does:
-# * Replaces all .la files with absolut paths in dependency_libs with
+# * Replaces all .la files with absolute paths in dependency_libs with
# corresponding -l* and -L* entries
# (/usr/lib64/libfoo.la -> -L/usr/lib64 -lfoo)
# * Moves various flags (see flag_re below) to inherited_linker_flags,
@@ -40,8 +40,8 @@ inh_link_flags_re = re.compile(b"inherited_linker_flags='(?P<value>[^']*)'$")
# replace 'X11R6/lib' and 'local/lib' with 'lib'; no idea what this is about.
X11_local_sub = re.compile(b"X11R6/lib|local/lib")
# get rid of the '..'
-pkgconfig_sub1 = re.compile(br"usr/lib[^/]*/pkgconfig/\.\./\.\.")
-pkgconfig_sub2 = re.compile(br"(?P<usrlib>usr/lib[^/]*)/pkgconfig/\.\.")
+pkgconfig_sub1 = re.compile(rb"usr/lib[^/]*/pkgconfig/\.\./\.\.")
+pkgconfig_sub2 = re.compile(rb"(?P<usrlib>usr/lib[^/]*)/pkgconfig/\.\.")
# detect flags that should go into inherited_linker_flags instead of dependency_libs
flag_re = re.compile(
@@ -135,7 +135,7 @@ def rewrite_lafile(contents):
ladir = X11_local_sub.sub(b"lib", ladir)
ladir = pkgconfig_sub1.sub(b"usr", ladir)
- ladir = pkgconfig_sub2.sub(br"\g<usrlib>", ladir)
+ ladir = pkgconfig_sub2.sub(rb"\g<usrlib>", ladir)
if ladir not in libladir:
libladir.append(ladir)
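The byte regexes above normalize pkgconfig-relative paths in dependency_libs; a quick standalone check of the two substitutions:

import re

pkgconfig_sub1 = re.compile(rb"usr/lib[^/]*/pkgconfig/\.\./\.\.")
pkgconfig_sub2 = re.compile(rb"(?P<usrlib>usr/lib[^/]*)/pkgconfig/\.\.")

print(pkgconfig_sub1.sub(b"usr", b"/usr/lib64/pkgconfig/../.."))
# -> b'/usr'
print(pkgconfig_sub2.sub(rb"\g<usrlib>", b"/usr/lib64/pkgconfig/.."))
# -> b'/usr/lib64'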
diff --git a/lib/portage/util/listdir.py b/lib/portage/util/listdir.py
index e7c436282..5d1765ced 100644
--- a/lib/portage/util/listdir.py
+++ b/lib/portage/util/listdir.py
@@ -29,7 +29,7 @@ def cacheddir(
pathstat = os.stat(mypath)
if not stat.S_ISDIR(pathstat.st_mode):
raise DirectoryNotFound(mypath)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno == PermissionDenied.errno:
raise PermissionDenied(mypath)
del e
@@ -39,7 +39,7 @@ def cacheddir(
else:
try:
fpaths = os.listdir(mypath)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.EACCES:
raise
del e
@@ -60,7 +60,7 @@ def cacheddir(
ftype.append(2)
else:
ftype.append(3)
- except (IOError, OSError):
+ except OSError:
ftype.append(3)
if ignorelist or ignorecvs:
diff --git a/lib/portage/util/locale.py b/lib/portage/util/locale.py
index 8fb6cb6eb..f45d76176 100644
--- a/lib/portage/util/locale.py
+++ b/lib/portage/util/locale.py
@@ -1,5 +1,4 @@
-# -*- coding:utf-8 -*-
-# Copyright 2015-2020 Gentoo Authors
+# Copyright 2015-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
"""
@@ -10,13 +9,15 @@ locale.
import locale
import logging
-import os
+import multiprocessing
+import sys
import textwrap
import traceback
import portage
from portage.util import _unicode_decode, writemsg_level
-from portage.util._ctypes import find_library, LoadLibrary
+from portage.util._ctypes import load_libc
+from portage.util.futures import asyncio
locale_categories = (
@@ -42,15 +43,9 @@ def _check_locale(silent):
"""
The inner locale check function.
"""
- try:
- from portage.util import libc
- except ImportError:
- libc_fn = find_library("c")
- if libc_fn is None:
- return None
- libc = LoadLibrary(libc_fn)
- if libc is None:
- return None
+ (libc, _) = load_libc()
+ if libc is None:
+ return None
lc = list(range(ord("a"), ord("z") + 1))
uc = list(range(ord("A"), ord("Z") + 1))
@@ -78,32 +73,53 @@ def _check_locale(silent):
if uc != ruc:
msg.extend(
[
- " %s -> %s" % (chars(lc), chars(ruc)),
+ f" {chars(lc)} -> {chars(ruc)}",
" %28s: %s" % ("expected", chars(uc)),
]
)
if lc != rlc:
msg.extend(
[
- " %s -> %s" % (chars(uc), chars(rlc)),
+ f" {chars(uc)} -> {chars(rlc)}",
" %28s: %s" % ("expected", chars(lc)),
]
)
writemsg_level(
- "".join(["!!! %s\n" % l for l in msg]), level=logging.ERROR, noiselevel=-1
+ "".join([f"!!! {l}\n" for l in msg]), level=logging.ERROR, noiselevel=-1
)
return False
return True
+def _set_and_check_locale(silent, env, mylocale):
+ try:
+ if env is not None:
+ try:
+ locale.setlocale(locale.LC_CTYPE, mylocale)
+ except locale.Error:
+ sys.exit(2)
+
+ ret = _check_locale(silent)
+ if ret is None:
+ sys.exit(2)
+ else:
+ sys.exit(0 if ret else 1)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(2)
+
+
def check_locale(silent=False, env=None):
"""
Check whether the locale is sane. Returns True if it is, prints
warning and returns False if it is not. Returns None if the check
can not be executed due to platform limitations.
"""
+ return asyncio.run(async_check_locale(silent=silent, env=env))
+
+async def async_check_locale(silent=False, env=None):
if env is not None:
for v in ("LC_ALL", "LC_CTYPE", "LANG"):
if v in env:
@@ -117,30 +133,17 @@ def check_locale(silent=False, env=None):
except KeyError:
pass
- pid = os.fork()
- if pid == 0:
- portage._ForkWatcher.hook(portage._ForkWatcher)
- try:
- if env is not None:
- try:
- locale.setlocale(locale.LC_CTYPE, portage._native_string(mylocale))
- except locale.Error:
- os._exit(2)
-
- ret = _check_locale(silent)
- if ret is None:
- os._exit(2)
- else:
- os._exit(0 if ret else 1)
- except Exception:
- traceback.print_exc()
- os._exit(2)
-
- pid2, ret = os.waitpid(pid, 0)
- assert pid == pid2
+ proc = multiprocessing.Process(
+ target=_set_and_check_locale,
+ args=(silent, env, None if env is None else portage._native_string(mylocale)),
+ )
+ proc.start()
+ proc = portage.process.MultiprocessingProcess(proc)
+ await proc.wait()
+
pyret = None
- if os.WIFEXITED(ret):
- ret = os.WEXITSTATUS(ret)
+ if proc.returncode >= 0:
+ ret = proc.returncode
if ret != 2:
pyret = ret == 0
@@ -149,13 +152,22 @@ def check_locale(silent=False, env=None):
return pyret
+async_check_locale.__doc__ = check_locale.__doc__
+async_check_locale.__doc__ += """
+ This function is a coroutine.
+"""
+
+
def split_LC_ALL(env):
"""
Replace LC_ALL with split-up LC_* variables if it is defined.
Works on the passed environment (or settings instance).
"""
lc_all = env.get("LC_ALL")
- if lc_all is not None:
+ if lc_all:
for c in locale_categories:
env[c] = lc_all
- del env["LC_ALL"]
+ # Set empty so that config.reset() can restore LC_ALL state,
+ # since del can permanently delete variables which are not
+ # stored in the config's backupenv.
+ env["LC_ALL"] = ""
diff --git a/lib/portage/util/meson.build b/lib/portage/util/meson.build
new file mode 100644
index 000000000..8a60617d6
--- /dev/null
+++ b/lib/portage/util/meson.build
@@ -0,0 +1,49 @@
+py.install_sources(
+ [
+ 'ExtractKernelVersion.py',
+ 'SlotObject.py',
+ 'backoff.py',
+ 'bin_entry_point.py',
+ 'changelog.py',
+ 'compression_probe.py',
+ 'configparser.py',
+ 'cpuinfo.py',
+ 'digraph.py',
+ 'env_update.py',
+ 'file_copy.py',
+ 'formatter.py',
+ 'hooks.py',
+ 'install_mask.py',
+ 'lafilefixer.py',
+ 'listdir.py',
+ 'locale.py',
+ 'movefile.py',
+ 'mtimedb.py',
+ 'netlink.py',
+ 'path.py',
+ 'shelve.py',
+ 'socks5.py',
+ 'whirlpool.py',
+ 'writeable_check.py',
+ '_compare_files.py',
+ '_ctypes.py',
+ '_desktop_entry.py',
+ '_get_vm_info.py',
+ '_info_files.py',
+ '_path.py',
+ '_pty.py',
+ '_urlopen.py',
+ '_xattr.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/util',
+ pure : not native_extensions
+)
+
+subdir('elf')
+subdir('endian')
+subdir('futures')
+subdir('iterators')
+subdir('_async')
+subdir('_dyn_libs')
+subdir('_eventloop')
diff --git a/lib/portage/util/movefile.py b/lib/portage/util/movefile.py
index ddafe5571..75100a3ac 100644
--- a/lib/portage/util/movefile.py
+++ b/lib/portage/util/movefile.py
@@ -38,7 +38,6 @@ _xattr_excluder_cache = {}
def _get_xattr_excluder(pattern):
-
try:
value = _xattr_excluder_cache[pattern]
except KeyError:
@@ -49,11 +48,9 @@ def _get_xattr_excluder(pattern):
class _xattr_excluder:
-
__slots__ = ("_pattern_split",)
def __init__(self, pattern):
-
if pattern is None:
self._pattern_split = None
else:
@@ -65,7 +62,6 @@ class _xattr_excluder:
self._pattern_split = tuple(pattern)
def __call__(self, attr):
-
if self._pattern_split is None:
return False
@@ -81,7 +77,7 @@ def _copyxattr(src, dest, exclude=None):
"""Copy the extended attributes from |src| to |dest|"""
try:
attrs = xattr.list(src)
- except (OSError, IOError) as e:
+ except OSError as e:
if e.errno != OperationNotSupported.errno:
raise
attrs = ()
@@ -97,7 +93,7 @@ def _copyxattr(src, dest, exclude=None):
try:
xattr.set(dest, attr, xattr.get(src, attr))
raise_exception = False
- except (OSError, IOError):
+ except OSError:
raise_exception = True
if raise_exception:
raise OperationNotSupported(
@@ -109,6 +105,36 @@ def _copyxattr(src, dest, exclude=None):
)
+def _cmpxattr(src: bytes, dest: bytes, exclude=None) -> bool:
+ """
+ Compares extended attributes between |src| and |dest| and returns True
+ if they are equal or xattrs are not supported, False otherwise.
+ Assumes all given paths are UTF-8 encoded.
+ """
+ try:
+ src_attrs = xattr.list(src)
+ dest_attrs = xattr.list(dest)
+ except OSError as e:
+ if e.errno != OperationNotSupported.errno:
+ raise
+ return True
+
+ if src_attrs:
+ if exclude is not None and isinstance(src_attrs[0], bytes):
+ exclude = exclude.encode(_encodings["fs"])
+ exclude = _get_xattr_excluder(exclude)
+
+ src_attrs = {attr for attr in src_attrs if not exclude(attr)}
+ dest_attrs = {attr for attr in dest_attrs if not exclude(attr)}
+ if src_attrs != dest_attrs:
+ return False
+
+ for attr in src_attrs:
+ if xattr.get(src, attr) != xattr.get(dest, attr):
+ return False
+ return True
+
+
def movefile(
src,
dest,
@@ -149,15 +175,15 @@ def movefile(
raise
except Exception as e:
writemsg(
- "!!! %s\n" % _("Stating source file failed... movefile()"), noiselevel=-1
+ f"!!! {_('Stating source file failed... movefile()')}\n", noiselevel=-1
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
destexists = 1
try:
dstat = os.lstat(dest)
- except (OSError, IOError):
+ except OSError:
dstat = os.lstat(os.path.dirname(dest))
destexists = 0
@@ -171,7 +197,7 @@ def movefile(
bsd_chflags.chflags(os.path.dirname(dest), 0)
if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
+ if not stat.S_ISLNK(sstat[stat.ST_MODE]) and stat.S_ISLNK(dstat[stat.ST_MODE]):
try:
os.unlink(dest)
destexists = 0
@@ -185,8 +211,18 @@ def movefile(
target = os.readlink(src)
if mysettings and "D" in mysettings and target.startswith(mysettings["D"]):
target = target[len(mysettings["D"]) - 1 :]
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
+ # Atomically update the path if it exists.
+ try:
+ os.rename(src, dest)
+ return sstat.st_mtime_ns
+ except OSError:
+ # If it failed due to a cross-device link, fall through below.
+ # Clear the target first so we can create it.
+ try:
+ os.unlink(dest)
+ except FileNotFoundError:
+ pass
+
try:
if selinux_enabled:
selinux.symlink(target, dest, src)
@@ -222,11 +258,9 @@ def movefile(
except SystemExit as e:
raise
except Exception as e:
- writemsg(
- "!!! %s\n" % _("failed to properly create symlink:"), noiselevel=-1
- )
- writemsg("!!! %s -> %s\n" % (dest, target), noiselevel=-1)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {_('failed to properly create symlink:')}\n", noiselevel=-1)
+ writemsg(f"!!! {dest} -> {target}\n", noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
hardlinked = False
@@ -236,9 +270,7 @@ def movefile(
# and then use os.rename() to replace the destination.
if hardlink_candidates:
head, tail = os.path.split(dest)
- hardlink_tmp = os.path.join(
- head, ".%s._portage_merge_.%s" % (tail, portage.getpid())
- )
+ hardlink_tmp = os.path.join(head, f".{tail}._portage_merge_.{portage.getpid()}")
try:
os.unlink(hardlink_tmp)
except OSError as e:
@@ -248,7 +280,7 @@ def movefile(
% (hardlink_tmp,),
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
del e
for hardlink_src in hardlink_candidates:
@@ -264,7 +296,7 @@ def movefile(
_("!!! Failed to rename %s to %s\n") % (hardlink_tmp, dest),
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
hardlinked = True
try:
@@ -287,12 +319,11 @@ def movefile(
if e.errno != errno.EXDEV:
# Some random error.
writemsg(
- "!!! %s\n"
- % _("Failed to move %(src)s to %(dest)s")
+ f"!!! {_('Failed to move %(src)s to %(dest)s')}\n"
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
# EXDEV: either a 'bind' mount or an actual cross-device rename.
if renamefailed:
@@ -322,19 +353,18 @@ def movefile(
)
msg = textwrap.wrap(msg, 65)
for line in msg:
- writemsg("!!! %s\n" % (line,), noiselevel=-1)
+ writemsg(f"!!! {line}\n", noiselevel=-1)
raise
_rename(dest_tmp_bytes, dest_bytes)
_os.unlink(src_bytes)
success = True
except Exception as e:
writemsg(
- "!!! %s\n"
- % _("copy %(src)s -> %(dest)s failed.")
+ f"!!! {_('copy %(src)s -> %(dest)s failed.')}\n"
% {"src": src, "dest": dest},
noiselevel=-1,
)
- writemsg("!!! %s\n" % (e,), noiselevel=-1)
+ writemsg(f"!!! {e}\n", noiselevel=-1)
return None
finally:
if not success:
@@ -355,7 +385,7 @@ def movefile(
},
noiselevel=-1,
)
- writemsg("!!! %s\n" % a, noiselevel=-1)
+ writemsg(f"!!! {a}\n", noiselevel=-1)
return None # failure
# In Python <3.3 always use stat_obj[stat.ST_MTIME] for the integral timestamp
@@ -385,8 +415,8 @@ def movefile(
newmtime = os.stat(dest).st_mtime_ns
except OSError as e:
writemsg(_("!!! Failed to stat in movefile()\n"), noiselevel=-1)
- writemsg("!!! %s\n" % dest, noiselevel=-1)
- writemsg("!!! %s\n" % str(e), noiselevel=-1)
+ writemsg(f"!!! {dest}\n", noiselevel=-1)
+ writemsg(f"!!! {str(e)}\n", noiselevel=-1)
return None
if bsd_chflags:
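The symlink hunk above now attempts one atomic os.rename() and only falls back to unlink-plus-recreate when rename fails (for example EXDEV across devices). A simplified standalone sketch of that update pattern, without the SELinux branch:

import os

def replace_symlink(src, dest, target):
    try:
        # Atomic when src and dest live on the same device.
        os.rename(src, dest)
    except OSError:
        try:
            os.unlink(dest)  # clear the way for a fresh symlink
        except FileNotFoundError:
            pass
        os.symlink(target, dest)
        os.unlink(src)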
diff --git a/lib/portage/util/mtimedb.py b/lib/portage/util/mtimedb.py
index 7a7fe6784..cbd233dd1 100644
--- a/lib/portage/util/mtimedb.py
+++ b/lib/portage/util/mtimedb.py
@@ -23,7 +23,23 @@ from portage.localization import _
from portage.util import apply_secpass_permissions, atomic_ofstream, writemsg
+_MTIMEDBKEYS = {
+ "info",
+ "ldpath",
+ "resume",
+ "resume_backup",
+ "starttime",
+ "updates",
+ "version",
+}
+
+
class MtimeDB(dict):
+ """The MtimeDB class is used to interact with a file storing the
+ current resume lists.
+ It is a subclass of ``dict`` and it reads from/writes to JSON, by
+ default, although it can be configured to use ``pickle``.
+ """
# JSON read support has been available since portage-2.1.10.49.
_json_write = True
@@ -35,24 +51,32 @@ class MtimeDB(dict):
self.filename = filename
self._load(filename)
+ @property
+ def is_readonly(self):
+ if self.filename is None:
+ return True
+ else:
+ return False
+
+ def make_readonly(self):
+ self.filename = None
+
def _load(self, filename):
f = None
content = None
try:
f = open(_unicode_encode(filename), "rb")
content = f.read()
- except EnvironmentError as e:
+ except OSError as e:
if getattr(e, "errno", None) in (errno.ENOENT, errno.EACCES):
pass
else:
- writemsg(
- _("!!! Error loading '%s': %s\n") % (filename, e), noiselevel=-1
- )
+ writemsg(_(f"!!! Error loading '{filename}': {e}\n"), noiselevel=-1)
finally:
if f is not None:
f.close()
- d = None
+ d = {}
if content:
try:
d = json.loads(
@@ -74,12 +98,7 @@ class MtimeDB(dict):
except SystemExit:
raise
except Exception:
- writemsg(
- _("!!! Error loading '%s': %s\n") % (filename, e), noiselevel=-1
- )
-
- if d is None:
- d = {}
+ writemsg(_(f"!!! Error loading '{filename}': {e}\n"), noiselevel=-1)
if "old" in d:
d["updates"] = d["old"]
@@ -92,50 +111,41 @@ class MtimeDB(dict):
for k in ("info", "ldpath", "updates"):
d.setdefault(k, {})
- mtimedbkeys = set(
- (
- "info",
- "ldpath",
- "resume",
- "resume_backup",
- "starttime",
- "updates",
- "version",
- )
- )
-
- for k in list(d):
- if k not in mtimedbkeys:
- writemsg(_("Deleting invalid mtimedb key: %s\n") % str(k))
- del d[k]
+ for k in set(d.keys()) - _MTIMEDBKEYS:
+ writemsg(_(f"Deleting invalid mtimedb key: {k}\n"))
+ del d[k]
self.update(d)
self._clean_data = copy.deepcopy(d)
def commit(self):
- if not self.filename:
+ if self.is_readonly:
return
d = {}
d.update(self)
# Only commit if the internal state has changed.
if d != self._clean_data:
- d["version"] = str(portage.VERSION)
- try:
- f = atomic_ofstream(self.filename, mode="wb")
- except EnvironmentError:
- pass
- else:
- if self._json_write:
- f.write(
- _unicode_encode(
- json.dumps(d, **self._json_write_opts),
- encoding=_encodings["repo.content"],
- errors="strict",
- )
+ self.__write_to_disk(d)
+
+ def __write_to_disk(self, d):
+ """Private method used by the ``commit`` method."""
+ d["version"] = str(portage.VERSION)
+ try:
+ f = atomic_ofstream(self.filename, mode="wb")
+ except OSError:
+ pass
+ else:
+ if self._json_write:
+ f.write(
+ _unicode_encode(
+ json.dumps(d, **self._json_write_opts),
+ encoding=_encodings["repo.content"],
+ errors="strict",
)
- else:
- pickle.dump(d, f, protocol=2)
- f.close()
- apply_secpass_permissions(
- self.filename, uid=uid, gid=portage_gid, mode=0o644
)
- self._clean_data = copy.deepcopy(d)
+ else:
+ pickle.dump(d, f, protocol=2)
+ f.close()
+ apply_secpass_permissions(
+ self.filename, uid=uid, gid=portage_gid, mode=0o644
+ )
+ self._clean_data = copy.deepcopy(d)
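A brief usage sketch of the refactored MtimeDB surface: loading happens in the constructor, commit() writes only when the dict diverged from the last clean copy, and make_readonly() turns later commits into no-ops. The path is the conventional location, shown here only for illustration:

from portage.util.mtimedb import MtimeDB

db = MtimeDB("/var/cache/edb/mtimedb")  # conventional path, assumed here
db["resume"] = {"mergelist": []}
db.commit()          # serializes to JSON since the contents changed
db.make_readonly()
assert db.is_readonly
db.commit()          # no-op now that filename is None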
diff --git a/lib/portage/util/netlink.py b/lib/portage/util/netlink.py
index b32010654..508c92676 100644
--- a/lib/portage/util/netlink.py
+++ b/lib/portage/util/netlink.py
@@ -65,7 +65,7 @@ class RtNetlink:
self.addr = (0, 0)
try:
self.sock.bind(self.addr)
- except socket.error:
+ except OSError:
self.sock.close()
raise
diff --git a/lib/portage/util/shelve.py b/lib/portage/util/shelve.py
index 6100c8719..85d5829f3 100644
--- a/lib/portage/util/shelve.py
+++ b/lib/portage/util/shelve.py
@@ -5,6 +5,8 @@ import logging
import pickle
import shelve
+logger = logging.getLogger(__name__)
+
def open_shelve(db_file, flag="r"):
"""
@@ -36,7 +38,7 @@ def dump(args):
try:
value = src[key]
except KeyError:
- logging.exception(key)
+ logger.exception(key)
continue
pickle.dump((key, value), dest)
finally:
diff --git a/lib/portage/util/socks5.py b/lib/portage/util/socks5.py
index 820240571..c32ba7767 100644
--- a/lib/portage/util/socks5.py
+++ b/lib/portage/util/socks5.py
@@ -1,17 +1,17 @@
# SOCKSv5 proxy manager for network-sandbox
-# Copyright 2015-2021 Gentoo Authors
+# Copyright 2015-2024 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
+import asyncio
import errno
import os
-import signal
import socket
+import portage
import portage.data
from portage import _python_interpreter
from portage.data import portage_gid, portage_uid, userpriv_groups
from portage.process import atexit_register, spawn
-from portage.util.futures import asyncio
class ProxyManager:
@@ -22,7 +22,8 @@ class ProxyManager:
def __init__(self):
self.socket_path = None
- self._pids = []
+ self._proc = None
+ self._proc_waiter = None
def start(self, settings):
"""
@@ -51,22 +52,25 @@ class ProxyManager:
spawn_kwargs.update(
uid=portage_uid, gid=portage_gid, groups=userpriv_groups, umask=0o077
)
- self._pids = spawn(
+ self._proc = spawn(
[_python_interpreter, server_bin, self.socket_path],
- returnpid=True,
- **spawn_kwargs
+ returnproc=True,
+ **spawn_kwargs,
)
- def stop(self):
+ async def stop(self):
"""
- Stop the SOCKSv5 server.
+ Stop the SOCKSv5 server. This method is a coroutine.
"""
- for p in self._pids:
- os.kill(p, signal.SIGINT)
- os.waitpid(p, 0)
+ if self._proc is not None:
+ self._proc.terminate()
+ if self._proc_waiter is None:
+ self._proc_waiter = asyncio.ensure_future(self._proc.wait())
+ await self._proc_waiter
self.socket_path = None
- self._pids = []
+ self._proc = None
+ self._proc_waiter = None
def is_running(self):
"""
@@ -80,22 +84,17 @@ class ProxyManager:
"""
Wait for the proxy socket to become ready. This method is a coroutine.
"""
+ if self._proc_waiter is None:
+ self._proc_waiter = asyncio.ensure_future(self._proc.wait())
while True:
- try:
- wait_retval = os.waitpid(self._pids[0], os.WNOHANG)
- except OSError as e:
- if e.errno == errno.EINTR:
- continue
- raise
-
- if wait_retval is not None and wait_retval != (0, 0):
+ if self._proc_waiter.done():
raise OSError(3, "No such process")
try:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.socket_path)
- except EnvironmentError as e:
+ except OSError as e:
if e.errno != errno.ENOENT:
raise
await asyncio.sleep(0.2)
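With the changes above, stop() is a coroutine that terminates the spawned server process and awaits a cached waiter, so calling it after ready() reuses the same wait. A hedged usage sketch; the settings object and the surrounding sandbox setup are elided, and ready() is assumed from the hunk's docstring context:

import asyncio

from portage.util.socks5 import ProxyManager

async def main(settings):
    manager = ProxyManager()
    manager.start(settings)   # spawns the server, sets socket_path
    await manager.ready()     # polls the UNIX socket until it accepts
    try:
        pass                  # network-sandboxed work goes through the proxy
    finally:
        await manager.stop()

# asyncio.run(main(settings)), where settings is a portage config instance.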
diff --git a/lib/portage/util/whirlpool.py b/lib/portage/util/whirlpool.py
index de344d8eb..62fcfda53 100644
--- a/lib/portage/util/whirlpool.py
+++ b/lib/portage/util/whirlpool.py
@@ -1,3 +1,6 @@
+# Copyright 2022 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
# whirlpool.py - pure Python implementation of the Whirlpool algorithm.
# Bjorn Edstrom <be@bjrn.se> 16 december 2007.
##
@@ -27,21 +30,36 @@
# pylint: disable=mixed-indentation
+import warnings
+
+from portage.localization import _
+
+try:
+ from ._whirlpool import Whirlpool as WhirlpoolExt
+except ImportError:
+ WhirlpoolExt = None
+
+
# block_size = 64
digest_size = 64
digestsize = 64
-class Whirlpool:
+class PyWhirlpool:
"""Return a new Whirlpool object. An optional string argument
may be provided; if present, this string will be automatically
hashed."""
- def __init__(self, arg=None):
+ def __init__(self, arg=b""):
+ warnings.warn(
+ _(
+ "The last-resort unaccelerated Whirlpool implementation is "
+ "being used. It is known to be absurdly slow. Please report "
+ "that the Whirlpool hash is deprecated to the repository owner."
+ )
+ )
self.ctx = WhirlpoolStruct()
- if arg:
- self.update(arg)
- self.digest_status = 0
+ self.update(arg)
def update(self, arg):
"""update(arg)"""
@@ -60,7 +78,7 @@ class Whirlpool:
dig = self.digest()
tempstr = ""
for d in dig:
- xxx = "%02x" % (ord(d))
+ xxx = f"{ord(d):02x}"
tempstr = tempstr + xxx
return tempstr
@@ -71,11 +89,36 @@ class Whirlpool:
return copy.deepcopy(self)
-def new(init=None):
+class CWhirlpool:
"""Return a new Whirlpool object. An optional string argument
may be provided; if present, this string will be automatically
hashed."""
- return Whirlpool(init)
+
+ is_available = WhirlpoolExt is not None
+
+ def __init__(self, arg=b""):
+ self.obj = WhirlpoolExt()
+ self.dig = None
+ self.update(arg)
+
+ def update(self, arg):
+ if self.dig is not None:
+ raise RuntimeError("Whirlpool object already finalized")
+ self.obj.update(arg)
+
+ def digest(self):
+ if self.dig is None:
+ self.dig = self.obj.digest()
+ return self.dig
+
+ def hexdigest(self):
+ """hexdigest()"""
+ dig = self.digest()
+ tempstr = ""
+ for d in dig:
+ xxx = f"{d:02x}"
+ tempstr = tempstr + xxx
+ return tempstr
#
@@ -2182,7 +2225,9 @@ def WhirlpoolInit(ctx):
def WhirlpoolAdd(source, sourceBits, ctx):
if not isinstance(source, bytes):
- raise TypeError("Expected %s, got %s" % (bytes, type(source)))
+ raise TypeError(f"Expected {bytes}, got {type(source)}")
+ if sourceBits == 0:
+ return
carry = 0
value = sourceBits
@@ -2330,23 +2375,3 @@ def processBuffer(ctx):
# apply the Miyaguchi-Preneel compression function
for i in range(8):
ctx.hash[i] ^= state[i] ^ block[i]
-
-
-#
-# Tests.
-#
-
-
-if __name__ == "__main__":
- assert (
- Whirlpool(b"The quick brown fox jumps over the lazy dog").hexdigest()
- == "b97de512e91e3828b40d2b0fdce9ceb3c4a71f9bea8d88e75c4fa854df36725fd2b52eb6544edcacd6f8beddfea403cb55ae31f03ad62a5ef54e42ee82c3fb35"
- )
- assert (
- Whirlpool(b"The quick brown fox jumps over the lazy eog").hexdigest()
- == "c27ba124205f72e6847f3e19834f925cc666d0974167af915bb462420ed40cc50900d85a1f923219d832357750492d5c143011a76988344c2635e69d06f2d38c"
- )
- assert (
- Whirlpool(b"").hexdigest()
- == "19fa61d75522a4669b44e39c1d2e1726c530232130d407f89afee0964997f7a73e83be698b288febcf88e3e03c4f0757ea8964e59b63d93708b138cc42a66eb3"
- )
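After the split above, CWhirlpool.is_available reports whether the compiled _whirlpool extension imported, and the pure-Python PyWhirlpool warns on construction. A sketch of picking an implementation by hand (callers normally go through portage.checksum instead):

from portage.util.whirlpool import CWhirlpool, PyWhirlpool

impl = CWhirlpool if CWhirlpool.is_available else PyWhirlpool
w = impl(b"The quick brown fox jumps over the lazy dog")
print(w.hexdigest())
# -> b97de512e91e3828b40d2b0fdce9ceb3c4a71f9b... (matches the removed test)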
diff --git a/lib/portage/util/writeable_check.py b/lib/portage/util/writeable_check.py
index be73745a1..ad1d9edff 100644
--- a/lib/portage/util/writeable_check.py
+++ b/lib/portage/util/writeable_check.py
@@ -1,4 +1,3 @@
-# -*- coding:utf-8 -*-
# Copyright 2014-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
"""
@@ -9,7 +8,6 @@ accepts a list of directories and returns a list of mounts which need to be
remounted RW, then add "elif ostype == (the ostype value for your OS)" to
get_ro_checker().
"""
-import io
import logging
import os
@@ -45,14 +43,14 @@ def linux_ro_checker(dir_list):
invalids = []
try:
- with io.open(
+ with open(
"/proc/self/mountinfo",
- mode="r",
encoding=_encodings["content"],
errors="replace",
+ newline="\n",
) as f:
for line in f:
- # we're interested in dir and both attr fileds which always
+ # we're interested in dir and both attr fields which always
# start with either 'ro' or 'rw'
# example line:
# 14 1 8:3 / / rw,noatime - ext3 /dev/root rw,errors=continue,commit=5,barrier=1,data=writeback
@@ -61,7 +59,7 @@ def linux_ro_checker(dir_list):
# to the left of the ' - ', after the attr's, so split it there
mount = line.split(" - ", 1)
try:
- _dir, attr1 = mount[0].split()[4:6]
+ _dir, attr1 = mount[0].split(" ")[4:6]
except ValueError:
# If it raises ValueError we can simply ignore the line.
invalids.append(line)
@@ -71,10 +69,10 @@ def linux_ro_checker(dir_list):
# for example: 16 1 0:16 / /root rw,noatime - lxfs rw
if len(mount) > 1:
try:
- attr2 = mount[1].split()[2]
+ attr2 = mount[1].split(" ")[2]
except IndexError:
try:
- attr2 = mount[1].split()[1]
+ attr2 = mount[1].split(" ")[1]
except IndexError:
invalids.append(line)
continue
@@ -86,7 +84,7 @@ def linux_ro_checker(dir_list):
# If /proc/self/mountinfo can't be read, assume that there are no RO
# filesystems and return.
- except EnvironmentError:
+ except OSError:
writemsg_level(
_("!!! /proc/self/mountinfo cannot be read"),
level=logging.WARNING,
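The parser above splits each /proc/self/mountinfo line on the ' - ' separator documented in proc(5) and reads the mount point and attributes from fixed fields; a standalone check against one sample line:

line = "14 1 8:3 / / rw,noatime - ext3 /dev/root rw,errors=continue"
pre_separator = line.split(" - ", 1)[0]
_dir, attr1 = pre_separator.split(" ")[4:6]
print(_dir)                     # -> /
print(attr1.startswith("ro"))   # -> False: mounted read-write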
diff --git a/lib/portage/versions.py b/lib/portage/versions.py
index fe1ff6ce0..0e515ba5c 100644
--- a/lib/portage/versions.py
+++ b/lib/portage/versions.py
@@ -1,5 +1,5 @@
# versions.py -- core Portage functionality
-# Copyright 1998-2016 Gentoo Foundation
+# Copyright 1998-2023 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
__all__ = [
@@ -16,8 +16,11 @@ __all__ = [
]
import re
+import typing
import warnings
from functools import lru_cache
+from typing import Any, Optional, Union
+from collections.abc import Sequence
import portage
@@ -28,7 +31,7 @@ portage.proxy.lazyimport.lazyimport(
"portage.util:cmp_sort_key",
)
from portage import _unicode_decode
-from portage.eapi import _get_eapi_attrs
+from portage.eapi import _eapi_attrs, _get_eapi_attrs
from portage.exception import InvalidData
from portage.localization import _
@@ -47,38 +50,17 @@ _cat = r"[\w+][\w+.-]*"
# 2.1.2 A package name may contain any of the characters [A-Za-z0-9+_-].
# It must not begin with a hyphen,
# and must not end in a hyphen followed by one or more digits.
-_pkg = {
- "dots_disallowed_in_PN": r"[\w+][\w+-]*?",
- "dots_allowed_in_PN": r"[\w+][\w+.-]*?",
-}
+_pkg = r"[\w+][\w+-]*?"
_v = r"(\d+)((\.\d+)*)([a-z]?)((_(pre|p|beta|alpha|rc)\d*)*)"
_rev = r"\d+"
_vr = _v + "(-r(" + _rev + "))?"
-_cp = {
- "dots_disallowed_in_PN": "("
- + _cat
- + "/"
- + _pkg["dots_disallowed_in_PN"]
- + "(-"
- + _vr
- + ")?)",
- "dots_allowed_in_PN": "("
- + _cat
- + "/"
- + _pkg["dots_allowed_in_PN"]
- + "(-"
- + _vr
- + ")?)",
-}
-_cpv = {
- "dots_disallowed_in_PN": "(" + _cp["dots_disallowed_in_PN"] + "-" + _vr + ")",
- "dots_allowed_in_PN": "(" + _cp["dots_allowed_in_PN"] + "-" + _vr + ")",
-}
-_pv = {
- "dots_disallowed_in_PN": "(?P<pn>"
- + _pkg["dots_disallowed_in_PN"]
+_cp = "(" + _cat + "/" + _pkg + "(-" + _vr + ")?)"
+_cpv = "(" + _cp + "-" + _vr + ")"
+_pv = (
+ "(?P<pn>"
+ + _pkg
+ "(?P<pn_inval>-"
+ _vr
+ ")?)"
@@ -86,18 +68,8 @@ _pv = {
+ _v
+ ")(-r(?P<rev>"
+ _rev
- + "))?",
- "dots_allowed_in_PN": "(?P<pn>"
- + _pkg["dots_allowed_in_PN"]
- + "(?P<pn_inval>-"
- + _vr
- + ")?)"
- + "-(?P<ver>"
- + _v
- + ")(-r(?P<rev>"
- + _rev
- + "))?",
-}
+ + "))?"
+)
ver_regexp = re.compile("^" + _vr + "$")
suffix_regexp = re.compile("^(alpha|beta|rc|pre|p)(\\d*)$")
@@ -107,7 +79,7 @@ endversion_keys = ["pre", "p", "alpha", "beta", "rc"]
_slot_re_cache = {}
-def _get_slot_re(eapi_attrs):
+def _get_slot_re(eapi_attrs: _eapi_attrs) -> typing.Pattern:
cache_key = eapi_attrs.slot_operator
slot_re = _slot_re_cache.get(cache_key)
if slot_re is not None:
@@ -124,27 +96,20 @@ def _get_slot_re(eapi_attrs):
return slot_re
-_pv_re_cache = {}
+_pv_re = None
-def _get_pv_re(eapi_attrs):
- cache_key = eapi_attrs.dots_in_PN
- pv_re = _pv_re_cache.get(cache_key)
- if pv_re is not None:
- return pv_re
+def _get_pv_re(eapi_attrs: _eapi_attrs) -> typing.Pattern:
+ global _pv_re
+ if _pv_re is not None:
+ return _pv_re
- if eapi_attrs.dots_in_PN:
- pv_re = _pv["dots_allowed_in_PN"]
- else:
- pv_re = _pv["dots_disallowed_in_PN"]
-
- pv_re = re.compile(r"^" + pv_re + r"$", re.VERBOSE | re.UNICODE)
+ _pv_re = re.compile(r"^" + _pv + r"$", re.VERBOSE | re.UNICODE)
- _pv_re_cache[cache_key] = pv_re
- return pv_re
+ return _pv_re
-def ververify(myver, silent=1):
+def ververify(myver: str, silent: int = 1) -> bool:
if ver_regexp.match(myver):
return True
if not silent:
@@ -153,7 +118,7 @@ def ververify(myver, silent=1):
@lru_cache(1024)
-def vercmp(ver1, ver2, silent=1):
+def vercmp(ver1: str, ver2: str, silent: int = 1) -> Optional[int]:
"""
Compare two versions
Example usage:
@@ -300,7 +265,7 @@ def vercmp(ver1, ver2, silent=1):
return rval
-def pkgcmp(pkg1, pkg2):
+def pkgcmp(pkg1: tuple[str, str, str], pkg2: tuple[str, str, str]) -> Optional[int]:
"""
Compare 2 package versions created in pkgsplit format.
@@ -327,7 +292,7 @@ def pkgcmp(pkg1, pkg2):
return vercmp("-".join(pkg1[1:]), "-".join(pkg2[1:]))
-def _pkgsplit(mypkg, eapi=None):
+def _pkgsplit(mypkg: str, eapi: Any = None) -> Optional[tuple[str, str, str]]:
"""
@param mypkg: pv
@return:
@@ -350,12 +315,16 @@ def _pkgsplit(mypkg, eapi=None):
return (m.group("pn"), m.group("ver"), rev)
-_cat_re = re.compile("^%s$" % _cat, re.UNICODE)
+_cat_re = re.compile(f"^{_cat}$", re.UNICODE)
_missing_cat = "null"
@lru_cache(10240)
-def catpkgsplit(mydata, silent=1, eapi=None):
+def catpkgsplit(
+ mydata: Union[str, "_pkg_str"],
+ silent: int = 1,
+ eapi: Any = None,
+) -> Optional[tuple[str, ...]]:
"""
Takes a Category/Package-Version-Rev and returns a list of each.
@@ -406,33 +375,33 @@ class _pkg_str(str):
def __new__(
cls,
- cpv,
- metadata=None,
- settings=None,
- eapi=None,
- repo=None,
- slot=None,
- build_time=None,
- build_id=None,
- file_size=None,
- mtime=None,
- db=None,
+ cpv: str,
+ metadata: Optional[dict[str, Any]] = None,
+ settings: Any = None,
+ eapi: Any = None,
+ repo: Optional[str] = None,
+ slot: Optional[str] = None,
+ build_time: Optional[int] = None,
+ build_id: Optional[str] = None,
+ file_size: Optional[int] = None,
+ mtime: Optional[int] = None,
+ db: Any = None,
):
return str.__new__(cls, cpv)
def __init__(
self,
- cpv,
- metadata=None,
- settings=None,
- eapi=None,
- repo=None,
- slot=None,
- build_time=None,
- build_id=None,
- file_size=None,
- mtime=None,
- db=None,
+ cpv: str,
+ metadata: Optional[dict[str, Any]] = None,
+ settings: Any = None,
+ eapi: Any = None,
+ repo: Optional[str] = None,
+ slot: Optional[str] = None,
+ build_time: Optional[int] = None,
+ build_id: Optional[str] = None,
+ file_size: Optional[int] = None,
+ mtime: Optional[int] = None,
+ db: Any = None,
):
if not isinstance(cpv, str):
# Avoid TypeError from str.__init__ with PyPy.
@@ -494,13 +463,13 @@ class _pkg_str(str):
repo = _unknown_repo
self.__dict__["repo"] = repo
- def __setattr__(self, name, value):
+ def __setattr__(self, name: str, value: Any) -> None:
raise AttributeError(
"_pkg_str instances are immutable", self.__class__, name, value
)
@staticmethod
- def _long(var, default):
+ def _long(var: Any, default: int) -> int:
if var is not None:
try:
var = int(var)
@@ -512,7 +481,7 @@ class _pkg_str(str):
return var
@property
- def stable(self):
+ def stable(self) -> bool:
try:
return self._stable
except AttributeError:
@@ -530,7 +499,9 @@ class _pkg_str(str):
return stable
-def pkgsplit(mypkg, silent=1, eapi=None):
+def pkgsplit(
+ mypkg: str, silent: int = 1, eapi: Any = None
+) -> Optional[tuple[str, str, str]]:
"""
@param mypkg: either a pv or cpv
@return:
@@ -547,7 +518,7 @@ def pkgsplit(mypkg, silent=1, eapi=None):
return (cat + "/" + pn, ver, rev)
-def cpv_getkey(mycpv, eapi=None):
+def cpv_getkey(mycpv: Union[_pkg_str, str], eapi: Any = None) -> Optional[str]:
"""Calls catpkgsplit on a cpv and returns only the cp."""
try:
return mycpv.cp
@@ -558,7 +529,7 @@ def cpv_getkey(mycpv, eapi=None):
return mysplit[0] + "/" + mysplit[1]
warnings.warn(
- "portage.versions.cpv_getkey() " + "called with invalid cpv: '%s'" % (mycpv,),
+ "portage.versions.cpv_getkey() " + f"called with invalid cpv: '{mycpv}'",
DeprecationWarning,
stacklevel=2,
)
@@ -573,7 +544,7 @@ def cpv_getkey(mycpv, eapi=None):
return mysplit[0]
-def cpv_getversion(mycpv, eapi=None):
+def cpv_getversion(mycpv: Union[str, _pkg_str], eapi: Any = None) -> Optional[str]:
"""Returns the v (including revision) from an cpv."""
try:
return mycpv.version
@@ -585,7 +556,7 @@ def cpv_getversion(mycpv, eapi=None):
return mycpv[len(cp + "-") :]
-def cpv_sort_key(eapi=None):
+def cpv_sort_key(eapi: Any = None) -> Any:
"""
Create an object for sorting cpvs, to be used as the 'key' parameter
in places like list.sort() or sorted(). This calls catpkgsplit() once for
@@ -600,8 +571,7 @@ def cpv_sort_key(eapi=None):
split_cache = {}
- def cmp_cpv(cpv1, cpv2):
-
+ def cmp_cpv(cpv1: Any, cpv2: Any) -> int:
split1 = split_cache.get(cpv1, False)
if split1 is False:
split1 = None
@@ -634,11 +604,11 @@ def cpv_sort_key(eapi=None):
return cmp_sort_key(cmp_cpv)
-def catsplit(mydep):
+def catsplit(mydep: str) -> list[str]:
return mydep.split("/", 1)
-def best(mymatches, eapi=None):
+def best(mymatches: Sequence[Any], eapi: Any = None) -> Any:
"""Accepts None arguments; assumes matches are valid."""
if not mymatches:
return ""
diff --git a/lib/portage/xml/meson.build b/lib/portage/xml/meson.build
new file mode 100644
index 000000000..a0d113dc2
--- /dev/null
+++ b/lib/portage/xml/meson.build
@@ -0,0 +1,8 @@
+py.install_sources(
+ [
+ 'metadata.py',
+ '__init__.py',
+ ],
+ subdir : 'portage/xml',
+ pure : not native_extensions
+)
diff --git a/lib/portage/xml/metadata.py b/lib/portage/xml/metadata.py
index 807847d7e..33bb977d7 100644
--- a/lib/portage/xml/metadata.py
+++ b/lib/portage/xml/metadata.py
@@ -83,7 +83,7 @@ class _Maintainer:
setattr(self, attr.tag, attr.text)
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.email)
+ return f"<{self.__class__.__name__} {self.email!r}>"
class _Useflag:
@@ -106,14 +106,14 @@ class _Useflag:
_desc = ""
if node.text:
_desc = node.text
- for child in node.getchildren():
+ for child in node:
_desc += child.text if child.text else ""
_desc += child.tail if child.tail else ""
# This takes care of tabs and newlines left from the file
self.description = re.sub(r"\s+", " ", _desc)
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.name)
+ return f"<{self.__class__.__name__} {self.name!r}>"
class _Upstream:
@@ -145,7 +145,7 @@ class _Upstream:
self.remoteids = self.upstream_remoteids()
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.__dict__)
+ return f"<{self.__class__.__name__} {self.__dict__!r}>"
def upstream_bugtrackers(self):
"""Retrieve upstream bugtracker location from xml node."""
@@ -200,7 +200,7 @@ class MetaDataXML:
except ImportError:
pass
except ExpatError as e:
- raise SyntaxError("%s" % (e,))
+ raise SyntaxError(f"{e}")
if isinstance(herds, etree.ElementTree):
herds_etree = herds
@@ -219,7 +219,7 @@ class MetaDataXML:
self._upstream = None
def __repr__(self):
- return "<%s %r>" % (self.__class__.__name__, self.metadata_xml_path)
+ return f"<{self.__class__.__name__} {self.metadata_xml_path!r}>"
def _get_herd_email(self, herd):
"""Get a herd's email address.
@@ -239,7 +239,7 @@ class MetaDataXML:
),
parser=etree.XMLParser(target=_MetadataTreeBuilder()),
)
- except (ImportError, IOError, SyntaxError):
+ except (ImportError, OSError, SyntaxError):
return None
# Some special herds are not listed in herds.xml
diff --git a/lib/portage/xpak.py b/lib/portage/xpak.py
index b20429b6f..94a07a84c 100644
--- a/lib/portage/xpak.py
+++ b/lib/portage/xpak.py
@@ -43,6 +43,8 @@ from portage import normalize_path
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
+from portage.binpkg import get_binpkg_format
+from portage.exception import InvalidBinaryPackageFormat
from portage.util.file_copy import copyfile
@@ -53,7 +55,6 @@ def addtolist(mylist, curdir):
_unicode_decode(curdir, encoding=_encodings["fs"], errors="strict")
)
for parent, dirs, files in os.walk(curdir):
-
parent = _unicode_decode(parent, encoding=_encodings["fs"], errors="strict")
if parent != curdir:
mylist.append(parent[len(curdir) + 1 :] + os.sep)
@@ -103,6 +104,11 @@ def xpak(rootdir, outfile=None):
and under the name 'outfile' if it is specified. Otherwise it returns the
xpak segment."""
+ if portage.utf8_mode and not isinstance(rootdir, bytes):
+ # Since paths are encoded below, rootdir must also be encoded
+ # when _unicode_func_wrapper is not used.
+ rootdir = os.fsencode(rootdir)
+
mylist = []
addtolist(mylist, rootdir)
@@ -171,7 +177,7 @@ def xpak_mem(mydata):
def xsplit(infile):
"""(infile) -- Splits the infile into two files.
'infile.index' contains the index segment.
- 'infile.dat' contails the data segment."""
+ 'infile.dat' contains the data segment."""
infile = _unicode_decode(infile, encoding=_encodings["fs"], errors="strict")
myfile = open(
_unicode_encode(infile, encoding=_encodings["fs"], errors="strict"), "rb"
@@ -340,7 +346,7 @@ class tbz2:
the directory provided. Raises IOError if scan() fails.
Returns result of upackinfo()."""
if not self.scan():
- raise IOError
+ raise OSError
if cleanup:
self.cleanup(datadir)
if not os.path.exists(datadir):
@@ -388,7 +394,7 @@ class tbz2:
"ab+",
)
if not myfile:
- raise IOError
+ raise OSError
myfile.seek(-self.xpaksize, 2) # 0,2 or -0,2 just mean EOF.
myfile.truncate()
myfile.write(xpdata + encodeint(len(xpdata)) + b"STOP")
@@ -435,14 +441,26 @@ class tbz2:
self.infosize = 0
self.xpaksize = 0
if trailer[-4:] != b"STOP":
+ try:
+ get_binpkg_format(self.file, check_file=True)
+ except InvalidBinaryPackageFormat:
+ pass
return 0
if trailer[0:8] != b"XPAKSTOP":
+ try:
+ get_binpkg_format(self.file, check_file=True)
+ except InvalidBinaryPackageFormat:
+ pass
return 0
self.infosize = decodeint(trailer[8:12])
self.xpaksize = self.infosize + 8
a.seek(-(self.xpaksize), 2)
header = a.read(16)
if header[0:8] != b"XPAKPACK":
+ try:
+ get_binpkg_format(self.file, check_file=True)
+ except InvalidBinaryPackageFormat:
+ pass
return 0
self.indexsize = decodeint(header[8:12])
self.datasize = decodeint(header[12:16])
@@ -453,6 +471,10 @@ class tbz2:
except SystemExit:
raise
except:
+ try:
+ get_binpkg_format(self.file, check_file=True)
+ except InvalidBinaryPackageFormat:
+ pass
return 0
finally:
if a is not None:
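scan() above recognizes the xpak trailer before falling back to get_binpkg_format(): the file must end with a 16-byte trailer of b"XPAKSTOP", a 4-byte big-endian offset, and b"STOP", and the segment that offset points back to must open with b"XPAKPACK". A standalone sketch of that probe, with decodeint() mirroring the big-endian helper from portage.xpak:

def decodeint(data):
    # 4-byte big-endian unsigned integer, as used throughout the xpak format.
    return int.from_bytes(data[:4], "big")

def has_xpak(path):
    with open(path, "rb") as f:
        f.seek(-16, 2)
        trailer = f.read(16)  # b"XPAKSTOP" + infosize(4) + b"STOP"
        if trailer[-4:] != b"STOP" or trailer[:8] != b"XPAKSTOP":
            return False
        xpaksize = decodeint(trailer[8:12]) + 8
        f.seek(-xpaksize, 2)
        return f.read(8) == b"XPAKPACK"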