Diffstat (limited to 'repoman')
-rw-r--r--  repoman/MANIFEST.in | 3
-rw-r--r--  repoman/README | 49
-rw-r--r--  repoman/TEST-NOTES | 45
-rwxr-xr-x  repoman/bin/repoman | 43
-rw-r--r--  repoman/cnf/metadata.xsd | 547
-rw-r--r--  repoman/man/repoman.1 | 409
-rw-r--r--  repoman/pym/repoman/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/_portage.py | 25
-rw-r--r--  repoman/pym/repoman/_subprocess.py | 83
-rw-r--r--  repoman/pym/repoman/actions.py | 538
-rw-r--r--  repoman/pym/repoman/argparser.py | 225
-rw-r--r--  repoman/pym/repoman/check_missingslot.py | 30
-rw-r--r--  repoman/pym/repoman/checks/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/checks/herds/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/checks/herds/herdbase.py | 135
-rw-r--r--  repoman/pym/repoman/checks/herds/metadata.py | 26
-rw-r--r--  repoman/pym/repoman/copyrights.py | 120
-rw-r--r--  repoman/pym/repoman/errors.py | 22
-rw-r--r--  repoman/pym/repoman/gpg.py | 82
-rwxr-xr-x  repoman/pym/repoman/main.py | 183
-rw-r--r--  repoman/pym/repoman/metadata.py | 122
-rw-r--r--  repoman/pym/repoman/modules/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/modules/commit/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/modules/commit/repochecks.py | 35
-rw-r--r--  repoman/pym/repoman/modules/scan/__init__.py | 0
-rw-r--r--  repoman/pym/repoman/modules/scan/depend/__init__.py | 32
-rw-r--r--  repoman/pym/repoman/modules/scan/depend/_depend_checks.py | 150
-rw-r--r--  repoman/pym/repoman/modules/scan/depend/_gen_arches.py | 57
-rw-r--r--  repoman/pym/repoman/modules/scan/depend/profile.py | 256
-rw-r--r--  repoman/pym/repoman/modules/scan/directories/__init__.py | 48
-rw-r--r--  repoman/pym/repoman/modules/scan/directories/files.py | 94
-rw-r--r--  repoman/pym/repoman/modules/scan/directories/mtime.py | 30
-rw-r--r--  repoman/pym/repoman/modules/scan/eapi/__init__.py | 29
-rw-r--r--  repoman/pym/repoman/modules/scan/eapi/eapi.py | 49
-rw-r--r--  repoman/pym/repoman/modules/scan/ebuild/__init__.py | 58
-rw-r--r--  repoman/pym/repoman/modules/scan/ebuild/checks.py | 1007
-rw-r--r--  repoman/pym/repoman/modules/scan/ebuild/ebuild.py | 238
-rw-r--r--  repoman/pym/repoman/modules/scan/ebuild/errors.py | 49
-rw-r--r--  repoman/pym/repoman/modules/scan/ebuild/multicheck.py | 56
-rw-r--r--  repoman/pym/repoman/modules/scan/eclasses/__init__.py | 47
-rw-r--r--  repoman/pym/repoman/modules/scan/eclasses/live.py | 76
-rw-r--r--  repoman/pym/repoman/modules/scan/eclasses/ruby.py | 48
-rw-r--r--  repoman/pym/repoman/modules/scan/fetch/__init__.py | 33
-rw-r--r--  repoman/pym/repoman/modules/scan/fetch/fetches.py | 190
-rw-r--r--  repoman/pym/repoman/modules/scan/keywords/__init__.py | 33
-rw-r--r--  repoman/pym/repoman/modules/scan/keywords/keywords.py | 133
-rw-r--r--  repoman/pym/repoman/modules/scan/manifest/__init__.py | 30
-rw-r--r--  repoman/pym/repoman/modules/scan/manifest/manifests.py | 139
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/__init__.py | 85
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/description.py | 41
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/ebuild_metadata.py | 71
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/pkgmetadata.py | 247
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/restrict.py | 53
-rw-r--r--  repoman/pym/repoman/modules/scan/metadata/use_flags.py | 94
-rw-r--r--  repoman/pym/repoman/modules/scan/options/__init__.py | 28
-rw-r--r--  repoman/pym/repoman/modules/scan/options/options.py | 29
-rw-r--r--  repoman/pym/repoman/modules/scan/scan.py | 66
-rw-r--r--  repoman/pym/repoman/modules/scan/scanbase.py | 79
-rw-r--r--  repoman/pym/repoman/modules/vcs/None/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/None/changes.py | 50
-rw-r--r--  repoman/pym/repoman/modules/vcs/None/status.py | 53
-rw-r--r--  repoman/pym/repoman/modules/vcs/__init__.py | 14
-rw-r--r--  repoman/pym/repoman/modules/vcs/bzr/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/bzr/changes.py | 68
-rw-r--r--  repoman/pym/repoman/modules/vcs/bzr/status.py | 70
-rw-r--r--  repoman/pym/repoman/modules/vcs/changes.py | 169
-rw-r--r--  repoman/pym/repoman/modules/vcs/cvs/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/cvs/changes.py | 118
-rw-r--r--  repoman/pym/repoman/modules/vcs/cvs/status.py | 131
-rw-r--r--  repoman/pym/repoman/modules/vcs/git/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/git/changes.py | 120
-rw-r--r--  repoman/pym/repoman/modules/vcs/git/status.py | 79
-rw-r--r--  repoman/pym/repoman/modules/vcs/hg/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/hg/changes.py | 105
-rw-r--r--  repoman/pym/repoman/modules/vcs/hg/status.py | 65
-rw-r--r--  repoman/pym/repoman/modules/vcs/settings.py | 108
-rw-r--r--  repoman/pym/repoman/modules/vcs/svn/__init__.py | 34
-rw-r--r--  repoman/pym/repoman/modules/vcs/svn/changes.py | 141
-rw-r--r--  repoman/pym/repoman/modules/vcs/svn/status.py | 150
-rw-r--r--  repoman/pym/repoman/modules/vcs/vcs.py | 149
-rw-r--r--  repoman/pym/repoman/profile.py | 87
-rw-r--r--  repoman/pym/repoman/qa_data.py | 439
-rw-r--r--  repoman/pym/repoman/qa_tracker.py | 45
-rw-r--r--  repoman/pym/repoman/repos.py | 298
-rw-r--r--  repoman/pym/repoman/scanner.py | 424
-rw-r--r--  repoman/pym/repoman/tests/__init__.py | 2
-rw-r--r--  repoman/pym/repoman/tests/__test__.py | 0
-rw-r--r--  repoman/pym/repoman/tests/changelog/__init__.py | 2
-rw-r--r--  repoman/pym/repoman/tests/changelog/test_echangelog.py | 106
-rw-r--r--  repoman/pym/repoman/tests/runTests.py | 61
-rw-r--r--  repoman/pym/repoman/tests/simple/__init__.py | 2
-rw-r--r--  repoman/pym/repoman/tests/simple/test_simple.py | 322
-rw-r--r--  repoman/pym/repoman/utilities.py | 589
-rwxr-xr-x  repoman/setup.py | 662
94 files changed, 11230 insertions, 0 deletions
diff --git a/repoman/MANIFEST.in b/repoman/MANIFEST.in
new file mode 100644
index 000000000..ed81a97db
--- /dev/null
+++ b/repoman/MANIFEST.in
@@ -0,0 +1,3 @@
+
+# for the tests
+include cnf/metadata.xsd
diff --git a/repoman/README b/repoman/README
new file mode 100644
index 000000000..5e78842c9
--- /dev/null
+++ b/repoman/README
@@ -0,0 +1,49 @@
+About Portage
+=============
+
+Portage is a package management system based on ports collections. The
+Package Manager Specification Project (PMS) standardises and documents
+the behaviour of Portage so that the Portage tree can be used by other
+package managers.
+
+
+Dependencies
+============
+
+Python and Bash should be the only hard dependencies. Python 2.7 is the
+minimum supported version.
+
+
+Licensing and Legalese
+=======================
+
+Portage is free software; you can redistribute it and/or
+modify it under the terms of the GNU General Public License
+version 2 as published by the Free Software Foundation.
+
+Portage is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with Portage; if not, write to the Free Software
+Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA.
+
+
+More information
+================
+
+- DEVELOPING contains some code guidelines.
+- LICENSE contains the GNU General Public License version 2.
+- NEWS contains new features/major bug fixes for each version.
+- RELEASE NOTES contains mainly upgrade information for each version.
+- TEST-NOTES contains Portage unit test information.
+
+
+Links
+=====
+Gentoo project page: <https://wiki.gentoo.org/wiki/Project:Portage>
+PMS: <https://dev.gentoo.org/~ulm/pms/head/pms.html>
+PMS git repo: <https://gitweb.gentoo.org/proj/pms.git/>
diff --git a/repoman/TEST-NOTES b/repoman/TEST-NOTES
new file mode 100644
index 000000000..f9c6ab0cd
--- /dev/null
+++ b/repoman/TEST-NOTES
@@ -0,0 +1,45 @@
+UnitTests
+---------
+
+Portage has some tests that use the unittest framework that ships with python (2.3-2.4ish).
+Tests have a specific naming convention.
+
+In pym/portage/tests/ there is a runTests script that invokes pym/portage/tests/__init__.py.
+
+This init looks at a hardcoded list of test dirs to search for tests.
+If you add a new dir and don't see your new tests, make sure that the dir is in this list.
+
+On the subject of adding more directories: the layout is basically one directory per portage
+file at this point (we have few files, and even fewer large files). Inside of the dir
+you should have files of the form test_${function}.py.
+
+So if I were to write a vercmp test, and vercmp lives in portage_versions, then
+
+pym/portage/tests/portage_versions/test_vercmp.py
+
+would be the filename.
+
+The __init__.py file now does recursive tests, but you need to tell it where to look.
+For example, if you had cache tests, the dir format would be something like...
+
+pym/portage/tests/cache/flat_hash/test_foo.py
+
+and you would put "cache/flat_hash" into the testDirs variable in __init__.py.
+
+
+Skipping
+--------
+
+Please use the portage.tests.* classes, as they support throwing a SkipException for
+tests that are known to fail. Normally one uses testing to do Test Driven Development
+(TDD); however, we do not do that here. Therefore there are times when legitimate tests
+exist but fail due to code in trunk. We would still like the suite to pass in those instances,
+because the suite is built around two things: testing functionality in the current code, as
+well as poking holes in the current code (isvalidatom is an example). So sometimes we want
+a test to point out that "this needs fixing" even though it doesn't affect portage's overall
+functionality. You should raise portage.tests.SkipException in that case.
+
+emerge
+------
+
+The emerge namespace currently has 0 tests (and no runner).
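
For illustration, a test module following the layout and skipping conventions described
above might look like the sketch below. This is only a sketch: it assumes portage.tests
exposes a TestCase base class and SkipException as stated in these notes, and the vercmp
example mirrors the naming example given earlier.

    # pym/portage/tests/portage_versions/test_vercmp.py
    from portage.tests import TestCase, SkipException
    from portage.versions import vercmp

    class VerCmpTestCase(TestCase):
        """Tests for vercmp(), following the test_${function}.py convention."""

        def testGreaterVersionComparesPositive(self):
            # vercmp() returns a value > 0 when the first version is newer.
            self.assertTrue(vercmp("1.0-r1", "1.0") > 0)

        def testKnownTrunkBreakage(self):
            # A legitimate test that currently fails due to code in trunk:
            # skip it instead of failing the whole suite.
            raise SkipException("known failure, pending a fix in trunk")
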
diff --git a/repoman/bin/repoman b/repoman/bin/repoman
new file mode 100755
index 000000000..819e0f543
--- /dev/null
+++ b/repoman/bin/repoman
@@ -0,0 +1,43 @@
+#!/usr/bin/python -bO
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""Ebuild and tree health checks and maintenance utilities.
+"""
+
+from __future__ import print_function
+
+import sys
+import errno
+# This block ensures that ^C interrupts are handled quietly.
+try:
+	import signal
+
+	def exithandler(signum, _frame):
+		signal.signal(signal.SIGINT, signal.SIG_IGN)
+		signal.signal(signal.SIGTERM, signal.SIG_IGN)
+		sys.exit(128 + signum)
+
+	signal.signal(signal.SIGINT, exithandler)
+	signal.signal(signal.SIGTERM, exithandler)
+	signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+
+except KeyboardInterrupt:
+	sys.exit(1)
+
+from os import path as osp
+if osp.isfile(osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), ".portage_not_installed")):
+	pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))), "pym")
+	sys.path.insert(0, pym_path)
+import portage
+portage._internal_caller = True
+from repoman.main import repoman_main
+
+try:
+	repoman_main(sys.argv[1:])
+except IOError as e:
+	if e.errno == errno.EACCES:
+		print("\nRepoman: Need user access")
+		sys.exit(1)
+	else:
+		raise
diff --git a/repoman/cnf/metadata.xsd b/repoman/cnf/metadata.xsd
new file mode 100644
index 000000000..0ead09ee8
--- /dev/null
+++ b/repoman/cnf/metadata.xsd
@@ -0,0 +1,547 @@
+<?xml version='1.0' encoding='UTF-8'?>
+
+<xs:schema xmlns:xs='http://www.w3.org/2001/XMLSchema'>
+ <!-- top-level variants -->
+ <xs:element name='pkgmetadata' type='pkgMetadataType'>
+ <!-- note: restrict uniquity rules are simplified
+ the spec says: one for each matched package
+ we can only do: one for each restrict rule -->
+ <xs:unique name='longDescUniquityConstraint'>
+ <xs:selector xpath='longdescription'/>
+ <xs:field xpath='@lang'/>
+ <xs:field xpath='@restrict'/>
+ </xs:unique>
+ <xs:unique name='maintainerUniquityConstraint'>
+ <xs:selector xpath='maintainer'/>
+ <xs:field xpath='email'/>
+ <xs:field xpath='@restrict'/>
+ </xs:unique>
+ <xs:unique name='slotsUniquityConstraint'>
+ <xs:selector xpath='slots'/>
+ <xs:field xpath='@lang'/>
+ </xs:unique>
+ <xs:unique name='upstreamSingleConstraint'>
+ <xs:selector xpath='upstream'/>
+ <xs:field xpath='@fake-only-once'/>
+ </xs:unique>
+ <xs:unique name='useUniquityConstraint'>
+ <xs:selector xpath='use'/>
+ <xs:field xpath='@lang'/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name='catmetadata' type='catMetadataType'>
+ <xs:unique name='catLongDescUniquityConstraint'>
+ <xs:selector xpath='longdescription'/>
+ <xs:field xpath='@lang'/>
+ </xs:unique>
+ </xs:element>
+
+ <!-- global elements -->
+ <xs:complexType name='pkgMetadataType'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='longdescription' type='longDescType'/>
+ <xs:element name='maintainer' type='maintainerType'>
+ <xs:unique name='maintainerDescUniquityConstraint'>
+ <xs:selector xpath='description'/>
+ <xs:field xpath='@lang'/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name='slots' type='slotsType'>
+ <xs:unique name='slotUniquityConstraint'>
+ <xs:selector xpath='slot'/>
+ <xs:field xpath='@name'/>
+ </xs:unique>
+ <xs:unique name='subslotsSingleConstraint'>
+ <xs:selector xpath='subslots'/>
+ <xs:field xpath='@fake-only-once'/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name='upstream' type='upstreamType'>
+ <xs:unique name='bugsToSingleConstraint'>
+ <xs:selector xpath='bugs-to'/>
+ <xs:field xpath='@fake-only-once'/>
+ </xs:unique>
+ <xs:unique name='changelogSingleConstraint'>
+ <xs:selector xpath='changelog'/>
+ <xs:field xpath='@fake-only-once'/>
+ </xs:unique>
+ <!-- prevent accidentally repeating the same remote -->
+ <xs:unique name='upstreamRemoteIdRepetitionConstraint'>
+ <xs:selector xpath='remote-id'/>
+ <xs:field xpath='@type'/>
+ <xs:field xpath='.'/>
+ </xs:unique>
+ <xs:unique name='upstreamDocUniquityConstraint'>
+ <xs:selector xpath='doc'/>
+ <xs:field xpath='@lang'/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name='use' type='useType'>
+ <xs:unique name='flagUniquityConstraint'>
+ <xs:selector xpath='flag'/>
+ <xs:field xpath='@name'/>
+ <xs:field xpath='@restrict'/>
+ </xs:unique>
+ </xs:element>
+ </xs:choice>
+ </xs:complexType>
+
+ <xs:complexType name='catMetadataType'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='longdescription' type='catLongDescType'/>
+ </xs:choice>
+ </xs:complexType>
+
+ <!-- the huge <upstream/> structure -->
+ <xs:complexType name='upstreamType'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='maintainer' type='upstreamMaintainerType'>
+ <xs:unique name='upstreamMaintainerUniquityConstraint'>
+ <xs:selector xpath='maintainer'/>
+ <xs:field xpath='name'/>
+ </xs:unique>
+ </xs:element>
+ <xs:element name='changelog' type='urlOnceType'/>
+ <xs:element name='doc' type='upstreamDocType'/>
+ <xs:element name='bugs-to' type='urlOnceType'/>
+ <xs:element name='remote-id' type='upstreamRemoteIdType'/>
+ </xs:choice>
+ <xs:attribute name='fake-only-once'
+ fixed='there can be at most one &lt;upstream/&gt; element'/>
+ </xs:complexType>
+
+ <!-- maintainer in two variants -->
+ <xs:complexType name='maintainerType'>
+ <xs:all>
+ <xs:element name='email' type='emailType'
+ minOccurs='1'/>
+ <xs:element name='name' type='xs:token'
+ minOccurs='0'/>
+ <xs:element name='description' type='maintainerDescType'
+ minOccurs='0'/>
+ </xs:all>
+ <xs:attribute name='type' type='maintainerTypeAttrType'
+ use='required'/>
+ <xs:attribute name='restrict' type='restrictAttrType'/>
+ </xs:complexType>
+
+ <xs:simpleType name='maintainerTypeAttrType'>
+ <xs:restriction base='xs:token'>
+ <xs:enumeration value='person'/>
+ <xs:enumeration value='project'/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:complexType name='upstreamMaintainerType'>
+ <xs:all>
+ <xs:element name='email' type='emailType'
+ minOccurs='0'/>
+ <xs:element name='name' type='xs:token'
+ minOccurs='0'/>
+ </xs:all>
+ <xs:attribute name='status' type='upstreamMaintainerStatusAttrType'
+ default='unknown'/>
+ </xs:complexType>
+
+ <xs:simpleType name='upstreamMaintainerStatusAttrType'>
+ <xs:restriction base='xs:token'>
+ <xs:enumeration value='active'/>
+ <xs:enumeration value='inactive'/>
+ <xs:enumeration value='unknown'/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:complexType name='maintainerDescType'>
+ <xs:simpleContent>
+ <xs:extension base="xs:token">
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+
+ <!-- long description -->
+ <xs:complexType name='longDescType' mixed='true'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='pkg' type='pkgType'/>
+ <xs:element name='cat' type='catType'/>
+ </xs:choice>
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ <xs:attribute name='restrict' type='restrictAttrType'/>
+ </xs:complexType>
+
+ <xs:complexType name='catLongDescType' mixed='true'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='pkg' type='pkgType'/>
+ <xs:element name='cat' type='catType'/>
+ </xs:choice>
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ </xs:complexType>
+
+ <!-- slots -->
+ <xs:complexType name='slotsType'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='slot' type='slotType'/>
+ <xs:element name='subslots' type='tokenOnceType'/>
+ </xs:choice>
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ </xs:complexType>
+
+ <xs:complexType name='slotType'>
+ <xs:simpleContent>
+ <xs:extension base="xs:token">
+ <xs:attribute name='name' type='slotNameAttrType'
+ use='required'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+
+ <xs:simpleType name='slotNameAttrType'>
+ <xs:restriction base='xs:token'>
+ <!-- PMS 3.1.3 Slot Names + special value '*' -->
+ <xs:pattern value="[A-Za-z0-9_][A-Za-z0-9+_.-]*|[*]"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- use flags -->
+ <xs:complexType name='useType'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='flag' type='flagType'/>
+ </xs:choice>
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ </xs:complexType>
+
+ <xs:complexType name='flagType' mixed='true'>
+ <xs:choice minOccurs='0' maxOccurs='unbounded'>
+ <xs:element name='cat' type='catType'/>
+ <xs:element name='pkg' type='pkgType'/>
+ </xs:choice>
+ <xs:attribute name='name' type='flagNameAttrType'
+ use='required'/>
+ <xs:attribute name='restrict' type='restrictAttrType'
+ default=''/>
+ </xs:complexType>
+
+ <xs:simpleType name='flagNameAttrType'>
+ <xs:restriction base='xs:token'>
+ <!-- PMS 3.1.4 USE Flag Names -->
+ <xs:pattern value="[A-Za-z0-9][A-Za-z0-9+_@-]*"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- upstream-specific types -->
+ <xs:complexType name='upstreamDocType'>
+ <xs:simpleContent>
+ <xs:extension base="urlType">
+ <xs:attribute name='lang' type='langAttrType' default='en'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+
+ <xs:complexType name='upstreamRemoteIdType'>
+ <xs:simpleContent>
+ <xs:extension base="xs:token">
+ <xs:attribute name='type' type='upstreamRemoteIdTypeAttrType'
+ use='required'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+
+ <xs:simpleType name='upstreamRemoteIdTypeAttrType'>
+ <xs:restriction base='xs:token'>
+ <xs:enumeration value='bitbucket'/>
+ <xs:enumeration value='cpan'/>
+ <xs:enumeration value='cpan-module'/>
+ <xs:enumeration value='cpe'/>
+ <xs:enumeration value='cran'/>
+ <xs:enumeration value='ctan'/>
+ <xs:enumeration value='freecode'/>
+ <xs:enumeration value='freshmeat'/>
+ <xs:enumeration value='github'/>
+ <xs:enumeration value='gitlab'/>
+ <xs:enumeration value='gitorious'/>
+ <xs:enumeration value='google-code'/>
+ <xs:enumeration value='launchpad'/>
+ <xs:enumeration value='pear'/>
+ <xs:enumeration value='pecl'/>
+ <xs:enumeration value='pypi'/>
+ <xs:enumeration value='rubyforge'/>
+ <xs:enumeration value='rubygems'/>
+ <xs:enumeration value='sourceforge'/>
+ <xs:enumeration value='sourceforge-jp'/>
+ <xs:enumeration value='vim'/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- creepy mixed-text types -->
+ <xs:simpleType name='catType'>
+ <xs:restriction base='xs:token'>
+ <!-- PMS 3.1.1 Category Names -->
+ <xs:pattern value="[A-Za-z0-9_][A-Za-z0-9+_.-]*"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:simpleType name='pkgType'>
+ <xs:restriction base='xs:token'>
+ <!-- PMS 3.1.1 Category Names + 3.1.2 Package Names -->
+ <!-- note: this does not enforce the 'anything matching
+ the version syntax' requirement -->
+ <xs:pattern
+ value="[A-Za-z0-9_][A-Za-z0-9+_.-]*/[A-Za-z0-9_][A-Za-z0-9+_-]*"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- common attributes -->
+ <xs:simpleType name='langAttrType'>
+ <xs:restriction base='xs:token'>
+ <!-- ISO 639-1 language codes -->
+ <xs:enumeration value='aa'/>
+ <xs:enumeration value='ab'/>
+ <xs:enumeration value='ae'/>
+ <xs:enumeration value='af'/>
+ <xs:enumeration value='ak'/>
+ <xs:enumeration value='am'/>
+ <xs:enumeration value='an'/>
+ <xs:enumeration value='ar'/>
+ <xs:enumeration value='as'/>
+ <xs:enumeration value='av'/>
+ <xs:enumeration value='ay'/>
+ <xs:enumeration value='az'/>
+ <xs:enumeration value='ba'/>
+ <xs:enumeration value='be'/>
+ <xs:enumeration value='bg'/>
+ <xs:enumeration value='bh'/>
+ <xs:enumeration value='bi'/>
+ <xs:enumeration value='bm'/>
+ <xs:enumeration value='bn'/>
+ <xs:enumeration value='bo'/>
+ <xs:enumeration value='bo'/>
+ <xs:enumeration value='br'/>
+ <xs:enumeration value='bs'/>
+ <xs:enumeration value='ca'/>
+ <xs:enumeration value='ce'/>
+ <xs:enumeration value='ch'/>
+ <xs:enumeration value='co'/>
+ <xs:enumeration value='cr'/>
+ <xs:enumeration value='cs'/>
+ <xs:enumeration value='cs'/>
+ <xs:enumeration value='cu'/>
+ <xs:enumeration value='cv'/>
+ <xs:enumeration value='cy'/>
+ <xs:enumeration value='cy'/>
+ <xs:enumeration value='da'/>
+ <xs:enumeration value='de'/>
+ <xs:enumeration value='de'/>
+ <xs:enumeration value='dv'/>
+ <xs:enumeration value='dz'/>
+ <xs:enumeration value='ee'/>
+ <xs:enumeration value='el'/>
+ <xs:enumeration value='el'/>
+ <xs:enumeration value='en'/>
+ <xs:enumeration value='eo'/>
+ <xs:enumeration value='es'/>
+ <xs:enumeration value='et'/>
+ <xs:enumeration value='eu'/>
+ <xs:enumeration value='eu'/>
+ <xs:enumeration value='fa'/>
+ <xs:enumeration value='fa'/>
+ <xs:enumeration value='ff'/>
+ <xs:enumeration value='fi'/>
+ <xs:enumeration value='fj'/>
+ <xs:enumeration value='fo'/>
+ <xs:enumeration value='fr'/>
+ <xs:enumeration value='fr'/>
+ <xs:enumeration value='fy'/>
+ <xs:enumeration value='ga'/>
+ <xs:enumeration value='ga'/>
+ <xs:enumeration value='Ga'/>
+ <xs:enumeration value='gd'/>
+ <xs:enumeration value='gl'/>
+ <xs:enumeration value='gn'/>
+ <xs:enumeration value='gu'/>
+ <xs:enumeration value='gv'/>
+ <xs:enumeration value='ha'/>
+ <xs:enumeration value='he'/>
+ <xs:enumeration value='hi'/>
+ <xs:enumeration value='ho'/>
+ <xs:enumeration value='hr'/>
+ <xs:enumeration value='ht'/>
+ <xs:enumeration value='hu'/>
+ <xs:enumeration value='hy'/>
+ <xs:enumeration value='hy'/>
+ <xs:enumeration value='hz'/>
+ <xs:enumeration value='ia'/>
+ <xs:enumeration value='id'/>
+ <xs:enumeration value='ie'/>
+ <xs:enumeration value='ig'/>
+ <xs:enumeration value='ii'/>
+ <xs:enumeration value='ik'/>
+ <xs:enumeration value='io'/>
+ <xs:enumeration value='is'/>
+ <xs:enumeration value='is'/>
+ <xs:enumeration value='it'/>
+ <xs:enumeration value='iu'/>
+ <xs:enumeration value='ja'/>
+ <xs:enumeration value='jv'/>
+ <xs:enumeration value='ka'/>
+ <xs:enumeration value='ka'/>
+ <xs:enumeration value='kg'/>
+ <xs:enumeration value='ki'/>
+ <xs:enumeration value='kj'/>
+ <xs:enumeration value='kk'/>
+ <xs:enumeration value='kl'/>
+ <xs:enumeration value='km'/>
+ <xs:enumeration value='kn'/>
+ <xs:enumeration value='ko'/>
+ <xs:enumeration value='kr'/>
+ <xs:enumeration value='ks'/>
+ <xs:enumeration value='ku'/>
+ <xs:enumeration value='kv'/>
+ <xs:enumeration value='kw'/>
+ <xs:enumeration value='ky'/>
+ <xs:enumeration value='la'/>
+ <xs:enumeration value='lb'/>
+ <xs:enumeration value='lg'/>
+ <xs:enumeration value='li'/>
+ <xs:enumeration value='ln'/>
+ <xs:enumeration value='lo'/>
+ <xs:enumeration value='lt'/>
+ <xs:enumeration value='lu'/>
+ <xs:enumeration value='lv'/>
+ <xs:enumeration value='mg'/>
+ <xs:enumeration value='mh'/>
+ <xs:enumeration value='mi'/>
+ <xs:enumeration value='mi'/>
+ <xs:enumeration value='mk'/>
+ <xs:enumeration value='mk'/>
+ <xs:enumeration value='ml'/>
+ <xs:enumeration value='mn'/>
+ <xs:enumeration value='mr'/>
+ <xs:enumeration value='ms'/>
+ <xs:enumeration value='ms'/>
+ <xs:enumeration value='mt'/>
+ <xs:enumeration value='my'/>
+ <xs:enumeration value='my'/>
+ <xs:enumeration value='na'/>
+ <xs:enumeration value='nb'/>
+ <xs:enumeration value='nd'/>
+ <xs:enumeration value='ne'/>
+ <xs:enumeration value='ng'/>
+ <xs:enumeration value='nl'/>
+ <xs:enumeration value='nl'/>
+ <xs:enumeration value='nn'/>
+ <xs:enumeration value='no'/>
+ <xs:enumeration value='nr'/>
+ <xs:enumeration value='nv'/>
+ <xs:enumeration value='ny'/>
+ <xs:enumeration value='oc'/>
+ <xs:enumeration value='oj'/>
+ <xs:enumeration value='om'/>
+ <xs:enumeration value='or'/>
+ <xs:enumeration value='os'/>
+ <xs:enumeration value='pa'/>
+ <xs:enumeration value='pi'/>
+ <xs:enumeration value='pl'/>
+ <xs:enumeration value='ps'/>
+ <xs:enumeration value='pt'/>
+ <xs:enumeration value='qu'/>
+ <xs:enumeration value='rm'/>
+ <xs:enumeration value='rn'/>
+ <xs:enumeration value='ro'/>
+ <xs:enumeration value='ro'/>
+ <xs:enumeration value='ru'/>
+ <xs:enumeration value='rw'/>
+ <xs:enumeration value='sa'/>
+ <xs:enumeration value='sc'/>
+ <xs:enumeration value='sd'/>
+ <xs:enumeration value='se'/>
+ <xs:enumeration value='sg'/>
+ <xs:enumeration value='si'/>
+ <xs:enumeration value='sk'/>
+ <xs:enumeration value='sk'/>
+ <xs:enumeration value='sl'/>
+ <xs:enumeration value='sm'/>
+ <xs:enumeration value='sn'/>
+ <xs:enumeration value='so'/>
+ <xs:enumeration value='sq'/>
+ <xs:enumeration value='sq'/>
+ <xs:enumeration value='sr'/>
+ <xs:enumeration value='ss'/>
+ <xs:enumeration value='st'/>
+ <xs:enumeration value='su'/>
+ <xs:enumeration value='sv'/>
+ <xs:enumeration value='sw'/>
+ <xs:enumeration value='ta'/>
+ <xs:enumeration value='te'/>
+ <xs:enumeration value='tg'/>
+ <xs:enumeration value='th'/>
+ <xs:enumeration value='ti'/>
+ <xs:enumeration value='tk'/>
+ <xs:enumeration value='tl'/>
+ <xs:enumeration value='tn'/>
+ <xs:enumeration value='to'/>
+ <xs:enumeration value='tr'/>
+ <xs:enumeration value='ts'/>
+ <xs:enumeration value='tt'/>
+ <xs:enumeration value='tw'/>
+ <xs:enumeration value='ty'/>
+ <xs:enumeration value='ug'/>
+ <xs:enumeration value='uk'/>
+ <xs:enumeration value='ur'/>
+ <xs:enumeration value='uz'/>
+ <xs:enumeration value='ve'/>
+ <xs:enumeration value='vi'/>
+ <xs:enumeration value='vo'/>
+ <xs:enumeration value='wa'/>
+ <xs:enumeration value='wo'/>
+ <xs:enumeration value='xh'/>
+ <xs:enumeration value='yi'/>
+ <xs:enumeration value='yo'/>
+ <xs:enumeration value='za'/>
+ <xs:enumeration value='zh'/>
+ <xs:enumeration value='zh'/>
+ <xs:enumeration value='zu'/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:simpleType name='restrictAttrType'>
+ <xs:restriction base='xs:token'>
+ <!-- simplified package dependency syntax -->
+ <!-- note: 'pure' package atom is technically valid too
+ but not really meaningful -->
+ <xs:pattern
+ value="(([&lt;&gt;]=?|[=~])[A-Za-z0-9_][A-Za-z0-9+_.-]*/[A-Za-z0-9_][A-Za-z0-9+_-]*-[0-9]+(\.[0-9]+)*[a-z]?((_alpha|_beta|_pre|_rc|_p)[0-9]*)*(-r[0-9]+)?\*?)?"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- generic types -->
+ <xs:simpleType name='emailType'>
+ <xs:restriction base='xs:token'>
+ <!-- minimal safe regex -->
+ <xs:pattern value="[^@]+@[^.]+\..+"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:complexType name='tokenOnceType'>
+ <xs:simpleContent>
+ <xs:extension base="xs:token">
+ <xs:attribute name='fake-only-once'
+ fixed='there can be at most one element of this type'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+
+ <xs:simpleType name='urlType'>
+ <xs:restriction base='xs:token'>
+ <!-- TODO: something better? -->
+ <xs:pattern value="(mailto:[^@]+@[^.]+\..+|https?://.+)"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <xs:complexType name='urlOnceType'>
+ <xs:simpleContent>
+ <xs:extension base="urlType">
+ <xs:attribute name='fake-only-once'
+ fixed='there can be at most one element of this type'/>
+ </xs:extension>
+ </xs:simpleContent>
+ </xs:complexType>
+</xs:schema>
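
As a quick way to exercise this schema outside of repoman, a metadata.xml file can be
validated against cnf/metadata.xsd with a small standalone script. This is only a sketch:
it assumes the third-party lxml package is available, which is not a dependency declared
by this commit, and repoman's own metadata check may work differently (see the
-x/--xmlparse option in the man page below).

    # validate_metadata.py -- standalone sketch, not part of repoman itself
    from lxml import etree

    schema = etree.XMLSchema(etree.parse("cnf/metadata.xsd"))
    doc = etree.parse("metadata.xml")  # path to the metadata.xml being checked

    if schema.validate(doc):
        print("metadata.xml is valid against metadata.xsd")
    else:
        for error in schema.error_log:
            # one entry per violated constraint, e.g. a duplicate <maintainer>
            print("line %s: %s" % (error.line, error.message))
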
diff --git a/repoman/man/repoman.1 b/repoman/man/repoman.1
new file mode 100644
index 000000000..e2b9c668a
--- /dev/null
+++ b/repoman/man/repoman.1
@@ -0,0 +1,409 @@
+.TH "REPOMAN" "1" "Jun 2015" "Portage VERSION" "Portage"
+.SH NAME
+repoman \- Gentoo's program to enforce a minimal level of quality assurance in
+packages added to the portage tree
+.SH SYNOPSIS
+\fBrepoman\fR [\fIoption\fR] [\fImode\fR]
+.SH DESCRIPTION
+.BR "Quality is job zero."
+
+.BR repoman
+checks the quality of ebuild repositories.
+
+Note: \fBrepoman commit\fR only works \fIinside local\fR cvs, git, or
+subversion repositories.
+
+Note: Messages pertaining to specific lines may be inaccurate in the
+presence of continuation lines from use of the \fI\\\fR character in
+BASH.
+.SH OPTIONS
+.TP
+\fB-a\fR, \fB--ask\fR
+Request a confirmation before committing
+.TP
+\fB\-\-digest=<y|n>\fR
+Automatically update Manifest digests for modified files. This
+option triggers a behavior that is very similar to that enabled
+by FEATURES="digest" in \fBmake.conf\fR(5). In order to enable
+this behavior by default for repoman alone, add
+\fB\-\-digest=y\fR to the \fIREPOMAN_DEFAULT_OPTS\fR variable in
+\fBmake.conf\fR(5). The \fBmanifest\-check\fR mode will
+automatically ignore the \-\-digest option.
+
+\fBNOTE:\fR
+This option does not trigger update of digests for Manifest DIST
+entries that already exist. Replacement of existing Manifest
+DIST entries can be forced by using the \fBmanifest\fR mode
+together with the \fB\-\-force\fR option.
+.TP
+\fB-f\fR, \fB--force\fR
+Force commit to proceed, regardless of QA issues. For convenience, this option
+causes the most time consuming QA checks to be skipped. The commit message will
+include an indication that this option has been enabled, together with the
+usual portage version stamp.
+
+When used together with \fBmanifest\fR mode, \fB--force\fR causes existing
+digests to be replaced for any files that exist in ${DISTDIR}.
+Existing digests are assumed to be correct for files that would otherwise
+have to be downloaded in order to recompute digests. \fBWARNING:\fR When
+replacing existing digests, it is the user's responsibility to ensure that
+files contained in ${DISTDIR} have the correct identities. Especially beware
+of partially downloaded files.
+.TP
+\fB-S\fR, \fB--straight-to-stable\fR
+Allow committing straight to stable
+.TP
+\fB-q\fR, \fB--quiet\fR
+Be less verbose about extraneous info
+.TP
+\fB-p\fR, \fB--pretend\fR
+Don't commit or fix anything; just show what would be done
+.TP
+\fB-x\fR, \fB--xmlparse\fR
+Forces the metadata.xml parse check to be carried out
+.TP
+\fB-v\fR, \fB--verbose\fR
+Displays every package name while checking
+.TP
+\fB\-\-echangelog=<y|n|force>\fR
+For commit mode, call echangelog if ChangeLog is unmodified (or
+regardless of modification if 'force' is specified). This option
+can be enabled by default for a particular repository by setting
+"update\-changelog = true" in metadata/layout.conf (see
+\fBportage\fR(5)).
+.TP
+\fB\-\-experimental\-inherit=<y|n>\fR
+Enable experimental inherit.missing checks which may misbehave when the
+internal eclass database becomes outdated.
+.TP
+\fB\-\-if\-modified=<y|n>\fR
+Only check packages that have uncommitted modifications
+.TP
+\fB\-i\fR, \fB\-\-ignore\-arches\fR
+Ignore arch-specific failures (where arch != host)
+.TP
+\fB\-\-ignore\-default\-opts\fR
+Do not use the \fIREPOMAN_DEFAULT_OPTS\fR environment variable.
+.TP
+\fB\-I\fR, \fB\-\-ignore\-masked\fR
+Ignore masked packages (not allowed with commit mode)
+.TP
+.BR "\-\-include\-arches " ARCHES
+A space separated list of arches used to filter the selection of
+profiles for dependency checks.
+.TP
+\fB\-d\fR, \fB\-\-include\-dev\fR
+Include dev profiles in dependency checks.
+.TP
+\fB\-e <y|n>\fR, \fB\-\-include\-exp\-profiles=<y|n>\fR
+Include exp profiles in dependency checks.
+.TP
+\fB\-\-unmatched\-removal\fR
+Enable strict checking of package.mask and package.unmask files for
+unmatched removal atoms.
+.TP
+\fB\-\-without\-mask\fR
+Behave as if no package.mask entries exist (not allowed with commit mode)
+.TP
+\fB-m\fR, \fB--commitmsg\fR
+Adds a commit message via the command line
+.TP
+\fB-M\fR, \fB--commitmsgfile\fR
+Adds a commit message from the specified file
+.TP
+\fB-V\fR, \fB--version\fR
+Show version info
+.TP
+\fB-h\fR, \fB--help\fR
+Show this screen
+.SH MODES
+.TP
+.B full
+Scan directory tree for QA issues (full listing)
+.TP
+.B help
+Show this screen
+.TP
+.B scan
+Scan directory tree for QA issues (short listing)
+.TP
+.B fix
+Fix simple QA issues (stray digests, missing digests)
+.TP
+.B manifest
+Generate a Manifest (fetches distfiles if necessary). See the \fB\-\-force\fR
+option if you would like to replace existing distfiles digests.
+.TP
+.B manifest-check
+Check Manifests for missing or incorrect digests
+.TP
+.B commit
+Scan directory tree for QA issues; if OK, commit via VCS
+.TP
+.B ci
+Synonym for commit
+.SH QA KEYWORDS
+.TP
+.B CVS/Entries.IO_error
+Attempting to commit, and an IO error was encountered while accessing the Entries file
+.TP
+.B DESCRIPTION.missing
+Ebuilds that have a missing or empty DESCRIPTION variable
+.TP
+.B EAPI.definition
+EAPI definition does not conform to PMS section 7.3.1 (first
+non\-comment, non\-blank line). See bug #402167.
+.TP
+.B EAPI.deprecated
+Ebuilds that use features that are deprecated in the current EAPI
+.TP
+.B EAPI.incompatible
+Ebuilds that use features that are only available with a different EAPI
+.TP
+.B EAPI.unsupported
+Ebuilds that have an unsupported EAPI version (you must upgrade portage)
+.TP
+.B HOMEPAGE.missing
+Ebuilds that have a missing or empty HOMEPAGE variable
+.TP
+.B HOMEPAGE.virtual
+Virtuals that have a non-empty HOMEPAGE variable
+.TP
+.B IUSE.invalid
+This ebuild has a variable in IUSE that is not in the use.desc or its
+metadata.xml file
+.TP
+.B IUSE.missing
+This ebuild has a USE conditional which references a flag that is not listed in
+IUSE
+.TP
+.B KEYWORDS.dropped
+Ebuilds that appear to have dropped KEYWORDS for some arch
+.TP
+.B KEYWORDS.invalid
+This ebuild contains KEYWORDS that are not listed in profiles/arch.list or for
+which no valid profile was found
+.TP
+.B KEYWORDS.missing
+Ebuilds that have a missing or empty KEYWORDS variable
+.TP
+.B KEYWORDS.stable
+Ebuilds that have been added directly with stable KEYWORDS
+.TP
+.B KEYWORDS.stupid
+Ebuilds that use KEYWORDS=-* instead of package.mask
+.TP
+.B LICENSE.deprecated
+This ebuild is listing a deprecated license.
+.TP
+.B LICENSE.invalid
+This ebuild is listing a license that doesn't exist in Portage's licenses/ directory.
+.TP
+.B LICENSE.missing
+Ebuilds that have a missing or empty LICENSE variable
+.TP
+.B LICENSE.syntax
+Syntax error in LICENSE (usually an extra/missing space/parenthesis)
+.TP
+.B LICENSE.virtual
+Virtuals that have a non-empty LICENSE variable
+.TP
+.B LIVEVCS.stable
+Ebuild is a live ebuild (cvs, git, darcs, svn, etc) checkout with stable
+keywords.
+.TP
+.B LIVEVCS.unmasked
+Ebuild is a live ebuild (cvs, git, darcs, svn, etc) checkout but has keywords
+and is not masked in the global package.mask.
+.TP
+.B PDEPEND.suspect
+PDEPEND contains a package that usually only belongs in DEPEND
+.TP
+.B PROVIDE.syntax
+Syntax error in PROVIDE (usually an extra/missing space/parenthesis)
+.TP
+.B RDEPEND.implicit
+RDEPEND is unset in the ebuild which triggers implicit RDEPEND=$DEPEND
+assignment (prior to EAPI 4)
+.TP
+.B RDEPEND.suspect
+RDEPEND contains a package that usually only belongs in DEPEND
+.TP
+.B PROPERTIES.syntax
+Syntax error in PROPERTIES (usually an extra/missing space/parenthesis)
+.TP
+.B RESTRICT.syntax
+Syntax error in RESTRICT (usually an extra/missing space/parenthesis)
+.TP
+.B SLOT.invalid
+Ebuilds that have a missing or invalid SLOT variable value
+.TP
+.B SRC_URI.mirror
+A uri listed in profiles/thirdpartymirrors is found in SRC_URI
+.TP
+.B changelog.ebuildadded
+An ebuild was added but the ChangeLog was not modified
+.TP
+.B changelog.missing
+Missing ChangeLog files
+.TP
+.B changelog.notadded
+ChangeLogs that exist but have not been added to cvs
+.TP
+.B dependency.bad
+User-visible ebuilds with unsatisfied dependencies (matched against *visible*
+ebuilds)
+.TP
+.B dependency.badindev
+User-visible ebuilds with unsatisfied dependencies (matched against *visible*
+ebuilds) in developing arch
+.TP
+.B dependency.badmasked
+Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds)
+.TP
+.B dependency.badmaskedindev
+Masked ebuilds with unsatisfied dependencies (matched against *all* ebuilds) in
+developing arch
+.TP
+.B dependency.badtilde
+Uses the ~ dep operator with a non-zero revision part, which is useless (the
+revision is ignored)
+.TP
+.B dependency.syntax
+Syntax error in dependency string (usually an extra/missing space/parenthesis)
+.TP
+.B dependency.unknown
+Ebuild has a dependency that refers to an unknown package (which may be
+valid if it is a blocker for a renamed/removed package, or is an
+alternative choice provided by an overlay)
+.TP
+.B digest.assumed
+Existing digest must be assumed correct (Package level only)
+.TP
+.B digest.missing
+Some files listed in SRC_URI aren't referenced in the Manifest
+.TP
+.B digest.unused
+Some files listed in the Manifest aren't referenced in SRC_URI
+.TP
+.B ebuild.badheader
+This ebuild has a malformed header
+.TP
+.B ebuild.invalidname
+Ebuild files with a non-parseable or syntactically incorrect name (or using 2.1
+versioning extensions)
+.TP
+.B ebuild.majorsyn
+This ebuild has a major syntax error that may cause the ebuild to fail
+partially or fully
+.TP
+.B ebuild.minorsyn
+This ebuild has a minor syntax error that contravenes Gentoo coding style
+.TP
+.B ebuild.namenomatch
+Ebuild files that do not have the same name as their parent directory
+.TP
+.B ebuild.nesteddie
+Placing 'die' inside ( ) prints an error, but doesn't stop the ebuild.
+.TP
+.B ebuild.notadded
+Ebuilds that exist but have not been added to cvs
+.TP
+.B ebuild.output
+A simple sourcing of the ebuild produces output; this breaks ebuild policy.
+.TP
+.B ebuild.patches
+PATCHES variable should be a bash array to ensure white space safety
+.TP
+.B ebuild.syntax
+Error generating cache entry for ebuild; typically caused by ebuild syntax
+error or digest verification failure.
+.TP
+.B file.UTF8
+File is not UTF8 compliant
+.TP
+.B file.executable
+Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need the
+executable bit
+.TP
+.B file.name
+File/dir name must be composed of only the following chars: a-zA-Z0-9._-+:
+.TP
+.B file.size
+Files in the files directory must be under 20k
+.TP
+.B inherit.missing
+Ebuild uses functions from an eclass but does not inherit it
+.TP
+.B inherit.unused
+Ebuild inherits an eclass but does not use it
+.TP
+.B inherit.deprecated
+Ebuild inherits a deprecated eclass
+.TP
+.B java.eclassesnotused
+With virtual/jdk in DEPEND you must inherit a java eclass. Refer to
+\fIhttps://wiki.gentoo.org/wiki/Project:Java\fR for more information.
+.TP
+.B manifest.bad
+Manifest has missing or incorrect digests
+.TP
+.B metadata.bad
+Bad metadata.xml files
+.TP
+.B metadata.missing
+Missing metadata.xml files
+.TP
+.B metadata.warning
+Warnings in metadata.xml files
+.TP
+.B repo.eapi.banned
+The ebuild uses an EAPI which is banned by the repository's
+metadata/layout.conf settings.
+.TP
+.B repo.eapi.deprecated
+The ebuild uses an EAPI which is deprecated by the repository's
+metadata/layout.conf settings.
+.TP
+.B IUSE.rubydeprecated
+The ebuild sets a ruby interpreter in USE_RUBY that is no longer available as a ruby target
+.TP
+.B portage.internal
+The ebuild uses an internal Portage function or variable
+.TP
+.B upstream.workaround
+The ebuild works around an upstream bug; an upstream bug report should be filed
+and tracked at bugs.gentoo.org
+.TP
+.B usage.obsolete
+The ebuild makes use of an obsolete construct
+.TP
+.B variable.invalidchar
+A variable contains an invalid character that is not part of the ASCII
+character set.
+.TP
+.B variable.readonly
+Assigning a readonly variable
+.TP
+.B variable.usedwithhelpers
+Ebuild uses D, ROOT, ED, EROOT or EPREFIX with helpers
+.TP
+.B virtual.oldstyle
+The ebuild PROVIDEs an old-style virtual (see GLEP 37). This is an error
+unless "allow\-provide\-virtuals = true" is set in metadata/layout.conf.
+.TP
+.B virtual.suspect
+Ebuild contains a package that usually should be pulled via virtual/,
+not directly.
+.TP
+.B wxwidgets.eclassnotused
+Ebuild DEPENDs on x11-libs/wxGTK without inheriting wxwidgets.eclass. Refer to
+bug #305469 for more information.
+.SH "REPORTING BUGS"
+Please report bugs via http://bugs.gentoo.org/
+.SH AUTHORS
+.nf
+Daniel Robbins <drobbins@gentoo.org>
+Saleem Abdulrasool <compnerd@gentoo.org>
+.fi
+.SH "SEE ALSO"
+.BR emerge (1)
diff --git a/repoman/pym/repoman/__init__.py b/repoman/pym/repoman/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/__init__.py
diff --git a/repoman/pym/repoman/_portage.py b/repoman/pym/repoman/_portage.py
new file mode 100644
index 000000000..0f611f761
--- /dev/null
+++ b/repoman/pym/repoman/_portage.py
@@ -0,0 +1,25 @@
+
+'''repoman/_portage.py
+Central location for the portage import.
+There were problems when portage was imported by submodules,
+because the portage instance was somehow different from the
+initial portage import in main.py. The later portage imports
+did not contain the repo it was working on. That repo was my cvs tree
+and was not listed in those subsequent portage imports.
+
+All modules should import portage from this one:
+
+from repoman._portage import portage
+
+Then continue to import the remaining portage modules needed.
+'''
+
+import sys
+
+from os import path as osp
+pym_path = osp.join(osp.dirname(osp.dirname(osp.realpath(__file__))))
+sys.path.insert(0, pym_path)
+
+import portage
+portage._internal_caller = True
+portage._disable_legacy_globals()
diff --git a/repoman/pym/repoman/_subprocess.py b/repoman/pym/repoman/_subprocess.py
new file mode 100644
index 000000000..dcdc985fe
--- /dev/null
+++ b/repoman/pym/repoman/_subprocess.py
@@ -0,0 +1,83 @@
+# -*- coding:utf-8 -*-
+
+
+import codecs
+import subprocess
+import sys
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage import os
+from portage.process import find_binary
+from portage import _encodings, _unicode_encode
+
+
+def repoman_getstatusoutput(cmd):
+ """
+ Implements an interface similar to getstatusoutput(), but with
+ customized unicode handling (see bug #310789) and without the shell.
+ """
+ args = portage.util.shlex_split(cmd)
+
+ if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
+ not os.path.isabs(args[0]):
+ # Python 3.1 _execvp throws TypeError for non-absolute executable
+ # path passed as bytes (see http://bugs.python.org/issue8513).
+ fullname = find_binary(args[0])
+ if fullname is None:
+ raise portage.exception.CommandNotFound(args[0])
+ args[0] = fullname
+
+ encoding = _encodings['fs']
+ args = [
+ _unicode_encode(x, encoding=encoding, errors='strict') for x in args]
+ proc = subprocess.Popen(
+ args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ output = portage._unicode_decode(
+ proc.communicate()[0], encoding=encoding, errors='strict')
+ if output and output[-1] == "\n":
+ # getstatusoutput strips one newline
+ output = output[:-1]
+ return (proc.wait(), output)
+
+
+class repoman_popen(portage.proxy.objectproxy.ObjectProxy):
+ """
+ Implements an interface similar to os.popen(), but with customized
+ unicode handling (see bug #310789) and without the shell.
+ """
+
+ __slots__ = ('_proc', '_stdout')
+
+ def __init__(self, cmd):
+ args = portage.util.shlex_split(cmd)
+
+ if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
+ not os.path.isabs(args[0]):
+ # Python 3.1 _execvp throws TypeError for non-absolute executable
+ # path passed as bytes (see http://bugs.python.org/issue8513).
+ fullname = find_binary(args[0])
+ if fullname is None:
+ raise portage.exception.CommandNotFound(args[0])
+ args[0] = fullname
+
+ encoding = _encodings['fs']
+ args = [
+ _unicode_encode(x, encoding=encoding, errors='strict')
+ for x in args]
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ object.__setattr__(
+ self, '_proc', proc)
+ object.__setattr__(
+ self, '_stdout', codecs.getreader(encoding)(proc.stdout, 'strict'))
+
+ def _get_target(self):
+ return object.__getattribute__(self, '_stdout')
+
+ __enter__ = _get_target
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ proc = object.__getattribute__(self, '_proc')
+ proc.wait()
+ proc.stdout.close()
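
A usage sketch for the two helpers above (the command strings are illustrative; the real
callers are the VCS modules added elsewhere in this commit, which pass actual cvs/git/svn
commands):

    from repoman._subprocess import repoman_getstatusoutput, repoman_popen

    # getstatusoutput-style: returns (exit status, decoded output)
    retval, output = repoman_getstatusoutput("git --version")
    if retval == 0:
        print(output)

    # popen-style: a context manager yielding the command's decoded stdout
    with repoman_popen("git ls-files") as f:
        tracked_files = f.read().splitlines()
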
diff --git a/repoman/pym/repoman/actions.py b/repoman/pym/repoman/actions.py
new file mode 100644
index 000000000..4144b4570
--- /dev/null
+++ b/repoman/pym/repoman/actions.py
@@ -0,0 +1,538 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import errno
+import io
+import logging
+import platform
+import signal
+import sys
+import tempfile
+import time
+from itertools import chain
+
+from _emerge.UserQuery import UserQuery
+
+from repoman._portage import portage
+from portage import os
+from portage import _encodings
+from portage import _unicode_encode
+from portage.output import (
+ bold, create_color_func, green, red)
+from portage.package.ebuild.digestgen import digestgen
+from portage.util import writemsg_level
+
+from repoman.copyrights import update_copyright
+from repoman.gpg import gpgsign, need_signature
+from repoman import utilities
+from repoman.modules.vcs.vcs import vcs_files_to_cps
+
+bad = create_color_func("BAD")
+
+
+class Actions(object):
+ '''Handles post check result output and performs
+ the various vcs activities for committing the results'''
+
+ def __init__(self, repo_settings, options, scanner, vcs_settings):
+ self.repo_settings = repo_settings
+ self.options = options
+ self.scanner = scanner
+ self.vcs_settings = vcs_settings
+ self.repoman_settings = repo_settings.repoman_settings
+ self.suggest = {
+ 'ignore_masked': False,
+ 'include_dev': False,
+ }
+ if scanner.have['pmasked'] and not (options.without_mask or options.ignore_masked):
+ self.suggest['ignore_masked'] = True
+ if scanner.have['dev_keywords'] and not options.include_dev:
+ self.suggest['include_dev'] = True
+
+
+ def inform(self, can_force, result):
+ '''Inform the user of all the problems found'''
+ if ((self.suggest['ignore_masked'] or self.suggest['include_dev'])
+ and not self.options.quiet):
+ self._suggest()
+ if self.options.mode != 'commit':
+ self._non_commit(result)
+ return False
+ else:
+ self._fail(result, can_force)
+ if self.options.pretend:
+ utilities.repoman_sez(
+ "\"So, you want to play it safe. Good call.\"\n")
+ return True
+
+
+ def perform(self, qa_output):
+ myautoadd = self._vcs_autoadd()
+
+ self._vcs_deleted()
+
+ changes = self.get_vcs_changed()
+
+ mynew, mychanged, myremoved, no_expansion, expansion = changes
+
+ # Manifests need to be regenerated after all other commits, so don't commit
+ # them now even if they have changed.
+ mymanifests = set()
+ myupdates = set()
+ for f in mychanged + mynew:
+ if "Manifest" == os.path.basename(f):
+ mymanifests.add(f)
+ else:
+ myupdates.add(f)
+ myupdates.difference_update(myremoved)
+ myupdates = list(myupdates)
+ mymanifests = list(mymanifests)
+ myheaders = []
+
+ commitmessage = self.options.commitmsg
+ if self.options.commitmsgfile:
+ try:
+ f = io.open(
+ _unicode_encode(
+ self.options.commitmsgfile,
+ encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['content'], errors='replace')
+ commitmessage = f.read()
+ f.close()
+ del f
+ except (IOError, OSError) as e:
+ if e.errno == errno.ENOENT:
+ portage.writemsg(
+ "!!! File Not Found:"
+ " --commitmsgfile='%s'\n" % self.options.commitmsgfile)
+ else:
+ raise
+ if not commitmessage or not commitmessage.strip():
+ commitmessage = self.get_new_commit_message(qa_output)
+
+ commitmessage = commitmessage.rstrip()
+
+ # Update copyright for new and changed files
+ year = time.strftime('%Y', time.gmtime())
+ for fn in chain(mynew, mychanged):
+ if fn.endswith('.diff') or fn.endswith('.patch'):
+ continue
+ update_copyright(fn, year, pretend=self.options.pretend)
+
+ myupdates, broken_changelog_manifests = self.changelogs(
+ myupdates, mymanifests, myremoved, mychanged, myautoadd,
+ mynew, commitmessage)
+
+ commit_footer = self.get_commit_footer()
+ commitmessage += commit_footer
+
+ print("* %s files being committed..." % green(str(len(myupdates))), end=' ')
+
+ if not self.vcs_settings.needs_keyword_expansion:
+ # With some VCS types there's never any keyword expansion, so
+ # there's no need to regenerate manifests and all files will be
+ # committed in one big commit at the end.
+ logging.debug("VCS type doesn't need keyword expansion")
+ print()
+ elif not self.repo_settings.repo_config.thin_manifest:
+ logging.debug("perform: Calling thick_manifest()")
+ self.vcs_settings.changes.thick_manifest(myupdates, myheaders,
+ no_expansion, expansion)
+
+ logging.info("myupdates: %s", myupdates)
+ logging.info("myheaders: %s", myheaders)
+
+ uq = UserQuery(self.options)
+ if self.options.ask and uq.query('Commit changes?', True) != 'Yes':
+ print("* aborting commit.")
+ sys.exit(128 + signal.SIGINT)
+
+ # Handle the case where committed files have keywords which
+ # will change and need a priming commit before the Manifest
+ # can be committed.
+ if (myupdates or myremoved) and myheaders:
+ self.priming_commit(myupdates, myremoved, commitmessage)
+
+ # When files are removed and re-added, the cvs server will put /Attic/
+ # inside the $Header path. This code detects the problem and corrects it
+ # so that the Manifest will generate correctly. See bug #169500.
+ # Use binary mode in order to avoid potential character encoding issues.
+ self.vcs_settings.changes.clear_attic(myheaders)
+
+ if self.scanner.repolevel == 1:
+ utilities.repoman_sez(
+ "\"You're rather crazy... "
+ "doing the entire repository.\"\n")
+
+ self.vcs_settings.changes.digest_regen(myupdates, myremoved, mymanifests,
+ self.scanner, broken_changelog_manifests)
+
+ if self.repo_settings.sign_manifests:
+ self.sign_manifest(myupdates, myremoved, mymanifests)
+
+ self.vcs_settings.changes.update_index(mymanifests, myupdates)
+
+ self.add_manifest(mymanifests, myheaders, myupdates, myremoved, commitmessage)
+
+ if self.options.quiet:
+ return
+ print()
+ if self.vcs_settings.vcs:
+ print("Commit complete.")
+ else:
+ print(
+ "repoman was too scared"
+ " by not seeing any familiar version control file"
+ " that he forgot to commit anything")
+ utilities.repoman_sez(
+ "\"If everyone were like you, I'd be out of business!\"\n")
+ return
+
+
+ def _suggest(self):
+ print()
+ if self.suggest['ignore_masked']:
+ print(bold(
+ "Note: use --without-mask to check "
+ "KEYWORDS on dependencies of masked packages"))
+
+ if self.suggest['include_dev']:
+ print(bold(
+ "Note: use --include-dev (-d) to check "
+ "dependencies for 'dev' profiles"))
+ print()
+
+
+ def _non_commit(self, result):
+ if result['full']:
+ print(bold("Note: type \"repoman full\" for a complete listing."))
+ if result['warn'] and not result['fail']:
+ if self.options.quiet:
+ print(bold("Non-Fatal QA errors found"))
+ else:
+ utilities.repoman_sez(
+ "\"You're only giving me a partial QA payment?\n"
+ " I'll take it this time, but I'm not happy.\""
+ )
+ elif not result['fail']:
+ if self.options.quiet:
+ print("No QA issues found")
+ else:
+ utilities.repoman_sez(
+ "\"If everyone were like you, I'd be out of business!\"")
+ elif result['fail']:
+ print(bad("Please fix these important QA issues first."))
+ if not self.options.quiet:
+ utilities.repoman_sez(
+ "\"Make your QA payment on time"
+ " and you'll never see the likes of me.\"\n")
+ sys.exit(1)
+
+
+ def _fail(self, result, can_force):
+ if result['fail'] and can_force and self.options.force and not self.options.pretend:
+ utilities.repoman_sez(
+ " \"You want to commit even with these QA issues?\n"
+ " I'll take it this time, but I'm not happy.\"\n")
+ elif result['fail']:
+ if self.options.force and not can_force:
+ print(bad(
+ "The --force option has been disabled"
+ " due to extraordinary issues."))
+ print(bad("Please fix these important QA issues first."))
+ utilities.repoman_sez(
+ "\"Make your QA payment on time"
+ " and you'll never see the likes of me.\"\n")
+ sys.exit(1)
+
+
+ def _vcs_autoadd(self):
+ myunadded = self.vcs_settings.changes.unadded
+ myautoadd = []
+ if myunadded:
+ for x in range(len(myunadded) - 1, -1, -1):
+ xs = myunadded[x].split("/")
+ if self.repo_settings.repo_config.find_invalid_path_char(myunadded[x]) != -1:
+ # The Manifest excludes this file,
+ # so it's safe to ignore.
+ del myunadded[x]
+ elif xs[-1] == "files":
+ print("!!! files dir is not added! Please correct this.")
+ sys.exit(-1)
+ elif xs[-1] == "Manifest":
+ # It's a manifest... auto add
+ myautoadd += [myunadded[x]]
+ del myunadded[x]
+
+ if myunadded:
+ print(red(
+ "!!! The following files are in your local tree"
+ " but are not added to the master"))
+ print(red(
+ "!!! tree. Please remove them from the local tree"
+ " or add them to the master tree."))
+ for x in myunadded:
+ print(" ", x)
+ print()
+ print()
+ sys.exit(1)
+ return myautoadd
+
+
+ def _vcs_deleted(self):
+ if self.vcs_settings.changes.has_deleted:
+ print(red(
+ "!!! The following files are removed manually"
+ " from your local tree but are not"))
+ print(red(
+ "!!! removed from the repository."
+ " Please remove them, using \"%s remove [FILES]\"."
+ % self.vcs_settings.vcs))
+ for x in self.vcs_settings.changes.deleted:
+ print(" ", x)
+ print()
+ print()
+ sys.exit(1)
+
+
+ def get_vcs_changed(self):
+ '''Holding function which calls the appropriate VCS module for the data'''
+ changes = self.vcs_settings.changes
+ # re-run the scan to pick up a newly modified Manifest file
+ logging.debug("RE-scanning for changes...")
+ changes.scan()
+
+ if not changes.has_changes:
+ utilities.repoman_sez(
+ "\"Doing nothing is not always good for QA.\"")
+ print()
+ print("(Didn't find any changed files...)")
+ print()
+ sys.exit(1)
+ return (changes.new, changes.changed, changes.removed,
+ changes.no_expansion, changes.expansion)
+
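+ # Illustrative example (placeholder values only: version, key id and name are
+ # hypothetical) of the git-style footer assembled by get_commit_footer() below:
+ #
+ #   Package-Manager: portage-2.3.0
+ #   RepoMan-Options: --force --include-arches="amd64"
+ #   Manifest-Sign-Key: 0x1234ABCD
+ #   Signed-off-by: Larry the Cow <larry@example.org>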
+ def get_commit_footer(self):
+ portage_version = getattr(portage, "VERSION", None)
+ gpg_key = self.repoman_settings.get("PORTAGE_GPG_KEY", "")
+ dco_sob = self.repoman_settings.get("DCO_SIGNED_OFF_BY", "")
+ report_options = []
+ if self.options.force:
+ report_options.append("--force")
+ if self.options.ignore_arches:
+ report_options.append("--ignore-arches")
+ if self.scanner.include_arches is not None:
+ report_options.append(
+ "--include-arches=\"%s\"" %
+ " ".join(sorted(self.scanner.include_arches)))
+
+ if portage_version is None:
+ sys.stderr.write("Failed to insert portage version in message!\n")
+ sys.stderr.flush()
+ portage_version = "Unknown"
+ # Use new footer only for git (see bug #438364).
+ if self.vcs_settings.vcs in ["git"]:
+ commit_footer = "\n\nPackage-Manager: portage-%s" % portage_version
+ if report_options:
+ commit_footer += "\nRepoMan-Options: " + " ".join(report_options)
+ if self.repo_settings.sign_manifests:
+ commit_footer += "\nManifest-Sign-Key: %s" % (gpg_key, )
+ if dco_sob:
+ commit_footer += "\nSigned-off-by: %s" % (dco_sob, )
+ else:
+ unameout = platform.system() + " "
+ if platform.system() in ["Darwin", "SunOS"]:
+ unameout += platform.processor()
+ else:
+ unameout += platform.machine()
+ commit_footer = "\n\n"
+ if dco_sob:
+ commit_footer += "Signed-off-by: %s\n" % (dco_sob, )
+ commit_footer += "(Portage version: %s/%s/%s" % \
+ (portage_version, self.vcs_settings.vcs, unameout)
+ if report_options:
+ commit_footer += ", RepoMan options: " + " ".join(report_options)
+ if self.repo_settings.sign_manifests:
+ commit_footer += ", signed Manifest commit with key %s" % \
+ (gpg_key, )
+ else:
+ commit_footer += ", unsigned Manifest commit"
+ commit_footer += ")"
+ return commit_footer
+
+
+ def changelogs(self, myupdates, mymanifests, myremoved, mychanged, myautoadd,
+ mynew, changelog_msg):
+ broken_changelog_manifests = []
+ if self.options.echangelog in ('y', 'force'):
+ logging.info("checking for unmodified ChangeLog files")
+ committer_name = utilities.get_committer_name(env=self.repoman_settings)
+ for x in sorted(vcs_files_to_cps(
+ chain(myupdates, mymanifests, myremoved),
+ self.repo_settings.repodir,
+ self.scanner.repolevel, self.scanner.reposplit, self.scanner.categories)):
+ catdir, pkgdir = x.split("/")
+ checkdir = self.repo_settings.repodir + "/" + x
+ checkdir_relative = ""
+ if self.scanner.repolevel < 3:
+ checkdir_relative = os.path.join(pkgdir, checkdir_relative)
+ if self.scanner.repolevel < 2:
+ checkdir_relative = os.path.join(catdir, checkdir_relative)
+ checkdir_relative = os.path.join(".", checkdir_relative)
+
+ changelog_path = os.path.join(checkdir_relative, "ChangeLog")
+ changelog_modified = changelog_path in self.scanner.changed.changelogs
+ if changelog_modified and self.options.echangelog != 'force':
+ continue
+
+ # get changes for this package
+ cdrlen = len(checkdir_relative)
+ check_relative = lambda e: e.startswith(checkdir_relative)
+ split_relative = lambda e: e[cdrlen:]
+ clnew = list(map(split_relative, filter(check_relative, mynew)))
+ clremoved = list(map(split_relative, filter(check_relative, myremoved)))
+ clchanged = list(map(split_relative, filter(check_relative, mychanged)))
+
+ # Skip ChangeLog generation if only the Manifest was modified,
+ # as discussed in bug #398009.
+ nontrivial_cl_files = set()
+ nontrivial_cl_files.update(clnew, clremoved, clchanged)
+ nontrivial_cl_files.difference_update(['Manifest'])
+ if not nontrivial_cl_files and self.options.echangelog != 'force':
+ continue
+
+ new_changelog = utilities.UpdateChangeLog(
+ checkdir_relative, committer_name, changelog_msg,
+ os.path.join(self.repo_settings.repodir, 'skel.ChangeLog'),
+ catdir, pkgdir,
+ new=clnew, removed=clremoved, changed=clchanged,
+ pretend=self.options.pretend)
+ if new_changelog is None:
+ writemsg_level(
+ "!!! Updating the ChangeLog failed\n",
+ level=logging.ERROR, noiselevel=-1)
+ sys.exit(1)
+
+ # if the ChangeLog was just created, add it to vcs
+ if new_changelog:
+ myautoadd.append(changelog_path)
+ # myautoadd is appended to myupdates below
+ else:
+ myupdates.append(changelog_path)
+
+ if self.options.ask and not self.options.pretend:
+ # regenerate Manifest for modified ChangeLog (bug #420735)
+ self.repoman_settings["O"] = checkdir
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
+ else:
+ broken_changelog_manifests.append(x)
+
+ if myautoadd:
+ print(">>> Auto-Adding missing Manifest/ChangeLog file(s)...")
+ self.vcs_settings.changes.add_items(myautoadd)
+ myupdates += myautoadd
+ return myupdates, broken_changelog_manifests
+
+
+ def add_manifest(self, mymanifests, myheaders, myupdates, myremoved,
+ commitmessage):
+ myfiles = mymanifests[:]
+ # If there are no header (SVN/CVS keywords) changes in
+ # the files, this Manifest commit must include the
+ # other (yet uncommitted) files.
+ if not myheaders:
+ myfiles += myupdates
+ myfiles += myremoved
+ myfiles.sort()
+
+ fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
+ mymsg = os.fdopen(fd, "wb")
+ mymsg.write(_unicode_encode(commitmessage))
+ mymsg.close()
+
+ retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
+ # cleanup the commit message before possibly exiting
+ try:
+ os.unlink(commitmessagefile)
+ except OSError:
+ pass
+ if retval != os.EX_OK:
+ writemsg_level(
+ "!!! Exiting on %s (shell) "
+ "error code: %s\n" % (self.vcs_settings.vcs, retval),
+ level=logging.ERROR, noiselevel=-1)
+ sys.exit(retval)
+
+
+ def priming_commit(self, myupdates, myremoved, commitmessage):
+ myfiles = myupdates + myremoved
+ fd, commitmessagefile = tempfile.mkstemp(".repoman.msg")
+ mymsg = os.fdopen(fd, "wb")
+ mymsg.write(_unicode_encode(commitmessage))
+ mymsg.close()
+
+ separator = '-' * 78
+
+ print()
+ print(green("Using commit message:"))
+ print(green(separator))
+ print(commitmessage)
+ print(green(separator))
+ print()
+
+ # Having a leading ./ prefix on file paths can trigger a bug in
+ # the cvs server when committing files to multiple directories,
+ # so strip the prefix.
+ myfiles = [f[2:] if f.startswith("./") else f for f in myfiles]
+
+ retval = self.vcs_settings.changes.commit(myfiles, commitmessagefile)
+ # cleanup the commit message before possibly exiting
+ try:
+ os.unlink(commitmessagefile)
+ except OSError:
+ pass
+ if retval != os.EX_OK:
+ writemsg_level(
+ "!!! Exiting on %s (shell) "
+ "error code: %s\n" % (self.vcs_settings.vcs, retval),
+ level=logging.ERROR, noiselevel=-1)
+ sys.exit(retval)
+
+
+ def sign_manifest(self, myupdates, myremoved, mymanifests):
+ try:
+ for x in sorted(vcs_files_to_cps(
+ chain(myupdates, myremoved, mymanifests),
+ self.repo_settings.repodir,
+ self.scanner.repolevel, self.scanner.reposplit, self.scanner.categories)):
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ manifest_path = os.path.join(self.repoman_settings["O"], "Manifest")
+ if not need_signature(manifest_path):
+ continue
+ gpgsign(manifest_path, self.repoman_settings, self.options)
+ except portage.exception.PortageException as e:
+ portage.writemsg("!!! %s\n" % str(e))
+ portage.writemsg("!!! Disabled FEATURES='sign'\n")
+ self.repo_settings.sign_manifests = False
+
+
+ def get_new_commit_message(self, qa_output):
+ msg_prefix = ""
+ if self.scanner.repolevel > 1:
+ msg_prefix = "/".join(self.scanner.reposplit[1:]) + ": "
+
+ try:
+ editor = os.environ.get("EDITOR")
+ if editor and utilities.editor_is_executable(editor):
+ commitmessage = utilities.get_commit_message_with_editor(
+ editor, message=qa_output, prefix=msg_prefix)
+ else:
+ commitmessage = utilities.get_commit_message_with_stdin()
+ except KeyboardInterrupt:
+ logging.fatal("Interrupted; exiting...")
+ sys.exit(1)
+ if (not commitmessage or not commitmessage.strip()
+ or commitmessage.strip() == msg_prefix):
+ print("* no commit message? aborting commit.")
+ sys.exit(1)
+ return commitmessage
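+
+# Illustrative example (editor's sketch, assumed checkout location): when
+# repoman is invoked from a package directory such as app-editors/vim
+# (repolevel 3), msg_prefix above becomes "app-editors/vim: ", and an
+# empty or prefix-only message aborts the commit.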
diff --git a/repoman/pym/repoman/argparser.py b/repoman/pym/repoman/argparser.py
new file mode 100644
index 000000000..2d56a87e6
--- /dev/null
+++ b/repoman/pym/repoman/argparser.py
@@ -0,0 +1,225 @@
+# repoman: Argument parser
+# Copyright 2007-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""This module contains functions used in Repoman to parse CLI arguments."""
+
+import argparse
+import logging
+import sys
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage import _unicode_decode
+from portage import util
+
+
+def parse_args(argv, qahelp, repoman_default_opts):
+ """Use a customized optionParser to parse command line arguments for repoman
+ Args:
+ argv - a sequence of command line arguments
+ qahelp - a dict of qa warning to help message
+ Returns:
+ (opts, args), just like a call to parser.parse_args()
+ """
+
+ argv = portage._decode_argv(argv)
+
+ modes = {
+ 'commit': 'Run a scan then commit changes',
+ 'ci': 'Run a scan then commit changes',
+ 'fix': 'Fix simple QA issues (stray digests, missing digests)',
+ 'full': 'Scan directory tree and print all issues (not a summary)',
+ 'help': 'Show this screen',
+ 'manifest': 'Generate a Manifest (fetches files if necessary)',
+ 'manifest-check': 'Check Manifests for missing or incorrect digests',
+ 'scan': 'Scan directory tree for QA issues'
+ }
+
+ output_choices = {
+ 'default': 'The normal output format',
+ 'column': 'Columnar output suitable for use with grep'
+ }
+
+ mode_keys = list(modes)
+ mode_keys.sort()
+
+ output_keys = sorted(output_choices)
+
+ parser = argparse.ArgumentParser(
+ usage="repoman [options] [mode]",
+ description="Modes: %s" % " | ".join(mode_keys),
+ epilog="For more help consult the man page.")
+
+ parser.add_argument(
+ '-a', '--ask', dest='ask', action='store_true',
+ default=False,
+ help='Request a confirmation before committing')
+
+ parser.add_argument(
+ '-m', '--commitmsg', dest='commitmsg',
+ help='specify a commit message on the command line')
+
+ parser.add_argument(
+ '-M', '--commitmsgfile', dest='commitmsgfile',
+ help='specify a path to a file that contains a commit message')
+
+ parser.add_argument(
+ '--digest', choices=('y', 'n'), metavar='<y|n>',
+ help='Automatically update Manifest digests for modified files')
+
+ parser.add_argument(
+ '-p', '--pretend', dest='pretend', default=False,
+ action='store_true',
+ help='don\'t commit or fix anything; just show what would be done')
+
+ parser.add_argument(
+ '-q', '--quiet', dest="quiet", action="count",
+ default=0,
+ help='do not print unnecessary messages')
+
+ parser.add_argument(
+ '--echangelog', choices=('y', 'n', 'force'), metavar="<y|n|force>",
+ help=(
+ 'for commit mode, call echangelog if ChangeLog is unmodified (or '
+ 'regardless of modification if \'force\' is specified)'))
+
+ parser.add_argument(
+ '--experimental-inherit', choices=('y', 'n'), metavar="<y|n>",
+ default='n',
+ help=(
+ 'Enable experimental inherit.missing checks which may misbehave'
+ ' when the internal eclass database becomes outdated'))
+
+ parser.add_argument(
+ '-f', '--force', dest='force', action='store_true',
+ default=False,
+ help='Commit with QA violations')
+
+ parser.add_argument(
+ '-S', '--straight-to-stable', dest='straight_to_stable',
+ default=False, action='store_true',
+ help='Allow committing straight to stable')
+
+ parser.add_argument(
+ '--vcs', dest='vcs',
+ help='Force using specific VCS instead of autodetection')
+
+ parser.add_argument(
+ '-v', '--verbose', dest="verbosity", action='count',
+ help='be very verbose in output', default=0)
+
+ parser.add_argument(
+ '-V', '--version', dest='version', action='store_true',
+ help='show version info')
+
+ parser.add_argument(
+ '-x', '--xmlparse', dest='xml_parse', action='store_true',
+ default=False,
+ help='forces the metadata.xml parse check to be carried out')
+
+ parser.add_argument(
+ '--if-modified', choices=('y', 'n'), default='n',
+ metavar="<y|n>",
+ help='only check packages that have uncommitted modifications')
+
+ parser.add_argument(
+ '-i', '--ignore-arches', dest='ignore_arches', action='store_true',
+ default=False,
+ help='ignore arch-specific failures (where arch != host)')
+
+ parser.add_argument(
+ "--ignore-default-opts",
+ action="store_true",
+ help="do not use the REPOMAN_DEFAULT_OPTS environment variable")
+
+ parser.add_argument(
+ '-I', '--ignore-masked', dest='ignore_masked', action='store_true',
+ default=False,
+ help='ignore masked packages (not allowed with commit mode)')
+
+ parser.add_argument(
+ '--include-arches',
+ dest='include_arches', metavar='ARCHES', action='append',
+ help=(
+ 'A space separated list of arches used to '
+ 'filter the selection of profiles for dependency checks'))
+
+ parser.add_argument(
+ '-d', '--include-dev', dest='include_dev', action='store_true',
+ default=False,
+ help='include dev profiles in dependency checks')
+
+ parser.add_argument(
+ '-e', '--include-exp-profiles', choices=('y', 'n'), metavar='<y|n>',
+ default=False,
+ help='include exp profiles in dependency checks')
+
+ parser.add_argument(
+ '--unmatched-removal', dest='unmatched_removal', action='store_true',
+ default=False,
+ help=(
+ 'enable strict checking of package.mask and package.unmask files'
+ ' for unmatched removal atoms'))
+
+ parser.add_argument(
+ '--without-mask', dest='without_mask', action='store_true',
+ default=False,
+ help=(
+ 'behave as if no package.mask entries exist'
+ ' (not allowed with commit mode)'))
+
+ parser.add_argument(
+ '--output-style', dest='output_style', choices=output_keys,
+ help='select output type', default='default')
+
+ parser.add_argument(
+ '--mode', dest='mode', choices=mode_keys,
+ help='specify which mode repoman will run in (default=full)')
+
+ opts, args = parser.parse_known_args(argv[1:])
+
+ if not opts.ignore_default_opts:
+ default_opts = util.shlex_split(repoman_default_opts)
+ if default_opts:
+ opts, args = parser.parse_known_args(default_opts + argv[1:])
+
+ if opts.mode == 'help':
+ parser.print_help()
+
+ for arg in args:
+ if arg in modes:
+ if not opts.mode:
+ opts.mode = arg
+ break
+ else:
+ parser.error("invalid mode: %s" % arg)
+
+ if not opts.mode:
+ opts.mode = 'full'
+
+ if opts.mode == 'ci':
+ opts.mode = 'commit' # backwards compat shortcut
+
+ # Use verbosity and quiet options to appropriately fiddle with the loglevel
+ logger = logging.getLogger()
+ for _ in range(opts.verbosity):
+ logger.setLevel(logger.getEffectiveLevel() - 10)
+
+ for _ in range(opts.quiet):
+ logger.setLevel(logger.getEffectiveLevel() + 10)
+
+ if opts.mode == 'commit' and opts.commitmsg:
+ opts.commitmsg = _unicode_decode(opts.commitmsg)
+
+ if opts.mode == 'commit' and not (opts.force or opts.pretend):
+ if opts.ignore_masked:
+ opts.ignore_masked = False
+ logging.warning('Commit mode automatically disables --ignore-masked')
+ if opts.without_mask:
+ opts.without_mask = False
+ logging.warning('Commit mode automatically disables --without-mask')
+
+ return (opts, args)
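+
+# Illustrative example (editor's sketch, hypothetical invocation): calling
+#   parse_args(['repoman', '-q', 'ci', '-m', 'fix build'], qahelp, '')
+# would be expected to return opts.mode == 'commit' (via the 'ci' shortcut),
+# opts.quiet == 1 and opts.commitmsg == 'fix build', with any remaining
+# positional arguments in args.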
diff --git a/repoman/pym/repoman/check_missingslot.py b/repoman/pym/repoman/check_missingslot.py
new file mode 100644
index 000000000..4a3c57b2c
--- /dev/null
+++ b/repoman/pym/repoman/check_missingslot.py
@@ -0,0 +1,30 @@
+# -*- coding:utf-8 -*-
+# repoman: missing slot check
+# Copyright 2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""This module contains the check used to find missing slot values
+in dependencies."""
+
+from portage.eapi import eapi_has_slot_operator
+
+def check_missingslot(atom, mytype, eapi, portdb, qatracker, relative_path, my_aux):
+ # If no slot or slot operator is specified in RDEP...
+ if (not atom.blocker and not atom.slot and not atom.slot_operator
+ and mytype == 'RDEPEND' and eapi_has_slot_operator(eapi)):
+ # Check whether it doesn't match more than one.
+ atom_matches = portdb.xmatch("match-all", atom)
+ dep_slots = frozenset(
+ portdb.aux_get(cpv, ['SLOT'])[0].split('/')[0]
+ for cpv in atom_matches)
+
+ if len(dep_slots) > 1:
+ # See if it is a DEPEND as well. It's a very simple & dumb
+ # check but should suffice for catching it.
+ depend = my_aux['DEPEND'].split()
+ if atom not in depend:
+ return
+
+ qatracker.add_error("dependency.missingslot", relative_path +
+ ": %s: '%s' matches more than one slot, please specify an explicit slot and/or use the := or :* slot operator" %
+ (mytype, atom))
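+
+# Illustrative example (editor's sketch, hypothetical package): for an
+# EAPI 5 ebuild that lists a slotless atom such as "dev-libs/foo" in both
+# DEPEND and RDEPEND, while the tree contains dev-libs/foo ebuilds with
+# SLOT="1" and SLOT="2", the check above would report
+# dependency.missingslot and suggest an explicit slot or the := / :*
+# slot operator.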
diff --git a/repoman/pym/repoman/checks/__init__.py b/repoman/pym/repoman/checks/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/checks/__init__.py
diff --git a/repoman/pym/repoman/checks/herds/__init__.py b/repoman/pym/repoman/checks/herds/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/checks/herds/__init__.py
diff --git a/repoman/pym/repoman/checks/herds/herdbase.py b/repoman/pym/repoman/checks/herds/herdbase.py
new file mode 100644
index 000000000..d38d2e31d
--- /dev/null
+++ b/repoman/pym/repoman/checks/herds/herdbase.py
@@ -0,0 +1,135 @@
+# -*- coding: utf-8 -*-
+# repoman: Herd database analysis
+# Copyright 2010-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2 or later
+
+from __future__ import print_function, unicode_literals
+
+import errno
+import xml.etree.ElementTree
+try:
+ from xml.parsers.expat import ExpatError
+except (SystemExit, KeyboardInterrupt):
+ raise
+except (ImportError, SystemError, RuntimeError, Exception):
+ # broken or missing xml support
+ # http://bugs.python.org/issue14988
+ # This means that python is built without xml support.
+ # We tolerate global scope import failures for optional
+ # modules, so that ImportModulesTestCase can succeed (or
+ # possibly alert us about unexpected import failures).
+ pass
+
+from portage import _encodings, _unicode_encode
+from portage.exception import FileNotFound, ParseError, PermissionDenied
+from portage import os
+
+from repoman.errors import err
+
+__all__ = [
+ "make_herd_base", "get_herd_base"
+]
+
+
+def _make_email(nick_name):
+ if not nick_name.endswith('@gentoo.org'):
+ nick_name = nick_name + '@gentoo.org'
+ return nick_name
+
+
+class HerdBase(object):
+ def __init__(self, herd_to_emails, all_emails):
+ self.herd_to_emails = herd_to_emails
+ self.all_emails = all_emails
+
+ def known_herd(self, herd_name):
+ return herd_name in self.herd_to_emails
+
+ def known_maintainer(self, nick_name):
+ return _make_email(nick_name) in self.all_emails
+
+ def maintainer_in_herd(self, nick_name, herd_name):
+ return _make_email(nick_name) in self.herd_to_emails[herd_name]
+
+
+class _HerdsTreeBuilder(xml.etree.ElementTree.TreeBuilder):
+ """
+ Implements doctype() as required to avoid deprecation warnings with
+ >=python-2.7.
+ """
+ def doctype(self, name, pubid, system):
+ pass
+
+
+def make_herd_base(filename):
+ herd_to_emails = dict()
+ all_emails = set()
+
+ try:
+ xml_tree = xml.etree.ElementTree.parse(
+ _unicode_encode(
+ filename, encoding=_encodings['fs'], errors='strict'),
+ parser=xml.etree.ElementTree.XMLParser(
+ target=_HerdsTreeBuilder()))
+ except ExpatError as e:
+ raise ParseError("metadata.xml: %s" % (e,))
+ except EnvironmentError as e:
+ func_call = "open('%s')" % filename
+ if e.errno == errno.EACCES:
+ raise PermissionDenied(func_call)
+ elif e.errno == errno.ENOENT:
+ raise FileNotFound(filename)
+ raise
+
+ herds = xml_tree.findall('herd')
+ for h in herds:
+ _herd_name = h.find('name')
+ if _herd_name is None:
+ continue
+ herd_name = _herd_name.text.strip()
+ del _herd_name
+
+ maintainers = h.findall('maintainer')
+ herd_emails = set()
+ for m in maintainers:
+ _m_email = m.find('email')
+ if _m_email is None:
+ continue
+ m_email = _m_email.text.strip()
+
+ herd_emails.add(m_email)
+ all_emails.add(m_email)
+
+ herd_to_emails[herd_name] = herd_emails
+
+ return HerdBase(herd_to_emails, all_emails)
+
+
+def get_herd_base(repoman_settings):
+ try:
+ herd_base = make_herd_base(
+ os.path.join(repoman_settings["PORTDIR"], "metadata/herds.xml"))
+ except (EnvironmentError, ParseError, PermissionDenied) as e:
+ err(str(e))
+ except FileNotFound:
+ # TODO: Download as we do for metadata.dtd, but add a way to
+ # disable for non-gentoo repoman users who may not have herds.
+ herd_base = None
+ return herd_base
+
+
+if __name__ == '__main__':
+ h = make_herd_base('/usr/portage/metadata/herds.xml')
+
+ assert(h.known_herd('sound'))
+ assert(not h.known_herd('media-sound'))
+
+ assert(h.known_maintainer('sping'))
+ assert(h.known_maintainer('sping@gentoo.org'))
+ assert(not h.known_maintainer('portage'))
+
+ assert(h.maintainer_in_herd('zmedico@gentoo.org', 'tools-portage'))
+ assert(not h.maintainer_in_herd('pva@gentoo.org', 'tools-portage'))
+
+ import pprint
+ pprint.pprint(h.herd_to_emails)
diff --git a/repoman/pym/repoman/checks/herds/metadata.py b/repoman/pym/repoman/checks/herds/metadata.py
new file mode 100644
index 000000000..b4a433ed7
--- /dev/null
+++ b/repoman/pym/repoman/checks/herds/metadata.py
@@ -0,0 +1,26 @@
+# -*- coding:utf-8 -*-
+
+
+class UnknownHerdsError(ValueError):
+ def __init__(self, herd_names):
+ _plural = len(herd_names) != 1
+ super(UnknownHerdsError, self).__init__(
+ 'Unknown %s %s' % (
+ _plural and 'herds' or 'herd',
+ ','.join('"%s"' % e for e in herd_names)))
+
+
+def check_metadata_herds(xml_tree, herd_base):
+ herd_nodes = xml_tree.findall('herd')
+ unknown_herds = [
+ name for name in (
+ e.text.strip() for e in herd_nodes if e.text is not None)
+ if not herd_base.known_herd(name)]
+
+ if unknown_herds:
+ raise UnknownHerdsError(unknown_herds)
+
+
+def check_metadata(xml_tree, herd_base):
+ if herd_base is not None:
+ check_metadata_herds(xml_tree, herd_base)
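+
+# Illustrative example (editor's sketch, hypothetical herd name): a
+# metadata.xml containing <herd>no-such-herd</herd>, checked against a
+# herd_base that does not know it, would raise
+#   UnknownHerdsError: Unknown herd "no-such-herd"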
diff --git a/repoman/pym/repoman/copyrights.py b/repoman/pym/repoman/copyrights.py
new file mode 100644
index 000000000..761309af6
--- /dev/null
+++ b/repoman/pym/repoman/copyrights.py
@@ -0,0 +1,120 @@
+# -*- coding:utf-8 -*-
+
+
+import difflib
+import io
+import re
+from tempfile import mkstemp
+
+from portage import _encodings
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage import os
+from portage import shutil
+from portage import util
+
+
+_copyright_re1 = re.compile(br'^(# Copyright \d\d\d\d)-\d\d\d\d ')
+_copyright_re2 = re.compile(br'^(# Copyright )(\d\d\d\d) ')
+
+
+class _copyright_repl(object):
+ __slots__ = ('year',)
+
+ def __init__(self, year):
+ self.year = year
+
+ def __call__(self, matchobj):
+ if matchobj.group(2) == self.year:
+ return matchobj.group(0)
+ else:
+ return matchobj.group(1) + matchobj.group(2) + \
+ b'-' + self.year + b' '
+
+
+def update_copyright_year(year, line):
+ """
+ These two regexes are taken from echangelog
+ update_copyright(), except that we don't hardcode
+ 1999 here (in order to be more generic).
+ """
+ is_bytes = isinstance(line, bytes)
+ if is_bytes:
+ if not line.startswith(b'# Copyright '):
+ return line
+ else:
+ if not line.startswith('# Copyright '):
+ return line
+
+ year = _unicode_encode(year)
+ line = _unicode_encode(line)
+
+ line = _copyright_re1.sub(br'\1-' + year + b' ', line)
+ line = _copyright_re2.sub(_copyright_repl(year), line)
+ if not is_bytes:
+ line = _unicode_decode(line)
+ return line
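+
+# Illustrative examples (editor's sketch, hypothetical header lines):
+#   update_copyright_year('2016', '# Copyright 1999-2010 Gentoo Foundation')
+#   -> '# Copyright 1999-2016 Gentoo Foundation'
+#   update_copyright_year('2016', '# Copyright 2014 Gentoo Foundation')
+#   -> '# Copyright 2014-2016 Gentoo Foundation'
+# Lines whose year or year range already ends in the given year are
+# returned unchanged.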
+
+
+def update_copyright(fn_path, year, pretend=False):
+ """
+ Check file for a Copyright statement, and update its year. The
+ patterns used for replacing copyrights are taken from echangelog.
+ Only the first lines of each file that start with a hash ('#') are
+ considered, until a line is found that doesn't start with a hash.
+ Files are read and written in binary mode, so that this function
+ will work correctly with files encoded in any character set, as
+ long as the copyright statements consist of plain ASCII.
+ """
+
+ try:
+ fn_hdl = io.open(_unicode_encode(
+ fn_path, encoding=_encodings['fs'], errors='strict'),
+ mode='rb')
+ except EnvironmentError:
+ return
+
+ orig_header = []
+ new_header = []
+
+ for line in fn_hdl:
+ line_strip = line.strip()
+ orig_header.append(line)
+ if not line_strip or line_strip[:1] != b'#':
+ new_header.append(line)
+ break
+
+ line = update_copyright_year(year, line)
+ new_header.append(line)
+
+ difflines = 0
+ for diffline in difflib.unified_diff(
+ [_unicode_decode(diffline) for diffline in orig_header],
+ [_unicode_decode(diffline) for diffline in new_header],
+ fromfile=fn_path, tofile=fn_path, n=0):
+ util.writemsg_stdout(diffline, noiselevel=-1)
+ difflines += 1
+ util.writemsg_stdout("\n", noiselevel=-1)
+
+ # unified diff has three lines to start with
+ if difflines > 3 and not pretend:
+ # write new file with changed header
+ f, fnnew_path = mkstemp()
+ f = io.open(f, mode='wb')
+ for line in new_header:
+ f.write(line)
+ for line in fn_hdl:
+ f.write(line)
+ f.close()
+ try:
+ fn_stat = os.stat(fn_path)
+ except OSError:
+ fn_stat = None
+
+ shutil.move(fnnew_path, fn_path)
+
+ if fn_stat is None:
+ util.apply_permissions(fn_path, mode=0o644)
+ else:
+ util.apply_stat_permissions(fn_path, fn_stat)
+ fn_hdl.close()
diff --git a/repoman/pym/repoman/errors.py b/repoman/pym/repoman/errors.py
new file mode 100644
index 000000000..9cf113ba0
--- /dev/null
+++ b/repoman/pym/repoman/errors.py
@@ -0,0 +1,22 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import sys
+
+
+def warn(txt):
+ print("repoman: " + txt)
+
+
+def err(txt):
+ warn(txt)
+ sys.exit(1)
+
+
+def caterror(catdir, repodir):
+ err(
+ "%s is not an official category."
+ " Skipping QA checks in this directory.\n"
+ "Please ensure that you add %s to %s/profiles/categories\n"
+ "if it is a new category." % (catdir, catdir, repodir))
diff --git a/repoman/pym/repoman/gpg.py b/repoman/pym/repoman/gpg.py
new file mode 100644
index 000000000..26e243201
--- /dev/null
+++ b/repoman/pym/repoman/gpg.py
@@ -0,0 +1,82 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import errno
+import logging
+import subprocess
+import sys
+
+import portage
+from portage import os
+from portage import _encodings
+from portage import _unicode_encode
+from portage.exception import MissingParameter
+from portage.process import find_binary
+
+
+# Setup the GPG commands
+def gpgsign(filename, repoman_settings, options):
+ gpgcmd = repoman_settings.get("PORTAGE_GPG_SIGNING_COMMAND")
+ if gpgcmd in [None, '']:
+ raise MissingParameter("PORTAGE_GPG_SIGNING_COMMAND is unset!"
+ " Is make.globals missing?")
+ if "${PORTAGE_GPG_KEY}" in gpgcmd and \
+ "PORTAGE_GPG_KEY" not in repoman_settings:
+ raise MissingParameter("PORTAGE_GPG_KEY is unset!")
+ if "${PORTAGE_GPG_DIR}" in gpgcmd:
+ if "PORTAGE_GPG_DIR" not in repoman_settings:
+ repoman_settings["PORTAGE_GPG_DIR"] = \
+ os.path.expanduser("~/.gnupg")
+ logging.info(
+ "Automatically setting PORTAGE_GPG_DIR to '%s'" %
+ repoman_settings["PORTAGE_GPG_DIR"])
+ else:
+ repoman_settings["PORTAGE_GPG_DIR"] = \
+ os.path.expanduser(repoman_settings["PORTAGE_GPG_DIR"])
+ if not os.access(repoman_settings["PORTAGE_GPG_DIR"], os.X_OK):
+ raise portage.exception.InvalidLocation(
+ "Unable to access directory: PORTAGE_GPG_DIR='%s'" %
+ repoman_settings["PORTAGE_GPG_DIR"])
+ gpgvars = {"FILE": filename}
+ for k in ("PORTAGE_GPG_DIR", "PORTAGE_GPG_KEY"):
+ v = repoman_settings.get(k)
+ if v is not None:
+ gpgvars[k] = v
+ gpgcmd = portage.util.varexpand(gpgcmd, mydict=gpgvars)
+ if options.pretend:
+ print("(" + gpgcmd + ")")
+ else:
+ # Encode unicode manually for bug #310789.
+ gpgcmd = portage.util.shlex_split(gpgcmd)
+
+ if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000 and \
+ not os.path.isabs(gpgcmd[0]):
+ # Python 3.1 _execvp throws TypeError for non-absolute executable
+ # path passed as bytes (see http://bugs.python.org/issue8513).
+ fullname = find_binary(gpgcmd[0])
+ if fullname is None:
+ raise portage.exception.CommandNotFound(gpgcmd[0])
+ gpgcmd[0] = fullname
+
+ gpgcmd = [
+ _unicode_encode(arg, encoding=_encodings['fs'], errors='strict')
+ for arg in gpgcmd]
+ rValue = subprocess.call(gpgcmd)
+ if rValue == os.EX_OK:
+ os.rename(filename + ".asc", filename)
+ else:
+ raise portage.exception.PortageException(
+ "!!! gpg exited with '" + str(rValue) + "' status")
+
+def need_signature(filename):
+ try:
+ with open(
+ _unicode_encode(
+ filename, encoding=_encodings['fs'], errors='strict'),
+ 'rb') as f:
+ return b"BEGIN PGP SIGNED MESSAGE" not in f.readline()
+ except IOError as e:
+ if e.errno in (errno.ENOENT, errno.ESTALE):
+ return False
+ raise
diff --git a/repoman/pym/repoman/main.py b/repoman/pym/repoman/main.py
new file mode 100755
index 000000000..2009a44fe
--- /dev/null
+++ b/repoman/pym/repoman/main.py
@@ -0,0 +1,183 @@
+#!/usr/bin/python -bO
+# -*- coding:utf-8 -*-
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function, unicode_literals
+
+import io
+import logging
+import sys
+
+# import our centrally initialized portage instance
+from repoman._portage import portage
+portage._internal_caller = True
+portage._disable_legacy_globals()
+
+
+from portage import os
+import portage.checksum
+import portage.const
+import portage.repository.config
+from portage.output import create_color_func, nocolor
+from portage.output import ConsoleStyleFile, StyleWriter
+from portage.util import formatter
+from portage.util.futures.extendedfutures import (
+ ExtendedFuture,
+ InvalidStateError,
+)
+
+from repoman.actions import Actions
+from repoman.argparser import parse_args
+from repoman.qa_data import (
+ format_qa_output, format_qa_output_column, qahelp,
+ qawarnings, qacats)
+from repoman.repos import RepoSettings
+from repoman.scanner import Scanner
+from repoman import utilities
+from repoman.modules.vcs.settings import VCSSettings
+
+if sys.hexversion >= 0x3000000:
+ basestring = str
+
+bad = create_color_func("BAD")
+
+# A sane umask is needed for files that portage creates.
+os.umask(0o22)
+
+LOGLEVEL = logging.WARNING
+portage.util.initialize_logger(LOGLEVEL)
+
+
+def repoman_main(argv):
+ config_root = os.environ.get("PORTAGE_CONFIGROOT")
+ repoman_settings = portage.config(config_root=config_root, local_config=False)
+
+ if repoman_settings.get("NOCOLOR", "").lower() in ("yes", "true") or \
+ repoman_settings.get('TERM') == 'dumb' or \
+ not sys.stdout.isatty():
+ nocolor()
+
+ options, arguments = parse_args(
+ sys.argv, qahelp, repoman_settings.get("REPOMAN_DEFAULT_OPTS", ""))
+
+ if options.version:
+ print("Portage", portage.VERSION)
+ sys.exit(0)
+
+ logger = logging.getLogger()
+
+ if options.verbosity > 0:
+ logger.setLevel(LOGLEVEL - 10 * options.verbosity)
+ else:
+ logger.setLevel(LOGLEVEL)
+
+ if options.experimental_inherit == 'y':
+ # This is experimental, so it's non-fatal.
+ qawarnings.add("inherit.missing")
+
+ # Set this to False when an extraordinary issue (generally
+ # something other than a QA issue) makes it impossible to
+ # commit (like if Manifest generation fails).
+ can_force = ExtendedFuture(True)
+
+ portdir, portdir_overlay, mydir = utilities.FindPortdir(repoman_settings)
+ if portdir is None:
+ sys.exit(1)
+
+ myreporoot = os.path.basename(portdir_overlay)
+ myreporoot += mydir[len(portdir_overlay):]
+
+ # avoid a circular parameter repo_settings
+ vcs_settings = VCSSettings(options, repoman_settings)
+
+ repo_settings = RepoSettings(
+ config_root, portdir, portdir_overlay,
+ repoman_settings, vcs_settings, options, qawarnings)
+ repoman_settings = repo_settings.repoman_settings
+
+ # Now set repo_settings
+ vcs_settings.repo_settings = repo_settings
+
+ if 'digest' in repoman_settings.features and options.digest != 'n':
+ options.digest = 'y'
+
+ logging.debug("vcs: %s" % (vcs_settings.vcs,))
+ logging.debug("repo config: %s" % (repo_settings.repo_config,))
+ logging.debug("options: %s" % (options,))
+
+ # It's confusing if these warnings are displayed without the user
+ # being told which profile they come from, so disable them.
+ env = os.environ.copy()
+ env['FEATURES'] = env.get('FEATURES', '') + ' -unknown-features-warn'
+
+ # Perform the main checks
+ scanner = Scanner(repo_settings, myreporoot, config_root, options,
+ vcs_settings, mydir, env)
+ scanner.scan_pkgs(can_force)
+
+ commitmessage = None
+
+ if options.if_modified == "y" and len(scanner.effective_scanlist) < 1:
+ logging.warning("--if-modified is enabled, but no modified packages were found!")
+
+ result = {
+ # fail will be true if we have failed in at least one non-warning category
+ 'fail': 0,
+ # warn will be true if we tripped any warnings
+ 'warn': 0,
+ # full will be true if we should print a "repoman full" informational message
+ 'full': options.mode != 'full',
+ }
+
+ # early out for manifest generation
+ if options.mode == "manifest":
+ sys.exit(result['fail'])
+
+ for x in qacats:
+ if x not in vcs_settings.qatracker.fails:
+ continue
+ result['warn'] = 1
+ if x not in qawarnings:
+ result['fail'] = 1
+
+ if result['fail'] or \
+ (result['warn'] and not (options.quiet or options.mode == "scan")):
+ result['full'] = 0
+
+ # Save QA output so that it can be conveniently displayed
+ # in $EDITOR while the user creates a commit message.
+ # Otherwise, the user would not be able to see this output
+ # once the editor has taken over the screen.
+ qa_output = io.StringIO()
+ style_file = ConsoleStyleFile(sys.stdout)
+ if options.mode == 'commit' and \
+ (not commitmessage or not commitmessage.strip()):
+ style_file.write_listener = qa_output
+ console_writer = StyleWriter(file=style_file, maxcol=9999)
+ console_writer.style_listener = style_file.new_styles
+
+ f = formatter.AbstractFormatter(console_writer)
+
+ format_outputs = {
+ 'column': format_qa_output_column,
+ 'default': format_qa_output
+ }
+
+ format_output = format_outputs.get(
+ options.output_style, format_outputs['default'])
+ format_output(f, vcs_settings.qatracker.fails, result['full'],
+ result['fail'], options, qawarnings)
+
+ style_file.flush()
+ del console_writer, f, style_file
+ qa_output = qa_output.getvalue()
+ qa_output = qa_output.splitlines(True)
+
+ # output the results
+ actions = Actions(repo_settings, options, scanner, vcs_settings)
+ if actions.inform(can_force.get(), result):
+ # perform any other actions
+ actions.perform(qa_output)
+
+ sys.exit(0)
diff --git a/repoman/pym/repoman/metadata.py b/repoman/pym/repoman/metadata.py
new file mode 100644
index 000000000..a9ad3e8c0
--- /dev/null
+++ b/repoman/pym/repoman/metadata.py
@@ -0,0 +1,122 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import errno
+import logging
+import sys
+import tempfile
+import time
+
+try:
+ from urllib.parse import urlparse
+except ImportError:
+ from urlparse import urlparse
+
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage import os
+from portage import shutil
+from portage.output import green
+
+if sys.hexversion >= 0x3000000:
+ basestring = str
+
+
+metadata_dtd_uri = 'http://www.gentoo.org/dtd/metadata.dtd'
+metadata_xsd_uri = 'http://www.gentoo.org/xml-schema/metadata.xsd'
+# force refetch if the local copy creation time is older than this
+metadata_xsd_ctime_interval = 60 * 60 * 24 * 7 # 7 days
+
+
+def fetch_metadata_xsd(metadata_xsd, repoman_settings):
+ """
+ Fetch metadata.xsd if it doesn't exist or the ctime is older than
+ metadata_xsd_ctime_interval.
+ @rtype: bool
+ @return: True if successful, otherwise False
+ """
+
+ must_fetch = True
+ metadata_xsd_st = None
+ current_time = int(time.time())
+ try:
+ metadata_xsd_st = os.stat(metadata_xsd)
+ except EnvironmentError as e:
+ if e.errno not in (errno.ENOENT, errno.ESTALE):
+ raise
+ del e
+ else:
+ # Trigger fetch if metadata.xsd ctime is old or clock is wrong.
+ if abs(current_time - metadata_xsd_st.st_ctime) \
+ < metadata_xsd_ctime_interval:
+ must_fetch = False
+
+ if must_fetch:
+ print()
+ print(
+ "%s the local copy of metadata.xsd "
+ "needs to be refetched, doing that now" % green("***"))
+ print()
+ parsed_url = urlparse(metadata_xsd_uri)
+ setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
+ fcmd = repoman_settings.get(setting)
+ if not fcmd:
+ fcmd = repoman_settings.get('FETCHCOMMAND')
+ if not fcmd:
+ logging.error("FETCHCOMMAND is unset")
+ return False
+
+ destdir = repoman_settings["DISTDIR"]
+ fd, metadata_xsd_tmp = tempfile.mkstemp(
+ prefix='metadata.xsd.', dir=destdir)
+ os.close(fd)
+
+ try:
+ if not portage.getbinpkg.file_get(
+ metadata_xsd_uri, destdir, fcmd=fcmd,
+ filename=os.path.basename(metadata_xsd_tmp)):
+ logging.error(
+ "failed to fetch metadata.xsd from '%s'" % metadata_xsd_uri)
+ return False
+
+ try:
+ portage.util.apply_secpass_permissions(
+ metadata_xsd_tmp,
+ gid=portage.data.portage_gid, mode=0o664, mask=0o2)
+ except portage.exception.PortageException:
+ pass
+
+ shutil.move(metadata_xsd_tmp, metadata_xsd)
+ finally:
+ try:
+ os.unlink(metadata_xsd_tmp)
+ except OSError:
+ pass
+
+ return True
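+
+# Illustrative note (editor's sketch, assumed settings): for the http
+# metadata_xsd_uri above, the lookup first tries FETCHCOMMAND_HTTP and then
+# falls back to FETCHCOMMAND, which in a stock configuration is a wget
+# command along the lines of
+#   wget -t 3 -T 60 --passive-ftp -O "${DISTDIR}/${FILE}" "${URI}"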
+
+
+def get_metadata_xsd(repo_settings):
+ '''Locate and or fetch the metadata.xsd file
+
+ @param repo_settings: RepoSettings instance
+ @returns: path to the metadata.xsd file
+ '''
+ metadata_xsd = None
+ for path in reversed(repo_settings.repo_config.eclass_db.porttrees):
+ path = os.path.join(path, 'metadata/xml-schema/metadata.xsd')
+ if os.path.exists(path):
+ metadata_xsd = path
+ break
+ if metadata_xsd is None:
+ metadata_xsd = os.path.join(
+ repo_settings.repoman_settings["DISTDIR"], 'metadata.xsd'
+ )
+
+ fetch_metadata_xsd(metadata_xsd, repo_settings.repoman_settings)
+ return metadata_xsd
diff --git a/repoman/pym/repoman/modules/__init__.py b/repoman/pym/repoman/modules/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/modules/__init__.py
diff --git a/repoman/pym/repoman/modules/commit/__init__.py b/repoman/pym/repoman/modules/commit/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/modules/commit/__init__.py
diff --git a/repoman/pym/repoman/modules/commit/repochecks.py b/repoman/pym/repoman/modules/commit/repochecks.py
new file mode 100644
index 000000000..bedbdaf34
--- /dev/null
+++ b/repoman/pym/repoman/modules/commit/repochecks.py
@@ -0,0 +1,35 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+from portage.output import red
+
+from repoman.errors import err
+
+
+def commit_check(repolevel, reposplit):
+ # Check if it's in $PORTDIR/$CATEGORY/$PN, otherwise bail if committing.
+ # Reason for this is if they're trying to commit in just $FILESDIR/*,
+ # the Manifest needs updating.
+ # This check ensures that repoman knows where it is,
+ # and the manifest recommit is at least possible.
+ if repolevel not in [1, 2, 3]:
+ print(red("***") + (
+ " Commit attempts *must* be from within a vcs checkout,"
+ " category, or package directory."))
+ print(red("***") + (
+ " Attempting to commit from a packages files directory"
+ " will be blocked for instance."))
+ print(red("***") + (
+ " This is intended behaviour,"
+ " to ensure the manifest is recommitted for a package."))
+ print(red("***"))
+ err(
+ "Unable to identify level we're commiting from for %s" %
+ '/'.join(reposplit))
+
+
+def conflict_check(vcs_settings, options):
+ if vcs_settings.vcs:
+ conflicts = vcs_settings.status.detect_conflicts(options)
+
diff --git a/repoman/pym/repoman/modules/scan/__init__.py b/repoman/pym/repoman/modules/scan/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/__init__.py
diff --git a/repoman/pym/repoman/modules/scan/depend/__init__.py b/repoman/pym/repoman/modules/scan/depend/__init__.py
new file mode 100644
index 000000000..6d1228601
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/depend/__init__.py
@@ -0,0 +1,32 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Depend plug-in module for repoman.
+Performs Dependency checks on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'depend',
+ 'description': doc,
+ 'provides':{
+ 'profile-module': {
+ 'name': "profile",
+ 'sourcefile': "profile",
+ 'class': "ProfileDependsChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker', 'portdb', 'profiles', 'options',
+ 'repo_metadata', 'repo_settings', 'include_arches', 'caches',
+ 'repoman_incrementals', 'env', 'have', 'dev_keywords'
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/depend/_depend_checks.py b/repoman/pym/repoman/modules/scan/depend/_depend_checks.py
new file mode 100644
index 000000000..4e1d216e1
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/depend/_depend_checks.py
@@ -0,0 +1,150 @@
+# -*- coding:utf-8 -*-
+
+
+from _emerge.Package import Package
+
+from repoman.check_missingslot import check_missingslot
+# import our initialized portage instance
+from repoman._portage import portage
+from repoman.qa_data import suspect_virtual, suspect_rdepend
+
+
+def _depend_checks(ebuild, pkg, portdb, qatracker, repo_metadata):
+ '''Checks the ebuild dependencies for errors
+
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @param portdb: portdb instance
+ @param qatracker: QATracker instance
+ @param repo_metadata: dictionary of various repository items.
+ @returns: (unknown_pkgs, badlicsyntax)
+ '''
+
+ unknown_pkgs = set()
+
+ inherited_java_eclass = "java-pkg-2" in ebuild.inherited or \
+ "java-pkg-opt-2" in ebuild.inherited,
+ inherited_wxwidgets_eclass = "wxwidgets" in ebuild.inherited
+ # operator_tokens = set(["||", "(", ")"])
+ type_list, badsyntax = [], []
+ for mytype in Package._dep_keys + ("LICENSE", "PROPERTIES", "PROVIDE"):
+ mydepstr = ebuild.metadata[mytype]
+
+ buildtime = mytype in Package._buildtime_keys
+ runtime = mytype in Package._runtime_keys
+ token_class = None
+ if mytype.endswith("DEPEND"):
+ token_class = portage.dep.Atom
+
+ try:
+ atoms = portage.dep.use_reduce(
+ mydepstr, matchall=1, flat=True,
+ is_valid_flag=pkg.iuse.is_valid_flag, token_class=token_class)
+ except portage.exception.InvalidDependString as e:
+ atoms = None
+ badsyntax.append(str(e))
+
+ if atoms and mytype.endswith("DEPEND"):
+ if runtime and \
+ "test?" in mydepstr.split():
+ qatracker.add_error(
+ mytype + '.suspect',
+ "%s: 'test?' USE conditional in %s" %
+ (ebuild.relative_path, mytype))
+
+ for atom in atoms:
+ if atom == "||":
+ continue
+
+ is_blocker = atom.blocker
+
+ # Skip dependency.unknown for blockers, so that we
+ # don't encourage people to remove necessary blockers,
+ # as discussed in bug 382407. We use atom.without_use
+ # due to bug 525376.
+ if not is_blocker and \
+ not portdb.xmatch("match-all", atom.without_use) and \
+ not atom.cp.startswith("virtual/"):
+ unknown_pkgs.add((mytype, atom.unevaluated_atom))
+
+ if pkg.category != "virtual":
+ if not is_blocker and \
+ atom.cp in suspect_virtual:
+ qatracker.add_error(
+ 'virtual.suspect', ebuild.relative_path +
+ ": %s: consider using '%s' instead of '%s'" %
+ (mytype, suspect_virtual[atom.cp], atom))
+ if not is_blocker and \
+ atom.cp.startswith("perl-core/"):
+ qatracker.add_error('dependency.perlcore',
+ ebuild.relative_path +
+ ": %s: please use '%s' instead of '%s'" %
+ (mytype,
+ atom.replace("perl-core/","virtual/perl-"),
+ atom))
+
+ if buildtime and \
+ not is_blocker and \
+ not inherited_java_eclass and \
+ atom.cp == "virtual/jdk":
+ qatracker.add_error(
+ 'java.eclassesnotused', ebuild.relative_path)
+ elif buildtime and \
+ not is_blocker and \
+ not inherited_wxwidgets_eclass and \
+ atom.cp == "x11-libs/wxGTK":
+ qatracker.add_error(
+ 'wxwidgets.eclassnotused',
+ "%s: %ss on x11-libs/wxGTK without inheriting"
+ " wxwidgets.eclass" % (ebuild.relative_path, mytype))
+ elif runtime:
+ if not is_blocker and \
+ atom.cp in suspect_rdepend:
+ qatracker.add_error(
+ mytype + '.suspect',
+ ebuild.relative_path + ": '%s'" % atom)
+
+ if atom.operator == "~" and \
+ portage.versions.catpkgsplit(atom.cpv)[3] != "r0":
+ qacat = 'dependency.badtilde'
+ qatracker.add_error(
+ qacat, "%s: %s uses the ~ operator"
+ " with a non-zero revision: '%s'" %
+ (ebuild.relative_path, mytype, atom))
+
+ check_missingslot(atom, mytype, ebuild.eapi, portdb, qatracker,
+ ebuild.relative_path, ebuild.metadata)
+
+ type_list.extend([mytype] * (len(badsyntax) - len(type_list)))
+
+ for m, b in zip(type_list, badsyntax):
+ if m.endswith("DEPEND"):
+ qacat = "dependency.syntax"
+ else:
+ qacat = m + ".syntax"
+ qatracker.add_error(
+ qacat, "%s: %s: %s" % (ebuild.relative_path, m, b))
+
+ # data required for some other tests
+ badlicsyntax = len([z for z in type_list if z == "LICENSE"])
+ badprovsyntax = len([z for z in type_list if z == "PROVIDE"])
+ baddepsyntax = len(type_list) != badlicsyntax + badprovsyntax
+ badlicsyntax = badlicsyntax > 0
+ #badprovsyntax = badprovsyntax > 0
+
+ # Parse the LICENSE variable, remove USE conditions and flatten it.
+ licenses = portage.dep.use_reduce(
+ ebuild.metadata["LICENSE"], matchall=1, flat=True)
+
+ # Check each entry to ensure that it exists in ${PORTDIR}/licenses/.
+ for lic in licenses:
+ # Need to check for "||" manually as no portage
+ # function will remove it without removing values.
+ if lic not in repo_metadata['liclist'] and lic != "||":
+ qatracker.add_error("LICENSE.invalid",
+ "%s: %s" % (ebuild.relative_path, lic))
+ elif lic in repo_metadata['lic_deprecated']:
+ qatracker.add_error("LICENSE.deprecated",
+ "%s: %s" % (ebuild.relative_path, lic))
+
+ return unknown_pkgs, baddepsyntax
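+
+# Illustrative examples (editor's sketch, hypothetical atoms): an RDEPEND
+# entry of "~dev-libs/foo-1.2-r1" would trigger dependency.badtilde (tilde
+# operator with a non-zero revision), and a LICENSE entry that is not
+# present in the repository's licenses/ directory would trigger
+# LICENSE.invalid.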
diff --git a/repoman/pym/repoman/modules/scan/depend/_gen_arches.py b/repoman/pym/repoman/modules/scan/depend/_gen_arches.py
new file mode 100644
index 000000000..16b8dac5f
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/depend/_gen_arches.py
@@ -0,0 +1,57 @@
+# -*- coding:utf-8 -*-
+
+
+def _gen_arches(ebuild, options, repo_settings, profiles):
+ '''Determines the arches for the ebuild following the profile rules
+
+ @param ebuild: Ebuild which we check (object).
+ @param profiles: dictionary
+ @param options: cli options
+ @param repo_settings: repository settings instance
+ @returns: set of (keyword, arch, accept_keywords) tuples
+ '''
+ if options.ignore_arches:
+ arches = [[
+ repo_settings.repoman_settings["ARCH"], repo_settings.repoman_settings["ARCH"],
+ repo_settings.repoman_settings["ACCEPT_KEYWORDS"].split()]]
+ else:
+ arches = set()
+ for keyword in ebuild.keywords:
+ if keyword[0] == "-":
+ continue
+ elif keyword[0] == "~":
+ arch = keyword[1:]
+ if arch == "*":
+ for expanded_arch in profiles:
+ if expanded_arch == "**":
+ continue
+ arches.add(
+ (keyword, expanded_arch, (
+ expanded_arch, "~" + expanded_arch)))
+ else:
+ arches.add((keyword, arch, (arch, keyword)))
+ else:
+ # For ebuilds with stable keywords, check if the
+ # dependencies are satisfiable for unstable
+ # configurations, since use.stable.mask is not
+ # applied for unstable configurations (see bug
+ # 563546).
+ if keyword == "*":
+ for expanded_arch in profiles:
+ if expanded_arch == "**":
+ continue
+ arches.add(
+ (keyword, expanded_arch, (expanded_arch,)))
+ arches.add(
+ (keyword, expanded_arch,
+ (expanded_arch, "~" + expanded_arch)))
+ else:
+ arches.add((keyword, keyword, (keyword,)))
+ arches.add((keyword, keyword,
+ (keyword, "~" + keyword)))
+ if not arches:
+ # Use an empty profile for checking dependencies of
+ # packages that have empty KEYWORDS.
+ arches.add(('**', '**', ('**',)))
+
+ return arches
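+
+# Illustrative example (editor's sketch, assumed KEYWORDS="amd64 ~x86"):
+# the set returned above would contain tuples such as
+#   ('amd64', 'amd64', ('amd64',))
+#   ('amd64', 'amd64', ('amd64', '~amd64'))
+#   ('~x86', 'x86', ('x86', '~x86'))
+# i.e. (keyword, arch, accept_keywords groups) used to select profiles.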
diff --git a/repoman/pym/repoman/modules/scan/depend/profile.py b/repoman/pym/repoman/modules/scan/depend/profile.py
new file mode 100644
index 000000000..a714a9317
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/depend/profile.py
@@ -0,0 +1,256 @@
+# -*- coding:utf-8 -*-
+
+
+import copy
+from pprint import pformat
+
+from _emerge.Package import Package
+
+# import our initialized portage instance
+from repoman._portage import portage
+from repoman.modules.scan.scanbase import ScanBase
+from repoman.modules.scan.depend._depend_checks import _depend_checks
+from repoman.modules.scan.depend._gen_arches import _gen_arches
+from portage.dep import Atom
+
+
+def sort_key(item):
+ return item[2].sub_path
+
+
+class ProfileDependsChecks(ScanBase):
+ '''Perform dependency checks for the different profiles'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param qatracker: QATracker instance
+ @param portdb: portdb instance
+ @param profiles: dictionary
+ @param options: cli options
+ @param repo_settings: repository settings instance
+ @param include_arches: set
+ @param caches: dictionary of our caches
+ @param repoman_incrementals: tuple
+ @param env: the environment
+ @param have: dictionary instance
+ @param dev_keywords: developer profile keywords
+ @param repo_metadata: dictionary of various repository items.
+ '''
+ self.qatracker = kwargs.get('qatracker')
+ self.portdb = kwargs.get('portdb')
+ self.profiles = kwargs.get('profiles')
+ self.options = kwargs.get('options')
+ self.repo_settings = kwargs.get('repo_settings')
+ self.include_arches = kwargs.get('include_arches')
+ self.caches = kwargs.get('caches')
+ self.repoman_incrementals = kwargs.get('repoman_incrementals')
+ self.env = kwargs.get('env')
+ self.have = kwargs.get('have')
+ self.dev_keywords = kwargs.get('dev_keywords')
+ self.repo_metadata = kwargs.get('repo_metadata')
+
+ def check(self, **kwargs):
+ '''Perform profile-dependent dependency checks
+
+ @param arches:
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @param baddepsyntax: boolean
+ @param unknown_pkgs: set of tuples (type, atom.unevaluated_atom)
+ @returns: dictionary
+ '''
+ ebuild = kwargs.get('ebuild').get()
+ pkg = kwargs.get('pkg').get()
+ unknown_pkgs, baddepsyntax = _depend_checks(
+ ebuild, pkg, self.portdb, self.qatracker, self.repo_metadata)
+
+ relevant_profiles = []
+ for keyword, arch, groups in _gen_arches(ebuild, self.options,
+ self.repo_settings, self.profiles):
+ if arch not in self.profiles:
+ # A missing profile will create an error further down
+ # during the KEYWORDS verification.
+ continue
+
+ if self.include_arches is not None:
+ if arch not in self.include_arches:
+ continue
+
+ relevant_profiles.extend(
+ (keyword, groups, prof) for prof in self.profiles[arch])
+
+ relevant_profiles.sort(key=sort_key)
+
+ for keyword, groups, prof in relevant_profiles:
+
+ is_stable_profile = prof.status == "stable"
+ is_dev_profile = prof.status == "dev" and \
+ self.options.include_dev
+ is_exp_profile = prof.status == "exp" and \
+ self.options.include_exp_profiles == 'y'
+ if not (is_stable_profile or is_dev_profile or is_exp_profile):
+ continue
+
+ dep_settings = self.caches['arch'].get(prof.sub_path)
+ if dep_settings is None:
+ dep_settings = portage.config(
+ config_profile_path=prof.abs_path,
+ config_incrementals=self.repoman_incrementals,
+ config_root=self.repo_settings.config_root,
+ local_config=False,
+ _unmatched_removal=self.options.unmatched_removal,
+ env=self.env, repositories=self.repo_settings.repoman_settings.repositories)
+ dep_settings.categories = self.repo_settings.repoman_settings.categories
+ if self.options.without_mask:
+ dep_settings._mask_manager_obj = \
+ copy.deepcopy(dep_settings._mask_manager)
+ dep_settings._mask_manager._pmaskdict.clear()
+ self.caches['arch'][prof.sub_path] = dep_settings
+
+ xmatch_cache_key = (prof.sub_path, tuple(groups))
+ xcache = self.caches['arch_xmatch'].get(xmatch_cache_key)
+ if xcache is None:
+ self.portdb.melt()
+ self.portdb.freeze()
+ xcache = self.portdb.xcache
+ xcache.update(self.caches['shared_xmatch'])
+ self.caches['arch_xmatch'][xmatch_cache_key] = xcache
+
+ self.repo_settings.trees[self.repo_settings.root]["porttree"].settings = dep_settings
+ self.portdb.settings = dep_settings
+ self.portdb.xcache = xcache
+
+ dep_settings["ACCEPT_KEYWORDS"] = " ".join(groups)
+ # just in case, prevent config.reset() from nuking these.
+ dep_settings.backup_changes("ACCEPT_KEYWORDS")
+
+ # This attribute is used in dbapi._match_use() to apply
+ # use.stable.{mask,force} settings based on the stable
+ # status of the parent package. This is required in order
+ # for USE deps of unstable packages to be resolved correctly,
+ # since otherwise use.stable.{mask,force} settings of
+ # dependencies may conflict (see bug #456342).
+ dep_settings._parent_stable = dep_settings._isStable(pkg)
+
+ # Handle package.use*.{force,mask) calculation, for use
+ # in dep_check.
+ dep_settings.useforce = dep_settings._use_manager.getUseForce(
+ pkg, stable=dep_settings._parent_stable)
+ dep_settings.usemask = dep_settings._use_manager.getUseMask(
+ pkg, stable=dep_settings._parent_stable)
+
+ if not baddepsyntax:
+ ismasked = not ebuild.archs or \
+ pkg.cpv not in self.portdb.xmatch("match-visible",
+ Atom("%s::%s" % (pkg.cp, self.repo_settings.repo_config.name)))
+ if ismasked:
+ if not self.have['pmasked']:
+ self.have['pmasked'] = bool(dep_settings._getMaskAtom(
+ pkg.cpv, ebuild.metadata))
+ if self.options.ignore_masked:
+ continue
+ # we are testing deps for a masked package; give it some leeway
+ suffix = "masked"
+ matchmode = "minimum-all-ignore-profile"
+ else:
+ suffix = ""
+ matchmode = "minimum-visible"
+
+ if not self.have['dev_keywords']:
+ self.have['dev_keywords'] = \
+ bool(self.dev_keywords.intersection(ebuild.keywords))
+
+ if prof.status == "dev":
+ suffix = suffix + "indev"
+
+ for mytype in Package._dep_keys:
+
+ mykey = "dependency.bad" + suffix
+ myvalue = ebuild.metadata[mytype]
+ if not myvalue:
+ continue
+
+ success, atoms = portage.dep_check(
+ myvalue, self.portdb, dep_settings,
+ use="all", mode=matchmode, trees=self.repo_settings.trees)
+
+ if success:
+ if atoms:
+
+ # Don't bother with dependency.unknown for
+ # cases in which *DEPEND.bad is triggered.
+ for atom in atoms:
+ # dep_check returns all blockers and they
+ # aren't counted for *DEPEND.bad, so we
+ # ignore them here.
+ if not atom.blocker:
+ unknown_pkgs.discard(
+ (mytype, atom.unevaluated_atom))
+
+ if not prof.sub_path:
+ # old-style virtuals currently aren't
+ # resolvable with empty profile, since
+ # 'virtuals' mappings are unavailable
+ # (it would be expensive to search
+ # for PROVIDE in all ebuilds)
+ atoms = [
+ atom for atom in atoms if not (
+ atom.cp.startswith('virtual/')
+ and not self.portdb.cp_list(atom.cp))]
+
+ # we have some unsolvable deps
+ # remove ! deps, which always show up as unsatisfiable
+ all_atoms = [
+ str(atom.unevaluated_atom)
+ for atom in atoms if not atom.blocker]
+
+ # if we emptied out our list, continue:
+ if not all_atoms:
+ continue
+
+ # Filter out duplicates. We do this by hand (rather
+ # than use a set) so the order is stable and better
+ # matches the order that's in the ebuild itself.
+ atoms = []
+ for atom in all_atoms:
+ if atom not in atoms:
+ atoms.append(atom)
+
+ if self.options.output_style in ['column']:
+ self.qatracker.add_error(mykey,
+ "%s: %s: %s(%s) %s"
+ % (ebuild.relative_path, mytype, keyword,
+ prof, repr(atoms)))
+ else:
+ self.qatracker.add_error(mykey,
+ "%s: %s: %s(%s)\n%s"
+ % (ebuild.relative_path, mytype, keyword,
+ prof, pformat(atoms, indent=6)))
+ else:
+ if self.options.output_style in ['column']:
+ self.qatracker.add_error(mykey,
+ "%s: %s: %s(%s) %s"
+ % (ebuild.relative_path, mytype, keyword,
+ prof, repr(atoms)))
+ else:
+ self.qatracker.add_error(mykey,
+ "%s: %s: %s(%s)\n%s"
+ % (ebuild.relative_path, mytype, keyword,
+ prof, pformat(atoms, indent=6)))
+
+ if not baddepsyntax and unknown_pkgs:
+ type_map = {}
+ for mytype, atom in unknown_pkgs:
+ type_map.setdefault(mytype, set()).add(atom)
+ for mytype, atoms in type_map.items():
+ self.qatracker.add_error(
+ "dependency.unknown", "%s: %s: %s"
+ % (ebuild.relative_path, mytype, ", ".join(sorted(atoms))))
+
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
diff --git a/repoman/pym/repoman/modules/scan/directories/__init__.py b/repoman/pym/repoman/modules/scan/directories/__init__.py
new file mode 100644
index 000000000..47834cb40
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/directories/__init__.py
@@ -0,0 +1,48 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Directories plug-in module for repoman.
+Performs a FileChecks check on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'directories',
+ 'description': doc,
+ 'provides':{
+ 'directories-module': {
+ 'name': "files",
+ 'sourcefile': "files",
+ 'class': "FileChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'mod_kwargs': ['portdb', 'qatracker', 'repo_settings', 'vcs_settings',
+ ],
+ 'func_kwargs': {
+ 'changed': (None, None),
+ 'checkdir': (None, None),
+ 'checkdirlist': (None, None),
+ 'checkdir_relative': (None, None),
+ },
+ },
+ 'mtime-module': {
+ 'name': "mtime",
+ 'sourcefile': "mtime",
+ 'class': "MtimeChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'mod_kwargs': ['vcs_settings',
+ ],
+ 'func_kwargs': {
+ 'changed': (None, None),
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/directories/files.py b/repoman/pym/repoman/modules/scan/directories/files.py
new file mode 100644
index 000000000..2aed26440
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/directories/files.py
@@ -0,0 +1,94 @@
+# -*- coding:utf-8 -*-
+
+'''repoman/modules/scan/directories/files.py
+
+'''
+
+import io
+
+from portage import _encodings, _unicode_encode
+from portage import os
+
+from repoman.modules.vcs.vcs import vcs_new_changed
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class FileChecks(ScanBase):
+ '''Performs various file checks in the package's directory'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param portdb: portdb instance
+ @param qatracker: QATracker instance
+ @param repo_settings: settings instance
+ @param vcs_settings: VCSSettings instance
+ '''
+ super(FileChecks, self).__init__(**kwargs)
+ self.portdb = kwargs.get('portdb')
+ self.qatracker = kwargs.get('qatracker')
+ self.repo_settings = kwargs.get('repo_settings')
+ self.repoman_settings = self.repo_settings.repoman_settings
+ self.vcs_settings = kwargs.get('vcs_settings')
+
+ def check(self, **kwargs):
+ '''Checks the ebuild sources and files for errors
+
+ @param checkdir: string, directory path
+ @param checkdir_relative: repolevel determined path
+ @param changed: dictionary instance
+ @returns: dictionary
+ '''
+ checkdir = kwargs.get('checkdir')
+ checkdirlist = kwargs.get('checkdirlist').get()
+ checkdir_relative = kwargs.get('checkdir_relative')
+ changed = kwargs.get('changed').changed
+ new = kwargs.get('changed').new
+ for y_file in checkdirlist:
+ index = self.repo_settings.repo_config.find_invalid_path_char(y_file)
+ if index != -1:
+ y_relative = os.path.join(checkdir_relative, y_file)
+ invcs = self.vcs_settings.vcs is not None
+ inchangeset = vcs_new_changed(y_relative, changed, new)
+ if invcs and not inchangeset:
+ # If the file isn't in the VCS new or changed set, then
+ # assume that it's an irrelevant temporary file (Manifest
+ # entries are not generated for file names containing
+ # prohibited characters). See bug #406877.
+ index = -1
+ if index != -1:
+ self.qatracker.add_error(
+ "file.name",
+ "%s/%s: char '%s'" % (checkdir, y_file, y_file[index]))
+
+ if not (
+ y_file in ("ChangeLog", "metadata.xml")
+ or y_file.endswith(".ebuild")):
+ continue
+ f = None
+ try:
+ line = 1
+ f = io.open(
+ _unicode_encode(
+ os.path.join(checkdir, y_file),
+ encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['repo.content'])
+ for l in f:
+ line += 1
+ except UnicodeDecodeError as ue:
+ s = ue.object[:ue.start]
+ l2 = s.count("\n")
+ line += l2
+ if l2 != 0:
+ s = s[s.rfind("\n") + 1:]
+ self.qatracker.add_error(
+ "file.UTF8", "%s/%s: line %i, just after: '%s'" % (
+ checkdir, y_file, line, s))
+ finally:
+ if f is not None:
+ f.close()
+ return False
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.check])
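
The UnicodeDecodeError handler above recovers the failing line number by counting newlines in the already-decoded prefix of the data. The same arithmetic on an in-memory byte string, so the sketch runs without an ebuild on disk:

data = b'EAPI=5\nDESCRIPTION="ok"\nHOMEPAGE=\xff\n'

try:
    data.decode("utf-8")
except UnicodeDecodeError as ue:
    line = 1
    s = ue.object[:ue.start]          # everything before the offending byte
    l2 = s.count(b"\n")
    line += l2
    if l2 != 0:
        s = s[s.rfind(b"\n") + 1:]    # keep only the offending line's prefix
    print("file.UTF8: line %i, just after: %r" % (line, s))
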
diff --git a/repoman/pym/repoman/modules/scan/directories/mtime.py b/repoman/pym/repoman/modules/scan/directories/mtime.py
new file mode 100644
index 000000000..134a86b80
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/directories/mtime.py
@@ -0,0 +1,30 @@
+
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class MtimeChecks(ScanBase):
+
+ def __init__(self, **kwargs):
+ self.vcs_settings = kwargs.get('vcs_settings')
+
+ def check(self, **kwargs):
+		'''Perform an mtime preservation check on the ebuild
+
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @param changed: dictionary instance
+ @returns: dictionary
+ '''
+ ebuild = kwargs.get('ebuild').get()
+ changed = kwargs.get('changed')
+ pkg = kwargs.get('pkg').get()
+ if not self.vcs_settings.vcs_preserves_mtime:
+ if ebuild.ebuild_path not in changed.new_ebuilds and \
+ ebuild.ebuild_path not in changed.ebuilds:
+ pkg.mtime = None
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
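
Clearing pkg.mtime here matters because EbuildHeader in checks.py below derives the expected copyright year from it and falls back to a wildcard year when it is None. A tiny stand-in for the decision, with an invented 'changed' container and paths:

class Changed(object):
    # Invented example data; the real object comes from the VCS module.
    new_ebuilds = {"./app-misc/example/example-2.ebuild"}
    ebuilds = {"./app-misc/example/example-1.ebuild"}

def keep_mtime(vcs_preserves_mtime, ebuild_path, changed):
    """True when MtimeChecks.check() above would leave pkg.mtime alone."""
    return (vcs_preserves_mtime
            or ebuild_path in changed.new_ebuilds
            or ebuild_path in changed.ebuilds)

print(keep_mtime(False, "./app-misc/example/example-1.ebuild", Changed()))  # True
print(keep_mtime(False, "./app-misc/example/example-3.ebuild", Changed()))  # False -> mtime cleared
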
diff --git a/repoman/pym/repoman/modules/scan/eapi/__init__.py b/repoman/pym/repoman/modules/scan/eapi/__init__.py
new file mode 100644
index 000000000..4c3dd6e8f
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/eapi/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Eapi plug-in module for repoman.
+Performs EAPI version checks on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'eapi',
+ 'description': doc,
+ 'provides':{
+ 'live-module': {
+ 'name': "eapi",
+ 'sourcefile': "eapi",
+ 'class': "EAPIChecks",
+ 'description': doc,
+ 'functions': ['check'],
+			'func_desc': {
+			},
+ 'mod_kwargs': ['qatracker', 'repo_settings'
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/eapi/eapi.py b/repoman/pym/repoman/modules/scan/eapi/eapi.py
new file mode 100644
index 000000000..1d4ad5a4a
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/eapi/eapi.py
@@ -0,0 +1,49 @@
+
+'''eapi.py
+Perform checks on the EAPI variable.
+'''
+
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class EAPIChecks(ScanBase):
+ '''Perform checks on the EAPI variable.'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ @param repo_settings: Repository settings
+ '''
+ self.qatracker = kwargs.get('qatracker')
+ self.repo_settings = kwargs.get('repo_settings')
+
+ def check(self, **kwargs):
+ '''
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @returns: dictionary
+ '''
+ ebuild = kwargs.get('ebuild').get()
+
+ if not self._checkBanned(ebuild):
+ self._checkDeprecated(ebuild)
+ return False
+
+ def _checkBanned(self, ebuild):
+ if self.repo_settings.repo_config.eapi_is_banned(ebuild.eapi):
+ self.qatracker.add_error(
+ "repo.eapi.banned", "%s: %s" % (ebuild.relative_path, ebuild.eapi))
+ return True
+ return False
+
+ def _checkDeprecated(self, ebuild):
+ if self.repo_settings.repo_config.eapi_is_deprecated(ebuild.eapi):
+ self.qatracker.add_error(
+ "repo.eapi.deprecated", "%s: %s" % (ebuild.relative_path, ebuild.eapi))
+ return True
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
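
In EAPIChecks.check() above a banned EAPI suppresses the deprecation report for the same ebuild. A throwaway harness showing that ordering; the repository-config stub and its EAPI sets are invented for the example:

class StubRepoConfig(object):
    banned = {"1"}
    deprecated = {"4"}

    def eapi_is_banned(self, eapi):
        return eapi in self.banned

    def eapi_is_deprecated(self, eapi):
        return eapi in self.deprecated

def classify(repo_config, eapi):
    # Matches the "if not self._checkBanned(ebuild)" guard above.
    if repo_config.eapi_is_banned(eapi):
        return "repo.eapi.banned"
    if repo_config.eapi_is_deprecated(eapi):
        return "repo.eapi.deprecated"
    return None

cfg = StubRepoConfig()
for eapi in ("1", "4", "6"):
    print(eapi, classify(cfg, eapi))
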
diff --git a/repoman/pym/repoman/modules/scan/ebuild/__init__.py b/repoman/pym/repoman/modules/scan/ebuild/__init__.py
new file mode 100644
index 000000000..8666e78c2
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/ebuild/__init__.py
@@ -0,0 +1,58 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Ebuild plug-in module for repoman.
+Performs an IsEbuild check on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'ebuild',
+ 'description': doc,
+ 'provides':{
+ 'ebuild-module': {
+ 'name': "ebuild",
+ 'sourcefile': "ebuild",
+ 'class': "Ebuild",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker', 'repo_settings', 'vcs_settings',
+ 'checks', 'portdb'
+ ],
+ 'func_kwargs': {
+ 'can_force': (None, None),
+ 'catdir': (None, None),
+ 'changed': (None, None),
+ 'changelog_modified': (None, None),
+ 'checkdir': (None, None),
+ 'checkdirlist': (None, None),
+ 'ebuild': ('Future', 'UNSET'),
+ 'pkg': ('Future', 'UNSET'),
+ 'pkgdir': (None, None),
+ 'pkgs': ('Future', 'dict'),
+ 'repolevel': (None, None),
+ 'validity_future': (None, None),
+ 'xpkg': (None, None),
+ 'y_ebuild': (None, None),
+ },
+ },
+ 'multicheck-module': {
+ 'name': "multicheck",
+ 'sourcefile': "multicheck",
+ 'class': "MultiCheck",
+ 'description': doc,
+ 'functions': ['check'],
+			'func_desc': {
+			},
+ 'mod_kwargs': ['qatracker', 'options'
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/ebuild/checks.py b/repoman/pym/repoman/modules/scan/ebuild/checks.py
new file mode 100644
index 000000000..fb3e01944
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/ebuild/checks.py
@@ -0,0 +1,1007 @@
+# -*- coding:utf-8 -*-
+# repoman: Checks
+# Copyright 2007-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""This module contains functions used in Repoman to ascertain the quality
+and correctness of an ebuild."""
+
+from __future__ import unicode_literals
+
+import codecs
+from itertools import chain
+import re
+import time
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage.eapi import (
+ eapi_supports_prefix, eapi_has_implicit_rdepend,
+ eapi_has_src_prepare_and_src_configure, eapi_has_dosed_dohard,
+ eapi_exports_AA, eapi_has_pkg_pretend)
+
+from . import errors
+
+
+class LineCheck(object):
+ """Run a check on a line of an ebuild."""
+ """A regular expression to determine whether to ignore the line"""
+ ignore_line = False
+ """True if lines containing nothing more than comments with optional
+ leading whitespace should be ignored"""
+ ignore_comment = True
+
+ def new(self, pkg):
+ pass
+
+ def check_eapi(self, eapi):
+		"""Return True if the check should be run for the given EAPI (default: True)"""
+ return True
+
+ def check(self, num, line):
+ """Run the check on line and return error if there is one"""
+ if self.re.match(line):
+ return self.error
+
+ def end(self):
+ pass
+
+
+class PhaseCheck(LineCheck):
+ """ basic class for function detection """
+
+ func_end_re = re.compile(r'^\}$')
+ phases_re = re.compile('(%s)' % '|'.join((
+ 'pkg_pretend', 'pkg_setup', 'src_unpack', 'src_prepare',
+ 'src_configure', 'src_compile', 'src_test', 'src_install',
+ 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm',
+ 'pkg_config')))
+ in_phase = ''
+
+ def check(self, num, line):
+ m = self.phases_re.match(line)
+ if m is not None:
+ self.in_phase = m.group(1)
+ if self.in_phase != '' and self.func_end_re.match(line) is not None:
+ self.in_phase = ''
+
+ return self.phase_check(num, line)
+
+ def phase_check(self, num, line):
+ """ override this function for your checks """
+ pass
+
+
+class EbuildHeader(LineCheck):
+ """Ensure ebuilds have proper headers
+ Copyright header errors
+ CVS header errors
+ License header errors
+
+ Args:
+ modification_year - Year the ebuild was last modified
+ """
+
+ repoman_check_name = 'ebuild.badheader'
+
+ gentoo_copyright = r'^# Copyright ((1999|2\d\d\d)-)?%s Gentoo Foundation$'
+ gentoo_license = (
+ '# Distributed under the terms'
+ ' of the GNU General Public License v2')
+ id_header = '# $Id$'
+ ignore_comment = False
+
+ def new(self, pkg):
+ if pkg.mtime is None:
+ self.modification_year = r'2\d\d\d'
+ else:
+ self.modification_year = str(time.gmtime(pkg.mtime)[0])
+ self.gentoo_copyright_re = re.compile(
+ self.gentoo_copyright % self.modification_year)
+
+ def check(self, num, line):
+ if num > 2:
+ return
+ elif num == 0:
+ if not self.gentoo_copyright_re.match(line):
+ return errors.COPYRIGHT_ERROR
+ elif num == 1 and line.rstrip('\n') != self.gentoo_license:
+ return errors.LICENSE_ERROR
+ #elif num == 2 and line.rstrip('\n') != self.id_header:
+ # return errors.ID_HEADER_ERROR
+
+
+class EbuildWhitespace(LineCheck):
+ """Ensure ebuilds have proper whitespacing"""
+
+ repoman_check_name = 'ebuild.minorsyn'
+
+ ignore_line = re.compile(r'(^$)|(^(\t)*#)')
+ ignore_comment = False
+ leading_spaces = re.compile(r'^[\S\t]')
+ trailing_whitespace = re.compile(r'.*([\S]$)')
+
+ def check(self, num, line):
+ if self.leading_spaces.match(line) is None:
+ return errors.LEADING_SPACES_ERROR
+ if self.trailing_whitespace.match(line) is None:
+ return errors.TRAILING_WHITESPACE_ERROR
+
+
+class EbuildBlankLine(LineCheck):
+ repoman_check_name = 'ebuild.minorsyn'
+ ignore_comment = False
+ blank_line = re.compile(r'^$')
+
+ def new(self, pkg):
+ self.line_is_blank = False
+
+ def check(self, num, line):
+ if self.line_is_blank and self.blank_line.match(line):
+ return 'Useless blank line on line: %d'
+ if self.blank_line.match(line):
+ self.line_is_blank = True
+ else:
+ self.line_is_blank = False
+
+ def end(self):
+ if self.line_is_blank:
+ yield 'Useless blank line on last line'
+
+
+class EbuildQuote(LineCheck):
+	"""Ensure ebuilds have valid quoting around things like D, FILESDIR, etc..."""
+
+ repoman_check_name = 'ebuild.minorsyn'
+ _message_commands = [
+ "die", "echo", "eerror", "einfo", "elog", "eqawarn", "ewarn"]
+ _message_re = re.compile(
+ r'\s(' + "|".join(_message_commands) + r')\s+"[^"]*"\s*$')
+ _ignored_commands = ["local", "export"] + _message_commands
+ ignore_line = re.compile(
+ r'(^$)|(^\s*#.*)|(^\s*\w+=.*)' +
+ r'|(^\s*(' + "|".join(_ignored_commands) + r')\s+)')
+ ignore_comment = False
+ var_names = ["D", "DISTDIR", "FILESDIR", "S", "T", "ROOT", "WORKDIR"]
+
+ # EAPI=3/Prefix vars
+ var_names += ["ED", "EPREFIX", "EROOT"]
+
+ # variables for games.eclass
+ var_names += [
+ "Ddir", "GAMES_PREFIX_OPT", "GAMES_DATADIR",
+ "GAMES_DATADIR_BASE", "GAMES_SYSCONFDIR", "GAMES_STATEDIR",
+ "GAMES_LOGDIR", "GAMES_BINDIR"]
+
+ # variables for multibuild.eclass
+ var_names += ["BUILD_DIR"]
+
+ var_names = "(%s)" % "|".join(var_names)
+ var_reference = re.compile(
+ r'\$(\{%s\}|%s\W)' % (var_names, var_names))
+ missing_quotes = re.compile(
+ r'(\s|^)[^"\'\s]*\$\{?%s\}?[^"\'\s]*(\s|$)' % var_names)
+ cond_begin = re.compile(r'(^|\s+)\[\[($|\\$|\s+)')
+ cond_end = re.compile(r'(^|\s+)\]\]($|\\$|\s+)')
+
+ def check(self, num, line):
+ if self.var_reference.search(line) is None:
+ return
+ # There can be multiple matches / violations on a single line. We
+ # have to make sure none of the matches are violators. Once we've
+ # found one violator, any remaining matches on the same line can
+ # be ignored.
+ pos = 0
+ while pos <= len(line) - 1:
+ missing_quotes = self.missing_quotes.search(line, pos)
+ if not missing_quotes:
+ break
+ # If the last character of the previous match is a whitespace
+ # character, that character may be needed for the next
+ # missing_quotes match, so search overlaps by 1 character.
+ group = missing_quotes.group()
+ pos = missing_quotes.end() - 1
+
+ # Filter out some false positives that can
+ # get through the missing_quotes regex.
+ if self.var_reference.search(group) is None:
+ continue
+
+ # Filter matches that appear to be an
+ # argument to a message command.
+ # For example: false || ewarn "foo $WORKDIR/bar baz"
+ message_match = self._message_re.search(line)
+ if message_match is not None and \
+ message_match.start() < pos and \
+ message_match.end() > pos:
+ break
+
+ # This is an attempt to avoid false positives without getting
+ # too complex, while possibly allowing some (hopefully
+ # unlikely) violations to slip through. We just assume
+			# everything is correct if there is a ' [[ ' or a ' ]] '
+ # anywhere in the whole line (possibly continued over one
+ # line).
+ if self.cond_begin.search(line) is not None:
+ continue
+ if self.cond_end.search(line) is not None:
+ continue
+
+ # Any remaining matches on the same line can be ignored.
+ return errors.MISSING_QUOTES_ERROR
+
+
+class EbuildAssignment(LineCheck):
+ """Ensure ebuilds don't assign to readonly variables."""
+
+ repoman_check_name = 'variable.readonly'
+ read_only_vars = 'A|CATEGORY|P|P[VNRF]|PVR|D|WORKDIR|FILESDIR|FEATURES|USE'
+ readonly_assignment = re.compile(r'^\s*(export\s+)?(%s)=' % read_only_vars)
+
+ def check(self, num, line):
+ match = self.readonly_assignment.match(line)
+ e = None
+ if match is not None:
+ e = errors.READONLY_ASSIGNMENT_ERROR
+ return e
+
+
+class Eapi3EbuildAssignment(EbuildAssignment):
+ """Ensure ebuilds don't assign to readonly EAPI 3-introduced variables."""
+
+ readonly_assignment = re.compile(r'\s*(export\s+)?(ED|EPREFIX|EROOT)=')
+
+ def check_eapi(self, eapi):
+ return eapi_supports_prefix(eapi)
+
+
+class EbuildNestedDie(LineCheck):
+ """Check ebuild for nested die statements (die statements in subshells)"""
+
+ repoman_check_name = 'ebuild.nesteddie'
+ nesteddie_re = re.compile(r'^[^#]*\s\(\s[^)]*\bdie\b')
+
+ def check(self, num, line):
+ if self.nesteddie_re.match(line):
+ return errors.NESTED_DIE_ERROR
+
+
+class EbuildUselessDodoc(LineCheck):
+ """Check ebuild for useless files in dodoc arguments."""
+ repoman_check_name = 'ebuild.minorsyn'
+ uselessdodoc_re = re.compile(
+ r'^\s*dodoc(\s+|\s+.*\s+)(ABOUT-NLS|COPYING|LICENCE|LICENSE)($|\s)')
+
+ def check(self, num, line):
+ match = self.uselessdodoc_re.match(line)
+ if match:
+ return "Useless dodoc '%s'" % (match.group(2), ) + " on line: %d"
+
+
+class EbuildUselessCdS(LineCheck):
+ """Check for redundant cd ${S} statements"""
+ repoman_check_name = 'ebuild.minorsyn'
+ _src_phases = r'^\s*src_(prepare|configure|compile|install|test)\s*\(\)'
+ method_re = re.compile(_src_phases)
+ cds_re = re.compile(r'^\s*cd\s+("\$(\{S\}|S)"|\$(\{S\}|S))\s')
+
+ def __init__(self):
+ self.check_next_line = False
+
+ def check(self, num, line):
+ if self.check_next_line:
+ self.check_next_line = False
+ if self.cds_re.match(line):
+ return errors.REDUNDANT_CD_S_ERROR
+ elif self.method_re.match(line):
+ self.check_next_line = True
+
+
+class EapiDefinition(LineCheck):
+ """
+ Check that EAPI assignment conforms to PMS section 7.3.1
+ (first non-comment, non-blank line).
+ """
+ repoman_check_name = 'EAPI.definition'
+ ignore_comment = True
+ _eapi_re = portage._pms_eapi_re
+
+ def new(self, pkg):
+ self._cached_eapi = pkg.eapi
+ self._parsed_eapi = None
+ self._eapi_line_num = None
+
+ def check(self, num, line):
+ if self._eapi_line_num is None and line.strip():
+ self._eapi_line_num = num + 1
+ m = self._eapi_re.match(line)
+ if m is not None:
+ self._parsed_eapi = m.group(2)
+
+ def end(self):
+ if self._parsed_eapi is None:
+ if self._cached_eapi != "0":
+ yield "valid EAPI assignment must occur on or before line: %s" % \
+ self._eapi_line_num
+ elif self._parsed_eapi != self._cached_eapi:
+ yield (
+ "bash returned EAPI '%s' which does not match "
+ "assignment on line: %s" %
+ (self._cached_eapi, self._eapi_line_num))
+
+
+class EbuildPatches(LineCheck):
+ """Ensure ebuilds use bash arrays for PATCHES to ensure white space safety"""
+ repoman_check_name = 'ebuild.patches'
+ re = re.compile(r'^\s*PATCHES=[^\(]')
+ error = errors.PATCHES_ERROR
+
+ def check_eapi(self, eapi):
+ return eapi in ("0", "1", "2", "3", "4", "4-python",
+ "4-slot-abi", "5", "5-hdepend", "5-progress")
+
+
+class EbuildQuotedA(LineCheck):
+ """Ensure ebuilds have no quoting around ${A}"""
+
+ repoman_check_name = 'ebuild.minorsyn'
+ a_quoted = re.compile(r'.*\"\$(\{A\}|A)\"')
+
+ def check(self, num, line):
+ match = self.a_quoted.match(line)
+ if match:
+ return "Quoted \"${A}\" on line: %d"
+
+
+class NoOffsetWithHelpers(LineCheck):
+ """ Check that the image location, the alternate root offset, and the
+ offset prefix (D, ROOT, ED, EROOT and EPREFIX) are not used with
+ helpers """
+
+ repoman_check_name = 'variable.usedwithhelpers'
+ # Ignore matches in quoted strings like this:
+ # elog "installed into ${ROOT}usr/share/php5/apc/."
+ _install_funcs = (
+ 'docinto|do(compress|dir|hard)'
+ '|exeinto|fowners|fperms|insinto|into')
+ _quoted_vars = 'D|ROOT|ED|EROOT|EPREFIX'
+ re = re.compile(
+ r'^[^#"\']*\b(%s)\s+"?\$\{?(%s)\b.*' %
+ (_install_funcs, _quoted_vars))
+ error = errors.NO_OFFSET_WITH_HELPERS
+
+
+class ImplicitRuntimeDeps(LineCheck):
+ """
+ Detect the case where DEPEND is set and RDEPEND is unset in the ebuild,
+ since this triggers implicit RDEPEND=$DEPEND assignment (prior to EAPI 4).
+ """
+
+ repoman_check_name = 'RDEPEND.implicit'
+ _assignment_re = re.compile(r'^\s*(R?DEPEND)\+?=')
+
+ def new(self, pkg):
+ self._rdepend = False
+ self._depend = False
+
+ def check_eapi(self, eapi):
+ # Beginning with EAPI 4, there is no
+ # implicit RDEPEND=$DEPEND assignment
+ # to be concerned with.
+ return eapi_has_implicit_rdepend(eapi)
+
+ def check(self, num, line):
+ if not self._rdepend:
+ m = self._assignment_re.match(line)
+ if m is None:
+ pass
+ elif m.group(1) == "RDEPEND":
+ self._rdepend = True
+ elif m.group(1) == "DEPEND":
+ self._depend = True
+
+ def end(self):
+ if self._depend and not self._rdepend:
+ yield 'RDEPEND is not explicitly assigned'
+
+
+class InheritDeprecated(LineCheck):
+ """Check if ebuild directly or indirectly inherits a deprecated eclass."""
+
+ repoman_check_name = 'inherit.deprecated'
+
+ # deprecated eclass : new eclass (False if no new eclass)
+ deprecated_eclasses = {
+ "base": False,
+ "bash-completion": "bash-completion-r1",
+ "boost-utils": False,
+ "distutils": "distutils-r1",
+ "games": False,
+ "gems": "ruby-fakegem",
+ "mono": "mono-env",
+ "python": "python-r1 / python-single-r1 / python-any-r1",
+ "ruby": "ruby-ng",
+ "x-modular": "xorg-2",
+ "gst-plugins-bad": "gstreamer",
+ "gst-plugins-base": "gstreamer",
+ "gst-plugins-good": "gstreamer",
+ "gst-plugins-ugly": "gstreamer",
+ "gst-plugins10": "gstreamer",
+ "clutter": "gnome2",
+ }
+
+ _inherit_re = re.compile(r'^\s*inherit\s(.*)$')
+
+ def new(self, pkg):
+ self._errors = []
+
+ def check(self, num, line):
+ direct_inherits = None
+ m = self._inherit_re.match(line)
+ if m is not None:
+ direct_inherits = m.group(1)
+ if direct_inherits:
+ direct_inherits = direct_inherits.split()
+
+ if not direct_inherits:
+ return
+
+ for eclass in direct_inherits:
+ replacement = self.deprecated_eclasses.get(eclass)
+ if replacement is None:
+ pass
+ elif replacement is False:
+ self._errors.append(
+ "please migrate from "
+ "'%s' (no replacement) on line: %d" % (eclass, num + 1))
+ else:
+ self._errors.append(
+ "please migrate from "
+ "'%s' to '%s' on line: %d" % (eclass, replacement, num + 1))
+
+ def end(self):
+ for error in self._errors:
+ yield error
+ del self._errors
+
+
+
+class InheritEclass(LineCheck):
+ """
+ Base class for checking for missing inherits, as well as excess inherits.
+
+ Args:
+ eclass: Set to the name of your eclass.
+ funcs: A tuple of functions that this eclass provides.
+ comprehensive: Is the list of functions complete?
+ exempt_eclasses: If these eclasses are inherited, disable the missing
+ inherit check.
+ """
+
+ def __init__(
+ self, eclass, funcs=None, comprehensive=False,
+ exempt_eclasses=None, ignore_missing=False, **kwargs):
+ self._eclass = eclass
+ self._comprehensive = comprehensive
+ self._exempt_eclasses = exempt_eclasses
+ self._ignore_missing = ignore_missing
+ inherit_re = eclass
+ self._inherit_re = re.compile(
+ r'^(\s*|.*[|&]\s*)\binherit\s(.*\s)?%s(\s|$)' % inherit_re)
+ # Match when the function is preceded only by leading whitespace, a
+ # shell operator such as (, {, |, ||, or &&, or optional variable
+ # setting(s). This prevents false positives in things like elog
+ # messages, as reported in bug #413285.
+ self._func_re = re.compile(
+ r'(^|[|&{(])\s*(\w+=.*)?\b(' + '|'.join(funcs) + r')\b')
+
+ def new(self, pkg):
+ self.repoman_check_name = 'inherit.missing'
+ # We can't use pkg.inherited because that tells us all the eclasses that
+ # have been inherited and not just the ones we inherit directly.
+ self._inherit = False
+ self._func_call = False
+ if self._exempt_eclasses is not None:
+ inherited = pkg.inherited
+ self._disabled = any(x in inherited for x in self._exempt_eclasses)
+ else:
+ self._disabled = False
+ self._eapi = pkg.eapi
+
+ def check(self, num, line):
+ if not self._inherit:
+ self._inherit = self._inherit_re.match(line)
+ if not self._inherit:
+ if self._disabled or self._ignore_missing:
+ return
+ s = self._func_re.search(line)
+ if s is not None:
+ func_name = s.group(3)
+ eapi_func = _eclass_eapi_functions.get(func_name)
+ if eapi_func is None or not eapi_func(self._eapi):
+ self._func_call = True
+ return (
+ '%s.eclass is not inherited, '
+ 'but "%s" found at line: %s' %
+ (self._eclass, func_name, '%d'))
+ elif not self._func_call:
+ self._func_call = self._func_re.search(line)
+
+ def end(self):
+ if not self._disabled and self._comprehensive and self._inherit \
+ and not self._func_call:
+ self.repoman_check_name = 'inherit.unused'
+ yield 'no function called from %s.eclass; please drop' % self._eclass
+
+_usex_supported_eapis = ("0", "1", "2", "3", "4", "4-python", "4-slot-abi")
+_in_iuse_supported_eapis = ("0", "1", "2", "3", "4", "4-python", "4-slot-abi",
+ "5", "5-hdepend", "5-progress")
+_get_libdir_supported_eapis = _in_iuse_supported_eapis
+_eclass_eapi_functions = {
+ "usex": lambda eapi: eapi not in _usex_supported_eapis,
+ "in_iuse": lambda eapi: eapi not in _in_iuse_supported_eapis,
+ "get_libdir": lambda eapi: eapi not in _get_libdir_supported_eapis,
+}
+
+# eclasses that export ${ECLASS}_src_(compile|configure|install)
+_eclass_export_functions = (
+ 'ant-tasks', 'apache-2', 'apache-module', 'aspell-dict',
+ 'autotools-utils', 'base', 'bsdmk', 'cannadic',
+ 'clutter', 'cmake-utils', 'db', 'distutils', 'elisp',
+ 'embassy', 'emboss', 'emul-linux-x86', 'enlightenment',
+ 'font-ebdftopcf', 'font', 'fox', 'freebsd', 'freedict',
+ 'games', 'games-ggz', 'games-mods', 'gdesklets',
+ 'gems', 'gkrellm-plugin', 'gnatbuild', 'gnat', 'gnome2',
+ 'gnome-python-common', 'gnustep-base', 'go-mono', 'gpe',
+ 'gst-plugins-bad', 'gst-plugins-base', 'gst-plugins-good',
+ 'gst-plugins-ugly', 'gtk-sharp-module', 'haskell-cabal',
+ 'horde', 'java-ant-2', 'java-pkg-2', 'java-pkg-simple',
+ 'java-virtuals-2', 'kde4-base', 'kde4-meta', 'kernel-2',
+ 'latex-package', 'linux-mod', 'mozlinguas', 'myspell',
+ 'myspell-r2', 'mysql', 'mysql-v2', 'mythtv-plugins',
+ 'oasis', 'obs-service', 'office-ext', 'perl-app',
+ 'perl-module', 'php-ext-base-r1', 'php-ext-pecl-r2',
+ 'php-ext-source-r2', 'php-lib-r1', 'php-pear-lib-r1',
+ 'php-pear-r1', 'python-distutils-ng', 'python',
+ 'qt4-build', 'qt4-r2', 'rox-0install', 'rox', 'ruby',
+ 'ruby-ng', 'scsh', 'selinux-policy-2', 'sgml-catalog',
+ 'stardict', 'sword-module', 'tetex-3', 'tetex',
+ 'texlive-module', 'toolchain-binutils', 'toolchain',
+ 'twisted', 'vdr-plugin-2', 'vdr-plugin', 'vim',
+ 'vim-plugin', 'vim-spell', 'virtuoso', 'vmware',
+ 'vmware-mod', 'waf-utils', 'webapp', 'xemacs-elisp',
+ 'xemacs-packages', 'xfconf', 'x-modular', 'xorg-2',
+ 'zproduct'
+)
+
+_eclass_info = {
+ 'autotools': {
+ 'funcs': (
+ 'eaclocal', 'eautoconf', 'eautoheader',
+ 'eautomake', 'eautoreconf', '_elibtoolize',
+ 'eautopoint'
+ ),
+ 'comprehensive': True,
+
+ # Exempt eclasses:
+ # git - An EGIT_BOOTSTRAP variable may be used to call one of
+ # the autotools functions.
+ # subversion - An ESVN_BOOTSTRAP variable may be used to call one of
+ # the autotools functions.
+ 'exempt_eclasses': ('git', 'git-2', 'subversion', 'autotools-utils')
+ },
+
+ 'eutils': {
+ 'funcs': (
+ 'estack_push', 'estack_pop', 'eshopts_push', 'eshopts_pop',
+ 'eumask_push', 'eumask_pop', 'epatch', 'epatch_user',
+ 'emktemp', 'edos2unix', 'in_iuse', 'use_if_iuse', 'usex'
+ ),
+ 'comprehensive': False,
+
+		# These are "the eclass provides the whole ebuild" type eclasses.
+ 'exempt_eclasses': _eclass_export_functions,
+ },
+
+ 'flag-o-matic': {
+ 'funcs': (
+ 'filter-(ld)?flags', 'strip-flags', 'strip-unsupported-flags',
+ 'append-((ld|c(pp|xx)?))?flags', 'append-libs',
+ ),
+ 'comprehensive': False
+ },
+
+ 'libtool': {
+ 'funcs': (
+ 'elibtoolize',
+ ),
+ 'comprehensive': True,
+ 'exempt_eclasses': ('autotools',)
+ },
+
+ 'multilib': {
+ 'funcs': (
+ 'get_libdir',
+ ),
+
+		# These are "the eclass provides the whole ebuild" type eclasses.
+ 'exempt_eclasses': _eclass_export_functions + (
+ 'autotools', 'libtool', 'multilib-minimal'),
+
+ 'comprehensive': False
+ },
+
+ 'multiprocessing': {
+ 'funcs': (
+ 'makeopts_jobs',
+ ),
+ 'comprehensive': False
+ },
+
+ 'prefix': {
+ 'funcs': (
+ 'eprefixify',
+ ),
+ 'comprehensive': True
+ },
+
+ 'toolchain-funcs': {
+ 'funcs': (
+ 'gen_usr_ldscript',
+ ),
+ 'comprehensive': False
+ },
+
+ 'user': {
+ 'funcs': (
+ 'enewuser', 'enewgroup',
+ 'egetent', 'egethome', 'egetshell', 'esethome'
+ ),
+ 'comprehensive': True
+ }
+}
+
+
+class EMakeParallelDisabled(PhaseCheck):
+ """Check for emake -j1 calls which disable parallelization."""
+ repoman_check_name = 'upstream.workaround'
+ re = re.compile(r'^\s*emake\s+.*-j\s*1\b')
+ error = errors.EMAKE_PARALLEL_DISABLED
+
+ def phase_check(self, num, line):
+ if self.in_phase == 'src_compile' or self.in_phase == 'src_install':
+ if self.re.match(line):
+ return self.error
+
+
+class EMakeParallelDisabledViaMAKEOPTS(LineCheck):
+ """Check for MAKEOPTS=-j1 that disables parallelization."""
+ repoman_check_name = 'upstream.workaround'
+ re = re.compile(r'^\s*MAKEOPTS=(\'|")?.*-j\s*1\b')
+ error = errors.EMAKE_PARALLEL_DISABLED_VIA_MAKEOPTS
+
+
+class NoAsNeeded(LineCheck):
+ """Check for calls to the no-as-needed function."""
+ repoman_check_name = 'upstream.workaround'
+ re = re.compile(r'.*\$\(no-as-needed\)')
+ error = errors.NO_AS_NEEDED
+
+
+class PreserveOldLib(LineCheck):
+ """Check for calls to the deprecated preserve_old_lib function."""
+ repoman_check_name = 'ebuild.minorsyn'
+ re = re.compile(r'.*preserve_old_lib')
+ error = errors.PRESERVE_OLD_LIB
+
+
+class SandboxAddpredict(LineCheck):
+ """Check for calls to the addpredict function."""
+ repoman_check_name = 'upstream.workaround'
+ re = re.compile(r'(^|\s)addpredict\b')
+ error = errors.SANDBOX_ADDPREDICT
+
+
+class DeprecatedBindnowFlags(LineCheck):
+ """Check for calls to the deprecated bindnow-flags function."""
+ repoman_check_name = 'ebuild.minorsyn'
+ re = re.compile(r'.*\$\(bindnow-flags\)')
+ error = errors.DEPRECATED_BINDNOW_FLAGS
+
+
+class WantAutoDefaultValue(LineCheck):
+ """Check setting WANT_AUTO* to latest (default value)."""
+ repoman_check_name = 'ebuild.minorsyn'
+ _re = re.compile(r'^WANT_AUTO(CONF|MAKE)=(\'|")?latest')
+
+ def check(self, num, line):
+ m = self._re.match(line)
+ if m is not None:
+ return 'WANT_AUTO' + m.group(1) + \
+ ' redundantly set to default value "latest" on line: %d'
+
+
+class SrcCompileEconf(PhaseCheck):
+ repoman_check_name = 'ebuild.minorsyn'
+ configure_re = re.compile(r'\s(econf|./configure)')
+
+ def check_eapi(self, eapi):
+ return eapi_has_src_prepare_and_src_configure(eapi)
+
+ def phase_check(self, num, line):
+ if self.in_phase == 'src_compile':
+ m = self.configure_re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " call should be moved to src_configure from line: %d"
+
+
+class SrcUnpackPatches(PhaseCheck):
+ repoman_check_name = 'ebuild.minorsyn'
+ src_prepare_tools_re = re.compile(r'\s(e?patch|sed)\s')
+
+ def check_eapi(self, eapi):
+ return eapi_has_src_prepare_and_src_configure(eapi)
+
+ def phase_check(self, num, line):
+ if self.in_phase == 'src_unpack':
+ m = self.src_prepare_tools_re.search(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " call should be moved to src_prepare from line: %d"
+
+
+class BuiltWithUse(LineCheck):
+ repoman_check_name = 'ebuild.minorsyn'
+ re = re.compile(r'(^|.*\b)built_with_use\b')
+ error = errors.BUILT_WITH_USE
+
+
+class DeprecatedUseq(LineCheck):
+ """Checks for use of the deprecated useq function"""
+ repoman_check_name = 'ebuild.minorsyn'
+ re = re.compile(r'(^|.*\b)useq\b')
+ error = errors.USEQ_ERROR
+
+
+class DeprecatedHasq(LineCheck):
+ """Checks for use of the deprecated hasq function"""
+ repoman_check_name = 'ebuild.minorsyn'
+ re = re.compile(r'(^|.*\b)hasq\b')
+ error = errors.HASQ_ERROR
+
+
+# EAPI <2 checks
+class UndefinedSrcPrepareSrcConfigurePhases(LineCheck):
+ repoman_check_name = 'EAPI.incompatible'
+ src_configprepare_re = re.compile(r'\s*(src_configure|src_prepare)\s*\(\)')
+
+ def check_eapi(self, eapi):
+ return not eapi_has_src_prepare_and_src_configure(eapi)
+
+ def check(self, num, line):
+ m = self.src_configprepare_re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " phase is not defined in EAPI < 2 on line: %d"
+
+
+# EAPI-3 checks
+class Eapi3DeprecatedFuncs(LineCheck):
+ repoman_check_name = 'EAPI.deprecated'
+ deprecated_commands_re = re.compile(r'^\s*(check_license)\b')
+
+ def check_eapi(self, eapi):
+ return eapi not in ('0', '1', '2')
+
+ def check(self, num, line):
+ m = self.deprecated_commands_re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " has been deprecated in EAPI=3 on line: %d"
+
+
+# EAPI <4 checks
+class UndefinedPkgPretendPhase(LineCheck):
+ repoman_check_name = 'EAPI.incompatible'
+ pkg_pretend_re = re.compile(r'\s*(pkg_pretend)\s*\(\)')
+
+ def check_eapi(self, eapi):
+ return not eapi_has_pkg_pretend(eapi)
+
+ def check(self, num, line):
+ m = self.pkg_pretend_re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " phase is not defined in EAPI < 4 on line: %d"
+
+
+# EAPI-4 checks
+class Eapi4IncompatibleFuncs(LineCheck):
+ repoman_check_name = 'EAPI.incompatible'
+ banned_commands_re = re.compile(r'^\s*(dosed|dohard)')
+
+ def check_eapi(self, eapi):
+ return not eapi_has_dosed_dohard(eapi)
+
+ def check(self, num, line):
+ m = self.banned_commands_re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(1)) + \
+ " has been banned in EAPI=4 on line: %d"
+
+
+class Eapi4GoneVars(LineCheck):
+ repoman_check_name = 'EAPI.incompatible'
+ undefined_vars_re = re.compile(
+ r'.*\$(\{(AA|KV|EMERGE_FROM)\}|(AA|KV|EMERGE_FROM))')
+
+ def check_eapi(self, eapi):
+ # AA, KV, and EMERGE_FROM should not be referenced in EAPI 4 or later.
+ return not eapi_exports_AA(eapi)
+
+ def check(self, num, line):
+ m = self.undefined_vars_re.match(line)
+ if m is not None:
+ return ("variable '$%s'" % m.group(1)) + \
+ " is gone in EAPI=4 on line: %d"
+
+
+class PortageInternal(LineCheck):
+ repoman_check_name = 'portage.internal'
+ ignore_comment = True
+ # Match when the command is preceded only by leading whitespace or a shell
+ # operator such as (, {, |, ||, or &&. This prevents false positives in
+ # things like elog messages, as reported in bug #413285.
+
+ internal_portage_func_or_var = (
+ 'ecompress|ecompressdir|env-update|prepall|prepalldocs|preplib')
+ re = re.compile(
+ r'^(\s*|.*[|&{(]+\s*)\b(%s)\b' % internal_portage_func_or_var)
+
+ def check(self, num, line):
+ """Run the check on line and return error if there is one"""
+ m = self.re.match(line)
+ if m is not None:
+ return ("'%s'" % m.group(2)) + " called on line: %d"
+
+
+class PortageInternalVariableAssignment(LineCheck):
+ repoman_check_name = 'portage.internal'
+ internal_assignment = re.compile(
+ r'\s*(export\s+)?(EXTRA_ECONF|EXTRA_EMAKE)\+?=')
+
+ def check(self, num, line):
+ match = self.internal_assignment.match(line)
+ e = None
+ if match is not None:
+ e = 'Assignment to variable %s' % match.group(2)
+ e += ' on line: %d'
+ return e
+
+_base_check_classes = (InheritEclass, LineCheck, PhaseCheck)
+_constant_checks = None
+
+
+def checks_init(experimental_inherit=False):
+
+ global _constant_checks, _eclass_info
+
+ if not experimental_inherit:
+ # Emulate the old eprefixify.defined and inherit.autotools checks.
+ _eclass_info = {
+ 'autotools': {
+ 'funcs': (
+ 'eaclocal', 'eautoconf', 'eautoheader',
+ 'eautomake', 'eautoreconf', '_elibtoolize',
+ 'eautopoint'
+ ),
+ 'comprehensive': True,
+ 'ignore_missing': True,
+ 'exempt_eclasses': ('git', 'git-2', 'subversion', 'autotools-utils')
+ },
+
+ 'prefix': {
+ 'funcs': (
+ 'eprefixify',
+ ),
+ 'comprehensive': False
+ }
+ }
+
+ _constant_checks = tuple(
+ chain((
+ v() for k, v in globals().items()
+ if (
+ isinstance(v, type)
+ and issubclass(v, LineCheck)
+ and v not in _base_check_classes)), (
+ InheritEclass(k, **portage._native_kwargs(kwargs))
+ for k, kwargs in _eclass_info.items())))
+
+
+_here_doc_re = re.compile(r'.*<<[-]?(\w+)\s*(>\s*\S+\s*)?$')
+_ignore_comment_re = re.compile(r'^\s*#')
+
+
+def run_checks(contents, pkg):
+ unicode_escape_codec = codecs.lookup('unicode_escape')
+ unicode_escape = lambda x: unicode_escape_codec.decode(x)[0]
+ if _constant_checks is None:
+ checks_init()
+ checks = _constant_checks
+ here_doc_delim = None
+ multiline = None
+
+ for lc in checks:
+ lc.new(pkg)
+
+ multinum = 0
+ for num, line in enumerate(contents):
+
+ # Check if we're inside a here-document.
+ if here_doc_delim is not None:
+ if here_doc_delim.match(line):
+ here_doc_delim = None
+ if here_doc_delim is None:
+ here_doc = _here_doc_re.match(line)
+ if here_doc is not None:
+ here_doc_delim = re.compile(r'^\s*%s$' % here_doc.group(1))
+ if here_doc_delim is not None:
+ continue
+
+ # Unroll multiline escaped strings so that we can check things:
+ # inherit foo bar \
+ # moo \
+ # cow
+ # This will merge these lines like so:
+ # inherit foo bar moo cow
+ try:
+ # A normal line will end in the two bytes: <\> <\n>. So decoding
+ # that will result in python thinking the <\n> is being escaped
+ # and eat the single <\> which makes it hard for us to detect.
+ # Instead, strip the newline (which we know all lines have), and
+ # append a <0>. Then when python escapes it, if the line ended
+ # in a <\>, we'll end up with a <\0> marker to key off of. This
+ # shouldn't be a problem with any valid ebuild ...
+ line_escaped = unicode_escape(line.rstrip('\n') + '0')
+ except SystemExit:
+ raise
+ except:
+ # Who knows what kind of crazy crap an ebuild will have
+ # in it -- don't allow it to kill us.
+ line_escaped = line
+ if multiline:
+ # Chop off the \ and \n bytes from the previous line.
+ multiline = multiline[:-2] + line
+ if not line_escaped.endswith('\0'):
+ line = multiline
+ num = multinum
+ multiline = None
+ else:
+ continue
+ else:
+ if line_escaped.endswith('\0'):
+ multinum = num
+ multiline = line
+ continue
+
+ if not line.endswith("#nowarn\n"):
+ # Finally we have a full line to parse.
+ is_comment = _ignore_comment_re.match(line) is not None
+ for lc in checks:
+ if is_comment and lc.ignore_comment:
+ continue
+ if lc.check_eapi(pkg.eapi):
+ ignore = lc.ignore_line
+ if not ignore or not ignore.match(line):
+ e = lc.check(num, line)
+ if e:
+ yield lc.repoman_check_name, e % (num + 1)
+
+ for lc in checks:
+ i = lc.end()
+ if i is not None:
+ for e in i:
+ yield lc.repoman_check_name, e
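
run_checks() above drives every LineCheck subclass over each line of the ebuild, skipping here-documents, unrolling backslash continuations and honouring ignore_line/ignore_comment. A reduced model of that protocol with one stub check and none of the here-doc or continuation handling:

import re

class TrailingWhitespace(object):
    """Stub LineCheck-style check, loosely modelled on EbuildWhitespace above."""
    repoman_check_name = 'ebuild.minorsyn'
    ignore_comment = True
    trailing_re = re.compile(r'[ \t]+\n$')

    def new(self, pkg):
        pass

    def check(self, num, line):
        if self.trailing_re.search(line):
            return 'Trailing whitespace error on line: %d'

    def end(self):
        return iter(())

contents = [
    'EAPI=6\n',
    'SRC_URI="mirror://example/example.tar.gz" \n',  # note the trailing space
    'DESCRIPTION="demo"\n',
]

check = TrailingWhitespace()
check.new(None)
for num, line in enumerate(contents):
    e = check.check(num, line)
    if e:
        # run_checks() fills %d with the 1-based line number the same way.
        print(check.repoman_check_name, e % (num + 1))
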
diff --git a/repoman/pym/repoman/modules/scan/ebuild/ebuild.py b/repoman/pym/repoman/modules/scan/ebuild/ebuild.py
new file mode 100644
index 000000000..28cb8b407
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/ebuild/ebuild.py
@@ -0,0 +1,238 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import re
+import stat
+
+from _emerge.Package import Package
+from _emerge.RootConfig import RootConfig
+
+from repoman.modules.scan.scanbase import ScanBase
+from repoman.qa_data import no_exec, allvars
+# import our initialized portage instance
+from repoman._portage import portage
+from portage import os
+from portage.const import LIVE_ECLASSES
+from portage.exception import InvalidPackageName
+
+pv_toolong_re = re.compile(r'[0-9]{19,}')
+
+
+class Ebuild(ScanBase):
+ '''Class to run primary checks on ebuilds'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param qatracker: QATracker instance
+ @param portdb: portdb instance
+ @param repo_settings: repository settings instance
+ @param vcs_settings: VCSSettings instance
+ @param checks: checks dictionary
+ '''
+ super(Ebuild, self).__init__(**kwargs)
+ self.qatracker = kwargs.get('qatracker')
+ self.portdb = kwargs.get('portdb')
+ self.repo_settings = kwargs.get('repo_settings')
+ self.vcs_settings = kwargs.get('vcs_settings')
+ self.checks = kwargs.get('checks')
+ self.root_config = RootConfig(self.repo_settings.repoman_settings,
+ self.repo_settings.trees[self.repo_settings.root], None)
+ self.changed = None
+ self.xpkg = None
+ self.y_ebuild = None
+ self.pkg = None
+ self.metadata = None
+ self.eapi = None
+ self.inherited = None
+ self.live_ebuild = None
+ self.keywords = None
+ self.pkgs = {}
+
+ def _set_paths(self, **kwargs):
+ repolevel = kwargs.get('repolevel')
+ self.relative_path = os.path.join(self.xpkg, self.y_ebuild + ".ebuild")
+ self.full_path = os.path.join(self.repo_settings.repodir, self.relative_path)
+ self.ebuild_path = self.y_ebuild + ".ebuild"
+ if repolevel < 3:
+ self.ebuild_path = os.path.join(kwargs.get('pkgdir'), self.ebuild_path)
+ if repolevel < 2:
+ self.ebuild_path = os.path.join(kwargs.get('catdir'), self.ebuild_path)
+ self.ebuild_path = os.path.join(".", self.ebuild_path)
+
+ @property
+ def untracked(self):
+ '''Determines and returns if the ebuild is not tracked by the vcs'''
+ do_check = self.vcs_settings.vcs in ("cvs", "svn", "bzr")
+ really_notadded = (self.checks['ebuild_notadded'] and
+ self.y_ebuild not in self.vcs_settings.eadded)
+ if do_check and really_notadded:
+ # ebuild not added to vcs
+ return True
+ return False
+
+ def check(self, **kwargs):
+		'''Perform changelog and untracked checks on the ebuild
+
+ @param xpkg: Package in which we check (object).
+ @param y_ebuild: Ebuild which we check (string).
+ @param changed: dictionary instance
+ @param repolevel: The depth within the repository
+		@param catdir: The category directory
+ @param pkgdir: the package directory
+ @returns: dictionary, including {ebuild object}
+ '''
+ self.xpkg = kwargs.get('xpkg')
+ self.y_ebuild = kwargs.get('y_ebuild')
+ self.changed = kwargs.get('changed')
+ changelog_modified = kwargs.get('changelog_modified')
+ self._set_paths(**kwargs)
+
+ if self.checks['changelog'] and not changelog_modified \
+ and self.ebuild_path in self.changed.new_ebuilds:
+ self.qatracker.add_error('changelog.ebuildadded', self.relative_path)
+
+ if self.untracked:
+ # ebuild not added to vcs
+ self.qatracker.add_error(
+ "ebuild.notadded", self.xpkg + "/" + self.y_ebuild + ".ebuild")
+ # update the dynamic data
+ dyn_ebuild = kwargs.get('ebuild')
+ dyn_ebuild.set(self)
+ return False
+
+ def set_pkg_data(self, **kwargs):
+ '''Sets some classwide data needed for some of the checks
+
+ @returns: dictionary
+ '''
+ self.pkg = self.pkgs[self.y_ebuild]
+ self.metadata = self.pkg._metadata
+ self.eapi = self.metadata["EAPI"]
+ self.inherited = self.pkg.inherited
+ self.live_ebuild = LIVE_ECLASSES.intersection(self.inherited)
+ self.keywords = self.metadata["KEYWORDS"].split()
+ self.archs = set(kw.lstrip("~") for kw in self.keywords if not kw.startswith("-"))
+ return False
+
+ def bad_split_check(self, **kwargs):
+ '''Checks for bad category/package splits.
+
+ @param pkgdir: string: path
+ @returns: dictionary
+ '''
+ pkgdir = kwargs.get('pkgdir')
+ myesplit = portage.pkgsplit(self.y_ebuild)
+ is_bad_split = myesplit is None or myesplit[0] != self.xpkg.split("/")[-1]
+ if is_bad_split:
+ is_pv_toolong = pv_toolong_re.search(myesplit[1])
+ is_pv_toolong2 = pv_toolong_re.search(myesplit[2])
+ if is_pv_toolong or is_pv_toolong2:
+ self.qatracker.add_error(
+ "ebuild.invalidname", self.xpkg + "/" + self.y_ebuild + ".ebuild")
+ return True
+ elif myesplit[0] != pkgdir:
+ print(pkgdir, myesplit[0])
+ self.qatracker.add_error(
+ "ebuild.namenomatch", self.xpkg + "/" + self.y_ebuild + ".ebuild")
+ return True
+ return False
+
+ def pkg_invalid(self, **kwargs):
+ '''Sets some pkg info and checks for invalid packages
+
+ @param validity_future: Future instance
+ @returns: dictionary, including {pkg object}
+ '''
+ fuse = kwargs.get('validity_future')
+ dyn_pkg = kwargs.get('pkg')
+ if self.pkg.invalid:
+ for k, msgs in self.pkg.invalid.items():
+ for msg in msgs:
+ self.qatracker.add_error(k, "%s: %s" % (self.relative_path, msg))
+ # update the dynamic data
+ fuse.set(False, ignore_InvalidState=True)
+ dyn_pkg.set(self.pkg)
+ return True
+ # update the dynamic data
+ dyn_pkg.set(self.pkg)
+ return False
+
+ def check_isebuild(self, **kwargs):
+		'''Test the file for qualifications that it is an ebuild
+
+ @param checkdirlist: list of files in the current package directory
+ @param checkdir: current package directory path
+ @param xpkg: current package directory being checked
+ @param validity_future: Future instance
+ @returns: dictionary, including {pkgs, can_force}
+ '''
+ checkdirlist = kwargs.get('checkdirlist').get()
+ checkdir = kwargs.get('checkdir')
+ xpkg = kwargs.get('xpkg')
+ fuse = kwargs.get('validity_future')
+ can_force = kwargs.get('can_force')
+ self.continue_ = False
+ ebuildlist = []
+ pkgs = {}
+ for y in checkdirlist:
+ file_is_ebuild = y.endswith(".ebuild")
+ file_should_be_non_executable = y in no_exec or file_is_ebuild
+
+ if file_should_be_non_executable:
+ file_is_executable = stat.S_IMODE(
+ os.stat(os.path.join(checkdir, y)).st_mode) & 0o111
+
+ if file_is_executable:
+ self.qatracker.add_error("file.executable", os.path.join(checkdir, y))
+ if file_is_ebuild:
+ pf = y[:-7]
+ ebuildlist.append(pf)
+ catdir = xpkg.split("/")[0]
+ cpv = "%s/%s" % (catdir, pf)
+ try:
+ myaux = dict(zip(allvars, self.portdb.aux_get(cpv, allvars)))
+ except KeyError:
+ fuse.set(False, ignore_InvalidState=True)
+ self.qatracker.add_error("ebuild.syntax", os.path.join(xpkg, y))
+ continue
+ except IOError:
+ fuse.set(False, ignore_InvalidState=True)
+ self.qatracker.add_error("ebuild.output", os.path.join(xpkg, y))
+ continue
+ except InvalidPackageName:
+ fuse.set(False, ignore_InvalidState=True)
+ self.qatracker.add_error("ebuild.invalidname", os.path.join(xpkg, y))
+ continue
+ if not portage.eapi_is_supported(myaux["EAPI"]):
+ fuse.set(False, ignore_InvalidState=True)
+ self.qatracker.add_error("EAPI.unsupported", os.path.join(xpkg, y))
+ continue
+ pkgs[pf] = Package(
+ cpv=cpv, metadata=myaux, root_config=self.root_config,
+ type_name="ebuild")
+
+ if len(pkgs) != len(ebuildlist):
+ # If we can't access all the metadata then it's totally unsafe to
+ # commit since there's no way to generate a correct Manifest.
+ # Do not try to do any more QA checks on this package since missing
+ # metadata leads to false positives for several checks, and false
+ # positives confuse users.
+ self.continue_ = True
+ can_force.set(False, ignore_InvalidState=True)
+ self.pkgs = pkgs
+ # set our updated data
+ dyn_pkgs = kwargs.get('pkgs')
+ dyn_pkgs.set(pkgs)
+ return self.continue_
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.check_isebuild])
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check, self.set_pkg_data, self.bad_split_check, self.pkg_invalid])
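
bad_split_check() above validates the split that portage.pkgsplit() returns for the ebuild name. A simplified, portage-free sketch of the same decisions, with the split tuples written out by hand so the two QA keys can be seen without a repository:

import re

pv_toolong_re = re.compile(r'[0-9]{19,}')

def classify_split(xpkg, pkgdir, myesplit):
    """Return the QA key the checks above would report, or None."""
    is_bad_split = myesplit is None or myesplit[0] != xpkg.split("/")[-1]
    if is_bad_split and myesplit is not None:
        if pv_toolong_re.search(myesplit[1]) or pv_toolong_re.search(myesplit[2]):
            return "ebuild.invalidname"
        if myesplit[0] != pkgdir:
            return "ebuild.namenomatch"
    return None

print(classify_split("app-misc/foo", "foo", ("foo", "1.0", "r0")))  # None
print(classify_split("app-misc/foo", "foo", ("bar", "1.0", "r0")))  # ebuild.namenomatch
print(classify_split("app-misc/foo", "foo",
                     ("bar", "12345678901234567890", "r0")))        # ebuild.invalidname
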
diff --git a/repoman/pym/repoman/modules/scan/ebuild/errors.py b/repoman/pym/repoman/modules/scan/ebuild/errors.py
new file mode 100644
index 000000000..3090de0d1
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/ebuild/errors.py
@@ -0,0 +1,49 @@
+# -*- coding:utf-8 -*-
+# repoman: Error Messages
+# Copyright 2007-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import unicode_literals
+
+COPYRIGHT_ERROR = (
+ 'Invalid Gentoo Copyright on line: %d')
+LICENSE_ERROR = (
+ 'Invalid Gentoo/GPL License on line: %d')
+ID_HEADER_ERROR = (
+ 'Malformed Id header on line: %d')
+LEADING_SPACES_ERROR = (
+ 'Ebuild contains leading spaces on line: %d')
+TRAILING_WHITESPACE_ERROR = (
+ 'Trailing whitespace error on line: %d')
+READONLY_ASSIGNMENT_ERROR = (
+ 'Ebuild contains assignment to read-only variable on line: %d')
+MISSING_QUOTES_ERROR = (
+ 'Unquoted Variable on line: %d')
+NESTED_DIE_ERROR = (
+ 'Ebuild calls die in a subshell on line: %d')
+PATCHES_ERROR = (
+ 'PATCHES is not a bash array on line: %d')
+REDUNDANT_CD_S_ERROR = (
+ 'Ebuild has redundant cd ${S} statement on line: %d')
+EMAKE_PARALLEL_DISABLED = (
+ 'Upstream parallel compilation bug (ebuild calls emake -j1 on line: %d)')
+EMAKE_PARALLEL_DISABLED_VIA_MAKEOPTS = (
+ 'Upstream parallel compilation bug (MAKEOPTS=-j1 on line: %d)')
+DEPRECATED_BINDNOW_FLAGS = (
+ 'Deprecated bindnow-flags call on line: %d')
+EAPI_DEFINED_AFTER_INHERIT = (
+ 'EAPI defined after inherit on line: %d')
+NO_AS_NEEDED = (
+ 'Upstream asneeded linking bug (no-as-needed on line: %d)')
+PRESERVE_OLD_LIB = (
+ 'Ebuild calls deprecated preserve_old_lib on line: %d')
+BUILT_WITH_USE = (
+ 'built_with_use on line: %d')
+NO_OFFSET_WITH_HELPERS = (
+ "Helper function is used with D, ROOT, ED, EROOT or EPREFIX on line :%d")
+SANDBOX_ADDPREDICT = (
+ 'Ebuild calls addpredict on line: %d')
+USEQ_ERROR = (
+ 'Ebuild calls deprecated useq function on line: %d')
+HASQ_ERROR = (
+ 'Ebuild calls deprecated hasq function on line: %d')
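
The constants above are plain %-format templates; run_checks() in checks.py fills in the 1-based line number when a check returns one of them:

MISSING_QUOTES_ERROR = 'Unquoted Variable on line: %d'

num = 41                                 # 0-based index from enumerate()
print(MISSING_QUOTES_ERROR % (num + 1))  # -> Unquoted Variable on line: 42
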
diff --git a/repoman/pym/repoman/modules/scan/ebuild/multicheck.py b/repoman/pym/repoman/modules/scan/ebuild/multicheck.py
new file mode 100644
index 000000000..9e36e2a68
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/ebuild/multicheck.py
@@ -0,0 +1,56 @@
+
+'''multicheck.py
+Perform multiple different checks on an ebuild
+'''
+
+import io
+
+from portage import _encodings, _unicode_encode
+
+from repoman.modules.scan.scanbase import ScanBase
+from .checks import run_checks, checks_init
+
+
+class MultiCheck(ScanBase):
+ '''Class to run multiple different checks on an ebuild'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param qatracker: QATracker instance
+ @param options: the run time cli options
+ '''
+ self.qatracker = kwargs.get('qatracker')
+ self.options = kwargs.get('options')
+ checks_init(self.options.experimental_inherit == 'y')
+
+ def check(self, **kwargs):
+ '''Check the ebuild for utf-8 encoding
+
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @returns: dictionary
+ '''
+ ebuild = kwargs.get('ebuild').get()
+ pkg = kwargs.get('pkg').get()
+ try:
+ # All ebuilds should have utf_8 encoding.
+ f = io.open(
+ _unicode_encode(ebuild.full_path, encoding=_encodings['fs'],
+ errors='strict'),
+ mode='r', encoding=_encodings['repo.content'])
+ try:
+ for check_name, e in run_checks(f, pkg):
+ self.qatracker.add_error(
+ check_name, ebuild.relative_path + ': %s' % e)
+ finally:
+ f.close()
+ except UnicodeDecodeError:
+ # A file.UTF8 failure will have already been recorded.
+ pass
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
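
MultiCheck.check() above only prefixes each (check name, message) pair yielded by run_checks() with the ebuild's relative path before handing it to QATracker. With invented results and print() in place of the tracker:

relative_path = "app-misc/example/example-1.ebuild"
results = [
    ("ebuild.minorsyn", "Trailing whitespace error on line: 7"),
    ("EAPI.definition", "valid EAPI assignment must occur on or before line: 3"),
]

for check_name, e in results:
    print(check_name, relative_path + ': %s' % e)
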
diff --git a/repoman/pym/repoman/modules/scan/eclasses/__init__.py b/repoman/pym/repoman/modules/scan/eclasses/__init__.py
new file mode 100644
index 000000000..78d46e4b4
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/eclasses/__init__.py
@@ -0,0 +1,47 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Eclasses plug-in module for repoman.
+Performs live and ruby eclass checks on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'eclasses',
+ 'description': doc,
+ 'provides':{
+ 'live-module': {
+ 'name': "live",
+ 'sourcefile': "live",
+ 'class': "LiveEclassChecks",
+ 'description': doc,
+ 'functions': ['check'],
+			'func_desc': {
+			},
+ 'mod_kwargs': ['qatracker', 'repo_metadata', 'repo_settings',
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ 'xpkg': (None, None),
+ 'y_ebuild': (None, None),
+ },
+ },
+ 'ruby-module': {
+ 'name': "ruby",
+ 'sourcefile': "ruby",
+ 'class': "RubyEclassChecks",
+ 'description': doc,
+ 'functions': ['check'],
+			'func_desc': {
+			},
+ 'mod_kwargs': ['qatracker'
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/eclasses/live.py b/repoman/pym/repoman/modules/scan/eclasses/live.py
new file mode 100644
index 000000000..dca10b583
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/eclasses/live.py
@@ -0,0 +1,76 @@
+
+'''live.py
+Performs Live eclass checks
+'''
+
+from repoman._portage import portage
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class LiveEclassChecks(ScanBase):
+ '''Performs checks for the usage of Live eclasses in ebuilds'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ '''
+ self.qatracker = kwargs.get('qatracker')
+ self.pmaskdict = kwargs.get('repo_metadata')['pmaskdict']
+ self.repo_settings = kwargs.get('repo_settings')
+
+ def check(self, **kwargs):
+ '''Ebuilds that inherit a "Live" eclass (darcs, subversion, git, cvs,
+		etc.) should not be allowed to be marked stable
+
+ @param pkg: Package in which we check (object).
+ @param xpkg: Package in which we check (string).
+ @param ebuild: Ebuild which we check (object).
+ @param y_ebuild: Ebuild which we check (string).
+ @returns: boolean
+ '''
+ pkg = kwargs.get("pkg").result()
+ package = kwargs.get('xpkg')
+ ebuild = kwargs.get('ebuild').get()
+ y_ebuild = kwargs.get('y_ebuild')
+
+ if ebuild.live_ebuild and self.repo_settings.repo_config.name == "gentoo":
+ return self.check_live(pkg, package, ebuild, y_ebuild)
+ return False
+
+ def check_live(self, pkg, package, ebuild, y_ebuild):
+ '''Perform the live vcs check
+
+ @param pkg: Package in which we check (object).
+ @param xpkg: Package in which we check (string).
+ @param ebuild: Ebuild which we check (object).
+ @param y_ebuild: Ebuild which we check (string).
+ @returns: boolean
+ '''
+ keywords = ebuild.keywords
+ is_stable = lambda kw: not kw.startswith("~") and not kw.startswith("-")
+ bad_stable_keywords = list(filter(is_stable, keywords))
+
+ if bad_stable_keywords:
+ self.qatracker.add_error(
+ "LIVEVCS.stable", "%s/%s.ebuild with stable keywords: %s" % (
+ package, y_ebuild, bad_stable_keywords))
+
+ good_keywords_exist = len(bad_stable_keywords) < len(keywords)
+ if good_keywords_exist and not self._has_global_mask(pkg, self.pmaskdict):
+ self.qatracker.add_error("LIVEVCS.unmasked", ebuild.relative_path)
+ return False
+
+ @staticmethod
+ def _has_global_mask(pkg, global_pmaskdict):
+ mask_atoms = global_pmaskdict.get(pkg.cp)
+ if mask_atoms:
+ pkg_list = [pkg]
+ for x in mask_atoms:
+ if portage.dep.match_from_list(x, pkg_list):
+ return x
+ return None
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
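
check_live() above splits KEYWORDS into stable and non-stable entries: any stable keyword on a live ebuild is an error, and remaining non-stable keywords trigger the global-mask lookup. A standalone sketch of that split with made-up keyword lists:

def split_stable(keywords):
    is_stable = lambda kw: not kw.startswith("~") and not kw.startswith("-")
    bad_stable_keywords = [kw for kw in keywords if is_stable(kw)]
    good_keywords_exist = len(bad_stable_keywords) < len(keywords)
    return bad_stable_keywords, good_keywords_exist

print(split_stable(["amd64", "~x86"]))   # (['amd64'], True)  -> LIVEVCS.stable
print(split_stable(["~amd64", "~x86"]))  # ([], True)         -> mask lookup, maybe LIVEVCS.unmasked
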
diff --git a/repoman/pym/repoman/modules/scan/eclasses/ruby.py b/repoman/pym/repoman/modules/scan/eclasses/ruby.py
new file mode 100644
index 000000000..b3501805e
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/eclasses/ruby.py
@@ -0,0 +1,48 @@
+
+'''ruby.py
+Performs Ruby eclass checks
+'''
+
+from repoman.qa_data import ruby_deprecated
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class RubyEclassChecks(ScanBase):
+ '''Performs checks for the usage of Ruby eclasses in ebuilds'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ '''
+ super(RubyEclassChecks, self).__init__(**kwargs)
+ self.qatracker = kwargs.get('qatracker')
+ self.old_ruby_eclasses = ["ruby-ng", "ruby-fakegem", "ruby"]
+
+ def check(self, **kwargs):
+ '''Check ebuilds that inherit the ruby eclasses
+
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ @returns: dictionary
+ '''
+ pkg = kwargs.get('pkg').get()
+ ebuild = kwargs.get('ebuild').get()
+ is_inherited = lambda eclass: eclass in pkg.inherited
+ is_old_ruby_eclass_inherited = filter(
+ is_inherited, self.old_ruby_eclasses)
+
+ if is_old_ruby_eclass_inherited:
+ ruby_intersection = pkg.iuse.all.intersection(ruby_deprecated)
+
+ if ruby_intersection:
+ for myruby in ruby_intersection:
+ self.qatracker.add_error(
+ "IUSE.rubydeprecated",
+ (ebuild.relative_path + ": Deprecated ruby target: %s")
+ % myruby)
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
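
The Ruby check above reports every deprecated target found in the ebuild's IUSE once one of the old Ruby eclasses is inherited. A minimal model of the intersection step; the deprecated-target set and IUSE values are invented here, not the real repoman.qa_data contents:

ruby_deprecated = frozenset(["ruby_targets_ruby19", "ruby_targets_ree18"])
iuse_all = {"doc", "ruby_targets_ruby19", "ruby_targets_ruby24"}

for myruby in sorted(iuse_all.intersection(ruby_deprecated)):
    print("IUSE.rubydeprecated: app-misc/example/example-1.ebuild: "
          "Deprecated ruby target: %s" % myruby)
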
diff --git a/repoman/pym/repoman/modules/scan/fetch/__init__.py b/repoman/pym/repoman/modules/scan/fetch/__init__.py
new file mode 100644
index 000000000..3c8e6002c
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/fetch/__init__.py
@@ -0,0 +1,33 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """fetches plug-in module for repoman.
+Performs fetch related checks on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'fetches',
+ 'description': doc,
+ 'provides':{
+ 'fetches-module': {
+ 'name': "fetches",
+ 'sourcefile': "fetches",
+ 'class': "FetchChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['portdb', 'qatracker', 'repo_settings', 'vcs_settings',
+ ],
+ 'func_kwargs': {
+ 'changed': (None, None),
+ 'checkdir': (None, None),
+ 'checkdir_relative': (None, None),
+ 'ebuild': (None, None),
+ 'xpkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/fetch/fetches.py b/repoman/pym/repoman/modules/scan/fetch/fetches.py
new file mode 100644
index 000000000..555f34f14
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/fetch/fetches.py
@@ -0,0 +1,190 @@
+# -*- coding:utf-8 -*-
+
+'''fetches.py
+Performs the src_uri fetchlist and files checks
+'''
+
+from stat import S_ISDIR
+
+# import our initialized portage instance
+from repoman._portage import portage
+from repoman.modules.vcs.vcs import vcs_new_changed
+from repoman.modules.scan.scanbase import ScanBase
+
+from portage import os
+
+
+class FetchChecks(ScanBase):
+ '''Performs checks on the files needed for the ebuild'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param portdb: portdb instance
+ @param qatracker: QATracker instance
+ @param repo_settings: repository settings instance
+ @param vcs_settings: VCSSettings instance
+ '''
+ super(FetchChecks, self).__init__(**kwargs)
+ self.portdb = kwargs.get('portdb')
+ self.qatracker = kwargs.get('qatracker')
+ self.repo_settings = kwargs.get('repo_settings')
+ self.repoman_settings = self.repo_settings.repoman_settings
+ self.vcs_settings = kwargs.get('vcs_settings')
+ self._src_uri_error = False
+
+ # TODO: Build a regex instead here, for the SRC_URI.mirror check.
+ self.thirdpartymirrors = {}
+ profile_thirdpartymirrors = self.repo_settings.repoman_settings.thirdpartymirrors().items()
+ for mirror_alias, mirrors in profile_thirdpartymirrors:
+ for mirror in mirrors:
+ if not mirror.endswith("/"):
+ mirror += "/"
+ self.thirdpartymirrors[mirror] = mirror_alias
+
+ def check(self, **kwargs):
+ '''Checks the ebuild sources and files for errors
+
+		@param xpkg: the package being checked
+ @param checkdir: string, directory path
+ @param checkdir_relative: repolevel determined path
+ @returns: boolean
+ '''
+ xpkg = kwargs.get('xpkg')
+ checkdir = kwargs.get('checkdir')
+ checkdir_relative = kwargs.get('checkdir_relative')
+ changed = kwargs.get('changed').changed
+ new = kwargs.get('changed').new
+ _digests = self.digests(checkdir)
+ fetchlist_dict = portage.FetchlistDict(
+ checkdir, self.repoman_settings, self.portdb)
+ myfiles_all = []
+ self._src_uri_error = False
+ for mykey in fetchlist_dict:
+ try:
+ myfiles_all.extend(fetchlist_dict[mykey])
+ except portage.exception.InvalidDependString as e:
+ self._src_uri_error = True
+ try:
+ self.portdb.aux_get(mykey, ["SRC_URI"])
+ except KeyError:
+ # This will be reported as an "ebuild.syntax" error.
+ pass
+ else:
+ self.qatracker.add_error(
+ "SRC_URI.syntax", "%s.ebuild SRC_URI: %s" % (mykey, e))
+ del fetchlist_dict
+ if not self._src_uri_error:
+ # This test can produce false positives if SRC_URI could not
+ # be parsed for one or more ebuilds. There's no point in
+ # producing a false error here since the root cause will
+ # produce a valid error elsewhere, such as "SRC_URI.syntax"
+ # or "ebuild.sytax".
+ myfiles_all = set(myfiles_all)
+ for entry in _digests:
+ if entry not in myfiles_all:
+ self.qatracker.add_error("digest.unused", checkdir + "::" + entry)
+ for entry in myfiles_all:
+ if entry not in _digests:
+ self.qatracker.add_error("digest.missing", checkdir + "::" + entry)
+ del myfiles_all
+
+ if os.path.exists(checkdir + "/files"):
+ filesdirlist = os.listdir(checkdir + "/files")
+
+ # Recurse through files directory, use filesdirlist as a stack;
+ # appending directories as needed,
+ # so people can't hide > 20k files in a subdirectory.
+ while filesdirlist:
+ y = filesdirlist.pop(0)
+ relative_path = os.path.join(xpkg, "files", y)
+ full_path = os.path.join(self.repo_settings.repodir, relative_path)
+ try:
+ mystat = os.stat(full_path)
+ except OSError as oe:
+ if oe.errno == 2:
+ # don't worry about it. it likely was removed via fix above.
+ continue
+ else:
+ raise oe
+ if S_ISDIR(mystat.st_mode):
+ if self.vcs_settings.status.isVcsDir(y):
+ continue
+ for z in os.listdir(checkdir + "/files/" + y):
+ if self.vcs_settings.status.isVcsDir(z):
+ continue
+ filesdirlist.append(y + "/" + z)
+ # Current policy is no files over 20 KiB; these are the checks.
+ # File size between 20 KiB and 60 KiB causes a warning,
+ # while file size over 60 KiB causes an error.
+ elif mystat.st_size > 61440:
+ self.qatracker.add_error(
+ "file.size.fatal", "(%d KiB) %s/files/%s" % (
+ mystat.st_size // 1024, xpkg, y))
+ elif mystat.st_size > 20480:
+ self.qatracker.add_error(
+ "file.size", "(%d KiB) %s/files/%s" % (
+ mystat.st_size // 1024, xpkg, y))
+
+ index = self.repo_settings.repo_config.find_invalid_path_char(y)
+ if index != -1:
+ y_relative = os.path.join(checkdir_relative, "files", y)
+ if self.vcs_settings.vcs is not None \
+ and not vcs_new_changed(y_relative, changed, new):
+ # If the file isn't in the VCS new or changed set, then
+ # assume that it's an irrelevant temporary file (Manifest
+ # entries are not generated for file names containing
+ # prohibited characters). See bug #406877.
+ index = -1
+ if index != -1:
+ self.qatracker.add_error(
+ "file.name",
+ "%s/files/%s: char '%s'" % (checkdir, y, y[index]))
+ return False
+
+ def digests(self, checkdir):
+ '''Returns the freshly loaded digests
+
+ @param checkdir: string, directory path
+ '''
+ mf = self.repoman_settings.repositories.get_repo_for_location(
+ os.path.dirname(os.path.dirname(checkdir)))
+ mf = mf.load_manifest(checkdir, self.repoman_settings["DISTDIR"])
+ _digests = mf.getTypeDigests("DIST")
+ del mf
+ return _digests
+
+ def check_mirrors(self, **kwargs):
+ '''Check that URIs don't reference a server from thirdpartymirrors
+
+ @param ebuild: Ebuild which we check (object).
+ @returns: boolean
+ '''
+ ebuild = kwargs.get('ebuild').get()
+
+ for uri in portage.dep.use_reduce(
+ ebuild.metadata["SRC_URI"], matchall=True, is_src_uri=True,
+ eapi=ebuild.eapi, flat=True):
+ contains_mirror = False
+ for mirror, mirror_alias in self.thirdpartymirrors.items():
+ if uri.startswith(mirror):
+ contains_mirror = True
+ break
+ if not contains_mirror:
+ continue
+
+ new_uri = "mirror://%s/%s" % (mirror_alias, uri[len(mirror):])
+ self.qatracker.add_error(
+ "SRC_URI.mirror",
+ "%s: '%s' found in thirdpartymirrors, use '%s'" % (
+ ebuild.relative_path, mirror, new_uri))
+ return False
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.check])
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check_mirrors])
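
FetchChecks.check_mirrors() above normalizes every third-party mirror URL to end in a slash and then rewrites any SRC_URI that starts with one of them to the mirror:// form. A standalone sketch of that rewrite follows; the mirror table and URI are invented for illustration.

thirdpartymirrors = {}
for alias, mirrors in {'gnu': ['https://ftp.gnu.org/gnu']}.items():
    for mirror in mirrors:
        if not mirror.endswith('/'):
            mirror += '/'                  # normalize so the prefix match is exact
        thirdpartymirrors[mirror] = alias

uri = 'https://ftp.gnu.org/gnu/hello/hello-2.12.tar.gz'
for mirror, alias in thirdpartymirrors.items():
    if uri.startswith(mirror):
        # this is what the check reports as SRC_URI.mirror
        print("use 'mirror://%s/%s' instead" % (alias, uri[len(mirror):]))
        break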
diff --git a/repoman/pym/repoman/modules/scan/keywords/__init__.py b/repoman/pym/repoman/modules/scan/keywords/__init__.py
new file mode 100644
index 000000000..2223927c8
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/keywords/__init__.py
@@ -0,0 +1,33 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Keywords plug-in module for repoman.
+Performs keywords checks on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'keywords',
+ 'description': doc,
+ 'provides':{
+ 'keywords-module': {
+ 'name': "keywords",
+ 'sourcefile': "keywords",
+ 'class': "KeywordChecks",
+ 'description': doc,
+ 'functions': ['prepare', 'check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker', 'options', 'repo_metadata', 'profiles',
+ ],
+ 'func_kwargs': {
+ 'changed': (None, None),
+ 'ebuild': ('Future', 'UNSET'),
+ 'pkg': ('Future', 'UNSET'),
+ 'xpkg': None,
+ 'y_ebuild': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/keywords/keywords.py b/repoman/pym/repoman/modules/scan/keywords/keywords.py
new file mode 100644
index 000000000..7cb2fe912
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/keywords/keywords.py
@@ -0,0 +1,133 @@
+# -*- coding:utf-8 -*-
+
+'''keywords.py
+Perform KEYWORDS related checks
+
+'''
+
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class KeywordChecks(ScanBase):
+ '''Perform checks on the KEYWORDS of an ebuild'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ @param options: argparse options instance
+ '''
+ super(KeywordChecks, self).__init__(**kwargs)
+ self.qatracker = kwargs.get('qatracker')
+ self.options = kwargs.get('options')
+ self.repo_metadata = kwargs.get('repo_metadata')
+ self.profiles = kwargs.get('profiles')
+ self.slot_keywords = {}
+
+ def prepare(self, **kwargs):
+ '''Prepare the checks for the next package.'''
+ self.slot_keywords = {}
+ return False
+
+ def check(self, **kwargs):
+ '''Perform the check.
+
+ @param pkg: Package in which we check (object).
+ @param xpkg: Package in which we check (string).
+ @param ebuild: Ebuild which we check (object).
+ @param y_ebuild: Ebuild which we check (string).
+ @param ebuild_archs: Just the architectures (no prefixes) of the ebuild.
+ @param changed: Changes instance
+ @returns: boolean
+ '''
+ pkg = kwargs.get('pkg').get()
+ xpkg = kwargs.get('xpkg')
+ ebuild = kwargs.get('ebuild').get()
+ y_ebuild = kwargs.get('y_ebuild')
+ changed = kwargs.get('changed')
+ if not self.options.straight_to_stable:
+ self._checkAddedWithStableKeywords(
+ xpkg, ebuild, y_ebuild, ebuild.keywords, changed)
+
+ self._checkForDroppedKeywords(pkg, ebuild, ebuild.archs)
+
+ self._checkForInvalidKeywords(ebuild, xpkg, y_ebuild)
+
+ self._checkForMaskLikeKeywords(xpkg, y_ebuild, ebuild.keywords)
+
+ self.slot_keywords[pkg.slot].update(ebuild.archs)
+ return False
+
+ @staticmethod
+ def _isKeywordStable(keyword):
+ return not keyword.startswith("~") and not keyword.startswith("-")
+
+ def _checkAddedWithStableKeywords(
+ self, package, ebuild, y_ebuild, keywords, changed):
+ catdir, pkgdir = package.split("/")
+
+ stable_keywords = list(filter(self._isKeywordStable, keywords))
+ if stable_keywords:
+ if ebuild.ebuild_path in changed.new_ebuilds and catdir != "virtual":
+ stable_keywords.sort()
+ self.qatracker.add_error(
+ "KEYWORDS.stable",
+ "%s/%s.ebuild added with stable keywords: %s" %
+ (package, y_ebuild, " ".join(stable_keywords)))
+
+ def _checkForDroppedKeywords(
+ self, pkg, ebuild, ebuild_archs):
+ previous_keywords = self.slot_keywords.get(pkg.slot)
+ if previous_keywords is None:
+ self.slot_keywords[pkg.slot] = set()
+ elif ebuild_archs and "*" not in ebuild_archs and not ebuild.live_ebuild:
+ dropped_keywords = previous_keywords.difference(ebuild_archs)
+ if dropped_keywords:
+ self.qatracker.add_error(
+ "KEYWORDS.dropped", "%s: %s" % (
+ ebuild.relative_path,
+ " ".join(sorted(dropped_keywords))))
+
+ def _checkForInvalidKeywords(self, ebuild, xpkg, y_ebuild):
+ myuse = ebuild.keywords
+
+ for mykey in myuse:
+ if mykey not in ("-*", "*", "~*"):
+ myskey = mykey
+
+ if not self._isKeywordStable(myskey[:1]):
+ myskey = myskey[1:]
+
+ if myskey not in self.repo_metadata['kwlist']:
+ self.qatracker.add_error("KEYWORDS.invalid",
+ "%s/%s.ebuild: %s" % (xpkg, y_ebuild, mykey))
+ elif myskey not in self.profiles:
+ self.qatracker.add_error(
+ "KEYWORDS.invalid",
+ "%s/%s.ebuild: %s (profile invalid)"
+ % (xpkg, y_ebuild, mykey))
+
+ def _checkForMaskLikeKeywords(self, xpkg, y_ebuild, keywords):
+ # KEYWORDS="-*" is a stupid replacement for package.mask
+ # and screws general KEYWORDS semantics
+ if "-*" in keywords:
+ haskeyword = False
+
+ for kw in keywords:
+ if kw[0] == "~":
+ kw = kw[1:]
+ if kw in self.repo_metadata['kwlist']:
+ haskeyword = True
+
+ if not haskeyword:
+ self.qatracker.add_error("KEYWORDS.stupid",
+ "%s/%s.ebuild" % (xpkg, y_ebuild))
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.prepare])
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.check])
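
The KEYWORDS.dropped logic above is a plain set difference between the keywords accumulated so far for a SLOT and the architectures of the current ebuild. A minimal illustration with made-up keyword sets:

previous_keywords = {'amd64', 'x86', 'arm'}   # gathered from earlier ebuilds in the SLOT
ebuild_archs = {'amd64', 'x86'}               # the revision being checked
dropped = previous_keywords.difference(ebuild_archs)
if dropped and '*' not in ebuild_archs:
    print("KEYWORDS.dropped: %s" % " ".join(sorted(dropped)))   # prints: KEYWORDS.dropped: arm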
diff --git a/repoman/pym/repoman/modules/scan/manifest/__init__.py b/repoman/pym/repoman/modules/scan/manifest/__init__.py
new file mode 100644
index 000000000..dca431b62
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/manifest/__init__.py
@@ -0,0 +1,30 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Ebuild plug-in module for repoman.
+Performs an IsEbuild check on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'manifest',
+ 'description': doc,
+ 'provides':{
+ 'manifest-module': {
+ 'name': "manifests",
+ 'sourcefile': "manifests",
+ 'class': "Manifests",
+ 'description': doc,
+ 'functions': ['check', 'create_manifest', 'digest_check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['options', 'portdb', 'qatracker', 'repo_settings',
+ ],
+ 'func_kwargs': {
+ 'checkdir': (None, None),
+ 'xpkg': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/manifest/manifests.py b/repoman/pym/repoman/modules/scan/manifest/manifests.py
new file mode 100644
index 000000000..2b8d7af77
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/manifest/manifests.py
@@ -0,0 +1,139 @@
+# -*- coding:utf-8 -*-
+
+import logging
+import sys
+
+# import our initialized portage instance
+from repoman._portage import portage
+from repoman.modules.scan.scanbase import ScanBase
+
+from portage import os
+from portage.package.ebuild.digestgen import digestgen
+from portage.util import writemsg_level
+
+
+class Manifests(ScanBase):
+ '''Creates and checks package Manifest entries/files'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param options: the run time cli options
+ @param portdb: portdb instance
+ @param qatracker: QATracker instance
+ @param repo_settings: repository settings instance
+ '''
+ self.options = kwargs.get('options')
+ self.portdb = kwargs.get('portdb')
+ self.qatracker = kwargs.get('qatracker')
+ self.repoman_settings = kwargs.get('repo_settings').repoman_settings
+ self.generated_manifest = False
+
+ def check(self, **kwargs):
+ '''Perform Manifest generation and digest checks on the package
+
+ @param xpkg: the cat/pkg name being checked
+ @param checkdir: string, directory path of the package
+ @returns: boolean
+ '''
+ checkdir = kwargs.get('checkdir')
+ xpkg = kwargs.get('xpkg')
+ self.generated_manifest = False
+ self.digest_only = self.options.mode != 'manifest-check' \
+ and self.options.digest == 'y'
+ if self.options.pretend:
+ return False
+ if self.options.mode in ("manifest", 'commit', 'fix') or self.digest_only:
+ failed = False
+ self.auto_assumed = set()
+ fetchlist_dict = portage.FetchlistDict(
+ checkdir, self.repoman_settings, self.portdb)
+ if self.options.mode == 'manifest' and self.options.force:
+ portage._doebuild_manifest_exempt_depend += 1
+ self.create_manifest(checkdir, fetchlist_dict)
+ self.repoman_settings["O"] = checkdir
+ try:
+ self.generated_manifest = digestgen(
+ mysettings=self.repoman_settings, myportdb=self.portdb)
+ except portage.exception.PermissionDenied as e:
+ self.generated_manifest = False
+ writemsg_level(
+ "!!! Permission denied: '%s'\n" % (e,),
+ level=logging.ERROR, noiselevel=-1)
+
+ if not self.generated_manifest:
+ writemsg_level(
+ "Unable to generate manifest.",
+ level=logging.ERROR, noiselevel=-1)
+ failed = True
+
+ if self.options.mode == "manifest":
+ if not failed and self.options.force and self.auto_assumed and \
+ 'assume-digests' in self.repoman_settings.features:
+ # Show which digests were assumed despite the --force option
+ # being given. This output will already have been shown by
+ # digestgen() if assume-digests is not enabled, so only show
+ # it here if assume-digests is enabled.
+ pkgs = list(fetchlist_dict)
+ pkgs.sort()
+ portage.writemsg_stdout(
+ " digest.assumed %s" %
+ portage.output.colorize(
+ "WARN", str(len(self.auto_assumed)).rjust(18)) + "\n")
+ for cpv in pkgs:
+ fetchmap = fetchlist_dict[cpv]
+ pf = portage.catsplit(cpv)[1]
+ for distfile in sorted(fetchmap):
+ if distfile in self.auto_assumed:
+ portage.writemsg_stdout(
+ " %s::%s\n" % (pf, distfile))
+ # continue, skip remaining main loop code
+ return True
+ elif failed:
+ sys.exit(1)
+ if not self.generated_manifest:
+ self.digest_check(xpkg, checkdir)
+ if self.options.mode == 'manifest-check':
+ return True
+ return False
+
+ def create_manifest(self, checkdir, fetchlist_dict):
+ '''Creates a Manifest file
+
+ @param checkdir: the directory to generate the Manifest in
+ @param fetchlist_dict: dictionary of files to fetch and/or include
+ in the manifest
+ '''
+ try:
+ distdir = self.repoman_settings['DISTDIR']
+ mf = self.repoman_settings.repositories.get_repo_for_location(
+ os.path.dirname(os.path.dirname(checkdir)))
+ mf = mf.load_manifest(
+ checkdir, distdir, fetchlist_dict=fetchlist_dict)
+ mf.create(
+ requiredDistfiles=None, assumeDistHashesAlways=True)
+ for distfiles in fetchlist_dict.values():
+ for distfile in distfiles:
+ if os.path.isfile(os.path.join(distdir, distfile)):
+ mf.fhashdict['DIST'].pop(distfile, None)
+ else:
+ self.auto_assumed.add(distfile)
+ mf.write()
+ finally:
+ portage._doebuild_manifest_exempt_depend -= 1
+
+ def digest_check(self, xpkg, checkdir):
+ '''Check the manifest entries, report any Q/A errors
+
+ @param xpkg: the cat/pkg name to check
+ @param checkdir: the directory path to check'''
+ self.repoman_settings['O'] = checkdir
+ self.repoman_settings['PORTAGE_QUIET'] = '1'
+ if not portage.digestcheck([], self.repoman_settings, strict=1):
+ self.qatracker.add_error("manifest.bad", os.path.join(xpkg, 'Manifest'))
+ self.repoman_settings.pop('PORTAGE_QUIET', None)
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.check])
diff --git a/repoman/pym/repoman/modules/scan/metadata/__init__.py b/repoman/pym/repoman/modules/scan/metadata/__init__.py
new file mode 100644
index 000000000..b656d7af0
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/__init__.py
@@ -0,0 +1,85 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Metadata plug-in module for repoman.
+Performs metadata checks on packages."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'metadata',
+ 'description': doc,
+ 'provides':{
+ 'pkg-metadata': {
+ 'name': "pkgmetadata",
+ 'sourcefile': "pkgmetadata",
+ 'class': "PkgMetadata",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['repo_settings', 'qatracker', 'options',
+ 'metadata_xsd', 'uselist',
+ ],
+ 'func_kwargs': {
+ 'checkdir': (None, None),
+ 'checkdirlist': (None, None),
+ 'ebuild': (None, None),
+ 'pkg': (None, None),
+ 'repolevel': (None, None),
+ 'validity_future': (None, None),
+ 'xpkg': (None, None),
+ 'y_ebuild': (None, None),
+ },
+ },
+ 'ebuild-metadata': {
+ 'name': "ebuild_metadata",
+ 'sourcefile': "ebuild_metadata",
+ 'class': "EbuildMetadata",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker',
+ ],
+ 'func_kwargs': {
+ 'catdir': (None, None),
+ 'ebuild': (None, None),
+ 'xpkg': (None, None),
+ 'y_ebuild': (None, None),
+ },
+ },
+ 'description-metadata': {
+ 'name': "description",
+ 'sourcefile': "description",
+ 'class': "DescriptionChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker',
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'pkg': ('Future', 'UNSET'),
+ },
+ },
+ 'restrict-metadata': {
+ 'name': "restrict",
+ 'sourcefile': "restrict",
+ 'class': "RestrictChecks",
+ 'description': doc,
+ 'functions': ['check'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['qatracker',
+ ],
+ 'func_kwargs': {
+ 'ebuild': (None, None),
+ 'xpkg': (None, None),
+ 'y_ebuild': (None, None),
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/metadata/description.py b/repoman/pym/repoman/modules/scan/metadata/description.py
new file mode 100644
index 000000000..79f62e1de
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/description.py
@@ -0,0 +1,41 @@
+
+'''description.py
+Perform checks on the DESCRIPTION variable.
+'''
+
+from repoman.modules.scan.scanbase import ScanBase
+from repoman.qa_data import max_desc_len
+
+
+class DescriptionChecks(ScanBase):
+ '''Perform checks on the DESCRIPTION variable.'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ '''
+ self.qatracker = kwargs.get('qatracker')
+
+ def checkTooLong(self, **kwargs):
+ '''
+ @param pkg: Package in which we check (object).
+ @param ebuild: Ebuild which we check (object).
+ '''
+ ebuild = kwargs.get('ebuild').get()
+ pkg = kwargs.get('pkg').get()
+ # 14 is the length of DESCRIPTION=""
+ if len(pkg._metadata['DESCRIPTION']) > max_desc_len:
+ self.qatracker.add_error(
+ 'DESCRIPTION.toolong',
+ "%s: DESCRIPTION is %d characters (max %d)" %
+ (ebuild.relative_path, len(
+ pkg._metadata['DESCRIPTION']), max_desc_len))
+ return False
+
+ @property
+ def runInPkgs(self):
+ return (False, [])
+
+ @property
+ def runInEbuilds(self):
+ return (True, [self.checkTooLong])
diff --git a/repoman/pym/repoman/modules/scan/metadata/ebuild_metadata.py b/repoman/pym/repoman/modules/scan/metadata/ebuild_metadata.py
new file mode 100644
index 000000000..e991a30b3
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/ebuild_metadata.py
@@ -0,0 +1,71 @@
+# -*- coding:utf-8 -*-
+
+'''Ebuild Metadata Checks'''
+
+import re
+import sys
+
+if sys.hexversion >= 0x3000000:
+ basestring = str
+
+from repoman.modules.scan.scanbase import ScanBase
+from repoman.qa_data import missingvars
+
+NON_ASCII_RE = re.compile(r'[^\x00-\x7f]')
+
+
+class EbuildMetadata(ScanBase):
+
+ def __init__(self, **kwargs):
+ self.qatracker = kwargs.get('qatracker')
+
+ def invalidchar(self, **kwargs):
+ ebuild = kwargs.get('ebuild').get()
+ for k, v in ebuild.metadata.items():
+ if not isinstance(v, basestring):
+ continue
+ m = NON_ASCII_RE.search(v)
+ if m is not None:
+ self.qatracker.add_error(
+ "variable.invalidchar",
+ "%s: %s variable contains non-ASCII "
+ "character at position %s" %
+ (ebuild.relative_path, k, m.start() + 1))
+ return False
+
+ def missing(self, **kwargs):
+ ebuild = kwargs.get('ebuild').get()
+ for pos, missing_var in enumerate(missingvars):
+ if not ebuild.metadata.get(missing_var):
+ if kwargs.get('catdir') == "virtual" and \
+ missing_var in ("HOMEPAGE", "LICENSE"):
+ continue
+ if ebuild.live_ebuild and missing_var == "KEYWORDS":
+ continue
+ myqakey = missingvars[pos] + ".missing"
+ self.qatracker.add_error(myqakey, '%s/%s.ebuild'
+ % (kwargs.get('xpkg'), kwargs.get('y_ebuild')))
+ return False
+
+ def old_virtual(self, **kwargs):
+ ebuild = kwargs.get('ebuild').get()
+ if ebuild.metadata.get("PROVIDE"):
+ self.qatracker.add_error("virtual.oldstyle", ebuild.relative_path)
+ return False
+
+ def virtual(self, **kwargs):
+ ebuild = kwargs.get('ebuild').get()
+ if kwargs.get('catdir') == "virtual":
+ for var in ("HOMEPAGE", "LICENSE"):
+ if ebuild.metadata.get(var):
+ myqakey = var + ".virtual"
+ self.qatracker.add_error(myqakey, ebuild.relative_path)
+ return False
+
+ @property
+ def runInPkgs(self):
+ return (False, [])
+
+ @property
+ def runInEbuilds(self):
+ return (True, [self.invalidchar, self.missing, self.old_virtual, self.virtual])
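
The variable.invalidchar check above is a plain regular-expression search for the first character outside the ASCII range; positions in the message are 1-based. A quick illustration with an invented value:

import re

NON_ASCII_RE = re.compile(r'[^\x00-\x7f]')

value = 'A naïve DESCRIPTION value'     # the accented character is the offender
m = NON_ASCII_RE.search(value)
if m is not None:
    print("non-ASCII character at position %d" % (m.start() + 1))   # prints position 5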
diff --git a/repoman/pym/repoman/modules/scan/metadata/pkgmetadata.py b/repoman/pym/repoman/modules/scan/metadata/pkgmetadata.py
new file mode 100644
index 000000000..433551aed
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/pkgmetadata.py
@@ -0,0 +1,247 @@
+# -*- coding:utf-8 -*-
+
+'''Package Metadata Checks operations'''
+
+import sys
+
+from itertools import chain
+
+try:
+ from lxml import etree
+ from lxml.etree import ParserError
+except (SystemExit, KeyboardInterrupt):
+ raise
+except (ImportError, SystemError, RuntimeError, Exception):
+ # broken or missing xml support
+ # http://bugs.python.org/issue14988
+ msg = ["Please emerge dev-python/lxml in order to use repoman."]
+ from portage.output import EOutput
+ out = EOutput()
+ for line in msg:
+ out.eerror(line)
+ sys.exit(1)
+
+# import our initialized portage instance
+from repoman._portage import portage
+from repoman.metadata import metadata_dtd_uri
+from repoman.modules.scan.scanbase import ScanBase
+
+from portage.exception import InvalidAtom
+from portage import os
+from portage.dep import Atom
+
+from .use_flags import USEFlagChecks
+
+if sys.hexversion >= 0x3000000:
+ # pylint: disable=W0622
+ basestring = str
+
+metadata_xml_encoding = 'UTF-8'
+metadata_xml_declaration = '<?xml version="1.0" encoding="%s"?>' \
+ % (metadata_xml_encoding,)
+metadata_doctype_name = 'pkgmetadata'
+
+
+class PkgMetadata(ScanBase, USEFlagChecks):
+ '''Package metadata.xml checks'''
+
+ def __init__(self, **kwargs):
+ '''PkgMetadata init function
+
+ @param repo_settings: settings instance
+ @param qatracker: QATracker instance
+ @param options: argparse options instance
+ @param metadata_xsd: path of metadata.xsd
+ '''
+ super(PkgMetadata, self).__init__(**kwargs)
+ repo_settings = kwargs.get('repo_settings')
+ self.qatracker = kwargs.get('qatracker')
+ self.options = kwargs.get('options')
+ self.metadata_xsd = kwargs.get('metadata_xsd')
+ self.globalUseFlags = kwargs.get('uselist')
+ self.repoman_settings = repo_settings.repoman_settings
+ self.musedict = {}
+ self.muselist = set()
+
+ def check(self, **kwargs):
+ '''Performs the checks on the metadata.xml for the package
+ @param xpkg: the package being checked
+ @param checkdir: string, directory path
+ @param checkdirlist: list of files in the package directory
+ @param repolevel: integer
+ @returns: boolean
+ '''
+ xpkg = kwargs.get('xpkg')
+ checkdir = kwargs.get('checkdir')
+ checkdirlist = kwargs.get('checkdirlist').get()
+
+ self.musedict = {}
+ if self.options.mode in ['manifest']:
+ self.muselist = frozenset(self.musedict)
+ return False
+
+ # metadata.xml file check
+ if "metadata.xml" not in checkdirlist:
+ self.qatracker.add_error("metadata.missing", xpkg + "/metadata.xml")
+ self.muselist = frozenset(self.musedict)
+ return False
+
+ # metadata.xml parse check
+ metadata_bad = False
+
+ # read metadata.xml into memory
+ try:
+ _metadata_xml = etree.parse(os.path.join(checkdir, 'metadata.xml'))
+ except (ParserError, SyntaxError, EnvironmentError) as e:
+ metadata_bad = True
+ self.qatracker.add_error("metadata.bad", "%s/metadata.xml: %s" % (xpkg, e))
+ del e
+ self.muselist = frozenset(self.musedict)
+ return False
+
+ xml_encoding = _metadata_xml.docinfo.encoding
+ if xml_encoding.upper() != metadata_xml_encoding:
+ self.qatracker.add_error(
+ "metadata.bad", "%s/metadata.xml: "
+ "xml declaration encoding should be '%s', not '%s'" %
+ (xpkg, metadata_xml_encoding, xml_encoding))
+
+ if not _metadata_xml.docinfo.doctype:
+ metadata_bad = True
+ self.qatracker.add_error(
+ "metadata.bad",
+ "%s/metadata.xml: %s" % (xpkg, "DOCTYPE is missing"))
+ else:
+ doctype_system = _metadata_xml.docinfo.system_url
+ if doctype_system != metadata_dtd_uri:
+ if doctype_system is None:
+ system_problem = "but it is undefined"
+ else:
+ system_problem = "not '%s'" % doctype_system
+ self.qatracker.add_error(
+ "metadata.bad", "%s/metadata.xml: "
+ "DOCTYPE: SYSTEM should refer to '%s', %s" %
+ (xpkg, metadata_dtd_uri, system_problem))
+ doctype_name = _metadata_xml.docinfo.doctype.split(' ')[1]
+ if doctype_name != metadata_doctype_name:
+ self.qatracker.add_error(
+ "metadata.bad", "%s/metadata.xml: "
+ "DOCTYPE: name should be '%s', not '%s'" %
+ (xpkg, metadata_doctype_name, doctype_name))
+
+ # load USE flags from metadata.xml
+ self.musedict = self._parse_metadata_use(_metadata_xml, xpkg)
+ for atom in chain(*self.musedict.values()):
+ if atom is None:
+ continue
+ try:
+ atom = Atom(atom)
+ except InvalidAtom as e:
+ self.qatracker.add_error(
+ "metadata.bad",
+ "%s/metadata.xml: Invalid atom: %s" % (xpkg, e))
+ else:
+ if atom.cp != xpkg:
+ self.qatracker.add_error(
+ "metadata.bad",
+ "%s/metadata.xml: Atom contains "
+ "unexpected cat/pn: %s" % (xpkg, atom))
+
+ # Only carry out if in package directory or check forced
+ if not metadata_bad:
+ validator = etree.XMLSchema(file=self.metadata_xsd)
+ if not validator.validate(_metadata_xml):
+ self._add_validate_errors(xpkg, validator.error_log)
+ self.muselist = frozenset(self.musedict)
+ return False
+
+ def check_unused(self, **kwargs):
+ '''Reports on any unused metadata.xml use descriptions
+
+ @param xpkg: the package being checked
+ @param used_useflags: use flag list
+ @param validity_future: Future instance
+ '''
+ xpkg = kwargs.get('xpkg')
+ valid_state = kwargs.get('validity_future').get()
+ # check if there are unused local USE-descriptions in metadata.xml
+ # (unless there are any invalids, to avoid noise)
+ if valid_state:
+ for myflag in self.muselist.difference(self.usedUseFlags):
+ self.qatracker.add_error(
+ "metadata.warning",
+ "%s/metadata.xml: unused local USE-description: '%s'"
+ % (xpkg, myflag))
+ return False
+
+ def _parse_metadata_use(self, xml_tree, xpkg):
+ """
+ Records are wrapped in XML as per GLEP 56
+ returns a dict with keys consisting of USE flag names and values
+ containing their respective descriptions
+ """
+ uselist = {}
+
+ usetags = xml_tree.findall("use")
+ if not usetags:
+ return uselist
+
+ # It's possible to have multiple 'use' elements.
+ for usetag in usetags:
+ flags = usetag.findall("flag")
+ if not flags:
+ # DTD allows use elements containing no flag elements.
+ continue
+
+ for flag in flags:
+ pkg_flag = flag.get("name")
+ if pkg_flag is not None:
+ flag_restrict = flag.get("restrict")
+
+ # emulate the Element.itertext() method from python-2.7
+ inner_text = []
+ stack = []
+ stack.append(flag)
+ while stack:
+ obj = stack.pop()
+ if isinstance(obj, basestring):
+ inner_text.append(obj)
+ continue
+ if isinstance(obj.text, basestring):
+ inner_text.append(obj.text)
+ if isinstance(obj.tail, basestring):
+ stack.append(obj.tail)
+ stack.extend(reversed(obj))
+
+ if flag.get("name") not in uselist:
+ uselist[flag.get("name")] = {}
+
+ # (flag_restrict can be None)
+ uselist[flag.get("name")][flag_restrict] = " ".join("".join(inner_text).split())
+ return uselist
+
+ def _add_validate_errors(self, xpkg, log):
+ listed = set()
+ for error in log:
+ msg_prefix = error.message.split(":",1)[0]
+ info = "%s %s" % (error.line, msg_prefix)
+ if info not in listed:
+ listed.add(info)
+ self.qatracker.add_error(
+ "metadata.bad",
+ "%s/metadata.xml: line: %s, %s"
+ % (xpkg, error.line, error.message))
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ return (True, [self.check])
+
+ @property
+ def runInEbuilds(self):
+ return (True, [self.check_useflags])
+
+ @property
+ def runInFinal(self):
+ '''Final scans at the package level'''
+ return (True, [self.check_unused])
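
_parse_metadata_use() above walks the <use>/<flag> elements of metadata.xml and flattens each flag description into a single whitespace-normalized string, emulating itertext() for old Python versions. The sketch below performs the same extraction on an invented metadata.xml fragment, using lxml's native itertext() instead of the manual stack:

from lxml import etree

xml = b"""<pkgmetadata>
  <use>
    <flag name="static">Build a <pkg>sys-libs/musl</pkg> based static binary</flag>
  </use>
</pkgmetadata>"""

uselist = {}
for flag in etree.fromstring(xml).findall("use/flag"):
    restrict = flag.get("restrict")                 # may be None
    description = " ".join("".join(flag.itertext()).split())
    uselist.setdefault(flag.get("name"), {})[restrict] = description

print(uselist)   # {'static': {None: 'Build a sys-libs/musl based static binary'}}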
diff --git a/repoman/pym/repoman/modules/scan/metadata/restrict.py b/repoman/pym/repoman/modules/scan/metadata/restrict.py
new file mode 100644
index 000000000..0f9c5e52e
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/restrict.py
@@ -0,0 +1,53 @@
+
+'''restrict.py
+Perform checks on the RESTRICT variable.
+'''
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from repoman.modules.scan.scanbase import ScanBase
+from repoman.qa_data import valid_restrict
+
+
+class RestrictChecks(ScanBase):
+ '''Perform checks on the RESTRICT variable.'''
+
+ def __init__(self, **kwargs):
+ '''
+ @param qatracker: QATracker instance
+ '''
+ self.qatracker = kwargs.get('qatracker')
+
+ def check(self, **kwargs):
+ xpkg = kwargs.get('xpkg')
+ ebuild = kwargs.get('ebuild').get()
+ y_ebuild = kwargs.get('y_ebuild')
+ myrestrict = None
+
+ try:
+ myrestrict = portage.dep.use_reduce(
+ ebuild.metadata["RESTRICT"], matchall=1, flat=True)
+ except portage.exception.InvalidDependString as e:
+ self.qatracker.add_error("RESTRICT.syntax",
+ "%s: RESTRICT: %s" % (ebuild.relative_path, e))
+ del e
+
+ if myrestrict:
+ myrestrict = set(myrestrict)
+ mybadrestrict = myrestrict.difference(valid_restrict)
+
+ if mybadrestrict:
+ for mybad in mybadrestrict:
+ self.qatracker.add_error("RESTRICT.invalid",
+ "%s/%s.ebuild: %s" % (xpkg, y_ebuild, mybad))
+ return False
+
+ @property
+ def runInPkgs(self):
+ return (False, [])
+
+ @property
+ def runInEbuilds(self):
+ return (True, [self.check])
+
diff --git a/repoman/pym/repoman/modules/scan/metadata/use_flags.py b/repoman/pym/repoman/modules/scan/metadata/use_flags.py
new file mode 100644
index 000000000..1738fd23e
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/metadata/use_flags.py
@@ -0,0 +1,94 @@
+# -*- coding:utf-8 -*-
+
+'''use_flags.py
+Performs USE flag related checks
+'''
+
+# import our centrally initialized portage instance
+from repoman._portage import portage
+
+from portage import eapi
+from portage.eapi import eapi_has_iuse_defaults, eapi_has_required_use
+
+
+class USEFlagChecks(object):
+ '''Performs checks on USE flags listed in the ebuilds and metadata.xml'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param qatracker: QATracker instance
+ @param globalUseFlags: Global USE flags
+ '''
+ super(USEFlagChecks, self).__init__()
+ self.qatracker = None
+ self.globalUseFlags = None
+ self.useFlags = []
+ self.defaultUseFlags = []
+ self.usedUseFlags = set()
+
+ def check_useflags(self, **kwargs):
+ '''Perform the check.
+
+ @param pkg: Package in which we check (object).
+ @param xpkg: Package in which we check (string).
+ @param ebuild: Ebuild which we check (object).
+ @param y_ebuild: Ebuild which we check (string).
+ @returns: boolean
+ '''
+ pkg = kwargs.get('pkg').get()
+ package = kwargs.get('xpkg')
+ ebuild = kwargs.get('ebuild').get()
+ y_ebuild = kwargs.get('y_ebuild')
+ # reset state variables for the run
+ self.useFlags = []
+ self.defaultUseFlags = []
+ # perform the checks
+ self._checkGlobal(pkg)
+ self._checkMetadata(package, ebuild, y_ebuild, self.muselist)
+ self._checkRequiredUSE(pkg, ebuild)
+ return False
+
+
+ def _checkGlobal(self, pkg):
+ for myflag in pkg._metadata["IUSE"].split():
+ flag_name = myflag.lstrip("+-")
+ self.usedUseFlags.add(flag_name)
+ if myflag != flag_name:
+ self.defaultUseFlags.append(myflag)
+ if flag_name not in self.globalUseFlags:
+ self.useFlags.append(flag_name)
+
+ def _checkMetadata(self, package, ebuild, y_ebuild, localUseFlags):
+ for mypos in range(len(self.useFlags) - 1, -1, -1):
+ if self.useFlags[mypos] and (self.useFlags[mypos] in localUseFlags):
+ del self.useFlags[mypos]
+
+ if self.defaultUseFlags and not eapi_has_iuse_defaults(eapi):
+ for myflag in self.defaultUseFlags:
+ self.qatracker.add_error(
+ 'EAPI.incompatible', "%s: IUSE defaults"
+ " not supported with EAPI='%s': '%s'" % (
+ ebuild.relative_path, eapi, myflag))
+
+ for mypos in range(len(self.useFlags)):
+ self.qatracker.add_error(
+ "IUSE.invalid",
+ "%s/%s.ebuild: %s" % (package, y_ebuild, self.useFlags[mypos]))
+
+ def _checkRequiredUSE(self, pkg, ebuild):
+ required_use = pkg._metadata["REQUIRED_USE"]
+ if required_use:
+ if not eapi_has_required_use(eapi):
+ self.qatracker.add_error(
+ 'EAPI.incompatible', "%s: REQUIRED_USE"
+ " not supported with EAPI='%s'"
+ % (ebuild.relative_path, eapi,))
+ try:
+ portage.dep.check_required_use(
+ required_use, (), pkg.iuse.is_valid_flag, eapi=eapi)
+ except portage.exception.InvalidDependString as e:
+ self.qatracker.add_error(
+ "REQUIRED_USE.syntax",
+ "%s: REQUIRED_USE: %s" % (ebuild.relative_path, e))
+ del e
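
_checkGlobal() above strips the '+'/'-' default markers from each IUSE token before comparing it with the global USE flag list, and remembers the tokens that carried a marker for the later EAPI compatibility check. A self-contained illustration with made-up flag lists:

global_use_flags = {'ssl', 'doc'}
iuse = "+ssl -doc introspection"

default_flags, unknown_flags = [], []
for token in iuse.split():
    flag = token.lstrip("+-")            # drop the IUSE default marker, if any
    if token != flag:
        default_flags.append(token)      # requires an EAPI with IUSE defaults
    if flag not in global_use_flags:
        unknown_flags.append(flag)       # IUSE.invalid candidate (unless declared locally)

print(default_flags)   # ['+ssl', '-doc']
print(unknown_flags)   # ['introspection']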
diff --git a/repoman/pym/repoman/modules/scan/options/__init__.py b/repoman/pym/repoman/modules/scan/options/__init__.py
new file mode 100644
index 000000000..a5746ce67
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/options/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2015-2016 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Options plug-in module for repoman.
+Performs option related actions on ebuilds."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'options',
+ 'description': doc,
+ 'provides':{
+ 'options-module': {
+ 'name': "options",
+ 'sourcefile': "options",
+ 'class': "Options",
+ 'description': doc,
+ 'functions': ['is_forced'],
+ 'func_desc': {
+ },
+ 'mod_kwargs': ['options',
+ ],
+ 'func_kwargs': {
+ },
+ },
+ }
+}
+
diff --git a/repoman/pym/repoman/modules/scan/options/options.py b/repoman/pym/repoman/modules/scan/options/options.py
new file mode 100644
index 000000000..443f01bd8
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/options/options.py
@@ -0,0 +1,29 @@
+
+from repoman.modules.scan.scanbase import ScanBase
+
+
+class Options(ScanBase):
+
+ def __init__(self, **kwargs):
+ '''Class init function
+
+ @param options: argparse options instance
+ '''
+ self.options = kwargs.get('options')
+
+ def is_forced(self, **kwargs):
+ '''Simple boolean function to trigger a skip past some additional checks
+
+ @returns: dictionary
+ '''
+ if self.options.force:
+ # The dep_check() calls are the most expensive QA test. If --force
+ # is enabled, there's no point in wasting time on these since the
+ # user is intent on forcing the commit anyway.
+ return True
+ return False
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ return (True, [self.is_forced])
diff --git a/repoman/pym/repoman/modules/scan/scan.py b/repoman/pym/repoman/modules/scan/scan.py
new file mode 100644
index 000000000..d2a5f515b
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/scan.py
@@ -0,0 +1,66 @@
+# -*- coding:utf-8 -*-
+
+'''
+modules/scan.py
+Module specific package scan list generator
+'''
+
+import logging
+import os
+import sys
+
+from repoman.errors import caterror
+
+
+def scan(repolevel, reposplit, startdir, categories, repo_settings):
+ '''Generate a list of pkgs to scan
+
+ @param repolevel: integer, number of subdirectories deep from the tree root
+ @param reposplit: list of the path subdirs
+ @param startdir: the top level directory to begin scanning from
+ @param categories: list of known categories
+ @param repo_settings: repository settings instance
+ @returns: scanlist, sorted list of pkgs to scan
+ '''
+ scanlist = []
+ if repolevel == 2:
+ # we are inside a category directory
+ catdir = reposplit[-1]
+ if catdir not in categories:
+ caterror(catdir, repo_settings.repodir)
+ mydirlist = os.listdir(startdir)
+ for x in mydirlist:
+ if x == "CVS" or x.startswith("."):
+ continue
+ if os.path.isdir(startdir + "/" + x):
+ scanlist.append(catdir + "/" + x)
+ # repo_subdir = catdir + os.sep
+ elif repolevel == 1:
+ for x in categories:
+ if not os.path.isdir(startdir + "/" + x):
+ continue
+ for y in os.listdir(startdir + "/" + x):
+ if y == "CVS" or y.startswith("."):
+ continue
+ if os.path.isdir(startdir + "/" + x + "/" + y):
+ scanlist.append(x + "/" + y)
+ # repo_subdir = ""
+ elif repolevel == 3:
+ catdir = reposplit[-2]
+ if catdir not in categories:
+ caterror(catdir, repo_settings.repodir)
+ scanlist.append(catdir + "/" + reposplit[-1])
+ # repo_subdir = scanlist[-1] + os.sep
+ else:
+ msg = 'Repoman is unable to determine PORTDIR or PORTDIR_OVERLAY' + \
+ ' from the current working directory'
+ logging.critical(msg)
+ sys.exit(1)
+
+ # repo_subdir_len = len(repo_subdir)
+ scanlist.sort()
+
+ logging.debug(
+ "Found the following packages to scan:\n%s" % '\n'.join(scanlist))
+
+ return scanlist
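
scan() above derives the package list purely from where repoman was invoked: repolevel 3 means a single cat/pkg, repolevel 2 a whole category, repolevel 1 the entire tree. A tiny sketch of the repolevel == 3 branch, with an invented path split and the category validation left out:

repolevel, reposplit = 3, ['gentoo', 'app-misc', 'hello']
if repolevel == 3:
    # inside a package directory: scan exactly that cat/pkg
    catdir, pkgdir = reposplit[-2], reposplit[-1]
    scanlist = ["%s/%s" % (catdir, pkgdir)]
print(scanlist)   # ['app-misc/hello']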
diff --git a/repoman/pym/repoman/modules/scan/scanbase.py b/repoman/pym/repoman/modules/scan/scanbase.py
new file mode 100644
index 000000000..aea1bb121
--- /dev/null
+++ b/repoman/pym/repoman/modules/scan/scanbase.py
@@ -0,0 +1,79 @@
+# -*- coding:utf-8 -*-
+
+
+class ScanBase(object):
+ '''Skeleton class for performing a scan for one or more items
+ to check in a pkg directory or ebuild.'''
+
+ def __init__(self, **kwargs):
+ '''Class init
+
+ @param kwargs: an optional dictionary of common repository
+ wide parameters that may be required.
+ '''
+ # Since no two checks are identical in which kwargs they need,
+ # none are defined here.
+ super(ScanBase, self).__init__()
+
+ """ # sample check
+ def check_foo(self, **kwargs):
+ '''Class check skeleton function. Define this for a
+ specific check to perform.
+
+ @param kwargs: an optional dictionary of dynamic package and/or ebuild
+ specific data that may be required. Dynamic data can
+ vary depending on what checks have run before it.
+ So execution order can be important.
+ '''
+ # Insert the code for the check here
+ # It should return a boolean "continue" value: False lets the loop
+ # continue with the next check in the list, while True skips the
+ # remaining checks for the current item.
+ # Include any additional dynamic data that needs to be added or updated.
+ return False # used as a continue True/False value
+ """
+
+ @property
+ def runInPkgs(self):
+ '''Package level scans'''
+ # default no run (False) and empty list of functions to run
+ # override this method to define a function or
+ # functions to run in this process loop
+ # return a tuple of a boolean or boolean result and an ordered list
+ # of functions to run. ie: return (True, [self.check_foo])
+ # in this way, it can be dynamically determined at run time, if
+ # later stage scans are to be run.
+ # This class instance is maintained for all stages, so data can be
+ # carried over from stage to stage
+ # next stage is runInEbuilds
+ return (False, [])
+
+ @property
+ def runInEbuilds(self):
+ '''Ebuild level scans'''
+ # default empty list of functions to run
+ # override this method to define a function or
+ # functions to run in this process loop
+ # return a tuple of a boolean or boolean result and an ordered list
+ # of functions to run. ie: return (True, [self.check_bar])
+ # in this way, it can be dynamically determined at run time, if
+ # later stage scans are to be run.
+ # This class instance is maintained for all stages, so data can be
+ # carried over from stage to stage
+ # next stage is runInFinal
+ return (False, [])
+
+ @property
+ def runInFinal(self):
+ '''Final scans at the package level'''
+ # default empty list of functions to run
+ # override this method to define a function or
+ # functions to run in this process loop
+ # return a tuple of a boolean or boolean result and an ordered list
+ # of functions to run. ie: return (True, [self.check_baz])
+ # in this way, it can be dynamically determined at run time, if
+ # later stage scans are to be run.
+ # This class instance is maintained for all stages, so data can be
+ # carried over from stage to stage
+ # runInFinal is currently the last stage of scans performed.
+ return (False, [])
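
Putting the ScanBase contract above to use, here is a minimal, hypothetical plug-in. The LICENSE check itself is invented for the example, but the shape (kwargs accessed through .get(), a boolean "continue" return value, and a runIn* property announcing the stage) mirrors the real checks in this series.

from repoman.modules.scan.scanbase import ScanBase


class LicensePresent(ScanBase):
    '''Hypothetical check: flag ebuilds whose LICENSE is empty.'''

    def __init__(self, **kwargs):
        super(LicensePresent, self).__init__(**kwargs)
        self.qatracker = kwargs.get('qatracker')

    def check(self, **kwargs):
        ebuild = kwargs.get('ebuild').get()   # the ebuild kwarg arrives as a Future
        if not ebuild.metadata.get('LICENSE'):
            self.qatracker.add_error('LICENSE.missing', ebuild.relative_path)
        return False                          # False: continue with the next check

    @property
    def runInEbuilds(self):
        '''Ebuild level scans'''
        return (True, [self.check])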
diff --git a/repoman/pym/repoman/modules/vcs/None/__init__.py b/repoman/pym/repoman/modules/vcs/None/__init__.py
new file mode 100644
index 000000000..285932541
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/None/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """None (non vcs type) plug-in module for portage.
+Performs various git actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'None',
+ 'description': doc,
+ 'provides':{
+ 'None-module': {
+ 'name': "None_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': False,
+ 'needs_keyword_expansion': False,
+ },
+ 'None-changes': {
+ 'name': "None_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/None/changes.py b/repoman/pym/repoman/modules/vcs/None/changes.py
new file mode 100644
index 000000000..46c38e257
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/None/changes.py
@@ -0,0 +1,50 @@
+'''
+None module Changes class submodule
+'''
+
+from repoman.modules.vcs.changes import ChangesBase
+
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'None'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+
+ def scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ pass
+
+ def add_items(self, autoadd):
+ '''Add files to the vcs's modified or new index
+
+ @param autoadd: the files to add to the vcs modified index'''
+ pass
+
+ def commit(self, myfiles, commitmessagefile):
+ '''None commit function
+
+ @param myfiles: list of files to commit
+ @param commitmessagefile: file containing the commit message
+ @returns: The sub-command exit value or 0
+ '''
+ commit_cmd = []
+ # substitute a bogus vcs value for pretend output
+ commit_cmd.append("pretend")
+ commit_cmd.extend(self.vcs_settings.vcs_global_opts)
+ commit_cmd.append("commit")
+ commit_cmd.extend(self.vcs_settings.vcs_local_opts)
+ commit_cmd.extend(["-F", commitmessagefile])
+ commit_cmd.extend(f.lstrip("./") for f in myfiles)
+
+ print("(%s)" % (" ".join(commit_cmd),))
+ return 0
diff --git a/repoman/pym/repoman/modules/vcs/None/status.py b/repoman/pym/repoman/modules/vcs/None/status.py
new file mode 100644
index 000000000..d6e5ca0e4
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/None/status.py
@@ -0,0 +1,53 @@
+'''
+None (non-VCS) module Status class submodule
+'''
+
+
+class Status(object):
+ '''Performs status checks when no VCS is in use'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the status check (a no-op when no VCS is in use)
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ '''Are there any merge conflicts present in the VCS tracking system
+
+ @param options: command line options
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return False
+
diff --git a/repoman/pym/repoman/modules/vcs/__init__.py b/repoman/pym/repoman/modules/vcs/__init__.py
new file mode 100644
index 000000000..84e837408
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/__init__.py
@@ -0,0 +1,14 @@
+
+import os
+from portage.module import Modules
+
+path = os.path.dirname(__file__)
+# initial development debug info
+#print("module path:", path)
+
+module_controller = Modules(path=path, namepath="repoman.modules.vcs")
+
+# initial development debug info
+#print(module_controller.module_names)
+module_names = module_controller.module_names[:]
+
diff --git a/repoman/pym/repoman/modules/vcs/bzr/__init__.py b/repoman/pym/repoman/modules/vcs/bzr/__init__.py
new file mode 100644
index 000000000..4490ed86c
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/bzr/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Bazaar (bzr) plug-in module for portage.
+Performs various Bazaar actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'bzr',
+ 'description': doc,
+ 'provides':{
+ 'bzr-module': {
+ 'name': "bzr_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': True,
+ 'needs_keyword_expansion': False,
+ },
+ 'bzr-changes': {
+ 'name': "bzr_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/bzr/changes.py b/repoman/pym/repoman/modules/vcs/bzr/changes.py
new file mode 100644
index 000000000..4d4808c08
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/bzr/changes.py
@@ -0,0 +1,68 @@
+'''
+Bazaar module Changes class submodule
+'''
+
+from repoman.modules.vcs.changes import ChangesBase
+from repoman._subprocess import repoman_popen
+from repoman._portage import portage
+from portage import os
+from portage.package.ebuild.digestgen import digestgen
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'bzr'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+
+ def _scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ with repoman_popen("bzr status -S .") as f:
+ bzrstatus = f.readlines()
+ self.changed = [
+ "./" + elem.split()[-1:][0].split('/')[-1:][0]
+ for elem in bzrstatus
+ if elem and elem[1:2] == "M"]
+ self.new = [
+ "./" + elem.split()[-1:][0].split('/')[-1:][0]
+ for elem in bzrstatus
+ if elem and (elem[1:2] == "NK" or elem[0:1] == "R")]
+ self.removed = [
+ "./" + elem.split()[-3:-2][0].split('/')[-1:][0]
+ for elem in bzrstatus
+ if elem and (elem[1:2] == "K" or elem[0:1] == "R")]
+ self.bzrstatus = bzrstatus
+ # Bazaar expands nothing.
+
+ @property
+ def unadded(self):
+ '''Bazaar method of getting the unadded files in the repository'''
+ if self._unadded is not None:
+ return self._unadded
+ self._unadded = [
+ "./" + elem.rstrip().split()[1].split('/')[-1:][0]
+ for elem in self.bzrstatus
+ if elem.startswith("?") or elem[0:2] == " D"]
+ return self._unadded
+
+ def digest_regen(self, updates, removed, manifests, scanner, broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ if broken_changelog_manifests:
+ for x in broken_changelog_manifests:
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
diff --git a/repoman/pym/repoman/modules/vcs/bzr/status.py b/repoman/pym/repoman/modules/vcs/bzr/status.py
new file mode 100644
index 000000000..199e7f399
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/bzr/status.py
@@ -0,0 +1,70 @@
+'''
+Bazaar module Status class submodule
+'''
+
+from repoman._portage import portage
+from portage import os
+from repoman._subprocess import repoman_popen
+
+
+class Status(object):
+ '''Performs status checks on the bzr repository'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the bzr status check
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ try:
+ myf = repoman_popen(
+ "bzr ls -v --kind=file " +
+ portage._shell_quote(checkdir))
+ myl = myf.readlines()
+ myf.close()
+ except IOError:
+ raise
+ for l in myl:
+ if l[1:2] == "?":
+ continue
+ l = l.split()[-1]
+ if l[-7:] == ".ebuild":
+ self.eadded.append(os.path.basename(l[:-7]))
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ '''Are there any merge conflicts present in the VCS tracking system
+
+ @param options: command line options
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return dirname in [".bzr"]
diff --git a/repoman/pym/repoman/modules/vcs/changes.py b/repoman/pym/repoman/modules/vcs/changes.py
new file mode 100644
index 000000000..aa4923f8f
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/changes.py
@@ -0,0 +1,169 @@
+'''
+Base Changes class
+'''
+
+import logging
+import os
+import subprocess
+import sys
+from itertools import chain
+
+from repoman._portage import portage
+from portage import _unicode_encode
+from portage.process import spawn
+
+
+class ChangesBase(object):
+ '''Base Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'None'
+
+ def __init__(self, options, repo_settings):
+ '''Class init function
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ self.options = options
+ self.repo_settings = repo_settings
+ self.repoman_settings = repo_settings.repoman_settings
+ self.vcs_settings = repo_settings.vcs_settings
+ self._reset()
+
+ def _reset(self):
+ '''Reset the class variables for a new run'''
+ self.new_ebuilds = set()
+ self.ebuilds = set()
+ self.changelogs = set()
+ self.changed = []
+ self.new = []
+ self.removed = []
+ self.no_expansion = set()
+ self._expansion = None
+ self._deleted = None
+ self._unadded = None
+
+ def scan(self):
+ '''Scan the vcs for detectable changes.
+
+ base method which calls the subclassing VCS module's _scan()
+ then updates some classwide variables.
+ '''
+ self._reset()
+
+ if self.vcs:
+ self._scan()
+ self.new_ebuilds.update(x for x in self.new if x.endswith(".ebuild"))
+ self.ebuilds.update(x for x in self.changed if x.endswith(".ebuild"))
+ self.changelogs.update(
+ x for x in chain(self.changed, self.new)
+ if os.path.basename(x) == "ChangeLog")
+
+ def _scan(self):
+ '''Placeholder for subclassing'''
+ pass
+
+ @property
+ def has_deleted(self):
+ '''Placeholder for VCS that requires manual deletion of files'''
+ return self.deleted != []
+
+ @property
+ def has_changes(self):
+ '''Placeholder for VCS repo common has changes result'''
+ changed = self.changed or self.new or self.removed or self.deleted
+ return changed != []
+
+ @property
+ def unadded(self):
+ '''Override this function as needed'''
+ return []
+
+ @property
+ def deleted(self):
+ '''Override this function as needed'''
+ return []
+
+ @property
+ def expansion(self):
+ '''Override this function as needed'''
+ return {}
+
+ def thick_manifest(self, updates, headers, no_expansion, expansion):
+ '''Create a thick manifest
+
+ @param updates:
+ @param headers:
+ @param no_expansion:
+ @param expansion:
+ '''
+ pass
+
+ def digest_regen(self, updates, removed, manifests, scanner,
+ broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ pass
+
+ @staticmethod
+ def clear_attic(headers):
+ '''Old CVS leftover
+
+ @param headers: file headers'''
+ pass
+
+ def update_index(self, mymanifests, myupdates):
+ '''Update the vcs's modified index if it is needed
+
+ @param mymanifests: manifest files updated
+ @param myupdates: other files updated'''
+ pass
+
+ def add_items(self, autoadd):
+ '''Add files to the vcs's modified or new index
+
+ @param autoadd: the files to add to the vcs modified index'''
+ add_cmd = [self.vcs, "add"]
+ add_cmd += autoadd
+ if self.options.pretend:
+ portage.writemsg_stdout(
+ "(%s)\n" % " ".join(add_cmd),
+ noiselevel=-1)
+ else:
+ add_cmd = [_unicode_encode(arg) for arg in add_cmd]
+ retcode = subprocess.call(add_cmd)
+ if retcode != os.EX_OK:
+ logging.error(
+ "Exiting on %s error code: %s\n", self.vcs_settings.vcs, retcode)
+ sys.exit(retcode)
+
+
+ def commit(self, commitfiles, commitmessagefile):
+ '''Common generic commit function
+
+ @param commitfiles: list of files to commit
+ @param commitmessagefile: file containing the commit message
+ @returns: The sub-command exit value or 0
+ '''
+ commit_cmd = []
+ commit_cmd.append(self.vcs)
+ commit_cmd.extend(self.vcs_settings.vcs_global_opts)
+ commit_cmd.append("commit")
+ commit_cmd.extend(self.vcs_settings.vcs_local_opts)
+ commit_cmd.extend(["-F", commitmessagefile])
+ commit_cmd.extend(f.lstrip("./") for f in commitfiles)
+
+ if self.options.pretend:
+ print("(%s)" % (" ".join(commit_cmd),))
+ return 0
+ else:
+ retval = spawn(commit_cmd, env=self.repo_settings.commit_env)
+ return retval
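
ChangesBase above leaves _scan(), unadded and the manifest helpers as hooks for each VCS backend, while scan() resets the state, calls _scan() and then derives the new_ebuilds, ebuilds and changelogs sets. A hypothetical minimal backend (the "demo" VCS and its hard-coded file lists are invented) only has to fill in the three change lists:

from repoman.modules.vcs.changes import ChangesBase


class Changes(ChangesBase):
    '''Hypothetical backend for a VCS called "demo".'''

    vcs = 'demo'

    def __init__(self, options, repo_settings):
        super(Changes, self).__init__(options, repo_settings)

    def _scan(self):
        # A real backend would parse the VCS status output here;
        # these hard-coded lists stand in for that.
        self.changed = ['./app-misc/hello/hello-2.12.ebuild']
        self.new = ['./app-misc/hello/ChangeLog']
        self.removed = []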
diff --git a/repoman/pym/repoman/modules/vcs/cvs/__init__.py b/repoman/pym/repoman/modules/vcs/cvs/__init__.py
new file mode 100644
index 000000000..0b4587bc6
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/cvs/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """CVS (cvs) plug-in module for portage.
+Performs various CVS actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'cvs',
+ 'description': doc,
+ 'provides':{
+ 'cvs-status': {
+ 'name': "cvs_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': True,
+ 'needs_keyword_expansion': True,
+ },
+ 'cvs-changes': {
+ 'name': "cvs_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/cvs/changes.py b/repoman/pym/repoman/modules/vcs/cvs/changes.py
new file mode 100644
index 000000000..c3d880bdb
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/cvs/changes.py
@@ -0,0 +1,118 @@
+'''
+CVS module Changes class submodule
+'''
+
+import re
+from itertools import chain
+
+from repoman._portage import portage
+from repoman.modules.vcs.changes import ChangesBase
+from repoman.modules.vcs.vcs import vcs_files_to_cps
+from repoman._subprocess import repoman_getstatusoutput
+
+from portage import _encodings, _unicode_encode
+from portage import cvstree, os
+from portage.output import green
+from portage.package.ebuild.digestgen import digestgen
+
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'cvs'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+ self._tree = None
+
+ def _scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ self._tree = portage.cvstree.getentries("./", recursive=1)
+ self.changed = cvstree.findchanged(self._tree, recursive=1, basedir="./")
+ self.new = cvstree.findnew(self._tree, recursive=1, basedir="./")
+ self.removed = cvstree.findremoved(self._tree, recursive=1, basedir="./")
+ bin_blob_pattern = re.compile("^-kb$")
+ self.no_expansion = set(portage.cvstree.findoption(
+ self._tree, bin_blob_pattern, recursive=1, basedir="./"))
+
+ @property
+ def unadded(self):
+ '''VCS method of getting the unadded files in the repository'''
+ if self._unadded is not None:
+ return self._unadded
+ self._unadded = portage.cvstree.findunadded(self._tree, recursive=1, basedir="./")
+ return self._unadded
+
+ @staticmethod
+ def clear_attic(headers):
+ '''Clear the attic (inactive files)
+
+ @param headers: file headers
+ '''
+ cvs_header_re = re.compile(br'^#\s*\$Header.*\$$')
+ attic_str = b'/Attic/'
+ attic_replace = b'/'
+ for x in headers:
+ f = open(
+ _unicode_encode(x, encoding=_encodings['fs'], errors='strict'),
+ mode='rb')
+ mylines = f.readlines()
+ f.close()
+ modified = False
+ for i, line in enumerate(mylines):
+ if cvs_header_re.match(line) is not None and \
+ attic_str in line:
+ mylines[i] = line.replace(attic_str, attic_replace)
+ modified = True
+ if modified:
+ portage.util.write_atomic(x, b''.join(mylines), mode='wb')
+
+ def thick_manifest(self, updates, headers, no_expansion, expansion):
+ '''Create a thick manifest
+
+ @param updates:
+ @param headers:
+ @param no_expansion:
+ @param expansion:
+ '''
+ headerstring = r"'\$(Header|Id).*\$'"
+
+ for _file in updates:
+
+ # for CVS, no_expansion contains files that are excluded from expansion
+ if _file in no_expansion:
+ continue
+
+ _out = repoman_getstatusoutput(
+ "egrep -q %s %s" % (headerstring, portage._shell_quote(_file)))
+ if _out[0] == 0:
+ headers.append(_file)
+
+ print("%s have headers that will change." % green(str(len(headers))))
+ print(
+ "* Files with headers will"
+ " cause the manifests to be changed and committed separately.")
+
+ def digest_regen(self, updates, removed, manifests, scanner, broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ if updates or removed:
+ for x in sorted(vcs_files_to_cps(
+ chain(updates, removed, manifests),
+ self.repo_settings.repodir,
+ scanner.repolevel, scanner.reposplit, scanner.categories)):
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
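
clear_attic() above rewrites CVS $Header$ lines so that a file resurrected from the Attic no longer advertises it. A standalone illustration (the header path is made up) of the substitution it performs on a matching line:

    import re

    cvs_header_re = re.compile(br'^#\s*\$Header.*\$$')
    line = b'# $Header: /var/cvsroot/gentoo-x86/app-misc/foo/Attic/foo-1.ebuild,v 1.1 $\n'
    if cvs_header_re.match(line) is not None and b'/Attic/' in line:
        line = line.replace(b'/Attic/', b'/')
    print(line)
    # b'# $Header: /var/cvsroot/gentoo-x86/app-misc/foo/foo-1.ebuild,v 1.1 $\n'
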
diff --git a/repoman/pym/repoman/modules/vcs/cvs/status.py b/repoman/pym/repoman/modules/vcs/cvs/status.py
new file mode 100644
index 000000000..b936aa7d9
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/cvs/status.py
@@ -0,0 +1,131 @@
+'''
+CVS module Status class submodule
+'''
+
+import logging
+import subprocess
+import sys
+
+from repoman._portage import portage
+from portage import os
+from portage.const import BASH_BINARY
+from portage.output import red, green
+from portage import _unicode_encode, _unicode_decode
+
+
+class Status(object):
+ '''Performs status checks on the CVS repository'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the CVS status check
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ try:
+ myf = open(checkdir + "/CVS/Entries", "r")
+ myl = myf.readlines()
+ myf.close()
+ except IOError:
+ self.qatracker.add_error(
+ "CVS/Entries.IO_error", checkdir + "/CVS/Entries")
+ return True
+ for l in myl:
+ if l[0] != "/":
+ continue
+ splitl = l[1:].split("/")
+ if not len(splitl):
+ continue
+ if splitl[0][-7:] == ".ebuild":
+ self.eadded.append(splitl[0][:-7])
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ """Determine if the checkout has cvs conflicts.
+
+ TODO(antarus): Also this should probably not call sys.exit() as
+ repoman is run on >1 packages and one failure should not cause
+ subsequent packages to fail.
+
+ Returns:
+ False (calls sys.exit on fatal problems)
+ """
+
+ cmd = ("cvs -n up 2>/dev/null | "
+ "egrep '^[^\?] .*' | "
+ "egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
+ msg = ("Performing a %s with a little magic grep to check for updates."
+ % green("cvs -n up"))
+
+ logging.info(msg)
+ # Use Popen instead of getstatusoutput(), in order to avoid
+ # unicode handling problems (see bug #310789).
+ args = [BASH_BINARY, "-c", cmd]
+ args = [_unicode_encode(x) for x in args]
+ proc = subprocess.Popen(
+ args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out = _unicode_decode(proc.communicate()[0])
+ proc.wait()
+ mylines = out.splitlines()
+ myupdates = []
+ for line in mylines:
+ if not line:
+ continue
+
+ # [ ] Unmodified [U] Updates [P] Patches
+ # [M] Modified [A] Added [R] Removed / Replaced
+ # [D] Deleted
+ if line[0] not in " UPMARD":
+ # Stray Manifest is fine, we will re-add it anyway.
+ if line[0] == '?' and line[1:].lstrip() == 'Manifest':
+ continue
+ logging.error(red(
+ "!!! Please fix the following issues reported "
+ "from cvs: %s" % green("(U,P,M,A,R,D are ok)")))
+ logging.error(red(
+ "!!! Note: This is a pretend/no-modify pass..."))
+ logging.error(out)
+ sys.exit(1)
+ elif line[0] in "UP":
+ myupdates.append(line[2:])
+
+ if myupdates:
+ logging.info(green("Fetching trivial updates..."))
+ if options.pretend:
+ logging.info("(cvs update " + " ".join(myupdates) + ")")
+ retval = os.EX_OK
+ else:
+ retval = os.system("cvs update " + " ".join(myupdates))
+ if retval != os.EX_OK:
+ logging.fatal("!!! cvs exited with an error. Terminating.")
+ sys.exit(retval)
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return dirname in ["CVS"]
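
check() above scrapes CVS/Entries rather than shelling out: every line starting with "/" names a tracked file, and the first "/"-separated field is the file name. A standalone sketch of that parsing (the Entries lines are invented but follow the /name/revision/timestamp/options/tag format):

    entries = [
        "/foo-1.0.ebuild/1.2/Mon Apr  1 00:00:00 2015//\n",
        "D/files////\n",
    ]
    eadded = []
    for l in entries:
        if l[0] != "/":
            continue
        splitl = l[1:].split("/")
        if not len(splitl):
            continue
        if splitl[0][-7:] == ".ebuild":
            eadded.append(splitl[0][:-7])    # strip the ".ebuild" suffix
    print(eadded)   # ['foo-1.0']
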
diff --git a/repoman/pym/repoman/modules/vcs/git/__init__.py b/repoman/pym/repoman/modules/vcs/git/__init__.py
new file mode 100644
index 000000000..eecd4a1d0
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/git/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Git (git) plug-in module for portage.
+Performs various git actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'git',
+ 'description': doc,
+ 'provides':{
+ 'git-module': {
+ 'name': "git_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': False,
+ 'needs_keyword_expansion': False,
+ },
+ 'git-changes': {
+ 'name': "git_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/git/changes.py b/repoman/pym/repoman/modules/vcs/git/changes.py
new file mode 100644
index 000000000..7e9ac1eb5
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/git/changes.py
@@ -0,0 +1,120 @@
+'''
+Git module Changes class submodule
+'''
+
+import logging
+import sys
+
+from repoman.modules.vcs.changes import ChangesBase
+from repoman._subprocess import repoman_popen
+from repoman._portage import portage
+from portage import os
+from portage.package.ebuild.digestgen import digestgen
+from portage.process import spawn
+from portage.util import writemsg_level
+
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'git'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+
+ def _scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ with repoman_popen(
+ "git diff-index --name-only "
+ "--relative --diff-filter=M HEAD") as f:
+ changed = f.readlines()
+ self.changed = ["./" + elem[:-1] for elem in changed]
+ del changed
+
+ with repoman_popen(
+ "git diff-index --name-only "
+ "--relative --diff-filter=A HEAD") as f:
+ new = f.readlines()
+ self.new = ["./" + elem[:-1] for elem in new]
+ del new
+
+ with repoman_popen(
+ "git diff-index --name-only "
+ "--relative --diff-filter=D HEAD") as f:
+ removed = f.readlines()
+ self.removed = ["./" + elem[:-1] for elem in removed]
+ del removed
+
+ @property
+ def unadded(self):
+ '''VCS method of getting the unadded files in the repository'''
+ if self._unadded is not None:
+ return self._unadded
+ # get list of files not under version control or missing
+ with repoman_popen("git ls-files --others") as f:
+ unadded = f.readlines()
+ self._unadded = ["./" + elem[:-1] for elem in unadded]
+ del unadded
+ return self._unadded
+
+ def digest_regen(self, updates, removed, manifests, scanner, broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ if broken_changelog_manifests:
+ for x in broken_changelog_manifests:
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
+
+ def update_index(self, mymanifests, myupdates):
+ '''Update the vcs's modified index if it is needed
+
+ @param mymanifests: manifest files updated
+ @param myupdates: other files updated'''
+ # It's not safe to use the git commit -a option since there might
+ # be some modified files elsewhere in the working tree that the
+ # user doesn't want to commit. Therefore, call git update-index
+ # in order to ensure that the index is updated with the latest
+ # versions of all new and modified files in the relevant portion
+ # of the working tree.
+ myfiles = mymanifests + myupdates
+ myfiles.sort()
+ update_index_cmd = ["git", "update-index"]
+ update_index_cmd.extend(f.lstrip("./") for f in myfiles)
+ if self.options.pretend:
+ print("(%s)" % (" ".join(update_index_cmd),))
+ else:
+ retval = spawn(update_index_cmd, env=os.environ)
+ if retval != os.EX_OK:
+ writemsg_level(
+ "!!! Exiting on %s (shell) "
+ "error code: %s\n" % (self.vcs_settings.vcs, retval),
+ level=logging.ERROR, noiselevel=-1)
+ sys.exit(retval)
+
+ def commit(self, myfiles, commitmessagefile):
+ '''Git commit function
+
+ @param myfiles: list of files to commit
+ @param commitmessagefile: file containing the commit message
+ @returns: The sub-command exit value or 0
+ '''
+ retval = super(Changes, self).commit(myfiles, commitmessagefile)
+ if retval != os.EX_OK:
+ if self.repo_settings.repo_config.sign_commit and not self.vcs_settings.status.supports_gpg_sign():
+ # Inform user that newer git is needed (bug #403323).
+ logging.error(
+ "Git >=1.7.9 is required for signed commits!")
+ return retval
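
_scan() above prefixes every path reported by `git diff-index` with "./" for repoman's internal bookkeeping, and update_index() strips that prefix again before calling `git update-index`. A standalone round trip with sample output (the file names are made up):

    raw = ["app-misc/foo/foo-1.ebuild\n", "app-misc/foo/Manifest\n"]
    changed = ["./" + elem[:-1] for elem in raw]       # drop trailing newline
    update_index_cmd = ["git", "update-index"]
    update_index_cmd.extend(f.lstrip("./") for f in sorted(changed))
    print("(%s)" % (" ".join(update_index_cmd),))
    # (git update-index app-misc/foo/Manifest app-misc/foo/foo-1.ebuild)
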
diff --git a/repoman/pym/repoman/modules/vcs/git/status.py b/repoman/pym/repoman/modules/vcs/git/status.py
new file mode 100644
index 000000000..48a73bed3
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/git/status.py
@@ -0,0 +1,79 @@
+'''
+Git module Status class submodule
+'''
+
+import re
+
+from repoman._portage import portage
+from portage import os
+from repoman._subprocess import repoman_popen, repoman_getstatusoutput
+
+
+class Status(object):
+ '''Performs status checks on the git repository'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the git status check
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ myf = repoman_popen(
+ "git ls-files --others %s" %
+ (portage._shell_quote(checkdir_relative),))
+ for l in myf:
+ if l[:-1][-7:] == ".ebuild":
+ self.qatracker.add_error(
+ "ebuild.notadded",
+ os.path.join(xpkg, os.path.basename(l[:-1])))
+ myf.close()
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ '''Are there any merge conflicts present in the VCS tracking system
+
+ @param options: command line options
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ status, cmd_output = \
+ repoman_getstatusoutput("git --version")
+ cmd_output = cmd_output.split()
+ if cmd_output:
+ version = re.match(r'^(\d+)\.(\d+)\.(\d+)', cmd_output[-1])
+ if version is not None:
+ version = [int(x) for x in version.groups()]
+ if version[0] > 1 or \
+ (version[0] == 1 and version[1] > 7) or \
+ (version[0] == 1 and version[1] == 7 and version[2] >= 9):
+ return True
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return dirname in [".git"]
+
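
supports_gpg_sign() above parses `git --version` and requires git >=1.7.9, the version needed for signed commits (see the note in changes.py). The same comparison, fed sample version strings instead of a real subprocess call:

    import re

    def signing_capable(cmd_output):
        # cmd_output is the text of "git --version", e.g. "git version 2.7.3"
        cmd_output = cmd_output.split()
        if cmd_output:
            version = re.match(r'^(\d+)\.(\d+)\.(\d+)', cmd_output[-1])
            if version is not None:
                version = [int(x) for x in version.groups()]
                if version[0] > 1 or \
                    (version[0] == 1 and version[1] > 7) or \
                    (version[0] == 1 and version[1] == 7 and version[2] >= 9):
                    return True
        return False

    print(signing_capable("git version 1.7.8"))   # False
    print(signing_capable("git version 2.7.3"))   # True
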
diff --git a/repoman/pym/repoman/modules/vcs/hg/__init__.py b/repoman/pym/repoman/modules/vcs/hg/__init__.py
new file mode 100644
index 000000000..2e39970f7
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/hg/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Mercurial (hg) plug-in module for portage.
+Performs various Mercurial actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'hg',
+ 'description': doc,
+ 'provides':{
+ 'hg-module': {
+ 'name': "hg_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': False,
+ 'needs_keyword_expansion': False,
+ },
+ 'hg-changes': {
+ 'name': "hg_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/hg/changes.py b/repoman/pym/repoman/modules/vcs/hg/changes.py
new file mode 100644
index 000000000..867057545
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/hg/changes.py
@@ -0,0 +1,105 @@
+'''
+Mercurial module Changes class submodule
+'''
+
+from repoman.modules.vcs.changes import ChangesBase
+from repoman._subprocess import repoman_popen
+from repoman._portage import portage
+from portage import os
+from portage.package.ebuild.digestgen import digestgen
+from portage.process import spawn
+
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'hg'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+
+ def _scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ with repoman_popen("hg status --no-status --modified .") as f:
+ changed = f.readlines()
+ self.changed = ["./" + elem.rstrip() for elem in changed]
+ del changed
+
+ with repoman_popen("hg status --no-status --added .") as f:
+ new = f.readlines()
+ self.new = ["./" + elem.rstrip() for elem in new]
+ del new
+
+ with repoman_popen("hg status --no-status --removed .") as f:
+ removed = f.readlines()
+ self.removed = ["./" + elem.rstrip() for elem in removed]
+ del removed
+
+ @property
+ def unadded(self):
+ '''VCS method of getting the unadded files in the repository'''
+ if self._unadded is not None:
+ return self._unadded
+ with repoman_popen("hg status --no-status --unknown .") as f:
+ unadded = f.readlines()
+ self._unadded = ["./" + elem.rstrip() for elem in unadded]
+ del unadded
+ return self._unadded
+
+ @property
+ def deleted(self):
+ '''VCS method of getting the deleted files in the repository'''
+ if self._deleted is not None:
+ return self._deleted
+ # Mercurial doesn't handle manually deleted files as removed from
+ # the repository, so the user needs to remove them before committing,
+ # using "hg remove [FILES]"
+ with repoman_popen("hg status --no-status --deleted .") as f:
+ deleted = f.readlines()
+ self._deleted = ["./" + elem.rstrip() for elem in deleted]
+ del deleted
+ return self._deleted
+
+
+ def digest_regen(self, updates, removed, manifests, scanner, broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ if broken_changelog_manifests:
+ for x in broken_changelog_manifests:
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
+
+ def commit(self, myfiles, commitmessagefile):
+ '''Hg commit function
+
+ @param myfiles: list of files to commit
+ @param commitmessagefile: file containing the commit message
+ @returns: The sub-command exit value or 0
+ '''
+ commit_cmd = []
+ commit_cmd.append(self.vcs)
+ commit_cmd.extend(self.vcs_settings.vcs_global_opts)
+ commit_cmd.append("commit")
+ commit_cmd.extend(self.vcs_settings.vcs_local_opts)
+ commit_cmd.extend(["--logfile", commitmessagefile])
+ commit_cmd.extend(myfiles)
+
+ if self.options.pretend:
+ print("(%s)" % (" ".join(commit_cmd),))
+ return 0
+ else:
+ retval = spawn(commit_cmd, env=self.repo_settings.commit_env)
+ return retval
diff --git a/repoman/pym/repoman/modules/vcs/hg/status.py b/repoman/pym/repoman/modules/vcs/hg/status.py
new file mode 100644
index 000000000..8443554f5
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/hg/status.py
@@ -0,0 +1,65 @@
+'''
+Mercurial module Status class submodule
+'''
+
+from repoman._portage import portage
+from portage import os
+from repoman._subprocess import repoman_popen
+
+
+class Status(object):
+ '''Performs status checks on the Mercurial repository'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the hg status check
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ myf = repoman_popen(
+ "hg status --no-status --unknown %s" %
+ (portage._shell_quote(checkdir_relative),))
+ for l in myf:
+ if l[:-1][-7:] == ".ebuild":
+ self.qatracker.add_error(
+ "ebuild.notadded",
+ os.path.join(xpkg, os.path.basename(l[:-1])))
+ myf.close()
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ '''Are there any merge conflicts present in the VCS tracking system
+
+ @param options: command line options
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return dirname in [".hg"]
diff --git a/repoman/pym/repoman/modules/vcs/settings.py b/repoman/pym/repoman/modules/vcs/settings.py
new file mode 100644
index 000000000..a8e91dd27
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/settings.py
@@ -0,0 +1,108 @@
+'''
+Repoman VCSSettings modules
+'''
+
+from __future__ import print_function, unicode_literals
+
+import logging
+import sys
+
+from portage.output import red
+from repoman.modules.vcs import module_controller, module_names
+from repoman.modules.vcs.vcs import FindVCS
+from repoman.qa_tracker import QATracker
+
+
+class VCSSettings(object):
+ '''Holds various VCS settings'''
+
+ def __init__(self, options=None, repoman_settings=None, repo_settings=None):
+ '''Class init function
+
+ @param options: the run time cli options
+ @param repoman_settings: portage.config settings instance
+ @param repo_settings: RepoSettings instance
+ '''
+ self.options = options
+ self.repoman_settings = repoman_settings
+ self.repo_settings = repo_settings
+ if options.vcs:
+ if options.vcs in module_names:
+ self.vcs = options.vcs
+ else:
+ self.vcs = None
+ else:
+ vcses = FindVCS()
+ if len(vcses) > 1:
+ print(red(
+ '*** Ambiguous workdir -- more than one VCS found'
+ ' at the same depth: %s.' % ', '.join(vcses)))
+ print(red(
+ '*** Please either clean up your workdir'
+ ' or specify --vcs option.'))
+ sys.exit(1)
+ elif vcses:
+ self.vcs = vcses[0]
+ else:
+ self.vcs = None
+
+ if options.if_modified == "y" and self.vcs is None:
+ logging.info(
+ "Not in a version controlled repository; "
+ "disabling --if-modified.")
+ options.if_modified = "n"
+
+ # initialize our instance placeholders
+ self._status = None
+ self._changes = None
+ # get our vcs plugin controller and available module names
+ self.module_controller = module_controller
+ self.module_names = module_names
+
+ # Disable copyright/mtime check if vcs does not preserve mtime (bug #324075).
+ if str(self.vcs) in self.module_controller.parents:
+ self.vcs_preserves_mtime = module_controller.modules[
+ "%s_status" % self.vcs]['vcs_preserves_mtime']
+ else:
+ self.vcs_preserves_mtime = False
+ logging.error("VCSSettings: Unknown VCS type: %s", self.vcs)
+ logging.error("Available modules: %s", module_controller.parents)
+
+ self.needs_keyword_expansion = module_controller.modules[
+ "%s_status" % self.vcs]['needs_keyword_expansion']
+ self.vcs_local_opts = repoman_settings.get(
+ "REPOMAN_VCS_LOCAL_OPTS", "").split()
+ self.vcs_global_opts = repoman_settings.get(
+ "REPOMAN_VCS_GLOBAL_OPTS")
+ if self.vcs_global_opts is None:
+ if self.vcs in ('cvs', 'svn'):
+ self.vcs_global_opts = "-q"
+ else:
+ self.vcs_global_opts = ""
+ self.vcs_global_opts = self.vcs_global_opts.split()
+
+ if options.mode == 'commit' and not options.pretend and not self.vcs:
+ logging.info(
+ "Not in a version controlled repository; "
+ "enabling pretend mode.")
+ options.pretend = True
+ self.qatracker = QATracker()
+ self.eadded = []
+
+ @property
+ def status(self):
+ '''Initializes and returns the class instance
+ of the vcs's Status class'''
+ if not self._status:
+ status = self.module_controller.get_class('%s_status' % self.vcs)
+ self._status = status(self.qatracker, self.eadded)
+ return self._status
+
+ @property
+ def changes(self):
+ '''Initializes and returns the class instance
+ of the vcs's Changes class'''
+ if not self._changes:
+ changes = self.module_controller.get_class('%s_changes' % self.vcs)
+ self._changes = changes(self.options, self.repo_settings)
+ return self._changes
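
The two lazy properties above resolve classes by key: the VCS name plus a "_status" or "_changes" suffix, matching the 'name' fields declared in each plug-in's module_spec (e.g. "git_status", "cvs_changes"). A toy stand-in for that lookup (the controller class below is invented, not portage's real module API):

    class ToyController(object):
        def __init__(self, classes):
            self._classes = classes
        def get_class(self, name):
            return self._classes[name]

    class GitStatus(object):
        def __init__(self, qatracker, eadded):
            self.qatracker, self.eadded = qatracker, eadded

    controller = ToyController({'git_status': GitStatus})
    status_cls = controller.get_class('%s_status' % 'git')
    status = status_cls(qatracker=None, eadded=[])
    print(type(status).__name__)   # GitStatus
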
diff --git a/repoman/pym/repoman/modules/vcs/svn/__init__.py b/repoman/pym/repoman/modules/vcs/svn/__init__.py
new file mode 100644
index 000000000..6bb0b9af4
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/svn/__init__.py
@@ -0,0 +1,34 @@
+# Copyright 2014-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+doc = """Subversion (svn) plug-in module for portage.
+Performs various Subversion actions and checks on repositories."""
+__doc__ = doc[:]
+
+
+module_spec = {
+ 'name': 'svn',
+ 'description': doc,
+ 'provides':{
+ 'svn-module': {
+ 'name': "svn_status",
+ 'sourcefile': "status",
+ 'class': "Status",
+ 'description': doc,
+ 'functions': ['check', 'supports_gpg_sign', 'detect_conflicts'],
+ 'func_desc': {
+ },
+ 'vcs_preserves_mtime': False,
+ 'needs_keyword_expansion': True,
+ },
+ 'svn-changes': {
+ 'name': "svn_changes",
+ 'sourcefile': "changes",
+ 'class': "Changes",
+ 'description': doc,
+ 'functions': ['scan'],
+ 'func_desc': {
+ },
+ },
+ }
+}
diff --git a/repoman/pym/repoman/modules/vcs/svn/changes.py b/repoman/pym/repoman/modules/vcs/svn/changes.py
new file mode 100644
index 000000000..d83c7c45f
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/svn/changes.py
@@ -0,0 +1,141 @@
+'''
+Subversion module Changes class submodule
+'''
+
+from itertools import chain
+
+from repoman.modules.vcs.changes import ChangesBase
+from repoman._subprocess import repoman_popen
+from repoman._subprocess import repoman_getstatusoutput
+from repoman.modules.vcs.vcs import vcs_files_to_cps
+from repoman._portage import portage
+from portage import os
+from portage.output import green
+from portage.package.ebuild.digestgen import digestgen
+
+
+class Changes(ChangesBase):
+ '''Class object to scan and hold the resultant data
+ for all changes to process.
+ '''
+
+ vcs = 'svn'
+
+ def __init__(self, options, repo_settings):
+ '''Class init
+
+ @param options: the run time cli options
+ @param repo_settings: RepoSettings instance
+ '''
+ super(Changes, self).__init__(options, repo_settings)
+
+ def _scan(self):
+ '''VCS type scan function, looks for all detectable changes'''
+ with repoman_popen("svn status") as f:
+ svnstatus = f.readlines()
+ self.changed = [
+ "./" + elem.split()[-1:][0]
+ for elem in svnstatus
+ if elem and elem[:1] in "MR"]
+ self.new = [
+ "./" + elem.split()[-1:][0]
+ for elem in svnstatus
+ if elem.startswith("A")]
+ self.removed = [
+ "./" + elem.split()[-1:][0]
+ for elem in svnstatus
+ if elem.startswith("D")]
+
+ @property
+ def expansion(self):
+ '''VCS method of getting the expanded keywords in the repository'''
+ if self._expansion is not None:
+ return self._expansion
+ # Subversion expands keywords specified in svn:keywords properties.
+ with repoman_popen("svn propget -R svn:keywords") as f:
+ props = f.readlines()
+ self._expansion = dict(
+ ("./" + prop.split(" - ")[0], prop.split(" - ")[1].split())
+ for prop in props if " - " in prop)
+ del props
+ return self._expansion
+
+ @property
+ def unadded(self):
+ '''VCS method of getting the unadded files in the repository'''
+ if self._unadded is not None:
+ return self._unadded
+ with repoman_popen("svn status --no-ignore") as f:
+ svnstatus = f.readlines()
+ self._unadded = [
+ "./" + elem.rstrip().split()[1]
+ for elem in svnstatus
+ if elem.startswith("?") or elem.startswith("I")]
+ del svnstatus
+ return self._unadded
+
+ def thick_manifest(self, updates, headers, no_expansion, expansion):
+ '''Create a thick manifest
+
+ @param updates:
+ @param headers:
+ @param no_expansion:
+ @param expansion:
+ '''
+ svn_keywords = dict((k.lower(), k) for k in [
+ "Rev",
+ "Revision",
+ "LastChangedRevision",
+ "Date",
+ "LastChangedDate",
+ "Author",
+ "LastChangedBy",
+ "URL",
+ "HeadURL",
+ "Id",
+ "Header",
+ ])
+
+ for _file in updates:
+ # for SVN, expansion contains files that are included in expansion
+ if _file not in expansion:
+ continue
+
+ # Subversion keywords are case-insensitive
+ # in svn:keywords properties,
+ # but case-sensitive in contents of files.
+ enabled_keywords = []
+ for k in expansion[_file]:
+ keyword = svn_keywords.get(k.lower())
+ if keyword is not None:
+ enabled_keywords.append(keyword)
+
+ headerstring = r"'\$(%s).*\$'" % "|".join(enabled_keywords)
+
+ _out = repoman_getstatusoutput(
+ "egrep -q %s %s" % (headerstring, portage._shell_quote(_file)))
+ if _out[0] == 0:
+ headers.append(_file)
+
+ print("%s have headers that will change." % green(str(len(headers))))
+ print(
+ "* Files with headers will"
+ " cause the manifests to be changed and committed separately.")
+
+ def digest_regen(self, updates, removed, manifests, scanner, broken_changelog_manifests):
+ '''Regenerate manifests
+
+ @param updates: updated files
+ @param removed: removed files
+ @param manifests: Manifest files
+ @param scanner: The repoman.scanner.Scanner instance
+ @param broken_changelog_manifests: broken changelog manifests
+ '''
+ if updates or removed:
+ for x in sorted(vcs_files_to_cps(
+ chain(updates, removed, manifests), self.repo_settings.repodir,
+ scanner.repolevel, scanner.reposplit, scanner.categories)):
+ self.repoman_settings["O"] = os.path.join(self.repo_settings.repodir, x)
+ digestgen(mysettings=self.repoman_settings, myportdb=self.repo_settings.portdb)
+
+
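
The expansion property above turns `svn propget -R svn:keywords` output of the form "path - keyword keyword ..." into a path-to-keyword-list mapping. The same transform on made-up property lines:

    props = [
        "app-misc/foo/foo-1.ebuild - Id Header\n",
        "app-misc/foo/metadata.xml - Id\n",
    ]
    expansion = dict(
        ("./" + prop.split(" - ")[0], prop.split(" - ")[1].split())
        for prop in props if " - " in prop)
    print(expansion)
    # {'./app-misc/foo/foo-1.ebuild': ['Id', 'Header'],
    #  './app-misc/foo/metadata.xml': ['Id']}
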
diff --git a/repoman/pym/repoman/modules/vcs/svn/status.py b/repoman/pym/repoman/modules/vcs/svn/status.py
new file mode 100644
index 000000000..6575fe0b0
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/svn/status.py
@@ -0,0 +1,150 @@
+'''
+Subversion module Status class submodule
+'''
+
+import logging
+import subprocess
+import sys
+
+from repoman._portage import portage
+from portage import os
+from portage.const import BASH_BINARY
+from portage.output import red, green
+from portage import _unicode_encode, _unicode_decode
+
+from repoman._subprocess import repoman_popen
+
+
+class Status(object):
+ '''Performs status checks on the svn repository'''
+
+ def __init__(self, qatracker, eadded):
+ '''Class init
+
+ @param qatracker: QATracker class instance
+ @param eadded: list
+ '''
+ self.qatracker = qatracker
+ self.eadded = eadded
+
+ def check(self, checkdir, checkdir_relative, xpkg):
+ '''Perform the svn status check
+
+ @param checkdir: string of the directory being checked
+ @param checkdir_relative: string of the relative directory being checked
+ @param xpkg: string of the package being checked
+ @returns: boolean
+ '''
+ try:
+ myf = repoman_popen(
+ "svn status --depth=files --verbose " +
+ portage._shell_quote(checkdir))
+ myl = myf.readlines()
+ myf.close()
+ except IOError:
+ raise
+ for l in myl:
+ if l[:1] == "?":
+ continue
+ if l[:7] == '      >':
+ # tree conflict, new in subversion 1.6
+ continue
+ l = l.split()[-1]
+ if l[-7:] == ".ebuild":
+ self.eadded.append(os.path.basename(l[:-7]))
+ try:
+ myf = repoman_popen(
+ "svn status " +
+ portage._shell_quote(checkdir))
+ myl = myf.readlines()
+ myf.close()
+ except IOError:
+ raise
+ for l in myl:
+ if l[0] == "A":
+ l = l.rstrip().split(' ')[-1]
+ if l[-7:] == ".ebuild":
+ self.eadded.append(os.path.basename(l[:-7]))
+ return True
+
+ @staticmethod
+ def detect_conflicts(options):
+ """Determine if the checkout has problems like cvs conflicts.
+
+ If you want more vcs support here just keep adding if blocks...
+ This could be better.
+
+ TODO(antarus): Also this should probably not call sys.exit() as
+ repoman is run on >1 packages and one failure should not cause
+ subsequent packages to fail.
+
+ Args:
+ options - the run time cli options
+ Returns: boolean
+ (calls sys.exit on fatal problems)
+ """
+
+ cmd = "svn status -u 2>&1 | egrep -v '^. +.*/digest-[^/]+' | head -n-1"
+ msg = ("Performing a %s with a little magic grep to check for updates."
+ % green("svn status -u"))
+
+ logging.info(msg)
+ # Use Popen instead of getstatusoutput(), in order to avoid
+ # unicode handling problems (see bug #310789).
+ args = [BASH_BINARY, "-c", cmd]
+ args = [_unicode_encode(x) for x in args]
+ proc = subprocess.Popen(
+ args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ out = _unicode_decode(proc.communicate()[0])
+ proc.wait()
+ mylines = out.splitlines()
+ myupdates = []
+ for line in mylines:
+ if not line:
+ continue
+
+ # [ ] Unmodified (SVN) [U] Updates [P] Patches
+ # [M] Modified [A] Added [R] Removed / Replaced
+ # [D] Deleted
+ if line[0] not in " UPMARD":
+ # Stray Manifest is fine, we will re-add it anyway.
+ if line[0] == '?' and line[1:].lstrip() == 'Manifest':
+ continue
+ logging.error(red(
+ "!!! Please fix the following issues reported "
+ "from cvs: %s" % green("(U,P,M,A,R,D are ok)")))
+ logging.error(red(
+ "!!! Note: This is a pretend/no-modify pass..."))
+ logging.error(out)
+ sys.exit(1)
+ elif line[8] == '*':
+ myupdates.append(line[9:].lstrip(" 1234567890"))
+
+ if myupdates:
+ logging.info(green("Fetching trivial updates..."))
+ if options.pretend:
+ logging.info("(svn update " + " ".join(myupdates) + ")")
+ retval = os.EX_OK
+ else:
+ retval = os.system("svn update " + " ".join(myupdates))
+ if retval != os.EX_OK:
+ logging.fatal("!!! svn exited with an error. Terminating.")
+ sys.exit(retval)
+ return False
+
+ @staticmethod
+ def supports_gpg_sign():
+ '''Does this vcs system support gpg commit signatures
+
+ @returns: Boolean
+ '''
+ return False
+
+ @staticmethod
+ def isVcsDir(dirname):
+ '''Does the directory belong to the vcs system
+
+ @param dirname: string, directory name
+ @returns: Boolean
+ '''
+ return dirname in [".svn"]
diff --git a/repoman/pym/repoman/modules/vcs/vcs.py b/repoman/pym/repoman/modules/vcs/vcs.py
new file mode 100644
index 000000000..e9d45d4c8
--- /dev/null
+++ b/repoman/pym/repoman/modules/vcs/vcs.py
@@ -0,0 +1,149 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import collections
+import logging
+from itertools import chain
+
+from portage import os
+
+
+_vcs_type = collections.namedtuple('_vcs_type', 'name dir_name')
+
+_FindVCS_data = (
+ _vcs_type(
+ name='git',
+ dir_name='.git'
+ ),
+ _vcs_type(
+ name='bzr',
+ dir_name='.bzr'
+ ),
+ _vcs_type(
+ name='hg',
+ dir_name='.hg'
+ ),
+ _vcs_type(
+ name='svn',
+ dir_name='.svn'
+ )
+)
+
+
+def FindVCS(cwd=None):
+ """
+ Try to figure out in what VCS' working tree we are.
+
+ @param cwd: working directory (default is os.getcwd())
+ @type cwd: str
+ @return: list of strings describing the discovered vcs types
+ @rtype: list
+ """
+
+ if cwd is None:
+ cwd = os.getcwd()
+
+ outvcs = []
+
+ def seek(depth=None):
+ '''Seek for VCSes that have a top-level data directory only.
+
+ @param depth: integer
+ @returns: list of strings
+ '''
+ retvcs = []
+ pathprep = cwd
+
+ while depth is None or depth > 0:
+ for vcs_type in _FindVCS_data:
+ vcs_dir = os.path.join(pathprep, vcs_type.dir_name)
+ if os.path.isdir(vcs_dir):
+ logging.debug(
+ 'FindVCS: found %(name)s dir: %(vcs_dir)s' % {
+ 'name': vcs_type.name,
+ 'vcs_dir': os.path.abspath(vcs_dir)})
+ retvcs.append(vcs_type.name)
+
+ if retvcs:
+ break
+ pathprep = os.path.join(pathprep, '..')
+ if os.path.realpath(pathprep).strip('/') == '':
+ break
+ if depth is not None:
+ depth = depth - 1
+
+ return retvcs
+
+ # Level zero VCS-es.
+ if os.path.isdir(os.path.join(cwd, 'CVS')):
+ outvcs.append('cvs')
+ if os.path.isdir(os.path.join(cwd, '.svn')): # <1.7
+ outvcs.append('svn')
+
+ # If we already found one of 'level zeros', just take a quick look
+ # at the current directory. Otherwise, seek parents till we get
+ # something or reach root.
+ if outvcs:
+ outvcs.extend(seek(1))
+ else:
+ outvcs = seek()
+
+ if len(outvcs) > 1:
+ # eliminate duplicates, like for svn in bug #391199
+ outvcs = list(set(outvcs))
+
+ return outvcs
+
+
+def vcs_files_to_cps(vcs_file_iter, repodir, repolevel, reposplit, categories):
+ """
+ Iterate over the given modified file paths returned from the vcs,
+ and return a frozenset containing category/pn strings for each
+ modified package.
+ """
+
+ modified_cps = []
+
+ if repolevel == 3:
+ if reposplit[-2] in categories and \
+ next(vcs_file_iter, None) is not None:
+ modified_cps.append("/".join(reposplit[-2:]))
+
+ elif repolevel == 2:
+ category = reposplit[-1]
+ if category in categories:
+ for filename in vcs_file_iter:
+ f_split = filename.split(os.sep)
+ # ['.', pn, ...]
+ if len(f_split) > 2:
+ modified_cps.append(category + "/" + f_split[1])
+
+ else:
+ # repolevel == 1
+ for filename in vcs_file_iter:
+ f_split = filename.split(os.sep)
+ # ['.', category, pn, ...]
+ if len(f_split) > 3 and f_split[1] in categories:
+ modified_cps.append("/".join(f_split[1:3]))
+
+ # Exclude packages that have been removed, since calling
+ # code assumes that the packages exist.
+ return frozenset(x for x in frozenset(modified_cps)
+ if os.path.exists(os.path.join(repodir, x)))
+
+
+def vcs_new_changed(relative_path, mychanged, mynew):
+ '''Check if any vcs tracked file has been modified
+
+ @param relative_path:
+ @param mychanged: iterable of changed files
+ @param mynew: iterable of new files
+ @returns boolean
+ '''
+ for x in chain(mychanged, mynew):
+ if x == relative_path:
+ return True
+ return False
+
+
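
vcs_files_to_cps() above maps the "./"-prefixed paths produced by the Changes classes to category/package strings and drops packages whose directories no longer exist. A small usage sketch, assuming the repoman package is importable (the repository layout is created in a temporary directory just for the example):

    import os
    import tempfile

    from repoman.modules.vcs.vcs import vcs_files_to_cps

    repodir = tempfile.mkdtemp()
    os.makedirs(os.path.join(repodir, "app-misc", "foo"))
    changed = iter(["./app-misc/foo/foo-1.ebuild", "./app-misc/foo/Manifest"])
    cps = vcs_files_to_cps(
        changed, repodir,
        repolevel=1, reposplit=[repodir], categories=frozenset(["app-misc"]))
    print(cps)   # frozenset({'app-misc/foo'})
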
diff --git a/repoman/pym/repoman/profile.py b/repoman/pym/repoman/profile.py
new file mode 100644
index 000000000..50da91728
--- /dev/null
+++ b/repoman/pym/repoman/profile.py
@@ -0,0 +1,87 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+from portage import normalize_path
+from portage import os
+from portage.output import red
+
+
+class ProfileDesc(object):
+ __slots__ = ('abs_path', 'arch', 'status', 'sub_path', 'tree_path',)
+
+ def __init__(self, arch, status, sub_path, tree_path):
+ self.arch = arch
+ self.status = status
+ if sub_path:
+ sub_path = normalize_path(sub_path.lstrip(os.sep))
+ self.sub_path = sub_path
+ self.tree_path = tree_path
+ if tree_path:
+ self.abs_path = os.path.join(tree_path, 'profiles', self.sub_path)
+ else:
+ self.abs_path = tree_path
+
+ def __str__(self):
+ if self.sub_path:
+ return self.sub_path
+ return 'empty profile'
+
+
+valid_profile_types = frozenset(['dev', 'exp', 'stable'])
+
+
+def dev_profile_keywords(profiles):
+ """
+ Create a set of KEYWORDS values that exist in 'dev'
+ profiles. These are used
+ to trigger a message notifying the user when they might
+ want to add the --include-dev option.
+ """
+ type_arch_map = {}
+ for arch, arch_profiles in profiles.items():
+ for prof in arch_profiles:
+ arch_set = type_arch_map.get(prof.status)
+ if arch_set is None:
+ arch_set = set()
+ type_arch_map[prof.status] = arch_set
+ arch_set.add(arch)
+
+ dev_keywords = type_arch_map.get('dev', set())
+ dev_keywords.update(['~' + arch for arch in dev_keywords])
+ return frozenset(dev_keywords)
+
+
+def setup_profile(profile_list):
+ # Ensure that profile sub_path attributes are unique. Process in reverse order
+ # so that profiles with duplicate sub_path from overlays will override
+ # profiles with the same sub_path from parent repos.
+ profiles = {}
+ profile_list.reverse()
+ profile_sub_paths = set()
+ for prof in profile_list:
+ if prof.sub_path in profile_sub_paths:
+ continue
+ profile_sub_paths.add(prof.sub_path)
+ profiles.setdefault(prof.arch, []).append(prof)
+
+ # Use an empty profile for checking dependencies of
+ # packages that have empty KEYWORDS.
+ prof = ProfileDesc('**', 'stable', '', '')
+ profiles.setdefault(prof.arch, []).append(prof)
+ return profiles
+
+
+def check_profiles(profiles, archlist):
+ for x in archlist:
+ if x[0] == "~":
+ continue
+ if x not in profiles:
+ print(red(
+ "\"%s\" doesn't have a valid profile listed in profiles.desc." % x))
+ print(red(
+ "You need to either \"cvs update\" your profiles dir"
+ " or follow this"))
+ print(red(
+ "up with the " + x + " team."))
+ print()
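
setup_profile() and dev_profile_keywords() above feed the --include-dev hint: arches that appear in 'dev' profiles, plus their ~arch variants, end up in the returned keyword set. A usage sketch, assuming repoman is importable (the profile paths are examples):

    from repoman.profile import ProfileDesc, dev_profile_keywords, setup_profile

    profile_list = [
        ProfileDesc('amd64', 'stable', 'default/linux/amd64', '/var/db/repos/gentoo'),
        ProfileDesc('m68k', 'dev', 'default/linux/m68k', '/var/db/repos/gentoo'),
    ]
    profiles = setup_profile(profile_list)
    print(sorted(dev_profile_keywords(profiles)))   # ['m68k', '~m68k']
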
diff --git a/repoman/pym/repoman/qa_data.py b/repoman/pym/repoman/qa_data.py
new file mode 100644
index 000000000..b9475e801
--- /dev/null
+++ b/repoman/pym/repoman/qa_data.py
@@ -0,0 +1,439 @@
+# -*- coding:utf-8 -*-
+
+import logging
+
+from _emerge.Package import Package
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+max_desc_len = 80
+allowed_filename_chars = "a-zA-Z0-9._-+:"
+
+qahelp = {
+ "CVS/Entries.IO_error": (
+ "Attempting to commit, and an IO error was encountered access the"
+ " Entries file"),
+ "ebuild.invalidname": (
+ "Ebuild files with a non-parseable or syntactically incorrect name"
+ " (or using 2.1 versioning extensions)"),
+ "ebuild.namenomatch": (
+ "Ebuild files that do not have the same name as their parent"
+ " directory"),
+ "changelog.ebuildadded": (
+ "An ebuild was added but the ChangeLog was not modified"),
+ "changelog.missing": (
+ "Missing ChangeLog files"),
+ "ebuild.notadded": (
+ "Ebuilds that exist but have not been added to cvs"),
+ "ebuild.patches": (
+ "PATCHES variable should be a bash array to ensure white space safety"),
+ "changelog.notadded": (
+ "ChangeLogs that exist but have not been added to cvs"),
+ "dependency.bad": (
+ "User-visible ebuilds with unsatisfied dependencies"
+ " (matched against *visible* ebuilds)"),
+ "dependency.badmasked": (
+ "Masked ebuilds with unsatisfied dependencies"
+ " (matched against *all* ebuilds)"),
+ "dependency.badindev": (
+ "User-visible ebuilds with unsatisfied dependencies"
+ " (matched against *visible* ebuilds) in developing arch"),
+ "dependency.badmaskedindev": (
+ "Masked ebuilds with unsatisfied dependencies"
+ " (matched against *all* ebuilds) in developing arch"),
+ "dependency.badtilde": (
+ "Uses the ~ dep operator with a non-zero revision part,"
+ " which is useless (the revision is ignored)"),
+ "dependency.missingslot": (
+ "RDEPEND matches more than one SLOT but does not specify a "
+ "slot and/or use the := or :* slot operator"),
+ "dependency.perlcore": (
+ "This ebuild directly depends on a package in perl-core;"
+ " it should use the corresponding virtual instead."),
+ "dependency.syntax": (
+ "Syntax error in dependency string"
+ " (usually an extra/missing space/parenthesis)"),
+ "dependency.unknown": (
+ "Ebuild has a dependency that refers to an unknown package"
+ " (which may be valid if it is a blocker for a renamed/removed package,"
+ " or is an alternative choice provided by an overlay)"),
+ "file.executable": (
+ "Ebuilds, digests, metadata.xml, Manifest, and ChangeLog do not need"
+ " the executable bit"),
+ "file.size": (
+ "Files in the files directory must be under 20 KiB"),
+ "file.size.fatal": (
+ "Files in the files directory must be under 60 KiB"),
+ "file.name": (
+ "File/dir name must be composed"
+ " of only the following chars: %s " % allowed_filename_chars),
+ "file.UTF8": (
+ "File is not UTF8 compliant"),
+ "inherit.deprecated": (
+ "Ebuild inherits a deprecated eclass"),
+ "inherit.missing": (
+ "Ebuild uses functions from an eclass but does not inherit it"),
+ "inherit.unused": (
+ "Ebuild inherits an eclass but does not use it"),
+ "java.eclassesnotused": (
+ "With virtual/jdk in DEPEND you must inherit a java eclass"),
+ "wxwidgets.eclassnotused": (
+ "Ebuild DEPENDs on x11-libs/wxGTK without inheriting wxwidgets.eclass"),
+ "KEYWORDS.dropped": (
+ "Ebuilds that appear to have dropped KEYWORDS for some arch"),
+ "KEYWORDS.missing": (
+ "Ebuilds that have a missing or empty KEYWORDS variable"),
+ "KEYWORDS.stable": (
+ "Ebuilds that have been added directly with stable KEYWORDS"),
+ "KEYWORDS.stupid": (
+ "Ebuilds that use KEYWORDS=-* instead of package.mask"),
+ "LICENSE.missing": (
+ "Ebuilds that have a missing or empty LICENSE variable"),
+ "LICENSE.virtual": (
+ "Virtuals that have a non-empty LICENSE variable"),
+ "DESCRIPTION.missing": (
+ "Ebuilds that have a missing or empty DESCRIPTION variable"),
+ "DESCRIPTION.toolong": (
+ "DESCRIPTION is over %d characters" % max_desc_len),
+ "EAPI.definition": (
+ "EAPI definition does not conform to PMS section 7.3.1"
+ " (first non-comment, non-blank line)"),
+ "EAPI.deprecated": (
+ "Ebuilds that use features that are deprecated in the current EAPI"),
+ "EAPI.incompatible": (
+ "Ebuilds that use features that are only available with a different"
+ " EAPI"),
+ "EAPI.unsupported": (
+ "Ebuilds that have an unsupported EAPI version"
+ " (you must upgrade portage)"),
+ "SLOT.invalid": (
+ "Ebuilds that have a missing or invalid SLOT variable value"),
+ "HOMEPAGE.missing": (
+ "Ebuilds that have a missing or empty HOMEPAGE variable"),
+ "HOMEPAGE.virtual": (
+ "Virtuals that have a non-empty HOMEPAGE variable"),
+ "PDEPEND.suspect": (
+ "PDEPEND contains a package that usually only belongs in DEPEND."),
+ "LICENSE.syntax": (
+ "Syntax error in LICENSE"
+ " (usually an extra/missing space/parenthesis)"),
+ "PROVIDE.syntax": (
+ "Syntax error in PROVIDE"
+ " (usually an extra/missing space/parenthesis)"),
+ "PROPERTIES.syntax": (
+ "Syntax error in PROPERTIES"
+ " (usually an extra/missing space/parenthesis)"),
+ "RESTRICT.syntax": (
+ "Syntax error in RESTRICT"
+ " (usually an extra/missing space/parenthesis)"),
+ "REQUIRED_USE.syntax": (
+ "Syntax error in REQUIRED_USE"
+ " (usually an extra/missing space/parenthesis)"),
+ "SRC_URI.syntax": (
+ "Syntax error in SRC_URI"
+ " (usually an extra/missing space/parenthesis)"),
+ "SRC_URI.mirror": (
+ "A uri listed in profiles/thirdpartymirrors is found in SRC_URI"),
+ "ebuild.syntax": (
+ "Error generating cache entry for ebuild;"
+ " typically caused by ebuild syntax error"
+ " or digest verification failure"),
+ "ebuild.output": (
+ "A simple sourcing of the ebuild produces output;"
+ " this breaks ebuild policy."),
+ "ebuild.nesteddie": (
+ "Placing 'die' inside ( ) prints an error,"
+ " but doesn't stop the ebuild."),
+ "variable.invalidchar": (
+ "A variable contains an invalid character"
+ " that is not part of the ASCII character set"),
+ "variable.readonly": (
+ "Assigning a readonly variable"),
+ "variable.usedwithhelpers": (
+ "Ebuild uses D, ROOT, ED, EROOT or EPREFIX with helpers"),
+ "LIVEVCS.stable": (
+ "This ebuild is a live checkout from a VCS but has stable keywords."),
+ "LIVEVCS.unmasked": (
+ "This ebuild is a live checkout from a VCS but has keywords"
+ " and is not masked in the global package.mask."),
+ "IUSE.invalid": (
+ "This ebuild has a variable in IUSE"
+ " that is not in the use.desc or its metadata.xml file"),
+ "IUSE.missing": (
+ "This ebuild has a USE conditional"
+ " which references a flag that is not listed in IUSE"),
+ "IUSE.rubydeprecated": (
+ "The ebuild has set a ruby interpreter in USE_RUBY,"
+ " that is not available as a ruby target anymore"),
+ "LICENSE.invalid": (
+ "This ebuild is listing a license"
+ " that doesnt exist in portages license/ dir."),
+ "LICENSE.deprecated": (
+ "This ebuild is listing a deprecated license."),
+ "KEYWORDS.invalid": (
+ "This ebuild contains KEYWORDS"
+ " that are not listed in profiles/arch.list"
+ " or for which no valid profile was found"),
+ "RDEPEND.implicit": (
+ "RDEPEND is unset in the ebuild"
+ " which triggers implicit RDEPEND=$DEPEND assignment"
+ " (prior to EAPI 4)"),
+ "RDEPEND.suspect": (
+ "RDEPEND contains a package that usually only belongs in DEPEND."),
+ "RESTRICT.invalid": (
+ "This ebuild contains invalid RESTRICT values."),
+ "digest.assumed": (
+ "Existing digest must be assumed correct (Package level only)"),
+ "digest.missing": (
+ "Some files listed in SRC_URI aren't referenced in the Manifest"),
+ "digest.unused": (
+ "Some files listed in the Manifest aren't referenced in SRC_URI"),
+ "ebuild.majorsyn": (
+ "This ebuild has a major syntax error"
+ " that may cause the ebuild to fail partially or fully"),
+ "ebuild.minorsyn": (
+ "This ebuild has a minor syntax error"
+ " that contravenes gentoo coding style"),
+ "ebuild.badheader": (
+ "This ebuild has a malformed header"),
+ "manifest.bad": (
+ "Manifest has missing or incorrect digests"),
+ "metadata.missing": (
+ "Missing metadata.xml files"),
+ "metadata.bad": (
+ "Bad metadata.xml files"),
+ "metadata.warning": (
+ "Warnings in metadata.xml files"),
+ "portage.internal": (
+ "The ebuild uses an internal Portage function or variable"),
+ "repo.eapi.banned": (
+ "The ebuild uses an EAPI which is"
+ " banned by the repository's metadata/layout.conf settings"),
+ "repo.eapi.deprecated": (
+ "The ebuild uses an EAPI which is"
+ " deprecated by the repository's metadata/layout.conf settings"),
+ "virtual.oldstyle": (
+ "The ebuild PROVIDEs an old-style virtual (see GLEP 37)"),
+ "virtual.suspect": (
+ "Ebuild contains a package"
+ " that usually should be pulled via virtual/, not directly."),
+ "usage.obsolete": (
+ "The ebuild makes use of an obsolete construct"),
+ "upstream.workaround": (
+ "The ebuild works around an upstream bug,"
+ " an upstream bug should be filed and tracked in bugs.gentoo.org")
+}
+
+qacats = list(qahelp)
+qacats.sort()
+
+qawarnings = set((
+ "changelog.missing",
+ "changelog.notadded",
+ "dependency.unknown",
+ "digest.assumed",
+ "digest.unused",
+ "ebuild.notadded",
+ "ebuild.nesteddie",
+ "dependency.badmasked",
+ "dependency.badindev",
+ "dependency.badmaskedindev",
+ "dependency.badtilde",
+ "dependency.missingslot",
+ "dependency.perlcore",
+ "DESCRIPTION.toolong",
+ "EAPI.deprecated",
+ "HOMEPAGE.virtual",
+ "LICENSE.deprecated",
+ "LICENSE.virtual",
+ "KEYWORDS.dropped",
+ "KEYWORDS.stupid",
+ "KEYWORDS.missing",
+ "PDEPEND.suspect",
+ "RDEPEND.implicit",
+ "RDEPEND.suspect",
+ "virtual.suspect",
+ "RESTRICT.invalid",
+ "ebuild.minorsyn",
+ "ebuild.badheader",
+ "ebuild.patches",
+ "file.size",
+ "inherit.unused",
+ "inherit.deprecated",
+ "java.eclassesnotused",
+ "wxwidgets.eclassnotused",
+ "metadata.warning",
+ "portage.internal",
+ "repo.eapi.deprecated",
+ "usage.obsolete",
+ "upstream.workaround",
+ "LIVEVCS.stable",
+ "LIVEVCS.unmasked",
+ "IUSE.rubydeprecated",
+))
+
+
+missingvars = ["KEYWORDS", "LICENSE", "DESCRIPTION", "HOMEPAGE"]
+allvars = set(x for x in portage.auxdbkeys if not x.startswith("UNUSED_"))
+allvars.update(Package.metadata_keys)
+allvars = sorted(allvars)
+
+for x in missingvars:
+ x += ".missing"
+ if x not in qacats:
+ logging.warning('* missingvars values need to be added to qahelp ("%s")' % x)
+ qacats.append(x)
+ qawarnings.add(x)
+
+valid_restrict = frozenset([
+ "binchecks", "bindist", "fetch", "installsources", "mirror",
+ "preserve-libs", "primaryuri", "splitdebug", "strip", "test", "userpriv"])
+
+
+suspect_rdepend = frozenset([
+ "app-arch/cabextract",
+ "app-arch/rpm2targz",
+ "app-doc/doxygen",
+ "dev-lang/nasm",
+ "dev-lang/swig",
+ "dev-lang/yasm",
+ "dev-perl/extutils-pkgconfig",
+ "dev-util/byacc",
+ "dev-util/cmake",
+ "dev-util/ftjam",
+ "dev-util/gperf",
+ "dev-util/gtk-doc",
+ "dev-util/gtk-doc-am",
+ "dev-util/intltool",
+ "dev-util/jam",
+ "dev-util/pkg-config-lite",
+ "dev-util/pkgconf",
+ "dev-util/pkgconfig",
+ "dev-util/pkgconfig-openbsd",
+ "dev-util/scons",
+ "dev-util/unifdef",
+ "dev-util/yacc",
+ "media-gfx/ebdftopcf",
+ "sys-apps/help2man",
+ "sys-devel/autoconf",
+ "sys-devel/automake",
+ "sys-devel/bin86",
+ "sys-devel/bison",
+ "sys-devel/dev86",
+ "sys-devel/flex",
+ "sys-devel/m4",
+ "sys-devel/pmake",
+ "virtual/linux-sources",
+ "virtual/pkgconfig",
+ "x11-misc/bdftopcf",
+ "x11-misc/imake",
+])
+
+suspect_virtual = {
+ "dev-util/pkg-config-lite": "virtual/pkgconfig",
+ "dev-util/pkgconf": "virtual/pkgconfig",
+ "dev-util/pkgconfig": "virtual/pkgconfig",
+ "dev-util/pkgconfig-openbsd": "virtual/pkgconfig",
+ "dev-libs/libusb": "virtual/libusb",
+ "dev-libs/libusbx": "virtual/libusb",
+ "dev-libs/libusb-compat": "virtual/libusb",
+}
+
+ruby_deprecated = frozenset([
+ "ruby_targets_ree18",
+ "ruby_targets_ruby18",
+ "ruby_targets_ruby19",
+])
+
+
+# file.executable
+no_exec = frozenset(["Manifest", "ChangeLog", "metadata.xml"])
+
+
+def format_qa_output(
+ formatter, fails, dofull, dofail, options, qawarnings):
+ """Helper function that formats output properly
+
+ @param formatter: an instance of Formatter
+ @type formatter: Formatter
+ @param fails: dict of qa status failures
+ @type fails: dict
+ @param dofull: Whether to print full results or a summary
+ @type dofull: boolean
+ @param dofail: Whether failure was hard or soft
+ @type dofail: boolean
+ @param options: The command-line options provided to repoman
+ @type options: Namespace
+ @param qawarnings: the set of warning types
+ @type qawarnings: set
+ @return: None (modifies formatter)
+ """
+ full = options.mode == 'full'
+ # we only want key value pairs where value > 0
+ for category in sorted(fails):
+ number = len(fails[category])
+ formatter.add_literal_data(" " + category)
+ spacing_width = 30 - len(category)
+ if category in qawarnings:
+ formatter.push_style("WARN")
+ else:
+ formatter.push_style("BAD")
+ formatter.add_literal_data(" [fatal]")
+ spacing_width -= 8
+
+ formatter.add_literal_data(" " * spacing_width)
+ formatter.add_literal_data("%s" % number)
+ formatter.pop_style()
+ formatter.add_line_break()
+ if not dofull:
+ if not full and dofail and category in qawarnings:
+ # warnings are considered noise when there are failures
+ continue
+ fails_list = fails[category]
+ if not full and len(fails_list) > 12:
+ fails_list = fails_list[:12]
+ for failure in fails_list:
+ formatter.add_literal_data(" " + failure)
+ formatter.add_line_break()
+
+
+def format_qa_output_column(
+ formatter, fails, dofull, dofail, options, qawarnings):
+ """Helper function that formats output in a machine-parseable column format
+
+ @param formatter: an instance of Formatter
+ @type formatter: Formatter
+ @param fails: dict of qa status failures
+ @type fails: dict
+ @param dofull: Whether to print full results or a summary
+ @type dofull: boolean
+ @param dofail: Whether failure was hard or soft
+ @type dofail: boolean
+ @param options: The command-line options provided to repoman
+ @type options: Namespace
+ @param qawarnings: the set of warning types
+ @type qawarnings: set
+ @return: None (modifies formatter)
+ """
+ full = options.mode == 'full'
+ for category in sorted(fails):
+ number = len(fails[category])
+ formatter.add_literal_data("NumberOf " + category + " ")
+ if category in qawarnings:
+ formatter.push_style("WARN")
+ else:
+ formatter.push_style("BAD")
+ formatter.add_literal_data("%s" % number)
+ formatter.pop_style()
+ formatter.add_line_break()
+ if not dofull:
+ if not full and dofail and category in qawarnings:
+ # warnings are considered noise when there are failures
+ continue
+ fails_list = fails[category]
+ if not full and len(fails_list) > 12:
+ fails_list = fails_list[:12]
+ for failure in fails_list:
+ formatter.add_literal_data(category + " " + failure)
+ formatter.add_line_break()
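
format_qa_output() above only needs an object providing the four formatter methods it calls, plus the parsed options. A minimal sketch with a plain-text stand-in formatter (not portage's real output machinery), assuming repoman is importable; the failure entries are invented:

    import sys
    from argparse import Namespace

    from repoman.qa_data import format_qa_output, qawarnings

    class PlainFormatter(object):
        def add_literal_data(self, s): sys.stdout.write(s)
        def push_style(self, style): pass
        def pop_style(self): pass
        def add_line_break(self): sys.stdout.write("\n")

    fails = {
        "ebuild.minorsyn": ["app-misc/foo/foo-1.ebuild: trailing whitespace"],
        "manifest.bad": ["app-misc/foo/Manifest"],
    }
    # Prints one summary line per category; manifest.bad is not in qawarnings,
    # so it is marked "[fatal]".
    format_qa_output(
        PlainFormatter(), fails, dofull=True, dofail=True,
        options=Namespace(mode="commit"), qawarnings=qawarnings)
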
diff --git a/repoman/pym/repoman/qa_tracker.py b/repoman/pym/repoman/qa_tracker.py
new file mode 100644
index 000000000..9bfe0e241
--- /dev/null
+++ b/repoman/pym/repoman/qa_tracker.py
@@ -0,0 +1,45 @@
+
+import logging
+import sys
+
+from repoman.qa_data import qacats, qawarnings
+
+
+class QATracker(object):
+ '''Track all occurrences of Q/A problems detected'''
+
+ def __init__(self):
+ self.fails = {}
+ self.warns = {}
+
+ def add_error(self, detected_qa, info):
+ '''Add the Q/A error to the database of detected problems
+
+ @param detected_qa: string, member of qa_data.qacats list
+ @param info: string, details of the detected problem
+ '''
+ if detected_qa not in qacats:
+ logging.error(
+ 'QATracker: Exiting on error. Unknown detected_qa type passed '
+ 'in to add_error(): %s, %s' % (detected_qa, info))
+ sys.exit(1)
+ try:
+ self.fails[detected_qa].append(info)
+ except KeyError:
+ self.fails[detected_qa] = [info]
+
+ def add_warning(self, detected_qa, info):
+ '''Add the Q/A warning to the database of detected problems
+
+ @param detected_qa: string, member of qa_data.qawarnings list
+ @param info: string, details of the detected problem
+ '''
+ if detected_qa not in qawarnings:
+ logging.error(
+ 'QATracker: Exiting on error. Unknown detected_qa type passed '
+ 'in to add_warning(): %s, %s' % (detected_qa, info))
+ sys.exit(1)
+ try:
+ self.warns[detected_qa].append(info)
+ except KeyError:
+ self.warns[detected_qa] = [info]
diff --git a/repoman/pym/repoman/repos.py b/repoman/pym/repoman/repos.py
new file mode 100644
index 000000000..39f53c180
--- /dev/null
+++ b/repoman/pym/repoman/repos.py
@@ -0,0 +1,298 @@
+# -*- coding:utf-8 -*-
+
+
+import io
+import logging
+import re
+import sys
+import textwrap
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage import os
+from portage import _encodings
+from portage import _unicode_encode
+
+from repoman.errors import err
+from repoman.profile import ProfileDesc, valid_profile_types
+
+GPG_KEY_ID_REGEX = r'(0x)?([0-9a-fA-F]{8}){1,5}!?'
+bad = portage.output.create_color_func("BAD")
+
+
+class RepoSettings(object):
+ '''Holds our repo specific settings'''
+
+ def __init__(
+ self, config_root, portdir, portdir_overlay,
+ repoman_settings=None, vcs_settings=None, options=None,
+ qawarnings=None):
+ self.config_root = config_root
+ self.repoman_settings = repoman_settings
+ self.vcs_settings = vcs_settings
+
+ self.repositories = self.repoman_settings.repositories
+
+ # Ensure that current repository is in the list of enabled repositories.
+ self.repodir = os.path.realpath(portdir_overlay)
+ try:
+ self.repositories.get_repo_for_location(self.repodir)
+ except KeyError:
+ self._add_repo(config_root, portdir_overlay)
+
+ self.root = self.repoman_settings['EROOT']
+ self.trees = {
+ self.root: {'porttree': portage.portagetree(settings=self.repoman_settings)}
+ }
+ self.portdb = self.trees[self.root]['porttree'].dbapi
+
+ # Constrain dependency resolution to the master(s)
+ # that are specified in layout.conf.
+ self.repo_config = self.repositories.get_repo_for_location(self.repodir)
+ self.portdb.porttrees = list(self.repo_config.eclass_db.porttrees)
+ self.portdir = self.portdb.porttrees[0]
+ self.commit_env = os.environ.copy()
+ # list() is for iteration on a copy.
+ for repo in list(self.repositories):
+ # all paths are canonical
+ if repo.location not in self.repo_config.eclass_db.porttrees:
+ del self.repositories[repo.name]
+
+ if self.repo_config.allow_provide_virtual:
+ qawarnings.add("virtual.oldstyle")
+
+ if self.repo_config.sign_commit and options.mode in ("commit", "fix", "manifest"):
+ if vcs_settings.vcs:
+ func = getattr(self, '_vcs_gpg_%s' % vcs_settings.vcs)
+ func()
+ else:
+ logging.warning("No VCS type detected, unable to sign the commit")
+
+ # In order to disable manifest signatures, repos may set
+ # "sign-manifests = false" in metadata/layout.conf. This
+ # can be used to prevent merge conflicts like those that
+ # thin-manifests is designed to prevent.
+ self.sign_manifests = "sign" in self.repoman_settings.features and \
+ self.repo_config.sign_manifest
+
+ if self.repo_config.sign_manifest and self.repo_config.name == "gentoo" and \
+ options.mode in ("commit",) and not self.sign_manifests:
+ msg = (
+ "The '%s' repository has manifest signatures enabled, "
+ "but FEATURES=sign is currently disabled. In order to avoid this "
+ "warning, enable FEATURES=sign in make.conf. Alternatively, "
+ "repositories can disable manifest signatures by setting "
+ "'sign-manifests = false' in metadata/layout.conf.") % (
+ self.repo_config.name,)
+ for line in textwrap.wrap(msg, 60):
+ logging.warn(line)
+
+ is_commit = options.mode in ("commit",)
+ valid_gpg_key = self.repoman_settings.get("PORTAGE_GPG_KEY") and re.match(
+ r'^%s$' % GPG_KEY_ID_REGEX, self.repoman_settings["PORTAGE_GPG_KEY"])
+
+ if self.sign_manifests and is_commit and not valid_gpg_key:
+ logging.error(
+ "PORTAGE_GPG_KEY value is invalid: %s" %
+ self.repoman_settings["PORTAGE_GPG_KEY"])
+ sys.exit(1)
+
+ manifest_hashes = self.repo_config.manifest_hashes
+ if manifest_hashes is None:
+ manifest_hashes = portage.const.MANIFEST2_HASH_DEFAULTS
+
+ if options.mode in ("commit", "fix", "manifest"):
+ if portage.const.MANIFEST2_REQUIRED_HASH not in manifest_hashes:
+ msg = (
+ "The 'manifest-hashes' setting in the '%s' repository's "
+ "metadata/layout.conf does not contain the '%s' hash which "
+ "is required by this portage version. You will have to "
+ "upgrade portage if you want to generate valid manifests for "
+ "this repository.") % (
+ self.repo_config.name, portage.const.MANIFEST2_REQUIRED_HASH)
+ for line in textwrap.wrap(msg, 70):
+ logging.error(line)
+ sys.exit(1)
+
+ unsupported_hashes = manifest_hashes.difference(
+ portage.const.MANIFEST2_HASH_FUNCTIONS)
+ if unsupported_hashes:
+ msg = (
+ "The 'manifest-hashes' setting in the '%s' repository's "
+ "metadata/layout.conf contains one or more hash types '%s' "
+ "which are not supported by this portage version. You will "
+ "have to upgrade portage if you want to generate valid "
+ "manifests for this repository.") % (
+ self.repo_config.name, " ".join(sorted(unsupported_hashes)))
+ for line in textwrap.wrap(msg, 70):
+ logging.error(line)
+ sys.exit(1)
+
+ def _add_repo(self, config_root, portdir_overlay):
+ self.repo_conf = portage.repository.config
+ self.repo_name = self.repo_conf.RepoConfig._read_valid_repo_name(
+ portdir_overlay)[0]
+ self.layout_conf_data = self.repo_conf.parse_layout_conf(portdir_overlay)[0]
+ if self.layout_conf_data['repo-name']:
+ self.repo_name = self.layout_conf_data['repo-name']
+ tmp_conf_file = io.StringIO(textwrap.dedent("""
+ [%s]
+ location = %s
+ """) % (self.repo_name, portdir_overlay))
+ # Ensure that the repository corresponding to $PWD overrides a
+ # repository of the same name referenced by the existing PORTDIR
+ # or PORTDIR_OVERLAY settings.
+ self.repoman_settings['PORTDIR_OVERLAY'] = "%s %s" % (
+ self.repoman_settings.get('PORTDIR_OVERLAY', ''),
+ portage._shell_quote(portdir_overlay))
+ self.repositories = self.repo_conf.load_repository_config(
+ self.repoman_settings, extra_files=[tmp_conf_file])
+ # We have to call the config constructor again so that attributes
+ # dependent on config.repositories are initialized correctly.
+ self.repoman_settings = portage.config(
+ config_root=config_root, local_config=False,
+ repositories=self.repositories)
+
+ ##########
+ # future vcs plugin functions
+ ##########
+
+ def _vcs_gpg_bzr(self):
+ pass
+
+ def _vcs_gpg_cvs(self):
+ pass
+
+ def _vcs_gpg_git(self):
+ # NOTE: It's possible to use --gpg-sign=key_id to specify the key in
+ # the commit arguments. If key_id is unspecified, then it must be
+ # configured by `git config user.signingkey key_id`.
+ self.vcs_settings.vcs_local_opts.append("--gpg-sign")
+ if self.repoman_settings.get("PORTAGE_GPG_DIR"):
+ # Pass GNUPGHOME to git for bug #462362.
+ self.commit_env["GNUPGHOME"] = self.repoman_settings["PORTAGE_GPG_DIR"]
+
+ # Pass GPG_TTY to git for bug #477728.
+ try:
+ self.commit_env["GPG_TTY"] = os.ttyname(sys.stdin.fileno())
+ except OSError:
+ pass
+
+ def _vcs_gpg_hg(self):
+ pass
+
+ def _vcs_gpg_svn(self):
+ pass
+
+
+def list_checks(kwlist, liclist, uselist, repoman_settings):
+ liclist_deprecated = set()
+ if "DEPRECATED" in repoman_settings._license_manager._license_groups:
+ liclist_deprecated.update(
+ repoman_settings._license_manager.expandLicenseTokens(["@DEPRECATED"]))
+
+ if not liclist:
+ logging.fatal("Couldn't find licenses?")
+ sys.exit(1)
+
+ if not kwlist:
+ logging.fatal("Couldn't read KEYWORDS from arch.list")
+ sys.exit(1)
+
+ if not uselist:
+ logging.fatal("Couldn't find use.desc?")
+ sys.exit(1)
+ return liclist_deprecated
+
+
+def repo_metadata(portdb, repoman_settings):
+ # get lists of valid keywords, licenses, and use
+ kwlist = set()
+ liclist = set()
+ uselist = set()
+ profile_list = []
+ global_pmasklines = []
+
+ for path in portdb.porttrees:
+ try:
+ liclist.update(os.listdir(os.path.join(path, "licenses")))
+ except OSError:
+ pass
+ kwlist.update(
+ portage.grabfile(os.path.join(path, "profiles", "arch.list")))
+
+ use_desc = portage.grabfile(os.path.join(path, 'profiles', 'use.desc'))
+ for x in use_desc:
+ x = x.split()
+ if x:
+ uselist.add(x[0])
+
+ expand_desc_dir = os.path.join(path, 'profiles', 'desc')
+ try:
+ expand_list = os.listdir(expand_desc_dir)
+ except OSError:
+ pass
+ else:
+ for fn in expand_list:
+ if not fn[-5:] == '.desc':
+ continue
+ use_prefix = fn[:-5].lower() + '_'
+ for x in portage.grabfile(os.path.join(expand_desc_dir, fn)):
+ x = x.split()
+ if x:
+ uselist.add(use_prefix + x[0])
+
+ global_pmasklines.append(
+ portage.util.grabfile_package(
+ os.path.join(path, 'profiles', 'package.mask'),
+ recursive=1, verify_eapi=True))
+
+ desc_path = os.path.join(path, 'profiles', 'profiles.desc')
+ try:
+ desc_file = io.open(
+ _unicode_encode(
+ desc_path, encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['repo.content'], errors='replace')
+ except EnvironmentError:
+ pass
+ else:
+ for i, x in enumerate(desc_file):
+ if x[0] == "#":
+ continue
+ arch = x.split()
+ if len(arch) == 0:
+ continue
+ if len(arch) != 3:
+ err(
+ "wrong format: \"%s\" in %s line %d" %
+ (bad(x.strip()), desc_path, i + 1, ))
+ elif arch[0] not in kwlist:
+ err(
+ "invalid arch: \"%s\" in %s line %d" %
+ (bad(arch[0]), desc_path, i + 1, ))
+ elif arch[2] not in valid_profile_types:
+ err(
+ "invalid profile type: \"%s\" in %s line %d" %
+ (bad(arch[2]), desc_path, i + 1, ))
+ profile_desc = ProfileDesc(arch[0], arch[2], arch[1], path)
+ if not os.path.isdir(profile_desc.abs_path):
+ logging.error(
+ "Invalid %s profile (%s) for arch %s in %s line %d",
+ arch[2], arch[1], arch[0], desc_path, i + 1)
+ continue
+ if os.path.exists(
+ os.path.join(profile_desc.abs_path, 'deprecated')):
+ continue
+ profile_list.append(profile_desc)
+ desc_file.close()
+
+ global_pmasklines = portage.util.stack_lists(global_pmasklines, incremental=1)
+ global_pmaskdict = {}
+ for x in global_pmasklines:
+ global_pmaskdict.setdefault(x.cp, []).append(x)
+ del global_pmasklines
+
+ return (
+ kwlist, liclist, uselist, profile_list, global_pmaskdict,
+ list_checks(kwlist, liclist, uselist, repoman_settings))
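A small illustration of the PORTAGE_GPG_KEY validation performed in RepoSettings.__init__ above; the key values are made-up examples.

import re

GPG_KEY_ID_REGEX = r'(0x)?([0-9a-fA-F]{8}){1,5}!?'

def is_valid_gpg_key(value):
    # Same anchored match as RepoSettings: 1-5 groups of 8 hex digits,
    # optionally prefixed with "0x" and optionally suffixed with "!".
    return re.match(r'^%s$' % GPG_KEY_ID_REGEX, value) is not None

print(is_valid_gpg_key("0x0123ABCD"))                                # True (short key id)
print(is_valid_gpg_key("0123456789ABCDEF0123456789ABCDEF01234567"))  # True (full fingerprint)
print(is_valid_gpg_key("not-a-key"))                                 # False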
diff --git a/repoman/pym/repoman/scanner.py b/repoman/pym/repoman/scanner.py
new file mode 100644
index 000000000..48d9001a9
--- /dev/null
+++ b/repoman/pym/repoman/scanner.py
@@ -0,0 +1,424 @@
+# -*- coding:utf-8 -*-
+
+from __future__ import print_function, unicode_literals
+
+import logging
+from itertools import chain
+
+import portage
+from portage import normalize_path
+from portage import os
+from portage.output import green
+from portage.util.futures.extendedfutures import ExtendedFuture
+from repoman.metadata import get_metadata_xsd
+from repoman.modules.commit import repochecks
+from repoman.profile import check_profiles, dev_profile_keywords, setup_profile
+from repoman.repos import repo_metadata
+from repoman.modules.scan.scan import scan
+from repoman.modules.vcs.vcs import vcs_files_to_cps
+
+from portage.module import Modules
+
+MODULES_PATH = os.path.join(os.path.dirname(__file__), "modules", "scan")
+# initial development debug info
+logging.debug("module path: %s", MODULES_PATH)
+
+MODULE_CONTROLLER = Modules(path=MODULES_PATH, namepath="repoman.modules.scan")
+
+MODULE_NAMES = MODULE_CONTROLLER.module_names[:]
+# initial development debug info
+logging.debug("module_names: %s", MODULE_NAMES)
+
+DATA_TYPES = {'dict': dict, 'Future': ExtendedFuture, 'list': list, 'set': set}
+
+
+class Scanner(object):
+ '''Primary scan class. Runs all of the small Q/A tests and checks'''
+
+ def __init__(self, repo_settings, myreporoot, config_root, options,
+ vcs_settings, mydir, env):
+ '''Class __init__'''
+ self.repo_settings = repo_settings
+ self.config_root = config_root
+ self.options = options
+ self.vcs_settings = vcs_settings
+ self.env = env
+
+ # Repoman sets its own ACCEPT_KEYWORDS and we don't want it to
+ # behave incrementally.
+ self.repoman_incrementals = tuple(
+ x for x in portage.const.INCREMENTALS if x != 'ACCEPT_KEYWORDS')
+
+ self.categories = []
+ for path in self.repo_settings.repo_config.eclass_db.porttrees:
+ self.categories.extend(portage.util.grabfile(
+ os.path.join(path, 'profiles', 'categories')))
+ self.repo_settings.repoman_settings.categories = frozenset(
+ portage.util.stack_lists([self.categories], incremental=1))
+ self.categories = self.repo_settings.repoman_settings.categories
+
+ self.portdb = repo_settings.portdb
+ self.portdb.settings = self.repo_settings.repoman_settings
+ # We really only need to cache the metadata that's necessary for visibility
+ # filtering. Anything else can be discarded to reduce memory consumption.
+ if self.options.mode != "manifest" and self.options.digest != "y":
+ # Don't do this when generating manifests, since that uses
+ # additional keys if spawn_nofetch is called (RESTRICT and
+ # DEFINED_PHASES).
+ self.portdb._aux_cache_keys.clear()
+ self.portdb._aux_cache_keys.update(
+ ["EAPI", "IUSE", "KEYWORDS", "repository", "SLOT"])
+
+ self.reposplit = myreporoot.split(os.path.sep)
+ self.repolevel = len(self.reposplit)
+
+ if self.options.mode == 'commit':
+ repochecks.commit_check(self.repolevel, self.reposplit)
+ repochecks.conflict_check(self.vcs_settings, self.options)
+
+ # Make startdir relative to the canonical repodir, so that we can pass
+ # it to digestgen and it won't have to be canonicalized again.
+ if self.repolevel == 1:
+ startdir = self.repo_settings.repodir
+ else:
+ startdir = normalize_path(mydir)
+ startdir = os.path.join(
+ self.repo_settings.repodir, *startdir.split(os.sep)[-2 - self.repolevel + 3:])
+
+ # get lists of valid keywords, licenses, and use
+ new_data = repo_metadata(self.portdb, self.repo_settings.repoman_settings)
+ kwlist, liclist, uselist, profile_list, \
+ global_pmaskdict, liclist_deprecated = new_data
+ self.repo_metadata = {
+ 'kwlist': kwlist,
+ 'liclist': liclist,
+ 'uselist': uselist,
+ 'profile_list': profile_list,
+ 'pmaskdict': global_pmaskdict,
+ 'lic_deprecated': liclist_deprecated,
+ }
+
+ self.repo_settings.repoman_settings['PORTAGE_ARCHLIST'] = ' '.join(sorted(kwlist))
+ self.repo_settings.repoman_settings.backup_changes('PORTAGE_ARCHLIST')
+
+ profiles = setup_profile(profile_list)
+
+ check_profiles(profiles, self.repo_settings.repoman_settings.archlist())
+
+ scanlist = scan(self.repolevel, self.reposplit, startdir, self.categories, self.repo_settings)
+
+ self.dev_keywords = dev_profile_keywords(profiles)
+
+ self.qatracker = self.vcs_settings.qatracker
+
+ if self.options.echangelog is None and self.repo_settings.repo_config.update_changelog:
+ self.options.echangelog = 'y'
+
+ if self.vcs_settings.vcs is None:
+ self.options.echangelog = 'n'
+
+ checks = {}
+ # The --echangelog option causes automatic ChangeLog generation,
+ # which invalidates changelog.ebuildadded and changelog.missing
+ # checks.
+ # Note: some projects do not keep ChangeLogs in distributed SCMs;
+ # the ChangeLog is instead generated server side from the SCM log
+ # before the package moves to the rsync server.
+ # This is done to avoid merge collisions.
+ # Gentoo's Council decided to always use the ChangeLog file.
+ # TODO: shouldn't this just be switched on the repo, instead of the VCS?
+ is_echangelog_enabled = self.options.echangelog in ('y', 'force')
+ self.vcs_settings.vcs_is_cvs_or_svn = self.vcs_settings.vcs in ('cvs', 'svn')
+ checks['changelog'] = not is_echangelog_enabled and self.vcs_settings.vcs_is_cvs_or_svn
+
+ if self.options.mode == "manifest" or self.options.quiet:
+ pass
+ elif self.options.pretend:
+ print(green("\nRepoMan does a once-over of the neighborhood..."))
+ else:
+ print(green("\nRepoMan scours the neighborhood..."))
+
+ self.changed = self.vcs_settings.changes
+ # bypass VCS operations when they are not needed
+ if (self.options.if_modified == "y" or
+ self.options.mode not in ("manifest", "manifest-check")):
+ self.changed.scan()
+
+ self.have = {
+ 'pmasked': False,
+ 'dev_keywords': False,
+ }
+
+ # NOTE: match-all caches are not shared due to potential
+ # differences between profiles in _get_implicit_iuse.
+ self.caches = {
+ 'arch': {},
+ 'arch_xmatch': {},
+ 'shared_xmatch': {"cp-list": {}},
+ }
+
+ self.include_arches = None
+ if self.options.include_arches:
+ self.include_arches = set()
+ self.include_arches.update(*[x.split() for x in self.options.include_arches])
+
+ # Disable the "self.modules['Ebuild'].notadded" check when not in commit mode and
+ # running `svn status` in every package dir will be too expensive.
+ checks['ebuild_notadded'] = not \
+ (self.vcs_settings.vcs == "svn" and self.repolevel < 3 and self.options.mode != "commit")
+
+ self.effective_scanlist = scanlist
+ if self.options.if_modified == "y":
+ self.effective_scanlist = sorted(vcs_files_to_cps(
+ chain(self.changed.changed, self.changed.new, self.changed.removed),
+ self.repo_settings.repodir,
+ self.repolevel, self.reposplit, self.categories))
+
+ # Create our kwargs dict here to initialize the plugins with
+ self.kwargs = {
+ "repo_settings": self.repo_settings,
+ "portdb": self.portdb,
+ "qatracker": self.qatracker,
+ "vcs_settings": self.vcs_settings,
+ "options": self.options,
+ "metadata_xsd": get_metadata_xsd(self.repo_settings),
+ "uselist": uselist,
+ "checks": checks,
+ "repo_metadata": self.repo_metadata,
+ "profiles": profiles,
+ "include_arches": self.include_arches,
+ "caches": self.caches,
+ "repoman_incrementals": self.repoman_incrementals,
+ "env": self.env,
+ "have": self.have,
+ "dev_keywords": self.dev_keywords,
+ }
+ # initialize the plugin checks here
+ self.modules = {}
+ self._ext_futures = {}
+ self.pkg_level_futures = None
+
+ def set_kwargs(self, mod):
+ '''Creates a limited set of kwargs to pass to the module's __init__()
+
+ @param mod: module name string
+ @returns: dictionary
+ '''
+ kwargs = {}
+ for key in MODULE_CONTROLLER.modules[mod]['mod_kwargs']:
+ kwargs[key] = self.kwargs[key]
+ return kwargs
+
+ def set_func_kwargs(self, mod, dynamic_data=None):
+ '''Updates the dynamic_data dictionary with any new key, value pairs.
+ Creates a limited set of kwargs to pass to the module functions being run
+
+ @param mod: module name string
+ @param dynamic_data: dictionary structure
+ @returns: dictionary
+ '''
+ func_kwargs = MODULE_CONTROLLER.modules[mod]['func_kwargs']
+ # determine new keys
+ required = set(list(func_kwargs))
+ exist = set(list(dynamic_data))
+ new = required.difference(exist)
+ # update dynamic_data with initialized entries
+ for key in new:
+ logging.debug("set_func_kwargs(); adding: %s, %s",
+ key, func_kwargs[key])
+ if func_kwargs[key][0] in ['Future', 'ExtendedFuture']:
+ if key not in self._ext_futures:
+ logging.debug(
+ "Adding a new key: %s to the ExtendedFuture dict", key)
+ self._ext_futures[key] = func_kwargs[key]
+ self._set_future(dynamic_data, key, func_kwargs[key])
+ else: # builtin python data type
+ dynamic_data[key] = DATA_TYPES[func_kwargs[key][0]]()
+ kwargs = {}
+ for key in required:
+ kwargs[key] = dynamic_data[key]
+ return kwargs
+
+ def reset_futures(self, dynamic_data):
+ '''Reset any Future data types
+
+ @param dynamic_data: dictionary
+ '''
+ for key in list(self._ext_futures):
+ if key not in self.pkg_level_futures:
+ self._set_future(dynamic_data, key, self._ext_futures[key])
+
+ @staticmethod
+ def _set_future(dynamic_data, key, data):
+ '''Set a dynamic_data key to a new ExtendedFuture instance
+
+ @param dynamic_data: dictionary
+ @param key: dictionary key to assign
+ @param data: tuple of (data type name, default value)
+ '''
+ if data[0] in ['Future', 'ExtendedFuture']:
+ if data[1] in ['UNSET']:
+ dynamic_data[key] = ExtendedFuture()
+ else:
+ if data[1] in DATA_TYPES:
+ default = DATA_TYPES[data[1]]()
+ else:
+ default = data[1]
+ dynamic_data[key] = ExtendedFuture(default)
+
+ def scan_pkgs(self, can_force):
+ for xpkg in self.effective_scanlist:
+ xpkg_continue = False
+ # ebuilds and digests added to cvs respectively.
+ logging.info("checking package %s", xpkg)
+ # save memory by discarding xmatch caches from previous package(s)
+ self.caches['arch_xmatch'].clear()
+ catdir, pkgdir = xpkg.split("/")
+ checkdir = self.repo_settings.repodir + "/" + xpkg
+ checkdir_relative = ""
+ if self.repolevel < 3:
+ checkdir_relative = os.path.join(pkgdir, checkdir_relative)
+ if self.repolevel < 2:
+ checkdir_relative = os.path.join(catdir, checkdir_relative)
+ checkdir_relative = os.path.join(".", checkdir_relative)
+ checkdirlist = os.listdir(checkdir)
+
+ # Run the status check
+ if self.kwargs['checks']['ebuild_notadded']:
+ self.vcs_settings.status.check(checkdir, checkdir_relative, xpkg)
+
+ dynamic_data = {
+ 'changelog_modified': False,
+ 'checkdirlist': ExtendedFuture(checkdirlist),
+ 'checkdir': checkdir,
+ 'xpkg': xpkg,
+ 'changed': self.changed,
+ 'checkdir_relative': checkdir_relative,
+ 'can_force': can_force,
+ 'repolevel': self.repolevel,
+ 'catdir': catdir,
+ 'pkgdir': pkgdir,
+ 'validity_future': ExtendedFuture(True),
+ 'y_ebuild': None,
+ # this needs to be reset at the pkg level only,
+ # easiest is to just initialize it here
+ 'muselist': ExtendedFuture(set()),
+ 'src_uri_error': ExtendedFuture(),
+ }
+ self.pkg_level_futures = [
+ 'checkdirlist',
+ 'muselist',
+ 'pkgs',
+ 'src_uri_error',
+ 'validity_future',
+ ]
+ # need to set it up for ==> self.modules or some other ordered list
+ for mod in [('manifests', 'Manifests'), ('ebuild', 'Ebuild'),
+ ('keywords', 'KeywordChecks'), ('files', 'FileChecks'),
+ ('fetches', 'FetchChecks'),
+ ('pkgmetadata', 'PkgMetadata'),
+ ]:
+ mod_class = MODULE_CONTROLLER.get_class(mod[0])
+ logging.debug("Initializing class name: %s", mod_class.__name__)
+ self.modules[mod_class.__name__] = mod_class(**self.set_kwargs(mod[0]))
+ logging.debug("scan_pkgs; module: %s", mod[1])
+ do_it, functions = self.modules[mod[1]].runInPkgs
+ if do_it:
+ for func in functions:
+ _continue = func(**self.set_func_kwargs(mod[0], dynamic_data))
+ if _continue:
+ # If we can't access all the metadata then it's totally unsafe to
+ # commit since there's no way to generate a correct Manifest.
+ # Do not try to do any more QA checks on this package since missing
+ # metadata leads to false positives for several checks, and false
+ # positives confuse users.
+ xpkg_continue = True
+ break
+
+ if xpkg_continue:
+ continue
+
+ # Sort ebuilds in ascending order for the KEYWORDS.dropped check.
+ pkgs = dynamic_data['pkgs'].get()
+ ebuildlist = sorted(pkgs.values())
+ ebuildlist = [pkg.pf for pkg in ebuildlist]
+
+ if self.kwargs['checks']['changelog'] and "ChangeLog" not in checkdirlist:
+ self.qatracker.add_error("changelog.missing", xpkg + "/ChangeLog")
+
+ changelog_path = os.path.join(checkdir_relative, "ChangeLog")
+ dynamic_data["changelog_modified"] = changelog_path in self.changed.changelogs
+
+ self._scan_ebuilds(ebuildlist, dynamic_data)
+ return
+
+
+ def _scan_ebuilds(self, ebuildlist, dynamic_data):
+
+ for y_ebuild in ebuildlist:
+ self.reset_futures(dynamic_data)
+ dynamic_data['y_ebuild'] = y_ebuild
+ y_ebuild_continue = False
+
+ # initialize per ebuild plugin checks here
+ # need to set it up for ==> self.modules_list or some other ordered list
+ for mod in [('ebuild', 'Ebuild'), ('live', 'LiveEclassChecks'),
+ ('eapi', 'EAPIChecks'), ('ebuild_metadata', 'EbuildMetadata'),
+ ('fetches', 'FetchChecks'),
+ ('description', 'DescriptionChecks'),
+ ('keywords', 'KeywordChecks'),
+ ('pkgmetadata', 'PkgMetadata'), ('ruby', 'RubyEclassChecks'),
+ ('restrict', 'RestrictChecks'),
+ ('mtime', 'MtimeChecks'), ('multicheck', 'MultiCheck'),
+ # Options.is_forced() is used to bypass further checks
+ ('options', 'Options'), ('profile', 'ProfileDependsChecks'),
+ ]:
+ if mod[0] and mod[1] not in self.modules:
+ mod_class = MODULE_CONTROLLER.get_class(mod[0])
+ logging.debug("Initializing class name: %s", mod_class.__name__)
+ self.modules[mod[1]] = mod_class(**self.set_kwargs(mod[0]))
+ logging.debug("scan_ebuilds: module: %s", mod[1])
+ do_it, functions = self.modules[mod[1]].runInEbuilds
+ logging.debug("do_it: %s, functions: %s", do_it, [x.__name__ for x in functions])
+ if do_it:
+ for func in functions:
+ logging.debug("\tRunning function: %s", func)
+ _continue = func(**self.set_func_kwargs(mod[0], dynamic_data))
+ if _continue:
+ # If we can't access all the metadata then it's totally unsafe to
+ # commit since there's no way to generate a correct Manifest.
+ # Do not try to do any more QA checks on this package since missing
+ # metadata leads to false positives for several checks, and false
+ # positives confuse users.
+ y_ebuild_continue = True
+ # logging.debug("\t>>> Continuing")
+ break
+
+ if y_ebuild_continue:
+ continue
+
+ logging.debug("Finished ebuild plugin loop, continuing...")
+
+ # Final checks
+ # initialize per pkg plugin final checks here
+ # need to set it up for ==> self.modules_list or some other ordered list
+ xpkg_complete = False
+ for mod in [('pkgmetadata', 'PkgMetadata')]:
+ if mod[0] and mod[1] not in self.modules:
+ mod_class = MODULE_CONTROLLER.get_class(mod[0])
+ logging.debug("Initializing class name: %s", mod_class.__name__)
+ self.modules[mod[1]] = mod_class(**self.set_kwargs(mod[0]))
+ logging.debug("scan_ebuilds final checks: module: %s", mod[1])
+ do_it, functions = self.modules[mod[1]].runInFinal
+ logging.debug("do_it: %s, functions: %s", do_it, [x.__name__ for x in functions])
+ if do_it:
+ for func in functions:
+ logging.debug("\tRunning function: %s", func)
+ _continue = func(**self.set_func_kwargs(mod[0], dynamic_data))
+ if _continue:
+ xpkg_complete = True
+ # logging.debug("\t>>> Continuing")
+ break
+
+ if xpkg_complete:
+ return
+ return
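To make the dynamic_data plumbing easier to follow, here is a standalone sketch of the dispatch that set_func_kwargs() and _set_future() perform for a single (type name, default) spec; the specs passed in below are hypothetical.

from portage.util.futures.extendedfutures import ExtendedFuture

DATA_TYPES = {'dict': dict, 'Future': ExtendedFuture, 'list': list, 'set': set}

def make_entry(spec):
    # spec mirrors one func_kwargs entry: (data type name, default value)
    type_name, default = spec
    if type_name in ('Future', 'ExtendedFuture'):
        if default == 'UNSET':
            return ExtendedFuture()                       # future with no default
        if default in DATA_TYPES:
            return ExtendedFuture(DATA_TYPES[default]())  # e.g. future holding a set()
        return ExtendedFuture(default)                    # literal default value
    return DATA_TYPES[type_name]()                        # plain builtin container

print(make_entry(('list', None)))       # []
print(make_entry(('Future', 'UNSET')))  # ExtendedFuture with no default
print(make_entry(('Future', 'set')))    # ExtendedFuture seeded with set()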
diff --git a/repoman/pym/repoman/tests/__init__.py b/repoman/pym/repoman/tests/__init__.py
new file mode 100644
index 000000000..532918b6a
--- /dev/null
+++ b/repoman/pym/repoman/tests/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
diff --git a/repoman/pym/repoman/tests/__test__.py b/repoman/pym/repoman/tests/__test__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/repoman/pym/repoman/tests/__test__.py
diff --git a/repoman/pym/repoman/tests/changelog/__init__.py b/repoman/pym/repoman/tests/changelog/__init__.py
new file mode 100644
index 000000000..532918b6a
--- /dev/null
+++ b/repoman/pym/repoman/tests/changelog/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
diff --git a/repoman/pym/repoman/tests/changelog/test_echangelog.py b/repoman/pym/repoman/tests/changelog/test_echangelog.py
new file mode 100644
index 000000000..1640be268
--- /dev/null
+++ b/repoman/pym/repoman/tests/changelog/test_echangelog.py
@@ -0,0 +1,106 @@
+# Copyright 2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import tempfile
+import time
+
+from portage import os
+from portage import shutil
+from portage.tests import TestCase
+from repoman.utilities import UpdateChangeLog
+
+class RepomanEchangelogTestCase(TestCase):
+
+ def setUp(self):
+ super(RepomanEchangelogTestCase, self).setUp()
+
+ self.tmpdir = tempfile.mkdtemp(prefix='repoman.echangelog.')
+
+ self.skel_changelog = os.path.join(self.tmpdir, 'skel.ChangeLog')
+ skel = [
+ '# ChangeLog for <CATEGORY>/<PACKAGE_NAME>\n',
+ '# Copyright 1999-2000 Gentoo Foundation; Distributed under the GPL v2\n',
+ '# $Header: $\n'
+ ]
+ self._writelines(self.skel_changelog, skel)
+
+ self.cat = 'mycat'
+ self.pkg = 'mypkg'
+ self.pkgdir = os.path.join(self.tmpdir, self.cat, self.pkg)
+ os.makedirs(self.pkgdir)
+
+ self.header_pkg = '# ChangeLog for %s/%s\n' % (self.cat, self.pkg)
+ self.header_copyright = '# Copyright 1999-%s Gentoo Foundation; Distributed under the GPL v2\n' % \
+ time.strftime('%Y', time.gmtime())
+ self.header_cvs = '# $Header: $\n'
+
+ self.changelog = os.path.join(self.pkgdir, 'ChangeLog')
+
+ self.user = 'Testing User <portage@gentoo.org>'
+
+ def tearDown(self):
+ super(RepomanEchangelogTestCase, self).tearDown()
+ shutil.rmtree(self.tmpdir)
+
+ def _readlines(self, file):
+ with open(file, 'r') as f:
+ return f.readlines()
+
+ def _writelines(self, file, data):
+ with open(file, 'w') as f:
+ f.writelines(data)
+
+ def testRejectRootUser(self):
+ self.assertEqual(UpdateChangeLog(self.pkgdir, 'me <root@gentoo.org>', '', '', '', '', quiet=True), None)
+
+ def testMissingSkelFile(self):
+ # Test missing ChangeLog, but with empty skel (i.e. do nothing).
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', '/does/not/exist', self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertTrue(len(actual_cl[0]) > 0)
+
+ def testEmptyChangeLog(self):
+ # Make sure we do the right thing with a 0-byte ChangeLog
+ open(self.changelog, 'w').close()
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertEqual(actual_cl[0], self.header_pkg)
+ self.assertEqual(actual_cl[1], self.header_copyright)
+ self.assertEqual(actual_cl[2], self.header_cvs)
+
+ def testCopyrightUpdate(self):
+ # Make sure updating the copyright line works
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertEqual(actual_cl[1], self.header_copyright)
+
+ def testSkelHeader(self):
+ # Test skel.ChangeLog -> ChangeLog
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertEqual(actual_cl[0], self.header_pkg)
+ self.assertNotEqual(actual_cl[-1], '\n')
+
+ def testExistingGoodHeader(self):
+ # Test existing ChangeLog (correct values)
+ self._writelines(self.changelog, [self.header_pkg])
+
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertEqual(actual_cl[0], self.header_pkg)
+
+ def testExistingBadHeader(self):
+ # Test existing ChangeLog (wrong values)
+ self._writelines(self.changelog, ['# ChangeLog for \n'])
+
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertEqual(actual_cl[0], self.header_pkg)
+
+ def testTrailingNewlines(self):
+ # Make sure trailing newlines get chomped.
+ self._writelines(self.changelog, ['#\n', 'foo\n', '\n', 'bar\n', '\n', '\n'])
+
+ UpdateChangeLog(self.pkgdir, self.user, 'test!', self.skel_changelog, self.cat, self.pkg, quiet=True)
+ actual_cl = self._readlines(self.changelog)
+ self.assertNotEqual(actual_cl[-1], '\n')
diff --git a/repoman/pym/repoman/tests/runTests.py b/repoman/pym/repoman/tests/runTests.py
new file mode 100644
index 000000000..9c452764f
--- /dev/null
+++ b/repoman/pym/repoman/tests/runTests.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python -bWd
+# runTests.py -- Portage Unit Test Functionality
+# Copyright 2006-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import os, sys
+import os.path as osp
+import grp
+import platform
+import pwd
+import signal
+
+def debug_signal(signum, frame):
+ import pdb
+ pdb.set_trace()
+
+if platform.python_implementation() == 'Jython':
+ debug_signum = signal.SIGUSR2 # bug #424259
+else:
+ debug_signum = signal.SIGUSR1
+
+signal.signal(debug_signum, debug_signal)
+
+# Pretend that the current user's uid/gid are the 'portage' uid/gid,
+# so things go smoothly regardless of the current user and global
+# user/group configuration.
+os.environ["PORTAGE_USERNAME"] = pwd.getpwuid(os.getuid()).pw_name
+os.environ["PORTAGE_GRPNAME"] = grp.getgrgid(os.getgid()).gr_name
+
+# Insert our parent dir so we can do shiny import "tests"
+# This line courtesy of Marienz and Pkgcore ;)
+sys.path.insert(0, osp.dirname(osp.dirname(osp.dirname(osp.realpath(__file__)))))
+
+import portage
+portage._internal_caller = True
+
+# Ensure that we don't instantiate portage.settings, so that tests
+# work the same regardless of global configuration file state/existence.
+portage._disable_legacy_globals()
+
+if os.environ.get('NOCOLOR') in ('yes', 'true'):
+ portage.output.nocolor()
+
+import portage.tests as tests
+from portage.const import PORTAGE_BIN_PATH
+path = os.environ.get("PATH", "").split(":")
+path = [x for x in path if x]
+
+insert_bin_path = True
+try:
+ insert_bin_path = not path or \
+ not os.path.samefile(path[0], PORTAGE_BIN_PATH)
+except OSError:
+ pass
+
+if insert_bin_path:
+ path.insert(0, PORTAGE_BIN_PATH)
+ os.environ["PATH"] = ":".join(path)
+
+if __name__ == "__main__":
+ sys.exit(tests.main())
diff --git a/repoman/pym/repoman/tests/simple/__init__.py b/repoman/pym/repoman/tests/simple/__init__.py
new file mode 100644
index 000000000..532918b6a
--- /dev/null
+++ b/repoman/pym/repoman/tests/simple/__init__.py
@@ -0,0 +1,2 @@
+# Copyright 2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
diff --git a/repoman/pym/repoman/tests/simple/test_simple.py b/repoman/pym/repoman/tests/simple/test_simple.py
new file mode 100644
index 000000000..6a7976142
--- /dev/null
+++ b/repoman/pym/repoman/tests/simple/test_simple.py
@@ -0,0 +1,322 @@
+# Copyright 2011-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+import subprocess
+import sys
+import time
+
+import portage
+from portage import os
+from portage import _unicode_decode
+from portage.const import PORTAGE_BASE_PATH, PORTAGE_PYM_PATH
+from portage.process import find_binary
+from portage.tests import TestCase
+from portage.tests.resolver.ResolverPlayground import ResolverPlayground
+from portage.util import ensure_dirs
+from repoman.copyrights import update_copyright_year
+
+class SimpleRepomanTestCase(TestCase):
+
+ def testCopyrightUpdate(self):
+ test_cases = (
+ (
+ '2011',
+ '# Copyright 1999-2008 Gentoo Foundation; Distributed under the GPL v2',
+ '# Copyright 1999-2011 Gentoo Foundation; Distributed under the GPL v2',
+ ),
+ (
+ '2011',
+ '# Copyright 1999 Gentoo Foundation; Distributed under the GPL v2',
+ '# Copyright 1999-2011 Gentoo Foundation; Distributed under the GPL v2',
+ ),
+ (
+ '1999',
+ '# Copyright 1999 Gentoo Foundation; Distributed under the GPL v2',
+ '# Copyright 1999 Gentoo Foundation; Distributed under the GPL v2',
+ ),
+ )
+
+ for year, before, after in test_cases:
+ self.assertEqual(update_copyright_year(year, before), after)
+
+ def _must_skip(self):
+ xmllint = find_binary("xmllint")
+ if not xmllint:
+ return "xmllint not found"
+
+ try:
+ __import__("xml.etree.ElementTree")
+ __import__("xml.parsers.expat").parsers.expat.ExpatError
+ except (AttributeError, ImportError):
+ return "python is missing xml support"
+
+ def testSimple(self):
+ debug = False
+
+ skip_reason = self._must_skip()
+ if skip_reason:
+ self.portage_skip = skip_reason
+ self.assertFalse(True, skip_reason)
+ return
+
+ copyright_header = """# Copyright 1999-%s Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Header: $
+""" % time.gmtime().tm_year
+
+ repo_configs = {
+ "test_repo": {
+ "layout.conf":
+ (
+ "update-changelog = true",
+ ),
+ }
+ }
+
+ profiles = (
+ ("x86", "default/linux/x86/test_profile", "stable"),
+ ("x86", "default/linux/x86/test_dev", "dev"),
+ ("x86", "default/linux/x86/test_exp", "exp"),
+ )
+
+ profile = {
+ "eapi": ("5",),
+ "package.use.stable.mask": ("dev-libs/A flag",)
+ }
+
+ ebuilds = {
+ "dev-libs/A-0": {
+ "COPYRIGHT_HEADER" : copyright_header,
+ "DESCRIPTION" : "Desc goes here",
+ "EAPI" : "5",
+ "HOMEPAGE" : "http://example.com",
+ "IUSE" : "flag",
+ "KEYWORDS": "x86",
+ "LICENSE": "GPL-2",
+ "RDEPEND": "flag? ( dev-libs/B[flag] )",
+ },
+ "dev-libs/A-1": {
+ "COPYRIGHT_HEADER" : copyright_header,
+ "DESCRIPTION" : "Desc goes here",
+ "EAPI" : "4",
+ "HOMEPAGE" : "http://example.com",
+ "IUSE" : "flag",
+ "KEYWORDS": "~x86",
+ "LICENSE": "GPL-2",
+ "RDEPEND": "flag? ( dev-libs/B[flag] )",
+ },
+ "dev-libs/B-1": {
+ "COPYRIGHT_HEADER" : copyright_header,
+ "DESCRIPTION" : "Desc goes here",
+ "EAPI" : "4",
+ "HOMEPAGE" : "http://example.com",
+ "IUSE" : "flag",
+ "KEYWORDS": "~x86",
+ "LICENSE": "GPL-2",
+ },
+ "dev-libs/C-0": {
+ "COPYRIGHT_HEADER" : copyright_header,
+ "DESCRIPTION" : "Desc goes here",
+ "EAPI" : "4",
+ "HOMEPAGE" : "http://example.com",
+ "IUSE" : "flag",
+ # must be unstable, since dev-libs/A[flag] is stable masked
+ "KEYWORDS": "~x86",
+ "LICENSE": "GPL-2",
+ "RDEPEND": "flag? ( dev-libs/A[flag] )",
+ },
+ }
+ licenses = ["GPL-2"]
+ arch_list = ["x86"]
+ metadata_xsd = os.path.join(PORTAGE_BASE_PATH, "cnf/metadata.xsd")
+ metadata_xml_files = (
+ (
+ "dev-libs/A",
+ {
+ "flags" : "<flag name='flag' restrict='&gt;=dev-libs/A-0'>Description of how USE='flag' affects this package</flag>",
+ },
+ ),
+ (
+ "dev-libs/B",
+ {
+ "flags" : "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
+ },
+ ),
+ (
+ "dev-libs/C",
+ {
+ "flags" : "<flag name='flag'>Description of how USE='flag' affects this package</flag>",
+ },
+ ),
+ )
+
+ use_desc = (
+ ("flag", "Description of how USE='flag' affects packages"),
+ )
+
+ playground = ResolverPlayground(ebuilds=ebuilds,
+ profile=profile, repo_configs=repo_configs, debug=debug)
+ settings = playground.settings
+ eprefix = settings["EPREFIX"]
+ eroot = settings["EROOT"]
+ portdb = playground.trees[playground.eroot]["porttree"].dbapi
+ homedir = os.path.join(eroot, "home")
+ distdir = os.path.join(eprefix, "distdir")
+ test_repo_location = settings.repositories["test_repo"].location
+ profiles_dir = os.path.join(test_repo_location, "profiles")
+ license_dir = os.path.join(test_repo_location, "licenses")
+
+ repoman_cmd = (portage._python_interpreter, "-b", "-Wd",
+ os.path.join(self.bindir, "repoman"))
+
+ git_binary = find_binary("git")
+ git_cmd = (git_binary,)
+
+ cp_binary = find_binary("cp")
+ self.assertEqual(cp_binary is None, False,
+ "cp command not found")
+ cp_cmd = (cp_binary,)
+
+ test_ebuild = portdb.findname("dev-libs/A-1")
+ self.assertFalse(test_ebuild is None)
+
+ committer_name = "Gentoo Dev"
+ committer_email = "gentoo-dev@gentoo.org"
+
+ git_test = (
+ ("", repoman_cmd + ("manifest",)),
+ ("", git_cmd + ("config", "--global", "user.name", committer_name,)),
+ ("", git_cmd + ("config", "--global", "user.email", committer_email,)),
+ ("", git_cmd + ("init-db",)),
+ ("", git_cmd + ("add", ".")),
+ ("", git_cmd + ("commit", "-a", "-m", "add whole repo")),
+ ("", repoman_cmd + ("full", "-d")),
+ ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "2.ebuild")),
+ ("", git_cmd + ("add", test_ebuild[:-8] + "2.ebuild")),
+ ("", repoman_cmd + ("commit", "-m", "bump to version 2")),
+ ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "3.ebuild")),
+ ("", git_cmd + ("add", test_ebuild[:-8] + "3.ebuild")),
+ ("dev-libs", repoman_cmd + ("commit", "-m", "bump to version 3")),
+ ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "4.ebuild")),
+ ("", git_cmd + ("add", test_ebuild[:-8] + "4.ebuild")),
+ ("dev-libs/A", repoman_cmd + ("commit", "-m", "bump to version 4")),
+ )
+
+ pythonpath = os.environ.get("PYTHONPATH")
+ if pythonpath is not None and not pythonpath.strip():
+ pythonpath = None
+ if pythonpath is not None and \
+ pythonpath.split(":")[0] == PORTAGE_PYM_PATH:
+ pass
+ else:
+ if pythonpath is None:
+ pythonpath = ""
+ else:
+ pythonpath = ":" + pythonpath
+ pythonpath = PORTAGE_PYM_PATH + pythonpath
+
+ env = {
+ "PORTAGE_OVERRIDE_EPREFIX" : eprefix,
+ "DISTDIR" : distdir,
+ "GENTOO_COMMITTER_NAME" : committer_name,
+ "GENTOO_COMMITTER_EMAIL" : committer_email,
+ "HOME" : homedir,
+ "PATH" : os.environ["PATH"],
+ "PORTAGE_GRPNAME" : os.environ["PORTAGE_GRPNAME"],
+ "PORTAGE_USERNAME" : os.environ["PORTAGE_USERNAME"],
+ "PORTAGE_REPOSITORIES" : settings.repositories.config_string(),
+ "PYTHONDONTWRITEBYTECODE" : os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
+ "PYTHONPATH" : pythonpath,
+ }
+
+ if os.environ.get("SANDBOX_ON") == "1":
+ # avoid problems from nested sandbox instances
+ env["FEATURES"] = "-sandbox -usersandbox"
+
+ dirs = [homedir, license_dir, profiles_dir, distdir]
+ try:
+ for d in dirs:
+ ensure_dirs(d)
+ with open(os.path.join(test_repo_location, "skel.ChangeLog"), 'w') as f:
+ f.write(copyright_header)
+ with open(os.path.join(profiles_dir, "profiles.desc"), 'w') as f:
+ for x in profiles:
+ f.write("%s %s %s\n" % x)
+
+ # ResolverPlayground only created the first profile,
+ # so create the remaining ones.
+ for x in profiles[1:]:
+ sub_profile_dir = os.path.join(profiles_dir, x[1])
+ ensure_dirs(sub_profile_dir)
+ for config_file, lines in profile.items():
+ file_name = os.path.join(sub_profile_dir, config_file)
+ with open(file_name, "w") as f:
+ for line in lines:
+ f.write("%s\n" % line)
+
+ for x in licenses:
+ open(os.path.join(license_dir, x), 'wb').close()
+ with open(os.path.join(profiles_dir, "arch.list"), 'w') as f:
+ for x in arch_list:
+ f.write("%s\n" % x)
+ with open(os.path.join(profiles_dir, "use.desc"), 'w') as f:
+ for k, v in use_desc:
+ f.write("%s - %s\n" % (k, v))
+ for cp, xml_data in metadata_xml_files:
+ with open(os.path.join(test_repo_location, cp, "metadata.xml"), 'w') as f:
+ f.write(playground.metadata_xml_template % xml_data)
+ # Use a symlink to test_repo, in order to trigger bugs
+ # involving canonical vs. non-canonical paths.
+ test_repo_symlink = os.path.join(eroot, "test_repo_symlink")
+ os.symlink(test_repo_location, test_repo_symlink)
+ metadata_xsd_dest = os.path.join(test_repo_location, 'metadata/xml-schema/metadata.xsd')
+ os.makedirs(os.path.dirname(metadata_xsd_dest))
+ os.symlink(metadata_xsd, metadata_xsd_dest)
+
+ if debug:
+ # The subprocess inherits both stdout and stderr, for
+ # debugging purposes.
+ stdout = None
+ else:
+ # The subprocess inherits stderr so that any warnings
+ # triggered by python -Wd will be visible.
+ stdout = subprocess.PIPE
+
+ for cwd in ("", "dev-libs", "dev-libs/A", "dev-libs/B"):
+ abs_cwd = os.path.join(test_repo_symlink, cwd)
+ proc = subprocess.Popen(repoman_cmd + ("full",),
+ cwd=abs_cwd, env=env, stdout=stdout)
+
+ if debug:
+ proc.wait()
+ else:
+ output = proc.stdout.readlines()
+ proc.wait()
+ proc.stdout.close()
+ if proc.returncode != os.EX_OK:
+ for line in output:
+ sys.stderr.write(_unicode_decode(line))
+
+ self.assertEqual(os.EX_OK, proc.returncode,
+ "repoman failed in %s" % (cwd,))
+
+ if git_binary is not None:
+ for cwd, cmd in git_test:
+ abs_cwd = os.path.join(test_repo_symlink, cwd)
+ proc = subprocess.Popen(cmd,
+ cwd=abs_cwd, env=env, stdout=stdout)
+
+ if debug:
+ proc.wait()
+ else:
+ output = proc.stdout.readlines()
+ proc.wait()
+ proc.stdout.close()
+ if proc.returncode != os.EX_OK:
+ for line in output:
+ sys.stderr.write(_unicode_decode(line))
+
+ self.assertEqual(os.EX_OK, proc.returncode,
+ "%s failed in %s" % (cmd, cwd,))
+ finally:
+ playground.cleanup()
diff --git a/repoman/pym/repoman/utilities.py b/repoman/pym/repoman/utilities.py
new file mode 100644
index 000000000..8a757dc85
--- /dev/null
+++ b/repoman/pym/repoman/utilities.py
@@ -0,0 +1,589 @@
+# -*- coding:utf-8 -*-
+# repoman: Utilities
+# Copyright 2007-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+"""This module contains utility functions to help repoman find ebuilds to
+scan"""
+
+from __future__ import print_function, unicode_literals
+
+__all__ = [
+ "editor_is_executable",
+ "FindPackagesToScan",
+ "FindPortdir",
+ "get_commit_message_with_editor",
+ "get_commit_message_with_stdin",
+ "get_committer_name",
+ "have_ebuild_dir",
+ "have_profile_dir",
+ "UpdateChangeLog"
+]
+
+import errno
+import io
+from itertools import chain
+import logging
+import pwd
+import stat
+import sys
+import time
+import textwrap
+import difflib
+from tempfile import mkstemp
+
+# import our initialized portage instance
+from repoman._portage import portage
+
+from portage import os
+from portage import shutil
+from portage import _encodings
+from portage import _unicode_decode
+from portage import _unicode_encode
+from portage import util
+from portage.localization import _
+from portage.process import find_binary
+from portage.output import green
+
+from repoman.copyrights import update_copyright, update_copyright_year
+
+
+normalize_path = util.normalize_path
+util.initialize_logger()
+
+if sys.hexversion >= 0x3000000:
+ basestring = str
+
+
+def have_profile_dir(path, maxdepth=3, filename="profiles.desc"):
+ """
+ Try to figure out if 'path' has a profiles/
+ dir in it by checking for the given filename.
+ """
+ while path != "/" and maxdepth:
+ if os.path.exists(os.path.join(path, "profiles", filename)):
+ return normalize_path(path)
+ path = normalize_path(path + "/..")
+ maxdepth -= 1
+
+
+def have_ebuild_dir(path, maxdepth=3):
+ """
+ Try to figure out if 'path' or a subdirectory contains one or more
+ ebuild files named appropriately for their parent directory.
+ """
+ stack = [(normalize_path(path), 1)]
+ while stack:
+ path, depth = stack.pop()
+ basename = os.path.basename(path)
+ try:
+ listdir = os.listdir(path)
+ except OSError:
+ continue
+ for filename in listdir:
+ abs_filename = os.path.join(path, filename)
+ try:
+ st = os.stat(abs_filename)
+ except OSError:
+ continue
+ if stat.S_ISDIR(st.st_mode):
+ if depth < maxdepth:
+ stack.append((abs_filename, depth + 1))
+ elif stat.S_ISREG(st.st_mode):
+ if filename.endswith(".ebuild") and \
+ filename.startswith(basename + "-"):
+ return os.path.dirname(os.path.dirname(path))
+
+
+def FindPackagesToScan(settings, startdir, reposplit):
+ """ Try to find packages that need to be scanned
+
+ Args:
+ settings - portage.config instance, preferably repoman_settings
+ startdir - directory that repoman was run in
+ reposplit - root of the repository
+ Returns:
+ A list of directories to scan
+ """
+
+ def AddPackagesInDir(path):
+ """ Given a list of dirs, add any packages in it """
+ ret = []
+ pkgdirs = os.listdir(path)
+ for d in pkgdirs:
+ if d == 'CVS' or d.startswith('.'):
+ continue
+ p = os.path.join(path, d)
+
+ if os.path.isdir(p):
+ cat_pkg_dir = os.path.join(*p.split(os.path.sep)[-2:])
+ logging.debug('adding %s to scanlist' % cat_pkg_dir)
+ ret.append(cat_pkg_dir)
+ return ret
+
+ scanlist = []
+ repolevel = len(reposplit)
+ if repolevel == 1: # root of the tree, startdir = repodir
+ for cat in settings.categories:
+ path = os.path.join(startdir, cat)
+ if not os.path.isdir(path):
+ continue
+ scanlist.extend(AddPackagesInDir(path))
+ elif repolevel == 2: # category level, startdir = catdir
+ # We only want 1 segment of the directory,
+ # this is why we use catdir instead of startdir.
+ catdir = reposplit[-2]
+ if catdir not in settings.categories:
+ logging.warn(
+ '%s is not a valid category according to profiles/categories, '
+ 'skipping checks in %s' % (catdir, catdir))
+ else:
+ scanlist = AddPackagesInDir(catdir)
+ elif repolevel == 3: # pkgdir level, startdir = pkgdir
+ catdir = reposplit[-2]
+ pkgdir = reposplit[-1]
+ if catdir not in settings.categories:
+ logging.warn(
+ '%s is not a valid category according to profiles/categories, '
+ 'skipping checks in %s' % (catdir, catdir))
+ else:
+ path = os.path.join(catdir, pkgdir)
+ logging.debug('adding %s to scanlist' % path)
+ scanlist.append(path)
+ return scanlist
+
+
+def editor_is_executable(editor):
+ """
+ Given an EDITOR string, validate that it refers to
+ an executable. This uses shlex_split() to split the
+ first component and do a PATH lookup if necessary.
+
+ @param editor: An EDITOR value from the environment.
+ @type: string
+ @rtype: bool
+ @return: True if an executable is found, False otherwise.
+ """
+ editor_split = util.shlex_split(editor)
+ if not editor_split:
+ return False
+ filename = editor_split[0]
+ if not os.path.isabs(filename):
+ return find_binary(filename) is not None
+ return os.access(filename, os.X_OK) and os.path.isfile(filename)
+
+
+def get_commit_message_with_editor(editor, message=None, prefix=""):
+ """
+ Execute editor with a temporary file as its argument
+ and return the file content afterwards.
+
+ @param editor: An EDITOR value from the environment
+ @type: string
+ @param message: An iterable of lines to show in the editor.
+ @type: iterable
+ @param prefix: Suggested prefix for the commit message summary line.
+ @type: string
+ @rtype: string or None
+ @return: A string on success or None if an error occurs.
+ """
+ fd, filename = mkstemp()
+ try:
+ os.write(
+ fd, _unicode_encode(_(
+ prefix +
+ "\n\n# Please enter the commit message "
+ "for your changes.\n# (Comment lines starting "
+ "with '#' will not be included)\n"),
+ encoding=_encodings['content'], errors='backslashreplace'))
+ if message:
+ os.write(fd, b"#\n")
+ for line in message:
+ os.write(
+ fd, _unicode_encode(
+ "#" + line, encoding=_encodings['content'],
+ errors='backslashreplace'))
+ os.close(fd)
+ retval = os.system(editor + " '%s'" % filename)
+ if not (os.WIFEXITED(retval) and os.WEXITSTATUS(retval) == os.EX_OK):
+ return None
+ try:
+ with io.open(_unicode_encode(
+ filename, encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['content'], errors='replace') as f:
+ mylines = f.readlines()
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ del e
+ return None
+ return "".join(line for line in mylines if not line.startswith("#"))
+ finally:
+ try:
+ os.unlink(filename)
+ except OSError:
+ pass
+
+
+def get_commit_message_with_stdin():
+ """
+ Read a commit message from the user and return it.
+
+ @rtype: string or None
+ @return: A string on success or None if an error occurs.
+ """
+ print(
+ "Please enter a commit message."
+ " Use Ctrl-d to finish or Ctrl-c to abort.")
+ commitmessage = []
+ while True:
+ commitmessage.append(sys.stdin.readline())
+ if not commitmessage[-1]:
+ break
+ commitmessage = "".join(commitmessage)
+ return commitmessage
+
+
+def FindPortdir(settings):
+ """ Try to figure out what repo we are in and whether we are in a regular
+ tree or an overlay.
+
+ Basic logic is:
+
+ 1. Determine what directory we are in (supports symlinks).
+ 2. Build a list of directories from / to our current location
+ 3. Iterate over PORTDIR_OVERLAY, if we find a match,
+ search for a profiles directory in the overlay. If it has one,
+ make it portdir, otherwise make it portdir_overlay.
+ 4. If we didn't find an overlay in PORTDIR_OVERLAY,
+ see if we are in PORTDIR; if so, set portdir_overlay to PORTDIR.
+ If we aren't in PORTDIR, see if PWD has a profiles dir, if so,
+ set portdir_overlay and portdir to PWD, else make them False.
+ 5. If we haven't found portdir_overlay yet,
+ it means the user is doing something odd, report an error.
+ 6. If we haven't found a portdir yet, set portdir to PORTDIR.
+
+ Args:
+ settings - portage.config instance, preferably repoman_settings
+ Returns:
+ list(portdir, portdir_overlay, location)
+ """
+
+ portdir = None
+ portdir_overlay = None
+ location = os.getcwd()
+ pwd = _unicode_decode(os.environ.get('PWD', ''), encoding=_encodings['fs'])
+ if pwd and pwd != location and os.path.realpath(pwd) == location:
+ # getcwd() returns the canonical path but that makes it hard for repoman to
+ # orient itself if the user has symlinks in their portage tree structure.
+ # We use os.environ["PWD"], if available, to get the non-canonical path of
+ # the current working directory (from the shell).
+ location = pwd
+
+ location = normalize_path(location)
+
+ path_ids = {}
+ p = location
+ s = None
+ while True:
+ s = os.stat(p)
+ path_ids[(s.st_dev, s.st_ino)] = p
+ if p == "/":
+ break
+ p = os.path.dirname(p)
+ if location[-1] != "/":
+ location += "/"
+
+ for overlay in portage.util.shlex_split(settings["PORTDIR_OVERLAY"]):
+ overlay = os.path.realpath(overlay)
+ try:
+ s = os.stat(overlay)
+ except OSError:
+ continue
+ overlay = path_ids.get((s.st_dev, s.st_ino))
+ if overlay is None:
+ continue
+ if overlay[-1] != "/":
+ overlay += "/"
+ if True:
+ portdir_overlay = overlay
+ subdir = location[len(overlay):]
+ if subdir and subdir[-1] != "/":
+ subdir += "/"
+ if have_profile_dir(location, subdir.count("/")):
+ portdir = portdir_overlay
+ break
+
+ # Couldn't match location with anything from PORTDIR_OVERLAY,
+ # so fall back to have_profile_dir() checks alone. Assume that
+ # an overlay will contain at least a "repo_name" file while a
+ # master repo (portdir) will contain at least a "profiles.desc"
+ # file.
+ if not portdir_overlay:
+ portdir_overlay = have_profile_dir(location, filename="repo_name")
+ if not portdir_overlay:
+ portdir_overlay = have_ebuild_dir(location)
+ if portdir_overlay:
+ subdir = location[len(portdir_overlay):]
+ if subdir and subdir[-1] != os.sep:
+ subdir += os.sep
+ if have_profile_dir(location, subdir.count(os.sep)):
+ portdir = portdir_overlay
+
+ if not portdir_overlay:
+ if (settings["PORTDIR"] + os.path.sep).startswith(location):
+ portdir_overlay = settings["PORTDIR"]
+ else:
+ portdir_overlay = have_profile_dir(location)
+ portdir = portdir_overlay
+
+ if not portdir_overlay:
+ msg = 'Repoman is unable to determine PORTDIR or PORTDIR_OVERLAY' + \
+ ' from the current working directory'
+ logging.critical(msg)
+ return (None, None, None)
+
+ if not portdir:
+ portdir = settings["PORTDIR"]
+
+ if not portdir_overlay.endswith('/'):
+ portdir_overlay += '/'
+
+ if not portdir.endswith('/'):
+ portdir += '/'
+
+ return [normalize_path(x) for x in (portdir, portdir_overlay, location)]
+
+
+def get_committer_name(env=None):
+ """Generate a committer string like echangelog does."""
+ if env is None:
+ env = os.environ
+ if 'GENTOO_COMMITTER_NAME' in env and 'GENTOO_COMMITTER_EMAIL' in env:
+ user = '%s <%s>' % (
+ env['GENTOO_COMMITTER_NAME'],
+ env['GENTOO_COMMITTER_EMAIL'])
+ elif 'GENTOO_AUTHOR_NAME' in env and 'GENTOO_AUTHOR_EMAIL' in env:
+ user = '%s <%s>' % (
+ env['GENTOO_AUTHOR_NAME'],
+ env['GENTOO_AUTHOR_EMAIL'])
+ elif 'ECHANGELOG_USER' in env:
+ user = env['ECHANGELOG_USER']
+ else:
+ pwd_struct = pwd.getpwuid(os.getuid())
+ gecos = pwd_struct.pw_gecos.split(',')[0] # bug #80011
+ user = '%s <%s@gentoo.org>' % (gecos, pwd_struct.pw_name)
+ return user
+
+
+def UpdateChangeLog(
+ pkgdir, user, msg, skel_path, category, package,
+ new=(), removed=(), changed=(), pretend=False, quiet=False):
+ """
+ Write an entry to an existing ChangeLog, or create a new one.
+ Updates copyright year on changed files, and updates the header of
+ ChangeLog with the contents of skel.ChangeLog.
+ """
+
+ if '<root@' in user:
+ if not quiet:
+ logging.critical('Please set ECHANGELOG_USER or run as non-root')
+ return None
+
+ # ChangeLog times are in UTC
+ gmtime = time.gmtime()
+ year = time.strftime('%Y', gmtime)
+ date = time.strftime('%d %b %Y', gmtime)
+
+ cl_path = os.path.join(pkgdir, 'ChangeLog')
+ clold_lines = []
+ clnew_lines = []
+ old_header_lines = []
+ header_lines = []
+
+ clold_file = None
+ try:
+ clold_file = io.open(_unicode_encode(
+ cl_path, encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['repo.content'], errors='replace')
+ except EnvironmentError:
+ pass
+
+ f, clnew_path = mkstemp()
+
+ # construct correct header first
+ try:
+ if clold_file is not None:
+ # retain header from old ChangeLog
+ first_line = True
+ for line in clold_file:
+ line_strip = line.strip()
+ if line_strip and line[:1] != "#":
+ clold_lines.append(line)
+ break
+ # always make sure cat/pkg is up-to-date in case we are
+ # moving packages around, or copied from another pkg, or ...
+ if first_line:
+ if line.startswith('# ChangeLog for'):
+ line = '# ChangeLog for %s/%s\n' % (category, package)
+ first_line = False
+ old_header_lines.append(line)
+ header_lines.append(update_copyright_year(year, line))
+ if not line_strip:
+ break
+
+ clskel_file = None
+ if not header_lines:
+ # delay opening this until we find we need a header
+ try:
+ clskel_file = io.open(_unicode_encode(
+ skel_path, encoding=_encodings['fs'], errors='strict'),
+ mode='r', encoding=_encodings['repo.content'],
+ errors='replace')
+ except EnvironmentError:
+ pass
+
+ if clskel_file is not None:
+ # read skel.ChangeLog up to first empty line
+ for line in clskel_file:
+ line_strip = line.strip()
+ if not line_strip:
+ break
+ line = line.replace('<CATEGORY>', category)
+ line = line.replace('<PACKAGE_NAME>', package)
+ line = update_copyright_year(year, line)
+ header_lines.append(line)
+ header_lines.append('\n')
+ clskel_file.close()
+
+ # write new ChangeLog entry
+ clnew_lines.extend(header_lines)
+ newebuild = False
+ for fn in new:
+ if not fn.endswith('.ebuild'):
+ continue
+ ebuild = fn.split(os.sep)[-1][0:-7]
+ clnew_lines.append('*%s (%s)\n' % (ebuild, date))
+ newebuild = True
+ if newebuild:
+ clnew_lines.append('\n')
+ trivial_files = ('ChangeLog', 'Manifest')
+ display_new = [
+ '+' + elem
+ for elem in new
+ if elem not in trivial_files]
+ display_removed = [
+ '-' + elem
+ for elem in removed]
+ display_changed = [
+ elem for elem in changed
+ if elem not in trivial_files]
+ if not (display_new or display_removed or display_changed):
+ # If there's nothing else to display, show one of the
+ # trivial files.
+ for fn in trivial_files:
+ if fn in new:
+ display_new = ['+' + fn]
+ break
+ elif fn in changed:
+ display_changed = [fn]
+ break
+
+ display_new.sort()
+ display_removed.sort()
+ display_changed.sort()
+
+ mesg = '%s; %s %s:' % (date, user, ', '.join(chain(
+ display_new, display_removed, display_changed)))
+ for line in textwrap.wrap(
+ mesg, 80, initial_indent=' ', subsequent_indent=' ',
+ break_on_hyphens=False):
+ clnew_lines.append('%s\n' % line)
+ for line in textwrap.wrap(
+ msg, 80, initial_indent=' ', subsequent_indent=' '):
+ clnew_lines.append('%s\n' % line)
+ # Don't append a trailing newline if the file is new.
+ if clold_file is not None:
+ clnew_lines.append('\n')
+
+ f = io.open(
+ f, mode='w', encoding=_encodings['repo.content'],
+ errors='backslashreplace')
+
+ for line in clnew_lines:
+ f.write(line)
+
+ # append stuff from old ChangeLog
+ if clold_file is not None:
+
+ if clold_lines:
+ # clold_lines may contain a saved non-header line
+ # that we want to write first.
+ # Also, append this line to clnew_lines so that the
+ # unified_diff call doesn't show it as removed.
+ for line in clold_lines:
+ f.write(line)
+ clnew_lines.append(line)
+
+ else:
+ # ensure that there is no more than one blank
+ # line after our new entry
+ for line in clold_file:
+ if line.strip():
+ f.write(line)
+ break
+
+ # Now prepend old_header_lines to clold_lines, for use
+ # in the unified_diff call below.
+ clold_lines = old_header_lines + clold_lines
+
+ # Trim any trailing newlines.
+ lines = clold_file.readlines()
+ clold_file.close()
+ while lines and lines[-1] == '\n':
+ del lines[-1]
+ f.writelines(lines)
+ f.close()
+
+ # show diff
+ if not quiet:
+ for line in difflib.unified_diff(
+ clold_lines, clnew_lines,
+ fromfile=cl_path, tofile=cl_path, n=0):
+ util.writemsg_stdout(line, noiselevel=-1)
+ util.writemsg_stdout("\n", noiselevel=-1)
+
+ if pretend:
+ # remove what we've done
+ os.remove(clnew_path)
+ else:
+ # rename to ChangeLog, and set permissions
+ try:
+ clold_stat = os.stat(cl_path)
+ except OSError:
+ clold_stat = None
+
+ shutil.move(clnew_path, cl_path)
+
+ if clold_stat is None:
+ util.apply_permissions(cl_path, mode=0o644)
+ else:
+ util.apply_stat_permissions(cl_path, clold_stat)
+
+ if clold_file is None:
+ return True
+ else:
+ return False
+ except IOError as e:
+ err = 'Repoman is unable to create/write to ChangeLog.new file: %s' % (e,)
+ logging.critical(err)
+ # try to remove if possible
+ try:
+ os.remove(clnew_path)
+ except OSError:
+ pass
+ return None
+
+
+def repoman_sez(msg):
+ print(green("RepoMan sez:"), msg)
diff --git a/repoman/setup.py b/repoman/setup.py
new file mode 100755
index 000000000..b7b8ee268
--- /dev/null
+++ b/repoman/setup.py
@@ -0,0 +1,662 @@
+#!/usr/bin/env python
+# Copyright 1998-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+
+from __future__ import print_function
+
+from distutils.core import setup, Command
+from distutils.command.build import build
+from distutils.command.build_scripts import build_scripts
+from distutils.command.clean import clean
+from distutils.command.install import install
+from distutils.command.install_data import install_data
+from distutils.command.install_lib import install_lib
+from distutils.command.install_scripts import install_scripts
+from distutils.command.sdist import sdist
+from distutils.dep_util import newer
+from distutils.dir_util import mkpath, remove_tree
+from distutils.util import change_root, subst_vars
+
+import codecs
+import collections
+import glob
+import os
+import os.path
+import re
+import subprocess
+import sys
+
+
+# TODO:
+# - smarter rebuilds of docs w/ 'install_docbook' and 'install_epydoc'.
+
+x_scripts = {
+ 'bin': [
+ 'bin/repoman',
+ ],
+ 'sbin': [
+ ],
+}
+
+
+class x_build(build):
+ """ Build command with extra build_man call. """
+
+ def run(self):
+ build.run(self)
+ self.run_command('build_man')
+
+
+class build_man(Command):
+ """ Perform substitutions in manpages. """
+
+ user_options = [
+ ]
+
+ def initialize_options(self):
+ self.build_base = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'))
+
+ def run(self):
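+ # copy each man page into the build dir, replacing the VERSION
+ # placeholder in its first line; up-to-date targets are skipped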
+ for d, files in self.distribution.data_files:
+ if not d.startswith('$mandir/'):
+ continue
+
+ for source in files:
+ target = os.path.join(self.build_base, source)
+ mkpath(os.path.dirname(target))
+
+ if not newer(source, target) and not newer(__file__, target):
+ continue
+
+ print('copying and updating %s -> %s' % (
+ source, target))
+
+ with codecs.open(source, 'r', 'utf8') as f:
+ data = f.readlines()
+ data[0] = data[0].replace('VERSION',
+ self.distribution.get_version())
+ with codecs.open(target, 'w', 'utf8') as f:
+ f.writelines(data)
+
+
+class docbook(Command):
+ """ Build docs using docbook. """
+
+ user_options = [
+ ('doc-formats=', None, 'Documentation formats to build (all xmlto formats for docbook are allowed, comma-separated)'),
+ ]
+
+ def initialize_options(self):
+ self.doc_formats = 'xhtml,xhtml-nochunks'
+
+ def finalize_options(self):
+ self.doc_formats = self.doc_formats.replace(',', ' ').split()
+
+ def run(self):
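+ # write the (empty) date and version fragments under doc/fragment,
+ # then run xmlto once per requested output format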
+ if not os.path.isdir('doc/fragment'):
+ mkpath('doc/fragment')
+
+ with open('doc/fragment/date', 'w'):
+ pass
+ with open('doc/fragment/version', 'w') as f:
+ f.write('<releaseinfo>%s</releaseinfo>' % self.distribution.get_version())
+
+ for f in self.doc_formats:
+ print('Building docs in %s format...' % f)
+ subprocess.check_call(['xmlto', '-o', 'doc',
+ '-m', 'doc/custom.xsl', f, 'doc/portage.docbook'])
+
+
+class epydoc(Command):
+ """ Build API docs using epydoc. """
+
+ user_options = [
+ ]
+
+ def initialize_options(self):
+ self.build_lib = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build_py', ('build_lib', 'build_lib'))
+
+ def run(self):
+ self.run_command('build_py')
+
+ print('Building API documentation...')
+
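+ # put the freshly built modules first on PYTHONPATH so epydoc
+ # documents the in-tree code rather than an installed copy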
+ process_env = os.environ.copy()
+ pythonpath = self.build_lib
+ try:
+ pythonpath += ':' + process_env['PYTHONPATH']
+ except KeyError:
+ pass
+ process_env['PYTHONPATH'] = pythonpath
+
+ subprocess.check_call(['epydoc', '-o', 'epydoc',
+ '--name', self.distribution.get_name(),
+ '--url', self.distribution.get_url(),
+ '-qq', '--no-frames', '--show-imports',
+ '--exclude', 'portage.tests',
+ '_emerge', 'portage'],
+ env = process_env)
+ os.remove('epydoc/api-objects.txt')
+
+
+class install_docbook(install_data):
+ """ install_data for docbook docs """
+
+ user_options = install_data.user_options
+
+ def initialize_options(self):
+ install_data.initialize_options(self)
+ self.htmldir = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install', ('htmldir', 'htmldir'))
+ install_data.finalize_options(self)
+
+ def run(self):
+ if not os.path.exists('doc/portage.html'):
+ self.run_command('docbook')
+ self.data_files = [
+ (self.htmldir, glob.glob('doc/*.html')),
+ ]
+ install_data.run(self)
+
+
+class install_epydoc(install_data):
+ """ install_data for epydoc docs """
+
+ user_options = install_data.user_options
+
+ def initialize_options(self):
+ install_data.initialize_options(self)
+ self.htmldir = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install', ('htmldir', 'htmldir'))
+ install_data.finalize_options(self)
+
+ def run(self):
+ if not os.path.exists('epydoc/index.html'):
+ self.run_command('epydoc')
+ self.data_files = [
+ (os.path.join(self.htmldir, 'api'), glob.glob('epydoc/*')),
+ ]
+ install_data.run(self)
+
+
+class x_build_scripts_custom(build_scripts):
+ def finalize_options(self):
+ build_scripts.finalize_options(self)
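+ # subclasses set dir_name; scripts build into a matching subdirectory,
+ # using that group's list from x_scripts if present, otherwise every
+ # script not claimed by one of the groups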
+ if 'dir_name' in dir(self):
+ self.build_dir = os.path.join(self.build_dir, self.dir_name)
+ if self.dir_name in x_scripts:
+ self.scripts = x_scripts[self.dir_name]
+ else:
+ self.scripts = set(self.scripts)
+ for other_files in x_scripts.values():
+ self.scripts.difference_update(other_files)
+
+ def run(self):
+ # group scripts by subdirectory
+ split_scripts = collections.defaultdict(list)
+ for f in self.scripts:
+ dir_name = os.path.dirname(f[len('bin/'):])
+ split_scripts[dir_name].append(f)
+
+ base_dir = self.build_dir
+ base_scripts = self.scripts
+ for d, files in split_scripts.items():
+ self.build_dir = os.path.join(base_dir, d)
+ self.scripts = files
+ self.copy_scripts()
+
+ # restore previous values
+ self.build_dir = base_dir
+ self.scripts = base_scripts
+
+
+class x_build_scripts_bin(x_build_scripts_custom):
+ dir_name = 'bin'
+
+
+class x_build_scripts_sbin(x_build_scripts_custom):
+ dir_name = 'sbin'
+
+
+class x_build_scripts_portagebin(x_build_scripts_custom):
+ dir_name = 'portage'
+
+
+class x_build_scripts(build_scripts):
+ def initialize_options(self):
+ build_scripts.initialize_options(self)
+
+ def finalize_options(self):
+ build_scripts.finalize_options(self)
+
+ def run(self):
+ self.run_command('build_scripts_bin')
+ self.run_command('build_scripts_portagebin')
+ self.run_command('build_scripts_sbin')
+
+
+class x_clean(clean):
+ """ clean extended for doc & post-test cleaning """
+
+ @staticmethod
+ def clean_docs():
+ def get_doc_outfiles():
+ for dirpath, _dirnames, filenames in os.walk('doc'):
+ for f in filenames:
+ if f.endswith('.docbook') or f == 'custom.xsl':
+ pass
+ else:
+ yield os.path.join(dirpath, f)
+
+ # do not recurse
+ break
+
+ for f in get_doc_outfiles():
+ print('removing %s' % repr(f))
+ os.remove(f)
+
+ if os.path.isdir('doc/fragment'):
+ remove_tree('doc/fragment')
+
+ if os.path.isdir('epydoc'):
+ remove_tree('epydoc')
+
+ def clean_tests(self):
+ # do not remove incorrect dirs accidentally
+ top_dir = os.path.normpath(os.path.join(self.build_lib, '..'))
+ cprefix = os.path.commonprefix((self.build_base, top_dir))
+ if cprefix != self.build_base:
+ return
+
+ bin_dir = os.path.join(top_dir, 'bin')
+ if os.path.exists(bin_dir):
+ remove_tree(bin_dir)
+
+ conf_dir = os.path.join(top_dir, 'cnf')
+ if os.path.islink(conf_dir):
+ print('removing %s symlink' % repr(conf_dir))
+ os.unlink(conf_dir)
+
+ pni_file = os.path.join(top_dir, '.portage_not_installed')
+ if os.path.exists(pni_file):
+ print('removing %s' % repr(pni_file))
+ os.unlink(pni_file)
+
+ def clean_man(self):
+ man_dir = os.path.join(self.build_base, 'man')
+ if os.path.exists(man_dir):
+ remove_tree(man_dir)
+
+ def run(self):
+ if self.all:
+ self.clean_tests()
+ self.clean_docs()
+ self.clean_man()
+
+ clean.run(self)
+
+
+class x_install(install):
+ """ install command with extra Portage paths """
+
+ user_options = install.user_options + [
+ # note: $prefix and $exec_prefix are reserved for Python install
+ ('system-prefix=', None, "Prefix for architecture-independent data"),
+ ('system-exec-prefix=', None, "Prefix for architecture-specific data"),
+
+ ('bindir=', None, "Install directory for main executables"),
+ ('datarootdir=', None, "Data install root directory"),
+ ('docdir=', None, "Documentation install directory"),
+ ('htmldir=', None, "HTML documentation install directory"),
+ ('mandir=', None, "Manpage root install directory"),
+ ('portage-base=', 'b', "Portage install base"),
+ ('portage-bindir=', None, "Install directory for Portage internal-use executables"),
+ ('portage-datadir=', None, 'Install directory for data files'),
+ ('sbindir=', None, "Install directory for superuser-intended executables"),
+ ('sysconfdir=', None, 'System configuration path'),
+ ]
+
+ # note: the order is important for proper substitution
+ paths = [
+ ('system_prefix', '/usr'),
+ ('system_exec_prefix', '$system_prefix'),
+
+ ('bindir', '$system_exec_prefix/bin'),
+ ('sbindir', '$system_exec_prefix/sbin'),
+ ('sysconfdir', '/etc'),
+
+ ('datarootdir', '$system_prefix/share'),
+ ('docdir', '$datarootdir/doc/$package-$version'),
+ ('htmldir', '$docdir/html'),
+ ('mandir', '$datarootdir/man'),
+
+ ('portage_base', '$system_exec_prefix/lib/portage'),
+ ('portage_bindir', '$portage_base/bin'),
+ ('portage_datadir', '$datarootdir/portage'),
+
+ # not customized at the moment
+ ('logrotatedir', '$sysconfdir/logrotate.d'),
+ ('portage_confdir', '$portage_datadir/config'),
+ ('portage_setsdir', '$portage_confdir/sets'),
+ ]
+
+ def initialize_options(self):
+ install.initialize_options(self)
+
+ for key, default in self.paths:
+ setattr(self, key, default)
+ self.subst_paths = {}
+
+ def finalize_options(self):
+ install.finalize_options(self)
+
+ # substitute variables
+ new_paths = {
+ 'package': self.distribution.get_name(),
+ 'version': self.distribution.get_version(),
+ }
+ for key, _default in self.paths:
+ new_paths[key] = subst_vars(getattr(self, key), new_paths)
+ setattr(self, key, new_paths[key])
+ self.subst_paths = new_paths
+
+
+class x_install_data(install_data):
+ """ install_data with customized path support """
+
+ user_options = install_data.user_options
+
+ def initialize_options(self):
+ install_data.initialize_options(self)
+ self.build_base = None
+ self.paths = None
+
+ def finalize_options(self):
+ install_data.finalize_options(self)
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'))
+ self.set_undefined_options('install',
+ ('subst_paths', 'paths'))
+
+ def run(self):
+ self.run_command('build_man')
+
+ def process_data_files(df):
+ for d, files in df:
+ # substitute man sources
+ if d.startswith('$mandir/'):
+ files = [os.path.join(self.build_base, v) for v in files]
+
+ # substitute variables in path
+ d = subst_vars(d, self.paths)
+ yield (d, files)
+
+ old_data_files = self.data_files
+ self.data_files = process_data_files(self.data_files)
+
+ install_data.run(self)
+ self.data_files = old_data_files
+
+
+class x_install_lib(install_lib):
+ """ install_lib command with Portage path substitution """
+
+ user_options = install_lib.user_options
+
+ def initialize_options(self):
+ install_lib.initialize_options(self)
+ self.portage_base = None
+ self.portage_bindir = None
+ self.portage_confdir = None
+
+ def finalize_options(self):
+ install_lib.finalize_options(self)
+ self.set_undefined_options('install',
+ ('portage_base', 'portage_base'),
+ ('portage_bindir', 'portage_bindir'),
+ ('portage_confdir', 'portage_confdir'))
+
+ def install(self):
+ ret = install_lib.install(self)
+
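+ # patch version and path constants inside the installed modules so
+ # they reflect the configured install locations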
+ def rewrite_file(path, val_dict):
+ path = os.path.join(self.install_dir, path)
+ print('Rewriting %s' % path)
+ with codecs.open(path, 'r', 'utf-8') as f:
+ data = f.read()
+
+ for varname, val in val_dict.items():
+ regexp = r'(?m)^(%s\s*=).*$' % varname
+ repl = r'\1 %s' % repr(val)
+
+ data = re.sub(regexp, repl, data)
+
+ with codecs.open(path, 'w', 'utf-8') as f:
+ f.write(data)
+
+ rewrite_file('portage/__init__.py', {
+ 'VERSION': self.distribution.get_version(),
+ })
+ rewrite_file('portage/const.py', {
+ 'PORTAGE_BASE_PATH': self.portage_base,
+ 'PORTAGE_BIN_PATH': self.portage_bindir,
+ 'PORTAGE_CONFIG_PATH': self.portage_confdir,
+ })
+
+ return ret
+
+
+class x_install_scripts_custom(install_scripts):
+ def initialize_options(self):
+ install_scripts.initialize_options(self)
+ self.root = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install',
+ ('root', 'root'),
+ (self.var_name, 'install_dir'))
+ install_scripts.finalize_options(self)
+ self.build_dir = os.path.join(self.build_dir, self.dir_name)
+
+ # prepend root
+ if self.root is not None:
+ self.install_dir = change_root(self.root, self.install_dir)
+
+
+class x_install_scripts_bin(x_install_scripts_custom):
+ dir_name = 'bin'
+ var_name = 'bindir'
+
+
+class x_install_scripts_sbin(x_install_scripts_custom):
+ dir_name = 'sbin'
+ var_name = 'sbindir'
+
+
+class x_install_scripts_portagebin(x_install_scripts_custom):
+ dir_name = 'portage'
+ var_name = 'portage_bindir'
+
+
+class x_install_scripts(install_scripts):
+ def initialize_options(self):
+ install_scripts.initialize_options(self)
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self.run_command('install_scripts_bin')
+ self.run_command('install_scripts_portagebin')
+ self.run_command('install_scripts_sbin')
+
+
+class x_sdist(sdist):
+ """ sdist defaulting to .tar.bz2 format """
+
+ def finalize_options(self):
+ if self.formats is None:
+ self.formats = ['bztar']
+
+ sdist.finalize_options(self)
+
+
+class build_tests(x_build_scripts_custom):
+ """ Prepare build dir for running tests. """
+
+ def initialize_options(self):
+ x_build_scripts_custom.initialize_options(self)
+ self.build_base = None
+ self.build_lib = None
+
+ def finalize_options(self):
+ x_build_scripts_custom.finalize_options(self)
+ self.set_undefined_options('build',
+ ('build_base', 'build_base'),
+ ('build_lib', 'build_lib'))
+
+ # since we will be writing to $build_lib/.., it is important
+ # that we do not leave $build_base
+ self.top_dir = os.path.normpath(os.path.join(self.build_lib, '..'))
+ cprefix = os.path.commonprefix((self.build_base, self.top_dir))
+ if cprefix != self.build_base:
+ raise SystemError('build_lib must be a subdirectory of build_base')
+
+ self.build_dir = os.path.join(self.top_dir, 'bin')
+
+ def run(self):
+ self.run_command('build_py')
+
+ # install all scripts into $build_lib/../bin
+ # (we can't use a symlink since we want shebangs corrected)
+ x_build_scripts_custom.run(self)
+
+ # symlink 'cnf' directory
+ conf_dir = os.path.join(self.top_dir, 'cnf')
+ if os.path.exists(conf_dir):
+ if not os.path.islink(conf_dir):
+ raise SystemError('%s exists and is not a symlink (collision)'
+ % repr(conf_dir))
+ os.unlink(conf_dir)
+ conf_src = os.path.relpath('cnf', self.top_dir)
+ print('Symlinking %s -> %s' % (conf_dir, conf_src))
+ os.symlink(conf_src, conf_dir)
+
+ # create $build_lib/../.portage_not_installed
+ # to enable proper paths in tests
+ with open(os.path.join(self.top_dir, '.portage_not_installed'), 'w'):
+ pass
+
+
+class test(Command):
+ """ run tests """
+
+ user_options = []
+
+ def initialize_options(self):
+ self.build_lib = None
+
+ def finalize_options(self):
+ self.set_undefined_options('build',
+ ('build_lib', 'build_lib'))
+
+ def run(self):
+ self.run_command('build_tests')
+ subprocess.check_call([
+ sys.executable, '-bWd',
+ os.path.join(self.build_lib, 'portage/tests/runTests.py')
+ ])
+
+
+def find_packages():
+ for dirpath, _dirnames, filenames in os.walk('pym'):
+ if '__init__.py' in filenames:
+ yield os.path.relpath(dirpath, 'pym')
+
+
+def find_scripts():
+ for dirpath, _dirnames, filenames in os.walk('bin'):
+ for f in filenames:
+ if f not in ['deprecated-path']:
+ yield os.path.join(dirpath, f)
+
+
+def get_manpages():
+ linguas = os.environ.get('LINGUAS')
+ if linguas is not None:
+ linguas = linguas.split()
+
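+ # group man pages by section suffix; localized subdirectories are only
+ # installed when listed in LINGUAS (if LINGUAS is set)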
+ for dirpath, _dirnames, filenames in os.walk('man'):
+ groups = collections.defaultdict(list)
+ for f in filenames:
+ _fn, suffix = f.rsplit('.', 1)
+ groups[suffix].append(os.path.join(dirpath, f))
+
+ topdir = dirpath[len('man/'):]
+ if not topdir or linguas is None or topdir in linguas:
+ for g, mans in groups.items():
+ yield [os.path.join('$mandir', topdir, 'man%s' % g), mans]
+
+
+setup(
+ name = 'repoman',
+ version = '2.3.0',
+ url = 'https://wiki.gentoo.org/wiki/Project:Portage',
+ author = 'Gentoo Portage Development Team',
+ author_email = 'dev-portage@gentoo.org',
+
+ package_dir = {'': 'pym'},
+ packages = list(find_packages()),
+ # full list so the build & install commands see the scripts; the custom
+ # commands above split them per target directory
+ scripts = list(find_scripts()),
+
+ data_files = list(get_manpages()) + [
+ ['$docdir', ['NEWS', 'RELEASE-NOTES']],
+ ],
+
+ cmdclass = {
+ 'build': x_build,
+ 'build_man': build_man,
+ 'build_scripts': x_build_scripts,
+ 'build_scripts_bin': x_build_scripts_bin,
+ 'build_scripts_portagebin': x_build_scripts_portagebin,
+ 'build_scripts_sbin': x_build_scripts_sbin,
+ 'build_tests': build_tests,
+ 'clean': x_clean,
+ 'docbook': docbook,
+ 'epydoc': epydoc,
+ 'install': x_install,
+ 'install_data': x_install_data,
+ 'install_docbook': install_docbook,
+ 'install_epydoc': install_epydoc,
+ 'install_lib': x_install_lib,
+ 'install_scripts': x_install_scripts,
+ 'install_scripts_bin': x_install_scripts_bin,
+ 'install_scripts_portagebin': x_install_scripts_portagebin,
+ 'install_scripts_sbin': x_install_scripts_sbin,
+ 'sdist': x_sdist,
+ 'test': test,
+ },
+
+ classifiers = [
+ 'Development Status :: 5 - Production/Stable',
+ 'Environment :: Console',
+ 'Intended Audience :: System Administrators',
+ 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
+ 'Operating System :: POSIX',
+ 'Programming Language :: Python',
+ 'Topic :: System :: Installation/Setup'
+ ]
+)