aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.bzrignore1
-rw-r--r--NEWS84
-rwxr-xr-xbin/pwrapper4
-rwxr-xr-xbuild_docs.py2
-rw-r--r--dev-notes/developing.rst2
-rw-r--r--dev-notes/plugins.rst25
-rwxr-xr-xexamples/changed_use.py74
-rwxr-xr-xexamples/report_pkg_changes.py11
-rw-r--r--pkgcore/binpkg/repo_ops.py10
-rw-r--r--pkgcore/binpkg/repository.py20
-rw-r--r--pkgcore/binpkg/xpak.py4
-rw-r--r--pkgcore/cache/flat_hash.py3
-rw-r--r--pkgcore/cache/fs_template.py2
-rw-r--r--pkgcore/cache/metadata.py4
-rw-r--r--pkgcore/cache/template.py4
-rw-r--r--pkgcore/chksum/__init__.py8
-rw-r--r--pkgcore/chksum/defaults.py9
-rw-r--r--pkgcore/config/__init__.py35
-rw-r--r--pkgcore/config/basics.py157
-rw-r--r--pkgcore/config/central.py93
-rw-r--r--pkgcore/config/cparser.py5
-rw-r--r--pkgcore/config/dhcpformat.py17
-rw-r--r--pkgcore/config/domain.py2
-rw-r--r--pkgcore/config/mke2fsformat.py3
-rw-r--r--pkgcore/ebuild/atom.py8
-rw-r--r--pkgcore/ebuild/atom_restricts.py3
-rw-r--r--pkgcore/ebuild/conditionals.py4
-rw-r--r--pkgcore/ebuild/cpv.py4
-rw-r--r--pkgcore/ebuild/digest.py8
-rw-r--r--pkgcore/ebuild/domain.py46
-rw-r--r--pkgcore/ebuild/ebd.py47
-rw-r--r--pkgcore/ebuild/ebuild_built.py9
-rw-r--r--pkgcore/ebuild/ebuild_src.py15
-rw-r--r--pkgcore/ebuild/eclass_cache.py14
-rw-r--r--pkgcore/ebuild/filter_env.py2
-rw-r--r--pkgcore/ebuild/formatter.py418
-rw-r--r--pkgcore/ebuild/misc.py5
-rw-r--r--pkgcore/ebuild/overlay_repository.py2
-rw-r--r--pkgcore/ebuild/portage_conf.py44
-rw-r--r--pkgcore/ebuild/processor.py9
-rw-r--r--pkgcore/ebuild/profiles.py26
-rw-r--r--pkgcore/ebuild/repo_objs.py8
-rw-r--r--pkgcore/ebuild/repository.py24
-rw-r--r--pkgcore/ebuild/resolver.py4
-rw-r--r--pkgcore/ebuild/triggers.py16
-rw-r--r--pkgcore/fetch/__init__.py2
-rw-r--r--pkgcore/fetch/custom.py2
-rw-r--r--pkgcore/fs/contents.py6
-rw-r--r--pkgcore/fs/fs.py2
-rw-r--r--pkgcore/fs/livefs.py8
-rw-r--r--pkgcore/fs/ops.py6
-rw-r--r--pkgcore/fs/tar.py7
-rw-r--r--pkgcore/interfaces/data_source.py2
-rw-r--r--pkgcore/interfaces/format.py2
-rw-r--r--pkgcore/interfaces/observer.py11
-rw-r--r--pkgcore/interfaces/repo.py7
-rw-r--r--pkgcore/merge/engine.py7
-rw-r--r--pkgcore/merge/triggers.py7
-rw-r--r--pkgcore/package/conditionals.py11
-rw-r--r--pkgcore/package/metadata.py4
-rw-r--r--pkgcore/pkgsets/filelist.py12
-rw-r--r--pkgcore/pkgsets/glsa.py17
-rw-r--r--pkgcore/plugin.py79
-rw-r--r--pkgcore/plugins/pkgcore_ebuild_built.py8
-rw-r--r--pkgcore/plugins/pkgcore_ebuild_src.py8
-rw-r--r--pkgcore/plugins/pkgcore_formats_default.py9
-rw-r--r--pkgcore/repository/configured.py4
-rw-r--r--pkgcore/repository/misc.py6
-rw-r--r--pkgcore/repository/multiplex.py6
-rw-r--r--pkgcore/repository/prototype.py7
-rw-r--r--pkgcore/repository/virtual.py2
-rw-r--r--pkgcore/repository/visibility.py2
-rw-r--r--pkgcore/repository/wrapper.py2
-rw-r--r--pkgcore/resolver/choice_point.py2
-rw-r--r--pkgcore/resolver/plan.py27
-rw-r--r--pkgcore/restrictions/boolean.py10
-rw-r--r--pkgcore/restrictions/packages.py6
-rw-r--r--pkgcore/restrictions/restriction.py4
-rw-r--r--pkgcore/restrictions/util.py2
-rw-r--r--pkgcore/restrictions/values.py22
-rw-r--r--pkgcore/scripts/__init__.py6
-rw-r--r--pkgcore/scripts/pconfig.py55
-rw-r--r--pkgcore/scripts/pmaint.py123
-rw-r--r--pkgcore/scripts/pmerge.py195
-rw-r--r--pkgcore/scripts/pplugincache.py4
-rw-r--r--pkgcore/scripts/pquery.py5
-rw-r--r--pkgcore/scripts/pregen.py117
-rw-r--r--pkgcore/spawn.py7
-rw-r--r--pkgcore/sync/base.py12
-rw-r--r--pkgcore/sync/bzr.py4
-rw-r--r--pkgcore/sync/rsync.py4
-rw-r--r--pkgcore/test/__init__.py7
-rw-r--r--pkgcore/test/chksum/test_defaults.py4
-rw-r--r--pkgcore/test/chksum/test_init.py4
-rw-r--r--pkgcore/test/config/test_basics.py220
-rw-r--r--pkgcore/test/config/test_central.py112
-rw-r--r--pkgcore/test/config/test_cparser.py7
-rw-r--r--pkgcore/test/config/test_dhcpformat.py13
-rw-r--r--pkgcore/test/config/test_init.py21
-rw-r--r--pkgcore/test/ebuild/test_atom.py17
-rw-r--r--pkgcore/test/ebuild/test_conditionals.py7
-rw-r--r--pkgcore/test/ebuild/test_cpv.py6
-rw-r--r--pkgcore/test/ebuild/test_ebuild_src.py33
-rw-r--r--pkgcore/test/ebuild/test_eclass_cache.py6
-rw-r--r--pkgcore/test/ebuild/test_formatter.py339
-rw-r--r--pkgcore/test/ebuild/test_misc.py3
-rw-r--r--pkgcore/test/ebuild/test_profiles.py6
-rw-r--r--pkgcore/test/ebuild/test_repository.py2
-rw-r--r--pkgcore/test/fetch/test_base.py6
-rw-r--r--pkgcore/test/fetch/test_init.py5
-rw-r--r--pkgcore/test/fs/test_contents.py6
-rw-r--r--pkgcore/test/fs/test_ops.py4
-rw-r--r--pkgcore/test/merge/test_triggers.py77
-rw-r--r--pkgcore/test/misc.py56
-rw-r--r--pkgcore/test/package/test_base.py2
-rw-r--r--pkgcore/test/package/test_mutated.py2
-rw-r--r--pkgcore/test/pkgsets/test_glsa.py5
-rw-r--r--pkgcore/test/repository/test_multiplex.py6
-rw-r--r--pkgcore/test/repository/test_prototype.py2
-rw-r--r--pkgcore/test/restrictions/test_packages.py14
-rw-r--r--pkgcore/test/restrictions/test_restriction.py6
-rw-r--r--pkgcore/test/scripts/helpers.py76
-rw-r--r--pkgcore/test/scripts/test_pconfig.py14
-rw-r--r--pkgcore/test/scripts/test_pebuild.py1
-rw-r--r--pkgcore/test/scripts/test_pmaint.py42
-rw-r--r--pkgcore/test/scripts/test_pmerge.py11
-rw-r--r--pkgcore/test/scripts/test_pregen.py20
-rw-r--r--pkgcore/test/test_demandload_usage.py4
-rw-r--r--pkgcore/test/test_plugin.py137
-rw-r--r--pkgcore/test/test_spawn.py4
-rw-r--r--pkgcore/test/util/test_caching.py168
-rw-r--r--pkgcore/test/util/test_commandline.py78
-rw-r--r--pkgcore/test/util/test_compatibility.py30
-rw-r--r--pkgcore/test/util/test_containers.py201
-rw-r--r--pkgcore/test/util/test_currying.py156
-rw-r--r--pkgcore/test/util/test_dependant_methods.py72
-rw-r--r--pkgcore/test/util/test_descriptors.py22
-rw-r--r--pkgcore/test/util/test_file.py196
-rw-r--r--pkgcore/test/util/test_formatters.py129
-rw-r--r--pkgcore/test/util/test_iterables.py100
-rw-r--r--pkgcore/test/util/test_klass.py184
-rw-r--r--pkgcore/test/util/test_lists.py153
-rw-r--r--pkgcore/test/util/test_mappings.py484
-rw-r--r--pkgcore/test/util/test_modules.py106
-rw-r--r--pkgcore/test/util/test_obj.py138
-rw-r--r--pkgcore/test/util/test_osutils.py404
-rw-r--r--pkgcore/test/util/test_parserestrict.py4
-rw-r--r--pkgcore/test/util/test_weakrefs.py42
-rw-r--r--pkgcore/util/bzip2.py2
-rw-r--r--pkgcore/util/caching.py82
-rw-r--r--pkgcore/util/commandline.py84
-rw-r--r--pkgcore/util/compatibility.py29
-rw-r--r--pkgcore/util/containers.py171
-rw-r--r--pkgcore/util/currying.py129
-rw-r--r--pkgcore/util/demandload.py202
-rw-r--r--pkgcore/util/dependant_methods.py85
-rw-r--r--pkgcore/util/descriptors.py28
-rw-r--r--pkgcore/util/file.py270
-rw-r--r--pkgcore/util/formatters.py476
-rw-r--r--pkgcore/util/iterables.py202
-rw-r--r--pkgcore/util/klass.py95
-rw-r--r--pkgcore/util/lists.py186
-rw-r--r--pkgcore/util/mappings.py414
-rw-r--r--pkgcore/util/modules.py45
-rw-r--r--pkgcore/util/obj.py206
-rw-r--r--pkgcore/util/osutils/__init__.py341
-rw-r--r--pkgcore/util/osutils/native_readdir.py41
-rw-r--r--pkgcore/util/parserestrict.py2
-rw-r--r--pkgcore/util/pickling.py18
-rw-r--r--pkgcore/util/tar.py35
-rw-r--r--pkgcore/util/weakrefs.py12
-rw-r--r--pkgcore/util/xml/__init__.py46
-rw-r--r--pkgcore/util/xml/bundled_elementtree.py1254
-rw-r--r--pkgcore/vdb/__init__.py2
-rw-r--r--pkgcore/vdb/contents.py13
-rw-r--r--pkgcore/vdb/ondisk.py22
-rw-r--r--pkgcore/vdb/virtuals.py13
-rw-r--r--setup.py40
-rw-r--r--src/atom.c6
-rw-r--r--src/caching.c711
-rw-r--r--src/chflags.c2
-rw-r--r--src/common.h89
-rw-r--r--src/cpv.c4
-rw-r--r--src/depset.c2
-rw-r--r--src/filter_env.c2
-rw-r--r--src/functoolsmodule.c279
-rw-r--r--src/heapdef.h80
-rw-r--r--src/klass.c411
-rw-r--r--src/lists.c482
-rw-r--r--src/posix.c721
-rw-r--r--src/py24-compatibility.h35
-rw-r--r--src/readdir.c397
-rw-r--r--src/restrictions.c18
193 files changed, 2661 insertions, 11243 deletions
diff --git a/.bzrignore b/.bzrignore
index 4b763f98d..345acd34b 100644
--- a/.bzrignore
+++ b/.bzrignore
@@ -8,4 +8,5 @@
./dist
./_trial_temp
./pkgcore/plugins/plugincache
+./pkgcore/plugins/plugincache2
./man/*.1
diff --git a/NEWS b/NEWS
index 4053abf2f..1af2a003d 100644
--- a/NEWS
+++ b/NEWS
@@ -328,14 +328,14 @@ pkgcore 0.2:
* massive resolver cleanup, and general fixes.
* rewritten plugins system, register_plugins is no longer used.
-
+
* paludis flat_list cache read/write support.
* portage flat_list cache write support (cache used for
$PORTDIR/metadata/sync)
-
+
* pebuild/pregen/pclone_cache: heavy UI cleanup.
-
+
* pquery:
* prettier printing of depends/rdepends/post_rdepends under -v
* print revdep reasons
@@ -345,7 +345,7 @@ pkgcore 0.2:
regex by default for --maintainer style options.
* added repo_id atom extension; see doc/extended-atom-syntax.rst for details.
- short version, sys-apps/portage::gentoo would match portage *only* from
+ short version, sys-apps/portage::gentoo would match portage *only* from
``gentoo`` repository.
* overlays now combine mirror targets from their parent repository, and
@@ -357,14 +357,14 @@ pkgcore 0.2:
repositories for example).
* mke2fs (literal /etc/mke2fs.conf file) akin configuration format was
- added, pkgcore.config.mke2fsformat.config_from_file .
+ added, pkgcore.config.mke2fsformat.config_from_file.
* expanded test coverage.
-
+
* merged standalone test runner into setup.py; prefered way of running it is
``python setup.py test`` now.
-* ongoing portage configuration support additions-
+* ongoing portage configuration support additions-
* FEATURES=collision-protect support
* INSTALL_MASK support, FEATURES noinfo, nodoc, and noman support.
* /etc/portage/package.* files can be directories holding seperate files
@@ -375,7 +375,7 @@ pkgcore 0.2:
* performance improvements:
* cpython extensions of select os.path.* functionality; 20x boost for what
was converted over (stdlib's posix module is a bit inefficient).
-
+
* cpython extension for file io in pkgcore.util.osutils: 7x faster on ENOENT
cases, 4x-5x on actual reading of small files (think cache files). If
iterating over lines of a file, use pkgcore.util.osutils.readlines- again,
@@ -384,7 +384,7 @@ pkgcore 0.2:
* partial cpython reimplementation of atom code; mainly parsing, and
critical __getattr__ invocation (2+x faster parse).
-
+
* partial cpython reimplementation of depset code; strictly just parsing.
Faster (given), but mainly is able to do optimizations to the depset
cheaply that python side is heavily slowed down by- ( x ( y ) ) becomes
@@ -395,7 +395,7 @@ pkgcore 0.2:
restrict objects now are around 1-3us for new instantation, .5 to 1us
for getting a cached obj instead of instantiating).
- * bug corrected in base repo classes identify_candidates method; should now
+ * bug corrected in base repo classes identify_candidates method; should now
force a full walk of the repo only when absolutely required.
* chksuming now does a single walk over a file for all checksummers,
@@ -403,33 +403,33 @@ pkgcore 0.2:
performance.
* vdb virtuals caching; massive performance boost via reduced IO. Relies on
- mtime checks of vdb pkg directories for staleness detection,
+ mtime checks of vdb pkg directories for staleness detection,
auto-regenerating itself as needed.
* heavy profile code cleanup; should only read each common profile node once
- now when loading up multiple profiles (pcheck). Far easier code to read
+ now when loading up multiple profiles (pcheck). Far easier code to read
in addition.
-* cache eclass staleness verification now relies on mtime comparison only-
+* cache eclass staleness verification now relies on mtime comparison only-
allows for eclasses to move between repos; matches portage behaviour.
-* pkgcore.util.caching.*, via __force_caching__ class attr in consumers, can
+* pkgcore.util.caching.*, via __force_caching__ class attr in consumers, can
be used to force singleton instance creation/caching (error if unhashable).
* ebuild support:
- * PORTAGE_ACTUAL_DISTDIR was reenabled, thus cvs/svn equivalent ebuilds are
+ * PORTAGE_ACTUAL_DISTDIR was reenabled, thus cvs/svn equivalent ebuilds are
usable once again.
- * fixed pkgcore's pkgcore emulation of has_version/best_version matching
- behaviour for old style virtuals to match portages (oddity, but ebuilds
+ * fixed pkgcore's pkgcore emulation of has_version/best_version matching
+ behaviour for old style virtuals to match portages (oddity, but ebuilds
rely on the goofy behaviour).
- * various fixups to unpack function; should match portage behaviour as of
+ * various fixups to unpack function; should match portage behaviour as of
01/07 now.
* if FEATURES=test, set USE=test; if USE=test has been explicitly masked for
a package, disable src_test run; matches portage 2.1.2 behaviour.
* cleanup build directory, and unmerge directories upon finishing
-* filter-env now is accessible directly via python; pkgcore.ebuild.filter_env .
- Needs further work prior to being usable for pcheck inspection of ebuilds,
+* filter-env now is accessible directly via python; pkgcore.ebuild.filter_env.
+ Needs further work prior to being usable for pcheck inspection of ebuilds,
but it's a good start.
@@ -466,54 +466,54 @@ pkgcore 0.1.2:
pkgcore 0.1.1:
* hang fix for test_filter_env
-
+
* package.keywords fixes: no longer incremental, supports '*' and '~*'
properly
-
+
* FEATURES="userpriv" support works again.
-
+
* pmerge repository ordering now behaves properly; prefers src ebuilds, then
built pkgs; -k inverts that (previously was semi-undefined)
-
+
* binpkg fixes: run setup phase
-
-* replace op fixes: force seperate WORKDIR for unmerge to protect against
+
+* replace op fixes: force seperate WORKDIR for unmerge to protect against
env collisions
-
+
* loosened category rules: allow _. chars to support cross-dev hack.
-
+
* build fixes: make $A unique to avoid duplicate unpacks; force distdir
creation regardless of whether or not the pkg has any stated SRC_URI
(fixes cvs and subversion eclsas usage). Fix sandbox execution to chdir
to an existant directory (sandbox will fail if ran from a nonexistant dir).
-
-* change DelayedInstantiation objects to track __class__ themselves; this
+
+* change DelayedInstantiation objects to track __class__ themselves; this
fixes pquery to properly shutdown when ctrl+c'd (previously could swallow
the interrupt due to cpython isinstance swallowing KeyboardInterrupt).
-
-
-pkgcore 0.1:
+
+
+pkgcore 0.1:
Initial release.
-
+
* Sync functionality doesn't yet exist (pmaint script will be in 0.2)
-
-* pmerge vdb modification requires --force; this will be disabled in 0.2,
+
+* pmerge vdb modification requires --force; this will be disabled in 0.2,
mainly is in place so that folks who are just looking, don't inadvertantly
trigger an actual modification.
-
+
* not all portage FEATURES are implemented; same for QA.
-
-* If overlays are in use, pkgcore may defer to its' seperate cache to avoid
+
+* If overlays are in use, pkgcore may defer to its' seperate cache to avoid
pkgcore causing cache regen for portage (and vice versa); this occurs due
- to pkgcore treating overlays as their own repo and combining them at a
+ to pkgcore treating overlays as their own repo and combining them at a
higher level; portage smushes them all together thus rendering each subtree
unusable in any standalone fashion.
* pkgcore is far more anal about blocking bad behaviour in ebuilds during
- metadata regeneration; tree is clean, but if you do something wrong in
+ metadata regeneration; tree is clean, but if you do something wrong in
global scope, it *will* catch it and block it.
-
+
* EBD; daemonized ebuild.sh processing (effectively), pkgcore reuses old
ebuild.sh processes to avoid bash startup, speeding regen up by roughly
2x.
diff --git a/bin/pwrapper b/bin/pwrapper
index b8a52efcc..ef1fa630b 100755
--- a/bin/pwrapper
+++ b/bin/pwrapper
@@ -12,8 +12,8 @@ import sys
sys.path.insert(0, osp.dirname(osp.dirname(osp.abspath(__file__))))
-from pkgcore.util import modules, commandline
-
+from pkgcore.util import commandline
+from snakeoil import modules
if __name__ == '__main__':
name = osp.basename(sys.argv[0])
diff --git a/build_docs.py b/build_docs.py
index 4442bfb98..40285355e 100755
--- a/build_docs.py
+++ b/build_docs.py
@@ -15,7 +15,7 @@ import docutils.utils
sys.path.append('man')
import manpage
-from pkgcore.util import modules
+from snakeoil import modules
# (limited) support for trac wiki links.
# This is copied and hacked up from rst.py in the trac source.
diff --git a/dev-notes/developing.rst b/dev-notes/developing.rst
index af234f2b0..b38bca064 100644
--- a/dev-notes/developing.rst
+++ b/dev-notes/developing.rst
@@ -36,7 +36,7 @@ find the pkgcore code. For example::
Now test to see if it works::
- $ python -c'import pkgcore'
+ $ python -c 'import pkgcore'
Python will scan pkgcore, see the pkgcore directory in it (and that it has
__init__.py), and use that.
diff --git a/dev-notes/plugins.rst b/dev-notes/plugins.rst
index 7296b6ec7..5179b2924 100644
--- a/dev-notes/plugins.rst
+++ b/dev-notes/plugins.rst
@@ -90,19 +90,22 @@ Import behaviour
Assuming the cache is working correctly (it was generated after
installing a plugin as root) pkgcore will import all plugin modules
-containing plugins for a requested key. No more, no less. The priority
-and disabled values are not cached (intentionally, since they may
-change without a cache invalidation), so it has to check all plugins
-for the key even if only one of them is requested (``get_plugin``, and
-the same will usually be true for ``get_plugins``).
+containing plugins for a requested key in priority order until it hits
+one that is not disabled. The "disabled" value is not cached (a plugin
+that is unconditionally disabled makes no sense), but the priority
+value is. You can fake a dynamic priority by having two instances of
+your plugin registered and only one of them enabled at the same
+time.
This means it makes sense to have only one kind of plugin per plugin
-module (unless the required imports overlap).
-
-The disabled and priority values are not cached by the plugin system
-after the plugin module is imported. This means they should be simple
-attributes (either completely constant or set at import time) or
-properties that do their own caching.
+module (unless the required imports overlap): this avoids pulling in
+imports for other kinds of plugin when one kind of plugin is
+requested.
+
+The disabled value is not cached by the plugin system after the plugin
+module is imported. This means it should be a simple attribute (either
+completely constant or set at import time) or property that does its
+own caching.
Adding a plugin package
=======================
diff --git a/examples/changed_use.py b/examples/changed_use.py
new file mode 100755
index 000000000..a0b715704
--- /dev/null
+++ b/examples/changed_use.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+
+# Copyright 2007 Charlie Shepherd
+
+import sys
+
+try:
+ from pkgcore.util import commandline
+ from pkgcore.restrictions.boolean import OrRestriction
+ from pkgcore.util.repo_utils import get_virtual_repos, get_raw_repos
+ from pkgcore.repository.multiplex import tree as multiplex_tree
+except ImportError:
+ print >> sys.stderr, 'Cannot import pkgcore!'
+ print >> sys.stderr, 'Verify it is properly installed and/or ' \
+ 'PYTHONPATH is set correctly.'
+ print >> sys.stderr, 'Add --debug to the commandline for a traceback.'
+ if '--debug' in sys.argv:
+ raise
+ sys.exit(1)
+
+class OptionParser(commandline.OptionParser):
+
+ def __init__(self, **kwargs):
+ commandline.OptionParser.__init__(
+ self, description=__doc__, usage='%prog [options]',
+ **kwargs)
+ self.add_option('--repo', action='callback', type='string',
+ callback=commandline.config_callback,
+ callback_args=('repo',),
+ help='repo to use (default from domain if omitted).')
+ self.add_option('--verbose', '-v', action='store_true', default=False,
+ help='print packages that have not changed too')
+ self.add_option('--quiet', '-q', action='store_true', default=False,
+ help='don\'t print changed useflags')
+
+ def check_values(self, values, args):
+ values, args = commandline.OptionParser.check_values(
+ self, values, args)
+
+ domain = values.config.get_default('domain')
+
+ values.vdb = domain.vdb[0]
+ # Get repo(s) to operate on.
+ if values.repo:
+ repos = (values.repo,)
+ else:
+ repos = values.config.get_default('domain').all_repos
+ values.repo = multiplex_tree(*get_virtual_repos(get_raw_repos(repos), False))
+
+ values.use = domain.use
+
+ values.restrict = OrRestriction(self.convert_to_restrict(args))
+ return values, ()
+
+def main(options, out, err):
+ repo = options.repo
+ for built in options.vdb.itermatch(options.restrict):
+ current = repo.match(built.versioned_atom)
+ if current:
+ current = current[0]
+ oldflags = built.iuse & set(built.use)
+ newflags = current.iuse & options.use
+ if oldflags != newflags:
+ changed_flags = oldflags ^ newflags
+ if options.quiet:
+ out.write(current.cpvstr)
+ else:
+ out.write("for package %s, %d flags have changed:\n\t%s" %
+ (current.cpvstr, len(changed_flags), ' '.join(changed_flags)))
+ else:
+ if options.verbose: out.write("%s is the same as it was before" % current.cpvstr)
+
+if __name__ == '__main__':
+ commandline.main({None: (OptionParser, main)})
diff --git a/examples/report_pkg_changes.py b/examples/report_pkg_changes.py
index 43b839610..1804a63e0 100755
--- a/examples/report_pkg_changes.py
+++ b/examples/report_pkg_changes.py
@@ -1,14 +1,15 @@
#!/usr/bin/python
import sys, os
-from pkgcore.config import load_config
-# we use a WorldFile since it *currently* forces unversioned atoms.
-from pkgcore.pkgsets.filelist import WorldFile
+from pkgcore.config import load_config
from pkgcore.util.packages import groupby_pkg
-from pkgcore.util.file import iter_read_bash
-from pkgcore.util.osutils import listdir_files
from pkgcore.ebuild.atom import atom
+# we use a WorldFile since it *currently* forces unversioned atoms.
+from pkgcore.pkgsets.filelist import WorldFile
+
+from snakeoil.fileutils import iter_read_bash
+from snakeoil.osutils import listdir_files
def main(target_repo, seen, moves):
# could build the atom from categories/packages, but prefer this;
diff --git a/pkgcore/binpkg/repo_ops.py b/pkgcore/binpkg/repo_ops.py
index 022f982eb..bae5e0e0f 100644
--- a/pkgcore/binpkg/repo_ops.py
+++ b/pkgcore/binpkg/repo_ops.py
@@ -2,14 +2,16 @@
# License: GPL2
import os, errno
-from pkgcore.util.osutils import join as pjoin
+
from pkgcore.interfaces import repo as repo_interfaces
from pkgcore.fs import tar
from pkgcore.binpkg import xpak
-from pkgcore.util import osutils
-from pkgcore.util.bzip2 import compress
from pkgcore.ebuild.conditionals import stringify_boolean
-from pkgcore.util.demandload import demandload
+
+from snakeoil import osutils
+from pkgcore.util.bzip2 import compress
+from snakeoil.osutils import join as pjoin
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.log:logger")
def discern_loc(base, pkg):
diff --git a/pkgcore/binpkg/repository.py b/pkgcore/binpkg/repository.py
index cb60f7cd8..b27bd211c 100644
--- a/pkgcore/binpkg/repository.py
+++ b/pkgcore/binpkg/repository.py
@@ -6,22 +6,24 @@ binpkg ebuild repository
"""
import os, stat
-from pkgcore.util.osutils import join as pjoin
-from pkgcore.repository import prototype, errors
-#needed to grab the PN
-from pkgcore.ebuild.cpv import CPV as cpv
-from pkgcore.util.currying import partial
+from pkgcore.repository import prototype, errors
from pkgcore.merge import triggers
from pkgcore.plugin import get_plugin
-from pkgcore.util.mappings import DictMixin
-from pkgcore.util.osutils import listdir_dirs, listdir_files
from pkgcore.binpkg.xpak import Xpak
from pkgcore.fs.tar import generate_contents
-from pkgcore.util.bzip2 import decompress
from pkgcore.ebuild.ebuild_built import pkg_uses_default_preinst
from pkgcore.config import ConfigHint
-from pkgcore.util.demandload import demandload
+#needed to grab the PN
+from pkgcore.ebuild.cpv import CPV as cpv
+
+from snakeoil.currying import partial
+from snakeoil.mappings import DictMixin
+from snakeoil.osutils import listdir_dirs, listdir_files
+from pkgcore.util.bzip2 import decompress
+from snakeoil.osutils import join as pjoin
+
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.merge:engine "
"pkgcore.fs.livefs:scan "
diff --git a/pkgcore/binpkg/xpak.py b/pkgcore/binpkg/xpak.py
index 07d372a3d..c5102b393 100644
--- a/pkgcore/binpkg/xpak.py
+++ b/pkgcore/binpkg/xpak.py
@@ -6,8 +6,8 @@ XPAK container support
"""
import struct
-from pkgcore.util.mappings import OrderedDict
-from pkgcore.util.demandload import demandload
+from snakeoil.mappings import OrderedDict
+from snakeoil.demandload import demandload
demandload(globals(), "os errno")
#
diff --git a/pkgcore/cache/flat_hash.py b/pkgcore/cache/flat_hash.py
index 44cfbbca2..07882432c 100644
--- a/pkgcore/cache/flat_hash.py
+++ b/pkgcore/cache/flat_hash.py
@@ -6,11 +6,10 @@ per key file based backend
"""
import os, stat, errno
-from pkgcore.util.osutils import join as pjoin, readlines
from pkgcore.cache import fs_template
from pkgcore.cache import errors
from pkgcore.config import ConfigHint
-
+from snakeoil.osutils import join as pjoin, readlines
class database(fs_template.FsBased):
diff --git a/pkgcore/cache/fs_template.py b/pkgcore/cache/fs_template.py
index 5ab34450f..209fe5968 100644
--- a/pkgcore/cache/fs_template.py
+++ b/pkgcore/cache/fs_template.py
@@ -8,7 +8,7 @@ template for fs based backends
import os
from pkgcore.cache import template
from pkgcore.os_data import portage_gid
-from pkgcore.util.osutils import ensure_dirs
+from snakeoil.osutils import ensure_dirs
class FsBased(template.database):
"""Template wrapping fs needed options.
diff --git a/pkgcore/cache/metadata.py b/pkgcore/cache/metadata.py
index 9b2887c21..22db70c8e 100644
--- a/pkgcore/cache/metadata.py
+++ b/pkgcore/cache/metadata.py
@@ -8,11 +8,11 @@ cache backend designed for rsynced tree's pregenerated metadata.
import os
import errno
-from pkgcore.util.osutils import join as pjoin
from pkgcore.cache import flat_hash, errors
from pkgcore.config import ConfigHint
from pkgcore.ebuild import eclass_cache
-from pkgcore.util.mappings import ProtectedDict
+from snakeoil.osutils import join as pjoin
+from snakeoil.mappings import ProtectedDict
# store the current key order *here*.
diff --git a/pkgcore/cache/template.py b/pkgcore/cache/template.py
index bdd05c3fb..a1ac94e2e 100644
--- a/pkgcore/cache/template.py
+++ b/pkgcore/cache/template.py
@@ -7,8 +7,8 @@ template for cache backend classes
"""
from pkgcore.cache import errors
-from pkgcore.util.mappings import ProtectedDict
-from pkgcore.util.obj import make_SlottedDict_kls
+from snakeoil.mappings import ProtectedDict
+from snakeoil.obj import make_SlottedDict_kls
# temp hack for .2
from pkgcore.ebuild.const import metadata_keys
diff --git a/pkgcore/chksum/__init__.py b/pkgcore/chksum/__init__.py
index f8bd1274e..b1efaa5aa 100644
--- a/pkgcore/chksum/__init__.py
+++ b/pkgcore/chksum/__init__.py
@@ -6,12 +6,12 @@ chksum verification/generation subsystem
"""
from pkgcore.interfaces.data_source import base as base_data_source
-from pkgcore.util.demandload import demandload
+from snakeoil.demandload import demandload
demandload(globals(), "os sys "
- "pkgcore.util.modules:load_module "
- "pkgcore.util.osutils:listdir_files "
"pkgcore.log:logger "
- "pkgcore.chksum.defaults:loop_over_file ")
+ "pkgcore.chksum.defaults:loop_over_file "
+ "snakeoil.modules:load_module "
+ "snakeoil.osutils:listdir_files ")
chksum_types = {}
__inited__ = False
diff --git a/pkgcore/chksum/defaults.py b/pkgcore/chksum/defaults.py
index f25fec2f4..57f05403f 100644
--- a/pkgcore/chksum/defaults.py
+++ b/pkgcore/chksum/defaults.py
@@ -5,11 +5,10 @@
"""
default chksum handlers implementation- sha1, sha256, rmd160, and md5
"""
-
-from pkgcore.util.currying import partial
-from pkgcore.util import modules
from pkgcore.interfaces.data_source import base as base_data_source
-from pkgcore.util.demandload import demandload
+from snakeoil.currying import partial
+from snakeoil import modules
+from snakeoil.demandload import demandload
demandload(globals(), "os")
blocksize = 32768
@@ -110,7 +109,7 @@ class Chksummer(object):
# 37 +/- 1 msec roughly
#
# python2.5 -m timeit -s
-# 'from pkgcore.chksum import defaults; from pkgcore.util import currying;'
+# 'from pkgcore.chksum import defaults; from snakeoil import currying;'
# -s 'import hashlib; hash = currying.pre_curry(hashlib.new, "md5")'
# 'defaults.loop_over_file(hash, "/home/marienz/tmp/Python-2.4.3.tar.bz2")'
# 37 +/- 1 msec roughly
diff --git a/pkgcore/config/__init__.py b/pkgcore/config/__init__.py
index a8cd73437..026150407 100644
--- a/pkgcore/config/__init__.py
+++ b/pkgcore/config/__init__.py
@@ -15,15 +15,14 @@ class ConfigHint(object):
"""hint for introspection supplying overrides"""
# be aware this is used in clone
- __slots__ = ("types", "positional", "required", "typename", "incrementals",
- "allow_unknowns", "doc")
+ __slots__ = (
+ "types", "positional", "required", "typename", "allow_unknowns", "doc")
def __init__(self, types=None, positional=None, required=None, doc=None,
- incrementals=None, typename=None, allow_unknowns=False):
+ typename=None, allow_unknowns=False):
self.types = types or {}
self.positional = positional or []
self.required = required or []
- self.incrementals = incrementals or []
self.typename = typename
self.allow_unknowns = allow_unknowns
self.doc = doc
@@ -48,7 +47,7 @@ def configurable(*args, **kwargs):
def load_config(user_conf_file=USER_CONF_FILE,
system_conf_file=SYSTEM_CONF_FILE,
- debug=False):
+ debug=False, prepend_sources=(), skip_config_files=False):
"""
the main entry point for any code looking to use pkgcore.
@@ -63,17 +62,19 @@ def load_config(user_conf_file=USER_CONF_FILE,
from pkgcore.plugin import get_plugins
import os
- have_system_conf = os.path.isfile(system_conf_file)
- have_user_conf = os.path.isfile(user_conf_file)
- configs = []
- if have_system_conf or have_user_conf:
- if have_user_conf:
- configs.append(cparser.config_from_file(open(user_conf_file)))
- if have_system_conf:
- configs.append(cparser.config_from_file(open(system_conf_file)))
- else:
- # make.conf...
- from pkgcore.ebuild.portage_conf import config_from_make_conf
- configs.append(config_from_make_conf())
+ configs = list(prepend_sources)
+ if not skip_config_files:
+ have_system_conf = os.path.isfile(system_conf_file)
+ have_user_conf = os.path.isfile(user_conf_file)
+ if have_system_conf or have_user_conf:
+ if have_user_conf:
+ configs.append(cparser.config_from_file(open(user_conf_file)))
+ if have_system_conf:
+ configs.append(
+ cparser.config_from_file(open(system_conf_file)))
+ else:
+ # make.conf...
+ from pkgcore.ebuild.portage_conf import config_from_make_conf
+ configs.append(config_from_make_conf())
configs.extend(get_plugins('global_config'))
return central.ConfigManager(configs, debug=debug)
diff --git a/pkgcore/config/basics.py b/pkgcore/config/basics.py
index a209f3640..59012265f 100644
--- a/pkgcore/config/basics.py
+++ b/pkgcore/config/basics.py
@@ -11,9 +11,9 @@ L{configuration exception<pkgcore.config.errors.ConfigurationError>}
from pkgcore.config import errors, configurable
-from pkgcore.util import currying
-from pkgcore.util.demandload import demandload
-demandload(globals(), "pkgcore.util:modules")
+from snakeoil import currying
+from snakeoil.demandload import demandload
+demandload(globals(), "snakeoil:modules")
type_names = ("list", "str", "bool", "int")
@@ -31,7 +31,6 @@ class ConfigType(object):
@ivar callable: callable used to instantiate this type.
@ivar types: dict mapping key names to type strings.
@ivar positional: container holding positional arguments.
- @ivar incrementals: container holding incrementals.
@ivar required: container holding required arguments.
@ivar allow_unknowns: controls whether unknown settings should error.
"""
@@ -89,7 +88,6 @@ class ConfigType(object):
for arg in self.positional:
self.types[arg] = 'str'
self.required = tuple(self.positional)
- self.incrementals = []
self.allow_unknowns = False
# Process ConfigHint (if any)
@@ -102,8 +100,6 @@ class ConfigType(object):
self.positional = tuple(hint_overrides.positional)
if hint_overrides.typename:
self.name = hint_overrides.typename
- if hint_overrides.incrementals:
- self.incrementals = hint_overrides.incrementals
if hint_overrides.doc:
self.doc = hint_overrides.doc
self.allow_unknowns = hint_overrides.allow_unknowns
@@ -225,6 +221,143 @@ class DictConfigSection(ConfigSection):
raise
+class FakeIncrementalDictConfigSection(ConfigSection):
+
+ """Turns a dict and a conversion function into a ConfigSection."""
+
+ def __init__(self, conversion_func, source_dict):
+ """Initialize.
+
+        A request for a section of a list type will look for
+        name.prepend and name.append keys too, using those for values
+        prepended/appended to the inherited values. The conversion
+        func should return a single sequence for list types; for
+        'repr' it should return a (type name, value) tuple.
+
+ @type conversion_func: callable.
+ @param conversion_func: called with a ConfigManager, a value from
+ the dict and a type name.
+ @type source_dict: dict with string keys and arbitrary values.
+ """
+ ConfigSection.__init__(self)
+ self.func = conversion_func
+ self.dict = source_dict
+
+ def __contains__(self, name):
+ return name in self.dict or name + '.append' in self.dict or \
+ name + '.prepend' in self.dict
+
+ def keys(self):
+ keys = set()
+ for key in self.dict:
+ if key.endswith('.append'):
+ keys.add(key[:-7])
+ elif key.endswith('.prepend'):
+ keys.add(key[:-8])
+ else:
+ keys.add(key)
+ return list(keys)
+
+ def get_value(self, central, name, arg_type):
+ # Check if we need our special incremental magic.
+ if arg_type in ('list', 'str', 'repr') or arg_type.startswith('refs:'):
+ result = []
+ # Careful: None is a valid dict value, so use something else here.
+ missing = object()
+ for subname in (name + '.prepend', name, name + '.append'):
+ val = self.dict.get(subname, missing)
+ if val is missing:
+ val = None
+ else:
+ try:
+ val = self.func(central, val, arg_type)
+ except errors.ConfigurationError, e:
+ e.stack.append('Converting argument %r to %s' % (
+ subname, arg_type))
+ raise
+ result.append(val)
+ if result[0] is result[1] is result[2] is None:
+ raise KeyError(name)
+ if arg_type != 'repr':
+ # Done.
+ return result
+ # If "kind" is of some incremental-ish kind or we have
+ # .prepend or .append for this key then we need to
+ # convert everything we have to the same kind and
+ # return all three.
+ #
+ # (we do not get called for separate reprs for the
+ # .prepend or .append because those are filtered from
+ # .keys(). If we do not filter those from .keys()
+ # central gets upset because it does not know their
+ # type. Perhaps this means we should have a separate
+ # .keys() used together with repr, not sure yet
+ # --marienz)
+ #
+            # The problem here is that the three reprs we run may
+            # yield differing types, or types unsuitable for
+            # incremental use, so we need to convert to a suitable
+            # common kind.
+ if result[0] is None and result[2] is None:
+ # Simple case: no extra data, so no need for any
+ # conversions.
+ kind, val = result[1]
+ if kind in ('list', 'str') or kind == 'refs':
+ # Caller expects a three-tuple.
+ return kind, (None, val, None)
+ else:
+ # non-incremental, just return as-is.
+ return kind, val
+ # We have more than one return value. Figure out what
+ # target to convert to. Choices are list, str and refs.
+ kinds = set(v[0] for v in result if v is not None)
+ if 'refs' in kinds or 'ref' in kinds:
+ # If we have any refs we have to convert to refs.
+ target_kind = 'refs'
+ elif kinds == set(['str']):
+ # If we have only str we can just use that.
+ target_kind = 'str'
+ else:
+ # Convert to list. May not make any sense, but is
+ # the best we can do.
+ target_kind = 'list'
+ converted = []
+ for val in result:
+ if val is None:
+ converted.append(None)
+ continue
+ kind, val = val
+ if kind == 'ref':
+ assert target_kind == 'refs', target_kind
+ converted.append([val])
+ elif kind == 'refs':
+ assert target_kind == 'refs', target_kind
+ converted.append(val)
+ elif kind == 'list':
+ assert target_kind != 'str', target_kind
+ converted.append(val)
+ else:
+ # Everything else gets converted to a string first.
+ if kind == 'callable':
+ val = '%s.%s' % (val.__module__, val.__name__)
+ elif kind in ('bool', 'int', 'str'):
+ val = str(val)
+ else:
+ raise errors.ConfigurationError(
+ 'unsupported type %r' % (kind,))
+ # Then convert the str to list if needed.
+ if target_kind == 'str':
+ converted.append(val)
+ else:
+ converted.append([val])
+ return target_kind, converted
+ # Not incremental.
+ try:
+ return self.func(central, self.dict[name], arg_type)
+ except errors.ConfigurationError, e:
+ e.stack.append('Converting argument %r to %s' % (name, arg_type))
+ raise
+
def convert_string(central, value, arg_type):
"""Conversion func for a string-based DictConfigSection."""
assert isinstance(value, basestring), value
@@ -310,10 +443,12 @@ def convert_hybrid(central, value, arg_type):
# "Invalid name" (pylint thinks these are module-level constants)
# pylint: disable-msg=C0103
-HardCodedConfigSection = currying.pre_curry(DictConfigSection, convert_asis)
-ConfigSectionFromStringDict = currying.pre_curry(DictConfigSection,
- convert_string)
-AutoConfigSection = currying.pre_curry(DictConfigSection, convert_hybrid)
+HardCodedConfigSection = currying.partial(
+ FakeIncrementalDictConfigSection, convert_asis)
+ConfigSectionFromStringDict = currying.partial(
+ FakeIncrementalDictConfigSection, convert_string)
+AutoConfigSection = currying.partial(
+ FakeIncrementalDictConfigSection, convert_hybrid)
def section_alias(target, typename):
diff --git a/pkgcore/config/central.py b/pkgcore/config/central.py
index c6f3ab319..87f394194 100644
--- a/pkgcore/config/central.py
+++ b/pkgcore/config/central.py
@@ -9,7 +9,7 @@ A lot of extra documentation on this is in dev-notes/config.rst.
from pkgcore.config import errors, basics
-from pkgcore.util import mappings
+from snakeoil import mappings
class _ConfigMapping(mappings.DictMixin):
@@ -275,7 +275,7 @@ class ConfigManager(object):
result = self.collapsed_configs.get(name)
if result is not None:
return result
- for config in self.configs:
+ for source_index, config in enumerate(self.configs):
if name in config:
section = config[name]
break
@@ -285,7 +285,7 @@ class ConfigManager(object):
'no section called %r' % (name,))
return None
try:
- result = self.collapse_section(section)
+ result = self.collapse_section(section, name, source_index)
result.name = name
except errors.ConfigurationError, e:
e.stack.append('Collapsing section named %r' % (name,))
@@ -295,7 +295,7 @@ class ConfigManager(object):
finally:
self._refs.remove(name)
- def collapse_section(self, section, _refs=None):
+ def collapse_section(self, section, _name=None, _index=None):
"""Collapse a ConfigSection to a L{CollapsedConfig}."""
# Bail if this is an inherit-only (uncollapsable) section.
@@ -308,30 +308,47 @@ class ConfigManager(object):
raise errors.CollapseInheritOnly(
'cannot collapse inherit-only section')
- # List of (name, ConfigSection) tuples, most specific first.
- slist = [(None, section)]
+ # List of (name, ConfigSection, index) tuples, most specific first.
+ slist = [(_name, section, _index)]
# first map out inherits.
- inherit_names = set()
- for current_section, current_conf in slist:
+ inherit_names = set([_name])
+ for current_section, current_conf, index in slist:
if 'inherit' not in current_conf:
continue
- for inherit in current_conf.get_value(self, 'inherit', 'list'):
- if inherit in inherit_names:
- raise errors.ConfigurationError('Inherit %r is recursive'
- % (inherit,))
- inherit_names.add(inherit)
- for config in self.configs:
- if inherit in config:
- slist.append((inherit, config[inherit]))
- break
+ prepend, inherits, append = current_conf.get_value(
+ self, 'inherit', 'list')
+ if prepend is not None or append is not None:
+ raise errors.ConfigurationError(
+ 'Prepending or appending to the inherit list makes no '
+ 'sense')
+ for inherit in inherits:
+ if inherit == current_section:
+ # Self-inherit is a bit special.
+ for i, config in enumerate(self.configs[index + 1:]):
+ if inherit in config:
+ slist.append((inherit, config[inherit],
+ index + i + 1))
+ break
+ else:
+ raise errors.ConfigurationError(
+ 'Self-inherit %r cannot be found' % (inherit,))
else:
- raise errors.ConfigurationError(
- 'inherit target %r cannot be found' % (inherit,))
+ if inherit in inherit_names:
+ raise errors.ConfigurationError(
+ 'Inherit %r is recursive' % (inherit,))
+ inherit_names.add(inherit)
+ for i, config in enumerate(self.configs):
+ if inherit in config:
+ slist.append((inherit, config[inherit], i))
+ break
+ else:
+ raise errors.ConfigurationError(
+ 'inherit target %r cannot be found' % (inherit,))
# Grab the "class" setting first (we need it to get a type obj
- # to support incrementals in the more general loop)
- for inherit_name, inherit_conf in slist:
+ # to collapse to the right type in the more general loop)
+ for inherit_name, inherit_conf, index in slist:
if "class" in inherit_conf:
break
else:
@@ -340,20 +357,18 @@ class ConfigManager(object):
type_obj = basics.ConfigType(inherit_conf.get_value(self, 'class',
'callable'))
- # collapse, honoring incrementals.
conf = {}
- for inherit_name, inherit_conf in slist:
+ for section_nr, (inherit_name, inherit_conf, index) in \
+ enumerate(reversed(slist)):
for key in inherit_conf.keys():
if key in ('class', 'inherit', 'inherit-only'):
continue
- if key in conf and key not in type_obj.incrementals:
- continue
typename = type_obj.types.get(key)
if typename is None:
if key == 'default':
typename = 'bool'
elif not type_obj.allow_unknowns:
- if inherit_name is not None:
+ if section_nr != len(slist) - 1:
raise errors.ConfigurationError(
'Type of %r inherited from %r unknown' % (
key, inherit_name))
@@ -376,15 +391,31 @@ class ConfigManager(object):
raise
elif is_refs:
try:
- result = list(ref.collapse() for ref in result)
+ result = list(
+ list(ref.collapse() for ref in subresult or ())
+ for subresult in result)
except errors.ConfigurationError, e:
e.stack.append(
'Collapsing section refs %r' % (key,))
raise
- if key in conf and key in type_obj.incrementals:
- conf[key] = result + conf[key]
- else:
- conf[key] = result
+ if typename == 'list' or typename.startswith('refs:'):
+ prepend, result, append = result
+ if result is None:
+ if key in conf:
+ result = conf[key]
+ else:
+ result = []
+ if prepend:
+ result = prepend + result
+ if append:
+ result += append
+ elif typename == 'str':
+ prepend, result, append = result
+ if result is None and key in conf:
+ result = conf[key]
+ result = ' '.join(
+ v for v in (prepend, result, append) if v)
+ conf[key] = result
default = conf.pop('default', False)
return CollapsedConfig(
type_obj, conf, self, debug=self.debug, default=default)
diff --git a/pkgcore/config/cparser.py b/pkgcore/config/cparser.py
index fa933b260..5ba581735 100644
--- a/pkgcore/config/cparser.py
+++ b/pkgcore/config/cparser.py
@@ -7,9 +7,8 @@ ini based configuration format
from ConfigParser import ConfigParser
-from pkgcore.util import mappings
from pkgcore.config import basics
-
+from snakeoil import mappings
class CaseSensitiveConfigParser(ConfigParser):
def optionxform(self, val):
@@ -21,7 +20,7 @@ def config_from_file(file_obj):
generate a config dict
@param file_obj: file protocol instance
- @return: L{pkgcore.util.mappings.LazyValDict} instance
+ @return: L{snakeoil.mappings.LazyValDict} instance
"""
cparser = CaseSensitiveConfigParser()
cparser.readfp(file_obj)
diff --git a/pkgcore/config/dhcpformat.py b/pkgcore/config/dhcpformat.py
index ed8c19427..7cbcfafa3 100644
--- a/pkgcore/config/dhcpformat.py
+++ b/pkgcore/config/dhcpformat.py
@@ -38,10 +38,10 @@ Example of the supported format (not a complete config)::
}
"""
-from pkgcore.util import mappings, modules, demandload
+from snakeoil import mappings, modules, demandload
from pkgcore.config import basics, errors
-demandload.demandload(globals(), 'pkgcore.util.compatibility:all')
+demandload.demandload(globals(), 'snakeoil.compatibility:all')
import pyparsing as pyp
@@ -125,7 +125,7 @@ class ConfigSection(basics.ConfigSection):
if not isinstance(value, basestring):
# sequence
value = ' '.join(value)
- return basics.list_parser(value)
+ return None, basics.list_parser(value), None
elif arg_type == 'repr':
if len(value) == 1:
value = value[0]
@@ -150,10 +150,13 @@ class ConfigSection(basics.ConfigSection):
if not isinstance(value[0], basestring):
raise errors.ConfigurationError(
'%r should be a string' % value)
- return {
- 'str': basics.str_parser,
- 'bool': basics.bool_parser,
- }[arg_type](value[0])
+ if arg_type == 'str':
+ return [None, basics.str_parser(value[0]), None]
+ elif arg_type == 'bool':
+ return basics.bool_parser(value[0])
+ else:
+ raise errors.ConfigurationError(
+ 'unsupported type %r' % (arg_type,))
def config_from_file(file_obj):
diff --git a/pkgcore/config/domain.py b/pkgcore/config/domain.py
index 78d9a30cb..f9c13dfdc 100644
--- a/pkgcore/config/domain.py
+++ b/pkgcore/config/domain.py
@@ -6,7 +6,7 @@ base class to derive from for domain objects
Bit empty at the moment
"""
-from pkgcore.util.demandload import demandload
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.repository:multiplex")
# yes this is basically empty. will fill it out as the base is better
diff --git a/pkgcore/config/mke2fsformat.py b/pkgcore/config/mke2fsformat.py
index 4865613c5..212d59701 100644
--- a/pkgcore/config/mke2fsformat.py
+++ b/pkgcore/config/mke2fsformat.py
@@ -39,9 +39,8 @@ Example of the supported format (not a complete config)::
# The tests for this are in test_dhcpformat.
-from pkgcore.util import mappings
from pkgcore.config import dhcpformat, errors
-
+from snakeoil import mappings
import pyparsing as pyp
diff --git a/pkgcore/ebuild/atom.py b/pkgcore/ebuild/atom.py
index 699c3547b..82b7bdcc4 100644
--- a/pkgcore/ebuild/atom.py
+++ b/pkgcore/ebuild/atom.py
@@ -8,15 +8,15 @@
gentoo ebuild atom, should be generalized into an agnostic base
"""
-from pkgcore.util.klass import generic_equality
from pkgcore.restrictions import values, packages, boolean
-from pkgcore.util.compatibility import all
from pkgcore.ebuild import cpv, errors
from pkgcore.ebuild.atom_restricts import VersionMatch
-from pkgcore.util.demandload import demandload
+from snakeoil.compatibility import all
+from snakeoil.klass import generic_equality
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.restrictions.delegated:delegate "
- "pkgcore.util.currying:partial "
+ "snakeoil.currying:partial "
)
# namespace compatibility...
diff --git a/pkgcore/ebuild/atom_restricts.py b/pkgcore/ebuild/atom_restricts.py
index 512c3ed53..c58fdcf04 100644
--- a/pkgcore/ebuild/atom_restricts.py
+++ b/pkgcore/ebuild/atom_restricts.py
@@ -8,10 +8,9 @@
atom version restrict
"""
-from pkgcore.util.klass import generic_equality
from pkgcore.restrictions import packages, restriction
from pkgcore.ebuild import cpv, errors
-
+from snakeoil.klass import generic_equality
# TODO: change values.EqualityMatch so it supports le, lt, gt, ge, eq,
# ne ops, and convert this to it.
diff --git a/pkgcore/ebuild/conditionals.py b/pkgcore/ebuild/conditionals.py
index 0bf7dc916..897f22d32 100644
--- a/pkgcore/ebuild/conditionals.py
+++ b/pkgcore/ebuild/conditionals.py
@@ -10,8 +10,8 @@ appropriate conditionals.
# TODO: move exceptions elsewhere, bind them to a base exception for pkgcore
from pkgcore.restrictions import packages, values, boolean
-from pkgcore.util.iterables import expandable_chain
-from pkgcore.util.lists import iflatten_instance
+from snakeoil.iterables import expandable_chain
+from snakeoil.lists import iflatten_instance
from pkgcore.ebuild.atom import atom
from pkgcore.ebuild.errors import ParseError
diff --git a/pkgcore/ebuild/cpv.py b/pkgcore/ebuild/cpv.py
index d2c68588b..991f4610a 100644
--- a/pkgcore/ebuild/cpv.py
+++ b/pkgcore/ebuild/cpv.py
@@ -8,8 +8,9 @@
from pkgcore.ebuild.errors import InvalidCPV
from pkgcore.package import base
+from snakeoil.klass import generic_equality
# do this to break the cycle.
-from pkgcore.util.demandload import demandload, demand_compile
+from snakeoil.demandload import demandload, demand_compile
demandload(globals(), "pkgcore.ebuild:atom")
suffix_regexp = demand_compile("^(alpha|beta|rc|pre|p)(\\d*)$")
@@ -110,6 +111,7 @@ class native_CPV(object):
except AttributeError:
return 1
+
def native_ver_cmp(ver1, rev1, ver2, rev2):
# If the versions are the same, comparing revisions will suffice.
diff --git a/pkgcore/ebuild/digest.py b/pkgcore/ebuild/digest.py
index e3a8ac058..8e508a56e 100644
--- a/pkgcore/ebuild/digest.py
+++ b/pkgcore/ebuild/digest.py
@@ -6,11 +6,11 @@ ebuild tree manifest/digest support
"""
from itertools import izip
from pkgcore.chksum import errors, gpg
-from pkgcore.util.obj import make_SlottedDict_kls
-from pkgcore.util.demandload import demandload
+from snakeoil.obj import make_SlottedDict_kls
+from snakeoil.demandload import demandload
demandload(globals(),
- "pkgcore.util.lists:iflatten_instance "
- "pkgcore:fetch ")
+ "pkgcore:fetch "
+ "snakeoil.lists:iflatten_instance ")
def parse_digest(source, throw_errors=True):
d = {}
diff --git a/pkgcore/ebuild/domain.py b/pkgcore/ebuild/domain.py
index e6b4ed928..cb3272503 100644
--- a/pkgcore/ebuild/domain.py
+++ b/pkgcore/ebuild/domain.py
@@ -13,26 +13,27 @@ import pkgcore.config.domain
from pkgcore.config import ConfigHint
from pkgcore.restrictions.delegated import delegate
from pkgcore.restrictions import packages, values
-from pkgcore.util.file import iter_read_bash
-from pkgcore.ebuild.atom import (generate_collapsed_restriction)
+from pkgcore.ebuild.atom import generate_collapsed_restriction
from pkgcore.repository import multiplex, visibility
-from pkgcore.util.lists import (stable_unique, unstable_unique)
-from pkgcore.util.compatibility import any
-from pkgcore.util.mappings import ProtectedDict
from pkgcore.interfaces.data_source import local_source
from pkgcore.config.errors import BaseException
-from pkgcore.util.demandload import demandload
from pkgcore.ebuild import const
from pkgcore.ebuild.profiles import incremental_expansion
-from pkgcore.util.parserestrict import parse_match
-from pkgcore.util.currying import partial
from pkgcore.ebuild.misc import (collapsed_restrict_to_data,
non_incremental_collapsed_restrict_to_data)
+from pkgcore.util.parserestrict import parse_match
+from snakeoil.lists import stable_unique, unstable_unique
+from snakeoil.compatibility import any
+from snakeoil.mappings import ProtectedDict
+from snakeoil.fileutils import iter_read_bash
+from snakeoil.currying import partial
+from snakeoil.demandload import demandload
demandload(
globals(),
'errno '
- 'pkgcore.util:osutils '
+ 'pkgcore.fs.livefs:iter_scan '
+ 'pkgcore.fs.fs:fsFile '
)
class MissingFile(BaseException):
@@ -92,7 +93,7 @@ class domain(pkgcore.config.domain.domain):
# TODO this is missing defaults
pkgcore_config_type = ConfigHint(
- _types, incrementals=const.incrementals, typename='domain',
+ _types, typename='domain',
required=['repositories', 'profile', 'vdb', 'fetcher', 'name'],
allow_unknowns=True)
@@ -143,17 +144,14 @@ class domain(pkgcore.config.domain.domain):
raise Failure("failed reading '%s': %s" % (fp, e))
for dirpath in settings.pop('%s-dirs' % (key,), ()):
try:
- files = osutils.listdir_files(dirpath)
+ for file in iter_scan(dirpath):
+ if (not isinstance(file, fsFile) or
+ any(True for thing in file.location.split('/')
+ if thing.startswith('.'))):
+ continue
+ val.extend(action(x) for x in iter_read_bash(file.location))
except (OSError, IOError), e:
- raise Failure('failed listing %r: %s' % (dirpath, e))
- for fp in files:
- if fp.startswith('.'):
- continue
- fp = osutils.join(dirpath, fp)
- try:
- val.extend(action(x) for x in iter_read_bash(fp))
- except (IOError, OSError, ValueError), e:
- raise Failure('failed reading %r: %r' % (fp, str(e)))
+ raise Failure('failed reading %r: %s' % (dirpath, e))
self.name = name
settings.setdefault("PKGCORE_DOMAIN", name)
@@ -185,6 +183,8 @@ class domain(pkgcore.config.domain.domain):
if "test" in settings.get("FEATURES", []):
use.add("test")
+ self.use_expand = set(profile.use_expand)
+ self.use_expand_hidden = set(profile.use_expand_hidden)
for u in profile.use_expand:
v = settings.get(u)
if v is None:
@@ -300,7 +300,7 @@ class domain(pkgcore.config.domain.domain):
for l, repos, filtered in ((self.repos, repositories, True),
(self.vdb, vdb, False)):
-
+
for repo in repos:
if not repo.configured:
pargs = [repo]
@@ -327,7 +327,7 @@ class domain(pkgcore.config.domain.domain):
vfilter, True)
self.filtered_named_repos[key] = wrapped_repo
l.append(wrapped_repo)
-
+
if profile_repo is not None:
self.repos = [profile_repo] + self.repos
@@ -379,7 +379,7 @@ class domain(pkgcore.config.domain.domain):
if incremental:
raise NotImplementedError(self.incremental_apply_keywords_filter)
- f = self.incremental_apply_keywords_filter
+ #f = self.incremental_apply_keywords_filter
else:
f = self.apply_keywords_filter
return delegate(partial(f, data))
diff --git a/pkgcore/ebuild/ebd.py b/pkgcore/ebuild/ebd.py
index 152fc9f51..3ae33ce78 100644
--- a/pkgcore/ebuild/ebd.py
+++ b/pkgcore/ebuild/ebd.py
@@ -17,15 +17,15 @@ from pkgcore.ebuild.processor import \
request_ebuild_processor, release_ebuild_processor, \
expected_ebuild_env, chuck_UnhandledCommand
from pkgcore.os_data import portage_gid, portage_uid
-from pkgcore.util.osutils import ensure_dirs, normpath, join as pjoin
from pkgcore.spawn import (
spawn_bash, spawn, is_sandbox_capable, is_fakeroot_capable)
-from pkgcore.util.currying import post_curry, pretty_docs
from pkgcore.os_data import xargs
from pkgcore.ebuild.const import eapi_capable
from pkgcore.interfaces import observer
-from pkgcore.util.demandload import demandload
+from snakeoil.currying import post_curry, pretty_docs
+from snakeoil.osutils import ensure_dirs, normpath, join as pjoin
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.ebuild.ebuild_built:fake_package_factory,package "
"pkgcore.log:logger "
@@ -34,26 +34,27 @@ demandload(globals(),
def _reset_env_data_source(method):
return method
- def store_env_data_wrapper(self, *args, **kwds):
- try:
- return method(self, *args, **kwds)
- finally:
- # note that we're *not* return'ing anything ourselves.
- # we want the original val to slide back
- if self.env_data_source is None:
- try:
- fp = self.env["PORT_ENV_FILE"]
- f = self.env_data.get_fileobj()
- f.seek(0, 0)
- f.truncate(0)
- f.write(open(fp, "r").read())
- del f, fp
- except (IOError, OSError), oe:
- if oe.errno != errno.ENOENT:
- raise
-
- store_env_data_wrapper.__doc__ = method.__doc__
- return store_env_data_wrapper
+ # unreachable code. -masterdriverz
+ #def store_env_data_wrapper(self, *args, **kwds):
+ # try:
+ # return method(self, *args, **kwds)
+ # finally:
+ # # note that we're *not* return'ing anything ourselves.
+ # # we want the original val to slide back
+ # if self.env_data_source is None:
+ # try:
+ # fp = self.env["PORT_ENV_FILE"]
+ # f = self.env_data.get_fileobj()
+ # f.seek(0, 0)
+ # f.truncate(0)
+ # f.write(open(fp, "r").read())
+ # del f, fp
+ # except (IOError, OSError), oe:
+ # if oe.errno != errno.ENOENT:
+ # raise
+
+ #store_env_data_wrapper.__doc__ = method.__doc__
+ #return store_env_data_wrapper
class ebd(object):
diff --git a/pkgcore/ebuild/ebuild_built.py b/pkgcore/ebuild/ebuild_built.py
index 9f8d42330..c1fd6f5c9 100644
--- a/pkgcore/ebuild/ebuild_built.py
+++ b/pkgcore/ebuild/ebuild_built.py
@@ -6,15 +6,16 @@ built ebuild packages (vdb packages and binpkgs are derivatives of this)
"""
from pkgcore.ebuild import ebuild_src
-from pkgcore.util.mappings import IndeterminantDict
from pkgcore.package import metadata
from pkgcore.interfaces.data_source import local_source
from pkgcore.fs import scan
-from pkgcore.util.currying import post_curry
from pkgcore.ebuild import ebd
-from pkgcore.util.obj import DelayedInstantiation
-from pkgcore.util.demandload import demandload
+from snakeoil.mappings import IndeterminantDict
+from snakeoil.currying import post_curry
+from snakeoil.obj import DelayedInstantiation
+
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.merge:engine "
"pkgcore.ebuild:triggers "
diff --git a/pkgcore/ebuild/ebuild_src.py b/pkgcore/ebuild/ebuild_src.py
index 287b5a5f7..4f8233515 100644
--- a/pkgcore/ebuild/ebuild_src.py
+++ b/pkgcore/ebuild/ebuild_src.py
@@ -6,17 +6,13 @@ package class for buildable ebuilds
"""
import os
-from pkgcore.package import metadata
-from pkgcore.package import errors as metadata_errors
from itertools import imap
-WeakValCache = metadata.WeakValCache
-
+from pkgcore.package import metadata
+from pkgcore.package import errors as metadata_errors
from pkgcore.ebuild.cpv import CPV
from pkgcore.ebuild import conditionals
from pkgcore.ebuild.atom import atom
-from pkgcore.util.mappings import IndeterminantDict
-from pkgcore.util.currying import alias_class_method, partial
from pkgcore.cache import errors as cache_errors
from pkgcore.restrictions.packages import AndRestriction
from pkgcore.restrictions import boolean
@@ -24,9 +20,14 @@ from pkgcore.chksum.errors import MissingChksum
from pkgcore.fetch.errors import UnknownMirror
from pkgcore.fetch import fetchable, mirror, uri_list, default_mirror
from pkgcore.ebuild import const, processor
-from pkgcore.util.demandload import demandload
+
+from snakeoil.mappings import IndeterminantDict
+from snakeoil.currying import alias_class_method, partial
+
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.log:logger")
+WeakValCache = metadata.WeakValCache
def generate_depset(c, key, non_package_type, s):
try:
diff --git a/pkgcore/ebuild/eclass_cache.py b/pkgcore/ebuild/eclass_cache.py
index 9e01a9488..48b7506a2 100644
--- a/pkgcore/ebuild/eclass_cache.py
+++ b/pkgcore/ebuild/eclass_cache.py
@@ -8,13 +8,17 @@ in memory representation of on disk eclass stacking order
from pkgcore.interfaces.data_source import local_source
from pkgcore.config import ConfigHint
-from pkgcore.util.mappings import ImmutableDict
-from pkgcore.util.weakrefs import WeakValCache
-from pkgcore.util.osutils import join as pjoin
-from pkgcore.util.demandload import demandload
+from snakeoil.mappings import ImmutableDict
+from snakeoil.weakrefs import WeakValCache
+from snakeoil.osutils import join as pjoin
+
+from snakeoil.demandload import demandload
demandload(globals(),
- "pkgcore.util.osutils:normpath pkgcore.util.mappings:StackedDict os")
+ "os "
+ "snakeoil.osutils:normpath "
+ "snakeoil.mappings:StackedDict "
+)
class base(object):
"""
diff --git a/pkgcore/ebuild/filter_env.py b/pkgcore/ebuild/filter_env.py
index 3fb748066..1a20391fd 100644
--- a/pkgcore/ebuild/filter_env.py
+++ b/pkgcore/ebuild/filter_env.py
@@ -8,7 +8,7 @@
"""Filter a bash environment dump."""
-from pkgcore.util import demandload
+from snakeoil import demandload
demandload.demandload(
globals(),
're '
diff --git a/pkgcore/ebuild/formatter.py b/pkgcore/ebuild/formatter.py
new file mode 100644
index 000000000..6eed347eb
--- /dev/null
+++ b/pkgcore/ebuild/formatter.py
@@ -0,0 +1,418 @@
+# Copyright: 2006 Charlie Shepherd <masterdriverz@gentoo.org>
+# License: GPL2
+
+"""PMerge formatting module
+
+To add a new formatter, add the relevant class (which
+should be a subclass of Formatter). Documentation is
+a necessity - things can change/break easily between
+versions. Then add the class name (_not_ an instance) to
+the formatters dictionary - this will instantly make your
+formatter available on the commandline.
+"""
+
+import operator
+
+
+class NoChoice(KeyboardInterrupt):
+ """Raised by L{userquery} if no choice was made.
+
+ HACK: this subclasses KeyboardInterrupt, so if you ignore this it
+ should do something reasonable.
+ """
+
+def userquery(prompt, out, err, responses=None, default_answer=None, limit=3):
+ """Ask the user to choose from a set of options.
+
+ Displays a prompt and a set of responses, then waits for a
+ response which is checked against the responses. If there is an
+ unambiguous match the value is returned.
+
+ If the user does not input a valid response after a number of
+ tries L{NoChoice} is raised. You can catch this if you want to do
+ something special. Because it subclasses C{KeyboardInterrupt}
+ the default behaviour is to abort as if the user hit ctrl+c.
+
+ @type prompt: C{basestring} or a tuple of things to pass to a formatter.
+ XXX this is a crummy api but I cannot think of a better one supporting
+ the very common case of wanting just a string as prompt.
+ @type out: formatter.
+ @type err: formatter.
+ @type responses: mapping with C{basestring} keys and tuple values.
+ @param responses: mapping of user input to function result.
+ The first item in the value tuple is returned, the rest is passed to
+ out.
+ Defaults to::
+ {
+ 'yes': (True, out.fg('green'), 'Yes'),
+ 'no': (False, out.fg('red'), 'No'),
+ }
+ @param default_answer: returned if there is no input
+ (user just hits enter). Defaults to True if responses is unset,
+ unused otherwise.
+ @param limit: number of allowed tries.
+ """
+ if responses is None:
+ responses = {
+ 'yes': (True, out.fg('green'), 'Yes'),
+ 'no': (False, out.fg('red'), 'No'),
+ }
+ if default_answer is None:
+ default_answer = True
+ if default_answer is not None:
+ for val in responses.itervalues():
+ if val[0] == default_answer:
+ default_answer_name = val[1:]
+ for i in xrange(limit):
+ # XXX see docstring about crummyness
+ if isinstance(prompt, tuple):
+ out.write(autoline=False, *prompt)
+ else:
+ out.write(prompt, autoline=False)
+ out.write(' [', autoline=False)
+ prompts = responses.values()
+ for choice in prompts[:-1]:
+ out.write(autoline=False, *choice[1:])
+ out.write(out.reset, '/', autoline=False)
+ out.write(autoline=False, *prompts[-1][1:])
+ out.write(out.reset, ']', autoline=False)
+ if default_answer is not None:
+ out.write(' (default: ', autoline=False)
+ out.write(autoline=False, *default_answer_name)
+ out.write(')', autoline=False)
+ out.write(': ', autoline=False)
+ response = raw_input()
+ if not response:
+ return default_answer
+ results = set(
+ (key, value) for key, value in responses.iteritems()
+ if key[:len(response)].lower() == response.lower())
+ if not results:
+ err.write('Sorry, response "%s" not understood.' % (response,))
+ elif len(results) > 1:
+ err.write('Response "%s" is ambiguous (%s)' % (
+ response, ', '.join(key for key, val in results)))
+ else:
+ return list(results)[0][1][0]
+
+ raise NoChoice()
+
+def filter_use(use, use_expand, use_expand_hidden):
+ """Split USE flags up into "normal" flags and use-expanded ones.
+
+ @type use: iterable of strings
+ @param use: flags that are set.
+ @type use_expand: iterable of strings
+ @param use_expand: names of use-expanded variables.
+ @type use_expand_hidden: set of strings
+ @param use_expand_hidden: names of use-expanded vars that should not
+ be added to the dict.
+ @rtype: sequence of strings, dict mapping a string to a list of strings
+ @return: list of normal flags and a mapping from use_expand name to value
+ (with the use-expanded bit stripped off, so C{"video_cards_alsa"}
+ becomes C{"{'video_cards': ['alsa']}"}).
+ """
+ ue_dict = {}
+ usel = []
+ for flag in use:
+ for expand in use_expand:
+ if flag.startswith(expand.lower() + '_'):
+ if expand not in use_expand_hidden:
+ ue_dict.setdefault(expand.lower(), []).append(
+ flag[len(expand) + 1:])
+ break
+ else:
+ usel.append(flag)
+ return usel, ue_dict
+
+def format_use(out, attr, selectable, choice, oldselectable=None,
+ oldchoice=None):
+ """Write the current selection from a set of flags to a formatter.
+
+ @type out: formatter
+ @param out: the formatter to write to.
+ @type attr: string
+ @param attr: the name of the setting.
+ @type selectable: set of strings
+ @param selectable: the possible values.
+ @type choice: set of strings
+ @param choice: the chosen values.
+ @type oldselectable: set of strings
+ @param oldselectable: the values possible in the previous version.
+ @type oldchoice: set of strings
+ @param oldchoice: the previously chosen values.
+ """
+ red = out.fg('red')
+ green = out.fg('green')
+ blue = out.fg('blue')
+ yellow = out.fg('yellow')
+
+ flags = []
+ enabled = set(selectable) & set(choice)
+ disabled = set(selectable) - set(choice)
+ if oldselectable is not None and oldchoice is not None:
+# print 'oldselectable: %s' % oldselectable, 'oldchoice: %s' % oldchoice
+ old_enabled = set(oldselectable) & set(oldchoice)
+ old_disabled = set(oldselectable) - set(oldchoice)
+ for flag in sorted(enabled):
+ assert flag
+ if flag in old_enabled:
+ # Unchanged flag.
+ flags.extend((red, flag, ' '))
+ elif flag in old_disabled:
+ # Toggled.
+ # Trailing single space is important, we can pop it below.
+ flags.extend((green, flag, '*', ' '))
+ else:
+ # Flag did not exist earlier.
+ flags.extend((yellow, flag, '%', ' '))
+ for flag in sorted(disabled | (set(oldselectable) - set(selectable))):
+ assert flag
+ if flag not in disabled:
+ # Removed flag.
+ flags.extend((yellow, '(-', flag, '%)', ' '))
+ elif flag in old_disabled:
+ # Unchanged.
+ flags.extend((blue, '-', flag, ' '))
+ elif flag in old_enabled:
+ # Toggled.
+ flags.extend((yellow, '-', flag, '*', ' '))
+ else:
+ # New.
+ flags.extend((yellow, '-', flag, '%', ' '))
+ else:
+ for flag in sorted(enabled):
+ flags.extend((red, flag, ' '))
+ for flag in sorted(disabled):
+ flags.extend((yellow, '-', flag, ' '))
+
+ # Only write this if we have something to write
+ if flags:
+ out.write(attr.upper(), '="')
+ # Omit the final space.
+ out.write(*flags[:-1])
+ out.write('" ')
+
+
+class Formatter(object):
+
+ """Base Formatter class: All formatters should be subclasses of this."""
+
+ def __init__(self, out, err, **kwargs):
+ self.out = out
+ self.err = err
+ self.__dict__.update(kwargs)
+
+ def format(self, op):
+ """Formats an op. Subclasses must define this method"""
+ raise NotImplementedError(self.format)
+
+ def ask(self, question, responses=None, default_answer=None, limit=3):
+ return userquery(
+ question, self.out, self.err, responses, default_answer, limit)
+
+ def end(self):
+ """Called at the end, normally for summary information"""
+
+
+class BasicFormatter(Formatter):
+ """A basic formatter, intended for scripts"""
+ def format(self, op):
+ self.out.write(op.pkg.key)
+
+
+class PkgcoreFormatter(Formatter):
+ """The original pkgcore output"""
+ def format(self, op):
+ if op.desc == "replace":
+ self.out.write("replace %s, %s" % (op.old_pkg, op.pkg))
+ else:
+ self.out.write("%s %s" % (op.desc.ljust(7), op.pkg))
+
+
+class PortageFormatter(Formatter):
+
+ """Portage formatter
+
+ A Formatter designed to resemble Portage's output
+ as much as possible.
+ """
+
+ def __init__(self, **kwargs):
+ kwargs.setdefault("use_expand", set())
+ kwargs.setdefault("use_expand_hidden", set())
+ Formatter.__init__(self, **kwargs)
+ # Map repo location to an index.
+ self.repos = {}
+
+ def format(self, op):
+ # [<type> NRFDU]
+ # <type> - ebuild, block or nomerge (for --tree)
+ # N - New package
+ # R - Rebuild package
+ # F - Fetch restricted
+ # D - Downgrade
+ # U - Upgrade
+ # Caveats:
+ # - U and D are both displayed to show a downgrade - this is kept
+ # in order to be consistent with existing portage behaviour
+
+ verbose = self.verbose
+
+ out = self.out
+ origautoline = out.autoline
+ out.autoline = False
+
+ # This is for the summary at the end
+ # TODO prefer repoid over location here?
+ reponr = self.repos.setdefault(op.pkg.repo.location,
+ len(self.repos) + 1)
+
+ # We don't do blockers or --tree stuff yet
+ out.write('[ebuild ')
+
+ # Order is important here - look at the diagram in
+ # PortageFormat.formatter
+ type = op.desc
+ if op.desc == "add":
+ out.write(out.fg('green'), ' N')
+ if op.pkg.slot != '0':
+ out.write(out.fg('green'), 'S')
+ else:
+ out.write(' ')
+ elif op.desc == "replace" and op.pkg == op.old_pkg:
+ out.write(out.fg('yellow'), ' R')
+ else:
+ out.write(' ')
+ type = 'upgrade'
+
+ if 'fetch' in op.pkg.restrict:
+ out.write(out.fg('red'), 'F')
+ else:
+ out.write(' ')
+ if type == 'upgrade':
+ if op.pkg.fullver != op.old_pkg.fullver:
+ out.write(out.fg('cyan'), 'U')
+ if op.pkg > op.old_pkg:
+ out.write(' ')
+ else:
+ out.write(out.fg('blue'), 'D')
+ else:
+ out.write(' ')
+ out.write('] ')
+
+ out.write(out.fg('green'), '%s ' % op.pkg.cpvstr)
+
+ if type == 'upgrade':
+ out.write(out.fg('blue'), '[%s] ' % op.old_pkg.fullver)
+
+ # Build a list of (useflags, use_expand_dicts) tuples.
+ # HACK: if we are in "replace" mode we build a list of length
+ # 4, else this is a list of length 2. We then pass this to
+ # format_use which can take either 2 or 4 arguments.
+ if op.desc == 'replace':
+ uses = (op.pkg.iuse, op.pkg.use, op.old_pkg.iuse, op.old_pkg.use)
+ else:
+ uses = (op.pkg.iuse, op.pkg.use)
+ stuff = [filter_use(x, self.use_expand, self.use_expand_hidden)
+ for x in uses]
+ # Convert the list of tuples to a list of lists and a list of
+ # dicts (both length 2 or 4).
+ uselists, usedicts = zip(*stuff)
+ format_use(out, 'use', *uselists)
+ for expand in [x for x in self.use_expand
+ if x not in self.use_expand_hidden]:
+ flaglists = [d.get(expand.lower(), ()) for d in usedicts]
+ format_use(out, expand, *flaglists)
+
+ if verbose:
+ out.write(out.fg('blue'), " [%d]" % (reponr,))
+
+ out.write('\n')
+ out.autoline = origautoline
+
+ def end(self):
+ if self.verbose:
+ self.out.write()
+ repos = self.repos.items()
+ repos.sort(key=operator.itemgetter(1))
+ for k, v in repos:
+ self.out.write(self.out.fg('blue'), "[%d] %s" % (v, k))
+
+
+class PaludisFormatter(Formatter):
+
+ """Paludis formatter
+
+ A Formatter designed to resemble Paludis' output
+ as much as possible.
+ """
+
+ def __init__(self, out, err, **kwargs):
+ Formatter.__init__(self, out, err, **kwargs)
+ self.packages = self.new = self.upgrades = self.downgrades = 0
+ self.nslots = 0
+
+ def format(self, op):
+ out = self.out
+ origautoline = out.autoline
+ out.autoline = False
+ out = self.out
+ self.packages += 1
+
+ out.write('* ')
+ out.write(out.fg('blue'), op.pkg.key)
+ out.write("-%s" % op.pkg.fullver)
+ out.write("::%s " % op.pkg.repo.repo_id)
+ out.write(out.fg('blue'), "{:%s} " % op.pkg.slot)
+ if op.desc == 'add':
+ if op.pkg.slot != '0':
+ suffix = 'S'
+ self.nslots += 1
+ else:
+ suffix = 'N'
+ self.new += 1
+ out.write(out.fg('yellow'), "[%s]" % suffix)
+ elif op.desc == 'replace':
+ if op.pkg != op.old_pkg:
+ if op.pkg > op.old_pkg:
+ suffix = "U"
+ self.upgrades += 1
+ else:
+ suffix = "D"
+ self.downgrades += 1
+ out.write(out.fg('yellow'), "[%s %s]" % (
+ suffix, op.old_pkg.fullver))
+ else:
+ out.write(out.fg('yellow'), "[R]")
+
+ red = out.fg('red')
+ green = out.fg('green')
+ flags = []
+ use = set(op.pkg.use)
+ for flag in sorted(op.pkg.iuse):
+ if flag in use:
+ flags.extend((green, flag, ' '))
+ else:
+ flags.extend((red, '-', flag, ' '))
+ if flags:
+ out.write(' ')
+ # Throw away the final space.
+ out.write(*flags[:-1])
+ out.write('\n')
+ out.autoline = origautoline
+
+ def end(self):
+ self.out.write(
+ 'Total: %d packages '
+ '(%d new, %d upgrades, %d downgrades, %d in new slots)' % (
+ self.packages, self.new, self.upgrades, self.downgrades,
+ self.nslots))
+
+
+formatters = {
+ 'basic': BasicFormatter,
+ 'pkgcore': PkgcoreFormatter,
+ 'portage': PortageFormatter,
+ 'paludis': PaludisFormatter,
+ }
diff --git a/pkgcore/ebuild/misc.py b/pkgcore/ebuild/misc.py
index aa41d8e5e..c1e17b3be 100644
--- a/pkgcore/ebuild/misc.py
+++ b/pkgcore/ebuild/misc.py
@@ -8,8 +8,9 @@ misc. stuff we've not found a spot for yet.
from pkgcore.restrictions import packages, restriction
from pkgcore.ebuild.atom import atom
from pkgcore.ebuild.profiles import incremental_expansion
-from pkgcore.util.lists import iflatten_instance
-from pkgcore.util.klass import generic_equality
+
+from snakeoil.lists import iflatten_instance
+from snakeoil.klass import generic_equality
class collapsed_restrict_to_data(object):
diff --git a/pkgcore/ebuild/overlay_repository.py b/pkgcore/ebuild/overlay_repository.py
index 14c658be4..c8abad424 100644
--- a/pkgcore/ebuild/overlay_repository.py
+++ b/pkgcore/ebuild/overlay_repository.py
@@ -8,9 +8,9 @@ implementation of the standard PORTDIR + PORTDIR_OVERLAY repository stacking
from pkgcore.repository import multiplex
from pkgcore.config import ConfigHint, errors
from pkgcore.ebuild import repository
-from pkgcore.util.lists import unstable_unique
from pkgcore.restrictions import packages
+from snakeoil.lists import unstable_unique
class OverlayRepo(multiplex.tree):
diff --git a/pkgcore/ebuild/portage_conf.py b/pkgcore/ebuild/portage_conf.py
index e56402064..19ac037be 100644
--- a/pkgcore/ebuild/portage_conf.py
+++ b/pkgcore/ebuild/portage_conf.py
@@ -8,18 +8,19 @@ Converts portage configuration files into L{pkgcore.config} form.
import os
import stat
+
from pkgcore.config import basics, configurable
from pkgcore import const
-from pkgcore.util.osutils import normpath, abspath, listdir_files, pjoin
-from pkgcore.util.demandload import demandload
+from pkgcore.pkgsets.glsa import SecurityUpgrades
+
+from snakeoil.osutils import normpath, abspath, listdir_files, pjoin
+from snakeoil.demandload import demandload
demandload(globals(), "errno pkgcore.config:errors "
- "pkgcore.pkgsets.glsa:SecurityUpgrades "
- "pkgcore.util.file:read_bash_dict "
- "pkgcore.util:bzip2 "
"pkgcore.log:logger "
- 'pkgcore.util.xml:etree '
'ConfigParser:ConfigParser '
-)
+ "snakeoil.fileutils:read_bash_dict "
+ "pkgcore.util:bzip2 "
+ 'snakeoil.xml:etree ')
def my_convert_hybrid(manager, val, arg_type):
@@ -60,7 +61,7 @@ def add_layman_syncers(new_config, rsync_opts, overlay_paths, config_root='/',
return {}
c = ConfigParser()
- c.read(pjoin(config_root, default_loc))
+ c.readfp(f)
storage_loc = c.get('MAIN', 'storage')
overlay_xml = pjoin(storage_loc, default_conf)
del c
@@ -87,7 +88,6 @@ def add_layman_syncers(new_config, rsync_opts, overlay_paths, config_root='/',
continue
elif path not in overlay_paths:
continue
- proto = None
if src_type == 'tar':
continue
elif src_type == 'svn':
@@ -270,7 +270,7 @@ def config_from_make_conf(location="/etc/"):
kwds = {"class": "pkgcore.vdb.repository",
"location": pjoin(root, 'var', 'db', 'pkg')}
- kwds["cache_location"] = pjoin(config_root, 'var', 'cache', 'edb',
+ kwds["cache_location"] = pjoin(config_root, 'var', 'cache', 'edb',
'dep', 'var', 'db', 'pkg')
new_config["vdb"] = basics.AutoConfigSection(kwds)
@@ -301,7 +301,7 @@ def config_from_make_conf(location="/etc/"):
})
- # used by PORTDIR syncer, and any layman defined syncers
+ # used by PORTDIR syncer, and any layman defined syncers
rsync_opts = isolate_rsync_opts(conf_dict)
portdir_syncer = conf_dict.pop("SYNC", None)
@@ -353,7 +353,8 @@ def config_from_make_conf(location="/etc/"):
d['location'] = portdir
d['cache'] = ('portdir cache',)
- new_config[portdir] = basics.DictConfigSection(my_convert_hybrid, d)
+ new_config[portdir] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, d)
new_config["eclass stack"] = basics.section_alias(
pjoin(portdir, 'eclass'), 'eclass_cache')
new_config['portdir'] = basics.section_alias(portdir, 'repo')
@@ -372,7 +373,8 @@ def config_from_make_conf(location="/etc/"):
d['location'] = portdir
d['cache'] = cache
- new_config[portdir] = basics.DictConfigSection(my_convert_hybrid, d)
+ new_config[portdir] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, d)
if rsync_portdir_cache:
# created higher up; two caches, writes to the local,
@@ -380,7 +382,8 @@ def config_from_make_conf(location="/etc/"):
cache = ('portdir cache',)
else:
cache = ('%s cache' % (portdir,),)
- new_config['portdir'] = basics.DictConfigSection(my_convert_hybrid, {
+ new_config['portdir'] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, {
'inherit': ('ebuild-repo-common',),
'location': portdir,
'cache': cache,
@@ -388,15 +391,16 @@ def config_from_make_conf(location="/etc/"):
# reverse the ordering so that overlays override portdir
# (portage default)
- new_config["eclass stack"] = basics.DictConfigSection(
+ new_config["eclass stack"] = basics.FakeIncrementalDictConfigSection(
my_convert_hybrid, {
'class': 'pkgcore.ebuild.eclass_cache.StackedCaches',
'eclassdir': pjoin(portdir, "eclass"),
'caches': tuple(reversed(all_ecs))})
- new_config['repo-stack'] = basics.DictConfigSection(my_convert_hybrid,
- {'class': 'pkgcore.ebuild.overlay_repository.OverlayRepo',
- 'trees': tuple(reversed([portdir] + portdir_overlays))})
+ new_config['repo-stack'] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, {
+ 'class': 'pkgcore.ebuild.overlay_repository.OverlayRepo',
+ 'trees': tuple(reversed([portdir] + portdir_overlays))})
# disabled code for using portage config defined cache modules;
# need to re-examine and see if they're still in sync with our cache subsystem
@@ -481,7 +485,7 @@ def config_from_make_conf(location="/etc/"):
elif stat.S_ISDIR(st.st_mode):
conf_dict[f + '-dirs'] = fp
- new_config['livefs domain'] = basics.DictConfigSection(my_convert_hybrid,
- conf_dict)
+ new_config['livefs domain'] = basics.FakeIncrementalDictConfigSection(
+ my_convert_hybrid, conf_dict)
return new_config
diff --git a/pkgcore/ebuild/processor.py b/pkgcore/ebuild/processor.py
index a8fdec01d..dd7d00400 100644
--- a/pkgcore/ebuild/processor.py
+++ b/pkgcore/ebuild/processor.py
@@ -28,15 +28,16 @@ inactive_ebp_list = []
active_ebp_list = []
import pkgcore.spawn, os, signal, errno, sys
-from pkgcore.util.currying import post_curry, partial
from pkgcore.const import (
depends_phase_path, EBUILD_DAEMON_PATH, EBUILD_ENV_PATH, EBD_ENV_PATH)
-from pkgcore.util.demandload import demandload
from pkgcore.os_data import portage_uid, portage_gid
+
+from snakeoil.currying import post_curry, partial
+from snakeoil.demandload import demandload
demandload(
globals(),
"pkgcore.log:logger "
- "pkgcore.util:osutils ")
+ "snakeoil:osutils ")
import traceback
@@ -131,7 +132,6 @@ def release_ebuild_processor(ebp):
# if it makes it this far, that means ebp was already in the inactive list.
# which is indicative of an internal fsck up.
- import traceback
print ("ebp was requested to be free'd, yet it already is claimed "
"inactive _and_ was in the active list")
print "this means somethings horked, badly"
@@ -160,7 +160,6 @@ class EbuildProcessor:
spawn_opts = {}
if fakeroot and (sandbox or not userpriv):
- import traceback
traceback.print_stack()
print "warning, was asking to enable fakeroot but-"
print "sandbox", sandbox, "userpriv", userpriv
diff --git a/pkgcore/ebuild/profiles.py b/pkgcore/ebuild/profiles.py
index a1bbe7f02..2b9e01478 100644
--- a/pkgcore/ebuild/profiles.py
+++ b/pkgcore/ebuild/profiles.py
@@ -3,16 +3,18 @@
import errno, os
from itertools import chain
+
from pkgcore.config import ConfigHint
from pkgcore.ebuild import const
-from pkgcore.util.osutils import abspath, join as pjoin, readlines
from pkgcore.ebuild import ebuild_src
-from pkgcore.util.containers import InvertedContains
-from pkgcore.util.file import iter_read_bash, read_bash_dict
-from pkgcore.util.caching import WeakInstMeta
from pkgcore.repository import virtual
-from pkgcore.util.currying import partial
-from pkgcore.util.demandload import demandload
+
+from snakeoil.osutils import abspath, join as pjoin, readlines
+from snakeoil.containers import InvertedContains
+from snakeoil.fileutils import iter_read_bash, read_bash_dict
+from snakeoil.caching import WeakInstMeta
+from snakeoil.currying import partial
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.interfaces.data_source:local_source "
"pkgcore.ebuild:cpv "
@@ -392,6 +394,12 @@ class OnDiskProfile(object):
         return tuple(self.default_env["USE_EXPAND"])
         return tuple(self.default_env["USE_EXPAND"].split())
+ @property
+ def use_expand_hidden(self):
+ if "USE_EXPAND_HIDDEN" in self.incrementals:
+ return tuple(self.default_env["USE_EXPAND_HIDDEN"])
+ return tuple(self.default_env["USE_EXPAND_HIDDEN"].split())
+
def _collapse_virtuals(self):
d = {}
for profile in self.stack:
diff --git a/pkgcore/ebuild/repo_objs.py b/pkgcore/ebuild/repo_objs.py
index a19de9bdb..bee97f5b7 100644
--- a/pkgcore/ebuild/repo_objs.py
+++ b/pkgcore/ebuild/repo_objs.py
@@ -5,12 +5,12 @@
package class for buildable ebuilds
"""
-from pkgcore.util.currying import post_curry
-from pkgcore.util.demandload import demandload
+from snakeoil.currying import post_curry
+from snakeoil.demandload import demandload
demandload(globals(),
- "pkgcore.util.xml:etree "
+ "snakeoil.xml:etree "
"pkgcore.ebuild:digest "
- "pkgcore.util:mappings "
+ "snakeoil:mappings "
"errno ")
diff --git a/pkgcore/ebuild/repository.py b/pkgcore/ebuild/repository.py
index 7da5f0b35..06c20292c 100644
--- a/pkgcore/ebuild/repository.py
+++ b/pkgcore/ebuild/repository.py
@@ -6,16 +6,21 @@ ebuild repository, specific to gentoo ebuild trees (whether cvs or rsync)
"""
import os, stat
+
from pkgcore.repository import prototype, errors, configured, syncable
-from pkgcore.util.file import read_dict, iter_read_bash
-from pkgcore.util import currying
-from pkgcore.util.osutils import (listdir_files, readfile, listdir_dirs,
- join as pjoin)
from pkgcore.ebuild import eclass_cache as eclass_cache_module
-from pkgcore.util.demandload import demandload
-from pkgcore.util.containers import InvertedContains
-from pkgcore.util.obj import make_kls
-from pkgcore.util.weakrefs import WeakValCache
+from pkgcore.config import ConfigHint
+from pkgcore.plugin import get_plugin
+
+from snakeoil.fileutils import read_dict, iter_read_bash
+from snakeoil import currying
+from snakeoil.osutils import (listdir_files, readfile, listdir_dirs,
+ join as pjoin)
+from snakeoil.containers import InvertedContains
+from snakeoil.obj import make_kls
+from snakeoil.weakrefs import WeakValCache
+
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.ebuild.ebd:buildable "
"pkgcore.interfaces.data_source:local_source "
"pkgcore.ebuild:digest "
@@ -24,8 +29,7 @@ demandload(globals(), "pkgcore.ebuild.ebd:buildable "
"random:shuffle "
"errno ")
-from pkgcore.config import ConfigHint
-from pkgcore.plugin import get_plugin
+
metadata_offset = "profiles"
diff --git a/pkgcore/ebuild/resolver.py b/pkgcore/ebuild/resolver.py
index 5c96b611d..9514c59a1 100644
--- a/pkgcore/ebuild/resolver.py
+++ b/pkgcore/ebuild/resolver.py
@@ -10,10 +10,10 @@ __all__ = ["upgrade_resolver", "min_install_resolver"]
from pkgcore.repository import virtual
from pkgcore.repository.misc import nodeps_repo
from pkgcore.resolver import plan
-from pkgcore.util.demandload import demandload
from itertools import chain
-from pkgcore.util.iterables import iter_sort
+from snakeoil.iterables import iter_sort
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.restrictions:packages,values "
"pkgcore.pkgsets.glsa:KeyedAndRestriction ")
diff --git a/pkgcore/ebuild/triggers.py b/pkgcore/ebuild/triggers.py
index 2ced04362..25fde69e3 100644
--- a/pkgcore/ebuild/triggers.py
+++ b/pkgcore/ebuild/triggers.py
@@ -7,14 +7,16 @@ gentoo/ebuild specific triggers
import os, errno
from pkgcore.merge import triggers, const, errors
-from pkgcore.util.file import read_bash_dict, AtomicWriteFile
from pkgcore.fs import livefs
-from pkgcore.util.osutils import normpath
from pkgcore.restrictions import values
-from pkgcore.util.osutils import listdir_files
-from pkgcore.util.lists import stable_unique, iflatten_instance
-from pkgcore.util.osutils import join as pjoin
-from pkgcore.util.demandload import demandload
+
+from snakeoil.osutils import normpath
+from snakeoil.fileutils import read_bash_dict, AtomicWriteFile
+from snakeoil.osutils import listdir_files
+from snakeoil.lists import stable_unique, iflatten_instance
+from snakeoil.osutils import join as pjoin
+
+from snakeoil.demandload import demandload
demandload(globals(), "fnmatch")
colon_parsed = frozenset(
@@ -383,7 +385,7 @@ class InfoRegen(triggers.InfoRegen):
@property
def locations(self):
- collapsed_d, inc, colon = collapse_envd(self.path)
+ collapsed_d = collapse_envd(self.path)[0]
l = collapsed_d.get("INFOPATH", [])
if not l:
return triggers.InfoRegen.locations
diff --git a/pkgcore/fetch/__init__.py b/pkgcore/fetch/__init__.py
index d2f1f0524..c9065b78f 100644
--- a/pkgcore/fetch/__init__.py
+++ b/pkgcore/fetch/__init__.py
@@ -5,7 +5,7 @@
functionality related to downloading files
"""
-from pkgcore.util.klass import generic_equality
+from snakeoil.klass import generic_equality
class fetchable(object):
diff --git a/pkgcore/fetch/custom.py b/pkgcore/fetch/custom.py
index a467ce438..9ed7f0928 100644
--- a/pkgcore/fetch/custom.py
+++ b/pkgcore/fetch/custom.py
@@ -8,9 +8,9 @@ fetcher class that pulls files via executing another program to do the fetching
import os
from pkgcore.spawn import spawn_bash, is_userpriv_capable
from pkgcore.os_data import portage_uid, portage_gid
-from pkgcore.util.osutils import ensure_dirs, join as pjoin
from pkgcore.fetch import errors, base, fetchable
from pkgcore.config import ConfigHint
+from snakeoil.osutils import ensure_dirs, join as pjoin
class MalformedCommand(errors.base):
diff --git a/pkgcore/fs/contents.py b/pkgcore/fs/contents.py
index 2a6301eb5..5a502aa44 100644
--- a/pkgcore/fs/contents.py
+++ b/pkgcore/fs/contents.py
@@ -6,9 +6,9 @@ contents set- container of fs objects
"""
from pkgcore.fs import fs
-from pkgcore.util.compatibility import all
-from pkgcore.util.klass import generic_equality
-from pkgcore.util.demandload import demandload
+from snakeoil.compatibility import all
+from snakeoil.klass import generic_equality
+from snakeoil.demandload import demandload
demandload(globals(),
"pkgcore.fs.ops:offset_rewriter,change_offset_rewriter "
)
diff --git a/pkgcore/fs/fs.py b/pkgcore/fs/fs.py
index f88c1ea48..89c1226c3 100644
--- a/pkgcore/fs/fs.py
+++ b/pkgcore/fs/fs.py
@@ -6,10 +6,10 @@ filesystem entry abstractions
"""
import stat
-from pkgcore.util.mappings import LazyFullValLoadDict
from pkgcore.chksum import get_handlers, get_chksums
from os.path import sep as path_seperator, abspath
from pkgcore.interfaces.data_source import local_source
+from snakeoil.mappings import LazyFullValLoadDict
# goofy set of classes representating the fs objects pkgcore knows of.
diff --git a/pkgcore/fs/livefs.py b/pkgcore/fs/livefs.py
index 8f0b11c1c..3628154a0 100644
--- a/pkgcore/fs/livefs.py
+++ b/pkgcore/fs/livefs.py
@@ -7,14 +7,16 @@ interaction with the livefs: generating fs objects to represent the livefs.
import os, collections
from stat import S_IMODE, S_ISDIR, S_ISREG, S_ISLNK, S_ISFIFO
+
from pkgcore.fs.fs import (
fsFile, fsDir, fsSymlink, fsDev, fsFifo, get_major_minor)
-from pkgcore.util.osutils import normpath, join as pjoin
from pkgcore.fs.contents import contentsSet
from pkgcore.chksum import get_handlers
-from pkgcore.util.mappings import LazyValDict
from pkgcore.interfaces.data_source import local_source
-from pkgcore.util.osutils import listdir
+
+from snakeoil.osutils import normpath, join as pjoin
+from snakeoil.mappings import LazyValDict
+from snakeoil.osutils import listdir
__all__ = ["gen_obj", "scan", "iter_scan"]
diff --git a/pkgcore/fs/ops.py b/pkgcore/fs/ops.py
index f57e26c95..bd9d712c6 100644
--- a/pkgcore/fs/ops.py
+++ b/pkgcore/fs/ops.py
@@ -9,12 +9,14 @@ L{pkgcore.plugins} to get at these ops.
"""
import os, errno
+
from pkgcore.fs import gen_obj, contents, fs
-from pkgcore.util.osutils import ensure_dirs, pjoin, normpath
from pkgcore.spawn import spawn
from pkgcore.const import COPY_BINARY
from pkgcore.plugin import get_plugin
-from pkgcore.util.currying import partial
+
+from snakeoil.currying import partial
+from snakeoil.osutils import ensure_dirs, pjoin, normpath
__all__ = [
"merge_contents", "unmerge_contents", "default_ensure_perms",
diff --git a/pkgcore/fs/tar.py b/pkgcore/fs/tar.py
index 3c0ca8ff9..950c9bd60 100644
--- a/pkgcore/fs/tar.py
+++ b/pkgcore/fs/tar.py
@@ -5,12 +5,13 @@
binpkg tar utilities
"""
import os, stat
-from pkgcore.util.tar import tarfile
from pkgcore.fs.fs import fsFile, fsDir, fsSymlink, fsFifo, fsDev
from pkgcore.fs import contents
-from pkgcore.util.mappings import OrderedDict, StackedDict
from pkgcore.interfaces.data_source import data_source
-from pkgcore.util.currying import partial
+
+from snakeoil.tar import tarfile
+from snakeoil.mappings import OrderedDict, StackedDict
+from snakeoil.currying import partial
class tar_data_source(data_source):
def get_fileobj(self):
diff --git a/pkgcore/interfaces/data_source.py b/pkgcore/interfaces/data_source.py
index 8fd507f45..abd4e4be3 100644
--- a/pkgcore/interfaces/data_source.py
+++ b/pkgcore/interfaces/data_source.py
@@ -6,9 +6,9 @@ data source.
Think of it as a far more minimal form of file protocol
"""
-from pkgcore.util.currying import pre_curry
import StringIO
+from snakeoil.currying import pre_curry
def generic_immutable_method(attr, self, *a, **kwds):
raise AttributeError("%s doesn't have %s" % (self.__class__, attr))
diff --git a/pkgcore/interfaces/format.py b/pkgcore/interfaces/format.py
index 1c4a15f39..eead68d1c 100644
--- a/pkgcore/interfaces/format.py
+++ b/pkgcore/interfaces/format.py
@@ -5,7 +5,7 @@
build operation
"""
-from pkgcore.util.dependant_methods import ForcedDepends
+from snakeoil.dependant_methods import ForcedDepends
__all__ = ["base", "FailedDirectory", "GenericBuildError", "errors"]
diff --git a/pkgcore/interfaces/observer.py b/pkgcore/interfaces/observer.py
index 91e11733c..a98377e9c 100644
--- a/pkgcore/interfaces/observer.py
+++ b/pkgcore/interfaces/observer.py
@@ -1,7 +1,7 @@
# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
# License: GPL2
-from pkgcore.util.currying import pre_curry
+from snakeoil.currying import pre_curry
class base(object):
@@ -25,10 +25,12 @@ class file_phase_observer(phase_observer):
self._semiquiet = semiquiet
def phase_start(self, phase):
- self._out.write("starting %s\n" % phase)
+ if not self._semiquiet:
+ self._out.write("starting %s\n" % phase)
def info(self, msg):
- self._out.write("info: %s\n" % msg)
+ if not self._semiquiet:
+ self._out.write("info: %s\n" % msg)
def warn(self, msg):
self._out.write("warning: %s\n" % msg)
@@ -70,7 +72,8 @@ class file_repo_observer(file_phase_observer, repo_base):
self._semiquiet = semiquiet
def trigger_start(self, hook, trigger):
- self._out.write("hook %s: trigger: starting %r\n" % (hook, trigger))
+ if not self._semiquiet:
+ self._out.write("hook %s: trigger: starting %r\n" % (hook, trigger))
def trigger_end(self, hook, trigger):
if not self._semiquiet:
diff --git a/pkgcore/interfaces/repo.py b/pkgcore/interfaces/repo.py
index 310a5759c..f59752853 100644
--- a/pkgcore/interfaces/repo.py
+++ b/pkgcore/interfaces/repo.py
@@ -5,9 +5,10 @@
repository modifications (installing, removing, replacing)
"""
-from pkgcore.util.dependant_methods import ForcedDepends
-from pkgcore.merge.engine import MergeEngine, errors as merge_errors
-from pkgcore.util.demandload import demandload
+from pkgcore.merge import errors as merge_errors
+from pkgcore.merge.engine import MergeEngine
+from snakeoil.dependant_methods import ForcedDepends
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.log:logger ")
diff --git a/pkgcore/merge/engine.py b/pkgcore/merge/engine.py
index 54bb36653..955d1de4a 100644
--- a/pkgcore/merge/engine.py
+++ b/pkgcore/merge/engine.py
@@ -15,14 +15,15 @@ import operator
from pkgcore.fs import contents
from pkgcore.fs import gen_obj as gen_fs_obj
-from pkgcore.util.mappings import LazyValDict, ImmutableDict, StackedDict
-from pkgcore.util import currying
from pkgcore.plugin import get_plugins
from pkgcore.merge import errors
from pkgcore.interfaces import observer as observer_mod
from pkgcore.merge.const import REPLACE_MODE, INSTALL_MODE, UNINSTALL_MODE
-from pkgcore.util.demandload import demandload
+from snakeoil.mappings import LazyValDict, ImmutableDict, StackedDict
+from snakeoil import currying
+
+from snakeoil.demandload import demandload
demandload(globals(), "errno "
)
diff --git a/pkgcore/merge/triggers.py b/pkgcore/merge/triggers.py
index 8a90f74ae..6e1b45a6b 100644
--- a/pkgcore/merge/triggers.py
+++ b/pkgcore/merge/triggers.py
@@ -15,14 +15,15 @@ __all__ = [
from pkgcore.merge import errors, const
import pkgcore.os_data
-from pkgcore.util.demandload import demandload
-from pkgcore.util.osutils import listdir_files, pjoin, ensure_dirs, normpath
+
+from snakeoil.osutils import listdir_files, pjoin, ensure_dirs, normpath
+from snakeoil.demandload import demandload
demandload(globals(), "os errno "
"pkgcore.plugin:get_plugin "
"pkgcore:spawn "
"pkgcore.fs.livefs:gen_obj "
"pkgcore.fs:fs,contents "
- "pkgcore.util.file:iter_read_bash "
+ "snakeoil.fileutils:iter_read_bash "
"time "
"math:floor "
)
diff --git a/pkgcore/package/conditionals.py b/pkgcore/package/conditionals.py
index 69c3f86af..c984e88c6 100644
--- a/pkgcore/package/conditionals.py
+++ b/pkgcore/package/conditionals.py
@@ -7,12 +7,13 @@ conditional attributes on a package.
Changing them triggering regen of other attributes on the package instance.
"""
-from pkgcore.package.base import wrapper
-from pkgcore.util.containers import LimitedChangeSet, Unchangable
-from pkgcore.util.demandload import demandload
-from pkgcore.util.klass import GetAttrProxy
from operator import attrgetter
-from pkgcore.util.currying import partial
+from pkgcore.package.base import wrapper
+
+from snakeoil.containers import LimitedChangeSet, Unchangable
+from snakeoil.klass import GetAttrProxy
+from snakeoil.currying import partial
+from snakeoil.demandload import demandload
demandload(globals(), "copy")
diff --git a/pkgcore/package/metadata.py b/pkgcore/package/metadata.py
index e2a0cec77..46b46d183 100644
--- a/pkgcore/package/metadata.py
+++ b/pkgcore/package/metadata.py
@@ -6,11 +6,11 @@
package with it's metadata accessible (think 'no longer abstract')
"""
-from pkgcore.util.weakrefs import WeakValCache
-
from pkgcore.ebuild.cpv import CPV
from pkgcore.ebuild.atom import atom
+from snakeoil.weakrefs import WeakValCache
+
def DeriveMetadataKls(original_kls):
if getattr(original_kls, "_derived_metadata_kls", False):
return original_kls
diff --git a/pkgcore/pkgsets/filelist.py b/pkgcore/pkgsets/filelist.py
index bba603b26..152bd3a5a 100644
--- a/pkgcore/pkgsets/filelist.py
+++ b/pkgcore/pkgsets/filelist.py
@@ -5,15 +5,15 @@
pkgset based around loading a list of atoms from a world file
"""
-from pkgcore.ebuild.atom import atom
-from pkgcore.util.demandload import demandload
import pkgcore.const
-demandload(globals(),
- "pkgcore.util.file:AtomicWriteFile "
- "pkgcore.util.osutils:readlines ")
-
+from pkgcore.ebuild.atom import atom
from pkgcore.config import ConfigHint
+from snakeoil.demandload import demandload
+demandload(globals(),
+ "snakeoil.fileutils:AtomicWriteFile "
+ "snakeoil.osutils:readlines ")
+
class FileList(object):
pkgcore_config_type = ConfigHint({'location':'str'}, typename='pkgset')
diff --git a/pkgcore/pkgsets/glsa.py b/pkgcore/pkgsets/glsa.py
index b8c3a4b3c..65d16a918 100644
--- a/pkgcore/pkgsets/glsa.py
+++ b/pkgcore/pkgsets/glsa.py
@@ -6,18 +6,20 @@ Gentoo Linux Security Advisories (GLSA) support
"""
import os
-from pkgcore.util.iterables import caching_iter
+
from pkgcore.restrictions import packages, restriction, boolean, values
from pkgcore.config import ConfigHint
-from pkgcore.util.osutils import listdir_files, join as pjoin
-from pkgcore.util.klass import generic_equality
-from pkgcore.util.demandload import demandload
-demandload(globals(), "pkgcore.util.xml:etree "
- "pkgcore.util.repo_utils:get_virtual_repos "
+from snakeoil.osutils import listdir_files, join as pjoin
+from snakeoil.klass import generic_equality
+from snakeoil.iterables import caching_iter
+from snakeoil.demandload import demandload
+demandload(globals(),
"pkgcore.package:mutated "
"pkgcore.ebuild:cpv,atom "
"pkgcore.log:logger "
+ "pkgcore.util.repo_utils:get_virtual_repos "
+ "snakeoil.xml:etree "
)
@@ -60,7 +62,7 @@ class GlsaDirSet(object):
"""
if not isinstance(src, basestring):
- src = tuple(sorted(filter(os.path.isdir,
+ src = tuple(sorted(filter(os.path.isdir,
(pjoin(repo.base, 'metadata', 'glsa') for repo in
get_virtual_repos(src, False) if hasattr(repo, 'base'))
)))
@@ -103,6 +105,7 @@ class GlsaDirSet(object):
#"glsa-1234-12.xml
if not (fn.startswith("glsa-") and fn.endswith(".xml")):
continue
+ # validate the GLSA id: the part between "glsa-" and ".xml" must be dash-separated integers
try:
[int(x) for x in fn[5:-4].split("-")]
except ValueError:
diff --git a/pkgcore/plugin.py b/pkgcore/plugin.py
index 64e31d5a3..76337d7a6 100644
--- a/pkgcore/plugin.py
+++ b/pkgcore/plugin.py
@@ -19,11 +19,13 @@ import operator
import os.path
from pkgcore import plugins
-from pkgcore.util.osutils import join as pjoin
-from pkgcore.util import modules, demandload
+from snakeoil.osutils import join as pjoin
+from snakeoil import modules, demandload
demandload.demandload(globals(), 'tempfile errno pkgcore.log:logger')
+CACHE_HEADER = 'pkgcore plugin cache v2\n'
+
# Global plugin cache. Mapping of package to package cache, which is a
# mapping of plugin key to a list of module names.
_cache = {}
@@ -48,7 +50,7 @@ def initialize_cache(package):
# Directory cache, mapping modulename to
# (mtime, set([keys]))
stored_cache = {}
- stored_cache_name = pjoin(path, 'plugincache')
+ stored_cache_name = pjoin(path, 'plugincache2')
try:
cachefile = open(stored_cache_name)
except IOError:
@@ -61,11 +63,22 @@ def initialize_cache(package):
try:
# Remove this extra nesting once we require python 2.5
try:
+ if cachefile.readline() != CACHE_HEADER:
+ raise ValueError('bogus header')
for line in cachefile:
module, mtime, entries = line[:-1].split(':', 2)
mtime = int(mtime)
- entries = set(entries.split(':'))
- stored_cache[module] = (mtime, entries)
+ result = set()
+ # Needed because ''.split(':') == [''], not []
+ if entries:
+ for s in entries.split(':'):
+ name, max_prio = s.split(',')
+ if max_prio:
+ max_prio = int(max_prio)
+ else:
+ max_prio = None
+ result.add((name, max_prio))
+ stored_cache[module] = (mtime, result)
except ValueError:
# Corrupt cache, treat as empty.
stored_cache = {}
@@ -108,8 +121,23 @@ def initialize_cache(package):
# try to continue.
logger.exception('plugin import failed')
else:
- keys = set(getattr(module, 'pkgcore_plugins', ()))
- actual_cache[modname] = (mtime, keys)
+ values = set()
+ registry = getattr(module, 'pkgcore_plugins', {})
+ for key, plugs in registry.iteritems():
+ max_prio = None
+ for plug in plugs:
+ priority = getattr(plug, 'priority', None)
+ if priority is not None \
+ and not isinstance(priority, int):
+ # This happens rather a lot with
+ # plugins not meant for use with
+ # get_plugin. Just ignore it.
+ priority = None
+ if priority is not None and (
+ max_prio is None or priority > max_prio):
+ max_prio = priority
+ values.add((key, max_prio))
+ actual_cache[modname] = (mtime, values)
# Cache is also stale if it sees entries that are no longer there.
for key in stored_cache:
if key not in actual_cache and key not in assumed_valid:
@@ -131,10 +159,17 @@ def initialize_cache(package):
stored_cache_name, e)
else:
cachefile = os.fdopen(fd, 'w')
+ cachefile.write(CACHE_HEADER)
try:
for module, (mtime, entries) in actual_cache.iteritems():
+ strings = []
+ for plugname, max_prio in entries:
+ if max_prio is None:
+ strings.append(plugname + ',')
+ else:
+ strings.append('%s,%s' % (plugname, max_prio))
cachefile.write(
- '%s:%s:%s\n' % (module, mtime, ':'.join(entries)))
+ '%s:%s:%s\n' % (module, mtime, ':'.join(strings)))
finally:
cachefile.close()
os.chmod(name, 0644)
@@ -142,8 +177,8 @@ def initialize_cache(package):
# Update the package_cache.
for module, (mtime, entries) in actual_cache.iteritems():
seen_modnames.add(module)
- for key in entries:
- package_cache.setdefault(key, []).append(module)
+ for key, max_prio in entries:
+ package_cache.setdefault(key, []).append((module, max_prio))
return package_cache
@@ -155,7 +190,7 @@ def get_plugins(key, package=plugins):
cache = _cache.get(package)
if cache is None:
cache = _cache[package] = initialize_cache(package)
- for modname in cache.get(key, ()):
+ for modname, max_prio in cache.get(key, ()):
module = modules.load_module('.'.join((package.__name__, modname)))
for obj in module.pkgcore_plugins.get(key, ()):
if not getattr(obj, 'disabled', False):
@@ -170,8 +205,20 @@ def get_plugin(key, package=plugins):
@return: highest-priority plugin or None if no plugin available.
"""
- candidates = list(plugin for plugin in get_plugins(key, package))
- if not candidates:
- return None
- candidates.sort(key=operator.attrgetter('priority'))
- return candidates[-1]
+ cache = _cache.get(package)
+ if cache is None:
+ cache = _cache[package] = initialize_cache(package)
+ modlist = cache.get(key, [])
+ modlist.sort(key=operator.itemgetter(1), reverse=True)
+ plugs = []
+ for i, (modname, max_prio) in enumerate(modlist):
+ module = modules.load_module('.'.join((package.__name__, modname)))
+ plugs.extend(
+ plug for plug in module.pkgcore_plugins.get(key, ())
+ if not getattr(plug, 'disabled', False))
+ if not plugs:
+ continue
+ plugs.sort(key=operator.attrgetter('priority'), reverse=True)
+ if i + 1 == len(modlist) or plugs[0].priority > modlist[i + 1][1]:
+ return plugs[0]
+ return None
diff --git a/pkgcore/plugins/pkgcore_ebuild_built.py b/pkgcore/plugins/pkgcore_ebuild_built.py
new file mode 100644
index 000000000..73b42c736
--- /dev/null
+++ b/pkgcore/plugins/pkgcore_ebuild_built.py
@@ -0,0 +1,8 @@
+# Copyright: 2007 Marien Zwart <marienz@gentoo.org>
+# License: GPL2
+
+from pkgcore.ebuild import ebuild_built
+
+pkgcore_plugins = {
+ 'format.ebuild_built': [ebuild_built.generate_new_factory],
+ }
diff --git a/pkgcore/plugins/pkgcore_ebuild_src.py b/pkgcore/plugins/pkgcore_ebuild_src.py
new file mode 100644
index 000000000..164e1e7e2
--- /dev/null
+++ b/pkgcore/plugins/pkgcore_ebuild_src.py
@@ -0,0 +1,8 @@
+# Copyright: 2007 Marien Zwart <marienz@gentoo.org>
+# License: GPL2
+
+from pkgcore.ebuild import ebuild_src
+
+pkgcore_plugins = {
+ 'format.ebuild_src': [ebuild_src.generate_new_factory],
+ }
diff --git a/pkgcore/plugins/pkgcore_formats_default.py b/pkgcore/plugins/pkgcore_formats_default.py
deleted file mode 100644
index 1ec9d27b7..000000000
--- a/pkgcore/plugins/pkgcore_formats_default.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-from pkgcore.ebuild import ebuild_built, ebuild_src
-
-pkgcore_plugins = {
- 'format.ebuild_built': [ebuild_built.generate_new_factory],
- 'format.ebuild_src': [ebuild_src.generate_new_factory],
- }
diff --git a/pkgcore/repository/configured.py b/pkgcore/repository/configured.py
index dff6e06d7..d5fe08ef4 100644
--- a/pkgcore/repository/configured.py
+++ b/pkgcore/repository/configured.py
@@ -7,8 +7,8 @@ wrap a repository, binding configuration to pkgs returned from the repository
from pkgcore.repository import prototype
from pkgcore.package.conditionals import make_wrapper
-from pkgcore.util.currying import partial
-from pkgcore.util.klass import GetAttrProxy
+from snakeoil.currying import partial
+from snakeoil.klass import GetAttrProxy
class tree(prototype.tree):
diff --git a/pkgcore/repository/misc.py b/pkgcore/repository/misc.py
index 8e481f47b..a1e67e7c6 100644
--- a/pkgcore/repository/misc.py
+++ b/pkgcore/repository/misc.py
@@ -3,8 +3,8 @@
from pkgcore.restrictions import packages
from pkgcore.package.mutated import MutatedPkg
-from pkgcore.util.iterables import caching_iter
-from pkgcore.util.klass import GetAttrProxy
+from snakeoil.iterables import caching_iter
+from snakeoil.klass import GetAttrProxy
__all__ = ("nodeps_repo", "caching_repo")
@@ -26,7 +26,7 @@ class nodeps_repo(object):
self.raw_repo = repo
def itermatch(self, *a, **kwds):
- return (MutatedPkg(x,
+ return (MutatedPkg(x,
overrides={"depends":self.default_depends,
"rdepends":self.default_rdepends,
"post_rdepends":self.default_post_rdepends})
diff --git a/pkgcore/repository/multiplex.py b/pkgcore/repository/multiplex.py
index 10bf27265..8f49dd252 100644
--- a/pkgcore/repository/multiplex.py
+++ b/pkgcore/repository/multiplex.py
@@ -5,10 +5,10 @@
repository that combines multiple repositories together
"""
-from pkgcore.repository import prototype, errors
-from pkgcore.util.currying import partial
-from pkgcore.util.iterables import iter_sort
from operator import itemgetter
+from pkgcore.repository import prototype, errors
+from snakeoil.currying import partial
+from snakeoil.iterables import iter_sort
class tree(prototype.tree):
diff --git a/pkgcore/repository/prototype.py b/pkgcore/repository/prototype.py
index e9cacbf64..1367a92e5 100644
--- a/pkgcore/repository/prototype.py
+++ b/pkgcore/repository/prototype.py
@@ -5,13 +5,14 @@
base repository template
"""
-from pkgcore.util.mappings import LazyValDict, DictMixin
-from pkgcore.util.lists import iflatten_instance
+
from pkgcore.ebuild.atom import atom
from pkgcore.restrictions import values, boolean, restriction
-from pkgcore.util.compatibility import any
from pkgcore.restrictions.util import collect_package_restrictions
+from snakeoil.mappings import LazyValDict, DictMixin
+from snakeoil.lists import iflatten_instance
+from snakeoil.compatibility import any
class IterValLazyDict(LazyValDict):
diff --git a/pkgcore/repository/virtual.py b/pkgcore/repository/virtual.py
index 4db593a9a..76edc5d08 100644
--- a/pkgcore/repository/virtual.py
+++ b/pkgcore/repository/virtual.py
@@ -7,7 +7,7 @@ virtual repository, pkgs generated via callable
from pkgcore.repository import prototype
from pkgcore.package import virtual
-from pkgcore.util.currying import partial
+from snakeoil.currying import partial
def mangle_args(new_package_func, mangler_func, *args):
return new_package_func(*mangler_func(args))
diff --git a/pkgcore/repository/visibility.py b/pkgcore/repository/visibility.py
index 847742df5..d7b3bef2d 100644
--- a/pkgcore/repository/visibility.py
+++ b/pkgcore/repository/visibility.py
@@ -9,7 +9,7 @@ filtering repository
# ~harring
from pkgcore.repository import prototype, errors
from pkgcore.restrictions.restriction import base
-from pkgcore.util.klass import GetAttrProxy
+from snakeoil.klass import GetAttrProxy
class filterTree(prototype.tree):
diff --git a/pkgcore/repository/wrapper.py b/pkgcore/repository/wrapper.py
index 85b12012f..af553a443 100644
--- a/pkgcore/repository/wrapper.py
+++ b/pkgcore/repository/wrapper.py
@@ -8,7 +8,7 @@ simple repository wrapping to override the package instances returned
# icky.
# ~harring
from pkgcore.repository import prototype, errors
-from pkgcore.util.klass import GetAttrProxy
+from snakeoil.klass import GetAttrProxy
class tree(prototype.tree):
diff --git a/pkgcore/resolver/choice_point.py b/pkgcore/resolver/choice_point.py
index 1a1190e4b..06e28c6ad 100644
--- a/pkgcore/resolver/choice_point.py
+++ b/pkgcore/resolver/choice_point.py
@@ -2,7 +2,7 @@
# License: GPL2
-from pkgcore.util.lists import iter_stable_unique
+from snakeoil.lists import iter_stable_unique
class choice_point(object):
diff --git a/pkgcore/resolver/plan.py b/pkgcore/resolver/plan.py
index d0527526c..cbe34b61d 100644
--- a/pkgcore/resolver/plan.py
+++ b/pkgcore/resolver/plan.py
@@ -4,15 +4,16 @@
import operator
from itertools import chain, islice
from collections import deque
-from pkgcore.util.compatibility import any
-from pkgcore.util.iterables import caching_iter, iter_sort
-from pkgcore.util.containers import RefCountingSet
+
from pkgcore.resolver.pigeonholes import PigeonHoledSlots
from pkgcore.resolver.choice_point import choice_point
-from pkgcore.util.currying import partial, post_curry
from pkgcore.restrictions import packages, values, restriction
from pkgcore.repository.misc import caching_repo
+from snakeoil.currying import partial, post_curry
+from snakeoil.compatibility import any
+from snakeoil.iterables import caching_iter, iter_sort
+from snakeoil.containers import RefCountingSet
limiters = set(["cycle"]) # [None])
def dprint(fmt, args=None, label=None):
@@ -242,10 +243,10 @@ class merge_plan(object):
while index != -1:
looped = True
# see if it's a vdb node already; if it's a cycle between
- # the same vdb node, ignore it (ignore self-dependant
+ # the same vdb node, ignore it (ignore self-dependent
# depends level installed deps for the same node iow)
- if ((index < stack_end and index) and
- (current_stack[index - 1].current_pkg ==
+ if ((index < stack_end and index) and
+ (current_stack[index - 1].current_pkg ==
cur_frame.current_pkg)
and cur_frame.current_pkg.repo.livefs):
# we're in a cycle of depends level vdb nodes;
@@ -255,7 +256,7 @@ class merge_plan(object):
"for %s via %s, exempting" %
(cur_frame.depth *2 * " ", cur_frame.atom,
cur_frame.choices.current_pkg))
- ignore=True
+ ignore = True
break
else:
# ok, so the first candidate wasn't vdb.
@@ -263,7 +264,7 @@ class merge_plan(object):
index = is_cycle(current_stack, datom,
cur_frame.choices, None, start=index + 1)
else:
- # ok. it exited on it's own. meaning either no cycles,
+ # ok. it exited on its own. meaning either no cycles,
# or no vdb exemptions where found.
if looped:
# non vdb level cycle. vdb bound?
@@ -461,8 +462,8 @@ class merge_plan(object):
"""
internal function to discern if an atom is viable, returning
the matches iter if so
-
- @return: 3 possible; None (not viable), True (presolved),
+
+ @return: 3 possible; None (not viable), True (presolved),
L{caching_iter} (not solved, but viable)
"""
if atom in self.insoluble:
@@ -634,7 +635,7 @@ class merge_plan(object):
c = choice_point(x, m)
add_op(c, c.current_pkg, force=True).apply(
self.state)
- break;
+ break
rewrote_blocker = self.generate_mangled_blocker(choices, x)
l = self.state.add_blocker(choices, rewrote_blocker, key=x.key)
@@ -750,7 +751,7 @@ class merge_plan(object):
def prefer_livefs_dbs(cls, dbs, just_vdb=None):
"""
@param dbs: db list to walk
- @param just_vdb: if None, no filtering; if True, just vdb, if False,
+ @param just_vdb: if None, no filtering; if True, just vdb, if False,
non-vdb only
@return: yields repositories in requested ordering
"""
diff --git a/pkgcore/restrictions/boolean.py b/pkgcore/restrictions/boolean.py
index 1e4d92954..0552c4017 100644
--- a/pkgcore/restrictions/boolean.py
+++ b/pkgcore/restrictions/boolean.py
@@ -12,7 +12,7 @@ __all__ = ("AndRestriction", "OrRestriction")
from itertools import islice
from pkgcore.restrictions import restriction
-from pkgcore.util.klass import generic_equality
+from snakeoil.klass import generic_equality
class base(restriction.base):
@@ -375,14 +375,6 @@ class OrRestriction(base):
if not self.restrictions:
return []
- def f(arg, *others):
- if others:
- for node2 in f(*others):
- yield arg + node2
- else:
- yield [arg]
-
-
dcnf = []
cnf = []
for x in self.restrictions:
diff --git a/pkgcore/restrictions/packages.py b/pkgcore/restrictions/packages.py
index 98bb73943..3010adabb 100644
--- a/pkgcore/restrictions/packages.py
+++ b/pkgcore/restrictions/packages.py
@@ -6,9 +6,9 @@ restriction classes designed for package level matching
"""
from pkgcore.restrictions import restriction, boolean
-from pkgcore.util.demandload import demandload
-from pkgcore.util.compatibility import any
-from pkgcore.util.klass import chained_getter, generic_equality
+from snakeoil.compatibility import any
+from snakeoil.klass import chained_getter, generic_equality
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.log:logger")
# Backwards compatibility.
diff --git a/pkgcore/restrictions/restriction.py b/pkgcore/restrictions/restriction.py
index 96f1e08df..cebc9c699 100644
--- a/pkgcore/restrictions/restriction.py
+++ b/pkgcore/restrictions/restriction.py
@@ -6,8 +6,8 @@
base restriction class
"""
-from pkgcore.util import caching
-from pkgcore.util.currying import partial, pretty_docs
+from snakeoil import caching
+from snakeoil.currying import partial, pretty_docs
class base(object):
diff --git a/pkgcore/restrictions/util.py b/pkgcore/restrictions/util.py
index c4163d156..e926000a4 100644
--- a/pkgcore/restrictions/util.py
+++ b/pkgcore/restrictions/util.py
@@ -5,8 +5,8 @@
restriction related utilities
"""
-from pkgcore.util.lists import iflatten_func
from pkgcore.restrictions import packages, boolean, restriction
+from snakeoil.lists import iflatten_func
def _is_package_instance(inst):
return (getattr(inst, "type", None) == packages.package_type
diff --git a/pkgcore/restrictions/values.py b/pkgcore/restrictions/values.py
index 4f7ace745..c31de24b8 100644
--- a/pkgcore/restrictions/values.py
+++ b/pkgcore/restrictions/values.py
@@ -12,9 +12,9 @@ attr from a package instance and hand it to their wrapped restriction
"""
from pkgcore.restrictions import restriction, boolean, packages
-from pkgcore.util.klass import generic_equality
-from pkgcore.util import demandload
-demandload.demandload(globals(), 're pkgcore.util:lists')
+from snakeoil.klass import generic_equality
+from snakeoil import demandload
+demandload.demandload(globals(), 're snakeoil:lists')
# Backwards compatibility.
value_type = restriction.value_type
@@ -196,7 +196,7 @@ else:
base_StrExactMatch = extension.StrExactMatch
# these are broken out so that it is easier to
-# generate native/cpy version of the class for
+# generate native/cpy version of the class for
# testing each.
def _StrExact_intersect(self, other):
s1, s2 = self.exact, other.exact
@@ -492,9 +492,9 @@ class ContainmentMatch(hashed_base):
return True
else:
l = len(self.vals)
- def filter(truths): return truths.count(True) < l
- def true(r, pvals): return pkg.request_enable(attr, r)
- def false(r, pvals): return pkg.request_disable(attr, r)
+ def filter(truths): return truths.count(True) < l
+ def true(r, pvals): return pkg.request_enable(attr, r)
+ def false(r, pvals): return pkg.request_disable(attr, r)
truths = [x in val for x in self.vals]
for x in boolean.iterative_quad_toggling(
pkg, None, list(self.vals), 0, l, truths, filter,
@@ -540,9 +540,9 @@ class ContainmentMatch(hashed_base):
if pkg.request_disable(attr, *self.vals):
return True
else:
- def filter(truths): return True not in truths
- def true(r, pvals): return pkg.request_enable(attr, r)
- def false(r, pvals): return pkg.request_disable(attr, r)
+ def filter(truths): return True not in truths
+ def true(r, pvals): return pkg.request_enable(attr, r)
+ def false(r, pvals): return pkg.request_disable(attr, r)
truths = [x in val for x in self.vals]
for x in boolean.iterative_quad_toggling(
pkg, None, list(self.vals), 0, len(self.vals), truths, filter,
@@ -578,7 +578,7 @@ class FlatteningRestriction(hashed_base):
@type dont_iter: type or tuple of types
@param dont_iter: type(s) not to flatten.
- Passed to L{pkgcore.util.lists.iflatten_instance}.
+ Passed to L{snakeoil.lists.iflatten_instance}.
@type childrestriction: restriction
@param childrestriction: restriction applied to the flattened list.
"""
diff --git a/pkgcore/scripts/__init__.py b/pkgcore/scripts/__init__.py
index bb7bbe08e..089357cd4 100644
--- a/pkgcore/scripts/__init__.py
+++ b/pkgcore/scripts/__init__.py
@@ -5,12 +5,12 @@
Modules in here are accessible through the pwrapper script. They
should have an C{OptionParser} attribute that is a
-L{pkgcore.util.commandline.OptionParser} subclass and a C{main}
+L{snakeoil.commandline.OptionParser} subclass and a C{main}
attribute that is a function usable with
-L{pkgcore.util.commandline.main}.
+L{snakeoil.commandline.main}.
The goal of this is avoiding boilerplate and making sure the scripts
have a similar look and feel. If your script needs to do something
-L{pkgcore.util.commandline} does not support please improve it instead
+L{snakeoil.commandline} does not support please improve it instead
of bypassing it.
"""
diff --git a/pkgcore/scripts/pconfig.py b/pkgcore/scripts/pconfig.py
index 13fad3388..c2d7646d8 100644
--- a/pkgcore/scripts/pconfig.py
+++ b/pkgcore/scripts/pconfig.py
@@ -7,9 +7,9 @@
import traceback
from pkgcore.config import errors, basics
-from pkgcore.util import commandline, modules
from pkgcore.plugin import get_plugins
-
+from pkgcore.util import commandline
+from snakeoil import modules
class DescribeClassParser(commandline.OptionParser):
@@ -128,8 +128,6 @@ def write_type(out, type_obj):
out.write('%s: %s' % (name, typename), autoline=False)
if name in type_obj.required:
out.write(' (required)', autoline=False)
- if name in type_obj.incrementals:
- out.write(' (incremental)', autoline=False)
out.write()
@@ -240,29 +238,44 @@ def _dump_uncollapsed_section(config, out, section):
for key in sorted(section.keys()):
kind, value = section.get_value(config, key, 'repr')
out.write('# type: %s' % (kind,))
+ if kind == 'list':
+ for name, val in zip((
+ key + '.prepend', key, key + '.append'), value):
+ if val:
+ out.write(
+ repr(name), ' = ', ' '.join(repr(v) for v in val))
+ continue
+ if kind in ('refs', 'str'):
+ for name, val in zip((
+ key + '.prepend', key, key + '.append'), value):
+ if not val:
+ continue
+ out.write(repr(name), ' = ', autoline=False)
+ if kind == 'str':
+ out.write(repr(val))
+ else:
+ out.write()
+ out.first_prefix.append(' ')
+ try:
+ for subnr, subsection in enumerate(val):
+ subname = 'nested section %s' % (subnr + 1,)
+ out.write(subname)
+ out.write('=' * len(subname))
+ _dump_uncollapsed_section(config, out, subsection)
+ out.write()
+ finally:
+ out.first_prefix.pop()
+ continue
out.write('%r = ' % (key,), autoline=False)
- if kind == 'str':
- out.write(repr(value))
- elif kind == 'list':
- out.write(' '.join(repr(v) for v in value))
- elif kind == 'callable':
+ if kind == 'callable':
out.write(value.__module__, value.__name__)
elif kind == 'bool':
out.write(str(value))
- elif kind == 'ref' or kind == 'refs':
+ elif kind == 'ref':
out.first_prefix.append(' ')
try:
- if kind == 'ref':
- out.write()
- _dump_uncollapsed_section(config, out, value)
- else:
- out.write()
- for subnr, subsection in enumerate(value):
- name = 'nested section %s' % (subnr + 1,)
- out.write(name)
- out.write('=' * len(name))
- _dump_uncollapsed_section(config, out, subsection)
- out.write()
+ out.write()
+ _dump_uncollapsed_section(config, out, value)
finally:
out.first_prefix.pop()
else:
diff --git a/pkgcore/scripts/pmaint.py b/pkgcore/scripts/pmaint.py
index 62de368b2..0ec89b289 100644
--- a/pkgcore/scripts/pmaint.py
+++ b/pkgcore/scripts/pmaint.py
@@ -1,4 +1,5 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
+# Copyright: 2005-2007 Brian Harring <ferringb@gmail.com>
+# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
# License: GPL2
"""
@@ -6,7 +7,7 @@ repository maintainence
"""
from pkgcore.util import commandline
-from pkgcore.util.demandload import demandload
+from snakeoil.demandload import demandload
demandload(globals(), "pkgcore.repository:multiplex "
"pkgcore.util:parserestrict "
@@ -15,7 +16,11 @@ demandload(globals(), "pkgcore.repository:multiplex "
"pkgcore.restrictions:packages "
"pkgcore.restrictions.boolean:OrRestriction "
"errno "
- )
+ "threading:Event "
+ "threading:Thread "
+ "Queue:Queue "
+ "time:time "
+)
commandline_commands = {}
@@ -52,7 +57,7 @@ class SyncParser(commandline.OptionParser):
return values, []
def sync_main(options, out, err):
- """update a local repositories to match their remote parent"""
+ """Update a local repositories to match their remote parent"""
config = options.config
succeeded, failed = [], []
seen = set()
@@ -135,7 +140,7 @@ class CopyParser(commandline.OptionParser):
values.candidates = []
if values.copy_missing:
- restrict = OrRestriction(self.convert_to_restrict(args))
+ restrict = OrRestriction(*self.convert_to_restrict(args))
for package in values.source_repo.itermatch(restrict):
if not values.target_repo.match(package.versioned_atom):
values.candidates.append(package.versioned_atom)
@@ -146,7 +151,7 @@ class CopyParser(commandline.OptionParser):
def copy_main(options, out, err):
- "copy pkgs between repositories"
+ """Copy pkgs between repositories."""
trg_repo = options.target_repo
src_repo = options.source_repo
@@ -213,3 +218,109 @@ def copy_main(options, out, err):
return 0
commandline_commands['copy'] = (CopyParser, copy_main)
+
+
+class RegenParser(commandline.OptionParser):
+
+ def __init__(self, **kwargs):
+ commandline.OptionParser.__init__(
+ self, description=__doc__, usage='%prog [options] repo [threads]',
+ **kwargs)
+
+ def check_values(self, values, args):
+ values, args = commandline.OptionParser.check_values(
+ self, values, args)
+ if not args:
+ self.error('Need a repository name.')
+ if len(args) > 2:
+ self.error('I do not know what to do with more than 2 arguments')
+
+ if len(args) == 2:
+ try:
+ values.thread_count = int(args[1])
+ except ValueError:
+ self.error('%r should be an integer' % (args[1],))
+ if values.thread_count <= 0:
+ self.error('thread count needs to be at least 1')
+ else:
+ values.thread_count = 1
+
+ try:
+ values.repo = values.config.repo[args[0]]
+ except KeyError:
+ self.error('repo %r was not found! known repos: %s' % (
+ args[0], ', '.join(str(x) for x in values.config.repo)))
+
+ return values, ()
+
+
+def regen_iter(iterable, err):
+ for x in iterable:
+ try:
+ x.keywords
+ except RuntimeError:
+ raise
+ except Exception, e:
+ err.write("caught exception %s for %s" % (e, x))
+
+def reclaim_threads(threads, err):
+ for x in threads:
+ try:
+ x.join()
+ except RuntimeError:
+ raise
+ except Exception, e:
+ err.write("caught exception %s reclaiming thread" % (e,))
+
+def regen_main(options, out, err):
+ """Regenerate a repository cache."""
+ start_time = time()
+ # HACK: store this here so we can assign to it from inside def passthru.
+ options.count = 0
+ if options.thread_count == 1:
+ def passthru(iterable):
+ for x in iterable:
+ options.count += 1
+ yield x
+ regen_iter(passthru(options.repo), err)
+ else:
+ queue = Queue(options.thread_count * 2)
+ kill = Event()
+ kill.clear()
+ def iter_queue(kill, qlist, timeout=0.25):
+ while not kill.isSet():
+ try:
+ yield qlist.get(timeout=timeout)
+ except Queue.Empty:
+ continue
+ regen_threads = [
+ Thread(
+ target=regen_iter, args=(iter_queue(kill, queue), err))
+ for x in xrange(options.thread_count)]
+ out.write('starting %d threads' % (options.thread_count,))
+ try:
+ for x in regen_threads:
+ x.start()
+ out.write('started')
+ # now we feed the queue.
+ for pkg in options.repo:
+ options.count += 1
+ queue.put(pkg)
+ except Exception:
+ kill.set()
+ reclaim_threads(regen_threads, err)
+ raise
+
+ # by now, queue is fed. reliable for our uses since the queue
+ # is only subtracted from.
+ while not queue.empty():
+ sleep(.5)
+ kill.set()
+ reclaim_threads(regen_threads, err)
+ assert queue.empty()
+ out.write(
+ "finished %d nodes in in %.2f seconds" % (
+ options.count, time() - start_time))
+ return 0
+
+commandline_commands['regen'] = (RegenParser, regen_main)
diff --git a/pkgcore/scripts/pmerge.py b/pkgcore/scripts/pmerge.py
index 1c6bb94ee..c6d1e117a 100644
--- a/pkgcore/scripts/pmerge.py
+++ b/pkgcore/scripts/pmerge.py
@@ -5,20 +5,20 @@
"""Mess with the resolver and vdb."""
+from time import time
-import time
-
-from pkgcore.restrictions import packages, values
-from pkgcore.util import commandline, parserestrict, lists, repo_utils
-from pkgcore.util.compatibility import any
+from pkgcore.util import commandline, parserestrict, repo_utils
from pkgcore.ebuild import resolver
from pkgcore.repository import multiplex
from pkgcore.interfaces import observer, format
-from pkgcore.util.formatters import ObserverFormatter
-from pkgcore.util.packages import get_raw_pkg
+from pkgcore.ebuild.formatter import formatters
from pkgcore.pkgsets.glsa import KeyedAndRestriction
from pkgcore.ebuild.atom import atom
+from snakeoil import lists
+from snakeoil.formatters import ObserverFormatter
+from snakeoil.compatibility import any
+
class OptionParser(commandline.OptionParser):
def __init__(self, **kwargs):
@@ -76,11 +76,21 @@ a depends on b, and b depends on a, with neither built is an example""")
self.add_option('--force', action='store_true',
dest='force',
help="force merging to a repo, regardless of if it's frozen")
- self.add_option('--oneshot', '-o', action='store_true',
+ self.add_option('--oneshot', '-o', '-1', action='store_true',
default=False,
help="do not record changes in the world file; if a set is "
- "involved, defaults to forcing oneshot")
-
+ "involved, defaults to forcing oneshot")
+ self.add_option(
+ '--formatter', '-F', type='choice', choices=formatters.keys(),
+ default='portage',
+ help='which formatter to output --pretend or --ask output through. '
+ 'valid values are: %s' % (', '.join(formatters),))
+ self.add_option('--verbose', '-v', action='store_true',
+ help="be more verbose about the buildplan. Currently only "
+ 'supported by the portage formatter')
+ self.add_option('--quiet', '-q', action='store_true',
+ help="be quieter about the buildplan. "
+ "*Not* the same as omitting verbose")
def check_values(self, options, args):
options, args = commandline.OptionParser.check_values(
@@ -89,13 +99,13 @@ a depends on b, and b depends on a, with neither built is an example""")
if options.unmerge:
if options.set:
- self.error("Sorry, using sets with -C probably isn't wise")
+ self.error("Using sets with -C probably isn't wise, aborting")
if options.upgrade:
- self.error("can't combine upgrade and unmerging")
+ self.error("Cannot upgrade and unmerge simultaneously")
if not options.targets:
- self.error("need at least one atom")
+ self.error("You must provide at least one atom")
if options.clean:
- self.error("Sorry, -C cannot be used with --clean")
+ self.error("Cannot use -C with --clean")
if options.clean:
if options.set or options.targets:
self.error("--clean currently has set/targets disabled; in "
@@ -113,7 +123,6 @@ a depends on b, and b depends on a, with neither built is an example""")
self.error('Need at least one atom/set')
return options, ()
-
class AmbiguousQuery(parserestrict.ParseError):
def __init__(self, token, keys):
parserestrict.ParseError.__init__(
@@ -155,47 +164,6 @@ class Failure(ValueError):
"""Raised internally to indicate an "expected" failure condition."""
-def userquery(prompt, out, err, responses=None, default_answer=None, limit=3):
- """Ask the user to choose from a set of options.
-
- Displays a prompt and a set of responses, then waits for a
- response which is checked against the responses. If there is an
- unambiguous match the value is returned.
-
- @type prompt: C{basestring}.
- @type out: formatter.
- @type err: file-like object.
- @type responses: mapping with C{basestring} keys
- @param responses: mapping of user input to function result.
- Defaults to {"Yes": True, "No": False}.
- @param default_answer: returned if there is no input
- (user just hits enter). Defaults to True if responses is unset,
- unused otherwise.
- @param limit: number of allowed tries.
- """
- if responses is None:
- responses = {'Yes': True, 'No': False}
- if default_answer is None:
- default_answer = True
- for i in range(limit):
- response = raw_input('%s [%s] ' % (prompt, '/'.join(responses)))
- if not response and default_answer is not None:
- return default_answer
-
- results = set(
- (key, value) for key, value in responses.iteritems()
- if key[:len(response)].upper() == response.upper())
- if not results:
- out.write('Sorry, response "%s" not understood.' % (response,))
- elif len(results) > 1:
- out.write('Response "%s" is ambiguous (%s)' % (
- response, ', '.join(key for key, val in results)))
- else:
- return list(results)[0][1]
-
- raise Failure('You have input a wrong response too many times.')
-
-
def unmerge(out, err, vdb, tokens, options, world_set=None):
"""Unmerge tokens. hackish, should be rolled back into the resolver"""
all_matches = set()
@@ -215,12 +183,12 @@ def unmerge(out, err, vdb, tokens, options, world_set=None):
categories = set(pkg.category for pkg in matches)
if len(categories) > 1:
raise parserestrict.ParseError(
- '%s matches in multiple categories (%s)' % (
+ '%s is in multiple categories (%s)' % (
token, ', '.join(set(pkg.key for pkg in matches))))
all_matches.update(matches)
matches = sorted(all_matches)
- out.write(out.bold, 'Unmerge:')
+ out.write(out.bold, 'The following packages are to be unmerged:')
out.prefix = [out.bold, ' * ', out.reset]
for match in matches:
out.write(match.cpvstr)
@@ -232,7 +200,7 @@ def unmerge(out, err, vdb, tokens, options, world_set=None):
return
if (options.ask and not
- userquery("Would you like to unmerge these packages?", out, err)):
+ formatter.ask("Would you like to unmerge these packages?")):
return
return do_unmerge(options, out, err, vdb, matches, world_set, repo_obs)
@@ -240,7 +208,7 @@ def do_unmerge(options, out, err, vdb, matches, world_set, repo_obs):
if vdb.frozen:
if options.force:
out.write(
- out.fg(out.red), out.bold,
+ out.fg('red'), out.bold,
'warning: vdb is frozen, overriding')
vdb.frozen = False
else:
@@ -254,7 +222,7 @@ def do_unmerge(options, out, err, vdb, matches, world_set, repo_obs):
if not ret:
if not options.ignore_failures:
raise Failure('failed unmerging %s' % (match,))
- out.write(out.fg(out.red), 'failed unmerging ', match)
+ out.write(out.fg('red'), 'failed unmerging ', match)
update_worldset(world_set, match, remove=True)
out.write("finished; removed %i packages" % len(matches))
@@ -288,14 +256,18 @@ def main(options, out, err):
domain = config.get_default('domain')
vdb = domain.all_vdbs
+ formatter = formatters[options.formatter](out=out, err=err,
+ verbose=options.verbose, use_expand=domain.use_expand,
+ use_expand_hidden=domain.use_expand_hidden)
+
# This mode does not care about sets and packages so bypass all that.
if options.unmerge:
world_set = None
if not options.oneshot:
world_set = get_pkgset(config, err, "world")
if world_set is None:
- err.write("disable world updating via --oneshot, or fix your "
- "config")
+ err.write("Disable world updating via --oneshot, or fix your "
+ "configuration")
return 1
try:
unmerge(
@@ -340,18 +312,18 @@ def main(options, out, err):
if a is None:
if token in config.pkgset:
out.error(
- 'No package matches for %r, but there is a set with '
+ 'No package matches %r, but there is a set with '
'that name. Use -s to specify a set.' % (token,))
return 2
elif not options.ignore_failures:
- out.error('No matches for %r; ignoring' % token)
+ out.error('No matches for %r; ignoring it' % token)
else:
return -1
else:
atoms.append(a)
if not atoms:
- out.error('No targets specified- nothing to do')
+ out.error('No targets specified; nothing to do')
return 1
atoms = lists.stable_unique(atoms)
@@ -360,8 +332,8 @@ def main(options, out, err):
if (not options.set or options.clean) and not options.oneshot:
world_set = get_pkgset(config, err, 'world')
if world_set is None:
- err.write("disable world updating via --oneshot, or fix your "
- "config")
+ err.write("Disable world updating via --oneshot, or fix your "
+ "configuration")
return 1
if options.upgrade:
@@ -381,14 +353,14 @@ def main(options, out, err):
if options.preload_vdb_state:
out.write(out.bold, ' * ', out.reset, 'Preloading vdb... ')
- vdb_time = time.time()
+ vdb_time = time()
resolver_inst.load_vdb_state()
- vdb_time = time.time() - vdb_time
+ vdb_time = time() - vdb_time
else:
vdb_time = 0.0
failures = []
- resolve_time = time.time()
+ resolve_time = time()
out.write(out.bold, ' * ', out.reset, 'Resolving...')
out.title('Resolving...')
for restrict in atoms:
@@ -400,7 +372,7 @@ def main(options, out, err):
failures.append(restrict)
if not options.ignore_failures:
break
- resolve_time = time.time() - resolve_time
+ resolve_time = time() - resolve_time
if failures:
out.write()
out.write('Failures encountered:')
@@ -415,20 +387,20 @@ def main(options, out, err):
"repo %s: [ %s ]" % (r, ", ".join(str(x) for x in l)))
match_count += len(l)
if not match_count:
- out.write("no matches found in %s" % (repos,))
+ out.write("No matches found in %s" % (repos,))
out.write()
if not options.ignore_failures:
return 1
if options.clean:
- out.write(out.bold, ' * ', out.reset, 'packages to remove')
+ out.write(out.bold, ' * ', out.reset, 'Packages to be removed:')
vset = set(vdb)
len_vset = len(vset)
vset.difference_update(y.pkg for y in
resolver_inst.state.iter_ops(True))
wipes = sorted(x for x in vset if x.package_is_real)
for x in wipes:
- out.write("remove %s" % x)
+ out.write("Remove %s" % x)
out.write()
if len(wipes):
out.write("removing %i packages of %i installed, %0.2f%%." %
@@ -436,44 +408,37 @@ def main(options, out, err):
else:
out.write("no packages to remove")
if options.pretend:
- return 0;
+ return 0
if options.ask:
- if not userquery("do you wish to proceed (default answer is no)?",
- out, err, default_answer=False):
+ if not formatter.ask("Do you wish to proceed?", default_answer=False):
return 1
out.write()
repo_obs = observer.file_repo_observer(ObserverFormatter(out))
do_unmerge(options, out, err, vdb, wipes, world_set, repo_obs)
- return 0;
+ return 0
- out.write(out.bold, ' * ', out.reset, 'buildplan')
changes = list(x for x in resolver_inst.state.iter_ops()
if x.pkg.package_is_real)
- ops_count = {}
- for op in changes:
- ops_count.setdefault(op.desc, 0)
- ops_count[op.desc] += 1
- if op.desc == "replace":
- out.write("replace %s, %s" %
- (get_raw_pkg(op.old_pkg), get_raw_pkg(op.pkg)))
- else:
- out.write("%s %s" % (op.desc.ljust(7), get_raw_pkg(op.pkg)))
-
- out.write()
- out.write("%i ops: %s" % (sum(ops_count.itervalues()),
- ", ".join("%i %ss" % (ops_count[k], k) for k in sorted(ops_count))))
- out.write()
- out.title('Resolved')
- out.write(out.bold, '%.2f' % (resolve_time,), out.reset,
- ' seconds resolving')
+
+ if options.ask or options.pretend:
+ for op in changes:
+ formatter.format(op)
+ formatter.end()
+
+ if options.verbose:
+ out.write()
+ out.title('Resolved')
+ out.write(out.bold, '%.2f' % (resolve_time,), out.reset,
+ ' seconds resolving')
+
if vdb_time:
- out.write(out.bold, '%.2f' % (vdb_time,), out.reset,
- ' seconds preloading vdb state')
+ out.write(out.bold, 'Took %.2f' % (vdb_time,), out.reset,
+ ' seconds to preload vdb state')
if options.pretend:
return
if (options.ask and not
- userquery("Would you like to merge these packages?", out, err)):
+ formatter.ask("Would you like to merge these packages?")):
return
build_obs = observer.file_build_observer(ObserverFormatter(out))
@@ -481,14 +446,12 @@ def main(options, out, err):
change_count = len(changes)
for count, op in enumerate(changes):
- status_str = "%i/%i, %s" % (count + 1, change_count,
- get_raw_pkg(op.pkg))
- out.write("processing %i of %i: %s" % (count + 1, change_count,
- get_raw_pkg(op.pkg)))
- out.title("%i/%i: %s" % (count + 1, change_count, get_raw_pkg(op.pkg)))
+ out.write("Processing %i of %i: %s" % (count + 1, change_count,
+ op.pkg.cpvstr))
+ out.title("%i/%i: %s" % (count + 1, change_count, op.pkg.cpvstr))
if op.desc != "remove":
- if not options.fetchonly:
- out.write("forcing cleaning of workdir")
+ if not options.fetchonly and options.debug:
+ out.write("Forcing a clean of workdir")
buildop = op.pkg.build(observer=build_obs, clean=True)
if options.fetchonly:
out.write("\n%i files required-" % len(op.pkg.fetchables))
@@ -506,7 +469,6 @@ def main(options, out, err):
del buildop, ret
continue
- out.write("building...")
ret = None
try:
built_pkg = buildop.finalize()
@@ -517,15 +479,18 @@ def main(options, out, err):
if ret is None:
out.write()
if op.desc == "replace":
- out.write("replace: %s with %s" %
- (get_raw_pkg(op.old_pkg), get_raw_pkg(built_pkg)))
+ if op.old_pkg == op.pkg:
+ out.write(">>> Reinstalling %s" % (built_pkg.cpvstr))
+ else:
+ out.write(">>> Replacing %s with %s" % (
+ op.old_pkg.cpvstr, built_pkg.cpvstr))
i = vdb.replace(op.old_pkg, built_pkg, observer=repo_obs)
else:
- out.write("install: %s" % get_raw_pkg(built_pkg))
+
+ out.write(">>> Installing %s" % built_pkg.cpvstr)
i = vdb.install(built_pkg, observer=repo_obs)
else:
- out.error("failure building %s: %s" % (get_raw_pkg(op.pkg),
- ret))
+ out.error("Failed to build %s: %s" % (op.pkg, ret))
if not options.ignore_failures:
return 1
continue
@@ -533,7 +498,7 @@ def main(options, out, err):
# then we would like.
del built_pkg
else:
- out.write("remove: %s" % get_raw_pkg(op.pkg))
+ out.write(">>> Removing %s" % op.pkg.cpvstr)
i = vdb.uninstall(op.pkg, observer=repo_objs)
ret = i.finish()
if ret != True:
@@ -543,8 +508,10 @@ def main(options, out, err):
buildop.cleanup()
if world_set:
if op.desc == "remove":
+ out.write('>>> Removing %s from world file' % op.pkg.cpvstr)
update_worldset(world_set, op.pkg, remove=True)
elif any(x.match(op.pkg) for x in atoms):
+ out.write('>>> Adding %s to world file' % op.pkg.cpvstr)
update_worldset(world_set, op.pkg)
out.write("finished")
return 0
diff --git a/pkgcore/scripts/pplugincache.py b/pkgcore/scripts/pplugincache.py
index 915744e41..ce5cdc71b 100644
--- a/pkgcore/scripts/pplugincache.py
+++ b/pkgcore/scripts/pplugincache.py
@@ -5,9 +5,9 @@
"""Update the plugin cache."""
-from pkgcore.util import commandline, modules
+from pkgcore.util import commandline
from pkgcore import plugin
-
+from snakeoil import modules
class OptionParser(commandline.OptionParser):
diff --git a/pkgcore/scripts/pquery.py b/pkgcore/scripts/pquery.py
index 03d4fe4fd..cc9a9f0a8 100644
--- a/pkgcore/scripts/pquery.py
+++ b/pkgcore/scripts/pquery.py
@@ -8,11 +8,10 @@
import optparse
-from pkgcore.util import (
- commandline, repo_utils, parserestrict, packages as pkgutils)
from pkgcore.restrictions import packages, values, boolean, restriction
from pkgcore.ebuild import conditionals, atom
-
+from pkgcore.util import (
+ commandline, repo_utils, parserestrict, packages as pkgutils)
# To add a new restriction you have to do the following:
# - add a parse function for it here.
diff --git a/pkgcore/scripts/pregen.py b/pkgcore/scripts/pregen.py
deleted file mode 100644
index ebd9ac06e..000000000
--- a/pkgcore/scripts/pregen.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# Copyright: 2005-2006 Brian Harring <ferringb@gmail.com>
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-"""Regenerate a repository cache."""
-
-
-import threading
-import Queue
-import time
-
-from pkgcore.util import commandline
-
-
-class OptionParser(commandline.OptionParser):
-
- def __init__(self, **kwargs):
- commandline.OptionParser.__init__(
- self, description=__doc__, usage='%prog [options] repo [threads]',
- **kwargs)
-
- def check_values(self, values, args):
- values, args = commandline.OptionParser.check_values(
- self, values, args)
- if not args:
- self.error('Need a repository name.')
- if len(args) > 2:
- self.error('I do not know what to do with more than 2 arguments')
-
- if len(args) == 2:
- try:
- values.thread_count = int(args[1])
- except ValueError:
- self.error('%r should be an integer' % (args[1],))
- if values.thread_count <= 0:
- self.error('thread count needs to be at least 1')
- else:
- values.thread_count = 1
-
- try:
- values.repo = values.config.repo[args[0]]
- except KeyError:
- self.error('repo %r was not found! known repos: %s' % (
- args[0], ', '.join(str(x) for x in values.config.repo)))
-
- return values, ()
-
-
-def regen_iter(iterable, err):
- for x in iterable:
- try:
- x.keywords
- except RuntimeError:
- raise
- except Exception, e:
- err.write("caught exception %s for %s" % (e, x))
-
-def reclaim_threads(threads, err):
- for x in threads:
- try:
- x.join()
- except RuntimeError:
- raise
- except Exception, e:
- err.write("caught exception %s reclaiming thread" % (e,))
-
-
-def main(options, out, err):
- start_time = time.time()
- # HACK: store this here so we can assign to it from inside def passthru.
- options.count = 0
- if options.thread_count == 1:
- def passthru(iterable):
- for x in iterable:
- options.count += 1
- yield x
- regen_iter(passthru(options.repo), err)
- else:
- queue = Queue.Queue(options.thread_count * 2)
- kill = threading.Event()
- kill.clear()
- def iter_queue(kill, qlist, timeout=0.25):
- while not kill.isSet():
- try:
- yield qlist.get(timeout=timeout)
- except Queue.Empty:
- continue
- regen_threads = [
- threading.Thread(
- target=regen_iter, args=(iter_queue(kill, queue), err))
- for x in xrange(options.thread_count)]
- out.write('starting %d threads' % (options.thread_count,))
- try:
- for x in regen_threads:
- x.start()
- out.write('started')
- # now we feed the queue.
- for pkg in options.repo:
- options.count += 1
- queue.put(pkg)
- except Exception:
- kill.set()
- reclaim_threads(regen_threads, err)
- raise
-
- # by now, queue is fed. reliable for our uses since the queue
- # is only subtracted from.
- while not queue.empty():
- time.sleep(.5)
- kill.set()
- reclaim_threads(regen_threads, err)
- assert queue.empty()
- out.write(
- "finished %d nodes in in %.2f seconds" % (
- options.count, time.time() - start_time))
- return 0
diff --git a/pkgcore/spawn.py b/pkgcore/spawn.py
index be02ac22a..7eb8a0b3c 100644
--- a/pkgcore/spawn.py
+++ b/pkgcore/spawn.py
@@ -14,12 +14,13 @@ __all__ = [
import os, atexit, signal, sys
-from pkgcore.util.osutils import listdir
-from pkgcore.util.mappings import ProtectedDict
-
from pkgcore.const import (
BASH_BINARY, SANDBOX_BINARY, FAKED_PATH, LIBFAKEROOT_PATH)
+from snakeoil.osutils import listdir
+from snakeoil.mappings import ProtectedDict
+
+
try:
import resource
max_fd_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
diff --git a/pkgcore/sync/base.py b/pkgcore/sync/base.py
index 243024844..0b0c9e826 100644
--- a/pkgcore/sync/base.py
+++ b/pkgcore/sync/base.py
@@ -2,11 +2,13 @@
# License: GPL2
from pkgcore.config import ConfigHint, configurable
-from pkgcore.util import demandload, descriptors
-demandload.demandload(globals(), "pkgcore:spawn "
- "os pwd stat "
+from snakeoil import demandload, descriptors
+demandload.demandload(globals(),
+ "os pwd stat errno "
+ "pkgcore:spawn "
"pkgcore:plugin "
- "pkgcore:os_data ")
+ "pkgcore:os_data "
+)
class syncer(object):
@@ -120,7 +122,7 @@ class dvcs_syncer(ExternalSyncer):
try:
st = os.stat(self.basedir)
except (IOError, OSError), ie:
- if errno.ENOENT != ie.errno:
+ if ie.errno != errno.ENOENT:
raise base.generic_exception(self, self.basedir, ie)
command = self._initial_pull()
chdir = None
diff --git a/pkgcore/sync/bzr.py b/pkgcore/sync/bzr.py
index 37b16292a..ee5fd8ba0 100644
--- a/pkgcore/sync/bzr.py
+++ b/pkgcore/sync/bzr.py
@@ -11,8 +11,8 @@ class bzr_syncer(base.dvcs_syncer):
('bzr+', 5),
)
- @classmethod
- def parse_uri(staticmethod, raw_uri):
+ @staticmethod
+ def parse_uri(raw_uri):
if not raw_uri.startswith("bzr+"):
raise base.uri_exception(raw_uri, "doesn't start with bzr+")
return raw_uri[4:]
diff --git a/pkgcore/sync/rsync.py b/pkgcore/sync/rsync.py
index ae2010589..c15397de2 100644
--- a/pkgcore/sync/rsync.py
+++ b/pkgcore/sync/rsync.py
@@ -3,12 +3,12 @@
from pkgcore.sync import base
from pkgcore.config import ConfigHint
-from pkgcore.util.demandload import demandload
+from snakeoil.demandload import demandload
demandload(globals(), "os "
"socket "
"errno "
- "pkgcore.util.osutils:pjoin"
+ "snakeoil.osutils:pjoin"
)
class rsync_syncer(base.ExternalSyncer):
diff --git a/pkgcore/test/__init__.py b/pkgcore/test/__init__.py
index b12792b7d..7a4fa6715 100644
--- a/pkgcore/test/__init__.py
+++ b/pkgcore/test/__init__.py
@@ -87,7 +87,7 @@ class TestCase(unittest.TestCase, object):
def assertLen(self, obj, length, msg=None):
self.failUnless(len(obj) == length,
msg or '%r needs to be len %i, is %i' % (obj, length, len(obj)))
-
+
def assertInstance(self, obj, kls, msg=None):
"""
assert that obj is an instance of kls
@@ -284,7 +284,7 @@ class TestRestriction(TestCase):
self.assertMatch(obj, args, negated=negated, msg=msg)
self.assertForceTrue(obj, force_args, negated=negated, msg=msg)
self.assertNotForceFalse(obj, force_args, negated=negated, msg=msg)
-
+
def assertNotMatches(self, obj, args, force_args=None, negated=False,
msg=None):
if force_args is None:
@@ -292,9 +292,8 @@ class TestRestriction(TestCase):
self.assertNotMatch(obj, args, negated=negated, msg=msg)
self.assertNotForceTrue(obj, force_args, negated=negated, msg=msg)
self.assertForceFalse(obj, force_args, negated=negated, msg=msg)
-
+
class mallable_obj(object):
def __init__(self, **kwds):
self.__dict__.update(kwds)
-
diff --git a/pkgcore/test/chksum/test_defaults.py b/pkgcore/test/chksum/test_defaults.py
index e7505236d..fd2b76560 100644
--- a/pkgcore/test/chksum/test_defaults.py
+++ b/pkgcore/test/chksum/test_defaults.py
@@ -1,11 +1,11 @@
# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
# License: GPL2
+import tempfile, os
from pkgcore.test import TestCase, SkipTest
from pkgcore import chksum
from pkgcore.interfaces.data_source import data_source, local_source
-from pkgcore.util.currying import post_curry
-import tempfile, os
+from snakeoil.currying import post_curry
data = "afsd123klawerponzzbnzsdf;h89y23746123;haas"
multi = 40000
diff --git a/pkgcore/test/chksum/test_init.py b/pkgcore/test/chksum/test_init.py
index d0520d026..58eec66db 100644
--- a/pkgcore/test/chksum/test_init.py
+++ b/pkgcore/test/chksum/test_init.py
@@ -12,7 +12,7 @@ class Test_funcs(TestCase):
chksum.__inited__ = False
chksum.chksum_types.clear()
chksum.init = self._saved_init
-
+
def setUp(self):
chksum.__inited__ = False
chksum.chksum_types.clear()
@@ -42,4 +42,4 @@ class Test_funcs(TestCase):
self.assertEqual(1, chksum.get_handler("x"))
self.assertEqual(2, chksum.get_handler("y"))
self.assertEqual(self._inited_count, 1)
-
+
diff --git a/pkgcore/test/config/test_basics.py b/pkgcore/test/config/test_basics.py
index 2208cd3f3..f439ca22f 100644
--- a/pkgcore/test/config/test_basics.py
+++ b/pkgcore/test/config/test_basics.py
@@ -87,7 +87,6 @@ class ConfigTypeFromFunctionTest(TestCase):
self.assertEqual(
nonopt_type.types,
{'one': 'str', 'two': 'str'})
- self.assertEqual(nonopt_type.incrementals, [])
self.assertEqual(nonopt_type.required, ('one', 'two'))
self.assertEqual(nonopt_type.positional, ('one', 'two'))
@@ -164,18 +163,28 @@ class ConfigHintCloneTest(TestCase):
def test_clone(self):
c = ConfigHint(types={'foo':'list', 'one':'str'},
positional=['one'], required=['one'],
- incrementals=['foo'], typename='barn', doc='orig doc')
+ typename='barn', doc='orig doc')
c2 = c.clone(types={'foo':'list', 'one':'str', 'two':'str'},
required=['one', 'two'])
self.assertEqual(c2.types, {'foo':'list', 'one':'str', 'two':'str'})
self.assertEqual(c2.positional, c.positional)
self.assertEqual(c2.required, ['one', 'two'])
- self.assertEqual(c2.incrementals, c.incrementals)
self.assertEqual(c2.typename, c.typename)
self.assertEqual(c2.allow_unknowns, c.allow_unknowns)
self.assertEqual(c2.doc, c.doc)
+class SectionRefTest(TestCase):
+
+ # Silly testcase just to make something drop off the --coverage radar.
+
+ def test_collapse(self):
+ ref = basics.LazySectionRef(None, 'ref:foon')
+ self.assertRaises(NotImplementedError, ref._collapse)
+ self.assertRaises(NotImplementedError, ref.collapse)
+ self.assertRaises(NotImplementedError, ref.instantiate)
+
+
class ConfigSectionTest(TestCase):
def test_basics(self):
@@ -186,58 +195,152 @@ class ConfigSectionTest(TestCase):
NotImplementedError, section.get_value, None, 'a', 'str')
-class ConfigSectionFromStringDictTest(TestCase):
+class DictConfigSectionTest(TestCase):
- def setUp(self):
- self.source = {
+ def test_misc(self):
+ def convert(central, value, arg_type):
+ return central, value, arg_type
+ section = basics.DictConfigSection(convert, {'list': [1, 2]})
+ self.failIf('foo' in section)
+ self.failUnless('list' in section)
+ self.assertEqual(['list'], section.keys())
+ self.assertEqual(
+ (None, [1, 2], 'spoon'), section.get_value(None, 'list', 'spoon'))
+
+ def test_failure(self):
+ def fail(central, value, arg_type):
+ raise errors.ConfigurationError('fail')
+ section = basics.DictConfigSection(fail, {'list': [1, 2]})
+ self.assertRaises(
+ errors.ConfigurationError,
+ section.get_value, None, 'list', 'spoon')
+
+
+class FakeIncrementalDictConfigSectionTest(TestCase):
+
+ @staticmethod
+ def _convert(central, value, arg_type):
+ return central, value, arg_type
+
+ @staticmethod
+ def _fail(central, value, arg_type):
+ raise errors.ConfigurationError('fail')
+
+ def test_misc(self):
+ section = basics.FakeIncrementalDictConfigSection(
+ self._convert, {'list': [1, 2]})
+ self.failIf('foo' in section)
+ self.failUnless('list' in section)
+ self.assertEqual(['list'], section.keys())
+ self.assertRaises(
+ errors.ConfigurationError,
+ basics.FakeIncrementalDictConfigSection(
+ self._fail, {'a': 'b'}).get_value,
+ None, 'a', 'str')
+
+ def test_fake_incrementals(self):
+ section = basics.FakeIncrementalDictConfigSection(
+ self._convert, {'seq.append': [1, 2]})
+ manager = object()
+ self.assertEqual(
+ [None, None, (manager, [1, 2], 'list')],
+ section.get_value(manager, 'seq', 'list'))
+ def _repr(central, value, arg_type):
+ return 'list', ['thing']
+ section = basics.FakeIncrementalDictConfigSection(
+ _repr, {'foo': None})
+ self.assertEqual(
+ ('list', (None, ['thing'], None)),
+ section.get_value(manager, 'foo', 'repr'))
+ self.assertRaises(
+ errors.ConfigurationError,
+ basics.FakeIncrementalDictConfigSection(
+ self._fail, {'a.prepend': 'b'}).get_value,
+ None, 'a', 'list')
+
+ def test_repr(self):
+ def asis(central, value, arg_type):
+ assert arg_type == 'repr', arg_type
+ return value
+ section = basics.FakeIncrementalDictConfigSection(
+ asis, {'seq.append': ('list', [1, 2]),
+ 'simple': ('bool', True),
+ 'multistr': ('str', 'body'),
+ 'multistr.prepend': ('str', 'head'),
+ 'refs': ('str', 'lost'),
+ 'refs.append': ('ref', 'main'),
+ 'refs.prepend': ('refs', ['a', 'b']),
+ 'strlist': ('callable', asis),
+ 'strlist.prepend': ('str', 'whatever'),
+ 'wrong.prepend': ('wrong', 'wrong'),
+ })
+ manager = object()
+ self.assertRaises(
+ KeyError, section.get_value, manager, 'spoon', 'repr')
+ self.assertEqual(
+ ('list', [None, None, [1, 2]]),
+ section.get_value(manager, 'seq', 'repr'))
+ self.assertEqual(
+ ('bool', True), section.get_value(manager, 'simple', 'repr'))
+ self.assertEqual(
+ ('str', ['head', 'body', None]),
+ section.get_value(manager, 'multistr', 'repr'))
+ self.assertEqual(
+ ('refs', [['a', 'b'], ['lost'], ['main']]),
+ section.get_value(manager, 'refs', 'repr'))
+ self.assertEqual(
+ ('list', [
+ ['whatever'],
+ ['pkgcore.test.config.test_basics.asis'],
+ None]),
+ section.get_value(manager, 'strlist', 'repr'))
+ self.assertRaises(
+ errors.ConfigurationError,
+ section.get_value, manager, 'wrong', 'repr')
+
+
+class ConvertStringTest(TestCase):
+
+ def test_get_value(self):
+ source = {
'str': 'pkgcore.test',
'bool': 'yes',
'list': '0 1 2',
'callable': 'pkgcore.test.config.test_basics.passthrough',
}
- self.destination = {
+ destination = {
'str': 'pkgcore.test',
'bool': True,
'list': ['0', '1', '2'],
'callable': passthrough,
}
- self.section = basics.ConfigSectionFromStringDict(self.source)
-
- def test_contains(self):
- self.failIf('foo' in self.section)
- self.failUnless('list' in self.section)
-
- def test_keys(self):
- self.assertEqual(
- sorted(self.section.keys()), ['bool', 'callable', 'list', 'str'])
- def test_get_value(self):
# valid gets
- for typename, value in self.destination.iteritems():
+ for typename, value in destination.iteritems():
self.assertEqual(
- value, self.section.get_value(None, typename, typename))
+ value,
+ basics.convert_string(None, source[typename], typename))
# reprs
- for typename, value in self.source.iteritems():
+ for typename, value in source.iteritems():
self.assertEqual(
- ('str', value), self.section.get_value(None, typename, 'repr'))
+ ('str', value),
+ basics.convert_string(None, source[typename], 'repr'))
# invalid gets
# not callable
self.assertRaises(
errors.ConfigurationError,
- self.section.get_value, None, 'str', 'callable')
+ basics.convert_string, None, source['str'], 'callable')
# not importable
self.assertRaises(
errors.ConfigurationError,
- self.section.get_value, None, 'bool', 'callable')
+ basics.convert_string, None, source['bool'], 'callable')
# Bogus type.
self.assertRaises(
errors.ConfigurationError,
- self.section.get_value, None, 'bool', 'frob')
+ basics.convert_string, None, source['bool'], 'frob')
def test_section_ref(self):
- section = basics.ConfigSectionFromStringDict(
- {'goodref': 'target', 'badref': 'missing'})
def spoon():
"""Noop."""
target_config = central.CollapsedConfig(
@@ -249,17 +352,15 @@ class ConfigSectionFromStringDictTest(TestCase):
except KeyError:
raise errors.ConfigurationError(section)
self.assertEqual(
- section.get_value(
- TestCentral(), 'goodref', 'ref:spoon').collapse(),
+ basics.convert_string(
+ TestCentral(), 'target', 'ref:spoon').collapse(),
target_config)
self.assertRaises(
errors.ConfigurationError,
- section.get_value(
- TestCentral(), 'badref', 'ref:spoon').instantiate)
+ basics.convert_string(
+ TestCentral(), 'missing', 'ref:spoon').instantiate)
def test_section_refs(self):
- section = basics.ConfigSectionFromStringDict(
- {'goodrefs': '1 2', 'badrefs': '2 3'})
def spoon():
"""Noop."""
config1 = central.CollapsedConfig(
@@ -273,32 +374,22 @@ class ConfigSectionFromStringDictTest(TestCase):
except KeyError:
raise errors.ConfigurationError(section)
self.assertEqual(
- list(ref.collapse() for ref in section.get_value(
- TestCentral(), 'goodrefs', 'refs:spoon')),
+ list(ref.collapse() for ref in basics.convert_string(
+ TestCentral(), '1 2', 'refs:spoon')),
[config1, config2])
- lazy_refs = section.get_value(TestCentral(), 'badrefs', 'refs:spoon')
+ lazy_refs = basics.convert_string(TestCentral(), '2 3', 'refs:spoon')
self.assertEqual(2, len(lazy_refs))
self.assertRaises(errors.ConfigurationError, lazy_refs[1].collapse)
-class HardCodedConfigSectionTest(TestCase):
-
- def setUp(self):
- self.source = {
- 'str': 'pkgcore.test',
- 'bool': True,
- 'list': ['0', '1', '2'],
- 'callable': passthrough,
- }
- self.section = basics.HardCodedConfigSection(self.source)
-
- def test_contains(self):
- self.failIf('foo' in self.section)
- self.failUnless('str' in self.section)
+class ConvertAsIsTest(TestCase):
- def test_keys(self):
- self.assertEqual(
- sorted(self.section.keys()), ['bool', 'callable', 'list', 'str'])
+ source = {
+ 'str': 'pkgcore.test',
+ 'bool': True,
+ 'list': ['0', '1', '2'],
+ 'callable': passthrough,
+ }
def test_get_value(self):
# try all combinations
@@ -306,44 +397,39 @@ class HardCodedConfigSectionTest(TestCase):
for typename in self.source:
if arg == typename:
self.assertEqual(
- value, self.section.get_value(None, arg, typename))
+ value, basics.convert_asis(None, value, typename))
else:
self.assertRaises(
errors.ConfigurationError,
- self.section.get_value, None, arg, typename)
+ basics.convert_asis, None, value, typename)
def test_repr(self):
for typename, value in self.source.iteritems():
self.assertEqual(
(typename, value),
- self.section.get_value(None, typename, 'repr'))
- section = basics.HardCodedConfigSection({'bork': object()})
+ basics.convert_asis(None, value, 'repr'))
self.assertRaises(
errors.ConfigurationError,
- section.get_value, None, 'bork', 'repr')
+ basics.convert_asis, None, object(), 'repr')
def test_section_ref(self):
ref = basics.HardCodedConfigSection({})
- section = basics.HardCodedConfigSection({'ref': 42, 'ref2': ref})
self.assertRaises(errors.ConfigurationError,
- section.get_value, None, 'ref', 'ref:spoon')
+ basics.convert_asis, None, 42, 'ref:spoon')
self.assertIdentical(
- ref,
- section.get_value(None, 'ref2', 'ref:spoon').section)
+ ref, basics.convert_asis(None, ref, 'ref:spoon').section)
self.assertEqual(
- ('ref', ref), section.get_value(None, 'ref2', 'repr'))
+ ('ref', ref), basics.convert_asis(None, ref, 'repr'))
def test_section_refs(self):
ref = basics.HardCodedConfigSection({})
- section = basics.HardCodedConfigSection({'refs': [1, 2],
- 'refs2': [ref]})
self.assertRaises(errors.ConfigurationError,
- section.get_value, None, 'refs', 'refs:spoon')
+ basics.convert_asis, None, [1, 2], 'refs:spoon')
self.assertIdentical(
ref,
- section.get_value(None, 'refs2', 'refs:spoon')[0].section)
+ basics.convert_asis(None, [ref], 'refs:spoon')[0].section)
self.assertEqual(
- ('refs', [ref]), section.get_value(None, 'refs2', 'repr'))
+ ('refs', [ref]), basics.convert_asis(None, [ref], 'repr'))
class AliasTest(TestCase):
diff --git a/pkgcore/test/config/test_central.py b/pkgcore/test/config/test_central.py
index 0e6325e8d..b17a2803f 100644
--- a/pkgcore/test/config/test_central.py
+++ b/pkgcore/test/config/test_central.py
@@ -138,22 +138,6 @@ class ConfigManagerTest(TestCase):
self.assertEqual('available', manager.repo['actual repo'])
- def test_incremental(self):
- @configurable({'inc': 'list'}, required=['inc'], incrementals=['inc'])
- def myrepo(*args, **kwargs):
- return args, kwargs
- manager = central.ConfigManager(
- [{'baserepo': basics.HardCodedConfigSection({'inc': ['basic']}),
- 'actual repo': basics.HardCodedConfigSection({
- 'class': myrepo,
- 'inherit': ['baserepo'],
- 'inc': ['extended']
- }),
- }], [object()])
- self.assertEqual(
- ((), {'inc': ['basic', 'extended']}),
- manager.myrepo['actual repo'])
-
def test_no_object_returned(self):
def noop():
"""Do not do anything."""
@@ -397,23 +381,10 @@ class ConfigManagerTest(TestCase):
'foon': basics.ConfigSectionFromStringDict({
'class': 'pkgcore.test.config.test_central.drawer',
'inherit': 'spork'}),
- 'self': basics.ConfigSectionFromStringDict({
- 'class': 'pkgcore.test.config.test_central.drawer',
- 'inherit': 'self'}),
}], [object()])
self.check_error(
- "Collapsing section named 'self':\n"
- "Inherit 'self' is recursive",
- operator.getitem, manager.drawer, 'self')
- # There is a small wart here: because collapse_section does
- # not know the name of the section it is collapsing the
- # recursive inherit of spork by foon suceeds. The re-inherit
- # of foon after that does not. As far as I can tell the only
- # effect of this is the error message is slightly inaccurate
- # (should be "inherit 'spork' is recursive").
- self.check_error(
"Collapsing section named 'spork':\n"
- "Inherit 'foon' is recursive",
+ "Inherit 'spork' is recursive",
operator.getitem, manager.drawer, 'spork')
def test_alias(self):
@@ -672,3 +643,84 @@ class ConfigManagerTest(TestCase):
manager.collapse_named_section, 'source',
klass=errors.CollapseInheritOnly)
self.assertTrue(manager.collapse_named_section('target'))
+
+ def test_self_inherit(self):
+ section = basics.HardCodedConfigSection({'inherit': ['self']})
+ manager = central.ConfigManager([{
+ 'self': basics.ConfigSectionFromStringDict({
+ 'class': 'pkgcore.test.config.test_central.drawer',
+ 'inherit': 'self'}),
+ }], [RemoteSource()])
+ self.check_error(
+ "Collapsing section named 'self':\n"
+ "Self-inherit 'self' cannot be found",
+ operator.getitem, manager.drawer, 'self')
+ self.check_error(
+ "Self-inherit 'self' cannot be found",
+ manager.collapse_section, section)
+
+ manager = central.ConfigManager([{
+ 'self': basics.HardCodedConfigSection({
+ 'inherit': ['self'],
+ })}, {
+ 'self': basics.HardCodedConfigSection({
+ 'inherit': ['self'],
+ })}, {
+ 'self': basics.HardCodedConfigSection({
+ 'class': drawer})}], [object()])
+ self.assertTrue(manager.collapse_named_section('self'))
+ self.assertTrue(manager.collapse_section(section))
+
+ def test_prepend_inherit(self):
+ manager = central.ConfigManager([{
+ 'sect': basics.HardCodedConfigSection({
+ 'inherit.prepend': ['self']})}], [object()])
+ self.check_error(
+ "Collapsing section named 'sect':\n"
+ 'Prepending or appending to the inherit list makes no sense',
+ manager.collapse_named_section, 'sect')
+
+ def test_list_prepend(self):
+ @configurable({'seq': 'list'})
+ def seq(seq):
+ return seq
+ manager = central.ConfigManager([{
+ 'inh': basics.HardCodedConfigSection({
+ 'inherit': ['sect'],
+ 'seq.prepend': ['pre'],
+ }),
+ 'sect': basics.HardCodedConfigSection({
+ 'inherit': ['base'],
+ 'seq': ['1', '2'],
+ })}, {
+ 'base': basics.HardCodedConfigSection({
+ 'class': seq,
+ 'seq.prepend': ['-1'],
+ 'seq.append': ['post'],
+ })}], [object()])
+ self.assertEqual(['-1', 'post'], manager.seq['base'])
+ self.assertEqual(['1', '2'], manager.seq['sect'])
+ self.assertEqual(['pre', '1', '2'], manager.seq['inh'])
+
+ def test_str_prepend(self):
+ @configurable({'string': 'str'})
+ def sect(string):
+ return string
+ manager = central.ConfigManager([{
+ 'inh': basics.HardCodedConfigSection({
+ 'inherit': ['sect'],
+ 'string.prepend': 'pre',
+ }),
+ 'sect': basics.HardCodedConfigSection({
+ 'inherit': ['base'],
+ 'string': 'b',
+ })}, {
+ 'base':
+ basics.HardCodedConfigSection({
+ 'class': sect,
+ 'string.prepend': 'a',
+ 'string.append': 'c',
+ })}], [object()])
+ self.assertEqual('a c', manager.sect['base'])
+ self.assertEqual('b', manager.sect['sect'])
+ self.assertEqual('pre b', manager.sect['inh'])
diff --git a/pkgcore/test/config/test_cparser.py b/pkgcore/test/config/test_cparser.py
index 345e2b1b6..dfcbe36c3 100644
--- a/pkgcore/test/config/test_cparser.py
+++ b/pkgcore/test/config/test_cparser.py
@@ -31,14 +31,17 @@ class ConfigFromIniTest(TestCase):
[test]
string = 'hi I am a string'
list = foo bar baz
+list.prepend = pre bits
+list.append = post bits
true = yes
false = no
'''))
self.assertEqual(config.keys(), ['test'])
section = config['test']
for key, arg_type, value in [
- ('string', 'str', 'hi I am a string'),
- ('list', 'list', ['foo', 'bar', 'baz']),
+ ('string', 'str', [None, 'hi I am a string', None]),
+ ('list', 'list', [
+ ['pre', 'bits'], ['foo', 'bar', 'baz'], ['post', 'bits']]),
('true', 'bool', True),
('false', 'bool', False),
]:
diff --git a/pkgcore/test/config/test_dhcpformat.py b/pkgcore/test/config/test_dhcpformat.py
index 8afb6fe4e..1b902a434 100644
--- a/pkgcore/test/config/test_dhcpformat.py
+++ b/pkgcore/test/config/test_dhcpformat.py
@@ -49,7 +49,8 @@ test {
section = config['test']
self.failUnless('hi' in section)
self.assertEqual(section.keys(), ['hi'])
- self.assertEqual(section.get_value(None, 'hi', 'str'), 'there')
+ self.assertEqual(section.get_value(None, 'hi', 'str'),
+ [None, 'there', None])
def test_basic_types(self):
for parser, text in [
@@ -72,8 +73,8 @@ test {
config = parser(StringIO(text))
section = config['test']
for name, typename, value in (
- ('list', 'list', ['one', 'two', 'three']),
- ('string', 'str', 'hi'),
+ ('list', 'list', (None, ['one', 'two', 'three'], None)),
+ ('string', 'str', [None, 'hi', None]),
('bool', 'bool', True),
('callable', 'callable', passthrough),
):
@@ -130,7 +131,8 @@ test {
((), {'hi': 'here'}))
kind, ref = section.get_value(manager, 'inline', 'repr')
self.assertEqual('ref', kind)
- self.assertEqual('here', ref.get_value(None, 'hi', 'str'))
+ self.assertEqual(
+ [None, 'here', None], ref.get_value(None, 'hi', 'str'))
def test_multiple_section_ref(self):
for parser, text in [
@@ -189,7 +191,8 @@ test {
self.assertEqual('refs', kind)
self.assertEqual(2, len(refs))
self.assertEqual('target', refs[0])
- self.assertEqual('here', refs[1].get_value(None, 'hi', 'str'))
+ self.assertEqual(
+ [None, 'here', None], refs[1].get_value(None, 'hi', 'str'))
def test_section_refs(self):
for parser, text in [
diff --git a/pkgcore/test/config/test_init.py b/pkgcore/test/config/test_init.py
index f5cc60b6b..d197295f5 100644
--- a/pkgcore/test/config/test_init.py
+++ b/pkgcore/test/config/test_init.py
@@ -4,11 +4,12 @@
"""tests for pkgcore.config's package __init__.py"""
+import operator
import tempfile
from pkgcore.test import TestCase
-from pkgcore.config import load_config, configurable
+from pkgcore.config import load_config, configurable, basics
@configurable(typename='foo')
@@ -40,9 +41,23 @@ class ConfigLoadingTest(TestCase):
manager = load_config(user_conf_file=self.user_config.name)
self.assertEqual(manager.foo['foo'], ((), {}))
- def test_stacking(self):
- """Test user config overrides system config."""
+ # Test user config overrides system config.
manager = load_config(
user_conf_file=self.user_config.name,
system_conf_file=self.system_config.name)
self.assertEqual(manager.foo['foo'], ((), {}))
+
+ # Test prepends.
+ manager = load_config(
+ user_conf_file=self.user_config.name,
+ prepend_sources=[{'myfoo': basics.HardCodedConfigSection({
+ 'inherit': ['foo']})}])
+ self.assertEqual(manager.foo['myfoo'], ((), {}))
+
+ # Test disabling loading.
+ manager = load_config(
+ user_conf_file=self.user_config.name,
+ skip_config_files=True)
+ self.assertRaises(
+ KeyError,
+ operator.getitem, manager.foo, 'foo')
diff --git a/pkgcore/test/ebuild/test_atom.py b/pkgcore/test/ebuild/test_atom.py
index e4aff7f20..533ff6acb 100644
--- a/pkgcore/test/ebuild/test_atom.py
+++ b/pkgcore/test/ebuild/test_atom.py
@@ -5,8 +5,7 @@ from pkgcore.test import TestCase
from pkgcore.ebuild import atom, errors, atom_restricts
from pkgcore.ebuild.cpv import CPV
from pkgcore.restrictions.boolean import AndRestriction
-from pkgcore.util.currying import partial
-from pkgcore.util.pickling import dumps, loads
+from snakeoil.pickling import dumps, loads
class FakePkg(CPV):
__slots__ = ("__dict__")
@@ -163,7 +162,7 @@ class Test_native_atom(TestCase):
slot = ''
def f():
for pref, ver in (('', ''), ('=', '-0.1')):
- for repo in ('', '::gentoo'):
+ for repo in ('', '::gentoo'):
for slot in ('', ':1'):
for use in ('', '[x]'):
yield pref, ver, repo, slot, use
@@ -181,7 +180,7 @@ class Test_native_atom(TestCase):
restricts = o.restrictions
self.assertEqual(len(restricts), count,
- msg="%r, restrictions count must be %i, got %i" %
+ msg="%r, restrictions count must be %i, got %i" %
(o, count, len(restricts)))
self.assertTrue([getattr(x, 'type', None)
for x in restricts], ['package'] * count)
@@ -192,7 +191,7 @@ class Test_native_atom(TestCase):
if ver:
self.assertInstance(restricts[pos], atom_restricts.VersionMatch,
msg="expected %r, got %r; repo(%s), ver(%s), use(%s) "
- "slot(%s)" %
+ "slot(%s)" %
(atom_restricts.VersionMatch, restricts[pos],
repo, ver, use, slot))
pos += 1
@@ -200,7 +199,7 @@ class Test_native_atom(TestCase):
pos = assertAttr('slot')
if use:
pos = assertAttr('use')
-
+
def test_repo_id(self):
astr = "dev-util/bsdiff"
@@ -284,14 +283,14 @@ class Test_native_atom(TestCase):
self.assertNotEqual(o1, o2)
c = cmp(o1, o2)
self.assertNotEqual(c, 0,
- msg="checking cmp for %r, %r, not supposed to be equal, got %i"
+ msg="checking cmp for %r, %r, not supposed to be equal, got %i"
% (o1, o2, c))
self.assertNotEqual(o2, o1)
c = cmp(o2, o1)
self.assertNotEqual(c, 0,
- msg="checking cmp for %r, %r, not supposed to be equal, got %i"
+ msg="checking cmp for %r, %r, not supposed to be equal, got %i"
% (o2, o1, c))
-
+
def test_comparison(self):
self.assertEqual2(self.kls('cat/pkg'), self.kls('cat/pkg'))
diff --git a/pkgcore/test/ebuild/test_conditionals.py b/pkgcore/test/ebuild/test_conditionals.py
index 4dcd066dc..976f31242 100644
--- a/pkgcore/test/ebuild/test_conditionals.py
+++ b/pkgcore/test/ebuild/test_conditionals.py
@@ -2,13 +2,12 @@
# License: GPL2
from pkgcore.test import TestCase
-
from pkgcore.ebuild import conditionals
from pkgcore.ebuild.errors import ParseError
from pkgcore.restrictions import boolean, packages
-from pkgcore.util.currying import post_curry
-from pkgcore.util.iterables import expandable_chain
-from pkgcore.util.lists import iflatten_instance
+from snakeoil.currying import post_curry
+from snakeoil.iterables import expandable_chain
+from snakeoil.lists import iflatten_instance
class base(TestCase):
diff --git a/pkgcore/test/ebuild/test_cpv.py b/pkgcore/test/ebuild/test_cpv.py
index 2be38fc93..4364e1189 100644
--- a/pkgcore/test/ebuild/test_cpv.py
+++ b/pkgcore/test/ebuild/test_cpv.py
@@ -9,7 +9,7 @@ class native_CpvTest(TestCase):
kls = staticmethod(cpv.native_CPV)
run_cpy_ver_cmp = False
-
+
good_cats = [
"dev-util", "dev+", "dev-util+", "DEV-UTIL", "aaa0",
"aaa-0", "multi/depth", "cross-dev_idiot.hacks-suck", "a"]
@@ -28,10 +28,10 @@ class native_CpvTest(TestCase):
suf_nums = list(xrange(100))
shuffle(suf_nums)
- good_sufs = (simple_good_sufs +["%s%i" % (x, suf_nums.pop())
+ good_sufs = (simple_good_sufs +["%s%i" % (x, suf_nums.pop())
for x in simple_good_sufs])
del suf_nums
-
+
l = len(good_sufs)
good_sufs = good_sufs + [
good_sufs[x] + good_sufs[l - x - 1] for x in xrange(l)]
diff --git a/pkgcore/test/ebuild/test_ebuild_src.py b/pkgcore/test/ebuild/test_ebuild_src.py
index eca30940c..c3af26f94 100644
--- a/pkgcore/test/ebuild/test_ebuild_src.py
+++ b/pkgcore/test/ebuild/test_ebuild_src.py
@@ -1,18 +1,19 @@
# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
# License: GPL2
-from pkgcore.ebuild import ebuild_src, repo_objs, const, eclass_cache
+import os
+
+from pkgcore import fetch
+from pkgcore.ebuild import ebuild_src, repo_objs, const
from pkgcore.package import errors
from pkgcore.ebuild import errors as ebuild_errors
from pkgcore import fetch
-from pkgcore.util.osutils import pjoin
-from pkgcore.util.currying import post_curry, partial
-from pkgcore.util.lists import iflatten_instance
-
from pkgcore.test import TestCase, mallable_obj
from pkgcore.test.mixins import tempdir_decorator
from pkgcore.test.ebuild.test_eclass_cache import FakeEclassCache
-import os
+
+from snakeoil.osutils import pjoin
+from snakeoil.currying import post_curry, partial
class test_base(TestCase):
@@ -263,7 +264,7 @@ class test_package_factory(TestCase):
self.assertEqual(sorted(pf.mirrors), sorted(mirrors))
self.assertEqual(pf.mirrors['gentoo'], mirrors['gentoo'])
self.assertEqual(pf.default_mirrors, None)
-
+
def_mirrors = ['http://def1/', 'http://blah1/']
pf = self.mkinst(default_mirrors=def_mirrors)
self.assertEqual(pf.mirrors, {})
@@ -287,32 +288,32 @@ class test_package_factory(TestCase):
os.stat(f).st_mtime)
finally:
os.stat_float_times(cur)
-
+
def test_get_metadata(self):
ec = FakeEclassCache('/nonexistant/path')
pkg = mallable_obj(_mtime_=100, cpvstr='dev-util/diffball-0.71')
class fake_cache(dict):
readonly = False
-
+
cache1 = fake_cache({pkg.cpvstr:
{'_mtime_':100, '_eclasses_':{'eclass1':(None, 100)}, 'marker':1}
})
cache2 = fake_cache({})
-
- class explode_kls(AssertionError):pass
-
+
+ class explode_kls(AssertionError): pass
+
def explode(name, *args, **kwargs):
raise explode_kls("%s was called with %r and %r, "
"shouldn't be invoked." % (name, args, kwargs))
-
+
pf = self.mkinst(cache=(cache2, cache1), eclasses=ec,
_update_metadata=partial(explode, '_update_metadata'))
self.assertEqual(pf._get_metadata(pkg),
{'_eclasses_':{'eclass1':(None, 100)}, 'marker':1},
reflective=False)
-
+
self.assertEqual(cache1.keys(), [pkg.cpvstr])
self.assertFalse(cache2)
@@ -321,7 +322,7 @@ class test_package_factory(TestCase):
self.assertRaises(explode_kls, pf._get_metadata, pkg)
self.assertFalse(cache2)
self.assertFalse(cache1)
-
+
cache2.update({pkg.cpvstr:
{'_mtime_':200, '_eclasses_':{'eclass1':(None, 100)}, 'marker':2}
})
@@ -332,4 +333,4 @@ class test_package_factory(TestCase):
# thus, modifying (popping _mtime_) _is_ valid
self.assertEqual(cache2[pkg.cpvstr],
{'_eclasses_':{'eclass1':(None, 100)}, 'marker':2})
-
+
diff --git a/pkgcore/test/ebuild/test_eclass_cache.py b/pkgcore/test/ebuild/test_eclass_cache.py
index 70eb829dd..a8b80406b 100644
--- a/pkgcore/test/ebuild/test_eclass_cache.py
+++ b/pkgcore/test/ebuild/test_eclass_cache.py
@@ -1,15 +1,15 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
+import os
from pkgcore.ebuild import eclass_cache
from pkgcore.interfaces import data_source
-from pkgcore.util.osutils import pjoin
-import os
-
from pkgcore.test.mixins import TempDirMixin
from pkgcore.test import TestCase
+from snakeoil.osutils import pjoin
+
class FakeEclassCache(eclass_cache.base):
def __init__(self, path):
diff --git a/pkgcore/test/ebuild/test_formatter.py b/pkgcore/test/ebuild/test_formatter.py
new file mode 100644
index 000000000..0721491c1
--- /dev/null
+++ b/pkgcore/test/ebuild/test_formatter.py
@@ -0,0 +1,339 @@
+# Copyright 2007 Charlie Shepherd <masterdriverz@gentoo.org>
+# License: GPL2
+
+import difflib
+
+from pkgcore.test import TestCase
+from pkgcore.package.base import wrapper
+from pkgcore.package.mutated import MutatedPkg
+from pkgcore.package.metadata import factory
+from pkgcore.ebuild.cpv import native_CPV
+from pkgcore.test.misc import FakePkg as FakePkgBase
+from pkgcore.ebuild.formatter import BasicFormatter, PkgcoreFormatter, PortageFormatter, PaludisFormatter
+from pkgcore.test.scripts.helpers import FakeStreamFormatter, Color
+
+from snakeoil.caching import WeakInstMeta
+from snakeoil.formatters import Formatter
+
+class FakeRepo(object):
+ def __init__(self, repoid='', location=''):
+ self.repo_id = repoid
+ self.location = location
+
+class FakePkg(FakePkgBase):
+ __setattr__ = object.__setattr__
+ def __init__(self, cpv, slot='0', iuse=(), use=(), repo=factory(FakeRepo()), restrict=''):
+ FakePkgBase.__init__(self, cpv, repo=repo)
+ self.slot = slot
+ self.restrict = restrict
+ self.use = set(use)
+ self.iuse = set(iuse)
+
+class FakeMutatedPkg(FakePkg):
+ def __str__(self):
+ # Yes this should be less hackish (and hardcoded values suck),
+ # but we can't really subclass MutatedPkg so this will have to do
+ return "MutatedPkg(built ebuild: %s, overrides=('depends', 'rdepends'))" % self.cpvstr
+
+class FakeEbuildSrc(FakePkg):
+ def __str__(self):
+ # Yes this should be less hackish (and hardcoded values suck)
+ # but we can't really subclass ebuild_src so this will have to do
+ return "config wrapped(use): ebuild src: %s" % self.cpvstr
+
+class FakeOp(object):
+ def __init__(self, package, oldpackage=None):
+ self.pkg = package
+ if oldpackage:
+ self.old_pkg = oldpackage
+ self.desc = 'replace'
+ else:
+ self.desc = 'add'
+
+class BaseFormatterTest(object):
+ suffix = ['\n']
+ def setUp(self):
+ self.fakeout = FakeStreamFormatter()
+ self.fakeerr = FakeStreamFormatter()
+ self.formatter = self.newFormatter()
+
+ def newFormatter(self, **kwargs):
+ kwargs.setdefault("out", self.fakeout)
+ kwargs.setdefault("err", self.fakeerr)
+ kwargs.setdefault("verbose", 0)
+ return self.formatterClass(**kwargs)
+
+ def assertOut(self, *args, **kwargs):
+ stringlist = []
+ objectlist = []
+ for arg in list(args)+kwargs.setdefault("suffix", self.suffix):
+ if isinstance(arg, basestring):
+ stringlist.append(arg)
+ else:
+ objectlist.append(''.join(stringlist))
+ stringlist = []
+ objectlist.append(arg)
+ objectlist.append(''.join(stringlist))
+
+        # Hack: a list containing only an empty string is still truthy,
+ if objectlist == ['']: objectlist = []
+
+ self.assertEqual(self.fakeout.stream, objectlist, '\n' + '\n'.join(
+ difflib.unified_diff(
+ list(repr(s) for s in objectlist),
+ list(repr(s) for s in self.fakeout.stream),
+ 'expected', 'actual', lineterm='')))
+ self.fakeout.resetstream()
+
+ def test_persistant_autoline(self):
+ origautoline = object()
+ self.fakeout.autoline = origautoline
+ self.formatter = self.newFormatter()
+ self.formatter.format(FakeOp(FakeMutatedPkg('dev-util/diffball-1.1')))
+ self.formatter.end()
+ self.assertEqual(self.fakeout.autoline, origautoline)
+
+ def test_end(self):
+ """Sub-classes should override this if they print something in end()"""
+ self.formatter.format(FakeOp(FakeMutatedPkg('dev-util/diffball-1.1')))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut(suffix=[])
+
+class TestBasicFormatter(BaseFormatterTest, TestCase):
+ formatterClass = staticmethod(BasicFormatter)
+ def test_op(self):
+ # Make sure we ignore versions...
+ self.formatter.format(FakeOp(FakeMutatedPkg('dev-util/diffball-1.1')))
+ self.assertOut('dev-util/diffball')
+
+class TestPkgcoreFormatter(BaseFormatterTest, TestCase):
+ formatterClass = staticmethod(PkgcoreFormatter)
+ def test_op(self):
+ # This basically just tests string methods
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('dev-util/diffball-1.2'),
+ FakeMutatedPkg('dev-util/diffball-1.1')))
+ self.assertOut(
+ "replace MutatedPkg(built ebuild: dev-util/diffball-1.1, "
+ "overrides=('depends', 'rdepends')), config wrapped(use): "
+ "ebuild src: dev-util/diffball-1.2")
+
+ # And again...
+ self.formatter.format(FakeOp(FakeEbuildSrc('dev-util/diffball-1.0')))
+ self.assertOut("add config wrapped(use): "
+ "ebuild src: dev-util/diffball-1.0")
+
+ # unmerge
+ #app-arch/bzip2-1.0.3-r6
+
+class TestPaludisFormatter(BaseFormatterTest, TestCase):
+ formatterClass = staticmethod(PaludisFormatter)
+
+ def setUp(self):
+ BaseFormatterTest.setUp(self)
+ self.repo = factory(FakeRepo('gentoo', '/usr/portage'))
+
+ def FakeEbuildSrc(self, *args, **kwargs):
+ kwargs.setdefault("repo", self.repo)
+ return FakeEbuildSrc(*args, **kwargs)
+
+ def test_upgrade(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.4'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2-1.0.4::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[U 1.0.3-r6]")
+
+ def test_downgrade(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2-1.0.3-r6::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[D 1.0.4]")
+
+ def test_reinstall(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2-1.0.3-r6::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[R]")
+
+ def test_nouse(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2", "-1.0.3-r6", "::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[R] ",
+ Color('fg', 'red'), "-static")
+
+ def test_use(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2", "-1.0.3-r6", "::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[R] ",
+ Color('fg', 'green'), "static")
+
+ def test_multiuse(self):
+ self.formatter.format(
+ FakeOp(self.FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'], use=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut("* ", Color('fg', 'blue'), "app-arch/bzip2", "-1.0.3-r6", "::gentoo ",
+ Color('fg', 'blue'), "{:0} ", Color('fg', 'yellow'), "[R] ",
+ Color('fg', 'red'), "-bootstrap ", Color('fg', 'green'), "static")
+
+ def test_end(self):
+ self.formatter.end()
+ self.assertOut('Total: 0 packages (0 new, 0 upgrades, 0 downgrades, 0 in new slots)')
+
+ def test_end_new(self):
+ self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6')))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut('Total: 1 packages (1 new, 0 upgrades, 0 downgrades, 0 in new slots)')
+
+ def test_end_newslot(self):
+ self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1')))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut('Total: 1 packages (0 new, 0 upgrades, 0 downgrades, 1 in new slots)')
+
+ def test_end_downgrade(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut('Total: 1 packages (0 new, 0 upgrades, 1 downgrades, 0 in new slots)')
+
+ def test_end_upgrade(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut('Total: 1 packages (0 new, 1 upgrades, 0 downgrades, 0 in new slots)')
+
+class TestPortageFormatter(BaseFormatterTest, TestCase):
+ formatterClass = staticmethod(PortageFormatter)
+
+ def test_upgrade(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.4'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'cyan'), 'U ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.4 ',
+ Color('fg', 'blue'), '[1.0.3-r6] ')
+
+ def test_downgrade(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.4')))
+ self.assertOut('[ebuild ', Color('fg', 'cyan'), 'U',
+ Color('fg', 'blue'), 'D] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ Color('fg', 'blue'), '[1.0.4] ')
+
+ def test_reinstall(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6'),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ')
+
+ def test_nouse(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'USE="', Color('fg', 'yellow'), '-static%" ')
+
+ def test_use(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'USE="', Color('fg', 'yellow'), 'static%" ')
+
+ def test_multiuse(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'], use=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'USE="', Color('fg', 'yellow'), 'static% ',
+ Color('fg', 'yellow'), '-bootstrap%" ')
+
+ def test_misc(self):
+ self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', slot='1')))
+ self.assertOut('[ebuild ', Color('fg', 'green'), ' N', Color('fg', 'green'),
+ 'S ] ', Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ')
+
+ self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', restrict=['fetch'])))
+ self.assertOut('[ebuild ', Color('fg', 'green'), ' N ', Color('fg', 'red'), 'F ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ')
+ self.formatter.format(FakeOp(FakeEbuildSrc(
+ 'app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'],
+ use=['static'])))
+ self.assertOut(
+ '[ebuild ', Color('fg', 'green'), ' N ] ',
+ Color('fg', 'green'),
+ 'app-arch/bzip2-1.0.3-r6 USE="', Color('fg', 'red'), 'static ',
+ Color('fg', 'yellow'), '-bootstrap" ')
+
+
+ def test_changed_use(self):
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', iuse=['static', 'bootstrap'], use=['static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6', iuse=['static'], use=['static'])))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'USE="', Color('fg', 'red'), 'static ',
+ Color('fg', 'yellow'), '-bootstrap%" ')
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
+ iuse=['static', 'bootstrap', 'perl', 'foobar', 'rice'],
+ use=['static', 'rice']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6',
+ iuse=['bootstrap', 'foobar', 'rice', 'kazaam'],
+ use=['foobar'])))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'USE="', Color('fg', 'green'), 'rice* ',
+ Color('fg', 'yellow'), 'static% ',
+ Color('fg', 'blue'), '-bootstrap ',
+ Color('fg', 'yellow'), '-foobar* ',
+ Color('fg', 'yellow'), '(-kazaam%) ',
+ Color('fg', 'yellow'), '-perl%" ')
+
+ def test_use_expand(self):
+ self.formatter = self.newFormatter(use_expand=set(["foo"]))
+ self.formatter.format(
+ FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6',
+ iuse=['foo_static', 'foo_bootstrap'], use=['foo_static']),
+ FakeMutatedPkg('app-arch/bzip2-1.0.3-r6')))
+ self.assertOut('[ebuild ', Color('fg', 'yellow'), ' R ] ',
+ Color('fg', 'green'), 'app-arch/bzip2-1.0.3-r6 ',
+ 'FOO="', Color('fg', 'yellow'), 'static% ',
+ Color('fg', 'yellow'), '-bootstrap%" ')
+
+
+class TestPortageVerboseFormatter(TestPortageFormatter):
+ suffix = [Color("fg", "blue"), ' [1]\n']
+
+ def setUp(self):
+ TestPortageFormatter.setUp(self)
+ self.repo = factory(FakeRepo('gentoo', '/usr/portage'))
+
+ def newFormatter(self, **kwargs):
+ kwargs.setdefault("verbose", 1)
+ return TestPortageFormatter.newFormatter(self, **kwargs)
+
+ def test_end(self):
+ self.formatter.format(FakeOp(FakeEbuildSrc('app-arch/bzip2-1.0.3-r6', repo=self.repo)))
+ self.fakeout.resetstream()
+ self.formatter.end()
+ self.assertOut('\n', Color('fg', 'blue'), '[1] /usr/portage', suffix=['\n'])
diff --git a/pkgcore/test/ebuild/test_misc.py b/pkgcore/test/ebuild/test_misc.py
index 3eae6fa37..3b483be5b 100644
--- a/pkgcore/test/ebuild/test_misc.py
+++ b/pkgcore/test/ebuild/test_misc.py
@@ -2,8 +2,7 @@
# License: GPL2
from pkgcore.ebuild import misc
-from pkgcore.ebuild.atom import atom
-from pkgcore.restrictions import packages, values
+from pkgcore.restrictions import packages
AlwaysTrue = packages.AlwaysTrue
AlwaysFalse = packages.AlwaysFalse
diff --git a/pkgcore/test/ebuild/test_profiles.py b/pkgcore/test/ebuild/test_profiles.py
index 2cc9f97da..19ad16bc1 100644
--- a/pkgcore/test/ebuild/test_profiles.py
+++ b/pkgcore/test/ebuild/test_profiles.py
@@ -2,14 +2,16 @@
# License: GPL2
import os, shutil
+
from pkgcore.test import TestCase
from pkgcore.test.mixins import TempDirMixin
from pkgcore.ebuild import profiles
-from pkgcore.util.osutils import pjoin, ensure_dirs
from pkgcore.ebuild.atom import atom
from pkgcore.ebuild.cpv import CPV
from pkgcore.restrictions import packages
+from snakeoil.osutils import pjoin, ensure_dirs
+
class ProfileNode(profiles.ProfileNode):
# re-inherited to disable inst-caching
pass
@@ -93,7 +95,7 @@ class TestProfileNode(profile_mixin, TestCase):
def test_masks(self):
path = pjoin(self.dir, self.profile)
- self.assertEqual(ProfileNode(path).masks, empty);
+ self.assertEqual(ProfileNode(path).masks, empty)
self.parsing_checks("package.mask", "masks")
self.write_file("package.mask", "dev-util/diffball")
self.assertEqual(ProfileNode(path).masks, ((),
diff --git a/pkgcore/test/ebuild/test_repository.py b/pkgcore/test/ebuild/test_repository.py
index e21bafee0..de4e062a1 100644
--- a/pkgcore/test/ebuild/test_repository.py
+++ b/pkgcore/test/ebuild/test_repository.py
@@ -3,7 +3,7 @@
from pkgcore.test import mixins
-from pkgcore.util import osutils
+from snakeoil import osutils
from pkgcore.ebuild import repository
from pkgcore.ebuild.atom import atom
from pkgcore.repository import errors
diff --git a/pkgcore/test/fetch/test_base.py b/pkgcore/test/fetch/test_base.py
index bdbe6841e..f12390977 100644
--- a/pkgcore/test/fetch/test_base.py
+++ b/pkgcore/test/fetch/test_base.py
@@ -3,14 +3,14 @@
import os
-from pkgcore.fetch import base, fetchable, errors
from pkgcore import chksum
+from pkgcore.fetch import base, fetchable, errors
from pkgcore.interfaces import data_source
-from pkgcore.util.currying import partial
-
from pkgcore.test.mixins import TempDirMixin
from pkgcore.test import TestCase
+from snakeoil.currying import partial
+
repeating_str = 'asdf'
data = repeating_str * 4000
handlers = chksum.get_handlers()
diff --git a/pkgcore/test/fetch/test_init.py b/pkgcore/test/fetch/test_init.py
index 0ec01cdec..9fd974a3c 100644
--- a/pkgcore/test/fetch/test_init.py
+++ b/pkgcore/test/fetch/test_init.py
@@ -2,9 +2,8 @@
# License: GPL2
from pkgcore import fetch
-
from pkgcore.test import TestCase
-from pkgcore.util.lists import iflatten_instance
+from snakeoil.lists import iflatten_instance
class base(TestCase):
@@ -83,7 +82,7 @@ class Test_uri_list(base):
self.uril.add_mirror(mirror)
self.assertEqual(list(self.uril), ["me/cows", "WI/cows"])
self.uril.add_mirror(mirror, "foon/boon")
- self.assertUri(self.uril,
+ self.assertUri(self.uril,
["me/cows", "WI/cows", "me/foon/boon", "WI/foon/boon"])
def test_uris(self):
diff --git a/pkgcore/test/fs/test_contents.py b/pkgcore/test/fs/test_contents.py
index 7abf4f5db..cc7bb55df 100644
--- a/pkgcore/test/fs/test_contents.py
+++ b/pkgcore/test/fs/test_contents.py
@@ -1,12 +1,10 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
+import os
from pkgcore.test import TestCase
-
from pkgcore.fs import fs, contents
-from pkgcore.util.currying import post_curry
-
-import os
+from snakeoil.currying import post_curry
class TestContentsSet(TestCase):
diff --git a/pkgcore/test/fs/test_ops.py b/pkgcore/test/fs/test_ops.py
index 83d9b7118..0346b1189 100644
--- a/pkgcore/test/fs/test_ops.py
+++ b/pkgcore/test/fs/test_ops.py
@@ -1,12 +1,12 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
+import os, shutil
from pkgcore.fs import ops, fs, livefs, contents
from pkgcore.interfaces.data_source import local_source
from pkgcore.test import TestCase
from pkgcore.test.mixins import TempDirMixin
-from pkgcore.util.osutils import pjoin
-import os, shutil
+from snakeoil.osutils import pjoin
class VerifyMixin(object):
diff --git a/pkgcore/test/merge/test_triggers.py b/pkgcore/test/merge/test_triggers.py
index 9dfb586b0..e0aefa9a9 100644
--- a/pkgcore/test/merge/test_triggers.py
+++ b/pkgcore/test/merge/test_triggers.py
@@ -1,19 +1,21 @@
# Copyright: 2007 Brian Harring <ferringb@gmail.com>
# License: GPL2
-from pkgcore.merge import triggers, const
-from pkgcore.fs import fs
-from pkgcore.fs.contents import contentsSet
-from pkgcore.fs.livefs import gen_obj, scan
-from pkgcore.util.currying import partial, post_curry
-from pkgcore.util.osutils import pjoin, ensure_dirs, normpath
-from pkgcore import spawn
-from pkgcore.test import TestCase, SkipTest, mixins
import os, shutil, time
from math import floor, ceil
from operator import attrgetter
from itertools import izip
+from pkgcore.fs import fs
+from pkgcore import spawn
+from pkgcore.merge import triggers, const
+from pkgcore.fs.contents import contentsSet
+from pkgcore.fs.livefs import gen_obj, scan
+from pkgcore.test import TestCase, SkipTest, mixins
+
+from snakeoil.currying import partial, post_curry
+from snakeoil.osutils import pjoin, ensure_dirs, normpath
+
class fake_trigger(triggers.base):
def __init__(self, **kwargs):
@@ -54,7 +56,7 @@ class fake_reporter(object):
class TestBase(TestCase):
kls = fake_trigger
-
+
def mk_trigger(self, kls=None, **kwargs):
if kls is None:
kls = self.kls
@@ -101,7 +103,7 @@ class TestBase(TestCase):
self.assertRaises(TypeError, self.mk_trigger(mode=1, _hooks=2).register,
engine)
self.assertFalse(engine._triggers)
-
+
# shouldn't puke.
o = self.mk_trigger(mode=1, _hooks=("2"))
o.register(engine)
@@ -112,13 +114,13 @@ class TestBase(TestCase):
o = self.mk_trigger(mode=1, _hooks=("2"), required_csets=())
o.register(engine)
self.assertEqual(engine._triggers, [('2', o, ())])
-
+
# should handle keyerror thrown from the engine for missing hooks.
engine = fake_engine(mode=1, blocked_hooks=("foon", "dar"))
self.mk_trigger(mode=1, _hooks="foon").register(engine)
self.mk_trigger(mode=1, _hooks=("foon", "dar")).register(engine)
self.assertFalse(engine._triggers)
-
+
o = self.mk_trigger(mode=1, _hooks=("foon", "bar"), required_csets=(3,))
o.register(engine)
self.assertEqual(engine._triggers, [('bar', o, (3,))])
@@ -177,7 +179,7 @@ class Test_mtime_watcher(mixins.TempDirMixin, TestCase):
o.sort()
t.set_state([x.location for x in o])
self.assertEqual(sorted(t.saved_mtimes), o)
-
+
# test syms.
src = pjoin(self.dir, 'dir2')
os.mkdir(src)
@@ -216,7 +218,7 @@ class Test_mtime_watcher(mixins.TempDirMixin, TestCase):
t.set_state(locs)
self.assertEqual(sorted(t.saved_mtimes), o)
self.assertFalse(t.check_state())
-
+
def test_float_mtime(self):
cur = os.stat_float_times()
try:
@@ -262,7 +264,7 @@ def castrate_trigger(base_kls, **kwargs):
def __init__(self, *args2, **kwargs2):
self._passed_in_args = []
base_kls.__init__(self, *args2, **kwargs2)
-
+
def regen(self, *args):
self._passed_in_args.append(list(args))
if self.enable_regen:
@@ -275,7 +277,7 @@ def castrate_trigger(base_kls, **kwargs):
class trigger_mixin(mixins.TempDirMixin):
-
+
def setUp(self):
mixins.TempDirMixin.setUp(self)
self.reset_objects()
@@ -306,7 +308,7 @@ class Test_ldconfig(trigger_mixin, TestCase):
self.assertEqual(o.mode, 0755)
self.assertTrue(fs.isdir(o))
self.assertTrue(os.path.exists(pjoin(self.dir, 'etc/ld.so.conf')))
-
+
# test normal functioning.
open(pjoin(self.dir, 'etc/ld.so.conf'), 'w').write("\n".join(
["/foon", "dar", "blarnsball", "#comment"]))
@@ -315,7 +317,7 @@ class Test_ldconfig(trigger_mixin, TestCase):
def assertTrigger(self, touches, ran, dirs=['test-lib', 'test-lib2'],
hook='merge', mode=const.INSTALL_MODE, mkdirs=True, same_mtime=False):
-
+
# wipe whats there.
for x in scan(self.dir).iterdirs():
if x.location == self.dir:
@@ -415,7 +417,7 @@ END-INFO-DIR-ENTRY
self.assertEqual(list(o.regen(path, self.dir)), [])
self.assertTrue(os.path.exists(pjoin(self.dir, 'dir')),
msg="info dir file wasn't created")
-
+
# drop the last line, verify it returns that file.
open(pjoin(self.dir, "foo2.info"), 'w').write(
'\n'.join(self.info_data.splitlines()[:-1]))
@@ -438,7 +440,7 @@ END-INFO-DIR-ENTRY
self.assertEqual(map(normpath, (x[1] for x in self.trigger._passed_in_args)),
map(normpath, expected_regen))
return l
-
+
def test_trigger(self):
cur = os.environ.get("PATH", self)
try:
@@ -476,7 +478,7 @@ END-INFO-DIR-ENTRY
self.assertFalse(self.run_trigger('pre_merge', []))
open(pjoin(self.dir, "blaidd drwg.info"), "w").write(self.info_data)
self.assertFalse(self.run_trigger('post_merge', [self.dir]))
-
+
# verify it passes back failures.
self.reset_objects()
self.trigger.enable_regen = True
@@ -536,7 +538,7 @@ class single_attr_change_base(object):
# abuse self as unique singleton.
self.assertEqual(getattr(x, attr, self),
getattr(y, attr, self))
-
+
def test_trigger(self):
self.assertContents()
self.assertContents([fs.fsFile("/foon", mode=0644, uid=2, gid=1,
@@ -557,10 +559,10 @@ class single_attr_change_base(object):
class Test_fix_uid_perms(single_attr_change_base, TestCase):
-
+
kls = triggers.fix_uid_perms
attr = 'uid'
-
+
class Test_fix_gid_perms(single_attr_change_base, TestCase):
@@ -598,7 +600,7 @@ class Test_detect_world_writable(single_attr_change_base, TestCase):
self.assertEqual(self._trigger_override, None,
msg="bug in test code; good_val should not be invoked when a "
"trigger override is in place.")
- return val & ~0002
+ return val & ~0002
def test_lazyness(self):
# ensure it doesn't even look if it won't make noise, and no reporter
@@ -614,40 +616,40 @@ class Test_detect_world_writable(single_attr_change_base, TestCase):
def test_observer_warn(self):
warnings = []
engine = fake_engine(observer=fake_reporter(warn=warnings.append))
-
+
self._trigger_override = self.kls()
-
+
def run(fs_objs, fix_perms=False):
self.kls(fix_perms=fix_perms).trigger(engine,
contentsSet(fs_objs))
-
+
run([fs.fsFile('/foon', mode=0770, strict=False)])
self.assertFalse(warnings)
run([fs.fsFile('/foon', mode=0772, strict=False)])
self.assertEqual(len(warnings), 1)
self.assertIn('/foon', warnings[0])
-
+
warnings[:] = []
-
+
run([fs.fsFile('/dar', mode=0776, strict=False),
fs.fsFile('/bar', mode=0776, strict=False),
fs.fsFile('/far', mode=0770, strict=False)])
-
+
self.assertEqual(len(warnings), 2)
self.assertIn('/dar', ' '.join(warnings))
self.assertIn('/bar', ' '.join(warnings))
self.assertNotIn('/far', ' '.join(warnings))
-
+
class TestPruneFiles(TestCase):
-
+
kls = triggers.PruneFiles
-
+
def test_metadata(self):
self.assertEqual(self.kls.required_csets, ('new_cset',))
self.assertEqual(self.kls._hooks, ('pre_merge',))
self.assertEqual(self.kls._engine_types, triggers.INSTALLING_MODES)
-
+
def test_it(self):
orig = contentsSet([
fs.fsFile('/cheddar', strict=False),
@@ -655,7 +657,7 @@ class TestPruneFiles(TestCase):
fs.fsDir('/foons-rule', strict=False),
fs.fsDir('/mango', strict=False)
])
-
+
engine = fake_engine(mode=const.INSTALL_MODE)
def run(func):
new = contentsSet(orig)
@@ -671,7 +673,7 @@ class TestPruneFiles(TestCase):
info = []
engine = fake_engine(observer=fake_reporter(info=info.append),
mode=const.REPLACE_MODE)
-
+
run(lambda s:False)
self.assertFalse(info)
run(post_curry(isinstance, fs.fsDir))
@@ -682,4 +684,3 @@ class TestPruneFiles(TestCase):
self.assertNotIn('/sporks-suck', ' '.join(info))
self.assertIn('/foons-rule', ' '.join(info))
self.assertIn('/mango', ' '.join(info))
-
diff --git a/pkgcore/test/misc.py b/pkgcore/test/misc.py
new file mode 100644
index 000000000..89e190457
--- /dev/null
+++ b/pkgcore/test/misc.py
@@ -0,0 +1,56 @@
+# Copyright: 2007 Brian Harring <ferringb@gmail.com>
+# License: GPL2
+
+# misc things useful for tests.
+
+from pkgcore.ebuild.ebuild_src import package
+from pkgcore.ebuild.cpv import CPV
+from pkgcore.ebuild.atom import atom
+from pkgcore.repository.util import SimpleTree
+from pkgcore.ebuild.misc import collapsed_restrict_to_data
+from pkgcore.restrictions.packages import AlwaysTrue
+
+default_arches = set(["x86", "ppc", "amd64", "ia64"])
+
+class FakePkg(package):
+ def __init__(self, cpvstr, data=None, shared=None, repo=None):
+ if data is None:
+ data = {}
+
+ for x in ("DEPEND", "RDEPEND", "PDEPEND", "IUSE", "LICENSE"):
+ data.setdefault(x, "")
+
+ cpv = CPV(cpvstr)
+ package.__init__(self, shared, repo, cpv.category, cpv.package,
+ cpv.fullver)
+ object.__setattr__(self, "data", data)
+
+
+class Options(dict):
+ __setattr__ = dict.__setitem__
+ __getattr__ = dict.__getitem__
+ __delattr__ = dict.__delitem__
+
+
+class FakeProfile(object):
+
+ def __init__(self, masked_use={}, forced_use={},
+ provides={}, masks=[], virtuals={}, arch='x86', name='none'):
+ self.provides_repo = SimpleTree(provides)
+ self.masked_use = dict((atom(k), v) for k,v in masked_use.iteritems())
+ self.forced_use = dict((atom(k), v) for k,v in forced_use.iteritems())
+ self.masks = tuple(map(atom, masks))
+ self.virtuals = SimpleTree(virtuals)
+ self.arch = arch
+ self.name = name
+
+ self.forced_data = collapsed_restrict_to_data(
+ [(AlwaysTrue, (self.arch,))],
+ self.forced_use.iteritems())
+
+ self.masked_data = collapsed_restrict_to_data(
+ [(AlwaysTrue, default_arches)],
+ self.masked_use.iteritems())
+
+ def make_virtuals_repo(self, repo):
+ return self.virtuals
diff --git a/pkgcore/test/package/test_base.py b/pkgcore/test/package/test_base.py
index 786784f99..e3ab89432 100644
--- a/pkgcore/test/package/test_base.py
+++ b/pkgcore/test/package/test_base.py
@@ -3,7 +3,7 @@
from pkgcore.test import TestCase
from pkgcore.package import base
-from pkgcore.util.currying import partial
+from snakeoil.currying import partial
class TestBasePkg(TestCase):
diff --git a/pkgcore/test/package/test_mutated.py b/pkgcore/test/package/test_mutated.py
index 856c1f7a9..92f709721 100644
--- a/pkgcore/test/package/test_mutated.py
+++ b/pkgcore/test/package/test_mutated.py
@@ -4,7 +4,7 @@
from pkgcore.test import TestCase
from pkgcore.package.mutated import MutatedPkg
from pkgcore.package.base import base
-from pkgcore.util.currying import partial
+from snakeoil.currying import partial
def passthru(val, self):
return val
diff --git a/pkgcore/test/pkgsets/test_glsa.py b/pkgcore/test/pkgsets/test_glsa.py
index eb6d3a549..b53cc85da 100644
--- a/pkgcore/test/pkgsets/test_glsa.py
+++ b/pkgcore/test/pkgsets/test_glsa.py
@@ -3,12 +3,13 @@
from pkgcore.test import TestCase
from pkgcore.test.mixins import TempDirMixin
-from pkgcore.util.osutils import pjoin
from pkgcore.pkgsets import glsa
-from pkgcore.util.currying import post_curry
from pkgcore.restrictions.packages import OrRestriction
from pkgcore.ebuild import cpv
+from snakeoil.osutils import pjoin
+from snakeoil.currying import post_curry
+
# misc setup code for generating glsas for testing
glsa_template = \
diff --git a/pkgcore/test/repository/test_multiplex.py b/pkgcore/test/repository/test_multiplex.py
index f442acc36..2f741a5e0 100644
--- a/pkgcore/test/repository/test_multiplex.py
+++ b/pkgcore/test/repository/test_multiplex.py
@@ -1,13 +1,13 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
+from pkgcore.test import TestCase
from pkgcore.repository.multiplex import tree
from pkgcore.restrictions import packages, values
from pkgcore.repository.util import SimpleTree
-from pkgcore.util.mappings import OrderedDict
-from pkgcore.util.currying import partial
-from pkgcore.test import TestCase
+from snakeoil.mappings import OrderedDict
+from snakeoil.currying import partial
rev_sorted = partial(sorted, reverse=True)
diff --git a/pkgcore/test/repository/test_prototype.py b/pkgcore/test/repository/test_prototype.py
index 4b9851cbe..845802dd1 100644
--- a/pkgcore/test/repository/test_prototype.py
+++ b/pkgcore/test/repository/test_prototype.py
@@ -5,9 +5,9 @@ from pkgcore.test import TestCase
from pkgcore.restrictions import packages, values
from pkgcore.ebuild.atom import atom
from pkgcore.ebuild.cpv import CPV
-from pkgcore.util.mappings import OrderedDict
from pkgcore.repository.util import SimpleTree
+from snakeoil.mappings import OrderedDict
class TestPrototype(TestCase):
diff --git a/pkgcore/test/restrictions/test_packages.py b/pkgcore/test/restrictions/test_packages.py
index 9f2af3ad7..d8145bba5 100644
--- a/pkgcore/test/restrictions/test_packages.py
+++ b/pkgcore/test/restrictions/test_packages.py
@@ -2,19 +2,18 @@
# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
# License: GPL2
+import exceptions
+from pkgcore import log
from pkgcore.test import (TestCase, protect_logging, TestRestriction,
mallable_obj, quiet_logger)
from pkgcore.restrictions import packages, values
-from pkgcore.util.currying import partial, post_curry
-from pkgcore import log
-import exceptions
class callback_logger(log.logging.Handler):
def __init__(self, callback):
log.logging.Handler.__init__(self)
self.callback = callback
-
+
def emit(self, record):
self.callback(record)
@@ -79,22 +78,21 @@ class native_PackageRestrictionTest(TestRestriction):
class foo:
def __getattr__(self, attr):
if attr.startswith("exc"):
- import exceptions
raise getattr(exceptions, attr[4:])()
raise AttributeError("monkey lover")
for mode in ("match", "force_True", "force_False"):
excepts[:] = []
- self.assertRaises(AttributeError,
+ self.assertRaises(AttributeError,
getattr(self.kls("foon", AlwaysSelfIntersect), mode),
foo())
self.assertEqual(len(excepts), 1,
msg="expected one exception, got %r" % excepts)
-
+
# ensure various exceptions are passed through
for k in (KeyboardInterrupt, RuntimeError, SystemExit):
self.assertRaises(k,
- getattr(self.kls("exc_%s" % k.__name__,
+ getattr(self.kls("exc_%s" % k.__name__,
AlwaysSelfIntersect), mode),
foo())
diff --git a/pkgcore/test/restrictions/test_restriction.py b/pkgcore/test/restrictions/test_restriction.py
index fadee2a69..2a725c46d 100644
--- a/pkgcore/test/restrictions/test_restriction.py
+++ b/pkgcore/test/restrictions/test_restriction.py
@@ -5,7 +5,7 @@
from pkgcore.test import TestRestriction
from pkgcore.restrictions import restriction
-from pkgcore.util.currying import partial
+from snakeoil.currying import partial
class SillyBool(restriction.base):
@@ -40,7 +40,7 @@ class BaseTest(TestRestriction):
self.assertMatch(true, args)
self.assertForceTrue(true, args)
self.assertNotForceFalse(true, args)
-
+
self.assertNotMatch(false, args)
self.assertNotForceTrue(false, args)
self.assertForceFalse(false, args)
@@ -56,7 +56,7 @@ class AlwaysBoolTest(TestRestriction):
self.assertMatch(true, false)
self.assertForceTrue(true, false)
self.assertNotForceFalse(true, false)
-
+
self.assertNotMatch(false, true)
self.assertNotForceTrue(false, true)
self.assertForceFalse(false, true)
diff --git a/pkgcore/test/scripts/helpers.py b/pkgcore/test/scripts/helpers.py
index 210a976c2..88f4066c4 100644
--- a/pkgcore/test/scripts/helpers.py
+++ b/pkgcore/test/scripts/helpers.py
@@ -4,12 +4,10 @@
"""Helpers for testing scripts."""
-
-import StringIO
import difflib
-
-from pkgcore.util import formatters
from pkgcore.config import central
+from snakeoil.formatters import PlainTextFormatter
+from snakeoil.caching import WeakInstMeta
class Exit(Exception):
@@ -42,6 +40,62 @@ def mangle_parser(parser):
parser.error = noerror
return parser
+class FormatterObject(object):
+ __metaclass__ = WeakInstMeta
+ __inst_caching__ = True
+ def __call__(self, formatter):
+ formatter.stream.write(self)
+
+class Color(FormatterObject):
+ __inst_caching__ = True
+ def __init__(self, mode, color):
+ self.mode = mode
+ self.color = color
+ def __repr__(self):
+ return '<Color: mode - %s; color - %s>' % (self.mode, self.color)
+
+class Reset(FormatterObject):
+ __inst_caching__ = True
+ def __repr__(self):
+ return '<Reset>'
+
+class Bold(FormatterObject):
+ __inst_caching__ = True
+ def __repr__(self):
+ return '<Bold>'
+
+class ListStream(list):
+ def write(self, *args):
+ stringlist = []
+ objectlist = []
+ for arg in args:
+ if isinstance(arg, basestring):
+ stringlist.append(arg)
+ else:
+ objectlist.append(''.join(stringlist))
+ stringlist = []
+ objectlist.append(arg)
+ objectlist.append(''.join(stringlist))
+ # We use len because boolean ops shortcircuit
+ if (len(self) and isinstance(self[-1], basestring) and
+ isinstance(objectlist[0], basestring)):
+ self[-1] = self[-1] + objectlist.pop(0)
+ self.extend(objectlist)
+
+class FakeStreamFormatter(PlainTextFormatter):
+ def __init__(self):
+ PlainTextFormatter.__init__(self, ListStream([]))
+ self.reset = Reset()
+ self.bold = Bold()
+ self.first_prefix = [None]
+ def resetstream(self):
+ self.stream = ListStream([])
+ def fg(self, color=None):
+ return Color('fg', color)
+ def bg(self, color=None):
+ return Color('bg', color)
+ def get_text_stream(self):
+ return ''.join([x for x in self.stream if not isinstance(x, FormatterObject)])
class MainMixin(object):
@@ -102,17 +156,15 @@ class MainMixin(object):
@return: the L{central.ConfigManager}.
"""
options = self.parse(*args, **kwargs)
- outstream = StringIO.StringIO()
- errstream = StringIO.StringIO()
- outformatter = formatters.PlainTextFormatter(outstream)
- errformatter = formatters.PlainTextFormatter(errstream)
+ outformatter = FakeStreamFormatter()
+ errformatter = FakeStreamFormatter()
self.main(options, outformatter, errformatter)
diffs = []
- for name, strings, stream in [('out', out, outstream),
- ('err', err, errstream)]:
- actual = stream.getvalue()
+ for name, strings, formatter in [('out', out, outformatter),
+ ('err', err, errformatter)]:
+ actual = formatter.get_text_stream()
if strings:
- expected = '\n'.join(strings) + '\n'
+ expected = '\n'.join(strings)
else:
expected = ''
if expected != actual:
diff --git a/pkgcore/test/scripts/test_pconfig.py b/pkgcore/test/scripts/test_pconfig.py
index dc1f8f88b..94ee4777e 100644
--- a/pkgcore/test/scripts/test_pconfig.py
+++ b/pkgcore/test/scripts/test_pconfig.py
@@ -2,10 +2,9 @@
# License: GPL2
from pkgcore.test import TestCase
-
from pkgcore.scripts import pconfig
from pkgcore.test.scripts import helpers
-from pkgcore.config import configurable, basics
+from pkgcore.config import configurable, basics, errors
from pkgcore.util import commandline
@configurable({'reff': 'ref:spork'})
@@ -33,7 +32,7 @@ def multi(**kwargs):
def broken_type(*args):
"""Noop."""
-@configurable(incrementals=['inc'], types={'inc': 'list'}, allow_unknowns=True)
+@configurable(types={'inc': 'list'}, allow_unknowns=True)
def increment(inc=()):
"""Noop."""
@@ -51,6 +50,7 @@ class DescribeClassTest(TestCase, helpers.MainMixin):
'need exactly one argument: class to describe.')
self.assertError(
'need exactly one argument: class to describe.', 'a', 'b')
+ self.parse('pkgcore.scripts')
def test_describe_class(self):
self.assertOut(
@@ -64,7 +64,7 @@ class DescribeClassTest(TestCase, helpers.MainMixin):
'Noop.',
'values not listed are handled as strings',
'',
- 'inc: list (incremental)'],
+ 'inc: list'],
'pkgcore.test.scripts.test_pconfig.increment')
def test_broken_type(self):
@@ -259,7 +259,9 @@ class WeirdSection(basics.ConfigSection):
raise KeyError(name)
if arg_type != 'repr':
raise errors.ConfigurationError('%r unsupported' % (arg_type,))
- return 'refs', ['spork', basics.HardCodedConfigSection({'foo': 'bar'})]
+ return 'refs', [
+ ['spork', basics.HardCodedConfigSection({'foo': 'bar'})],
+ None, None]
class DumpUncollapsedTest(TestCase, helpers.MainMixin):
@@ -297,7 +299,7 @@ class DumpUncollapsedTest(TestCase, helpers.MainMixin):
' nested section 2',
' ================',
' # type: refs',
- " 'sects' = ",
+ " 'sects.prepend' = ",
' nested section 1',
' ================',
" named section 'spork'",
diff --git a/pkgcore/test/scripts/test_pebuild.py b/pkgcore/test/scripts/test_pebuild.py
index 167dad25d..1e3f85d4b 100644
--- a/pkgcore/test/scripts/test_pebuild.py
+++ b/pkgcore/test/scripts/test_pebuild.py
@@ -17,3 +17,4 @@ class CommandlineTest(TestCase, helpers.MainMixin):
self.assertError('Specify an atom and at least one phase.', 'foo')
self.assertError("atom 'spork' is malformed: error spork",
'spork', 'unpack')
+ self.assertEqual(self.parse('foo/bar', 'baz', 'spork').phases, ['baz', 'spork'])
diff --git a/pkgcore/test/scripts/test_pmaint.py b/pkgcore/test/scripts/test_pmaint.py
index 1f5a2cf62..e55a8c9b2 100644
--- a/pkgcore/test/scripts/test_pmaint.py
+++ b/pkgcore/test/scripts/test_pmaint.py
@@ -3,17 +3,19 @@
# License: GPL2
from StringIO import StringIO
-from pkgcore.util.formatters import PlainTextFormatter
+
from pkgcore.interfaces.repo import (nonlivefs_install,
nonlivefs_uninstall, nonlivefs_replace)
from pkgcore.test import TestCase
from pkgcore.scripts import pmaint
from pkgcore.test.scripts import helpers
-from pkgcore.config import basics, ConfigHint
+from pkgcore.config import basics, ConfigHint, configurable
from pkgcore.repository import util, syncable
from pkgcore.sync import base
from pkgcore.ebuild.cpv import CPV
-from pkgcore.util.currying import partial
+
+from snakeoil.formatters import PlainTextFormatter
+from snakeoil.currying import partial
class Options(dict):
@@ -134,7 +136,7 @@ class fake_repo(util.SimpleTree):
def make_repo_config(repo_data, livefs=False, frozen=False):
def repo():
return fake_repo(repo_data, livefs=livefs, frozen=frozen)
- repo.pkgcore_config_type=ConfigHint(typename='repo')
+ repo.pkgcore_config_type = ConfigHint(typename='repo')
return basics.HardCodedConfigSection({'class':repo})
@@ -237,7 +239,35 @@ class CopyTest(TestCase, helpers.MainMixin):
ret, config, out = self.execute_main(
'--target-repo', 'trg', '--source-repo', 'src',
'--copy-missing',
- src=make_repo_config({'sys-apps':{'portage':['2.1', '2.3']}}),
- trg=make_repo_config({'sys-apps':{'portage':['2.1']}})
+ src=make_repo_config({'sys-apps':{'portage':['2.1', '2.3']}}),
+ trg=make_repo_config({'sys-apps':{'portage':['2.1']}})
)
self.assertEqual(config.candidates[0].cpvstr, "sys-apps/portage-2.3")
+
+
+class TestRegen(TestCase, helpers.MainMixin):
+
+ parser = helpers.mangle_parser(pmaint.RegenParser())
+ main = staticmethod(pmaint.regen_main)
+
+ def test_parser(self):
+
+ class TestSimpleTree(util.SimpleTree):
+ pass
+
+ @configurable(typename='repo')
+ def fake_repo():
+ return TestSimpleTree({})
+
+
+ self.assertError('Need a repository name.')
+ self.assertError('I do not know what to do with more than 2 arguments',
+ '1', '2', '3')
+ self.assertError('thread count needs to be at least 1', '1', '0')
+ self.assertError("repo 'spork' was not found! known repos: ", 'spork')
+ options = self.parse(
+ 'spork', '2', spork=basics.HardCodedConfigSection(
+ {'class': fake_repo}))
+ self.assertEqual(
+ [options.repo.__class__, options.thread_count],
+ [TestSimpleTree, 2])
diff --git a/pkgcore/test/scripts/test_pmerge.py b/pkgcore/test/scripts/test_pmerge.py
index 1092812da..c60c24a32 100644
--- a/pkgcore/test/scripts/test_pmerge.py
+++ b/pkgcore/test/scripts/test_pmerge.py
@@ -26,7 +26,7 @@ class AtomParsingTest(TestCase):
'spork2': {'foon': ('2',)}})
self.assertRaises(pmerge.NoMatches,
pmerge.parse_atom, "foo", repo)
- self.assertRaises(pmerge.AmbiguousQuery,
+ self.assertRaises(pmerge.AmbiguousQuery,
pmerge.parse_atom, "foon", repo)
@@ -37,7 +37,12 @@ class CommandlineTest(TestCase, helpers.MainMixin):
def test_parser(self):
self.assertError(
- "Sorry, using sets with -C probably isn't wise", '-Cs', 'boo')
+ "Using sets with -C probably isn't wise, aborting", '-Cs', 'boo')
self.assertError(
'--usepkg is redundant when --usepkgonly is used', '-Kk')
- self.assertError("need at least one atom", '--unmerge')
+ self.assertError("You must provide at least one atom", '--unmerge')
+ options = self.parse('-s world')
+ self.assertFalse(options.replace)
+ options = self.parse('--clean')
+ self.assertEqual(options.set, ['world', 'system'])
+ self.assertTrue(options.deep, True)
diff --git a/pkgcore/test/scripts/test_pregen.py b/pkgcore/test/scripts/test_pregen.py
deleted file mode 100644
index d161bce32..000000000
--- a/pkgcore/test/scripts/test_pregen.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-from pkgcore.test import TestCase
-
-from pkgcore.scripts import pregen
-from pkgcore.test.scripts import helpers
-
-
-class CommandlineTest(TestCase, helpers.MainMixin):
-
- parser = helpers.mangle_parser(pregen.OptionParser())
- main = staticmethod(pregen.main)
-
- def test_parser(self):
- self.assertError('Need a repository name.')
- self.assertError('I do not know what to do with more than 2 arguments',
- '1', '2', '3')
- self.assertError('thread count needs to be at least 1', '1', '0')
- self.assertError("repo 'spork' was not found! known repos: ", 'spork')
diff --git a/pkgcore/test/test_demandload_usage.py b/pkgcore/test/test_demandload_usage.py
index f6bbb7cda..e9f4d582b 100644
--- a/pkgcore/test/test_demandload_usage.py
+++ b/pkgcore/test/test_demandload_usage.py
@@ -1,10 +1,10 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
-from pkgcore.util import demandload
-import sys
+import sys
from pkgcore.test import TestCase
+from snakeoil import demandload
class TestDemandLoadTargets(TestCase):
diff --git a/pkgcore/test/test_plugin.py b/pkgcore/test/test_plugin.py
index a5ea4681a..fb77105ec 100644
--- a/pkgcore/test/test_plugin.py
+++ b/pkgcore/test/test_plugin.py
@@ -1,16 +1,14 @@
# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
# License: GPL2
-
-from pkgcore.test import TestCase, quiet_logger, protect_logging
-from pkgcore import plugin
-from pkgcore.util import lists
-
import os
import sys
import shutil
import tempfile
import logging
+from pkgcore.test import TestCase, quiet_logger, protect_logging
+from pkgcore import plugin
+from snakeoil import lists
class ModulesTest(TestCase):
@@ -43,11 +41,12 @@ class LowPlug(object):
priority = 0
low_plug = LowPlug()
+high_plug = HighPlug()
pkgcore_plugins = {
'plugtest': [
DisabledPlug,
- HighPlug(),
+ high_plug,
low_plug,
]
}
@@ -98,16 +97,17 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
def _runit(self, method):
plugin._cache = {}
method()
- mtime = os.path.getmtime(os.path.join(self.packdir, 'plugincache'))
+ mtime = os.path.getmtime(os.path.join(self.packdir, 'plugincache2'))
method()
plugin._cache = {}
method()
method()
self.assertEqual(
- mtime, os.path.getmtime(os.path.join(self.packdir, 'plugincache')))
+ mtime,
+ os.path.getmtime(os.path.join(self.packdir, 'plugincache2')))
# We cannot write this since it contains an unimportable plugin.
self.assertFalse(
- os.path.exists(os.path.join(self.packdir2, 'plugincache')))
+ os.path.exists(os.path.join(self.packdir2, 'plugincache2')))
def _test_plug(self):
import mod_testplug
@@ -117,13 +117,15 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
self.assertEqual(
'HighPlug',
plugin.get_plugin('plugtest', mod_testplug).__class__.__name__)
- lines = list(open(os.path.join(self.packdir, 'plugincache')))
- self.assertEqual(2, len(lines))
+ lines = list(open(os.path.join(self.packdir, 'plugincache2')))
+ self.assertEqual(3, len(lines))
+ self.assertEqual(plugin.CACHE_HEADER, lines[0])
+ lines.pop(0)
lines.sort()
mtime = int(os.path.getmtime(os.path.join(self.packdir, 'plug2.py')))
self.assertEqual('plug2:%s:\n' % (mtime,), lines[0])
mtime = int(os.path.getmtime(os.path.join(self.packdir, 'plug.py')))
- self.assertEqual('plug:%s:plugtest\n' % (mtime,), lines[1])
+ self.assertEqual('plug:%s:plugtest,7\n' % (mtime,), lines[1])
def test_plug(self):
self._runit(self._test_plug)
@@ -135,8 +137,9 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
# This one is not loaded if we are testing with a good cache.
sys.modules.pop('mod_testplug.plug2', None)
list(plugin.get_plugins('plugtest', mod_testplug))
- self.assertIn('mod_testplug.plug', sys.modules)
- self.assertNotIn('mod_testplug.plug2', sys.modules)
+ # Extra messages since getting all of sys.modules printed is annoying.
+ self.assertIn('mod_testplug.plug', sys.modules, 'plug not loaded')
+ self.assertNotIn('mod_testplug.plug2', sys.modules, 'plug2 loaded')
def test_no_unneeded_import(self):
self._runit(self._test_no_unneeded_import)
@@ -144,7 +147,7 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
def test_cache_corruption(self):
import mod_testplug
list(plugin.get_plugins('spork', mod_testplug))
- filename = os.path.join(self.packdir, 'plugincache')
+ filename = os.path.join(self.packdir, 'plugincache2')
cachefile = open(filename, 'a')
try:
cachefile.write('corruption\n')
@@ -158,11 +161,11 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
plugin._cache = {}
self._test_plug()
good_mtime = os.path.getmtime(
- os.path.join(self.packdir, 'plugincache'))
+ os.path.join(self.packdir, 'plugincache2'))
plugin._cache = {}
self._test_plug()
self.assertEqual(good_mtime, os.path.getmtime(
- os.path.join(self.packdir, 'plugincache')))
+ os.path.join(self.packdir, 'plugincache2')))
self.assertNotEqual(good_mtime, corrupt_mtime)
@protect_logging(logging.root)
@@ -178,7 +181,7 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
plugin._cache = {}
self._test_plug()
- filename = os.path.join(self.packdir, 'plugincache')
+ filename = os.path.join(self.packdir, 'plugincache2')
st = os.stat(filename)
mtime = st.st_mtime - 2
os.utime(filename, (st.st_atime, mtime))
@@ -188,7 +191,8 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
# Should never write a usable cache.
self.assertNotEqual(
- mtime, os.path.getmtime(os.path.join(self.packdir, 'plugincache')))
+ mtime,
+ os.path.getmtime(os.path.join(self.packdir, 'plugincache2')))
def test_rewrite_on_remove(self):
filename = os.path.join(self.packdir, 'extra.py')
@@ -207,3 +211,98 @@ pkgcore_plugins = {'plugtest': [HiddenPlug]}
plugin._cache = {}
self._test_plug()
+
+ def test_priority_caching(self):
+ plug3 = open(os.path.join(self.packdir, 'plug3.py'), 'w')
+ try:
+ plug3.write('''
+class LowPlug(object):
+ priority = 6
+
+pkgcore_plugins = {
+ 'plugtest': [LowPlug()],
+}
+''')
+ finally:
+ plug3.close()
+ plug4 = open(os.path.join(self.packdir, 'plug4.py'), 'w')
+ try:
+ plug4.write('''
+# First file tried, only a disabled plugin.
+class HighDisabledPlug(object):
+ priority = 15
+ disabled = True
+
+pkgcore_plugins = {
+ 'plugtest': [HighDisabledPlug()],
+}
+''')
+ finally:
+ plug4.close()
+ plug5 = open(os.path.join(self.packdir, 'plug5.py'), 'w')
+ try:
+ plug5.write('''
+# Second file tried, with a skipped low priority plugin.
+class HighDisabledPlug(object):
+ priority = 12
+ disabled = True
+
+class LowPlug(object):
+ priority = 6
+
+pkgcore_plugins = {
+ 'plugtest': [HighDisabledPlug(), LowPlug()],
+}
+''')
+ finally:
+ plug5.close()
+ plug6 = open(os.path.join(self.packdir, 'plug6.py'), 'w')
+ try:
+ plug6.write('''
+# Not tried, bogus priority.
+class BogusPlug(object):
+ priority = 'spoon'
+
+pkgcore_plugins = {
+ 'plugtest': [BogusPlug()],
+}
+''')
+ finally:
+ plug6.close()
+ self._runit(self._test_priority_caching)
+
+ def _test_priority_caching(self):
+ import mod_testplug
+ list(plugin.get_plugins('spork', mod_testplug))
+ sys.modules.pop('mod_testplug.plug', None)
+ sys.modules.pop('mod_testplug.plug2', None)
+ sys.modules.pop('mod_testplug.plug3', None)
+ sys.modules.pop('mod_testplug.plug4', None)
+ sys.modules.pop('mod_testplug.plug5', None)
+ sys.modules.pop('mod_testplug.plug6', None)
+ best_plug = plugin.get_plugin('plugtest', mod_testplug)
+ from mod_testplug import plug
+ self.assertEqual(plug.high_plug, best_plug)
+ # Extra messages since getting all of sys.modules printed is annoying.
+ self.assertIn('mod_testplug.plug', sys.modules, 'plug not loaded')
+ self.assertNotIn('mod_testplug.plug2', sys.modules, 'plug2 loaded')
+ self.assertNotIn('mod_testplug.plug3', sys.modules, 'plug3 loaded')
+ self.assertIn('mod_testplug.plug4', sys.modules, 'plug4 not loaded')
+ self.assertIn('mod_testplug.plug5', sys.modules, 'plug5 not loaded')
+ self.assertNotIn('mod_testplug.plug6', sys.modules, 'plug6 loaded')
+
+ def test_header_change_invalidates_cache(self):
+ # Write the cache
+ plugin._cache = {}
+ import mod_testplug
+ list(plugin.get_plugins('testplug', mod_testplug))
+
+ # Modify the cache.
+ filename = os.path.join(self.packdir, 'plugincache2')
+ cache = list(open(filename))
+ cache[0] = 'not really a pkgcore plugin cache\n'
+ open(filename, 'w').write(''.join(cache))
+
+ # And test if it is properly rewritten.
+ plugin._cache = {}
+ self._test_plug()
diff --git a/pkgcore/test/test_spawn.py b/pkgcore/test/test_spawn.py
index 94bf28f26..6e4accde6 100644
--- a/pkgcore/test/test_spawn.py
+++ b/pkgcore/test/test_spawn.py
@@ -1,11 +1,11 @@
# Copyright: 2006 Brian Harring <ferringb@gmail.com>
# License: GPL2
+import os, pwd, signal
from pkgcore.test import TestCase, SkipTest
from pkgcore import spawn
from pkgcore.test.mixins import TempDirMixin
-from pkgcore.util.currying import post_curry
-import os, pwd, signal
+from snakeoil.currying import post_curry
def capability_based(capable, msg):
def internal_f(f):
diff --git a/pkgcore/test/util/test_caching.py b/pkgcore/test/util/test_caching.py
deleted file mode 100644
index 8f3736a4c..000000000
--- a/pkgcore/test/util/test_caching.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util import caching
-
-def gen_test(WeakInstMeta):
- class weak_slotted(object):
- __metaclass__ = WeakInstMeta
- __inst_caching__ = True
- __slots__ = ('one',)
-
- class weak_inst(object):
- __metaclass__ = WeakInstMeta
- __inst_caching__ = True
- counter = 0
- def __new__(cls, *args, **kwargs):
- cls.counter += 1
- return object.__new__(cls)
- def __init__(self, *args, **kwargs):
- pass
- @classmethod
- def reset(cls):
- cls.counter = 0
-
- class automatic_disabled_weak_inst(weak_inst):
- pass
-
- class explicit_disabled_weak_inst(weak_inst):
- __inst_caching__ = False
-
- class reenabled_weak_inst(automatic_disabled_weak_inst):
- __inst_caching__ = True
-
- class TestWeakInstMeta(TestCase):
-
- def test_reuse(self, kls=weak_inst):
- kls.reset()
- o = kls()
- self.assertIdentical(o, kls())
- self.assertEqual(kls.counter, 1)
- del o
- kls()
- self.assertEqual(kls.counter, 2)
-
- def test_disabling_inst(self):
- weak_inst.reset()
- for x in (1, 2):
- o = weak_inst(disable_inst_caching=True)
- self.assertIdentical(weak_inst.counter, x)
- del o
- o = weak_inst()
- self.assertFalse(o is weak_inst(disable_inst_caching=True))
-
- def test_class_disabling(self):
- automatic_disabled_weak_inst.reset()
- self.assertNotIdentical(
- automatic_disabled_weak_inst(), automatic_disabled_weak_inst())
- self.assertNotIdentical(
- explicit_disabled_weak_inst(), explicit_disabled_weak_inst())
-
- def test_reenabled(self):
- self.test_reuse(reenabled_weak_inst)
-
- # Read this before doing anything with the warnings-related
- # tests unless you really enjoy debugging Heisenbugs.
- #
- # The warnings module is optimized for the common case of
- # warnings that should be ignored: it stores a "key"
- # consisting of the type of warning, the warning message and
- # the module it originates from in a dict (cleverly hidden
- # away in the globals() of the frame calling warn()) if a
- # warning should be ignored, and then immediately ignores
- # warnings matching that key, *without* looking at the current
- # filters list.
- #
- # This means that if our test(s) with warnings ignored run
- # before tests with warnings turned into exceptions (test
- # order is random, enter Heisenbugs) and both tests involve
- # the same exception message they will screw up the tests.
- #
- # To make matters more interesting the warning message we deal
- # with here is not constant. Specifically it contains the
- # repr() of an argument tuple, containing a class instance,
- # which means the message will contain the address that object
- # is stored at!
- #
- # This exposed itself as crazy test failures where running
- # from .py fails and from .pyc works (perhaps related to the
- # warnings module taking a different codepath for this) and
- # creating objects or setting pdb breakpoints before that
- # failure caused the test to pass again.
- #
- # What all this means: Be 100% positively absolutely sure
- # test_uncachable and test_uncachable_warnings do not see the
- # same warning message ever. We do that by making sure their
- # warning messages contain a different classname
- # (RaisingHashFor...).
-
- def test_uncachable(self):
- weak_inst.reset()
-
- # This name is *important*, see above.
- class RaisingHashForTestUncachable(object):
- def __init__(self, error):
- self.error = error
- def __hash__(self):
- raise self.error
-
- self.assertTrue(weak_inst([]) is not weak_inst([]))
- self.assertEqual(weak_inst.counter, 2)
- for x in (TypeError, NotImplementedError):
- self.assertNotIdentical(
- weak_inst(RaisingHashForTestUncachable(x)),
- weak_inst(RaisingHashForTestUncachable(x)))
-
- # These are applied in reverse order. Effect is UserWarning is
- # ignored and everything else is an error.
- test_uncachable.suppress = [
- (('error',), {}), (('ignore',), {'category': UserWarning})]
-
- def test_uncachable_warning(self):
- # This name is *important*, see above.
- class RaisingHashForTestUncachableWarnings(object):
- def __init__(self, error):
- self.error = error
- def __hash__(self):
- raise self.error
-
- for x in (TypeError, NotImplementedError):
- self.assertRaises(UserWarning, weak_inst,
- RaisingHashForTestUncachableWarnings(x))
-
- test_uncachable_warning.suppress = [
- (('error',), {'category': UserWarning})]
-
- def test_hash_collision(self):
- class BrokenHash(object):
- def __hash__(self):
- return 1
- self.assertNotIdentical(weak_inst(BrokenHash()),
- weak_inst(BrokenHash()))
-
- def test_weak_slot(self):
- weak_slotted()
-
- def test_keyword_args(self):
- o = weak_inst(argument=1)
- self.assertIdentical(o, weak_inst(argument=1))
- self.assertNotIdentical(o, weak_inst(argument=2))
-
- # Hack to make it show up with a different name in trial's output
- TestWeakInstMeta.__name__ = WeakInstMeta.__name__ + 'Test'
-
- return TestWeakInstMeta
-
-# "Invalid name"
-# pylint: disable-msg=C0103
-
-TestNativeWeakInstMeta = gen_test(caching.native_WeakInstMeta)
-
-if caching.cpy_WeakInstMeta is not None:
- CPY_TestWeakInstMeta = gen_test(caching.cpy_WeakInstMeta)
-else:
- # generate fake test and mark it as skip
- CPY_TestWeakInstMeta = gen_test(type)
- CPY_TestWeakInstMeta.skip = "cpython cpv extension isn't available"
-
diff --git a/pkgcore/test/util/test_commandline.py b/pkgcore/test/util/test_commandline.py
index 6356f7ea1..53024d9e5 100644
--- a/pkgcore/test/util/test_commandline.py
+++ b/pkgcore/test/util/test_commandline.py
@@ -16,6 +16,9 @@ from pkgcore.config import basics, central, configurable, errors
# Careful: the tests should not hit a load_config() call!
+def sect():
+ """Just a no-op to use as configurable class."""
+
class OptionsTest(TestCase):
@@ -35,9 +38,6 @@ class OptionsTest(TestCase):
self.assertFalse(section.debug)
confdict['sect'] = section
- def sect():
- """Just a no-op to use as configurable class."""
-
parser = commandline.OptionParser(option_list=[
optparse.Option('-a', action='callback', callback=callback)])
parser = helpers.mangle_parser(parser)
@@ -97,6 +97,74 @@ class OptionsTest(TestCase):
self.fail('no exception raised')
+class ModifyParser(commandline.OptionParser):
+
+ def _trigger(self, option, opt_str, value, parser):
+ """Fake a config load."""
+ # HACK: force skipping the actual config loading. Might want
+ # to do something more complicated here to allow testing if
+ # --empty-config actually works.
+ parser.values.empty_config = True
+ parser.values.config
+
+ def __init__(self):
+ commandline.OptionParser.__init__(self)
+ self.add_option('--trigger', action='callback', callback=self._trigger)
+
+
+class ModifyConfigTest(TestCase, helpers.MainMixin):
+
+ parser = helpers.mangle_parser(ModifyParser())
+
+ def parse(self, *args, **kwargs):
+ """Overridden to allow the load_config call."""
+ values = self.parser.get_default_values()
+ # optparse needs a list (it does make a copy, but it uses [:]
+ # to do it, which is a noop on a tuple).
+ options, args = self.parser.parse_args(list(args), values)
+ self.assertFalse(args)
+ return options
+
+ def test_empty_config(self):
+ self.assertError(
+ 'Configuration already loaded. If moving the option earlier on '
+ 'the commandline does not fix this report it as a bug.',
+ '--trigger', '--empty-config')
+ self.assertTrue(self.parse('--empty-config', '--trigger'))
+
+ def test_modify_config(self):
+ self.assertError(
+ 'Configuration already loaded. If moving the option earlier on '
+ 'the commandline does not fix this report it as a bug.',
+ '--empty-config', '--trigger',
+ '--new-config','foo', 'class', 'sect')
+ values = self.parse(
+ '--empty-config', '--new-config',
+ 'foo', 'class', 'pkgcore.test.util.test_commandline.sect',
+ '--trigger')
+ self.assertTrue(values.config.collapse_named_section('foo'))
+ values = self.parse(
+ '--empty-config', '--new-config',
+ 'foo', 'class', 'pkgcore.test.util.test_commandline.missing',
+ '--add-config', 'foo', 'class',
+ 'pkgcore.test.util.test_commandline.sect',
+ '--trigger')
+ self.assertTrue(values.config.collapse_named_section('foo'))
+ self.assertError(
+ "'class' is already set (to 'first')",
+ '--empty-config',
+ '--new-config', 'foo', 'class', 'first',
+ '--new-config', 'foo', 'class', 'foon',
+ '--trigger')
+ values = self.parse(
+ '--empty-config',
+ '--add-config', 'foo', 'inherit', 'missing',
+ '--trigger')
+ self.assertRaises(
+ errors.ConfigurationError,
+ values.config.collapse_named_section, 'foo')
+
+
def main(options, out, err):
return options
@@ -160,7 +228,7 @@ Use --help after a subcommand for more help.
self.assertEqual(options.progname, 'fo sub')
self.assertMain(
- None, '', '',
+ None, '\n', '',
{'sub': (SubParser, submain)}, ['sub', 'subarg'], script_name='fo')
def test_configuration_error(self):
@@ -173,7 +241,7 @@ Use --help after a subcommand for more help.
values._config = central.ConfigManager()
return values
self.assertMain(
- 1, '', 'Error in configuration:\nbork\n',
+ 1, '\n', 'Error in configuration:\nbork\n',
{None: (NoLoadParser, error_main)}, [])
self.assertRaises(
errors.ConfigurationError, self.assertMain,
diff --git a/pkgcore/test/util/test_compatibility.py b/pkgcore/test/util/test_compatibility.py
deleted file mode 100644
index fb98c99a4..000000000
--- a/pkgcore/test/util/test_compatibility.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util import compatibility
-from pkgcore.util.currying import post_curry
-
-class mixin(object):
- def test_builtin_override(self):
- if self.func_name in __builtins__:
- self.assertIdentical(__builtins__[self.func_name],
- getattr(compatibility, self.func_name))
-
- def check_func(self, result1, result2, test3, result3):
- i = iter(xrange(100))
- f = getattr(compatibility, self.func_name)
- self.assertEqual(f(x==3 for x in i), result1)
- self.assertEqual(i.next(), result2)
- self.assertEqual(f(test3), result3)
-
-class AnyTest(TestCase, mixin):
- func_name = "any"
- test_any = post_curry(
- mixin.check_func, True, 4, (x==3 for x in xrange(2)), False)
-
-
-class AllTest(TestCase, mixin):
- func_name = "all"
- test_all = post_curry(mixin.check_func, False, 1,
- (isinstance(x, int) for x in xrange(100)), True)
diff --git a/pkgcore/test/util/test_containers.py b/pkgcore/test/util/test_containers.py
deleted file mode 100644
index 1916af0da..000000000
--- a/pkgcore/test/util/test_containers.py
+++ /dev/null
@@ -1,201 +0,0 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
-# Copyright: 2005 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-from pkgcore.test import TestCase
-from pkgcore.util import containers
-
-
-class InvertedContainsTest(TestCase):
-
- def setUp(self):
- self.set = containers.InvertedContains(range(12))
-
- def test_basic(self):
- self.failIf(7 in self.set)
- self.failUnless(-7 in self.set)
-
-
-class LimitedChangeSetTest(TestCase):
-
- def setUp(self):
- self.set = containers.LimitedChangeSet(range(12))
-
- def test_basic(self, changes=0):
- # this should be a no-op
- self.set.rollback(changes)
- # and this is invalid
- self.assertRaises(TypeError, self.set.rollback, changes + 1)
- self.failUnless(0 in self.set)
- self.failIf(12 in self.set)
- self.assertEqual(12, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(12)))
- self.assertEqual(changes, self.set.changes_count())
- self.assertRaises(TypeError, self.set.rollback, -1)
-
- def test_dummy_commit(self):
- # this should be a no-op
- self.set.commit()
- # so this should should run just as before
- self.test_basic()
-
- def test_adding(self):
- self.set.add(13)
- self.failUnless(13 in self.set)
- self.assertEqual(13, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(12)) + [13])
- self.assertEqual(1, self.set.changes_count())
- self.set.add(13)
- self.assertRaises(containers.Unchangable, self.set.remove, 13)
-
- def test_add_rollback(self):
- self.set.add(13)
- self.set.rollback(0)
- # this should run just as before
- self.test_basic()
-
- def test_add_commit_remove_commit(self):
- self.set.add(13)
- self.set.commit()
- # should look like right before commit
- self.assertEqual(13, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(12)) + [13])
- self.assertEqual(0, self.set.changes_count())
- # and remove...
- self.set.remove(13)
- # should be back to basic, but with 1 change
- self.test_basic(1)
- self.set.commit()
- self.test_basic()
-
- def test_removing(self):
- self.set.remove(0)
- self.failIf(0 in self.set)
- self.assertEqual(11, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(1, 12)))
- self.assertEqual(1, self.set.changes_count())
- self.assertRaises(containers.Unchangable, self.set.add, 0)
- self.assertRaises(KeyError, self.set.remove, 0)
-
- def test_remove_rollback(self):
- self.set.remove(0)
- self.set.rollback(0)
- self.test_basic()
-
- def test_remove_commit_add_commit(self):
- self.set.remove(0)
- self.set.commit()
- self.failIf(0 in self.set)
- self.assertEqual(11, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(1, 12)))
- self.assertEqual(0, self.set.changes_count())
- self.set.add(0)
- self.test_basic(1)
- self.set.commit()
- self.test_basic()
-
- def test_longer_transaction(self):
- self.set.add(12)
- self.set.remove(7)
- self.set.rollback(1)
- self.set.add(-1)
- self.set.commit()
- self.assertEqual(sorted(list(self.set)), list(range(-1, 13)))
-
- def test_str(self):
- self.assertEqual(
- str(containers.LimitedChangeSet([7])), 'LimitedChangeSet([7])')
-
-
- def test__eq__(self):
- c = containers.LimitedChangeSet(range(99))
- c.add(99)
- self.assertEqual(c, containers.LimitedChangeSet(range(100)))
- s = set(c)
- # ordering here matters.
- self.assertEqual(c, s, reflective=False)
- s.add(100)
- self.assertNotEqual(c, s, reflective=False)
- self.assertNotEqual(c, None, reflective=False)
-
-
-class LimitedChangeSetWithBlacklistTest(TestCase):
-
- def setUp(self):
- self.set = containers.LimitedChangeSet(range(12), [3, 13])
-
- def test_basic(self):
- self.failUnless(0 in self.set)
- self.failIf(12 in self.set)
- self.assertEqual(12, len(self.set))
- self.assertEqual(sorted(list(self.set)), list(range(12)))
- self.assertEqual(0, self.set.changes_count())
- self.assertRaises(TypeError, self.set.rollback, -1)
-
- def test_adding_blacklisted(self):
- self.assertRaises(containers.Unchangable, self.set.add, 13)
-
- def test_removing_blacklisted(self):
- self.assertRaises(containers.Unchangable, self.set.remove, 3)
-
-
-class TestProtectedSet(TestCase):
-
- kls = containers.ProtectedSet
-
- def test_contains(self):
- c = self.kls(frozenset(xrange(100)))
- self.assertIn(1, c)
- self.assertNotIn(100, c)
- c.add(100)
- self.assertIn(1, c)
-
- def test_len(self):
- c = self.kls(frozenset(xrange(100)))
- self.assertEqual(len(c), 100)
- c.add(1)
- self.assertEqual(len(c), 100)
- c.add(100)
- self.assertEqual(len(c), 101)
-
- def test_add(self):
- s = set(xrange(10))
- c = self.kls(s)
- c.add(10)
- self.assertIn(10, c)
- self.assertNotIn(10, s)
- # finally, verify it's not adding duplicates to new.
- c.add(1)
- s.remove(1)
- self.assertNotIn(1, c)
-
- def test_iter(self):
- s = set(xrange(100))
- c = self.kls(s)
- self.assertEqual(sorted(xrange(100)), sorted(c))
- c.add(100)
- s.add(100)
- self.assertEqual(sorted(xrange(101)), sorted(c))
-
-
-
-class TestRefCountingSet(TestCase):
-
- def test_it(self):
- c = containers.RefCountingSet((1, 2))
- self.assertIn(1, c)
- self.assertIn(2, c)
- c.remove(1)
- self.assertNotIn(1, c)
- self.assertRaises(KeyError, c.remove, 1)
- c.add(2)
- self.assertIn(2, c)
- c.remove(2)
- self.assertIn(2, c)
- c.remove(2)
- self.assertNotIn(2, c)
- c.add(3)
- self.assertIn(3, c)
- c.remove(3)
- self.assertNotIn(3, c)
diff --git a/pkgcore/test/util/test_currying.py b/pkgcore/test/util/test_currying.py
deleted file mode 100644
index 5ec47bc45..000000000
--- a/pkgcore/test/util/test_currying.py
+++ /dev/null
@@ -1,156 +0,0 @@
-# Copyright: 2005 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util import currying
-
-
-# Magic to make trial doctest our docstrings.
-__doctests__ = [currying]
-
-def passthrough(*args, **kwargs):
- return args, kwargs
-
-# docstring is part of the test
-
-def documented():
- """original docstring"""
-
-class PreCurryTest(TestCase):
-
- pre_curry = staticmethod(currying.pre_curry)
-
- def test_pre_curry(self):
- noop = self.pre_curry(passthrough)
- self.assertEqual(noop(), ((), {}))
- self.assertEqual(noop('foo', 'bar'), (('foo', 'bar'), {}))
- self.assertEqual(noop(foo='bar'), ((), {'foo': 'bar'}))
- self.assertEqual(noop('foo', bar='baz'), (('foo',), {'bar': 'baz'}))
-
- one_arg = self.pre_curry(passthrough, 42)
- self.assertEqual(one_arg(), ((42,), {}))
- self.assertEqual(one_arg('foo', 'bar'), ((42, 'foo', 'bar'), {}))
- self.assertEqual(one_arg(foo='bar'), ((42,), {'foo': 'bar'}))
- self.assertEqual(
- one_arg('foo', bar='baz'), ((42, 'foo'), {'bar': 'baz'}))
-
- keyword_arg = self.pre_curry(passthrough, foo=42)
- self.assertEqual(keyword_arg(), ((), {'foo': 42}))
- self.assertEqual(
- keyword_arg('foo', 'bar'), (('foo', 'bar'), {'foo': 42}))
- self.assertEqual(keyword_arg(foo='bar'), ((), {'foo': 'bar'}))
- self.assertEqual(
- keyword_arg('foo', bar='baz'),
- (('foo',), {'bar': 'baz', 'foo': 42}))
-
- both = self.pre_curry(passthrough, 42, foo=42)
- self.assertEqual(both(), ((42,), {'foo': 42}))
- self.assertEqual(
- both('foo', 'bar'), ((42, 'foo', 'bar'), {'foo': 42}))
- self.assertEqual(both(foo='bar'), ((42,), {'foo': 'bar'}))
- self.assertEqual(
- both('foo', bar='baz'), ((42, 'foo'), {'bar': 'baz', 'foo': 42}))
-
- def test_curry_original(self):
- self.assertIdentical(self.pre_curry(passthrough).func, passthrough)
-
- def test_module_magic(self):
- self.assertIdentical(
- currying.pretty_docs(self.pre_curry(passthrough)).__module__,
- passthrough.__module__)
- # test is kinda useless if they are identical without pretty_docs
- self.assertNotIdentical(
- getattr(self.pre_curry(passthrough), '__module__', None),
- passthrough.__module__)
-
- def test_pretty_docs(self):
- for func in (passthrough, documented):
- self.assertEqual(
- currying.pretty_docs(
- self.pre_curry(func), 'new doc').__doc__,
- 'new doc')
- self.assertIdentical(
- currying.pretty_docs(self.pre_curry(func)).__doc__,
- func.__doc__)
-
- def test_instancemethod(self):
- class Test(object):
- method = self.pre_curry(passthrough, 'test')
- test = Test()
- self.assertEqual((('test', test), {}), test.method())
-
-
-class NativePartialTest(PreCurryTest):
-
- pre_curry = staticmethod(currying.native_partial)
-
- def test_instancemethod(self):
- class Test(object):
- method = self.pre_curry(passthrough, 'test')
- test = Test()
- self.assertEqual((('test',), {}), test.method())
-
-
-class CPyPartialTest(NativePartialTest):
-
- pre_curry = staticmethod(currying.partial)
-
- if currying.native_partial is currying.partial:
- skip = 'cpy partial not available.'
-
-
-class PostCurryTest(TestCase):
-
- def test_post_curry(self):
- noop = currying.post_curry(passthrough)
- self.assertEqual(noop(), ((), {}))
- self.assertEqual(noop('foo', 'bar'), (('foo', 'bar'), {}))
- self.assertEqual(noop(foo='bar'), ((), {'foo': 'bar'}))
- self.assertEqual(noop('foo', bar='baz'), (('foo',), {'bar': 'baz'}))
-
- one_arg = currying.post_curry(passthrough, 42)
- self.assertEqual(one_arg(), ((42,), {}))
- self.assertEqual(one_arg('foo', 'bar'), (('foo', 'bar', 42), {}))
- self.assertEqual(one_arg(foo='bar'), ((42,), {'foo': 'bar'}))
- self.assertEqual(
- one_arg('foo', bar='baz'), (('foo', 42), {'bar': 'baz'}))
-
- keyword_arg = currying.post_curry(passthrough, foo=42)
- self.assertEqual(keyword_arg(), ((), {'foo': 42}))
- self.assertEqual(
- keyword_arg('foo', 'bar'), (('foo', 'bar'), {'foo': 42}))
- self.assertEqual(
- keyword_arg(foo='bar'), ((), {'foo': 42}))
- self.assertEqual(
- keyword_arg('foo', bar='baz'),
- (('foo',), {'bar': 'baz', 'foo': 42}))
-
- both = currying.post_curry(passthrough, 42, foo=42)
- self.assertEqual(both(), ((42,), {'foo': 42}))
- self.assertEqual(
- both('foo', 'bar'), (('foo', 'bar', 42), {'foo': 42}))
- self.assertEqual(both(foo='bar'), ((42,), {'foo': 42}))
- self.assertEqual(
- both('foo', bar='baz'), (('foo', 42), {'bar': 'baz', 'foo': 42}))
-
- def test_curry_original(self):
- self.assertIdentical(
- currying.post_curry(passthrough).func, passthrough)
-
- def test_instancemethod(self):
- class Test(object):
- method = currying.post_curry(passthrough, 'test')
- test = Test()
- self.assertEqual(((test, 'test'), {}), test.method())
-
-class TestAliasClassAttr(TestCase):
- def test_alias_class_method(self):
- class kls(object):
- __len__ = lambda s: 3
- lfunc = currying.alias_class_method("__len__")
-
- c = kls()
- self.assertEqual(c.__len__(), c.lfunc())
- c.__len__ = lambda : 4
- self.assertEqual(c.__len__(), c.lfunc())
-
diff --git a/pkgcore/test/util/test_dependant_methods.py b/pkgcore/test/util/test_dependant_methods.py
deleted file mode 100644
index c8225f54a..000000000
--- a/pkgcore/test/util/test_dependant_methods.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util import dependant_methods as dm
-from pkgcore.util import currying
-
-
-def func(self, seq, data, val=True):
- seq.append(data)
- return val
-
-
-class TestDependantMethods(TestCase):
-
- @staticmethod
- def generate_instance(methods, dependencies):
- class Class(object):
- __metaclass__ = dm.ForcedDepends
- stage_depends = dict(dependencies)
-
- for k, v in methods.iteritems():
- setattr(Class, k, v)
-
- return Class()
-
- def test_no_dependant_methods(self):
- self.failUnless(self.generate_instance({}, {}))
-
- def test_return_checking(self):
- results = []
- o = self.generate_instance(
- dict((str(x), currying.post_curry(func, results, x))
- for x in range(10)),
- dict((str(x), str(x - 1)) for x in xrange(1, 10)))
- getattr(o, "9")()
- self.assertEqual(results, range(10))
- results = []
- o = self.generate_instance(
- dict((str(x), currying.post_curry(func, results, x, False))
- for x in range(10)),
- dict((str(x), str(x - 1)) for x in xrange(1, 10)))
- getattr(o, "9")()
- self.assertEqual(results, [0])
- getattr(o, "9")()
- self.assertEqual(results, [0, 0])
-
- def test_stage_awareness(self):
- results = []
- o = self.generate_instance(
- dict((str(x), currying.post_curry(func, results, x))
- for x in range(10)),
- dict((str(x), str(x - 1)) for x in xrange(1, 10)))
- getattr(o, "1")()
- self.assertEqual(results, [0, 1])
- getattr(o, "2")()
- self.assertEqual(results, [0, 1, 2])
- getattr(o, "2")()
- self.assertEqual(results, [0, 1, 2])
-
- def test_stage_depends(self):
- results = []
- methods = dict((str(x), currying.post_curry(func, results, x))
- for x in range(10))
- deps = dict((str(x), str(x - 1)) for x in xrange(1, 10))
- deps["1"] = ["0", "a"]
- methods["a"] = currying.post_curry(func, results, "a")
- o = self.generate_instance(methods, deps)
- getattr(o, "1")()
- self.assertEqual(results, [0, "a", 1])
- getattr(o, "2")()
- self.assertEqual(results, [0, "a", 1, 2])
diff --git a/pkgcore/test/util/test_descriptors.py b/pkgcore/test/util/test_descriptors.py
deleted file mode 100644
index d0da63ea4..000000000
--- a/pkgcore/test/util/test_descriptors.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-from pkgcore.test import TestCase
-
-from pkgcore.util import descriptors
-
-
-class ClassProp(object):
-
- @descriptors.classproperty
- def test(cls):
- """Just an example."""
- return 'good', cls
-
-
-class DescriptorTest(TestCase):
-
- def test_classproperty(self):
- self.assertEqual(('good', ClassProp), ClassProp.test)
- self.assertEqual(('good', ClassProp), ClassProp().test)
diff --git a/pkgcore/test/util/test_file.py b/pkgcore/test/util/test_file.py
deleted file mode 100644
index 6e6bad7c0..000000000
--- a/pkgcore/test/util/test_file.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# Copyright: 2005 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-import tempfile, os
-from StringIO import StringIO
-
-from pkgcore.test import TestCase
-
-# ick, a module shadowing a builtin. import its contents instead.
-from pkgcore.util.file import (
- iter_read_bash, read_bash, read_dict, AtomicWriteFile, read_bash_dict,
- ParseError)
-from pkgcore.test.mixins import TempDirMixin
-
-
-class TestBashCommentStripping(TestCase):
-
- def test_iter_read_bash(self):
- self.assertEqual(
- list(iter_read_bash(StringIO(
- '\n'
- '# hi I am a comment\n'
- 'I am not\n'))),
- ['I am not'])
-
- def test_read_bash(self):
- self.assertEqual(
- read_bash(StringIO(
- '\n'
- '# hi I am a comment\n'
- 'I am not\n')),
- ['I am not'])
-
-
-class TestReadBashConfig(TestCase):
-
- def test_read_dict(self):
- self.assertEqual(
- read_dict(StringIO(
- '\n'
- '# hi I am a comment\n'
- 'foo1=bar\n'
- 'foo2="bar"\n'
- 'foo3=\'bar"\n'
- )),
- {'foo1': 'bar',
- 'foo2': 'bar',
- 'foo3': '\'bar"',
- })
- self.assertEqual(
- read_dict(['foo=bar'], source_isiter=True), {'foo': 'bar'})
- self.assertRaises(
- ParseError, read_dict, ['invalid'], source_isiter=True)
-
-
-class ReadBashDictTest(TestCase):
-
- def setUp(self):
- self.valid_file = tempfile.NamedTemporaryFile()
- self.valid_file.write(
- '# hi I am a comment\n'
- 'foo1=bar\n'
- "foo2='bar'\n"
- 'foo3="bar"\n'
- 'foo4=-/:j4\n'
- 'foo5=\n')
- self.valid_file.flush()
- self.sourcing_file = tempfile.NamedTemporaryFile()
- self.sourcing_file.write('source "%s"\n' % self.valid_file.name)
- self.sourcing_file.flush()
- self.advanced_file = tempfile.NamedTemporaryFile()
- self.advanced_file.write(
- 'one1=1\n'
- 'one_=$one1\n'
- 'two1=2\n'
- 'two_=${two1}\n'
- )
- self.advanced_file.flush()
- self.env_file = tempfile.NamedTemporaryFile()
- self.env_file.write(
- 'imported=${external}\n'
- )
- self.env_file.flush()
- self.escaped_file = tempfile.NamedTemporaryFile()
- self.escaped_file.write(
- 'end=bye\n'
- 'quoteddollar="\${dollar}"\n'
- 'quotedexpansion="\${${end}}"\n'
- )
- self.escaped_file.flush()
- self.unclosed_file = tempfile.NamedTemporaryFile()
- self.unclosed_file.write('foo="bar')
- self.unclosed_file.flush()
-
- def tearDown(self):
- del self.valid_file
- del self.sourcing_file
- del self.advanced_file
- del self.env_file
- del self.escaped_file
- del self.unclosed_file
-
- def test_read_bash_dict(self):
- # TODO this is not even close to complete
- self.assertEqual(
- read_bash_dict(self.valid_file.name),
- {'foo1': 'bar', 'foo2': 'bar', 'foo3': 'bar', 'foo4': '-/:j4',
- 'foo5': ''})
- s = "a=b\ny='"
- self.assertRaises(ParseError, read_bash_dict, StringIO(s))
-
- def test_var_read(self):
- self.assertEqual(read_bash_dict(StringIO("x=y@a\n")),
- {'x':'y@a'})
- self.assertEqual(read_bash_dict(StringIO("x=y~a\n")),
- {'x':'y~a'})
- self.assertEqual(read_bash_dict(StringIO("x=y^a\n")),
- {'x':'y^a'})
-
- def test_empty_assign(self):
- open(self.valid_file.name, 'w').write("foo=\ndar=blah\n")
- self.assertEqual(read_bash_dict(self.valid_file.name),
- {'foo':'', 'dar':'blah'})
- open(self.valid_file.name, 'w').write("foo=\ndar=\n")
- self.assertEqual(read_bash_dict(self.valid_file.name),
- {'foo':'', 'dar':''})
- open(self.valid_file.name, 'w').write("foo=blah\ndar=\n")
- self.assertEqual(read_bash_dict(self.valid_file.name),
- {'foo':'blah', 'dar':''})
-
- def test_quoting(self):
- self.assertEqual(read_bash_dict(StringIO("x='y \\\na'")),
- {'x':'y \\\na'})
- self.assertEqual(read_bash_dict(StringIO('x="y \\\nasdf"')),
- {'x':'y asdf'})
-
- def test_sourcing(self):
- # TODO this is not even close to complete
- self.assertEqual(
- read_bash_dict(self.sourcing_file.name, sourcing_command='source'),
- {'foo1': 'bar', 'foo2': 'bar', 'foo3': 'bar', 'foo4': '-/:j4',
- 'foo5':''})
-
- def test_read_advanced(self):
- self.assertEqual(
- read_bash_dict(self.advanced_file.name),
- {'one1': '1',
- 'one_': '1',
- 'two1': '2',
- 'two_': '2',
- })
-
- def test_env(self):
- self.assertEqual(
- read_bash_dict(self.env_file.name),
- {'imported': ''})
- env = {'external': 'imported foo'}
- env_backup = env.copy()
- self.assertEqual(
- read_bash_dict(self.env_file.name, env),
- {'imported': 'imported foo'})
- self.assertEqual(env_backup, env)
-
- def test_escaping(self):
- self.assertEqual(
- read_bash_dict(self.escaped_file.name), {
- 'end': 'bye',
- 'quoteddollar': '${dollar}',
- 'quotedexpansion': '${bye}',
- })
-
- def test_unclosed(self):
- self.assertRaises(ParseError, read_bash_dict, self.unclosed_file.name)
-
-
-class TestAtomicWriteFile(TempDirMixin, TestCase):
-
- def test_normal_ops(self):
- fp = os.path.join(self.dir, "target")
- open(fp, "w").write("me")
- af = AtomicWriteFile(fp)
- af.write("dar")
- self.assertEqual(open(fp, "r").read(), "me")
- af.close()
- self.assertEqual(open(fp, "r").read(), "dar")
-
- def test_del(self):
- fp = os.path.join(self.dir, "target")
- open(fp, "w").write("me")
- self.assertEqual(open(fp, "r").read(), "me")
- af = AtomicWriteFile(fp)
- af.write("dar")
- del af
- self.assertEqual(open(fp, "r").read(), "me")
- self.assertEqual(len(os.listdir(self.dir)), 1)
diff --git a/pkgcore/test/util/test_formatters.py b/pkgcore/test/util/test_formatters.py
deleted file mode 100644
index 71e43b454..000000000
--- a/pkgcore/test/util/test_formatters.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-import StringIO
-import tempfile
-
-from pkgcore.test import TestCase
-
-from pkgcore.util import formatters
-
-
-class PlainTextFormatterTest(TestCase):
-
- def test_basics(self):
- # As many sporks as fit in 20 chars.
- sporks = ' '.join(3 * ('spork',))
- for inputs, output in [
- ((u'\N{SNOWMAN}',), '?'),
- ((7 * 'spork ',), '%s\n%s\n%s' % (sporks, sporks, 'spork ')),
- (7 * ('spork ',), '%s \n%s \n%s' % (sporks, sporks, 'spork ')),
- ((30 * 'a'), 20 * 'a' + '\n' + 10 * 'a'),
- (30 * ('a',), 20 * 'a' + '\n' + 10 * 'a'),
- ]:
- stream = StringIO.StringIO()
- formatter = formatters.PlainTextFormatter(stream, encoding='ascii')
- formatter.width = 20
- formatter.write(autoline=False, wrap=True, *inputs)
- self.assertEqual(output, stream.getvalue())
-
- def test_first_prefix(self):
- # As many sporks as fit in 20 chars.
- for inputs, output in [
- ((u'\N{SNOWMAN}',), 'foon:?'),
- ((7 * 'spork ',),
- 'foon:spork spork\n'
- 'spork spork spork\n'
- 'spork spork '),
- (7 * ('spork ',),
- 'foon:spork spork \n'
- 'spork spork spork \n'
- 'spork spork '),
- ((30 * 'a'), 'foon:' + 15 * 'a' + '\n' + 15 * 'a'),
- (30 * ('a',), 'foon:' + 15 * 'a' + '\n' + 15 * 'a'),
- ]:
- stream = StringIO.StringIO()
- formatter = formatters.PlainTextFormatter(stream, encoding='ascii')
- formatter.width = 20
- formatter.write(autoline=False, wrap=True, first_prefix='foon:',
- *inputs)
- self.assertEqual(output, stream.getvalue())
-
- def test_later_prefix(self):
- for inputs, output in [
- ((u'\N{SNOWMAN}',), '?'),
- ((7 * 'spork ',),
- 'spork spork spork\n'
- 'foon:spork spork\n'
- 'foon:spork spork '),
- (7 * ('spork ',),
- 'spork spork spork \n'
- 'foon:spork spork \n'
- 'foon:spork spork '),
- ((30 * 'a'), 20 * 'a' + '\n' + 'foon:' + 10 * 'a'),
- (30 * ('a',), 20 * 'a' + '\n' + 'foon:' + 10 * 'a'),
- ]:
- stream = StringIO.StringIO()
- formatter = formatters.PlainTextFormatter(stream, encoding='ascii')
- formatter.width = 20
- formatter.later_prefix = ['foon:']
- formatter.write(wrap=True, autoline=False, *inputs)
- self.assertEqual(output, stream.getvalue())
-
- def test_wrap_autoline(self):
- for inputs, output in [
- ((3 * ('spork',)), 'spork\nspork\nspork\n'),
- (3 * (('spork',),), 'spork\nspork\nspork\n'),
- (((3 * 'spork',),),
- '\n'
- 'foonsporks\n'
- 'foonporksp\n'
- 'foonork\n'),
- ((('fo',), (2 * 'spork',),), 'fo\nsporkspork\n'),
- ((('fo',), (3 * 'spork',),),
- 'fo\n'
- '\n'
- 'foonsporks\n'
- 'foonporksp\n'
- 'foonork\n'),
- ]:
- stream = StringIO.StringIO()
- formatter = formatters.PlainTextFormatter(stream, encoding='ascii')
- formatter.width = 10
- for input in inputs:
- formatter.write(wrap=True, later_prefix='foon', *input)
- self.assertEqual(output, stream.getvalue())
-
-
-class TerminfoFormatterTest(TestCase):
-
- def _test_stream(self, stream, formatter, *data):
- for inputs, outputs in data:
- stream.seek(0)
- stream.truncate()
- formatter.write(*inputs)
- stream.seek(0)
- self.assertEqual(''.join(outputs), stream.read())
-
- def test_terminfo(self):
- esc = '\x1b['
- stream = tempfile.TemporaryFile()
- f = formatters.TerminfoFormatter(stream, 'ansi', True, 'ascii')
- f.autoline = False
- self._test_stream(
- stream, f,
- ((f.bold, 'bold'), (esc, '1m', 'bold', esc, '0;10m')),
- ((f.underline, 'underline'),
- (esc, '4m', 'underline', esc, '0;10m')),
- ((f.fg('red'), 'red'), (esc, '31m', 'red', esc, '39;49m')),
- ((f.fg('red'), 'red', f.bold, 'boldred', f.fg(), 'bold',
- f.reset, 'done'),
- (esc, '31m', 'red', esc, '1m', 'boldred', esc, '39;49m', 'bold',
- esc, '0;10m', 'done')),
- ((42,), ('42',)),
- ((u'\N{SNOWMAN}',), ('?',))
- )
- f.autoline = True
- self._test_stream(
- stream, f, (('lala',), ('lala', '\n')))
diff --git a/pkgcore/test/util/test_iterables.py b/pkgcore/test/util/test_iterables.py
deleted file mode 100644
index 459d30f2d..000000000
--- a/pkgcore/test/util/test_iterables.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util.iterables import expandable_chain, caching_iter, iter_sort
-
-
-class ExpandableChainTest(TestCase):
-
- def test_normal_function(self):
- i = [iter(xrange(100)) for x in xrange(3)]
- e = expandable_chain()
- e.extend(i)
- self.assertEqual(list(e), range(100)*3)
- for x in i + [e]:
- self.assertRaises(StopIteration, x.next)
-
- def test_extend(self):
- e = expandable_chain()
- e.extend(xrange(100) for i in (1, 2))
- self.assertEqual(list(e), range(100)*2)
- self.assertRaises(StopIteration, e.extend, [[]])
-
- def test_extendleft(self):
- e = expandable_chain(xrange(20, 30))
- e.extendleft([xrange(10, 20), xrange(10)])
- self.assertEqual(list(e), range(30))
- self.assertRaises(StopIteration, e.extendleft, [[]])
-
- def test_append(self):
- e = expandable_chain()
- e.append(xrange(100))
- self.assertEqual(list(e), range(100))
- self.assertRaises(StopIteration, e.append, [])
-
- def test_appendleft(self):
- e = expandable_chain(xrange(10, 20))
- e.appendleft(xrange(10))
- self.assertEqual(list(e), range(20))
- self.assertRaises(StopIteration, e.append, [])
-
-
-class CachingIterTest(TestCase):
-
- def test_iter_consumption(self):
- i = iter(xrange(100))
- c = caching_iter(i)
- i2 = iter(c)
- for _ in xrange(20):
- i2.next()
- self.assertEqual(i.next(), 20)
- # note we consumed one ourselves
- self.assertEqual(c[20], 21)
- list(c)
- self.assertRaises(StopIteration, i.next)
- self.assertEqual(list(c), range(20) + range(21, 100))
-
- def test_init(self):
- self.assertEqual(caching_iter(list(xrange(100)))[0], 0)
-
- def test_full_consumption(self):
- i = iter(xrange(100))
- c = caching_iter(i)
- self.assertEqual(list(c), range(100))
- # do it twice, to verify it returns properly
- self.assertEqual(list(c), range(100))
-
- def test_len(self):
- self.assertEqual(100, len(caching_iter(xrange(100))))
-
- def test_hash(self):
- self.assertEqual(hash(caching_iter(xrange(100))),
- hash(tuple(range(100))))
-
- def test_nonzero(self):
- self.assertEqual(bool(caching_iter(xrange(100))), True)
- self.assertEqual(bool(caching_iter(iter([]))), False)
-
- def test_cmp(self):
- self.assertEqual(caching_iter(xrange(100)), tuple(xrange(100)))
- self.assertNotEqual(caching_iter(xrange(90)), tuple(xrange(100)))
- self.assertTrue(caching_iter(xrange(100)) > tuple(xrange(90)))
- self.assertFalse(caching_iter(xrange(90)) > tuple(xrange(100)))
- self.assertTrue(caching_iter(xrange(100)) >= tuple(xrange(100)))
-
- def test_sorter(self):
- self.assertEqual(
- caching_iter(xrange(100, 0, -1), sorted), tuple(xrange(1, 101)))
- c = caching_iter(xrange(100, 0, -1), sorted)
- self.assertTrue(c)
- self.assertEqual(c, tuple(xrange(1, 101)))
-
-
-class iter_sortTest(TestCase):
- def test_ordering(self):
- f = lambda l: sorted(l, key=lambda x:x[0])
- self.assertEqual(
- list(iter_sort(
- f, *[iter(xrange(x, x+10)) for x in (30, 20, 0, 10)])),
- list(xrange(40)))
diff --git a/pkgcore/test/util/test_klass.py b/pkgcore/test/util/test_klass.py
deleted file mode 100644
index 8d26decda..000000000
--- a/pkgcore/test/util/test_klass.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util import klass, currying
-
-
-class Test_native_GetAttrProxy(TestCase):
- kls = staticmethod(klass.native_GetAttrProxy)
-
- def test_it(self):
- class foo1(object):
- def __init__(self, obj):
- self.obj = obj
- __getattr__ = self.kls('obj')
-
- class foo2(object):
- pass
-
- o2 = foo2()
- o = foo1(o2)
- self.assertRaises(AttributeError, getattr, o, "blah")
- self.assertEqual(o.obj, o2)
- o2.foon = "dar"
- self.assertEqual(o.foon, "dar")
- o.foon = "foo"
- self.assertEqual(o.foon, 'foo')
-
- def test_attrlist(self):
- def make_class(attr_list=None):
- class foo(object):
- __metaclass__ = self.kls
-
- if attr_list is not None:
- locals()['__attr_comparison__'] = attr_list
-
- self.assertRaises(TypeError, make_class)
- self.assertRaises(TypeError, make_class, [u'foon'])
- self.assertRaises(TypeError, make_class, [None])
-
-
-class Test_CPY_GetAttrProxy(Test_native_GetAttrProxy):
-
- kls = staticmethod(klass.GetAttrProxy)
- if klass.GetAttrProxy is klass.native_GetAttrProxy:
- skip = "cpython extension isn't available"
-
- def test_sane_recursion_bail(self):
- # people are stupid; if protection isn't in place, we wind up blowing
- # the c stack, which doesn't result in a friendly Exception being
- # thrown.
- # results in a segfault.. so if it's horked, this will bail the test
- # runner.
-
- class c(object):
- __getattr__ = self.kls("obj")
-
- o = c()
- o.obj = o
- # now it's cyclical.
- self.assertRaises(RuntimeError, getattr, o, "hooey")
-
-
-class Test_native_contains(TestCase):
- func = staticmethod(klass.native_contains)
-
- def test_it(self):
- class c(dict):
- __contains__ = self.func
- d = c({"1":2})
- self.assertIn("1", d)
- self.assertNotIn(1, d)
-
-
-class Test_CPY_contains(Test_native_contains):
- func = staticmethod(klass.contains)
-
- if klass.contains is klass.native_contains:
- skip = "cpython extension isn't available"
-
-
-class Test_native_get(TestCase):
- func = staticmethod(klass.native_get)
-
- def test_it(self):
- class c(dict):
- get = self.func
- d = c({"1":2})
- self.assertEqual(d.get("1"), 2)
- self.assertEqual(d.get("1", 3), 2)
- self.assertEqual(d.get(1), None)
- self.assertEqual(d.get(1, 3), 3)
-
-class Test_CPY_get(Test_native_get):
- func = staticmethod(klass.get)
-
- if klass.get is klass.native_get:
- skip = "cpython extension isn't available"
-
-class Test_native_generic_equality(TestCase):
- op_prefix = "native_"
-
- kls = currying.partial(klass.generic_equality,
- ne=klass.native_generic_ne, eq=klass.native_generic_eq)
-
- def test_it(self):
- class c(object):
- __attr_comparison__ = ("foo", "bar")
- __metaclass__ = self.kls
- def __init__(self, foo, bar):
- self.foo, self.bar = foo, bar
-
- def __repr__(self):
- return "<c: foo=%r, bar=%r, %i>" % (
- getattr(self, 'foo', 'unset'),
- getattr(self, 'bar', 'unset'),
- id(self))
-
- self.assertEqual(c(1, 2), c(1, 2))
- c1 = c(1, 3)
- self.assertEqual(c1, c1)
- del c1
- self.assertNotEqual(c(2,1), c(1,2))
- c1 = c(1, 2)
- del c1.foo
- c2 = c(1, 2)
- self.assertNotEqual(c1, c2)
- del c2.foo
- self.assertEqual(c1, c2)
-
- def test_call(self):
- def mk_class(meta):
- class c(object):
- __metaclass__ = meta
- return c
- self.assertRaises(TypeError, mk_class)
-
-
-class Test_cpy_generic_equality(Test_native_generic_equality):
- op_prefix = ''
- if klass.native_generic_eq is klass.generic_eq:
- skip = "extension not available"
-
- kls = staticmethod(klass.generic_equality)
-
-
-class Test_chained_getter(TestCase):
-
- kls = klass.chained_getter
-
- def test_hash(self):
- self.assertEqual(hash(self.kls("foon")), hash("foon"))
- self.assertEqual(hash(self.kls("foon.dar")), hash("foon.dar"))
-
- def test_caching(self):
- l = []
- for x in "abcdefghij":
- l.append(id(self.kls("fa2341f%s" % x)))
- self.assertEqual(id(self.kls("fa2341fa")), l[0])
-
- def test_eq(self):
- self.assertEqual(self.kls("asdf", disable_inst_caching=True),
- self.kls("asdf", disable_inst_caching=True))
-
- self.assertNotEqual(self.kls("asdf2", disable_inst_caching=True),
- self.kls("asdf", disable_inst_caching=True))
-
- def test_it(self):
- class maze(object):
- def __init__(self, kwargs):
- self.__data__ = kwargs
-
- def __getattr__(self, attr):
- return self.__data__.get(attr, self)
-
- d = {}
- m = maze(d)
- f = self.kls
- self.assertEqual(f('foon')(m), m)
- d["foon"] = 1
- self.assertEqual(f('foon')(m), 1)
- self.assertEqual(f('dar.foon')(m), 1)
- self.assertEqual(f('.'.join(['blah']*10))(m), m)
- self.assertRaises(AttributeError, f('foon.dar'), m)
diff --git a/pkgcore/test/util/test_lists.py b/pkgcore/test/util/test_lists.py
deleted file mode 100644
index 19a9915a2..000000000
--- a/pkgcore/test/util/test_lists.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright: 2005 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-from pkgcore.test import TestCase
-from pkgcore.util import lists
-from pkgcore.util.mappings import OrderedDict
-
-class UnhashableComplex(complex):
-
- def __hash__(self):
- raise TypeError
-
-
-class UniqueTest(TestCase):
-
- def common_check(self, func):
- # silly
- self.assertEqual(func(()), [])
- # hashable
- self.assertEqual(sorted(func([1, 1, 2, 3, 2])), [1, 2, 3])
- # neither
-
- def test_stable_unique(self):
- self.common_check(lists.stable_unique)
-
- def test_unstable_unique(self):
- self.common_check(lists.unstable_unique)
- uc = UnhashableComplex
- res = lists.unstable_unique([uc(1, 0), uc(0, 1), uc(1, 0)])
- # sortable
- self.assertEqual(sorted(lists.unstable_unique(
- [[1, 2], [1, 3], [1, 2], [1, 3]])), [[1, 2], [1, 3]])
- self.failUnless(
- res == [uc(1, 0), uc(0, 1)] or res == [uc(0, 1), uc(1, 0)], res)
-
-
-class ChainedListsTest(TestCase):
-
- @staticmethod
- def gen_cl():
- return lists.ChainedLists(range(3), range(3, 6), range(6, 100))
-
- def test_contains(self):
- cl = self.gen_cl()
- for x in (1, 2, 4, 99):
- self.assertTrue(x in cl)
-
- def test_iter(self):
- self.assertEqual(list(self.gen_cl()), list(xrange(100)))
-
- def test_len(self):
- self.assertEqual(100, len(self.gen_cl()))
-
- def test_getitem(self):
- cl = self.gen_cl()
- for x in (1, 2, 4, 98, -1, -99, 0):
- # "Statement seems to have no effect"
- # pylint: disable-msg=W0104
- cl[x]
- self.assertRaises(IndexError, cl.__getitem__, 100)
- self.assertRaises(IndexError, cl.__getitem__, -101)
-
- def test_mutable(self):
- self.assertRaises(TypeError, self.gen_cl().__delitem__, 1)
- self.assertRaises(TypeError, self.gen_cl().__setitem__, (1, 2))
-
- def test_append(self):
- cl = self.gen_cl()
- cl.append(range(10))
- self.assertEqual(110, len(cl))
-
- def test_extend(self):
- cl = self.gen_cl()
- cl.extend(range(10) for i in range(5))
- self.assertEqual(150, len(cl))
-
-
-class Test_iflatten_instance(TestCase):
- func = staticmethod(lists.native_iflatten_instance)
-
- def test_it(self):
- o = OrderedDict((k, None) for k in xrange(10))
- for l, correct, skip in [
- (["asdf", ["asdf", "asdf"], 1, None],
- ["asdf", "asdf", "asdf", 1, None], basestring),
- ([o, 1, "fds"], [o, 1, "fds"], (basestring, OrderedDict)),
- ([o, 1, "fds"], range(10) + [1, "fds"], basestring),
- ("fds", ["fds"], basestring),
- ]:
- iterator = self.func(l, skip)
- self.assertEqual(list(iterator), correct)
- self.assertEqual([], list(iterator))
- # There is a small difference between the cpython and native
- # version: the cpython one raises immediately, for native we
- # have to iterate.
- def fail():
- return list(self.func(None))
- self.assertRaises(TypeError, fail)
-
- # Yes, no sane code does this, but even insane code shouldn't
- # kill the cpython version.
- iters = []
- iterator = self.func(iters)
- iters.append(iterator)
- self.assertRaises(ValueError, iterator.next)
-
- # Regression test: this was triggered through demandload.
- self.failUnless(self.func((), **{}))
-
-
-class Test_iflatten_func(TestCase):
- func = staticmethod(lists.native_iflatten_func)
-
- def test_it(self):
- o = OrderedDict((k, None) for k in xrange(10))
- for l, correct, skip in [
- (["asdf", ["asdf", "asdf"], 1, None],
- ["asdf", "asdf", "asdf", 1, None], basestring),
- ([o, 1, "fds"], [o, 1, "fds"], (basestring, OrderedDict)),
- ([o, 1, "fds"], range(10) + [1, "fds"], basestring),
- ("fds", ["fds"], basestring),
- ]:
- iterator = self.func(l, lambda x:isinstance(x, skip))
- self.assertEqual(list(iterator), correct)
- self.assertEqual(list(iterator), [])
- # There is a small difference between the cpython and native
- # version: the cpython one raises immediately, for native we
- # have to iterate.
- def fail():
- return list(self.func(None, lambda x: False))
- self.assertRaises(TypeError, fail)
-
- # Yes, no sane code does this, but even insane code shouldn't
- # kill the cpython version.
- iters = []
- iterator = self.func(iters, lambda x: False)
- iters.append(iterator)
- self.assertRaises(ValueError, iterator.next)
-
- # Regression test: this was triggered through demandload.
- self.failUnless(self.func((), lambda x: True, **{}))
-
-
-class CPY_Test_iflatten_instance(Test_iflatten_instance):
- func = staticmethod(lists.iflatten_instance)
- if not lists.cpy_builtin:
- skip = "cpython extension isn't available"
-
-class CPY_Test_iflatten_func(Test_iflatten_func):
- func = staticmethod(lists.iflatten_func)
- if not lists.cpy_builtin:
- skip = "cpython extension isn't available"
diff --git a/pkgcore/test/util/test_mappings.py b/pkgcore/test/util/test_mappings.py
deleted file mode 100644
index d56f97a70..000000000
--- a/pkgcore/test/util/test_mappings.py
+++ /dev/null
@@ -1,484 +0,0 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
-# Copyright: 2005-2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-import operator
-
-from pkgcore.test import TestCase
-from pkgcore.util import mappings, currying
-from itertools import chain
-
-
-def a_dozen():
- return range(12)
-
-
-class RememberingNegateMixin(object):
-
- def setUp(self):
- self.negate_calls = []
- def negate(i):
- self.negate_calls.append(i)
- return -i
- self.negate = negate
-
- def tearDown(self):
- del self.negate
- del self.negate_calls
-
-
-class TestDictMixin(TestCase):
-
- kls = mappings.DictMixin
- class ro_dict(kls):
- __getitem__ = lambda s, k:s.__dict__[k]
- iterkeys = lambda s:s.__dict__.iterkeys()
-
- class wr_dict(ro_dict):
- __setitem__ = lambda s, k, v: s.__dict__.__setitem__(k, v)
- __delitem__ = lambda s, k: s.__dict__.__delitem__(k)
-
- class test_dict(ro_dict):
- def __init__(self, initial=[]):
- self.__dict__.update(initial)
-
- def test_init(self):
- # shouldn't write any keys.
- self.kls()
- self.assertRaises(NotImplementedError, self.ro_dict, ((1,2), (3,4)))
- self.assertEqual(self.wr_dict(((1,2), (3,4))).__dict__,
- {1:2, 3:4})
- self.assertEqual(self.wr_dict({1:2, 3:4}.iteritems()).__dict__,
- {1:2, 3:4})
-
- def test_iter(self, method='__iter__', values=range(100)):
- d = self.test_dict({}.fromkeys(xrange(100)).iteritems())
- i = getattr(d, method)()
- if 'iter' not in method:
- self.assertInstance(i, (list, tuple))
- self.assertEqual(list(i), list(values))
-
- test_iterkeys = currying.post_curry(test_iter, method='iterkeys')
- test_iterkeys = currying.post_curry(test_iter, method='keys')
-
- test_itervalues = currying.post_curry(test_iter, method='itervalues',
- values=[None]*100)
-
- test_values = currying.post_curry(test_iter, method='values',
- values=[None]*100)
-
- test_iteritems = currying.post_curry(test_iter, method='iteritems',
- values={}.fromkeys(xrange(100)).items())
-
- test_items = currying.post_curry(test_iter, method='items',
- values={}.fromkeys(xrange(100)).items())
-
- def test_update(self):
- d = self.wr_dict({}.fromkeys(xrange(100)).iteritems())
- self.assertEqual(list(d.iteritems()), [(x, None) for x in xrange(100)])
- d.update((x, x) for x in xrange(100))
- self.assertEqual(list(d.iteritems()), [(x, x) for x in xrange(100)])
-
- def test_get(self):
- d = self.wr_dict([(1,2)])
- self.assertEqual(d.get(1), 2)
- self.assertEqual(d.get(1, None), 2)
- self.assertEqual(d.get(2), None)
- self.assertEqual(d.get(2, 3), 3)
-
- def test_contains(self):
- # ensure the default 'in' op is a key pull.
- l, state = [], False
- class tracker_dict(self.wr_dict):
- def __getitem__(self, key):
- l.append(key)
- if state:
- return True
- raise KeyError
-
- d = tracker_dict()
- self.assertNotIn(1, d)
- self.assertFalse(d.has_key(1))
- self.assertEqual(l, [1, 1])
- state = True
- l[:] = []
- self.assertIn(2, d)
- self.assertTrue(d.has_key(3))
- self.assertEqual(l, [2, 3])
-
- def test_cmp(self):
- self.assertEqual(self.test_dict(), self.test_dict())
- d1 = self.wr_dict({})
- d2 = self.test_dict({1:2}.iteritems())
- self.assertTrue(d1 < d2)
- self.assertNotEqual(d1, d2)
- d1[1] = 2
- self.assertEqual(d1, d2)
- d1[1] = 3
- self.assertNotEqual(d1, d2)
- del d1[1]
- d1[0] = 2
- self.assertNotEqual(d1, d2)
-
- def test_pop(self):
- class c(self.ro_dict): __externally_mutable__ = False
- self.assertRaises(AttributeError, c().pop, 1)
- d = self.wr_dict()
- self.assertRaises(KeyError, d.pop, 1)
- self.assertEqual(d.pop(1, 2), 2)
- d[1] = 2
- # ensure it gets pissy about too many args.
- self.assertRaises(TypeError, d.pop, 1, 2, 3)
- self.assertEqual(len(d), 1)
- self.assertEqual(d.pop(1), 2)
- self.assertEqual(len(d), 0)
-
- def test_popitem(self):
- # hate this method.
- d = self.wr_dict()
- self.assertRaises(KeyError, d.popitem)
- self.assertRaises(TypeError, d.popitem, 1)
- d.update(((0,1), (1,2), (2,3)))
- self.assertLen(d, 3)
- got = d.popitem()
- self.assertNotIn(got[0], d)
- self.assertEqual(got[1], got[0] + 1)
- self.assertLen(d, 2)
- self.assertEqual(d, dict((x, x + 1) for x in xrange(3) if x != got[0]))
-
- def test_setdefault(self):
- d = self.wr_dict()
- self.assertEqual(d.setdefault(1, 2), 2)
- self.assertEqual(d.setdefault(1, 3), 2)
-
- def test_clear(self):
- d = self.wr_dict({}.fromkeys(xrange(100)).iteritems())
- self.assertEqual(d, {}.fromkeys(xrange(100)))
- self.assertEqual(d.clear(), None)
- self.assertEqual(d, {})
- d[1] = 2
- self.assertEqual(d, {1:2})
-
- def test_len(self):
- self.assertLen(self.ro_dict(), 0)
- d = self.wr_dict({}.fromkeys(xrange(100)).iteritems())
- self.assertLen(d, 100)
- del d[99]
- self.assertLen(d, 99)
-
-
-class LazyValDictTestMixin(object):
-
- def test_invalid_operations(self):
- self.assertRaises(AttributeError, operator.setitem, self.dict, 7, 7)
- self.assertRaises(AttributeError, operator.delitem, self.dict, 7)
-
- def test_contains(self):
- self.failUnless(7 in self.dict)
- self.failIf(12 in self.dict)
-
- def test_keys(self):
- # Called twice because the first call will trigger a keyfunc call.
- self.failUnlessEqual(sorted(self.dict.keys()), list(xrange(12)))
- self.failUnlessEqual(sorted(self.dict.keys()), list(xrange(12)))
-
- def test_iterkeys(self):
- # Called twice because the first call will trigger a keyfunc call.
- self.failUnlessEqual(sorted(self.dict.iterkeys()), list(xrange(12)))
- self.failUnlessEqual(sorted(self.dict.iterkeys()), list(xrange(12)))
-
- def test_iteritems(self):
- i = iter(xrange(12))
- for idx, kv in enumerate(self.dict.iteritems()):
- self.assertEqual(kv, (idx, -idx))
-
- def test_len(self):
- # Called twice because the first call will trigger a keyfunc call.
- self.assertEqual(12, len(self.dict))
- self.assertEqual(12, len(self.dict))
-
- def test_getkey(self):
- self.assertEqual(self.dict[3], -3)
- # missing key
- def get():
- return self.dict[42]
- self.assertRaises(KeyError, get)
-
- def test_caching(self):
- # "Statement seems to have no effect"
- # pylint: disable-msg=W0104
- self.dict[11]
- self.dict[11]
- self.assertEqual(self.negate_calls, [11])
-
-
-class LazyValDictWithListTest(
- TestCase, LazyValDictTestMixin, RememberingNegateMixin):
-
- def setUp(self):
- RememberingNegateMixin.setUp(self)
- self.dict = mappings.LazyValDict(range(12), self.negate)
-
- def tearDown(self):
- RememberingNegateMixin.tearDown(self)
-
- def test_itervalues(self):
- self.assertEqual(sorted(self.dict.itervalues()), range(-11, 1))
-
- def test_len(self):
- self.assertEqual(len(self.dict), 12)
-
- def test_iter(self):
- self.assertEqual(list(self.dict), range(12))
-
- def test_contains(self):
- self.assertIn(1, self.dict)
-
- def test_has_key(self):
- self.assertEqual(True, self.dict.has_key(1))
-
-class LazyValDictWithFuncTest(
- TestCase, LazyValDictTestMixin, RememberingNegateMixin):
-
- def setUp(self):
- RememberingNegateMixin.setUp(self)
- self.dict = mappings.LazyValDict(a_dozen, self.negate)
-
- def tearDown(self):
- RememberingNegateMixin.tearDown(self)
-
-
-class LazyValDictTest(TestCase):
-
- def test_invalid_init_args(self):
- self.assertRaises(TypeError, mappings.LazyValDict, [1], 42)
- self.assertRaises(TypeError, mappings.LazyValDict, 42, a_dozen)
-
-
-# TODO check for valid values for dict.new, since that seems to be
-# part of the interface?
-class ProtectedDictTest(TestCase):
-
- def setUp(self):
- self.orig = {1: -1, 2: -2}
- self.dict = mappings.ProtectedDict(self.orig)
-
- def test_basic_operations(self):
- self.assertEqual(self.dict[1], -1)
- def get(i):
- return self.dict[i]
- self.assertRaises(KeyError, get, 3)
- self.assertEqual(sorted(self.dict.keys()), [1, 2])
- self.failIf(-1 in self.dict)
- self.failUnless(2 in self.dict)
- def remove(i):
- del self.dict[i]
- self.assertRaises(KeyError, remove, 50)
-
- def test_basic_mutating(self):
- # add something
- self.dict[7] = -7
- def check_after_adding():
- self.assertEqual(self.dict[7], -7)
- self.failUnless(7 in self.dict)
- self.assertEqual(sorted(self.dict.keys()), [1, 2, 7])
- check_after_adding()
- # remove it again
- del self.dict[7]
- self.failIf(7 in self.dict)
- def get(i):
- return self.dict[i]
- self.assertRaises(KeyError, get, 7)
- self.assertEqual(sorted(self.dict.keys()), [1, 2])
- # add it back
- self.dict[7] = -7
- check_after_adding()
- # remove something not previously added
- del self.dict[1]
- self.failIf(1 in self.dict)
- self.assertRaises(KeyError, get, 1)
- self.assertEqual(sorted(self.dict.keys()), [2, 7])
- # and add it back
- self.dict[1] = -1
- check_after_adding()
- # Change an existing value, then remove it:
- self.dict[1] = 33
- del self.dict[1]
- self.assertNotIn(1, self.dict)
-
-
-class ImmutableDictTest(TestCase):
-
- def setUp(self):
- self.dict = mappings.ImmutableDict(**{1: -1, 2: -2})
-
- def test_invalid_operations(self):
- initial_hash = hash(self.dict)
- self.assertRaises(TypeError, operator.delitem, self.dict, 1)
- self.assertRaises(TypeError, operator.delitem, self.dict, 7)
- self.assertRaises(TypeError, operator.setitem, self.dict, 1, -1)
- self.assertRaises(TypeError, operator.setitem, self.dict, 7, -7)
- self.assertRaises(TypeError, self.dict.clear)
- self.assertRaises(TypeError, self.dict.update, {6: -6})
- self.assertRaises(TypeError, self.dict.pop, 1)
- self.assertRaises(TypeError, self.dict.popitem)
- self.assertRaises(TypeError, self.dict.setdefault, 6, -6)
- self.assertEqual(initial_hash, hash(self.dict))
-
-
-class StackedDictTest(TestCase):
-
- orig_dict = dict.fromkeys(xrange(100))
- new_dict = dict.fromkeys(xrange(100, 200))
-
- def test_contains(self):
- std = mappings.StackedDict(self.orig_dict, self.new_dict)
- self.failUnless(1 in std)
- self.failUnless(std.has_key(1))
-
- def test_stacking(self):
- o = dict(self.orig_dict)
- std = mappings.StackedDict(o, self.new_dict)
- for x in chain(*map(iter, (self.orig_dict, self.new_dict))):
- self.failUnless(x in std)
-
- map(o.__delitem__, iter(self.orig_dict))
- for x in self.orig_dict:
- self.failIf(x in std)
- for x in self.new_dict:
- self.failUnless(x in std)
-
- def test_len(self):
- self.assertEqual(sum(map(len, (self.orig_dict, self.new_dict))),
- len(mappings.StackedDict(self.orig_dict, self.new_dict)))
-
- def test_setattr(self):
- self.assertRaises(TypeError, mappings.StackedDict().__setitem__, (1, 2))
-
- def test_delattr(self):
- self.assertRaises(TypeError, mappings.StackedDict().__delitem__, (1, 2))
-
- def test_clear(self):
- self.assertRaises(TypeError, mappings.StackedDict().clear)
-
- def test_iter(self):
- s = set()
- map(s.add, chain(iter(self.orig_dict), iter(self.new_dict)))
- for x in mappings.StackedDict(self.orig_dict, self.new_dict):
- self.failUnless(x in s)
- s.remove(x)
- self.assertEqual(len(s), 0)
-
- def test_keys(self):
- self.assertEqual(
- sorted(mappings.StackedDict(self.orig_dict, self.new_dict)),
- sorted(self.orig_dict.keys() + self.new_dict.keys()))
-
- def test_getitem(self):
- o = mappings.StackedDict({1:1}, {1:1, 2:2}, {1:3, 3:3})
- self.assertEqual(o[1], 1)
- self.assertEqual(o[2], 2)
- self.assertEqual(o[3], 3)
- self.assertRaises(KeyError, o.__getitem__, 4)
-
-
-class IndeterminantDictTest(TestCase):
-
- def test_disabled_methods(self):
- d = mappings.IndeterminantDict(lambda *a: None)
- for x in ("clear", ("update", {}), ("setdefault", 1),
- "__iter__", "__len__", "__hash__", ("__delitem__", 1),
- ("__setitem__", 2), ("popitem", 2), "iteritems", "iterkeys",
- "keys", "items", "itervalues", "values"):
- if isinstance(x, tuple):
- self.assertRaises(TypeError, getattr(d, x[0]), x[1])
- else:
- self.assertRaises(TypeError, getattr(d, x))
-
- def test_starter_dict(self):
- d = mappings.IndeterminantDict(
- lambda key: False, starter_dict={}.fromkeys(xrange(100), True))
- for x in xrange(100):
- self.assertEqual(d[x], True)
- for x in xrange(100, 110):
- self.assertEqual(d[x], False)
-
- def test_behaviour(self):
- val = []
- d = mappings.IndeterminantDict(
- lambda key: val.append(key), {}.fromkeys(xrange(10), True))
- self.assertEqual(d[0], True)
- self.assertEqual(d[11], None)
- self.assertEqual(val, [11])
- def func(*a):
- raise KeyError
- self.assertRaises(
- KeyError, mappings.IndeterminantDict(func).__getitem__, 1)
- self.assertEqual(mappings.IndeterminantDict(func).pop(100, 1), 1)
- self.assertEqual(mappings.IndeterminantDict(func).pop(100), None)
-
- d.pop(1)
- self.assertEqual(d[1], True)
-
- def test_get(self):
- def func(key):
- if key == 2:
- raise KeyError
- return True
- d = mappings.IndeterminantDict(func, {1:1})
- self.assertEqual(d.get(1, 1), 1)
- self.assertEqual(d.get(1, 2), 1)
- self.assertEqual(d.get(2), None)
- self.assertEqual(d.get(2, 2), 2)
- self.assertEqual(d.get(3), True)
-
-
-class TestOrderedDict(TestCase):
-
- @staticmethod
- def gen_dict():
- return mappings.OrderedDict(enumerate(xrange(100)))
-
- def test_items(self):
- self.assertEqual(list(self.gen_dict().iteritems()),
- list(enumerate(xrange(100))))
- self.assertEqual(self.gen_dict().items(),
- list(enumerate(xrange(100))))
-
- def test_values(self):
- self.assertEqual(list(self.gen_dict().itervalues()),
- list(xrange(100)))
- l = ["asdf", "fdsa", "Dawefa", "3419", "pas", "1"]
- l = [s+"12" for s in l] + l
- l = ["1231adsfasdfagqwer"+s for s in l] + l
- self.assertEqual(
- list(mappings.OrderedDict(
- (v, k) for k, v in enumerate(l)).itervalues()),
- list(xrange(len(l))))
-
- def test_keys(self):
- self.assertEqual(list(self.gen_dict().iterkeys()), list(xrange(100)))
- self.assertEqual(self.gen_dict().keys(), list(xrange(100)))
-
- def test_iter(self):
- self.assertEqual(list(self.gen_dict()), list(xrange(100)))
- l = ["asdf", "fdsa", "Dawefa", "3419", "pas", "1"]
- l = [s+"12" for s in l] + l
- l = ["1231adsfasdfagqwer"+s for s in l] + l
- self.assertEqual(list(mappings.OrderedDict((x, None) for x in l)), l)
-
- def test_del(self):
- d = self.gen_dict()
- del d[50]
- self.assertEqual(list(d), list(range(50) + range(51, 100)))
- self.assertRaises(KeyError, operator.delitem, d, 50)
- self.assertRaises(KeyError, operator.delitem, d, 'spork')
-
- def test_set(self):
- d = self.gen_dict()
- d.setdefault(120)
- d.setdefault(110)
- self.assertEqual(list(d), list(range(100)) + [120, 110])
-
- def test_clear(self):
- self.gen_dict().clear()
diff --git a/pkgcore/test/util/test_modules.py b/pkgcore/test/util/test_modules.py
deleted file mode 100644
index 94dbd5ad1..000000000
--- a/pkgcore/test/util/test_modules.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# Copyright: 2005 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-from pkgcore.test import TestCase
-
-import os
-import sys
-import shutil
-import tempfile
-
-from pkgcore.util import modules
-
-
-class ModulesTest(TestCase):
-
- def setUp(self):
- # set up some test modules for our use
- self.dir = tempfile.mkdtemp()
- packdir = os.path.join(self.dir, 'mod_testpack')
- os.mkdir(packdir)
- # create an empty file
- open(os.path.join(packdir, '__init__.py'), 'w').close()
- for directory in [self.dir, packdir]:
- for i in range(3):
- testmod = open(
- os.path.join(directory, 'mod_test%s.py' % i), 'w')
- testmod.write('def foo(): pass\n')
- testmod.close()
- horkedmod = open(os.path.join(directory, 'mod_horked.py'), 'w')
- horkedmod.write('1/0\n')
- horkedmod.close()
-
- # append them to path
- sys.path.insert(0, self.dir)
-
- def tearDown(self):
- # pop the test module dir from path
- sys.path.pop(0)
- # and kill it
- shutil.rmtree(self.dir)
- # make sure we don't keep the sys.modules entries around
- for i in range(3):
- sys.modules.pop('mod_test%s' % i, None)
- sys.modules.pop('mod_testpack.mod_test%s' % i, None)
- sys.modules.pop('mod_testpack', None)
- sys.modules.pop('mod_horked', None)
- sys.modules.pop('mod_testpack.mod_horked', None)
-
- def test_load_module(self):
- # import an already-imported module
- self.assertIdentical(
- modules.load_module('pkgcore.util.modules'), modules)
- # and a system one, just for kicks
- self.assertIdentical(modules.load_module('sys'), sys)
- # non-existing module from an existing package
- self.assertRaises(
- modules.FailedImport, modules.load_module, 'pkgcore.__not_there')
- # (hopefully :) non-existing top-level module/package
- self.assertRaises(
- modules.FailedImport, modules.load_module, '__not_there')
-
- # "Unable to import"
- # pylint: disable-msg=F0401
-
- # unimported toplevel module
- modtest1 = modules.load_module('mod_test1')
- import mod_test1
- self.assertIdentical(mod_test1, modtest1)
- # unimported in-package module
- packtest2 = modules.load_module('mod_testpack.mod_test2')
- from mod_testpack import mod_test2
- self.assertIdentical(mod_test2, packtest2)
-
- def test_load_attribute(self):
- # already imported
- self.assertIdentical(modules.load_attribute('sys.path'), sys.path)
- # unimported
- myfoo = modules.load_attribute('mod_testpack.mod_test2.foo')
-
- # "Unable to import"
- # pylint: disable-msg=F0401
-
- from mod_testpack.mod_test2 import foo
- self.assertIdentical(foo, myfoo)
- # nonexisting attribute
- self.assertRaises(
- modules.FailedImport,
- modules.load_attribute, 'pkgcore.froznicator')
- # nonexisting top-level
- self.assertRaises(
- modules.FailedImport, modules.load_attribute,
- 'spork_does_not_exist.foo')
- # not an attr
- self.assertRaises(
- modules.FailedImport, modules.load_attribute, 'sys')
- # not imported yet
- self.assertRaises(
- modules.FailedImport,
- modules.load_attribute, 'mod_testpack.mod_test3')
-
- def test_broken_module(self):
- self.assertRaises(
- modules.FailedImport,
- modules.load_module, 'mod_testpack.mod_horked')
- self.failIf('mod_testpack.mod_horked' in sys.modules)
diff --git a/pkgcore/test/util/test_obj.py b/pkgcore/test/util/test_obj.py
deleted file mode 100644
index b3be994a2..000000000
--- a/pkgcore/test/util/test_obj.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-
-import operator
-from random import random
-
-from pkgcore.test import TestCase
-from pkgcore.util import obj
-
-# sorry, but the name is good, just too long for these tests
-make_DI = obj.DelayedInstantiation
-make_DIkls = obj.DelayedInstantiation_kls
-
-class TestDelayedInstantiation(TestCase):
-
- def test_simple(self):
- t = (1, 2, 3)
- o = make_DI(tuple, lambda:t)
- objs = [o, t]
- self.assertEqual(*map(str, objs))
- self.assertEqual(*map(repr, objs))
- self.assertEqual(*map(hash, objs))
- self.assertEqual(*objs)
- self.assertTrue(cmp(t, o) == 0)
- self.assertFalse(t < o)
- self.assertTrue(t <= o)
- self.assertTrue(t == o)
- self.assertTrue(t >= o)
- self.assertFalse(t > o)
- self.assertFalse(t != o)
-
- # test pass through; __doc__ is useful anyways, and
- # always available on tuple due to it being a builtin
- self.assertIdentical(t.__doc__, o.__doc__)
-
- def test_nonspecial(self):
- class foo(object):
- pass
- f = make_DI(foo, lambda:None)
- # it lies about it's class. thus bypass it's web of lies...
- self.assertIdentical(object.__getattribute__(f, '__class__'),
- obj.BaseDelayedObject)
-
- def test_DelayedInstantiation_kls(self):
- t = (1, 2, 3)
- self.assertEqual(make_DIkls(tuple, [1,2,3]), t)
-
- def test_descriptor_awareness(self):
- o = set(obj.kls_descriptors.difference(dir(object)))
- o.difference_update(dir(1))
- o.difference_update(dir('s'))
- o.difference_update(dir(list))
- o.difference_update(dir({}))
-
- def test_BaseDelayedObject(self):
- # assert that all methods/descriptors of object
- # are covered via the base.
- o = set(dir(object)).difference("__%s__" % x for x in
- ["class", "getattribute", "new", "init"])
- self.assertFalse(o.difference(obj.base_kls_descriptors))
-
- def test__class__(self):
- l = []
- def f():
- l.append(False)
- return True
- o = make_DI(bool, f)
- # note, this *must* be isinstance, not assertInstance.
- # assertInstance triggers a repr on it, thus triggering expansion.
- # we're specifically testing that it doesn't instantiate just for
- # class.
- self.assertTrue(isinstance(o, bool))
- self.assertFalse(l)
-
-
-class SlottedDictTest(TestCase):
-
- kls = staticmethod(obj.make_SlottedDict_kls)
-
- def test_reuse(self):
- # intentionally randomizing this a bit.
- a_ord = ord('a')
- z_ord = ord('z')
- l = []
- for x in xrange(10):
- s = ''
- for c in xrange(10):
- s += chr(a_ord + int(random() * (z_ord - a_ord)))
- l.append(s)
- d = self.kls(l)
- self.assertEqual(tuple(sorted(l)), d.__slots__)
- # check sorting.
- d2 = self.kls(reversed(l))
- self.assertIdentical(d, d2)
-
- def test_dict_basics(self):
- d = self.kls(['spork'])()
- for op in (operator.getitem, operator.delitem):
- self.assertRaises(KeyError, op, d, 'spork')
- self.assertRaises(KeyError, op, d, 'foon')
-
- d = self.kls(['spork', 'foon'])((('spork', 1),))
- self.assertLen(d, 1)
- self.assertEqual(d.get('spork'), 1)
- self.assertIn('spork', d)
- del d['spork']
- self.assertEqual(d.get('spork'), None)
- self.assertEqual(d.get('spork', 3), 3)
-
- d['spork'] = 2
- self.assertLen(d, 1)
- self.assertEqual(d.get('spork'), 2)
- self.assertEqual(d.pop('spork'), 2)
- self.assertRaises(KeyError, d.pop, 'spork')
- # check pop complains about too many args.
- self.assertRaises(TypeError, d.pop, 'spork', 'foon', 'dar')
- self.assertEqual(d.pop('spork', 2), 2)
-
- self.assertLen(d, 0)
- self.assertRaises(KeyError, d.__getitem__, 'spork')
- self.assertLen(d, 0)
- self.assertNotIn('spork', d)
- d['foon'] = 2
- self.assertIn('foon', d)
- d['spork'] = 1
- self.assertIn('spork', d)
- self.assertLen(d, 2)
- self.assertEqual(sorted(d), ['foon', 'spork'])
- self.assertEqual(sorted(d.itervalues()), [1,2])
- self.assertEqual(sorted(d.iterkeys()), ['foon', 'spork'])
- self.assertEqual(sorted(d.keys()), sorted(d.iterkeys()),
- reflective=False)
- self.assertEqual(sorted(d.values()), sorted(d.itervalues()),
- reflective=False)
- d.clear()
- self.assertLen(d, 0)
-
diff --git a/pkgcore/test/util/test_osutils.py b/pkgcore/test/util/test_osutils.py
deleted file mode 100644
index aff61db49..000000000
--- a/pkgcore/test/util/test_osutils.py
+++ /dev/null
@@ -1,404 +0,0 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-import os, grp, stat, fcntl
-from itertools import izip
-
-from pkgcore.test import TestCase, SkipTest
-from pkgcore.util import osutils
-from pkgcore.util.osutils import native_readdir
-from pkgcore.test.mixins import TempDirMixin
-
-pjoin = osutils.pjoin
-
-class Native_listdir_Test(TempDirMixin):
-
- module = native_readdir
-
- def setUp(self):
- TempDirMixin.setUp(self)
- self.subdir = pjoin(self.dir, 'dir')
- os.mkdir(self.subdir)
- f = open(pjoin(self.dir, 'file'), 'w')
- f.close()
- os.mkfifo(pjoin(self.dir, 'fifo'))
- self.sym = pjoin(self.dir, 'sym')
- os.symlink(self.subdir, self.sym)
- os.symlink(pjoin(self.dir, 'non-existant'),
- pjoin(self.dir, 'broke-sym'))
-
- def test_listdir(self):
- self.assertEqual(['broke-sym', 'dir', 'fifo', 'file', 'sym'],
- sorted(self.module.listdir(self.dir)))
- self.assertEqual([], self.module.listdir(self.subdir))
- self.assertEqual([], self.module.listdir(self.sym))
-
- def test_listdir_dirs(self):
- self.assertEqual(['dir', 'sym'],
- sorted(self.module.listdir_dirs(self.dir)))
- self.assertEqual(['dir', 'sym'],
- sorted(self.module.listdir_dirs(self.dir, True)))
- self.assertEqual([], self.module.listdir_dirs(self.subdir))
- self.assertEqual([], self.module.listdir_dirs(self.sym))
- self.assertEqual(['dir'],
- sorted(self.module.listdir_dirs(self.dir, False)))
-
- def test_listdir_files(self):
- self.assertEqual(['file'], self.module.listdir_files(self.dir))
- self.assertEqual(['file'],
- self.module.listdir_files(self.dir, True))
- self.assertEqual(['file'],
- sorted(self.module.listdir_files(self.dir, False)))
- self.assertEqual([], self.module.listdir_dirs(self.subdir))
- self.assertEqual([], self.module.listdir_dirs(self.sym))
- os.unlink(self.sym)
- os.symlink(pjoin(self.dir, 'file'), self.sym)
- self.assertEqual(['file', 'sym'],
- sorted(self.module.listdir_files(self.dir)))
- self.assertEqual(['file', 'sym'],
- sorted(self.module.listdir_files(self.dir, True)))
- self.assertEqual(['file'],
- sorted(self.module.listdir_files(self.dir, False)))
-
- def test_missing(self):
- for func in (
- self.module.listdir,
- self.module.listdir_dirs,
- self.module.listdir_files,
- ):
- self.assertRaises(OSError, func, pjoin(self.dir, 'spork'))
-
- def test_dangling_sym(self):
- os.symlink("foon", pjoin(self.dir, "monkeys"))
- self.assertEqual(["file"], self.module.listdir_files(self.dir))
-
-try:
- # No name "readdir" in module osutils
- # pylint: disable-msg=E0611
- from pkgcore.util.osutils import _readdir
-except ImportError:
- _readdir = None
-
-class cpy_listdir_Test(Native_listdir_Test):
- module = _readdir
- if _readdir is None:
- skip = "cpython extension isn't available"
-
-class EnsureDirsTest(TempDirMixin, TestCase):
-
- default_mode = 0755
-
- def check_dir(self, path, uid, gid, mode):
- self.failUnless(os.path.isdir(path))
- st = os.stat(path)
- self.failUnlessEqual(stat.S_IMODE(st.st_mode), mode,
- '0%o != 0%o' % (stat.S_IMODE(st.st_mode), mode))
- self.failUnlessEqual(st.st_uid, uid)
- self.failUnlessEqual(st.st_gid, gid)
-
-
- def test_ensure_dirs(self):
- # default settings
- path = pjoin(self.dir, 'foo', 'bar')
- self.failUnless(osutils.ensure_dirs(path))
- self.check_dir(path, os.geteuid(), os.getegid(), self.default_mode)
-
- def test_minimal_nonmodifying(self):
- path = pjoin(self.dir, 'foo', 'bar')
- self.failUnless(osutils.ensure_dirs(path, mode=0775))
- self.assertEqual(os.stat(path).st_mode & 07777, 0775)
- self.failUnless(osutils.ensure_dirs(path, mode=0755, minimal=True))
- self.check_dir(path, os.geteuid(), os.getegid(), 0775)
-
- def test_minimal_modifying(self):
- path = pjoin(self.dir, 'foo', 'bar')
- self.failUnless(osutils.ensure_dirs(path, mode=0750))
- self.failUnless(osutils.ensure_dirs(path, mode=0005, minimal=True))
- self.check_dir(path, os.geteuid(), os.getegid(), 0755)
-
- def test_create_unwritable_subdir(self):
- path = pjoin(self.dir, 'restricted', 'restricted')
- # create the subdirs without 020 first
- self.failUnless(osutils.ensure_dirs(os.path.dirname(path)))
- self.failUnless(osutils.ensure_dirs(path, mode=0020))
- self.check_dir(path, os.geteuid(), os.getegid(), 0020)
- # unrestrict it
- osutils.ensure_dirs(path, minimal=False)
- self.check_dir(path, os.geteuid(), os.getegid(), self.default_mode)
-
- def test_mode(self):
- path = pjoin(self.dir, 'mode', 'mode')
- self.failUnless(osutils.ensure_dirs(path, mode=0700))
- self.check_dir(path, os.geteuid(), os.getegid(), 0700)
- # unrestrict it
- osutils.ensure_dirs(path)
- self.check_dir(path, os.geteuid(), os.getegid(), self.default_mode)
-
- def test_gid(self):
- # abuse the portage group as secondary group
- portage_gid = grp.getgrnam('portage').gr_gid
- if portage_gid not in os.getgroups():
- raise SkipTest('you are not in the portage group')
- path = pjoin(self.dir, 'group', 'group')
- self.failUnless(osutils.ensure_dirs(path, gid=portage_gid))
- self.check_dir(path, os.geteuid(), portage_gid, self.default_mode)
- self.failUnless(osutils.ensure_dirs(path))
- self.check_dir(path, os.geteuid(), portage_gid, self.default_mode)
- self.failUnless(osutils.ensure_dirs(path, gid=os.getegid()))
- self.check_dir(path, os.geteuid(), os.getegid(), self.default_mode)
-
-
-class Test_abspath(TempDirMixin, TestCase):
-
- func = staticmethod(osutils.abspath)
-
- def test_it(self):
- trg = pjoin(self.dir, "foon")
- sym = pjoin(self.dir, "spork")
- os.symlink(trg, sym)
- self.assertRaises(OSError, self.func, trg)
- self.assertEqual(trg, self.func(sym))
- open(trg, 'w')
- self.assertEqual(trg, self.func(sym))
- self.assertEqual(trg, self.func(trg))
-
-
-class Test_abssymlink(TempDirMixin, TestCase):
-
- def test_it(self):
- target = pjoin(self.dir, 'target')
- linkname = pjoin(self.dir, 'link')
- os.mkdir(target)
- os.symlink('target', linkname)
- self.assertEqual(osutils.abssymlink(linkname), target)
-
-
-class Native_NormPathTest(TestCase):
-
- func = staticmethod(osutils.native_normpath)
-
- def test_normpath(self):
- f = self.func
- self.assertEqual(f('/foo/'), '/foo')
- self.assertEqual(f('//foo/'), '/foo')
- self.assertEqual(f('//foo/.'), '/foo')
- self.assertEqual(f('//..'), '/')
- self.assertEqual(f('//..//foo'), '/foo')
- self.assertEqual(f('/foo/..'), '/')
- self.assertEqual(f('..//foo'), '../foo')
- self.assertEqual(f('.//foo'), 'foo')
- self.assertEqual(f('//foo/.///somewhere//..///bar//'), '/foo/bar')
-
-
-class Cpy_NormPathTest(Native_NormPathTest):
-
- func = staticmethod(osutils.normpath)
- if osutils.normpath is osutils.native_normpath:
- skip = "extension isn't compiled"
-
-
-class Cpy_JoinTest(TestCase):
-
- if osutils.join is osutils.native_join:
- skip = "extension isn't compiled"
-
- def assertSame(self, val):
- self.assertEqual(osutils.native_join(*val),
- osutils.join(*val),
- msg="for %r, expected %r, got %r" % (val,
- osutils.native_join(*val),
- osutils.join(*val)))
-
- def test_reimplementation(self):
- map(self.assertSame, [
- ["", "foo"],
- ["foo", "dar"],
- ["foo", "/bar"],
- ["/bar", "dar"],
- ["/bar", "../dar"],
- ["", "../dar"],
- ["foo//dar", "foon"],
- ["//foo", "dar"],
- ])
-
-
-# TODO: more error condition testing
-class FsLockTest(TempDirMixin, TestCase):
-
- def test_nonexistant(self):
- self.assertRaises(osutils.NonExistant, osutils.FsLock,
- pjoin(self.dir, 'missing'))
-
- def test_locking(self):
- path = pjoin(self.dir, 'lockfile')
- lock = osutils.FsLock(path, True)
- # do this all non-blocking to avoid hanging tests
- self.failUnless(lock.acquire_read_lock(False))
- # file should exist now
- f = open(path)
- # acquire and release a read lock
- fcntl.flock(f, fcntl.LOCK_SH | fcntl.LOCK_NB)
- fcntl.flock(f, fcntl.LOCK_UN | fcntl.LOCK_NB)
- # we can't acquire an exclusive lock
- self.assertRaises(
- IOError, fcntl.flock, f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- lock.release_read_lock()
- # but now we can
- fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- self.failIf(lock.acquire_read_lock(False))
- self.failIf(lock.acquire_write_lock(False))
- fcntl.flock(f, fcntl.LOCK_UN | fcntl.LOCK_NB)
- # acquire an exclusive/write lock
- self.failUnless(lock.acquire_write_lock(False))
- self.assertRaises(
- IOError, fcntl.flock, f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- # downgrade to read lock
- self.failUnless(lock.acquire_read_lock())
- fcntl.flock(f, fcntl.LOCK_SH | fcntl.LOCK_NB)
- fcntl.flock(f, fcntl.LOCK_UN | fcntl.LOCK_NB)
- self.assertRaises(
- IOError, fcntl.flock, f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- # and release
- lock.release_read_lock()
- fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- fcntl.flock(f, fcntl.LOCK_UN | fcntl.LOCK_NB)
-
- self.failUnless(lock.acquire_write_lock(False))
- lock.release_write_lock()
- fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
- fcntl.flock(f, fcntl.LOCK_UN | fcntl.LOCK_NB)
-
-
-class Test_readfile(TempDirMixin, TestCase):
-
- # note no trailing newline.
- line_content = "\n".join(str(x) for x in xrange(100))
- func = staticmethod(osutils.native_readfile)
-
- largefile_testing_len = 2**17
-
- def setUp(self):
- TempDirMixin.setUp(self)
- self.fp = pjoin(self.dir, "foon")
- open(self.fp, 'w').write(self.line_content)
-
- def test_it(self):
- self.assertRaises(IOError, self.func, self.fp+"2")
- self.assertRaises(IOError, self.func, self.fp+"2", False)
- self.assertEqual(None, self.func(self.fp+"2", True))
- self.assertEqual(self.line_content, self.func(self.fp))
- # test big files; forces the cpy to switch over to mmap
-
- f = open(self.fp, "a")
- # keep in mind, we already have a line in the file.
- count = self.largefile_testing_len / len(self.line_content)
- for x in xrange(count -1):
- f.write(self.line_content)
- f.close()
- self.assertEqual(self.line_content * count, self.func(self.fp),
- reflective=False,
- msg="big file failed; len(%r) must equal len(%r)" %
- (os.stat(self.fp).st_size, len(self.line_content) * count))
-
-
-class Test_cpy_readfile(Test_readfile):
-
- if osutils.native_readfile is osutils.readfile:
- skip = "cpython extension not available"
- else:
- func = staticmethod(osutils.readfile)
-
-
-# mostly out of lazyness, we derive directly from test_readfile instead of
-# splitting out a common base. whoever changes readfile, gets to do the split
-# ;)
-
-class Test_readlines(Test_readfile):
-
- func = staticmethod(osutils.native_readlines)
- lines = Test_readfile.line_content.split("\n")
- nlines = [x+"\n" for x in lines]
- nlines[-1] = nlines[-1].rstrip("\n")
-
- def assertList(self, expected, got):
- if not isinstance(expected, list):
- expected = list(expected)
- if not isinstance(got, list):
- got = list(got)
- self.assertEqual(len(expected), len(got))
- for idx, tup in enumerate(izip(expected, got)):
- self.assertEqual(tup[0], tup[1], reflective=False,
- msg="expected %r, got %r for item %i" %
- (tup[0], tup[1], idx))
-
- def test_it(self):
- func = self.func
- fp = self.fp
- bad_fp = "%s2" % fp
-
- self.assertRaises(IOError, func, bad_fp)
- # check defaults for none_on_missing, and swallow_missing
- self.assertRaises(IOError, func, bad_fp, False, False)
- self.assertRaises(IOError, func, bad_fp, False, False, False)
- # ensure non_on_missing=True && swallow_missing=False pukes
- self.assertRaises(IOError, func, bad_fp, False, False, True)
-
- # the occasional True as the first arg is ensuring strip_newlines
- # isn't doing anything.
-
- self.assertEqual(None, func(bad_fp, False, True, True))
- self.assertList([], func(bad_fp, False, True, False))
- self.assertList([], func(bad_fp, True, True, False))
-
- self.assertList(self.lines, func(fp))
- self.assertList(self.lines, func(fp, True))
- self.assertList(self.nlines, func(fp, False, False, False))
-
- # check it does produce an extra line for trailing newline.
- open(fp, 'a').write("\n")
- self.assertList([x+"\n" for x in self.lines], func(fp, False))
- self.assertList(self.lines+[], func(fp))
- self.assertList(self.lines+[], func(fp, True))
-
- # test big files.
- count = self.largefile_testing_len / len(self.line_content)
- f = open(self.fp, 'a')
- for x in xrange(count - 1):
- f.write(self.line_content+"\n")
- f.close()
- self.assertList(self.lines * count, func(fp))
- self.assertList(self.lines * count, func(fp, True))
- l = [x + "\n" for x in self.lines] * count
- self.assertList([x +"\n" for x in self.lines] * count,
- func(fp, False))
- self.assertList(self.lines * count, func(fp))
- self.assertList(self.lines * count, func(fp, True))
-
-
- def test_mtime(self):
- cur = os.stat_float_times()
- try:
- for x in (False, True):
- os.stat_float_times(False)
- self.assertEqual(os.stat(self.fp).st_mtime,
- self.func(self.fp, x).mtime)
-
- os.stat_float_times(True)
- self.assertEqual(os.stat(self.fp).st_mtime,
- self.func(self.fp, x).mtime)
-
- # ensure that when swallow, but not none_on_missing, mtime is None
- self.assertEqual(self.func(self.fp+"2", False, True, False).mtime,
- None)
-
- finally:
- os.stat_float_times(cur)
-
-
-class Test_cpy_readlines(Test_readlines):
-
- if osutils.native_readlines is osutils.readlines:
- skip = "cpy extension isn't available"
- else:
- func = staticmethod(osutils.readlines)
diff --git a/pkgcore/test/util/test_parserestrict.py b/pkgcore/test/util/test_parserestrict.py
index 292144a1b..fe25c4f52 100644
--- a/pkgcore/test/util/test_parserestrict.py
+++ b/pkgcore/test/util/test_parserestrict.py
@@ -5,9 +5,9 @@
from pkgcore.test import TestCase
from pkgcore.ebuild.atom import atom
from pkgcore.restrictions import packages, values, boolean
-from pkgcore.util import parserestrict
-from pkgcore.util.currying import post_curry
from pkgcore.repository import util
+from pkgcore.util import parserestrict
+from snakeoil.currying import post_curry
class MatchTest(TestCase):
diff --git a/pkgcore/test/util/test_weakrefs.py b/pkgcore/test/util/test_weakrefs.py
deleted file mode 100644
index 125b059ba..000000000
--- a/pkgcore/test/util/test_weakrefs.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from pkgcore.test import TestCase
-from pkgcore.util.weakrefs import WeakValCache
-from weakref import WeakValueDictionary
-
-class RefObj(object):
- pass
-
-class TestWeakValCache(TestCase):
- if WeakValueDictionary is WeakValCache:
- skip = "WeakValCache is weakref.WeakValueDictionary; indicates " \
- "pkgcore.util._caching isn't compiled"
-
- def setUp(self):
- self.o = RefObj()
- self.w = WeakValCache()
-
- def test_setitem(self):
- s = "asdf"
- self.w[s] = self.o
- self.w["fds"] = self.o
- self.w[s] = self.o
-
- def test_getitem(self):
- s = "asdf"
- self.w[s] = self.o
- self.assertIdentical(self.w[s], self.o)
-
- def test_expiring(self):
- s = "asdf"
- self.w[s] = self.o
- self.assertTrue(self.w[s])
- del self.o
- self.assertRaises(KeyError, self.w.__getitem__, s)
-
- def test_get(self):
- s = "asdf"
- self.assertRaises(KeyError, self.w.__getitem__, s)
- self.w[s] = self.o
- self.assertIdentical(self.w.get(s), self.o)
diff --git a/pkgcore/util/bzip2.py b/pkgcore/util/bzip2.py
index b1280d3da..f5cef5151 100644
--- a/pkgcore/util/bzip2.py
+++ b/pkgcore/util/bzip2.py
@@ -12,7 +12,7 @@ Should use this module unless its absolutely critical that bz2 module be used
"""
-from pkgcore.util.demandload import demandload
+from snakeoil.demandload import demandload
demandload(globals(), "tempfile pkgcore.spawn:find_binary,spawn_get_output")
def process_compress(in_data, compress_level=9):
diff --git a/pkgcore/util/caching.py b/pkgcore/util/caching.py
deleted file mode 100644
index 322dae1b4..000000000
--- a/pkgcore/util/caching.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-instance caching metaclass
-"""
-from pkgcore.util.demandload import demandload
-demandload(globals(), "warnings weakref:WeakValueDictionary")
-
-class native_WeakInstMeta(type):
- """"metaclass for instance caching, resulting in reuse of unique instances
-
- few notes-
- - instances must be immutable (or effectively so).
- Since creating a new instance may return a preexisting instance,
- this requirement B{must} be honored.
- - due to the potential for mishap, each subclass of a caching class must
- assign __inst_caching__ = True to enable caching for the derivative.
- - conversely, __inst_caching__ = False does nothing
- (although it's useful as a sign of
- I{do not enable caching for this class}
- - instance caching can be disabled per instantiation via passing
- disabling_inst_caching=True into the class constructor.
-
- Being a metaclass, the voodoo used doesn't require modification of
- the class itself.
-
- Examples of usage are the restriction modules
- L{packages<pkgcore.restrictions.packages>} and
- L{values<pkgcore.restrictions.values>}
- """
- def __new__(cls, name, bases, d):
- if d.get("__inst_caching__", False):
- d["__inst_caching__"] = True
- d["__inst_dict__"] = WeakValueDictionary()
- else:
- d["__inst_caching__"] = False
- slots = d.get('__slots__')
- if slots is not None:
- for base in bases:
- if getattr(base, '__weakref__', False):
- break
- else:
- d['__slots__'] = tuple(slots) + ('__weakref__',)
- return type.__new__(cls, name, bases, d)
-
- def __call__(cls, *a, **kw):
- """disable caching via disable_inst_caching=True"""
- if cls.__inst_caching__ and not kw.pop("disable_inst_caching", False):
- kwlist = kw.items()
- kwlist.sort()
- key = (a, tuple(kwlist))
- try:
- instance = cls.__inst_dict__.get(key)
- except (NotImplementedError, TypeError), t:
- warnings.warn(
- "caching keys for %s, got %s for a=%s, kw=%s" % (
- cls, t, a, kw))
- del t
- key = instance = None
-
- if instance is None:
- instance = super(native_WeakInstMeta, cls).__call__(*a, **kw)
-
- if key is not None:
- cls.__inst_dict__[key] = instance
- else:
- instance = super(native_WeakInstMeta, cls).__call__(*a, **kw)
-
- return instance
-
-# "Invalid name"
-# pylint: disable-msg=C0103
-
-try:
- # No name in module
- # pylint: disable-msg=E0611
- from pkgcore.util._caching import WeakInstMeta
- cpy_WeakInstMeta = WeakInstMeta
-except ImportError:
- cpy_WeakInstMeta = None
- WeakInstMeta = native_WeakInstMeta
diff --git a/pkgcore/util/commandline.py b/pkgcore/util/commandline.py
index fd552271d..484b29d61 100644
--- a/pkgcore/util/commandline.py
+++ b/pkgcore/util/commandline.py
@@ -20,16 +20,22 @@ import os.path
import logging
from pkgcore.config import load_config, errors
-from pkgcore.util import formatters, demandload
+from snakeoil import formatters, demandload
demandload.demandload(
globals(),
'pkgcore:version '
+ 'pkgcore.config:basics '
'pkgcore.restrictions:packages '
'pkgcore.util:parserestrict '
)
+CONFIG_LOADED_MSG = (
+ 'Configuration already loaded. If moving the option earlier '
+ 'on the commandline does not fix this report it as a bug.')
+
+
class FormattingHandler(logging.Handler):
"""Logging handler printing through a formatter."""
@@ -74,9 +80,34 @@ class Values(optparse.Values, object):
>>> parser.parse_args(args, vals)
"""
+ def __init__(self, defaults=None):
+ optparse.Values.__init__(self, defaults)
+ self.new_config = {}
+ self.add_config = {}
+
def load_config(self):
"""Override this if you need a different way of loading config."""
- return load_config(debug=self.debug)
+ # This makes mixing --new-config and --add-config sort of
+ # work. Not sure if that is a good thing, but detecting and
+ # erroring is about as much work as making it mostly work :)
+ new_config = dict(
+ (name, basics.ConfigSectionFromStringDict(val))
+ for name, val in self.new_config.iteritems())
+ add_config = {}
+ for name, config in self.add_config.iteritems():
+ inherit = config.pop('inherit', None)
+ # XXX this will likely not be quite correctly quoted.
+ if inherit is None:
+ config['inherit'] = repr(name)
+ else:
+ config['inherit'] = '%s %r' % (inherit, name)
+ add_config[name] = basics.ConfigSectionFromStringDict(config)
+ # Triggers failures if these get mucked with after this point
+ # (instead of silently ignoring).
+ self.add_config = self.new_config = None
+ return load_config(
+ debug=self.debug, prepend_sources=(add_config, new_config),
+ skip_config_files=self.empty_config)
@property
def config(self):
@@ -149,6 +180,33 @@ def debug_callback(option, opt_str, value, parser):
collapsed.debug = True
+def new_config_callback(option, opt_str, value, parser):
+ """Add a configsection to our values object.
+
+ Munges three arguments: section name, key name, value.
+
+ dest defines an attr name on the values object to store in.
+ """
+ if getattr(parser.values, '_config', None) is not None:
+ raise optparse.OptionValueError(CONFIG_LOADED_MSG)
+ section_name, key, val = value
+ section = getattr(parser.values, option.dest).setdefault(section_name, {})
+ if key in section:
+ raise optparse.OptionValueError(
+ '%r is already set (to %r)' % (key, section[key]))
+ section[key] = val
+
+
+def empty_config_callback(option, opt_str, value, parser):
+ """Remember not to load the user/system configuration.
+
+ Error out if we have already loaded it.
+ """
+ if getattr(parser.values, '_config', None) is not None:
+ raise optparse.OptionValueError(CONFIG_LOADED_MSG)
+ parser.values.empty_config = True
+
+
class Option(optparse.Option):
def __init__(self, *args, **kwargs):
@@ -175,8 +233,21 @@ class OptionParser(optparse.OptionParser):
'to set this as first argument for debugging certain '
'configuration problems.'),
Option('--nocolor', action='store_true',
- help='disable color in the output.'),
+ help='disable color in the output.'),
Option('--version', action='version'),
+ Option(
+ '--add-config', action='callback', callback=new_config_callback,
+ type='str', nargs=3, help='Add a new configuration section. '
+ 'Takes three arguments: section name, value name, value.'),
+ Option(
+ '--new-config', action='callback', callback=new_config_callback,
+ type='str', nargs=3, help='Expand a configuration section. '
+ 'Just like --add-config but with an implied inherit=sectionname.'),
+ Option(
+ '--empty-config', action='callback',
+ callback=empty_config_callback,
+ help='Do not load the user or system configuration. Can be useful '
+ 'combined with --new-config.')
]
def __init__(self, *args, **kwargs):
@@ -185,6 +256,7 @@ class OptionParser(optparse.OptionParser):
optparse.OptionParser.__init__(self, *args, **kwargs)
# It is a callback so it cannot set a default value the "normal" way.
self.set_default('debug', False)
+ self.set_default('empty_config', False)
def get_version(self):
"""Add pkgcore's version to the version information."""
@@ -242,7 +314,7 @@ class OptionParser(optparse.OptionParser):
l.append(parserestrict.parse_match(x))
except parserestrict.ParseError, e:
self.error("arg %r isn't a valid atom: %s" % (x, e))
- return l or default
+ return l or [default]
class MySystemExit(SystemExit):
@@ -264,7 +336,7 @@ def main(subcommands, args=None, outfile=sys.stdout, errfile=sys.stderr,
The keys are a subcommand name or None for other/unknown/no subcommand.
The values are tuples of OptionParser subclasses and functions called
as main_func(config, out, err) with a L{Values} instance, two
- L{pkgcore.util.formatters.Formatter} instances for output (stdout)
+ L{snakeoil.formatters.Formatter} instances for output (stdout)
and errors (stderr). It should return an integer used as
exit status or None as synonym for 0.
@type args: sequence of strings
@@ -337,7 +409,9 @@ def main(subcommands, args=None, outfile=sys.stdout, errfile=sys.stderr,
except KeyboardInterrupt:
if options is not None and options.debug:
raise
+ out.write()
if out is not None:
+ out.write()
if exitstatus:
out.title('%s failed' % (prog,))
else:
diff --git a/pkgcore/util/compatibility.py b/pkgcore/util/compatibility.py
deleted file mode 100644
index f2a640b66..000000000
--- a/pkgcore/util/compatibility.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-Compatibility module providing reimplementations of python2.5 functionality.
-
-Uses the native implementation from C{__builtins__} if available.
-"""
-
-# using variable before assignment
-# pylint: disable-msg=E0601
-
-if "any" in __builtins__:
- any = any
-else:
- def any(iterable):
- for x in iterable:
- if x:
- return True
- return False
-
-if "all" in __builtins__:
- all = all
-else:
- def all(iterable):
- for x in iterable:
- if not x:
- return False
- return True
diff --git a/pkgcore/util/containers.py b/pkgcore/util/containers.py
deleted file mode 100644
index c55db6877..000000000
--- a/pkgcore/util/containers.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# Copyright: 2005-2007 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-# $Id:$
-
-
-"""
-collection of container classes
-"""
-
-from pkgcore.util.demandload import demandload
-demandload(globals(),
- "pkgcore.util.lists:iter_stable_unique "
- "itertools:chain "
-)
-
-class InvertedContains(set):
-
- """Set that inverts all contains lookups results
-
- Mainly useful in conjuection with LimitedChangeSet for converting
- from blacklist to whitelist.
-
- Not able to be iterated over also
- """
-
- def __contains__(self, key):
- return not set.__contains__(self, key)
-
- def __iter__(self):
- # infinite set, non iterable.
- raise TypeError
-
-
-class LimitedChangeSet(object):
-
- """Set used to limit the number of times a key can be removed/added
-
- specifically deleting/adding a key only once per commit,
- optionally blocking changes to certain keys.
- """
-
- _removed = 0
- _added = 1
-
- def __init__(self, initial_keys, unchangable_keys=None):
- self._new = set(initial_keys)
- if unchangable_keys is None:
- self._blacklist = []
- else:
- if isinstance(unchangable_keys, (list, tuple)):
- unchangable_keys = set(unchangable_keys)
- self._blacklist = unchangable_keys
- self._changed = set()
- self._change_order = []
- self._orig = frozenset(self._new)
-
- def add(self, key):
- if key in self._changed or key in self._blacklist:
- # it's been del'd already once upon a time.
- if key in self._new:
- return
- raise Unchangable(key)
-
- self._new.add(key)
- self._changed.add(key)
- self._change_order.append((self._added, key))
-
- def remove(self, key):
- if key in self._changed or key in self._blacklist:
- if key not in self._new:
- raise KeyError(key)
- raise Unchangable(key)
-
- if key in self._new:
- self._new.remove(key)
- self._changed.add(key)
- self._change_order.append((self._removed, key))
-
- def __contains__(self, key):
- return key in self._new
-
- def changes_count(self):
- return len(self._change_order)
-
- def commit(self):
- self._orig = frozenset(self._new)
- self._changed.clear()
- self._change_order = []
-
- def rollback(self, point=0):
- l = self.changes_count()
- if point < 0 or point > l:
- raise TypeError(
- "%s point must be >=0 and <= changes_count()" % point)
- while l > point:
- change, key = self._change_order.pop(-1)
- self._changed.remove(key)
- if change == self._removed:
- self._new.add(key)
- else:
- self._new.remove(key)
- l -= 1
-
- def __str__(self):
- return str(self._new).replace("set(", "LimitedChangeSet(", 1)
-
- def __iter__(self):
- return iter(self._new)
-
- def __len__(self):
- return len(self._new)
-
- def __eq__(self, other):
- if isinstance(other, LimitedChangeSet):
- return self._new == other._new
- elif isinstance(other, set):
- return self._new == other
- return False
-
- def __ne__(self, other):
- return not (self == other)
-
-
-class Unchangable(Exception):
-
- def __init__(self, key):
- Exception.__init__(self, "key '%s' is unchangable" % (key,))
- self.key = key
-
-
-class ProtectedSet(object):
-
- """
- Wraps a set pushing all changes into a secondary set.
-
- Be aware that it lacks the majority of set methods.
- """
- def __init__(self, orig_set):
- self._orig = orig_set
- self._new = set()
-
- def __contains__(self, key):
- return key in self._orig or key in self._new
-
- def __iter__(self):
- return iter_stable_unique(chain(self._new, self._orig))
-
- def __len__(self):
- return len(self._orig.union(self._new))
-
- def add(self, key):
- if key not in self._orig:
- self._new.add(key)
-
-
-class RefCountingSet(dict):
-
- def __init__(self, iterable=None):
- if iterable is not None:
- dict.__init__(self, ((x, 1) for x in iterable))
-
- def add(self, item):
- count = self.get(item, 0)
- self[item] = count + 1
-
- def remove(self, item):
- count = self[item]
- if count == 1:
- del self[item]
- else:
- self[item] = count - 1
diff --git a/pkgcore/util/currying.py b/pkgcore/util/currying.py
deleted file mode 100644
index 12ad6148a..000000000
--- a/pkgcore/util/currying.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright: 2005 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-Function currying, generating a functor with a set of args/defaults pre bound.
-
-L{pre_curry} and L{post_curry} return "normal" python functions.
-L{partial} returns a callable object. The difference between
-L{pre_curry} and L{partial} is this::
-
- >>> def func(arg=None, self=None):
- ... return arg, self
- >>> curry = pre_curry(func, True)
- >>> part = partial(func, True)
- >>> class Test(object):
- ... curry = pre_curry(func, True)
- ... part = partial(func, True)
- ... def __repr__(self):
- ... return '<Test object>'
- >>> curry()
- (True, None)
- >>> Test().curry()
- (True, <Test object>)
- >>> part()
- (True, None)
- >>> Test().part()
- (True, None)
-
-If your curried function is not used as a class attribute the results
-should be identical. Because L{partial} has an implementation in c
-while L{pre_curry} is python you should use L{partial} if possible.
-"""
-
-from operator import attrgetter
-
-__all__ = [
- "pre_curry", "partial", "post_curry", "pretty_docs", "alias_class_method"]
-
-def pre_curry(func, *args, **kwargs):
- """passed in args are prefixed, with further args appended"""
-
- if not kwargs:
- def callit(*moreargs, **morekwargs):
- return func(*(args + moreargs), **morekwargs)
- elif not args:
- def callit(*moreargs, **morekwargs):
- kw = kwargs.copy()
- kw.update(morekwargs)
- return func(*moreargs, **kw)
- else:
- def callit(*moreargs, **morekwargs):
- kw = kwargs.copy()
- kw.update(morekwargs)
- return func(*(args+moreargs), **kw)
-
- callit.func = func
- return callit
-
-
-class native_partial(object):
-
- """Like pre_curry, but does not get turned into an instance method."""
-
- def __init__(self, func, *args, **kwargs):
- self.func = func
- self.args = args
- self.kwargs = kwargs
-
- def __call__(self, *moreargs, **morekwargs):
- kw = self.kwargs.copy()
- kw.update(morekwargs)
- return self.func(*(self.args + moreargs), **kw)
-
-# Unused import, unable to import
-# pylint: disable-msg=W0611,F0401
-try:
- from functools import partial
-except ImportError:
- try:
- from pkgcore.util._functools import partial
- except ImportError:
- partial = native_partial
-
-
-def post_curry(func, *args, **kwargs):
- """passed in args are appended to any further args supplied"""
-
- if not kwargs:
- def callit(*moreargs, **morekwargs):
- return func(*(moreargs+args), **morekwargs)
- elif not args:
- def callit(*moreargs, **morekwargs):
- kw = morekwargs.copy()
- kw.update(kwargs)
- return func(*moreargs, **kw)
- else:
- def callit(*moreargs, **morekwargs):
- kw = morekwargs.copy()
- kw.update(kwargs)
- return func(*(moreargs+args), **kw)
-
- callit.func = func
- return callit
-
-def pretty_docs(wrapped, extradocs=None):
- wrapped.__module__ = wrapped.func.__module__
- doc = wrapped.func.__doc__
- if extradocs is None:
- wrapped.__doc__ = doc
- else:
- wrapped.__doc__ = extradocs
- return wrapped
-
-
-def alias_class_method(attr):
- """at runtime, redirect to another method
-
- attr is the desired attr name to lookup, and supply all later passed in
- args/kws to
-
- Useful for when setting has_key to __contains__ for example, and
- __contains__ may be overriden.
- """
- grab_attr = attrgetter(attr)
-
- def _asecond_level_call(self, *a, **kw):
- return grab_attr(self)(*a, **kw)
-
- return _asecond_level_call
diff --git a/pkgcore/util/demandload.py b/pkgcore/util/demandload.py
deleted file mode 100644
index 36c8b3d22..000000000
--- a/pkgcore/util/demandload.py
+++ /dev/null
@@ -1,202 +0,0 @@
-'''Demand load modules when used, not when imported.'''
-
-import sys
-import re
-
-__author__ = '''Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>.
-This software may be used and distributed according to the terms
-of the GNU General Public License, incorporated herein by reference.'''
-
-# this is based on matt's original demandload module. it is a
-# complete rewrite. some time, we may need to support syntax of
-# "import foo as bar".
-
-
-class _importer(object):
- '''import a module. it is not imported until needed, and is
- imported at most once per scope.'''
-
- def __init__(self, scope, modname, fromlist):
- '''scope is context (globals() or locals()) in which import
- should be made. modname is name of module to import.
- fromlist is list of modules for "from foo import ..."
- emulation.'''
-
- self.scope = scope
- self.modname = modname
- self.fromlist = fromlist
- self.mod = None
-
- def module(self):
- '''import the module if needed, and return.'''
- if self.mod is None:
- self.mod = __import__(self.modname, self.scope, self.scope,
- self.fromlist)
- if isinstance(self.mod, _replacer):
- del sys.modules[self.modname]
- self.mod = __import__(self.modname, self.scope, self.scope,
- self.fromlist)
- del self.modname, self.fromlist
- return self.mod
-
-
-class _replacer(object):
- '''placeholder for a demand loaded module. demandload puts this in
- a target scope. when an attribute of this object is looked up,
- this object is replaced in the target scope with the actual
- module.
-
- we use __getattribute__ to avoid namespace clashes between
- placeholder object and real module.'''
-
- def __init__(self, importer, target):
- self.importer = importer
- self.target = target
- # consider case where we do this:
- # demandload(globals(), 'foo.bar foo.quux')
- # foo will already exist in target scope when we get to
- # foo.quux. so we remember that we will need to demandload
- # quux into foo's scope when we really load it.
- self.later = []
-
- def module(self):
- return object.__getattribute__(self, 'importer').module()
-
- def __getattribute__(self, key):
- '''look up an attribute in a module and return it. replace the
- name of the module in the caller\'s dict with the actual
- module.'''
-
- module = object.__getattribute__(self, 'module')()
- target = object.__getattribute__(self, 'target')
- importer = object.__getattribute__(self, 'importer')
- later = object.__getattribute__(self, 'later')
-
- if later:
- demandload(module.__dict__, ' '.join(later))
-
- importer.scope[target] = module
-
- return getattr(module, key)
-
-
-class _replacer_from(_replacer):
- '''placeholder for a demand loaded module. used for "from foo
- import ..." emulation. semantics of this are different than
- regular import, so different implementation needed.'''
-
- def module(self):
- importer = object.__getattribute__(self, 'importer')
- target = object.__getattribute__(self, 'target')
-
- return getattr(importer.module(), target)
-
- def __call__(self, *args, **kwargs):
- target = object.__getattribute__(self, 'module')()
- return target(*args, **kwargs)
-
-
-def _demandload(scope, modules):
- '''import modules into scope when each is first used.
-
- scope should be the value of globals() in the module calling this
- function, or locals() in the calling function.
-
- modules is a string listing module names, separated by white
- space. names are handled like this:
-
- foo import foo
- foo bar import foo, bar
- foo.bar import foo.bar
- foo:bar from foo import bar
- foo:bar,quux from foo import bar, quux
- foo.bar:quux from foo.bar import quux'''
-
- for mod in modules.split():
- if not mod: #Ignore empty entries
- continue
- col = mod.find(':')
- if col >= 0:
- fromlist = mod[col+1:].split(',')
- mod = mod[:col]
- else:
- fromlist = []
- importer = _importer(scope, mod, fromlist)
- if fromlist:
- for name in fromlist:
- scope[name] = _replacer_from(importer, name)
- else:
- dot = mod.find('.')
- if dot >= 0:
- basemod = mod[:dot]
- val = scope.get(basemod)
- # if base module has already been demandload()ed,
- # remember to load this submodule into its namespace
- # when needed.
- if isinstance(val, _replacer):
- later = object.__getattribute__(val, 'later')
- later.append(mod[dot+1:])
- continue
- else:
- basemod = mod
- scope[basemod] = _replacer(importer, basemod)
-
-
-def disabled_demandload(scope, modules):
- for mod in modules.split():
- if not mod: #Ignore empty entries
- continue
- col = mod.find(':')
- if col >= 0:
- fromlist = mod[col+1:].split(',')
- mod = mod[:col]
- else:
- fromlist = []
- mod_obj = __import__(mod, scope, {}, [])
- if not fromlist:
- scope[mod.split(".", 1)[0]] = mod_obj
- else:
- for sub in mod.split(".")[1:]:
- mod_obj = getattr(mod_obj, sub)
- for name in fromlist:
- if name in dir(mod_obj):
- scope[name] = getattr(mod_obj, name)
- else:
- loc = mod + "." + name
- m = __import__(loc, scope, {}, [])
- for sub in loc.split(".")[1:]:
- m = getattr(m, sub)
- scope[name] = m
-
-
-demandload = _demandload
-
-_real_compile = re.compile
-class _delayed_compiler(object):
- """A class which just waits to compile a regex until it is actually requested.
-
- It might be possible to use the scope swapping to prevent any overhead after
- the first request.
- """
-
- __slots__ = ['_args', '_kwargs', '_regex']
-
- def __init__(self, args, kwargs):
- self._args = args
- self._kwargs = kwargs
- self._regex = None
-
- def __getattribute__(self, attr):
- regex = object.__getattribute__(self, '_regex')
- if regex is None:
- args = object.__getattribute__(self, '_args')
- kwargs = object.__getattribute__(self, '_kwargs')
- regex = _real_compile(*args, **kwargs)
- self._regex = regex
- return getattr(regex, attr)
-
-def demand_compile(*args, **kwargs):
- return _delayed_compiler(args, kwargs)
-
-#re.compile = demand_compile
-
diff --git a/pkgcore/util/dependant_methods.py b/pkgcore/util/dependant_methods.py
deleted file mode 100644
index c3363b33a..000000000
--- a/pkgcore/util/dependant_methods.py
+++ /dev/null
@@ -1,85 +0,0 @@
-# Copyright: 2005-2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""Metaclass to inject dependencies into method calls.
-
-Essentially, method a must be ran prior to method b, invoking method a
-if b is called first.
-"""
-
-from pkgcore.util.lists import iflatten_instance
-from pkgcore.util.currying import partial
-
-__all__ = ["ForcedDepends"]
-
-def ensure_deps(self, name, *a, **kw):
- ignore_deps = "ignore_deps" in kw
- if ignore_deps:
- del kw["ignore_deps"]
- s = [name]
- else:
- s = yield_deps(self, self.stage_depends, name)
-
- r = True
- for dep in s:
- if dep not in self._stage_state:
- r = getattr(self, dep).raw_func(*a, **kw)
- if r:
- self._stage_state.add(dep)
- else:
- return r
- return r
-
-def yield_deps(inst, d, k):
- # XXX: this sucks. rewrite it to use expandable_chain...
- if k not in d:
- yield k
- return
- s = [k, iflatten_instance(d.get(k, ()))]
- while s:
- if isinstance(s[-1], basestring):
- yield s.pop(-1)
- continue
- exhausted = True
- for x in s[-1]:
- v = d.get(x)
- if v:
- s.append(x)
- s.append(iflatten_instance(v))
- exhausted = False
- break
- yield x
- if exhausted:
- s.pop(-1)
-
-
-class ForcedDepends(type):
- """
- Metaclass forcing methods to run in a certain order.
-
- Dependencies are controlled by the existance of a stage_depends
- dict in the class namespace. Its keys are method names, values are
- either a string (name of preceeding method), or list/tuple
- (proceeding methods).
-
- L{pkgcore.interfaces.format.build_base} is a consumer of this metaclass
- to look at for example usage.
- """
- def __call__(cls, *a, **kw):
- o = super(ForcedDepends, cls).__call__(*a, **kw)
- if not getattr(cls, "stage_depends"):
- return o
-
- if not hasattr(o, "_stage_state"):
- o._stage_state = set()
-
- # wrap the funcs
-
- for x in set(x for x in iflatten_instance(o.stage_depends.iteritems())
- if x):
- f = getattr(o, x)
- f2 = partial(ensure_deps, o, x)
- f2.raw_func = f
- setattr(o, x, f2)
-
- return o
diff --git a/pkgcore/util/descriptors.py b/pkgcore/util/descriptors.py
deleted file mode 100644
index bf31b9f88..000000000
--- a/pkgcore/util/descriptors.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-
-"""Classes implementing the descriptor protocol."""
-
-
-class classproperty(object):
-
- """Like the builtin C{property} but takes a single classmethod.
-
- Used like this:
-
- class Example(object):
-
- @classproperty
- def test(cls):
- # Do stuff with cls here (it is Example or a subclass).
-
- Now both C{Example.test} and C{Example().test} invoke the getter.
- A "normal" property only works on instances.
- """
-
- def __init__(self, getter):
- self.getter = getter
-
- def __get__(self, instance, owner):
- return self.getter(owner)
diff --git a/pkgcore/util/file.py b/pkgcore/util/file.py
deleted file mode 100644
index 58dd64796..000000000
--- a/pkgcore/util/file.py
+++ /dev/null
@@ -1,270 +0,0 @@
-# Copyright: 2005-2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-file related operations, mainly reading
-"""
-
-import re, os
-from shlex import shlex
-from pkgcore.util.mappings import ProtectedDict
-from pkgcore.util.osutils import readlines
-
-class AtomicWriteFile(file):
-
- """File class that stores the changes in a tempfile.
-
- Upon close call, uses rename to replace the destination.
-
- Similar to file protocol behaviour, except for the C{__init__}, and
- that close *must* be called for the changes to be made live,
-
- if C{__del__} is triggered it's assumed that an exception occured,
- thus the changes shouldn't be made live.
- """
- def __init__(self, fp, binary=False, **kwds):
- self.is_finalized = False
- if binary:
- mode = "wb"
- else:
- mode = "w"
- fp = os.path.realpath(fp)
- self.original_fp = fp
- self.temp_fp = os.path.join(
- os.path.dirname(fp), ".update.%s" % os.path.basename(fp))
- file.__init__(self, self.temp_fp, mode=mode, **kwds)
-
- def close(self):
- file.close(self)
- os.rename(self.temp_fp, self.original_fp)
- self.is_finalized = True
-
- def __del__(self):
- file.close(self)
- if not self.is_finalized:
- os.unlink(self.temp_fp)
-
-
-def iter_read_bash(bash_source):
- """
- Read file honoring bash commenting rules.
-
- Note that it's considered good behaviour to close filehandles, as
- such, either iterate fully through this, or use read_bash instead.
- Once the file object is no longer referenced the handle will be
- closed, but be proactive instead of relying on the garbage
- collector.
-
- @param bash_source: either a file to read from
- or a string holding the filename to open.
- """
- if isinstance(bash_source, basestring):
- bash_source = readlines(bash_source, True)
- for s in bash_source:
- s = s.strip()
- if s and s[0] != "#":
- yield s
-
-
-def read_bash(bash_source):
- return list(iter_read_bash(bash_source))
-read_bash.__doc__ = iter_read_bash.__doc__
-
-
-def read_dict(bash_source, splitter="=", source_isiter=False):
- """
- read key value pairs, ignoring bash-style comments.
-
- @param splitter: the string to split on.
- @param bash_source: either a file to read from,
- or a string holding the filename to open.
- """
- d = {}
- if not source_isiter:
- filename = bash_source
- i = iter_read_bash(bash_source)
- else:
- # XXX what to do?
- filename = '<unknown>'
- i = bash_source
- line_count = 1
- try:
- for k in i:
- line_count += 1
- try:
- k, v = k.split(splitter, 1)
- except ValueError:
- raise ParseError(filename, line_count)
- if len(v) > 2 and v[0] == v[-1] and v[0] in ("'", '"'):
- v = v[1:-1]
- d[k] = v
- finally:
- del i
- return d
-
-def read_bash_dict(bash_source, vars_dict=None, sourcing_command=None):
- """
- read bash source, yielding a dict of vars
-
- @param bash_source: either a file to read from
- or a string holding the filename to open
- @param vars_dict: initial 'env' for the sourcing.
- Is protected from modification.
- @type vars_dict: dict or None
- @param sourcing_command: controls whether a source command exists.
- If one does and is encountered, then this func is called.
- @type sourcing_command: callable
- @raise ParseError: thrown if invalid syntax is encountered.
- @return: dict representing the resultant env if bash executed the source.
- """
-
- # quite possibly I'm missing something here, but the original
- # portage_util getconfig/varexpand seemed like it only went
- # halfway. The shlex posix mode *should* cover everything.
-
- if vars_dict is not None:
- d, protected = ProtectedDict(vars_dict), True
- else:
- d, protected = {}, False
- if isinstance(bash_source, basestring):
- f = open(bash_source, "r")
- else:
- f = bash_source
- s = bash_parser(f, sourcing_command=sourcing_command, env=d)
- orig_whitespace = s.whitespace
- assign_whitespace = ''.join(c for c in orig_whitespace if c != '\n')
-
- try:
- tok = ""
- try:
- while tok is not None:
- key = s.get_token()
- if key is None:
- break
- elif key.isspace():
- # we specifically have to check this, since we're
- # screwing with the whitespace filters below to
- # detect empty assigns
- continue
- eq = s.get_token()
- if eq != '=':
- raise ParseError(bash_source, s.lineno)
- val = s.get_token()
- if val is None:
- val = ''
- # look ahead to see if we just got an empty assign.
- next_tok = s.get_token()
- if next_tok == '=':
- # ... we did.
- # leftmost insertions, thus reversed ordering
- s.push_token(next_tok)
- s.push_token(val)
- val = ''
- else:
- s.push_token(next_tok)
- d[key] = val
- except ValueError:
- raise ParseError(bash_source, s.lineno)
- finally:
- del f
- if protected:
- d = d.new
- return d
-
-
-var_find = re.compile(r'\\?(\${\w+}|\$\w+)')
-backslash_find = re.compile(r'\\.')
-def nuke_backslash(s):
- s = s.group()
- if s == "\\\n":
- return "\n"
- try:
- return chr(ord(s))
- except TypeError:
- return s[1]
-
-class bash_parser(shlex):
- def __init__(self, source, sourcing_command=None, env=None):
- shlex.__init__(self, source, posix=True)
- self.wordchars += "@${}/.-+/:~^"
- if sourcing_command is not None:
- self.source = sourcing_command
- if env is None:
- env = {}
- self.env = env
- self.__pos = 0
-
- def __setattr__(self, attr, val):
- if attr == "state" and "state" in self.__dict__:
- if (self.state, val) in (
- ('"', 'a'), ('a', '"'), ('a', ' '), ("'", 'a')):
- strl = len(self.token)
- if self.__pos != strl:
- self.changed_state.append(
- (self.state, self.token[self.__pos:]))
- self.__pos = strl
- self.__dict__[attr] = val
-
- def sourcehook(self, newfile):
- try:
- return shlex.sourcehook(self, newfile)
- except IOError, ie:
- raise ParseError(newfile, 0, str(ie))
-
- def read_token(self):
- self.changed_state = []
- self.__pos = 0
- tok = shlex.read_token(self)
- if tok is None:
- return tok
- self.changed_state.append((self.state, self.token[self.__pos:]))
- tok = ''
- for s, t in self.changed_state:
- if s in ('"', "a"):
- tok += self.var_expand(t).replace("\\\n", '')
- else:
- tok += t
- return tok
-
- def var_expand(self, val):
- prev, pos = 0, 0
- l = []
- match = var_find.search(val)
- while match is not None:
- pos = match.start()
- if val[pos] == '\\':
- # it's escaped. either it's \\$ or \\${ , either way,
- # skipping two ahead handles it.
- pos += 2
- else:
- var = val[match.start():match.end()].strip("${}")
- if prev != pos:
- l.append(val[prev:pos])
- if var in self.env:
- if not isinstance(self.env[var], basestring):
- raise ValueError(
- "env key %r must be a string, not %s: %r" % (
- var, type(self.env[var]), self.env[var]))
- l.append(self.env[var])
- else:
- l.append("")
- prev = pos = match.end()
- match = var_find.search(val, pos)
-
- # do \\ cleansing, collapsing val down also.
- val = backslash_find.sub(nuke_backslash, ''.join(l) + val[prev:])
- return val
-
-
-class ParseError(Exception):
-
- def __init__(self, filename, line, errmsg=None):
- if errmsg is not None:
- Exception.__init__(self,
- "error parsing '%s' on or before %i: err %s" %
- (filename, line, errmsg))
- else:
- Exception.__init__(self,
- "error parsing '%s' on or before %i" %
- (filename, line))
- self.file, self.line, self.errmsg = filename, line, errmsg
diff --git a/pkgcore/util/formatters.py b/pkgcore/util/formatters.py
deleted file mode 100644
index 997d9c5ae..000000000
--- a/pkgcore/util/formatters.py
+++ /dev/null
@@ -1,476 +0,0 @@
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-"""Classes wrapping a file-like object to do fancy output on it."""
-
-import os
-import errno
-
-from pkgcore.util.klass import GetAttrProxy
-from pkgcore.util import demandload
-demandload.demandload(globals(), 'locale')
-
-
-class StreamClosed(KeyboardInterrupt):
- """Raised by L{Formatter.write} if the stream it prints to was closed.
-
- This inherits from C{KeyboardInterrupt} because it should usually
- be handled the same way: a common way of triggering this exception
- is by closing a pager before the script finished outputting, which
- should be handled like control+c, not like an error.
- """
-
-
-# "Invalid name" (for fg and bg methods, too short)
-# pylint: disable-msg=C0103
-
-
-class Formatter(object):
-
- """Abstract formatter base class.
-
- The types of most of the instance attributes is undefined (depends
- on the implementation of the particular Formatter subclass).
-
- @ivar bold: object to pass to L{write} to switch to bold mode.
- @ivar underline: object to pass to L{write} to switch to underlined mode.
- @ivar reset: object to pass to L{write} to turn off bold and underline.
- @ivar wrap: boolean indicating we auto-linewrap (defaults to off).
- @ivar autoline: boolean indicating we are in auto-newline mode
- (defaults to on).
- """
-
- def __init__(self):
- self.autoline = True
- self.wrap = False
-
- def write(self, *args, **kwargs):
- """Write something to the stream.
-
- Acceptable arguments are:
- - Strings are simply written to the stream.
- - C{None} is ignored.
- - Functions are called with the formatter as argument.
- Their return value is then used the same way as the other
- arguments.
- - Formatter subclasses might special-case certain objects.
-
- Accepts wrap and autoline as keyword arguments. Effect is
- the same as setting them before the write call and resetting
- them afterwards.
-
- Accepts first_prefixes and later_prefixes as keyword
- arguments. They should be sequences that are temporarily
- appended to the first_prefix and later_prefix attributes.
-
- Accepts prefixes as a keyword argument. Effect is the same as
- setting first_prefixes and later_prefixes to the same value.
-
- Accepts first_prefix, later_prefix and prefix as keyword
- argument. Effect is the same as setting first_prefixes,
- later_prefixes or prefixes to a one-element tuple.
-
- The formatter has a couple of attributes that are useful as argument
- to write.
- """
-
- def fg(self, color=None):
- """Change foreground color.
-
- @type color: a string or C{None}.
- @param color: color to change to. A default is used if omitted.
- C{None} resets to the default color.
- """
-
- def bg(self, color=None):
- """Change background color.
-
- @type color: a string or C{None}.
- @param color: color to change to. A default is used if omitted.
- C{None} resets to the default color.
- """
-
- def error(self, message):
- """Format a string as an error message."""
- self.write(message, prefixes=(
- self.fg('red'), self.bold, '!!! ', self.reset))
-
- def warn(self, message):
- """Format a string as a warning message."""
- self.write(message, prefixes=(
- self.fg('yellow'), self.bold, '*** ', self.reset))
-
- def title(self, string):
- pass
-
-class PlainTextFormatter(Formatter):
-
- """Formatter writing plain text to a file-like object.
-
- @ivar width: contains the current maximum line length.
- """
-
- bold = underline = reset = ''
-
- def __init__(self, stream, width=79, encoding=None):
- """Initialize.
-
- @type stream: file-like object.
- @param stream: stream to output to.
- @param width: maximum line width.
- """
- Formatter.__init__(self)
- self.stream = stream
- if encoding is None:
- encoding = getattr(self.stream, 'encoding', None)
- if encoding is None:
- try:
- encoding = locale.getpreferredencoding()
- except locale.Error, e:
- encoding = 'ascii'
- self.encoding = encoding
- self.width = width
- self._pos = 0
- self._in_first_line = True
- self._wrote_something = False
- self.first_prefix = ['']
- self.later_prefix = ['']
-
-
- def _write_prefix(self, wrap):
- if self._in_first_line:
- prefix = self.first_prefix
- else:
- prefix = self.later_prefix
- # This is a bit braindead since it duplicates a lot of code
- # from write. Avoids fun things like word wrapped prefix though.
-
- for thing in prefix:
- while callable(thing):
- thing = thing(self)
- if thing is None:
- continue
- if not isinstance(thing, basestring):
- thing = str(thing)
- self._pos += len(thing)
- if isinstance(thing, unicode):
- thing = thing.encode(self.encoding, 'replace')
- self.stream.write(thing)
- if wrap and self._pos >= self.width:
- # XXX What to do? Our prefix does not fit.
- # This makes sure we still output something,
- # but it is completely arbitrary.
- self._pos = self.width - 10
-
-
- def write(self, *args, **kwargs):
- wrap = kwargs.get('wrap', self.wrap)
- autoline = kwargs.get('autoline', self.autoline)
- prefixes = kwargs.get('prefixes')
- first_prefixes = kwargs.get('first_prefixes')
- later_prefixes = kwargs.get('later_prefixes')
- if prefixes is not None:
- if first_prefixes is not None or later_prefixes is not None:
- raise TypeError(
- 'do not pass first_prefixes or later_prefixes '
- 'if prefixes is passed')
- first_prefixes = later_prefixes = prefixes
- prefix = kwargs.get('prefix')
- first_prefix = kwargs.get('first_prefix')
- later_prefix = kwargs.get('later_prefix')
- if prefix is not None:
- if first_prefix is not None or later_prefix is not None:
- raise TypeError(
- 'do not pass first_prefix or later_prefix with prefix')
- first_prefix = later_prefix = prefix
- if first_prefix is not None:
- if first_prefixes is not None:
- raise TypeError(
- 'do not pass both first_prefix and first_prefixes')
- first_prefixes = (first_prefix,)
- if later_prefix is not None:
- if later_prefixes is not None:
- raise TypeError(
- 'do not pass both later_prefix and later_prefixes')
- later_prefixes = (later_prefix,)
- if first_prefixes is not None:
- self.first_prefix.extend(first_prefixes)
- if later_prefixes is not None:
- self.later_prefix.extend(later_prefixes)
- # Remove this nested try block once we depend on python 2.5
- try:
- try:
- for arg in args:
- # If we're at the start of the line, write our prefix.
- # There is a deficiency here: if neither our arg nor our
- # prefix affect _pos (both are escape sequences or empty)
- # we will write prefix more than once. This should not
- # matter.
- if not self._pos:
- self._write_prefix(wrap)
- while callable(arg):
- arg = arg(self)
- if arg is None:
- continue
- if not isinstance(arg, basestring):
- arg = str(arg)
- is_unicode = isinstance(arg, unicode)
- while wrap and self._pos + len(arg) > self.width:
- # We have to split.
- maxlen = self.width - self._pos
- space = arg.rfind(' ', 0, maxlen)
- if space == -1:
- # No space to split on.
-
- # If we are on the first line we can simply go to
- # the next (this helps if the "later" prefix is
- # shorter and should not really matter if not).
-
- # If we are on the second line and have already
- # written something we can also go to the next
- # line.
- if self._in_first_line or self._wrote_something:
- bit = ''
- else:
- # Forcibly split this as far to the right as
- # possible.
- bit = arg[:maxlen]
- arg = arg[maxlen:]
- else:
- bit = arg[:space]
- # Omit the space we split on.
- arg = arg[space+1:]
- if is_unicode:
- bit = bit.encode(self.encoding, 'replace')
- self.stream.write(bit)
- self.stream.write('\n')
- self._pos = 0
- self._in_first_line = False
- self._wrote_something = False
- self._write_prefix(wrap)
-
- # This fits.
- self._wrote_something = True
- self._pos += len(arg)
- if is_unicode:
- arg = arg.encode(self.encoding, 'replace')
- self.stream.write(arg)
- if autoline:
- self.stream.write('\n')
- self._wrote_something = False
- self._pos = 0
- self._in_first_line = True
- except IOError, e:
- if e.errno == errno.EPIPE:
- raise StreamClosed(e)
- raise
- finally:
- if first_prefixes is not None:
- self.first_prefix = self.first_prefix[:-len(first_prefixes)]
- if later_prefixes is not None:
- self.later_prefix = self.later_prefix[:-len(later_prefixes)]
-
- def fg(self, color=None):
- return ''
-
- def bg(self, color=None):
- return ''
-
-
-# This is necessary because the curses module is optional (and we
-# should run on a very minimal python for bootstrapping).
-try:
- import curses
-except ImportError:
- TerminfoColor = None
-else:
- class TerminfoColor(object):
-
- def __init__(self, mode, color):
- self.mode = mode
- self.color = color
-
- def __call__(self, formatter):
- if self.color is None:
- formatter._current_colors[self.mode] = None
- res = formatter._color_reset
- # slight abuse of boolean True/False and 1/0 equivalence
- other = formatter._current_colors[not self.mode]
- if other is not None:
- res = res + other
- else:
- if self.mode == 0:
- default = curses.COLOR_WHITE
- else:
- default = curses.COLOR_BLACK
- color = formatter._colors.get(self.color, default)
- # The curses module currently segfaults if handed a
- # bogus template so check explicitly.
- template = formatter._set_color[self.mode]
- if template:
- res = curses.tparm(template, color)
- else:
- res = ''
- formatter._current_colors[self.mode] = res
- formatter.stream.write(res)
-
-
- class TerminfoCode(object):
- def __init__(self, value):
- self.value = value
-
- class TerminfoMode(TerminfoCode):
- def __call__(self, formatter):
- formatter._modes.add(self)
- formatter.stream.write(self.value)
-
- class TerminfoReset(TerminfoCode):
- def __call__(self, formatter):
- formatter._modes.clear()
- formatter.stream.write(self.value)
-
-
- class TerminfoFormatter(PlainTextFormatter):
-
- """Formatter writing to a tty, using terminfo to do colors."""
-
- _colors = dict(
- black = curses.COLOR_BLACK,
- red = curses.COLOR_RED,
- green = curses.COLOR_GREEN,
- yellow = curses.COLOR_YELLOW,
- blue = curses.COLOR_BLUE,
- magenta = curses.COLOR_MAGENTA,
- cyan = curses.COLOR_CYAN,
- white = curses.COLOR_WHITE)
-
- # Remapping of TERM setting to more capable equivalent.
- # Mainly used to force on the hardstatus (aka title bar updates)
- # capability for terminals that do not support this by default.
- term_alternates = {
- 'xterm': 'xterm+sl',
- 'screen': 'screen-s',
- }
-
- def __init__(self, stream, term=None, forcetty=False, encoding=None):
- """Initialize.
-
- @type stream: file-like object.
- @param stream: stream to output to, defaulting to C{sys.stdout}.
- @type term: string.
- @param term: terminal type, pulled from the environment if omitted.
- @type forcetty: bool
- @param forcetty: force output of colors even if the wrapped stream
- is not a tty.
- """
- PlainTextFormatter.__init__(self, stream, encoding=encoding)
- fd = stream.fileno()
- if term is None:
- # We only apply the remapping if we are guessing the
- # terminal type from the environment. If we get a term
- # type passed explicitly we just use it as-is (if the
- # caller wants the remap just doing the
- # term_alternates lookup there is easy enough.)
- term_env = os.environ.get('TERM')
- term_alt = self.term_alternates.get(term_env)
- for term in (term_alt, term_env, 'dumb'):
- if term is not None:
- try:
- curses.setupterm(fd=fd, term=term)
- except curses.error:
- pass
- else:
- break
- else:
- raise ValueError(
- 'no terminfo entries, not even for "dumb"?')
- else:
- # TODO maybe do something more useful than raising curses.error
- # if term is not in the terminfo db here?
- curses.setupterm(fd=fd, term=term)
- self.width = curses.tigetnum('cols')
- self.reset = TerminfoReset(curses.tigetstr('sgr0'))
- self.bold = TerminfoMode(curses.tigetstr('bold'))
- self.underline = TerminfoMode(curses.tigetstr('smul'))
- self._color_reset = curses.tigetstr('op')
- self._set_color = (
- curses.tigetstr('setaf'), curses.tigetstr('setab'))
- self._width = curses.tigetstr('cols')
- # [fg, bg]
- self._current_colors = [None, None]
- self._modes = set()
- self._pos = 0
-
- def fg(self, color=None):
- return TerminfoColor(0, color)
-
- def bg(self, color=None):
- return TerminfoColor(1, color)
-
- def write(self, *args, **kwargs):
- try:
- PlainTextFormatter.write(self, *args, **kwargs)
- if self._modes:
- self.reset(self)
- if self._current_colors != [None, None]:
- self._current_colors = [None, None]
- self.stream.write(self._color_reset)
- except IOError, e:
- if e.errno == errno.EPIPE:
- raise StreamClosed(e)
- raise
-
- def title(self, string):
- # I want to use curses.tigetflag('hs') here but at least
- # the screen-s entry defines a tsl and fsl string but does
- # not set the hs flag. So just check for the ability to
- # jump to and out of the status line, without checking if
- # the status line we're using exists.
- if curses.tigetstr('tsl') and curses.tigetstr('fsl'):
- self.stream.write(
- curses.tigetstr('tsl') + string + curses.tigetstr('fsl'))
-
-
-class ObserverFormatter(object):
-
- def __init__(self, real_formatter):
- self._formatter = real_formatter
-
- def write(self, *args):
- self._formatter.write(autoline=False, *args)
-
- __getattr__ = GetAttrProxy("_formatter")
-
-
-def get_formatter(stream):
- """TerminfoFormatter if the stream is a tty, else PlainTextFormatter."""
- if TerminfoColor is None:
- return PlainTextFormatter(stream)
- try:
- fd = stream.fileno()
- except AttributeError:
- pass
- else:
- # We do this instead of stream.isatty() because TerminfoFormatter
- # needs an fd to pass to curses, not just a filelike talking to a tty.
- if os.isatty(fd):
- try:
- return TerminfoFormatter(stream)
- except curses.error:
- # This happens if TERM is unset and possibly in more cases.
- # Just fall back to the PlainTextFormatter.
- pass
- return PlainTextFormatter(stream)
-
-
-def decorate_forced_wrapping(setting=True):
- def wrapped_func(func):
- def f(out, *args, **kwds):
- oldwrap = out.wrap
- out.wrap = setting
- try:
- return func(out, *args, **kwds)
- finally:
- out.wrap = oldwrap
- return f
- return wrapped_func
diff --git a/pkgcore/util/iterables.py b/pkgcore/util/iterables.py
deleted file mode 100644
index b87cd13e7..000000000
--- a/pkgcore/util/iterables.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from collections import deque
-
-class expandable_chain(object):
- """
- chained iterables, with the ability to add new iterables to the chain
- as long as the instance hasn't raise StopIteration already
- """
-
- __slot__ = ("iterables", "__weakref__")
-
- def __init__(self, *iterables):
- """
- accepts N iterables, must have at least one specified
- """
- self.iterables = deque()
- self.extend(iterables)
-
- def __iter__(self):
- return self
-
- def next(self):
- if self.iterables is not None:
- while self.iterables:
- try:
- return self.iterables[0].next()
- except StopIteration:
- self.iterables.popleft()
- self.iterables = None
- raise StopIteration()
-
- def append(self, iterable):
- """append an iterable to the chain to be consumed"""
- if self.iterables is None:
- raise StopIteration()
- self.iterables.append(iter(iterable))
-
- def appendleft(self, iterable):
- """prepend an iterable to in the chain"""
- if self.iterables is None:
- raise StopIteration()
- self.iterables.appendleft(iter(iterable))
-
- def extend(self, iterables):
- """extend multiple iterable to the chain to be consumed"""
- if self.iterables is None:
- raise StopIteration()
- self.iterables.extend(iter(x) for x in iterables)
-
- def extendleft(self, iterables):
- """prepend multiple iterables to the chain to be consumed"""
- if self.iterables is None:
- raise StopIteration()
- self.iterables.extendleft(iter(x) for x in iterables)
-
-
-class caching_iter(object):
- """
- On demand consumes from an iterable so as to appear like a tuple
- """
- __slots__ = ("iterable", "__weakref__", "cached_list", "sorter")
-
- def __init__(self, iterable, sorter=None):
- self.sorter = sorter
- self.iterable = iter(iterable)
- self.cached_list = []
-
- def __setitem__(self, key, val):
- raise TypeError("non modifiable")
-
- def __getitem__(self, index):
- existing_len = len(self.cached_list)
- if self.iterable is not None and self.sorter:
- self.cached_list.extend(self.iterable)
- self.cached_list = tuple(self.sorter(self.cached_list))
- self.iterable = self.sorter = None
- existing_len = len(self.cached_list)
-
- if index < 0:
- if self.iterable is not None:
- self.cached_list = tuple(self.cached_list + list(self.iterable))
- self.iterable = None
- existing_len = len(self.cached_list)
-
- index = existing_len + index
- if index < 0:
- raise IndexError("list index out of range")
-
- elif index >= existing_len - 1:
- if self.iterable is not None:
- try:
- self.cached_list.extend(self.iterable.next()
- for i in xrange(existing_len - index + 1))
- except StopIteration:
- # consumed, baby.
- self.iterable = None
- self.cached_list = tuple(self.cached_list)
- raise IndexError("list index out of range")
-
- return self.cached_list[index]
-
- def __cmp__(self, other):
- if self.iterable is not None:
- if self.sorter:
- self.cached_list.extend(self.iterable)
- self.cached_list = tuple(self.sorter(self.cached_list))
- self.sorter = None
- else:
- self.cached_list = tuple(self.cached_list + list(self.iterable))
- self.iterable = None
- return cmp(self.cached_list, other)
-
- def __nonzero__(self):
- if self.cached_list:
- return True
-
- if self.iterable:
- for x in self.iterable:
- self.cached_list.append(x)
- return True
- # if we've made it here... then nothing more in the iterable.
- self.iterable = self.sorter = None
- self.cached_list = ()
- return False
-
- def __len__(self):
- if self.iterable is not None:
- self.cached_list.extend(self.iterable)
- if self.sorter:
- self.cached_list = tuple(self.sorter(self.cached_list))
- self.sorter = None
- else:
- self.cached_list = tuple(self.cached_list)
- self.iterable = None
- return len(self.cached_list)
-
- def __iter__(self):
- if (self.sorter is not None and
- self.iterable is not None and
- len(self.cached_list) == 0):
- self.cached_list = tuple(self.sorter(self.iterable))
- self.iterable = self.sorter = None
-
- for x in self.cached_list:
- yield x
- if self.iterable is not None:
- for x in self.iterable:
- self.cached_list.append(x)
- yield x
- else:
- return
- self.iterable = None
- self.cached_list = tuple(self.cached_list)
-
- def __hash__(self):
- if self.iterable is not None:
- self.cached_list.extend(self.iterable)
- self.cached_list = tuple(self.cached_list)
- self.iterable = None
- return hash(self.cached_list)
-
- def __str__(self):
- return "iterable(%s), cached: %s" % (
- self.iterable, str(self.cached_list))
-
-def iter_sort(sorter, *iterables):
- """Merge a number of sorted iterables into a single sorted iterable.
-
- @type sorter: callable.
- @param sorter: function, passed a list of [element, iterable].
- @param iterables: iterables to consume from.
- B{Required} to yield in presorted order.
- """
- l = []
- for x in iterables:
- try:
- x = iter(x)
- l.append([x.next(), x])
- except StopIteration:
- pass
- if len(l) == 1:
- yield l[0][0]
- for x in l[0][1]:
- yield x
- return
- l = sorter(l)
- while l:
- yield l[0][0]
- for y in l[0][1]:
- l[0][0] = y
- break
- else:
- del l[0]
- if len(l) == 1:
- yield l[0][0]
- for x in l[0][1]:
- yield x
- break
- continue
- l = sorter(l)
diff --git a/pkgcore/util/klass.py b/pkgcore/util/klass.py
deleted file mode 100644
index f26821df1..000000000
--- a/pkgcore/util/klass.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from operator import attrgetter
-from pkgcore.util.caching import WeakInstMeta
-from collections import deque
-
-def native_GetAttrProxy(target):
- def reflected_getattr(self, attr):
- return getattr(getattr(self, target), attr)
- return reflected_getattr
-
-def native_contains(self, key):
- try:
- self[key]
- return True
- except KeyError:
- return False
-
-def native_get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
-
-attrlist_getter = attrgetter("__attr_comparison__")
-def native_generic_eq(inst1, inst2, sentinel=object()):
- if inst1 is inst2:
- return True
- for attr in attrlist_getter(inst1):
- if getattr(inst1, attr, sentinel) != \
- getattr(inst2, attr, sentinel):
- return False
- return True
-
-def native_generic_ne(inst1, inst2, sentinel=object()):
- if inst1 is inst2:
- return False
- for attr in attrlist_getter(inst1):
- if getattr(inst1, attr, sentinel) != \
- getattr(inst2, attr, sentinel):
- return True
- return False
-
-try:
- from pkgcore.util._klass import (GetAttrProxy, contains, get,
- generic_eq, generic_ne)
-except ImportError:
- GetAttrProxy = native_GetAttrProxy
- contains = native_contains
- get = native_get
- generic_eq = native_generic_eq
- generic_ne = native_generic_ne
-
-
-def generic_equality(name, bases, scope, real_type=type,
- eq=generic_eq, ne=generic_ne):
- attrlist = scope.pop("__attr_comparison__", None)
- if attrlist is None:
- raise TypeError("__attr_comparison__ must be in the classes scope")
- for x in attrlist:
- if not isinstance(x, str):
- raise TypeError("all members of attrlist must be strings- "
- " got %r %s" % (type(x), repr(x)))
-
- scope["__attr_comparison__"] = tuple(attrlist)
- scope.setdefault("__eq__", eq)
- scope.setdefault("__ne__", ne)
- return real_type(name, bases, scope)
-
-
-class chained_getter(object):
- def __metaclass__(name, bases, scope):
- return generic_equality(name, bases, scope, real_type=WeakInstMeta)
- __slots__ = ('namespace', 'chain')
- __fifo_cache__ = deque()
- __inst_caching__ = True
- __attr_comparison__ = ("namespace",)
-
- def __init__(self, namespace):
- self.namespace = namespace
- self.chain = map(attrgetter, namespace.split("."))
- if len(self.__fifo_cache__) > 10:
- self.__fifo_cache__.popleft()
- self.__fifo_cache__.append(self)
-
- def __hash__(self):
- return hash(self.namespace)
-
- def __call__(self, obj):
- o = obj
- for f in self.chain:
- o = f(o)
- return o
diff --git a/pkgcore/util/lists.py b/pkgcore/util/lists.py
deleted file mode 100644
index f1e78088c..000000000
--- a/pkgcore/util/lists.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Copyright: 2005-2006 Brian Harring <ferringb@gmail.com>
-# Copyright: ???? Time Peters, pulled from python cookbook.
-# License: GPL2
-
-"""
-sequence related operations
-"""
-
-from pkgcore.util.iterables import expandable_chain
-
-def unstable_unique(sequence):
- """
- lifted from python cookbook, credit: Tim Peters
- Return a list of the elements in s in arbitrary order, sans duplicates
- """
-
- n = len(sequence)
- # assume all elements are hashable, if so, it's linear
- try:
- return list(set(sequence))
- except TypeError:
- pass
-
- # so much for linear. abuse sort.
- try:
- t = sorted(sequence)
- except TypeError:
- pass
- else:
- assert n > 0
- last = t[0]
- lasti = i = 1
- while i < n:
- if t[i] != last:
- t[lasti] = last = t[i]
- lasti += 1
- i += 1
- return t[:lasti]
-
- # blah. back to original portage.unique_array
- u = []
- for x in sequence:
- if x not in u:
- u.append(x)
- return u
-
-def stable_unique(iterable):
- """
- return unique list from iterable, preserving ordering
- """
- return list(iter_stable_unique(iterable))
-
-def iter_stable_unique(iterable):
- """
- generator yielding unique elements from iterable, preserving ordering
- """
- s = set()
- for x in iterable:
- if x not in s:
- yield x
- s.add(x)
-
-def native_iflatten_instance(l, skip_flattening=(basestring,)):
- """
- collapse [(1),2] into [1,2]
-
- @param skip_flattening: list of classes to not descend through
- """
- if isinstance(l, skip_flattening):
- yield l
- return
- iters = expandable_chain(l)
- try:
- while True:
- x = iters.next()
- if hasattr(x, '__iter__') and not isinstance(x, skip_flattening):
- iters.appendleft(x)
- else:
- yield x
- except StopIteration:
- pass
-
-def native_iflatten_func(l, skip_func):
- """
- collapse [(1),2] into [1,2]
-
- @param skip_func: a callable that returns True when iflatten_func should
- descend no further
- """
- if skip_func(l):
- yield l
- return
- iters = expandable_chain(l)
- try:
- while True:
- x = iters.next()
- if hasattr(x, '__iter__') and not skip_func(x):
- iters.appendleft(x)
- else:
- yield x
- except StopIteration:
- pass
-
-try:
- # No name "readdir" in module osutils
- # pylint: disable-msg=E0611
- from pkgcore.util._lists import iflatten_instance, iflatten_func
- cpy_builtin = True
-except ImportError:
- cpy_builtin = False
- cpy_iflatten_instance = cpy_iflatten_func = None
- iflatten_instance = native_iflatten_instance
- iflatten_func = native_iflatten_func
-
-
-def iter_flatten(l, skip_flattening=(basestring,), skip_func=None):
- """Deprecated, call iflatten_instance or iflatten_func instead.
-
- @param skip_flattening: list of classes to not descend through
- @param skip_func: if None, skip_flattening is used- else it must be a
- callable that returns True when iter_flatten should descend no further.
- """
- import warnings
- warnings.warn('iter_flatten is deprecated, use iflatten_{instance,func}.')
- if skip_func is None:
- return iflatten_instance(l, skip_flattening)
- else:
- return iflatten_func(l, skip_func)
-
-
-class ChainedLists(object):
- """
- sequences chained together, without collapsing into a list
- """
- __slots__ = ("_lists", "__weakref__")
-
- def __init__(self, *lists):
- """
- all args must be sequences
- """
- # ensure they're iterable
- for x in lists:
- iter(x)
-
- if isinstance(lists, tuple):
- lists = list(lists)
- self._lists = lists
-
- def __len__(self):
- return sum(len(l) for l in self._lists)
-
- def __getitem__(self, idx):
- if idx < 0:
- idx += len(self)
- if idx < 0:
- raise IndexError
- for l in self._lists:
- l2 = len(l)
- if idx < l2:
- return l[idx]
- idx -= l2
- else:
- raise IndexError
-
- def __setitem__(self, idx, val):
- raise TypeError("not mutable")
-
- def __delitem__(self, idx):
- raise TypeError("not mutable")
-
- def __iter__(self):
- for l in self._lists:
- for x in l:
- yield x
-
- def __contains__(self, obj):
- return obj in iter(self)
-
- def __str__(self):
- return "[ %s ]" % ", ".join(str(l) for l in self._lists)
-
- def append(self, item):
- self._lists.append(item)
-
- def extend(self, items):
- self._lists.extend(items)
diff --git a/pkgcore/util/mappings.py b/pkgcore/util/mappings.py
deleted file mode 100644
index d89eb80f6..000000000
--- a/pkgcore/util/mappings.py
+++ /dev/null
@@ -1,414 +0,0 @@
-# Copyright: 2005-2007 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-miscellanious mapping/dict related classes
-"""
-
-import operator
-from itertools import imap, chain, ifilterfalse, izip
-from pkgcore.util.klass import get, contains
-from collections import deque
-
-
-class DictMixin(object):
- """
- new style class replacement for L{UserDict.DictMixin}
- designed around iter* methods rather then forcing lists as DictMixin does
- """
-
- __slots__ = ()
-
- __externally_mutable__ = True
-
- def __init__(self, iterable=[]):
- for k, v in iterable:
- self[k] = v
-
- def __iter__(self):
- return self.iterkeys()
-
- def keys(self):
- return list(self.iterkeys())
-
- def values(self):
- return list(self.itervalues())
-
- def items(self):
- return list(self.iteritems())
-
- def update(self, iterable):
- for k, v in iterable:
- self[k] = v
-
- get = get
- __contains__ = contains
-
- # default cmp actually operates based on key len comparison, oddly enough
- def __cmp__(self, other):
- for k1, k2 in izip(sorted(self), sorted(other)):
- c = cmp(k1, k2)
- if c != 0:
- return c
- c = cmp(self[k1], other[k2])
- if c != 0:
- return c
- c = cmp(len(self), len(other))
- return c
-
- def __eq__(self, other):
- return self.__cmp__(other) == 0
-
- def __ne__(self, other):
- return self.__cmp__(other) != 0
-
- def pop(self, key, *args):
- if not self.__externally_mutable__:
- raise AttributeError(self, "pop")
- if len(args) > 1:
- raise TypeError("pop expects at most 2 arguements, got %i" %
- len(args) + 1)
- try:
- val = self[key]
- del self[key]
- except KeyError:
- if args:
- return args[0]
- raise
- return val
-
- def setdefault(self, key, default=None):
- if not self.__externally_mutable__:
- raise AttributeError(self, "setdefault")
- if key in self:
- return self[key]
- self[key] = default
- return default
-
- def has_key(self, key):
- return key in self
-
- def iterkeys(self):
- raise NotImplementedError(self, "iterkeys")
-
- def itervalues(self):
- return imap(self.__getitem__, self)
-
- def iteritems(self):
- for k in self:
- yield k, self[k]
-
- def __getitem__(self, key):
- raise NotImplementedError(self, "__getitem__")
-
- def __setitem__(self, key, val):
- if not self.__externally_mutable__:
- raise AttributeError(self, "__setitem__")
- raise NotImplementedError(self, "__setitem__")
-
- def __delitem__(self, key):
- if not self.__externally_mutable__:
- raise AttributeError(self, "__delitem__")
- raise NotImplementedError(self, "__delitem__")
-
- def clear(self):
- if not self.__externally_mutable__:
- raise AttributeError(self, "clear")
- # crappy, override if faster method exists.
- map(self.__delitem__, self.keys())
-
- def __len__(self):
- c = 0
- for x in self:
- c += 1
- return c
-
- def popitem(self):
- if not self.__externally_mutable__:
- raise AttributeError(self, "popitem")
- # do it this way so python handles the stopiteration; faster
- for key, val in self.iteritems():
- del self[key]
- return key, val
- raise KeyError("container is empty")
-
-
-class LazyValDict(DictMixin):
-
- """
- Mapping that loads values via a callable
-
- given a function to get keys, and to look up the val for those keys, it'll
- lazy load key definitions, and values as requested
- """
- __slots__ = ("_keys", "_keys_func", "_vals", "_val_func")
- __externally_mutable__ = False
-
- def __init__(self, get_keys_func, get_val_func):
- """
- @param get_keys_func: either a container, or func to call to get keys.
- @param get_val_func: a callable that is JIT called
- with the key requested.
- """
- if not callable(get_val_func):
- raise TypeError("get_val_func isn't a callable")
- if hasattr(get_keys_func, "__iter__"):
- self._keys = get_keys_func
- self._keys_func = None
- else:
- if not callable(get_keys_func):
- raise TypeError(
- "get_keys_func isn't iterable nor is it callable")
- self._keys_func = get_keys_func
- self._val_func = get_val_func
- self._vals = {}
-
- def __getitem__(self, key):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- if key in self._vals:
- return self._vals[key]
- if key in self._keys:
- v = self._vals[key] = self._val_func(key)
- return v
- raise KeyError(key)
-
- def keys(self):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- return list(self._keys)
-
- def iterkeys(self):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- return iter(self._keys)
-
- def itervalues(self):
- return imap(self.__getitem__, self.iterkeys())
-
- def iteritems(self):
- return ((k, self[k]) for k in self.iterkeys())
-
- def __contains__(self, key):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- return key in self._keys
-
- def __len__(self):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- return len(self._keys)
-
-
-class LazyFullValLoadDict(LazyValDict):
-
- def __getitem__(self, key):
- if self._keys_func is not None:
- self._keys = set(self._keys_func())
- self._keys_func = None
- if key in self._vals:
- return self._vals[key]
- if key in self._keys:
- if self._val_func is not None:
- self._vals.update(self._val_func(self._keys))
- return self._vals[key]
- raise KeyError(key)
-
-
-class ProtectedDict(DictMixin):
-
- """
- Mapping wrapper storing changes to a dict without modifying the original.
-
- Changes are stored in a secondary dict, protecting the underlying
- mapping from changes.
- """
-
- __slots__ = ("orig", "new", "blacklist")
-
- def __init__(self, orig):
- self.orig = orig
- self.new = {}
- self.blacklist = {}
-
- def __setitem__(self, key, val):
- self.new[key] = val
- if key in self.blacklist:
- del self.blacklist[key]
-
- def __getitem__(self, key):
- if key in self.new:
- return self.new[key]
- if key in self.blacklist:
- raise KeyError(key)
- return self.orig[key]
-
- def __delitem__(self, key):
- if key in self.new:
- del self.new[key]
- self.blacklist[key] = True
- return
- elif key in self.orig:
- if key not in self.blacklist:
- self.blacklist[key] = True
- return
- raise KeyError(key)
-
- def iterkeys(self):
- for k in self.new.iterkeys():
- yield k
- for k in self.orig.iterkeys():
- if k not in self.blacklist and k not in self.new:
- yield k
-
- def __contains__(self, key):
- return key in self.new or (key not in self.blacklist and
- key in self.orig)
-
-
-class ImmutableDict(dict):
-
- """Immutable Dict, non changable after instantiating"""
-
- _hash_key_grabber = operator.itemgetter(0)
-
- def __delitem__(self, *args):
- raise TypeError("non modifiable")
-
- __setitem__ = __delitem__
- clear = __delitem__
- update = __delitem__
- pop = __delitem__
- popitem = __delitem__
- setdefault = __delitem__
-
- def __hash__(self):
- k = self.items()
- k.sort(key=self._hash_key_grabber)
- return hash(tuple(k))
-
- __delattr__ = __setitem__
- __setattr__ = __setitem__
-
-
-class IndeterminantDict(object):
-
- """A wrapped dict with constant defaults, and a function for other keys."""
-
- __slots__ = ("__initial", "__pull")
-
- def __init__(self, pull_func, starter_dict=None):
- object.__init__(self)
- if starter_dict is None:
- self.__initial = {}
- else:
- self.__initial = starter_dict
- self.__pull = pull_func
-
- def __getitem__(self, key):
- if key in self.__initial:
- return self.__initial[key]
- else:
- return self.__pull(key)
-
- def get(self, key, val=None):
- try:
- return self[key]
- except KeyError:
- return val
-
- def __hash__(self):
- raise TypeError("non hashable")
-
- def __delitem__(self, *args):
- raise TypeError("non modifiable")
-
- def pop(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
- clear = update = popitem = setdefault = __setitem__ = __delitem__
- __iter__ = keys = values = items = __len__ = __delitem__
- iteritems = iterkeys = itervalues = __delitem__
-
-
-class StackedDict(DictMixin):
-
- """A non modifiable dict that makes multiple dicts appear as one"""
-
- def __init__(self, *dicts):
- self._dicts = dicts
-
- def __getitem__(self, key):
- for x in self._dicts:
- if key in x:
- return x[key]
- raise KeyError(key)
-
- def iterkeys(self):
- s = set()
- for k in ifilterfalse(s.__contains__, chain(*map(iter, self._dicts))):
- s.add(k)
- yield k
-
- def __contains__(self, key):
- for x in self._dicts:
- if key in x:
- return True
- return False
-
- def __setitem__(self, *a):
- raise TypeError("non modifiable")
-
- __delitem__ = clear = __setitem__
-
-
-class OrderedDict(DictMixin):
-
- """Dict that preserves insertion ordering which is used for iteration ops"""
-
- __slots__ = ("_data", "_order")
-
- def __init__(self, iterable=()):
- self._order = deque()
- self._data = {}
- for k, v in iterable:
- self[k] = v
-
- def __setitem__(self, key, val):
- if key not in self:
- self._order.append(key)
- self._data[key] = val
-
- def __delitem__(self, key):
- del self._data[key]
-
- for idx, o in enumerate(self._order):
- if o == key:
- del self._order[idx]
- break
- else:
- raise AssertionError("orderdict lost it's internal ordering")
-
- def __getitem__(self, key):
- return self._data[key]
-
- def __len__(self):
- return len(self._order)
-
- def iterkeys(self):
- return iter(self._order)
-
- def clear(self):
- self._order = deque()
- self._data = {}
-
- def __contains__(self, key):
- return key in self._data
diff --git a/pkgcore/util/modules.py b/pkgcore/util/modules.py
deleted file mode 100644
index 7031f382f..000000000
--- a/pkgcore/util/modules.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Copyright: 2005 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-dynamic import functionality
-"""
-
-import sys
-
-class FailedImport(ImportError):
- def __init__(self, trg, e):
- ImportError.__init__(
- self, "Failed importing target '%s': '%s'" % (trg, e))
- self.trg, self.e = trg, e
-
-
-def load_module(name):
- """load 'name' module, throwing a FailedImport if __import__ fails"""
- if name in sys.modules:
- return sys.modules[name]
- try:
- m = __import__(name)
- nl = name.split('.')
- # __import__ returns nl[0]... so.
- nl.pop(0)
- while nl:
- m = getattr(m, nl[0])
- nl.pop(0)
- return m
- except (KeyboardInterrupt, SystemExit):
- raise
- except Exception, e:
- raise FailedImport(name, e)
-
-def load_attribute(name):
- """load a specific attribute, rather then a module"""
- i = name.rfind(".")
- if i == -1:
- raise FailedImport(name, "it isn't an attribute, it's a module")
- try:
- m = load_module(name[:i])
- m = getattr(m, name[i+1:])
- return m
- except (AttributeError, ImportError), e:
- raise FailedImport(name, e)
diff --git a/pkgcore/util/obj.py b/pkgcore/util/obj.py
deleted file mode 100644
index 10893e6da..000000000
--- a/pkgcore/util/obj.py
+++ /dev/null
@@ -1,206 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-from operator import attrgetter
-from pkgcore.util.currying import pre_curry
-from pkgcore.util.mappings import DictMixin
-
-def alias_method(getter, self, *a, **kwd):
- return getter(self.__obj__)(*a, **kwd)
-
-def instantiate(inst):
- delayed = object.__getattribute__(inst, "__delayed__")
- obj = delayed[1](*delayed[2], **delayed[3])
- object.__setattr__(inst, "__obj__", obj)
- object.__delattr__(inst, "__delayed__")
- return obj
-
-
-# we exempt __getattribute__ since we cover it already, same
-# for __new__ and __init__
-base_kls_descriptors = frozenset(
- ('__delattr__', '__doc__', '__hash__', '__reduce__',
- '__reduce_ex__', '__repr__', '__setattr__', '__str__'))
-
-class BaseDelayedObject(object):
- """
- delay actual instantiation
- """
-
- def __new__(cls, desired_kls, func, *a, **kwd):
- o = object.__new__(cls)
- object.__setattr__(o, "__delayed__", (desired_kls, func, a, kwd))
- object.__setattr__(o, "__obj__", None)
- return o
-
- def __getattribute__(self, attr):
- obj = object.__getattribute__(self, "__obj__")
- if obj is None:
- if attr == "__class__":
- return object.__getattribute__(self, "__delayed__")[0]
-
- obj = instantiate(self)
- # now we grow some attributes.
-
- if attr == "__obj__":
- # special casing for alias_method
- return obj
- return getattr(obj, attr)
-
- # special case the normal descriptors
- for x in base_kls_descriptors:
- locals()[x] = pre_curry(alias_method, attrgetter(x))
- del x
-
-
-# note that we ignore __getattribute__; we already handle it.
-kls_descriptors = frozenset([
- # simple comparison protocol...
- '__cmp__',
- # rich comparison protocol...
- '__le__', '__lt__', '__eq__', '__ne__', '__gt__', '__ge__',
- # unicode conversion
- '__unicode__',
- # truth...
- '__nonzero__',
- # container protocol...
- '__len__', '__getitem__', '__setitem__', '__delitem__',
- '__iter__', '__contains__',
- # deprecated sequence protocol bits...
- '__getslice__', '__setslice__', '__delslice__',
- # numeric...
- '__add__', '__sub__', '__mul__', '__floordiv__', '__mod__',
- '__divmod__', '__pow__', '__lshift__', '__rshift__',
- '__and__', '__xor__', '__or__', '__div__', '__truediv__',
- '__rad__', '__rsub__', '__rmul__', '__rdiv__', '__rtruediv__',
- '__rfloordiv__', '__rmod__', '__rdivmod__', '__rpow__',
- '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__',
- '__iadd__', '__isub__', '__imul__', '__idiv__', '__itruediv__',
- '__ifloordiv__', '__imod__', '__ipow__', '__ilshift__',
- '__irshift__', '__iand__', '__ixor__', '__ior__',
- '__neg__', '__pos__', '__abs__', '__invert__', '__complex__',
- '__int__', '__long__', '__float__', '__oct__', '__hex__',
- '__coerce__',
- # remaining...
- '__call__'])
-
-descriptor_overrides = dict((k, pre_curry(alias_method, attrgetter(k)))
- for k in kls_descriptors)
-
-method_cache = {}
-def make_kls(kls):
- special_descriptors = tuple(sorted(kls_descriptors.intersection(dir(kls))))
- if not special_descriptors:
- return BaseDelayedObject
- o = method_cache.get(special_descriptors, None)
- if o is None:
- class CustomDelayedObject(BaseDelayedObject):
- locals().update((k, descriptor_overrides[k])
- for k in special_descriptors)
-
- o = CustomDelayedObject
- method_cache[special_descriptors] = o
- return o
-
-def DelayedInstantiation_kls(kls, *a, **kwd):
- return DelayedInstantiation(kls, kls, *a, **kwd)
-
-class_cache = {}
-def DelayedInstantiation(resultant_kls, func, *a, **kwd):
- """Generate an objects that does not get initialized before it is used.
-
- The returned object can be passed around without triggering
- initialization. The first time it is actually used (an attribute
- is accessed) it is initialized once.
-
- The returned "fake" object cannot completely reliably mimic a
- builtin type. It will usually work but some corner cases may fail
- in confusing ways. Make sure to test if DelayedInstantiation has
- no unwanted side effects.
-
- @param resultant_kls: type object to fake an instance of.
- @param func: callable, the return value is used as initialized object.
- """
- o = class_cache.get(resultant_kls, None)
- if o is None:
- o = make_kls(resultant_kls)
- class_cache[resultant_kls] = o
- return o(resultant_kls, func, *a, **kwd)
-
-
-slotted_dict_cache = {}
-def make_SlottedDict_kls(keys):
- new_keys = tuple(sorted(keys))
- o = slotted_dict_cache.get(new_keys, None)
- if o is None:
- class SlottedDict(DictMixin):
- __slots__ = new_keys
- __externally_mutable__ = True
-
- def __init__(self, iterables=()):
- if iterables:
- self.update(iterables)
-
- __setitem__ = object.__setattr__
-
- def __getitem__(self, key):
- try:
- return getattr(self, key)
- except AttributeError:
- raise KeyError(key)
-
- def __delitem__(self, key):
- # Python does not raise anything if you delattr an
- # unset slot (works ok if __slots__ is not involved).
- try:
- getattr(self, key)
- except AttributeError:
- raise KeyError(key)
- delattr(self, key)
-
- def __iter__(self):
- for k in self.__slots__:
- if hasattr(self, k):
- yield k
-
- def iterkeys(self):
- return iter(self)
-
- def itervalues(self):
- for k in self:
- yield self[k]
-
- def get(self, key, default=None):
- return getattr(self, key, default)
-
- def pop(self, key, *a):
- # faster then the exception form...
- l = len(a)
- if l > 1:
- raise TypeError("pop accepts 1 or 2 args only")
- if hasattr(self, key):
- o = getattr(self, key)
- object.__delattr__(self, key)
- elif l:
- o = a[0]
- else:
- raise KeyError(key)
- return o
-
- def clear(self):
- for k in self:
- del self[k]
-
- def update(self, iterable):
- for k, v in iterable:
- setattr(self, k, v)
-
- def __len__(self):
- return len(self.keys())
-
- def __contains__(self, key):
- return hasattr(self, key)
-
- o = SlottedDict
- slotted_dict_cache[new_keys] = o
- return o
diff --git a/pkgcore/util/osutils/__init__.py b/pkgcore/util/osutils/__init__.py
deleted file mode 100644
index 123e5f26d..000000000
--- a/pkgcore/util/osutils/__init__.py
+++ /dev/null
@@ -1,341 +0,0 @@
-# Copyright 2004-2007 Brian Harring <ferringb@gmail.com>
-# Copyright 2006 Marien Zwart <marienz@gentoo.org>
-# Distributed under the terms of the GNU General Public License v2
-
-"""
-os specific utilities, FS access mainly
-
-"""
-
-import os, stat
-import fcntl
-import errno
-
-__all__ = ['abspath', 'abssymlink', 'ensure_dirs', 'join', 'pjoin', 'listdir_files',
- 'listdir_dirs', 'listdir', 'readlines', 'readfile']
-
-
-# No name '_readdir' in module osutils
-# pylint: disable-msg=E0611
-
-try:
- from pkgcore.util.osutils import _readdir as module
-except ImportError:
- from pkgcore.util.osutils import native_readdir as module
-
-listdir = module.listdir
-listdir_dirs = module.listdir_dirs
-listdir_files = module.listdir_files
-
-del module
-
-
-def ensure_dirs(path, gid=-1, uid=-1, mode=0755, minimal=True):
- """
- ensure dirs exist, creating as needed with (optional) gid, uid, and mode.
-
- be forewarned- if mode is specified to a mode that blocks the euid
- from accessing the dir, this code *will* try to create the dir.
- """
-
- try:
- st = os.stat(path)
- except OSError:
- base = os.path.sep
- try:
- um = os.umask(0)
- # if the dir perms would lack +wx, we have to force it
- force_temp_perms = ((mode & 0300) != 0300)
- resets = []
- apath = normpath(os.path.abspath(path))
- sticky_parent = False
-
- for directory in apath.split(os.path.sep):
- base = join(base, directory)
- try:
- try:
- st = os.stat(base)
- except TypeError:
- raise
- if not stat.S_ISDIR(st.st_mode):
- return False
-
- # if it's a subdir, we need +wx at least
- if apath != base:
- if ((st.st_mode & 0300) != 0300):
- try:
- os.chmod(base, (st.st_mode | 0300))
- except OSError:
- return False
- resets.append((base, st.st_mode))
- sticky_parent = (st.st_gid & stat.S_ISGID)
-
- except OSError:
- # nothing exists.
- try:
- if force_temp_perms:
- os.mkdir(base, 0700)
- resets.append((base, mode))
- else:
- os.mkdir(base, mode)
- if base == apath and sticky_parent:
- resets.append((base, mode))
- if gid != -1 or uid != -1:
- os.chown(base, uid, gid)
- except OSError:
- return False
-
- try:
- for base, m in reversed(resets):
- os.chmod(base, m)
- if uid != -1 or gid != -1:
- os.chown(base, uid, gid)
- except OSError:
- return False
-
- finally:
- os.umask(um)
- return True
- else:
- try:
- if ((gid != -1 and gid != st.st_gid) or
- (uid != -1 and uid != st.st_uid)):
- os.chown(path, uid, gid)
- if minimal:
- if mode != (st.st_mode & mode):
- os.chmod(path, st.st_mode | mode)
- elif mode != (st.st_mode & 07777):
- os.chmod(path, mode)
- except OSError:
- return False
- return True
-
-
-def abssymlink(symlink):
- """
- Read a symlink, resolving if it is relative, returning the absolute.
- If the path doesn't exist, OSError is thrown.
-
- @param symlink: filepath to resolve
- @return: resolve path.
- """
- mylink = os.readlink(symlink)
- if mylink[0] != '/':
- mydir = os.path.dirname(symlink)
- mylink = mydir+"/"+mylink
- return os.path.normpath(mylink)
-
-
-def abspath(path):
- """
- resolve a path absolutely, including symlink resolving.
- Throws OSError if the path doesn't exist
-
- Note that if it's a symlink and the target doesn't exist, it'll still
- return the target.
-
- @param path: filepath to resolve.
- @return: resolve path
- """
- path = os.path.abspath(path)
- try:
- return abssymlink(path)
- except OSError, e:
- if e.errno == errno.EINVAL:
- return path
- raise
-
-
-def native_normpath(mypath):
- """
- normalize path- //usr/bin becomes /usr/bin
- """
- newpath = os.path.normpath(mypath)
- if newpath.startswith('//'):
- return newpath[1:]
- return newpath
-
-native_join = os.path.join
-
-def native_readfile(mypath, none_on_missing=False):
- """
- read a file, returning the contents
-
- @param mypath: fs path for the file to read
- @param none_on_missing: whether to return None if the file is missing,
- else through the exception
- """
- try:
- return open(mypath, "r").read()
- except IOError, oe:
- if none_on_missing and oe.errno == errno.ENOENT:
- return None
- raise
-
-
-class readlines_iter(object):
- __slots__ = ("iterable", "mtime")
- def __init__(self, iterable, mtime):
- self.iterable = iterable
- self.mtime = mtime
-
- def __iter__(self):
- return self.iterable
-
-
-def native_readlines(mypath, strip_newlines=True, swallow_missing=False,
- none_on_missing=False):
- """
- read a file, yielding each line
-
- @param mypath: fs path for the file to read
- @param strip_newlines: strip trailing newlines?
- @param swallow_missing: throw an IOError if missing, or swallow it?
- @param none_on_missing: if the file is missing, return None, else
- if the file is missing return an empty iterable
- """
- try:
- f = open(mypath, "r")
- except IOError, ie:
- if ie.errno != errno.ENOENT or not swallow_missing:
- raise
- if none_on_missing:
- return None
- return readlines_iter(iter([]), None)
-
- if not strip_newlines:
- return readlines_iter(f, os.fstat(f.fileno()).st_mtime)
-
- return readlines_iter((x.strip("\n") for x in f), os.fstat(f.fileno()).st_mtime)
-
-
-try:
- from pkgcore.util.osutils._posix import normpath, join, readfile, readlines
-except ImportError:
- normpath = native_normpath
- join = native_join
- readfile = native_readfile
- readlines = native_readlines
-
-# convenience. importing join into a namespace is ugly, pjoin less so
-pjoin = join
-
-class LockException(Exception):
- """Base lock exception class"""
- def __init__(self, path, reason):
- Exception.__init__(self, path, reason)
- self.path, self.reason = path, reason
-
-class NonExistant(LockException):
- """Missing file/dir exception"""
- def __init__(self, path, reason=None):
- LockException.__init__(self, path, reason)
- def __str__(self):
- return (
- "Lock action for '%s' failed due to not being a valid dir/file %s"
- % (self.path, self.reason))
-
-class GenericFailed(LockException):
- """The fallback lock exception class.
-
- Covers perms, IOError's, and general whackyness.
- """
- def __str__(self):
- return "Lock action for '%s' failed due to '%s'" % (
- self.path, self.reason)
-
-
-# should the fd be left open indefinitely?
-# IMO, it shouldn't, but opening/closing everytime around is expensive
-
-
-class FsLock(object):
-
- """
- fnctl based locks
- """
-
- __slots__ = ("path", "fd", "create")
- def __init__(self, path, create=False):
- """
- @param path: fs path for the lock
- @param create: controls whether the file will be created
- if the file doesn't exist.
- If true, the base dir must exist, and it will create a file.
- If you want to lock via a dir, you have to ensure it exists
- (create doesn't suffice).
- @raise NonExistant: if no file/dir exists for that path,
- and cannot be created
- """
- self.path = path
- self.fd = None
- self.create = create
- if not create:
- if not os.path.exists(path):
- raise NonExistant(path)
-
- def _acquire_fd(self):
- if self.create:
- try:
- self.fd = os.open(self.path, os.R_OK|os.O_CREAT)
- except OSError, oe:
- raise GenericFailed(self.path, oe)
- else:
- try:
- self.fd = os.open(self.path, os.R_OK)
- except OSError, oe:
- raise NonExistant(self.path, oe)
-
- def _enact_change(self, flags, blocking):
- if self.fd is None:
- self._acquire_fd()
- # we do it this way, due to the fact try/except is a bit of a hit
- if not blocking:
- try:
- fcntl.flock(self.fd, flags|fcntl.LOCK_NB)
- except IOError, ie:
- if ie.errno == errno.EAGAIN:
- return False
- raise GenericFailed(self.path, ie)
- else:
- fcntl.flock(self.fd, flags)
- return True
-
- def acquire_write_lock(self, blocking=True):
- """
- Acquire an exclusive lock
-
- Note if you have a read lock, it implicitly upgrades atomically
-
- @param blocking: if enabled, don't return until we have the lock
- @return: True if lock is acquired, False if not.
- """
- return self._enact_change(fcntl.LOCK_EX, blocking)
-
- def acquire_read_lock(self, blocking=True):
- """
- Acquire a shared lock
-
- Note if you have a write lock, it implicitly downgrades atomically
-
- @param blocking: if enabled, don't return until we have the lock
- @return: True if lock is acquired, False if not.
- """
- return self._enact_change(fcntl.LOCK_SH, blocking)
-
- def release_write_lock(self):
- """Release an write/exclusive lock if held"""
- self._enact_change(fcntl.LOCK_UN, False)
-
- def release_read_lock(self):
- """Release an shared/read lock if held"""
- self._enact_change(fcntl.LOCK_UN, False)
-
- def __del__(self):
- # alright, it's 5:45am, yes this is weird code.
- try:
- if self.fd is not None:
- self.release_read_lock()
- finally:
- if self.fd is not None:
- os.close(self.fd)
diff --git a/pkgcore/util/osutils/native_readdir.py b/pkgcore/util/osutils/native_readdir.py
deleted file mode 100644
index 2d58507cc..000000000
--- a/pkgcore/util/osutils/native_readdir.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
-# Copyright: 2006 Marien Zwart <marienz@gentoo.org>
-# License: GPL2
-
-"""Wrapper for readdir which grabs file type from d_type."""
-
-
-import os, errno
-from stat import S_ISDIR, S_ISREG
-
-
-def listdir(path):
- return os.listdir(path)
-
-def stat_swallow_enoent(path, check, default=False, stat=os.stat):
- try:
- return check(stat(path).st_mode)
- except OSError, oe:
- if oe.errno == errno.ENOENT:
- return default
- raise
-
-def listdir_dirs(path, followSymlinks=True):
- pjoin = os.path.join
- scheck = S_ISDIR
- if followSymlinks:
- return [x for x in os.listdir(path) if
- stat_swallow_enoent(pjoin(path, x), scheck)]
- lstat = os.lstat
- return [x for x in os.listdir(path) if
- scheck(lstat(pjoin(path, x)).st_mode)]
-
-def listdir_files(path, followSymlinks=True):
- pjoin = os.path.join
- scheck = S_ISREG
- if followSymlinks:
- return [x for x in os.listdir(path) if
- stat_swallow_enoent(pjoin(path, x), scheck)]
- lstat = os.lstat
- return [x for x in os.listdir(path) if
- scheck(lstat(pjoin(path, x)).st_mode)]
diff --git a/pkgcore/util/parserestrict.py b/pkgcore/util/parserestrict.py
index ee3e69f1a..4f7f17cd7 100644
--- a/pkgcore/util/parserestrict.py
+++ b/pkgcore/util/parserestrict.py
@@ -7,10 +7,10 @@
@var parse_funcs: dict of the functions that are available.
"""
-from pkgcore.util.containers import InvertedContains
from pkgcore.restrictions import packages, values, util
from pkgcore.package import errors
from pkgcore.ebuild import atom, cpv, errors
+from snakeoil.containers import InvertedContains
class ParseError(ValueError):
diff --git a/pkgcore/util/pickling.py b/pkgcore/util/pickling.py
deleted file mode 100644
index fe1b39f60..000000000
--- a/pkgcore/util/pickling.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright: 2007 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-convenience module using cPickle if available, else failing back to pickle
-"""
-
-try:
- from cPickle import *
-except ImportError:
- from pickle import *
-
-def iter_stream(stream):
- try:
- while True:
- yield load(stream)
- except EOFError:
- pass
diff --git a/pkgcore/util/tar.py b/pkgcore/util/tar.py
deleted file mode 100644
index 8f581b155..000000000
--- a/pkgcore/util/tar.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-tar file access
-
-monkey patching of stdlib tarfile to reduce mem usage (33% reduction).
-
-note this is also racey; N threads trying an import, if they're after
-the *original* tarfile, they may inadvertantly get ours.
-"""
-
-import sys
-t = sys.modules.pop("tarfile", None)
-tarfile = __import__("tarfile")
-if t is not None:
- sys.modules["tarfile"] = t
-else:
- del sys.modules["tarfile"]
-del t
-# ok, we now have our own local copy to monkey patch
-
-class TarInfo(tarfile.TarInfo):
- __slots__ = (
- "name", "mode", "uid", "gid", "size", "mtime", "chksum", "type",
- "linkname", "uname", "gname", "devmajor", "devminor", "prefix",
- "offset", "offset_data", "buf", "sparse", "_link_target")
-
-tarfile.TarInfo = TarInfo
-# finished monkey patching. now to lift things out of our tarfile
-# module into this scope so from/import behaves properly.
-
-for x in tarfile.__all__:
- locals()[x] = getattr(tarfile, x)
-del x
diff --git a/pkgcore/util/weakrefs.py b/pkgcore/util/weakrefs.py
deleted file mode 100644
index 1ff99fba2..000000000
--- a/pkgcore/util/weakrefs.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-# Unused import
-# pylint: disable-msg=W0611
-
-try:
- # No name in module
- # pylint: disable-msg=E0611
- from pkgcore.util._caching import WeakValCache
-except ImportError:
- from weakref import WeakValueDictionary as WeakValCache
diff --git a/pkgcore/util/xml/__init__.py b/pkgcore/util/xml/__init__.py
deleted file mode 100644
index 93b154647..000000000
--- a/pkgcore/util/xml/__init__.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright: 2006 Brian Harring <ferringb@gmail.com>
-# License: GPL2
-
-"""
-indirection to load ElementTree
-"""
-# essentially... prefer cElementTree, then 2.5 bundled, then
-# elementtree, then 2.5 bundled, then our own bundled
-
-# "No name etree in module xml", "Reimport cElementTree"
-# pylint: disable-msg=E0611,W0404
-
-gotit = True
-try:
- import cElementTree as etree
-except ImportError:
- gotit = False
-if not gotit:
- try:
- from xml.etree import cElementTree as etree
- gotit = True
- except ImportError:
- pass
-if not gotit:
- try:
- from elementtree import ElementTree as etree
- gotit = True
- except ImportError:
- pass
-if not gotit:
- try:
- from xml.etree import ElementTree as etree
- gotit = True
- except ImportError:
- pass
-
-if not gotit:
- from pkgcore.util.xml import bundled_elementtree as etree
-del gotit
-
-def escape(string):
- """
- simple escaping of &, <, and >
- """
- return string.replace("&", "&amp;").replace("<", "&lt;").replace(">",
- "&gt;")
diff --git a/pkgcore/util/xml/bundled_elementtree.py b/pkgcore/util/xml/bundled_elementtree.py
deleted file mode 100644
index 5d8b1d3ea..000000000
--- a/pkgcore/util/xml/bundled_elementtree.py
+++ /dev/null
@@ -1,1254 +0,0 @@
-#
-# ElementTree
-# $Id: ElementTree.py 2326 2005-03-17 07:45:21Z fredrik $
-#
-# light-weight XML support for Python 1.5.2 and later.
-#
-# history:
-# 2001-10-20 fl created (from various sources)
-# 2001-11-01 fl return root from parse method
-# 2002-02-16 fl sort attributes in lexical order
-# 2002-04-06 fl TreeBuilder refactoring, added PythonDoc markup
-# 2002-05-01 fl finished TreeBuilder refactoring
-# 2002-07-14 fl added basic namespace support to ElementTree.write
-# 2002-07-25 fl added QName attribute support
-# 2002-10-20 fl fixed encoding in write
-# 2002-11-24 fl changed default encoding to ascii; fixed attribute encoding
-# 2002-11-27 fl accept file objects or file names for parse/write
-# 2002-12-04 fl moved XMLTreeBuilder back to this module
-# 2003-01-11 fl fixed entity encoding glitch for us-ascii
-# 2003-02-13 fl added XML literal factory
-# 2003-02-21 fl added ProcessingInstruction/PI factory
-# 2003-05-11 fl added tostring/fromstring helpers
-# 2003-05-26 fl added ElementPath support
-# 2003-07-05 fl added makeelement factory method
-# 2003-07-28 fl added more well-known namespace prefixes
-# 2003-08-15 fl fixed typo in ElementTree.findtext (Thomas Dartsch)
-# 2003-09-04 fl fall back on emulator if ElementPath is not installed
-# 2003-10-31 fl markup updates
-# 2003-11-15 fl fixed nested namespace bug
-# 2004-03-28 fl added XMLID helper
-# 2004-06-02 fl added default support to findtext
-# 2004-06-08 fl fixed encoding of non-ascii element/attribute names
-# 2004-08-23 fl take advantage of post-2.1 expat features
-# 2005-02-01 fl added iterparse implementation
-# 2005-03-02 fl fixed iterparse support for pre-2.2 versions
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh. All rights reserved.
-#
-# fredrik@pythonware.com
-# http://www.pythonware.com
-#
-# --------------------------------------------------------------------
-# The ElementTree toolkit is
-#
-# Copyright (c) 1999-2005 by Fredrik Lundh
-#
-# By obtaining, using, and/or copying this software and/or its
-# associated documentation, you agree that you have read, understood,
-# and will comply with the following terms and conditions:
-#
-# Permission to use, copy, modify, and distribute this software and
-# its associated documentation for any purpose and without fee is
-# hereby granted, provided that the above copyright notice appears in
-# all copies, and that both that copyright notice and this permission
-# notice appear in supporting documentation, and that the name of
-# Secret Labs AB or the author not be used in advertising or publicity
-# pertaining to distribution of the software without specific, written
-# prior permission.
-#
-# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
-# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
-# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
-# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
-# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
-# OF THIS SOFTWARE.
-# --------------------------------------------------------------------
-
-__all__ = [
- # public symbols
- "Comment",
- "dump",
- "Element", "ElementTree",
- "fromstring",
- "iselement", "iterparse",
- "parse",
- "PI", "ProcessingInstruction",
- "QName",
- "SubElement",
- "tostring",
- "TreeBuilder",
- "VERSION", "XML",
- "XMLTreeBuilder",
- ]
-
-##
-# The <b>Element</b> type is a flexible container object, designed to
-# store hierarchical data structures in memory. The type can be
-# described as a cross between a list and a dictionary.
-# <p>
-# Each element has a number of properties associated with it:
-# <ul>
-# <li>a <i>tag</i>. This is a string identifying what kind of data
-# this element represents (the element type, in other words).</li>
-# <li>a number of <i>attributes</i>, stored in a Python dictionary.</li>
-# <li>a <i>text</i> string.</li>
-# <li>an optional <i>tail</i> string.</li>
-# <li>a number of <i>child elements</i>, stored in a Python sequence</li>
-# </ul>
-#
-# To create an element instance, use the {@link #Element} or {@link
-# #SubElement} factory functions.
-# <p>
-# The {@link #ElementTree} class can be used to wrap an element
-# structure, and convert it from and to XML.
-##
-
-import string, sys, re
-
-class _SimpleElementPath:
- # emulate pre-1.2 find/findtext/findall behaviour
- def find(self, element, tag):
- for elem in element:
- if elem.tag == tag:
- return elem
- return None
- def findtext(self, element, tag, default=None):
- for elem in element:
- if elem.tag == tag:
- return elem.text or ""
- return default
- def findall(self, element, tag):
- if tag[:3] == ".//":
- return element.getiterator(tag[3:])
- result = []
- for elem in element:
- if elem.tag == tag:
- result.append(elem)
- return result
-
-try:
- import ElementPath
-except ImportError:
- # FIXME: issue warning in this case?
- ElementPath = _SimpleElementPath()
-
-# TODO: add support for custom namespace resolvers/default namespaces
-# TODO: add improved support for incremental parsing
-
-VERSION = "1.2.6"
-
-##
-# Internal element class. This class defines the Element interface,
-# and provides a reference implementation of this interface.
-# <p>
-# You should not create instances of this class directly. Use the
-# appropriate factory functions instead, such as {@link #Element}
-# and {@link #SubElement}.
-#
-# @see Element
-# @see SubElement
-# @see Comment
-# @see ProcessingInstruction
-
-class _ElementInterface:
- # <tag attrib>text<child/>...</tag>tail
-
- ##
- # (Attribute) Element tag.
-
- tag = None
-
- ##
- # (Attribute) Element attribute dictionary. Where possible, use
- # {@link #_ElementInterface.get},
- # {@link #_ElementInterface.set},
- # {@link #_ElementInterface.keys}, and
- # {@link #_ElementInterface.items} to access
- # element attributes.
-
- attrib = None
-
- ##
- # (Attribute) Text before first subelement. This is either a
- # string or the value None, if there was no text.
-
- text = None
-
- ##
- # (Attribute) Text after this element's end tag, but before the
- # next sibling element's start tag. This is either a string or
- # the value None, if there was no text.
-
- tail = None # text after end tag, if any
-
- def __init__(self, tag, attrib):
- self.tag = tag
- self.attrib = attrib
- self._children = []
-
- def __repr__(self):
- return "<Element %s at %x>" % (self.tag, id(self))
-
- ##
- # Creates a new element object of the same type as this element.
- #
- # @param tag Element tag.
- # @param attrib Element attributes, given as a dictionary.
- # @return A new element instance.
-
- def makeelement(self, tag, attrib):
- return Element(tag, attrib)
-
- ##
- # Returns the number of subelements.
- #
- # @return The number of subelements.
-
- def __len__(self):
- return len(self._children)
-
- ##
- # Returns the given subelement.
- #
- # @param index What subelement to return.
- # @return The given subelement.
- # @exception IndexError If the given element does not exist.
-
- def __getitem__(self, index):
- return self._children[index]
-
- ##
- # Replaces the given subelement.
- #
- # @param index What subelement to replace.
- # @param element The new element value.
- # @exception IndexError If the given element does not exist.
- # @exception AssertionError If element is not a valid object.
-
- def __setitem__(self, index, element):
- assert iselement(element)
- self._children[index] = element
-
- ##
- # Deletes the given subelement.
- #
- # @param index What subelement to delete.
- # @exception IndexError If the given element does not exist.
-
- def __delitem__(self, index):
- del self._children[index]
-
- ##
- # Returns a list containing subelements in the given range.
- #
- # @param start The first subelement to return.
- # @param stop The first subelement that shouldn't be returned.
- # @return A sequence object containing subelements.
-
- def __getslice__(self, start, stop):
- return self._children[start:stop]
-
- ##
- # Replaces a number of subelements with elements from a sequence.
- #
- # @param start The first subelement to replace.
- # @param stop The first subelement that shouldn't be replaced.
- # @param elements A sequence object with zero or more elements.
- # @exception AssertionError If a sequence member is not a valid object.
-
- def __setslice__(self, start, stop, elements):
- for element in elements:
- assert iselement(element)
- self._children[start:stop] = list(elements)
-
- ##
- # Deletes a number of subelements.
- #
- # @param start The first subelement to delete.
- # @param stop The first subelement to leave in there.
-
- def __delslice__(self, start, stop):
- del self._children[start:stop]
-
- ##
- # Adds a subelement to the end of this element.
- #
- # @param element The element to add.
- # @exception AssertionError If a sequence member is not a valid object.
-
- def append(self, element):
- assert iselement(element)
- self._children.append(element)
-
- ##
- # Inserts a subelement at the given position in this element.
- #
- # @param index Where to insert the new subelement.
- # @exception AssertionError If the element is not a valid object.
-
- def insert(self, index, element):
- assert iselement(element)
- self._children.insert(index, element)
-
- ##
- # Removes a matching subelement. Unlike the <b>find</b> methods,
- # this method compares elements based on identity, not on tag
- # value or contents.
- #
- # @param element What element to remove.
- # @exception ValueError If a matching element could not be found.
- # @exception AssertionError If the element is not a valid object.
-
- def remove(self, element):
- assert iselement(element)
- self._children.remove(element)
-
- ##
- # Returns all subelements. The elements are returned in document
- # order.
- #
- # @return A list of subelements.
- # @defreturn list of Element instances
-
- def getchildren(self):
- return self._children
-
- ##
- # Finds the first matching subelement, by tag name or path.
- #
- # @param path What element to look for.
- # @return The first matching element, or None if no element was found.
- # @defreturn Element or None
-
- def find(self, path):
- return ElementPath.find(self, path)
-
- ##
- # Finds text for the first matching subelement, by tag name or path.
- #
- # @param path What element to look for.
- # @param default What to return if the element was not found.
- # @return The text content of the first matching element, or the
- # default value no element was found. Note that if the element
- # has is found, but has no text content, this method returns an
- # empty string.
- # @defreturn string
-
- def findtext(self, path, default=None):
- return ElementPath.findtext(self, path, default)
-
- ##
- # Finds all matching subelements, by tag name or path.
- #
- # @param path What element to look for.
- # @return A list or iterator containing all matching elements,
- # in document order.
- # @defreturn list of Element instances
-
- def findall(self, path):
- return ElementPath.findall(self, path)
-
- ##
- # Resets an element. This function removes all subelements, clears
- # all attributes, and sets the text and tail attributes to None.
-
- def clear(self):
- self.attrib.clear()
- self._children = []
- self.text = self.tail = None
-
- ##
- # Gets an element attribute.
- #
- # @param key What attribute to look for.
- # @param default What to return if the attribute was not found.
- # @return The attribute value, or the default value, if the
- # attribute was not found.
- # @defreturn string or None
-
- def get(self, key, default=None):
- return self.attrib.get(key, default)
-
- ##
- # Sets an element attribute.
- #
- # @param key What attribute to set.
- # @param value The attribute value.
-
- def set(self, key, value):
- self.attrib[key] = value
-
- ##
- # Gets a list of attribute names. The names are returned in an
- # arbitrary order (just like for an ordinary Python dictionary).
- #
- # @return A list of element attribute names.
- # @defreturn list of strings
-
- def keys(self):
- return self.attrib.keys()
-
- ##
- # Gets element attributes, as a sequence. The attributes are
- # returned in an arbitrary order.
- #
- # @return A list of (name, value) tuples for all attributes.
- # @defreturn list of (string, string) tuples
-
- def items(self):
- return self.attrib.items()
-
- ##
- # Creates a tree iterator. The iterator loops over this element
- # and all subelements, in document order, and returns all elements
- # with a matching tag.
- # <p>
- # If the tree structure is modified during iteration, the result
- # is undefined.
- #
- # @param tag What tags to look for (default is to return all elements).
- # @return A list or iterator containing all the matching elements.
- # @defreturn list or iterator
-
- def getiterator(self, tag=None):
- nodes = []
- if tag == "*":
- tag = None
- if tag is None or self.tag == tag:
- nodes.append(self)
- for node in self._children:
- nodes.extend(node.getiterator(tag))
- return nodes
-
-# compatibility
-_Element = _ElementInterface
-
-##
-# Element factory. This function returns an object implementing the
-# standard Element interface. The exact class or type of that object
-# is implementation dependent, but it will always be compatible with
-# the {@link #_ElementInterface} class in this module.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param tag The element name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def Element(tag, attrib={}, **extra):
- attrib = attrib.copy()
- attrib.update(extra)
- return _ElementInterface(tag, attrib)
-
-##
-# Subelement factory. This function creates an element instance, and
-# appends it to an existing element.
-# <p>
-# The element name, attribute names, and attribute values can be
-# either 8-bit ASCII strings or Unicode strings.
-#
-# @param parent The parent element.
-# @param tag The subelement name.
-# @param attrib An optional dictionary, containing element attributes.
-# @param **extra Additional attributes, given as keyword arguments.
-# @return An element instance.
-# @defreturn Element
-
-def SubElement(parent, tag, attrib={}, **extra):
- attrib = attrib.copy()
- attrib.update(extra)
- element = parent.makeelement(tag, attrib)
- parent.append(element)
- return element
-
-##
-# Comment element factory. This factory function creates a special
-# element that will be serialized as an XML comment.
-# <p>
-# The comment string can be either an 8-bit ASCII string or a Unicode
-# string.
-#
-# @param text A string containing the comment string.
-# @return An element instance, representing a comment.
-# @defreturn Element
-
-def Comment(text=None):
- element = Element(Comment)
- element.text = text
- return element
-
-##
-# PI element factory. This factory function creates a special element
-# that will be serialized as an XML processing instruction.
-#
-# @param target A string containing the PI target.
-# @param text A string containing the PI contents, if any.
-# @return An element instance, representing a PI.
-# @defreturn Element
-
-def ProcessingInstruction(target, text=None):
- element = Element(ProcessingInstruction)
- element.text = target
- if text:
- element.text = element.text + " " + text
- return element
-
-PI = ProcessingInstruction
-
-##
-# QName wrapper. This can be used to wrap a QName attribute value, in
-# order to get proper namespace handling on output.
-#
-# @param text A string containing the QName value, in the form {uri}local,
-# or, if the tag argument is given, the URI part of a QName.
-# @param tag Optional tag. If given, the first argument is interpreted as
-# an URI, and this argument is interpreted as a local name.
-# @return An opaque object, representing the QName.
-
-class QName:
- def __init__(self, text_or_uri, tag=None):
- if tag:
- text_or_uri = "{%s}%s" % (text_or_uri, tag)
- self.text = text_or_uri
- def __str__(self):
- return self.text
- def __hash__(self):
- return hash(self.text)
- def __cmp__(self, other):
- if isinstance(other, QName):
- return cmp(self.text, other.text)
- return cmp(self.text, other)
-
-##
-# ElementTree wrapper class. This class represents an entire element
-# hierarchy, and adds some extra support for serialization to and from
-# standard XML.
-#
-# @param element Optional root element.
-# @keyparam file Optional file handle or name. If given, the
-# tree is initialized with the contents of this XML file.
-
-class ElementTree:
-
- def __init__(self, element=None, file=None):
- assert element is None or iselement(element)
- self._root = element # first node
- if file:
- self.parse(file)
-
- ##
- # Gets the root element for this tree.
- #
- # @return An element instance.
- # @defreturn Element
-
- def getroot(self):
- return self._root
-
- ##
- # Replaces the root element for this tree. This discards the
- # current contents of the tree, and replaces it with the given
- # element. Use with care.
- #
- # @param element An element instance.
-
- def _setroot(self, element):
- assert iselement(element)
- self._root = element
-
- ##
- # Loads an external XML document into this element tree.
- #
- # @param source A file name or file object.
- # @param parser An optional parser instance. If not given, the
- # standard {@link XMLTreeBuilder} parser is used.
- # @return The document root element.
- # @defreturn Element
-
- def parse(self, source, parser=None):
- if not hasattr(source, "read"):
- source = open(source, "rb")
- if not parser:
- parser = XMLTreeBuilder()
- while 1:
- data = source.read(32768)
- if not data:
- break
- parser.feed(data)
- self._root = parser.close()
- return self._root
-
- ##
- # Creates a tree iterator for the root element. The iterator loops
- # over all elements in this tree, in document order.
- #
- # @param tag What tags to look for (default is to return all elements)
- # @return An iterator.
- # @defreturn iterator
-
- def getiterator(self, tag=None):
- assert self._root is not None
- return self._root.getiterator(tag)
-
- ##
- # Finds the first toplevel element with given tag.
- # Same as getroot().find(path).
- #
- # @param path What element to look for.
- # @return The first matching element, or None if no element was found.
- # @defreturn Element or None
-
- def find(self, path):
- assert self._root is not None
- if path[:1] == "/":
- path = "." + path
- return self._root.find(path)
-
- ##
- # Finds the element text for the first toplevel element with given
- # tag. Same as getroot().findtext(path).
- #
- # @param path What toplevel element to look for.
- # @param default What to return if the element was not found.
- # @return The text content of the first matching element, or the
- # default value no element was found. Note that if the element
- # has is found, but has no text content, this method returns an
- # empty string.
- # @defreturn string
-
- def findtext(self, path, default=None):
- assert self._root is not None
- if path[:1] == "/":
- path = "." + path
- return self._root.findtext(path, default)
-
- ##
- # Finds all toplevel elements with the given tag.
- # Same as getroot().findall(path).
- #
- # @param path What element to look for.
- # @return A list or iterator containing all matching elements,
- # in document order.
- # @defreturn list of Element instances
-
- def findall(self, path):
- assert self._root is not None
- if path[:1] == "/":
- path = "." + path
- return self._root.findall(path)
-
- ##
- # Writes the element tree to a file, as XML.
- #
- # @param file A file name, or a file object opened for writing.
- # @param encoding Optional output encoding (default is US-ASCII).
-
- def write(self, file, encoding="us-ascii"):
- assert self._root is not None
- if not hasattr(file, "write"):
- file = open(file, "wb")
- if not encoding:
- encoding = "us-ascii"
- elif encoding != "utf-8" and encoding != "us-ascii":
- file.write("<?xml version='1.0' encoding='%s'?>\n" % encoding)
- self._write(file, self._root, encoding, {})
-
- def _write(self, file, node, encoding, namespaces):
- # write XML to file
- tag = node.tag
- if tag is Comment:
- file.write("<!-- %s -->" % _escape_cdata(node.text, encoding))
- elif tag is ProcessingInstruction:
- file.write("<?%s?>" % _escape_cdata(node.text, encoding))
- else:
- items = node.items()
- xmlns_items = [] # new namespaces in this scope
- try:
- if isinstance(tag, QName) or tag[:1] == "{":
- tag, xmlns = fixtag(tag, namespaces)
- if xmlns: xmlns_items.append(xmlns)
- except TypeError:
- _raise_serialization_error(tag)
- file.write("<" + _encode(tag, encoding))
- if items or xmlns_items:
- items.sort() # lexical order
- for k, v in items:
- try:
- if isinstance(k, QName) or k[:1] == "{":
- k, xmlns = fixtag(k, namespaces)
- if xmlns: xmlns_items.append(xmlns)
- except TypeError:
- _raise_serialization_error(k)
- try:
- if isinstance(v, QName):
- v, xmlns = fixtag(v, namespaces)
- if xmlns: xmlns_items.append(xmlns)
- except TypeError:
- _raise_serialization_error(v)
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
- _escape_attrib(v, encoding)))
- for k, v in xmlns_items:
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
- _escape_attrib(v, encoding)))
- if node.text or len(node):
- file.write(">")
- if node.text:
- file.write(_escape_cdata(node.text, encoding))
- for n in node:
- self._write(file, n, encoding, namespaces)
- file.write("</" + _encode(tag, encoding) + ">")
- else:
- file.write(" />")
- for k, v in xmlns_items:
- del namespaces[v]
- if node.tail:
- file.write(_escape_cdata(node.tail, encoding))
-
-# --------------------------------------------------------------------
-# helpers
-
-##
-# Checks if an object appears to be a valid element object.
-#
-# @param An element instance.
-# @return A true value if this is an element object.
-# @defreturn flag
-
-def iselement(element):
- # FIXME: not sure about this; might be a better idea to look
- # for tag/attrib/text attributes
- return isinstance(element, _ElementInterface) or hasattr(element, "tag")
-
-##
-# Writes an element tree or element structure to sys.stdout. This
-# function should be used for debugging only.
-# <p>
-# The exact output format is implementation dependent. In this
-# version, it's written as an ordinary XML file.
-#
-# @param elem An element tree or an individual element.
-
-def dump(elem):
- # debugging
- if not isinstance(elem, ElementTree):
- elem = ElementTree(elem)
- elem.write(sys.stdout)
- tail = elem.getroot().tail
- if not tail or tail[-1] != "\n":
- sys.stdout.write("\n")
-
-def _encode(s, encoding):
- try:
- return s.encode(encoding)
- except AttributeError:
- return s # 1.5.2: assume the string uses the right encoding
-
-if sys.version[:3] == "1.5":
- _escape = re.compile(r"[&<>\"\x80-\xff]+") # 1.5.2
-else:
- _escape = re.compile(eval(r'u"[&<>\"\u0080-\uffff]+"'))
-
-_escape_map = {
- "&": "&amp;",
- "<": "&lt;",
- ">": "&gt;",
- '"': "&quot;",
-}
-
-_namespace_map = {
- # "well-known" namespace prefixes
- "http://www.w3.org/XML/1998/namespace": "xml",
- "http://www.w3.org/1999/xhtml": "html",
- "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
- "http://schemas.xmlsoap.org/wsdl/": "wsdl",
-}
-
-def _raise_serialization_error(text):
- raise TypeError(
- "cannot serialize %r (type %s)" % (text, type(text).__name__)
- )
-
-def _encode_entity(text, pattern=_escape):
- # map reserved and non-ascii characters to numerical entities
- def escape_entities(m, map=_escape_map):
- out = []
- append = out.append
- for char in m.group():
- text = map.get(char)
- if text is None:
- text = "&#%d;" % ord(char)
- append(text)
- return string.join(out, "")
- try:
- return _encode(pattern.sub(escape_entities, text), "ascii")
- except TypeError:
- _raise_serialization_error(text)
-
-#
-# the following functions assume an ascii-compatible encoding
-# (or "utf-16")
-
-def _escape_cdata(text, encoding=None, replace=string.replace):
- # escape character data
- try:
- if encoding:
- try:
- text = _encode(text, encoding)
- except UnicodeError:
- return _encode_entity(text)
- text = replace(text, "&", "&amp;")
- text = replace(text, "<", "&lt;")
- text = replace(text, ">", "&gt;")
- return text
- except (TypeError, AttributeError):
- _raise_serialization_error(text)
-
-def _escape_attrib(text, encoding=None, replace=string.replace):
- # escape attribute value
- try:
- if encoding:
- try:
- text = _encode(text, encoding)
- except UnicodeError:
- return _encode_entity(text)
- text = replace(text, "&", "&amp;")
- text = replace(text, "'", "&apos;") # FIXME: overkill
- text = replace(text, "\"", "&quot;")
- text = replace(text, "<", "&lt;")
- text = replace(text, ">", "&gt;")
- return text
- except (TypeError, AttributeError):
- _raise_serialization_error(text)
-
-def fixtag(tag, namespaces):
- # given a decorated tag (of the form {uri}tag), return prefixed
- # tag and namespace declaration, if any
- if isinstance(tag, QName):
- tag = tag.text
- namespace_uri, tag = string.split(tag[1:], "}", 1)
- prefix = namespaces.get(namespace_uri)
- if prefix is None:
- prefix = _namespace_map.get(namespace_uri)
- if prefix is None:
- prefix = "ns%d" % len(namespaces)
- namespaces[namespace_uri] = prefix
- if prefix == "xml":
- xmlns = None
- else:
- xmlns = ("xmlns:%s" % prefix, namespace_uri)
- else:
- xmlns = None
- return "%s:%s" % (prefix, tag), xmlns
-
-##
-# Parses an XML document into an element tree.
-#
-# @param source A filename or file object containing XML data.
-# @param parser An optional parser instance. If not given, the
-# standard {@link XMLTreeBuilder} parser is used.
-# @return An ElementTree instance
-
-def parse(source, parser=None):
- tree = ElementTree()
- tree.parse(source, parser)
- return tree
-
-##
-# Parses an XML document into an element tree incrementally, and reports
-# what's going on to the user.
-#
-# @param source A filename or file object containing XML data.
-# @param events A list of events to report back. If omitted, only "end"
-# events are reported.
-# @return A (event, elem) iterator.
-
-class iterparse:
-
- def __init__(self, source, events=None):
- if not hasattr(source, "read"):
- source = open(source, "rb")
- self._file = source
- self._events = []
- self._index = 0
- self.root = self._root = None
- self._parser = XMLTreeBuilder()
- # wire up the parser for event reporting
- parser = self._parser._parser
- append = self._events.append
- if events is None:
- events = ["end"]
- for event in events:
- if event == "start":
- try:
- parser.ordered_attributes = 1
- parser.specified_attributes = 1
- def handler(tag, attrib_in, event=event, append=append,
- start=self._parser._start_list):
- append((event, start(tag, attrib_in)))
- parser.StartElementHandler = handler
- except AttributeError:
- def handler(tag, attrib_in, event=event, append=append,
- start=self._parser._start):
- append((event, start(tag, attrib_in)))
- parser.StartElementHandler = handler
- elif event == "end":
- def handler(tag, event=event, append=append,
- end=self._parser._end):
- append((event, end(tag)))
- parser.EndElementHandler = handler
- elif event == "start-ns":
- def handler(prefix, uri, event=event, append=append):
- try:
- uri = _encode(uri, "ascii")
- except UnicodeError:
- pass
- append((event, (prefix or "", uri)))
- parser.StartNamespaceDeclHandler = handler
- elif event == "end-ns":
- def handler(prefix, event=event, append=append):
- append((event, None))
- parser.EndNamespaceDeclHandler = handler
-
- def next(self):
- while 1:
- try:
- item = self._events[self._index]
- except IndexError:
- if self._parser is None:
- self.root = self._root
- try:
- raise StopIteration
- except NameError:
- raise IndexError
- # load event buffer
- del self._events[:]
- self._index = 0
- data = self._file.read(16384)
- if data:
- self._parser.feed(data)
- else:
- self._root = self._parser.close()
- self._parser = None
- else:
- self._index = self._index + 1
- return item
-
- try:
- iter
- def __iter__(self):
- return self
- except NameError:
- def __getitem__(self, index):
- return self.next()
-
-##
-# Parses an XML document from a string constant. This function can
-# be used to embed "XML literals" in Python code.
-#
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-def XML(text):
- parser = XMLTreeBuilder()
- parser.feed(text)
- return parser.close()
-
-##
-# Parses an XML document from a string constant, and also returns
-# a dictionary which maps from element id:s to elements.
-#
-# @param source A string containing XML data.
-# @return A tuple containing an Element instance and a dictionary.
-# @defreturn (Element, dictionary)
-
-def XMLID(text):
- parser = XMLTreeBuilder()
- parser.feed(text)
- tree = parser.close()
- ids = {}
- for elem in tree.getiterator():
- id = elem.get("id")
- if id:
- ids[id] = elem
- return tree, ids
-
-##
-# Parses an XML document from a string constant. Same as {@link #XML}.
-#
-# @def fromstring(text)
-# @param source A string containing XML data.
-# @return An Element instance.
-# @defreturn Element
-
-fromstring = XML
-
-##
-# Generates a string representation of an XML element, including all
-# subelements.
-#
-# @param element An Element instance.
-# @return An encoded string containing the XML data.
-# @defreturn string
-
-def tostring(element, encoding=None):
- class dummy:
- pass
- data = []
- file = dummy()
- file.write = data.append
- ElementTree(element).write(file, encoding)
- return string.join(data, "")
-
-##
-# Generic element structure builder. This builder converts a sequence
-# of {@link #TreeBuilder.start}, {@link #TreeBuilder.data}, and {@link
-# #TreeBuilder.end} method calls to a well-formed element structure.
-# <p>
-# You can use this class to build an element structure using a custom XML
-# parser, or a parser for some other XML-like format.
-#
-# @param element_factory Optional element factory. This factory
-# is called to create new Element instances, as necessary.
-
-class TreeBuilder:
-
- def __init__(self, element_factory=None):
- self._data = [] # data collector
- self._elem = [] # element stack
- self._last = None # last element
- self._tail = None # true if we're after an end tag
- if element_factory is None:
- element_factory = _ElementInterface
- self._factory = element_factory
-
- ##
- # Flushes the parser buffers, and returns the toplevel documen
- # element.
- #
- # @return An Element instance.
- # @defreturn Element
-
- def close(self):
- assert len(self._elem) == 0, "missing end tags"
- assert self._last is not None, "missing toplevel element"
- return self._last
-
- def _flush(self):
- if self._data:
- if self._last is not None:
- text = string.join(self._data, "")
- if self._tail:
- assert self._last.tail is None, "internal error (tail)"
- self._last.tail = text
- else:
- assert self._last.text is None, "internal error (text)"
- self._last.text = text
- self._data = []
-
- ##
- # Adds text to the current element.
- #
- # @param data A string. This should be either an 8-bit string
- # containing ASCII text, or a Unicode string.
-
- def data(self, data):
- self._data.append(data)
-
- ##
- # Opens a new element.
- #
- # @param tag The element name.
- # @param attrib A dictionary containing element attributes.
- # @return The opened element.
- # @defreturn Element
-
- def start(self, tag, attrs):
- self._flush()
- self._last = elem = self._factory(tag, attrs)
- if self._elem:
- self._elem[-1].append(elem)
- self._elem.append(elem)
- self._tail = 0
- return elem
-
- ##
- # Closes the current element.
- #
- # @param tag The element name.
- # @return The closed element.
- # @defreturn Element
-
- def end(self, tag):
- self._flush()
- self._last = self._elem.pop()
- assert self._last.tag == tag,\
- "end tag mismatch (expected %s, got %s)" % (
- self._last.tag, tag)
- self._tail = 1
- return self._last
-
-##
-# Element structure builder for XML source data, based on the
-# <b>expat</b> parser.
-#
-# @keyparam target Target object. If omitted, the builder uses an
-# instance of the standard {@link #TreeBuilder} class.
-# @keyparam html Predefine HTML entities. This flag is not supported
-# by the current implementation.
-# @see #ElementTree
-# @see #TreeBuilder
-
-class XMLTreeBuilder:
-
- def __init__(self, html=0, target=None):
- try:
- from xml.parsers import expat
- except ImportError:
- raise ImportError(
- "No module named expat; use SimpleXMLTreeBuilder instead"
- )
- self._parser = parser = expat.ParserCreate(None, "}")
- if target is None:
- target = TreeBuilder()
- self._target = target
- self._names = {} # name memo cache
- # callbacks
- parser.DefaultHandlerExpand = self._default
- parser.StartElementHandler = self._start
- parser.EndElementHandler = self._end
- parser.CharacterDataHandler = self._data
- # let expat do the buffering, if supported
- try:
- self._parser.buffer_text = 1
- except AttributeError:
- pass
- # use new-style attribute handling, if supported
- try:
- self._parser.ordered_attributes = 1
- self._parser.specified_attributes = 1
- parser.StartElementHandler = self._start_list
- except AttributeError:
- pass
- encoding = None
- if not parser.returns_unicode:
- encoding = "utf-8"
- # target.xml(encoding, None)
- self._doctype = None
- self.entity = {}
-
- def _fixtext(self, text):
- # convert text string to ascii, if possible
- try:
- return _encode(text, "ascii")
- except UnicodeError:
- return text
-
- def _fixname(self, key):
- # expand qname, and convert name string to ascii, if possible
- try:
- name = self._names[key]
- except KeyError:
- name = key
- if "}" in name:
- name = "{" + name
- self._names[key] = name = self._fixtext(name)
- return name
-
- def _start(self, tag, attrib_in):
- fixname = self._fixname
- tag = fixname(tag)
- attrib = {}
- for key, value in attrib_in.items():
- attrib[fixname(key)] = self._fixtext(value)
- return self._target.start(tag, attrib)
-
- def _start_list(self, tag, attrib_in):
- fixname = self._fixname
- tag = fixname(tag)
- attrib = {}
- if attrib_in:
- for i in xrange(0, len(attrib_in), 2):
- attrib[fixname(attrib_in[i])] = self._fixtext(attrib_in[i+1])
- return self._target.start(tag, attrib)
-
- def _data(self, text):
- return self._target.data(self._fixtext(text))
-
- def _end(self, tag):
- return self._target.end(self._fixname(tag))
-
- def _default(self, text):
- prefix = text[:1]
- if prefix == "&":
- # deal with undefined entities
- try:
- self._target.data(self.entity[text[1:-1]])
- except KeyError:
- from xml.parsers import expat
- raise expat.error(
- "undefined entity %s: line %d, column %d" %
- (text, self._parser.ErrorLineNumber,
- self._parser.ErrorColumnNumber)
- )
- elif prefix == "<" and text[:9] == "<!DOCTYPE":
- self._doctype = [] # inside a doctype declaration
- elif self._doctype is not None:
- # parse doctype contents
- if prefix == ">":
- self._doctype = None
- return
- text = string.strip(text)
- if not text:
- return
- self._doctype.append(text)
- n = len(self._doctype)
- if n > 2:
- type = self._doctype[1]
- if type == "PUBLIC" and n == 4:
- name, type, pubid, system = self._doctype
- elif type == "SYSTEM" and n == 3:
- name, type, system = self._doctype
- pubid = None
- else:
- return
- if pubid:
- pubid = pubid[1:-1]
- self.doctype(name, pubid, system[1:-1])
- self._doctype = None
-
- ##
- # Handles a doctype declaration.
- #
- # @param name Doctype name.
- # @param pubid Public identifier.
- # @param system System identifier.
-
- def doctype(self, name, pubid, system):
- pass
-
- ##
- # Feeds data to the parser.
- #
- # @param data Encoded data.
-
- def feed(self, data):
- self._parser.Parse(data, 0)
-
- ##
- # Finishes feeding data to the parser.
- #
- # @return An element structure.
- # @defreturn Element
-
- def close(self):
- self._parser.Parse("", 1) # end of data
- tree = self._target.close()
- del self._target, self._parser # get rid of circular references
- return tree
diff --git a/pkgcore/vdb/__init__.py b/pkgcore/vdb/__init__.py
index 495641a28..6ffee723f 100644
--- a/pkgcore/vdb/__init__.py
+++ b/pkgcore/vdb/__init__.py
@@ -4,7 +4,7 @@
from pkgcore.restrictions.packages import OrRestriction
from pkgcore.repository import multiplex, virtual
from pkgcore.vdb.ondisk import tree as vdb_repository
-from pkgcore.util.currying import partial
+from snakeoil.currying import partial
def _grab_virtuals(parent_repo):
virtuals = {}
diff --git a/pkgcore/vdb/contents.py b/pkgcore/vdb/contents.py
index 5cd48d19c..ffea183c6 100644
--- a/pkgcore/vdb/contents.py
+++ b/pkgcore/vdb/contents.py
@@ -3,13 +3,14 @@
from pkgcore.fs.contents import contentsSet
from pkgcore.fs import fs
-from pkgcore.util.file import AtomicWriteFile
from pkgcore.interfaces import data_source
-from pkgcore.util.compatibility import any
-from pkgcore.util.demandload import demandload
+
+from snakeoil.fileutils import AtomicWriteFile
+from snakeoil.compatibility import any
+from snakeoil.demandload import demandload
demandload(globals(), "os stat errno "
- "pkgcore.util.osutils:readlines "
- "pkgcore.chksum:get_handler ")
+ "pkgcore.chksum:get_handler "
+ "snakeoil.osutils:readlines ")
class LookupFsDev(fs.fsDev):
@@ -140,7 +141,7 @@ class ContentsFile(contentsSet):
for obj in sorted(self):
if isinstance(obj, fs.fsFile):
- s = " ".join(("obj", obj.location,
+ s = " ".join(("obj", obj.location,
md5_handler.long2str(obj.chksums["md5"]),
str(long(obj.mtime))))
diff --git a/pkgcore/vdb/ondisk.py b/pkgcore/vdb/ondisk.py
index 8c59f0b65..03c555fad 100644
--- a/pkgcore/vdb/ondisk.py
+++ b/pkgcore/vdb/ondisk.py
@@ -2,28 +2,28 @@
# License: GPL2
import os, stat, errno, shutil
-from pkgcore.repository import prototype, errors
-#needed to grab the PN
-from pkgcore.ebuild.cpv import CPV as cpv
-from pkgcore.util.osutils import ensure_dirs, pjoin
-from pkgcore.util.mappings import IndeterminantDict
-from pkgcore.util.currying import partial
+from pkgcore.repository import prototype, errors
from pkgcore.vdb.contents import ContentsFile
from pkgcore.vdb import virtuals
from pkgcore.plugin import get_plugin
from pkgcore.interfaces import repo as repo_interfaces
from pkgcore.interfaces import data_source
-from pkgcore.util.osutils import listdir_dirs, readfile
from pkgcore.repository import multiplex
-from pkgcore.util import bzip2
from pkgcore.config import ConfigHint
+#needed to grab the PN
+from pkgcore.ebuild.cpv import CPV as cpv
-from pkgcore.util.demandload import demandload
+from snakeoil.osutils import ensure_dirs, pjoin
+from snakeoil.mappings import IndeterminantDict
+from snakeoil.currying import partial
+from snakeoil.osutils import listdir_dirs, readfile
+from pkgcore.util import bzip2
+from snakeoil.demandload import demandload
demandload(globals(),
"time "
"pkgcore.ebuild:conditionals "
- "pkgcore.const "
+ "pkgcore.const:VERSION "
"pkgcore.ebuild:triggers "
"pkgcore.log:logger "
"pkgcore.fs.ops:change_offset_rewriter "
@@ -287,7 +287,7 @@ class install(repo_interfaces.livefs_install):
#finally, we mark who made this.
open(pjoin(dirpath, "PKGMANAGER"), "w").write(
- "pkgcore-%s" % pkgcore.const.VERSION)
+ "pkgcore-%s" % VERSION)
return True
diff --git a/pkgcore/vdb/virtuals.py b/pkgcore/vdb/virtuals.py
index f9f9d302a..2f9c1d079 100644
--- a/pkgcore/vdb/virtuals.py
+++ b/pkgcore/vdb/virtuals.py
@@ -2,16 +2,18 @@
# License: GPL2
import os, stat
-from pkgcore.util.osutils import listdir, ensure_dirs, join as pjoin, readlines
+
from pkgcore.restrictions import packages, values
from pkgcore.ebuild.atom import atom
from pkgcore.package.errors import InvalidDependency
from pkgcore.os_data import portage_gid
-from pkgcore.util.lists import iflatten_instance
from pkgcore.repository import virtual
-from pkgcore.util.currying import partial
-from pkgcore.util.file import read_dict, AtomicWriteFile
-from pkgcore.util.demandload import demandload
+
+from snakeoil.lists import iflatten_instance
+from snakeoil.osutils import listdir, ensure_dirs, join as pjoin, readlines
+from snakeoil.currying import partial
+from snakeoil.fileutils import read_dict, AtomicWriteFile
+from snakeoil.demandload import demandload
demandload(globals(), "errno")
# generic functions.
@@ -83,7 +85,6 @@ def _write_mtime_cache(mtimes, data, location):
os.chown(location, -1, portage_gid)
def _read_mtime_cache(location):
- f = None
try:
d = {}
for k, v in read_dict(readlines(location), splitter=None,
diff --git a/setup.py b/setup.py
index bb730ecbb..811c90579 100644
--- a/setup.py
+++ b/setup.py
@@ -195,7 +195,7 @@ class pkgcore_build_ext(build_ext.build_ext):
if self.debug:
# say it with me kids... distutils sucks!
for x in ("compiler_so", "compiler", "compiler_cxx"):
- setattr(self.compiler, x,
+ setattr(self.compiler, x,
[y for y in getattr(self.compiler, x) if y != '-DNDEBUG'])
return build_ext.build_ext.build_extensions(self)
@@ -297,17 +297,6 @@ packages = [
if '__init__.py' in files]
extra_flags = ['-Wall']
-common_includes = ['src/py24-compatibility.h',
- 'src/heapdef.h',
- 'src/common.h',
- ]
-
-extensions = []
-if sys.version_info < (2, 5):
- # Almost unmodified copy from the python 2.5 source.
- extensions.append(core.Extension(
- 'pkgcore.util._functools', ['src/functoolsmodule.c'],
- extra_compile_args=extra_flags, depends=common_includes))
from pkgcore.const import VERSION
core.setup(
@@ -324,37 +313,22 @@ core.setup(
},
ext_modules=[
core.Extension(
- 'pkgcore.util.osutils._posix', ['src/posix.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- core.Extension(
- 'pkgcore.util._klass', ['src/klass.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- core.Extension(
- 'pkgcore.util._caching', ['src/caching.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- core.Extension(
- 'pkgcore.util._lists', ['src/lists.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- core.Extension(
'pkgcore.ebuild._cpv', ['src/cpv.c'],
- extra_compile_args=extra_flags, depends=common_includes),
+ extra_compile_args=extra_flags),
core.Extension(
'pkgcore.ebuild._depset', ['src/depset.c'],
- extra_compile_args=extra_flags, depends=common_includes),
+ extra_compile_args=extra_flags),
core.Extension(
'pkgcore.ebuild._atom', ['src/atom.c'],
- extra_compile_args=extra_flags, depends=common_includes),
+ extra_compile_args=extra_flags),
core.Extension(
'pkgcore.restrictions._restrictions', ['src/restrictions.c'],
- extra_compile_args=extra_flags, depends=common_includes),
+ extra_compile_args=extra_flags),
core.Extension(
'pkgcore.ebuild._filter_env', [
'src/filter_env.c', 'src/bmh_search.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- core.Extension(
- 'pkgcore.util.osutils._readdir', ['src/readdir.c'],
- extra_compile_args=extra_flags, depends=common_includes),
- ] + extensions,
+ extra_compile_args=extra_flags),
+ ],
cmdclass={
'sdist': mysdist,
'build_py': pkgcore_build_py,
diff --git a/src/atom.c b/src/atom.c
index 4624bb5e6..4d21f72bc 100644
--- a/src/atom.c
+++ b/src/atom.c
@@ -12,7 +12,7 @@
#define PY_SSIZE_T_CLEAN
-#include "common.h"
+#include <snakeoil/common.h>
// exceptions, loaded during initialization.
static PyObject *pkgcore_atom_MalformedAtom_Exc = NULL;
@@ -926,9 +926,9 @@ pkgcore_atom_getattr(PyObject *getattr_inst, PyObject *args)
return tup;
}
-PKGCORE_FUNC_BINDING("__init__", "pkgcore.ebuild._atom.__init__",
+snakeoil_FUNC_BINDING("__init__", "pkgcore.ebuild._atom.__init__",
pkgcore_atom_init, METH_VARARGS|METH_KEYWORDS)
-PKGCORE_FUNC_BINDING("__getattr__", "pkgcore.ebuild._atom.__getattr__",
+snakeoil_FUNC_BINDING("__getattr__", "pkgcore.ebuild._atom.__getattr__",
pkgcore_atom_getattr, METH_O|METH_COEXIST)
PyDoc_STRVAR(
diff --git a/src/caching.c b/src/caching.c
deleted file mode 100644
index 535321e33..000000000
--- a/src/caching.c
+++ /dev/null
@@ -1,711 +0,0 @@
-/*
- * Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
- * Copyright: 2006 Marien Zwart <marienz@gentoo.org>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#define PY_SSIZE_T_CLEAN
-
-#include <Python.h>
-#include "py24-compatibility.h"
-
-static PyObject *pkgcore_caching_disable_str = NULL;
-
-/*
- * WeakValFinalizer: holds a reference to a dict and key,
- * does "del dict[key]" when called. Used as weakref callback.
- * Only used internally (does not expose a constructor/new method).
- *
- * Together with a "normal" PyDict this is used as a much more minimal
- * version of python's weakref.WeakValueDictionary. One noticable
- * difference between that and this is that WeakValueDictionary gives
- * the weakref callbacks responsible for removing an item from the
- * dict a weakref to the dict, while we use a "hard" reference to it.
- *
- * WeakValueDictionary has to do it that way to prevent objects in the
- * dict from keeping the dict alive. That should not be an issue here:
- * the objects in the dict have a hard reference to the dict through
- * their type anyway. So this simplifies things a bit (especially
- * since you cannot weakref a PyDict, it would have to be subclassed
- * to add that ability (WeakValueDictionary is a UserDict, not a
- * "real" dict, so it does not have that problem)).
- */
-
-typedef struct {
- PyObject_HEAD
- PyObject *dict;
- PyObject *key;
-} pkgcore_WeakValFinalizer;
-
-static void
-pkgcore_WeakValFinalizer_dealloc(pkgcore_WeakValFinalizer *self)
-{
- Py_CLEAR(self->dict);
- Py_CLEAR(self->key);
- self->ob_type->tp_free((PyObject*) self);
-}
-
-static PyObject *
-pkgcore_WeakValFinalizer_call(pkgcore_WeakValFinalizer *self,
- PyObject *args, PyObject *kwargs)
-{
- /* We completely ignore whatever arguments are passed to us
- (should be a single positional (the weakref) we do not need). */
- if (PyDict_DelItem(self->dict, self->key) < 0)
- return NULL;
- Py_RETURN_NONE;
-}
-
-static int
-pkgcore_WeakValFinalizer_traverse(
- pkgcore_WeakValFinalizer *self, visitproc visit, void *arg)
-{
- Py_VISIT(self->dict);
- Py_VISIT(self->key);
- return 0;
-}
-
-static int
-pkgcore_WeakValFinalizer_heapyrelate(NyHeapRelate *r)
-{
- pkgcore_WeakValFinalizer *v = (pkgcore_WeakValFinalizer*)r->src;
- INTERATTR(dict);
- INTERATTR(key);
- return 0;
-}
-
-static PyTypeObject pkgcore_WeakValFinalizerType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore.util._caching.WeakValFinalizer", /* tp_name */
- sizeof(pkgcore_WeakValFinalizer), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)pkgcore_WeakValFinalizer_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- (ternaryfunc)pkgcore_WeakValFinalizer_call, /* tp_call */
- (reprfunc)0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- "WeakValFinalizer objects", /* tp_doc */
- (traverseproc)pkgcore_WeakValFinalizer_traverse, /* tp_traverse */
-};
-
-static pkgcore_WeakValFinalizer *
-pkgcore_WeakValFinalizer_create(PyObject *dict, PyObject *key)
-{
- pkgcore_WeakValFinalizer *finalizer = PyObject_GC_New(
- pkgcore_WeakValFinalizer, &pkgcore_WeakValFinalizerType);
-
- if (!finalizer)
- return NULL;
-
- Py_INCREF(dict);
- finalizer->dict = dict;
- Py_INCREF(key);
- finalizer->key = key;
-
- PyObject_GC_Track(finalizer);
-
- return finalizer;
-}
-
-typedef struct {
- PyObject_HEAD
- PyObject *dict;
-} pkgcore_WeakValCache;
-
-static PyObject *
-pkgcore_WeakValCache_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
-{
- pkgcore_WeakValCache *self;
- self = (pkgcore_WeakValCache *)type->tp_alloc(type, 0);
- if(!self)
- return NULL;
- self->dict = PyDict_New();
- if(!self->dict) {
- Py_DECREF(self);
- return NULL;
- }
- return (PyObject *)self;
-}
-
-static int
-pkgcore_WeakValCache_traverse(
- pkgcore_WeakValCache *self, visitproc visit, void *arg)
-{
- Py_VISIT(self->dict);
- return 0;
-}
-
-static int
-pkgcore_WeakValCache_heapyrelate(NyHeapRelate *r)
-{
- pkgcore_WeakValCache *v = (pkgcore_WeakValCache*) r->src;
- INTERATTR(dict);
- return 0;
-}
-
-static int
-pkgcore_WeakValCache_clear(pkgcore_WeakValCache *self)
-{
- PyDict_Clear(self->dict);
- return 0;
-}
-
-static PyObject *
-pkgcore_WeakValCache_clear_method(pkgcore_WeakValCache *self)
-{
- pkgcore_WeakValCache_clear(self);
- Py_RETURN_NONE;
-}
-
-static void
-pkgcore_WeakValCache_dealloc(pkgcore_WeakValCache *self)
-{
- Py_CLEAR(self->dict);
- self->ob_type->tp_free((PyObject *)self);
-}
-
-static Py_ssize_t
-pkgcore_WeakValCache_len(pkgcore_WeakValCache *self)
-{
- return PyDict_Size(self->dict);
-}
-
-static int
-pkgcore_WeakValCache_setitem(pkgcore_WeakValCache *self, PyObject *key,
- PyObject *val)
-{
- if(!val) {
- return PyDict_SetItem(self->dict, (PyObject*)key, (PyObject*)val);
- }
- if(PyWeakref_Check(val)) {
- PyErr_SetString(PyExc_TypeError, "cannot set value to a weakref");
- return -1;
- }
-
- int ret = -1;
- pkgcore_WeakValFinalizer *finalizer = pkgcore_WeakValFinalizer_create(
- self->dict, key);
- if (finalizer) {
- PyObject *weakref = PyWeakref_NewRef(val, (PyObject*)finalizer);
- Py_DECREF(finalizer);
- if (weakref) {
- ret = PyDict_SetItem(self->dict, key, (PyObject*)weakref);
- Py_DECREF(weakref);
- }
- }
- return ret;
-}
-
-PyObject *
-pkgcore_WeakValCache_getitem(pkgcore_WeakValCache *self, PyObject *key)
-{
- PyObject *resobj, *actual = NULL;
- resobj = PyDict_GetItem(self->dict, key);
- if(resobj) {
- actual = PyWeakref_GetObject(resobj);
- if (actual == Py_None) {
- // PyWeakref_GetObject returns a borrowed reference, do not
- // clear it
- actual = NULL;
- /* wipe the weakref err */
- PyErr_Clear();
- PyDict_DelItem(self->dict, key);
- if(!PyErr_Occurred()) {
- PyErr_SetObject(PyExc_KeyError, key);
- }
- } else {
- Py_INCREF(actual);
- }
- } else {
- PyErr_SetObject(PyExc_KeyError, key);
- }
- return actual;
-}
-
-static PyObject *
-pkgcore_WeakValCache_get(pkgcore_WeakValCache *self, PyObject *args)
-{
- Py_ssize_t size = PyTuple_Size(args);
- if(-1 == size)
- return NULL;
- PyObject *key, *resobj;
- if(size < 1 || size > 2) {
- PyErr_SetString(PyExc_TypeError,
- "get requires one arg (key), with optional default to return");
- return NULL;
- }
- key = PyTuple_GET_ITEM(args, 0);
- if(!key) {
- assert(PyErr_Occurred());
- return NULL;
- }
-
- PyErr_Clear();
- resobj = PyObject_GetItem((PyObject *)self, key);
- if(resobj) {
- assert(!PyErr_Occurred());
- return resobj;
-
- } else if(PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_KeyError)) {
- // if the error wasn't that the key isn't found, return
- return resobj;
- }
-
- PyErr_Clear();
- if(size == 2) {
- resobj = PyTuple_GET_ITEM(args, 1);
- } else {
- resobj = Py_None;
- }
- Py_INCREF(resobj);
- return resobj;
-}
-
-static PyMappingMethods pkgcore_WeakValCache_as_mapping = {
- (lenfunc)pkgcore_WeakValCache_len, /* len() */
- (binaryfunc)pkgcore_WeakValCache_getitem, /* getitem */
- (objobjargproc)pkgcore_WeakValCache_setitem, /* setitem */
-};
-
-
-static PyMethodDef pkgcore_WeakValCache_methods[] = {
- {"get", (PyCFunction)pkgcore_WeakValCache_get, METH_VARARGS,
- "get(key, default=None)"},
- {"clear", (PyCFunction)pkgcore_WeakValCache_clear_method, METH_NOARGS,
- "clear()"},
- {NULL}
-};
-
-/* WeakValCache; simplified WeakValDictionary. */
-
-static PyTypeObject pkgcore_WeakValCacheType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore.util._caching.WeakValCache", /* tp_name */
- sizeof(pkgcore_WeakValCache), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)pkgcore_WeakValCache_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- &pkgcore_WeakValCache_as_mapping, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- (reprfunc)0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
- 0, /* tp_doc */
- (traverseproc)pkgcore_WeakValCache_traverse, /* tp_traverse */
- (inquiry)pkgcore_WeakValCache_clear, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- pkgcore_WeakValCache_methods, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_WeakValCache_new, /* tp_new */
-};
-
-
-/* WeakInstMeta: metaclass for instance caching. */
-
-typedef struct {
- PyHeapTypeObject type;
- PyObject *inst_dict;
- int inst_caching;
-} pkgcore_WeakInstMeta;
-
-static void
-pkgcore_WeakInstMeta_dealloc(pkgcore_WeakInstMeta* self)
-{
- Py_CLEAR(self->inst_dict);
- ((PyObject*)self)->ob_type->tp_free((PyObject *)self);
-}
-
-static PyTypeObject pkgcore_WeakInstMetaType;
-
-static PyObject *
-pkgcore_WeakInstMeta_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
-{
- const char *name;
- PyTupleObject *bases;
- PyObject *d;
- int inst_caching = 0;
- static char *kwlist[] = {"name", "bases", "dict", 0};
-
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "sO!O!", kwlist,
- &name,
- &PyTuple_Type, &bases,
- &PyDict_Type, &d))
- return NULL;
-
- PyObject *cachesetting = PyMapping_GetItemString(d, "__inst_caching__");
- if (cachesetting) {
- inst_caching = PyObject_IsTrue(cachesetting);
- Py_DECREF(cachesetting);
- if (inst_caching < 0)
- return NULL;
- } else {
- if (!PyErr_ExceptionMatches(PyExc_KeyError))
- return NULL;
- PyErr_Clear();
- }
- if (PyDict_SetItemString(d, "__inst_caching__",
- inst_caching ? Py_True : Py_False) < 0)
- return NULL;
-
- if (inst_caching) {
- PyObject *slots = PyMapping_GetItemString(d, "__slots__");
- if (slots) {
- int has_weakref = 0;
- PyObject *base;
- int i, n = PyTuple_GET_SIZE(bases);
- for (i = 0; i < n; i++) {
- base = PyTuple_GET_ITEM(bases, i);
- if (PyObject_HasAttrString(base, "__weakref__")) {
- has_weakref = 1;
- break;
- }
- }
- if (!has_weakref) {
- PyObject *slottuple = Py_BuildValue("(s)", "__weakref__");
- if (!slottuple) {
- Py_DECREF(slots);
- return NULL;
- }
- PyObject *newslots = PySequence_Concat(slots, slottuple);
- Py_DECREF(slottuple);
- if (!newslots)
- return NULL;
- if (PyDict_SetItemString(d, "__slots__", newslots) < 0) {
- Py_DECREF(newslots);
- Py_DECREF(slots);
- return NULL;
- }
- Py_DECREF(newslots);
- }
- Py_DECREF(slots);
- } else {
- if (!PyErr_ExceptionMatches(PyExc_KeyError))
- return NULL;
- PyErr_Clear();
- }
- }
-
- pkgcore_WeakInstMeta *self;
- self = (pkgcore_WeakInstMeta*)PyType_Type.tp_new(type, args, kwargs);
- if (!self)
- return NULL;
-
- self->inst_caching = inst_caching;
-
- if (inst_caching) {
- if (!(self->inst_dict = PyDict_New())) {
- Py_DECREF((PyObject*)self);
- return NULL;
- }
- }
- return (PyObject*) self;
-}
-
-
-static PyObject *
-pkgcore_WeakInstMeta_call(pkgcore_WeakInstMeta *self,
- PyObject *args, PyObject *kwargs)
-{
- PyObject *key, *kwlist, *kwtuple, *resobj = NULL;
- int result;
- if (!self->inst_caching)
- /* No caching, just do what a "normal" type does */
- return PyType_Type.tp_call((PyObject*)self, args, kwargs);
-
- Py_ssize_t len = kwargs ? PyDict_Size(kwargs) : 0;
- if (len) {
- /* If disable_inst_caching=True is passed pop it and disable caching */
- PyObject *obj = PyDict_GetItem(kwargs, pkgcore_caching_disable_str);
- if (obj) {
- result = PyObject_IsTrue(obj);
- if (result < 0)
- return NULL;
-
- if (PyDict_DelItem(kwargs, pkgcore_caching_disable_str))
- return NULL;
-
- if (result)
- return PyType_Type.tp_call((PyObject*)self, args, kwargs);
- }
- /* Convert kwargs to a sorted tuple so we can hash it. */
- if (!(kwlist = PyDict_Items(kwargs)))
- return NULL;
-
- if (len > 1 && PyList_Sort(kwlist) < 0) {
- Py_DECREF(kwlist);
- return NULL;
- }
-
- kwtuple = PyList_AsTuple(kwlist);
- Py_DECREF(kwlist);
- if (!kwtuple)
- return NULL;
- } else {
- /* null kwargs is equivalent to a zero-length tuple */
- Py_INCREF(Py_None);
- kwtuple = Py_None;
- }
-
- /* Construct the dict key. Be careful not to leak this below! */
- key = PyTuple_Pack(2, args, kwtuple);
- Py_DECREF(kwtuple);
- if (!key)
- return NULL;
-
- // borrowed reference from PyDict_GetItem...
- resobj = PyDict_GetItem(self->inst_dict, key);
-
- if (resobj) {
- /* We have a weakref cached, return the value if it is still there */
- PyObject *actual = PyWeakref_GetObject(resobj);
- if (!actual) {
- Py_DECREF(key);
- return NULL;
- }
- if (actual != Py_None) {
- Py_INCREF(actual);
- Py_DECREF(key);
- return actual;
- }
- /* PyWeakref_GetObject returns a borrowed reference, do not clear it */
- }
- // if we got here, it's either not cached, or the key is unhashable.
- // we catch the unhashable when we try to save the key.
-
- resobj = PyType_Type.tp_call((PyObject*)self, args, kwargs);
- if (!resobj) {
- Py_DECREF(key);
- return NULL;
- }
-
- pkgcore_WeakValFinalizer *finalizer = pkgcore_WeakValFinalizer_create(
- self->inst_dict, key);
- if (!finalizer) {
- Py_DECREF(key);
- Py_DECREF(resobj);
- return NULL;
- }
-
- PyObject *weakref = PyWeakref_NewRef(resobj, (PyObject*)finalizer);
- Py_DECREF(finalizer);
- if (!weakref) {
- Py_DECREF(key);
- Py_DECREF(resobj);
- return NULL;
- }
-
- result = PyDict_SetItem(self->inst_dict, key, weakref);
- Py_DECREF(weakref);
-
- if (result < 0) {
- if (PyErr_ExceptionMatches(PyExc_TypeError) ||
- PyErr_ExceptionMatches(PyExc_NotImplementedError)) {
- PyErr_Clear();
- PyObject *format, *formatargs, *message;
- if ((format = PyString_FromString(
- "caching for %s, key=%s is unhashable"))) {
- if ((formatargs = PyTuple_Pack(2, self, key))) {
- if ((message = PyString_Format(format, formatargs))) {
- /* Leave resobj NULL if PyErr_Warn raises. */
- if (PyErr_Warn(
- PyExc_UserWarning,
- PyString_AsString(message))) {
- resobj = NULL;
- }
- Py_DECREF(message);
- }
- Py_DECREF(formatargs);
- }
- Py_DECREF(format);
- }
- } else {
- // unexpected exception... let it go.
- resobj = NULL;
- }
- }
- Py_DECREF(key);
- return resobj;
-}
-
-
-PyDoc_STRVAR(
- pkgcore_WeakInstMetaType__doc__,
- "metaclass for instance caching, resulting in reuse of unique instances.\n"
- "few notes-\n"
- " - instances must be immutable (or effectively so). Since creating a\n"
- " new instance may return a preexisting instance, this requirement\n"
- " B{must} be honored.\n"
- " - due to the potential for mishap, each subclass of a caching class \n"
- " must assign __inst_caching__ = True to enable caching for the\n"
- " derivative.\n"
- " - conversely, __inst_caching__ = False does nothing (although it's\n"
- " useful as a sign of I{do not enable caching for this class}\n"
- " - instance caching can be disabled per instantiation via passing\n"
- " disabling_inst_caching=True into the class constructor.\n"
- "\n"
- "Being a metaclass, the voodoo used doesn't require modification of the\n"
- "class itself.\n"
- "\n"
- "Examples of usage are the restriction modules\n"
- "L{packages<pkgcore.restrictions.packages>} and\n"
- "L{values<pkgcore.restrictions.values>}\n"
- );
-
-static PyTypeObject pkgcore_WeakInstMetaType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore.util._caching.WeakInstMeta", /* tp_name */
- sizeof(pkgcore_WeakInstMeta), /* tp_basicsize */
- 0, /* tp_itemsize */
- /* methods */
- (destructor)pkgcore_WeakInstMeta_dealloc, /* tp_dealloc */
- (printfunc)0, /* tp_print */
- (getattrfunc)0, /* tp_getattr */
- (setattrfunc)0, /* tp_setattr */
- (cmpfunc)0, /* tp_compare */
- (reprfunc)0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- (hashfunc)0, /* tp_hash */
- (ternaryfunc)pkgcore_WeakInstMeta_call, /* tp_call */
- (reprfunc)0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- pkgcore_WeakInstMetaType__doc__, /* tp_doc */
- (traverseproc)0, /* tp_traverse */
- (inquiry)0, /* tp_clear */
- (richcmpfunc)0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)0, /* tp_iter */
- (iternextfunc)0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* set to &PyType_Type later */ /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_WeakInstMeta_new, /* tp_new */
-};
-
-
-static NyHeapDef pkgcore_caching_heapdefs[] = {
- {
- 0, /* flags */
- &pkgcore_WeakValFinalizerType, /* type */
- 0, /* size */
- 0, /* traverse */
- pkgcore_WeakValFinalizer_heapyrelate /* relate */
- },
- {
- 0, /* flags */
- &pkgcore_WeakValCacheType, /* type */
- 0, /* size */
- 0, /* traverse */
- pkgcore_WeakValCache_heapyrelate /* relate */
- },
- {0}
-};
-
-/* Module initialization */
-
-PyDoc_STRVAR(
- pkgcore_module_documentation,
- "C reimplementation of pkgcore.util.caching.");
-
-PyMODINIT_FUNC
-init_caching()
-{
- /* Create the module and add the functions */
- PyObject *m = Py_InitModule3(
- "_caching", NULL, pkgcore_module_documentation);
- if (!m)
- return;
-
- pkgcore_WeakInstMetaType.tp_base = &PyType_Type;
-
- if (PyType_Ready(&pkgcore_WeakInstMetaType) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_WeakValCacheType) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_WeakValFinalizerType) < 0)
- return;
-
- if (!pkgcore_caching_disable_str) {
- if (!(pkgcore_caching_disable_str =
- PyString_FromString("disable_inst_caching")))
- /* We can just return here, since the only way to get at
- * this is through pkgcore_WeakInstMeta_call and that
- * cannot be accessed yet.
- */
- return;
- }
-
- Py_INCREF(&pkgcore_WeakInstMetaType);
- if (PyModule_AddObject(
- m, "WeakInstMeta", (PyObject *)&pkgcore_WeakInstMetaType) == -1)
- return;
-
- Py_INCREF(&pkgcore_WeakValCacheType);
- if (PyModule_AddObject(
- m, "WeakValCache", (PyObject *)&pkgcore_WeakValCacheType) == -1)
- return;
-
- PyObject *cobject = PyCObject_FromVoidPtrAndDesc(
- &pkgcore_caching_heapdefs, "NyHeapDef[] v1.0", 0);
- if (!cobject)
- return;
-
- if (PyModule_AddObject(m, "_NyHeapDefs_", cobject) == -1)
- return;
-
- /* Success! */
-}
diff --git a/src/chflags.c b/src/chflags.c
index fb73ccef7..2bee07af8 100644
--- a/src/chflags.c
+++ b/src/chflags.c
@@ -3,7 +3,7 @@
* Modified by Marien Zwart <marienz@gentoo.org>
*/
-#include "Python.h"
+#include <Python.h>
#include <sys/stat.h>
diff --git a/src/common.h b/src/common.h
deleted file mode 100644
index 8317fd65a..000000000
--- a/src/common.h
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright: 2006 Brian Harring <ferringb@gmail.com>
- * License: GPL2
- *
- * common macros.
- */
-
-#ifndef PKGCORE_COMMON_HEADER
-#define PKGCORE_COMMON_HEADER 1
-
-#include <Python.h>
-#include "py24-compatibility.h"
-
-#define PKGCORE_IMMUTABLE_ATTR_BOOL(type, name, attr, test) \
-static int \
-type##_set_##attr (type *self, PyObject *v, void *closure) \
-{ \
- PyErr_SetString(PyExc_AttributeError, name" is immutable"); \
- return -1; \
-} \
- \
-static PyObject * \
-type##_get_##attr (type *self, void *closure) \
-{ \
- PyObject *s = (test) ? Py_True : Py_False; \
- Py_INCREF(s); \
- return s; \
-}
-
-#define PKGCORE_GETSET(type, doc, attr) \
- {doc, (getter)type##_get_##attr , \
- (setter)type##_set_##attr , NULL}
-
-
-#define PKGCORE_FUNC_DESC(meth_name, class_name, func, methargs) \
-_PKGCORE_FUNC_DESC(meth_name, class_name, func, methargs, 0)
-#define _PKGCORE_FUNC_DESC(meth_name, class_name, func, methargs, desc) \
- \
-static PyTypeObject func##_type = { \
- PyObject_HEAD_INIT(NULL) \
- 0, /* ob_size */ \
- class_name, /* tp_name */ \
- sizeof(PyObject), /* tp_basicsize */ \
- 0, /* tp_itemsize */ \
- 0, /* tp_dealloc */ \
- 0, /* tp_print */ \
- 0, /* tp_getattr */ \
- 0, /* tp_setattr */ \
- 0, /* tp_compare */ \
- 0, /* tp_repr */ \
- 0, /* tp_as_number */ \
- 0, /* tp_as_sequence */ \
- 0, /* tp_as_mapping */ \
- 0, /* tp_hash */ \
- (ternaryfunc)func, /* tp_call */ \
- 0, /* tp_str */ \
- 0, /* tp_getattro */ \
- 0, /* tp_setattro */ \
- 0, /* tp_as_buffer */ \
- Py_TPFLAGS_DEFAULT, /* tp_flags */ \
- "cpython version of "#meth_name, /* tp_doc */ \
- 0, /* tp_traverse */ \
- 0, /* tp_clear */ \
- 0, /* tp_richcompare */ \
- 0, /* tp_weaklistoffset */ \
- 0, /* tp_iter */ \
- 0, /* tp_iternext */ \
- 0, /* tp_methods */ \
- 0, /* tp_members */ \
- 0, /* tp_getset */ \
- 0, /* tp_base */ \
- 0, /* tp_dict */ \
- desc, /* tp_descr_get */ \
- 0, /* tp_descr_set */ \
-};
-
-#define PKGCORE_FUNC_BINDING(meth_name, class_name, func, methargs) \
-static PyObject * \
-func##_get_descr(PyObject *self, PyObject *obj, PyObject *type) \
-{ \
- static PyMethodDef mdef = {meth_name, (PyCFunction)func, methargs, \
- NULL}; \
- return PyCFunction_New(&mdef, obj); \
-} \
- \
-_PKGCORE_FUNC_DESC(meth_name, class_name, func, methargs, \
- func##_get_descr)
-
-#endif
diff --git a/src/cpv.c b/src/cpv.c
index 4c8c5e124..db105c8f2 100644
--- a/src/cpv.c
+++ b/src/cpv.c
@@ -12,7 +12,7 @@
#define PY_SSIZE_T_CLEAN
-#include "common.h"
+#include <snakeoil/common.h>
#include <structmember.h>
#include <string.h>
@@ -92,7 +92,7 @@ pkgcore_cpv_get_cpvstr(pkgcore_cpv *self, void *closure)
static PyGetSetDef pkgcore_cpv_getsetters[] = {
-PKGCORE_GETSET(pkgcore_cpv, "cpvstr", cpvstr),
+snakeoil_GETSET(pkgcore_cpv, "cpvstr", cpvstr),
{NULL}
};
diff --git a/src/depset.c b/src/depset.c
index a453e936c..eb30868c1 100644
--- a/src/depset.c
+++ b/src/depset.c
@@ -13,7 +13,7 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include "py24-compatibility.h"
+#include <snakeoil/py24-compatibility.h>
// exceptions, loaded during initialization.
static PyObject *pkgcore_depset_ParseErrorExc = NULL;
diff --git a/src/filter_env.c b/src/filter_env.c
index c45559fcc..35d0bc450 100644
--- a/src/filter_env.c
+++ b/src/filter_env.c
@@ -8,7 +8,7 @@
#define PY_SSIZE_T_CLEAN
#include <Python.h>
-#include "py24-compatibility.h"
+#include <snakeoil/py24-compatibility.h>
PyDoc_STRVAR(
module_doc,
diff --git a/src/functoolsmodule.c b/src/functoolsmodule.c
deleted file mode 100644
index c565d0559..000000000
--- a/src/functoolsmodule.c
+++ /dev/null
@@ -1,279 +0,0 @@
-
-#include "Python.h"
-#include "structmember.h"
-
-#include "py24-compatibility.h"
-
-/* _functools module written and maintained
- by Hye-Shik Chang <perky@FreeBSD.org>
- with adaptations by Raymond Hettinger <python@rcn.com>
- Copyright (c) 2004, 2005, 2006 Python Software Foundation.
- All rights reserved.
-*/
-
-/* partial object **********************************************************/
-
-typedef struct {
- PyObject_HEAD
- PyObject *fn;
- PyObject *args;
- PyObject *kw;
- PyObject *dict;
- PyObject *weakreflist; /* List of weak references */
-} partialobject;
-
-static PyTypeObject partial_type;
-
-static PyObject *
-partial_new(PyTypeObject *type, PyObject *args, PyObject *kw)
-{
- PyObject *func;
- partialobject *pto;
-
- if (PyTuple_GET_SIZE(args) < 1) {
- PyErr_SetString(PyExc_TypeError,
- "type 'partial' takes at least one argument");
- return NULL;
- }
-
- func = PyTuple_GET_ITEM(args, 0);
- if (!PyCallable_Check(func)) {
- PyErr_SetString(PyExc_TypeError,
- "the first argument must be callable");
- return NULL;
- }
-
- /* create partialobject structure */
- pto = (partialobject *)type->tp_alloc(type, 0);
- if (pto == NULL)
- return NULL;
-
- pto->fn = func;
- Py_INCREF(func);
- pto->args = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
- if (pto->args == NULL) {
- pto->kw = NULL;
- Py_DECREF(pto);
- return NULL;
- }
- if (kw != NULL) {
- pto->kw = PyDict_Copy(kw);
- if (pto->kw == NULL) {
- Py_DECREF(pto);
- return NULL;
- }
- } else {
- pto->kw = Py_None;
- Py_INCREF(Py_None);
- }
-
- pto->weakreflist = NULL;
- pto->dict = NULL;
-
- return (PyObject *)pto;
-}
-
-static void
-partial_dealloc(partialobject *pto)
-{
- PyObject_GC_UnTrack(pto);
- if (pto->weakreflist != NULL)
- PyObject_ClearWeakRefs((PyObject *) pto);
- Py_XDECREF(pto->fn);
- Py_XDECREF(pto->args);
- Py_XDECREF(pto->kw);
- Py_XDECREF(pto->dict);
- pto->ob_type->tp_free(pto);
-}
-
-static PyObject *
-partial_call(partialobject *pto, PyObject *args, PyObject *kw)
-{
- PyObject *ret;
- PyObject *argappl = NULL, *kwappl = NULL;
-
- assert (PyCallable_Check(pto->fn));
- assert (PyTuple_Check(pto->args));
- assert (pto->kw == Py_None || PyDict_Check(pto->kw));
-
- if (PyTuple_GET_SIZE(pto->args) == 0) {
- argappl = args;
- Py_INCREF(args);
- } else if (PyTuple_GET_SIZE(args) == 0) {
- argappl = pto->args;
- Py_INCREF(pto->args);
- } else {
- argappl = PySequence_Concat(pto->args, args);
- if (argappl == NULL)
- return NULL;
- }
-
- if (pto->kw == Py_None) {
- kwappl = kw;
- Py_XINCREF(kw);
- } else {
- kwappl = PyDict_Copy(pto->kw);
- if (kwappl == NULL) {
- Py_DECREF(argappl);
- return NULL;
- }
- if (kw != NULL) {
- if (PyDict_Merge(kwappl, kw, 1) != 0) {
- Py_DECREF(argappl);
- Py_DECREF(kwappl);
- return NULL;
- }
- }
- }
-
- ret = PyObject_Call(pto->fn, argappl, kwappl);
- Py_DECREF(argappl);
- Py_XDECREF(kwappl);
- return ret;
-}
-
-static int
-partial_traverse(partialobject *pto, visitproc visit, void *arg)
-{
- Py_VISIT(pto->fn);
- Py_VISIT(pto->args);
- Py_VISIT(pto->kw);
- Py_VISIT(pto->dict);
- return 0;
-}
-
-PyDoc_STRVAR(partial_doc,
-"partial(func, *args, **keywords) - new function with partial application\n\
- of the given arguments and keywords.\n");
-
-#define OFF(x) offsetof(partialobject, x)
-static PyMemberDef partial_memberlist[] = {
- {"func", T_OBJECT, OFF(fn), READONLY,
- "function object to use in future partial calls"},
- {"args", T_OBJECT, OFF(args), READONLY,
- "tuple of arguments to future partial calls"},
- {"keywords", T_OBJECT, OFF(kw), READONLY,
- "dictionary of keyword arguments to future partial calls"},
- {NULL} /* Sentinel */
-};
-
-static PyObject *
-partial_get_dict(partialobject *pto)
-{
- if (pto->dict == NULL) {
- pto->dict = PyDict_New();
- if (pto->dict == NULL)
- return NULL;
- }
- Py_INCREF(pto->dict);
- return pto->dict;
-}
-
-static int
-partial_set_dict(partialobject *pto, PyObject *value)
-{
- PyObject *tmp;
-
- /* It is illegal to del p.__dict__ */
- if (value == NULL) {
- PyErr_SetString(PyExc_TypeError,
- "a partial object's dictionary may not be deleted");
- return -1;
- }
- /* Can only set __dict__ to a dictionary */
- if (!PyDict_Check(value)) {
- PyErr_SetString(PyExc_TypeError,
- "setting partial object's dictionary to a non-dict");
- return -1;
- }
- tmp = pto->dict;
- Py_INCREF(value);
- pto->dict = value;
- Py_XDECREF(tmp);
- return 0;
-}
-
-static PyGetSetDef partial_getsetlist[] = {
- {"__dict__", (getter)partial_get_dict, (setter)partial_set_dict},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject partial_type = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "functools.partial", /* tp_name */
- sizeof(partialobject), /* tp_basicsize */
- 0, /* tp_itemsize */
- /* methods */
- (destructor)partial_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- (ternaryfunc)partial_call, /* tp_call */
- 0, /* tp_str */
- PyObject_GenericGetAttr, /* tp_getattro */
- PyObject_GenericSetAttr, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
- Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_WEAKREFS, /* tp_flags */
- partial_doc, /* tp_doc */
- (traverseproc)partial_traverse, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- offsetof(partialobject, weakreflist), /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- partial_memberlist, /* tp_members */
- partial_getsetlist, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- offsetof(partialobject, dict), /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- partial_new, /* tp_new */
- PyObject_GC_Del, /* tp_free */
-};
-
-
-/* module level code ********************************************************/
-
-PyDoc_STRVAR(module_doc,
-"Tools that operate on functions.");
-
-static PyMethodDef module_methods[] = {
- {NULL, NULL} /* sentinel */
-};
-
-PyMODINIT_FUNC
-init_functools(void)
-{
- int i;
- PyObject *m;
- char *name;
- PyTypeObject *typelist[] = {
- &partial_type,
- NULL
- };
-
- m = Py_InitModule3("_functools", module_methods, module_doc);
- if (m == NULL)
- return;
-
- for (i=0 ; typelist[i] != NULL ; i++) {
- if (PyType_Ready(typelist[i]) < 0)
- return;
- name = strchr(typelist[i]->tp_name, '.');
- assert (name != NULL);
- Py_INCREF(typelist[i]);
- PyModule_AddObject(m, name+1, (PyObject *)typelist[i]);
- }
-}
diff --git a/src/heapdef.h b/src/heapdef.h
deleted file mode 100644
index e45fb06bc..000000000
--- a/src/heapdef.h
+++ /dev/null
@@ -1,80 +0,0 @@
-#ifndef Ny_HEAPDEF_H
-#define Ny_HEAPDEF_H
-
-/* NyHeapTraverse - argument to traverse
- Defined to avoid complicated function defs
-*/
-
-typedef struct {
- int flags;
- PyObject *hv; /* A HeapView object providing context to the traversal
- function, if necessary. It is defined as a PyObject
- rather than HeapView to avoid include file dependency. */
- PyObject *obj; /* The object that is to be traversed */
- void *arg; /* the argument to pass when visiting referred objects. */
- visitproc visit; /* The visit procedure to call */
- PyObject *_hiding_tag_; /* The hiding tag in use by current context. */
-
-
-} NyHeapTraverse;
-
-/* NyHeapRelate - argument to relate
- Defined to avoid complicated function defs
-*/
-
-typedef struct NyHeapRelate {
- int flags; /* As yet unused */
- PyObject *hv; /* Heap view object */
- PyObject *src; /* Source of relation, and which is dispatched on */
- PyObject *tgt; /* Target of relation */
-
- /* visit() should be called once for each unique pointer
- from src to tgt.
- The relation type is indicated by the relatype argument
- and defined in the NYHR_ definitions below.
- The relator argument is an object describing the relation
- and should be newly allocated or INCREFED.
- The arg argument should be the arg passed in NyHeapRelate
- below.
-
- Return value: non-zero, means the relate function should
- not provide any more relations but should return. A zero
- return value means visit may be called again.
- */
-
- int (*visit)(unsigned int relatype, PyObject *relator, struct NyHeapRelate *arg);
-} NyHeapRelate;
-
-/* Values for 'relatype' argument to be passed to visit callback in NyHeapRelate */
-
-
-#define NYHR_ATTRIBUTE 1 /* src.relator is tgt */
-#define NYHR_INDEXVAL 2 /* src[relator] is tgt */
-#define NYHR_INDEXKEY 3 /* src has key tgt */
-#define NYHR_INTERATTR 4 /* src->relator == tgt in C only */
-#define NYHR_HASATTR 5 /* src has attribute tgt (stored as string) */
-#define NYHR_LOCAL_VAR 6 /* src (a frame) has local variable named <relator> with value tgt */
-#define NYHR_CELL 7 /* src has cell variable named <relator> containing value tgt */
-#define NYHR_STACK 8 /* src has a stack entry numbered <relator> with value tgt */
-#define NYHR_RELSRC 9 /* relator % src is tgt ; tgt is relator % src*/
-#define NYHR_LIMIT 10 /* All others are < NYHR_LIMIT */
-
-/* NyHeapDef - structure to define by external type providers to define themselves wrt heapy
-*/
-
-/* Definitions of its function types, useful for casting. */
-
-typedef int (*NyHeapDef_SizeGetter) (PyObject *obj);
-typedef int (*NyHeapDef_Traverser) (NyHeapTraverse *arg);
-typedef int (*NyHeapDef_RelationGetter) (NyHeapRelate *r);
-
-typedef struct {
- int flags; /* As yet, only 0 */
- PyTypeObject *type; /* The type it regards */
- NyHeapDef_SizeGetter size;
- NyHeapDef_Traverser traverse;
- NyHeapDef_RelationGetter relate;
- void *resv3, *resv4, *resv5; /* Reserved for future bin. comp. */
-} NyHeapDef;
-
-#endif /* Ny_HEAPDEF_H */
diff --git a/src/klass.c b/src/klass.c
deleted file mode 100644
index 3ada70ec7..000000000
--- a/src/klass.c
+++ /dev/null
@@ -1,411 +0,0 @@
-/*
- * Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#define PY_SSIZE_T_CLEAN
-#include "common.h"
-#include <ceval.h>
-
-static PyObject *pkgcore_equality_attr = NULL;
-
-typedef struct {
- PyObject_HEAD
- PyObject *redirect_target;
-} pkgcore_GetAttrProxy;
-
-static void
-pkgcore_GetAttrProxy_dealloc(pkgcore_GetAttrProxy *self)
-{
- Py_CLEAR(self->redirect_target);
- self->ob_type->tp_free((PyObject *)self);
-}
-
-static PyObject *
-pkgcore_GetAttrProxy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
-{
- pkgcore_GetAttrProxy *self;
- PyObject *alias_attr;
-
- if(!PyArg_ParseTuple(args, "S:__new__", &alias_attr))
- return NULL;
- self = (pkgcore_GetAttrProxy *)type->tp_alloc(type, 0);
-
- if (self) {
- self->redirect_target = alias_attr;
- Py_INCREF(alias_attr);
- }
- return (PyObject *)self;
-}
-
-static PyObject *
-pkgcore_GetAttrProxy_call(pkgcore_GetAttrProxy *self, PyObject *args,
- PyObject *kwds)
-{
- PyObject *attr, *real_obj, *tmp = NULL;
-
- if(PyArg_ParseTuple(args, "OS:__call__", &real_obj, &attr)) {
- if(Py_EnterRecursiveCall(" in GetAttrProxy.__call__ "))
- return NULL;
- real_obj = PyObject_GenericGetAttr(real_obj, self->redirect_target);
- if(real_obj) {
- tmp = PyObject_GetAttr(real_obj, attr);
- Py_DECREF(real_obj);
- }
- Py_LeaveRecursiveCall();
- }
- return (PyObject *)tmp;
-}
-
-
-static PyTypeObject pkgcore_GetAttrProxyType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore.util._klass.GetAttrProxy", /* tp_name */
- sizeof(pkgcore_GetAttrProxy), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)pkgcore_GetAttrProxy_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- (ternaryfunc)pkgcore_GetAttrProxy_call, /* tp_call */
- (reprfunc)0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
- "GetAttrProxy object; used mainly for native __getattr__ speed",
- /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- 0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_GetAttrProxy_new, /* tp_new */
-
-};
-
-static PyObject *
-pkgcore_mapping_get(PyObject *self, PyObject *args)
-{
- PyObject *key, *default_val = Py_None;
- if(!self) {
- PyErr_SetString(PyExc_TypeError,
- "need to be called with a mapping as the first arg");
- return NULL;
- }
- if(!PyArg_UnpackTuple(args, "get", 1, 2, &key, &default_val))
- return NULL;
-
- PyObject *ret = PyObject_GetItem(self, key);
- if(ret) {
- return ret;
- } else if (!PyErr_ExceptionMatches(PyExc_KeyError)) {
- return NULL;
- }
-
- PyErr_Clear();
- Py_INCREF(default_val);
- return default_val;
-}
-
-static inline PyObject *
-internal_generic_equality(PyObject *inst1, PyObject *inst2,
- int desired)
-{
- if(inst1 == inst2) {
- PyObject *res = desired == Py_EQ ? Py_True : Py_False;
- Py_INCREF(res);
- return res;
- }
-
- PyObject *attrs = PyObject_GetAttr(inst1, pkgcore_equality_attr);
- if(!attrs)
- return NULL;
- if(!PyTuple_CheckExact(attrs)) {
- PyErr_SetString(PyExc_TypeError,
- "__attr_comparison__ must be a tuple");
- return NULL;
- }
-
- Py_ssize_t idx = 0;
- PyObject *attr1, *attr2;
- // if Py_EQ, break on not equal, else on equal
- for(; idx < PyTuple_GET_SIZE(attrs); idx++) {
-
- attr1 = PyObject_GetAttr(inst1, PyTuple_GET_ITEM(attrs, idx));
- if(!attr1) {
- if(!PyErr_ExceptionMatches(PyExc_AttributeError))
- return NULL;
- PyErr_Clear();
- }
-
- attr2 = PyObject_GetAttr(inst2, PyTuple_GET_ITEM(attrs, idx));
- if(!attr2) {
- if(!PyErr_ExceptionMatches(PyExc_AttributeError)) {
- Py_XDECREF(attr1);
- return NULL;
- }
- PyErr_Clear();
- }
- if(!attr1) {
- if(attr2) {
- Py_DECREF(attr2);
- Py_DECREF(attrs);
- if(desired == Py_EQ) {
- Py_RETURN_FALSE;
- }
- Py_RETURN_TRUE;
- }
- continue;
- } else if (!attr2) {
- Py_DECREF(attr1);
- Py_DECREF(attrs);
- if(desired == Py_EQ) {
- Py_RETURN_FALSE;
- }
- Py_RETURN_TRUE;
- }
- int ret = PyObject_RichCompareBool(attr1, attr2, desired);
- Py_DECREF(attr1);
- Py_DECREF(attr2);
- if(0 > ret) {
- Py_DECREF(attrs);
- return NULL;
- } else if (0 == ret) {
- if(desired == Py_EQ) {
- Py_DECREF(attrs);
- Py_RETURN_FALSE;
- }
- } else if(desired == Py_NE) {
- Py_DECREF(attrs);
- Py_RETURN_TRUE;
- }
- }
- Py_DECREF(attrs);
- if(desired == Py_EQ) {
- Py_RETURN_TRUE;
- }
- Py_RETURN_FALSE;
-}
-
-static PyObject *
-pkgcore_generic_equality_eq(PyObject *self, PyObject *other)
-{
- return internal_generic_equality(self, other, Py_EQ);
-}
-
-static PyObject *
-pkgcore_generic_equality_ne(PyObject *self, PyObject *other)
-{
- return internal_generic_equality(self, other, Py_NE);
-}
-
-PKGCORE_FUNC_BINDING("generic_eq", "pkgcore.util._klass.generic_eq",
- pkgcore_generic_equality_eq, METH_O|METH_COEXIST)
-
-PKGCORE_FUNC_BINDING("generic_ne", "pkgcore.util._klass.generic_ne",
- pkgcore_generic_equality_ne, METH_O)
-
-
-static PyMethodDef pkgcore_mapping_get_def = {
- "get", pkgcore_mapping_get, METH_VARARGS, NULL};
-
-static PyObject *
-pkgcore_mapping_get_descr(PyObject *self, PyObject *obj, PyObject *type)
-{
- return PyCFunction_New(&pkgcore_mapping_get_def, obj);
-}
-
-static PyTypeObject pkgcore_GetType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore_get_type", /* tp_name */
- sizeof(PyObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- 0, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "type of the get proxy", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- pkgcore_mapping_get_descr, /* tp_descr_get */
- 0, /* tp_descr_set */
-};
-
-static PyObject *
-pkgcore_mapping_contains(PyObject *self, PyObject *key)
-{
- if(!self) {
- PyErr_SetString(PyExc_TypeError,
- "need to be called with a mapping as the first arg");
- return NULL;
- }
-
- PyObject *ret = PyObject_GetItem(self, key);
- if(ret) {
- Py_DECREF(ret);
- ret = Py_True;
- } else if (!PyErr_ExceptionMatches(PyExc_KeyError)) {
- return NULL;
- } else {
- PyErr_Clear();
- ret = Py_False;
- }
- Py_INCREF(ret);
- return ret;
-}
-
-static PyMethodDef pkgcore_mapping_contains_def = {
- "contains", pkgcore_mapping_contains, METH_O|METH_COEXIST, NULL};
-
-static PyObject *
-pkgcore_mapping_contains_descr(PyObject *self, PyObject *obj, PyObject *type)
-{
- return PyCFunction_New(&pkgcore_mapping_contains_def, obj);
-}
-
-static PyTypeObject pkgcore_ContainsType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "pkgcore_contains_type", /* tp_name */
- sizeof(PyObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- 0, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "type of the contains proxy", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- pkgcore_mapping_contains_descr, /* tp_descr_get */
- 0, /* tp_descr_set */
-};
-
-PyDoc_STRVAR(
- pkgcore_klass_documentation,
- "misc cpython class functionality");
-
-
-PyMODINIT_FUNC
-init_klass()
-{
- PyObject *m = Py_InitModule3("_klass", NULL, pkgcore_klass_documentation);
- if (!m)
- return;
-
- if (PyType_Ready(&pkgcore_GetAttrProxyType) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_GetType) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_ContainsType) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_generic_equality_eq_type) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_generic_equality_ne_type) < 0)
- return;
-
- if(!pkgcore_equality_attr) {
- if(!(pkgcore_equality_attr = PyString_FromString(
- "__attr_comparison__")))
- return;
- }
-
- PyObject *tmp;
- if (!(tmp = PyType_GenericNew(&pkgcore_GetType, NULL, NULL)))
- return;
- if (PyModule_AddObject(m, "get", tmp) == -1)
- return;
-
- if (!(tmp = PyType_GenericNew(&pkgcore_ContainsType, NULL, NULL)))
- return;
- if (PyModule_AddObject(m, "contains", tmp) == -1)
- return;
-
- Py_INCREF(&pkgcore_GetAttrProxyType);
- if (PyModule_AddObject(
- m, "GetAttrProxy", (PyObject *)&pkgcore_GetAttrProxyType) == -1)
- return;
-
- tmp = PyType_GenericNew(&pkgcore_generic_equality_eq_type, NULL, NULL);
- if(!tmp)
- return;
- if (PyModule_AddObject(m, "generic_eq", tmp) == -1)
- return;
- tmp = PyType_GenericNew(&pkgcore_generic_equality_ne_type, NULL, NULL);
- if(!tmp)
- return;
- if (PyModule_AddObject(m, "generic_ne", tmp) == -1)
- return;
-}
diff --git a/src/lists.c b/src/lists.c
deleted file mode 100644
index 5e2ea18c3..000000000
--- a/src/lists.c
+++ /dev/null
@@ -1,482 +0,0 @@
-/*
- * Copyright: 2006 Marien Zwart <marienz@gentoo.org>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#define PY_SSIZE_T_CLEAN
-
-#include "Python.h"
-#include "py24-compatibility.h"
-
-
-/* Helper functions */
-
-static PyObject *
-build_initial_iterables(PyObject *l) {
- PyObject *result, *iter = PyObject_GetIter(l);
- if (!iter)
- return NULL;
-
- result = PyList_New(1);
- if (!result) {
- Py_DECREF(iter);
- return NULL;
- }
- PyList_SET_ITEM(result, 0, iter);
- return result;
-}
-
-/* iflatten_func: recursively flatten an iterable with a func as filter. */
-
-typedef struct {
- PyObject_HEAD
- PyObject *skip_func;
- PyObject *iterables;
- char in_iternext;
-} pkgcore_iflatten_func;
-
-static void
-pkgcore_iflatten_func_dealloc(pkgcore_iflatten_func *self)
-{
- Py_CLEAR(self->skip_func);
- Py_CLEAR(self->iterables);
- self->ob_type->tp_free((PyObject*) self);
-}
-
-static PyObject *
-pkgcore_iflatten_func_new(PyTypeObject *type,
- PyObject *args, PyObject *kwargs)
-{
- pkgcore_iflatten_func *self;
- PyObject *l=NULL, *skip_func=NULL, *tmp;
- int res;
-
- if (kwargs && PyDict_Size(kwargs)) {
- PyErr_SetString(PyExc_TypeError,
- "iflatten_func takes no keyword arguments");
- return NULL;
- }
- if (!PyArg_UnpackTuple(args, "iflatten_func", 2, 2, &l, &skip_func)) {
- return NULL;
- }
-
- /* Check if we got a single argument that should be skipped. */
- tmp = PyObject_CallFunctionObjArgs(skip_func, l, NULL);
- if (!tmp) {
- return NULL;
- }
- // Py_(True|False) are singletons, thus we're trying to bypass
- // the PyObject_IsTrue triggering __nonzero__ protocol.
- if(tmp == Py_True) {
- res = 1;
- } else if (tmp == Py_False) {
- res = 0;
- } else {
- res = PyObject_IsTrue(tmp);
- if(res == -1) {
- Py_DECREF(tmp);
- return NULL;
- }
- }
- Py_DECREF(tmp);
- if (res) {
- PyObject *tuple = PyTuple_Pack(1, l);
- if (!tuple) {
- return NULL;
- }
- PyObject *iter = PyObject_GetIter(tuple);
- Py_DECREF(tuple);
- return iter;
- }
-
- self = (pkgcore_iflatten_func *)type->tp_alloc(type, 0);
- if (!self)
- return NULL;
-
- self->in_iternext = 0;
-
- if (!(self->iterables = build_initial_iterables(l))) {
- Py_DECREF(self);
- return NULL;
- }
-
- Py_INCREF(skip_func);
- self->skip_func = skip_func;
-
- return (PyObject *)self;
-}
-
-static PyObject *
-pkgcore_iflatten_func_iternext(pkgcore_iflatten_func *self) {
- PyObject *tail, *result, *tmp;
- int res;
- Py_ssize_t n;
-
- if (self->in_iternext) {
- /* We do not allow this because it means our list could be
- * manipulated while we are running. Exception raised matches
- * what a generator raises if you try the same thing.
- */
- PyErr_SetString(PyExc_ValueError,
- "Recursive calls to iflatten_func.next are illegal");
- return NULL;
- }
- self->in_iternext = 1;
-
- /* Look at the final iterator on our stack: */
- while ((n = PyList_GET_SIZE(self->iterables))) {
- tail = PyList_GET_ITEM(self->iterables, n - 1);
-
- /* See if it has any results left: */
-
- /* (This reference is borrowed from the list, but since we
- disallow recursive calls in here it should be safe to not
- increment it). */
-
- result = PyIter_Next(tail);
- if (result) {
-
- /* See if we need to iterate over this new result: */
-
- tmp = PyObject_CallFunctionObjArgs(self->skip_func, result, NULL);
- if (!tmp) {
- Py_DECREF(result);
- self->in_iternext = 0;
- return NULL;
- }
- res = PyObject_IsTrue(tmp);
- Py_DECREF(tmp);
- if (res == -1) {
- Py_DECREF(result);
- result = NULL;
- } else if (!res) {
- /* False from our skip func. */
- /* If it is an iterator add it to our chain, else return it. */
- tmp = PyObject_GetIter(result);
- if (tmp) {
- /* Iterable, append to our stack and continue. */
- Py_DECREF(result);
- result = NULL;
- res = PyList_Append(self->iterables, tmp);
- Py_DECREF(tmp);
- if (res != -1) {
- continue;
- }
- /* Fall through and propagate the error. */
- } else {
- /* If we get here PyObject_GetIter raised an exception.
- * If it was TypeError we have a non-iterator we can
- * just return, else we propagate the error.
- */
- if (PyErr_ExceptionMatches(PyExc_TypeError)) {
- PyErr_Clear();
- } else {
- Py_DECREF(result);
- result = NULL;
- }
- }
- }
- } else {
- /* PyIter_Next did not return an item. If this was not
- * because of an error we should pop the exhausted
- * iterable off and continue. */
- if (!PyErr_Occurred() &&
- PySequence_DelItem(self->iterables, n - 1) != -1) {
- continue;
- }
- }
- self->in_iternext = 0;
- return result;
- }
-
- /* We ran out of iterables entirely, so we are done */
- self->in_iternext = 0;
- return NULL;
-}
-
-PyDoc_STRVAR(
- pkgcore_iflatten_func_documentation,
- "iflatten_func(iters, func): collapse [(1),2] into [1,2]\n"
- "\n"
- "func is called with one argument and should return true if this \n"
- "should not be iterated over.\n"
- );
-
-static PyTypeObject pkgcore_iflatten_func_type = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size*/
- "pkgcore.util._lists.iflatten_func", /* tp_name*/
- sizeof(pkgcore_iflatten_func), /* tp_basicsize*/
- 0, /* tp_itemsize*/
- (destructor)pkgcore_iflatten_func_dealloc, /* tp_dealloc*/
- 0, /* tp_print*/
- 0, /* tp_getattr*/
- 0, /* tp_setattr*/
- 0, /* tp_compare*/
- 0, /* tp_repr*/
- 0, /* tp_as_number*/
- 0, /* tp_as_sequence*/
- 0, /* tp_as_mapping*/
- 0, /* tp_hash */
- (ternaryfunc)0, /* tp_call*/
- (reprfunc)0, /* tp_str*/
- 0, /* tp_getattro*/
- 0, /* tp_setattro*/
- 0, /* tp_as_buffer*/
- Py_TPFLAGS_DEFAULT, /* tp_flags*/
- pkgcore_iflatten_func_documentation, /* tp_doc */
- (traverseproc)0, /* tp_traverse */
- (inquiry)0, /* tp_clear */
- (richcmpfunc)0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)PyObject_SelfIter, /* tp_iter */
- (iternextfunc)pkgcore_iflatten_func_iternext, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_iflatten_func_new, /* tp_new */
-};
-
-/* iflatten_instance: recursively flatten an iterable
- except for some instances */
-
-typedef struct {
- PyObject_HEAD
- PyObject *skip_flattening;
- PyObject *iterables;
- char in_iternext;
-} pkgcore_iflatten_instance;
-
-static void
-pkgcore_iflatten_instance_dealloc(pkgcore_iflatten_instance *self)
-{
- Py_CLEAR(self->skip_flattening);
- Py_CLEAR(self->iterables);
- self->ob_type->tp_free((PyObject*) self);
-}
-
-static PyObject *
-pkgcore_iflatten_instance_new(PyTypeObject *type,
- PyObject *args, PyObject *kwargs)
-{
- pkgcore_iflatten_instance *self;
- PyObject *l=NULL, *skip_flattening=(PyObject*)&PyBaseString_Type;
- int res;
-
- if (kwargs && PyDict_Size(kwargs)) {
- PyErr_SetString(PyExc_TypeError,
- "iflatten_instance takes no keyword arguments");
- return NULL;
- }
- if (!PyArg_UnpackTuple(args, "iflatten_instance", 1, 2,
- &l, &skip_flattening)) {
- return NULL;
- }
-
- /* Check if we got a single argument that should be skipped. */
- res = PyObject_IsInstance(l, skip_flattening);
- if (res == -1) {
- return NULL;
- } else if (res) {
- PyObject *tuple = PyTuple_Pack(1, l);
- if (!tuple) {
- return NULL;
- }
- PyObject *iter = PyObject_GetIter(tuple);
- Py_DECREF(tuple);
- return iter;
- }
-
- self = (pkgcore_iflatten_instance *)type->tp_alloc(type, 0);
- if (!self)
- return NULL;
-
- self->in_iternext = 0;
-
- if (!(self->iterables = build_initial_iterables(l))) {
- Py_DECREF(self);
- return NULL;
- }
-
- Py_INCREF(skip_flattening);
- self->skip_flattening = skip_flattening;
-
- return (PyObject *)self;
-}
-
-static PyObject *
-pkgcore_iflatten_instance_iternext(pkgcore_iflatten_instance *self) {
- PyObject *tail, *result, *iter;
- int n, res;
-
- if (self->in_iternext) {
- /* We do not allow this because it means our list could be
- * manipulated while we are running. Exception raised matches
- * what a generator raises if you try the same thing.
- */
- PyErr_SetString(
- PyExc_ValueError,
- "Recursive calls to iflatten_instance.next are illegal");
- return NULL;
- }
- self->in_iternext = 1;
-
- /* Look at the final iterator on our stack: */
-
- while ((n = PyList_GET_SIZE(self->iterables))) {
- tail = PyList_GET_ITEM(self->iterables, n - 1);
-
- /* See if it has any results left: */
- /* (This reference is borrowed from the list, but since we
- disallow recursive calls in here it should be safe to not
- increment it). */
-
- result = PyIter_Next(tail);
- if (result) {
- /* See if we need to iterate over this new result: */
-
- res = PyObject_IsInstance(result, self->skip_flattening);
- if (res == -1) {
- Py_DECREF(result);
- result = NULL;
- } else if (!res) {
- /* Not in skip_flattening. */
- /* If it is an iterator add it to our chain, else return it. */
- iter = PyObject_GetIter(result);
- if (iter) {
- /* Iterable, append to our stack and continue. */
- Py_DECREF(result);
- result = NULL;
- res = PyList_Append(self->iterables, iter);
- Py_DECREF(iter);
- if (res != -1) {
- continue;
- }
- /* Fall through and propagate the error. */
- } else {
- /* If we get here PyObject_GetIter raised an exception.
- * If it was TypeError we have a non-iterator we can
- * just return, else we propagate the error.
- */
- if (PyErr_ExceptionMatches(PyExc_TypeError)) {
- PyErr_Clear();
- } else {
- Py_DECREF(result);
- result = NULL;
- }
- }
- }
- } else {
- /* PyIter_Next did not return an item. If this was not
- * because of an error we should pop the exhausted
- * iterable off and continue. */
- if (!PyErr_Occurred() &&
- PySequence_DelItem(self->iterables, n - 1) != -1) {
- continue;
- }
- }
- self->in_iternext = 0;
- return result;
- }
-
- /* We ran out of iterables entirely, so we are done */
- self->in_iternext = 0;
- return NULL;
-}
-
-PyDoc_STRVAR(
- pkgcore_iflatten_instance_documentation,
- "iflatten_func(iters, skip_flattening=basestring)\n"
- "\n"
- "collapse [(1),2] into [1,2]\n"
- "skip_flattening is a list of classes to not descend through\n"
- );
-
-static PyTypeObject pkgcore_iflatten_instance_type = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size*/
- "pkgcore.util._lists.iflatten_instance", /* tp_name*/
- sizeof(pkgcore_iflatten_instance), /* tp_basicsize*/
- 0, /* tp_itemsize*/
- (destructor)pkgcore_iflatten_instance_dealloc, /* tp_dealloc*/
- 0, /* tp_print*/
- 0, /* tp_getattr*/
- 0, /* tp_setattr*/
- 0, /* tp_compare*/
- 0, /* tp_repr*/
- 0, /* tp_as_number*/
- 0, /* tp_as_sequence*/
- 0, /* tp_as_mapping*/
- 0, /* tp_hash */
- (ternaryfunc)0, /* tp_call*/
- (reprfunc)0, /* tp_str*/
- 0, /* tp_getattro*/
- 0, /* tp_setattro*/
- 0, /* tp_as_buffer*/
- Py_TPFLAGS_DEFAULT, /* tp_flags*/
- pkgcore_iflatten_instance_documentation, /* tp_doc */
- (traverseproc)0, /* tp_traverse */
- (inquiry)0, /* tp_clear */
- (richcmpfunc)0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)PyObject_SelfIter, /* tp_iter */
- (iternextfunc)pkgcore_iflatten_instance_iternext, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- 0, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_iflatten_instance_new, /* tp_new */
-};
-
-
-/* Initialization function for the module */
-
-PyDoc_STRVAR(
- pkgcore_lists_documentation,
- "C reimplementation of some of pkgcore.util.lists.");
-
-PyMODINIT_FUNC
-init_lists()
-{
- /* Create the module and add the functions */
- PyObject *m = Py_InitModule3("_lists", NULL, pkgcore_lists_documentation);
- if (!m)
- return;
-
- if (PyType_Ready(&pkgcore_iflatten_func_type) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_iflatten_instance_type) < 0)
- return;
-
- Py_INCREF(&pkgcore_iflatten_func_type);
- if (PyModule_AddObject(
- m, "iflatten_func", (PyObject *)&pkgcore_iflatten_func_type) == -1)
- return;
-
- Py_INCREF(&pkgcore_iflatten_instance_type);
- if (PyModule_AddObject(
- m, "iflatten_instance",
- (PyObject *)&pkgcore_iflatten_instance_type) == -1)
- return;
-}
diff --git a/src/posix.c b/src/posix.c
deleted file mode 100644
index 1c0eebec6..000000000
--- a/src/posix.c
+++ /dev/null
@@ -1,721 +0,0 @@
-/*
- * Copyright: 2006-2007 Brian Harring <ferringb@gmail.com>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#define PY_SSIZE_T_CLEAN
-
-#include "common.h"
-#include <structmember.h>
-#include <sys/mman.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-
-// only 2.5.46 kernels and up have this.
-#ifndef MAP_POPULATE
-#define MAP_POPULATE 0
-#endif
-
-static PyObject *pkgcore_stat_float_times = NULL;
-static PyObject *pkgcore_empty_tuple = NULL;
-static PyObject *pkgcore_readlines_empty_iter_singleton = NULL;
-
-
-#define SKIP_SLASHES(ptr) while('/' == *(ptr)) (ptr)++;
-
-static PyObject *
-pkgcore_normpath(PyObject *self, PyObject *old_path)
-{
- if(!PyString_CheckExact(old_path)) {
- PyErr_SetString(PyExc_TypeError,
- "old_path must be a str");
- return NULL;
- }
- Py_ssize_t len = PyString_Size(old_path);
- if(!len)
- return PyString_FromString(".");
-
- char *oldstart, *oldp, *newstart, *newp, *real_newstart;
- oldstart = oldp = PyString_AsString(old_path);
-
- PyObject *new_path = PyString_FromStringAndSize(NULL, len);
- if(!new_path)
- return new_path;
- real_newstart = newstart = newp = PyString_AS_STRING(new_path);
-
-
- int leading_slash;
- Py_ssize_t slash_count = 0;
- // /../ == / , ../foo == ../foo , ../foo/../../ == ../../../
- if('/' == *oldp) {
- *newp = '/';
- newp++;
- leading_slash = 1;
- slash_count++;
- SKIP_SLASHES(oldp);
- newstart = newp;
- } else {
- leading_slash = 0;
- }
-
- while('\0' != *oldp) {
- if('/' == *oldp) {
- *newp = '/';
- newp++;
- slash_count++;
- SKIP_SLASHES(oldp);
- }
- if('.' == *oldp) {
- oldp++;
- if('\0' == *oldp)
- break;
- if('/' == *oldp) {
- oldp++;
- SKIP_SLASHES(oldp);
- continue;
- }
- if(*oldp == '.' && ('/' == oldp[1] || '\0' == oldp[1])) {
- // for newp, ../ == ../ , /../ == /
- if(leading_slash == slash_count) {
- if(!leading_slash) {
- // ../ case.
- newp[0] = '.';
- newp[1] = '.';
- newp[2] = '/';
- newp += 3;
- }
- } else if (slash_count != 1 || '/' != *newstart) {
- // if its /, then the stripping would be ignored.
- newp--;
- while(newp > newstart && '/' != newp[-1])
- newp--;
- }
- oldp++;
- SKIP_SLASHES(oldp);
- continue;
- }
- // funky file name.
- oldp--;
- }
- while('/' != *oldp && '\0' != *oldp) {
- *newp = *oldp;
- ++newp;
- ++oldp;
- }
- }
-
- *newp = '\0';
- // protect leading slash, but strip trailing.
- --newp;
- while(newp > real_newstart && '/' == *newp)
- newp--;
-
- // resize it now.
- _PyString_Resize(&new_path, newp - real_newstart + 1);
- return new_path;
-}
-
-static PyObject *
-pkgcore_join(PyObject *self, PyObject *args)
-{
- if(!args) {
- PyErr_SetString(PyExc_TypeError, "requires at least one path");
- return NULL;
- }
- PyObject *fast = PySequence_Fast(args, "arg must be a sequence");
- if(!fast)
- return NULL;
- Py_ssize_t end = PySequence_Fast_GET_SIZE(fast);
- if(!end) {
- PyErr_SetString(PyExc_TypeError,
- "join takes at least one arguement (0 given)");
- return NULL;
- }
-
- PyObject **items = PySequence_Fast_ITEMS(fast);
- Py_ssize_t start = 0, len, i = 0;
- char *s;
- int leading_slash = 0;
- // find the right most item with a prefixed '/', else 0.
- for(; i < end; i++) {
- if(!PyString_CheckExact(items[i])) {
- PyErr_SetString(PyExc_TypeError, "all args must be strings");
- Py_DECREF(fast);
- return NULL;
- }
- s = PyString_AsString(items[i]);
- if('/' == *s) {
- leading_slash = 1;
- start = i;
- }
- }
- // know the relevant slice now; figure out the size.
- len = 0;
- char *s_start;
- for(i = start; i < end; i++) {
- // this is safe because we're using CheckExact above.
- s_start = s = PyString_AS_STRING(items[i]);
- while('\0' != *s)
- s++;
- if(s_start == s)
- continue;
- len += s - s_start;
- char *s_end = s;
- if(i + 1 != end) {
- // cut the length down for trailing duplicate slashes
- while(s != s_start && '/' == s[-1])
- s--;
- // allocate for a leading slash if needed
- if(s_end == s && (s_start != s ||
- (s_end == s_start && i != start))) {
- len++;
- } else if(s_start != s) {
- len -= s_end - s -1;
- }
- }
- }
-
- // ok... we know the length. allocate a string, and copy it.
- PyObject *ret = PyString_FromStringAndSize(NULL, len);
- if(!ret)
- return NULL;
- char *buf = PyString_AS_STRING(ret);
- if(leading_slash) {
- *buf = '/';
- buf++;
- }
- for(i = start; i < end; i++) {
- s_start = s = PyString_AS_STRING(items[i]);
- if(i == start && leading_slash) {
- // a slash is inserted anywas, thus we skip one ahead
- // so it doesn't gain an extra.
- s_start++;
- s = s_start;
- }
-
- if('\0' == *s)
- continue;
- while('\0' != *s) {
- *buf = *s;
- buf++;
- if('/' == *s) {
- char *tmp_s = s + 1;
- SKIP_SLASHES(s);
- if('\0' == *s) {
- if(i + 1 != end) {
- buf--;
- } else {
- // copy the cracked out trailing slashes on the
- // last item
- while(tmp_s < s) {
- *buf = '/';
- buf++;
- tmp_s++;
- }
- }
- break;
- } else {
- // copy the cracked out intermediate slashes.
- while(tmp_s < s) {
- *buf = '/';
- buf++;
- tmp_s++;
- }
- }
- } else
- s++;
- }
- if(i + 1 != end) {
- *buf = '/';
- buf++;
- }
- }
- *buf = '\0';
- Py_DECREF(fast);
- return ret;
-}
-
-// returns 0 on success opening, 1 on ENOENT but ignore, and -1 on failure
-// if failure condition, appropriate exception is set.
-
-static inline int
-pkgcore_read_open_and_stat(PyObject *path,
- int *fd, struct stat *st)
-{
- errno = 0;
- if((*fd = open(PyString_AsString(path), O_RDONLY)) >= 0) {
- int ret = fstat(*fd, st);
- if(!ret) {
- return 0;
- }
- }
- return 1;
-}
-
-static inline int
-handle_failed_open_stat(int fd, PyObject *path, PyObject *swallow_missing)
-{
- if(fd < 0) {
- if(errno == ENOENT) {
- if(swallow_missing) {
- if(PyObject_IsTrue(swallow_missing)) {
- errno = 0;
- return 0;
- }
- if(PyErr_Occurred())
- return 1;
- }
- }
- PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, path);
- return 1;
- }
- PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
- if(close(fd))
- PyErr_SetFromErrnoWithFilenameObject(PyExc_IOError, path);
- return 1;
-}
-
-static PyObject *
-pkgcore_readfile(PyObject *self, PyObject *args)
-{
- PyObject *path, *swallow_missing = NULL;
- if(!args || !PyArg_ParseTuple(args, "S|O:readfile", &path,
- &swallow_missing)) {
- return NULL;
- }
-// Py_ssize_t size;
- int fd;
- struct stat st;
- Py_BEGIN_ALLOW_THREADS
- if(pkgcore_read_open_and_stat(path, &fd, &st)) {
- Py_BLOCK_THREADS
- if(handle_failed_open_stat(fd, path, swallow_missing))
- return NULL;
- Py_RETURN_NONE;
- }
- Py_END_ALLOW_THREADS
-
- int ret = 0;
- PyObject *data = PyString_FromStringAndSize(NULL, st.st_size);
-
- Py_BEGIN_ALLOW_THREADS
- errno = 0;
- if(data) {
- ret = read(fd, PyString_AS_STRING(data), st.st_size) != st.st_size ? 1 : 0;
- }
- ret += close(fd);
- Py_END_ALLOW_THREADS
-
- if(ret) {
- Py_CLEAR(data);
- data = PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
- }
- return data;
-}
-
-typedef struct {
- PyObject_HEAD
-} pkgcore_readlines_empty_iter;
-
-static PyObject *
-pkgcore_readlines_empty_iter_get_mtime(pkgcore_readlines_empty_iter *self)
-{
- Py_RETURN_NONE;
-}
-
-static int
-pkgcore_readlines_empty_iter_set_mtime(pkgcore_readlines_empty_iter *self,
- PyObject *v, void *closure)
-{
- PyErr_SetString(PyExc_AttributeError, "mtime is immutable");
- return -1;
-}
-
-static PyObject *
-pkgcore_readlines_empty_iter_next(pkgcore_readlines_empty_iter *self)
-{
- PyErr_SetNone(PyExc_StopIteration);
- return NULL;
-}
-
-struct PyGetSetDef pkgcore_readlines_empty_iter_getsetters[] = {
- PKGCORE_GETSET(pkgcore_readlines_empty_iter, "mtime", mtime),
- {NULL}
-};
-
-static PyTypeObject pkgcore_readlines_empty_iter_type = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "readlines.empty_iter", /* tp_name */
- sizeof(pkgcore_readlines_empty_iter), /* tp_size */
- 0, /* tp_itemsize*/
- 0, /* tp_dealloc*/
- 0, /* tp_print*/
- 0, /* tp_getattr*/
- 0, /* tp_setattr*/
- 0, /* tp_compare*/
- 0, /* tp_repr*/
- 0, /* tp_as_number*/
- 0, /* tp_as_sequence*/
- 0, /* tp_as_mapping*/
- 0, /* tp_hash */
- (ternaryfunc)0, /* tp_call*/
- (reprfunc)0, /* tp_str*/
- 0, /* tp_getattro*/
- 0, /* tp_setattro*/
- 0, /* tp_as_buffer*/
- Py_TPFLAGS_DEFAULT, /* tp_flags*/
- 0, /* tp_doc */
- (traverseproc)0, /* tp_traverse */
- (inquiry)0, /* tp_clear */
- (richcmpfunc)0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)PyObject_SelfIter, /* tp_iter */
- (iternextfunc)pkgcore_readlines_empty_iter_next, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- pkgcore_readlines_empty_iter_getsetters, /* tp_getset */
-};
-
-typedef struct {
- PyObject_HEAD
- char *start;
- char *end;
- char *map;
- int fd;
- int strip_newlines;
- time_t mtime;
- unsigned long mtime_nsec;
- PyObject *fallback;
-} pkgcore_readlines;
-
-static PyObject *
-pkgcore_readlines_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
-{
- PyObject *path, *swallow_missing = NULL, *strip_newlines = NULL;
- PyObject *none_on_missing = NULL;
- pkgcore_readlines *self = NULL;
- if(kwargs && PyDict_Size(kwargs)) {
- PyErr_SetString(PyExc_TypeError,
- "readlines.__new__ doesn't accept keywords");
- return NULL;
- } else if (!PyArg_ParseTuple(args, "S|OOOO:readlines.__new__",
- &path, &strip_newlines, &swallow_missing, &none_on_missing)) {
- return NULL;
- }
-
- int fd;
- struct stat st;
-// Py_ssize_t size;
- void *ptr = NULL;
- PyObject *fallback = NULL;
- Py_BEGIN_ALLOW_THREADS
- errno = 0;
- if(pkgcore_read_open_and_stat(path, &fd, &st)) {
- Py_BLOCK_THREADS
-
- if(handle_failed_open_stat(fd, path, swallow_missing))
- return NULL;
-
- // return an empty tuple, and let them iter over that.
- if(none_on_missing && PyObject_IsTrue(none_on_missing)) {
- Py_RETURN_NONE;
- }
-
- Py_INCREF(pkgcore_readlines_empty_iter_singleton);
- return pkgcore_readlines_empty_iter_singleton;
- }
- if(st.st_size >= 0x4000) {
- ptr = (char *)mmap(NULL, st.st_size, PROT_READ,
- MAP_SHARED|MAP_NORESERVE|MAP_POPULATE, fd, 0);
- if(ptr == MAP_FAILED)
- ptr = NULL;
- } else {
- Py_BLOCK_THREADS
- fallback = PyString_FromStringAndSize(NULL, st.st_size);
- Py_UNBLOCK_THREADS
- if(fallback) {
- errno = 0;
- ptr = (read(fd, PyString_AS_STRING(fallback), st.st_size) != st.st_size) ?
- MAP_FAILED : NULL;
- }
- int ret = close(fd);
- if(ret) {
- Py_CLEAR(fallback);
- PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
- Py_BLOCK_THREADS
- return NULL;
- } else if(!fallback) {
- Py_BLOCK_THREADS
- return NULL;
- }
- }
- Py_END_ALLOW_THREADS
-
- if(ptr == MAP_FAILED) {
- PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
- if(close(fd))
- PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path);
- Py_CLEAR(fallback);
- return NULL;
- }
-
- self = (pkgcore_readlines *)type->tp_alloc(type, 0);
- if(!self) {
- // you've got to be kidding me...
- if(ptr) {
- munmap(ptr, st.st_size);
- close(fd);
- errno = 0;
- } else {
- Py_DECREF(fallback);
- }
- if(self) {
- Py_DECREF(self);
- }
- return NULL;
- }
- self->fallback = fallback;
- self->map = ptr;
- self->mtime = st.st_mtime;
-#ifdef HAVE_STAT_TV_NSEC
- self->mtime_nsec = st.st_mtim.tv_nsec;
-#else
- self->mtime_nsec = 0;
-#endif
- if (ptr) {
- self->start = ptr;
- self->fd = fd;
- } else {
- self->start = PyString_AS_STRING(fallback);
- self->fd = -1;
- }
- self->end = self->start + st.st_size;
-
- if(strip_newlines) {
- if(strip_newlines == Py_True) {
- self->strip_newlines = 1;
- } else if (strip_newlines == Py_False) {
- self->strip_newlines = 0;
- } else {
- self->strip_newlines = PyObject_IsTrue(strip_newlines) ? 1 : 0;
- if(PyErr_Occurred()) {
- Py_DECREF(self);
- return NULL;
- }
- }
- } else
- self->strip_newlines = 1;
- return (PyObject *)self;
-}
-
-static void
-pkgcore_readlines_dealloc(pkgcore_readlines *self)
-{
- if(self->fallback) {
- Py_DECREF(self->fallback);
- } else if(self->map) {
- if(munmap(self->map, self->end - self->map))
- // swallow it, no way to signal an error
- errno = 0;
- if(close(self->fd))
- // swallow it, no way to signal an error
- errno = 0;
- }
- self->ob_type->tp_free((PyObject *)self);
-}
-
-static PyObject *
-pkgcore_readlines_iternext(pkgcore_readlines *self)
-{
- if(self->start == self->end) {
- // at the end, thus return
- return NULL;
- }
- char *p = self->start;
- assert(self->end);
- assert(self->start);
- assert(self->map || self->fallback);
- assert(self->end > self->start);
-
- while(p != self->end && '\n' != *p)
- p++;
-
- PyObject *ret;
- if(self->strip_newlines) {
- ret = PyString_FromStringAndSize(self->start, p - self->start);
- } else {
- if(p == self->end)
- ret = PyString_FromStringAndSize(self->start, p - self->start);
- else
- ret = PyString_FromStringAndSize(self->start, p - self->start + 1);
- }
- if(p != self->end) {
- p++;
- }
- self->start = p;
- return ret;
-}
-
-static int
-pkgcore_readlines_set_mtime(pkgcore_readlines *self, PyObject *v,
- void *closure)
-{
- PyErr_SetString(PyExc_AttributeError, "mtime is immutable");
- return -1;
-}
-
-static PyObject *
-pkgcore_readlines_get_mtime(pkgcore_readlines *self)
-{
- PyObject *ret = PyObject_Call(pkgcore_stat_float_times,
- pkgcore_empty_tuple, NULL);
- if(!ret)
- return NULL;
- int is_float;
- if(ret == Py_True) {
- is_float = 1;
- } else if (ret == Py_False) {
- is_float = 0;
- } else {
- is_float = PyObject_IsTrue(ret);
- if(is_float == -1) {
- Py_DECREF(ret);
- return NULL;
- }
- }
- Py_DECREF(ret);
- if(is_float)
- return PyFloat_FromDouble(self->mtime + 1e-9 * self->mtime_nsec);
-#if SIZEOF_TIME_T > SIZEOF_LONG
- return PyLong_FromLong((Py_LONG_LONG)self->mtime);
-#else
- return PyInt_FromLong((long)self->mtime);
-#endif
-}
-
-static PyGetSetDef pkgcore_readlines_getsetters[] = {
-PKGCORE_GETSET(pkgcore_readlines, "mtime", mtime),
- {NULL}
-};
-
-PyDoc_STRVAR(
- pkgcore_readlines_documentation,
- "readline(path [, strip_newlines [, swallow_missing [, none_on_missing]]])"
- " -> iterable yielding"
- " each line of a file\n\n"
- "if strip_newlines is True, the trailing newline is stripped\n"
- "if swallow_missing is True, for missing files it returns an empty "
- "iterable\n"
- "if none_on_missing and the file is missing, return None instead"
- );
-
-
-static PyTypeObject pkgcore_readlines_type = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size*/
- "pkgcore.util.osutils._posix.readlines", /* tp_name*/
- sizeof(pkgcore_readlines), /* tp_basicsize*/
- 0, /* tp_itemsize*/
- (destructor)pkgcore_readlines_dealloc, /* tp_dealloc*/
- 0, /* tp_print*/
- 0, /* tp_getattr*/
- 0, /* tp_setattr*/
- 0, /* tp_compare*/
- 0, /* tp_repr*/
- 0, /* tp_as_number*/
- 0, /* tp_as_sequence*/
- 0, /* tp_as_mapping*/
- 0, /* tp_hash */
- (ternaryfunc)0, /* tp_call*/
- (reprfunc)0, /* tp_str*/
- 0, /* tp_getattro*/
- 0, /* tp_setattro*/
- 0, /* tp_as_buffer*/
- Py_TPFLAGS_DEFAULT, /* tp_flags*/
- pkgcore_readlines_documentation, /* tp_doc */
- (traverseproc)0, /* tp_traverse */
- (inquiry)0, /* tp_clear */
- (richcmpfunc)0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- (getiterfunc)PyObject_SelfIter, /* tp_iter */
- (iternextfunc)pkgcore_readlines_iternext, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- pkgcore_readlines_getsetters, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)0, /* tp_init */
- 0, /* tp_alloc */
- pkgcore_readlines_new, /* tp_new */
-};
-
-static PyMethodDef pkgcore_posix_methods[] = {
- {"normpath", (PyCFunction)pkgcore_normpath, METH_O,
- "normalize a path entry"},
- {"join", pkgcore_join, METH_VARARGS,
- "join multiple path items"},
- {"readfile", pkgcore_readfile, METH_VARARGS,
- "fast read of a file: requires a string path, and an optional bool "
- "indicating whether to swallow ENOENT; defaults to false"},
- {NULL}
-};
-
-PyDoc_STRVAR(
- pkgcore_posix_documentation,
- "cpython posix path functionality");
-
-PyMODINIT_FUNC
-init_posix()
-{
- PyObject *s = PyString_FromString("os");
- if(!s)
- return;
-
- PyObject *mos = PyImport_Import(s);
- Py_DECREF(s);
- if(!mos)
- return;
- pkgcore_stat_float_times = PyObject_GetAttrString(mos, "stat_float_times");
- Py_DECREF(mos);
- if(!pkgcore_stat_float_times)
- return;
-
- pkgcore_empty_tuple = PyTuple_New(0);
- if(!pkgcore_empty_tuple)
- return;
-
- PyObject *m = Py_InitModule3("_posix", pkgcore_posix_methods,
- pkgcore_posix_documentation);
- if (!m)
- return;
-
- if (PyType_Ready(&pkgcore_readlines_type) < 0)
- return;
-
- if (PyType_Ready(&pkgcore_readlines_empty_iter_type) < 0)
- return;
-
- Py_INCREF(&pkgcore_readlines_empty_iter_type);
- pkgcore_readlines_empty_iter_singleton = _PyObject_New(
- &pkgcore_readlines_empty_iter_type);
-
-
- Py_INCREF(&pkgcore_readlines_type);
- if (PyModule_AddObject(
- m, "readlines", (PyObject *)&pkgcore_readlines_type) == -1)
- return;
-
- /* Success! */
-}
diff --git a/src/py24-compatibility.h b/src/py24-compatibility.h
deleted file mode 100644
index 08dd95252..000000000
--- a/src/py24-compatibility.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright: 2006 Brian Harring <ferringb@gmail.com>
- * Copyright: 2006 Marien Zwart <marienz@gentoo.org>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#ifndef PKGCORE_COMMON_INCLUDE
-#define PKGCORE_COMMON_INCLUDE 1
-
-/* Compatibility with python < 2.5 */
-
-#if PY_VERSION_HEX < 0x02050000
-typedef int Py_ssize_t;
-#define PY_SSIZE_T_MAX INT_MAX
-#define PY_SSIZE_T_MIN INT_MIN
-typedef Py_ssize_t (*lenfunc)(PyObject *);
-#endif
-
-/* From heapy */
-#include "heapdef.h"
-
-/* Copied from stdtypes.c in guppy */
-#define INTERATTR(name) \
- if ((PyObject *)v->name == r->tgt && \
- (r->visit(NYHR_INTERATTR, PyString_FromString(#name), r))) \
- return 1;
-
-#endif
diff --git a/src/readdir.c b/src/readdir.c
deleted file mode 100644
index 9606a014b..000000000
--- a/src/readdir.c
+++ /dev/null
@@ -1,397 +0,0 @@
-/*
- * Copyright: 2006 Brian Harring <ferringb@gmail.com>
- * Copyright: 2006-2007 Marien Zwart <marienz@gentoo.org>
- * License: GPL2
- *
- * C version of some of pkgcore (for extra speed).
- */
-
-/* This does not really do anything since we do not use the "#"
- * specifier in a PyArg_Parse or similar call, but hey, not using it
- * means we are Py_ssize_t-clean too!
- */
-
-#define PY_SSIZE_T_CLEAN
-
-#include "Python.h"
-
-/* Compatibility with python < 2.5 */
-
-#if PY_VERSION_HEX < 0x02050000
-typedef int Py_ssize_t;
-#define PY_SSIZE_T_MAX INT_MAX
-#define PY_SSIZE_T_MIN INT_MIN
-#endif
-
-#include <dirent.h>
-#include <sys/stat.h>
-
-
-static PyObject *pkgcore_DIRSTR,
- *pkgcore_CHRSTR,
- *pkgcore_BLKSTR,
- *pkgcore_REGSTR,
- *pkgcore_FIFOSTR,
- *pkgcore_LNKSTR,
- *pkgcore_SOCKSTR,
- *pkgcore_UNKNOWNSTR;
-
-/* This function does the actual work for listdir_files and listdir_dirs. */
-
-static PyObject*
-pkgcore_readdir_actual_listdir(const char* path, int followsyms,
- int dkind, int skind)
-{
- DIR *the_dir;
- struct dirent *entry;
-
- PyObject *string;
-
- int pathlen = strlen(path);
-
- PyObject *result = PyList_New(0);
- if (!result) {
- return NULL;
- }
- if (!(the_dir = opendir(path))) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- errno = 0;
- while ((entry = readdir(the_dir))) {
- const char *name = entry->d_name;
- /* skip over "." and ".." */
- if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' &&
- name[2] == 0))) {
- continue;
- }
- if (entry->d_type == DT_UNKNOWN ||
- (followsyms && entry->d_type == DT_LNK)) {
-
- /* both path components, the "/", the trailing null */
-
- size_t size = pathlen + strlen(name) + 2;
- char *buffer = (char *) malloc(size);
- if (!buffer) {
- Py_DECREF(result);
- return PyErr_NoMemory();
- }
- snprintf(buffer, size, "%s/%s", path, name);
-
- struct stat st;
- int ret;
- if (followsyms) {
- ret = stat(buffer, &st);
- } else {
- ret = lstat(buffer, &st);
- }
- free(buffer);
- if (ret != 0) {
- if (followsyms && errno == ENOENT) {
- /* hit a dangling symlimk; skip. */
- errno = 0;
- continue;
- }
- Py_DECREF(result);
- result = NULL;
- break;
- }
-
- if ((st.st_mode & S_IFMT) != skind) {
- continue;
- }
- } else if (entry->d_type != dkind) {
- continue;
- }
- if (!(string = PyString_FromString(name))) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- if (PyList_Append(result, string) == -1) {
- Py_DECREF(string);
- Py_DECREF(result);
- result = NULL;
- break;
- }
- Py_DECREF(string);
- }
- closedir(the_dir);
- if (errno) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- return result;
-}
-
-static PyObject*
-pkgcore_readdir_listdir_dirs(PyObject* self, PyObject* args)
-{
- char *path;
- PyObject *follow_symlinks_obj = Py_True;
-
- if (!PyArg_ParseTuple(args, "s|O", &path, &follow_symlinks_obj)) {
- return NULL;
- }
-
- int follow_symlinks = PyObject_IsTrue(follow_symlinks_obj);
- if (follow_symlinks == -1) {
- return NULL;
- }
-
- return pkgcore_readdir_actual_listdir(path, follow_symlinks,
- DT_DIR, S_IFDIR);
-}
-
-static PyObject*
-pkgcore_readdir_listdir_files(PyObject* self, PyObject* args)
-{
- char *path;
- PyObject *follow_symlinks_obj = Py_True;
-
- if (!PyArg_ParseTuple(args, "s|O", &path, &follow_symlinks_obj)) {
- return NULL;
- }
-
- int follow_symlinks = PyObject_IsTrue(follow_symlinks_obj);
- if (follow_symlinks == -1) {
- return NULL;
- }
-
- return pkgcore_readdir_actual_listdir(path, follow_symlinks,
- DT_REG, S_IFREG);
-}
-
-static PyObject*
-pkgcore_readdir_listdir(PyObject* self, PyObject* args)
-{
- char *path;
-
- if (!PyArg_ParseTuple(args, "s", &path)) {
- return NULL;
- }
-
- PyObject *result = PyList_New(0);
- if (!result) {
- return NULL;
- }
-
- DIR *the_dir = opendir(path);
- if (!the_dir) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- errno = 0;
- struct dirent *entry;
- while ((entry = readdir(the_dir))) {
- const char *name = entry->d_name;
- /* skip over "." and ".." */
- if (!(name[0] == '.' && (name[1] == 0 ||
- (name[1] == '.' && name[2] == 0)))) {
-
- PyObject *string = PyString_FromString(name);
- if (!string) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- int res = PyList_Append(result, string);
- Py_DECREF(string);
- if (res == -1) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- }
- }
- closedir(the_dir);
- if (errno) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- return result;
-}
-
-static PyObject*
-pkgcore_readdir_read_dir(PyObject* self, PyObject* args)
-{
- char *path;
-
- if (!PyArg_ParseTuple(args, "s", &path)) {
- return NULL;
- }
- ssize_t pathlen = strlen(path);
-
- PyObject *result = PyList_New(0);
- if (!result) {
- return NULL;
- }
-
- DIR *the_dir = opendir(path);
- if (!the_dir) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
-
- struct dirent *entry;
- while ((entry = readdir(the_dir))) {
- const char *name = entry->d_name;
- /* skip over "." and ".." */
- if (name[0] == '.' && (name[1] == 0 ||
- (name[1] == '.' && name[2] == 0))) {
- continue;
- }
-
- PyObject *typestr;
- switch (entry->d_type) {
- case DT_REG:
- typestr = pkgcore_REGSTR;
- break;
- case DT_DIR:
- typestr = pkgcore_DIRSTR;
- break;
- case DT_FIFO:
- typestr = pkgcore_FIFOSTR;
- break;
- case DT_SOCK:
- typestr = pkgcore_SOCKSTR;
- break;
- case DT_CHR:
- typestr = pkgcore_CHRSTR;
- break;
- case DT_BLK:
- typestr = pkgcore_BLKSTR;
- break;
- case DT_LNK:
- typestr = pkgcore_LNKSTR;
- break;
- case DT_UNKNOWN:
- {
- /* both path components, the "/", the trailing null */
- size_t size = pathlen + strlen(name) + 2;
- char *buffer = (char *) malloc(size);
- if (!buffer) {
- closedir(the_dir);
- return PyErr_NoMemory();
- }
- snprintf(buffer, size, "%s/%s", path, name);
- struct stat st;
- int ret = lstat(buffer, &st);
- free(buffer);
- if (ret == -1) {
- closedir(the_dir);
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- switch (st.st_mode & S_IFMT) {
- case S_IFDIR:
- typestr = pkgcore_DIRSTR;
- break;
- case S_IFCHR:
- typestr = pkgcore_CHRSTR;
- break;
- case S_IFBLK:
- typestr = pkgcore_BLKSTR;
- break;
- case S_IFREG:
- typestr = pkgcore_REGSTR;
- break;
- case S_IFLNK:
- typestr = pkgcore_LNKSTR;
- break;
- case S_IFSOCK:
- typestr = pkgcore_SOCKSTR;
- break;
- case S_IFIFO:
- typestr = pkgcore_FIFOSTR;
- break;
- default:
- /* XXX does this make sense? probably not. */
- typestr = pkgcore_UNKNOWNSTR;
- }
- }
- break;
-
- default:
- /* XXX does this make sense? probably not. */
- typestr = pkgcore_UNKNOWNSTR;
- }
-
- PyObject *namestr = PyString_FromString(name);
- if (!namestr) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- /* Slight hack: incref typestr after our error checks. */
- PyObject *tuple = PyTuple_Pack(2, namestr, typestr);
- Py_DECREF(namestr);
- if (!tuple) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- Py_INCREF(typestr);
-
- int res = PyList_Append(result, tuple);
- Py_DECREF(tuple);
- if (res == -1) {
- Py_DECREF(result);
- result = NULL;
- break;
- }
- }
- if (closedir(the_dir) == -1) {
- return PyErr_SetFromErrno(PyExc_OSError);
- }
- return result;
-}
-
-/* Module initialization */
-
-static PyMethodDef pkgcore_readdir_methods[] = {
- {"listdir", (PyCFunction)pkgcore_readdir_listdir, METH_VARARGS,
- "listdir(path, followSymlinks=True, kinds=everything)"},
- {"listdir_dirs", (PyCFunction)pkgcore_readdir_listdir_dirs, METH_VARARGS,
- "listdir_dirs(path, followSymlinks=True)"},
- {"listdir_files", (PyCFunction)pkgcore_readdir_listdir_files, METH_VARARGS,
- "listdir_files(path, followSymlinks=True)"},
- {"readdir", (PyCFunction)pkgcore_readdir_read_dir, METH_VARARGS,
- "read_dir(path)"},
- {NULL}
-};
-
-PyDoc_STRVAR(
- pkgcore_module_documentation,
- "C reimplementation of some of pkgcore.util.osutils");
-
-PyMODINIT_FUNC
-init_readdir()
-{
- PyObject *m;
-
- /* XXX we have to initialize these before we call InitModule3 because
- * the pkgcore_readdir_methods use them, which screws up error handling.
- */
- pkgcore_DIRSTR = PyString_FromString("directory");
- pkgcore_CHRSTR = PyString_FromString("chardev");
- pkgcore_BLKSTR = PyString_FromString("block");
- pkgcore_REGSTR = PyString_FromString("file");
- pkgcore_FIFOSTR = PyString_FromString("fifo");
- pkgcore_LNKSTR = PyString_FromString("symlink");
- pkgcore_SOCKSTR = PyString_FromString("socket");
- pkgcore_UNKNOWNSTR = PyString_FromString("unknown");
-
- if (!(pkgcore_DIRSTR &&
- pkgcore_CHRSTR &&
- pkgcore_BLKSTR &&
- pkgcore_REGSTR &&
- pkgcore_FIFOSTR &&
- pkgcore_LNKSTR &&
- pkgcore_SOCKSTR &&
- pkgcore_UNKNOWNSTR)) {
- Py_FatalError("Can't initialize module _readdir (strings)");
- }
-
- /* Create the module and add the functions */
- m = Py_InitModule3("_readdir", pkgcore_readdir_methods,
- pkgcore_module_documentation);
- if (!m)
- return;
-
- /* Success! */
-}
diff --git a/src/restrictions.c b/src/restrictions.c
index 530b1a2bc..3a9a645cd 100644
--- a/src/restrictions.c
+++ b/src/restrictions.c
@@ -12,7 +12,7 @@
#define PY_SSIZE_T_CLEAN
-#include "common.h"
+#include <snakeoil/common.h>
#include <structmember.h>
static PyObject *pkgcore_restrictions_type = NULL;
@@ -188,14 +188,14 @@ static PyMemberDef pkgcore_StrExactMatch_members[] = {
{NULL}
};
-PKGCORE_IMMUTABLE_ATTR_BOOL(pkgcore_StrExactMatch, "negate", negate,
+snakeoil_IMMUTABLE_ATTR_BOOL(pkgcore_StrExactMatch, "negate", negate,
(self->flags & NEGATED_RESTRICT))
-PKGCORE_IMMUTABLE_ATTR_BOOL(pkgcore_StrExactMatch, "case_sensitive", case,
+snakeoil_IMMUTABLE_ATTR_BOOL(pkgcore_StrExactMatch, "case_sensitive", case,
(self->flags & CASE_SENSITIVE))
static PyGetSetDef pkgcore_StrExactMatch_attrs[] = {
-PKGCORE_GETSET(pkgcore_StrExactMatch, "negate", negate),
-PKGCORE_GETSET(pkgcore_StrExactMatch, "case_sensitive", case),
+snakeoil_GETSET(pkgcore_StrExactMatch, "negate", negate),
+snakeoil_GETSET(pkgcore_StrExactMatch, "case_sensitive", case),
{NULL}
};
@@ -390,14 +390,14 @@ static PyMemberDef pkgcore_PackageRestriction_members[] = {
{NULL}
};
-PKGCORE_IMMUTABLE_ATTR_BOOL(pkgcore_PackageRestriction, "negate", negate,
+snakeoil_IMMUTABLE_ATTR_BOOL(pkgcore_PackageRestriction, "negate", negate,
(self->flags & NEGATED_RESTRICT))
-PKGCORE_IMMUTABLE_ATTR_BOOL(pkgcore_PackageRestriction, "ignore_missing",
+snakeoil_IMMUTABLE_ATTR_BOOL(pkgcore_PackageRestriction, "ignore_missing",
ignore_missing, (self->flags & IGNORE_MISSING))
static PyGetSetDef pkgcore_PackageRestriction_attrs[] = {
-PKGCORE_GETSET(pkgcore_PackageRestriction, "negate", negate),
-PKGCORE_GETSET(pkgcore_PackageRestriction, "ignore_missing", ignore_missing),
+snakeoil_GETSET(pkgcore_PackageRestriction, "negate", negate),
+snakeoil_GETSET(pkgcore_PackageRestriction, "ignore_missing", ignore_missing),
{NULL}
};