summaryrefslogtreecommitdiff
path: root/eclass
diff options
context:
space:
mode:
authorRobin H. Johnson <robbat2@gentoo.org>2015-08-08 13:49:04 -0700
committerRobin H. Johnson <robbat2@gentoo.org>2015-08-08 17:38:18 -0700
commit56bd759df1d0c750a065b8c845e93d5dfa6b549d (patch)
tree3f91093cdb475e565ae857f1c5a7fd339e2d781e /eclass
downloadgentoo-56bd759df1d0c750a065b8c845e93d5dfa6b549d.tar.gz
gentoo-56bd759df1d0c750a065b8c845e93d5dfa6b549d.tar.bz2
gentoo-56bd759df1d0c750a065b8c845e93d5dfa6b549d.zip
proj/gentoo: Initial commit
This commit represents a new era for Gentoo: Storing the gentoo-x86 tree in Git, as converted from CVS. This commit is the start of the NEW history. Any historical data is intended to be grafted onto this point. Creation process: 1. Take final CVS checkout snapshot 2. Remove ALL ChangeLog* files 3. Transform all Manifests to thin 4. Remove empty Manifests 5. Convert all stale $Header$/$Id$ CVS keywords to non-expanded Git $Id$ 5.1. Do not touch files with -kb/-ko keyword flags. Signed-off-by: Robin H. Johnson <robbat2@gentoo.org> X-Thanks: Alec Warner <antarus@gentoo.org> - did the GSoC 2006 migration tests X-Thanks: Robin H. Johnson <robbat2@gentoo.org> - infra guy, herding this project X-Thanks: Nguyen Thai Ngoc Duy <pclouds@gentoo.org> - Former Gentoo developer, wrote Git features for the migration X-Thanks: Brian Harring <ferringb@gentoo.org> - wrote much python to improve cvs2svn X-Thanks: Rich Freeman <rich0@gentoo.org> - validation scripts X-Thanks: Patrick Lauer <patrick@gentoo.org> - Gentoo dev, running new 2014 work in migration X-Thanks: Michał Górny <mgorny@gentoo.org> - scripts, QA, nagging X-Thanks: All of other Gentoo developers - many ideas and lots of paint on the bikeshed
Diffstat (limited to 'eclass')
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-cmds-c11
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-cmds-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-expsym-c14
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-expsym-cxx14
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-noundef-c11
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-noundef-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-soname12
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-usertl-c10
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.0-usertl-cxx10
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.22-cmds-c11
-rw-r--r--eclass/ELT-patches/aixrtl/1.5.22-cmds-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/2.0.0-fpic-c12
-rw-r--r--eclass/ELT-patches/aixrtl/2.0.0-fpic-cxx12
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-cmds-c11
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-cmds-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-expsym-c14
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-expsym-cxx14
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-noundef-c11
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-noundef-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.0-usertl-cxx10
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.8-cmds-c11
-rw-r--r--eclass/ELT-patches/aixrtl/2.2.8-cmds-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.0-expsym-c14
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.0-expsym-cxx14
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.2.418-cmds-c11
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.2.418-cmds-cxx11
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.2.418-soname10
-rw-r--r--eclass/ELT-patches/aixrtl/2.4.4-with-svr411
-rw-r--r--eclass/ELT-patches/as-needed/1.530
-rw-r--r--eclass/ELT-patches/as-needed/1.5.2638
-rw-r--r--eclass/ELT-patches/as-needed/2.2.638
-rw-r--r--eclass/ELT-patches/as-needed/2.4.238
-rw-r--r--eclass/ELT-patches/cross/link-ROOT20
-rw-r--r--eclass/ELT-patches/darwin-conf/1.524
-rw-r--r--eclass/ELT-patches/darwin-conf/1.5b33
-rw-r--r--eclass/ELT-patches/darwin-conf/2.2.429
-rw-r--r--eclass/ELT-patches/darwin-conf/2.2.643
-rw-r--r--eclass/ELT-patches/darwin-ltconf/1.2.066
-rw-r--r--eclass/ELT-patches/darwin-ltconf/1.3.067
-rw-r--r--eclass/ELT-patches/darwin-ltmain/1.2.040
-rw-r--r--eclass/ELT-patches/darwin-ltmain/1.3.064
-rw-r--r--eclass/ELT-patches/deplibs/1.518
-rw-r--r--eclass/ELT-patches/deplibs/2.1b18
-rw-r--r--eclass/ELT-patches/fbsd-conf/00broken-libglade33
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.4.032
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.5.10120
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.5.18116
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.5.20116
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.5.20b61
-rw-r--r--eclass/ELT-patches/fbsd-conf/1.9f60
-rw-r--r--eclass/ELT-patches/fbsd-ltconf/1.2.026
-rw-r--r--eclass/ELT-patches/fbsd-ltconf/1.3.032
-rw-r--r--eclass/ELT-patches/fbsd-ltconf/1.4a32
-rw-r--r--eclass/ELT-patches/fbsd-ltconf/1.4a-GCC3.032
-rw-r--r--eclass/ELT-patches/fix-relink/1.4.063
-rw-r--r--eclass/ELT-patches/fix-relink/1.4.362
-rw-r--r--eclass/ELT-patches/fix-relink/1.5.051
-rw-r--r--eclass/ELT-patches/gold-conf/2.2.610
-rw-r--r--eclass/ELT-patches/hardcode-relink/1.3d8
-rw-r--r--eclass/ELT-patches/hardcode/1.3d21
-rw-r--r--eclass/ELT-patches/hardcode/2.1b36
-rw-r--r--eclass/ELT-patches/hc-flag-ld/1.518
-rw-r--r--eclass/ELT-patches/hpux-conf/1.5.22-syslibpath24
-rw-r--r--eclass/ELT-patches/install-sh/1.514
-rw-r--r--eclass/ELT-patches/install-sh/1.5.414
-rw-r--r--eclass/ELT-patches/install-sh/1.5.614
-rw-r--r--eclass/ELT-patches/irix-ltmain/2.2.630
-rw-r--r--eclass/ELT-patches/ltmain/1.518
-rw-r--r--eclass/ELT-patches/max_cmd_len/1.5.033
-rw-r--r--eclass/ELT-patches/max_cmd_len/1.5.1433
-rw-r--r--eclass/ELT-patches/max_cmd_len/1.5.2033
-rw-r--r--eclass/ELT-patches/mint-conf/2.2.615
-rw-r--r--eclass/ELT-patches/no-lc/1.3d13
-rw-r--r--eclass/ELT-patches/no-lc/1.5.2211
-rw-r--r--eclass/ELT-patches/nocxx/nocxx-2.64.patch13
-rw-r--r--eclass/ELT-patches/nocxx/nocxx.patch15
-rw-r--r--eclass/ELT-patches/portage/1.2.07
-rw-r--r--eclass/ELT-patches/portage/1.3.0c66
-rw-r--r--eclass/ELT-patches/portage/1.3.371
-rw-r--r--eclass/ELT-patches/portage/1.4.074
-rw-r--r--eclass/ELT-patches/portage/1.5.1077
-rw-r--r--eclass/ELT-patches/portage/2.269
-rw-r--r--eclass/ELT-patches/relink-prog/1.3d24
-rw-r--r--eclass/ELT-patches/relink-prog/2.1b24
-rw-r--r--eclass/ELT-patches/relink/1.4.0a99
-rw-r--r--eclass/ELT-patches/relink/1.4.0a-GCC3.0-199
-rw-r--r--eclass/ELT-patches/relink/1.4.0a-GCC3.0-2100
-rw-r--r--eclass/ELT-patches/relink/1.4.1124
-rw-r--r--eclass/ELT-patches/relink/1.4.299
-rw-r--r--eclass/ELT-patches/relink/1.4.3111
-rw-r--r--eclass/ELT-patches/rem-int-dep/1.3.511
-rw-r--r--eclass/ELT-patches/sed/1.3.414
-rw-r--r--eclass/ELT-patches/sed/1.4.014
-rw-r--r--eclass/ELT-patches/sed/1.5.616
-rw-r--r--eclass/ELT-patches/sol2-conf/2.4.214
-rw-r--r--eclass/ELT-patches/sol2-ltmain/2.4.211
-rw-r--r--eclass/ELT-patches/sys-lib-dlsearch/2.421
-rw-r--r--eclass/ELT-patches/target-nm/2.4.245
-rw-r--r--eclass/ELT-patches/test/1.4.0291
-rw-r--r--eclass/ELT-patches/test/1.4.2578
-rw-r--r--eclass/ELT-patches/tmp/1.3.515
-rw-r--r--eclass/ELT-patches/uclibc-conf/1.2.048
-rw-r--r--eclass/ELT-patches/uclibc-conf/1.3.0c48
-rw-r--r--eclass/ELT-patches/uclibc-ltconf/1.2.036
-rw-r--r--eclass/ELT-patches/uclibc-ltconf/1.3.039
-rw-r--r--eclass/alternatives.eclass143
-rw-r--r--eclass/ant-tasks.eclass180
-rw-r--r--eclass/apache-2.eclass631
-rw-r--r--eclass/apache-module.eclass239
-rw-r--r--eclass/aspell-dict.eclass66
-rw-r--r--eclass/autotools-multilib.eclass90
-rw-r--r--eclass/autotools-utils.eclass403
-rw-r--r--eclass/autotools.eclass607
-rw-r--r--eclass/base.eclass194
-rw-r--r--eclass/bash-completion-r1.eclass134
-rw-r--r--eclass/bash-completion.eclass101
-rw-r--r--eclass/bitcoincore.eclass309
-rw-r--r--eclass/bsdmk.eclass85
-rw-r--r--eclass/bzr.eclass341
-rw-r--r--eclass/cannadic.eclass150
-rw-r--r--eclass/cdrom.eclass247
-rw-r--r--eclass/check-reqs.eclass355
-rw-r--r--eclass/chromium.eclass266
-rw-r--r--eclass/clutter.eclass72
-rw-r--r--eclass/cmake-multilib.eclass74
-rw-r--r--eclass/cmake-utils.eclass751
-rw-r--r--eclass/common-lisp-3.eclass211
-rw-r--r--eclass/common-lisp-common-2.eclass80
-rw-r--r--eclass/common-lisp-common-3.eclass82
-rw-r--r--eclass/common-lisp-common.eclass209
-rw-r--r--eclass/common-lisp.eclass78
-rw-r--r--eclass/confutils.eclass478
-rw-r--r--eclass/cron.eclass161
-rw-r--r--eclass/cuda.eclass134
-rw-r--r--eclass/cvs.eclass583
-rw-r--r--eclass/darcs.eclass205
-rw-r--r--eclass/db-use.eclass116
-rw-r--r--eclass/db.eclass188
-rw-r--r--eclass/depend.apache.eclass317
-rw-r--r--eclass/depend.php.eclass258
-rw-r--r--eclass/distutils-r1.eclass822
-rw-r--r--eclass/distutils.eclass594
-rw-r--r--eclass/elisp-common.eclass415
-rw-r--r--eclass/elisp.eclass204
-rw-r--r--eclass/embassy.eclass92
-rw-r--r--eclass/emboss-r1.eclass133
-rw-r--r--eclass/emul-linux-x86.eclass96
-rw-r--r--eclass/enlightenment.eclass199
-rw-r--r--eclass/eutils.eclass1794
-rw-r--r--eclass/fcaps.eclass217
-rw-r--r--eclass/fdo-mime.eclass38
-rw-r--r--eclass/findlib.eclass59
-rw-r--r--eclass/fixheadtails.eclass44
-rw-r--r--eclass/flag-o-matic.eclass673
-rw-r--r--eclass/font-ebdftopcf.eclass46
-rw-r--r--eclass/font.eclass250
-rw-r--r--eclass/fortran-2.eclass256
-rw-r--r--eclass/fox.eclass230
-rw-r--r--eclass/freebsd.eclass267
-rw-r--r--eclass/freedict.eclass50
-rw-r--r--eclass/games-mods.eclass319
-rw-r--r--eclass/games.eclass391
-rw-r--r--eclass/gdesklets.eclass165
-rw-r--r--eclass/ghc-package.eclass339
-rw-r--r--eclass/git-2.eclass604
-rw-r--r--eclass/git-r3.eclass988
-rw-r--r--eclass/gkrellm-plugin.eclass83
-rw-r--r--eclass/gnat.eclass457
-rw-r--r--eclass/gnatbuild.eclass795
-rw-r--r--eclass/gnome-games.eclass116
-rw-r--r--eclass/gnome-python-common-r1.eclass106
-rw-r--r--eclass/gnome.org.eclass49
-rw-r--r--eclass/gnome2-utils.eclass502
-rw-r--r--eclass/gnome2.eclass263
-rw-r--r--eclass/gnuconfig.eclass99
-rw-r--r--eclass/gnustep-2.eclass27
-rw-r--r--eclass/gnustep-base.eclass268
-rw-r--r--eclass/go-mono.eclass137
-rw-r--r--eclass/golang-base.eclass78
-rw-r--r--eclass/golang-build.eclass71
-rw-r--r--eclass/golang-vcs-snapshot.eclass56
-rw-r--r--eclass/golang-vcs.eclass153
-rw-r--r--eclass/gpe.eclass115
-rw-r--r--eclass/gst-plugins-bad.eclass43
-rw-r--r--eclass/gst-plugins-base.eclass31
-rw-r--r--eclass/gst-plugins-good.eclass42
-rw-r--r--eclass/gst-plugins-ugly.eclass31
-rw-r--r--eclass/gst-plugins10.eclass299
-rw-r--r--eclass/gstreamer.eclass279
-rw-r--r--eclass/gtk-sharp-module.eclass547
-rw-r--r--eclass/haskell-cabal.eclass774
-rw-r--r--eclass/horde.eclass184
-rw-r--r--eclass/intel-sdp.eclass521
-rw-r--r--eclass/java-ant-2.eclass522
-rw-r--r--eclass/java-mvn-src.eclass65
-rw-r--r--eclass/java-osgi.eclass292
-rw-r--r--eclass/java-pkg-2.eclass158
-rw-r--r--eclass/java-pkg-opt-2.eclass67
-rw-r--r--eclass/java-pkg-simple.eclass209
-rw-r--r--eclass/java-utils-2.eclass2826
-rw-r--r--eclass/java-virtuals-2.eclass55
-rw-r--r--eclass/java-vm-2.eclass396
-rw-r--r--eclass/kde4-base.eclass922
-rw-r--r--eclass/kde4-functions.eclass413
-rw-r--r--eclass/kde4-meta-pkg.eclass24
-rw-r--r--eclass/kde4-meta.eclass630
-rw-r--r--eclass/kde5-functions.eclass239
-rw-r--r--eclass/kde5.eclass553
-rw-r--r--eclass/kernel-2.eclass1347
-rw-r--r--eclass/l10n.eclass119
-rw-r--r--eclass/latex-package.eclass238
-rw-r--r--eclass/leechcraft.eclass85
-rw-r--r--eclass/libtool.eclass513
-rw-r--r--eclass/linux-info.eclass923
-rw-r--r--eclass/linux-mod.eclass741
-rw-r--r--eclass/makeedit.eclass37
-rw-r--r--eclass/mercurial.eclass197
-rw-r--r--eclass/mono-env.eclass45
-rw-r--r--eclass/mono.eclass81
-rw-r--r--eclass/mount-boot.eclass157
-rw-r--r--eclass/mozconfig-3.eclass104
-rw-r--r--eclass/mozconfig-v5.31.eclass218
-rw-r--r--eclass/mozconfig-v5.34.eclass232
-rw-r--r--eclass/mozconfig-v5.36.eclass232
-rw-r--r--eclass/mozconfig-v5.38.eclass231
-rw-r--r--eclass/mozconfig-v6.38.eclass239
-rw-r--r--eclass/mozconfig-v6.39.eclass240
-rw-r--r--eclass/mozcoreconf-2.eclass274
-rw-r--r--eclass/mozcoreconf-v3.eclass261
-rw-r--r--eclass/mozextension.eclass88
-rw-r--r--eclass/mozilla-launcher.eclass123
-rw-r--r--eclass/mozlinguas.eclass315
-rw-r--r--eclass/multibuild.eclass269
-rw-r--r--eclass/multilib-build.eclass659
-rw-r--r--eclass/multilib-minimal.eclass124
-rw-r--r--eclass/multilib.eclass451
-rw-r--r--eclass/multiprocessing.eclass273
-rw-r--r--eclass/myspell-r2.eclass118
-rw-r--r--eclass/myspell.eclass257
-rw-r--r--eclass/mysql-cmake.eclass534
-rw-r--r--eclass/mysql-multilib.eclass1113
-rw-r--r--eclass/mysql-v2.eclass921
-rw-r--r--eclass/mysql_fx.eclass306
-rw-r--r--eclass/mythtv-plugins.eclass129
-rw-r--r--eclass/mythtv.eclass48
-rw-r--r--eclass/netsurf.eclass178
-rw-r--r--eclass/nsplugins.eclass80
-rw-r--r--eclass/nvidia-driver.eclass188
-rw-r--r--eclass/oasis.eclass121
-rw-r--r--eclass/obs-download.eclass43
-rw-r--r--eclass/obs-service.eclass112
-rw-r--r--eclass/office-ext-r1.eclass230
-rw-r--r--eclass/openib.eclass155
-rw-r--r--eclass/pam.eclass262
-rw-r--r--eclass/pax-utils.eclass189
-rw-r--r--eclass/perl-app.eclass35
-rw-r--r--eclass/perl-module.eclass547
-rw-r--r--eclass/php-ext-pecl-r2.eclass105
-rw-r--r--eclass/php-ext-source-r2.eclass377
-rw-r--r--eclass/php-lib-r1.eclass61
-rw-r--r--eclass/php-pear-lib-r1.eclass97
-rw-r--r--eclass/php-pear-r1.eclass118
-rw-r--r--eclass/phpconfutils.eclass458
-rw-r--r--eclass/portability.eclass156
-rw-r--r--eclass/prefix.eclass52
-rw-r--r--eclass/python-any-r1.eclass326
-rw-r--r--eclass/python-r1.eclass656
-rw-r--r--eclass/python-single-r1.eclass468
-rw-r--r--eclass/python-utils-r1.eclass1315
-rw-r--r--eclass/python.eclass3181
-rw-r--r--eclass/qmail.eclass536
-rw-r--r--eclass/qmake-utils.eclass323
-rw-r--r--eclass/qt4-build-multilib.eclass835
-rw-r--r--eclass/qt4-build.eclass804
-rw-r--r--eclass/qt4-r2.eclass138
-rw-r--r--eclass/qt5-build.eclass787
-rw-r--r--eclass/readme.gentoo.eclass130
-rw-r--r--eclass/rpm.eclass127
-rw-r--r--eclass/ruby-fakegem.eclass532
-rw-r--r--eclass/ruby-ng-gnome2.eclass93
-rw-r--r--eclass/ruby-ng.eclass724
-rw-r--r--eclass/ruby-single.eclass90
-rw-r--r--eclass/ruby-utils.eclass83
-rw-r--r--eclass/s6.eclass119
-rw-r--r--eclass/savedconfig.eclass155
-rw-r--r--eclass/scons-utils.eclass235
-rw-r--r--eclass/scsh.eclass73
-rw-r--r--eclass/selinux-policy-2.eclass357
-rw-r--r--eclass/sgml-catalog.eclass101
-rw-r--r--eclass/ssl-cert.eclass250
-rw-r--r--eclass/stardict.eclass60
-rw-r--r--eclass/subversion.eclass525
-rw-r--r--eclass/sword-module.eclass34
-rw-r--r--eclass/systemd.eclass387
-rwxr-xr-xeclass/tests/autotools_eaclocal_amflags.sh32
-rwxr-xr-xeclass/tests/distutils-r1.sh33
-rwxr-xr-xeclass/tests/eutils_eshopts.sh44
-rwxr-xr-xeclass/tests/eutils_estack.sh53
-rwxr-xr-xeclass/tests/eutils_evar.sh100
-rwxr-xr-xeclass/tests/eutils_path_exists.sh36
-rwxr-xr-xeclass/tests/flag-o-matic.sh149
-rwxr-xr-xeclass/tests/git-r3.sh204
-rwxr-xr-xeclass/tests/git-r3_GIT_DIR.sh61
-rwxr-xr-xeclass/tests/git-r3_subrepos.sh38
-rwxr-xr-xeclass/tests/linux-info_get_running_version.sh39
-rwxr-xr-xeclass/tests/multiprocessing.sh43
-rwxr-xr-xeclass/tests/multiprocessing_makeopts_jobs.sh39
-rwxr-xr-xeclass/tests/python-utils-r1.sh170
-rwxr-xr-xeclass/tests/savedconfig.sh79
-rwxr-xr-xeclass/tests/scons-utils.sh64
-rw-r--r--eclass/tests/tests-common.sh147
-rwxr-xr-xeclass/tests/toolchain-funcs.sh115
-rwxr-xr-xeclass/tests/toolchain.sh83
-rwxr-xr-xeclass/tests/versionator_version_compare.sh200
-rw-r--r--eclass/texlive-common.eclass170
-rw-r--r--eclass/texlive-module.eclass401
-rw-r--r--eclass/toolchain-binutils.eclass498
-rw-r--r--eclass/toolchain-funcs.eclass839
-rw-r--r--eclass/toolchain.eclass2263
-rw-r--r--eclass/twisted-r1.eclass236
-rw-r--r--eclass/udev.eclass115
-rw-r--r--eclass/unpacker.eclass462
-rw-r--r--eclass/user.eclass466
-rw-r--r--eclass/vala.eclass149
-rw-r--r--eclass/vcs-snapshot.eclass78
-rw-r--r--eclass/vdr-plugin-2.eclass656
-rw-r--r--eclass/versionator.eclass510
-rw-r--r--eclass/vim-doc.eclass73
-rw-r--r--eclass/vim-plugin.eclass157
-rw-r--r--eclass/vim-spell.eclass127
-rw-r--r--eclass/virtualx.eclass199
-rw-r--r--eclass/virtuoso.eclass131
-rw-r--r--eclass/vmware-bundle.eclass83
-rw-r--r--eclass/waf-utils.eclass129
-rw-r--r--eclass/webapp.eclass581
-rw-r--r--eclass/wxwidgets.eclass145
-rw-r--r--eclass/x-modular.eclass621
-rw-r--r--eclass/xemacs-elisp-common.eclass311
-rw-r--r--eclass/xemacs-elisp.eclass55
-rw-r--r--eclass/xemacs-packages.eclass68
-rw-r--r--eclass/xfconf.eclass154
-rw-r--r--eclass/xorg-2.eclass605
342 files changed, 76926 insertions, 0 deletions
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-cmds-c b/eclass/ELT-patches/aixrtl/1.5.0-cmds-c
new file mode 100644
index 000000000000..9be05686ad79
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-cmds-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -7234,7 +7235,7 @@
+ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+- archive_expsym_cmds="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-cmds-cxx b/eclass/ELT-patches/aixrtl/1.5.0-cmds-cxx
new file mode 100644
index 000000000000..13ffb7ac4c09
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-cmds-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -10041,7 +10043,7 @@
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+- archive_expsym_cmds_CXX="\$CC"' -o $output_objdir/$soname $libobjs $deplibs $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$no_entry_flag \${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds_CXX='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-expsym-c b/eclass/ELT-patches/aixrtl/1.5.0-expsym-c
new file mode 100644
index 000000000000..2b54887d8fcd
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-expsym-c
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -7113,9 +7113,9 @@
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ if $NM -V 2>&1 | grep 'GNU' > /dev/null; then
+- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ aix_use_runtimelinking=no
+
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-expsym-cxx b/eclass/ELT-patches/aixrtl/1.5.0-expsym-cxx
new file mode 100644
index 000000000000..9b6bdc2f9526
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-expsym-cxx
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -11217,9 +11217,9 @@
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ if $NM -V 2>&1 | grep 'GNU' > /dev/null; then
+- export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$2 == "T") || (\$2 == "D") || (\$2 == "B")) && (substr(\$3,1,1) != ".")) { print \$3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ ;;
+ pw32*)
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-noundef-c b/eclass/ELT-patches/aixrtl/1.5.0-noundef-c
new file mode 100644
index 000000000000..c893f9c6ab70
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-noundef-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -7189,7 +7190,7 @@
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+- allow_undefined_flag='-berok'
++ no_undefined_flag=' ${wl}-bernotok'
+ # Determine the default libpath from the value encoded in an empty executable.
+ cat >conftest.$ac_ext <<_ACEOF
+ #line $LINENO "configure"
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-noundef-cxx b/eclass/ELT-patches/aixrtl/1.5.0-noundef-cxx
new file mode 100644
index 000000000000..dc01fcab6dd2
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-noundef-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -9995,7 +9997,7 @@
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+- allow_undefined_flag_CXX='-berok'
++ no_undefined_flag_CXX=' ${wl}-bernotok'
+ # Determine the default libpath from the value encoded in an empty executable.
+ cat >conftest.$ac_ext <<_ACEOF
+ #line $LINENO "configure"
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-soname b/eclass/ELT-patches/aixrtl/1.5.0-soname
new file mode 100644
index 000000000000..113fe2d84e16
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-soname
@@ -0,0 +1,12 @@
+--- configure
++++ configure
+@@ -14426,7 +14426,8 @@
+ # If using run time linking (on AIX 4.2 or later) use lib<name>.so
+ # instead of lib<name>.a to let people know that these are not
+ # typical AIX shared libraries.
+- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
++ library_names_spec='$libname$release$shared_ext$major $libname$shared_ext'
++ [ "${OBJECT_MODE:-32}" = '64' ] && soname_spec='$libname$release$shared_ext$major(shr_64.o)' || soname_spec='$libname$release$shared_ext$major(shr.o)'
+ else
+ # We preserve .a as extension for shared libraries through AIX4.2
+ # and later when we are not doing run time linking.
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-usertl-c b/eclass/ELT-patches/aixrtl/1.5.0-usertl-c
new file mode 100644
index 000000000000..b5ff1866c895
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-usertl-c
@@ -0,0 +1,10 @@
+--- configure
++++ configure
+@@ -7130,6 +7130,7 @@
+ fi
+ done
+ esac
++ aix_use_runtimelinking=yes
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
diff --git a/eclass/ELT-patches/aixrtl/1.5.0-usertl-cxx b/eclass/ELT-patches/aixrtl/1.5.0-usertl-cxx
new file mode 100644
index 000000000000..36c84e9809b2
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.0-usertl-cxx
@@ -0,0 +1,10 @@
+--- configure
++++ configure
+@@ -9936,6 +9937,7 @@
+ esac
+ done
+ esac
++ aix_use_runtimelinking=yes
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
diff --git a/eclass/ELT-patches/aixrtl/1.5.22-cmds-c b/eclass/ELT-patches/aixrtl/1.5.22-cmds-c
new file mode 100644
index 000000000000..a97485a3c127
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.22-cmds-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -8679,7 +8679,7 @@
+ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+- archive_expsym_cmds="\$CC"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/1.5.22-cmds-cxx b/eclass/ELT-patches/aixrtl/1.5.22-cmds-cxx
new file mode 100644
index 000000000000..222a0f716918
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/1.5.22-cmds-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -11731,7 +11731,7 @@
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+- archive_expsym_cmds_CXX="\$CC"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then echo "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds_CXX='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.0.0-fpic-c b/eclass/ELT-patches/aixrtl/2.0.0-fpic-c
new file mode 100644
index 000000000000..bb3cb42bbee3
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.0.0-fpic-c
@@ -0,0 +1,12 @@
+--- configure
++++ configure
+@@ -1,6 +1,9 @@
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ lt_prog_compiler_static='-Bstatic'
++ lt_prog_compiler_pic='-fPIC'
++ else
++ lt_prog_compiler_pic='-fPIC'
+ fi
+ ;;
+
diff --git a/eclass/ELT-patches/aixrtl/2.0.0-fpic-cxx b/eclass/ELT-patches/aixrtl/2.0.0-fpic-cxx
new file mode 100644
index 000000000000..532bbcf421e4
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.0.0-fpic-cxx
@@ -0,0 +1,12 @@
+--- configure
++++ configure
+@@ -1,6 +1,9 @@
+ if test "$host_cpu" = ia64; then
+ # AIX 5 now supports IA64 processor
+ lt_prog_compiler_static_CXX='-Bstatic'
++ lt_prog_compiler_pic_CXX='-fPIC'
++ else
++ lt_prog_compiler_pic_CXX='-fPIC'
+ fi
+ ;;
+
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-cmds-c b/eclass/ELT-patches/aixrtl/2.2.0-cmds-c
new file mode 100644
index 000000000000..6c2cd7ea5b57
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-cmds-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -8619,7 +8619,7 @@
+ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-cmds-cxx b/eclass/ELT-patches/aixrtl/2.2.0-cmds-cxx
new file mode 100644
index 000000000000..af3c7b7c5a30
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-cmds-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -13946,7 +13947,7 @@
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+- archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds_CXX='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-expsym-c b/eclass/ELT-patches/aixrtl/2.2.0-expsym-c
new file mode 100644
index 000000000000..196a5a36e60c
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-expsym-c
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -8475,9 +8475,9 @@
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ aix_use_runtimelinking=no
+
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-expsym-cxx b/eclass/ELT-patches/aixrtl/2.2.0-expsym-cxx
new file mode 100644
index 000000000000..ee4d163989dd
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-expsym-cxx
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -15506,9 +15506,9 @@
+ # If we're using GNU nm, then we don't want the "-C" option.
+ # -C means demangle to AIX nm, but means don't demangle with GNU nm
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+- export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ ;;
+ pw32*)
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-noundef-c b/eclass/ELT-patches/aixrtl/2.2.0-noundef-c
new file mode 100644
index 000000000000..d9520cbbd730
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-noundef-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -8558,7 +8558,7 @@
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+- allow_undefined_flag='-berok'
++ no_undefined_flag=' ${wl}-bernotok'
+ # Determine the default libpath from the value encoded in an
+ # empty executable.
+ cat >conftest.$ac_ext <<_ACEOF
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-noundef-cxx b/eclass/ELT-patches/aixrtl/2.2.0-noundef-cxx
new file mode 100644
index 000000000000..37d02dba8c6a
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-noundef-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -13884,7 +13885,7 @@
+ if test "$aix_use_runtimelinking" = yes; then
+ # Warning - without using the other runtime loading flags (-brtl),
+ # -berok will link without error, but may produce a broken library.
+- allow_undefined_flag_CXX='-berok'
++ no_undefined_flag_CXX=' ${wl}-bernotok'
+ # Determine the default libpath from the value encoded in an empty
+ # executable.
+ cat >conftest.$ac_ext <<_ACEOF
diff --git a/eclass/ELT-patches/aixrtl/2.2.0-usertl-cxx b/eclass/ELT-patches/aixrtl/2.2.0-usertl-cxx
new file mode 100644
index 000000000000..c59a093c94fc
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.0-usertl-cxx
@@ -0,0 +1,10 @@
+--- configure
++++ configure
+@@ -13819,6 +13819,7 @@
+ done
+ ;;
+ esac
++ aix_use_runtimelinking=yes
+
+ exp_sym_flag='-bexport'
+ no_entry_flag='-bnoentry'
diff --git a/eclass/ELT-patches/aixrtl/2.2.8-cmds-c b/eclass/ELT-patches/aixrtl/2.2.8-cmds-c
new file mode 100644
index 000000000000..e1e09347b21f
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.8-cmds-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -9214,7 +9214,7 @@
+ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi
+
+ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
+- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.2.8-cmds-cxx b/eclass/ELT-patches/aixrtl/2.2.8-cmds-cxx
new file mode 100644
index 000000000000..7a31f661d44c
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.2.8-cmds-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -13307,7 +13307,7 @@
+
+ hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath"
+
+- archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
++ archive_expsym_cmds_CXX='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~rm -f -r $lib.d~mkdir -p $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~rm -f -r $lib.d'
+ else
+ if test "$host_cpu" = ia64; then
+ hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.4.0-expsym-c b/eclass/ELT-patches/aixrtl/2.4.0-expsym-c
new file mode 100644
index 000000000000..6860317739a7
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.0-expsym-c
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -9660,9 +9660,9 @@
+ # Also, AIX nm treats weak defined symbols like other global
+ # defined symbols, whereas GNU nm marks them as "W".
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+- export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ aix_use_runtimelinking=no
+
diff --git a/eclass/ELT-patches/aixrtl/2.4.0-expsym-cxx b/eclass/ELT-patches/aixrtl/2.4.0-expsym-cxx
new file mode 100644
index 000000000000..ca9fd97be522
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.0-expsym-cxx
@@ -0,0 +1,14 @@
+--- configure
++++ configure
+@@ -15711,9 +15711,9 @@
+ # Also, AIX nm treats weak defined symbols like other global defined
+ # symbols, whereas GNU nm marks them as "W".
+ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
+- export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols'
+ else
+- export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
++ export_symbols_cmds_CXX='`echo $NM | sed -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols'
+ fi
+ ;;
+ pw32*)
diff --git a/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-c b/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-c
new file mode 100644
index 000000000000..d92ce344b4f1
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-c
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -9214,7 +9214,7 @@
+ fi
+
+ hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath"
+- archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
++ archive_expsym_cmds='eval `echo "$soname" | $SED -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~$RM -r $lib.d~$MKDIR $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~$RM -r $lib.d'
+ else
+ if test ia64 = "$host_cpu"; then
+ hardcode_libdir_flag_spec='$wl-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-cxx b/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-cxx
new file mode 100644
index 000000000000..851be2763982
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.2.418-cmds-cxx
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -13307,7 +13307,7 @@
+
+ hardcode_libdir_flag_spec_CXX='$wl-blibpath:$libdir:'"$aix_libpath"
+
+- archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag
++ archive_expsym_cmds_CXX='eval `echo "$soname" | sed -e "s,^,: ,; s,(,; membnam=,; s,\.o),,"`~$RM -r $lib.d~$MKDIR $lib.d~$CC -o $lib.d/$membnam.o $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags -shared $wl-G$allow_undefined_flag $wl'$exp_sym_flag':$export_symbols~$STRIP -e $lib.d/$membnam.o~( echo "#! $soname"; if test $membnam = shr_64; then echo "# 64"; else echo "# 32"; fi; cat $export_symbols ) > $lib.d/$membnam.imp~$AR $AR_FLAGS $lib $lib.d/$membnam.o $lib.d/$membnam.imp~$RM -r $lib.d'
+ else
+ if test ia64 = "$host_cpu"; then
+ hardcode_libdir_flag_spec_CXX='$wl-R $libdir:/usr/lib:/lib'
diff --git a/eclass/ELT-patches/aixrtl/2.4.2.418-soname b/eclass/ELT-patches/aixrtl/2.4.2.418-soname
new file mode 100644
index 000000000000..de3e93e8ae55
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.2.418-soname
@@ -0,0 +1,10 @@
+--- configure
++++ configure
+@@ -14426,6 +14426,7 @@
+ # instead of lib<name>.a to let people know that these are not
+ # typical AIX shared libraries.
+ library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext'
++ [ "${OBJECT_MODE:-32}" = '64' ] && soname_spec='$libname$release$shared_ext$major(shr_64.o)' || soname_spec='$libname$release$shared_ext$major(shr.o)'
+ else
+ # We preserve .a as extension for shared libraries through AIX4.2
+ # and later when we are not doing run time linking.
diff --git a/eclass/ELT-patches/aixrtl/2.4.4-with-svr4 b/eclass/ELT-patches/aixrtl/2.4.4-with-svr4
new file mode 100644
index 000000000000..f356ea9883a9
--- /dev/null
+++ b/eclass/ELT-patches/aixrtl/2.4.4-with-svr4
@@ -0,0 +1,11 @@
+--- configure
++++ configure
+@@ -10835,7 +10835,7 @@
+ if ${lt_cv_with_aix_soname+:} false; then :
+ $as_echo_n "(cached) " >&6
+ else
+- lt_cv_with_aix_soname=aix
++ lt_cv_with_aix_soname=svr4
+ fi
+
+ with_aix_soname=$lt_cv_with_aix_soname
diff --git a/eclass/ELT-patches/as-needed/1.5 b/eclass/ELT-patches/as-needed/1.5
new file mode 100644
index 000000000000..01759c93f903
--- /dev/null
+++ b/eclass/ELT-patches/as-needed/1.5
@@ -0,0 +1,30 @@
+--- ltmain.sh.orig 2006-03-29 15:45:36.000000000 +0200
++++ ltmain.sh 2006-03-29 16:39:30.000000000 +0200
+@@ -1754,6 +1754,11 @@
+ arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
+ ;;
+
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ deplibs="$deplibs $arg"
++ continue
++ ;;
++
+ -Wl,*)
+ args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'`
+ arg=
+@@ -2094,6 +2099,15 @@
+ lib=
+ found=no
+ case $deplib in
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ if test "$linkmode,$pass" = "prog,link"; then
++ compile_deplibs="$deplib $compile_deplibs"
++ finalize_deplibs="$deplib $finalize_deplibs"
++ else
++ deplibs="$deplib $deplibs"
++ fi
++ continue
++ ;;
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
diff --git a/eclass/ELT-patches/as-needed/1.5.26 b/eclass/ELT-patches/as-needed/1.5.26
new file mode 100644
index 000000000000..6836c55e1433
--- /dev/null
+++ b/eclass/ELT-patches/as-needed/1.5.26
@@ -0,0 +1,38 @@
+--- ltmain.sh.orig 2009-04-18 16:51:52.000000000 +0200
++++ ltmain.sh 2009-04-18 16:55:05.000000000 +0200
+@@ -1812,10 +1812,15 @@
+ done
+ IFS="$save_ifs"
+ arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
+ ;;
+
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ deplibs="$deplibs $arg"
++ continue
++ ;;
++
+ -Wl,*)
+ args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'`
+ arg=
+ save_ifs="$IFS"; IFS=','
+ for flag in $args; do
+@@ -2152,10 +2157,19 @@
+ fi
+ for deplib in $libs; do
+ lib=
+ found=no
+ case $deplib in
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ if test "$linkmode,$pass" = "prog,link"; then
++ compile_deplibs="$deplib $compile_deplibs"
++ finalize_deplibs="$deplib $finalize_deplibs"
++ else
++ deplibs="$deplib $deplibs"
++ fi
++ continue
++ ;;
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
diff --git a/eclass/ELT-patches/as-needed/2.2.6 b/eclass/ELT-patches/as-needed/2.2.6
new file mode 100644
index 000000000000..7e71de4618dc
--- /dev/null
+++ b/eclass/ELT-patches/as-needed/2.2.6
@@ -0,0 +1,38 @@
+--- ltmain.sh.orig 2009-04-18 14:37:16.000000000 +0200
++++ ltmain.sh 2009-04-18 14:40:08.000000000 +0200
+@@ -4721,10 +4721,15 @@
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ deplibs="$deplibs $arg"
++ continue
++ ;;
++
+ -Wl,*)
+ func_stripname '-Wl,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs="$IFS"; IFS=','
+@@ -5075,10 +5080,19 @@
+
+ for deplib in $libs; do
+ lib=
+ found=no
+ case $deplib in
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ if test "$linkmode,$pass" = "prog,link"; then
++ compile_deplibs="$deplib $compile_deplibs"
++ finalize_deplibs="$deplib $finalize_deplibs"
++ else
++ deplibs="$deplib $deplibs"
++ fi
++ continue
++ ;;
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
diff --git a/eclass/ELT-patches/as-needed/2.4.2 b/eclass/ELT-patches/as-needed/2.4.2
new file mode 100644
index 000000000000..526877a6d35b
--- /dev/null
+++ b/eclass/ELT-patches/as-needed/2.4.2
@@ -0,0 +1,38 @@
+--- ltmain.sh.orig 2012-08-19 10:18:57.929178597 +0200
++++ ltmain.sh 2012-08-19 10:31:43.409388998 +0200
+@@ -5798,10 +5798,15 @@
+ IFS="$save_ifs"
+ func_stripname ' ' '' "$arg"
+ arg=$func_stripname_result
+ ;;
+
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ deplibs="$deplibs $arg"
++ continue
++ ;;
++
+ -Wl,*)
+ func_stripname '-Wl,' '' "$arg"
+ args=$func_stripname_result
+ arg=
+ save_ifs="$IFS"; IFS=','
+@@ -6158,10 +6163,19 @@
+
+ for deplib in $libs; do
+ lib=
+ found=no
+ case $deplib in
++ -Wl,--as-needed|-Wl,--no-as-needed)
++ if test "$linkmode,$pass" = "prog,link"; then
++ compile_deplibs="$deplib $compile_deplibs"
++ finalize_deplibs="$deplib $finalize_deplibs"
++ else
++ deplibs="$deplib $deplibs"
++ fi
++ continue
++ ;;
+ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \
+ |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*)
+ if test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$deplib $compile_deplibs"
+ finalize_deplibs="$deplib $finalize_deplibs"
diff --git a/eclass/ELT-patches/cross/link-ROOT b/eclass/ELT-patches/cross/link-ROOT
new file mode 100644
index 000000000000..3c7d99be1f20
--- /dev/null
+++ b/eclass/ELT-patches/cross/link-ROOT
@@ -0,0 +1,20 @@
+--- libltdl/config/ltmain.sh 2008-09-07 19:56:33.000000000 +0200
++++ libltdl/config/ltmain.sh.new 2009-02-15 20:37:47.000000000 +0100
+@@ -5768,7 +5768,7 @@
+ test "$hardcode_direct_absolute" = no; then
+ add="$libdir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+- add_dir="-L$libdir"
++ add_dir="-L$ROOT/$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+@@ -5785,7 +5785,7 @@
+ fi
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+- add_dir="-L$libdir"
++ add_dir="-L$ROOT/$libdir"
+ # Try looking first in the location we're being installed to.
+ if test -n "$inst_prefix_dir"; then
+ case $libdir in
diff --git a/eclass/ELT-patches/darwin-conf/1.5 b/eclass/ELT-patches/darwin-conf/1.5
new file mode 100644
index 000000000000..46d9e5a885e8
--- /dev/null
+++ b/eclass/ELT-patches/darwin-conf/1.5
@@ -0,0 +1,24 @@
+In Gentoo Prefix we go by Apple's convention to give modules the .bundle
+extension.
+http://developer.apple.com/documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html
+(Above link was removed by Apple, the link below contains a copy:
+http://disanji.net/iOS_Doc/#documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html,
+in case it disappears also, the relevant quote:
+
+Bundle. Bundles are executable files that can be loaded at runtime by
+other products. Plug-ins are implemented using bundles. The term bundle
+in this context refers to the binary itself, not to a structured
+hierarchy. Bundles have the .bundle extension; for example,
+MyBundle.bundle.)
+
+--- configure.orig 2009-05-22 12:05:31 +0200
++++ configure 2009-05-22 12:05:39 +0200
+@@ -15461,7 +15461,7 @@
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+ # Apple's gcc prints 'gcc -print-search-dirs' doesn't operate the same.
+ if test "$GCC" = yes; then
+ sys_lib_search_path_spec=`$CC -print-search-dirs | tr "\n" "$PATH_SEPARATOR" | sed -e 's/libraries:/@libraries:/' | tr "@" "\n" | grep "^libraries:" | sed -e "s/^libraries://" -e "s,=/,/,g" -e "s,$PATH_SEPARATOR, ,g" -e "s,.*,& /lib /usr/lib /usr/local/lib,g"`
diff --git a/eclass/ELT-patches/darwin-conf/1.5b b/eclass/ELT-patches/darwin-conf/1.5b
new file mode 100644
index 000000000000..65633a818419
--- /dev/null
+++ b/eclass/ELT-patches/darwin-conf/1.5b
@@ -0,0 +1,33 @@
+In Gentoo Prefix we go by Apple's convention to give modules the .bundle
+extension.
+http://developer.apple.com/documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html
+(Above link was removed by Apple, the link below contains a copy:
+http://disanji.net/iOS_Doc/#documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html,
+in case it disappears also, the relevant quote:
+
+Bundle. Bundles are executable files that can be loaded at runtime by
+other products. Plug-ins are implemented using bundles. The term bundle
+in this context refers to the binary itself, not to a structured
+hierarchy. Bundles have the .bundle extension; for example,
+MyBundle.bundle.)
+
+--- configure
++++ configure
+@@ -10570,7 +10570,7 @@
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+@@ -14213,7 +14213,7 @@
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+ ;;
diff --git a/eclass/ELT-patches/darwin-conf/2.2.4 b/eclass/ELT-patches/darwin-conf/2.2.4
new file mode 100644
index 000000000000..5ad829343f7d
--- /dev/null
+++ b/eclass/ELT-patches/darwin-conf/2.2.4
@@ -0,0 +1,29 @@
+In Gentoo Prefix we go by Apple's convention to give modules the .bundle
+extension.
+http://developer.apple.com/documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html
+(Above link was removed by Apple, the link below contains a copy:
+http://disanji.net/iOS_Doc/#documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html,
+in case it disappears also, the relevant quote:
+
+Bundle. Bundles are executable files that can be loaded at runtime by
+other products. Plug-ins are implemented using bundles. The term bundle
+in this context refers to the binary itself, not to a structured
+hierarchy. Bundles have the .bundle extension; for example,
+MyBundle.bundle.)
+
+--- configure
++++ configure
+@@ -12172,11 +12172,11 @@
+ version_type=darwin
+ need_lib_prefix=no
+ need_version=no
+- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
++ library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
diff --git a/eclass/ELT-patches/darwin-conf/2.2.6 b/eclass/ELT-patches/darwin-conf/2.2.6
new file mode 100644
index 000000000000..669a68d09a10
--- /dev/null
+++ b/eclass/ELT-patches/darwin-conf/2.2.6
@@ -0,0 +1,43 @@
+In Gentoo Prefix we go by Apple's convention to give modules the .bundle
+extension.
+http://developer.apple.com/documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html
+(Above link was removed by Apple, the link below contains a copy:
+http://disanji.net/iOS_Doc/#documentation/DeveloperTools/Conceptual/XcodeBuildSystem/500-Linking/bs_linking.html,
+in case it disappears also, the relevant quote:
+
+Bundle. Bundles are executable files that can be loaded at runtime by
+other products. Plug-ins are implemented using bundles. The term bundle
+in this context refers to the binary itself, not to a structured
+hierarchy. Bundles have the .bundle extension; for example,
+MyBundle.bundle.)
+
+--- configure
++++ configure
+@@ -11455,11 +11455,11 @@
+ version_type=darwin
+ need_lib_prefix=no
+ need_version=no
+- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
++ library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+
+ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+@@ -15321,11 +15331,11 @@
+ version_type=darwin
+ need_lib_prefix=no
+ need_version=no
+- library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
++ library_names_spec='${libname}${release}${versuffix}$shared_ext ${libname}${release}${major}$shared_ext ${libname}$shared_ext'
+ soname_spec='${libname}${release}${major}$shared_ext'
+ shlibpath_overrides_runpath=yes
+ shlibpath_var=DYLD_LIBRARY_PATH
+- shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
++ shrext_cmds='`test .$module = .yes && echo .bundle || echo .dylib`'
+
+ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
+ ;;
diff --git a/eclass/ELT-patches/darwin-ltconf/1.2.0 b/eclass/ELT-patches/darwin-ltconf/1.2.0
new file mode 100644
index 000000000000..da9f7e484db5
--- /dev/null
+++ b/eclass/ELT-patches/darwin-ltconf/1.2.0
@@ -0,0 +1,66 @@
+--- ltconfig-1.2 Wed May 19 09:17:36 1999
++++ ltconfig Wed Oct 6 14:29:08 2004
+@@ -459,6 +459,11 @@
+ os2*)
+ # We can build DLLs from non-PIC.
+ ;;
++ darwin* | rhapsody*)
++ # PIC is the default on this platform
++ # Common symbols not allowed in MH_DYLIB files
++ pic_flag='-fno-common'
++ ;;
+ amigaos*)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the `-m68020' flag to GCC prevents building anything better,
+@@ -785,6 +790,23 @@
+ hardcode_shlibpath_var=no
+ ;;
+
++ darwin* | rhapsody*)
++ case "$host_os" in
++ rhapsody* | darwin1.[012])
++ allow_undefined_flag='-undefined suppress'
++ ;;
++ *) # Darwin 1.3 on
++ allow_undefined_flag='-flat_namespace -undefined suppress'
++ ;;
++ esac
++ archive_cmds='$nonopt $(test .$module = .yes && echo -bundle || echo -dynamiclib) $allow_undefined_flag -o $lib $libobjs $deplibs $linkopts $(test .$module != .yes && echo -install_name $rpath/$soname $verstring)'
++ # We need to add '_' to the symbols in $export_symbols first
++ #archive_expsym_cmds="$archive_cmds"' && strip -s $export_symbols $lib'
++ hardcode_direct=yes
++ hardcode_shlibpath_var=no
++ whole_archive_flag_spec='-all_load $convenience'
++ ;;
++
+ hpux9*)
+ archive_cmds='$rm $objdir/$soname;$LD -b +s +b $install_libdir -o $objdir/$soname$libobjs;mv $objdir/$soname $lib'
+ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
+@@ -1134,6 +1156,27 @@
+ version_type=sunos
+ library_names_spec='${libname}${release}.so.$versuffix'
+ shlibpath_var=LD_LIBRARY_PATH
++ ;;
++
++darwin* | rhapsody*)
++ dynamic_linker="$host_os dyld"
++ version_type=darwin
++ need_lib_prefix=no
++ need_version=no
++ deplibs_check_method='file_magic Mach-O dynamically linked shared library'
++ file_magic_cmd='/usr/bin/file -L'
++ case "$host_os" in
++ rhapsody* | darwin1.[012])
++ file_magic_test_file='/System/Library/Frameworks/System.framework/System'
++ ;;
++ *) # Darwin 1.3 on
++ file_magic_test_file='/usr/lib/libSystem.dylib'
++ ;;
++ esac
++ library_names_spec='${libname}${release}${versuffix}.$(test .$module = .yes && echo so || echo dylib) ${libname}${release}${major}.$(test .$module = .yes && echo so || echo dylib) ${libname}.$(test .$module = .yes && echo so || echo dylib)'
++ soname_spec='${libname}${release}${major}.$(test .$module = .yes && echo so || echo dylib)'
++ shlibpath_overrides_runpath=yes
++ shlibpath_var=DYLD_LIBRARY_PATH
+ ;;
+
+ hpux9* | hpux10*)
diff --git a/eclass/ELT-patches/darwin-ltconf/1.3.0 b/eclass/ELT-patches/darwin-ltconf/1.3.0
new file mode 100644
index 000000000000..49ae67df4116
--- /dev/null
+++ b/eclass/ELT-patches/darwin-ltconf/1.3.0
@@ -0,0 +1,67 @@
+--- ltconfig.darwin 2000-02-02 19:53:22.000000000 -0500
++++ ltconfig 2004-09-23 20:25:11.000000000 -0400
+@@ -678,6 +678,11 @@
+ cygwin* | mingw* | os2*)
+ # We can build DLLs from non-PIC.
+ ;;
++ darwin* | rhapsody*)
++ # PIC is the default on this platform
++ # Common symbols not allowed in MH_DYLIB files
++ pic_flag='-fno-common'
++ ;;
+ amigaos*)
+ # FIXME: we need at least 68020 code to build shared libraries, but
+ # adding the `-m68020' flag to GCC prevents building anything better,
+@@ -1361,6 +1372,23 @@
+ hardcode_shlibpath_var=no
+ ;;
+
++ darwin* | rhapsody*)
++ case "$host_os" in
++ rhapsody* | darwin1.[012])
++ allow_undefined_flag='-undefined suppress'
++ ;;
++ *) # Darwin 1.3 on
++ allow_undefined_flag='-flat_namespace -undefined suppress'
++ ;;
++ esac
++ archive_cmds='$nonopt $(test .$module = .yes && echo -bundle || echo -dynamiclib) $allow_undefined_flag -o $lib $libobjs $deplibs $linkopts $(test .$module != .yes && echo -install_name $rpath/$soname $verstring)'
++ # We need to add '_' to the symbols in $export_symbols first
++ #archive_expsym_cmds="$archive_cmds"' && strip -s $export_symbols $lib'
++ hardcode_direct=yes
++ hardcode_shlibpath_var=no
++ whole_archive_flag_spec='-all_load $convenience'
++ ;;
++
+ hpux9* | hpux10* | hpux11*)
+ case "$host_os" in
+ hpux9*) archive_cmds='$rm $objdir/$soname~$LD -b +b $install_libdir -o $objdir/$soname $libobjs $deplibs $linkopts~test $objdir/$soname = $lib || mv $objdir/$soname $lib' ;;
+@@ -1897,6 +1931,27 @@
+ esac
+ ;;
+
++darwin* | rhapsody*)
++ dynamic_linker="$host_os dyld"
++ version_type=darwin
++ need_lib_prefix=no
++ need_version=no
++ deplibs_check_method='file_magic Mach-O dynamically linked shared library'
++ file_magic_cmd='/usr/bin/file -L'
++ case "$host_os" in
++ rhapsody* | darwin1.[012])
++ file_magic_test_file='/System/Library/Frameworks/System.framework/System'
++ ;;
++ *) # Darwin 1.3 on
++ file_magic_test_file='/usr/lib/libSystem.dylib'
++ ;;
++ esac
++ library_names_spec='${libname}${release}${versuffix}.$(test .$module = .yes && echo so || echo dylib) ${libname}${release}${major}.$(test .$module = .yes && echo so || echo dylib) ${libname}.$(test .$module = .yes && echo so || echo dylib)'
++ soname_spec='${libname}${release}${major}.$(test .$module = .yes && echo so || echo dylib)'
++ shlibpath_overrides_runpath=yes
++ shlibpath_var=DYLD_LIBRARY_PATH
++ ;;
++
+ gnu*)
+ version_type=linux
+ need_lib_prefix=no
+
diff --git a/eclass/ELT-patches/darwin-ltmain/1.2.0 b/eclass/ELT-patches/darwin-ltmain/1.2.0
new file mode 100644
index 000000000000..aaab86a72337
--- /dev/null
+++ b/eclass/ELT-patches/darwin-ltmain/1.2.0
@@ -0,0 +1,40 @@
+--- ltmain.sh-1.2 Wed May 19 09:17:36 1999
++++ ltmain.sh Wed Oct 6 14:38:37 2004
+@@ -967,6 +967,16 @@
+ versuffix="$current.$revision"
+ ;;
+
++ darwin)
++ # Like Linux, but with the current version available in
++ # verstring for coding it into the library header
++ major=.`expr $current - $age`
++ versuffix="$major.$age.$revision"
++ # Darwin ld doesn't like 0 for these options...
++ minor_current=`expr $current + 1`
++ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
++ ;;
++
+ *)
+ $echo "$modename: unknown library version type \`$version_type'" 1>&2
+ echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
+@@ -1000,9 +1010,17 @@
+ allow_undefined_flag="$no_undefined_flag"
+ fi
+
+- # Add libc to deplibs on all systems.
+- dependency_libs="$deplibs"
+- deplibs="$deplibs -lc"
++ case "$version_type" in
++ rhapsody|darwin)
++ # Rhapsody C library is in the System framework
++ deplibs="$deplibs -framework System"
++ ;;
++ *)
++ # Add libc to deplibs on all other systems.
++ dependency_libs="$deplibs"
++ deplibs="$deplibs -lc"
++ ;;
++ esac
+
+ if test "$build_libtool_libs" = yes; then
+ # Get the real and link names of the library.
diff --git a/eclass/ELT-patches/darwin-ltmain/1.3.0 b/eclass/ELT-patches/darwin-ltmain/1.3.0
new file mode 100644
index 000000000000..7148efb99326
--- /dev/null
+++ b/eclass/ELT-patches/darwin-ltmain/1.3.0
@@ -0,0 +1,64 @@
+--- ltmain.sh.darwin 2000-02-02 19:53:22.000000000 -0500
++++ ltmain.sh 2004-09-23 20:25:11.000000000 -0400
+@@ -24,6 +24,8 @@
+ # configuration script generated by Autoconf, you may include it under
+ # the same distribution terms that you use for the rest of that program.
+
++_S_=${LIBTOOL_CMD_SEP-\~}
++
+ # Check that we have a working $echo.
+ if test "X$1" = X--no-reexec; then
+ # Discard the --no-reexec flag, and continue.
+@@ -1079,6 +1079,11 @@
+ # These systems don't actually have c library (as such)
+ continue
+ ;;
++ *-*-rhapsody* | *-*-darwin1.[012])
++ # Rhapsody C library is in the System framework
++ deplibs="$deplibs -framework System"
++ continue
++ ;;
+ esac
+ elif test "$arg" = "-lm"; then
+ case "$host" in
+@@ -1086,6 +1091,11 @@
+ # These systems don't actually have math library (as such)
+ continue
+ ;;
++ *-*-rhapsody* | *-*-darwin1.[012])
++ # Rhapsody math library is in the System framework
++ deplibs="$deplibs -framework System"
++ continue
++ ;;
+ esac
+ fi
+ deplibs="$deplibs $arg"
+@@ -1753,6 +1763,16 @@
+ versuffix="-$major-$age-$revision"
+ ;;
+
++ darwin)
++ # Like Linux, but with the current version available in
++ # verstring for coding it into the library header
++ major=.`expr $current - $age`
++ versuffix="$major.$age.$revision"
++ # Darwin ld doesn't like 0 for these options...
++ minor_current=`expr $current + 1`
++ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision"
++ ;;
++
+ *)
+ $echo "$modename: unknown library version type \`$version_type'" 1>&2
+ echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
+@@ -1795,6 +1824,10 @@
+ *-*-cygwin* | *-*-mingw* | *-*-os2* | *-*-beos*)
+ # these systems don't actually have a c library (as such)!
+ ;;
++ *-*-rhapsody* | *-*-darwin1.[012])
++ # Rhapsody C library is in the System framework
++ deplibs="$deplibs -framework System"
++ ;;
+ *)
+ # Add libc to deplibs on all other systems.
+ deplibs="$deplibs -lc"
+
diff --git a/eclass/ELT-patches/deplibs/1.5 b/eclass/ELT-patches/deplibs/1.5
new file mode 100644
index 000000000000..36c507ade3f3
--- /dev/null
+++ b/eclass/ELT-patches/deplibs/1.5
@@ -0,0 +1,18 @@
+--- libtool.orig 2009-07-15 16:25:32 +0200
++++ libtool 2009-07-15 16:51:56 +0200
+@@ -3319,7 +3319,14 @@
+ ;;
+ esac
+ if grep "^installed=no" $deplib > /dev/null; then
+- path="$absdir/$objdir"
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
++ eval library_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
++ for library_name in ${library_names}; do :; done
++ if test $mode = relink && test -f "${inst_prefix_dir}${libdir}/${library_name}"; then
++ path="${inst_prefix_dir}${libdir}"
++ else
++ path="$absdir/$objdir"
++ fi
+ else
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ if test -z "$libdir"; then
diff --git a/eclass/ELT-patches/deplibs/2.1b b/eclass/ELT-patches/deplibs/2.1b
new file mode 100644
index 000000000000..3df590df7f68
--- /dev/null
+++ b/eclass/ELT-patches/deplibs/2.1b
@@ -0,0 +1,18 @@
+--- ltmain-release-2-1b.in 2009-06-22 15:33:26.000000000 +0200
++++ ltmain-release-2-1b.in.new 2009-07-15 16:59:45.000000000 +0200
+@@ -4946,7 +4946,14 @@
+ fi
+ ;;
+ *)
+- path="-L$absdir/$objdir"
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
++ eval library_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
++ for library_name in ${library_names}; do :; done
++ if test $mode = relink && test -f "${inst_prefix_dir}${libdir}/${library_name}"; then
++ path="-L${inst_prefix_dir}${libdir}"
++ else
++ path="-L$absdir/$objdir"
++ fi
+ ;;
+ esac
+ else
diff --git a/eclass/ELT-patches/fbsd-conf/00broken-libglade b/eclass/ELT-patches/fbsd-conf/00broken-libglade
new file mode 100644
index 000000000000..df3c4384c445
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/00broken-libglade
@@ -0,0 +1,33 @@
+Index: tiff-3.8.2/configure
+===================================================================
+--- tiff-3.8.2.orig/configure
++++ tiff-3.8.2/configure
+@@ -8199,7 +8199,14 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ # Handle Gentoo/FreeBSD as it was Linux
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8210,6 +8216,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in \ No newline at end of file
diff --git a/eclass/ELT-patches/fbsd-conf/1.4.0 b/eclass/ELT-patches/fbsd-conf/1.4.0
new file mode 100644
index 000000000000..b986f8d8f519
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.4.0
@@ -0,0 +1,32 @@
+Index: expat-1.95.8/configure
+===================================================================
+--- expat-1.95.8.orig/configure
++++ expat-1.95.8/configure
+@@ -5479,7 +5479,13 @@ freebsd1*)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so $libname.so'
+@@ -5490,6 +5496,12 @@ freebsd*)
+ library_names_spec='${libname}${release}.so$versuffix $libname.so$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major ${libname}.so'
++ soname_spec='${libname}${release}.so$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-conf/1.5.10 b/eclass/ELT-patches/fbsd-conf/1.5.10
new file mode 100644
index 000000000000..90c63475bcce
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.5.10
@@ -0,0 +1,120 @@
+Index: libiconv-1.10/configure
+===================================================================
+--- libiconv-1.10.orig/configure
++++ libiconv-1.10/configure
+@@ -8902,7 +8902,14 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ # Gentoo/FreeBSD uses linux-style versioning to be user-friendly.
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8913,6 +8920,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -12620,7 +12633,14 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ # Gentoo/FreeBSD uses linux-style versioning to be user-friendly.
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -12631,6 +12651,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -15784,7 +15810,14 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ # Gentoo/FreeBSD uses linux-style versioning to be user-friendly.
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -15795,6 +15828,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -18150,7 +18189,14 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ # Gentoo/FreeBSD uses linux-style versioning to be user-friendly.
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -18161,6 +18207,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-conf/1.5.18 b/eclass/ELT-patches/fbsd-conf/1.5.18
new file mode 100644
index 000000000000..0636cebfcfa7
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.5.18
@@ -0,0 +1,116 @@
+Index: libtool-1.5.18/configure
+===================================================================
+--- libtool-1.5.18.orig/configure
++++ libtool-1.5.18/configure
+@@ -8433,7 +8433,13 @@ freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8444,6 +8450,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -12205,7 +12217,13 @@ freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -12216,6 +12234,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -15461,7 +15485,13 @@ freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -15472,6 +15502,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -17929,7 +17965,13 @@ freebsd* | dragonfly*)
+ # DragonFly does not have aout. When/if they implement a new
+ # versioning mechanism, adjust this.
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -17940,6 +17982,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-conf/1.5.20 b/eclass/ELT-patches/fbsd-conf/1.5.20
new file mode 100644
index 000000000000..e17920f8f809
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.5.20
@@ -0,0 +1,116 @@
+Index: libtool-1.5.20/configure
+===================================================================
+--- libtool-1.5.20.orig/configure
++++ libtool-1.5.20/configure
+@@ -8441,7 +8441,13 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8452,6 +8458,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -12228,7 +12240,13 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -12239,6 +12257,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -15492,7 +15516,13 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -15503,6 +15533,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -17968,7 +18004,13 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -17979,6 +18021,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-conf/1.5.20b b/eclass/ELT-patches/fbsd-conf/1.5.20b
new file mode 100644
index 000000000000..0659b1c3cb5f
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.5.20b
@@ -0,0 +1,61 @@
+Index: tiff-3.8.2/configure
+===================================================================
+--- tiff-3.8.2.orig/configure
++++ tiff-3.8.2/configure
+@@ -8199,7 +8199,14 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ # Handle Gentoo/FreeBSD as it was Linux
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8210,6 +8216,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -19253,7 +19265,13 @@ freebsd* | dragonfly*)
+ *) objformat=elf ;;
+ esac
+ fi
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -19264,6 +19282,12 @@ freebsd* | dragonfly*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-conf/1.9f b/eclass/ELT-patches/fbsd-conf/1.9f
new file mode 100644
index 000000000000..f16ddb874c19
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-conf/1.9f
@@ -0,0 +1,60 @@
+Index: libtorrent-0.10.0/configure
+===================================================================
+--- libtorrent-0.10.0.orig/configure
++++ libtorrent-0.10.0/configure
+@@ -8173,7 +8173,13 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || $ECHO aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -8184,6 +8190,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
+@@ -12461,7 +12473,13 @@ kfreebsd*-gnu)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || $ECHO aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
+@@ -12472,6 +12490,12 @@ freebsd*)
+ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
++ soname_spec='${libname}${release}${shared_ext}$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-ltconf/1.2.0 b/eclass/ELT-patches/fbsd-ltconf/1.2.0
new file mode 100644
index 000000000000..7d2f554f253b
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-ltconf/1.2.0
@@ -0,0 +1,26 @@
+Index: jpeg-6b/ltconfig
+===================================================================
+--- jpeg-6b.orig/ltconfig
++++ jpeg-6b/ltconfig
+@@ -1126,9 +1126,18 @@ amigaos*)
+ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$echo "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $rm /sys/libs/${libname}_ixlibrary.a; $show "(cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a)"; (cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a) || exit 1; done'
+ ;;
+
+-freebsd2* | freebsd3*)
+- version_type=sunos
+- library_names_spec='${libname}${release}.so.$versuffix $libname.so'
++freebsd[234567]*)
++ case $host_vendor in
++ gentoo)
++ version_type=linux
++ library_names_spec='${libname}${release}.so.$versuffix ${libname}${release}.so.$major ${libname}.so'
++ soname_spec='${libname}${release}.so.$major'
++ ;;
++ *)
++ version_type=sunos
++ library_names_spec='${libname}${release}.so.$versuffix $libname.so'
++ ;;
++ esac
+ finish_cmds='PATH="$PATH:/sbin" ldconfig -m $libdir'
+ shlibpath_var=LD_LIBRARY_PATH
+ ;;
diff --git a/eclass/ELT-patches/fbsd-ltconf/1.3.0 b/eclass/ELT-patches/fbsd-ltconf/1.3.0
new file mode 100644
index 000000000000..69aa94e678f2
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-ltconf/1.3.0
@@ -0,0 +1,32 @@
+Index: libtool-1.3.5/ltconfig
+===================================================================
+--- libtool-1.3.5.orig/ltconfig
++++ libtool-1.3.5/ltconfig
+@@ -1888,7 +1888,13 @@ freebsd1*)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case "$version_type" in
+ freebsd-elf*)
+ deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB shared object'
+@@ -1903,6 +1909,12 @@ freebsd*)
+ library_names_spec='${libname}${release}.so$versuffix $libname.so$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major ${libname}.so'
++ soname_spec='${libname}${release}.so$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case "$host_os" in
diff --git a/eclass/ELT-patches/fbsd-ltconf/1.4a b/eclass/ELT-patches/fbsd-ltconf/1.4a
new file mode 100644
index 000000000000..00084070dfb7
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-ltconf/1.4a
@@ -0,0 +1,32 @@
+Index: binutils-2.16.1/ltconfig
+===================================================================
+--- binutils-2.16.1.orig/ltconfig
++++ binutils-2.16.1/ltconfig
+@@ -1129,7 +1129,13 @@ freebsd1*)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo aout`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so $libname.so'
+@@ -1141,6 +1147,12 @@ freebsd*)
+ library_names_spec='${libname}${release}.so$versuffix $libname.so$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major ${libname}.so'
++ soname_spec='${libname}${release}.so$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fbsd-ltconf/1.4a-GCC3.0 b/eclass/ELT-patches/fbsd-ltconf/1.4a-GCC3.0
new file mode 100644
index 000000000000..e88bb4582aed
--- /dev/null
+++ b/eclass/ELT-patches/fbsd-ltconf/1.4a-GCC3.0
@@ -0,0 +1,32 @@
+Index: binutils-2.16.1/ltconfig
+===================================================================
+--- binutils-2.16.1.orig/ltconfig
++++ binutils-2.16.1/ltconfig
+@@ -1129,7 +1129,13 @@ freebsd1*)
+
+ freebsd*)
+ objformat=`test -x /usr/bin/objformat && /usr/bin/objformat || echo elf`
+- version_type=freebsd-$objformat
++ case $host_vendor in
++ gentoo)
++ version_type=linux ;;
++ *)
++ version_type=freebsd-$objformat ;;
++ esac
++
+ case $version_type in
+ freebsd-elf*)
+ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so $libname.so'
+@@ -1141,6 +1147,12 @@ freebsd*)
+ library_names_spec='${libname}${release}.so$versuffix $libname.so$versuffix'
+ need_version=yes
+ ;;
++ linux)
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major ${libname}.so'
++ soname_spec='${libname}${release}.so$major'
++ need_lib_prefix=no
++ need_version=no
++ ;;
+ esac
+ shlibpath_var=LD_LIBRARY_PATH
+ case $host_os in
diff --git a/eclass/ELT-patches/fix-relink/1.4.0 b/eclass/ELT-patches/fix-relink/1.4.0
new file mode 100644
index 000000000000..1b12c9b1a360
--- /dev/null
+++ b/eclass/ELT-patches/fix-relink/1.4.0
@@ -0,0 +1,63 @@
+--- ltmain.sh 2003-09-24 18:22:17.528129376 +0200
++++ ltmain.sh 2003-09-24 18:23:17.101072912 +0200
+@@ -1582,6 +1582,8 @@
+ # Only check for convenience libraries
+ deplibs="$lib $deplibs"
+ tmp_libs=
++ # PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ #echo "Adding $deplib to \$deplibs"
+ deplibs="$deplib $deplibs"
+@@ -1699,6 +1701,8 @@
+ fi
+
+ tmp_libs=
++ #PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
+@@ -1860,6 +1864,16 @@
+ add="$dir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+ add_dir="-L$dir"
++ # Try looking first in the location we're being installed to.
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
++ ;;
++ esac
++ fi
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ add_shlibpath="$dir"
+@@ -1918,6 +1932,16 @@
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+ add_dir="-L$libdir"
++ # Try looking first in the location we're being installed to.
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
++ ;;
++ esac
++ fi
+ add="-l$name"
+ fi
+
+@@ -2117,7 +2141,7 @@
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+- *) tmp_libs="$tmp_libs $deplib" ;;
++ *) tmp_libs="$deplib $tmp_libs" ;;
+ esac
+ ;;
+ *) tmp_libs="$tmp_libs $deplib" ;;
diff --git a/eclass/ELT-patches/fix-relink/1.4.3 b/eclass/ELT-patches/fix-relink/1.4.3
new file mode 100644
index 000000000000..959fe1dd76f6
--- /dev/null
+++ b/eclass/ELT-patches/fix-relink/1.4.3
@@ -0,0 +1,62 @@
+--- ltmain.sh 2003-09-24 19:48:39.367370072 +0200
++++ ltmain.sh 2003-09-24 19:49:31.773403144 +0200
+@@ -1593,6 +1593,8 @@
+ convenience="$convenience $ladir/$objdir/$old_library"
+ old_convenience="$old_convenience $ladir/$objdir/$old_library"
+ tmp_libs=
++ # PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ deplibs="$deplib $deplibs"
+ if test "X$duplicate_deps" = "Xyes" ; then
+@@ -1710,6 +1712,8 @@
+ fi
+
+ tmp_libs=
++ #PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
+@@ -1871,6 +1875,16 @@
+ add="$dir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+ add_dir="-L$dir"
++ # Try looking first in the location we're being installed to.
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
++ ;;
++ esac
++ fi
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ add_shlibpath="$dir"
+@@ -1938,11 +1952,13 @@
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+ # Try looking first in the location we're being installed to.
+- add_dir=
++ add_dir="-L$dir"
+ if test -n "$inst_prefix_dir"; then
+ case "$libdir" in
+ [\\/]*)
+- add_dir="-L$inst_prefix_dir$libdir"
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
+ ;;
+ esac
+ fi
+@@ -2146,7 +2162,7 @@
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+- *) tmp_libs="$tmp_libs $deplib" ;;
++ *) tmp_libs="$deplib $tmp_libs" ;;
+ esac
+ ;;
+ *) tmp_libs="$tmp_libs $deplib" ;;
diff --git a/eclass/ELT-patches/fix-relink/1.5.0 b/eclass/ELT-patches/fix-relink/1.5.0
new file mode 100644
index 000000000000..29b7db7b1dd4
--- /dev/null
+++ b/eclass/ELT-patches/fix-relink/1.5.0
@@ -0,0 +1,51 @@
+--- ltmain.sh 2003-09-24 18:18:14.961005184 +0200
++++ ltmain.sh 2003-09-24 18:20:11.204333512 +0200
+@@ -2022,6 +2022,8 @@
+ convenience="$convenience $ladir/$objdir/$old_library"
+ old_convenience="$old_convenience $ladir/$objdir/$old_library"
+ tmp_libs=
++ # PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ deplibs="$deplib $deplibs"
+ if test "X$duplicate_deps" = "Xyes" ; then
+@@ -2143,6 +2145,8 @@
+ fi
+
+ tmp_libs=
++ #PKGW
++ dependency_libs=
+ for deplib in $dependency_libs; do
+ case $deplib in
+ -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
+@@ -2359,7 +2363,9 @@
+ if test -n "$inst_prefix_dir"; then
+ case "$libdir" in
+ [\\/]*)
+- add_dir="$add_dir -L$inst_prefix_dir$libdir"
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
+ ;;
+ esac
+ fi
+@@ -2431,7 +2437,9 @@
+ if test -n "$inst_prefix_dir"; then
+ case "$libdir" in
+ [\\/]*)
+- add_dir="$add_dir -L$inst_prefix_dir$libdir"
++ # Add the install location to the beginning, minimising possiblilty
++ # of linking to older version of the lib already installed.
++ add_dir="-L$inst_prefix_dir$libdir $add_dir"
+ ;;
+ esac
+ fi
+@@ -2691,7 +2699,7 @@
+ -L*)
+ case " $tmp_libs " in
+ *" $deplib "*) ;;
+- *) tmp_libs="$tmp_libs $deplib" ;;
++ *) tmp_libs="$deplib $tmp_libs" ;;
+ esac
+ ;;
+ *) tmp_libs="$tmp_libs $deplib" ;;
diff --git a/eclass/ELT-patches/gold-conf/2.2.6 b/eclass/ELT-patches/gold-conf/2.2.6
new file mode 100644
index 000000000000..048cbad01187
--- /dev/null
+++ b/eclass/ELT-patches/gold-conf/2.2.6
@@ -0,0 +1,10 @@
+--- configure.orig
++++ configure
+@@ -9933,6 +9933,7 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie
+ fi
+ supports_anon_versioning=no
+ case `$LD -v 2>&1` in
++ *GNU\ gold*) supports_anon_versioning=yes ;;
+ *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
+ *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
+ *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
diff --git a/eclass/ELT-patches/hardcode-relink/1.3d b/eclass/ELT-patches/hardcode-relink/1.3d
new file mode 100644
index 000000000000..dc7819f447e3
--- /dev/null
+++ b/eclass/ELT-patches/hardcode-relink/1.3d
@@ -0,0 +1,8 @@
+--- ltmain.sh
++++ ltmain.sh
+@@ -2208,4 +2208,4 @@
+ relink)
+- if test "$hardcode_direct" = yes; then
++ if false && test "$hardcode_direct" = yes; then
+ add="$dir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
diff --git a/eclass/ELT-patches/hardcode/1.3d b/eclass/ELT-patches/hardcode/1.3d
new file mode 100644
index 000000000000..7408b372aec7
--- /dev/null
+++ b/eclass/ELT-patches/hardcode/1.3d
@@ -0,0 +1,21 @@
+http://lists.gnu.org/archive/html/bug-libtool/2008-03/msg00124.html
+
+but this patch disables hardcoding independent of inst_prefix_dir at all,
+for both hardcode_direct=yes and hardcode_minus_L=yes.
+
+If you update this patch, please also update libtool/files/libtool-1.3d-hardcode.patch
+
+--- ltmain.in.orig 2008-04-16 15:54:04.000000000 +0200
++++ ltmain.in 2008-04-16 16:21:04.000000000 +0200
+@@ -1900,9 +1900,9 @@
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+- if test "$hardcode_direct" = yes; then
++ if false && test "$hardcode_direct" = yes; then
+ add="$libdir/$linklib"
+- elif test "$hardcode_minus_L" = yes; then
++ elif false && test "$hardcode_minus_L" = yes; then
+ add_dir="-L$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
diff --git a/eclass/ELT-patches/hardcode/2.1b b/eclass/ELT-patches/hardcode/2.1b
new file mode 100644
index 000000000000..48571f39379f
--- /dev/null
+++ b/eclass/ELT-patches/hardcode/2.1b
@@ -0,0 +1,36 @@
+Hardcoding library path does not work with DESTDIR installs.
+
+Affects any platform with 'hardcode_direct=yes' or 'hardcode_minus_L=yes'.
+According to libtool.m4 (as of libtool-1.5.26), these are:
+ *-aix*
+ *-hpux9
+ hppa-hpux* (not hppa64 or ia64)
+ *-netbsd
+ *-openbsd
+ *-freebsd
+ *-dragonfly
+ *-newsos6
+ *-os2
+ *-amigaos
+ *-sunos4
+ *-sysv4
+It definitely is required for aix (to support DESTDIR),
+although it should help for others too...
+
+--- ../../libtool.orig 2009-07-16 18:06:59 +0200
++++ ../../libtool 2009-07-16 18:07:45 +0200
+@@ -6298,13 +6298,13 @@
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+ if test "$hardcode_direct" = yes &&
+ test "$hardcode_direct_absolute" = no; then
+ add="$libdir/$linklib"
+- elif test "$hardcode_minus_L" = yes; then
++ elif false && test "$hardcode_minus_L" = yes; then
+ add_dir="-L$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+ *":$libdir:"*) ;;
+ *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
diff --git a/eclass/ELT-patches/hc-flag-ld/1.5 b/eclass/ELT-patches/hc-flag-ld/1.5
new file mode 100644
index 000000000000..bf88873c8f16
--- /dev/null
+++ b/eclass/ELT-patches/hc-flag-ld/1.5
@@ -0,0 +1,18 @@
+http://lists.gnu.org/archive/html/libtool/2006-01/msg00012.html
+
+--- ltmain.sh.orig 2007-11-19 17:07:34.946648000 +0100
++++ ltmain.sh 2007-11-19 17:07:57.785975000 +0100
+@@ -3915,8 +3915,11 @@
+ test -n "$hardcode_libdirs"; then
+ libdir="$hardcode_libdirs"
+ if test -n "$hardcode_libdir_flag_spec_ld"; then
+- eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
+- else
++ case $archive_cmds in
++ *\$LD*) eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" ;;
++ *) eval dep_rpath=\"$hardcode_libdir_flag_spec\" ;;
++ esac
++ else
+ eval dep_rpath=\"$hardcode_libdir_flag_spec\"
+ fi
+ fi
diff --git a/eclass/ELT-patches/hpux-conf/1.5.22-syslibpath b/eclass/ELT-patches/hpux-conf/1.5.22-syslibpath
new file mode 100644
index 000000000000..27fef4873ead
--- /dev/null
+++ b/eclass/ELT-patches/hpux-conf/1.5.22-syslibpath
@@ -0,0 +1,24 @@
+must not search /usr/local in prefix
+
+--- configure.orig 2007-12-02 19:14:55 +0100
++++ configure 2007-12-02 19:14:59 +0100
+@@ -8920,9 +8920,17 @@
+ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
+ soname_spec='${libname}${release}${shared_ext}$major'
+ if test "X$HPUX_IA64_MODE" = X32; then
+- sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
++ if test "$GCC" = yes; then
++ sys_lib_search_path_spec="${sys_lib_search_path_spec} /usr/lib/hpux32"
++ else
++ sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
++ fi
+ else
+- sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
++ if test "$GCC" = yes; then
++ sys_lib_search_path_spec="${sys_lib_search_path_spec} /usr/lib/hpux64"
++ else
++ sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
++ fi
+ fi
+ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
+ ;;
diff --git a/eclass/ELT-patches/install-sh/1.5 b/eclass/ELT-patches/install-sh/1.5
new file mode 100644
index 000000000000..1b8b20bebdac
--- /dev/null
+++ b/eclass/ELT-patches/install-sh/1.5
@@ -0,0 +1,14 @@
+--- install-sh 2007-07-04 14:36:49 +0100
++++ install-sh 2007-09-14 11:00:41 +0100
+@@ -192,7 +192,10 @@
+
+ if [ x"$dir_arg" != x ]
+ then
+- $doit $instcmd $dst &&
++ { $doit $instcmd "$dst" || lasterr=$?
++ # It's ok for mkdir to fail if the directory already exists.
++ test -d "$dst" || { (exit ${lasterr-1}); exit; }
++ } &&
+
+ if [ x"$chowncmd" != x ]; then $doit $chowncmd $dst; else : ; fi &&
+ if [ x"$chgrpcmd" != x ]; then $doit $chgrpcmd $dst; else : ; fi &&
diff --git a/eclass/ELT-patches/install-sh/1.5.4 b/eclass/ELT-patches/install-sh/1.5.4
new file mode 100644
index 000000000000..0b1c22043bff
--- /dev/null
+++ b/eclass/ELT-patches/install-sh/1.5.4
@@ -0,0 +1,14 @@
+--- install-sh 2006-03-25 20:04:46 +0000
++++ install-sh 2007-09-14 10:53:29 +0100
+@@ -246,7 +246,10 @@
+ fi
+
+ if test -n "$dir_arg"; then
+- $doit $mkdircmd "$dst" \
++ { $doit $mkdircmd "$dst" || lasterr=$?
++ # It's ok for mkdir to fail if the directory already exists.
++ test -d "$dst" || { (exit ${lasterr-1}); exit; }
++ } \
+ && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \
+ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \
+ && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \
diff --git a/eclass/ELT-patches/install-sh/1.5.6 b/eclass/ELT-patches/install-sh/1.5.6
new file mode 100644
index 000000000000..d107740d1b2c
--- /dev/null
+++ b/eclass/ELT-patches/install-sh/1.5.6
@@ -0,0 +1,14 @@
+--- install-sh 2006-03-14 12:57:54 -0300
++++ install-sh 2008-06-19 22:23:04 -0300
+@@ -240,7 +240,10 @@
+ fi
+
+ if test -n "$dir_arg"; then
+- $doit $instcmd "$dst" \
++ { $doit $instcmd "$dst" || lasterr=$?
++ # It's ok for mkdir to fail if the directory already exists.
++ test -d "$dst" || { (exit ${lasterr-1}); exit; }
++ } \
+ && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \
+ && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \
+ && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \
diff --git a/eclass/ELT-patches/irix-ltmain/2.2.6 b/eclass/ELT-patches/irix-ltmain/2.2.6
new file mode 100644
index 000000000000..70916377b5e7
--- /dev/null
+++ b/eclass/ELT-patches/irix-ltmain/2.2.6
@@ -0,0 +1,30 @@
+I notice that $EPREFIX/usr/share/libtool/config/ltmain.sh also has this
+stray increment - so I'd assume that this is a bug in libtool (which is
+perhaps trying to fix a deprecated behaviour of old IRIX versions?).
+
+http://bugs.gentoo.org/show_bug.cgi?id=301520
+
+
+--- ltmain.sh.dist 2010-01-19 14:04:22.101603800 +0000
++++ ltmain.sh 2010-01-19 14:08:29.361500240 +0000
+@@ -6225,7 +6225,6 @@ func_mode_link ()
+ current=$func_arith_result
+ age="$number_minor"
+ revision="$number_minor"
+- lt_irix_increment=no
+ ;;
+ esac
+ ;;
+@@ -6297,11 +6296,7 @@ func_mode_link ()
+ ;;
+
+ irix | nonstopux)
+- if test "X$lt_irix_increment" = "Xno"; then
+- func_arith $current - $age
+- else
+- func_arith $current - $age + 1
+- fi
++ func_arith $current - $age
+ major=$func_arith_result
+
+ case $version_type in
diff --git a/eclass/ELT-patches/ltmain/1.5 b/eclass/ELT-patches/ltmain/1.5
new file mode 100644
index 000000000000..39abee03efa1
--- /dev/null
+++ b/eclass/ELT-patches/ltmain/1.5
@@ -0,0 +1,18 @@
+--- ltmain.sh 2007-09-15 07:35:19 +0100
++++ ltmain.sh 2004-01-25 12:40:26 +0000
+@@ -1854,6 +1840,15 @@
+ lib=
+ found=no
+ case $deplib in
++ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe)
++ if test "$linkmode,$pass" = "prog,link"; then
++ compile_deplibs="$deplib $compile_deplibs"
++ finalize_deplibs="$deplib $finalize_deplibs"
++ else
++ deplibs="$deplib $deplibs"
++ fi
++ continue
++ ;;
+ -l*)
+ if test "$linkmode" != lib && test "$linkmode" != prog; then
+ $echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2
diff --git a/eclass/ELT-patches/max_cmd_len/1.5.0 b/eclass/ELT-patches/max_cmd_len/1.5.0
new file mode 100644
index 000000000000..6d5f88b59669
--- /dev/null
+++ b/eclass/ELT-patches/max_cmd_len/1.5.0
@@ -0,0 +1,33 @@
+--- ltmain.sh 2004-02-11 20:55:14.587581248 +0200
++++ ltmain.sh 2004-02-11 21:15:33.467283368 +0200
+@@ -117,6 +117,30 @@
+ execute_dlfiles=
+ lo2o="s/\\.lo\$/.${objext}/"
+ o2lo="s/\\.${objext}\$/.lo/"
++
++if test -z "$max_cmd_len"; then
++ i=0
++ testring="ABCD"
++ new_result=
++
++ # If test is not a shell built-in, we'll probably end up computing a
++ # maximum length that is only half of the actual maximum length, but
++ # we can't tell.
++ while (test "X"`$SHELL $0 --fallback-echo "X$testring" 2>/dev/null` \
++ = "XX$testring") >/dev/null 2>&1 &&
++ new_result=`expr "X$testring" : ".*" 2>&1` &&
++ max_cmd_len="$new_result" &&
++ test $i != 17 # 1/2 MB should be enough
++ do
++ i=`expr $i + 1`
++ testring="$testring$testring"
++ done
++ testring=
++ # Add a significant safety factor because C++ compilers can tack on massive
++ # amounts of additional arguments before passing them to the linker.
++ # It appears as though 1/2 is a usable value.
++ max_cmd_len=`expr $max_cmd_len \/ 2`
++fi
+
+ #####################################
+ # Shell function definitions:
diff --git a/eclass/ELT-patches/max_cmd_len/1.5.14 b/eclass/ELT-patches/max_cmd_len/1.5.14
new file mode 100644
index 000000000000..8f299e8b9560
--- /dev/null
+++ b/eclass/ELT-patches/max_cmd_len/1.5.14
@@ -0,0 +1,33 @@
+--- ltmain.sh
++++ ltmain.sh
+@@ -136,6 +136,30 @@
+ lo2o="s/\\.lo\$/.${objext}/"
+ o2lo="s/\\.${objext}\$/.lo/"
+ quote_scanset='[[~#^*{};<>?'"'"' ]'
++
++if test -z "$max_cmd_len"; then
++ i=0
++ testring="ABCD"
++ new_result=
++
++ # If test is not a shell built-in, we'll probably end up computing a
++ # maximum length that is only half of the actual maximum length, but
++ # we can't tell.
++ while (test "X"`$SHELL $0 --fallback-echo "X$testring" 2>/dev/null` \
++ = "XX$testring") >/dev/null 2>&1 &&
++ new_result=`expr "X$testring" : ".*" 2>&1` &&
++ max_cmd_len="$new_result" &&
++ test "$i" != 17 # 1/2 MB should be enough
++ do
++ i=`expr $i + 1`
++ testring="$testring$testring"
++ done
++ testring=
++ # Add a significant safety factor because C++ compilers can tack on massive
++ # amounts of additional arguments before passing them to the linker.
++ # It appears as though 1/2 is a usable value.
++ max_cmd_len=`expr $max_cmd_len \/ 2`
++fi
+
+ #####################################
+ # Shell function definitions:
diff --git a/eclass/ELT-patches/max_cmd_len/1.5.20 b/eclass/ELT-patches/max_cmd_len/1.5.20
new file mode 100644
index 000000000000..76dc21364a54
--- /dev/null
+++ b/eclass/ELT-patches/max_cmd_len/1.5.20
@@ -0,0 +1,33 @@
+--- ltmain.sh
++++ ltmain.sh
+@@ -136,6 +136,30 @@
+ execute_dlfiles=
+ lo2o="s/\\.lo\$/.${objext}/"
+ o2lo="s/\\.${objext}\$/.lo/"
++
++if test -z "$max_cmd_len"; then
++ i=0
++ testring="ABCD"
++ new_result=
++
++ # If test is not a shell built-in, we'll probably end up computing a
++ # maximum length that is only half of the actual maximum length, but
++ # we can't tell.
++ while (test "X"`$SHELL $0 --fallback-echo "X$testring" 2>/dev/null` \
++ = "XX$testring") >/dev/null 2>&1 &&
++ new_result=`expr "X$testring" : ".*" 2>&1` &&
++ max_cmd_len="$new_result" &&
++ test "$i" != 17 # 1/2 MB should be enough
++ do
++ i=`expr $i + 1`
++ testring="$testring$testring"
++ done
++ testring=
++ # Add a significant safety factor because C++ compilers can tack on massive
++ # amounts of additional arguments before passing them to the linker.
++ # It appears as though 1/2 is a usable value.
++ max_cmd_len=`expr $max_cmd_len \/ 2`
++fi
+
+ #####################################
+ # Shell function definitions:
diff --git a/eclass/ELT-patches/mint-conf/2.2.6 b/eclass/ELT-patches/mint-conf/2.2.6
new file mode 100644
index 000000000000..34f2e5f28c59
--- /dev/null
+++ b/eclass/ELT-patches/mint-conf/2.2.6
@@ -0,0 +1,15 @@
+Calculating the max args here takes hours.
+
+--- configure
++++ configure
+@@ -7351,6 +7351,10 @@
+ lt_cv_sys_max_cmd_len=8192;
+ ;;
+
++ mint*)
++ lt_cv_sys_max_cmd_len=8192;
++ ;;
++
+ amigaos*)
+ # On AmigaOS with pdksh, this test takes hours, literally.
+ # So we just punt and use a minimum line length of 8192.
diff --git a/eclass/ELT-patches/no-lc/1.3d b/eclass/ELT-patches/no-lc/1.3d
new file mode 100644
index 000000000000..de20b0ceed3c
--- /dev/null
+++ b/eclass/ELT-patches/no-lc/1.3d
@@ -0,0 +1,13 @@
+--- release-1-3d.orig 2009-12-03 17:05:15.608916799 +0100
++++ release-1-3d 2009-12-14 22:01:06.634927600 +0100
+@@ -1035,6 +1035,10 @@
+ esac
+ elif test "$arg" = "-lm"; then
+ case $host in
++ *-*-hpux*)
++ # Compiler inserts libc in the correct place for threads to work
++ test "X$arg" = "X-lc" && continue
++ ;;
+ *-*-cygwin* | *-*-pw32* | *-*-beos*)
+ # These systems don't actually have math library (as such)
+ continue
diff --git a/eclass/ELT-patches/no-lc/1.5.22 b/eclass/ELT-patches/no-lc/1.5.22
new file mode 100644
index 000000000000..786284d49744
--- /dev/null
+++ b/eclass/ELT-patches/no-lc/1.5.22
@@ -0,0 +1,11 @@
+--- release-1-5-22.orig 2009-12-03 17:05:15.605534999 +0100
++++ release-1-5-22 2009-12-14 21:56:58.884956366 +0100
+@@ -1573,7 +1573,7 @@
+ # Causes problems with __ctype
+ test "X$arg" = "X-lc" && continue
+ ;;
+- *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*)
++ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX* | *-*-hpux*)
+ # Compiler inserts libc in the correct place for threads to work
+ test "X$arg" = "X-lc" && continue
+ ;;
diff --git a/eclass/ELT-patches/nocxx/nocxx-2.64.patch b/eclass/ELT-patches/nocxx/nocxx-2.64.patch
new file mode 100644
index 000000000000..4b10e4697185
--- /dev/null
+++ b/eclass/ELT-patches/nocxx/nocxx-2.64.patch
@@ -0,0 +1,13 @@
+--- a/configure
++++ b/configure
+@@ -5148,8 +5148,8 @@
+ else
+ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
+ $as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
+-as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check
+-See \`config.log' for more details" "$LINENO" 5; }
++$as_echo "See \`config.log' for more details." >&5;
++$ac_echo "C++ sucks, ignoring ..." >&5; }
+ fi
+
+ ac_ext=c
diff --git a/eclass/ELT-patches/nocxx/nocxx.patch b/eclass/ELT-patches/nocxx/nocxx.patch
new file mode 100644
index 000000000000..4b326924e18f
--- /dev/null
+++ b/eclass/ELT-patches/nocxx/nocxx.patch
@@ -0,0 +1,15 @@
+--- a/configure
++++ b/configure
+@@ -5148,10 +5148,8 @@
+ :
+ else
+ { { echo "$as_me:$LINENO: error: C++ preprocessor \"$CXXCPP\" fails sanity check
+-See \`config.log' for more details." >&5
+-echo "$as_me: error: C++ preprocessor \"$CXXCPP\" fails sanity check
+-See \`config.log' for more details." >&2;}
+- { (exit 1); exit 1; }; }
++See \`config.log' for more details." >&5;}
++ { echo "C++ sucks, ignoring ..." >&5; }; }
+ fi
+
+ ac_ext=cc
diff --git a/eclass/ELT-patches/portage/1.2.0 b/eclass/ELT-patches/portage/1.2.0
new file mode 100644
index 000000000000..70b02619c601
--- /dev/null
+++ b/eclass/ELT-patches/portage/1.2.0
@@ -0,0 +1,7 @@
+# Dummy patch, not needed by libtool-1.2
+
+--- ltmain.sh
++++ ltmain.sh
+@@ -32,1 +32,1 @@
+-PACKAGE=libtool
++PACKAGE=libtool
diff --git a/eclass/ELT-patches/portage/1.3.0c b/eclass/ELT-patches/portage/1.3.0c
new file mode 100644
index 000000000000..ad071aed2f08
--- /dev/null
+++ b/eclass/ELT-patches/portage/1.3.0c
@@ -0,0 +1,66 @@
+--- ltmain.sh 2005-09-02 22:19:17.000000000 +0200
++++ ltmain.sh 2005-09-02 22:20:55.000000000 +0200
+@@ -3769,9 +3769,50 @@
+ $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
+ exit 1
+ fi
+- newdependency_libs="$newdependency_libs $libdir/$name"
++ # We do not want portage's install root ($D) present. Check only for
++ # this if the .la is being installed.
++ if test "$installed" = yes && test "$D"; then
++ eval mynewdependency_lib=`echo "$libdir/$name" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$libdir/$name"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_1=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_1"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ *)
++ if test "$installed" = yes; then
++ # Rather use S=WORKDIR if our version of portage supports it.
++ # This is because some ebuild (gcc) do not use $S as buildroot.
++ if test "$WORKDIR"; then
++ S="$WORKDIR"
++ fi
++ # We do not want portage's build root ($S) present.
++ my_little_ninja_foo_2=`echo $deplib |$EGREP -e "$S"`
++ # We do not want portage's install root ($D) present.
++ my_little_ninja_foo_3=`echo $deplib |$EGREP -e "$D"`
++ if test -n "$my_little_ninja_foo_2" && test "$S"; then
++ mynewdependency_lib=""
++ elif test -n "$my_little_ninja_foo_3" && test "$D"; then
++ eval mynewdependency_lib=`echo "$deplib" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_4=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_4"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
+ ;;
+- *) newdependency_libs="$newdependency_libs $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+@@ -3799,6 +3840,10 @@
+ dlprefiles="$newdlprefiles"
+ fi
+ $rm $output
++ # Do not add duplicates
++ if test "$installed" = yes && test "$D"; then
++ install_libdir=`echo "$install_libdir" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ fi
+ $echo > $output "\
+ # $outputname - a libtool library file
+ # Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
diff --git a/eclass/ELT-patches/portage/1.3.3 b/eclass/ELT-patches/portage/1.3.3
new file mode 100644
index 000000000000..731076bce485
--- /dev/null
+++ b/eclass/ELT-patches/portage/1.3.3
@@ -0,0 +1,71 @@
+--- ltmain.sh 2005-05-13 10:53:28.000000000 +0200
++++ ltmain.sh 2005-05-13 11:44:15.000000000 +0200
+@@ -3078,6 +3078,68 @@
+ break
+ fi
+ output="$output_objdir/$outputname"i
++ # Replace all uninstalled libtool libraries with the installed ones
++ newdependency_libs=
++ for deplib in $dependency_libs; do
++ case $deplib in
++ *.la)
++ name=`$echo "X$deplib" | $Xsed -e 's%^.*/%%'`
++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
++ if test -z "$libdir"; then
++ $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
++ exit $EXIT_FAILURE
++ fi
++ # We do not want portage's install root ($D) present. Check only for
++ # this if the .la is being installed.
++ if test "$installed" = yes && test "$D"; then
++ eval mynewdependency_lib=`echo "$libdir/$name" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$libdir/$name"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_1=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_1"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ *)
++ if test "$installed" = yes; then
++ # Rather use S=WORKDIR if our version of portage supports it.
++ # This is because some ebuild (gcc) do not use $S as buildroot.
++ if test "$WORKDIR"; then
++ S="$WORKDIR"
++ fi
++ # We do not want portage's build root ($S) present.
++ my_little_ninja_foo_2=`echo $deplib |$EGREP -e "$S"`
++ # We do not want portage's install root ($D) present.
++ my_little_ninja_foo_3=`echo $deplib |$EGREP -e "$D"`
++ if test -n "$my_little_ninja_foo_2" && test "$S"; then
++ mynewdependency_lib=""
++ elif test -n "$my_little_ninja_foo_3" && test "$D"; then
++ eval mynewdependency_lib=`echo "$deplib" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_4=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_4"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ esac
++ done
++ dependency_libs="$newdependency_libs"
++ fi
++ # Do not add duplicates
++ if test "$installed" = yes && test "$D"; then
++ install_libdir=`echo "$install_libdir" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
+ fi
+ $rm $output
+ $echo > $output "\
diff --git a/eclass/ELT-patches/portage/1.4.0 b/eclass/ELT-patches/portage/1.4.0
new file mode 100644
index 000000000000..5566a39af49a
--- /dev/null
+++ b/eclass/ELT-patches/portage/1.4.0
@@ -0,0 +1,74 @@
+Note that if you update this patch, please update this one as well:
+
+ eclass/ELT-patches/portage/1.4.1
+
+The file name can stay 1.4.1, as it will still apply to all versions. Only
+when a new version of libtool comes out that it do not apply to, then the
+name should be bumped, but the patch content should stay fairly the same.
+
+--- ltmain.sh Wed Apr 3 01:19:37 2002
++++ ltmain.sh Sun May 26 19:50:52 2002
+@@ -3940,9 +3940,50 @@
+ $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
+ exit 1
+ fi
+- newdependency_libs="$newdependency_libs $libdir/$name"
++ # We do not want portage's install root ($D) present. Check only for
++ # this if the .la is being installed.
++ if test "$installed" = yes && test "$D"; then
++ eval mynewdependency_lib=`echo "$libdir/$name" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$libdir/$name"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_1=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_1"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ *)
++ if test "$installed" = yes; then
++ # Rather use S=WORKDIR if our version of portage supports it.
++ # This is because some ebuild (gcc) do not use $S as buildroot.
++ if test "$WORKDIR"; then
++ S="$WORKDIR"
++ fi
++ # We do not want portage's build root ($S) present.
++ my_little_ninja_foo_2=`echo $deplib |$EGREP -e "$S"`
++ # We do not want portage's install root ($D) present.
++ my_little_ninja_foo_3=`echo $deplib |$EGREP -e "$D"`
++ if test -n "$my_little_ninja_foo_2" && test "$S"; then
++ mynewdependency_lib=""
++ elif test -n "$my_little_ninja_foo_3" && test "$D"; then
++ eval mynewdependency_lib=`echo "$deplib" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_4=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_4"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
+ ;;
+- *) newdependency_libs="$newdependency_libs $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+@@ -3975,6 +4005,10 @@
+ case $host,$output,$installed,$module,$dlname in
+ *cygwin*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
+ esac
++ # Do not add duplicates
++ if test "$installed" = yes && test "$D"; then
++ install_libdir=`echo "$install_libdir" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ fi
+ $echo > $output "\
+ # $outputname - a libtool library file
+ # Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
diff --git a/eclass/ELT-patches/portage/1.5.10 b/eclass/ELT-patches/portage/1.5.10
new file mode 100644
index 000000000000..f33d906ed519
--- /dev/null
+++ b/eclass/ELT-patches/portage/1.5.10
@@ -0,0 +1,77 @@
+Note that if you update this patch, please update this one as well:
+
+ eclass/ELT-patches/portage/1.4.1
+
+The file name can stay 1.4.1, as it will still apply to all versions. Only
+when a new version of libtool comes out that it do not apply to, then the
+name should be bumped, but the patch content should stay fairly the same.
+
+--- ltmain.sh Wed Apr 3 01:19:37 2002
++++ ltmain.sh Sun May 26 19:50:52 2002
+@@ -3940,9 +3940,53 @@
+ $echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
+ exit 1
+ fi
+- newdependency_libs="$newdependency_libs $libdir/$name"
++ if test "x$EGREP" = x ; then
++ EGREP=egrep
++ fi
++ # We do not want portage's install root ($D) present. Check only for
++ # this if the .la is being installed.
++ if test "$installed" = yes && test "$D"; then
++ eval mynewdependency_lib=`echo "$libdir/$name" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$libdir/$name"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_1=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_1"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ *)
++ if test "$installed" = yes; then
++ # Rather use S=WORKDIR if our version of portage supports it.
++ # This is because some ebuild (gcc) do not use $S as buildroot.
++ if test "$WORKDIR"; then
++ S="$WORKDIR"
++ fi
++ # We do not want portage's build root ($S) present.
++ my_little_ninja_foo_2=`echo $deplib |$EGREP -e "$S"`
++ # We do not want portage's install root ($D) present.
++ my_little_ninja_foo_3=`echo $deplib |$EGREP -e "$D"`
++ if test -n "$my_little_ninja_foo_2" && test "$S"; then
++ mynewdependency_lib=""
++ elif test -n "$my_little_ninja_foo_3" && test "$D"; then
++ eval mynewdependency_lib=`echo "$deplib" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_4=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_4"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
+ ;;
+- *) newdependency_libs="$newdependency_libs $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+@@ -3975,6 +4005,10 @@
+ case $host,$output,$installed,$module,$dlname in
+ *cygwin*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
+ esac
++ # Do not add duplicates
++ if test "$installed" = yes && test "$D"; then
++ install_libdir=`echo "$install_libdir" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ fi
+ $echo > $output "\
+ # $outputname - a libtool library file
+ # Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
diff --git a/eclass/ELT-patches/portage/2.2 b/eclass/ELT-patches/portage/2.2
new file mode 100644
index 000000000000..1f724e7984eb
--- /dev/null
+++ b/eclass/ELT-patches/portage/2.2
@@ -0,0 +1,69 @@
+--- ltmain.sh
++++ ltmain.sh
+@@ -7410,9 +7410,53 @@
+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
+ test -z "$libdir" && \
+ func_fatal_error "\`$deplib' is not a valid libtool archive"
+- newdependency_libs="$newdependency_libs $libdir/$name"
++ if test "x$EGREP" = x ; then
++ EGREP=egrep
++ fi
++ # We do not want portage's install root ($D) present. Check only for
++ # this if the .la is being installed.
++ if test "$installed" = yes && test "$D"; then
++ eval mynewdependency_lib=`echo "$libdir/$name" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$libdir/$name"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_1=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_1"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
++ ;;
++ *)
++ if test "$installed" = yes; then
++ # Rather use S=WORKDIR if our version of portage supports it.
++ # This is because some ebuild (gcc) do not use $S as buildroot.
++ if test "$WORKDIR"; then
++ S="$WORKDIR"
++ fi
++ # We do not want portage's build root ($S) present.
++ my_little_ninja_foo_2=`echo $deplib |$EGREP -e "$S"`
++ # We do not want portage's install root ($D) present.
++ my_little_ninja_foo_3=`echo $deplib |$EGREP -e "$D"`
++ if test -n "$my_little_ninja_foo_2" && test "$S"; then
++ mynewdependency_lib=""
++ elif test -n "$my_little_ninja_foo_3" && test "$D"; then
++ eval mynewdependency_lib=`echo "$deplib" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ else
++ mynewdependency_lib="$deplib"
++ fi
++ # Do not add duplicates
++ if test "$mynewdependency_lib"; then
++ my_little_ninja_foo_4=`echo $newdependency_libs |$EGREP -e "$mynewdependency_lib"`
++ if test -z "$my_little_ninja_foo_4"; then
++ newdependency_libs="$newdependency_libs $mynewdependency_lib"
++ fi
++ fi
+ ;;
+- *) newdependency_libs="$newdependency_libs $deplib" ;;
+ esac
+ done
+ dependency_libs="$newdependency_libs"
+@@ -7476,6 +7520,10 @@
+ case $host,$output,$installed,$module,$dlname in
+ *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
+ esac
++ # Do not add duplicates
++ if test "$installed" = yes && test "$D"; then
++ install_libdir=`echo "$install_libdir" |sed -e "s:$D:/:g" -e 's:/\+:/:g'`
++ fi
+ $ECHO > $output "\
+ # $outputname - a libtool library file
+ # Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION
diff --git a/eclass/ELT-patches/relink-prog/1.3d b/eclass/ELT-patches/relink-prog/1.3d
new file mode 100644
index 000000000000..ccb6cb811c51
--- /dev/null
+++ b/eclass/ELT-patches/relink-prog/1.3d
@@ -0,0 +1,24 @@
+--- ltmain.sh
++++ ltmain.sh
+@@ -3203,6 +3203,12 @@
+ add_dir="$add_dir -L$inst_prefix_dir$libdir"
+ ;;
+ esac
++ else
++ case $libdir in
++ [\\/]*)
++ add_dir="$add_dir -L\\\"\\\${D}\\\"$libdir"
++ ;;
++ esac
+ fi
+ add="-l$name"
+ fi
+@@ -6630,7 +6636,7 @@
+ esac
+ fi
+ libfile="$libdir/"`$echo "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test
+- if test -n "$libdir" && test ! -f "$libfile"; then
++ if test -n "$libdir" && test ! -f "${D}$libfile"; then
+ $echo "$modename: warning: \`$lib' has not been installed in \`$libdir'" 1>&2
+ finalize=no
+ fi
diff --git a/eclass/ELT-patches/relink-prog/2.1b b/eclass/ELT-patches/relink-prog/2.1b
new file mode 100644
index 000000000000..71a0970d5b49
--- /dev/null
+++ b/eclass/ELT-patches/relink-prog/2.1b
@@ -0,0 +1,24 @@
+--- ltmain-v2.2.6.in 2009-06-22 15:33:27.000000000 +0200
++++ ltmain-v2.2.6.in.new 2009-07-16 16:21:04.000000000 +0200
+@@ -1742,7 +1742,7 @@
+ func_source "$lib"
+ fi
+ libfile="$libdir/"`$ECHO "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test
+- if test -n "$libdir" && test ! -f "$libfile"; then
++ if test -n "$libdir" && test ! -f "${D}$libfile"; then
+ func_warning "\`$lib' has not been installed in \`$libdir'"
+ finalize=no
+ fi
+@@ -5351,6 +5351,12 @@
+ add_dir="$add_dir -L$inst_prefix_dir$libdir"
+ ;;
+ esac
++ else
++ case $libdir in
++ [\\/]*)
++ add_dir="$add_dir -L\\\"\\\${D}\\\"$libdir"
++ ;;
++ esac
+ fi
+ add="-l$name"
+ fi
diff --git a/eclass/ELT-patches/relink/1.4.0a b/eclass/ELT-patches/relink/1.4.0a
new file mode 100644
index 000000000000..382d76087e62
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.0a
@@ -0,0 +1,99 @@
+--- ltmain.sh Wed Oct 3 02:05:35 2001
++++ ltmain.sh Wed Oct 3 05:16:14 2001
+@@ -754,6 +754,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -884,6 +885,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -985,6 +991,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -1866,6 +1877,7 @@
+
+ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
++ add_prefix_dir=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+@@ -1886,10 +1898,22 @@
+ add="-l$name"
+ fi
+
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_prefix_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++
++ # add_prefix_dir must be appended instead, otherwise it can
++ # possibly be overrided by any hardcoded -L/... path in deplibs
+ if test "$linkmode" = prog; then
++ test -n "$add_prefix_dir" && finalize_deplibs="$finalize_deplibs $add_prefix_dir"
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
++ test -n "$add_prefix_dir" && deplibs="$deplibs $add_prefix_dir"
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+@@ -3856,7 +3880,7 @@
+ fi
+ done
+ # Quote the link command for shipping.
+- relink_command="($relink_command; cd `pwd`; $SHELL $0 --mode=relink $libtool_args)"
++ relink_command="($relink_command; cd `pwd`; $SHELL $0 --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4157,6 +4181,24 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
diff --git a/eclass/ELT-patches/relink/1.4.0a-GCC3.0-1 b/eclass/ELT-patches/relink/1.4.0a-GCC3.0-1
new file mode 100644
index 000000000000..6bf45e58c57d
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.0a-GCC3.0-1
@@ -0,0 +1,99 @@
+--- ltmain.sh Wed Oct 3 02:05:35 2001
++++ ltmain.sh Wed Oct 3 05:16:14 2001
+@@ -754,6 +754,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -884,6 +885,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -985,6 +991,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -1866,6 +1877,7 @@
+
+ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
++ add_prefix_dir=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+@@ -1886,10 +1898,22 @@
+ add="-l$name"
+ fi
+
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_prefix_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++
++ # add_prefix_dir must be appended instead, otherwise it can
++ # possibly be overrided by any hardcoded -L/... path in deplibs
+ if test "$linkmode" = prog; then
++ test -n "$add_prefix_dir" && finalize_deplibs="$finalize_deplibs $add_prefix_dir"
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
++ test -n "$add_prefix_dir" && deplibs="$deplibs $add_prefix_dir"
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+@@ -3856,7 +3880,7 @@
+ for tag in $taglist; do
+ tagopts="$tagopts --tag $tag"
+ done
+- relink_command="(cd `pwd`; $SHELL $0$tagopts --mode=relink $libtool_args)"
++ relink_command="(cd `pwd`; $SHELL $0$tagopts --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4157,6 +4181,24 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
diff --git a/eclass/ELT-patches/relink/1.4.0a-GCC3.0-2 b/eclass/ELT-patches/relink/1.4.0a-GCC3.0-2
new file mode 100644
index 000000000000..55103a42e204
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.0a-GCC3.0-2
@@ -0,0 +1,100 @@
+--- ltmain.sh Wed Oct 3 02:05:35 2001
++++ ltmain.sh Wed Oct 3 05:16:14 2001
+@@ -754,6 +754,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -884,6 +885,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -985,6 +991,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -1866,6 +1877,7 @@
+
+ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
++ add_prefix_dir=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+@@ -1886,10 +1898,22 @@
+ add="-l$name"
+ fi
+
+- if test $linkmode = prog; then
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_prefix_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++
++ # add_prefix_dir must be appended instead, otherwise it can
++ # possibly be overrided by any hardcoded -L/... path in deplibs
++ if test "$linkmode" = prog; then
++ test -n "$add_prefix_dir" && finalize_deplibs="$finalize_deplibs $add_prefix_dir"
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
++ test -n "$add_prefix_dir" && deplibs="$deplibs $add_prefix_dir"
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+@@ -3856,7 +3880,7 @@
+ for tag in $taglist; do
+ tagopts="$tagopts --tag $tag"
+ done
+- relink_command="(cd `pwd`; $SHELL $0$tagopts --mode=relink $libtool_args)"
++ relink_command="(cd `pwd`; $SHELL $0$tagopts --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4157,6 +4181,24 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
diff --git a/eclass/ELT-patches/relink/1.4.1 b/eclass/ELT-patches/relink/1.4.1
new file mode 100644
index 000000000000..f34863f685e9
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.1
@@ -0,0 +1,124 @@
+--- ltmain.sh Sun Aug 12 18:08:05 2001
++++ ltmain.sh Tue Aug 28 18:55:13 2001
+@@ -827,6 +827,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -959,6 +960,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -1167,6 +1173,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -2231,7 +2242,16 @@
+ if test "$hardcode_direct" = yes; then
+ add="$libdir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+- add_dir="-L$libdir"
++ # Try looking first in the location we're being installed to.
++ add_dir=
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++ add_dir="$add_dir -L$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+@@ -2241,7 +2261,16 @@
+ add="-l$name"
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+- add_dir="-L$libdir"
++ # Try looking first in the location we're being installed to.
++ add_dir=
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++ add_dir="$add_dir -L$libdir"
+ add="-l$name"
+ fi
+
+@@ -4321,7 +4350,7 @@
+ fi
+ done
+ # Quote the link command for shipping.
+- relink_command="cd `pwd`; $SHELL $0 --mode=relink $libtool_args"
++ relink_command="cd `pwd`; $SHELL $0 --mode=relink $libtool_args @inst_prefix_dir@"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4622,12 +4651,30 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
+ else
+ $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
+- continue
++ exit 1
+ fi
+ fi
+
+@@ -4782,7 +4829,11 @@
+ if test "$finalize" = yes && test -z "$run"; then
+ tmpdir="/tmp"
+ test -n "$TMPDIR" && tmpdir="$TMPDIR"
+- tmpdir="$tmpdir/libtool-$$"
++ tmpdir=`mktemp -d $tmpdir/libtool-XXXXXX 2> /dev/null`
++ if test $? = 0 ; then :
++ else
++ tmpdir="$tmpdir/libtool-$$"
++ fi
+ if $mkdir -p "$tmpdir" && chmod 700 "$tmpdir"; then :
+ else
+ $echo "$modename: error: cannot create temporary directory \`$tmpdir'" 1>&2
diff --git a/eclass/ELT-patches/relink/1.4.2 b/eclass/ELT-patches/relink/1.4.2
new file mode 100644
index 000000000000..17a630b7cfe8
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.2
@@ -0,0 +1,99 @@
+--- ltmain.sh Wed Oct 3 02:05:35 2001
++++ ltmain.sh Wed Oct 3 05:16:14 2001
+@@ -754,6 +754,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -884,6 +885,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -985,6 +991,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -1866,6 +1877,7 @@
+
+ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
++ add_prefix_dir=
+ add_dir=
+ add=
+ # Finalize command for both is simple: just hardcode it.
+@@ -1886,10 +1898,22 @@
+ add="-l$name"
+ fi
+
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_prefix_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++
++ # add_prefix_dir must be appended instead, otherwise it can
++ # possibly be overrided by any hardcoded -L/... path in deplibs
+ if test "$linkmode" = prog; then
++ test -n "$add_prefix_dir" && finalize_deplibs="$finalize_deplibs $add_prefix_dir"
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
++ test -n "$add_prefix_dir" && deplibs="$deplibs $add_prefix_dir"
+ test -n "$add_dir" && deplibs="$add_dir $deplibs"
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+@@ -3856,7 +3880,7 @@
+ fi
+ done
+ # Quote the link command for shipping.
+- relink_command="(cd `pwd`; $SHELL $0 --mode=relink $libtool_args)"
++ relink_command="(cd `pwd`; $SHELL $0 --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4157,6 +4181,24 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
diff --git a/eclass/ELT-patches/relink/1.4.3 b/eclass/ELT-patches/relink/1.4.3
new file mode 100644
index 000000000000..3ed5fe3784a6
--- /dev/null
+++ b/eclass/ELT-patches/relink/1.4.3
@@ -0,0 +1,111 @@
+--- ltmain.sh Mon Feb 4 15:12:15 2002
++++ ltmain.sh Mon Feb 4 15:12:15 2002
+@@ -745,6 +745,7 @@
+ linker_flags=
+ dllsearchpath=
+ lib_search_path=`pwd`
++ inst_prefix_dir=
+
+ avoid_version=no
+ dlfiles=
+@@ -875,6 +876,11 @@
+ prev=
+ continue
+ ;;
++ inst_prefix)
++ inst_prefix_dir="$arg"
++ prev=
++ continue
++ ;;
+ release)
+ release="-$arg"
+ prev=
+@@ -976,6 +982,11 @@
+ continue
+ ;;
+
++ -inst-prefix-dir)
++ prev=inst_prefix
++ continue
++ ;;
++
+ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
+ # so, if we see these flags be careful not to treat them like -L
+ -L[A-Z][A-Z]*:*)
+@@ -1851,7 +1862,16 @@
+ if test "$hardcode_direct" = yes; then
+ add="$libdir/$linklib"
+ elif test "$hardcode_minus_L" = yes; then
+- add_dir="-L$libdir"
++ # Try looking first in the location we're being installed to.
++ add_dir=
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++ add_dir="$add_dir -L$libdir"
+ add="-l$name"
+ elif test "$hardcode_shlibpath_var" = yes; then
+ case :$finalize_shlibpath: in
+@@ -1861,7 +1881,16 @@
+ add="-l$name"
+ else
+ # We cannot seem to hardcode it, guess we'll fake it.
+- add_dir="-L$libdir"
++ # Try looking first in the location we're being installed to.
++ add_dir=
++ if test -n "$inst_prefix_dir"; then
++ case "$libdir" in
++ [\\/]*)
++ add_dir="-L$inst_prefix_dir$libdir"
++ ;;
++ esac
++ fi
++ add_dir="$add_dir -L$libdir"
+ add="-l$name"
+ fi
+
+@@ -3823,7 +3852,7 @@
+ fi
+ done
+ # Quote the link command for shipping.
+- relink_command="(cd `pwd`; $SHELL $0 --mode=relink $libtool_args)"
++ relink_command="(cd `pwd`; $SHELL $0 --mode=relink $libtool_args @inst_prefix_dir@)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -4124,12 +4153,30 @@
+ dir="$dir$objdir"
+
+ if test -n "$relink_command"; then
++ # Determine the prefix the user has applied to our future dir.
++ inst_prefix_dir=`$echo "$destdir" | sed "s%$libdir\$%%"`
++
++ # Don't allow the user to place us outside of our expected
++ # location b/c this prevents finding dependent libraries that
++ # are installed to the same prefix.
++ if test "$inst_prefix_dir" = "$destdir"; then
++ $echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
++ exit 1
++ fi
++
++ if test -n "$inst_prefix_dir"; then
++ # Stick the inst_prefix_dir data into the link command.
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
++ else
++ relink_command=`$echo "$relink_command" | sed "s%@inst_prefix_dir@%%"`
++ fi
++
+ $echo "$modename: warning: relinking \`$file'" 1>&2
+ $show "$relink_command"
+ if $run eval "$relink_command"; then :
+ else
+ $echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
+- continue
++ exit 1
+ fi
+ fi
+
diff --git a/eclass/ELT-patches/rem-int-dep/1.3.5 b/eclass/ELT-patches/rem-int-dep/1.3.5
new file mode 100644
index 000000000000..4302623607b6
--- /dev/null
+++ b/eclass/ELT-patches/rem-int-dep/1.3.5
@@ -0,0 +1,11 @@
+--- ltmain.sh 2002-11-01 19:56:50.000000000 -0600
++++ ltmain.sh 2002-11-01 19:57:03.000000000 -0600
+@@ -4551,6 +4551,8 @@
+ if test "$installed" = yes && test "$D"; then
+ install_libdir="`echo "$install_libdir" |sed -e "s:$D::g" -e 's://:/:g'`"
+ fi
++ # Removing @REM_INT_DEP@ from dependency_libs in .la
++ dependency_libs=`echo $dependency_libs | $Xsed -e 's%\([^ ]*lib@REM_INT_DEP@\.\(so\|la\|a\)\)\|\(-l@REM_INT_DEP@\)%%g'`
+ \$echo > \$output "\\
+ # \$outputname - a libtool library file
+ # Generated by \$PROGRAM - GNU \$PACKAGE \$VERSION\$TIMESTAMP
diff --git a/eclass/ELT-patches/sed/1.3.4 b/eclass/ELT-patches/sed/1.3.4
new file mode 100644
index 000000000000..c88ff727a7e1
--- /dev/null
+++ b/eclass/ELT-patches/sed/1.3.4
@@ -0,0 +1,14 @@
+--- ltmain.sh 2005-05-13 11:48:24.000000000 +0200
++++ ltmain.sh 2005-05-13 11:48:42.000000000 +0200
+@@ -47,6 +47,11 @@
+ exit 0
+ fi
+
++# define variables for historic ltconfig's generated by Libtool 1.3
++test -z "$SED" && SED=sed
++test -z "$EGREP" && EGREP=egrep
++test -z "$LTCC" && LTCC=${CC-gcc}
++
+ # The name of this program.
+ progname=`$echo "$0" | sed 's%^.*/%%'`
+ modename="$progname"
diff --git a/eclass/ELT-patches/sed/1.4.0 b/eclass/ELT-patches/sed/1.4.0
new file mode 100644
index 000000000000..9c51ac2de0ff
--- /dev/null
+++ b/eclass/ELT-patches/sed/1.4.0
@@ -0,0 +1,14 @@
+--- ltmain.sh 2003-02-13 14:54:24.000000000 +0100
++++ ltmain.sh 2003-02-13 15:24:49.000000000 +0100
+@@ -48,6 +48,11 @@ EOF
+ exit 0
+ fi
+
++# define variables for historic ltconfig's generated by Libtool 1.3
++test -z "$SED" && SED=sed
++test -z "$EGREP" && EGREP=egrep
++test -z "$LTCC" && LTCC=${CC-gcc}
++
+ # The name of this program.
+ progname=`$echo "$0" | ${SED} 's%^.*/%%'`
+ modename="$progname"
diff --git a/eclass/ELT-patches/sed/1.5.6 b/eclass/ELT-patches/sed/1.5.6
new file mode 100644
index 000000000000..5efd5310dcff
--- /dev/null
+++ b/eclass/ELT-patches/sed/1.5.6
@@ -0,0 +1,16 @@
+--- ltmain.sh 2005-04-16 16:50:02.000000000 +0200
++++ ltmain.sh 2005-04-16 16:46:46.000000000 +0200
+@@ -39,6 +39,13 @@
+ if [ "x$SED" = x ] ; then
+ SED=sed
+ fi
++# Same for EGREP, and just to be sure, do LTCC as well
++if test "x$EGREP" = x ; then
++ EGREP=egrep
++fi
++if test "x$LTCC" = x ; then
++ LTCC=${CC-gcc}
++fi
+
+ # The name of this program:
+ progname=`echo "$progpath" | $SED $basename`
diff --git a/eclass/ELT-patches/sol2-conf/2.4.2 b/eclass/ELT-patches/sol2-conf/2.4.2
new file mode 100644
index 000000000000..a57f22c9ca3a
--- /dev/null
+++ b/eclass/ELT-patches/sol2-conf/2.4.2
@@ -0,0 +1,14 @@
+Unbreak x86_64-pc-solaris2.1[01], it IS 64-bits too. Without this,
+libtool thinks the linker is called ld_sol2.
+
+--- configure
++++ configure
+@@ -1383,7 +1383,7 @@
+ case $lt_cv_prog_gnu_ld in
+ yes*)
+ case $host in
+- i?86-*-solaris*)
++ i?86-*-solaris*|x86_64-*-solaris*)
+ LD="${LD-ld} -m elf_x86_64"
+ ;;
+ sparc*-*-solaris*)
diff --git a/eclass/ELT-patches/sol2-ltmain/2.4.2 b/eclass/ELT-patches/sol2-ltmain/2.4.2
new file mode 100644
index 000000000000..ad35ed88db45
--- /dev/null
+++ b/eclass/ELT-patches/sol2-ltmain/2.4.2
@@ -0,0 +1,11 @@
+--- libltdl/config/ltmain.sh
++++ libltdl/config/ltmain.sh
+@@ -1180,7 +1180,7 @@
+ test "$opt_debug" = : || func_append preserve_args " --debug"
+
+ case $host in
+- *cygwin* | *mingw* | *pw32* | *cegcc*)
++ *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2*)
+ # don't eliminate duplications in $postdeps and $predeps
+ opt_duplicate_compiler_generated_deps=:
+ ;;
diff --git a/eclass/ELT-patches/sys-lib-dlsearch/2.4 b/eclass/ELT-patches/sys-lib-dlsearch/2.4
new file mode 100644
index 000000000000..89eb6992e391
--- /dev/null
+++ b/eclass/ELT-patches/sys-lib-dlsearch/2.4
@@ -0,0 +1,21 @@
+the linux target parses /etc/ld.so.conf to see what paths are
+searched at runtime, and hardcodes /lib /usr/lib as a fallback.
+this works poorly when cross-compiling, so tweak the behavior:
+ - search $SYSROOT/etc/ld.so.conf
+ - default to Gentoo's notion of the active multilib
+
+--- a/configure
++++ b/configure
+@@ -10405,9 +10405,9 @@
+ hardcode_into_libs=yes
+
+ # Append ld.so.conf contents to the search path
+- if test -f /etc/ld.so.conf; then
+- lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
+- sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
++ if test -f "$SYSROOT"/etc/ld.so.conf; then
++ lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < "$SYSROOT"/etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
++ sys_lib_dlsearch_path_spec="/@GENTOO_LIBDIR@ /usr/@GENTOO_LIBDIR@ $lt_ld_extra"
+ fi
+
+ # We used to test for /lib/ld.so.1 and disable shared libraries on
diff --git a/eclass/ELT-patches/target-nm/2.4.2 b/eclass/ELT-patches/target-nm/2.4.2
new file mode 100644
index 000000000000..1f22220b6fd9
--- /dev/null
+++ b/eclass/ELT-patches/target-nm/2.4.2
@@ -0,0 +1,45 @@
+https://bugs.gentoo.org/465558
+
+From a4629ebff263dcb2e05feb9e41df649ea5ce3f78 Mon Sep 17 00:00:00 2001
+From: Peter Rosin <peda@lysator.liu.se>
+Date: Sun, 28 Apr 2013 09:16:56 +0200
+Subject: [PATCH] libtool: break all the way out when a good nm is found
+
+The current code tries to locate a compatible nm tool. It starts with
+a prefixed nm tool (great!) and includes a plain nm too (that's fine).
+The problem is that the code searches for the prefixed nm before the
+plain nm (normally fine), but doesn't break once it has found a valid
+match, and the plain nm ends up the winner.
+
+Report and analysis by Mike Frysinger.
+
+* m4/libtool.m4 (LT_PATH_NM): Break all the way out on a good match.
+
+Signed-off-by: Peter Rosin <peda@lysator.liu.se>
+---
+ m4/libtool.m4 | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/m4/libtool.m4 b/m4/libtool.m4
+index 3f50b0c..d7013c5 100644
+--- a/m4/libtool.m4
++++ b/m4/libtool.m4
+@@ -3397,13 +3397,13 @@ else
+ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
+ */dev/null* | *'Invalid file or object type'*)
+ lt_cv_path_NM="$tmp_nm -B"
+- break
++ break 2
+ ;;
+ *)
+ case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
+ */dev/null*)
+ lt_cv_path_NM="$tmp_nm -p"
+- break
++ break 2
+ ;;
+ *)
+ lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
+--
+1.8.2.1
+
diff --git a/eclass/ELT-patches/test/1.4.0 b/eclass/ELT-patches/test/1.4.0
new file mode 100644
index 000000000000..d6f442f0e38d
--- /dev/null
+++ b/eclass/ELT-patches/test/1.4.0
@@ -0,0 +1,291 @@
+--- ltmain.sh Tue May 29 19:16:03 2001
++++ ltmain.sh Tue May 29 21:26:50 2001
+@@ -459,7 +459,7 @@
+ pic_mode=default
+ ;;
+ esac
+- if test $pic_mode = no && test "$deplibs_check_method" != pass_all; then
++ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+ # non-PIC code in shared libraries is not supported
+ pic_mode=default
+ fi
+@@ -1343,7 +1343,7 @@
+ ;;
+ esac
+ for pass in $passes; do
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ # Determine which files to process
+ case $pass in
+ dlopen)
+@@ -1360,11 +1360,11 @@
+ found=no
+ case $deplib in
+ -l*)
+- if test $linkmode = oldlib && test $linkmode = obj; then
++ if test "$linkmode" = oldlib && test "$linkmode" = obj; then
+ $echo "$modename: warning: \`-l' is ignored for archives/objects: $deplib" 1>&2
+ continue
+ fi
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+@@ -1384,7 +1384,7 @@
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+- test $linkmode = lib && newdependency_libs="$deplib $newdependency_libs"
++ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ fi
+@@ -1393,16 +1393,16 @@
+ case $linkmode in
+ lib)
+ deplibs="$deplib $deplibs"
+- test $pass = conv && continue
++ test "$pass" = conv && continue
+ newdependency_libs="$deplib $newdependency_libs"
+ newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
+ ;;
+ prog)
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+- if test $pass = scan; then
++ if test "$pass" = scan; then
+ deplibs="$deplib $deplibs"
+ newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
+ else
+@@ -1417,7 +1417,7 @@
+ continue
+ ;; # -L
+ -R*)
+- if test $pass = link; then
++ if test "$pass" = link; then
+ dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'`
+ # Make sure the xrpath contains only unique directories.
+ case "$xrpath " in
+@@ -1430,7 +1430,7 @@
+ ;;
+ *.la) lib="$deplib" ;;
+ *.$libext)
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+@@ -1451,7 +1451,7 @@
+ continue
+ ;;
+ prog)
+- if test $pass != link; then
++ if test "$pass" != link; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+@@ -1462,7 +1462,7 @@
+ esac # linkmode
+ ;; # *.$libext
+ *.lo | *.$objext)
+- if test $pass = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
++ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+ # If there is no dlopen support or we're linking statically,
+ # we need to preload.
+ newdlprefiles="$newdlprefiles $deplib"
+@@ -1512,13 +1512,13 @@
+
+ if test "$linkmode,$pass" = "lib,link" ||
+ test "$linkmode,$pass" = "prog,scan" ||
+- { test $linkmode = oldlib && test $linkmode = obj; }; then
++ { test "$linkmode" = oldlib && test "$linkmode" = obj; }; then
+ # Add dl[pre]opened files of deplib
+ test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
+ test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
+ fi
+
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ # Only check for convenience libraries
+ deplibs="$lib $deplibs"
+ if test -z "$libdir"; then
+@@ -1537,7 +1537,7 @@
+ esac
+ tmp_libs="$tmp_libs $deplib"
+ done
+- elif test $linkmode != prog && test $linkmode != lib; then
++ elif test "$linkmode" != prog && test "$linkmode" != lib; then
+ $echo "$modename: \`$lib' is not a convenience library" 1>&2
+ exit 1
+ fi
+@@ -1555,7 +1555,7 @@
+ fi
+
+ # This library was specified with -dlopen.
+- if test $pass = dlopen; then
++ if test "$pass" = dlopen; then
+ if test -z "$libdir"; then
+ $echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2
+ exit 1
+@@ -1604,7 +1604,7 @@
+ name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
+
+ # This library was specified with -dlpreopen.
+- if test $pass = dlpreopen; then
++ if test "$pass" = dlpreopen; then
+ if test -z "$libdir"; then
+ $echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2
+ exit 1
+@@ -1623,7 +1623,7 @@
+
+ if test -z "$libdir"; then
+ # Link the convenience library
+- if test $linkmode = lib; then
++ if test "$linkmode" = lib; then
+ deplibs="$dir/$old_library $deplibs"
+ elif test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$dir/$old_library $compile_deplibs"
+@@ -1634,7 +1634,7 @@
+ continue
+ fi
+
+- if test $linkmode = prog && test $pass != link; then
++ if test "$linkmode" = prog && test "$pass" != link; then
+ newlib_search_path="$newlib_search_path $ladir"
+ deplibs="$lib $deplibs"
+
+@@ -1671,7 +1671,7 @@
+ # Link against this shared library
+
+ if test "$linkmode,$pass" = "prog,link" ||
+- { test $linkmode = lib && test $hardcode_into_libs = yes; }; then
++ { test "$linkmode" = lib && test "$hardcode_into_libs" = yes; }; then
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+@@ -1693,7 +1693,7 @@
+ esac
+ ;;
+ esac
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ # We need to hardcode the library path
+ if test -n "$shlibpath_var"; then
+ # Make sure the rpath contains only unique directories.
+@@ -1777,7 +1777,7 @@
+ linklib=$newlib
+ fi # test -n $old_archive_from_expsyms_cmds
+
+- if test $linkmode = prog || test "$mode" != relink; then
++ if test "$linkmode" = prog || test "$mode" != relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -1826,7 +1826,7 @@
+ *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
+ esac
+ fi
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+ test -n "$add" && compile_deplibs="$add $compile_deplibs"
+ else
+@@ -1843,7 +1843,7 @@
+ fi
+ fi
+
+- if test $linkmode = prog || test "$mode" = relink; then
++ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -1865,7 +1865,7 @@
+ add="-l$name"
+ fi
+
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
+@@ -1873,7 +1873,7 @@
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+ fi
+- elif test $linkmode = prog; then
++ elif test "$linkmode" = prog; then
+ if test "$alldeplibs" = yes &&
+ { test "$deplibs_check_method" = pass_all ||
+ { test "$build_libtool_libs" = yes &&
+@@ -1932,9 +1932,9 @@
+ fi
+ fi # link shared/static library?
+
+- if test $linkmode = lib; then
++ if test "$linkmode" = lib; then
+ if test -n "$dependency_libs" &&
+- { test $hardcode_into_libs != yes || test $build_old_libs = yes ||
++ { test "$hardcode_into_libs" != yes || test $build_old_libs = yes ||
+ test $link_static = yes; }; then
+ # Extract -R from dependency_libs
+ temp_deplibs=
+@@ -1964,7 +1964,7 @@
+ tmp_libs="$tmp_libs $deplib"
+ done
+
+- if test $link_all_deplibs != no; then
++ if test "$link_all_deplibs" != no; then
+ # Add the search paths of all dependency libraries
+ for deplib in $dependency_libs; do
+ case $deplib in
+@@ -2007,15 +2007,15 @@
+ fi # link_all_deplibs != no
+ fi # linkmode = lib
+ done # for deplib in $libs
+- if test $pass = dlpreopen; then
++ if test "$pass" = dlpreopen; then
+ # Link the dlpreopened libraries before other libraries
+ for deplib in $save_deplibs; do
+ deplibs="$deplib $deplibs"
+ done
+ fi
+- if test $pass != dlopen; then
+- test $pass != scan && dependency_libs="$newdependency_libs"
+- if test $pass != conv; then
++ if test "$pass" != dlopen; then
++ test "$pass" != scan && dependency_libs="$newdependency_libs"
++ if test "$pass" != conv; then
+ # Make sure lib_search_path contains only unique directories.
+ lib_search_path=
+ for dir in $newlib_search_path; do
+@@ -2073,7 +2073,7 @@
+ deplibs=
+ fi
+ done # for pass
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ dlfiles="$newdlfiles"
+ dlprefiles="$newdlprefiles"
+ fi
+@@ -2410,7 +2410,7 @@
+ ;;
+ *)
+ # Add libc to deplibs on all other systems if necessary.
+- if test $build_libtool_need_lc = "yes"; then
++ if test "$build_libtool_need_lc" = "yes"; then
+ deplibs="$deplibs -lc"
+ fi
+ ;;
+@@ -2683,7 +2683,7 @@
+
+ # Test again, we may have decided not to build it any more
+ if test "$build_libtool_libs" = yes; then
+- if test $hardcode_into_libs = yes; then
++ if test "$hardcode_into_libs" = yes; then
+ # Hardcode the library paths
+ hardcode_libdirs=
+ dep_rpath=
diff --git a/eclass/ELT-patches/test/1.4.2 b/eclass/ELT-patches/test/1.4.2
new file mode 100644
index 000000000000..8ae65ef00280
--- /dev/null
+++ b/eclass/ELT-patches/test/1.4.2
@@ -0,0 +1,578 @@
+--- ltmain.sh 2001-09-10 19:40:18.000000000 -0400
++++ ltmain.sh 2002-07-11 14:49:35.000000000 -0400
+@@ -467,7 +467,7 @@ if test -z "$show_help"; then
+ pic_mode=default
+ ;;
+ esac
+- if test $pic_mode = no && test "$deplibs_check_method" != pass_all; then
++ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
+ # non-PIC code in shared libraries is not supported
+ pic_mode=default
+ fi
+@@ -1303,11 +1303,11 @@ compiler."
+ output_objdir="$output_objdir/$objdir"
+ fi
+ # Create the object directory.
+- if test ! -d $output_objdir; then
++ if test ! -d "$output_objdir"; then
+ $show "$mkdir $output_objdir"
+ $run $mkdir $output_objdir
+ status=$?
+- if test $status -ne 0 && test ! -d $output_objdir; then
++ if test "$status" -ne 0 && test ! -d "$output_objdir"; then
+ exit $status
+ fi
+ fi
+@@ -1366,7 +1366,7 @@ compiler."
+ ;;
+ esac
+ for pass in $passes; do
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ # Determine which files to process
+ case $pass in
+ dlopen)
+@@ -1383,11 +1383,11 @@ compiler."
+ found=no
+ case $deplib in
+ -l*)
+- if test $linkmode = oldlib && test $linkmode = obj; then
++ if test "$linkmode" = oldlib && test "$linkmode" = obj; then
+ $echo "$modename: warning: \`-l' is ignored for archives/objects: $deplib" 1>&2
+ continue
+ fi
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+@@ -1407,7 +1407,7 @@ compiler."
+ finalize_deplibs="$deplib $finalize_deplibs"
+ else
+ deplibs="$deplib $deplibs"
+- test $linkmode = lib && newdependency_libs="$deplib $newdependency_libs"
++ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
+ fi
+ continue
+ fi
+@@ -1416,16 +1416,16 @@ compiler."
+ case $linkmode in
+ lib)
+ deplibs="$deplib $deplibs"
+- test $pass = conv && continue
++ test "$pass" = conv && continue
+ newdependency_libs="$deplib $newdependency_libs"
+ newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
+ ;;
+ prog)
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+- if test $pass = scan; then
++ if test "$pass" = scan; then
+ deplibs="$deplib $deplibs"
+ newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
+ else
+@@ -1440,7 +1440,7 @@ compiler."
+ continue
+ ;; # -L
+ -R*)
+- if test $pass = link; then
++ if test "$pass" = link; then
+ dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'`
+ # Make sure the xrpath contains only unique directories.
+ case "$xrpath " in
+@@ -1453,7 +1453,7 @@ compiler."
+ ;;
+ *.la) lib="$deplib" ;;
+ *.$libext)
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ deplibs="$deplib $deplibs"
+ continue
+ fi
+@@ -1474,7 +1474,7 @@ compiler."
+ continue
+ ;;
+ prog)
+- if test $pass != link; then
++ if test "$pass" != link; then
+ deplibs="$deplib $deplibs"
+ else
+ compile_deplibs="$deplib $compile_deplibs"
+@@ -1485,7 +1485,7 @@ compiler."
+ esac # linkmode
+ ;; # *.$libext
+ *.lo | *.$objext)
+- if test $pass = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
++ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
+ # If there is no dlopen support or we're linking statically,
+ # we need to preload.
+ newdlprefiles="$newdlprefiles $deplib"
+@@ -1501,7 +1501,7 @@ compiler."
+ continue
+ ;;
+ esac # case $deplib
+- if test $found = yes || test -f "$lib"; then :
++ if test "$found" = yes || test -f "$lib"; then :
+ else
+ $echo "$modename: cannot find the library \`$lib'" 1>&2
+ exit 1
+@@ -1535,13 +1535,13 @@ compiler."
+
+ if test "$linkmode,$pass" = "lib,link" ||
+ test "$linkmode,$pass" = "prog,scan" ||
+- { test $linkmode = oldlib && test $linkmode = obj; }; then
++ { test "$linkmode" = oldlib && test "$linkmode" = obj; }; then
+ # Add dl[pre]opened files of deplib
+ test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
+ test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
+ fi
+
+- if test $pass = conv; then
++ if test "$pass" = conv; then
+ # Only check for convenience libraries
+ deplibs="$lib $deplibs"
+ if test -z "$libdir"; then
+@@ -1560,7 +1560,7 @@ compiler."
+ esac
+ tmp_libs="$tmp_libs $deplib"
+ done
+- elif test $linkmode != prog && test $linkmode != lib; then
++ elif test "$linkmode" != prog && test "$linkmode" != lib; then
+ $echo "$modename: \`$lib' is not a convenience library" 1>&2
+ exit 1
+ fi
+@@ -1578,7 +1578,7 @@ compiler."
+ fi
+
+ # This library was specified with -dlopen.
+- if test $pass = dlopen; then
++ if test "$pass" = dlopen; then
+ if test -z "$libdir"; then
+ $echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2
+ exit 1
+@@ -1627,7 +1627,7 @@ compiler."
+ name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
+
+ # This library was specified with -dlpreopen.
+- if test $pass = dlpreopen; then
++ if test "$pass" = dlpreopen; then
+ if test -z "$libdir"; then
+ $echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2
+ exit 1
+@@ -1646,7 +1646,7 @@ compiler."
+
+ if test -z "$libdir"; then
+ # Link the convenience library
+- if test $linkmode = lib; then
++ if test "$linkmode" = lib; then
+ deplibs="$dir/$old_library $deplibs"
+ elif test "$linkmode,$pass" = "prog,link"; then
+ compile_deplibs="$dir/$old_library $compile_deplibs"
+@@ -1657,7 +1657,7 @@ compiler."
+ continue
+ fi
+
+- if test $linkmode = prog && test $pass != link; then
++ if test "$linkmode" = prog && test "$pass" != link; then
+ newlib_search_path="$newlib_search_path $ladir"
+ deplibs="$lib $deplibs"
+
+@@ -1673,7 +1673,7 @@ compiler."
+ -L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
+ esac
+ # Need to link against all dependency_libs?
+- if test $linkalldeplibs = yes; then
++ if test "$linkalldeplibs" = yes; then
+ deplibs="$deplib $deplibs"
+ else
+ # Need to hardcode shared library paths
+@@ -1694,7 +1694,7 @@ compiler."
+ # Link against this shared library
+
+ if test "$linkmode,$pass" = "prog,link" ||
+- { test $linkmode = lib && test $hardcode_into_libs = yes; }; then
++ { test "$linkmode" = lib && test "$hardcode_into_libs" = yes; }; then
+ # Hardcode the library path.
+ # Skip directories that are in the system default run-time
+ # search path.
+@@ -1716,7 +1716,7 @@ compiler."
+ esac
+ ;;
+ esac
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ # We need to hardcode the library path
+ if test -n "$shlibpath_var"; then
+ # Make sure the rpath contains only unique directories.
+@@ -1798,9 +1798,9 @@ compiler."
+ # make sure the library variables are pointing to the new library
+ dir=$output_objdir
+ linklib=$newlib
+- fi # test -n $old_archive_from_expsyms_cmds
++ fi # test -n "$old_archive_from_expsyms_cmds"
+
+- if test $linkmode = prog || test "$mode" != relink; then
++ if test "$linkmode" = prog || test "$mode" != relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -1849,7 +1849,7 @@ compiler."
+ *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
+ esac
+ fi
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
+ test -n "$add" && compile_deplibs="$add $compile_deplibs"
+ else
+@@ -1866,7 +1866,7 @@ compiler."
+ fi
+ fi
+
+- if test $linkmode = prog || test "$mode" = relink; then
++ if test "$linkmode" = prog || test "$mode" = relink; then
+ add_shlibpath=
+ add_dir=
+ add=
+@@ -1888,7 +1888,7 @@ compiler."
+ add="-l$name"
+ fi
+
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
+ test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
+ else
+@@ -1896,7 +1896,7 @@ compiler."
+ test -n "$add" && deplibs="$add $deplibs"
+ fi
+ fi
+- elif test $linkmode = prog; then
++ elif test "$linkmode" = prog; then
+ if test "$alldeplibs" = yes &&
+ { test "$deplibs_check_method" = pass_all ||
+ { test "$build_libtool_libs" = yes &&
+@@ -1955,10 +1955,10 @@ compiler."
+ fi
+ fi # link shared/static library?
+
+- if test $linkmode = lib; then
++ if test "$linkmode" = lib; then
+ if test -n "$dependency_libs" &&
+- { test $hardcode_into_libs != yes || test $build_old_libs = yes ||
+- test $link_static = yes; }; then
++ { test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes ||
++ test "$link_static" = yes; }; then
+ # Extract -R from dependency_libs
+ temp_deplibs=
+ for libdir in $dependency_libs; do
+@@ -1987,7 +1987,7 @@ compiler."
+ tmp_libs="$tmp_libs $deplib"
+ done
+
+- if test $link_all_deplibs != no; then
++ if test "$link_all_deplibs" != no; then
+ # Add the search paths of all dependency libraries
+ for deplib in $dependency_libs; do
+ case $deplib in
+@@ -2030,15 +2030,15 @@ compiler."
+ fi # link_all_deplibs != no
+ fi # linkmode = lib
+ done # for deplib in $libs
+- if test $pass = dlpreopen; then
++ if test "$pass" = dlpreopen; then
+ # Link the dlpreopened libraries before other libraries
+ for deplib in $save_deplibs; do
+ deplibs="$deplib $deplibs"
+ done
+ fi
+- if test $pass != dlopen; then
+- test $pass != scan && dependency_libs="$newdependency_libs"
+- if test $pass != conv; then
++ if test "$pass" != dlopen; then
++ test "$pass" != scan && dependency_libs="$newdependency_libs"
++ if test "$pass" != conv; then
+ # Make sure lib_search_path contains only unique directories.
+ lib_search_path=
+ for dir in $newlib_search_path; do
+@@ -2096,7 +2096,7 @@ compiler."
+ deplibs=
+ fi
+ done # for pass
+- if test $linkmode = prog; then
++ if test "$linkmode" = prog; then
+ dlfiles="$newdlfiles"
+ dlprefiles="$newdlprefiles"
+ fi
+@@ -2173,7 +2173,7 @@ compiler."
+ fi
+
+ set dummy $rpath
+- if test $# -gt 2; then
++ if test "$#" -gt 2; then
+ $echo "$modename: warning: ignoring multiple \`-rpath's for a libtool library" 1>&2
+ fi
+ install_libdir="$2"
+@@ -2240,7 +2240,7 @@ compiler."
+ ;;
+ esac
+
+- if test $age -gt $current; then
++ if test "$age" -gt "$current"; then
+ $echo "$modename: AGE \`$age' is greater than the current interface number \`$current'" 1>&2
+ $echo "$modename: \`$vinfo' is not valid version information" 1>&2
+ exit 1
+@@ -2279,7 +2279,7 @@ compiler."
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$revision
+- while test $loop != 0; do
++ while test "$loop" -ne 0; do
+ iface=`expr $revision - $loop`
+ loop=`expr $loop - 1`
+ verstring="sgi$major.$iface:$verstring"
+@@ -2302,7 +2302,7 @@ compiler."
+
+ # Add in all the interfaces that we are compatible with.
+ loop=$age
+- while test $loop != 0; do
++ while test "$loop" -ne 0; do
+ iface=`expr $current - $loop`
+ loop=`expr $loop - 1`
+ verstring="$verstring:${iface}.0"
+@@ -2403,7 +2403,7 @@ compiler."
+ *) finalize_rpath="$finalize_rpath $libdir" ;;
+ esac
+ done
+- if test $hardcode_into_libs != yes || test $build_old_libs = yes; then
++ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
+ dependency_libs="$temp_xrpath $dependency_libs"
+ fi
+ fi
+@@ -2446,7 +2446,7 @@ compiler."
+ ;;
+ *)
+ # Add libc to deplibs on all other systems if necessary.
+- if test $build_libtool_need_lc = "yes"; then
++ if test "$build_libtool_need_lc" = "yes"; then
+ deplibs="$deplibs -lc"
+ fi
+ ;;
+@@ -2487,7 +2487,7 @@ compiler."
+ EOF
+ $rm conftest
+ $CC -o conftest conftest.c $deplibs
+- if test $? -eq 0 ; then
++ if test "$?" -eq 0 ; then
+ ldd_output=`ldd conftest`
+ for i in $deplibs; do
+ name="`expr $i : '-l\(.*\)'`"
+@@ -2521,7 +2521,7 @@ EOF
+ $rm conftest
+ $CC -o conftest conftest.c $i
+ # Did it work?
+- if test $? -eq 0 ; then
++ if test "$?" -eq 0 ; then
+ ldd_output=`ldd conftest`
+ libname=`eval \\$echo \"$libname_spec\"`
+ deplib_matches=`eval \\$echo \"$library_names_spec\"`
+@@ -2692,7 +2692,7 @@ EOF
+ echo "*** automatically added whenever a program is linked with this library"
+ echo "*** or is declared to -dlopen it."
+
+- if test $allow_undefined = no; then
++ if test "$allow_undefined" = no; then
+ echo
+ echo "*** Since this library must not contain undefined symbols,"
+ echo "*** because either the platform does not support them or"
+@@ -2719,7 +2719,7 @@ EOF
+
+ # Test again, we may have decided not to build it any more
+ if test "$build_libtool_libs" = yes; then
+- if test $hardcode_into_libs = yes; then
++ if test "$hardcode_into_libs" = yes; then
+ # Hardcode the library paths
+ hardcode_libdirs=
+ dep_rpath=
+@@ -2850,7 +2850,7 @@ EOF
+ $show "mkdir $gentop"
+ $run mkdir "$gentop"
+ status=$?
+- if test $status -ne 0 && test ! -d "$gentop"; then
++ if test "$status" -ne 0 && test ! -d "$gentop"; then
+ exit $status
+ fi
+ generated="$generated $gentop"
+@@ -2869,7 +2869,7 @@ EOF
+ $show "mkdir $xdir"
+ $run mkdir "$xdir"
+ status=$?
+- if test $status -ne 0 && test ! -d "$xdir"; then
++ if test "$status" -ne 0 && test ! -d "$xdir"; then
+ exit $status
+ fi
+ $show "(cd $xdir && $AR x $xabs)"
+@@ -2989,7 +2989,7 @@ EOF
+ $show "mkdir $gentop"
+ $run mkdir "$gentop"
+ status=$?
+- if test $status -ne 0 && test ! -d "$gentop"; then
++ if test "$status" -ne 0 && test ! -d "$gentop"; then
+ exit $status
+ fi
+ generated="$generated $gentop"
+@@ -3008,7 +3008,7 @@ EOF
+ $show "mkdir $xdir"
+ $run mkdir "$xdir"
+ status=$?
+- if test $status -ne 0 && test ! -d "$xdir"; then
++ if test "$status" -ne 0 && test ! -d "$xdir"; then
+ exit $status
+ fi
+ $show "(cd $xdir && $AR x $xabs)"
+@@ -3403,7 +3403,7 @@ static const void *lt_preloaded_setup()
+ finalize_command=`$echo "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"`
+ fi
+
+- if test $need_relink = no || test "$build_libtool_libs" != yes; then
++ if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
+ # Replace the output file specification.
+ compile_command=`$echo "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
+ link_command="$compile_command$compile_rpath"
+@@ -3528,7 +3528,7 @@ static const void *lt_preloaded_setup()
+ relink_command="$var=\"$var_value\"; export $var; $relink_command"
+ fi
+ done
+- relink_command="cd `pwd`; $relink_command"
++ relink_command="(cd `pwd`; $relink_command)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+ fi
+
+@@ -3771,7 +3771,7 @@ fi\
+ $show "mkdir $gentop"
+ $run mkdir "$gentop"
+ status=$?
+- if test $status -ne 0 && test ! -d "$gentop"; then
++ if test "$status" -ne 0 && test ! -d "$gentop"; then
+ exit $status
+ fi
+ generated="$generated $gentop"
+@@ -3791,7 +3791,7 @@ fi\
+ $show "mkdir $xdir"
+ $run mkdir "$xdir"
+ status=$?
+- if test $status -ne 0 && test ! -d "$xdir"; then
++ if test "$status" -ne 0 && test ! -d "$xdir"; then
+ exit $status
+ fi
+ $show "(cd $xdir && $AR x $xabs)"
+@@ -3858,7 +3858,7 @@ fi\
+ fi
+ done
+ # Quote the link command for shipping.
+- relink_command="cd `pwd`; $SHELL $0 --mode=relink $libtool_args"
++ relink_command="(cd `pwd`; $SHELL $0 --mode=relink $libtool_args)"
+ relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
+
+ # Only create the output if not a dry run.
+@@ -3948,7 +3948,7 @@ dlpreopen='$dlprefiles'
+
+ # Directory that this library needs to be installed in:
+ libdir='$install_libdir'"
+- if test "$installed" = no && test $need_relink = yes; then
++ if test "$installed" = no && test "$need_relink" = yes; then
+ $echo >> $output "\
+ relink_command=\"$relink_command\""
+ fi
+@@ -4084,7 +4084,7 @@ relink_command=\"$relink_command\""
+
+ # Not a directory, so check to see that there is only one file specified.
+ set dummy $files
+- if test $# -gt 2; then
++ if test "$#" -gt 2; then
+ $echo "$modename: \`$dest' is not a directory" 1>&2
+ $echo "$help" 1>&2
+ exit 1
+@@ -4186,7 +4186,7 @@ relink_command=\"$relink_command\""
+ $run eval "$striplib $destdir/$realname" || exit $?
+ fi
+
+- if test $# -gt 0; then
++ if test "$#" -gt 0; then
+ # Delete the old symlinks, and create new ones.
+ for linkname
+ do
+@@ -4444,7 +4444,7 @@ relink_command=\"$relink_command\""
+ fi
+
+ # Exit here if they wanted silent mode.
+- test "$show" = ":" && exit 0
++ test "$show" = : && exit 0
+
+ echo "----------------------------------------------------------------------"
+ echo "Libraries have been installed in:"
+@@ -4610,7 +4610,7 @@ relink_command=\"$relink_command\""
+ fi
+
+ # Now prepare to actually exec the command.
+- exec_cmd='"$cmd"$args'
++ exec_cmd="\$cmd$args"
+ else
+ # Display what would be done.
+ if test -n "$shlibpath_var"; then
+@@ -4660,10 +4660,10 @@ relink_command=\"$relink_command\""
+ objdir="$dir/$objdir"
+ fi
+ name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
+- test $mode = uninstall && objdir="$dir"
++ test "$mode" = uninstall && objdir="$dir"
+
+ # Remember objdir for removal later, being careful to avoid duplicates
+- if test $mode = clean; then
++ if test "$mode" = clean; then
+ case " $rmdirs " in
+ *" $objdir "*) ;;
+ *) rmdirs="$rmdirs $objdir" ;;
+@@ -4695,9 +4695,9 @@ relink_command=\"$relink_command\""
+ rmfiles="$rmfiles $objdir/$n"
+ done
+ test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
+- test $mode = clean && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
++ test "$mode" = clean && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
+
+- if test $mode = uninstall; then
++ if test "$mode" = uninstall; then
+ if test -n "$library_names"; then
+ # Do each command in the postuninstall commands.
+ eval cmds=\"$postuninstall_cmds\"
+@@ -4706,7 +4706,7 @@ relink_command=\"$relink_command\""
+ IFS="$save_ifs"
+ $show "$cmd"
+ $run eval "$cmd"
+- if test $? != 0 && test "$rmforce" != yes; then
++ if test "$?" -ne 0 && test "$rmforce" != yes; then
+ exit_status=1
+ fi
+ done
+@@ -4721,7 +4721,7 @@ relink_command=\"$relink_command\""
+ IFS="$save_ifs"
+ $show "$cmd"
+ $run eval "$cmd"
+- if test $? != 0 && test "$rmforce" != yes; then
++ if test "$?" -ne 0 && test "$rmforce" != yes; then
+ exit_status=1
+ fi
+ done
+@@ -4741,7 +4741,7 @@ relink_command=\"$relink_command\""
+
+ *)
+ # Do a test to see if this is a libtool program.
+- if test $mode = clean &&
++ if test "$mode" = clean &&
+ (sed -e '4q' $file | egrep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
+ relink_command=
+ . $dir/$file
diff --git a/eclass/ELT-patches/tmp/1.3.5 b/eclass/ELT-patches/tmp/1.3.5
new file mode 100644
index 000000000000..29b70e09378d
--- /dev/null
+++ b/eclass/ELT-patches/tmp/1.3.5
@@ -0,0 +1,15 @@
+--- ltmain.sh Fri Jul 7 18:49:44 2000
++++ ltmain.sh Fri May 26 21:53:15 2000
+@@ -3462,7 +3462,11 @@
+ if test "$finalize" = yes && test -z "$run"; then
+ tmpdir="/tmp"
+ test -n "$TMPDIR" && tmpdir="$TMPDIR"
+- tmpdir="$tmpdir/libtool-$$"
++ tmpdir=`mktemp -d $tmpdir/libtool-XXXXXX 2> /dev/null`
++ if test $? = 0 ; then :
++ else
++ tmpdir="$tmpdir/libtool-$$"
++ fi
+ if $mkdir -p "$tmpdir" && chmod 700 "$tmpdir"; then :
+ else
+ $echo "$modename: error: cannot create temporary directory \`$tmpdir'" 1>&2
diff --git a/eclass/ELT-patches/uclibc-conf/1.2.0 b/eclass/ELT-patches/uclibc-conf/1.2.0
new file mode 100644
index 000000000000..acd804c8f404
--- /dev/null
+++ b/eclass/ELT-patches/uclibc-conf/1.2.0
@@ -0,0 +1,48 @@
+--- configure.libltdl~ Fri Jun 11 08:54:04 2004
++++ configure Fri Jun 11 08:56:33 2004
+@@ -1978,6 +1978,11 @@
+ lt_cv_file_magic_test_file=`echo /lib/libc.so* /lib/libc-*.so`
+ ;;
+
++linux-uclibc*)
++ lt_cv_deplibs_check_method=pass_all
++ lt_cv_file_magic_test_file=`echo /lib/libuClibc*.so`
++ ;;
++
+ netbsd*)
+ if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then
+ lt_cv_deplibs_check_method='match_pattern /lib[^/\.]+\.so\.[0-9]+\.[0-9]+$'
+@@ -3030,7 +3035,7 @@
+
+ # Transform linux* to *-*-linux-gnu*, to support old configure scripts.
+ case $host_os in
+-linux-gnu*) ;;
++linux-gnu*|linux-uclibc*) ;;
+ linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'`
+ esac
+
+@@ -4504,6 +4509,24 @@
+ dynamic_linker='GNU/Linux ld.so'
+ ;;
+
++linux-uclibc*)
++ version_type=linux
++ need_lib_prefix=no
++ need_version=no
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major $libname.so'
++ soname_spec='${libname}${release}.so$major'
++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
++ shlibpath_var=LD_LIBRARY_PATH
++ shlibpath_overrides_runpath=no
++ # This implies no fast_install, which is unacceptable.
++ # Some rework will be needed to allow for fast_install
++ # before this can be enabled.
++ # Note: copied from linux-gnu, and may not be appropriate.
++ hardcode_into_libs=yes
++ # Assume using the uClibc dynamic linker.
++ dynamic_linker="uClibc ld.so"
++ ;;
++
+ netbsd*)
+ version_type=sunos
+ need_lib_prefix=no
diff --git a/eclass/ELT-patches/uclibc-conf/1.3.0c b/eclass/ELT-patches/uclibc-conf/1.3.0c
new file mode 100644
index 000000000000..03efc5dc9c86
--- /dev/null
+++ b/eclass/ELT-patches/uclibc-conf/1.3.0c
@@ -0,0 +1,48 @@
+--- configure.libltdl~ Fri Jun 11 08:54:04 2004
++++ configure Fri Jun 11 08:56:33 2004
+@@ -1978,6 +1978,11 @@
+ lt_cv_file_magic_test_file=`echo /lib/libc.so* /lib/libc-*.so`
+ ;;
+
++linux-uclibc*)
++ lt_cv_deplibs_check_method=pass_all
++ lt_cv_file_magic_test_file=`echo /lib/libuClibc*.so`
++ ;;
++
+ netbsd*)
+ if echo __ELF__ | $CC -E - | grep __ELF__ > /dev/null; then
+ lt_cv_deplibs_check_method='match_pattern /lib[^/\.]+\.so\.[0-9]+\.[0-9]+$'
+@@ -3030,7 +3035,7 @@
+
+ # Transform linux* to *-*-linux-gnu*, to support old configure scripts.
+ case "$host_os" in
+-linux-gnu*) ;;
++linux-gnu*|linux-uclibc*) ;;
+ linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'`
+ esac
+
+@@ -4504,6 +4509,24 @@
+ dynamic_linker='GNU/Linux ld.so'
+ ;;
+
++linux-uclibc*)
++ version_type=linux
++ need_lib_prefix=no
++ need_version=no
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major $libname.so'
++ soname_spec='${libname}${release}.so$major'
++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
++ shlibpath_var=LD_LIBRARY_PATH
++ shlibpath_overrides_runpath=no
++ # This implies no fast_install, which is unacceptable.
++ # Some rework will be needed to allow for fast_install
++ # before this can be enabled.
++ # Note: copied from linux-gnu, and may not be appropriate.
++ hardcode_into_libs=yes
++ # Assume using the uClibc dynamic linker.
++ dynamic_linker="uClibc ld.so"
++ ;;
++
+ netbsd*)
+ version_type=sunos
+ need_lib_prefix=no
diff --git a/eclass/ELT-patches/uclibc-ltconf/1.2.0 b/eclass/ELT-patches/uclibc-ltconf/1.2.0
new file mode 100644
index 000000000000..bbc6bc96571b
--- /dev/null
+++ b/eclass/ELT-patches/uclibc-ltconf/1.2.0
@@ -0,0 +1,36 @@
+--- ltconfig.uclibc 2004-01-14 22:07:42.000000000 +0100
++++ ltconfig 2004-03-10 15:43:37.000000000 +0100
+@@ -603,6 +603,7 @@
+ # Transform linux* to *-*-linux-gnu*, to support old configure scripts.
+ case $host_os in
+ linux-gnu*) ;;
++linux-uclibc*) ;;
+ linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'`
+ esac
+
+@@ -1259,6 +1260,25 @@
+ fi
+ ;;
+
++linux-uclibc*)
++ version_type=linux
++ need_lib_prefix=no
++ need_version=no
++ library_names_spec='${libname}${release}.so.$versuffix ${libname}${release}.so.$major $libname.so'
++ soname_spec='${libname}${release}.so.$major'
++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
++ shlibpath_var=LD_LIBRARY_PATH
++ shlibpath_overrides_runpath=no
++ deplibs_check_method=pass_all
++ # This implies no fast_install, which is unacceptable.
++ # Some rework will be needed to allow for fast_install
++ # before this can be enabled.
++ # Note: copied from linux-gnu, and may not be appropriate.
++ hardcode_into_libs=yes
++ # Assume using the uClibc dynamic linker.
++ dynamic_linker="uClibc ld.so"
++ ;;
++
+ netbsd* | openbsd*)
+ version_type=sunos
+ library_names_spec='${libname}${release}.so.$versuffix'
diff --git a/eclass/ELT-patches/uclibc-ltconf/1.3.0 b/eclass/ELT-patches/uclibc-ltconf/1.3.0
new file mode 100644
index 000000000000..0a5a9ead8f0c
--- /dev/null
+++ b/eclass/ELT-patches/uclibc-ltconf/1.3.0
@@ -0,0 +1,39 @@
+--- ltconfig.uclibc 2004-01-14 22:07:42.000000000 +0100
++++ ltconfig 2004-03-10 15:43:37.000000000 +0100
+@@ -603,6 +603,7 @@
+ # Transform linux* to *-*-linux-gnu*, to support old configure scripts.
+ case $host_os in
+ linux-gnu*) ;;
++linux-uclibc*) ;;
+ linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'`
+ esac
+
+@@ -1259,6 +1260,28 @@
+ dynamic_linker='GNU/Linux ld.so'
+ ;;
+
++linux-uclibc*)
++ version_type=linux
++ need_lib_prefix=no
++ need_version=no
++ library_names_spec='${libname}${release}.so$versuffix ${libname}${release}.so$major $libname.so'
++ soname_spec='${libname}${release}.so$major'
++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
++ shlibpath_var=LD_LIBRARY_PATH
++ shlibpath_overrides_runpath=no
++ # deplibs_check_method=pass_all
++ deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
++ file_magic_cmd=/usr/bin/file
++ file_magic_test_file=`echo /lib/libuClibc-*.so`
++ # This implies no fast_install, which is unacceptable.
++ # Some rework will be needed to allow for fast_install
++ # before this can be enabled.
++ # Note: copied from linux-gnu, and may not be appropriate.
++ hardcode_into_libs=yes
++ # Assume using the uClibc dynamic linker.
++ dynamic_linker="uClibc ld.so"
++ ;;
++
+ netbsd*)
+ need_lib_prefix=no
+ need_version=no
diff --git a/eclass/alternatives.eclass b/eclass/alternatives.eclass
new file mode 100644
index 000000000000..62633dc63e22
--- /dev/null
+++ b/eclass/alternatives.eclass
@@ -0,0 +1,143 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: alternatives.eclass
+# @AUTHOR:
+# Original author: Alastair Tse <liquidx@gentoo.org> (03 Oct 2003)
+# @BLURB: Creates symlink to the latest version of multiple slotted packages.
+# @DESCRIPTION:
+# When a package is SLOT'ed, very often we need to have a symlink to the
+# latest version. However, depending on the order the user has merged them,
+# more often than not, the symlink may be clobbered by the older versions.
+#
+# This eclass provides a convenience function that needs to be given a
+# list of alternatives (descending order of recent-ness) and the symlink.
+# It will choose the latest version it can find installed and create
+# the desired symlink.
+#
+# There are two ways to use this eclass. First is by declaring two variables
+# $SOURCE and $ALTERNATIVES where $SOURCE is the symlink to be created and
+# $ALTERNATIVES is a list of alternatives. Second way is to use the function
+# alternatives_makesym() like the example below.
+# @EXAMPLE:
+# pkg_postinst() {
+# alternatives_makesym "/usr/bin/python" "/usr/bin/python2.3" "/usr/bin/python2.2"
+# }
+#
+# The above example will create a symlink at /usr/bin/python to either
+# /usr/bin/python2.3 or /usr/bin/python2.2. It will choose python2.3 over
+# python2.2 if both exist.
+#
+# Alternatively, you can use this function:
+#
+# pkg_postinst() {
+# alternatives_auto_makesym "/usr/bin/python" "/usr/bin/python[0-9].[0-9]"
+# }
+#
+# This will use bash pathname expansion to fill a list of alternatives it can
+# link to. It is probably more robust against version upgrades. You should
+# consider using this unless you want to do something special.
+
+# @ECLASS-VARIABLE: SOURCE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The symlink to be created
+
+# @ECLASS-VARIABLE: ALTERNATIVES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The list of alternatives
+
+# @FUNCTION: alternatives_auto_makesym
+# @DESCRIPTION:
+# automatic deduction based on a symlink and a regex mask
+alternatives_auto_makesym() {
+ has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+ local SYMLINK REGEX ALT myregex
+ SYMLINK=$1
+ REGEX=$2
+ if [ "${REGEX:0:1}" != "/" ]
+ then
+ #not an absolute path:
+ #inherit the root directory of our main link path for our regex search
+ myregex="${SYMLINK%/*}/${REGEX}"
+ else
+ myregex=${REGEX}
+ fi
+
+ # sort a space delimited string by converting it to a multiline list
+ # and then run sort -r over it.
+ # make sure we use ${EROOT} because otherwise stage-building will break
+ ALT="$(for i in $(echo ${EROOT}${myregex}); do echo ${i#${EROOT}}; done | sort -r)"
+ alternatives_makesym ${SYMLINK} ${ALT}
+}
+
+alternatives_makesym() {
+ has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+ local ALTERNATIVES=""
+ local SYMLINK=""
+ local alt pref
+
+ # usage: alternatives_makesym <resulting symlink> [alternative targets..]
+ # make sure it is in the prefix, allow it already to be in the prefix
+ SYMLINK=${EPREFIX}/${1#${EPREFIX}}
+ # this trick removes the trailing / from ${ROOT}
+ pref=${ROOT%/}
+ shift
+ ALTERNATIVES=$@
+
+ # step through given alternatives from first to last
+ # and if one exists, link it and finish.
+
+ for alt in ${ALTERNATIVES}; do
+ alt=${EPREFIX}/${alt#${EPREFIX}}
+ if [ -f "${pref}${alt}" ]; then
+ #are files in same directory?
+ if [ "${alt%/*}" = "${SYMLINK%/*}" ]
+ then
+ #yes; strip leading dirname from alt to create relative symlink
+ einfo "Linking ${alt} to ${pref}${SYMLINK} (relative)"
+ ln -sf ${alt##*/} ${pref}${SYMLINK}
+ else
+ #no; keep absolute path
+ einfo "Linking ${alt} to ${pref}${SYMLINK} (absolute)"
+ ln -sf ${pref}${alt} ${pref}${SYMLINK}
+ fi
+ break
+ fi
+ done
+
+ # report any errors
+ if [ ! -L ${pref}${SYMLINK} ]; then
+ ewarn "Unable to establish ${pref}${SYMLINK} symlink"
+ else
+ # we need to check for either the target being in relative path form
+ # or absolute path form
+ if [ ! -f "`dirname ${pref}${SYMLINK}`/`readlink ${pref}${SYMLINK}`" -a \
+ ! -f "`readlink ${pref}${SYMLINK}`" ]; then
+ ewarn "Removing dead symlink ${pref}${SYMLINK}"
+ rm -f ${pref}${SYMLINK}
+ fi
+ fi
+}
+
+# @FUNCTION: alternatives_pkg_postinst
+# @DESCRIPTION:
+# The alternatives pkg_postinst, this function will be exported
+alternatives_pkg_postinst() {
+ if [ -n "${ALTERNATIVES}" -a -n "${SOURCE}" ]; then
+ alternatives_makesym ${SOURCE} ${ALTERNATIVES}
+ fi
+}
+
+# @FUNCTION: alternatives_pkg_postrm
+# @DESCRIPTION:
+# The alternatives pkg_postrm, this function will be exported
+alternatives_pkg_postrm() {
+ if [ -n "${ALTERNATIVES}" -a -n "${SOURCE}" ]; then
+ alternatives_makesym ${SOURCE} ${ALTERNATIVES}
+ fi
+}
+
+EXPORT_FUNCTIONS pkg_postinst pkg_postrm
diff --git a/eclass/ant-tasks.eclass b/eclass/ant-tasks.eclass
new file mode 100644
index 000000000000..fe9405d1da10
--- /dev/null
+++ b/eclass/ant-tasks.eclass
@@ -0,0 +1,180 @@
+# Eclass for building dev-java/ant-* packages
+#
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License, v2 or later
+# Author Vlastimil Babka <caster@gentoo.org>
+# $Id$
+
+# @ECLASS: ant-tasks.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Vlastimil Babka <caster@gentoo.org>
+# @BLURB: Eclass for building dev-java/ant-* packages
+# @DESCRIPTION:
+# This eclass provides functionality and default ebuild variables for building
+# dev-java/ant-* packages easily.
+
+
+# we set ant-core dep ourselves, restricted
+JAVA_ANT_DISABLE_ANT_CORE_DEP=true
+# rewriting build.xml for the testcases has no reason atm
+JAVA_PKG_BSFIX_ALL=no
+inherit versionator java-pkg-2 java-ant-2
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
+
+# @ECLASS-VARIABLE: ANT_TASK_JDKVER
+# @DESCRIPTION:
+# Affects the >=virtual/jdk version set in DEPEND string. Defaults to 1.5, can
+# be overridden from ebuild BEFORE inheriting this eclass.
+ANT_TASK_JDKVER=${ANT_TASK_JDKVER-1.5}
+
+# @ECLASS-VARIABLE: ANT_TASK_JREVER
+# @DESCRIPTION:
+# Affects the >=virtual/jre version set in DEPEND string. Defaults to 1.5, can
+# be overridden from ebuild BEFORE inheriting this eclass.
+ANT_TASK_JREVER=${ANT_TASK_JREVER-1.5}
+
+# @ECLASS-VARIABLE: ANT_TASK_NAME
+# @DESCRIPTION:
+# The name of this ant task as recognized by ant's build.xml, derived from $PN
+# by removing the ant- prefix. Read-only.
+ANT_TASK_NAME="${PN#ant-}"
+
+# @ECLASS-VARIABLE: ANT_TASK_DEPNAME
+# @DESCRIPTION:
+# Specifies JAVA_PKG_NAME (PN{-SLOT} used with java-pkg_jar-from) of the package
+# that this one depends on. Defaults to the name of ant task, ebuild can
+# override it before inheriting this eclass.
+ANT_TASK_DEPNAME=${ANT_TASK_DEPNAME-${ANT_TASK_NAME}}
+
+# @ECLASS-VARIABLE: ANT_TASK_DISABLE_VM_DEPS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set, no JDK/JRE deps are added.
+
+# @VARIABLE: ANT_TASK_PV
+# @INTERNAL
+# Version of ant-core this task is intended to register and thus load with.
+ANT_TASK_PV="${PV}"
+
+# special care for beta/RC releases
+if [[ ${PV} == *beta2* ]]; then
+ MY_PV=${PV/_beta2/beta}
+ UPSTREAM_PREFIX="http://people.apache.org/dist/ant/v1.7.1beta2/src"
+ GENTOO_PREFIX="http://dev.gentoo.org/~caster/distfiles"
+ ANT_TASK_PV=$(get_version_component_range 1-3)
+elif [[ ${PV} == *_rc* ]]; then
+ MY_PV=${PV/_rc/RC}
+ UPSTREAM_PREFIX="http://dev.gentoo.org/~caster/distfiles"
+ GENTOO_PREFIX="http://dev.gentoo.org/~caster/distfiles"
+ ANT_TASK_PV=$(get_version_component_range 1-3)
+else
+ # default for final releases
+ MY_PV=${PV}
+ UPSTREAM_PREFIX="mirror://apache/ant/source"
+ case ${PV} in
+ 1.9.*)
+ GENTOO_PREFIX="http://dev.gentoo.org/~tomwij/files/dist"
+ ;;
+ 1.8.4)
+ GENTOO_PREFIX="http://dev.gentoo.org/~sera/distfiles"
+ ;;
+ *)
+ GENTOO_PREFIX="http://dev.gentoo.org/~caster/distfiles"
+ ;;
+ esac
+fi
+
+# source/workdir name
+MY_P="apache-ant-${MY_PV}"
+
+# Default values for standard ebuild variables, can be overridden from ebuild.
+DESCRIPTION="Apache Ant's optional tasks depending on ${ANT_TASK_DEPNAME}"
+HOMEPAGE="http://ant.apache.org/"
+SRC_URI="${UPSTREAM_PREFIX}/${MY_P}-src.tar.bz2
+ ${GENTOO_PREFIX}/ant-${PV}-gentoo.tar.bz2"
+LICENSE="Apache-2.0"
+SLOT="0"
+
+RDEPEND="~dev-java/ant-core-${PV}:0"
+DEPEND="${RDEPEND}"
+
+if [[ -z "${ANT_TASK_DISABLE_VM_DEPS}" ]]; then
+ RDEPEND+=" >=virtual/jre-${ANT_TASK_JREVER}"
+ DEPEND+=" >=virtual/jdk-${ANT_TASK_JDKVER}"
+fi
+
+# we need direct blockers with old ant-tasks for file collisions - bug #252324
+if version_is_at_least 1.7.1 ; then
+ DEPEND+=" !dev-java/ant-tasks"
+fi
+
+# Would run the full ant test suite for every ant task
+RESTRICT="test"
+
+S="${WORKDIR}/${MY_P}"
+
+# @FUNCTION: ant-tasks_src_unpack
+# @USAGE: [ base ] [ jar-dep ] [ all ]
+# @DESCRIPTION:
+# The function is split into two parts, defaults to both of them ('all').
+#
+# base: performs the unpack, build.xml replacement and symlinks ant.jar from
+# ant-core
+#
+# jar-dep: symlinks the jar file(s) from dependency package
+ant-tasks_src_unpack() {
+ [[ -z "${1}" ]] && ant-tasks_src_unpack all
+
+ while [[ -n "${1}" ]]; do
+ case ${1} in
+ base)
+ unpack ${A}
+ cd "${S}"
+
+ # replace build.xml with our modified for split building
+ mv -f "${WORKDIR}"/build.xml .
+
+ cd lib
+ # remove bundled xerces
+ rm -f *.jar
+
+ # ant.jar to build against
+ java-pkg_jar-from --build-only ant-core ant.jar;;
+ jar-dep)
+ # get jar from the dependency package
+ if [[ -n "${ANT_TASK_DEPNAME}" ]]; then
+ java-pkg_jar-from ${ANT_TASK_DEPNAME}
+ fi;;
+ all)
+ ant-tasks_src_unpack base jar-dep;;
+ esac
+ shift
+ done
+
+}
+
+# @FUNCTION: ant-tasks_src_compile
+# @DESCRIPTION:
+# Compiles the jar with installed ant-core.
+ant-tasks_src_compile() {
+ ANT_TASKS="none" eant -Dbuild.dep=${ANT_TASK_NAME} jar-dep
+}
+
+# @FUNCTION: ant-tasks_src_install
+# @DESCRIPTION:
+# Installs the jar and registers its presence for the ant launcher script.
+# Version param ensures it won't get loaded (thus break) when ant-core is
+# updated to newer version.
+ant-tasks_src_install() {
+ java-pkg_dojar build/lib/${PN}.jar
+ java-pkg_register-ant-task --version "${ANT_TASK_PV}"
+
+ # create the compatibility symlink
+ if version_is_at_least 1.7.1_beta2; then
+ dodir /usr/share/ant/lib
+ dosym /usr/share/${PN}/lib/${PN}.jar /usr/share/ant/lib/${PN}.jar
+ fi
+}
diff --git a/eclass/apache-2.eclass b/eclass/apache-2.eclass
new file mode 100644
index 000000000000..549b43abcd60
--- /dev/null
+++ b/eclass/apache-2.eclass
@@ -0,0 +1,631 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: apache-2.eclass
+# @MAINTAINER:
+# polynomial-c@gentoo.org
+# @BLURB: Provides a common set of functions for apache-2.x ebuilds
+# @DESCRIPTION:
+# This eclass handles apache-2.x ebuild functions such as LoadModule generation
+# and inter-module dependency checking.
+
+inherit autotools eutils flag-o-matic multilib ssl-cert user toolchain-funcs versionator
+
+[[ ${CATEGORY}/${PN} != www-servers/apache ]] \
+ && die "Do not use this eclass with anything else than www-servers/apache ebuilds!"
+
+case ${EAPI:-0} in
+ 0|1|2|3|4)
+ die "This eclass requires >=EAPI-5"
+ ;;
+esac
+
+# settings which are version specific go in here:
+case $(get_version_component_range 1-2) in
+ 2.4)
+ DEFAULT_MPM_THREADED="event" #509922
+ RDEPEND=">=dev-libs/apr-1.5.1
+ !www-apache/mod_macro" #492578 #477702
+ ;;
+ *)
+ DEFAULT_MPM_THREADED="worker"
+ RDEPEND=">=dev-libs/apr-1.4.5" #368651
+ ;;
+esac
+
+# ==============================================================================
+# INTERNAL VARIABLES
+# ==============================================================================
+
+# @ECLASS-VARIABLE: GENTOO_PATCHNAME
+# @DESCRIPTION:
+# This internal variable contains the prefix for the patch tarball.
+# Defaults to the full name and version (including revision) of the package.
+# If you want to override this in an ebuild, use:
+# ORIG_PR="(revision of Gentoo stuff you want)"
+# GENTOO_PATCHNAME="gentoo-${PN}-${PV}${ORIG_PR:+-${ORIG_PR}}"
+[[ -n "$GENTOO_PATCHNAME" ]] || GENTOO_PATCHNAME="gentoo-${PF}"
+
+# @ECLASS-VARIABLE: GENTOO_PATCHDIR
+# @DESCRIPTION:
+# This internal variable contains the working directory where patches and config
+# files are located.
+# Defaults to the patchset name appended to the working directory.
+[[ -n "$GENTOO_PATCHDIR" ]] || GENTOO_PATCHDIR="${WORKDIR}/${GENTOO_PATCHNAME}"
+
+# @VARIABLE: GENTOO_DEVELOPER
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains the name of the
+# gentoo developer who created the patch tarball
+
+# @VARIABLE: GENTOO_PATCHSTAMP
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains the date the patch
+# tarball was created at in YYYYMMDD format
+
+# @VARIABLE: GENTOO_PATCH_A
+# @DESCRIPTION:
+# This variable should contain the entire filename of patch tarball.
+# Defaults to the name of the patchset, with a datestamp.
+[[ -n "$GENTOO_PATCH_A" ]] || GENTOO_PATCH_A="${GENTOO_PATCHNAME}-${GENTOO_PATCHSTAMP}.tar.bz2"
+
+SRC_URI="mirror://apache/httpd/httpd-${PV}.tar.bz2
+ http://dev.gentoo.org/~${GENTOO_DEVELOPER}/dist/apache/${GENTOO_PATCH_A}"
+
+# @VARIABLE: IUSE_MPMS_FORK
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a list of forking
+# (i.e. non-threaded) MPMs
+
+# @VARIABLE: IUSE_MPMS_THREAD
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a list of threaded
+# MPMs
+
+# @VARIABLE: IUSE_MODULES
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a list of available
+# built-in modules
+
+IUSE_MPMS="${IUSE_MPMS_FORK} ${IUSE_MPMS_THREAD}"
+IUSE="${IUSE} debug doc ldap selinux ssl static suexec threads"
+
+for module in ${IUSE_MODULES} ; do
+ IUSE="${IUSE} apache2_modules_${module}"
+done
+
+for mpm in ${IUSE_MPMS} ; do
+ IUSE="${IUSE} apache2_mpms_${mpm}"
+done
+
+DEPEND="dev-lang/perl
+ =dev-libs/apr-1*
+ =dev-libs/apr-util-1*[ldap?]
+ dev-libs/libpcre
+ apache2_modules_deflate? ( sys-libs/zlib )
+ apache2_modules_mime? ( app-misc/mime-types )
+ ldap? ( =net-nds/openldap-2* )
+ ssl? ( >=dev-libs/openssl-0.9.8m:0= )
+ !=www-servers/apache-1*"
+RDEPEND+=" ${DEPEND}
+ selinux? ( sec-policy/selinux-apache )"
+PDEPEND="~app-admin/apache-tools-${PV}"
+
+S="${WORKDIR}/httpd-${PV}"
+
+# ==============================================================================
+# INTERNAL FUNCTIONS
+# ==============================================================================
+
+# @ECLASS-VARIABLE: MY_MPM
+# @DESCRIPTION:
+# This internal variable contains the selected MPM after a call to setup_mpm()
+
+# @FUNCTION: setup_mpm
+# @DESCRIPTION:
+# This internal function makes sure that only one of APACHE2_MPMS was selected
+# or a default based on USE=threads is selected if APACHE2_MPMS is empty
+setup_mpm() {
+ MY_MPM=""
+ for x in ${IUSE_MPMS} ; do
+ if use apache2_mpms_${x} ; then
+ if [[ -z "${MY_MPM}" ]] ; then
+ MY_MPM=${x}
+ elog
+ elog "Selected MPM: ${MY_MPM}"
+ elog
+ else
+ eerror "You have selected more then one mpm USE-flag."
+ eerror "Only one MPM is supported."
+ die "more then one mpm was specified"
+ fi
+ fi
+ done
+
+ if [[ -z "${MY_MPM}" ]] ; then
+ if use threads ; then
+ MY_MPM=${DEFAULT_MPM_THREADED}
+ elog
+ elog "Selected default threaded MPM: ${MY_MPM}"
+ elog
+ else
+ MY_MPM=prefork
+ elog
+ elog "Selected default MPM: ${MY_MPM}"
+ elog
+ fi
+ fi
+
+ if has ${MY_MPM} ${IUSE_MPMS_THREAD} && ! use threads ; then
+ eerror "You have selected a threaded MPM but USE=threads is disabled"
+ die "invalid use flag combination"
+ fi
+
+ if has ${MY_MPM} ${IUSE_MPMS_FORK} && use threads ; then
+ eerror "You have selected a non-threaded MPM but USE=threads is enabled"
+ die "invalid use flag combination"
+ fi
+}
+
+# @VARIABLE: MODULE_CRITICAL
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a space-separated
+# list of modules critical for the default apache. A user may still
+# disable these modules for custom minimal installation at their own risk.
+
+# @FUNCTION: check_module_critical
+# @DESCRIPTION:
+# This internal function warns the user about modules critical for the default
+# apache configuration.
+check_module_critical() {
+ local unsupported=0
+
+ for m in ${MODULE_CRITICAL} ; do
+ if ! has ${m} ${MY_MODS[@]} ; then
+ ewarn "Module '${m}' is required in the default apache configuration."
+ unsupported=1
+ fi
+ done
+
+ if [[ ${unsupported} -ne 0 ]] ; then
+ ewarn
+ ewarn "You have disabled one or more required modules"
+ ewarn "for the default apache configuration."
+ ewarn "Although this is not an error, please be"
+ ewarn "aware that this setup is UNSUPPORTED."
+ ewarn
+ fi
+}
+
+# @VARIABLE: MODULE_DEPENDS
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a space-separated
+# list of dependency tokens each with a module and the module it depends on
+# separated by a colon
+
+# @FUNCTION: check_module_depends
+# @DESCRIPTION:
+# This internal function makes sure that all inter-module dependencies are
+# satisfied with the current module selection
+check_module_depends() {
+ local err=0
+
+ for m in ${MY_MODS[@]} ; do
+ for dep in ${MODULE_DEPENDS} ; do
+ if [[ "${m}" == "${dep%:*}" ]] ; then
+ if ! use apache2_modules_${dep#*:} ; then
+ eerror "Module '${m}' depends on '${dep#*:}'"
+ err=1
+ fi
+ fi
+ done
+ done
+
+ if [[ ${err} -ne 0 ]] ; then
+ die "invalid use flag combination"
+ fi
+}
+
+# @ECLASS-VARIABLE: MY_CONF
+# @DESCRIPTION:
+# This internal variable contains the econf options for the current module
+# selection after a call to setup_modules()
+
+# @ECLASS-VARIABLE: MY_MODS
+# @DESCRIPTION:
+# This internal variable contains a sorted, space separated list of currently
+# selected modules after a call to setup_modules()
+
+# @FUNCTION: setup_modules
+# @DESCRIPTION:
+# This internal function selects all built-in modules based on USE flags and
+# APACHE2_MODULES USE_EXPAND flags
+setup_modules() {
+ local mod_type=
+
+ if use static ; then
+ mod_type="static"
+ else
+ mod_type="shared"
+ fi
+
+ MY_CONF=( --enable-so=static )
+ MY_MODS=()
+
+ if use ldap ; then
+ MY_CONF+=( --enable-authnz_ldap=${mod_type} --enable-ldap=${mod_type} )
+ MY_MODS+=( ldap authnz_ldap )
+ else
+ MY_CONF+=( --disable-authnz_ldap --disable-ldap )
+ fi
+
+ if use ssl ; then
+ MY_CONF+=( --with-ssl="${EPREFIX}"/usr --enable-ssl=${mod_type} )
+ MY_MODS+=( ssl )
+ else
+ MY_CONF+=( --without-ssl --disable-ssl )
+ fi
+
+ if use suexec ; then
+ elog "You can manipulate several configure options of suexec"
+ elog "through the following environment variables:"
+ elog
+ elog " SUEXEC_SAFEPATH: Default PATH for suexec (default: '${EPREFIX}/usr/local/bin:${EPREFIX}/usr/bin:${EPREFIX}/bin')"
+ elog " SUEXEC_LOGFILE: Path to the suexec logfile (default: '${EPREFIX}/var/log/apache2/suexec_log')"
+ elog " SUEXEC_CALLER: Name of the user Apache is running as (default: apache)"
+ elog " SUEXEC_DOCROOT: Directory in which suexec will run scripts (default: '${EPREFIX}/var/www')"
+ elog " SUEXEC_MINUID: Minimum UID, which is allowed to run scripts via suexec (default: 1000)"
+ elog " SUEXEC_MINGID: Minimum GID, which is allowed to run scripts via suexec (default: 100)"
+ elog " SUEXEC_USERDIR: User subdirectories (like /home/user/html) (default: public_html)"
+ elog " SUEXEC_UMASK: Umask for the suexec process (default: 077)"
+ elog
+
+ MY_CONF+=( --with-suexec-safepath="${SUEXEC_SAFEPATH:-${EPREFIX}/usr/local/bin:${EPREFIX}/usr/bin:${EPREFIX}/bin}" )
+ MY_CONF+=( --with-suexec-logfile="${SUEXEC_LOGFILE:-${EPREFIX}/var/log/apache2/suexec_log}" )
+ MY_CONF+=( --with-suexec-bin="${EPREFIX}/usr/sbin/suexec" )
+ MY_CONF+=( --with-suexec-userdir=${SUEXEC_USERDIR:-public_html} )
+ MY_CONF+=( --with-suexec-caller=${SUEXEC_CALLER:-apache} )
+ MY_CONF+=( --with-suexec-docroot="${SUEXEC_DOCROOT:-${EPREFIX}/var/www}" )
+ MY_CONF+=( --with-suexec-uidmin=${SUEXEC_MINUID:-1000} )
+ MY_CONF+=( --with-suexec-gidmin=${SUEXEC_MINGID:-100} )
+ MY_CONF+=( --with-suexec-umask=${SUEXEC_UMASK:-077} )
+ MY_CONF+=( --enable-suexec=${mod_type} )
+ MY_MODS+=( suexec )
+ else
+ MY_CONF+=( --disable-suexec )
+ fi
+
+ for x in ${IUSE_MODULES} ; do
+ if use apache2_modules_${x} ; then
+ MY_CONF+=( --enable-${x}=${mod_type} )
+ MY_MODS+=( ${x} )
+ else
+ MY_CONF+=( --disable-${x} )
+ fi
+ done
+
+ # sort and uniquify MY_MODS
+ MY_MODS=( $(echo ${MY_MODS[@]} | tr ' ' '\n' | sort -u) )
+ check_module_depends
+ check_module_critical
+}
+
+# @VARIABLE: MODULE_DEFINES
+# @DESCRIPTION:
+# This variable needs to be set in the ebuild and contains a space-separated
+# list of tokens each mapping a module to a runtime define which can be
+# specified in APACHE2_OPTS in /etc/conf.d/apache2 to enable this particular
+# module.
+
+# @FUNCTION: generate_load_module
+# @DESCRIPTION:
+# This internal function generates the LoadModule lines for httpd.conf based on
+# the current module selection and MODULE_DEFINES
+generate_load_module() {
+ local endit=0 mod_lines= mod_dir="${ED}/usr/$(get_libdir)/apache2/modules"
+
+ if use static; then
+ sed -i -e "/%%LOAD_MODULE%%/d" \
+ "${GENTOO_PATCHDIR}"/conf/httpd.conf
+ return
+ fi
+
+ for m in ${MY_MODS[@]} ; do
+ if [[ -e "${mod_dir}/mod_${m}.so" ]] ; then
+ for def in ${MODULE_DEFINES} ; do
+ if [[ "${m}" == "${def%:*}" ]] ; then
+ mod_lines="${mod_lines}\n<IfDefine ${def#*:}>"
+ endit=1
+ fi
+ done
+
+ mod_lines="${mod_lines}\nLoadModule ${m}_module modules/mod_${m}.so"
+
+ if [[ ${endit} -ne 0 ]] ; then
+ mod_lines="${mod_lines}\n</IfDefine>"
+ endit=0
+ fi
+ fi
+ done
+
+ sed -i -e "s:%%LOAD_MODULE%%:${mod_lines}:" \
+ "${GENTOO_PATCHDIR}"/conf/httpd.conf
+}
+
+# @FUNCTION: check_upgrade
+# @DESCRIPTION:
+# This internal function checks if the previous configuration file for built-in
+# modules exists in ROOT and prevents upgrade in this case. Users are supposed
+# to convert this file to the new APACHE2_MODULES USE_EXPAND variable and remove
+# it afterwards.
+check_upgrade() {
+ if [[ -e "${EROOT}"etc/apache2/apache2-builtin-mods ]]; then
+ eerror "The previous configuration file for built-in modules"
+ eerror "(${EROOT}etc/apache2/apache2-builtin-mods) exists on your"
+ eerror "system."
+ eerror
+ eerror "Please read http://www.gentoo.org/doc/en/apache-upgrading.xml"
+ eerror "for detailed information how to convert this file to the new"
+ eerror "APACHE2_MODULES USE_EXPAND variable."
+ eerror
+ die "upgrade not possible with existing ${ROOT}etc/apache2/apache2-builtin-mods"
+ fi
+}
+
+# ==============================================================================
+# EXPORTED FUNCTIONS
+# ==============================================================================
+
+# @FUNCTION: apache-2_pkg_setup
+# @DESCRIPTION:
+# This function selects built-in modules, the MPM and other configure options,
+# creates the apache user and group and informs about CONFIG_SYSVIPC being
+# needed (we don't depend on kernel sources and therefore cannot check).
+apache-2_pkg_setup() {
+ check_upgrade
+
+ # setup apache user and group
+ enewgroup apache 81
+ enewuser apache 81 -1 /var/www apache
+
+ setup_mpm
+ setup_modules
+
+ if use debug; then
+ MY_CONF+=( --enable-maintainer-mode --enable-exception-hook )
+ fi
+
+ elog "Please note that you need SysV IPC support in your kernel."
+ elog "Make sure CONFIG_SYSVIPC=y is set."
+ elog
+
+ if use userland_BSD; then
+ elog "On BSD systems you need to add the following line to /boot/loader.conf:"
+ elog " accf_http_load=\"YES\""
+ elog
+ fi
+}
+
+# @FUNCTION: apache-2_src_prepare
+# @DESCRIPTION:
+# This function applies patches, configures a custom file-system layout and
+# rebuilds the configure scripts.
+apache-2_src_prepare() {
+ #fix prefix in conf files etc (bug #433736)
+ use !prefix || sed -e "s@/\(usr\|var\|etc\|run\)/@${EPREFIX}&@g" \
+ -i "${GENTOO_PATCHDIR}"/conf/httpd.conf "${GENTOO_PATCHDIR}"/scripts/* \
+ "${GENTOO_PATCHDIR}"/docs/*.example "${GENTOO_PATCHDIR}"/patches/*.layout \
+ "${GENTOO_PATCHDIR}"/init/* "${GENTOO_PATCHDIR}"/conf/vhosts.d/* \
+ "${GENTOO_PATCHDIR}"/conf/modules.d/* || die
+
+ # 03_all_gentoo-apache-tools.patch injects -Wl,-z,now, which is not a good
+ # idea for everyone
+ case ${CHOST} in
+ *-linux-gnu|*-solaris*|*-freebsd*)
+ # do nothing, these use GNU binutils
+ :
+ ;;
+ *-darwin*)
+ sed -i -e 's/-Wl,-z,now/-Wl,-bind_at_load/g' \
+ "${GENTOO_PATCHDIR}"/patches/03_all_gentoo_apache-tools.patch
+ ;;
+ *)
+ # patch it out to be like upstream
+ sed -i -e 's/-Wl,-z,now//g' \
+ "${GENTOO_PATCHDIR}"/patches/03_all_gentoo_apache-tools.patch
+ ;;
+ esac
+
+ # Use correct multilib libdir in gentoo patches
+ sed -i -e "s:/usr/lib:/usr/$(get_libdir):g" \
+ "${GENTOO_PATCHDIR}"/{conf/httpd.conf,init/*,patches/config.layout} \
+ || die "libdir sed failed"
+
+ epatch "${GENTOO_PATCHDIR}"/patches/*.patch
+
+ # setup the filesystem layout config
+ cat "${GENTOO_PATCHDIR}"/patches/config.layout >> "${S}"/config.layout || \
+ die "Failed preparing config.layout!"
+ sed -i -e "s:version:${PF}:g" "${S}"/config.layout
+
+ # apache2.8 instead of httpd.8 (bug #194828)
+ mv docs/man/{httpd,apache2}.8
+ sed -i -e 's/httpd\.8/apache2.8/g' Makefile.in
+
+ # patched-in MPMs need the build environment rebuilt
+ sed -i -e '/sinclude/d' configure.in
+ AT_M4DIR=build eautoreconf
+
+ # ${T} must be not group-writable, else grsec TPE will block it
+ chmod g-w "${T}"
+
+ # This package really should upgrade to using pcre's .pc file.
+ cat <<-\EOF >"${T}"/pcre-config
+ #!/bin/sh
+ [ "${flag}" = "--version" ] && set -- --modversion
+ exec ${PKG_CONFIG} libpcre "$@"
+ EOF
+ chmod a+x "${T}"/pcre-config
+}
+
+# @FUNCTION: apache-2_src_configure
+# @DESCRIPTION:
+# This function adds compiler flags and runs econf and emake based on MY_MPM and
+# MY_CONF
+apache-2_src_configure() {
+ tc-export PKG_CONFIG
+
+ # Sanity check in case people have bad mounts/TPE settings. #500928
+ if ! "${T}"/pcre-config --help >/dev/null ; then
+ eerror "Could not execute ${T}/pcre-config; do you have bad mount"
+ eerror "permissions in ${T} or have TPE turned on in your kernel?"
+ die "check your runtime settings #500928"
+ fi
+
+ # Instead of filtering --as-needed (bug #128505), append --no-as-needed
+ # Thanks to Harald van Dijk
+ append-ldflags $(no-as-needed)
+
+ # peruser MPM debugging with -X is nearly impossible
+ if has peruser ${IUSE_MPMS} && use apache2_mpms_peruser ; then
+ use debug && append-flags -DMPM_PERUSER_DEBUG
+ fi
+
+ # econf overwrites the stuff from config.layout, so we have to put them into
+ # our myconf line too
+ ac_cv_path_PKGCONFIG=${PKG_CONFIG} \
+ econf \
+ --includedir="${EPREFIX}"/usr/include/apache2 \
+ --libexecdir="${EPREFIX}"/usr/$(get_libdir)/apache2/modules \
+ --datadir="${EPREFIX}"/var/www/localhost \
+ --sysconfdir="${EPREFIX}"/etc/apache2 \
+ --localstatedir="${EPREFIX}"/var \
+ --with-mpm=${MY_MPM} \
+ --with-apr="${SYSROOT}${EPREFIX}"/usr \
+ --with-apr-util="${SYSROOT}${EPREFIX}"/usr \
+ --with-pcre="${T}"/pcre-config \
+ --with-z="${EPREFIX}"/usr \
+ --with-port=80 \
+ --with-program-name=apache2 \
+ --enable-layout=Gentoo \
+ "${MY_CONF[@]}"
+
+ sed -i -e 's:apache2\.conf:httpd.conf:' include/ap_config_auto.h
+}
+
+# @FUNCTION: apache-2_src_install
+# @DESCRIPTION:
+# This function runs `emake install' and generates, installs and adapts the gentoo
+# specific configuration files found in the tarball
+apache-2_src_install() {
+ emake DESTDIR="${D}" MKINSTALLDIRS="mkdir -p" install
+
+ # install our configuration files
+ keepdir /etc/apache2/vhosts.d
+ keepdir /etc/apache2/modules.d
+
+ generate_load_module
+ insinto /etc/apache2
+ doins -r "${GENTOO_PATCHDIR}"/conf/*
+ use apache2_modules_mime_magic && doins docs/conf/magic
+
+ insinto /etc/logrotate.d
+ newins "${GENTOO_PATCHDIR}"/scripts/apache2-logrotate apache2
+
+ # generate a sane default APACHE2_OPTS
+ APACHE2_OPTS="-D DEFAULT_VHOST -D INFO"
+ use doc && APACHE2_OPTS="${APACHE2_OPTS} -D MANUAL"
+ use ssl && APACHE2_OPTS="${APACHE2_OPTS} -D SSL -D SSL_DEFAULT_VHOST"
+ use suexec && APACHE2_OPTS="${APACHE2_OPTS} -D SUEXEC"
+ if has negotiation ${APACHE2_MODULES} && use apache2_modules_negotiation; then
+ APACHE2_OPTS="${APACHE2_OPTS} -D LANGUAGE"
+ fi
+
+ sed -i -e "s:APACHE2_OPTS=\".*\":APACHE2_OPTS=\"${APACHE2_OPTS}\":" \
+ "${GENTOO_PATCHDIR}"/init/apache2.confd || die "sed failed"
+
+ newconfd "${GENTOO_PATCHDIR}"/init/apache2.confd apache2
+ newinitd "${GENTOO_PATCHDIR}"/init/apache2.initd apache2
+
+ # install apache2ctl wrapper for our init script if available
+ if test -e "${GENTOO_PATCHDIR}"/scripts/apache2ctl; then
+ exeinto /usr/sbin
+ doexe "${GENTOO_PATCHDIR}"/scripts/apache2ctl
+ else
+ dosym /etc/init.d/apache2 /usr/sbin/apache2ctl
+ fi
+
+ # provide legacy symlink for apxs, bug 177697
+ dosym apxs /usr/sbin/apxs2
+
+ # install some documentation
+ dodoc ABOUT_APACHE CHANGES LAYOUT README README.platforms VERSIONING
+ dodoc "${GENTOO_PATCHDIR}"/docs/*
+
+ # drop in a convenient link to the manual
+ if use doc ; then
+ sed -i -e "s:VERSION:${PVR}:" "${ED}/etc/apache2/modules.d/00_apache_manual.conf"
+ docompress -x /usr/share/doc/${PF}/manual # 503640
+ else
+ rm -f "${ED}/etc/apache2/modules.d/00_apache_manual.conf"
+ rm -Rf "${ED}/usr/share/doc/${PF}/manual"
+ fi
+
+ # the default icons and error pages get stored in
+ # /usr/share/apache2/{error,icons}
+ dodir /usr/share/apache2
+ mv -f "${ED}/var/www/localhost/error" "${ED}/usr/share/apache2/error"
+ mv -f "${ED}/var/www/localhost/icons" "${ED}/usr/share/apache2/icons"
+ rm -rf "${ED}/var/www/localhost/"
+ eend $?
+
+ # set some sane permissions for suexec
+ if use suexec ; then
+ fowners 0:${SUEXEC_CALLER:-apache} /usr/sbin/suexec
+ fperms 4710 /usr/sbin/suexec
+ # provide legacy symlink for suexec, bug 177697
+ dosym /usr/sbin/suexec /usr/sbin/suexec2
+ fi
+
+ # empty dirs
+ for i in /var/lib/dav /var/log/apache2 /var/cache/apache2 ; do
+ keepdir ${i}
+ fowners apache:apache ${i}
+ fperms 0750 ${i}
+ done
+}
+
+# @FUNCTION: apache-2_pkg_postinst
+# @DESCRIPTION:
+# This function creates test certificates if SSL is enabled and installs the
+# default index.html to /var/www/localhost if it does not exist. We do this here
+# because the default webroot is a copy of the files that exist elsewhere and we
+# don't want them to be managed/removed by portage when apache is upgraded.
+apache-2_pkg_postinst() {
+ if use ssl && [[ ! -e "${EROOT}/etc/ssl/apache2/server.pem" ]]; then
+ SSL_ORGANIZATION="${SSL_ORGANIZATION:-Apache HTTP Server}"
+ install_cert /etc/ssl/apache2/server
+ ewarn
+ ewarn "The location of SSL certificates has changed. If you are"
+ ewarn "upgrading from ${CATEGORY}/${PN}-2.2.13 or earlier (or remerged"
+ ewarn "*any* apache version), you might want to move your old"
+ ewarn "certificates from /etc/apache2/ssl/ to /etc/ssl/apache2/ and"
+ ewarn "update your config files."
+ ewarn
+ fi
+
+ if [[ ! -e "${EROOT}/var/www/localhost" ]] ; then
+ mkdir -p "${EROOT}/var/www/localhost/htdocs"
+ echo "<html><body><h1>It works!</h1></body></html>" > "${EROOT}/var/www/localhost/htdocs/index.html"
+ fi
+
+ echo
+ elog "Attention: cgi and cgid modules are now handled via APACHE2_MODULES flags"
+ elog "in make.conf. Make sure to enable those in order to compile them."
+ elog "In general, you should use 'cgid' with threaded MPMs and 'cgi' otherwise."
+ echo
+
+}
+
+EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_install pkg_postinst
diff --git a/eclass/apache-module.eclass b/eclass/apache-module.eclass
new file mode 100644
index 000000000000..16fbddd79237
--- /dev/null
+++ b/eclass/apache-module.eclass
@@ -0,0 +1,239 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: apache-module.eclass
+# @MAINTAINER:
+# apache-devs@gentoo.org
+# @BLURB: Provides a common set of functions for apache modules
+# @DESCRIPTION:
+# This eclass handles apache modules in a sane way.
+#
+# To make use of this eclass simply call one of the need/want_apache functions
+# described in depend.apache.eclass. Make sure you use the need/want_apache call
+# after you have defined DEPEND and RDEPEND. Also note that you can not rely on
+# the automatic RDEPEND=DEPEND that portage does if you use this eclass.
+#
+# See Bug 107127 for more information.
+#
+# @EXAMPLE:
+#
+# Here is a simple example of an ebuild for mod_foo:
+#
+# @CODE
+# APACHE2_MOD_CONF="42_mod_foo"
+# APACHE2_MOD_DEFINE="FOO"
+# need_apache2
+# @CODE
+#
+# A more complicated example for a module with non-standard locations:
+#
+# @CODE
+# APXS2_S="${S}/apache22/src"
+# APACHE2_MOD_FILE="${APXS2_S}/${PN}.so"
+# APACHE2_MOD_CONF="42_${PN}"
+# APACHE2_MOD_DEFINE="FOO"
+# DOCFILES="docs/*.html"
+# need_apache2_2
+# @CODE
+#
+# A basic module configuration which just loads the module into apache:
+#
+# @CODE
+# <IfDefine FOO>
+# LoadModule foo_module modules/mod_foo.so
+# </IfDefine>
+# @CODE
+
+inherit depend.apache
+
+# ==============================================================================
+# PUBLIC VARIABLES
+# ==============================================================================
+
+# @VARIABLE: APXS2_S
+# @DESCRIPTION:
+# Path to temporary build directory. (Defaults to `${S}/src' if it exists,
+# `${S}' otherwise)
+
+# @VARIABLE: APXS2_ARGS
+# @DESCRIPTION:
+# Arguments to pass to the apxs tool. (Defaults to `-c ${PN}.c')
+
+# @VARIABLE: APACHE2_EXECFILES
+# @DESCRIPTION:
+# List of files that will be installed into ${APACHE_MODULE_DIR} beside
+# ${APACHE2_MOD_FILE}. In addition, this function also sets the executable
+# permission on those files.
+
+# @VARIABLE: APACHE2_MOD_CONF
+# @DESCRIPTION:
+# Module configuration file installed by src_install (minus the .conf suffix and
+# relative to ${FILESDIR}).
+
+# @VARIABLE: APACHE2_MOD_DEFINE
+# @DESCRIPTION:
+# Name of define (e.g. FOO) to use in conditional loading of the installed
+# module/its config file, multiple defines should be space separated.
+
+# @VARIABLE: APACHE2_MOD_FILE
+# @DESCRIPTION:
+# Name of the module that src_install installs minus the .so suffix. (Defaults
+# to `${APXS2_S}/.libs/${PN}.so')
+
+# @VARIABLE: APACHE2_VHOST_CONF
+# @DESCRIPTION:
+# Virtual host configuration file installed by src_install (minus the .conf
+# suffix and relative to ${FILESDIR}).
+
+# @VARIABLE: DOCFILES
+# @DESCRIPTION:
+# If the exported src_install() is being used, and ${DOCFILES} is non-empty, some
+# sed-fu is applied to split out html documentation (if any) from normal
+# documentation, and dodoc'd or dohtml'd.
+
+# ==============================================================================
+# INTERNAL FUNCTIONS
+# ==============================================================================
+
+# Internal function to construct the default ${APXS2_S} path if required.
+apache_cd_dir() {
+ debug-print-function $FUNCNAME $*
+
+ local CD_DIR="${APXS2_S}"
+
+ if [[ -z "${CD_DIR}" ]] ; then
+ if [[ -d "${S}/src" ]] ; then
+ CD_DIR="${S}/src"
+ else
+ CD_DIR="${S}"
+ fi
+ fi
+
+ debug-print $FUNCNAME "CD_DIR=${CD_DIR}"
+ echo "${CD_DIR}"
+}
+
+# Internal function to construct the default ${APACHE2_MOD_FILE} if required.
+apache_mod_file() {
+ debug-print-function $FUNCNAME $*
+
+ local MOD_FILE="${APACHE2_MOD_FILE:-$(apache_cd_dir)/.libs/${PN}.so}"
+
+ debug-print $FUNCNAME "MOD_FILE=${MOD_FILE}"
+ echo "${MOD_FILE}"
+}
+
+# Internal function for picking out html files from ${DOCFILES}. It takes an
+# optional first argument `html'; if the first argument is equals `html', only
+# html files are returned, otherwise normal (non-html) docs are returned.
+apache_doc_magic() {
+ debug-print-function $FUNCNAME $*
+
+ local DOCS=
+
+ if [[ -n "${DOCFILES}" ]] ; then
+ if [[ "x$1" == "xhtml" ]] ; then
+ DOCS="`echo ${DOCFILES} | sed -e 's/ /\n/g' | sed -e '/^[^ ]*.html$/ !d'`"
+ else
+ DOCS="`echo ${DOCFILES} | sed 's, *[^ ]*\+.html, ,g'`"
+ fi
+ fi
+
+ debug-print $FUNCNAME "DOCS=${DOCS}"
+ echo "${DOCS}"
+}
+
+# ==============================================================================
+# EXPORTED FUNCTIONS
+# ==============================================================================
+
+# @FUNCTION: apache-module_src_compile
+# @DESCRIPTION:
+# The default action is to call ${APXS} with the value of ${APXS2_ARGS}. If a
+# module requires a different build setup than this, use ${APXS} in your own
+# src_compile routine.
+apache-module_src_compile() {
+ debug-print-function $FUNCNAME $*
+
+ local CD_DIR=$(apache_cd_dir)
+ cd "${CD_DIR}" || die "cd ${CD_DIR} failed"
+
+ APXS2_ARGS="${APXS2_ARGS:--c ${PN}.c}"
+ ${APXS} ${APXS2_ARGS} || die "${APXS} ${APXS2_ARGS} failed"
+}
+
+# @FUNCTION: apache-module_src_install
+# @DESCRIPTION:
+# This installs the files into apache's directories. The module is installed
+# from a directory chosen as above (apache_cd_dir). In addition, this function
+# can also set the executable permission on files listed in
+# ${APACHE2_EXECFILES}. The configuration file name is listed in
+# ${APACHE2_MOD_CONF} without the .conf extensions, so if your configuration is
+# 55_mod_foo.conf, APACHE2_MOD_CONF would be 55_mod_foo. ${DOCFILES} contains
+# the list of files you want filed as documentation.
+apache-module_src_install() {
+	debug-print-function $FUNCNAME $*
+
+	local CD_DIR=$(apache_cd_dir)
+	pushd "${CD_DIR}" >/dev/null || die "cd ${CD_DIR} failed"
+
+	local MOD_FILE=$(apache_mod_file)
+
+	exeinto "${APACHE_MODULESDIR}"
+	doexe ${MOD_FILE} || die "internal ebuild error: '${MOD_FILE}' not found"
+	[[ -n "${APACHE2_EXECFILES}" ]] && doexe ${APACHE2_EXECFILES}
+
+	if [[ -n "${APACHE2_MOD_CONF}" ]] ; then
+		insinto "${APACHE_MODULES_CONFDIR}"
+		set -- ${APACHE2_MOD_CONF}
+		newins "${FILESDIR}/${1}.conf" "$(basename ${2:-$1}).conf" \
+			|| die "internal ebuild error: '${FILESDIR}/${1}.conf' not found"
+	fi
+
+	if [[ -n "${APACHE2_VHOST_CONF}" ]] ; then
+		insinto "${APACHE_VHOSTS_CONFDIR}"
+		set -- ${APACHE2_VHOST_CONF}
+		newins "${FILESDIR}/${1}.conf" "$(basename ${2:-$1}).conf" \
+			|| die "internal ebuild error: '${FILESDIR}/${1}.conf' not found"
+	fi
+
+	cd "${S}"
+
+	if [[ -n "${DOCFILES}" ]] ; then
+		local OTHER_DOCS=$(apache_doc_magic)
+		local HTML_DOCS=$(apache_doc_magic html)
+
+		[[ -n "${OTHER_DOCS}" ]] && dodoc ${OTHER_DOCS}
+		[[ -n "${HTML_DOCS}" ]] && dohtml ${HTML_DOCS}
+	fi
+
+	popd >/dev/null
+}
+
+# @FUNCTION: apache-module_pkg_postinst
+# @DESCRIPTION:
+# This prints out information about the installed module and how to enable it.
+apache-module_pkg_postinst() {
+ debug-print-function $FUNCNAME $*
+
+ if [[ -n "${APACHE2_MOD_DEFINE}" ]] ; then
+ local my_opts="-D ${APACHE2_MOD_DEFINE// / -D }"
+
+ einfo
+ einfo "To enable ${PN}, you need to edit your /etc/conf.d/apache2 file and"
+ einfo "add '${my_opts}' to APACHE2_OPTS."
+ einfo
+ fi
+
+ if [[ -n "${APACHE2_MOD_CONF}" ]] ; then
+ set -- ${APACHE2_MOD_CONF}
+ einfo
+ einfo "Configuration file installed as"
+ einfo " ${APACHE_MODULES_CONFDIR}/$(basename ${2:-$1}).conf"
+ einfo "You may want to edit it before turning the module on in /etc/conf.d/apache2"
+ einfo
+ fi
+}
+
+EXPORT_FUNCTIONS src_compile src_install pkg_postinst
diff --git a/eclass/aspell-dict.eclass b/eclass/aspell-dict.eclass
new file mode 100644
index 000000000000..6207f6a15fc9
--- /dev/null
+++ b/eclass/aspell-dict.eclass
@@ -0,0 +1,66 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: aspell-dict.eclass
+# @MAINTAINER:
+# maintainer-needed@gentoo.org
+# @AUTHOR:
+# Original author: Seemant Kulleen
+# @BLURB: An eclass to streamline the construction of ebuilds for new aspell dicts
+# @DESCRIPTION:
+# The aspell-dict eclass is designed to streamline the construction of
+# ebuilds for the new aspell dictionaries (from gnu.org) which support
+# aspell-0.50. Support for aspell-0.60 has been added by Sergey Ulanov.
+
+# @ECLASS-VARIABLE: ASPELL_LANG
+# @REQUIRED
+# @DESCRIPTION:
+# Which language is the dictionary for? It's used for the DESCRIPTION of the
+# package.
+
+# @ECLASS-VARIABLE: ASPOSTFIX
+# @REQUIRED
+# @DESCRIPTION:
+# What major version of aspell is this dictionary for?
+
+EXPORT_FUNCTIONS src_compile src_install
+
+#MY_P=${PN}-${PV%.*}-${PV#*.*.}
+MY_P=${P%.*}-${PV##*.}
+MY_P=aspell${ASPOSTFIX}-${MY_P/aspell-/}
+SPELLANG=${PN/aspell-/}
+S="${WORKDIR}/${MY_P}"
+DESCRIPTION="${ASPELL_LANG} language dictionary for aspell"
+HOMEPAGE="http://aspell.net"
+SRC_URI="mirror://gnu/aspell/dict/${SPELLANG}/${MY_P}.tar.bz2"
+
+IUSE=""
+SLOT="0"
+
+if [ x${ASPOSTFIX} = x6 ] ; then
+ RDEPEND=">=app-text/aspell-0.60"
+ DEPEND="${RDEPEND}"
+else
+ RDEPEND=">=app-text/aspell-0.50"
+ DEPEND="${RDEPEND}"
+fi
+
+# @FUNCTION: aspell-dict_src_compile
+# @DESCRIPTION:
+# The aspell-dict src_compile function which is exported.
+aspell-dict_src_compile() {
+ ./configure || die
+ emake || die
+}
+
+# @FUNCTION: aspell-dict_src_install
+# @DESCRIPTION:
+# The aspell-dict src_install function which is exported.
+aspell-dict_src_install() {
+ make DESTDIR="${D}" install || die
+
+ for doc in README info ; do
+ [ -s "$doc" ] && dodoc $doc
+ done
+}
diff --git a/eclass/autotools-multilib.eclass b/eclass/autotools-multilib.eclass
new file mode 100644
index 000000000000..758a6191cedb
--- /dev/null
+++ b/eclass/autotools-multilib.eclass
@@ -0,0 +1,90 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: autotools-multilib.eclass
+# @MAINTAINER:
+# gx86-multilib team <multilib@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# @BLURB: autotools-utils wrapper for multilib builds
+# @DESCRIPTION:
+# The autotools-multilib.eclass provides a glue between
+# autotools-utils.eclass(5) and multilib-minimal.eclass(5), aiming
+# to provide a convenient way to build packages using autotools
+# for multiple ABIs.
+#
+# Inheriting this eclass sets IUSE and exports default multilib_src_*()
+# sub-phases that call autotools-utils phase functions for each ABI
+# enabled. The multilib_src_*() functions can be defined in ebuild just
+# like in multilib-minimal.
+
+# EAPI=4 is required for meaningful MULTILIB_USEDEP.
+case ${EAPI:-0} in
+ 4|5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+inherit autotools-utils eutils multilib-build multilib-minimal
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
+
+# Note: _at_args[@] passing is a backwards compatibility measure.
+# Don't use it in new packages.
+
+autotools-multilib_src_prepare() {
+ autotools-utils_src_prepare "${@}"
+
+ [[ ${AUTOTOOLS_IN_SOURCE_BUILD} ]] && multilib_copy_sources
+}
+
+multilib_src_configure() {
+ [[ ${AUTOTOOLS_IN_SOURCE_BUILD} ]] && local ECONF_SOURCE=${BUILD_DIR}
+ autotools-utils_src_configure "${_at_args[@]}"
+}
+
+autotools-multilib_src_configure() {
+ local _at_args=( "${@}" )
+
+ multilib-minimal_src_configure
+}
+
+multilib_src_compile() {
+ emake "${_at_args[@]}"
+}
+
+autotools-multilib_src_compile() {
+ local _at_args=( "${@}" )
+
+ multilib-minimal_src_compile
+}
+
+multilib_src_test() {
+ autotools-utils_src_test "${_at_args[@]}"
+}
+
+autotools-multilib_src_test() {
+ local _at_args=( "${@}" )
+
+ multilib-minimal_src_test
+}
+
+multilib_src_install() {
+ emake DESTDIR="${D}" "${_at_args[@]}" install
+}
+
+multilib_src_install_all() {
+ einstalldocs
+
+ # Remove libtool files and unnecessary static libs
+ local prune_ltfiles=${AUTOTOOLS_PRUNE_LIBTOOL_FILES}
+ if [[ ${prune_ltfiles} != none ]]; then
+ prune_libtool_files ${prune_ltfiles:+--${prune_ltfiles}}
+ fi
+}
+
+autotools-multilib_src_install() {
+ local _at_args=( "${@}" )
+
+ multilib-minimal_src_install
+}
diff --git a/eclass/autotools-utils.eclass b/eclass/autotools-utils.eclass
new file mode 100644
index 000000000000..de3c65a78303
--- /dev/null
+++ b/eclass/autotools-utils.eclass
@@ -0,0 +1,403 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: autotools-utils.eclass
+# @MAINTAINER:
+# Maciej Mrozowski <reavertm@gentoo.org>
+# Michał Górny <mgorny@gentoo.org>
+# @BLURB: common ebuild functions for autotools-based packages
+# @DESCRIPTION:
+# autotools-utils.eclass is autotools.eclass(5) and base.eclass(5) wrapper
+# providing all inherited features along with econf arguments as Bash array,
+# out of source build with overridable build dir location, static archives
+# handling, libtool files removal.
+#
+# Please note that autotools-utils does not support mixing of its phase
+# functions with regular econf/emake calls. If necessary, please call
+# autotools-utils_src_compile instead of the latter.
+#
+# @EXAMPLE:
+# Typical ebuild using autotools-utils.eclass:
+#
+# @CODE
+# EAPI="2"
+#
+# inherit autotools-utils
+#
+# DESCRIPTION="Foo bar application"
+# HOMEPAGE="http://example.org/foo/"
+# SRC_URI="mirror://sourceforge/foo/${P}.tar.bz2"
+#
+# LICENSE="LGPL-2.1"
+# KEYWORDS=""
+# SLOT="0"
+# IUSE="debug doc examples qt4 static-libs tiff"
+#
+# CDEPEND="
+# media-libs/libpng:0
+# qt4? (
+# dev-qt/qtcore:4
+# dev-qt/qtgui:4
+# )
+# tiff? ( media-libs/tiff:0 )
+# "
+# RDEPEND="${CDEPEND}
+# !media-gfx/bar
+# "
+# DEPEND="${CDEPEND}
+# doc? ( app-doc/doxygen )
+# "
+#
+# # bug 123456
+# AUTOTOOLS_IN_SOURCE_BUILD=1
+#
+# DOCS=(AUTHORS ChangeLog README "Read me.txt" TODO)
+#
+# PATCHES=(
+# "${FILESDIR}/${P}-gcc44.patch" # bug 123458
+# "${FILESDIR}/${P}-as-needed.patch"
+# "${FILESDIR}/${P}-unbundle_libpng.patch"
+# )
+#
+# src_configure() {
+# local myeconfargs=(
+# $(use_enable debug)
+# $(use_with qt4)
+# $(use_enable threads multithreading)
+# $(use_with tiff)
+# )
+# autotools-utils_src_configure
+# }
+#
+# src_compile() {
+# autotools-utils_src_compile
+# use doc && autotools-utils_src_compile docs
+# }
+#
+# src_install() {
+# use doc && HTML_DOCS=("${BUILD_DIR}/apidocs/html/")
+# autotools-utils_src_install
+# if use examples; then
+# dobin "${BUILD_DIR}"/foo_example{1,2,3} \\
+# || die 'dobin examples failed'
+# fi
+# }
+#
+# @CODE
+
+# Keep variable names synced with cmake-utils and the other way around!
+
+case ${EAPI:-0} in
+ 2|3|4|5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+# @ECLASS-VARIABLE: AUTOTOOLS_AUTORECONF
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set to a non-empty value before calling inherit to enable running autoreconf
+# in src_prepare() and adding autotools dependencies.
+#
+# This is usually necessary when using live sources or applying patches
+# modifying configure.ac or Makefile.am files. Note that in the latter case
+# setting this variable is obligatory even though the eclass will work without
+# it (to add the necessary dependencies).
+#
+# The eclass will try to determine the correct autotools to run including a few
+# external tools: gettext, glib-gettext, intltool, gtk-doc, gnome-doc-prepare.
+# If your tool is not supported, please open a bug and we'll add support for it.
+#
+# Note that dependencies are added for autoconf, automake and libtool only.
+# If your package needs one of the external tools listed above, you need to add
+# appropriate packages to DEPEND yourself.
+[[ ${AUTOTOOLS_AUTORECONF} ]] || : ${AUTOTOOLS_AUTO_DEPEND:=no}
+
+inherit autotools eutils libtool
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install src_test
+
+# @ECLASS-VARIABLE: BUILD_DIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Build directory, location where all autotools generated files should be
+# placed. For out of source builds it defaults to ${WORKDIR}/${P}_build.
+#
+# This variable has been called AUTOTOOLS_BUILD_DIR formerly.
+# It is set under that name for compatibility.
+
+# @ECLASS-VARIABLE: AUTOTOOLS_IN_SOURCE_BUILD
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set to enable in-source build.
+
+# @ECLASS-VARIABLE: ECONF_SOURCE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specify location of autotools' configure script. By default it uses ${S}.
+
+# @ECLASS-VARIABLE: DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array containing documents passed to dodoc command.
+#
+# In EAPIs 4+, can list directories as well.
+#
+# Example:
+# @CODE
+# DOCS=( NEWS README )
+# @CODE
+
+# @ECLASS-VARIABLE: HTML_DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array containing documents passed to dohtml command.
+#
+# Example:
+# @CODE
+# HTML_DOCS=( doc/html/ )
+# @CODE
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# PATCHES array variable containing all various patches to be applied.
+#
+# Example:
+# @CODE
+# PATCHES=( "${FILESDIR}"/${P}-mypatch.patch )
+# @CODE
+
+# @ECLASS-VARIABLE: AUTOTOOLS_PRUNE_LIBTOOL_FILES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Sets the mode of pruning libtool files. The values correspond to
+# prune_libtool_files parameters, with leading dashes stripped.
+#
+# Defaults to pruning the libtool files when static libraries are not
+# installed or can be linked properly without them. Libtool files
+# for modules (plugins) will be kept in case plugin loader needs them.
+#
+# If set to 'modules', the .la files for modules will be removed
+# as well. This is often the preferred option.
+#
+# If set to 'all', all .la files will be removed unconditionally. This
+# option is discouraged and shall be used only if 'modules' does not
+# remove the files.
+#
+# If set to 'none', no .la files will be pruned ever. Use in corner
+# cases only.
+
+# Determine using IN or OUT source build
+_check_build_dir() {
+ : ${ECONF_SOURCE:=${S}}
+ # Respect both the old variable and the new one, depending
+ # on which one was set by the ebuild.
+ if [[ ! ${BUILD_DIR} && ${AUTOTOOLS_BUILD_DIR} ]]; then
+ eqawarn "The AUTOTOOLS_BUILD_DIR variable has been renamed to BUILD_DIR."
+ eqawarn "Please migrate the ebuild to use the new one."
+
+ # In the next call, both variables will be set already
+ # and we'd have to know which one takes precedence.
+ _RESPECT_AUTOTOOLS_BUILD_DIR=1
+ fi
+
+ if [[ ${_RESPECT_AUTOTOOLS_BUILD_DIR} ]]; then
+ BUILD_DIR=${AUTOTOOLS_BUILD_DIR:-${WORKDIR}/${P}_build}
+ else
+ if [[ -n ${AUTOTOOLS_IN_SOURCE_BUILD} ]]; then
+ : ${BUILD_DIR:=${ECONF_SOURCE}}
+ else
+ : ${BUILD_DIR:=${WORKDIR}/${P}_build}
+ fi
+ fi
+
+ # Backwards compatibility for getting the value.
+ AUTOTOOLS_BUILD_DIR=${BUILD_DIR}
+ echo ">>> Working in BUILD_DIR: \"${BUILD_DIR}\""
+}
+
+# @FUNCTION: autotools-utils_src_prepare
+# @DESCRIPTION:
+# The src_prepare function.
+#
+# Supporting PATCHES array and user patches. See base.eclass(5) for reference.
+autotools-utils_src_prepare() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local want_autoreconf=${AUTOTOOLS_AUTORECONF}
+
+ [[ ${PATCHES} ]] && epatch "${PATCHES[@]}"
+
+ at_checksum() {
+ find '(' -name 'Makefile.am' \
+ -o -name 'configure.ac' \
+ -o -name 'configure.in' ')' \
+ -exec cksum {} + | sort -k2
+ }
+
+ [[ ! ${want_autoreconf} ]] && local checksum=$(at_checksum)
+ epatch_user
+ if [[ ! ${want_autoreconf} ]]; then
+ if [[ ${checksum} != $(at_checksum) ]]; then
+ einfo 'Will autoreconfigure due to user patches applied.'
+ want_autoreconf=yep
+ fi
+ fi
+
+ [[ ${want_autoreconf} ]] && eautoreconf
+ elibtoolize --patch-only
+}
+
+# @FUNCTION: autotools-utils_src_configure
+# @DESCRIPTION:
+# The src_configure function. For out of source build it creates build
+# directory and runs econf there. Configuration parameters defined
+# in myeconfargs are passed here to econf. Additionally following USE
+# flags are known:
+#
+# IUSE="static-libs" passes --enable-shared and either --disable-static/--enable-static
+# to econf respectively.
+
+# @VARIABLE: myeconfargs
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Optional econf arguments as Bash array. Should be defined before calling src_configure.
+# @CODE
+# src_configure() {
+# local myeconfargs=(
+# --disable-readline
+# --with-confdir="/etc/nasty foo confdir/"
+# $(use_enable debug cnddebug)
+# $(use_enable threads multithreading)
+# )
+# autotools-utils_src_configure
+# }
+# @CODE
+autotools-utils_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ [[ -z ${myeconfargs+1} || $(declare -p myeconfargs) == 'declare -a'* ]] \
+ || die 'autotools-utils.eclass: myeconfargs has to be an array.'
+
+ [[ ${EAPI} == 2 ]] && ! use prefix && EPREFIX=
+
+ # Common args
+ local econfargs=()
+
+ _check_build_dir
+ if "${ECONF_SOURCE}"/configure --help 2>&1 | grep -q '^ *--docdir='; then
+ econfargs+=(
+ --docdir="${EPREFIX}"/usr/share/doc/${PF}
+ )
+ fi
+
+ # Handle static-libs found in IUSE, disable them by default
+ if in_iuse static-libs; then
+ econfargs+=(
+ --enable-shared
+ $(use_enable static-libs static)
+ )
+ fi
+
+ # Append user args
+ econfargs+=("${myeconfargs[@]}")
+
+ mkdir -p "${BUILD_DIR}" || die
+ pushd "${BUILD_DIR}" > /dev/null || die
+ econf "${econfargs[@]}" "$@"
+ popd > /dev/null || die
+}
+
+# @FUNCTION: autotools-utils_src_compile
+# @DESCRIPTION:
+# The autotools src_compile function, invokes emake in specified BUILD_DIR.
+autotools-utils_src_compile() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ _check_build_dir
+ pushd "${BUILD_DIR}" > /dev/null || die
+ emake "$@" || die 'emake failed'
+ popd > /dev/null || die
+}
+
+# @FUNCTION: autotools-utils_src_install
+# @DESCRIPTION:
+# The autotools src_install function. Runs emake install, unconditionally
+# removes unnecessary static libs (based on shouldnotlink libtool property)
+# and removes unnecessary libtool files when static-libs USE flag is defined
+# and unset.
+#
+# DOCS and HTML_DOCS arrays are supported. See base.eclass(5) for reference.
+autotools-utils_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ _check_build_dir
+ pushd "${BUILD_DIR}" > /dev/null || die
+ emake DESTDIR="${D}" "$@" install || die "emake install failed"
+ popd > /dev/null || die
+
+ # Move docs installed by autotools (in EAPI < 4).
+ if [[ ${EAPI} == [23] ]] \
+ && path_exists "${D}${EPREFIX}"/usr/share/doc/${PF}/*; then
+ if [[ $(find "${D}${EPREFIX}"/usr/share/doc/${PF}/* -type d) ]]; then
+ eqawarn "autotools-utils: directories in docdir require at least EAPI 4"
+ else
+ mkdir "${T}"/temp-docdir
+ mv "${D}${EPREFIX}"/usr/share/doc/${PF}/* "${T}"/temp-docdir/ \
+ || die "moving docs to tempdir failed"
+
+ dodoc "${T}"/temp-docdir/* || die "docdir dodoc failed"
+ rm -r "${T}"/temp-docdir || die
+ fi
+ fi
+
+ # XXX: support installing them from builddir as well?
+ if declare -p DOCS &>/dev/null; then
+ # an empty list == don't install anything
+ if [[ ${DOCS[@]} ]]; then
+ if [[ ${EAPI} == [23] ]]; then
+ dodoc "${DOCS[@]}" || die
+ else
+ # dies by itself
+ dodoc -r "${DOCS[@]}"
+ fi
+ fi
+ else
+ local f
+ # same list as in PMS
+ for f in README* ChangeLog AUTHORS NEWS TODO CHANGES \
+ THANKS BUGS FAQ CREDITS CHANGELOG; do
+ if [[ -s ${f} ]]; then
+ dodoc "${f}" || die "(default) dodoc ${f} failed"
+ fi
+ done
+ fi
+ if [[ ${HTML_DOCS} ]]; then
+ dohtml -r "${HTML_DOCS[@]}" || die "dohtml failed"
+ fi
+
+ # Remove libtool files and unnecessary static libs
+ local prune_ltfiles=${AUTOTOOLS_PRUNE_LIBTOOL_FILES}
+ if [[ ${prune_ltfiles} != none ]]; then
+ prune_libtool_files ${prune_ltfiles:+--${prune_ltfiles}}
+ fi
+}
+
+# @FUNCTION: autotools-utils_src_test
+# @DESCRIPTION:
+# The autotools src_test function. Runs emake check in build directory.
+autotools-utils_src_test() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ _check_build_dir
+ pushd "${BUILD_DIR}" > /dev/null || die
+
+ if make -ni check "${@}" &>/dev/null; then
+ emake check "${@}" || die 'emake check failed.'
+ elif make -ni test "${@}" &>/dev/null; then
+ emake test "${@}" || die 'emake test failed.'
+ fi
+
+ popd > /dev/null || die
+}
diff --git a/eclass/autotools.eclass b/eclass/autotools.eclass
new file mode 100644
index 000000000000..22f2f39a3c82
--- /dev/null
+++ b/eclass/autotools.eclass
@@ -0,0 +1,607 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: autotools.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: Regenerates auto* build scripts
+# @DESCRIPTION:
+# This eclass is for safely handling autotooled software packages that need to
+# regenerate their build scripts. All functions will abort in case of errors.
+
+# Note: We require GNU m4, as does autoconf. So feel free to use any features
+# from the GNU version of m4 without worrying about other variants (i.e. BSD).
+
+if [[ ${__AUTOTOOLS_AUTO_DEPEND+set} == "set" ]] ; then
+ # See if we were included already, but someone changed the value
+ # of AUTOTOOLS_AUTO_DEPEND on us. We could reload the entire
+ # eclass at that point, but that adds overhead, and it's trivial
+ # to re-order inherit in eclasses/ebuilds instead. #409611
+ if [[ ${__AUTOTOOLS_AUTO_DEPEND} != ${AUTOTOOLS_AUTO_DEPEND} ]] ; then
+ die "AUTOTOOLS_AUTO_DEPEND changed value between inherits; please inherit autotools.eclass first! ${__AUTOTOOLS_AUTO_DEPEND} -> ${AUTOTOOLS_AUTO_DEPEND}"
+ fi
+fi
+
+if [[ -z ${_AUTOTOOLS_ECLASS} ]]; then
+_AUTOTOOLS_ECLASS=1
+
+inherit libtool
+
+# @ECLASS-VARIABLE: WANT_AUTOCONF
+# @DESCRIPTION:
+# The major version of autoconf your package needs
+: ${WANT_AUTOCONF:=latest}
+
+# @ECLASS-VARIABLE: WANT_AUTOMAKE
+# @DESCRIPTION:
+# The major version of automake your package needs
+: ${WANT_AUTOMAKE:=latest}
+
+# @ECLASS-VARIABLE: WANT_LIBTOOL
+# @DESCRIPTION:
+# Do you want libtool? Valid values here are "latest" and "none".
+: ${WANT_LIBTOOL:=latest}
+
+# @ECLASS-VARIABLE: _LATEST_AUTOMAKE
+# @INTERNAL
+# @DESCRIPTION:
+# CONSTANT!
+# The latest major version/slot of automake available on each arch. #312315
+# We should list both the latest stable, and the latest unstable. #465732
+# This way the stable builds will still work, but the unstable are allowed
+# to build & test things for us ahead of time (if they have it installed).
+# If a newer slot is stable on any arch, and is NOT reflected in this list,
+# then circular dependencies may arise during emerge @system bootstraps.
+# Do NOT change this variable in your ebuilds!
+# If you want to force a newer minor version, you can specify the correct
+# WANT value by using a colon: <PV>:<WANT_AUTOMAKE>
+_LATEST_AUTOMAKE=( 1.15:1.15 )
+
+_automake_atom="sys-devel/automake"
+_autoconf_atom="sys-devel/autoconf"
+if [[ -n ${WANT_AUTOMAKE} ]]; then
+ case ${WANT_AUTOMAKE} in
+ # Even if the package doesn't use automake, we still need to depend
+ # on it because we run aclocal to process m4 macros. This matches
+ # the autoreconf tool, so this requirement is correct. #401605
+ none) ;;
+ latest)
+ # Use SLOT deps if we can. For EAPI=0, we get pretty close.
+ if [[ ${EAPI:-0} != 0 ]] ; then
+ _automake_atom="|| ( `printf '>=sys-devel/automake-%s:%s ' ${_LATEST_AUTOMAKE[@]/:/ }` )"
+ else
+ _automake_atom="|| ( `printf '>=sys-devel/automake-%s ' ${_LATEST_AUTOMAKE[@]/%:*}` )"
+ fi
+ ;;
+ *) _automake_atom="=sys-devel/automake-${WANT_AUTOMAKE}*" ;;
+ esac
+ export WANT_AUTOMAKE
+fi
+
+if [[ -n ${WANT_AUTOCONF} ]] ; then
+ case ${WANT_AUTOCONF} in
+ none) _autoconf_atom="" ;; # some packages don't require autoconf at all
+ 2.1) _autoconf_atom="=sys-devel/autoconf-${WANT_AUTOCONF}*" ;;
+ # if you change the "latest" version here, change also autotools_env_setup
+ latest|2.5) _autoconf_atom=">=sys-devel/autoconf-2.69" ;;
+ *) die "Invalid WANT_AUTOCONF value '${WANT_AUTOCONF}'" ;;
+ esac
+ export WANT_AUTOCONF
+fi
+
+_libtool_atom=">=sys-devel/libtool-2.4"
+if [[ -n ${WANT_LIBTOOL} ]] ; then
+ case ${WANT_LIBTOOL} in
+ none) _libtool_atom="" ;;
+ latest) ;;
+ *) die "Invalid WANT_LIBTOOL value '${WANT_LIBTOOL}'" ;;
+ esac
+ export WANT_LIBTOOL
+fi
+
+# Force people (nicely) to upgrade to a newer version of gettext as
+# older ones are known to be crappy. #496454
+AUTOTOOLS_DEPEND="!<sys-devel/gettext-0.18.1.1-r3
+ ${_automake_atom}
+ ${_autoconf_atom}
+ ${_libtool_atom}"
+RDEPEND=""
+
+# @ECLASS-VARIABLE: AUTOTOOLS_AUTO_DEPEND
+# @DESCRIPTION:
+# Set to 'no' to disable automatically adding to DEPEND. This lets
+# ebuilds form conditional depends by using ${AUTOTOOLS_DEPEND} in
+# their own DEPEND string.
+: ${AUTOTOOLS_AUTO_DEPEND:=yes}
+if [[ ${AUTOTOOLS_AUTO_DEPEND} != "no" ]] ; then
+ DEPEND=${AUTOTOOLS_DEPEND}
+fi
+__AUTOTOOLS_AUTO_DEPEND=${AUTOTOOLS_AUTO_DEPEND} # See top of eclass
+
+unset _automake_atom _autoconf_atom
+
+# @ECLASS-VARIABLE: AM_OPTS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Additional options to pass to automake during
+# eautoreconf call.
+
+# @ECLASS-VARIABLE: AT_NOEAUTOMAKE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Don't run eautomake command if set to 'yes'; only used to workaround
+# broken packages. Generally you should, instead, fix the package to
+# not call AM_INIT_AUTOMAKE if it doesn't actually use automake.
+
+# @ECLASS-VARIABLE: AT_NOELIBTOOLIZE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Don't run elibtoolize command if set to 'yes',
+# useful when elibtoolize needs to be ran with
+# particular options
+
+# @ECLASS-VARIABLE: AT_M4DIR
+# @DESCRIPTION:
+# Additional director(y|ies) aclocal should search
+: ${AT_M4DIR:=}
+
+# @ECLASS-VARIABLE: AT_SYS_M4DIR
+# @INTERNAL
+# @DESCRIPTION:
+# For system integrators, a list of additional aclocal search paths.
+# This variable gets eval-ed, so you can use variables in the definition
+# that may not be valid until eautoreconf & friends are run.
+: ${AT_SYS_M4DIR:=}
+
+# @FUNCTION: eautoreconf
+# @DESCRIPTION:
+# This function mimics the behavior of autoreconf, but uses the different
+# eauto* functions to run the tools. It doesn't accept parameters, but
+# the directory with include files can be specified with AT_M4DIR variable.
+#
# @FUNCTION: eautoreconf
# @DESCRIPTION:
# Should do a full autoreconf - normally what most people will be interested in.
# Also should handle additional directories specified by AC_CONFIG_SUBDIRS.
eautoreconf() {
	# Note: the old unused local `g` has been dropped.
	local x

	# Subdirs often share a common build dir #529404. If so, we can't safely
	# run in parallel because many tools clobber the content in there. Libtool
	# and automake both `rm && cp` while aclocal reads the output. We might be
	# able to handle this if we split the steps and grab locks on the dirs the
	# tools actually write to. Then we'd run all the common tools that use
	# those inputs. Doing this in bash does not scale easily.
	# If we do re-enable parallel support, make sure #426512 is handled.
	if [[ -z ${AT_NO_RECURSIVE} ]] ; then
		# Take care of subdirs
		for x in $(autotools_check_macro_val AC_CONFIG_SUBDIRS) ; do
			if [[ -d ${x} ]] ; then
				pushd "${x}" >/dev/null
				# Avoid unsafe nested multijob_finish_one for bug #426512.
				AT_NOELIBTOOLIZE="yes" eautoreconf || die
				popd >/dev/null
			fi
		done
	fi

	einfo "Running eautoreconf in '${PWD}' ..."

	# Make sure the aux/macro dirs declared by configure exist so the
	# tools below do not bail out.
	local m4dirs=$(autotools_check_macro_val AC_CONFIG_{AUX,MACRO}_DIR)
	[[ -n ${m4dirs} ]] && mkdir -p ${m4dirs}

	# Run all the tools before aclocal so we can gather the .m4 files.
	local i tools=(
		# <tool> <was run> <command>
		glibgettext false "autotools_run_tool glib-gettextize --copy --force"
		gettext false "autotools_run_tool --at-missing autopoint --force"
		# intltool must come after autopoint.
		intltool false "autotools_run_tool intltoolize --automake --copy --force"
		gtkdoc false "autotools_run_tool --at-missing gtkdocize --copy"
		gnomedoc false "autotools_run_tool --at-missing gnome-doc-prepare --copy --force"
		libtool false "_elibtoolize --install --copy --force"
	)
	for (( i = 0; i < ${#tools[@]}; i += 3 )) ; do
		if _at_uses_${tools[i]} ; then
			tools[i+1]=true
			${tools[i+2]}
		fi
	done

	# Generate aclocal.m4 with our up-to-date m4 files.
	local rerun_aclocal=false
	eaclocal

	# Check to see if we had macros expanded by other macros or in other
	# m4 files that we couldn't detect early. This is uncommon, but some
	# packages do this, so we have to handle it correctly.
	for (( i = 0; i < ${#tools[@]}; i += 3 )) ; do
		if ! ${tools[i+1]} && _at_uses_${tools[i]} ; then
			${tools[i+2]}
			rerun_aclocal=true
		fi
	done
	${rerun_aclocal} && eaclocal

	if [[ ${WANT_AUTOCONF} = 2.1 ]] ; then
		eautoconf
	else
		eautoconf --force
	fi
	eautoheader
	[[ ${AT_NOEAUTOMAKE} != "yes" ]] && FROM_EAUTORECONF="yes" eautomake ${AM_OPTS}

	if [[ ${AT_NOELIBTOOLIZE} != "yes" ]] ; then
		# Call it here to prevent failures due to elibtoolize called _before_
		# eautoreconf.
		elibtoolize --force "${PWD}"
	fi

	return 0
}
+
# @FUNCTION: _at_uses_pkg
# @USAGE: <macros>
# @INTERNAL
# @DESCRIPTION:
# See if the specified macros are enabled (via the autoconf trace, falling
# back to a conservative grep of the configure script sources).
_at_uses_pkg() {
	if [[ -n $(autotools_check_macro "$@") ]] ; then
		return 0
	else
		# If the trace didn't find it (perhaps because aclocal.m4 hasn't
		# been generated yet), cheat, but be conservative.
		local macro args=()
		for macro ; do
			args+=( -e "^[[:space:]]*${macro}\>" )
		done
		# `egrep` is deprecated; `grep -E` is the equivalent spelling.
		# configure.?? matches configure.ac / configure.in.
		grep -E -q "${args[@]}" configure.??
	fi
}
+_at_uses_autoheader() { _at_uses_pkg A{C,M}_CONFIG_HEADER{S,}; }
+_at_uses_automake() { _at_uses_pkg AM_INIT_AUTOMAKE; }
+_at_uses_gettext() { _at_uses_pkg AM_GNU_GETTEXT_VERSION; }
+_at_uses_glibgettext() { _at_uses_pkg AM_GLIB_GNU_GETTEXT; }
+_at_uses_intltool() { _at_uses_pkg {AC,IT}_PROG_INTLTOOL; }
+_at_uses_gtkdoc() { _at_uses_pkg GTK_DOC_CHECK; }
+_at_uses_gnomedoc() { _at_uses_pkg GNOME_DOC_INIT; }
+_at_uses_libtool() { _at_uses_pkg A{C,M}_PROG_LIBTOOL LT_INIT; }
+
# @FUNCTION: eaclocal_amflags
# @DESCRIPTION:
# Extract the ACLOCAL_AMFLAGS value from the Makefile.am and try to handle
# (most) of the crazy crap that people throw at us.
eaclocal_amflags() {
	local aclocal_opts amflags_file

	# Check the usual makefile names in priority order; only the first
	# one that exists is consulted.
	for amflags_file in GNUmakefile.am Makefile.am GNUmakefile.in Makefile.in ; do
		[[ -e ${amflags_file} ]] || continue
		# setup the env in case the pkg does something crazy
		# in their ACLOCAL_AMFLAGS. like run a shell script
		# which turns around and runs autotools. #365401
		# or split across multiple lines. #383525
		autotools_env_setup
		# The sed script prints everything after the first "=" and then
		# keeps appending any backslash-continued lines.
		aclocal_opts=$(sed -n \
			"/^ACLOCAL_AMFLAGS[[:space:]]*=/{ \
			  # match the first line
			  s:[^=]*=::p; \
			  # then gobble up all escaped lines
			  : nextline /\\\\$/{ n; p; b nextline; } \
			}" ${amflags_file})
		# Re-evaluate so embedded $(...) / ${...} constructs expand.
		eval aclocal_opts=\""${aclocal_opts}"\"
		break
	done

	echo ${aclocal_opts}
}
+
# @FUNCTION: eaclocal
# @DESCRIPTION:
# This function runs aclocal via autotools_run_tool with the specified
# parameters (the name of the tool run is the same as the function name
# without the leading "e").
# It also forces installing the support files for safety.
# Respects AT_M4DIR for additional directories to search for macros.
eaclocal() {
	# Only regenerate aclocal.m4 when it is missing or was itself produced
	# by aclocal — never clobber a hand-written aclocal.m4.
	[[ ! -f aclocal.m4 || -n $(grep -e 'generated.*by aclocal' aclocal.m4) ]] && \
		autotools_run_tool --at-m4flags aclocal "$@" $(eaclocal_amflags)
}
+
# @FUNCTION: _elibtoolize
# @DESCRIPTION:
# Runs libtoolize (or glibtoolize where that is the installed name).
#
# Note the '_' prefix: avoid collision with elibtoolize() from libtool.eclass.
_elibtoolize() {
	# Honor a caller-provided ${LIBTOOLIZE}; otherwise prefer glibtoolize
	# when it is in PATH, falling back to plain libtoolize.
	local LIBTOOLIZE=${LIBTOOLIZE}
	if [[ -z ${LIBTOOLIZE} ]] ; then
		if type -P glibtoolize > /dev/null ; then
			LIBTOOLIZE=glibtoolize
		else
			LIBTOOLIZE=libtoolize
		fi
	fi

	# Automake-based trees need the --automake flag.
	if [[ -f GNUmakefile.am || -f Makefile.am ]] ; then
		set -- "$@" --automake
	fi

	autotools_run_tool ${LIBTOOLIZE} "$@"
}
+
# @FUNCTION: eautoheader
# @DESCRIPTION:
# Runs autoheader, but only when the package actually uses config headers.
eautoheader() {
	# Nothing to do unless A{C,M}_CONFIG_HEADER{S,} is in use.
	if ! _at_uses_autoheader ; then
		return 0
	fi
	autotools_run_tool --at-no-fail --at-m4flags autoheader "$@"
}
+
# @FUNCTION: eautoconf
# @DESCRIPTION:
# Runs autoconf. Dies when neither configure.ac nor configure.in exists,
# and QA-warns about the long-deprecated configure.in name (except for
# packages pinned to autoconf 2.1, which predates configure.ac).
eautoconf() {
	if [[ ! -f configure.ac && ! -f configure.in ]] ; then
		echo
		eerror "No configure.{ac,in} present in '${PWD}'!"
		echo
		die "No configure.{ac,in} present!"
	fi
	if [[ ${WANT_AUTOCONF} != "2.1" && -e configure.in ]] ; then
		eqawarn "This package has a configure.in file which has long been deprecated.  Please"
		eqawarn "update it to use configure.ac instead as newer versions of autotools will die"
		eqawarn "when it finds this file.  See https://bugs.gentoo.org/426262 for details."
	fi

	autotools_run_tool --at-m4flags autoconf "$@"
}
+
# @FUNCTION: eautomake
# @DESCRIPTION:
# Runs automake.
eautomake() {
	local extra_opts=()
	local makefile_name

	# Run automake if:
	# - a Makefile.am type file exists
	# - the configure script is using the AM_INIT_AUTOMAKE directive
	for makefile_name in {GNUmakefile,{M,m}akefile}.am "" ; do
		[[ -f ${makefile_name} ]] && break
	done

	# Helper to query the installed automake's version string.
	# NOTE(review): defining this at call time means it stays defined in
	# the environment afterwards — presumably harmless; confirm.
	_automake_version() {
		autotools_run_tool --at-output automake --version 2>/dev/null |
			sed -n -e '1{s:.*(GNU automake) ::p;q}'
	}

	if [[ -z ${makefile_name} ]] ; then
		# No Makefile.am at all: only run when configure requests automake.
		_at_uses_automake || return 0

	elif [[ -z ${FROM_EAUTORECONF} && -f ${makefile_name%.am}.in ]]; then
		# Standalone eautomake call: if the generated .in was produced by
		# a different automake version than the installed one, a partial
		# regen would be inconsistent — force a full eautoreconf instead.
		local used_automake
		local installed_automake

		installed_automake=$(WANT_AUTOMAKE= _automake_version)
		used_automake=$(head -n 1 < ${makefile_name%.am}.in | \
			sed -e 's:.*by automake \(.*\) from .*:\1:')

		if [[ ${installed_automake} != ${used_automake} ]]; then
			ewarn "Automake used for the package (${used_automake}) differs from" \
				"the installed version (${installed_automake})."
			ewarn "Forcing a full rebuild of the autotools to workaround."
			eautoreconf
			return 0
		fi
	fi

	# GNU strictness requires these files; fall back to foreign otherwise.
	[[ -f INSTALL && -f AUTHORS && -f ChangeLog && -f NEWS && -f README ]] \
		|| extra_opts+=( --foreign )

	# Older versions of automake do not support --force-missing.  But we want
	# to use this whenever possible to update random bundled files #133489.
	case $(_automake_version) in
	1.4|1.4[.-]*) ;;
	*) extra_opts+=( --force-missing ) ;;
	esac

	autotools_run_tool automake --add-missing --copy "${extra_opts[@]}" "$@"
}
+
# @FUNCTION: eautopoint
# @DESCRIPTION:
# Runs autopoint (from the gettext package).
eautopoint() {
	# Pure pass-through to the logging/err-checking runner.
	local args=( "$@" )
	autotools_run_tool autopoint "${args[@]}"
}
+
# @FUNCTION: config_rpath_update
# @USAGE: [destination]
# @DESCRIPTION:
# Some packages utilize the config.rpath helper script, but don't
# use gettext directly.  So we have to copy it in manually since
# we can't let `autopoint` do it for us.
config_rpath_update() {
	# Derive the installed config.rpath location from gettext's own path.
	# NOTE(review): if gettext is not in PATH, ${src} is empty and the cp
	# below dies — presumably intended; confirm.
	local dst src=$(type -P gettext | sed 's:bin/gettext:share/gettext/config.rpath:')

	# With no args, update every config.rpath below the current dir.
	[[ $# -eq 0 ]] && set -- $(find -name config.rpath)
	[[ $# -eq 0 ]] && return 0

	einfo "Updating all config.rpath files"
	for dst in "$@" ; do
		einfo "  ${dst}"
		cp "${src}" "${dst}" || die
	done
}
+
# @FUNCTION: autotools_env_setup
# @INTERNAL
# @DESCRIPTION:
# Process the WANT_AUTO{CONF,MAKE} flags, resolving the special value
# "latest" to a concrete version.
autotools_env_setup() {
	# We do the "latest" → version switch here because it solves
	# possible order problems, see bug #270010 as an example.
	if [[ ${WANT_AUTOMAKE} == "latest" ]]; then
		local pv
		# _LATEST_AUTOMAKE entries look like "dep:slot"; strip to the slot
		# part and pick the newest one actually installed.
		for pv in ${_LATEST_AUTOMAKE[@]/#*:} ; do
			# has_version respects ROOT, but in this case, we don't want it to,
			# thus "ROOT=/" prefix:
			ROOT=/ has_version "=sys-devel/automake-${pv}*" && export WANT_AUTOMAKE="${pv}"
		done
		[[ ${WANT_AUTOMAKE} == "latest" ]] && \
			die "Cannot find the latest automake! Tried ${_LATEST_AUTOMAKE[*]}"
	fi
	# For autoconf, "latest" simply means the 2.5x series.
	[[ ${WANT_AUTOCONF} == "latest" ]] && export WANT_AUTOCONF=2.5
}
+
# @FUNCTION: autotools_run_tool
# @USAGE: [--at-no-fail] [--at-m4flags] [--at-missing] [--at-output] <autotool> [tool-specific flags]
# @INTERNAL
# @DESCRIPTION:
# Run the specified autotool helper, but do logging and error checking
# around it in the process.
#   --at-no-fail : do not die when the tool exits non-zero
#   --at-m4flags : inject AT_M4DIR/AT_SYS_M4DIR include flags
#   --at-missing : silently skip when the tool is not installed
#   --at-output  : run the tool directly and return its output/status
autotools_run_tool() {
	# Process our own internal flags first
	local autofail=true m4flags=false missing_ok=false return_output=false
	while [[ -n $1 ]] ; do
		case $1 in
		--at-no-fail) autofail=false;;
		--at-m4flags) m4flags=true;;
		--at-missing) missing_ok=true;;
		--at-output) return_output=true;;
		# whatever is left goes to the actual tool
		*) break;;
		esac
		shift
	done

	# Autotools regeneration belongs in unpack/prepare; warn elsewhere.
	if [[ ${EBUILD_PHASE} != "unpack" && ${EBUILD_PHASE} != "prepare" ]]; then
		ewarn "QA Warning: running $1 in ${EBUILD_PHASE} phase"
	fi

	if ${missing_ok} && ! type -P ${1} >/dev/null ; then
		einfo "Skipping '$*' due $1 not installed"
		return 0
	fi

	autotools_env_setup

	local STDERR_TARGET="${T}/$1.out"
	# most of the time, there will only be one run, but if there are
	# more, make sure we get unique log filenames
	if [[ -e ${STDERR_TARGET} ]] ; then
		local i=1
		while :; do
			STDERR_TARGET="${T}/$1-${i}.out"
			[[ -e ${STDERR_TARGET} ]] || break
			: $(( i++ ))
		done
	fi

	if ${m4flags} ; then
		# Splice the m4 include flags in right after the tool name.
		set -- "${1}" $(autotools_m4dir_include) "${@:2}" $(autotools_m4sysdir_include)
	fi

	# If the caller wants to probe something, then let them do it directly.
	if ${return_output} ; then
		"$@"
		return
	fi

	# Log header, then the tool's combined stdout/stderr.
	printf "***** $1 *****\n***** PWD: ${PWD}\n***** $*\n\n" > "${STDERR_TARGET}"

	ebegin "Running $@"
	"$@" >> "${STDERR_TARGET}" 2>&1
	if ! eend $? && ${autofail} ; then
		echo
		eerror "Failed Running $1 !"
		eerror
		eerror "Include in your bugreport the contents of:"
		eerror
		eerror "  ${STDERR_TARGET}"
		echo
		die "Failed Running $1 !"
	fi
}
+
# Internal support for macro detection.

# Keep a list of all the macros we might use so that we only
# have to run the trace code once.  Order doesn't matter.
ALL_AUTOTOOLS_MACROS=(
	A{C,M}_PROG_LIBTOOL LT_INIT
	A{C,M}_CONFIG_HEADER{S,}
	AC_CONFIG_SUBDIRS
	AC_CONFIG_AUX_DIR AC_CONFIG_MACRO_DIR
	AM_INIT_AUTOMAKE
	AM_GLIB_GNU_GETTEXT
	AM_GNU_GETTEXT_VERSION
	{AC,IT}_PROG_INTLTOOL
	GTK_DOC_CHECK
	GNOME_DOC_INIT
)
# Emit the raw autoconf --trace lines for the requested macros.  The full
# trace is cached in a dotfile so repeated queries don't rerun autoconf.
autotools_check_macro() {
	[[ -f configure.ac || -f configure.in ]] || return 0

	# We can run in multiple dirs, so we have to cache the trace
	# data in $PWD rather than an env var.
	local trace_file=".__autoconf_trace_data"
	# Regenerate when missing, or stale relative to aclocal.m4.
	if [[ ! -e ${trace_file} ]] || [[ ! aclocal.m4 -ot ${trace_file} ]] ; then
		WANT_AUTOCONF="2.5" autoconf \
			$(autotools_m4dir_include) \
			${ALL_AUTOTOOLS_MACROS[@]/#/--trace=} > ${trace_file} 2>/dev/null
	fi

	local macro args=()
	for macro ; do
		# Guard: every queried macro must be in the cached trace set.
		has ${macro} ${ALL_AUTOTOOLS_MACROS[@]} || die "internal error: add ${macro} to ALL_AUTOTOOLS_MACROS"
		args+=( -e ":${macro}:" )
	done
	grep "${args[@]}" ${trace_file}
}
+
# @FUNCTION: autotools_check_macro_val
# @USAGE: <macro> [macros]
# @INTERNAL
# @DESCRIPTION:
# Look for a macro and extract its value (the text after "<macro>:" in the
# trace output), skipping commented/dnl'd lines.
autotools_check_macro_val() {
	# Note: the unused local `scan_out` has been removed.
	local macro

	for macro ; do
		autotools_check_macro "${macro}" | \
			gawk -v macro="${macro}" \
				'($0 !~ /^[[:space:]]*(#|dnl)/) {
					if (match($0, macro ":(.*)$", res))
						print res[1]
				}' | uniq
	done

	return 0
}
+
# Turn a list of m4 directories into the include flags autoconf expects.
# Warns (but still emits the flag) for directories that do not exist.
_autotools_m4dir_include() {
	local x include_opts flag

	# Use the right flag to autoconf based on the version #448986
	[[ ${WANT_AUTOCONF} == "2.1" ]] \
		&& flag="l" \
		|| flag="I"

	for x in "$@" ; do
		case ${x} in
		# We handle it below
		-${flag}) ;;
		*)
			[[ ! -d ${x} ]] && ewarn "autotools.eclass: '${x}' does not exist"
			include_opts+=" -${flag} ${x}"
			;;
		esac
	done

	echo ${include_opts}
}
# Include flags for the user/ebuild-specified AT_M4DIR.
autotools_m4dir_include() { _autotools_m4dir_include ${AT_M4DIR} ; }
# Include flags for system-wide m4 dirs (AT_SYS_M4DIR or the SYSROOT
# aclocal dir when cross-compiling).
autotools_m4sysdir_include() {
	# First try to use the paths the system integrator has set up.
	local paths=( $(eval echo ${AT_SYS_M4DIR}) )

	if [[ ${#paths[@]} -eq 0 && -n ${SYSROOT} ]] ; then
		# If they didn't give us anything, then default to the SYSROOT.
		# This helps when cross-compiling.
		local path="${SYSROOT}/usr/share/aclocal"
		[[ -d ${path} ]] && paths+=( "${path}" )
	fi
	_autotools_m4dir_include "${paths[@]}"
}
+
+fi
diff --git a/eclass/base.eclass b/eclass/base.eclass
new file mode 100644
index 000000000000..fffdacb76c6b
--- /dev/null
+++ b/eclass/base.eclass
@@ -0,0 +1,194 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: base.eclass
+# @MAINTAINER:
+# QA Team <qa@gentoo.org>
+# @AUTHOR:
+# Original author: Dan Armak <danarmak@gentoo.org>
+# @BLURB: The base eclass defines some default functions and variables.
+# @DESCRIPTION:
+# The base eclass defines some default functions and variables.
+
+if [[ -z ${_BASE_ECLASS} ]]; then
+_BASE_ECLASS=1
+
inherit eutils

# Phases exported by this eclass.  src_prepare/src_configure only exist
# as separate phases from EAPI 2 onwards.
BASE_EXPF="src_unpack src_compile src_install"
case "${EAPI:-0}" in
	2|3|4|5) BASE_EXPF+=" src_prepare src_configure" ;;
	*) ;;
esac

EXPORT_FUNCTIONS ${BASE_EXPF}
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# Array containing documents passed to dodoc command.
+#
+# DOCS=( "${S}/doc/document.txt" "${S}/doc/doc_folder/" )
+
+# @ECLASS-VARIABLE: HTML_DOCS
+# @DESCRIPTION:
+# Array containing documents passed to dohtml command.
+#
+# HTML_DOCS=( "${S}/doc/document.html" "${S}/doc/html_folder/" )
+
+# @ECLASS-VARIABLE: PATCHES
+# @DESCRIPTION:
+# PATCHES array variable containing all various patches to be applied.
+# This variable is expected to be defined in global scope of ebuild.
+# Make sure to specify the full path. This variable is utilised in
+# src_unpack/src_prepare phase based on EAPI.
+#
+# NOTE: if using patches folders with special file suffixes you have to
+# define one additional variable EPATCH_SUFFIX="something"
+#
+# PATCHES=( "${FILESDIR}/mypatch.patch" "${FILESDIR}/patches_folder/" )
+
+
# @FUNCTION: base_src_unpack
# @DESCRIPTION:
# The base src_unpack function, which is exported.
# Calls also src_prepare with eapi older than 2.
base_src_unpack() {
	debug-print-function $FUNCNAME "$@"

	pushd "${WORKDIR}" > /dev/null

	# Prefer unpacker.eclass's unpacker when it has been inherited,
	# otherwise fall back to plain unpack of ${A}.
	if [[ $(type -t unpacker_src_unpack) == "function" ]] ; then
		unpacker_src_unpack
	elif [[ -n ${A} ]] ; then
		unpack ${A}
	fi
	# Pre-EAPI2 has no separate src_prepare phase, so run it from here.
	has src_prepare ${BASE_EXPF} || base_src_prepare

	popd > /dev/null
}
+
# @FUNCTION: base_src_prepare
# @DESCRIPTION:
# The base src_prepare function, which is exported when
# EAPI is greater or equal to 2.  Here the PATCHES array is evaluated.
base_src_prepare() {
	debug-print-function $FUNCNAME "$@"
	debug-print "$FUNCNAME: PATCHES=$PATCHES"

	# x and oldval used to leak into the global scope; keep them local.
	local x oldval
	local patches_failed=0

	pushd "${S}" > /dev/null
	# Only discard stderr here; the former trailing "2>&1" redirected the
	# shell's "not found" message into the command substitution instead.
	if [[ "$(declare -p PATCHES 2>/dev/null)" == "declare -a"* ]]; then
		for x in "${PATCHES[@]}"; do
			debug-print "$FUNCNAME: applying patch from ${x}"
			if [[ -d "${x}" ]]; then
				# Use standardized names and locations with bulk patching
				# Patch directory is ${WORKDIR}/patch
				# See epatch() in eutils.eclass for more documentation
				EPATCH_SUFFIX=${EPATCH_SUFFIX:=patch}

				# in order to preserve normal EPATCH_SOURCE value that can
				# be used other way than with base eclass store in local
				# variable and restore later
				oldval=${EPATCH_SOURCE}
				EPATCH_SOURCE=${x}
				EPATCH_FORCE=yes
				epatch
				EPATCH_SOURCE=${oldval}
			elif [[ -f "${x}" ]]; then
				epatch "${x}"
			else
				ewarn "QA: File or directory \"${x}\" does not exist."
				ewarn "QA: Check your PATCHES array or add missing file/directory."
				patches_failed=1
			fi
		done
		[[ ${patches_failed} -eq 1 ]] && die "Some patches failed. See above messages."
	else
		for x in ${PATCHES}; do
			debug-print "$FUNCNAME: patching from ${x}"
			epatch "${x}"
		done
	fi

	# Apply user patches
	debug-print "$FUNCNAME: applying user patches"
	epatch_user

	popd > /dev/null
}
+
# @FUNCTION: base_src_configure
# @DESCRIPTION:
# The base src_configure function, which is exported when
# EAPI is greater or equal to 2.  Runs basic econf.
base_src_configure() {
	debug-print-function $FUNCNAME "$@"

	# Deliberately no pushd into ${S}: this lets ECONF_SOURCE redirect
	# where the configure script is looked up.
	[[ -x ${ECONF_SOURCE:-.}/configure ]] && econf "$@"
}
+
# @FUNCTION: base_src_compile
# @DESCRIPTION:
# The base src_compile function.  With EAPIs older than 2 (where
# src_configure is not a separate phase) it first calls
# base_src_configure, then builds.
base_src_compile() {
	debug-print-function $FUNCNAME "$@"

	if ! has src_configure ${BASE_EXPF} ; then
		base_src_configure
	fi
	base_src_make "$@"
}
+
# @FUNCTION: base_src_make
# @DESCRIPTION:
# Actual function that runs the emake command, but only when some kind of
# makefile is present in the current directory.
base_src_make() {
	debug-print-function $FUNCNAME "$@"

	local mkfile
	for mkfile in Makefile GNUmakefile makefile; do
		if [[ -f ${mkfile} ]]; then
			emake "$@" || die "died running emake, $FUNCNAME"
			break
		fi
	done
}
+
# @FUNCTION: base_src_install
# @DESCRIPTION:
# The base src_install function.  Runs make install and
# installs documents and html documents from DOCS and HTML_DOCS
# arrays.
base_src_install() {
	debug-print-function $FUNCNAME "$@"

	# Install into the image dir, then handle the doc arrays.
	emake DESTDIR="${D}" "$@" install || die "died running make install, $FUNCNAME"
	base_src_install_docs
}
+
# @FUNCTION: base_src_install_docs
# @DESCRIPTION:
# Actual function that installs documentation from
# DOCS and HTML_DOCS arrays.
base_src_install_docs() {
	debug-print-function $FUNCNAME "$@"

	local x

	pushd "${S}" > /dev/null

	# Only discard stderr; the former trailing "2>&1" leaked the shell's
	# "not found" message into the command substitution being compared.
	if [[ "$(declare -p DOCS 2>/dev/null)" == "declare -a"* ]]; then
		for x in "${DOCS[@]}"; do
			debug-print "$FUNCNAME: docs: creating document from ${x}"
			dodoc "${x}" || die "dodoc failed"
		done
	fi
	if [[ "$(declare -p HTML_DOCS 2>/dev/null)" == "declare -a"* ]]; then
		for x in "${HTML_DOCS[@]}"; do
			debug-print "$FUNCNAME: docs: creating html document from ${x}"
			dohtml -r "${x}" || die "dohtml failed"
		done
	fi

	popd > /dev/null
}
+
+fi
diff --git a/eclass/bash-completion-r1.eclass b/eclass/bash-completion-r1.eclass
new file mode 100644
index 000000000000..255197361229
--- /dev/null
+++ b/eclass/bash-completion-r1.eclass
@@ -0,0 +1,134 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: bash-completion-r1.eclass
+# @MAINTAINER:
+# mgorny@gentoo.org
+# @BLURB: A few quick functions to install bash-completion files
+# @EXAMPLE:
+#
+# @CODE
+# EAPI=5
+#
+# src_configure() {
+# econf \
+# --with-bash-completion-dir="$(get_bashcompdir)"
+# }
+#
+# src_install() {
+# default
+#
+# newbashcomp contrib/${PN}.bash-completion ${PN}
+# }
+# @CODE
+
inherit toolchain-funcs

# Reject EAPIs this eclass has not been ported to.
case ${EAPI:-0} in
	0|1|2|3|4|5) ;;
	*) die "EAPI ${EAPI} unsupported (yet)."
esac
+
# @FUNCTION: _bash-completion-r1_get_bashdir
# @INTERNAL
# @DESCRIPTION:
# First argument is name of the string in bash-completion.pc
# Second argument is the fallback directory if the string is not found
# @EXAMPLE:
# _bash-completion-r1_get_bashdir completionsdir /usr/share/bash-completion
_bash-completion-r1_get_bashdir() {
	debug-print-function ${FUNCNAME} "${@}"

	if $(tc-getPKG_CONFIG) --exists bash-completion &>/dev/null; then
		local path="$($(tc-getPKG_CONFIG) --variable=$1 bash-completion)"
		# we need to return unprefixed, so strip from what pkg-config returns
		# to us, bug #477692
		echo "${path#${EPREFIX}}"
	else
		# Quote the fallback to protect the path from word-splitting
		# and globbing.
		echo "$2"
	fi
}
+
# @FUNCTION: _bash-completion-r1_get_bashcompdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed bash-completion completions directory.
_bash-completion-r1_get_bashcompdir() {
	debug-print-function ${FUNCNAME} "${@}"

	_bash-completion-r1_get_bashdir completionsdir /usr/share/bash-completion/completions
}

# @FUNCTION: _bash-completion-r1_get_bashhelpersdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed bash-completion helpers directory.
_bash-completion-r1_get_bashhelpersdir() {
	debug-print-function ${FUNCNAME} "${@}"

	_bash-completion-r1_get_bashdir helpersdir /usr/share/bash-completion/helpers
}
+
# @FUNCTION: get_bashcompdir
# @DESCRIPTION:
# Get the bash-completion completions directory (EPREFIX-prefixed).
get_bashcompdir() {
	debug-print-function ${FUNCNAME} "${@}"

	echo "${EPREFIX}$(_bash-completion-r1_get_bashcompdir)"
}

# @FUNCTION: get_bashhelpersdir
# @INTERNAL
# @DESCRIPTION:
# Get the bash-completion helpers directory (EPREFIX-prefixed).
get_bashhelpersdir() {
	debug-print-function ${FUNCNAME} "${@}"

	echo "${EPREFIX}$(_bash-completion-r1_get_bashhelpersdir)"
}
+
# @FUNCTION: dobashcomp
# @USAGE: file [...]
# @DESCRIPTION:
# Install bash-completion files passed as args.  Has EAPI-dependent failure
# behavior (like doins).
dobashcomp() {
	debug-print-function ${FUNCNAME} "${@}"

	local target
	target=$(_bash-completion-r1_get_bashcompdir)
	# Subshell keeps the insinto change from leaking to the caller.
	(
		insinto "${target}"
		doins "${@}"
	)
}
+
# @FUNCTION: newbashcomp
# @USAGE: file newname
# @DESCRIPTION:
# Install bash-completion file under a new name.  Has EAPI-dependent failure
# behavior (like newins).
newbashcomp() {
	debug-print-function ${FUNCNAME} "${@}"

	local target
	target=$(_bash-completion-r1_get_bashcompdir)
	# Subshell keeps the insinto change from leaking to the caller.
	(
		insinto "${target}"
		newins "${@}"
	)
}
+
# @FUNCTION: bashcomp_alias
# @USAGE: <basename> <alias>...
# @DESCRIPTION:
# Alias <basename> completion to one or more commands (<alias>es).
bashcomp_alias() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${#} -lt 2 ]] && die "Usage: ${FUNCNAME} <basename> <alias>..."
	local base=${1} f compdir
	shift

	# The completions dir is loop-invariant; query pkg-config once
	# instead of once per alias.
	compdir=$(_bash-completion-r1_get_bashcompdir)
	for f; do
		dosym "${base}" "${compdir}/${f}"
	done
}
diff --git a/eclass/bash-completion.eclass b/eclass/bash-completion.eclass
new file mode 100644
index 000000000000..846e8c8e6345
--- /dev/null
+++ b/eclass/bash-completion.eclass
@@ -0,0 +1,101 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# DEPRECATED
+# This eclass has been superseded by bash-completion-r1 eclass.
+# Please modify your ebuilds to use that one instead.
+
+# @ECLASS: bash-completion.eclass
+# @MAINTAINER:
+# shell-tools@gentoo.org.
+# @AUTHOR:
+# Original author: Aaron Walker <ka0ttic@gentoo.org>
+# @BLURB: An Interface for installing contributed bash-completion scripts
+# @DESCRIPTION:
+# Simple eclass that provides an interface for installing
+# contributed (ie not included in bash-completion proper)
+# bash-completion scripts.
+#
+# Note: this eclass has been deprecated in favor of bash-completion-r1. Please
+# use that one instead.
+
+# @ECLASS-VARIABLE: BASHCOMPLETION_NAME
+# @DESCRIPTION:
+# Install the completion script with this name (see also dobashcompletion)
+
+# @ECLASS-VARIABLE: BASHCOMPFILES
+# @DESCRIPTION:
+# Space delimited list of files to install if dobashcompletion is called without
+# arguments.
+
inherit eutils

EXPORT_FUNCTIONS pkg_postinst

IUSE="bash-completion"

# Allow eclass to be inherited by eselect without a circular dependency
if [[ ${CATEGORY}/${PN} != app-admin/eselect ]]; then
	RDEPEND="bash-completion? ( app-admin/eselect )"
fi
# bash-completion itself is only needed at runtime, and only eventually.
PDEPEND="bash-completion? ( app-shells/bash-completion )"
+
# @FUNCTION: dobashcompletion
# @USAGE: [file] [new_file]
# @DESCRIPTION:
# The first argument is the location of the bash-completion script to install,
# and is required if BASHCOMPFILES is not set.  The second argument is the name
# the script will be installed as.  If BASHCOMPLETION_NAME is set, it overrides
# the second argument.  If no second argument is given and BASHCOMPLETION_NAME
# is not set, it will default to ${PN}.
dobashcompletion() {
	local f

	eqawarn "bash-completion.eclass has been deprecated."
	eqawarn "Please update your ebuilds to use bash-completion-r1 instead."

	if [[ -z ${1} && -z ${BASHCOMPFILES} ]]; then
		die "Usage: dobashcompletion [file] [new file]"
	fi

	if use bash-completion; then
		insinto /usr/share/bash-completion
		if [[ -n ${1} ]]; then
			# Explicit file: install it under the requested name.
			[[ -z ${BASHCOMPLETION_NAME} ]] && BASHCOMPLETION_NAME="${2:-${PN}}"
			newins "${1}" "${BASHCOMPLETION_NAME}" || die "Failed to install ${1}"
		else
			# No file given: install everything from BASHCOMPFILES,
			# silently skipping entries that do not exist.
			set -- ${BASHCOMPFILES}
			for f in "$@"; do
				if [[ -e ${f} ]]; then
					doins "${f}" || die "Failed to install ${f}"
				fi
			done
		fi
	fi
}
+
# @FUNCTION: bash-completion_pkg_postinst
# @DESCRIPTION:
# The bash-completion pkg_postinst function, which is exported.
# Tells the user which completion scripts were installed and how to
# enable them via eselect.
bash-completion_pkg_postinst() {
	local f

	if use bash-completion ; then
		elog "The following bash-completion scripts have been installed:"
		if [[ -n ${BASHCOMPLETION_NAME} ]]; then
			elog "  ${BASHCOMPLETION_NAME}"
		else
			set -- ${BASHCOMPFILES}
			for f in "$@"; do
				elog "  $(basename ${f})"
			done
		fi
		elog
		elog "To enable command-line completion on a per-user basis run:"
		elog "  eselect bashcomp enable <script>"
		elog
		elog "To enable command-line completion system-wide run:"
		elog "  eselect bashcomp enable --global <script>"
	fi
}
diff --git a/eclass/bitcoincore.eclass b/eclass/bitcoincore.eclass
new file mode 100644
index 000000000000..69ed9d69d6c1
--- /dev/null
+++ b/eclass/bitcoincore.eclass
@@ -0,0 +1,309 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: bitcoincore.eclass
+# @MAINTAINER:
+# Luke Dashjr <luke_gentoo_bitcoin@dashjr.org>
+# @BLURB: common code for Bitcoin Core ebuilds
+# @DESCRIPTION:
+# This eclass is used in Bitcoin Core ebuilds (bitcoin-qt, bitcoind,
+# libbitcoinconsensus) to provide a single common place for the common ebuild
+# stuff.
+#
+# The eclass provides all common dependencies as well as common use flags.
+
+has "${EAPI:-0}" 5 || die "EAPI=${EAPI} not supported"
+
+if [[ ! ${_BITCOINCORE_ECLASS} ]]; then
+
# Return success if ${1} is listed in BITCOINCORE_IUSE (any leading
# +/- default marker is ignored when matching).
in_bcc_iuse() {
	local flags=( ${BITCOINCORE_IUSE} )
	has "${1}" "${flags[@]#[+-]}"
}

# Return success if ${1} is a policy listed in BITCOINCORE_POLICY_PATCHES
# (any leading +/- default marker is ignored when matching).
in_bcc_policy() {
	local policies=( ${BITCOINCORE_POLICY_PATCHES} )
	has "${1}" "${policies[@]#[+-]}"
}
+
# Berkeley DB slot used for the wallet (see WALLET_DEPEND below).
DB_VER="4.8"
inherit autotools db-use eutils

# Without a pinned commit hash this is a live ebuild fetching from git.
if [ -z "$BITCOINCORE_COMMITHASH" ]; then
	inherit git-2
fi
+
+fi
+
EXPORT_FUNCTIONS src_prepare src_test src_install

# pkg_pretend only exists to print feature/policy messages, so only
# export it when at least one such message can apply.
if in_bcc_iuse ljr || in_bcc_iuse 1stclassmsg || in_bcc_iuse zeromq || [ -n "$BITCOINCORE_POLICY_PATCHES" ]; then
	EXPORT_FUNCTIONS pkg_pretend
fi
+
+if [[ ! ${_BITCOINCORE_ECLASS} ]]; then
+
+# @ECLASS-VARIABLE: BITCOINCORE_COMMITHASH
+# @DESCRIPTION:
+# Set this variable before the inherit line, to the upstream commit hash.
+
+# @ECLASS-VARIABLE: BITCOINCORE_IUSE
+# @DESCRIPTION:
+# Set this variable before the inherit line, to the USE flags supported.
+
+# @ECLASS-VARIABLE: BITCOINCORE_LJR_DATE
+# @DESCRIPTION:
+# Set this variable before the inherit line, to the datestamp of the ljr
+# patchset.
+
+# @ECLASS-VARIABLE: BITCOINCORE_POLICY_PATCHES
+# @DESCRIPTION:
+# Set this variable before the inherit line, to a space-delimited list of
+# supported policies.
+
+MyPV="${PV/_/}"
+MyPN="bitcoin"
+MyP="${MyPN}-${MyPV}"
+
+# These are expected to change in future versions
+DOCS="${DOCS} doc/README.md doc/release-notes.md"
+OPENSSL_DEPEND="dev-libs/openssl:0[-bindist]"
+WALLET_DEPEND="sys-libs/db:$(db_ver_to_slot "${DB_VER}")[cxx]"
+[ -n "${BITCOINCORE_LJR_PV}" ] || BITCOINCORE_LJR_PV="${PV}"
+
# Per-series settings: pinned libsecp256k1 dependency plus the commit
# ranges/filenames used to fetch the RBF and XT feature patches.
case "${PV}" in
0.10*)
	BITCOINCORE_SERIES="0.10.x"
	LIBSECP256K1_DEPEND="=dev-libs/libsecp256k1-0.0.0_pre20141212"
	case "${PVR}" in
	0.10.2)
		BITCOINCORE_RBF_DIFF="16f45600c8c372a738ffef544292864256382601...a23678edc70204599299459a206709a00e039db7"
		BITCOINCORE_RBF_PATCHFILE="${MyPN}-rbf-v0.10.2.patch"
		;;
	*)
		BITCOINCORE_RBF_DIFF="16f45600c8c372a738ffef544292864256382601...4890416cde655559eba09d3fd6f79db7d0d6314a"
		BITCOINCORE_RBF_PATCHFILE="${MyPN}-rbf-v0.10.2-r1.patch"
		;;
	esac
	BITCOINCORE_XT_DIFF="047a89831760ff124740fe9f58411d57ee087078...d4084b62c42c38bfe302d712b98909ab26ecce2f"
	;;
0.11*)
	BITCOINCORE_SERIES="0.11.x"
	LIBSECP256K1_DEPEND="=dev-libs/libsecp256k1-0.0.0_pre20150423"
	BITCOINCORE_RBF_DIFF="5f032c75eefb0fe8ff79ed9595da1112c05f5c4a...660b96d24916b8ef4e0677e5d6162e24e2db447e"
	BITCOINCORE_RBF_PATCHFILE="${MyPN}-rbf-v0.11.0rc3.patch"
	;;
9999*)
	BITCOINCORE_SERIES="9999"
	LIBSECP256K1_DEPEND=">dev-libs/libsecp256k1-0.0.0_pre20150422"
	;;
*)
	die "Unrecognised version"
	;;
esac
+
+LJR_PV() { echo "${BITCOINCORE_LJR_PV}.${1}${BITCOINCORE_LJR_DATE}"; }
+LJR_PATCHDIR="${MyPN}-$(LJR_PV ljr).patches"
+LJR_PATCH() { echo "${WORKDIR}/${LJR_PATCHDIR}/${MyPN}-$(LJR_PV ljr).$@.patch"; }
+LJR_PATCH_DESC="http://luke.dashjr.org/programs/${MyPN}/files/${MyPN}d/luke-jr/${BITCOINCORE_SERIES}/$(LJR_PV ljr)/${MyPN}-$(LJR_PV ljr).desc.txt"
+
HOMEPAGE="https://github.com/bitcoin/bitcoin"

if [ -z "$BITCOINCORE_COMMITHASH" ]; then
	# Live ebuild: sources come straight from git.
	EGIT_PROJECT='bitcoin'
	EGIT_REPO_URI="git://github.com/bitcoin/bitcoin.git https://github.com/bitcoin/bitcoin.git"
else
	# Release ebuild: GitHub tarball, plus the ljr patchset and any
	# optional feature/policy patch downloads.
	SRC_URI="https://github.com/${MyPN}/${MyPN}/archive/${BITCOINCORE_COMMITHASH}.tar.gz -> ${MyPN}-v${PV}${BITCOINCORE_SRC_SUFFIX}.tgz"
	if [ -z "${BITCOINCORE_NO_SYSLIBS}" ]; then
		SRC_URI="${SRC_URI} http://luke.dashjr.org/programs/${MyPN}/files/${MyPN}d/luke-jr/${BITCOINCORE_SERIES}/$(LJR_PV ljr)/${LJR_PATCHDIR}.txz -> ${LJR_PATCHDIR}.tar.xz"
	fi
	if in_bcc_iuse xt; then
		BITCOINXT_PATCHFILE="${MyPN}xt-v${PV}.patch"
		SRC_URI="${SRC_URI} xt? ( https://github.com/bitcoinxt/bitcoinxt/compare/${BITCOINCORE_XT_DIFF}.diff -> ${BITCOINXT_PATCHFILE} )"
	fi
	if in_bcc_policy rbf; then
		SRC_URI="${SRC_URI} bitcoin_policy_rbf? ( https://github.com/petertodd/bitcoin/compare/${BITCOINCORE_RBF_DIFF}.diff -> ${BITCOINCORE_RBF_PATCHFILE} )"
	fi
	S="${WORKDIR}/${MyPN}-${BITCOINCORE_COMMITHASH}"
fi
+
# Map BITCOINCORE_POLICY_PATCHES (e.g. "+rbf -cltv spamfilter") to IUSE
# entries of the form [+-]bitcoin_policy_<name>, preserving the optional
# default-enable/disable prefix.
bitcoincore_policy_iuse() {
	local mypolicy iuse_def new_BITCOINCORE_IUSE=
	for mypolicy in ${BITCOINCORE_POLICY_PATCHES}; do
		# A simple glob match is clearer (and cheaper) than a regex for
		# detecting a one-character prefix.
		if [[ ${mypolicy} == [+-]* ]]; then
			iuse_def=${mypolicy:0:1}
			mypolicy="${mypolicy:1}"
		else
			iuse_def=
		fi
		new_BITCOINCORE_IUSE="$new_BITCOINCORE_IUSE ${iuse_def}bitcoin_policy_${mypolicy}"
	done
	# Unquoted echo intentionally collapses the leading space.
	echo $new_BITCOINCORE_IUSE
}
IUSE="$IUSE $BITCOINCORE_IUSE $(bitcoincore_policy_iuse)"
# The RBF and XT patches conflict, so forbid enabling both at once.
if in_bcc_policy rbf && in_bcc_iuse xt; then
	REQUIRED_USE="${REQUIRED_USE} bitcoin_policy_rbf? ( !xt )"
fi
+
# Dependencies shared by RDEPEND and DEPEND.
BITCOINCORE_COMMON_DEPEND="
	${OPENSSL_DEPEND}
"
if [ "${BITCOINCORE_NEED_LIBSECP256K1}" = "1" ]; then
	BITCOINCORE_COMMON_DEPEND="${BITCOINCORE_COMMON_DEPEND} $LIBSECP256K1_DEPEND"
fi
if [ "${PN}" != "libbitcoinconsensus" ]; then
	BITCOINCORE_COMMON_DEPEND="${BITCOINCORE_COMMON_DEPEND} >=dev-libs/boost-1.52.0[threads(+)]"
fi
# Append "$1? ( $2 )" to the common depend string, but only when USE
# flag $1 is actually offered by this ebuild.
bitcoincore_common_depend_use() {
	in_bcc_iuse "$1" || return
	BITCOINCORE_COMMON_DEPEND="${BITCOINCORE_COMMON_DEPEND} $1? ( $2 )"
}
bitcoincore_common_depend_use upnp net-libs/miniupnpc
bitcoincore_common_depend_use wallet "${WALLET_DEPEND}"
bitcoincore_common_depend_use zeromq net-libs/zeromq
RDEPEND="${RDEPEND} ${BITCOINCORE_COMMON_DEPEND}"
DEPEND="${DEPEND} ${BITCOINCORE_COMMON_DEPEND}
	>=app-shells/bash-4.1
	sys-apps/sed
"
if [ "${BITCOINCORE_NEED_LEVELDB}" = "1" ]; then
	RDEPEND="${RDEPEND} virtual/bitcoin-leveldb"
fi
# The 0.10.x ljr binary patch is applied with `git apply` (see
# bitcoincore_prepare), hence the build-time git dependency.
if in_bcc_iuse ljr && [ "$BITCOINCORE_SERIES" = "0.10.x" ]; then
	DEPEND="${DEPEND} ljr? ( dev-vcs/git )"
fi
+
# Print the informational message for policy USE flag $1: $2 when the
# flag is enabled, $3 when disabled (either may be empty to stay silent).
# Sets the deliberately non-local bitcoincore_policymsg_flag=true whenever
# the flag exists in IUSE, so pkg_pretend knows messages were relevant.
bitcoincore_policymsg() {
	local USEFlag="bitcoin_policy_$1"
	in_iuse "${USEFlag}" || return
	if use "${USEFlag}"; then
		[ -n "$2" ] && einfo "$2"
	else
		[ -n "$3" ] && einfo "$3"
	fi
	bitcoincore_policymsg_flag=true
}
+
# Exported pkg_pretend: inform the user about enabled extra features and
# the consequences of each policy USE flag.
bitcoincore_pkg_pretend() {
	bitcoincore_policymsg_flag=false
	if use_if_iuse ljr || use_if_iuse 1stclassmsg || use_if_iuse xt || use_if_iuse zeromq; then
		einfo "Extra functionality improvements to Bitcoin Core are enabled."
		bitcoincore_policymsg_flag=true
	fi
	bitcoincore_policymsg cltv \
		"CLTV policy is enabled: Your node will recognise and assist OP_CHECKLOCKTIMEVERIFY (BIP65) transactions." \
		"CLTV policy is disabled: Your node will not recognise OP_CHECKLOCKTIMEVERIFY (BIP65) transactions."
	bitcoincore_policymsg cpfp \
		"CPFP policy is enabled: If you mine, you will give consideration to child transaction fees to pay for their parents." \
		"CPFP policy is disabled: If you mine, you will ignore transactions unless they have sufficient fee themselves, even if child transactions offer a fee to cover their cost."
	bitcoincore_policymsg dcmp \
		"Data Carrier Multi-Push policy is enabled: Your node will assist transactions with at most a single multiple-'push' data carrier output." \
		"Data Carrier Multi-Push policy is disabled: Your node will assist transactions with at most a single data carrier output with only a single 'push'."
	bitcoincore_policymsg rbf \
		"Replace By Fee policy is enabled: Your node will preferentially mine and relay transactions paying the highest fee, regardless of receive order." \
		"Replace By Fee policy is disabled: Your node will only accept the first transaction seen consuming a conflicting input, regardless of fee offered by later ones."
	bitcoincore_policymsg spamfilter \
		"Enhanced spam filter is enabled: A blacklist (seen as controversial by some) will be used by your node.  This may impact your ability to use some services (see link for a list)." \
		"Enhanced spam filter is disabled: Your node will not be checking for notorious spammers, and may assist them."
	# Only advertise the description link when at least one message fired.
	$bitcoincore_policymsg_flag && einfo "For more information on any of the above, see ${LJR_PATCH_DESC}"
}
+
# Apply the eclass patch stack in order: syslibs, ljr, 1stclassmsg, xt,
# zeromq, then any enabled policy patches.
bitcoincore_prepare() {
	# Keep the loop variable local; it used to leak into the global scope.
	local mypolicy

	if [ -n "${BITCOINCORE_NO_SYSLIBS}" ]; then
		true
	elif [ "${PV}" = "9999" ]; then
		epatch "${FILESDIR}/${PV}-syslibs.patch"
	else
		epatch "$(LJR_PATCH syslibs)"
	fi
	if use_if_iuse ljr; then
		if [ "${BITCOINCORE_SERIES}" = "0.10.x" ]; then
			# Regular epatch won't work with binary files
			local patchfile="$(LJR_PATCH ljrF)"
			einfo "Applying ${patchfile##*/} ..."
			git apply --whitespace=nowarn "${patchfile}" || die
		else
			epatch "$(LJR_PATCH ljrF)"
		fi
	fi
	if use_if_iuse 1stclassmsg; then
		epatch "$(LJR_PATCH 1stclassmsg)"
	fi
	if use_if_iuse xt; then
		epatch "${DISTDIR}/${BITCOINXT_PATCHFILE}"
	fi
	use_if_iuse zeromq && epatch "$(LJR_PATCH zeromq)"
	for mypolicy in ${BITCOINCORE_POLICY_PATCHES}; do
		mypolicy="${mypolicy#[-+]}"
		use bitcoin_policy_${mypolicy} || continue
		case "${mypolicy}" in
		rbf)
			# RBF is a fetched diff, not part of the ljr patchset.
			epatch "${DISTDIR}/${BITCOINCORE_RBF_PATCHFILE}"
			;;
		*)
			epatch "$(LJR_PATCH ${mypolicy})"
			;;
		esac
	done
}
+
# Regenerate the build system, then drop the bundled libraries that the
# system copies replace.
bitcoincore_autoreconf() {
	eautoreconf
	local bundled
	for bundled in src/leveldb src/secp256k1 ; do
		rm -r "${bundled}" || die
	done
}
+
# Exported src_prepare: apply all patches, then rebuild the autotools.
bitcoincore_src_prepare() {
	bitcoincore_prepare
	bitcoincore_autoreconf
}
+
# Shared econf wrapper: translates the common USE flags into configure
# options and disables everything a consumer ebuild must opt into.
bitcoincore_conf() {
	local my_econf=
	if use_if_iuse upnp; then
		my_econf="${my_econf} --with-miniupnpc --enable-upnp-default"
	else
		my_econf="${my_econf} --without-miniupnpc --disable-upnp-default"
	fi
	if use_if_iuse test; then
		my_econf="${my_econf} --enable-tests"
	else
		my_econf="${my_econf} --disable-tests"
	fi
	if use_if_iuse wallet; then
		my_econf="${my_econf} --enable-wallet"
	else
		my_econf="${my_econf} --disable-wallet"
	fi
	# NOTE(review): --disable-util-cli/--disable-util-tx presumably come
	# from the syslibs patchset (applied when BITCOINCORE_NO_SYSLIBS is
	# empty); vanilla configure only knows --without-utils.  Confirm.
	if [ -z "${BITCOINCORE_NO_SYSLIBS}" ]; then
		my_econf="${my_econf} --disable-util-cli --disable-util-tx"
	else
		my_econf="${my_econf} --without-utils"
	fi
	if [ "${BITCOINCORE_NEED_LEVELDB}" = "1" ]; then
		# Passing --with-system-leveldb fails if leveldb is not installed, so only use it for targets that use LevelDB
		my_econf="${my_econf} --with-system-leveldb"
	fi
	econf \
		--disable-ccache \
		--disable-static \
		--with-system-libsecp256k1 \
		--without-libs \
		--without-daemon \
		--without-gui \
		${my_econf} \
		"$@"
}
+
# Exported src_test: run the upstream check target.
bitcoincore_src_test() {
	emake check
}

# Exported src_install: default install, then drop the test binary for
# every consumer except libbitcoinconsensus.
bitcoincore_src_install() {
	default
	# NOTE(review): rm is deliberately non-fatal — presumably test_bitcoin
	# is absent when tests are disabled; confirm before adding `|| die`.
	[ "${PN}" = "libbitcoinconsensus" ] || rm "${D}/usr/bin/test_bitcoin"
}
+
+_BITCOINCORE_ECLASS=1
+fi
diff --git a/eclass/bsdmk.eclass b/eclass/bsdmk.eclass
new file mode 100644
index 000000000000..5a02d052341f
--- /dev/null
+++ b/eclass/bsdmk.eclass
@@ -0,0 +1,85 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: bsdmk.eclass
+# @MAINTAINER:
+# Otavio R. Piske "AngusYoung" <angusyoung@gentoo.org>
+# Diego Pettenò <flameeyes@gentoo.org>
+# Benigno B. Junior <bbj@gentoo.org>
+# @BLURB: Some functions for BSDmake
+
+inherit toolchain-funcs portability flag-o-matic
+
+EXPORT_FUNCTIONS src_compile src_install
+
+RDEPEND=""
+# this should actually be BDEPEND, but this works.
+DEPEND="virtual/pmake"
+
+ESED="/usr/bin/sed"
+
+# @ECLASS-VARIABLE: mymakeopts
+# @DESCRIPTION:
+# Options for bsd-make
+
+# @FUNCTION: append-opt
+# @USAGE: < options >
+# @DESCRIPTION:
+# append options to enable or disable features
+append-opt() {
+	# Join all arguments into the single mymakeopts string. $* (not $@)
+	# is correct here: we want one IFS-joined word inside the quoted
+	# assignment; an unquoted $@ inside a word is undefined behaviour
+	# (shellcheck SC2124).
+	mymakeopts="${mymakeopts} $*"
+}
+
+# @FUNCTION: mkmake
+# @USAGE: [ options ]
+# @DESCRIPTION:
+# calls bsd-make command with the given options, passing ${mymakeopts} to
+# enable ports to useflags bridge.
+mkmake() {
+	# Resolve the bsd-make binary once and cache it in BMAKE.
+	[[ -z ${BMAKE} ]] && BMAKE="$(get_bmake)"
+
+	# Make the toolchain variables visible to the make invocation.
+	tc-export CC CXX LD RANLIB
+
+	# NO_WERROR= keeps warnings non-fatal; STRIP= clears the BSD default
+	# of -s so portage can do the stripping itself.
+	${BMAKE} ${MAKEOPTS} ${EXTRA_EMAKE} ${mymakeopts} NO_WERROR= STRIP= "$@"
+}
+
+# @FUNCTION: mkinstall
+# @USAGE: [ options ]
+# @DESCRIPTION:
+# Calls "bsd-make install" with the given options, passing ${mymakeopts} to
+# enable ports to useflags bridge
+mkinstall() {
+	# Resolve the bsd-make binary once and cache it in BMAKE.
+	[[ -z ${BMAKE} ]] && BMAKE="$(get_bmake)"
+
+	# STRIP= will replace the default value of -s, leaving to portage the
+	# task of stripping executables.
+	${BMAKE} ${mymakeopts} NO_WERROR= STRIP= MANSUBDIR= DESTDIR="${D}" "$@" install
+}
+
+# @FUNCTION: dummy_mk
+# @USAGE: < dirnames >
+# @DESCRIPTION:
+# removes the specified subdirectories and creates a dummy makefile in them
+# useful to remove the need for "minimal" patches
+dummy_mk() {
+	# Scope the loop variable so it no longer leaks into the ebuild
+	# environment after the call.
+	local dir
+	# NOTE(review): $@ is deliberately left unquoted so callers passing
+	# a whitespace-separated list in a single argument keep working.
+	for dir in $@; do
+		[ -d "${dir}" ] || ewarn "dummy_mk called on a non-existing directory: $dir"
+		[ -f "${dir}/Makefile" ] || ewarn "dummy_mk called on a directory without Makefile: $dir"
+		# Replace the subdirectory's build logic with a no-op Makefile.
+		echo ".include <bsd.lib.mk>" > "${dir}/Makefile"
+	done
+}
+
+# @FUNCTION: bsdmk_src_compile
+# @DESCRIPTION:
+# The bsdmk src_compile function, which is exported
+bsdmk_src_compile() {
+	# Delegate to mkmake; die with a clear message on failure.
+	mkmake "$@" || die "make failed"
+}
+
+# @FUNCTION: bsdmk_src_install
+# @DESCRIPTION:
+# The bsdmk src_install function, which is exported
+bsdmk_src_install() {
+	# Delegate to mkinstall; die with a clear message on failure.
+	mkinstall || die "install failed"
+}
diff --git a/eclass/bzr.eclass b/eclass/bzr.eclass
new file mode 100644
index 000000000000..9b457f2b6d77
--- /dev/null
+++ b/eclass/bzr.eclass
@@ -0,0 +1,341 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: bzr.eclass
+# @MAINTAINER:
+# Bazaar team <bazaar@gentoo.org>
+# @AUTHOR:
+# Jorge Manuel B. S. Vicetto <jmbsvicetto@gentoo.org>
+# Mark Lee <bzr-gentoo-overlay@lazymalevolence.com>
+# Ulrich Müller <ulm@gentoo.org>
+# Christian Faulhammer <fauli@gentoo.org>
+# @BLURB: generic fetching functions for the Bazaar VCS
+# @DESCRIPTION:
+# The bzr.eclass provides functions to fetch, unpack, patch, and
+# bootstrap sources from repositories of the Bazaar distributed version
+# control system. The eclass was originally derived from git.eclass.
+#
+# Note: Just set EBZR_REPO_URI to the URI of the branch and src_unpack()
+# of this eclass will export the branch to ${WORKDIR}/${P}.
+
+inherit eutils
+
+EBZR="bzr.eclass"
+
+case "${EAPI:-0}" in
+ 0|1) EXPORT_FUNCTIONS src_unpack ;;
+ *) EXPORT_FUNCTIONS src_unpack src_prepare ;;
+esac
+
+DEPEND=">=dev-vcs/bzr-2.0.1"
+case "${EAPI:-0}" in
+ 0|1) ;;
+ *) [[ ${EBZR_REPO_URI%%:*} = sftp ]] \
+ && DEPEND=">=dev-vcs/bzr-2.0.1[sftp]" ;;
+esac
+
+# @ECLASS-VARIABLE: EBZR_STORE_DIR
+# @DESCRIPTION:
+# The directory to store all fetched Bazaar live sources.
+: ${EBZR_STORE_DIR:=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/bzr-src}
+
+# @ECLASS-VARIABLE: EBZR_UNPACK_DIR
+# @DESCRIPTION:
+# The working directory where the sources are copied to.
+: ${EBZR_UNPACK_DIR:=${WORKDIR}/${P}}
+
+# @ECLASS-VARIABLE: EBZR_INIT_REPO_CMD
+# @DESCRIPTION:
+# The Bazaar command to initialise a shared repository.
+: ${EBZR_INIT_REPO_CMD:="bzr init-repository --no-trees"}
+
+# @ECLASS-VARIABLE: EBZR_FETCH_CMD
+# @DESCRIPTION:
+# The Bazaar command to fetch the sources.
+: ${EBZR_FETCH_CMD:="bzr branch --no-tree"}
+
+# @ECLASS-VARIABLE: EBZR_UPDATE_CMD
+# @DESCRIPTION:
+# The Bazaar command to update the sources.
+: ${EBZR_UPDATE_CMD:="bzr pull"}
+
+# @ECLASS-VARIABLE: EBZR_EXPORT_CMD
+# @DESCRIPTION:
+# The Bazaar command to export a branch.
+: ${EBZR_EXPORT_CMD:="bzr export"}
+
+# @ECLASS-VARIABLE: EBZR_CHECKOUT_CMD
+# @DESCRIPTION:
+# The Bazaar command to checkout a branch.
+: ${EBZR_CHECKOUT_CMD:="bzr checkout --lightweight -q"}
+
+# @ECLASS-VARIABLE: EBZR_REVNO_CMD
+# @DESCRIPTION:
+# The Bazaar command to list a revision number of the branch.
+: ${EBZR_REVNO_CMD:="bzr revno"}
+
+# @ECLASS-VARIABLE: EBZR_OPTIONS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The options passed to the fetch and update commands.
+
+# @ECLASS-VARIABLE: EBZR_REPO_URI
+# @DEFAULT_UNSET
+# @REQUIRED
+# @DESCRIPTION:
+# The repository URI for the source package.
+#
+# Note: If the ebuild uses an sftp:// URI, then in EAPI 0 or 1 it must
+# make sure that dev-vcs/bzr was built with USE="sftp". In EAPI 2 or
+# later, the eclass will depend on dev-vcs/bzr[sftp].
+
+# @ECLASS-VARIABLE: EBZR_INITIAL_URI
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The URI used for initial branching of the source repository. If this
+# variable is set, the initial branch will be cloned from the location
+# specified, followed by a pull from ${EBZR_REPO_URI}. This is intended
+# for special cases, e.g. when download from the original repository is
+# slow, but a fast mirror exists but may be out of date.
+#
+# Normally, this variable needs not be set.
+
+# @ECLASS-VARIABLE: EBZR_BOOTSTRAP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Bootstrap script or command like autogen.sh or etc.
+
+# @ECLASS-VARIABLE: EBZR_PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# bzr.eclass can apply patches in bzr_bootstrap(). You can use regular
+# expressions in this variable like *.diff or *.patch and the like.
+# Note: These patches will be applied before EBZR_BOOTSTRAP is processed.
+#
+# Patches are searched both in ${PWD} and ${FILESDIR}. If not found in
+# either location, the installation dies.
+
+# @ECLASS-VARIABLE: EBZR_PROJECT
+# @DESCRIPTION:
+# The project name of your ebuild. Normally, the branch will be stored
+# in the ${EBZR_STORE_DIR}/${EBZR_PROJECT} directory.
+#
+# If EBZR_BRANCH is set (see below), then a shared repository will be
+# created in that directory, and the branch will be located in
+# ${EBZR_STORE_DIR}/${EBZR_PROJECT}/${EBZR_BRANCH}.
+: ${EBZR_PROJECT:=${PN}}
+
+# @ECLASS-VARIABLE: EBZR_BRANCH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The directory where to store the branch within a shared repository,
+# relative to ${EBZR_STORE_DIR}/${EBZR_PROJECT}.
+#
+# This variable should be set if there are several live ebuilds for
+# different branches of the same upstream project. The branches can
+# then share the same repository in EBZR_PROJECT, which will save both
+# data traffic volume and disk space.
+#
+# If there is only a live ebuild for one single branch, EBZR_BRANCH
+# needs not be set. In this case, the branch will be stored in a
+# stand-alone repository directly in EBZR_PROJECT.
+
+# @ECLASS-VARIABLE: EBZR_REVISION
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Revision to fetch, defaults to the latest
+# (see http://bazaar-vcs.org/BzrRevisionSpec or bzr help revisionspec).
+
+# @ECLASS-VARIABLE: EBZR_OFFLINE
+# @DESCRIPTION:
+# Set this variable to a non-empty value to disable automatic updating
+# of a bzr source tree. This is intended to be set outside the ebuild
+# by users.
+: ${EBZR_OFFLINE=${EVCS_OFFLINE}}
+
+# @ECLASS-VARIABLE: EBZR_WORKDIR_CHECKOUT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If this variable is set to a non-empty value, EBZR_CHECKOUT_CMD will
+# be used instead of EBZR_EXPORT_CMD to copy the sources to WORKDIR.
+
+# @FUNCTION: bzr_initial_fetch
+# @USAGE: <repository URI> <branch directory>
+# @DESCRIPTION:
+# Internal function, retrieves the source code from a repository for the
+# first time, using ${EBZR_FETCH_CMD}.
+bzr_initial_fetch() {
+	local repo_uri=$1 branch_dir=$2
+
+	# Offline mode cannot be honoured on a first fetch — warn, then
+	# proceed with the network operation anyway.
+	if [[ -n "${EBZR_OFFLINE}" ]]; then
+		ewarn "EBZR_OFFLINE cannot be used when there is no local branch yet."
+	fi
+
+	# fetch branch
+	einfo "bzr branch start -->"
+	einfo "   repository: ${repo_uri} => ${branch_dir}"
+
+	${EBZR_FETCH_CMD} ${EBZR_OPTIONS} "${repo_uri}" "${branch_dir}" \
+		|| die "${EBZR}: can't branch from ${repo_uri}"
+}
+
+# @FUNCTION: bzr_update
+# @USAGE: <repository URI> <branch directory>
+# @DESCRIPTION:
+# Internal function, updates the source code from a repository, using
+# ${EBZR_UPDATE_CMD}.
+bzr_update() {
+	local repo_uri=$1 branch_dir=$2
+
+	# Honour offline mode: skip the pull entirely when EBZR_OFFLINE is
+	# set, leaving the existing branch untouched.
+	if [[ -n "${EBZR_OFFLINE}" ]]; then
+		einfo "skipping bzr pull -->"
+		einfo "   repository: ${repo_uri}"
+	else
+		# update branch
+		einfo "bzr pull start -->"
+		einfo "   repository: ${repo_uri}"
+
+		pushd "${branch_dir}" > /dev/null \
+			|| die "${EBZR}: can't chdir to ${branch_dir}"
+		${EBZR_UPDATE_CMD} ${EBZR_OPTIONS} "${repo_uri}" \
+			|| die "${EBZR}: can't pull from ${repo_uri}"
+		popd > /dev/null
+	fi
+}
+
+# @FUNCTION: bzr_fetch
+# @DESCRIPTION:
+# Wrapper function to fetch sources from a Bazaar repository with
+# bzr branch or bzr pull, depending on whether there is an existing
+# working copy.
+bzr_fetch() {
+	local repo_dir branch_dir
+	# Remember the sandbox whitelist so the broad addwrite below can be
+	# fully reverted once the store directory exists.
+	local save_sandbox_write=${SANDBOX_WRITE}
+
+	[[ -n ${EBZR_REPO_URI} ]] || die "${EBZR}: EBZR_REPO_URI is empty"
+
+	if [[ ! -d ${EBZR_STORE_DIR} ]] ; then
+		# addwrite / is needed because the store dir may live outside
+		# any whitelisted path; the whitelist is restored right after.
+		addwrite /
+		mkdir -p "${EBZR_STORE_DIR}" \
+			|| die "${EBZR}: can't mkdir ${EBZR_STORE_DIR}"
+		SANDBOX_WRITE=${save_sandbox_write}
+	fi
+
+	pushd "${EBZR_STORE_DIR}" > /dev/null \
+		|| die "${EBZR}: can't chdir to ${EBZR_STORE_DIR}"
+
+	repo_dir=${EBZR_STORE_DIR}/${EBZR_PROJECT}
+	branch_dir=${repo_dir}${EBZR_BRANCH:+/${EBZR_BRANCH}}
+
+	addwrite "${EBZR_STORE_DIR}"
+
+	if [[ ! -d ${branch_dir}/.bzr ]]; then
+		# First fetch: create the shared repository (if EBZR_BRANCH is
+		# used) before branching into it.
+		if [[ ${repo_dir} != "${branch_dir}" && ! -d ${repo_dir}/.bzr ]]; then
+			einfo "creating shared bzr repository: ${repo_dir}"
+			${EBZR_INIT_REPO_CMD} "${repo_dir}" \
+				|| die "${EBZR}: can't create shared repository"
+		fi
+
+		if [[ -z ${EBZR_INITIAL_URI} ]]; then
+			bzr_initial_fetch "${EBZR_REPO_URI}" "${branch_dir}"
+		else
+			# Workaround for faster initial download. This clones the
+			# branch from a fast server (which may be out of date), and
+			# subsequently pulls from the slow original repository.
+			bzr_initial_fetch "${EBZR_INITIAL_URI}" "${branch_dir}"
+			if [[ ${EBZR_REPO_URI} != "${EBZR_INITIAL_URI}" ]]; then
+				EBZR_UPDATE_CMD="${EBZR_UPDATE_CMD} --remember --overwrite" \
+					EBZR_OFFLINE="" \
+					bzr_update "${EBZR_REPO_URI}" "${branch_dir}"
+			fi
+		fi
+	else
+		bzr_update "${EBZR_REPO_URI}" "${branch_dir}"
+	fi
+
+	# Restore sandbox environment
+	SANDBOX_WRITE=${save_sandbox_write}
+
+	cd "${branch_dir}" || die "${EBZR}: can't chdir to ${branch_dir}"
+
+	# Save revision number in environment. #311101
+	export EBZR_REVNO=$(${EBZR_REVNO_CMD})
+
+	# Copy the sources into WORKDIR, either as a lightweight checkout
+	# (keeps .bzr metadata) or as a plain export.
+	if [[ -n ${EBZR_WORKDIR_CHECKOUT} ]]; then
+		einfo "checking out ..."
+		${EBZR_CHECKOUT_CMD} ${EBZR_REVISION:+-r ${EBZR_REVISION}} \
+			. "${EBZR_UNPACK_DIR}" || die "${EBZR}: checkout failed"
+	else
+		einfo "exporting ..."
+		${EBZR_EXPORT_CMD} ${EBZR_REVISION:+-r ${EBZR_REVISION}} \
+			"${EBZR_UNPACK_DIR}" . || die "${EBZR}: export failed"
+	fi
+	einfo \
+		"revision ${EBZR_REVISION:-${EBZR_REVNO}} is now in ${EBZR_UNPACK_DIR}"
+
+	popd > /dev/null
+}
+
+# @FUNCTION: bzr_bootstrap
+# @DESCRIPTION:
+# Apply patches in ${EBZR_PATCHES} and run ${EBZR_BOOTSTRAP} if specified.
+bzr_bootstrap() {
+	local patch lpatch
+
+	pushd "${S}" > /dev/null || die "${EBZR}: can't chdir to ${S}"
+
+	if [[ -n ${EBZR_PATCHES} ]] ; then
+		einfo "apply patches -->"
+
+		# Each entry is tried first as a path relative to ${PWD}; if it
+		# is not a file there it is treated as a (possibly wildcarded)
+		# pattern under ${FILESDIR}.
+		for patch in ${EBZR_PATCHES} ; do
+			if [[ -f ${patch} ]] ; then
+				epatch ${patch}
+			else
+				# This loop takes care of wildcarded patches given via
+				# EBZR_PATCHES in an ebuild
+				for lpatch in "${FILESDIR}"/${patch} ; do
+					if [[ -f ${lpatch} ]] ; then
+						epatch ${lpatch}
+					else
+						die "${EBZR}: ${patch} is not found"
+					fi
+				done
+			fi
+		done
+	fi
+
+	if [[ -n ${EBZR_BOOTSTRAP} ]] ; then
+		einfo "begin bootstrap -->"
+
+		if [[ -f ${EBZR_BOOTSTRAP} ]] && [[ -x ${EBZR_BOOTSTRAP} ]] ; then
+			einfo "   bootstrap with a file: ${EBZR_BOOTSTRAP}"
+			"./${EBZR_BOOTSTRAP}" \
+				|| die "${EBZR}: can't execute EBZR_BOOTSTRAP"
+		else
+			# NOTE(review): the quoted expansion runs EBZR_BOOTSTRAP as a
+			# single word, so a multi-word command ("autogen.sh --foo")
+			# would not work despite the message — confirm intent.
+			einfo "   bootstrap with commands: ${EBZR_BOOTSTRAP}"
+			"${EBZR_BOOTSTRAP}" \
+				|| die "${EBZR}: can't eval EBZR_BOOTSTRAP"
+		fi
+	fi
+
+	popd > /dev/null
+}
+
+# @FUNCTION: bzr_src_unpack
+# @DESCRIPTION:
+# Default src_unpack(), calls bzr_fetch. For EAPIs 0 and 1, also calls
+# bzr_src_prepare.
+bzr_src_unpack() {
+ bzr_fetch
+ case "${EAPI:-0}" in
+ 0|1) bzr_src_prepare ;;
+ esac
+}
+
+# @FUNCTION: bzr_src_prepare
+# @DESCRIPTION:
+# Default src_prepare(), calls bzr_bootstrap.
+bzr_src_prepare() {
+	# Applies EBZR_PATCHES and runs EBZR_BOOTSTRAP, if set.
+	bzr_bootstrap
+}
diff --git a/eclass/cannadic.eclass b/eclass/cannadic.eclass
new file mode 100644
index 000000000000..015e13451a6a
--- /dev/null
+++ b/eclass/cannadic.eclass
@@ -0,0 +1,150 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cannadic.eclass
+# @AUTHOR:
+# Original author: Mamoru KOMACHI <usata@gentoo.org>
+# @BLURB: Function for Canna compatible dictionaries
+# @DESCRIPTION:
+# The cannadic eclass is used for installation and setup of Canna
+# compatible dictionaries within the Portage system.
+
+
+EXPORT_FUNCTIONS src_install pkg_setup pkg_postinst pkg_postrm
+
+IUSE=""
+
+HOMEPAGE="http://canna.sourceforge.jp/" # you need to change this!
+SRC_URI="mirror://gentoo/${P}.tar.gz"
+
+LICENSE="public-domain"
+SLOT="0"
+
+S="${WORKDIR}"
+
+DICSDIRFILE="${FILESDIR}/*.dics.dir"
+CANNADICS="${CANNADICS}" # (optional)
+DOCS="README*"
+
+# You don't need to modify these
+#local cannadir dicsdir
+cannadir="${ROOT}/var/lib/canna/dic/canna"
+dicsdir="${ROOT}/var/lib/canna/dic/dics.d"
+
+# @FUNCTION: cannadic_pkg_setup
+# @DESCRIPTION:
+# Sets up cannadic dir
+cannadic_pkg_setup() {
+
+	# NOTE(review): keepdir/fowners/fperms are install-phase helpers
+	# operating relative to ${D}, yet this runs in pkg_setup and
+	# ${cannadir} is already ${ROOT}-prefixed — verify this actually
+	# does what is intended outside src_install.
+	keepdir $cannadir
+	fowners bin:bin $cannadir
+	fperms 0775 $cannadir
+}
+
+# @FUNCTION: cannadic-install
+# @DESCRIPTION:
+# Installs dictionaries to cannadir
+cannadic-install() {
+
+	# Install the given dictionary files into ${cannadir}, group-writable
+	# and owned by bin:bin so cannaserver can manage them.
+	insinto $cannadir
+	insopts -m0664 -o bin -g bin
+	doins "$@"
+}
+
+# @FUNCTION: dicsdir-install
+# @DESCRIPTION:
+# Installs dics.dir from ${DICSDIRFILE}
+dicsdir-install() {
+
+	# Install the package's *.dics.dir fragment(s); they are later
+	# concatenated into dics.dir by update-cannadic-dir.
+	insinto ${dicsdir}
+	doins ${DICSDIRFILE}
+}
+
+# @FUNCTION: cannadic_src_install
+# @DESCRIPTION:
+# Installs all dictionaries under ${WORKDIR}
+# plus dics.dir and docs
+cannadic_src_install() {
+
+	# Install every Canna dictionary (*.cbd/*.cld/*.ctd and *.t) found
+	# in ${WORKDIR}. stderr of the whole loop is silenced so globs that
+	# match nothing do not produce noise.
+	for f in *.c[btl]d *.t ; do
+		cannadic-install $f
+	done 2>/dev/null
+
+	dicsdir-install || die
+
+	dodoc ${DOCS}
+}
+
+# @FUNCTION: update-cannadic-dir
+# @DESCRIPTION:
+# Updates dics.dir for Canna Server, script for this part taken from Debian GNU/Linux
+#
+# compiles dics.dir files for Canna Server
+# Copyright 2001 ISHIKAWA Mutsumi
+# Licensed under the GNU General Public License, version 2. See the file
+# /usr/portage/license/GPL-2 or <http://www.gnu.org/copyleft/gpl.txt>.
+update-cannadic-dir() {
+
+	einfo
+	einfo "Updating dics.dir for Canna ..."
+	einfo
+
+	# write new dics.dir file in case we are interrupted
+	cat >${cannadir}/dics.dir.update-new<<-EOF
+	# dics.dir -- automatically generated file by Portage.
+	# DO NOT EDIT BY HAND.
+	EOF
+
+	# Concatenate every installed per-package fragment into the
+	# temporary file, then atomically replace the live dics.dir.
+	for file in ${dicsdir}/*.dics.dir ; do
+		echo "# $file" >> ${cannadir}/dics.dir.update-new
+		cat $file >> ${cannadir}/dics.dir.update-new
+		einfo "Added $file."
+	done
+
+	mv ${cannadir}/dics.dir.update-new ${cannadir}/dics.dir
+
+	einfo
+	einfo "Done."
+	einfo
+}
+
+# @FUNCTION: cannadic_pkg_postinst
+# @DESCRIPTION:
+# Updates dics.dir and print out notice after install
+cannadic_pkg_postinst() {
+	# Rebuild dics.dir with the newly installed dictionaries, then tell
+	# the user how to enable them.
+	update-cannadic-dir
+	einfo
+	einfo "Please restart cannaserver to fit the changes."
+	einfo "You need to modify your config file (~/.canna) to enable dictionaries."
+
+	if [ -n "${CANNADICS}" ] ; then
+		einfo "e.g) add $(for d in ${CANNADICS}; do
+			echo -n "\"$d\" "
+		done)to section use-dictionary()."
+		einfo "For details, see documents under /usr/share/doc/${PF}"
+	fi
+
+	einfo "If you do not have ~/.canna, you can find sample files in /usr/share/canna."
+	ewarn "If you are upgrading from existing dictionary, you may need to recreate"
+	ewarn "user dictionary if you have one."
+	einfo
+}
+
+# @FUNCTION: cannadic_pkg_postrm
+# @DESCRIPTION:
+# Updates dics.dir and print out notice after uninstall
+cannadic_pkg_postrm() {
+	# Rebuild dics.dir without the removed dictionaries, then tell the
+	# user how to clean up their configuration.
+	update-cannadic-dir
+	einfo
+	einfo "Please restart cannaserver to fit changes."
+	einfo "and modify your config file (~/.canna) to disable dictionary."
+
+	if [ -n "${CANNADICS}" ] ; then
+		einfo "e.g) delete $(for d in ${CANNADICS}; do
+			echo -n "\"$d\" "
+		done)from section use-dictionary()."
+	fi
+
+	einfo
+}
diff --git a/eclass/cdrom.eclass b/eclass/cdrom.eclass
new file mode 100644
index 000000000000..76e6b4974e38
--- /dev/null
+++ b/eclass/cdrom.eclass
@@ -0,0 +1,247 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cdrom.eclass
+# @MAINTAINER:
+# games@gentoo.org
+# @BLURB: Functions for CD-ROM handling
+# @DESCRIPTION:
+# Acquire cd(s) for those lovely cd-based emerges. Yes, this violates
+# the whole 'non-interactive' policy, but damnit I want CD support!
+#
+# With these cdrom functions we handle all the user interaction and
+# standardize everything. All you have to do is call cdrom_get_cds()
+# and when the function returns, you can assume that the cd has been
+# found at CDROM_ROOT.
+
+if [[ -z ${_CDROM_ECLASS} ]]; then
+_CDROM_ECLASS=1
+
+inherit portability
+
+# @ECLASS-VARIABLE: CDROM_OPTIONAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# By default, the eclass sets PROPERTIES="interactive" on the assumption
+# that people will be using these. If your package optionally supports
+# disc-based installs, then set this to "yes", and we'll set things
+# conditionally based on USE=cdinstall.
+if [[ ${CDROM_OPTIONAL} == "yes" ]] ; then
+ IUSE="cdinstall"
+ PROPERTIES="cdinstall? ( interactive )"
+else
+ PROPERTIES="interactive"
+fi
+
+# @FUNCTION: cdrom_get_cds
+# @USAGE: <file on cd1> [file on cd2] [file on cd3] [...]
+# @DESCRIPTION:
+# The function will attempt to locate a cd based upon a file that is on
+# the cd. The more files you give this function, the more cds the cdrom
+# functions will handle.
+#
+# Normally the cdrom functions will refer to the cds as 'cd #1', 'cd #2',
+# etc... If you want to give the cds better names, then just export
+# the appropriate CDROM_NAME variable before calling cdrom_get_cds().
+# Use CDROM_NAME for one cd, or CDROM_NAME_# for multiple cds. You can
+# also use the CDROM_NAME_SET bash array.
+#
+# For those multi cd ebuilds, see the cdrom_load_next_cd() function.
+cdrom_get_cds() {
+	# first we figure out how many cds we're dealing with by
+	# the # of files they gave us
+	local cdcnt=0
+	local f=
+	for f in "$@" ; do
+		((++cdcnt))
+		export CDROM_CHECK_${cdcnt}="$f"
+	done
+	export CDROM_TOTAL_CDS=${cdcnt}
+	export CDROM_CURRENT_CD=1
+
+	# now we see if the user gave use CD_ROOT ...
+	# if they did, let's just believe them that it's correct
+	if [[ -n ${CD_ROOT}${CD_ROOT_1} ]] ; then
+		local var=
+		cdcnt=0
+		# Every CD must be resolvable from either CD_ROOT_# or the
+		# catch-all CD_ROOT; anything missing is a fatal user error.
+		while [[ ${cdcnt} -lt ${CDROM_TOTAL_CDS} ]] ; do
+			((++cdcnt))
+			var="CD_ROOT_${cdcnt}"
+			[[ -z ${!var} ]] && var="CD_ROOT"
+			if [[ -z ${!var} ]] ; then
+				eerror "You must either use just the CD_ROOT"
+				eerror "or specify ALL the CD_ROOT_X variables."
+				eerror "In this case, you will need" \
+					"${CDROM_TOTAL_CDS} CD_ROOT_X variables."
+				die "could not locate CD_ROOT_${cdcnt}"
+			fi
+		done
+		export CDROM_ROOT=${CD_ROOT_1:-${CD_ROOT}}
+		einfo "Found CD #${CDROM_CURRENT_CD} root at ${CDROM_ROOT}"
+		export CDROM_SET=-1
+		# CDROM_CHECK_1 may be a colon-separated set of alternatives;
+		# record which alternative (CDROM_SET/CDROM_MATCH) was found.
+		for f in ${CDROM_CHECK_1//:/ } ; do
+			((++CDROM_SET))
+			[[ -e ${CDROM_ROOT}/${f} ]] && break
+		done
+		export CDROM_MATCH=${f}
+		return
+	fi
+
+	# User didn't help us out so lets make sure they know they can
+	# simplify the whole process ...
+	if [[ ${CDROM_TOTAL_CDS} -eq 1 ]] ; then
+		einfo "This ebuild will need the ${CDROM_NAME:-cdrom for ${PN}}"
+		echo
+		einfo "If you do not have the CD, but have the data files"
+		einfo "mounted somewhere on your filesystem, just export"
+		einfo "the variable CD_ROOT so that it points to the"
+		einfo "directory containing the files."
+		echo
+		einfo "For example:"
+		einfo "export CD_ROOT=/mnt/cdrom"
+		echo
+	else
+		if [[ -n ${CDROM_NAME_SET} ]] ; then
+			# Translate the CDROM_NAME_SET array into CDROM_NAME_#
+			cdcnt=0
+			while [[ ${cdcnt} -lt ${CDROM_TOTAL_CDS} ]] ; do
+				((++cdcnt))
+				export CDROM_NAME_${cdcnt}="${CDROM_NAME_SET[$((${cdcnt}-1))]}"
+			done
+		fi
+
+		einfo "This package will need access to ${CDROM_TOTAL_CDS} cds."
+		cdcnt=0
+		# NOTE(review): 'var' is only declared local in the CD_ROOT
+		# branch above; here it leaks into the caller's scope.
+		while [[ ${cdcnt} -lt ${CDROM_TOTAL_CDS} ]] ; do
+			((++cdcnt))
+			var="CDROM_NAME_${cdcnt}"
+			[[ ! -z ${!var} ]] && einfo " CD ${cdcnt}: ${!var}"
+		done
+		echo
+		einfo "If you do not have the CDs, but have the data files"
+		einfo "mounted somewhere on your filesystem, just export"
+		einfo "the following variables so they point to the right place:"
+		einfon ""
+		cdcnt=0
+		while [[ ${cdcnt} -lt ${CDROM_TOTAL_CDS} ]] ; do
+			((++cdcnt))
+			echo -n " CD_ROOT_${cdcnt}"
+		done
+		echo
+		einfo "Or, if you have all the files in the same place, or"
+		einfo "you only have one cdrom, you can export CD_ROOT"
+		einfo "and that place will be used as the same data source"
+		einfo "for all the CDs."
+		echo
+		einfo "For example:"
+		einfo "export CD_ROOT_1=/mnt/cdrom"
+		echo
+	fi
+
+	export CDROM_SET=""
+	export CDROM_CURRENT_CD=0
+	cdrom_load_next_cd
+}
+
+# @FUNCTION: cdrom_load_next_cd
+# @DESCRIPTION:
+# Some packages are so big they come on multiple CDs. When you're done
+# reading files off a CD and want access to the next one, just call this
+# function. Again, all the messy details of user interaction are taken
+# care of for you. Once this returns, just read the variable CDROM_ROOT
+# for the location of the mounted CD. Note that you can only go forward
+# in the CD list, so make sure you only call this function when you're
+# done using the current CD.
+cdrom_load_next_cd() {
+	local var
+	((++CDROM_CURRENT_CD))
+
+	unset CDROM_ROOT
+	# A user-supplied CD_ROOT_#/CD_ROOT wins; otherwise scan mounted
+	# media for the check-file belonging to this CD.
+	var=CD_ROOT_${CDROM_CURRENT_CD}
+	[[ -z ${!var} ]] && var="CD_ROOT"
+	if [[ -z ${!var} ]] ; then
+		var="CDROM_CHECK_${CDROM_CURRENT_CD}"
+		_cdrom_locate_file_on_cd ${!var}
+	else
+		export CDROM_ROOT=${!var}
+	fi
+
+	einfo "Found CD #${CDROM_CURRENT_CD} root at ${CDROM_ROOT}"
+}
+
+# this is used internally by the cdrom_get_cds() and cdrom_load_next_cd()
+# functions. this should *never* be called from an ebuild.
+# all it does is try to locate a given file on a cd ... if the cd isn't
+# found, then a message asking for the user to insert the cdrom will be
+# displayed and we'll hang out here until:
+# (1) the file is found on a mounted cdrom
+# (2) the user hits CTRL+C
+_cdrom_locate_file_on_cd() {
+	local mline=""
+	local showedmsg=0 showjolietmsg=0
+
+	# Loop until CDROM_ROOT is found (or the user aborts with CTRL+C).
+	while [[ -z ${CDROM_ROOT} ]] ; do
+		local i=0
+		# Alternatives are colon-separated; if a previous CD already
+		# fixed the set index, only that alternative is considered.
+		local -a cdset=(${*//:/ })
+		if [[ -n ${CDROM_SET} ]] ; then
+			cdset=(${cdset[${CDROM_SET}]})
+		fi
+
+		while [[ -n ${cdset[${i}]} ]] ; do
+			local dir=$(dirname ${cdset[${i}]})
+			local file=$(basename ${cdset[${i}]})
+
+			# Scan the mount table for optical filesystems containing
+			# the check-file (case-insensitive match on the filename).
+			local point= node= fs= foo=
+			while read point node fs foo ; do
+				# NOTE(review): ${opts} is never assigned by this read
+				# (fields are point/node/fs/foo), so the subfs clause
+				# can never match — confirm whether 'foo' was meant.
+				[[ " cd9660 iso9660 udf " != *" ${fs} "* ]] && \
+					! [[ ${fs} == "subfs" && ",${opts}," == *",fs=cdfss,"* ]] \
+					&& continue
+				point=${point//\040/ }
+				[[ ! -d ${point}/${dir} ]] && continue
+				[[ -z $(find "${point}/${dir}" -maxdepth 1 -iname "${file}") ]] \
+					&& continue
+				export CDROM_ROOT=${point}
+				export CDROM_SET=${i}
+				export CDROM_MATCH=${cdset[${i}]}
+				return
+			done <<< "$(get_mounts)"
+
+			((++i))
+		done
+
+		echo
+		if [[ ${showedmsg} -eq 0 ]] ; then
+			if [[ ${CDROM_TOTAL_CDS} -eq 1 ]] ; then
+				if [[ -z ${CDROM_NAME} ]] ; then
+					einfo "Please insert+mount the cdrom for ${PN} now !"
+				else
+					einfo "Please insert+mount the ${CDROM_NAME} cdrom now !"
+				fi
+			else
+				if [[ -z ${CDROM_NAME_1} ]] ; then
+					einfo "Please insert+mount cd #${CDROM_CURRENT_CD}" \
+						"for ${PN} now !"
+				else
+					local var="CDROM_NAME_${CDROM_CURRENT_CD}"
+					einfo "Please insert+mount the ${!var} cdrom now !"
+				fi
+			fi
+			showedmsg=1
+		fi
+		einfo "Press return to scan for the cd again"
+		einfo "or hit CTRL+C to abort the emerge."
+		echo
+		# Only hint about Joliet from the second failed scan onwards.
+		if [[ ${showjolietmsg} -eq 0 ]] ; then
+			showjolietmsg=1
+		else
+			ewarn "If you are having trouble with the detection"
+			ewarn "of your CD, it is possible that you do not have"
+			ewarn "Joliet support enabled in your kernel. Please"
+			ewarn "check that CONFIG_JOLIET is enabled in your kernel."
+		fi
+		read || die "something is screwed with your system"
+	done
+}
+
+fi
diff --git a/eclass/check-reqs.eclass b/eclass/check-reqs.eclass
new file mode 100644
index 000000000000..edf60cfabf5d
--- /dev/null
+++ b/eclass/check-reqs.eclass
@@ -0,0 +1,355 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: check-reqs.eclass
+# @MAINTAINER:
+# QA Team <qa@gentoo.org>
+# @AUTHOR:
+# Bo Ørsted Andresen <zlin@gentoo.org>
+# Original Author: Ciaran McCreesh <ciaranm@gentoo.org>
+# @BLURB: Provides a uniform way of handling ebuild which have very high build requirements
+# @DESCRIPTION:
+# This eclass provides a uniform way of handling ebuilds which have very high
+# build requirements in terms of memory or disk space. It provides a function
+# which should usually be called during pkg_setup().
+#
+# The chosen action only happens when the system's resources are detected
+# correctly and only if they are below the threshold specified by the package.
+#
+# @CODE
+# # need this much memory (does *not* check swap)
+# CHECKREQS_MEMORY="256M"
+#
+# # need this much temporary build space
+# CHECKREQS_DISK_BUILD="2G"
+#
+# # install will need this much space in /usr
+# CHECKREQS_DISK_USR="1G"
+#
+# # install will need this much space in /var
+# CHECKREQS_DISK_VAR="1024M"
+#
+# @CODE
+#
+# If you don't specify a value for, say, CHECKREQS_MEMORY, then the test is not
+# carried out.
+#
+# These checks should probably mostly work on non-Linux, and they should
+# probably degrade gracefully if they don't. Probably.
+
+inherit eutils
+
+# @ECLASS-VARIABLE: CHECKREQS_MEMORY
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# How much RAM is needed? Eg.: CHECKREQS_MEMORY=15M
+
+# @ECLASS-VARIABLE: CHECKREQS_DISK_BUILD
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# How much diskspace is needed to build the package? Eg.: CHECKREQS_DISK_BUILD=2T
+
+# @ECLASS-VARIABLE: CHECKREQS_DISK_USR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# How much space in /usr is needed to install the package? Eg.: CHECKREQS_DISK_USR=15G
+
+# @ECLASS-VARIABLE: CHECKREQS_DISK_VAR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# How much space is needed in /var? Eg.: CHECKREQS_DISK_VAR=3000M
+
+EXPORT_FUNCTIONS pkg_setup
+case "${EAPI:-0}" in
+ 0|1|2|3) ;;
+ 4|5) EXPORT_FUNCTIONS pkg_pretend ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+# @FUNCTION: check_reqs
+# @DESCRIPTION:
+# Obsolete function executing all the checks and printing out results
+check_reqs() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Deprecated entry point kept for backward compatibility: warn the
+	# packager, then fall through to the current implementation.
+	echo
+	ewarn "QA: Package calling old ${FUNCNAME} function."
+	ewarn "QA: Please file a bug against the package."
+	ewarn "QA: It should call check-reqs_pkg_pretend and check-reqs_pkg_setup"
+	ewarn "QA: and possibly use EAPI=4 or later."
+	echo
+
+	check-reqs_pkg_setup "$@"
+}
+
+# @FUNCTION: check-reqs_pkg_setup
+# @DESCRIPTION:
+# Exported function running the resources checks in pkg_setup phase.
+# It should be run in both phases to ensure condition changes between
+# pkg_pretend and pkg_setup won't affect the build.
+check-reqs_pkg_setup() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Validate the CHECKREQS_* configuration, run the checks, then
+	# report (and possibly die on) the accumulated results.
+	check-reqs_prepare
+	check-reqs_run
+	check-reqs_output
+}
+
+# @FUNCTION: check-reqs_pkg_pretend
+# @DESCRIPTION:
+# Exported function running the resources checks in pkg_pretend phase.
+check-reqs_pkg_pretend() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Same checks as pkg_setup; the phase name (EBUILD_PHASE) is what
+	# makes failures fatal here.
+	check-reqs_pkg_setup "$@"
+}
+
+# @FUNCTION: check-reqs_prepare
+# @DESCRIPTION:
+# Internal function that checks the variables that should be defined.
+check-reqs_prepare() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# At least one threshold variable must be set; inheriting the
+	# eclass without any is a packaging error.
+	if [[ -z ${CHECKREQS_MEMORY} &&
+			-z ${CHECKREQS_DISK_BUILD} &&
+			-z ${CHECKREQS_DISK_USR} &&
+			-z ${CHECKREQS_DISK_VAR} ]]; then
+		eerror "Set some check-reqs eclass variables if you want to use it."
+		eerror "If you are a user and see this message, file a bug against the package."
+		die "${FUNCNAME}: check-reqs eclass called but not actually used!"
+	fi
+}
+
+# @FUNCTION: check-reqs_run
+# @DESCRIPTION:
+# Internal function that runs the check based on variable settings.
+check-reqs_run() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# some people are *censored*
+	unset CHECKREQS_FAILED
+
+	# use != in test, because MERGE_TYPE only exists in EAPI 4 and later
+	# RAM and build-space checks only matter when actually compiling.
+	if [[ ${MERGE_TYPE} != binary ]]; then
+		[[ -n ${CHECKREQS_MEMORY} ]] && \
+			check-reqs_memory \
+				${CHECKREQS_MEMORY}
+
+		[[ -n ${CHECKREQS_DISK_BUILD} ]] && \
+			check-reqs_disk \
+				"${T}" \
+				"${CHECKREQS_DISK_BUILD}"
+	fi
+
+	# Install-space checks only matter when actually merging to ROOT.
+	if [[ ${MERGE_TYPE} != buildonly ]]; then
+		[[ -n ${CHECKREQS_DISK_USR} ]] && \
+			check-reqs_disk \
+				"${EROOT}/usr" \
+				"${CHECKREQS_DISK_USR}"
+
+		[[ -n ${CHECKREQS_DISK_VAR} ]] && \
+			check-reqs_disk \
+				"${EROOT}/var" \
+				"${CHECKREQS_DISK_VAR}"
+	fi
+}
+
+# @FUNCTION: check-reqs_get_mebibytes
+# @DESCRIPTION:
+# Internal function that returns number in mebibytes.
+# Returns 1024 for 1G or 1048576 for 1T.
+check-reqs_get_mebibytes() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
+
+	# Split "<number><unit>" into its parts; a trailing digit or M
+	# means the value is already expressed in mebibytes.
+	local suffix=${1:(-1)}
+	local amount=${1%[GMT]}
+
+	case ${suffix} in
+		[M0-9]) echo ${amount} ;;
+		G) echo $((amount * 1024)) ;;
+		T) echo $((amount * 1024 * 1024)) ;;
+		*) die "${FUNCNAME}: Unknown unit: ${suffix}" ;;
+	esac
+}
+
+# @FUNCTION: check-reqs_get_number
+# @DESCRIPTION:
+# Internal function that returns the numerical value without the unit.
+# Returns "1" for "1G" or "150" for "150T".
+check-reqs_get_number() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
+
+	# Strip a trailing G/M/T unit, if present, keeping the bare number.
+	local value=${1%[GMT]}
+
+	# If stripping changed nothing, no recognised unit was given; warn,
+	# because the eclass then has to assume mebibytes.
+	if [[ ${value} == ${1} ]]; then
+		ewarn "QA: Package does not specify unit for the size check"
+		ewarn "QA: Assuming mebibytes."
+		ewarn "QA: File bug against the package. It should specify the unit."
+	fi
+
+	echo ${value}
+}
+
+# @FUNCTION: check-reqs_get_unit
+# @DESCRIPTION:
+# Internal function that return the unit without the numerical value.
+# Returns "GiB" for "1G" or "TiB" for "150T".
+check-reqs_get_unit() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
+
+	# Map the single-character suffix to its human-readable IEC name.
+	local suffix=${1:(-1)}
+
+	case ${suffix} in
+		[M0-9]) echo "MiB" ;;
+		G) echo "GiB" ;;
+		T) echo "TiB" ;;
+		*) die "${FUNCNAME}: Unknown unit: ${suffix}" ;;
+	esac
+}
+
+# @FUNCTION: check-reqs_output
+# @DESCRIPTION:
+# Internal function that prints the warning and dies if required based on
+# the test results.
+check-reqs_output() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local msg="ewarn"
+
+	# In pkg_pretend the failure is fatal unless the user overrides it
+	# with I_KNOW_WHAT_I_AM_DOING; elsewhere it only warns.
+	[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && msg="eerror"
+	if [[ -n ${CHECKREQS_FAILED} ]]; then
+		${msg}
+		${msg} "Space constraints set in the ebuild were not met!"
+		${msg} "The build will most probably fail, you should enhance the space"
+		${msg} "as per failed tests."
+		${msg}
+
+		[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && \
+			die "Build requirements not met!"
+	fi
+}
+
+# @FUNCTION: check-reqs_memory
+# @DESCRIPTION:
+# Internal function that checks size of RAM.
+check-reqs_memory() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
+
+	local size=${1}
+	local actual_memory
+
+	check-reqs_start_phase \
+		${size} \
+		"RAM"
+
+	# /proc/meminfo reports MemTotal in KiB; the BSD-style sysctl
+	# fallback needs its "name: value" prefix stripped.
+	# NOTE(review): hw.physmem is typically in *bytes*, while the
+	# comparison below assumes KiB — confirm the fallback's unit.
+	if [[ -r /proc/meminfo ]] ; then
+		actual_memory=$(awk '/MemTotal/ { print $2 }' /proc/meminfo)
+	else
+		actual_memory=$(sysctl hw.physmem 2>/dev/null )
+		[[ "$?" == "0" ]] &&
+			actual_memory=$(echo $actual_memory | sed -e 's/^[^:=]*[:=]//' )
+	fi
+	if [[ -n ${actual_memory} ]] ; then
+		# Compare in KiB: requested MiB * 1024.
+		if [[ ${actual_memory} -lt $((1024 * $(check-reqs_get_mebibytes ${size}))) ]] ; then
+			eend 1
+			check-reqs_unsatisfied \
+				${size} \
+				"RAM"
+		else
+			eend 0
+		fi
+	else
+		eend 1
+		ewarn "Couldn't determine amount of memory, skipping..."
+	fi
+}
+
+# @FUNCTION: check-reqs_disk
+# @DESCRIPTION:
+# Internal function that checks space on the harddrive.
+check-reqs_disk() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [path] [size]"
+
+	local path=${1}
+	local size=${2}
+	local space_megs
+
+	check-reqs_start_phase \
+		${size} \
+		"disk space at \"${path}\""
+
+	# df -P (POSIX format) with -m reports available space in MiB;
+	# FNR==2 picks the data row, $4 the "Available" column.
+	space_megs=$(df -Pm "${1}" 2>/dev/null | awk 'FNR == 2 {print $4}')
+
+	if [[ $? == 0 && -n ${space_megs} ]] ; then
+		if [[ ${space_megs} -lt $(check-reqs_get_mebibytes ${size}) ]] ; then
+			eend 1
+			check-reqs_unsatisfied \
+				${size} \
+				"disk space at \"${path}\""
+		else
+			eend 0
+		fi
+	else
+		eend 1
+		ewarn "Couldn't determine disk space, skipping..."
+	fi
+}
+
+# @FUNCTION: check-reqs_start_phase
+# @DESCRIPTION:
+# Internal function that inform about started check
+check-reqs_start_phase() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"
+
+	local size=${1}
+	local location=${2}
+	# Pretty-print the threshold, e.g. "2 GiB".
+	local sizeunit="$(check-reqs_get_number ${size}) $(check-reqs_get_unit ${size})"
+
+	ebegin "Checking for at least ${sizeunit} ${location}"
+}
+
+# @FUNCTION: check-reqs_unsatisfied
+# @DESCRIPTION:
+# Internal function that inform about check result.
+# It has different output between pretend and setup phase,
+# where in pretend phase it is fatal.
+check-reqs_unsatisfied() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"
+
+	local msg="ewarn"
+	local size=${1}
+	local location=${2}
+	local sizeunit="$(check-reqs_get_number ${size}) $(check-reqs_get_unit ${size})"
+
+	# Escalate to eerror in pkg_pretend (unless the user overrides);
+	# the actual die happens later in check-reqs_output.
+	[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && msg="eerror"
+	${msg} "There is NOT at least ${sizeunit} ${location}"
+
+	# @ECLASS-VARIABLE: CHECKREQS_FAILED
+	# @DESCRIPTION:
+	# @INTERNAL
+	# If set the checks failed and eclass should abort the build.
+	# Internal, do not set yourself.
+	CHECKREQS_FAILED="true"
+}
diff --git a/eclass/chromium.eclass b/eclass/chromium.eclass
new file mode 100644
index 000000000000..cd641251a312
--- /dev/null
+++ b/eclass/chromium.eclass
@@ -0,0 +1,266 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: chromium.eclass
+# @MAINTAINER:
+# Chromium Herd <chromium@gentoo.org>
+# @AUTHOR:
+# Mike Gilbert <floppym@gentoo.org>
+# @BLURB: Shared functions for chromium and google-chrome
+
+inherit eutils fdo-mime gnome2-utils linux-info
+
+# Consumers may set CHROMIUM_EXPORT_PHASES=no to keep the pkg_* phase
+# functions from being exported.
+if [[ ${CHROMIUM_EXPORT_PHASES} != no ]]; then
+	EXPORT_FUNCTIONS pkg_preinst pkg_postinst pkg_postrm
+fi
+
+# Only the chromium package itself offers the custom-cflags USE flag.
+if [[ ${PN} == chromium ]]; then
+	IUSE+=" custom-cflags"
+fi
+
+# @FUNCTION: chromium_suid_sandbox_check_kernel_config
+# @USAGE:
+# @DESCRIPTION:
+# Ensures the system kernel supports features needed for SUID sandbox to work.
+# Only warns (the "~" option prefix makes check_extra_config non-fatal).
+chromium_suid_sandbox_check_kernel_config() {
+	has "${EAPI:-0}" 0 1 2 3 && die "EAPI=${EAPI} is not supported"
+
+	# Only meaningful when the package is actually being built/merged.
+	if [[ "${MERGE_TYPE}" == "source" || "${MERGE_TYPE}" == "binary" ]]; then
+		# Warn if the kernel does not support features needed for sandboxing.
+		# Bug #363987.
+		ERROR_PID_NS="PID_NS is required for sandbox to work"
+		ERROR_NET_NS="NET_NS is required for sandbox to work"
+		ERROR_USER_NS="USER_NS is required for sandbox to work"
+		ERROR_SECCOMP_FILTER="SECCOMP_FILTER is required for sandbox to work"
+		CONFIG_CHECK="~PID_NS ~NET_NS ~SECCOMP_FILTER ~USER_NS"
+		check_extra_config
+	fi
+}
+
+# @ECLASS-VARIABLE: CHROMIUM_LANGS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of language packs available for this package.
+
+# Internal: append one "+linguas_<lang>" flag to IUSE for every language
+# listed in CHROMIUM_LANGS.
+_chromium_set_linguas_IUSE() {
+	[[ ${EAPI:-0} == 0 ]] && die "EAPI=${EAPI} is not supported"
+
+	local lang
+	for lang in ${CHROMIUM_LANGS}; do
+		# Default to enabled since we bundle them anyway.
+		# USE-expansion will take care of disabling the langs the user has not
+		# selected via LINGUAS.
+		IUSE+=" +linguas_${lang}"
+	done
+}
+
+# Populate IUSE at inherit time when the ebuild declared language packs.
+if [[ ${CHROMIUM_LANGS} ]]; then
+	_chromium_set_linguas_IUSE
+fi
+
+# Internal: map system locale names to Chromium locale names
+# (underscore becomes hyphen; es_LA is special-cased to es-419).
+_chromium_crlang() {
+	local x
+	for x in "$@"; do
+		case $x in
+			es_LA) echo es-419 ;;
+			*) echo "${x/_/-}" ;;
+		esac
+	done
+}
+
+# Internal: inverse of _chromium_crlang — map Chromium locale names
+# back to system locale names.
+_chromium_syslang() {
+	local x
+	for x in "$@"; do
+		case $x in
+			es-419) echo es_LA ;;
+			*) echo "${x/-/_}" ;;
+		esac
+	done
+}
+
+# Internal: strip a trailing ".pak" suffix from each argument.
+_chromium_strip_pak() {
+	local x
+	for x in "$@"; do
+		echo "${x%.pak}"
+	done
+}
+
+# @FUNCTION: chromium_remove_language_paks
+# @USAGE:
+# @DESCRIPTION:
+# Removes pak files from the current directory for languages that the user has
+# not selected via the LINGUAS variable.
+# Also performs QA checks to ensure CHROMIUM_LANGS has been set correctly.
+# NOTE(review): operates on *.pak in the current working directory — the
+# caller is expected to cd into the locales directory first.
+chromium_remove_language_paks() {
+	local crlangs=$(_chromium_crlang ${CHROMIUM_LANGS})
+	local present_crlangs=$(_chromium_strip_pak *.pak)
+	local present_langs=$(_chromium_syslang ${present_crlangs})
+	local lang
+
+	# Look for missing pak files.
+	for lang in ${crlangs}; do
+		if ! has ${lang} ${present_crlangs}; then
+			eqawarn "LINGUAS warning: no .pak file for ${lang} (${lang}.pak not found)"
+		fi
+	done
+
+	# Look for extra pak files.
+	# Remove pak files that the user does not want.
+	for lang in ${present_langs}; do
+		# en_US is always kept.
+		if [[ ${lang} == en_US ]]; then
+			continue
+		fi
+		if ! has ${lang} ${CHROMIUM_LANGS}; then
+			eqawarn "LINGUAS warning: no ${lang} in LANGS"
+			continue
+		fi
+		# Delete the pak for any language the user did not enable.
+		if ! use linguas_${lang}; then
+			rm "$(_chromium_crlang ${lang}).pak" || die
+		fi
+	done
+}
+
+# Exported pkg_preinst: snapshot the icon list before the image merges.
+chromium_pkg_preinst() {
+	gnome2_icon_savelist
+}
+
+# Exported pkg_postinst: refresh desktop/icon caches and print
+# post-install hints about optional icon and font packages.
+chromium_pkg_postinst() {
+	fdo-mime_desktop_database_update
+	gnome2_icon_cache_update
+
+	# For more info see bug #292201, bug #352263, bug #361859.
+	if ! has_version x11-themes/gnome-icon-theme &&
+		! has_version x11-themes/oxygen-icons ; then
+		elog
+		elog "Depending on your desktop environment, you may need"
+		elog "to install additional packages to get icons on the Downloads page."
+		elog
+		elog "For KDE, the required package is kde-apps/oxygen-icons."
+		elog
+		elog "For other desktop environments, try one of the following:"
+		elog " - x11-themes/gnome-icon-theme"
+		elog " - x11-themes/tango-icon-theme"
+	fi
+
+	# For more info see bug #359153.
+	elog
+	elog "Some web pages may require additional fonts to display properly."
+	elog "Try installing some of the following packages if some characters"
+	elog "are not displayed properly:"
+	elog " - media-fonts/arphicfonts"
+	elog " - media-fonts/bitstream-cyberbit"
+	elog " - media-fonts/droid"
+	elog " - media-fonts/ipamonafont"
+	elog " - media-fonts/ja-ipafonts"
+	elog " - media-fonts/takao-fonts"
+	elog " - media-fonts/wqy-microhei"
+	elog " - media-fonts/wqy-zenhei"
+}
+
+# Exported pkg_postrm: refresh the icon cache after unmerge.
+chromium_pkg_postrm() {
+	gnome2_icon_cache_update
+}
+
+# Die hook: when the build fails during src_compile, print hints about
+# common user-side causes (debug CFLAGS, ccache, custom-cflags) and dump
+# memory information, since low memory is a known failure mode.
+chromium_pkg_die() {
+	# Only diagnose compile-phase failures.
+	if [[ "${EBUILD_PHASE}" != "compile" ]]; then
+		return
+	fi
+
+	# Prevent user problems like bug #348235.
+	eshopts_push -s extglob
+	if is-flagq '-g?(gdb)?([1-9])'; then
+		ewarn
+		ewarn "You have enabled debug info (i.e. -g or -ggdb in your CFLAGS/CXXFLAGS)."
+		ewarn "This produces very large build files causes the linker to consume large"
+		ewarn "amounts of memory."
+		ewarn
+		ewarn "Please try removing -g{,gdb} before reporting a bug."
+		ewarn
+	fi
+	eshopts_pop
+
+	# ccache often causes bogus compile failures, especially when the cache gets
+	# corrupted.
+	if has ccache ${FEATURES}; then
+		ewarn
+		ewarn "You have enabled ccache. Please try disabling ccache"
+		ewarn "before reporting a bug."
+		ewarn
+	fi
+
+	# No ricer bugs.
+	if use_if_iuse custom-cflags; then
+		ewarn
+		ewarn "You have enabled the custom-cflags USE flag."
+		ewarn "Please disable it before reporting a bug."
+		ewarn
+	fi
+
+	# If the system doesn't have enough memory, the compilation is known to
+	# fail. Print info about memory to recognize this condition.
+	einfo
+	einfo "$(grep MemTotal /proc/meminfo)"
+	einfo "$(grep SwapTotal /proc/meminfo)"
+	einfo
+}
+
+# @VARIABLE: EGYP_CHROMIUM_COMMAND
+# @DESCRIPTION:
+# Path to the gyp_chromium script.
+: ${EGYP_CHROMIUM_COMMAND:=build/gyp_chromium}
+
+# @VARIABLE: EGYP_CHROMIUM_DEPTH
+# @DESCRIPTION:
+# Depth for egyp_chromium.
+: ${EGYP_CHROMIUM_DEPTH:=.}
+
+# @FUNCTION: egyp_chromium
+# @USAGE: [gyp arguments]
+# @DESCRIPTION:
+# Calls EGYP_CHROMIUM_COMMAND with depth EGYP_CHROMIUM_DEPTH and given
+# arguments. The full command line is echoed for logging.
+egyp_chromium() {
+	# Rebuild the positional parameters as the complete command line so
+	# the exact same words can be echoed and then executed.
+	set -- "${EGYP_CHROMIUM_COMMAND}" --depth="${EGYP_CHROMIUM_DEPTH}" "$@"
+	echo "$@"
+	"$@"
+}
+
+# @FUNCTION: gyp_use
+# @USAGE: <USE flag> [GYP flag] [true suffix] [false suffix]
+# @DESCRIPTION:
+# If USE flag is set, echo -D[GYP flag]=[true suffix].
+#
+# If USE flag is not set, echo -D[GYP flag]=[false suffix].
+#
+# [GYP flag] defaults to use_[USE flag] with hyphens converted to underscores.
+#
+# [true suffix] defaults to 1. [false suffix] defaults to 0.
+gyp_use() {
+	local gypflag="-D${2:-use_${1//-/_}}="
+	# "${3-1}"/"${4-0}" (no colon) deliberately let an explicitly-passed
+	# empty suffix override the 1/0 defaults.
+	usex "$1" "${gypflag}" "${gypflag}" "${3-1}" "${4-0}"
+}
+
+# @FUNCTION: chromium_bundled_v8_version
+# @USAGE: [path to version.cc]
+# @DESCRIPTION:
+# Outputs the version of v8 parsed from a (bundled) copy of the source code.
+# NOTE(review): assumes version.cc contains plain "#define MAJOR_VERSION n"
+# style lines — verify against the bundled v8 when bumping.
+chromium_bundled_v8_version() {
+	local vf=${1:-v8/src/version.cc}
+	local major minor build patch
+	major=$(sed -ne 's/#define MAJOR_VERSION *\([0-9]*\)/\1/p' "${vf}")
+	minor=$(sed -ne 's/#define MINOR_VERSION *\([0-9]*\)/\1/p' "${vf}")
+	build=$(sed -ne 's/#define BUILD_NUMBER *\([0-9]*\)/\1/p' "${vf}")
+	patch=$(sed -ne 's/#define PATCH_LEVEL *\([0-9]*\)/\1/p' "${vf}")
+	echo "${major}.${minor}.${build}.${patch}"
+}
+
+# @FUNCTION: chromium_installed_v8_version
+# @USAGE:
+# @DESCRIPTION:
+# Outputs the version of dev-lang/v8 currently installed on the host system.
+chromium_installed_v8_version() {
+	local cpf=$(best_version dev-lang/v8)
+	# Strip the category/package prefix, then any -rN revision suffix.
+	local pvr=${cpf#dev-lang/v8-}
+	echo "${pvr%-r*}"
+}
diff --git a/eclass/clutter.eclass b/eclass/clutter.eclass
new file mode 100644
index 000000000000..7451c173b6b5
--- /dev/null
+++ b/eclass/clutter.eclass
@@ -0,0 +1,72 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: clutter.eclass
+# @MAINTAINER:
+# GNOME Herd <gnome@gentoo.org>
+# @AUTHOR:
+# Nirbheek Chauhan <nirbheek@gentoo.org>
+# @BLURB: Sets SRC_URI, LICENSE, etc and exports src_install
+
+inherit versionator
+
+HOMEPAGE="http://www.clutter-project.org/"
+
+RV=($(get_version_components))
+SRC_URI="http://www.clutter-project.org/sources/${PN}/${RV[0]}.${RV[1]}/${P}.tar.bz2"
+
+# All official clutter packages use LGPL-2.1 or later
+LICENSE="${LICENSE:-LGPL-2.1+}"
+
+# This will be used by all clutter packages
+DEPEND="virtual/pkgconfig"
+
+# @ECLASS-VARIABLE: CLUTTER_LA_PUNT
+# @DESCRIPTION:
+# Set to anything except 'no' to remove *all* .la files before installing.
+# Not to be used without due consideration, sometimes .la files *are* needed.
+CLUTTER_LA_PUNT="${CLUTTER_LA_PUNT:-"no"}"
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# This variable holds relative paths of files to be dodoc-ed.
+# By default, it contains the standard list of autotools doc files
+DOCS="${DOCS:-AUTHORS ChangeLog NEWS README TODO}"
+
+# @ECLASS-VARIABLE: EXAMPLES
+# @DESCRIPTION:
+# This variable holds relative paths of files to be added as examples when the
+# "examples" USE-flag exists, and is switched on. Bash expressions can be used
+# since the variable is eval-ed before substitution. Empty by default.
+EXAMPLES="${EXAMPLES:-""}"
+
+# @FUNCTION: clutter_src_install
+# @DESCRIPTION:
+# Runs emake install, dodoc, and installs examples when the "examples"
+# USE flag is declared and enabled.  Honours CLUTTER_LA_PUNT for
+# removing .la files.
+clutter_src_install() {
+	emake DESTDIR="${D}" install || die "emake install failed"
+	dodoc ${DOCS} || die "dodoc failed"
+
+	# examples
+	if has examples ${IUSE} && use examples; then
+		insinto /usr/share/doc/${PF}/examples
+
+		# We use eval to be able to use globs and other bash expressions
+		for example in $(eval echo ${EXAMPLES}); do
+			# If directory
+			# (a trailing slash in EXAMPLES marks a directory for recursive install)
+			if [[ ${example: -1} == "/" ]]; then
+				doins -r ${example} || die "doins ${example} failed!"
+			else
+				doins ${example} || die "doins ${example} failed!"
+			fi
+		done
+	fi
+
+	# Delete all .la files
+	if [[ "${CLUTTER_LA_PUNT}" != "no" ]]; then
+		find "${D}" -name '*.la' -exec rm -f '{}' + || die
+	fi
+}
+
+EXPORT_FUNCTIONS src_install
diff --git a/eclass/cmake-multilib.eclass b/eclass/cmake-multilib.eclass
new file mode 100644
index 000000000000..deec3f6eb59b
--- /dev/null
+++ b/eclass/cmake-multilib.eclass
@@ -0,0 +1,74 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cmake-multilib.eclass
+# @MAINTAINER:
+# gx86-multilib team <multilib@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# @BLURB: cmake-utils wrapper for multilib builds
+# @DESCRIPTION:
+# The cmake-multilib.eclass provides a glue between cmake-utils.eclass(5)
+# and multilib-minimal.eclass(5), aiming to provide a convenient way
+# to build packages using cmake for multiple ABIs.
+#
+# Inheriting this eclass sets IUSE and exports default multilib_src_*()
+# sub-phases that call cmake-utils phase functions for each ABI enabled.
+# The multilib_src_*() functions can be defined in ebuild just like
+# in multilib-minimal, yet they ought to call appropriate cmake-utils
+# phase rather than 'default'.
+
+# EAPI=5 is required for meaningful MULTILIB_USEDEP.
+case ${EAPI:-0} in
+ 5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+if [[ ${CMAKE_IN_SOURCE_BUILD} ]]; then
+ die "${ECLASS}: multilib support requires out-of-source builds."
+fi
+
+inherit cmake-utils multilib-minimal
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
+
+# Exported src_configure: store caller arguments and run the multilib
+# loop, which invokes multilib_src_configure() once per enabled ABI.
+cmake-multilib_src_configure() {
+	# _cmake_args is 'local', but bash dynamic scoping keeps it visible
+	# inside multilib_src_configure() called further down the stack.
+	local _cmake_args=( "${@}" )
+
+	multilib-minimal_src_configure
+}
+
+# Per-ABI configure step; forwards the saved arguments to cmake-utils.
+multilib_src_configure() {
+	cmake-utils_src_configure "${_cmake_args[@]}"
+}
+
+# Exported src_compile: same pattern as src_configure.
+cmake-multilib_src_compile() {
+	local _cmake_args=( "${@}" )
+
+	multilib-minimal_src_compile
+}
+
+# Per-ABI compile step.
+multilib_src_compile() {
+	cmake-utils_src_compile "${_cmake_args[@]}"
+}
+
+# Exported src_test: same pattern as src_configure.
+cmake-multilib_src_test() {
+	local _cmake_args=( "${@}" )
+
+	multilib-minimal_src_test
+}
+
+# Per-ABI test step.
+multilib_src_test() {
+	cmake-utils_src_test "${_cmake_args[@]}"
+}
+
+# Exported src_install: same pattern as src_configure.
+cmake-multilib_src_install() {
+	local _cmake_args=( "${@}" )
+
+	multilib-minimal_src_install
+}
+
+# Per-ABI install step.
+multilib_src_install() {
+	cmake-utils_src_install "${_cmake_args[@]}"
+}
diff --git a/eclass/cmake-utils.eclass b/eclass/cmake-utils.eclass
new file mode 100644
index 000000000000..9e2b0886b7c3
--- /dev/null
+++ b/eclass/cmake-utils.eclass
@@ -0,0 +1,751 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cmake-utils.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @AUTHOR:
+# Tomáš Chvátal <scarabeus@gentoo.org>
+# Maciej Mrozowski <reavertm@gentoo.org>
+# (undisclosed contributors)
+# Original author: Zephyrus (zephyrus@mirach.it)
+# @BLURB: common ebuild functions for cmake-based packages
+# @DESCRIPTION:
+# The cmake-utils eclass makes creating ebuilds for cmake-based packages much easier.
+# It provides all inherited features (DOCS, HTML_DOCS, PATCHES) along with out-of-source
+# builds (default), in-source builds and an implementation of the well-known use_enable
+# and use_with functions for CMake.
+
+if [[ -z ${_CMAKE_UTILS_ECLASS} ]]; then
+_CMAKE_UTILS_ECLASS=1
+
+
+# @ECLASS-VARIABLE: BUILD_DIR
+# @DESCRIPTION:
+# Build directory where all cmake processed files should be generated.
+# For in-source build it's fixed to ${CMAKE_USE_DIR}.
+# For out-of-source build it can be overridden, by default it uses
+# ${WORKDIR}/${P}_build.
+#
+# This variable has been called CMAKE_BUILD_DIR formerly.
+# It is set under that name for compatibility.
+
+# @ECLASS-VARIABLE: CMAKE_BINARY
+# @DESCRIPTION:
+# Eclass can use a different cmake binary than the one provided by the system.
+: ${CMAKE_BINARY:=cmake}
+
+# @ECLASS-VARIABLE: CMAKE_BUILD_TYPE
+# @DESCRIPTION:
+# Set to override default CMAKE_BUILD_TYPE. Only useful for packages
+# known to make use of "if (CMAKE_BUILD_TYPE MATCHES xxx)".
+# If about to be set - needs to be set before invoking cmake-utils_src_configure.
+# You usually do *NOT* want nor need to set it as it pulls CMake default build-type
+# specific compiler flags overriding make.conf.
+: ${CMAKE_BUILD_TYPE:=Gentoo}
+
+# @ECLASS-VARIABLE: CMAKE_IN_SOURCE_BUILD
+# @DESCRIPTION:
+# Set to enable in-source build.
+
+# @ECLASS-VARIABLE: CMAKE_MAKEFILE_GENERATOR
+# @DESCRIPTION:
+# Specify a makefile generator to be used by cmake.
+# At this point only "emake" and "ninja" are supported.
+: ${CMAKE_MAKEFILE_GENERATOR:=emake}
+
+# @ECLASS-VARIABLE: CMAKE_MIN_VERSION
+# @DESCRIPTION:
+# Specify the minimum required CMake version.
+: ${CMAKE_MIN_VERSION:=2.8.12}
+
+# @ECLASS-VARIABLE: CMAKE_REMOVE_MODULES
+# @DESCRIPTION:
+# Do we want to remove anything? yes or whatever else for no
+: ${CMAKE_REMOVE_MODULES:=yes}
+CMAKE_REMOVE_MODULES="${CMAKE_REMOVE_MODULES:-yes}"
+
+# @ECLASS-VARIABLE: CMAKE_REMOVE_MODULES_LIST
+# @DESCRIPTION:
+# Space-separated list of CMake modules that will be removed in $S during src_prepare,
+# in order to force packages to use the system version.
+: ${CMAKE_REMOVE_MODULES_LIST:=FindBLAS FindLAPACK}
+
+# @ECLASS-VARIABLE: CMAKE_USE_DIR
+# @DESCRIPTION:
+# Sets the directory where we are working with cmake.
+# For example when application uses autotools and only one
+# plugin needs to be done by cmake.
+# By default it uses ${S}.
+
+# @ECLASS-VARIABLE: CMAKE_VERBOSE
+# @DESCRIPTION:
+# Set to OFF to disable verbose messages during compilation
+: ${CMAKE_VERBOSE:=ON}
+
+# @ECLASS-VARIABLE: CMAKE_WARN_UNUSED_CLI
+# @DESCRIPTION:
+# Warn about variables that are declared on the command line
+# but not used. Might give false-positives.
+# "no" to disable (default) or anything else to enable.
+: ${CMAKE_WARN_UNUSED_CLI:=no}
+
+# @ECLASS-VARIABLE: PREFIX
+# @DESCRIPTION:
+# Eclass respects PREFIX variable, though it's not recommended way to set
+# install/lib/bin prefixes.
+# Use -DCMAKE_INSTALL_PREFIX=... CMake variable instead.
+: ${PREFIX:=/usr}
+
+# @ECLASS-VARIABLE: WANT_CMAKE
+# @DESCRIPTION:
+# Specify if cmake-utils eclass should depend on cmake optionally or not.
+# This is useful when only part of application is using cmake build system.
+# Valid values are: always [default], optional (where the value is the useflag
+# used for optionality)
+: ${WANT_CMAKE:=always}
+
+# @ECLASS-VARIABLE: CMAKE_EXTRA_CACHE_FILE
+# @DESCRIPTION:
+# Specifies an extra cache file to pass to cmake. This is the analog of EXTRA_ECONF
+# for econf and is needed to pass TRY_RUN results when cross-compiling.
+# Should be set by user in a per-package basis in /etc/portage/package.env.
+
+CMAKEDEPEND=""
+case ${WANT_CMAKE} in
+ always)
+ ;;
+ *)
+ IUSE+=" ${WANT_CMAKE}"
+ CMAKEDEPEND+="${WANT_CMAKE}? ( "
+ ;;
+esac
+inherit toolchain-funcs multilib flag-o-matic eutils
+
+case ${EAPI} in
+ 2|3|4|5) : ;;
+ *) die "EAPI=${EAPI:-0} is not supported" ;;
+esac
+
+CMAKE_EXPF="src_prepare src_configure src_compile src_test src_install"
+EXPORT_FUNCTIONS ${CMAKE_EXPF}
+
+case ${CMAKE_MAKEFILE_GENERATOR} in
+ emake)
+ CMAKEDEPEND+=" sys-devel/make"
+ ;;
+ ninja)
+ CMAKEDEPEND+=" dev-util/ninja"
+ ;;
+ *)
+ eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
+ die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
+ ;;
+esac
+
+if [[ ${PN} != cmake ]]; then
+ CMAKEDEPEND+=" >=dev-util/cmake-${CMAKE_MIN_VERSION}"
+fi
+
+CMAKEDEPEND+=" userland_GNU? ( >=sys-apps/findutils-4.4.0 )"
+
+[[ ${WANT_CMAKE} = always ]] || CMAKEDEPEND+=" )"
+
+DEPEND="${CMAKEDEPEND}"
+unset CMAKEDEPEND
+
+# Internal functions used by cmake-utils_use_*
+# Echoes "-D<prefix><flag>=ON|OFF" based on the state of USE flag $2.
+# When an explicit flag name ($3) is given only that name is emitted;
+# otherwise the lowercase, UPPERCASE and Capitalised variants are all
+# emitted, to match common CMake option spellings.
+_use_me_now() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local uper capitalised x
+	[[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
+	if [[ ! -z $3 ]]; then
+		# user specified the use name so use it
+		echo "-D$1$3=$(use $2 && echo ON || echo OFF)"
+	else
+		# use all various most used combinations
+		uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
+		capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
+		for x in $2 $uper $capitalised; do
+			echo "-D$1$x=$(use $2 && echo ON || echo OFF) "
+		done
+	fi
+}
+# Same as _use_me_now, but with the ON/OFF values inverted.
+_use_me_now_inverted() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local uper capitalised x
+	[[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
+	if [[ ! -z $3 ]]; then
+		# user specified the use name so use it
+		echo "-D$1$3=$(use $2 && echo OFF || echo ON)"
+	else
+		# use all various most used combinations
+		uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
+		capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
+		for x in $2 $uper $capitalised; do
+			echo "-D$1$x=$(use $2 && echo OFF || echo ON) "
+		done
+	fi
+}
+
+# Determine using IN or OUT source build
+# Sets BUILD_DIR (and the legacy CMAKE_BUILD_DIR alias), creating the
+# directory if necessary.
+_check_build_dir() {
+	: ${CMAKE_USE_DIR:=${S}}
+	if [[ -n ${CMAKE_IN_SOURCE_BUILD} ]]; then
+		# we build in source dir
+		BUILD_DIR="${CMAKE_USE_DIR}"
+	else
+		# Respect both the old variable and the new one, depending
+		# on which one was set by the ebuild.
+		if [[ ! ${BUILD_DIR} && ${CMAKE_BUILD_DIR} ]]; then
+			eqawarn "The CMAKE_BUILD_DIR variable has been renamed to BUILD_DIR."
+			eqawarn "Please migrate the ebuild to use the new one."
+
+			# In the next call, both variables will be set already
+			# and we'd have to know which one takes precedence.
+			_RESPECT_CMAKE_BUILD_DIR=1
+		fi
+
+		if [[ ${_RESPECT_CMAKE_BUILD_DIR} ]]; then
+			BUILD_DIR=${CMAKE_BUILD_DIR:-${WORKDIR}/${P}_build}
+		else
+			: ${BUILD_DIR:=${WORKDIR}/${P}_build}
+		fi
+	fi
+
+	# Backwards compatibility for getting the value.
+	CMAKE_BUILD_DIR=${BUILD_DIR}
+
+	mkdir -p "${BUILD_DIR}"
+	echo ">>> Working in BUILD_DIR: \"$BUILD_DIR\""
+}
+
+# Determine which generator to use
+# Maps CMAKE_MAKEFILE_GENERATOR onto the generator name cmake expects
+# for its -G option; dies on unsupported values.
+_generator_to_use() {
+	local generator_name
+
+	case ${CMAKE_MAKEFILE_GENERATOR} in
+		ninja)
+			generator_name="Ninja"
+			;;
+		emake)
+			generator_name="Unix Makefiles"
+			;;
+		*)
+			eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
+			die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
+			;;
+	esac
+
+	echo ${generator_name}
+}
+
+# @FUNCTION: comment_add_subdirectory
+# @USAGE: <subdirectory>
+# @DESCRIPTION:
+# Comment out an add_subdirectory call in CMakeLists.txt in the current directory
+# by prefixing the matching line with "#DONOTCOMPILE ".
+comment_add_subdirectory() {
+	if [[ -z ${1} ]]; then
+		die "comment_add_subdirectory must be passed the directory name to comment"
+	fi
+
+	# Silently do nothing when the current directory has no CMakeLists.txt.
+	if [[ -e "CMakeLists.txt" ]]; then
+		sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${1//\//\\/}[[:space:]]*)/s/^/#DONOTCOMPILE /" \
+			-i CMakeLists.txt || die "failed to comment add_subdirectory(${1})"
+	fi
+}
+
+# @FUNCTION: cmake-utils_use_with
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_with. See ebuild(5).
+#
+# `cmake-utils_use_with foo FOO` echoes -DWITH_FOO=ON if foo is enabled
+# and -DWITH_FOO=OFF if it is disabled.
+cmake-utils_use_with() { _use_me_now WITH_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_enable
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_enable foo FOO` echoes -DENABLE_FOO=ON if foo is enabled
+# and -DENABLE_FOO=OFF if it is disabled.
+cmake-utils_use_enable() { _use_me_now ENABLE_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_find_package
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_find_package foo LibFoo` echoes -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=OFF
+# if foo is enabled and -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=ON if it is disabled.
+# This can be used to make find_package optional.
+cmake-utils_use_find_package() { _use_me_now_inverted CMAKE_DISABLE_FIND_PACKAGE_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_disable
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on inversion of use_enable. See ebuild(5).
+#
+# `cmake-utils_use_disable foo FOO` echoes -DDISABLE_FOO=OFF if foo is enabled
+# and -DDISABLE_FOO=ON if it is disabled.
+cmake-utils_use_disable() { _use_me_now_inverted DISABLE_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_no
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_disable. See ebuild(5).
+#
+# `cmake-utils_use_no foo FOO` echoes -DNO_FOO=OFF if foo is enabled
+# and -DNO_FOO=ON if it is disabled.
+cmake-utils_use_no() { _use_me_now_inverted NO_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_want
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_want foo FOO` echoes -DWANT_FOO=ON if foo is enabled
+# and -DWANT_FOO=OFF if it is disabled.
+cmake-utils_use_want() { _use_me_now WANT_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_build
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_build foo FOO` echoes -DBUILD_FOO=ON if foo is enabled
+# and -DBUILD_FOO=OFF if it is disabled.
+cmake-utils_use_build() { _use_me_now BUILD_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_has
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_has foo FOO` echoes -DHAVE_FOO=ON if foo is enabled
+# and -DHAVE_FOO=OFF if it is disabled.
+cmake-utils_use_has() { _use_me_now HAVE_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use_use
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use_use foo FOO` echoes -DUSE_FOO=ON if foo is enabled
+# and -DUSE_FOO=OFF if it is disabled.
+cmake-utils_use_use() { _use_me_now USE_ "$@" ; }
+
+# @FUNCTION: cmake-utils_use
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_use foo FOO` echoes -DFOO=ON if foo is enabled
+# and -DFOO=OFF if it is disabled.
+cmake-utils_use() { _use_me_now "" "$@" ; }
+
+# @FUNCTION: cmake-utils_useno
+# @USAGE: <USE flag> [flag name]
+# @DESCRIPTION:
+# Based on use_enable. See ebuild(5).
+#
+# `cmake-utils_useno foo NOFOO` echoes -DNOFOO=OFF if foo is enabled
+# and -DNOFOO=ON if it is disabled.
+cmake-utils_useno() { _use_me_now_inverted "" "$@" ; }
+
+# Internal function for modifying hardcoded definitions.
+# Removes dangerous definitions that override Gentoo settings, and
+# appends a configuration summary MESSAGE() to CMakeLists.txt.
+_modify-cmakelists() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Only edit the files once
+	grep -qs "<<< Gentoo configuration >>>" "${CMAKE_USE_DIR}"/CMakeLists.txt && return 0
+
+	# Comment out all set (<some_should_be_user_defined_variable> value)
+	# TODO Add QA checker - inform when variable being checked for below is set in CMakeLists.txt
+	find "${CMAKE_USE_DIR}" -name CMakeLists.txt \
+		-exec sed -i -e '/^[[:space:]]*[sS][eE][tT][[:space:]]*([[:space:]]*CMAKE_BUILD_TYPE.*)/{s/^/#IGNORE /g}' {} + \
+		-exec sed -i -e '/^[[:space:]]*[sS][eE][tT][[:space:]]*([[:space:]]*CMAKE_COLOR_MAKEFILE.*)/{s/^/#IGNORE /g}' {} + \
+		-exec sed -i -e '/^[[:space:]]*[sS][eE][tT][[:space:]]*([[:space:]]*CMAKE_INSTALL_PREFIX.*)/{s/^/#IGNORE /g}' {} + \
+		-exec sed -i -e '/^[[:space:]]*[sS][eE][tT][[:space:]]*([[:space:]]*CMAKE_VERBOSE_MAKEFILE.*)/{s/^/#IGNORE /g}' {} + \
+		|| die "${LINENO}: failed to disable hardcoded settings"
+
+	# NOTE Append some useful summary here
+	cat >> "${CMAKE_USE_DIR}"/CMakeLists.txt <<- _EOF_
+
+		MESSAGE(STATUS "<<< Gentoo configuration >>>
+		Build type \${CMAKE_BUILD_TYPE}
+		Install path \${CMAKE_INSTALL_PREFIX}
+		Compiler flags:
+		C \${CMAKE_C_FLAGS}
+		C++ \${CMAKE_CXX_FLAGS}
+		Linker flags:
+		Executable \${CMAKE_EXE_LINKER_FLAGS}
+		Module \${CMAKE_MODULE_LINKER_FLAGS}
+		Shared \${CMAKE_SHARED_LINKER_FLAGS}\n")
+	_EOF_
+}
+
+# Exported src_prepare implementation: apply ebuild-supplied PATCHES,
+# then user patches, from within ${S}.
+enable_cmake-utils_src_prepare() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	pushd "${S}" > /dev/null
+
+	debug-print "$FUNCNAME: PATCHES=$PATCHES"
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+
+	debug-print "$FUNCNAME: applying user patches"
+	epatch_user
+
+	popd > /dev/null
+}
+
+# @VARIABLE: mycmakeargs
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Optional cmake defines as a bash array. Should be defined before calling
+# src_configure.
+# @CODE
+# src_configure() {
+# local mycmakeargs=(
+# $(cmake-utils_use_with openconnect)
+# )
+#
+# cmake-utils_src_configure
+# }
+# @CODE
+
+# Exported src_configure implementation: writes the Gentoo common-config,
+# rules-override and toolchain cmake files, then invokes ${CMAKE_BINARY}
+# with the assembled argument list inside BUILD_DIR.
+enable_cmake-utils_src_configure() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Optionally delete bundled CMake find-modules so system copies win.
+	[[ "${CMAKE_REMOVE_MODULES}" == "yes" ]] && {
+		local name
+		for name in ${CMAKE_REMOVE_MODULES_LIST} ; do
+			find "${S}" -name ${name}.cmake -exec rm -v {} +
+		done
+	}
+
+	_check_build_dir
+
+	# check if CMakeLists.txt exist and if no then die
+	if [[ ! -e ${CMAKE_USE_DIR}/CMakeLists.txt ]] ; then
+		eerror "Unable to locate CMakeLists.txt under:"
+		eerror "\"${CMAKE_USE_DIR}/CMakeLists.txt\""
+		eerror "Consider not inheriting the cmake eclass."
+		die "FATAL: Unable to find CMakeLists.txt"
+	fi
+
+	# Remove dangerous things.
+	_modify-cmakelists
+
+	# Fix xdg collision with sandbox
+	export XDG_CONFIG_HOME="${T}"
+
+	# @SEE CMAKE_BUILD_TYPE
+	if [[ ${CMAKE_BUILD_TYPE} = Gentoo ]]; then
+		# Handle release builds
+		if ! has debug ${IUSE//+} || ! use debug; then
+			local CPPFLAGS=${CPPFLAGS}
+			append-cppflags -DNDEBUG
+		fi
+	fi
+
+	# Prepare Gentoo override rules (set valid compiler, append CPPFLAGS etc.)
+	local build_rules=${BUILD_DIR}/gentoo_rules.cmake
+	cat > "${build_rules}" <<- _EOF_
+		SET (CMAKE_AR $(type -P $(tc-getAR)) CACHE FILEPATH "Archive manager" FORCE)
+		SET (CMAKE_ASM_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> ${CFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "ASM compile command" FORCE)
+		SET (CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C compile command" FORCE)
+		SET (CMAKE_CXX_COMPILE_OBJECT "<CMAKE_CXX_COMPILER> <DEFINES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C++ compile command" FORCE)
+		SET (CMAKE_Fortran_COMPILE_OBJECT "<CMAKE_Fortran_COMPILER> <DEFINES> ${FCFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "Fortran compile command" FORCE)
+		SET (CMAKE_RANLIB $(type -P $(tc-getRANLIB)) CACHE FILEPATH "Archive index generator" FORCE)
+		SET (PKG_CONFIG_EXECUTABLE $(type -P $(tc-getPKG_CONFIG)) CACHE FILEPATH "pkg-config executable" FORCE)
+	_EOF_
+
+	local toolchain_file=${BUILD_DIR}/gentoo_toolchain.cmake
+	cat > ${toolchain_file} <<- _EOF_
+		SET (CMAKE_C_COMPILER $(tc-getCC))
+		SET (CMAKE_CXX_COMPILER $(tc-getCXX))
+		SET (CMAKE_Fortran_COMPILER $(tc-getFC))
+	_EOF_
+
+	if tc-is-cross-compiler; then
+		# Map Gentoo KERNEL values onto cmake's CMAKE_SYSTEM_NAME.
+		local sysname
+		case "${KERNEL:-linux}" in
+			Cygwin) sysname="CYGWIN_NT-5.1" ;;
+			HPUX) sysname="HP-UX" ;;
+			linux) sysname="Linux" ;;
+			Winnt) sysname="Windows" ;;
+			*) sysname="${KERNEL}" ;;
+		esac
+
+		cat >> "${toolchain_file}" <<- _EOF_
+			SET (CMAKE_SYSTEM_NAME "${sysname}")
+		_EOF_
+
+		if [ "${SYSROOT:-/}" != "/" ] ; then
+			# When cross-compiling with a sysroot (e.g. with crossdev's emerge wrappers)
+			# we need to tell cmake to use libs/headers from the sysroot but programs from / only.
+			cat >> "${toolchain_file}" <<- _EOF_
+				set(CMAKE_FIND_ROOT_PATH "${SYSROOT}")
+				set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+				set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+				set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+			_EOF_
+		fi
+	fi
+
+	# Older EAPIs without prefix support always install to /.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+	if [[ ${EPREFIX} ]]; then
+		cat >> "${build_rules}" <<- _EOF_
+			# in Prefix we need rpath and must ensure cmake gets our default linker path
+			# right ... except for Darwin hosts
+			IF (NOT APPLE)
+			SET (CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
+			SET (CMAKE_PLATFORM_REQUIRED_RUNTIME_PATH "${EPREFIX}/usr/${CHOST}/lib/gcc;${EPREFIX}/usr/${CHOST}/lib;${EPREFIX}/usr/$(get_libdir);${EPREFIX}/$(get_libdir)"
+			CACHE STRING "" FORCE)
+
+			ELSE ()
+
+			SET(CMAKE_PREFIX_PATH "${EPREFIX}${PREFIX}" CACHE STRING "" FORCE)
+			SET(CMAKE_SKIP_BUILD_RPATH OFF CACHE BOOL "" FORCE)
+			SET(CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
+			SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE CACHE BOOL "")
+			SET(CMAKE_INSTALL_RPATH "${EPREFIX}${PREFIX}/lib;${EPREFIX}/usr/${CHOST}/lib/gcc;${EPREFIX}/usr/${CHOST}/lib;${EPREFIX}/usr/$(get_libdir);${EPREFIX}/$(get_libdir)" CACHE STRING "" FORCE)
+			SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE CACHE BOOL "" FORCE)
+			SET(CMAKE_INSTALL_NAME_DIR "${EPREFIX}${PREFIX}/lib" CACHE STRING "" FORCE)
+
+			ENDIF (NOT APPLE)
+		_EOF_
+	fi
+
+	# Common configure parameters (invariants)
+	local common_config=${BUILD_DIR}/gentoo_common_config.cmake
+	local libdir=$(get_libdir)
+	cat > "${common_config}" <<- _EOF_
+		SET (LIB_SUFFIX ${libdir/lib} CACHE STRING "library path suffix" FORCE)
+		SET (CMAKE_INSTALL_LIBDIR ${libdir} CACHE PATH "Output directory for libraries")
+	_EOF_
+	[[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && echo 'SET (CMAKE_COLOR_MAKEFILE OFF CACHE BOOL "pretty colors during make" FORCE)' >> "${common_config}"
+
+	# Convert mycmakeargs to an array, for backwards compatibility
+	# Make the array a local variable since <=portage-2.1.6.x does not
+	# support global arrays (see bug #297255).
+	if [[ $(declare -p mycmakeargs 2>&-) != "declare -a mycmakeargs="* ]]; then
+		local mycmakeargs_local=(${mycmakeargs})
+	else
+		local mycmakeargs_local=("${mycmakeargs[@]}")
+	fi
+
+	if [[ ${CMAKE_WARN_UNUSED_CLI} == no ]] ; then
+		local warn_unused_cli="--no-warn-unused-cli"
+	else
+		local warn_unused_cli=""
+	fi
+
+	# Common configure parameters (overridable)
+	# NOTE CMAKE_BUILD_TYPE can be only overridden via CMAKE_BUILD_TYPE eclass variable
+	# No -DCMAKE_BUILD_TYPE=xxx definitions will be in effect.
+	local cmakeargs=(
+		${warn_unused_cli}
+		-C "${common_config}"
+		-G "$(_generator_to_use)"
+		-DCMAKE_INSTALL_PREFIX="${EPREFIX}${PREFIX}"
+		"${mycmakeargs_local[@]}"
+		-DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}"
+		-DCMAKE_INSTALL_DO_STRIP=OFF
+		-DCMAKE_USER_MAKE_RULES_OVERRIDE="${build_rules}"
+		-DCMAKE_TOOLCHAIN_FILE="${toolchain_file}"
+		"${MYCMAKEARGS}"
+	)
+
+	if [[ -n "${CMAKE_EXTRA_CACHE_FILE}" ]] ; then
+		cmakeargs+=( -C "${CMAKE_EXTRA_CACHE_FILE}" )
+	fi
+
+	pushd "${BUILD_DIR}" > /dev/null
+	debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: mycmakeargs is ${mycmakeargs_local[*]}"
+	echo "${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}"
+	"${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}" || die "cmake failed"
+	popd > /dev/null
+}
+
+# Exported src_compile implementation: thin forwarder to
+# cmake-utils_src_make.
+enable_cmake-utils_src_compile() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	cmake-utils_src_make "$@"
+}
+
+_ninjaopts_from_makeopts() {
+ if [[ ${NINJAOPTS+set} == set ]]; then
+ return 0
+ fi
+ local ninjaopts=()
+ set -- ${MAKEOPTS}
+ while (( $# )); do
+ case $1 in
+ -j|-l|-k)
+ ninjaopts+=( $1 $2 )
+ shift 2
+ ;;
+ -j*|-l*|-k*)
+ ninjaopts+=( $1 )
+ shift 1
+ ;;
+ *) shift ;;
+ esac
+ done
+ export NINJAOPTS="${ninjaopts[*]}"
+}
+
+# @FUNCTION: ninja_src_make
+# @INTERNAL
+# @DESCRIPTION:
+# Build the package using ninja generator
+ninja_src_make() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ [[ -e build.ninja ]] || die "build.ninja not found. Error during configure stage."
+
+ _ninjaopts_from_makeopts
+
+ if [[ "${CMAKE_VERBOSE}" != "OFF" ]]; then
+ set -- ninja ${NINJAOPTS} -v "$@"
+ else
+ set -- ninja ${NINJAOPTS} "$@"
+ fi
+
+ echo "$@"
+ "$@" || die
+}
+
+# @FUNCTION: emake_src_make
+# @INTERNAL
+# @DESCRIPTION:
+# Build the package using make generator
+emake_src_make() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	[[ -e Makefile ]] || die "Makefile not found. Error during configure stage."
+
+	# VERBOSE=1 makes CMake-generated Makefiles echo the full command
+	# lines, unless CMAKE_VERBOSE is exactly "OFF".
+	if [[ "${CMAKE_VERBOSE}" != "OFF" ]]; then
+		emake VERBOSE=1 "$@" || die
+	else
+		emake "$@" || die
+	fi
+
+}
+
+# @FUNCTION: cmake-utils_src_make
+# @DESCRIPTION:
+# Function for building the package. Automatically detects the build type.
+# All arguments are passed to emake.
+cmake-utils_src_make() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	_check_build_dir
+	pushd "${BUILD_DIR}" > /dev/null
+
+	# Dispatch to emake_src_make or ninja_src_make depending on the
+	# configured CMAKE_MAKEFILE_GENERATOR.
+	${CMAKE_MAKEFILE_GENERATOR}_src_make "$@"
+
+	popd > /dev/null
+}
+
+enable_cmake-utils_src_test() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	_check_build_dir
+	pushd "${BUILD_DIR}" > /dev/null
+	# Nothing to do when the project defines no ctest tests.
+	[[ -e CTestTestfile.cmake ]] || { echo "No tests found. Skipping."; return 0 ; }
+
+	# myctestargs is presumably pre-set by the ebuild; TEST_VERBOSE only
+	# appends to it.
+	[[ -n ${TEST_VERBOSE} ]] && myctestargs+=( --extra-verbose --output-on-failure )
+
+	if ctest "${myctestargs[@]}" "$@" ; then
+		einfo "Tests succeeded."
+		popd > /dev/null
+		return 0
+	else
+		if [[ -n "${CMAKE_YES_I_WANT_TO_SEE_THE_TEST_LOG}" ]] ; then
+			# on request from Diego
+			eerror "Tests failed. Test log ${BUILD_DIR}/Testing/Temporary/LastTest.log follows:"
+			eerror "--START TEST LOG--------------------------------------------------------------"
+			cat "${BUILD_DIR}/Testing/Temporary/LastTest.log"
+			eerror "--END TEST LOG----------------------------------------------------------------"
+			die "Tests failed."
+		else
+			die "Tests failed. When you file a bug, please attach the following file: \n\t${BUILD_DIR}/Testing/Temporary/LastTest.log"
+		fi
+
+		# die might not die due to nonfatal
+		popd > /dev/null
+		return 1
+	fi
+}
+
+enable_cmake-utils_src_install() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	_check_build_dir
+	pushd "${BUILD_DIR}" > /dev/null
+	# Both generator commands (emake/ninja) accept an "install" target with
+	# DESTDIR staging into the image directory ${D}.
+	DESTDIR="${D}" ${CMAKE_MAKEFILE_GENERATOR} install "$@" || die "died running ${CMAKE_MAKEFILE_GENERATOR} install"
+	popd > /dev/null
+
+	# Default documentation installation runs from ${S}, not the build dir.
+	pushd "${S}" > /dev/null
+	einstalldocs
+	popd > /dev/null
+}
+
+# @FUNCTION: cmake-utils_src_prepare
+# @DESCRIPTION:
+# Apply ebuild and user patches.
+cmake-utils_src_prepare() {
+	_execute_optionally "src_prepare" "$@"
+}
+
+# @FUNCTION: cmake-utils_src_configure
+# @DESCRIPTION:
+# General function for configuring with cmake. Default behaviour is to start an
+# out-of-source build.
+cmake-utils_src_configure() {
+	_execute_optionally "src_configure" "$@"
+}
+
+# @FUNCTION: cmake-utils_src_compile
+# @DESCRIPTION:
+# General function for compiling with cmake.
+# Automatically detects the build type. All arguments are passed to emake.
+cmake-utils_src_compile() {
+	_execute_optionally "src_compile" "$@"
+}
+
+# @FUNCTION: cmake-utils_src_test
+# @DESCRIPTION:
+# Function for testing the package. Automatically detects the build type.
+cmake-utils_src_test() {
+	_execute_optionally "src_test" "$@"
+}
+
+# @FUNCTION: cmake-utils_src_install
+# @DESCRIPTION:
+# Function for installing the package. Automatically detects the build type.
+cmake-utils_src_install() {
+	_execute_optionally "src_install" "$@"
+}
+
+# Optionally executes phases based on WANT_CMAKE variable/USE flag.
+_execute_optionally() {
+	local phase="$1" ; shift
+	# WANT_CMAKE=always runs the phase unconditionally; any other value is
+	# treated as the name of a USE flag gating the cmake phase functions.
+	if [[ ${WANT_CMAKE} = always ]]; then
+		enable_cmake-utils_${phase} "$@"
+	else
+		use ${WANT_CMAKE} && enable_cmake-utils_${phase} "$@"
+	fi
+}
+
+fi
diff --git a/eclass/common-lisp-3.eclass b/eclass/common-lisp-3.eclass
new file mode 100644
index 000000000000..ef6531ba3133
--- /dev/null
+++ b/eclass/common-lisp-3.eclass
@@ -0,0 +1,211 @@
+# Copyright 1999-2010 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: common-lisp-3.eclass
+# @MAINTAINER:
+# Common Lisp project <common-lisp@gentoo.org>
+# @BLURB: functions to support the installation of Common Lisp libraries
+# @DESCRIPTION:
+# Since Common Lisp libraries share similar structure, this eclass aims
+# to provide a simple way to write ebuilds with these characteristics.
+
+inherit eutils
+
+# CL packages in the overlay don't have their tarballs on the mirrors
+# so it's useless to mirror them
+RESTRICT="mirror"
+
+# @ECLASS-VARIABLE: CLSOURCEROOT
+# @DESCRIPTION:
+# Default path of Common Lisp libraries sources. Sources will
+# be installed into ${CLSOURCEROOT}/${CLPACKAGE}.
+CLSOURCEROOT="${ROOT%/}"/usr/share/common-lisp/source
+
+# @ECLASS-VARIABLE: CLSYSTEMROOT
+# @DESCRIPTION:
+# Default path to find any asdf file. Any asdf files will be
+# symlinked in ${CLSYSTEMROOT}/${CLSYSTEM} as they may be in
+# an arbitrarily deeply nested directory under ${CLSOURCEROOT}/${CLPACKAGE}.
+CLSYSTEMROOT="${ROOT%/}"/usr/share/common-lisp/systems
+
+# @ECLASS-VARIABLE: CLPACKAGE
+# @DESCRIPTION:
+# Default package name. To override, set these after inheriting this eclass.
+CLPACKAGE="${PN}"
+
+PDEPEND="virtual/commonlisp"
+
+EXPORT_FUNCTIONS src_compile src_install
+
+# @FUNCTION: common-lisp-3_src_compile
+# @DESCRIPTION:
+# Since there's nothing to build in most cases, default doesn't do
+# anything.
+common-lisp-3_src_compile() { true; }
+
+# @FUNCTION: absolute-path-p
+# @DESCRIPTION:
+# Returns true if ${1} is an absolute path.
+absolute-path-p() {
+	[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
+	[[ ${1} == /* ]]
+}
+
+# @FUNCTION: common-lisp-install-one-source
+# @DESCRIPTION:
+# Installs ${2} source file in ${3} inside CLSOURCEROOT/CLPACKAGE.
+common-lisp-install-one-source() {
+	[[ $# -eq 3 ]] || die "${FUNCNAME[0]} must receive exactly three arguments"
+
+	local fpredicate=${1}
+	local source=${2}
+	local target="${CLSOURCEROOT}/${CLPACKAGE}/${3}"
+
+	# Refuse absolute sources: everything must stay relative to the tree.
+	if absolute-path-p "${source}" ; then
+		die "Cannot install files with absolute path: ${source}"
+	fi
+
+	# Only install the file if it satisfies the type predicate ${1}
+	# (e.g. lisp-file-p, or "true" to accept anything).
+	if ${fpredicate} "${source}" ; then
+		insinto "${target}"
+		doins "${source}" || die "Failed to install ${source} into $(dirname "${target}")"
+	fi
+}
+
+# @FUNCTION: lisp-file-p
+# @DESCRIPTION:
+# Returns true if ${1} is lisp source file.
+lisp-file-p() {
+	[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
+
+	[[ ${1} =~ \.(lisp|lsp|cl)$ ]]
+}
+
+# @FUNCTION: common-lisp-get-fpredicate
+# @DESCRIPTION:
+# Outputs the corresponding predicate to check files of type ${1}.
+common-lisp-get-fpredicate() {
+	[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
+
+	local ftype=${1}
+	case ${ftype} in
+		"lisp") echo "lisp-file-p" ;;
+		"all" ) echo "true" ;;
+		* ) die "Unknown filetype specifier ${ftype}" ;;
+	esac
+}
+
+# @FUNCTION: common-lisp-install-sources
+# @USAGE: common-lisp-install-sources path [<other_paths>...]
+# @DESCRIPTION:
+# Recursively install Lisp sources.  The file type defaults to "lisp";
+# pass "-t <type>" as the first two arguments to select another type.
+# When given a directory, it is recursively scanned for Lisp source
+# files with the suffixes .lisp, .lsp or .cl.
+common-lisp-install-sources() {
+	local ftype="lisp"
+	if [[ ${1} == "-t" ]] ; then
+		ftype=${2}
+		shift ; shift
+	fi
+
+	[[ $# -ge 1 ]] || die "${FUNCNAME[0]} must receive one non-option argument"
+
+	local fpredicate=$(common-lisp-get-fpredicate "${ftype}")
+
+	# NOTE(review): the unquoted $(find ...) in the recursion word-splits,
+	# so paths containing whitespace would break -- presumably acceptable
+	# for the source trees this eclass handles; verify.
+	for path in "${@}" ; do
+		if [[ -f ${path} ]] ; then
+			common-lisp-install-one-source ${fpredicate} "${path}" "$(dirname "${path}")"
+		elif [[ -d ${path} ]] ; then
+			common-lisp-install-sources -t ${ftype} $(find "${path}" -type f)
+		else
+			die "${path} it neither a regular file nor a directory"
+		fi
+	done
+}
+
+# @FUNCTION: common-lisp-install-one-asdf
+# @DESCRIPTION:
+# Installs ${1} asdf file in CLSOURCEROOT/CLPACKAGE and symlinks it in
+# CLSYSTEMROOT.
+common-lisp-install-one-asdf() {
+	[[ $# != 1 ]] && die "${FUNCNAME[0]} must receive exactly one argument"
+
+	# the suffix «.asd» is optional: strip the first occurrence and
+	# re-append, so both "foo" and "foo.asd" are accepted
+	local source=${1/.asd}.asd
+	common-lisp-install-one-source true "${source}" "$(dirname "${source}")"
+	local target="${CLSOURCEROOT%/}/${CLPACKAGE}/${source}"
+	dosym "${target}" "${CLSYSTEMROOT%/}/$(basename ${target})"
+}
+
+# @FUNCTION: common-lisp-install-asdf
+# @USAGE: common-lisp-install-asdf path [<other_paths>...]
+# @DESCRIPTION:
+# Installs all ASDF files and creates symlinks in CLSYSTEMROOT.
+# When given a directory, it will be recursively scanned for ASDF
+# files with extension .asd.
+common-lisp-install-asdf() {
+	dodir "${CLSYSTEMROOT}"
+
+	# Default to ${CLSYSTEMS}, then to every .asd file found in the tree.
+	[[ $# = 0 ]] && set - ${CLSYSTEMS}
+	[[ $# = 0 ]] && set - $(find . -type f -name \*.asd)
+	for sys in "${@}" ; do
+		common-lisp-install-one-asdf ${sys}
+	done
+}
+
+# @FUNCTION: common-lisp-3_src_install
+# @DESCRIPTION:
+# Recursively install Lisp sources, asdf files and most common doc files.
+common-lisp-3_src_install() {
+	common-lisp-install-sources .
+	common-lisp-install-asdf
+	for i in AUTHORS README* HEADER TODO* CHANGELOG Change[lL]og CHANGES BUGS CONTRIBUTORS *NEWS* ; do
+		[[ -f ${i} ]] && dodoc ${i}
+	done
+}
+
+# @FUNCTION: common-lisp-export-impl-args
+# @USAGE: common-lisp-export-impl-args <lisp-implementation>
+# @DESCRIPTION:
+# Export a few variables containing the switches necessary
+# to make the CL implementation perform basic functions:
+# * CL_NORC: don't load system-wide or user-specific initfiles
+# * CL_LOAD: load a certain file
+# * CL_EVAL: eval a certain expression at startup
+common-lisp-export-impl-args() {
+	if [[ $# != 1 ]]; then
+		eerror "Usage: ${FUNCNAME[0]} lisp-implementation"
+		die "${FUNCNAME[0]}: wrong number of arguments: $#"
+	fi
+	case ${1} in
+		clisp)
+			CL_NORC="-norc"
+			CL_LOAD="-i"
+			CL_EVAL="-x"
+			;;
+		clozure | ccl | openmcl)
+			CL_NORC="--no-init"
+			CL_LOAD="--load"
+			CL_EVAL="--eval"
+			;;
+		cmucl)
+			CL_NORC="-nositeinit -noinit"
+			CL_LOAD="-load"
+			CL_EVAL="-eval"
+			;;
+		ecl)
+			CL_NORC="-norc"
+			CL_LOAD="-load"
+			CL_EVAL="-eval"
+			;;
+		sbcl)
+			CL_NORC="--sysinit /dev/null --userinit /dev/null"
+			CL_LOAD="--load"
+			CL_EVAL="--eval"
+			;;
+		*)
+			# NOTE(review): unquoted die arguments word-split here, and
+			# ${0} names the running shell rather than this eclass.
+			die ${1} is not supported by ${0}
+			;;
+	esac
+	export CL_NORC CL_LOAD CL_EVAL
+}
diff --git a/eclass/common-lisp-common-2.eclass b/eclass/common-lisp-common-2.eclass
new file mode 100644
index 000000000000..49fa9503fb7c
--- /dev/null
+++ b/eclass/common-lisp-common-2.eclass
@@ -0,0 +1,80 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Author Matthew Kennedy <mkennedy@gentoo.org>
+#
+# Sundry code common to many Common Lisp related ebuilds.
+
+# Some handy constants
+
+inherit eutils multilib
+
+CLSOURCEROOT=/usr/share/common-lisp/source/
+CLSYSTEMROOT=/usr/share/common-lisp/systems/
+
+# Many of our Common Lisp ebuilds are either inspired by, or actually
+# use packages and files from the Debian project's archives.
+
+# Install Debian packaging credits under a "debian" docdir.
+do-debian-credits() {
+	docinto debian
+	for i in copyright README.Debian changelog; do
+		# NOTE(review): tests ${i} relative to $PWD but installs from
+		# ${S}/debian -- assumes the caller's cwd matches; verify.
+		test -f $i && dodoc "${S}"/debian/${i}
+	done
+	docinto .
+}
+
+# BIG FAT HACK: Since the Portage emerge step kills file timestamp
+# information, we need to compensate by ensuring all FASL files are
+# more recent than their source files.
+
+# The following `impl-*-timestamp-hack' functions SHOULD NOT be used
+# outside of this eclass.
+
+# Pack the implementation's lib tree so relative timestamps survive merge.
+impl-save-timestamp-hack() {
+	local impl=$1
+	dodir /usr/share/${impl}
+	tar cpjf "${D}"/usr/share/${impl}/portage-timestamp-compensate -C "${D}"/usr/$(get_libdir)/${impl} .
+}
+
+# Unpack the saved tree over the live filesystem, restoring timestamps.
+impl-restore-timestamp-hack() {
+	local impl=$1
+	tar xjpfo /usr/share/${impl}/portage-timestamp-compensate -C /usr/$(get_libdir)/${impl}
+}
+
+# Remove the implementation's lib tree; errors are deliberately ignored.
+impl-remove-timestamp-hack() {
+	local impl=$1
+	rm -rf /usr/$(get_libdir)/${impl} &>/dev/null || true
+}
+
+standard-impl-postinst() {
+	local impl=$1
+	# NOTE(review): "cmucl" looks hardcoded -- the sibling eclasses pass
+	# no fixed name here, so this probably should be ${impl}; confirm.
+	unregister-common-lisp-implementation cmucl
+	case ${impl} in
+		cmucl|sbcl)
+			impl-restore-timestamp-hack ${impl}
+			;;
+		*)
+			;;
+	esac
+	register-common-lisp-implementation ${impl}
+}
+
+standard-impl-postrm() {
+	local impl=$1 impl_binary=$2
+	# Only clean up when the implementation binary is really gone
+	# (i.e. this was an unmerge, not an upgrade).
+	if [ ! -x ${impl_binary} ]; then
+		case ${impl} in
+			cmucl|sbcl)
+				impl-remove-timestamp-hack ${impl}
+				;;
+			*)
+				;;
+		esac
+		rm -rf /var/cache/common-lisp-controller/*/${impl}
+	fi
+}
+
+# Local Variables: ***
+# mode: shell-script ***
+# tab-width: 4 ***
+# End: ***
diff --git a/eclass/common-lisp-common-3.eclass b/eclass/common-lisp-common-3.eclass
new file mode 100644
index 000000000000..b652b8397c8b
--- /dev/null
+++ b/eclass/common-lisp-common-3.eclass
@@ -0,0 +1,82 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Author Matthew Kennedy <mkennedy@gentoo.org>
+#
+# Sundry code common to many Common Lisp related ebuilds.  Some
+# implementations use the Portage time stamp hack to ensure their
+# installed files have the right modification time relative to each
+# other.
+
+inherit eutils multilib
+
+CLSOURCEROOT=/usr/share/common-lisp/source/
+CLSYSTEMROOT=/usr/share/common-lisp/systems/
+
+# Many of our Common Lisp ebuilds are either inspired by, or actually
+# use packages and files from the Debian project's archives.
+
+# Install Debian packaging credits under a "debian" docdir.
+do-debian-credits() {
+	docinto debian
+	for i in copyright README.Debian changelog; do
+		# NOTE(review): tests ${i} relative to $PWD but installs from
+		# ${S}/debian -- assumes the caller's cwd matches; verify.
+		test -f $i && dodoc "${S}"/debian/${i}
+	done
+	docinto .
+}
+
+# BIG FAT HACK: Since the Portage emerge step kills file timestamp
+# information, we need to compensate by ensuring all FASL files are
+# more recent than their source files.
+
+# The following `impl-*-timestamp-hack' functions SHOULD NOT be used
+# outside of this eclass.
+
+# Bug http://bugs.gentoo.org/show_bug.cgi?id=16162 should remove the
+# need for this hack.
+
+# Pack the implementation's lib tree so relative timestamps survive merge.
+impl-save-timestamp-hack() {
+	local impl=$1
+	dodir /usr/share/${impl}
+	tar cpjf "${D}"/usr/share/${impl}/portage-timestamp-compensate -C "${D}"/usr/$(get_libdir)/${impl} .
+}
+
+# Unpack the saved tree over the live filesystem, restoring timestamps.
+impl-restore-timestamp-hack() {
+	local impl=$1
+	tar xjpfo /usr/share/${impl}/portage-timestamp-compensate -C /usr/$(get_libdir)/${impl}
+}
+
+# Remove the implementation's lib tree; errors are deliberately ignored.
+impl-remove-timestamp-hack() {
+	local impl=$1
+	rm -rf /usr/$(get_libdir)/${impl} &>/dev/null || true
+}
+
+standard-impl-postinst() {
+	local impl=$1
+	case ${impl} in
+		cmucl|sbcl)
+			impl-restore-timestamp-hack ${impl}
+			;;
+		*)
+			;;
+	esac
+}
+
+standard-impl-postrm() {
+	local impl=$1 impl_binary=$2
+	# Only clean up when the implementation binary is really gone
+	# (i.e. this was an unmerge, not an upgrade).
+	if [ ! -x ${impl_binary} ]; then
+		case ${impl} in
+			cmucl|sbcl)
+				impl-remove-timestamp-hack ${impl}
+				;;
+			*)
+				;;
+		esac
+		rm -rf /var/cache/common-lisp-controller/*/${impl}
+	fi
+}
+
+# Local Variables: ***
+# mode: shell-script ***
+# tab-width: 4 ***
+# End: ***
diff --git a/eclass/common-lisp-common.eclass b/eclass/common-lisp-common.eclass
new file mode 100644
index 000000000000..7350573238a7
--- /dev/null
+++ b/eclass/common-lisp-common.eclass
@@ -0,0 +1,209 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Author Matthew Kennedy <mkennedy@gentoo.org>
+#
+# Sundry code common to many Common Lisp related ebuilds.
+
+# Some handy constants
+
+inherit eutils multilib
+
+CLFASLROOT=/usr/$(get_libdir)/common-lisp/
+CLSOURCEROOT=/usr/share/common-lisp/source/
+CLSYSTEMROOT=/usr/share/common-lisp/systems/
+
+# Many of our Common Lisp ebuilds are either inspired by, or actually
+# use packages and files from the Debian project's archives.
+
+# Install Debian packaging credits under a "debian" docdir.
+do-debian-credits() {
+	docinto debian
+	for i in copyright README.Debian changelog; do
+		# be silent, since all files are not always present
+		dodoc "${S}"/debian/${i} &>/dev/null || true
+	done
+	docinto .
+}
+
+# Most of the code below is from Debian's Common Lisp Controller
+# package
+
+# Register implementation $1 with common-lisp-controller and recompile
+# every installed system (.asd and .system) for it.
+register-common-lisp-implementation() {
+	PROGNAME=$(basename $0)
+	# first check if there is at least a compiler-name:
+	if [ -z "$1" ] ; then
+		cat <<EOF
+usage: $PROGNAME compiler-name
+
+registers a Common Lisp compiler to the
+Common-Lisp-Controller system.
+EOF
+		exit 1
+	fi
+	IMPL=$1
+	FILE="/usr/$(get_libdir)/common-lisp/bin/$IMPL.sh"
+	if [ ! -f "$FILE" ] ; then
+		cat <<EOF
+$PROGNAME: I cannot find the script $FILE for the implementation $IMPL
+EOF
+		exit 2
+	fi
+	if [ ! -r "$FILE" ] ; then
+		cat <<EOF
+$PROGNAME: I cannot read the script $FILE for the implementation $IMPL
+EOF
+		exit 2
+	fi
+	# install CLC into the lisp
+	# NOTE(review): "exit 3" runs inside a subshell, so a failed CLC
+	# installation only prints the message and does NOT abort; verify.
+	sh "$FILE" install-clc || (echo "Installation of CLC failed" >&2 ; exit 3)
+	mkdir /usr/$(get_libdir)/common-lisp/$IMPL &>/dev/null || true
+	chown cl-builder:cl-builder /usr/$(get_libdir)/common-lisp/$IMPL
+
+	# now recompile the stuff
+	for i in /usr/share/common-lisp/systems/*.asd ; do
+		if [ -f $i -a -r $i ] ; then
+			i=${i%.asd}
+			package=${i##*/}
+			clc-autobuild-check $IMPL $package
+			if [ $? = 0 ]; then
+				echo recompiling package $package for implementation $IMPL
+				/usr/bin/clc-send-command --quiet recompile $package $IMPL
+			fi
+		fi
+	done
+	for i in /usr/share/common-lisp/systems/*.system ; do
+		if [ -f $i -a -r $i ] ; then
+			i=${i%.system}
+			package=${i##*/}
+			clc-autobuild-check $IMPL $package
+			if [ $? = 0 ]; then
+				echo recompiling package $package for implementation $IMPL
+				/usr/bin/clc-send-command --quiet recompile $package $IMPL
+			fi
+		fi
+	done
+	echo "$PROGNAME: Compiler $IMPL installed"
+}
+
+# Unregister implementation $1 and delete its FASL subtree.  Must run
+# as root; missing registration is treated as success.
+unregister-common-lisp-implementation() {
+	PROGNAME=$(basename $0)
+	if [ `id -u` != 0 ] ; then
+		echo $PROGNAME: you need to be root to run this program
+		exit 1
+	fi
+	if [ -z "$1" ] ; then
+		cat <<EOF
+usage: $PROGNAME compiler-name
+
+un-registers a Common Lisp compiler to the
+Common-Lisp-Controller system.
+EOF
+		exit 1
+	fi
+	IMPL=$1
+	IMPL_BIN="/usr/$(get_libdir)/common-lisp/bin/$IMPL.sh"
+	if [ ! -f "$IMPL_BIN" ] ; then
+		cat <<EOF
+$PROGNAME: No implementation of the name $IMPL is registered
+Cannot find the file $IMPL_BIN
+
+Maybe you already removed it?
+EOF
+		exit 0
+	fi
+	if [ ! -r "$IMPL_BIN" ] ; then
+		cat <<EOF
+$PROGNAME: No implementation of the name $IMPL is registered
+Cannot read the file $IMPL_BIN
+
+Maybe you already removed it?
+EOF
+		exit 0
+	fi
+	# Uninstall the CLC
+	sh $IMPL_BIN remove-clc || echo "De-installation of CLC failed" >&2
+	clc-autobuild-impl $IMPL inherit
+	# Just remove the damn subtree
+	(cd / ; rm -rf "/usr/$(get_libdir)/common-lisp/$IMPL/" ; true )
+	echo "$PROGNAME: Common Lisp implementation $IMPL uninstalled"
+}
+
+reregister-all-common-lisp-implementations() {
+	# Rebuilds all common lisp implementations
+	# Written by Kevin Rosenberg <kmr@debian.org>
+	# GPL-2 license
+	local clc_bin_dir=/usr/$(get_libdir)/common-lisp/bin
+	# nullglob so a dir without *.sh scripts yields an empty loop;
+	# the previous shopt state is restored afterwards.
+	local opt=$(shopt nullglob); shopt -s nullglob
+	cd $clc_bin_dir
+	for impl_bin in *.sh; do
+		impl=$(echo $impl_bin | sed 's/\(.*\).sh/\1/')
+		unregister-common-lisp-implementation $impl
+		register-common-lisp-implementation $impl
+	done
+	cd - >/dev/null
+	[[ $opt = *off ]] && shopt -u nullglob
+}
+
+# BIG FAT HACK: Since the Portage emerge step kills file timestamp
+# information, we need to compensate by ensuring all FASL files are
+# more recent than their source files.
+
+# The following `impl-*-timestamp-hack' functions SHOULD NOT be used
+# outside of this eclass.
+
+# Pack the implementation's lib tree so relative timestamps survive merge.
+impl-save-timestamp-hack() {
+	local impl=$1
+	dodir /usr/share/${impl}
+	tar cpjf "${D}"/usr/share/${impl}/portage-timestamp-compensate -C "${D}"/usr/$(get_libdir)/${impl} .
+}
+
+# Unpack the saved tree over the live filesystem, restoring timestamps.
+impl-restore-timestamp-hack() {
+	local impl=$1
+	tar xjpfo /usr/share/${impl}/portage-timestamp-compensate -C /usr/$(get_libdir)/${impl}
+}
+
+# Remove the implementation's lib tree; errors are deliberately ignored.
+impl-remove-timestamp-hack() {
+	local impl=$1
+	rm -rf /usr/$(get_libdir)/${impl} &>/dev/null || true
+}
+
+# Succeeds (returns 0) when ${1} occurs among the remaining arguments.
+test-in() {
+	local symbol=$1
+	shift
+	for i in $@; do
+		if [ $i == ${symbol} ]; then
+			return 0 # true
+		fi
+	done
+	false
+}
+
+standard-impl-postinst() {
+	local impl=$1
+	rm -rf /usr/$(get_libdir)/common-lisp/${impl}/* &>/dev/null || true
+	chown cl-builder:cl-builder /usr/$(get_libdir)/common-lisp/${impl}
+	if test-in ${impl} cmucl sbcl; then
+		impl-restore-timestamp-hack ${impl}
+	fi
+	chown -R root:0 /usr/$(get_libdir)/${impl}
+	/usr/bin/clc-autobuild-impl ${impl} yes
+	register-common-lisp-implementation ${impl}
+}
+
+standard-impl-postrm() {
+	local impl=$1 impl_binary=$2
+	# Since we keep our own time stamps we must manually remove them
+	# here.
+	if [ ! -x ${impl_binary} ]; then
+		if test-in ${impl} cmucl sbcl; then
+			impl-remove-timestamp-hack ${impl}
+		fi
+		rm -rf /usr/$(get_libdir)/common-lisp/${impl}/*
+	fi
+}
+
+# Local Variables: ***
+# mode: shell-script ***
+# tab-width: 4 ***
+# End: ***
diff --git a/eclass/common-lisp.eclass b/eclass/common-lisp.eclass
new file mode 100644
index 000000000000..a67c4c2f5fc3
--- /dev/null
+++ b/eclass/common-lisp.eclass
@@ -0,0 +1,78 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Author Matthew Kennedy <mkennedy@gentoo.org>
+#
+# This eclass supports the common-lisp-controller installation of many
+# Common Lisp libraries
+
+inherit common-lisp-common
+
+# CLPACKAGE must be set by the inheriting ebuild (space-separated list).
+CLPACKAGE=
+DEPEND="dev-lisp/common-lisp-controller"
+
+EXPORT_FUNCTIONS pkg_preinst pkg_postinst pkg_postrm
+
+# Register every source package listed in CLPACKAGE with the controller.
+common-lisp_pkg_postinst() {
+	# NOTE(review): the message below says "pkg_prerm" although this is
+	# pkg_postinst; runtime string left untouched.
+	if [ -z "${CLPACKAGE}" ]; then
+		die "CLPACKAGE was empty or undefined upon call to pkg_prerm"
+	else
+		for package in ${CLPACKAGE}; do
+			einfo "Registering Common Lisp source for ${package}"
+			register-common-lisp-source ${package}
+		done
+	fi
+}
+
+# Unregister packages whose source directory no longer exists.
+common-lisp_pkg_postrm() {
+	if [ -z "${CLPACKAGE}" ]; then
+		die "CLPACKAGE was empty or undefined upon call to pkg_prerm"
+	else
+		for package in ${CLPACKAGE}; do
+			if [ ! -d ${CLSOURCEROOT}/${package} ]; then
+				einfo "Unregistering Common Lisp source for ${package}"
+#				rm -rf ${CLFASLROOT}/*/${package}
+				unregister-common-lisp-source ${package}
+			fi
+		done
+	fi
+}
+
+#
+# In pkg_preinst, we remove the FASL files for the previous version of
+# the source.
+#
+common-lisp_pkg_preinst() {
+	if [ -z "${CLPACKAGE}" ]; then
+		die "CLPACKAGE was empty or undefined upon call to pkg_preinst"
+	else
+		for package in ${CLPACKAGE}; do
+			einfo "Removing FASL files for previous version of Common Lisp package ${package}"
+			rm -rf ${CLFASLROOT}/*/${package} || true
+		done
+	fi
+}
+
+# Install the given files into this package's CLSOURCEROOT subdirectory.
+common-lisp-install() {
+	insinto ${CLSOURCEROOT}/${CLPACKAGE}
+	doins $@
+}
+
+# Create CLSYSTEMROOT symlinks: with no arguments, link the package's own
+# ${CLPACKAGE}.asd; otherwise one link per named system.
+common-lisp-system-symlink() {
+	dodir ${CLSYSTEMROOT}/`dirname ${CLPACKAGE}`
+	if [ $# -eq 0 ]; then
+		dosym ${CLSOURCEROOT}/${CLPACKAGE}/${CLPACKAGE}.asd \
+			${CLSYSTEMROOT}/${CLPACKAGE}.asd
+	else
+		for package in "$@" ; do
+			dosym ${CLSOURCEROOT}/$CLPACKAGE/${package}.asd \
+				${CLSYSTEMROOT}/${package}.asd
+		done
+	fi
+}
+
+# Local Variables: ***
+# mode: shell-script ***
+# tab-width: 4 ***
+# End: ***
diff --git a/eclass/confutils.eclass b/eclass/confutils.eclass
new file mode 100644
index 000000000000..764ee31ebef2
--- /dev/null
+++ b/eclass/confutils.eclass
@@ -0,0 +1,478 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: confutils.eclass
+# @MAINTAINER:
+# Benedikt Böhm <hollow@gentoo.org>
+# @BLURB: utility functions to help with configuring a package
+# @DESCRIPTION:
+# The confutils eclass contains functions to handle use flag dependencies and
+# extended --with-*/--enable-* magic.
+#
+# Based on the PHP5 eclass by Stuart Herbert <stuart@stuartherbert.com>
+
+inherit eutils
+
+# @VARIABLE: EBUILD_SUPPORTS_SHAREDEXT
+# @DESCRIPTION:
+# Set this variable to 1 if your ebuild supports shared extensions. You need to
+# call confutils_init() in pkg_setup() if you use this variable.
+if [[ ${EBUILD_SUPPORTS_SHAREDEXT} == 1 ]]; then
+	IUSE="sharedext"
+fi
+
+# @FUNCTION: confutils_init
+# @USAGE: [value]
+# @DESCRIPTION:
+# Call this function from your pkg_setup() function to initialize this eclass
+# if EBUILD_SUPPORTS_SHAREDEXT is enabled. If no value is given `shared' is used
+# by default.
+confutils_init() {
+	# ${shared} is intentionally NOT local: it is consumed later by
+	# _confutils_shared_suffix and the enable_extension_* helpers.
+	if [[ ${EBUILD_SUPPORTS_SHAREDEXT} == 1 ]] && use sharedext; then
+		shared="=${1:-shared}"
+	else
+		shared=
+	fi
+}
+
+# @FUNCTION: confutils_require_one
+# @USAGE: <flag> [more flags ...]
+# @DESCRIPTION:
+# Use this function to ensure exactly one of the specified USE flags have been
+# enabled
+confutils_require_one() {
+	local required_flags="$@"
+	local success=0
+
+	# Count how many of the given flags are enabled.
+	for flag in ${required_flags}; do
+		use ${flag} && ((success++))
+	done
+
+	[[ ${success} -eq 1 ]] && return
+
+	echo
+	eerror "You *must* enable *exactly* one of the following USE flags:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling *one* of these flag in /etc/portage/package.use:"
+
+	set -- ${required_flags}
+	eerror " =${CATEGORY}/${PN}-${PVR} ${1}"
+	shift
+
+	for flag in $@; do
+		eerror " OR =${CATEGORY}/${PN}-${PVR} ${flag}"
+	done
+
+	echo
+	die "Missing or conflicting USE flags"
+}
+
+# @FUNCTION: confutils_require_any
+# @USAGE: <flag> [more flags ...]
+# @DESCRIPTION:
+# Use this function to ensure one or more of the specified USE flags have been
+# enabled
+confutils_require_any() {
+	local required_flags="$@"
+	local success=0
+
+	# A single enabled flag is enough.
+	for flag in ${required_flags}; do
+		use ${flag} && success=1
+	done
+
+	[[ ${success} -eq 1 ]] && return
+
+	echo
+	eerror "You *must* enable one or more of the following USE flags:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} ${required_flags}"
+	echo
+	die "Missing USE flags"
+}
+
+# @FUNCTION: confutils_require_built_with_all
+# @USAGE: <foreign> <flag> [more flags ...]
+# @DESCRIPTION:
+# Use this function to ensure all of the specified USE flags have been enabled
+# in the specified foreign package
+confutils_require_built_with_all() {
+	local foreign=$1 && shift
+	local required_flags="$@"
+
+	# built_with_use checks all listed flags at once.
+	built_with_use ${foreign} ${required_flags} && return
+
+	echo
+	eerror "You *must* enable all of the following USE flags in ${foreign}:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " ${foreign} ${required_flags}"
+	echo
+	die "Missing USE flags in ${foreign}"
+}
+
+# @FUNCTION: confutils_require_built_with_any
+# @USAGE: <foreign> <flag> [more flags ...]
+# @DESCRIPTION:
+# Use this function to ensure one or more of the specified USE flags have been
+# enabled in the specified foreign package
+confutils_require_built_with_any() {
+	local foreign=$1 && shift
+	local required_flags="$@"
+	local success=0
+
+	# One flag built into ${foreign} is enough.
+	for flag in ${required_flags}; do
+		built_with_use ${foreign} ${flag} && success=1
+	done
+
+	[[ ${success} -eq 1 ]] && return
+
+	echo
+	eerror "You *must* enable one or more of the following USE flags in ${foreign}:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " ${foreign} ${required_flags}"
+	echo
+	die "Missing USE flags in ${foreign}"
+}
+
+# @FUNCTION: confutils_use_conflict
+# @USAGE: <enabled flag> <conflicting flag> [more conflicting flags ...]
+# @DESCRIPTION:
+# Use this function to automatically complain to the user if conflicting USE
+# flags have been enabled
+confutils_use_conflict() {
+	# Nothing to check unless the primary flag is enabled.
+	use $1 || return
+
+	local my_flag="$1" && shift
+	local my_present=
+	local my_remove=
+
+	# Collect every conflicting flag that is enabled, plus the "-flag"
+	# form used in the suggested package.use line.
+	for flag in "$@"; do
+		if use ${flag}; then
+			my_present="${my_present} ${flag}"
+			my_remove="${my_remove} -${flag}"
+		fi
+	done
+
+	[[ -z "${my_present}" ]] && return
+
+	echo
+	eerror "USE flag '${my_flag}' conflicts with these USE flag(s):"
+	eerror " ${my_present}"
+	eerror
+	eerror "You must disable these conflicting flags before you can emerge this package."
+	eerror "You can do this by disabling these flags in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} ${my_remove}"
+	eerror
+	eerror "You could disable this flag instead in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} -${my_flag}"
+	echo
+	die "Conflicting USE flags"
+}
+
+# @FUNCTION: confutils_use_depend_all
+# @USAGE: <enabled flag> <needed flag> [more needed flags ...]
+# @DESCRIPTION:
+# Use this function to automatically complain to the user if a USE flag depends
+# on another USE flag that hasn't been enabled
+confutils_use_depend_all() {
+	# Nothing to check unless the primary flag is enabled.
+	use $1 || return
+
+	local my_flag="$1" && shift
+	local my_missing=
+
+	# Collect every required flag that is NOT enabled.
+	for flag in "$@"; do
+		use ${flag} || my_missing="${my_missing} ${flag}"
+	done
+
+	[[ -z "${my_missing}" ]] && return
+
+	echo
+	eerror "USE flag '${my_flag}' needs these additional flag(s) set:"
+	eerror " ${my_missing}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} ${my_missing}"
+	eerror
+	eerror "You could disable this flag instead in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} -${my_flag}"
+	echo
+	die "Need missing USE flags"
+}
+
+# @FUNCTION: confutils_use_depend_any
+# @USAGE: <enabled flag> <needed flag> [more needed flags ...]
+# @DESCRIPTION:
+# Use this function to automatically complain to the user if a USE flag depends
+# on another USE flag that hasn't been enabled
+confutils_use_depend_any() {
+	# Nothing to check unless the primary flag is enabled.
+	use $1 || return
+
+	local my_flag="$1" && shift
+	local my_found=
+	local my_missing=
+
+	# Partition the required flags into enabled / missing.
+	for flag in "$@"; do
+		if use ${flag}; then
+			my_found="${my_found} ${flag}"
+		else
+			my_missing="${my_missing} ${flag}"
+		fi
+	done
+
+	# Any single enabled flag satisfies the dependency.
+	[[ -n "${my_found}" ]] && return
+
+	echo
+	eerror "USE flag '${my_flag}' needs one or more of these additional flag(s) set:"
+	eerror " ${my_missing}"
+	eerror
+	eerror "You can do this by enabling one of these flags in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} ${my_missing}"
+	eerror
+	eerror "You could disable this flag instead in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} -${my_flag}"
+	echo
+	die "Need missing USE flag(s)"
+}
+
+# @FUNCTION: confutils_use_depend_built_with_all
+# @USAGE: <enabled flag> <foreign> <needed flag> [more needed flags ...]
+# @DESCRIPTION:
+# Use this function to automatically complain to the user if a USE flag depends
+# on a USE flag in another package that hasn't been enabled
+confutils_use_depend_built_with_all() {
+	# Nothing to check unless the primary flag is enabled.
+	use $1 || return
+
+	local my_flag="$1" && shift
+	local foreign=$1 && shift
+	local required_flags="$@"
+
+	# built_with_use checks all listed flags at once.
+	built_with_use ${foreign} ${required_flags} && return
+
+	echo
+	eerror "USE flag '${my_flag}' needs the following USE flags in ${foreign}:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " ${foreign} ${required_flags}"
+	eerror
+	eerror "You could disable this flag instead in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} -${my_flag}"
+	echo
+	die "Missing USE flags in ${foreign}"
+}
+
+# @FUNCTION: confutils_use_depend_built_with_any
+# @USAGE: <enabled flag> <foreign> <needed flag> [more needed flags ...]
+# @DESCRIPTION:
+# Use this function to automatically complain to the user if a USE flag depends
+# on a USE flag in another package that hasn't been enabled
+confutils_use_depend_built_with_any() {
+	# Nothing to check unless the primary flag is enabled.
+	use $1 || return
+
+	local my_flag="$1" && shift
+	local foreign=$1 && shift
+	local required_flags="$@"
+	local success=0
+
+	# One flag built into ${foreign} is enough.
+	for flag in ${required_flags}; do
+		built_with_use ${foreign} ${flag} && success=1
+	done
+
+	[[ ${success} -eq 1 ]] && return
+
+	echo
+	eerror "USE flag '${my_flag}' needs one or more of the following USE flags in ${foreign}:"
+	eerror " ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror " ${foreign} ${required_flags}"
+	eerror
+	eerror "You could disable this flag instead in /etc/portage/package.use:"
+	eerror " =${CATEGORY}/${PN}-${PVR} -${my_flag}"
+	echo
+	die "Missing USE flags in ${foreign}"
+}
+
+
+# internal function constructs the configure values for optional shared module
+# support and extra arguments
+_confutils_shared_suffix() {
+ local my_shared=
+
+ if [[ "$1" == "1" ]]; then
+ if [[ -n "${shared}" ]]; then
+ my_shared="${shared}"
+ if [[ -n "$2" ]]; then
+ my_shared="${my_shared},$2"
+ fi
+ elif [[ -n "$2" ]]; then
+ my_shared="=$2"
+ fi
+ else
+ if [[ -n "$2" ]]; then
+ my_shared="=$2"
+ fi
+ fi
+
+ echo "${my_shared}"
+}
+
+# @FUNCTION: enable_extension_disable
+# @USAGE: <extension> <flag> [msg]
+# @DESCRIPTION:
+# Disable an extension that is enabled by default.  Provided for those rare
+# configure scripts that accept --disable but no matching --enable switch.
+enable_extension_disable() {
+	local label=${3:-$1}
+
+	if ! use "$2" ; then
+		my_conf="${my_conf} --disable-$1"
+		einfo " Disabling ${label}"
+	else
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_enable
+# @USAGE: <extension> <flag> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# use_enable()-style helper that additionally understands shared-module
+# builds and an extra configure argument appended to the switch.
+enable_extension_enable() {
+	local suffix=$(_confutils_shared_suffix $3 $4)
+	local label=${5:-$1}
+
+	if ! use $2; then
+		my_conf="${my_conf} --disable-$1"
+		einfo " Disabling ${label}"
+	else
+		my_conf="${my_conf} --enable-${1}${suffix}"
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_enableonly
+# @USAGE: <extension> <flag> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# Like enable_extension_enable(), but for those rare configure scripts that
+# support --enable without the corresponding --disable: when the flag is off,
+# no switch at all is appended.
+enable_extension_enableonly() {
+	local suffix=$(_confutils_shared_suffix $3 $4)
+	local label=${5:-$1}
+
+	if ! use $2 ; then
+		# note: we deliberately do *not* use a --disable switch here
+		einfo " Disabling ${label}"
+	else
+		my_conf="${my_conf} --enable-${1}${suffix}"
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_without
+# @USAGE: <extension> <flag> [msg]
+# @DESCRIPTION:
+# Disable an extension that is enabled by default.  Provided for those rare
+# configure scripts that accept --without but no matching --with switch.
+enable_extension_without() {
+	local label=${3:-$1}
+
+	if ! use "$2"; then
+		my_conf="${my_conf} --without-$1"
+		einfo " Disabling ${label}"
+	else
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_with
+# @USAGE: <extension> <flag> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# use_with()-style helper that additionally understands shared-module
+# builds and an extra configure argument appended to the switch.
+enable_extension_with() {
+	local suffix=$(_confutils_shared_suffix $3 $4)
+	local label=${5:-$1}
+
+	if ! use $2; then
+		my_conf="${my_conf} --without-$1"
+		einfo " Disabling ${label}"
+	else
+		my_conf="${my_conf} --with-${1}${suffix}"
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_withonly
+# @USAGE: <extension> <flag> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# This function is like use_with(), except that it knows about enabling modules
+# as shared libraries, and it supports passing additional data with the switch.
+# This function is provided for those rare configure scripts that support
+# --with but not the corresponding --without.
+enable_extension_withonly() {
+	local my_shared=$(_confutils_shared_suffix $3 $4)
+	local my_msg=${5:-$1}
+
+	if use $2; then
+		my_conf="${my_conf} --with-${1}${my_shared}"
+		einfo " Enabling ${my_msg}"
+	else
+		# note: we deliberately do *not* use a --without switch here
+		einfo " Disabling ${my_msg}"
+	fi
+}
+
+# @FUNCTION: enable_extension_enable_built_with
+# @USAGE: <foreign> <flag> <extension> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# Like enable_extension_enable(), but the decision is taken from a USE flag
+# of a foreign (already installed) package instead of this package.
+enable_extension_enable_built_with() {
+	local suffix=$(_confutils_shared_suffix $4 $5)
+	local label=${6:-$3}
+
+	if ! built_with_use $1 $2; then
+		my_conf="${my_conf} --disable-$3"
+		einfo " Disabling ${label}"
+	else
+		my_conf="${my_conf} --enable-${3}${suffix}"
+		einfo " Enabling ${label}"
+	fi
+}
+
+# @FUNCTION: enable_extension_with_built_with ()
+# @USAGE: <foreign> <flag> <extension> [shared] [extra conf] [msg]
+# @DESCRIPTION:
+# This function is like enable_extension_with(), except that it
+# enables/disables modules based on a USE flag in a foreign package.
+enable_extension_with_built_with() {
+ # legacy workaround
+ if [[ "$4" != "0" && "$4" != "1" ]]; then
+ enable_extension_with_built_with "$1" "$2" "$3" 0 "$4" "$5"
+ return
+ fi
+
+ local my_shared=$(_confutils_shared_suffix $4 $5)
+ local my_msg=${6:-$3}
+
+ if built_with_use $1 $2; then
+ my_conf="${my_conf} --with-${3}${my_shared}"
+ einfo " Enabling ${my_msg}"
+ else
+ my_conf="${my_conf} --disable-$3"
+ einfo " Disabling ${my_msg}"
+ fi
+}
diff --git a/eclass/cron.eclass b/eclass/cron.eclass
new file mode 100644
index 000000000000..1d97df3f73c3
--- /dev/null
+++ b/eclass/cron.eclass
@@ -0,0 +1,161 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cron
+# @MAINTAINER:
+# cron-bugs@gentoo.org
+# @AUTHOR:
+# Original Author: Aaron Walker <ka0ttic@gentoo.org>
+# @BLURB: Some functions for cron
+# @DESCRIPTION:
+# Purpose: The main motivation for this eclass was to simplify
+# the jungle known as src_install() in cron ebuilds. Using these
+# functions also ensures that permissions are *always* reset,
+# preventing the accidental installation of files with wrong perms.
+#
+# NOTE on defaults: the default settings in the below functions were
+# chosen based on the most common setting among cron ebuilds.
+#
+# Please assign any bugs regarding this eclass to cron-bugs@gentoo.org.
+
+inherit eutils flag-o-matic
+
+EXPORT_FUNCTIONS pkg_postinst
+
+SLOT="0"
+
+DEPEND=">=sys-apps/sed-4.0.5"
+
+# Only one cron daemon may be installed at a time: every daemon in the list
+# except the current package (${PN}) is added as a blocker.
+# NOTE(review): 'pn' stays set after this loop runs at inherit time —
+# harmless here, but consider unsetting it.
+RDEPEND=">=sys-process/cronbase-0.3.2"
+for pn in vixie-cron bcron cronie dcron fcron; do
+	[[ ${pn} == "${PN}" ]] || RDEPEND="${RDEPEND} !sys-process/${pn}"
+done
+
+# @FUNCTION: docrondir
+# @USAGE: [ dir ] [ perms ]
+# @DESCRIPTION:
+# Create (and keep) a crontab spool directory with the given permissions.
+#
+# Both arguments are optional.  Everything after 'dir' is treated as
+# permissions (same format as insopts).
+#
+# ex: docrondir /some/dir -m 0770 -o root -g cron
+#     docrondir /some/dir (uses default perms)
+#     docrondir -m0700 (uses default dir)
+
+docrondir() {
+	# defaults
+	local target="/var/spool/cron/crontabs"
+	local mode="-m0750 -o root -g cron"
+
+	case "${1:-}" in
+	"")
+		# no arguments: keep both defaults
+		;;
+	*/*)
+		# first argument contains a slash -> it is the directory
+		target=$1
+		shift
+		[[ -n $1 ]] && mode="$@"
+		;;
+	*)
+		# no directory given; everything is permissions
+		mode="$@"
+		;;
+	esac
+
+	diropts ${mode}
+	keepdir ${target}
+
+	# reset perms to default
+	diropts -m0755
+}
+
+# @FUNCTION: docron
+# @USAGE: [ exe ] [ perms ]
+# @DESCRIPTION:
+# Install the cron daemon executable into /usr/sbin.
+#
+# Both arguments are optional.
+#
+# ex: docron -m 0700 -o root -g root ('exe' defaults to "cron")
+#     docron crond -m 0110
+
+docron() {
+	# defaults
+	local exe="cron"
+	local mode="-m 0750 -o root -g wheel"
+
+	case "${1:-}" in
+	"")
+		# no arguments: keep both defaults
+		;;
+	-*)
+		# leading dash -> only permissions were given
+		mode="$@"
+		;;
+	*)
+		exe=$1
+		shift
+		[[ -n $1 ]] && mode="$@"
+		;;
+	esac
+
+	exeopts ${mode}
+	exeinto /usr/sbin
+	doexe ${exe} || die "failed to install ${exe}"
+
+	# reset perms to default
+	exeopts -m0755
+}
+
+# @FUNCTION: docrontab
+# @USAGE: [ exe ] [ perms ]
+# @DESCRIPTION:
+# Install crontab executable
+#
+# Uses same semantics as docron.
+
+docrontab() {
+ local crontab="crontab" perms="-m 4750 -o root -g cron"
+
+ if [[ -n $1 ]] ; then
+ case "$1" in
+ -*)
+ perms="$@"
+ ;;
+ *)
+ crontab=$1
+ shift
+ [[ -n $1 ]] && perms="$@"
+ ;;
+ esac
+ fi
+
+ exeopts ${perms}
+ exeinto /usr/bin
+ doexe ${crontab} || die "failed to install ${crontab}"
+
+ # reset perms to default
+ exeopts -m0755
+
+ # users expect /usr/bin/crontab to exist...
+ if [[ "${crontab##*/}" != "crontab" ]] ; then
+ dosym ${crontab##*/} /usr/bin/crontab || \
+ die "failed to create /usr/bin/crontab symlink"
+ fi
+}
+
+# @FUNCTION: cron_pkg_postinst
+# @DESCRIPTION:
+# Outputs a message about system crontabs.
+# Daemons that have a true system crontab set CRON_SYSTEM_CRONTAB="yes",
+# which suppresses the "crontab /etc/crontab" hint below.
+cron_pkg_postinst() {
+	echo
+	# idiom: use [[ ]] like the rest of this eclass instead of [ ]
+	if [[ "${CRON_SYSTEM_CRONTAB:-no}" != "yes" ]] ; then
+		einfo "To activate /etc/cron.{hourly|daily|weekly|monthly} please run:"
+		einfo " crontab /etc/crontab"
+		einfo
+		einfo "!!! That will replace root's current crontab !!!"
+		einfo
+	fi
+
+	einfo "You may wish to read the Gentoo Linux Cron Guide, which can be"
+	einfo "found online at:"
+	einfo " http://www.gentoo.org/doc/en/cron-guide.xml"
+	echo
+}
diff --git a/eclass/cuda.eclass b/eclass/cuda.eclass
new file mode 100644
index 000000000000..932353e3309f
--- /dev/null
+++ b/eclass/cuda.eclass
@@ -0,0 +1,134 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+inherit flag-o-matic toolchain-funcs versionator
+
+# @ECLASS: cuda.eclass
+# @MAINTAINER:
+# Justin Lecher <jlec@gentoo.org>
+# @BLURB: Common functions for cuda packages
+# @DESCRIPTION:
+# This eclass contains functions to be used with cuda packages. Currently it
+# is setting and/or sanitizing NVCCFLAGS, the compiler flags for nvcc. This is
+# automatically done and exported in src_prepare() or manually by calling
+# cuda_sanitize.
+# @EXAMPLE:
+# inherit cuda
+
+# @ECLASS-VARIABLE: NVCCFLAGS
+# @DESCRIPTION:
+# nvcc compiler flags (see nvcc --help), which should be used like
+# CFLAGS for the C compiler.  Defaults to -O2.
+: ${NVCCFLAGS:=-O2}
+
+# @ECLASS-VARIABLE: CUDA_VERBOSE
+# @DESCRIPTION:
+# Being verbose during compilation to see underlying commands.
+# Set to "true" (the default) to pass -v to nvcc in cuda_sanitize.
+: ${CUDA_VERBOSE:=true}
+
+# @FUNCTION: cuda_gccdir
+# @USAGE: [-f]
+# @RETURN: gcc bindir compatible with current cuda, optionally (-f) prefixed with "--compiler-bindir="
+# @DESCRIPTION:
+# Helper for determination of the latest gcc bindir supported by
+# the current nvidia cuda toolkit.
+#
+# Example:
+# @CODE
+# cuda_gccdir -f
+# -> --compiler-bindir="/usr/x86_64-pc-linux-gnu/gcc-bin/4.6.3"
+# @CODE
+cuda_gccdir() {
+	local gcc_bindir ver args="" flag ret
+
+	# Currently we only support the gnu compiler suite
+	if [[ $(tc-getCXX) != *g++* ]]; then
+		ewarn "Currently we only support the gnu compiler suite"
+		return 2
+	fi
+
+	# Parse options; [[ -n $1 ]] instead of the non-idiomatic [ "$1" ].
+	while [[ -n $1 ]]; do
+		case $1 in
+			-f)
+				flag="--compiler-bindir="
+				;;
+			*)
+				;;
+		esac
+		shift
+	done
+
+	# Ask the toolkit which gcc versions it supports.
+	if ! args=$(cuda-config -s); then
+		eerror "Could not execute cuda-config"
+		eerror "Make sure >=dev-util/nvidia-cuda-toolkit-4.2.9-r1 is installed"
+		die "cuda-config not found"
+	else
+		args=$(version_sort ${args})
+		if [[ -z ${args} ]]; then
+			die "Could not determine supported gcc versions from cuda-config"
+		fi
+	fi
+
+	# Pick the newest installed gcc among the supported versions.
+	# NOTE(review): parsing 'ls' output is fragile in general; it works
+	# here because gcc-bin paths contain no whitespace.
+	for ver in ${args}; do
+		has_version "=sys-devel/gcc-${ver}*" && \
+			gcc_bindir="$(ls -d ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}* | tail -n 1)"
+	done
+
+	if [[ -n ${gcc_bindir} ]]; then
+		if [[ -n ${flag} ]]; then
+			ret="${flag}\"${gcc_bindir}\""
+		else
+			ret="${gcc_bindir}"
+		fi
+		# quoted to avoid accidental word splitting of the result
+		echo "${ret}"
+		return 0
+	else
+		eerror "Only gcc version(s) ${args} are supported,"
+		eerror "of which none is installed"
+		die "Only gcc version(s) ${args} are supported"
+		return 1
+	fi
+}
+
+# @FUNCTION: cuda_sanitize
+# @DESCRIPTION:
+# Correct NVCCFLAGS by adding the necessary reference to gcc bindir and
+# passing CXXFLAGS to underlying compiler without disturbing nvcc.
+cuda_sanitize() {
+ local rawldflags=$(raw-ldflags)
+ # Be verbose if wanted
+ [[ "${CUDA_VERBOSE}" == true ]] && NVCCFLAGS+=" -v"
+
+ # Tell nvcc where to find a compatible compiler
+ NVCCFLAGS+=" $(cuda_gccdir -f)"
+
+ # Tell nvcc which flags should be used for underlying C compiler
+ NVCCFLAGS+=" --compiler-options=\"${CXXFLAGS}\" --linker-options=\"${rawldflags// /,}\""
+
+ debug-print "Using ${NVCCFLAGS} for cuda"
+ export NVCCFLAGS
+}
+
+# @FUNCTION: cuda_pkg_setup
+# @DESCRIPTION:
+# Call cuda_src_prepare for EAPIs not supporting src_prepare.
+# EAPI 0/1 compatibility shim: those EAPIs have no src_prepare phase, so
+# the NVCCFLAGS setup is done from pkg_setup instead (see the
+# EXPORT_FUNCTIONS dispatch at the end of this eclass).
+cuda_pkg_setup() {
+	cuda_src_prepare
+}
+
+# @FUNCTION: cuda_src_prepare
+# @DESCRIPTION:
+# Sanitise and export NVCCFLAGS by default (delegates to cuda_sanitize).
+cuda_src_prepare() {
+	cuda_sanitize
+}
+
+
+# Export the phase function appropriate for the EAPI in use: EAPIs 0 and 1
+# lack src_prepare, so the NVCCFLAGS setup is hooked into pkg_setup there.
+case "${EAPI:-0}" in
+	0|1)
+		EXPORT_FUNCTIONS pkg_setup ;;
+	2|3|4|5)
+		EXPORT_FUNCTIONS src_prepare ;;
+	*) die "EAPI=${EAPI} is not supported" ;;
+esac
diff --git a/eclass/cvs.eclass b/eclass/cvs.eclass
new file mode 100644
index 000000000000..b9c8a818b6ca
--- /dev/null
+++ b/eclass/cvs.eclass
@@ -0,0 +1,583 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: cvs.eclass
+# @MAINTAINER:
+# vapier@gentoo.org (and anyone who wants to help)
+# @BLURB: This eclass provides generic cvs fetching functions
+# @DESCRIPTION:
+# This eclass provides the generic cvs fetching functions. To use this from an
+# ebuild, set the ECLASS VARIABLES as specified below in your ebuild before
+# inheriting. Then either leave the default src_unpack or extend over
+# cvs_src_unpack. If you find that you need to call the cvs_* functions
+# directly, I'd be interested to hear about it.
+
+if [[ -z ${_CVS_ECLASS} ]]; then
+_CVS_ECLASS=1
+
+inherit eutils
+
+# TODO:
+
+# Implement more auth types (gserver?, kserver?)
+
+# Support additional remote shells with `ext' authentication (does
+# anyone actually need to use it with anything other than SSH?)
+
+
+# Users shouldn't change these settings! The ebuild/eclass inheriting
+# this eclass will take care of that. If you want to set the global
+# KDE cvs ebuilds' settings, see the comments in kde-source.eclass.
+
+# @ECLASS-VARIABLE: ECVS_CVS_COMPRESS
+# @DESCRIPTION:
+# Set the default compression level. Has no effect when ECVS_CVS_COMMAND
+# is defined by ebuild/user.
+: ${ECVS_CVS_COMPRESS:=-z1}
+
+# @ECLASS-VARIABLE: ECVS_CVS_OPTIONS
+# @DESCRIPTION:
+# Additional options to the cvs commands. Has no effect when ECVS_CVS_COMMAND
+# is defined by ebuild/user.
+: ${ECVS_CVS_OPTIONS:=-q -f}
+
+# @ECLASS-VARIABLE: ECVS_CVS_COMMAND
+# @DESCRIPTION:
+# CVS command to run
+#
+# You can set, for example, "cvs -t" for extensive debug information
+# on the cvs connection. The default of "cvs -q -f -z1" means to be
+# quiet, to disregard the ~/.cvsrc config file and to use light
+# compression (ECVS_CVS_COMPRESS above defaults to -z1).
+: ${ECVS_CVS_COMMAND:=cvs ${ECVS_CVS_OPTIONS} ${ECVS_CVS_COMPRESS}}
+
+# @ECLASS-VARIABLE: ECVS_UP_OPTS
+# @DESCRIPTION:
+# CVS options given after the cvs update command. Don't remove "-dP" or things
+# won't work.
+: ${ECVS_UP_OPTS:=-dP}
+
+# @ECLASS-VARIABLE: ECVS_CO_OPTS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# CVS options given after the cvs checkout command.
+
+# @ECLASS-VARIABLE: ECVS_OFFLINE
+# @DESCRIPTION:
+# Set this variable to a non-empty value to disable the automatic updating of
+# a CVS source tree. This is intended to be set outside the cvs source
+# tree by users.
+: ${ECVS_OFFLINE:=${EVCS_OFFLINE}}
+
+# @ECLASS-VARIABLE: ECVS_LOCAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If this is set, the CVS module will be fetched non-recursively.
+# Refer to the information in the CVS man page regarding the -l
+# command option (not the -l global option).
+
+# @ECLASS-VARIABLE: ECVS_LOCALNAME
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Local name of checkout directory
+#
+# This is useful if the module on the server is called something
+# common like 'driver' or is nested deep in a tree, and you don't like
+# useless empty directories.
+#
+# WARNING: Set this only from within ebuilds! If set in your shell or
+# some such, things will break because the ebuild won't expect it and
+# have e.g. a wrong $S setting.
+
+# @ECLASS-VARIABLE: ECVS_TOP_DIR
+# @DESCRIPTION:
+# The directory under which CVS modules are checked out.
+: ${ECVS_TOP_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/cvs-src"}
+
+# @ECLASS-VARIABLE: ECVS_SERVER
+# @DESCRIPTION:
+# CVS path
+#
+# The format is "server:/dir", e.g. "anoncvs.kde.org:/home/kde".
+# Remove the other parts of the full CVSROOT, which might look like
+# ":pserver:anonymous@anoncvs.kde.org:/home/kde"; this is generated
+# using other settings also.
+#
+# Set this to "offline" to disable fetching (i.e. to assume the module
+# is already checked out in ECVS_TOP_DIR).
+: ${ECVS_SERVER:="offline"}
+
+# @ECLASS-VARIABLE: ECVS_MODULE
+# @REQUIRED
+# @DESCRIPTION:
+# The name of the CVS module to be fetched
+#
+# This must be set when cvs_src_unpack is called. This can include
+# several directory levels, i.e. "foo/bar/baz"
+#[[ -z ${ECVS_MODULE} ]] && die "$ECLASS: error: ECVS_MODULE not set, cannot continue"
+
+# @ECLASS-VARIABLE: ECVS_DATE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The date of the checkout. See the -D date_spec option in the cvs
+# man page for more details.
+
+# @ECLASS-VARIABLE: ECVS_BRANCH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The name of the branch/tag to use
+#
+# The default is "HEAD". The following default _will_ reset your
+# branch checkout to head if used.
+#: ${ECVS_BRANCH:="HEAD"}
+
+# @ECLASS-VARIABLE: ECVS_AUTH
+# @DESCRIPTION:
+# Authentication method to use
+#
+# Possible values are "pserver" and "ext". If `ext' authentication is
+# used, the remote shell to use can be specified in CVS_RSH (SSH is
+# used by default). Currently, the only supported remote shell for
+# `ext' authentication is SSH.
+#
+# Armando Di Cianno <fafhrd@gentoo.org> 2004/09/27
+# - Added "no" as a server type, which uses no AUTH method, nor
+# does it login
+# e.g.
+# "cvs -danoncvs@savannah.gnu.org:/cvsroot/backbone co System"
+# ( from gnustep-apps/textedit )
+: ${ECVS_AUTH:="pserver"}
+
+# @ECLASS-VARIABLE: ECVS_USER
+# @DESCRIPTION:
+# Username to use for authentication on the remote server.
+: ${ECVS_USER:="anonymous"}
+
+# @ECLASS-VARIABLE: ECVS_PASS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Password to use for authentication on the remote server
+
+# @ECLASS-VARIABLE: ECVS_SSH_HOST_KEY
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If SSH is used for `ext' authentication, use this variable to
+# specify the host key of the remote server. The format of the value
+# should be the same format that is used for the SSH known hosts file.
+#
+# WARNING: If a SSH host key is not specified using this variable, the
+# remote host key will not be verified.
+
+# @ECLASS-VARIABLE: ECVS_CLEAN
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this to get a clean copy when updating (passes the
+# -C option to cvs update)
+
+# @ECLASS-VARIABLE: ECVS_RUNAS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specifies an alternate (non-root) user to use to run cvs. Currently
+# b0rked and wouldn't work with portage userpriv anyway without
+# special magic.
+
+# : ${ECVS_RUNAS:=$(whoami)}
+
+# add cvs to deps
+# ssh is used for ext auth
+# sudo is used to run as a specified user
+DEPEND="dev-vcs/cvs"
+
+[[ -n ${ECVS_RUNAS} ]] && DEPEND+=" app-admin/sudo"
+
+if [[ ${ECVS_AUTH} == "ext" ]] ; then
+	# default to ssh; other remote shells are not implemented (see TODO above)
+	[[ -z ${CVS_RSH} ]] && export CVS_RSH="ssh"
+	if [[ ${CVS_RSH} != "ssh" ]] ; then
+		die "Support for ext auth with clients other than ssh has not been implemented yet"
+	fi
+	DEPEND+=" net-misc/openssh"
+fi
+
+# called from cvs_src_unpack
+cvs_fetch() {
+ has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+ # Make these options local variables so that the global values are
+ # not affected by modifications in this function.
+
+ local ECVS_COMMAND=${ECVS_COMMAND}
+ local ECVS_UP_OPTS=${ECVS_UP_OPTS}
+ local ECVS_CO_OPTS=${ECVS_CO_OPTS}
+
+ debug-print-function ${FUNCNAME} "$@"
+
+ # Update variables that are modified by ebuild parameters, which
+ # should be effective every time cvs_fetch is called, and not just
+ # every time cvs.eclass is inherited
+
+ # Handle parameter for local (non-recursive) fetching
+
+ if [[ -n ${ECVS_LOCAL} ]] ; then
+ ECVS_UP_OPTS+=" -l"
+ ECVS_CO_OPTS+=" -l"
+ fi
+
+ # Handle ECVS_BRANCH option
+ #
+ # Because CVS auto-switches branches, we just have to pass the
+ # correct -rBRANCH option when updating.
+
+ if [[ -n ${ECVS_BRANCH} ]] ; then
+ ECVS_UP_OPTS+=" -r${ECVS_BRANCH}"
+ ECVS_CO_OPTS+=" -r${ECVS_BRANCH}"
+ fi
+
+ # Handle ECVS_LOCALNAME, which specifies the local directory name
+ # to use. Note that the -d command option is not equivalent to
+ # the global -d option.
+
+ if [[ ${ECVS_LOCALNAME} != "${ECVS_MODULE}" ]] ; then
+ ECVS_CO_OPTS+=" -d ${ECVS_LOCALNAME}"
+ fi
+
+ if [[ -n ${ECVS_CLEAN} ]] ; then
+ ECVS_UP_OPTS+=" -C"
+ fi
+
+ if [[ -n ${ECVS_DATE} ]] ; then
+ ECVS_CO_OPTS+=" -D ${ECVS_DATE}"
+ ECVS_UP_OPTS+=" -D ${ECVS_DATE}"
+ fi
+
+ # It would be easiest to always be in "run-as mode", logic-wise,
+ # if sudo didn't ask for a password even when sudo'ing to `whoami`.
+
+ if [[ -z ${ECVS_RUNAS} ]] ; then
+ run=""
+ else
+ run="sudo -u ${ECVS_RUNAS}"
+ fi
+
+ # Create the top dir if needed
+
+ if [[ ! -d ${ECVS_TOP_DIR} ]] ; then
+ # Note that the addwrite statements in this block are only
+ # there to allow creating ECVS_TOP_DIR; we allow writing
+ # inside it separately.
+
+ # This is because it's simpler than trying to find out the
+ # parent path of the directory, which would need to be the
+ # real path and not a symlink for things to work (so we can't
+ # just remove the last path element in the string)
+
+ debug-print "${FUNCNAME}: checkout mode. creating cvs directory"
+ addwrite /foobar
+ addwrite /
+ ${run} mkdir -p "/${ECVS_TOP_DIR}"
+ export SANDBOX_WRITE="${SANDBOX_WRITE//:\/foobar:\/}"
+ fi
+
+ # In case ECVS_TOP_DIR is a symlink to a dir, get the real path,
+ # otherwise addwrite() doesn't work.
+
+ cd -P "${ECVS_TOP_DIR}" >/dev/null
+ ECVS_TOP_DIR=$(pwd)
+
+ # Disable the sandbox for this dir
+ addwrite "${ECVS_TOP_DIR}"
+
+ # Chown the directory and all of its contents
+ if [[ -n ${ECVS_RUNAS} ]] ; then
+ ${run} chown -R "${ECVS_RUNAS}" "/${ECVS_TOP_DIR}"
+ fi
+
+ # Determine the CVS command mode (checkout or update)
+ if [[ ! -d ${ECVS_TOP_DIR}/${ECVS_LOCALNAME}/CVS ]] ; then
+ mode=checkout
+ else
+ mode=update
+ fi
+
+ # Our server string (i.e. CVSROOT) without the password so it can
+ # be put in Root
+ local connection="${ECVS_AUTH}"
+ if [[ ${ECVS_AUTH} == "no" ]] ; then
+ local server="${ECVS_USER}@${ECVS_SERVER}"
+ else
+ [[ -n ${ECVS_PROXY} ]] && connection+=";proxy=${ECVS_PROXY}"
+ [[ -n ${ECVS_PROXY_PORT} ]] && connection+=";proxyport=${ECVS_PROXY_PORT}"
+ local server=":${connection}:${ECVS_USER}@${ECVS_SERVER}"
+ fi
+
+ # Switch servers automagically if needed
+ if [[ ${mode} == "update" ]] ; then
+ cd "/${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"
+ local oldserver=$(${run} cat CVS/Root)
+ if [[ ${server} != "${oldserver}" ]] ; then
+ einfo "Changing the CVS server from ${oldserver} to ${server}:"
+ debug-print "${FUNCNAME}: Changing the CVS server from ${oldserver} to ${server}:"
+
+ einfo "Searching for CVS directories ..."
+ local cvsdirs=$(${run} find . -iname CVS -print)
+ debug-print "${FUNCNAME}: CVS directories found:"
+ debug-print "${cvsdirs}"
+
+ einfo "Modifying CVS directories ..."
+ local x
+ for x in ${cvsdirs} ; do
+ debug-print "In ${x}"
+ ${run} echo "${server}" > "${x}/Root"
+ done
+ fi
+ fi
+
+ # Prepare a cvspass file just for this session, we don't want to
+ # mess with ~/.cvspass
+ touch "${T}/cvspass"
+ export CVS_PASSFILE="${T}/cvspass"
+ if [[ -n ${ECVS_RUNAS} ]] ; then
+ chown "${ECVS_RUNAS}" "${T}/cvspass"
+ fi
+
+ # The server string with the password in it, for login (only used for pserver)
+ cvsroot_pass=":${connection}:${ECVS_USER}:${ECVS_PASS}@${ECVS_SERVER}"
+
+ # Ditto without the password, for checkout/update after login, so
+ # that the CVS/Root files don't contain the password in plaintext
+ if [[ ${ECVS_AUTH} == "no" ]] ; then
+ cvsroot_nopass="${ECVS_USER}@${ECVS_SERVER}"
+ else
+ cvsroot_nopass=":${connection}:${ECVS_USER}@${ECVS_SERVER}"
+ fi
+
+ # Commands to run
+ cmdlogin="${run} ${ECVS_CVS_COMMAND} -d \"${cvsroot_pass}\" login"
+ cmdupdate="${run} ${ECVS_CVS_COMMAND} -d \"${cvsroot_nopass}\" update ${ECVS_UP_OPTS} ${ECVS_LOCALNAME}"
+ cmdcheckout="${run} ${ECVS_CVS_COMMAND} -d \"${cvsroot_nopass}\" checkout ${ECVS_CO_OPTS} ${ECVS_MODULE}"
+
+ # Execute commands
+
+ cd "${ECVS_TOP_DIR}"
+ if [[ ${ECVS_AUTH} == "pserver" ]] ; then
+ einfo "Running ${cmdlogin}"
+ eval ${cmdlogin} || die "cvs login command failed"
+ if [[ ${mode} == "update" ]] ; then
+ einfo "Running ${cmdupdate}"
+ eval ${cmdupdate} || die "cvs update command failed"
+ elif [[ ${mode} == "checkout" ]] ; then
+ einfo "Running ${cmdcheckout}"
+ eval ${cmdcheckout} || die "cvs checkout command failed"
+ fi
+ elif [[ ${ECVS_AUTH} == "ext" || ${ECVS_AUTH} == "no" ]] ; then
+ # Hack to support SSH password authentication
+
+ # Backup environment variable values
+ local CVS_ECLASS_ORIG_CVS_RSH="${CVS_RSH}"
+
+ if [[ ${SSH_ASKPASS+set} == "set" ]] ; then
+ local CVS_ECLASS_ORIG_SSH_ASKPASS="${SSH_ASKPASS}"
+ else
+ unset CVS_ECLASS_ORIG_SSH_ASKPASS
+ fi
+
+ if [[ ${DISPLAY+set} == "set" ]] ; then
+ local CVS_ECLASS_ORIG_DISPLAY="${DISPLAY}"
+ else
+ unset CVS_ECLASS_ORIG_DISPLAY
+ fi
+
+ if [[ ${CVS_RSH} == "ssh" ]] ; then
+ # Force SSH to use SSH_ASKPASS by creating python wrapper
+
+ export CVS_RSH="${T}/cvs_sshwrapper"
+ cat > "${CVS_RSH}"<<EOF
+#!${EPREFIX}/usr/bin/python
+import fcntl
+import os
+import sys
+try:
+ fd = os.open('/dev/tty', 2)
+ TIOCNOTTY=0x5422
+ try:
+ fcntl.ioctl(fd, TIOCNOTTY)
+ except:
+ pass
+ os.close(fd)
+except:
+ pass
+newarglist = sys.argv[:]
+EOF
+
+ # disable X11 forwarding which causes .xauth access violations
+ # - 20041205 Armando Di Cianno <fafhrd@gentoo.org>
+ echo "newarglist.insert(1, '-oClearAllForwardings=yes')" \
+ >> "${CVS_RSH}"
+ echo "newarglist.insert(1, '-oForwardX11=no')" \
+ >> "${CVS_RSH}"
+
+ # Handle SSH host key checking
+
+ local CVS_ECLASS_KNOWN_HOSTS="${T}/cvs_ssh_known_hosts"
+ echo "newarglist.insert(1, '-oUserKnownHostsFile=${CVS_ECLASS_KNOWN_HOSTS}')" \
+ >> "${CVS_RSH}"
+
+ if [[ -z ${ECVS_SSH_HOST_KEY} ]] ; then
+ ewarn "Warning: The SSH host key of the remote server will not be verified."
+ einfo "A temporary known hosts list will be used."
+ local CVS_ECLASS_STRICT_HOST_CHECKING="no"
+ touch "${CVS_ECLASS_KNOWN_HOSTS}"
+ else
+ local CVS_ECLASS_STRICT_HOST_CHECKING="yes"
+ echo "${ECVS_SSH_HOST_KEY}" > "${CVS_ECLASS_KNOWN_HOSTS}"
+ fi
+
+ echo -n "newarglist.insert(1, '-oStrictHostKeyChecking=" \
+ >> "${CVS_RSH}"
+ echo "${CVS_ECLASS_STRICT_HOST_CHECKING}')" \
+ >> "${CVS_RSH}"
+ echo "os.execv('${EPREFIX}/usr/bin/ssh', newarglist)" \
+ >> "${CVS_RSH}"
+
+ chmod a+x "${CVS_RSH}"
+
+ # Make sure DISPLAY is set (SSH will not use SSH_ASKPASS
+ # if DISPLAY is not set)
+
+ : ${DISPLAY:="DISPLAY"}
+ export DISPLAY
+
+ # Create a dummy executable to echo ${ECVS_PASS}
+
+ export SSH_ASKPASS="${T}/cvs_sshechopass"
+ if [[ ${ECVS_AUTH} != "no" ]] ; then
+ echo -en "#!/bin/bash\necho \"${ECVS_PASS}\"\n" \
+ > "${SSH_ASKPASS}"
+ else
+ echo -en "#!/bin/bash\nreturn\n" \
+ > "${SSH_ASKPASS}"
+ fi
+ chmod a+x "${SSH_ASKPASS}"
+ fi
+
+ if [[ ${mode} == "update" ]] ; then
+ einfo "Running ${cmdupdate}"
+ eval ${cmdupdate} || die "cvs update command failed"
+ elif [[ ${mode} == "checkout" ]] ; then
+ einfo "Running ${cmdcheckout}"
+ eval ${cmdcheckout} || die "cvs checkout command failed"
+ fi
+
+ # Restore environment variable values
+ export CVS_RSH="${CVS_ECLASS_ORIG_CVS_RSH}"
+ if [[ ${CVS_ECLASS_ORIG_SSH_ASKPASS+set} == "set" ]] ; then
+ export SSH_ASKPASS="${CVS_ECLASS_ORIG_SSH_ASKPASS}"
+ else
+ unset SSH_ASKPASS
+ fi
+
+ if [[ ${CVS_ECLASS_ORIG_DISPLAY+set} == "set" ]] ; then
+ export DISPLAY="${CVS_ECLASS_ORIG_DISPLAY}"
+ else
+ unset DISPLAY
+ fi
+ fi
+
+ # Restore ownership. Not sure why this is needed, but someone
+ # added it in the orig ECVS_RUNAS stuff.
+ if [[ -n ${ECVS_RUNAS} ]] ; then
+ chown $(whoami) "${T}/cvspass"
+ fi
+
+}
+
+# @FUNCTION: cvs_src_unpack
+# @DESCRIPTION:
+# The cvs src_unpack function, which will be exported
+cvs_src_unpack() {
+
+ debug-print-function ${FUNCNAME} "$@"
+
+ debug-print "${FUNCNAME}: init:
+ ECVS_CVS_COMMAND=${ECVS_CVS_COMMAND}
+ ECVS_UP_OPTS=${ECVS_UP_OPTS}
+ ECVS_CO_OPTS=${ECVS_CO_OPTS}
+ ECVS_TOP_DIR=${ECVS_TOP_DIR}
+ ECVS_SERVER=${ECVS_SERVER}
+ ECVS_USER=${ECVS_USER}
+ ECVS_PASS=${ECVS_PASS}
+ ECVS_MODULE=${ECVS_MODULE}
+ ECVS_LOCAL=${ECVS_LOCAL}
+ ECVS_RUNAS=${ECVS_RUNAS}
+ ECVS_LOCALNAME=${ECVS_LOCALNAME}"
+
+ [[ -z ${ECVS_MODULE} ]] && die "ERROR: CVS module not set, cannot continue."
+
+ local ECVS_LOCALNAME=${ECVS_LOCALNAME:-${ECVS_MODULE}}
+
+ local sanitized_pn=$(echo "${PN}" | LC_ALL=C sed -e 's:[^A-Za-z0-9_]:_:g')
+ local offline_pkg_var="ECVS_OFFLINE_${sanitized_pn}"
+ if [[ -n ${!offline_pkg_var}${ECVS_OFFLINE} ]] || [[ ${ECVS_SERVER} == "offline" ]] ; then
+ # We're not required to fetch anything; the module already
+ # exists and shouldn't be updated.
+ if [[ -d ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} ]] ; then
+ debug-print "${FUNCNAME}: offline mode"
+ else
+ debug-print "${FUNCNAME}: Offline mode specified but directory ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} not found, exiting with error"
+ die "ERROR: Offline mode specified, but directory ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} not found. Aborting."
+ fi
+ elif [[ -n ${ECVS_SERVER} ]] ; then # ECVS_SERVER!=offline --> real fetching mode
+ einfo "Fetching CVS module ${ECVS_MODULE} into ${ECVS_TOP_DIR} ..."
+ cvs_fetch
+ else # ECVS_SERVER not set
+ die "ERROR: CVS server not specified, cannot continue."
+ fi
+
+ einfo "Copying ${ECVS_MODULE} from ${ECVS_TOP_DIR} ..."
+ debug-print "Copying module ${ECVS_MODULE} local_mode=${ECVS_LOCAL} from ${ECVS_TOP_DIR} ..."
+
+ # This is probably redundant, but best to make sure.
+ mkdir -p "${WORKDIR}/${ECVS_LOCALNAME}"
+
+ if [[ -n ${ECVS_LOCAL} ]] ; then
+ cp -f "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"/* "${WORKDIR}/${ECVS_LOCALNAME}"
+ else
+ cp -Rf "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}" "${WORKDIR}/${ECVS_LOCALNAME}/.."
+ fi
+
+ # Not exactly perfect, but should be pretty close #333773
+ export ECVS_VERSION=$(
+ find "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}/" -ipath '*/CVS/Entries' -exec cat {} + | \
+ LC_ALL=C sort | \
+ sha1sum | \
+ awk '{print $1}'
+ )
+
+ # If the directory is empty, remove it; empty directories cannot
+ # exist in cvs. This happens when, for example, kde-source
+ # requests module/doc/subdir which doesn't exist. Still create
+ # the empty directory in workdir though.
+ if [[ $(ls -A "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}") == "CVS" ]] ; then
+ debug-print "${FUNCNAME}: removing empty CVS directory ${ECVS_LOCALNAME}"
+ rm -rf "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"
+ fi
+
+ # Implement some of base_src_unpack's functionality; note however
+ # that base.eclass may not have been inherited!
+ if [[ -n ${PATCHES} ]] ; then
+ debug-print "${FUNCNAME}: PATCHES=${PATCHES,} S=${S}, autopatching"
+ cd "${S}"
+ epatch ${PATCHES}
+ # Make sure we don't try to apply patches more than once,
+ # since cvs_src_unpack is usually called several times from
+ # e.g. kde-source_src_unpack
+ export PATCHES=""
+ fi
+
+ einfo "CVS module ${ECVS_MODULE} is now in ${WORKDIR}"
+}
+
+EXPORT_FUNCTIONS src_unpack
+
+fi
diff --git a/eclass/darcs.eclass b/eclass/darcs.eclass
new file mode 100644
index 000000000000..1fc1b28b0521
--- /dev/null
+++ b/eclass/darcs.eclass
@@ -0,0 +1,205 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: darcs.eclass
+# @MAINTAINER:
+# "Gentoo's Haskell Language team" <haskell@gentoo.org>
+# Sergei Trofimovich <slyfox@gentoo.org>
+# @AUTHOR:
+# Original Author: Jeffrey Yasskin <jyasskin@mail.utexas.edu>
+# <rphillips@gentoo.org> (tla eclass author)
+# Andres Loeh <kosmikus@gentoo.org> (darcs.eclass author)
+# Alexander Vershilov <alexander.vershilov@gmail.com> (various contributions)
+# @BLURB: This eclass provides functions to fetch and unpack darcs repositories
+# @DESCRIPTION:
+# This eclass provides the generic darcs fetching functions.
+#
+# Define the EDARCS_REPOSITORY variable at least.
+# The ${S} variable is set to ${WORKDIR}/${P}.
+
+# TODO:
+
+# support for tags
+
+inherit eutils # eshopts_{push,pop}
+
+# Don't download anything other than the darcs repository
+SRC_URI=""
+
+# You shouldn't change these settings yourself! The ebuild/eclass inheriting
+# this eclass will take care of that.
+
+# --- begin ebuild-configurable settings
+
+# darcs command to run
+# @ECLASS-VARIABLE: EDARCS_DARCS_CMD
+# @DESCRIPTION:
+# Path to darcs binary.
+: ${EDARCS_DARCS_CMD:=darcs}
+
+# darcs commands with command-specific options
+
+# @ECLASS-VARIABLE: EDARCS_GET_CMD
+# @DESCRIPTION:
+# First fetch darcs command.
+: ${EDARCS_GET_CMD:=get --lazy}
+
+# @ECLASS-VARIABLE: EDARCS_UPDATE_CMD
+# @DESCRIPTION:
+# Repo update darcs command.
+: ${EDARCS_UPDATE_CMD:=pull}
+
+# @ECLASS-VARIABLE: EDARCS_OPTIONS
+# @DESCRIPTION:
+# Options to pass to both the "get" and "update" commands
+: ${EDARCS_OPTIONS:=--set-scripts-executable}
+
+# @ECLASS-VARIABLE: EDARCS_TOP_DIR
+# @DESCRIPTION:
+# Where the darcs repositories are stored/accessed
+: ${EDARCS_TOP_DIR:=${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/darcs-src}
+
+# @ECLASS-VARIABLE: EDARCS_REPOSITORY
+# @DESCRIPTION:
+# The URI to the repository.
+: ${EDARCS_REPOSITORY:=}
+
+# @ECLASS-VARIABLE: EDARCS_OFFLINE
+# @DESCRIPTION:
+# Set this variable to a non-empty value to disable the automatic updating of
+# a darcs repository. This is intended to be set outside the darcs source
+# tree by users. Defaults to EVCS_OFFLINE value.
+: ${EDARCS_OFFLINE:=${EVCS_OFFLINE}}
+
+# @ECLASS-VARIABLE: EDARCS_CLEAN
+# @DESCRIPTION:
+# Set this to something to get a clean copy when updating
+# (removes the working directory, then uses EDARCS_GET_CMD to
+# re-download it.)
+: ${EDARCS_CLEAN:=}
+
+# --- end ebuild-configurable settings ---
+
+DEPEND="dev-vcs/darcs
+ net-misc/rsync"
+
+# @FUNCTION: darcs_patchcount
+# @DESCRIPTION:
+# Internal function to determine amount of patches in repository.
darcs_patchcount() {
	# Internal helper: report how many patches the local repository holds.
	local repo_info
	# "darcs show repo" prints a line like "  Num Patches: 3860"
	repo_info=$(HOME="${EDARCS_TOP_DIR}" ${EDARCS_DARCS_CMD} show repo --repodir="${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}" | grep "Num Patches")
	set -- ${repo_info}
	echo ${3}
}
+
+# @FUNCTION: darcs_fetch
+# @DESCRIPTION:
+# Internal function is called from darcs_src_unpack
+darcs_fetch() {
+	# The local directory to store the repository (useful to ensure a
+	# unique local name); relative to EDARCS_TOP_DIR
+	[[ -z ${EDARCS_LOCALREPO} ]] && [[ -n ${EDARCS_REPOSITORY} ]] \
+		&& EDARCS_LOCALREPO=${EDARCS_REPOSITORY%/} \
+		&& EDARCS_LOCALREPO=${EDARCS_LOCALREPO##*/}
+
+	debug-print-function ${FUNCNAME} $*
+
+	# EDARCS_CLEAN set: wipe the cached checkout so the repository is
+	# re-fetched from scratch below.
+	if [[ -n ${EDARCS_CLEAN} ]]; then
+		addwrite "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
+		rm -rf "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
+	fi
+
+	# create the top dir if needed
+	if [[ ! -d ${EDARCS_TOP_DIR} ]]; then
+		# note that the addwrite statements in this block are only there to allow creating EDARCS_TOP_DIR;
+		# we've already allowed writing inside it
+		# this is because it's simpler than trying to find out the parent path of the directory, which
+		# would need to be the real path and not a symlink for things to work (so we can't just remove
+		# the last path element in the string)
+		debug-print "${FUNCNAME}: checkout mode. creating darcs directory"
+		addwrite /foobar
+		addwrite /
+		mkdir -p "${EDARCS_TOP_DIR}"
+		# drop the temporary blanket sandbox write access again
+		export SANDBOX_WRITE="${SANDBOX_WRITE//:\/foobar:\/}"
+	fi
+
+	# in case EDARCS_DARCS_DIR is a symlink to a dir, get the real
+	# dir's path, otherwise addwrite() doesn't work.
+	pushd .
+	cd -P "${EDARCS_TOP_DIR}" > /dev/null
+	EDARCS_TOP_DIR="`/bin/pwd`"
+
+	# disable the sandbox for this dir
+	addwrite "${EDARCS_TOP_DIR}"
+
+	# determine checkout or update mode and change to the right directory.
+	if [[ ! -d "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}/_darcs" ]]; then
+		mode=get
+		cd "${EDARCS_TOP_DIR}"
+	else
+		mode=update
+		cd "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
+	fi
+
+	# commands to run
+	local cmdget="${EDARCS_DARCS_CMD} ${EDARCS_GET_CMD} ${EDARCS_OPTIONS} --repo-name=${EDARCS_LOCALREPO} ${EDARCS_REPOSITORY}"
+	local cmdupdate="${EDARCS_DARCS_CMD} ${EDARCS_UPDATE_CMD} --all ${EDARCS_OPTIONS} ${EDARCS_REPOSITORY}"
+
+	# HOME is pointed at EDARCS_TOP_DIR so darcs keeps its state there
+	# rather than in the build user's real home directory.
+	if [[ ${mode} == "get" ]]; then
+		einfo "Running ${cmdget}"
+		HOME="${EDARCS_TOP_DIR}" ${cmdget} || die "darcs get command failed"
+	elif [[ -n ${EDARCS_OFFLINE} ]] ; then
+		# offline mode: keep whatever is already checked out
+		einfo "Offline update"
+	elif [[ ${mode} == "update" ]]; then
+		einfo "Running ${cmdupdate}"
+		HOME="${EDARCS_TOP_DIR}" ${cmdupdate} || die "darcs update command failed"
+	fi
+
+	export EDARCS_PATCHCOUNT=$(darcs_patchcount)
+	einfo "    patches in repo: ${EDARCS_PATCHCOUNT}"
+
+	popd
+}
+
+# @FUNCTION: darcs_src_unpack
+# @DESCRIPTION:
+# src_unpack function
darcs_src_unpack() {
	# Derive a unique per-repository directory name (relative to
	# EDARCS_TOP_DIR) from the repository URI when not set explicitly.
	if [[ -z ${EDARCS_LOCALREPO} ]] && [[ -n ${EDARCS_REPOSITORY} ]]; then
		EDARCS_LOCALREPO=${EDARCS_REPOSITORY%/}
		EDARCS_LOCALREPO=${EDARCS_LOCALREPO##*/}
	fi

	debug-print-function ${FUNCNAME} $*

	debug-print "${FUNCNAME}: init:
	EDARCS_DARCS_CMD=${EDARCS_DARCS_CMD}
	EDARCS_GET_CMD=${EDARCS_GET_CMD}
	EDARCS_UPDATE_CMD=${EDARCS_UPDATE_CMD}
	EDARCS_OPTIONS=${EDARCS_OPTIONS}
	EDARCS_TOP_DIR=${EDARCS_TOP_DIR}
	EDARCS_REPOSITORY=${EDARCS_REPOSITORY}
	EDARCS_LOCALREPO=${EDARCS_LOCALREPO}
	EDARCS_CLEAN=${EDARCS_CLEAN}"

	einfo "Fetching darcs repository ${EDARCS_REPOSITORY} into ${EDARCS_TOP_DIR}..."
	darcs_fetch

	einfo "Copying ${EDARCS_LOCALREPO} from ${EDARCS_TOP_DIR}..."
	debug-print "Copying ${EDARCS_LOCALREPO} from ${EDARCS_TOP_DIR}..."

	# Probably redundant, but best to make sure.  Copy into
	# ${WORKDIR}/${P} rather than ${S} so the user may point ${S}
	# at something inside the checkout.
	mkdir -p "${WORKDIR}/${P}"

	eshopts_push -s dotglob	# pick up dotfiles as well
	rsync -rlpgo "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"/* "${WORKDIR}/${P}"
	eshopts_pop

	einfo "Darcs repository contents are now in ${WORKDIR}/${P}"
}
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/db-use.eclass b/eclass/db-use.eclass
new file mode 100644
index 000000000000..5a19b0599848
--- /dev/null
+++ b/eclass/db-use.eclass
@@ -0,0 +1,116 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+# This is a common location for functions that aid the use of sys-libs/db
+#
+# Bugs: maintainer-needed@gentoo.org
+
+inherit versionator multilib
+
+#Convert a version to a db slot
db_ver_to_slot() {
	# Map a Berkeley DB version string to its slot value.
	# Exactly one argument (the version) is expected.
	if [ $# -ne 1 ]; then
		eerror "Function db_ver_to_slot needs one argument" >&2
		eerror "args given:" >&2
		# Quote "$@" to keep arguments intact, and print each one
		# individually (the old \"$@\" repeated the full list per loop).
		for f in "$@"
		do
			eerror " - \"${f}\"" >&2
		done
		return 1
	fi
	# 5.0.x uses 5.0 as slot value, so this replacement will break it;
	# older sys-libs/db might have been using this but it's no longer
	# the case, so make it work for latest rather than older stuff.
	# echo -n "${1/.0/}"
	echo -n "$1"
}
+
+#Find the version that correspond to the given atom
db_findver() {
	# Find the installed db version matching the given atom, and echo it
	# if the matching slotted header directory exists; return 1 otherwise.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	if [ $# -ne 1 ]; then
		eerror "Function db_findver needs one argument" >&2
		eerror "args given:" >&2
		# Quoted "$@" and per-argument ${f} (was an unquoted $@ loop
		# re-printing the whole argument list each iteration).
		for f in "$@"
		do
			eerror " - \"${f}\"" >&2
		done
		return 1
	fi

	# Best installed package matching the atom, e.g. sys-libs/db-4.8.30
	PKG="$(best_version $1)"
	# First two version components ("4.8") form the candidate slot.
	VER="$(get_version_component_range 1-2 "${PKG/*db-/}")"
	if [ -d "${EPREFIX}"/usr/include/db$(db_ver_to_slot "$VER") ]; then
		#einfo "Found db version ${VER}" >&2
		echo -n "$VER"
		return 0
	else
		return 1
	fi
}
+
+# Get the include dir for berkeley db.
+# This function has two modes. Without any arguments it will give the best
+# version available. With arguments that form the versions of db packages
+# to test for, it will aim to find the library corresponding to it.
+
db_includedir() {
	# Echo the include directory for Berkeley DB.  Without arguments the
	# best installed version is used; with arguments, the first version
	# spec whose headers exist wins.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	if [ $# -eq 0 ]; then
		VER="$(db_findver sys-libs/db)" || return 1
		VER="$(db_ver_to_slot "$VER")"
		echo "include version ${VER}" >&2
		if [ -d "${EPREFIX}/usr/include/db${VER}" ]; then
			echo -n "${EPREFIX}/usr/include/db${VER}"
			return 0
		else
			eerror "sys-libs/db package requested, but headers not found" >&2
			return 1
		fi
	else
		# Arguments given: quoted "$@" so version specs are not
		# word-split (was an unquoted $@).
		for x in "$@"
		do
			if VER=$(db_findver "=sys-libs/db-${x}*") &&
				[ -d "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)" ]; then
				echo -n "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)"
				return 0
			fi
		done
		eerror "No suitable db version found"
		return 1
	fi
}
+
+
+# Get the library name for berkeley db. Something like "db-4.2" will be the
+# outcome. This function has two modes. Without any arguments it will give
+# the best version available. With arguments that form the versions of db
+# packages to test for, it will aim to find the library corresponding to it.
+
db_libname() {
	# Echo the library name for Berkeley DB ("db-4.2" style).  Without
	# arguments the best installed version is used; with arguments, the
	# first version spec whose library file exists wins.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	if [ $# -eq 0 ]; then
		VER="$(db_findver sys-libs/db)" || return 1
		if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
			echo -n "db-${VER}"
			return 0
		else
			eerror "sys-libs/db package requested, but library not found" >&2
			return 1
		fi
	else
		# Arguments given: quoted "$@" so version specs are not
		# word-split (was an unquoted $@).
		for x in "$@"
		do
			if VER=$(db_findver "=sys-libs/db-${x}*"); then
				if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
					echo -n "db-${VER}"
					return 0
				fi
			fi
		done
		eerror "No suitable db version found" >&2
		return 1
	fi
}
diff --git a/eclass/db.eclass b/eclass/db.eclass
new file mode 100644
index 000000000000..b3c395902c75
--- /dev/null
+++ b/eclass/db.eclass
@@ -0,0 +1,188 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+# This is a common location for functions used in the sys-libs/db ebuilds
+#
+# Bugs: maintainer-needed@gentoo.org
+
+inherit eutils multilib
+
+IUSE="doc test examples"
+
+EXPORT_FUNCTIONS src_test
+
+DEPEND="test? ( >=dev-lang/tcl-8.4 )"
+
+RDEPEND=""
+
db_fix_so() {
	# Rebuild the unversioned libdb*/db.h symlinks so they point at the
	# newest slotted library/header set installed under ${ROOT}.
	LIB="${ROOT}/usr/$(get_libdir)"

	cd "${LIB}"

	# Drop any stale unversioned symlinks before recreating them.
	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*so' -exec rm \{} \;
	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*so.[23]' -exec rm \{} \;
	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*a' -exec rm \{} \;

	# Point each unversioned name at the highest versioned file present.
	for suffix in so a; do
		for base in libdb libdb_{cxx,tcl,java,sql,stl}; do
			target=$(find . -maxdepth 1 -type f -name "${base}-*.${suffix}" | sort -n | tail -n 1)
			[ -n "${target}" ] && ln -sf ${target//.\//} ${base}.${suffix}
		done
	done

	# db[23] gets some extra-special compatibility links.
	if [ -f libdb1.so.2 ]; then
		ln -sf libdb1.so.2 libdb.so.2
		ln -sf libdb1.so.2 libdb1.so
		ln -sf libdb1.so.2 libdb-1.so
	fi
	# what do we do if we ever get 3.3 ?
	for base in libdb libdb_{cxx,tcl,java,sql,stl}; do
		if [ -f ${base}-3.2.so ]; then
			ln -sf ${base}-3.2.so ${base}-3.so
			ln -sf ${base}-3.2.so ${base}.so.3
		fi
	done

	# Do the same for headers now; since there are only two of them,
	# simply overwrite the symlinks.
	cd "${ROOT}"/usr/include
	target=$(find . -maxdepth 1 -type d -name 'db[0-9]*' | sort -n | cut -d/ -f2- | tail -n1)
	if [ -n "${target}" ] && [ -e "${target}/db.h" ] && ( ! [[ -e db.h ]] || [[ -h db.h ]] ); then
		einfo "Creating db.h symlinks to ${target}"
		ln -sf "${target}"/db.h .
		ln -sf "${target}"/db_185.h .
	elif [ ! -e "${target}/db.h" ]; then
		if [ -n "${target}" ]; then
			ewarn "Could not find ${target}/db.h"
		elif [ -h db.h ]; then
			einfo "Apparently you just removed the last instance of $PN. Removing the symlinks"
			rm -f db.h db_185.h
		fi
	fi
}
+
db_src_install_doc() {
	# The API docs are a large wad; keep them only with USE=doc.
	# /usr/docs is removed in either case (the original did the rm in
	# both branches).
	if use doc; then
		dodir /usr/share/doc/${PF}/html
		mv "${D}"/usr/docs/* "${D}"/usr/share/doc/${PF}/html/
	fi
	rm -rf "${D}"/usr/docs

	db_src_install_examples
}
+
db_src_install_examples() {
	# Install the per-language example directories with USE=examples.
	if use examples ; then
		local langs="c cxx stl"
		[[ "${IUSE/java}" != "${IUSE}" ]] \
			&& use java \
			&& langs="${langs} java"
		for i in $langs ; do
			destdir="/usr/share/doc/${PF}/"
			src="${S}/../examples_${i}/"
			# -d, not -f: ${src} is a directory, so the old -f test
			# could never succeed and the examples were never copied.
			if [ -d "${src}" ]; then
				dodir "${destdir}"
				cp -ra "${src}" "${D}${destdir}/"
			fi
		done
	fi
}
+
db_src_install_usrbinslot() {
	# Rename /usr/bin/db* to db${SLOT}* so parallel-installed slots do
	# not overwrite each other's programs.
	for fname in "${D}"/usr/bin/db*
	do
		# With no matches, bash leaves the literal glob pattern in
		# ${fname}; skip it instead of dying on a bogus mv.
		[ -e "${fname}" ] || continue
		dn="$(dirname "${fname}")"
		bn="$(basename "${fname}")"
		bn="${bn/db/db${SLOT}}"
		mv "${fname}" "${dn}/${bn}" || \
			die "Failed to rename ${fname} to ${dn}/${bn}"
	done
}
+
db_src_install_headerslot() {
	# Keep headers in a per-SLOT directory so several db versions coexist.
	local slotdir=/usr/include/db${SLOT}
	dodir "${slotdir}"
	mv "${D}"/usr/include/*.h "${D}${slotdir}/"
}
+
db_src_install_usrlibcleanup() {
	LIB="${D}/usr/$(get_libdir)"
	# Clean out the symlinks so that they will not be recorded in the
	# contents (bug #60732)

	[ -z "${D}" ] && die "Calling clean_links while \$D not defined"

	# Preserve unversioned static archives under a versioned name.
	local lib
	for lib in libdb libdb_cxx; do
		if [ -e "${LIB}/${lib}.a" ] && [ ! -e "${LIB}/${lib}-${SLOT}.a" ]; then
			einfo "Moving ${lib}.a to a versioned name"
			mv "${LIB}/${lib}.a" "${LIB}/${lib}-${SLOT}.a"
		fi
	done

	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*so' -exec rm \{} \;
	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*so.[23]' -exec rm \{} \;
	einfo "removing unversioned static archives"
	find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*a' -exec rm \{} \;

	rm -f \
		"${D}"/usr/include/{db,db_185}.h \
		"${LIB}"/libdb{,_{cxx,sql,stl,java,tcl}}.a
}
+
+db_src_test() {
+	# The Berkeley DB test harness refuses to run as root; warn and skip.
+	if [[ $UID -eq 0 ]]; then
+		M="You must run the testsuite as non-root, skipping"
+		ewarn "${M}"
+		elog "${M}"
+		return 0
+	fi
+
+	if use tcl; then
+		einfo "Running sys-libs/db testsuite"
+		ewarn "This can take 6+ hours on modern machines"
+		# Fix stuff that fails with relative paths, and upstream moving files
+		# around...
+		local test_parallel=''
+		for t in \
+			"${S}"/test/parallel.tcl \
+			"${S}"/../test/parallel.tcl \
+			"${S}"/test/tcl/parallel.tcl \
+			"${S}"/../test/tcl/parallel.tcl \
+		; do
+			[[ -f "${t}" ]] && test_parallel="${t}" && break
+		done
+
+		# Comment out the regsub calls that rewrite test/source paths.
+		sed -ri \
+			-e '/regsub .test_path ./s,(regsub),#\1,g' \
+			-e '/regsub .src_root ./s,(regsub),#\1,g' \
+			-e '/regsub .tcl_utils ./s,(regsub),#\1,g' \
+			"${test_parallel}"
+		cd "${S}"
+		for t in \
+			../test/test.tcl \
+			../test/tcl/test.tcl \
+		; do
+			[[ -f "${t}" ]] && testbase="${t}" && break
+		done
+		# NOTE(review): ${t} equals ${testbase} here only because the loop
+		# above breaks right after the assignment; ${testbase} would be clearer.
+		echo "source ${t}" > testrunner.tcl
+		echo "run_parallel $(makeopts_jobs) run_std" >> testrunner.tcl
+
+		tclsh testrunner.tcl
+		# Any line starting with FAIL in the harness output means failure.
+		egrep -qs '^FAIL' ALL.OUT* && die "Some tests failed, please see ${S}/ALL.OUT*"
+	else
+		eerror "You must have USE=tcl to run the sys-libs/db testsuite."
+	fi
+}
diff --git a/eclass/depend.apache.eclass b/eclass/depend.apache.eclass
new file mode 100644
index 000000000000..22a8216a02d9
--- /dev/null
+++ b/eclass/depend.apache.eclass
@@ -0,0 +1,317 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: depend.apache.eclass
+# @MAINTAINER:
+# apache-devs@gentoo.org
+# @BLURB: Functions to allow ebuilds to depend on apache
+# @DESCRIPTION:
+# This eclass handles depending on apache in a sane way and provides information
+# about where certain binaries and configuration files are located.
+#
+# To make use of this eclass simply call one of the need/want_apache functions
+# described below. Make sure you use the need/want_apache call after you have
+# defined DEPEND and RDEPEND. Also note that you can not rely on the automatic
+# RDEPEND=DEPEND that portage does if you use this eclass.
+#
+# See Bug 107127 for more information.
+#
+# @EXAMPLE:
+#
+# Here is an example of an ebuild depending on apache:
+#
+# @CODE
+# DEPEND="virtual/Perl-CGI"
+# RDEPEND="${DEPEND}"
+# need_apache2
+# @CODE
+#
+# Another example which demonstrates non-standard IUSE options for optional
+# apache support:
+#
+# @CODE
+# DEPEND="server? ( virtual/Perl-CGI )"
+# RDEPEND="${DEPEND}"
+# want_apache2 server
+#
+# pkg_setup() {
+# depend.apache_pkg_setup server
+# }
+# @CODE
+
+inherit multilib
+
+# ==============================================================================
+# INTERNAL VARIABLES
+# ==============================================================================
+
+# @ECLASS-VARIABLE: APACHE_VERSION
+# @DESCRIPTION:
+# Stores the version of apache we are going to be ebuilding.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APXS
+# @DESCRIPTION:
+# Path to the apxs tool.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_BIN
+# @DESCRIPTION:
+# Path to the apache binary.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_CTL
+# @DESCRIPTION:
+# Path to the apachectl tool.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_BASEDIR
+# @DESCRIPTION:
+# Path to the server root directory.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_CONFDIR
+# @DESCRIPTION:
+# Path to the configuration file directory.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_MODULES_CONFDIR
+# @DESCRIPTION:
+# Path where module configuration files are kept.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_VHOSTS_CONFDIR
+# @DESCRIPTION:
+# Path where virtual host configuration files are kept.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_MODULESDIR
+# @DESCRIPTION:
+# Path where we install modules.
+# This variable is set by the want/need_apache functions.
+
+# @ECLASS-VARIABLE: APACHE_DEPEND
+# @DESCRIPTION:
+# Dependencies for Apache
+APACHE_DEPEND="www-servers/apache"
+
+# @ECLASS-VARIABLE: APACHE2_DEPEND
+# @DESCRIPTION:
+# Dependencies for Apache 2.x
+APACHE2_DEPEND="=www-servers/apache-2*"
+
+# @ECLASS-VARIABLE: APACHE2_2_DEPEND
+# @DESCRIPTION:
+# Dependencies for Apache 2.2.x
+APACHE2_2_DEPEND="=www-servers/apache-2.2*"
+
+# @ECLASS-VARIABLE: APACHE2_4_DEPEND
+# @DESCRIPTION:
+# Dependencies for Apache 2.4.x
+APACHE2_4_DEPEND="=www-servers/apache-2.4*"
+
+
+# ==============================================================================
+# INTERNAL FUNCTIONS
+# ==============================================================================
+
_init_apache2() {
	debug-print-function $FUNCNAME $*

	# WARNING: Do not use these variables with anything that is put
	# into the dependency cache (DEPEND/RDEPEND/etc)
	APACHE_VERSION="2"

	# Tools and binaries.
	APXS="/usr/sbin/apxs2"
	APACHE_BIN="/usr/sbin/apache2"
	APACHE_CTL="/usr/sbin/apache2ctl"

	# Configuration locations.
	APACHE_CONFDIR="/etc/apache2"
	APACHE_MODULES_CONFDIR="${APACHE_CONFDIR}/modules.d"
	APACHE_VHOSTS_CONFDIR="${APACHE_CONFDIR}/vhosts.d"

	# Headers, server root and module install dir.
	APACHE_INCLUDEDIR="/usr/include/apache2"
	APACHE_BASEDIR="/usr/$(get_libdir)/apache2"
	APACHE_MODULESDIR="${APACHE_BASEDIR}/modules"
}
+
_init_no_apache() {
	debug-print-function $FUNCNAME $*
	# Sentinel: version 0 means "no apache support configured".
	APACHE_VERSION="0"
}
+
+# ==============================================================================
+# PUBLIC FUNCTIONS
+# ==============================================================================
+
+# @FUNCTION: depend.apache_pkg_setup
+# @USAGE: [myiuse]
+# @DESCRIPTION:
+# An ebuild calls this in pkg_setup() to initialize variables for optional
+# apache-2.x support. If the myiuse parameter is not given it defaults to
+# apache2.
depend.apache_pkg_setup() {
	debug-print-function $FUNCNAME $*

	# Must run from pkg_setup: the _init_* helpers set non-cache variables.
	[[ "${EBUILD_PHASE}" == "setup" ]] \
		|| die "$FUNCNAME() should be called in pkg_setup()"

	local myiuse=${1:-apache2}
	# Only act when the ebuild actually declares the USE flag.
	if has ${myiuse} ${IUSE}; then
		if use ${myiuse}; then
			_init_apache2
		else
			_init_no_apache
		fi
	fi
}
+
+# @FUNCTION: want_apache
+# @USAGE: [myiuse]
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for optional apache
+# support. If the myiuse parameter is not given it defaults to apache2.
+# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
+# with the same myiuse parameter.
want_apache() {
	debug-print-function $FUNCNAME $*
	# Historical alias — delegates straight to want_apache2.
	want_apache2 "$@"
}
+
+# @FUNCTION: want_apache2
+# @USAGE: [myiuse]
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for optional apache-2.x
+# support. If the myiuse parameter is not given it defaults to apache2.
+# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
+# with the same myiuse parameter.
want_apache2() {
	debug-print-function $FUNCNAME $*

	# Register an optional dependency on apache-2.x behind ${myiuse}.
	local myiuse=${1:-apache2}
	local dep="${myiuse}? ( ${APACHE2_DEPEND} )"
	IUSE+=" ${myiuse}"
	DEPEND+=" ${dep}"
	RDEPEND+=" ${dep}"
}
+
+# @FUNCTION: want_apache2_2
+# @USAGE: [myiuse]
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for optional
+# apache-2.2.x support. If the myiuse parameter is not given it defaults to
+# apache2.
+# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
+# with the same myiuse parameter.
want_apache2_2() {
	debug-print-function $FUNCNAME $*

	# Register an optional dependency on apache-2.2.x behind ${myiuse}.
	local myiuse=${1:-apache2}
	local dep="${myiuse}? ( ${APACHE2_2_DEPEND} )"
	IUSE+=" ${myiuse}"
	DEPEND+=" ${dep}"
	RDEPEND+=" ${dep}"
}
+
+# @FUNCTION: need_apache
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for apache.
need_apache() {
	debug-print-function $FUNCNAME $*
	# Alias for the apache-2.x variant.
	need_apache2
}
+
+# @FUNCTION: need_apache2
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for apache-2.x.
need_apache2() {
	debug-print-function $FUNCNAME $*

	# Hard dependency on apache-2.x plus runtime variable setup.
	DEPEND+=" ${APACHE2_DEPEND}"
	RDEPEND+=" ${APACHE2_DEPEND}"
	_init_apache2
}
+
+# @FUNCTION: need_apache2_2
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for apache-2.2.x.
need_apache2_2() {
	debug-print-function $FUNCNAME $*

	# Hard dependency on apache-2.2.x plus runtime variable setup.
	DEPEND+=" ${APACHE2_2_DEPEND}"
	RDEPEND+=" ${APACHE2_2_DEPEND}"
	_init_apache2
}
+
+# @FUNCTION: need_apache2_4
+# @DESCRIPTION:
+# An ebuild calls this to get the dependency information for apache-2.4.x.
need_apache2_4() {
	debug-print-function $FUNCNAME $*

	# Hard dependency on apache-2.4.x plus runtime variable setup.
	DEPEND+=" ${APACHE2_4_DEPEND}"
	RDEPEND+=" ${APACHE2_4_DEPEND}"
	_init_apache2
}
+
+# @FUNCTION: has_apache
+# @DESCRIPTION:
+# An ebuild calls this to get runtime variables for an indirect apache
+# dependency without USE-flag, in which case want_apache does not work.
+# DO NOT call this function in global scope.
has_apache() {
	debug-print-function $FUNCNAME $*

	# Pick runtime variables based on whether apache-2.x is installed.
	if ! has_version '>=www-servers/apache-2'; then
		_init_no_apache
	else
		_init_apache2
	fi
}
+
+# @FUNCTION: has_apache_threads
+# @USAGE: [myflag]
+# @DESCRIPTION:
+# An ebuild calls this to make sure thread-safety is enabled if apache has been
+# built with a threaded MPM. If the myflag parameter is not given it defaults to
+# threads.
has_apache_threads() {
	debug-print-function $FUNCNAME $*

	# Nothing to enforce unless apache itself uses a threaded MPM.
	built_with_use www-servers/apache threads || return 0

	local myflag="${1:-threads}"

	# Thread-safe build requested and enabled: all good.
	use ${myflag} && return 0

	echo
	eerror "You need to enable USE flag '${myflag}' to build a thread-safe version"
	eerror "of ${CATEGORY}/${PN} for use with www-servers/apache"
	die "Need missing USE flag '${myflag}'"
}
+
+# @FUNCTION: has_apache_threads_in
+# @USAGE: <myforeign> [myflag]
+# @DESCRIPTION:
+# An ebuild calls this to make sure thread-safety is enabled in a foreign
+# package if apache has been built with a threaded MPM. If the myflag parameter
+# is not given it defaults to threads.
has_apache_threads_in() {
	debug-print-function $FUNCNAME $*

	# Nothing to enforce unless apache itself uses a threaded MPM.
	built_with_use www-servers/apache threads || return 0

	local myforeign="$1"
	local myflag="${2:-threads}"

	# Foreign package already built thread-safe: all good.
	built_with_use ${myforeign} ${myflag} && return 0

	echo
	eerror "You need to enable USE flag '${myflag}' in ${myforeign} to"
	eerror "build a thread-safe version of ${CATEGORY}/${PN} for use"
	eerror "with www-servers/apache"
	die "Need missing USE flag '${myflag}' in ${myforeign}"
}
+
+EXPORT_FUNCTIONS pkg_setup
diff --git a/eclass/depend.php.eclass b/eclass/depend.php.eclass
new file mode 100644
index 000000000000..c29cda22ca23
--- /dev/null
+++ b/eclass/depend.php.eclass
@@ -0,0 +1,258 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @DEAD
+# @ECLASS: depend.php.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Stuart Herbert <stuart@gentoo.org>
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# Author: Jakub Moc <jakub@gentoo.org> (documentation)
+# @BLURB: Functions to allow ebuilds to depend on php5 and check for specific features.
+# @DESCRIPTION:
+# This eclass provides functions that allow ebuilds to depend on php5 and check
+# for specific PHP features, SAPIs etc. Also provides dodoc-php wrapper to install
+# documentation for PHP packages to php-specific location.
+# This eclass is deprecated and is set to be removed 30 days after bug 552836 is resolved
+
+inherit eutils multilib
+
+# PHP5-only depend functions
+
+# @FUNCTION: need_php5
+# @DESCRIPTION:
+# Set this after setting DEPEND/RDEPEND in your ebuild if the ebuild requires PHP5
+# (with any SAPI).
need_php5() {
	# Require any PHP 5.x SAPI and remember the selected major version.
	DEPEND+=" =dev-lang/php-5*"
	RDEPEND+=" =dev-lang/php-5*"
	PHP_VERSION="5"
	PHP_SHARED_CAT="php5"
}
+
+# common settings go in here
# Common PHP5 settings: tool paths, selected package and extension dir.
uses_php5() {
	# cache the libdir lookup
	libdir=$(get_libdir)
	local prefix="/usr/${libdir}/php5"

	PHPIZE="${prefix}/bin/phpize"
	PHPCONFIG="${prefix}/bin/php-config"
	PHPCLI="${prefix}/bin/php"
	PHPCGI="${prefix}/bin/php-cgi"
	PHP_PKG="$(best_version =dev-lang/php-5*)"
	PHPPREFIX="${prefix}"
	EXT_DIR="$(${PHPCONFIG} --extension-dir 2>/dev/null)"

	einfo
	einfo "Using ${PHP_PKG}"
	einfo
}
+
+# general PHP depend functions
+
+# @FUNCTION: need_php_httpd
+# @DESCRIPTION:
+# Set this after setting DEPEND/RDEPEND in your ebuild if the ebuild requires PHP
+# (any version) with either cgi or apache2 SAPI.
need_php_httpd() {
	# Require PHP (any version) with either the cgi or apache2 SAPI.
	DEPEND+=" virtual/httpd-php"
	RDEPEND+=" virtual/httpd-php"
}
+
+# @FUNCTION: need_php
+# @DESCRIPTION:
+# Set this after setting DEPEND/RDEPEND in your ebuild if the ebuild requires PHP
+# (any version with any SAPI).
need_php() {
	# Require PHP, any version with any SAPI.
	DEPEND+=" dev-lang/php"
	RDEPEND+=" dev-lang/php"
	PHP_SHARED_CAT="php"
}
+
+# @FUNCTION: has_php
+# @DESCRIPTION:
+# Call this function from your pkg_setup, src_compile, src_install etc. if you
+# need to know which PHP version is being used and where the PHP binaries/data
+# are installed.
has_php() {
	# Detect the installed PHP version; only PHP 5 is supported here.
	has_version '=dev-lang/php-5*' \
		|| die "Unable to find an installed dev-lang/php package"
	PHP_VERSION="5"

	# If we get here, then PHP_VERSION tells us which version of PHP we
	# want to use — dispatch to the version-specific settings.
	uses_php${PHP_VERSION}
}
+
+# @FUNCTION: require_php_with_use
+# @USAGE: <list of USE flags>
+# @DESCRIPTION:
+# Call this function from pkg_setup if your package requires PHP compiled
+# with specific USE flags. Returns if all of the listed USE flags are enabled.
+# Dies if any of the listed USE flags are disabled.
+
+# @VARIABLE: PHPCHECKNODIE
+# @DESCRIPTION:
+# You can set PHPCHECKNODIE to non-empty value in your ebuild to chain multiple
+# require_php_with_(any)_use checks without making the ebuild die on every failure.
+# This is useful in cases when certain PHP features are only required if specific
+# USE flag(s) are enabled for that ebuild.
+# @CODE
+# Example:
+#
+# local flags="pcre session snmp sockets wddx"
+# use mysql && flags="${flags} mysql"
+# use postgres && flags="${flags} postgres"
+# if ! PHPCHECKNODIE="yes" require_php_with_use ${flags} \
+# || ! PHPCHECKNODIE="yes" require_php_with_any_use gd gd-external ; then
+# die "Re-install ${PHP_PKG} with ${flags} and either gd or gd-external"
+# fi
+# @CODE
require_php_with_use() {
	# Verify that the installed PHP was built with all given USE flags.
	# Returns 0 when all flags are present; otherwise dies, or returns 1
	# when PHPCHECKNODIE is set (so ebuilds can chain several checks).
	has_php

	local missing_use=""
	local x

	einfo "Checking for required PHP feature(s) ..."

	# Quoted "$@": the old unquoted $@ word-split the flag arguments.
	for x in "$@" ; do
		case $x in
			pcre|spl|reflection|mhash)
				eqawarn "require_php_with_use MUST NOT check for the pcre, spl, mhash or reflection USE flag."
				eqawarn "These USE flags are removed from >=dev-lang/php-5.3 and your ebuild will break"
				eqawarn "if you check the USE flags against PHP 5.3 ebuilds."
				eqawarn "Please use USE dependencies from EAPI 2 instead"
				;;
		esac

		if ! built_with_use =${PHP_PKG} ${x} ; then
			einfo " Discovered missing USE flag: ${x}"
			missing_use="${missing_use} ${x}"
		fi
	done

	if [[ -z "${missing_use}" ]] ; then
		if [[ -z "${PHPCHECKNODIE}" ]] ; then
			return
		else
			return 0
		fi
	fi

	if [[ -z "${PHPCHECKNODIE}" ]] ; then
		eerror
		eerror "${PHP_PKG} needs to be re-installed with all of the following"
		eerror "USE flags enabled:"
		eerror
		eerror " $@"
		eerror
		die "Missing PHP USE flags found"
	else
		return 1
	fi
}
+
+
+# ========================================================================
+# require_*() functions
+#
+# These functions die() if PHP was built without the required features
+# ========================================================================
+
+# @FUNCTION: require_php_cgi
+# @DESCRIPTION:
+# Determines which installed PHP version has the CGI SAPI enabled.
+# Useful for anything which needs to run PHP scripts depending on the CGI SAPI.
+# @RETURN: die if feature is missing
require_php_cgi() {
	# If PHP_PKG is set, a previous call already selected our PHP
	# settings — nothing more to do.
	[[ -n ${PHP_PKG} ]] && return

	local PHP_PACKAGE_FOUND=""

	if has_version '=dev-lang/php-5*' ; then
		PHP_PACKAGE_FOUND="1"
		pkg="$(best_version '=dev-lang/php-5*')"
		if built_with_use =${pkg} cgi ; then
			PHP_VERSION="5"
		fi
	fi

	[[ -n ${PHP_PACKAGE_FOUND} ]] \
		|| die "Unable to find an installed dev-lang/php package"

	[[ -n ${PHP_VERSION} ]] \
		|| die "No PHP CGI installed. Re-emerge dev-lang/php with USE=cgi."

	# If we get here, PHP_VERSION tells us which version of PHP we want
	# to use — pull in the version-specific paths/settings.
	uses_php${PHP_VERSION}
}
+
+# ========================================================================
+# Misc functions
+#
+# These functions provide miscellaneous checks and functionality.
+# ========================================================================
+
+# @FUNCTION: dodoc-php
+# @USAGE: <list of docs>
+# @DESCRIPTION:
+# Alternative to dodoc function for use in our PHP eclasses and ebuilds.
+# Stored here because depend.php gets always sourced everywhere in the PHP
+# ebuilds and eclasses. It simply is dodoc with a changed path to the docs.
+# NOTE: No support for docinto is provided!
dodoc-php() {
	# dodoc replacement installing (and gzipping) docs into the
	# PHP-specific /usr/share/doc/${CATEGORY}/${PF}/ location.
	if [[ $# -lt 1 ]] ; then
		echo "$0: at least one argument needed" 1>&2
		exit 1
	fi

	phpdocdir="/usr/share/doc/${CATEGORY}/${PF}/"

	# Quoted "$@": the old unquoted $@ split filenames containing spaces.
	for x in "$@" ; do
		if [[ -s "${x}" ]] ; then
			insinto "${phpdocdir}"
			doins "${x}"
			gzip -f -9 "${D}/${phpdocdir}/${x##*/}"
		elif [[ ! -e "${x}" ]] ; then
			echo "dodoc-php: ${x} does not exist" 1>&2
		fi
	done
}
+
+# @FUNCTION: dohtml-php
+# @USAGE: <list of html docs>
+# @DESCRIPTION:
+# Alternative to dohtml function for use in our PHP eclasses and ebuilds.
+# Stored here because depend.php gets always sourced everywhere in the PHP
+# ebuilds and eclasses. It simply is dohtml with a changed path to the docs.
+# NOTE: No support for [-a|-A|-p|-x] options is provided!
dohtml-php() {
	# dohtml replacement installing HTML docs into the PHP-specific
	# /usr/share/doc/${CATEGORY}/${PF}/html location.
	if [[ $# -lt 1 ]] ; then
		echo "$0: at least one argument needed" 1>&2
		exit 1
	fi

	phphtmldir="/usr/share/doc/${CATEGORY}/${PF}/html"

	# Quoted "$@": the old unquoted $@ split filenames containing spaces.
	for x in "$@" ; do
		if [[ -s "${x}" ]] ; then
			insinto "${phphtmldir}"
			doins "${x}"
		elif [[ ! -e "${x}" ]] ; then
			echo "dohtml-php: ${x} does not exist" 1>&2
		fi
	done
}
diff --git a/eclass/distutils-r1.eclass b/eclass/distutils-r1.eclass
new file mode 100644
index 000000000000..e6be0524f0de
--- /dev/null
+++ b/eclass/distutils-r1.eclass
@@ -0,0 +1,822 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: distutils-r1
+# @MAINTAINER:
+# Python team <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on the work of: Krzysztof Pawlik <nelchael@gentoo.org>
+# @BLURB: A simple eclass to build Python packages using distutils.
+# @DESCRIPTION:
+# A simple eclass providing functions to build Python packages using
+# the distutils build system. It exports phase functions for all
+# the src_* phases. Each of the phases runs two pseudo-phases:
+# python_..._all() (e.g. python_prepare_all()) once in ${S}, then
+# python_...() (e.g. python_prepare()) for each implementation
+# (see: python_foreach_impl() in python-r1).
+#
+# In distutils-r1_src_prepare(), the 'all' function is run before
+# per-implementation ones (because it creates the implementations),
+# per-implementation functions are run in a random order.
+#
+# In remaining phase functions, the per-implementation functions are run
+# before the 'all' one, and they are ordered from the least to the most
+# preferred implementation (so that 'better' files overwrite 'worse'
+# ones).
+#
+# If the ebuild doesn't specify a particular pseudo-phase function,
+# the default one will be used (distutils-r1_...). Defaults are provided
+# for all per-implementation pseudo-phases, python_prepare_all()
+# and python_install_all(); whenever writing your own pseudo-phase
+# functions, you should consider calling the defaults (and especially
+# distutils-r1_python_prepare_all).
+#
+# Please note that distutils-r1 sets RDEPEND and DEPEND unconditionally
+# for you.
+#
+# Also, please note that distutils-r1 will always inherit python-r1
+# as well. Thus, all the variables defined and documented there are
+# relevant to the packages using distutils-r1.
+#
+# For more information, please see the wiki:
+# https://wiki.gentoo.org/wiki/Project:Python/distutils-r1
+
# Fail loudly at sourcing time for EAPIs this eclass does not support,
# rather than breaking somewhere mid-build.
case "${EAPI:-0}" in
	0|1|2|3)
		die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
		;;
	4|5)
		;;
	*)
		die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
		;;
esac
+
+# @ECLASS-VARIABLE: DISTUTILS_OPTIONAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-null value, distutils part in the ebuild will
+# be considered optional. No dependencies will be added and no phase
+# functions will be exported.
+#
+# If you enable DISTUTILS_OPTIONAL, you have to set proper dependencies
+# for your package (using ${PYTHON_DEPS}) and to either call
+# distutils-r1 default phase functions or call the build system
+# manually.
+
+# @ECLASS-VARIABLE: DISTUTILS_SINGLE_IMPL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-null value, the ebuild will support setting a single
+# Python implementation only. It will effectively replace the python-r1
+# eclass inherit with python-single-r1.
+#
+# Note that inheriting python-single-r1 will cause pkg_setup()
+# to be exported. It must be run in order for the eclass functions
+# to function properly.
+
# Inherit guard: the eclass body must run only once even if inherited
# multiple times; _DISTUTILS_R1 is set at the end of the file.
if [[ ! ${_DISTUTILS_R1} ]]; then

inherit eutils toolchain-funcs

# DISTUTILS_SINGLE_IMPL switches between multi-impl (python-r1) and
# single-impl (python-single-r1) flavors of the python eclasses.
if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
	inherit multiprocessing python-r1
else
	inherit python-single-r1
fi

fi

# EXPORT_FUNCTIONS is deliberately outside the guard: phase functions
# must be re-exported for every ebuild that inherits this eclass.
if [[ ! ${DISTUTILS_OPTIONAL} ]]; then
	EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
fi

if [[ ! ${_DISTUTILS_R1} ]]; then

# Unconditional deps unless the ebuild opted out via DISTUTILS_OPTIONAL.
if [[ ! ${DISTUTILS_OPTIONAL} ]]; then
	RDEPEND=${PYTHON_DEPS}
	DEPEND=${PYTHON_DEPS}
	REQUIRED_USE=${PYTHON_REQUIRED_USE}
fi
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing patches to be applied to the sources before
+# copying them.
+#
+# If unset, no custom patches will be applied.
+#
+# Please note, however, that at some point the eclass may apply
+# additional distutils patches/quirks independently of this variable.
+#
+# Example:
+# @CODE
+# PATCHES=( "${FILESDIR}"/${P}-make-gentoo-happy.patch )
+# @CODE
+
+# @ECLASS-VARIABLE: DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing documents installed using dodoc. The files listed
+# there must exist in the directory from which
+# distutils-r1_python_install_all() is run (${S} by default).
+#
+# If unset, the function will instead look up files matching default
+# filename pattern list (from the Package Manager Specification),
+# and install those found.
+#
+# Example:
+# @CODE
+# DOCS=( NEWS README )
+# @CODE
+
+# @ECLASS-VARIABLE: HTML_DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing documents installed using dohtml. The files
+# and directories listed there must exist in the directory from which
+# distutils-r1_python_install_all() is run (${S} by default).
+#
+# If unset, no HTML docs will be installed.
+#
+# Example:
+# @CODE
+# HTML_DOCS=( doc/html/. )
+# @CODE
+
+# @ECLASS-VARIABLE: EXAMPLES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing examples installed into 'examples' doc
+# subdirectory. The files and directories listed there must exist
+# in the directory from which distutils-r1_python_install_all() is run
+# (${S} by default).
+#
+# The 'examples' subdirectory will be marked not to be compressed
+# automatically.
+#
+# If unset, no examples will be installed.
+#
+# Example:
+# @CODE
+# EXAMPLES=( examples/. demos/. )
+# @CODE
+
+# @ECLASS-VARIABLE: DISTUTILS_IN_SOURCE_BUILD
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set to a non-null value, in-source builds will be enabled.
+# If unset, the default is to use in-source builds when python_prepare()
+# is declared, and out-of-source builds otherwise.
+#
+# If in-source builds are used, the eclass will create a copy of package
+# sources for each Python implementation in python_prepare_all(),
+# and work on that copy afterwards.
+#
+# If out-of-source builds are used, the eclass will instead work
+# on the sources directly, prepending setup.py arguments with
+# 'build --build-base ${BUILD_DIR}' to enforce keeping & using built
+# files in the specific root.
+
+# @ECLASS-VARIABLE: DISTUTILS_ALL_SUBPHASE_IMPLS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array of patterns specifying which implementations can be used
+# for *_all() sub-phase functions. If undefined, defaults to '*'
+# (allowing any implementation). If multiple values are specified,
+# implementations matching any of the patterns will be accepted.
+#
+# If the restriction needs to apply conditionally to a USE flag,
+# the variable should be set conditionally as well (e.g. in an early
+# phase function or other convenient location).
+#
+# Please remember to add a matching || block to REQUIRED_USE,
+# to ensure that at least one implementation matching the patterns will
+# be enabled.
+#
+# Example:
+# @CODE
+# REQUIRED_USE="doc? ( || ( $(python_gen_useflags 'python2*') ) )"
+#
+# pkg_setup() {
+# use doc && DISTUTILS_ALL_SUBPHASE_IMPLS=( 'python2*' )
+# }
+# @CODE
+
+# @ECLASS-VARIABLE: mydistutilsargs
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing options to be passed to setup.py.
+#
+# Example:
+# @CODE
+# python_configure_all() {
+# mydistutilsargs=( --enable-my-hidden-option )
+# }
+# @CODE
+
+# @FUNCTION: esetup.py
+# @USAGE: [<args>...]
+# @DESCRIPTION:
+# Run setup.py using currently selected Python interpreter
+# (if ${PYTHON} is set; fallback 'python' otherwise).
+#
+# setup.py will be passed the following, in order:
+# 1. ${mydistutilsargs[@]}
+# 2. additional arguments passed to the esetup.py function.
+#
# Please note that setup.py will respect defaults (unless overridden
+# via command-line options) from setup.cfg that is created
+# in distutils-r1_python_compile and in distutils-r1_python_install.
+#
+# This command dies on failure.
esetup.py() {
	debug-print-function ${FUNCNAME} "${@}"

	# Assemble the full command line: interpreter, setup.py, global
	# eclass options, then caller-supplied arguments.
	local -a cmd=( "${PYTHON:-python}" setup.py "${mydistutilsargs[@]}" "${@}" )

	# Log the command to stderr, then execute it; dies on failure.
	echo "${cmd[@]}" >&2
	"${cmd[@]}" || die
}
+
+# @FUNCTION: distutils_install_for_testing
+# @USAGE: [<args>...]
+# @DESCRIPTION:
+# Install the package into a temporary location for running tests.
+# Update PYTHONPATH appropriately and set TEST_DIR to the test
+# installation root. The Python packages will be installed in 'lib'
+# subdir, and scripts in 'scripts' subdir (like in BUILD_DIR).
+#
+# Please note that this function should be only used if package uses
+# namespaces (and therefore proper install needs to be done to enforce
+# PYTHONPATH) or tests rely on the results of install command.
+# For most of the packages, tests built in BUILD_DIR are good enough.
distutils_install_for_testing() {
	debug-print-function ${FUNCNAME} "${@}"

	# A few notes:
	# 1) because of namespaces, we can't use 'install --root',
	# 2) 'install --home' is terribly broken on pypy, so we need
	#    to override --install-lib and --install-scripts,
	# 3) non-root 'install' complains about PYTHONPATH and missing dirs,
	#    so we need to set it properly and mkdir them,
	# 4) it runs a bunch of commands which write random files to cwd,
	#    in order to avoid that, we add the necessary path overrides
	#    in _distutils-r1_create_setup_cfg.

	# TEST_DIR and PYTHONPATH are intentionally NOT local: callers
	# (test phases) consume them after this function returns.
	TEST_DIR=${BUILD_DIR}/test
	local bindir=${TEST_DIR}/scripts
	local libdir=${TEST_DIR}/lib
	PYTHONPATH=${libdir}:${PYTHONPATH}

	# Explicit --install-lib/--install-scripts work around the broken
	# 'install --home' layout (note 2 above).
	local add_args=(
		install
		--home="${TEST_DIR}"
		--install-lib="${libdir}"
		--install-scripts="${bindir}"
	)

	# note 3: the lib dir must exist before setup.py touches it.
	mkdir -p "${libdir}" || die
	esetup.py "${add_args[@]}" "${@}"
}
+
+# @FUNCTION: _distutils-r1_disable_ez_setup
+# @INTERNAL
+# @DESCRIPTION:
+# Stub out ez_setup.py and distribute_setup.py to prevent packages
+# from trying to download a local copy of setuptools.
_distutils-r1_disable_ez_setup() {
	# Overwrite any bundled setuptools bootstrap scripts with a no-op
	# use_setuptools() so nothing gets downloaded at build time.
	local noop_body="def use_setuptools(*args, **kwargs): pass"
	local script
	for script in ez_setup.py distribute_setup.py; do
		if [[ -f ${script} ]]; then
			echo "${noop_body}" > "${script}" || die
		fi
	done
}
+
+# @FUNCTION: distutils-r1_python_prepare_all
+# @DESCRIPTION:
+# The default python_prepare_all(). It applies the patches from PATCHES
+# array, then user patches and finally calls python_copy_sources to
+# create copies of resulting sources for each Python implementation.
+#
+# At some point in the future, it may also apply eclass-specific
+# distutils patches and/or quirks.
distutils-r1_python_prepare_all() {
	debug-print-function ${FUNCNAME} "${@}"

	# Ebuild patches first, then user patches from /etc/portage/patches.
	[[ ${PATCHES} ]] && epatch "${PATCHES[@]}"

	epatch_user

	# by default, use in-source build if python_prepare() is used
	# (the ${VAR+1} expansion distinguishes 'unset' from 'set-but-empty',
	# so an explicit empty value disables in-source builds).
	if [[ ! ${DISTUTILS_IN_SOURCE_BUILD+1} ]]; then
		if declare -f python_prepare >/dev/null; then
			DISTUTILS_IN_SOURCE_BUILD=1
		fi
	fi

	_distutils-r1_disable_ez_setup

	if [[ ${DISTUTILS_IN_SOURCE_BUILD} && ! ${DISTUTILS_SINGLE_IMPL} ]]
	then
		# create source copies for each implementation
		python_copy_sources
	fi

	# Signal to distutils-r1_src_prepare that the default ran
	# (it QA-warns when a custom python_prepare_all skips this).
	_DISTUTILS_DEFAULT_CALLED=1
}
+
+# @FUNCTION: distutils-r1_python_prepare
+# @DESCRIPTION:
+# The default python_prepare(). A no-op.
distutils-r1_python_prepare() {
	debug-print-function ${FUNCNAME} "${@}"

	# Intentionally empty: per-implementation preparation is an
	# ebuild-provided hook; the default does nothing.
	return 0
}
+
+# @FUNCTION: distutils-r1_python_configure
+# @DESCRIPTION:
+# The default python_configure(). A no-op.
distutils-r1_python_configure() {
	debug-print-function ${FUNCNAME} "${@}"

	# Intentionally empty: distutils needs no configure step; this
	# exists only so the phase machinery has a default to call.
	true
}
+
+# @FUNCTION: _distutils-r1_create_setup_cfg
+# @INTERNAL
+# @DESCRIPTION:
+# Create implementation-specific configuration file for distutils,
+# setting proper build-dir (and install-dir) paths.
_distutils-r1_create_setup_cfg() {
	# Written to per-implementation ${HOME} (see distutils-r1_run_phase),
	# so each Python gets its own distutils defaults.
	cat > "${HOME}"/.pydistutils.cfg <<-_EOF_ || die
		[build]
		build-base = ${BUILD_DIR}

		# using a single directory for them helps us export
		# ${PYTHONPATH} and ebuilds find the sources independently
		# of whether the package installs extensions or not
		#
		# note: due to some packages (wxpython) relying on separate
		# platlib & purelib dirs, we do not set --build-lib (which
		# can not be overriden with --build-*lib)
		build-platlib = %(build-base)s/lib
		build-purelib = %(build-base)s/lib

		# make the ebuild writer lives easier
		build-scripts = %(build-base)s/scripts

		[egg_info]
		egg-base = ${BUILD_DIR}

		# this is needed by distutils_install_for_testing since
		# setuptools like to create .egg files for install --home.
		[bdist_egg]
		dist-dir = ${BUILD_DIR}/dist
	_EOF_

	# we can't refer to ${D} before src_install()
	if [[ ${EBUILD_PHASE} == install ]]; then
		# Append install paths only during the install phase.
		cat >> "${HOME}"/.pydistutils.cfg <<-_EOF_ || die

			# installation paths -- allow calling extra install targets
			# without the default 'install'
			[install]
			compile = True
			optimize = 2
			root = ${D}
		_EOF_

		# Multi-impl builds wrap scripts later, so install them into the
		# implementation-specific scriptdir for now.
		if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
			cat >> "${HOME}"/.pydistutils.cfg <<-_EOF_ || die
				install-scripts = $(python_get_scriptdir)
			_EOF_
		fi
	fi
}
+
+# @FUNCTION: _distutils-r1_copy_egg_info
+# @INTERNAL
+# @DESCRIPTION:
+# Copy egg-info files to the ${BUILD_DIR} (that's going to become
+# egg-base in esetup.py). This way, we respect whatever's in upstream
+# egg-info.
_distutils-r1_copy_egg_info() {
	# BUILD_DIR becomes egg-base in the generated setup.cfg; preexisting
	# upstream egg-info dirs are copied there so they are respected.
	mkdir -p "${BUILD_DIR}" || die
	# stupid freebsd can't do 'cp -t ${BUILD_DIR} {} +'
	find -name '*.egg-info' -type d -exec cp -pr {} "${BUILD_DIR}"/ ';' || die
}
+
+# @FUNCTION: distutils-r1_python_compile
+# @USAGE: [additional-args...]
+# @DESCRIPTION:
+# The default python_compile(). Runs 'esetup.py build'. Any parameters
+# passed to this function will be appended to setup.py invocation,
+# i.e. passed as options to the 'build' command.
+#
+# This phase also sets up initial setup.cfg with build directories
+# and copies upstream egg-info files if supplied.
distutils-r1_python_compile() {
	debug-print-function ${FUNCNAME} "${@}"

	# Write build paths to .pydistutils.cfg and import any upstream
	# egg-info before invoking the build.
	_distutils-r1_create_setup_cfg
	_distutils-r1_copy_egg_info

	# Extra arguments become options to the 'build' command.
	esetup.py build "${@}"
}
+
+# @FUNCTION: _distutils-r1_wrap_scripts
+# @USAGE: <path> <bindir>
+# @INTERNAL
+# @DESCRIPTION:
+# Moves and wraps all installed scripts/executables as necessary.
_distutils-r1_wrap_scripts() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${#} -eq 2 ]] || die "usage: ${FUNCNAME} <path> <bindir>"
	local path=${1}
	local bindir=${2}

	# Implementation-specific script dir (e.g. /usr/lib/python-exec/...).
	local PYTHON_SCRIPTDIR
	python_export PYTHON_SCRIPTDIR

	local f python_files=() non_python_files=()

	if [[ -d ${path}${PYTHON_SCRIPTDIR} ]]; then
		for f in "${path}${PYTHON_SCRIPTDIR}"/*; do
			[[ -d ${f} ]] && die "Unexpected directory: ${f}"
			debug-print "${FUNCNAME}: found executable at ${f#${path}/}"

			# Classify by first line: scripts whose shebang mentions
			# ${EPYTHON} get a python-exec wrapper; anything else is
			# moved to bindir verbatim.
			local shebang
			read -r shebang < "${f}"
			if [[ ${shebang} == '#!'*${EPYTHON}* ]]; then
				debug-print "${FUNCNAME}: matching shebang: ${shebang}"
				python_files+=( "${f}" )
			else
				debug-print "${FUNCNAME}: non-matching shebang: ${shebang}"
				non_python_files+=( "${f}" )
			fi

			mkdir -p "${path}${bindir}" || die
		done

		# Python scripts: symlink the python-exec wrapper into bindir.
		for f in "${python_files[@]}"; do
			local basename=${f##*/}

			debug-print "${FUNCNAME}: installing wrapper at ${bindir}/${basename}"
			_python_ln_rel "${path}${EPREFIX}"/usr/lib/python-exec/python-exec2 \
				"${path}${bindir}/${basename}" || die
		done

		# Non-Python executables: move them into bindir as-is.
		for f in "${non_python_files[@]}"; do
			local basename=${f##*/}

			debug-print "${FUNCNAME}: moving ${f#${path}/} to ${bindir}/${basename}"
			mv "${f}" "${path}${bindir}/${basename}" || die
		done
	fi
}
+
+# @FUNCTION: distutils-r1_python_install
+# @USAGE: [additional-args...]
+# @DESCRIPTION:
+# The default python_install(). Runs 'esetup.py install', doing
+# intermediate root install and handling script wrapping afterwards.
+# Any parameters passed to this function will be appended
+# to the setup.py invocation (i.e. as options to the 'install' command).
+#
+# This phase updates the setup.cfg file with install directories.
distutils-r1_python_install() {
	debug-print-function ${FUNCNAME} "${@}"

	local args=( "${@}" )

	# enable compilation for the install phase.
	local -x PYTHONDONTWRITEBYTECODE=

	# re-create setup.cfg with install paths
	_distutils-r1_create_setup_cfg

	# python likes to compile any module it sees, which triggers sandbox
	# failures if some packages haven't compiled their modules yet.
	addpredict "${EPREFIX}/usr/$(get_libdir)/${EPYTHON}"
	addpredict /usr/lib/portage/pym
	addpredict /usr/local # bug 498232

	if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
		# user may override --install-scripts
		# note: this is poor but distutils argv parsing is dumb
		local mydistutilsargs=( "${mydistutilsargs[@]}" )
		local scriptdir=${EPREFIX}/usr/bin

		# construct a list of mydistutilsargs[0] args[0] args[1]...
		# Each element is an array-cell NAME; ${!arg_var} below reads the
		# value and unset removes the option from the original array.
		local arg arg_vars
		[[ ${mydistutilsargs[@]} ]] && eval arg_vars+=(
			'mydistutilsargs['{0..$(( ${#mydistutilsargs[@]} - 1 ))}']'
		)
		[[ ${args[@]} ]] && eval arg_vars+=(
			'args['{0..$(( ${#args[@]} - 1 ))}']'
		)

		# Scan for --install-scripts (both '=value' and separate-word
		# forms), remember its value and strip it from the arguments.
		set -- "${arg_vars[@]}"
		while [[ ${@} ]]; do
			local arg_var=${1}
			shift
			local a=${!arg_var}

			case "${a}" in
				--install-scripts=*)
					scriptdir=${a#--install-scripts=}
					unset "${arg_var}"
					;;
				--install-scripts)
					scriptdir=${!1}
					unset "${arg_var}" "${1}"
					shift
					;;
			esac
		done
	fi

	# Multi-impl: install into a per-implementation staging root first,
	# merge into ${D} at the end; single-impl installs straight to ${D}.
	local root=${D}/_${EPYTHON}
	[[ ${DISTUTILS_SINGLE_IMPL} ]] && root=${D}

	esetup.py install --root="${root}" "${args[@]}"

	# QA: top-level 'examples'/'test'/'tests' packages collide between
	# different packages and must never be installed.
	local forbidden_package_names=( examples test tests )
	local p
	for p in "${forbidden_package_names[@]}"; do
		if [[ -d ${root}$(python_get_sitedir)/${p} ]]; then
			die "Package installs '${p}' package which is forbidden and likely a bug in the build system."
		fi
	done
	if [[ -d ${root}/usr/$(get_libdir)/pypy/share ]]; then
		eqawarn "Package installs 'share' in PyPy prefix, see bug #465546."
	fi

	if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
		_distutils-r1_wrap_scripts "${root}" "${scriptdir}"
		multibuild_merge_root "${root}" "${D}"
	fi
}
+
+# @FUNCTION: distutils-r1_python_install_all
+# @DESCRIPTION:
+# The default python_install_all(). It installs the documentation.
distutils-r1_python_install_all() {
	debug-print-function ${FUNCNAME} "${@}"

	# Installs DOCS/HTML_DOCS (or the PMS default doc file list).
	einstalldocs

	# EXAMPLES go to .../examples, excluded from compression.
	if declare -p EXAMPLES &>/dev/null; then
		local INSDESTTREE=/usr/share/doc/${PF}/examples
		doins -r "${EXAMPLES[@]}"
		docompress -x "${INSDESTTREE}"
	fi

	# Signal to distutils-r1_src_install that the default ran
	# (it QA-warns when a custom python_install_all skips this).
	_DISTUTILS_DEFAULT_CALLED=1
}
+
+# @FUNCTION: distutils-r1_run_phase
+# @USAGE: [<argv>...]
+# @INTERNAL
+# @DESCRIPTION:
+# Run the given command.
+#
+# If out-of-source builds are used, the phase function is run in source
+# directory, with BUILD_DIR pointing at the build directory
+# and PYTHONPATH having an entry for the module build directory.
+#
+# If in-source builds are used, the command is executed in the directory
+# holding the per-implementation copy of sources. BUILD_DIR points
+# to the 'build' subdirectory.
distutils-r1_run_phase() {
	debug-print-function ${FUNCNAME} "${@}"

	if [[ ${DISTUTILS_IN_SOURCE_BUILD} ]]; then
		# In-source: work inside the per-implementation source copy;
		# BUILD_DIR is shadowed (local) to point at its build/ subdir.
		if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
			cd "${BUILD_DIR}" || die
		fi
		local BUILD_DIR=${BUILD_DIR}/build
	fi
	local -x PYTHONPATH="${BUILD_DIR}/lib:${PYTHONPATH}"

	# We need separate home for each implementation, for .pydistutils.cfg.
	if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
		local -x HOME=${HOME}/${EPYTHON}
		mkdir -p "${HOME}" || die
	fi

	# Set up build environment, bug #513664.
	local -x AR=${AR} CC=${CC} CPP=${CPP} CXX=${CXX}
	tc-export AR CC CPP CXX

	# How to build Python modules in different worlds...
	local ldopts
	case "${CHOST}" in
		# provided by haubi, 2014-07-08
		*-aix*) ldopts='-shared -Wl,-berok';; # good enough
		# provided by grobian, 2014-06-22, bug #513664 c7
		*-darwin*) ldopts='-bundle -undefined dynamic_lookup';;
		*) ldopts='-shared';;
	esac

	local -x LDSHARED="${CC} ${ldopts}" LDCXXSHARED="${CXX} ${ldopts}"

	# Run the actual phase command with the environment above.
	"${@}"

	# Restore the directory saved by _distutils-r1_run_foreach_impl.
	cd "${_DISTUTILS_INITIAL_CWD}" || die
}
+
+# @FUNCTION: _distutils-r1_run_common_phase
+# @USAGE: [<argv>...]
+# @INTERNAL
+# @DESCRIPTION:
+# Run the given command, restoring the state for a most preferred Python
+# implementation matching DISTUTILS_ALL_SUBPHASE_IMPLS.
+#
+# If in-source build is used, the command will be run in the copy
+# of sources made for the selected Python interpreter.
_distutils-r1_run_common_phase() {
	local DISTUTILS_ORIG_BUILD_DIR=${BUILD_DIR}

	if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
		# Find the most preferred enabled implementation that matches
		# DISTUTILS_ALL_SUBPHASE_IMPLS (default: any). python_foreach_impl
		# iterates least- to most-preferred, so the last match wins.
		local best_impl patterns=( "${DISTUTILS_ALL_SUBPHASE_IMPLS[@]-*}" )
		_distutils_try_impl() {
			local pattern
			for pattern in "${patterns[@]}"; do
				if [[ ${EPYTHON} == ${pattern} ]]; then
					best_impl=${MULTIBUILD_VARIANT}
				fi
			done
		}
		python_foreach_impl _distutils_try_impl

		# Restrict the following foreach run to just the chosen impl.
		local PYTHON_COMPAT=( "${best_impl}" )
	fi

	_distutils-r1_run_foreach_impl "${@}"
}
+
+# @FUNCTION: _distutils-r1_run_foreach_impl
+# @INTERNAL
+# @DESCRIPTION:
+# Run the given phase for each implementation if multiple implementations
+# are enabled, once otherwise.
_distutils-r1_run_foreach_impl() {
	debug-print-function ${FUNCNAME} "${@}"

	if [[ ${DISTUTILS_NO_PARALLEL_BUILD} ]]; then
		eqawarn "DISTUTILS_NO_PARALLEL_BUILD is no longer meaningful. Now all builds"
		eqawarn "are non-parallel. Please remove it from the ebuild."

		unset DISTUTILS_NO_PARALLEL_BUILD # avoid repeated warnings
	fi

	# store for restoring after distutils-r1_run_phase.
	local _DISTUTILS_INITIAL_CWD=${PWD}
	set -- distutils-r1_run_phase "${@}"

	if [[ ! ${DISTUTILS_SINGLE_IMPL} ]]; then
		python_foreach_impl "${@}"
	else
		if [[ ! ${EPYTHON} ]]; then
			die "EPYTHON unset, python-single-r1_pkg_setup not called?!"
		fi
		# Single-impl: derive a per-implementation BUILD_DIR and run once.
		local BUILD_DIR=${BUILD_DIR:-${S}}
		BUILD_DIR=${BUILD_DIR%%/}_${EPYTHON}

		"${@}"
	fi
}
+
distutils-r1_src_prepare() {
	debug-print-function ${FUNCNAME} "${@}"

	# Set by distutils-r1_python_prepare_all when it runs.
	local _DISTUTILS_DEFAULT_CALLED

	# common preparations
	if declare -f python_prepare_all >/dev/null; then
		python_prepare_all
	else
		distutils-r1_python_prepare_all
	fi

	if [[ ! ${_DISTUTILS_DEFAULT_CALLED} ]]; then
		eqawarn "QA warning: python_prepare_all() didn't call distutils-r1_python_prepare_all"
	fi

	# Per-implementation hook runs only when the ebuild defines it
	# (the default is a no-op, so skipping it is an optimization).
	if declare -f python_prepare >/dev/null; then
		_distutils-r1_run_foreach_impl python_prepare
	fi
}
+
distutils-r1_src_configure() {
	# Consistency fix: every other exported phase function starts with
	# a debug-print-function trace; this one was missing it.
	debug-print-function ${FUNCNAME} "${@}"

	python_export_utf8_locale

	# Run per-implementation configure hook only if the ebuild defines it.
	if declare -f python_configure >/dev/null; then
		_distutils-r1_run_foreach_impl python_configure
	fi

	# Then the common (once-only) configure hook, if defined.
	if declare -f python_configure_all >/dev/null; then
		_distutils-r1_run_common_phase python_configure_all
	fi
}
+
distutils-r1_src_compile() {
	debug-print-function ${FUNCNAME} "${@}"

	# Per-implementation compile: ebuild hook if defined, eclass default
	# (esetup.py build) otherwise.
	if declare -f python_compile >/dev/null; then
		_distutils-r1_run_foreach_impl python_compile
	else
		_distutils-r1_run_foreach_impl distutils-r1_python_compile
	fi

	# Common (once-only) compile hook, only if the ebuild defines it.
	if declare -f python_compile_all >/dev/null; then
		_distutils-r1_run_common_phase python_compile_all
	fi
}
+
_clean_egg_info() {
	# Work around for setuptools test behavior (bug 534058).
	# https://bitbucket.org/pypa/setuptools/issue/292
	local egg
	for egg in "${BUILD_DIR}"/lib/*.egg-info; do
		rm -rf "${egg}"
	done
}
+
distutils-r1_src_test() {
	debug-print-function ${FUNCNAME} "${@}"

	# Tests run only when the ebuild provides python_test; stale
	# egg-info is removed afterwards (bug 534058).
	if declare -f python_test >/dev/null; then
		_distutils-r1_run_foreach_impl python_test
		_distutils-r1_run_foreach_impl _clean_egg_info
	fi

	# Common (once-only) test hook, only if the ebuild defines it.
	if declare -f python_test_all >/dev/null; then
		_distutils-r1_run_common_phase python_test_all
	fi
}
+
distutils-r1_src_install() {
	debug-print-function ${FUNCNAME} "${@}"

	# Per-implementation install: ebuild hook if defined, eclass default
	# (staged esetup.py install) otherwise.
	if declare -f python_install >/dev/null; then
		_distutils-r1_run_foreach_impl python_install
	else
		_distutils-r1_run_foreach_impl distutils-r1_python_install
	fi

	# Set by distutils-r1_python_install_all when it runs.
	local _DISTUTILS_DEFAULT_CALLED

	if declare -f python_install_all >/dev/null; then
		_distutils-r1_run_common_phase python_install_all
	else
		_distutils-r1_run_common_phase distutils-r1_python_install_all
	fi

	if [[ ! ${_DISTUTILS_DEFAULT_CALLED} ]]; then
		eqawarn "QA warning: python_install_all() didn't call distutils-r1_python_install_all"
	fi
}
+
# -- distutils.eclass functions --
#
# These names belong to the deprecated distutils.eclass API. They are
# redefined here to die immediately, so that ebuilds migrated to
# distutils-r1 cannot accidentally keep calling the old functions.

distutils_get_intermediate_installation_image() {
	die "${FUNCNAME}() is invalid for distutils-r1"
}

distutils_src_unpack() {
	die "${FUNCNAME}() is invalid for distutils-r1, and you don't want it in EAPI ${EAPI} anyway"
}

distutils_src_prepare() {
	die "${FUNCNAME}() is invalid for distutils-r1, you probably want: ${FUNCNAME/_/-r1_}"
}

distutils_src_compile() {
	die "${FUNCNAME}() is invalid for distutils-r1, you probably want: ${FUNCNAME/_/-r1_}"
}

distutils_src_test() {
	die "${FUNCNAME}() is invalid for distutils-r1, you probably want: ${FUNCNAME/_/-r1_}"
}

distutils_src_install() {
	die "${FUNCNAME}() is invalid for distutils-r1, you probably want: ${FUNCNAME/_/-r1_}"
}

distutils_pkg_postinst() {
	die "${FUNCNAME}() is invalid for distutils-r1, and pkg_postinst is unnecessary"
}

distutils_pkg_postrm() {
	die "${FUNCNAME}() is invalid for distutils-r1, and pkg_postrm is unnecessary"
}
+
+_DISTUTILS_R1=1
+fi
diff --git a/eclass/distutils.eclass b/eclass/distutils.eclass
new file mode 100644
index 000000000000..5f5a06950d84
--- /dev/null
+++ b/eclass/distutils.eclass
@@ -0,0 +1,594 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: distutils.eclass
+# @MAINTAINER:
+# Gentoo Python Project <python@gentoo.org>
+# @BLURB: Eclass for packages with build systems using Distutils
+# @DESCRIPTION:
+# The distutils eclass defines phase functions for packages with build systems using Distutils.
+#
+# This eclass is DEPRECATED. Please use distutils-r1 instead.
+
# python.eclass sets _PYTHON_ECLASS_INHERITED; inherit it only once.
if [[ -z "${_PYTHON_ECLASS_INHERITED}" ]]; then
	inherit python
fi

inherit multilib

# EAPIs 0/1 have no src_prepare phase, so src_unpack is exported instead.
case "${EAPI:-0}" in
	0|1)
		EXPORT_FUNCTIONS src_unpack src_compile src_install pkg_postinst pkg_postrm
		;;
	*)
		EXPORT_FUNCTIONS src_prepare src_compile src_install pkg_postinst pkg_postrm
		;;
esac

# Fallback dependency when the ebuild does not declare PYTHON_DEPEND.
if [[ -z "$(declare -p PYTHON_DEPEND 2> /dev/null)" ]]; then
	DEPEND="dev-lang/python"
	RDEPEND="${DEPEND}"
fi
+
+ if has "${EAPI:-0}" 0 1 && [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then
+ ewarn
+ ewarn "\"${EBUILD}\":"
+ ewarn "Deprecation Warning: Usage of distutils.eclass in packages supporting installation"
+ ewarn "for multiple Python ABIs in EAPI <=1 is deprecated."
+ ewarn "The ebuild should to be fixed. Please report a bug, if it has not been already reported."
+ ewarn
+ elif has "${EAPI:-0}" 0 1 2 && [[ -z "${SUPPORT_PYTHON_ABIS}" ]]; then
+ ewarn
+ ewarn "\"${EBUILD}\":"
+ ewarn "Deprecation Warning: Usage of distutils.eclass in packages not supporting installation"
+ ewarn "for multiple Python ABIs in EAPI <=2 is deprecated."
+ ewarn "The ebuild should to be fixed. Please report a bug, if it has not been already reported."
+ ewarn
+ fi
+
# 'python' variable is deprecated. Use PYTHON() instead.
# In old EAPIs without multi-ABI support it still expands to 'python';
# otherwise it expands to 'die' so that any stray $(python) fails loudly.
if has "${EAPI:-0}" 0 1 2 && [[ -z "${SUPPORT_PYTHON_ABIS}" ]]; then
	python="python"
else
	python="die"
fi
+
+# @ECLASS-VARIABLE: DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES
+# @DESCRIPTION:
+# Set this to use separate source directories for each enabled version of Python.
+
+# @ECLASS-VARIABLE: DISTUTILS_SETUP_FILES
+# @DESCRIPTION:
+# Array of paths to setup files.
+# Syntax:
+# [current_working_directory|]path_to_setup_file
+
+# @ECLASS-VARIABLE: DISTUTILS_GLOBAL_OPTIONS
+# @DESCRIPTION:
+# Array of global options passed to setup files.
+# Syntax in EAPI <4:
+# global_option
+# Syntax in EAPI >=4:
+# Python_ABI_pattern global_option
+
+# @ECLASS-VARIABLE: DISTUTILS_SRC_TEST
+# @DESCRIPTION:
+# Type of test command used by distutils_src_test().
+# IUSE and DEPEND are automatically adjusted, unless DISTUTILS_DISABLE_TEST_DEPENDENCY is set.
+# Valid values:
+# setup.py
+# nosetests
+# py.test
+# trial [arguments]
+
+# @ECLASS-VARIABLE: DISTUTILS_DISABLE_TEST_DEPENDENCY
+# @DESCRIPTION:
+# Disable modification of IUSE and DEPEND caused by setting of DISTUTILS_SRC_TEST.
+
# Validate DISTUTILS_SRC_TEST at sourcing time; only the four documented
# test runners are accepted.
if [[ -n "${DISTUTILS_SRC_TEST}" && ! "${DISTUTILS_SRC_TEST}" =~ ^(setup\.py|nosetests|py\.test|trial(\ .*)?)$ ]]; then
	die "'DISTUTILS_SRC_TEST' variable has unsupported value '${DISTUTILS_SRC_TEST}'"
fi

# Add the matching test? dependency unless the ebuild opted out.
if [[ -z "${DISTUTILS_DISABLE_TEST_DEPENDENCY}" ]]; then
	if [[ "${DISTUTILS_SRC_TEST}" == "nosetests" ]]; then
		IUSE="test"
		DEPEND+="${DEPEND:+ }test? ( dev-python/nose )"
	elif [[ "${DISTUTILS_SRC_TEST}" == "py.test" ]]; then
		IUSE="test"
		DEPEND+="${DEPEND:+ }test? ( dev-python/pytest )"
	# trial requires an argument, which is usually equal to "${PN}".
	elif [[ "${DISTUTILS_SRC_TEST}" =~ ^trial(\ .*)?$ ]]; then
		IUSE="test"
		DEPEND+="${DEPEND:+ }test? ( dev-python/twisted-core )"
	fi
fi

# src_test is exported only when a test runner was requested.
if [[ -n "${DISTUTILS_SRC_TEST}" ]]; then
	EXPORT_FUNCTIONS src_test
fi

# Scheduled for deletion on 2011-06-01.
if [[ -n "${DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS}" ]]; then
	eerror "Use PYTHON_NONVERSIONED_EXECUTABLES=(\".*\") instead of DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS variable."
	die "DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS variable is banned"
fi
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# Additional documentation files installed by distutils_src_install().
+
_distutils_get_build_dir() {
	# Per-ABI build dir when installing for multiple Python ABIs from a
	# shared source tree; plain 'build' otherwise.
	local dir="build"
	if _python_package_supporting_installation_for_multiple_python_abis && [[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then
		dir="build-${PYTHON_ABI}"
	fi
	echo "${dir}"
}
+
_distutils_get_PYTHONPATH() {
	# Prints the distutils build lib dir(s) ('build*/lib*') for the
	# current layout; prints nothing if no build exists yet (ls errors
	# are deliberately discarded).
	if _python_package_supporting_installation_for_multiple_python_abis && [[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then
		ls -d build-${PYTHON_ABI}/lib* 2> /dev/null
	else
		ls -d build/lib* 2> /dev/null
	fi
}
+
_distutils_hook() {
	# Invoke an optional per-phase ebuild hook, e.g.
	# distutils_src_compile_pre_hook, when it is defined as a function.
	[[ "$#" -eq 1 ]] || die "${FUNCNAME}() requires 1 argument"

	local hook_name="distutils_src_${EBUILD_PHASE}_$1_hook"
	if [[ "$(type -t "${hook_name}")" == "function" ]]; then
		"${hook_name}"
	fi
}
+
_distutils_prepare_global_options() {
	local element option pattern

	# DISTUTILS_GLOBAL_OPTIONS must be an indexed array; 'declare -p'
	# output is inspected to verify that.
	if [[ -n "$(declare -p DISTUTILS_GLOBAL_OPTIONS 2> /dev/null)" && "$(declare -p DISTUTILS_GLOBAL_OPTIONS)" != "declare -a DISTUTILS_GLOBAL_OPTIONS="* ]]; then
		die "DISTUTILS_GLOBAL_OPTIONS should be indexed array"
	fi

	if has "${EAPI:-0}" 0 1 2 3; then
		# EAPI <4: elements are plain options, used as-is.
		_DISTUTILS_GLOBAL_OPTIONS=("${DISTUTILS_GLOBAL_OPTIONS[@]}")
	else
		# EAPI >=4: elements are "ABI_pattern option"; keep only the
		# options whose pattern matches the current PYTHON_ABI.
		_DISTUTILS_GLOBAL_OPTIONS=()

		for element in "${DISTUTILS_GLOBAL_OPTIONS[@]}"; do
			if [[ ! "${element}" =~ ^[^[:space:]]+\ . ]]; then
				die "Element '${element}' of DISTUTILS_GLOBAL_OPTIONS array has invalid syntax"
			fi
			pattern="${element%% *}"
			option="${element#* }"
			if _python_check_python_abi_matching "${PYTHON_ABI}" "${pattern}"; then
				_DISTUTILS_GLOBAL_OPTIONS+=("${option}")
			fi
		done
	fi
}
+
_distutils_prepare_current_working_directory() {
	# DISTUTILS_SETUP_FILES elements may be "dir|setup_file"; more than
	# one '|' is a syntax error.
	if [[ "$1" == *"|"*"|"* ]]; then
		die "Element '$1' of DISTUTILS_SETUP_FILES array has invalid syntax"
	fi

	# When a directory part is present, announce it and pushd into it;
	# _distutils_restore_current_working_directory() pops it later.
	if [[ "$1" == *"|"* ]]; then
		echo "${_BOLD}[${1%|*}]${_NORMAL}"
		pushd "${1%|*}" > /dev/null || die "Entering directory '${1%|*}' failed"
	fi
}
+
_distutils_restore_current_working_directory() {
	# Pop the directory pushed by the matching _prepare_ call; elements
	# without a '|' never changed directory, so there is nothing to pop.
	case "$1" in
		*"|"*)
			popd > /dev/null || die "Leaving directory '${1%|*}' failed"
			;;
	esac
}
+
+# @FUNCTION: distutils_src_unpack
+# @DESCRIPTION:
+# The distutils src_unpack function. This function is exported.
distutils_src_unpack() {
	# Only EAPIs 0/1 export src_unpack; later EAPIs use src_prepare.
	if ! has "${EAPI:-0}" 0 1; then
		die "${FUNCNAME}() cannot be used in this EAPI"
	fi

	if [[ "${EBUILD_PHASE}" != "unpack" ]]; then
		die "${FUNCNAME}() can be used only in src_unpack() phase"
	fi

	unpack ${A}
	cd "${S}"

	# EAPIs 0/1 have no separate prepare phase, so run preparation here.
	distutils_src_prepare
}
+
+# @FUNCTION: distutils_src_prepare
+# @DESCRIPTION:
+# The distutils src_prepare function. This function is exported.
distutils_src_prepare() {
	# In EAPIs 0/1 this is called from src_unpack; elsewhere it must be
	# the real prepare phase.
	if ! has "${EAPI:-0}" 0 1 && [[ "${EBUILD_PHASE}" != "prepare" ]]; then
		die "${FUNCNAME}() can be used only in src_prepare() phase"
	fi

	_python_check_python_pkg_setup_execution

	local distribute_setup_existence="0" ez_setup_existence="0"

	if [[ "$#" -ne 0 ]]; then
		die "${FUNCNAME}() does not accept arguments"
	fi

	# Delete ez_setup files to prevent packages from installing Setuptools on their own.
	# If one existed, leave a stub so imports of it keep working.
	[[ -d ez_setup || -f ez_setup.py ]] && ez_setup_existence="1"
	rm -fr ez_setup*
	if [[ "${ez_setup_existence}" == "1" ]]; then
		echo "def use_setuptools(*args, **kwargs): pass" > ez_setup.py
	fi

	# Delete distribute_setup files to prevent packages from installing Distribute on their own.
	# Same stub trick as above.
	[[ -d distribute_setup || -f distribute_setup.py ]] && distribute_setup_existence="1"
	rm -fr distribute_setup*
	if [[ "${distribute_setup_existence}" == "1" ]]; then
		echo "def use_setuptools(*args, **kwargs): pass" > distribute_setup.py
	fi

	# One source copy per enabled Python version, if requested.
	if [[ -n "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then
		python_copy_sources
	fi
}
+
+# @FUNCTION: distutils_src_compile
+# @DESCRIPTION:
+# The distutils src_compile function. This function is exported.
+# In ebuilds of packages supporting installation for multiple versions of Python, this function
+# calls distutils_src_compile_pre_hook() and distutils_src_compile_post_hook(), if they are defined.
+distutils_src_compile() {
+	if [[ "${EBUILD_PHASE}" != "compile" ]]; then
+		die "${FUNCNAME}() can be used only in src_compile() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_set_color_variables
+
+	local setup_file
+
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		# Build once per enabled Python ABI: python_execute_function() runs the
+		# temporary distutils_building() function for each ABI (-s when using
+		# separate source directories).
+		distutils_building() {
+			_distutils_hook pre
+
+			_distutils_prepare_global_options
+
+			# "[@]-setup.py" falls back to a single "setup.py" element when
+			# the DISTUTILS_SETUP_FILES array is unset.
+			for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+				_distutils_prepare_current_working_directory "${setup_file}"
+
+				echo ${_BOLD}"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" build -b "$(_distutils_get_build_dir)" "$@"${_NORMAL}
+				"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" build -b "$(_distutils_get_build_dir)" "$@" || return "$?"
+
+				_distutils_restore_current_working_directory "${setup_file}"
+			done
+
+			_distutils_hook post
+		}
+		python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_building "$@"
+		unset -f distutils_building
+	else
+		# Single-ABI package: build in place, once.
+		_distutils_prepare_global_options
+
+		for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+			_distutils_prepare_current_working_directory "${setup_file}"
+
+			echo ${_BOLD}"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" build "$@"${_NORMAL}
+			"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" build "$@" || die "Building failed"
+
+			_distutils_restore_current_working_directory "${setup_file}"
+		done
+	fi
+}
+
+_distutils_src_test_hook() {
+ if [[ "$#" -ne 1 ]]; then
+ die "${FUNCNAME}() requires 1 arguments"
+ fi
+
+ if ! _python_package_supporting_installation_for_multiple_python_abis; then
+ return
+ fi
+
+ if [[ "$(type -t "distutils_src_test_pre_hook")" == "function" ]]; then
+ eval "python_execute_$1_pre_hook() {
+ distutils_src_test_pre_hook
+ }"
+ fi
+
+ if [[ "$(type -t "distutils_src_test_post_hook")" == "function" ]]; then
+ eval "python_execute_$1_post_hook() {
+ distutils_src_test_post_hook
+ }"
+ fi
+}
+
+# @FUNCTION: distutils_src_test
+# @DESCRIPTION:
+# The distutils src_test function. This function is exported, when DISTUTILS_SRC_TEST variable is set.
+# In ebuilds of packages supporting installation for multiple versions of Python, this function
+# calls distutils_src_test_pre_hook() and distutils_src_test_post_hook(), if they are defined.
+distutils_src_test() {
+	if [[ "${EBUILD_PHASE}" != "test" ]]; then
+		die "${FUNCNAME}() can be used only in src_test() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_set_color_variables
+
+	local arguments setup_file
+
+	# DISTUTILS_SRC_TEST selects the test runner:
+	# "setup.py", "nosetests", "py.test" or "trial [arguments]".
+	if [[ "${DISTUTILS_SRC_TEST}" == "setup.py" ]]; then
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			# Run "setup.py test" once per enabled Python ABI.
+			distutils_testing() {
+				_distutils_hook pre
+
+				_distutils_prepare_global_options
+
+				for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+					_distutils_prepare_current_working_directory "${setup_file}"
+
+					echo ${_BOLD}PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") test "$@"${_NORMAL}
+					PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") test "$@" || return "$?"
+
+					_distutils_restore_current_working_directory "${setup_file}"
+				done
+
+				_distutils_hook post
+			}
+			python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_testing "$@"
+			unset -f distutils_testing
+		else
+			_distutils_prepare_global_options
+
+			for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+				_distutils_prepare_current_working_directory "${setup_file}"
+
+				echo ${_BOLD}PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" test "$@"${_NORMAL}
+				PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" test "$@" || die "Testing failed"
+
+				_distutils_restore_current_working_directory "${setup_file}"
+			done
+		fi
+	elif [[ "${DISTUTILS_SRC_TEST}" == "nosetests" ]]; then
+		_distutils_src_test_hook nosetests
+
+		# The -P argument is deliberately single-quoted: it is passed
+		# literally and presumably expanded later, once per Python ABI,
+		# by python_execute_nosetests() - TODO confirm.
+		python_execute_nosetests -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- "$@"
+	elif [[ "${DISTUTILS_SRC_TEST}" == "py.test" ]]; then
+		_distutils_src_test_hook py.test
+
+		python_execute_py.test -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- "$@"
+	# trial requires an argument, which is usually equal to "${PN}".
+	elif [[ "${DISTUTILS_SRC_TEST}" =~ ^trial(\ .*)?$ ]]; then
+		if [[ "${DISTUTILS_SRC_TEST}" == "trial "* ]]; then
+			arguments="${DISTUTILS_SRC_TEST#trial }"
+		else
+			arguments="${PN}"
+		fi
+
+		_distutils_src_test_hook trial
+
+		python_execute_trial -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- ${arguments} "$@"
+	else
+		die "'DISTUTILS_SRC_TEST' variable has unsupported value '${DISTUTILS_SRC_TEST}'"
+	fi
+}
+
+# @FUNCTION: distutils_src_install
+# @DESCRIPTION:
+# The distutils src_install function. This function is exported.
+# In ebuilds of packages supporting installation for multiple versions of Python, this function
+# calls distutils_src_install_pre_hook() and distutils_src_install_post_hook(), if they are defined.
+# It also installs some standard documentation files (AUTHORS, Change*, CHANGELOG, CONTRIBUTORS,
+# KNOWN_BUGS, MAINTAINERS, NEWS, README*, TODO).
+distutils_src_install() {
+	if [[ "${EBUILD_PHASE}" != "install" ]]; then
+		die "${FUNCNAME}() can be used only in src_install() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+	_python_set_color_variables
+
+	local default_docs doc line nspkg_pth_file nspkg_pth_files=() setup_file
+
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		# Install each ABI into its own intermediate image under
+		# ${T}/images/${PYTHON_ABI}, then merge the images below.
+		distutils_installation() {
+			_distutils_hook pre
+
+			_distutils_prepare_global_options
+
+			for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+				_distutils_prepare_current_working_directory "${setup_file}"
+
+				echo ${_BOLD}"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") install --no-compile --root="${T}/images/${PYTHON_ABI}" "$@"${_NORMAL}
+				"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") install --no-compile --root="${T}/images/${PYTHON_ABI}" "$@" || return "$?"
+
+				_distutils_restore_current_working_directory "${setup_file}"
+			done
+
+			_distutils_hook post
+		}
+		python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_installation "$@"
+		unset -f distutils_installation
+
+		python_merge_intermediate_installation_images "${T}/images"
+	else
+		# Mark the package to be rebuilt after a Python upgrade.
+		python_need_rebuild
+
+		_distutils_prepare_global_options
+
+		for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do
+			_distutils_prepare_current_working_directory "${setup_file}"
+
+			echo ${_BOLD}"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" install --root="${D}" --no-compile "$@"${_NORMAL}
+			"$(PYTHON)" "${setup_file#*|}" "${_DISTUTILS_GLOBAL_OPTIONS[@]}" install --root="${D}" --no-compile "$@" || die "Installation failed"
+
+			_distutils_restore_current_working_directory "${setup_file}"
+		done
+	fi
+
+	# Collect any *-nspkg.pth files (namespace-package hooks) installed into
+	# the image, NUL-delimited to be safe with unusual file names.
+	while read -d $'\0' -r nspkg_pth_file; do
+		nspkg_pth_files+=("${nspkg_pth_file}")
+	done < <(find "${ED}" -name "*-nspkg.pth" -type f -print0)
+
+	# Report the namespaces declared by the collected .pth files.
+	if [[ "${#nspkg_pth_files[@]}" -gt 0 ]]; then
+		einfo
+		einfo "Python namespaces:"
+		for nspkg_pth_file in "${nspkg_pth_files[@]}"; do
+			einfo "  '${nspkg_pth_file#${ED%/}}':"
+			while read -r line; do
+				# Extract the module name from types.ModuleType('...') in each line.
+				einfo "    $(echo "${line}" | sed -e "s/.*types\.ModuleType('\([^']\+\)').*/\1/")"
+			done < "${nspkg_pth_file}"
+			#if ! has "${EAPI:-0}" 0 1 2 3; then
+			#	rm -f "${nspkg_pth_file}" || die "Deletion of '${nspkg_pth_file}' failed"
+			#fi
+		done
+		einfo
+	fi
+
+	# Nothing may be installed into /usr/local.
+	if [[ -e "${ED}usr/local" ]]; then
+		die "Illegal installation into /usr/local"
+	fi
+
+	default_docs="AUTHORS Change* CHANGELOG CONTRIBUTORS KNOWN_BUGS MAINTAINERS NEWS README* TODO"
+
+	for doc in ${default_docs}; do
+		[[ -s "${doc}" ]] && dodoc "${doc}"
+	done
+
+	# EAPIs 0-3: plain dodoc; later EAPIs support recursive installation (-r).
+	if has "${EAPI:-0}" 0 1 2 3; then
+		if [[ -n "${DOCS}" ]]; then
+			dodoc ${DOCS} || die "dodoc failed"
+		fi
+	else
+		if [[ -n "${DOCS}" ]]; then
+			dodoc -r ${DOCS} || die "dodoc failed"
+		fi
+	fi
+
+	# Flag checked by distutils_pkg_postinst()/distutils_pkg_postrm().
+	DISTUTILS_SRC_INSTALL_EXECUTED="1"
+}
+
+# @FUNCTION: distutils_pkg_postinst
+# @DESCRIPTION:
+# The distutils pkg_postinst function. This function is exported.
+# When PYTHON_MODNAME variable is set, then this function calls python_mod_optimize() with modules
+# specified in PYTHON_MODNAME variable. Otherwise it calls python_mod_optimize() with module, whose
+# name is equal to name of current package, if this module exists.
+distutils_pkg_postinst() {
+	if [[ "${EBUILD_PHASE}" != "postinst" ]]; then
+		die "${FUNCNAME}() can be used only in pkg_postinst() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+
+	# Guard against use without distutils_src_install() having run.
+	if [[ -z "${DISTUTILS_SRC_INSTALL_EXECUTED}" ]]; then
+		die "${FUNCNAME}() called illegally"
+	fi
+
+	local pylibdir pymod
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# When the ebuild did not declare PYTHON_MODNAME, default to ${PN} if a
+	# module of that name is installed in any Python/Jython site-packages.
+	if [[ -z "$(declare -p PYTHON_MODNAME 2> /dev/null)" ]]; then
+		for pylibdir in "${EROOT}"usr/$(get_libdir)/python* "${EROOT}"usr/share/jython-*/Lib; do
+			if [[ -d "${pylibdir}/site-packages/${PN}" ]]; then
+				PYTHON_MODNAME="${PN}"
+			fi
+		done
+	fi
+
+	# Byte-compile the installed modules.
+	if [[ -n "${PYTHON_MODNAME}" ]]; then
+		if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis; then
+			python_mod_optimize ${PYTHON_MODNAME}
+		else
+			for pymod in ${PYTHON_MODNAME}; do
+				python_mod_optimize "$(python_get_sitedir)/${pymod}"
+			done
+		fi
+	fi
+}
+
+# @FUNCTION: distutils_pkg_postrm
+# @DESCRIPTION:
+# The distutils pkg_postrm function. This function is exported.
+# When PYTHON_MODNAME variable is set, then this function calls python_mod_cleanup() with modules
+# specified in PYTHON_MODNAME variable. Otherwise it calls python_mod_cleanup() with module, whose
+# name is equal to name of current package, if this module exists.
+distutils_pkg_postrm() {
+	if [[ "${EBUILD_PHASE}" != "postrm" ]]; then
+		die "${FUNCNAME}() can be used only in pkg_postrm() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+
+	# Guard against use without distutils_src_install() having run.
+	if [[ -z "${DISTUTILS_SRC_INSTALL_EXECUTED}" ]]; then
+		die "${FUNCNAME}() called illegally"
+	fi
+
+	local pylibdir pymod
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# When the ebuild did not declare PYTHON_MODNAME, default to ${PN} if a
+	# module of that name is installed in any Python/Jython site-packages.
+	if [[ -z "$(declare -p PYTHON_MODNAME 2> /dev/null)" ]]; then
+		for pylibdir in "${EROOT}"usr/$(get_libdir)/python* "${EROOT}"usr/share/jython-*/Lib; do
+			if [[ -d "${pylibdir}/site-packages/${PN}" ]]; then
+				PYTHON_MODNAME="${PN}"
+			fi
+		done
+	fi
+
+	# Remove stale byte-compiled files of the unmerged modules.
+	if [[ -n "${PYTHON_MODNAME}" ]]; then
+		if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis; then
+			python_mod_cleanup ${PYTHON_MODNAME}
+		else
+			for pymod in ${PYTHON_MODNAME}; do
+				for pylibdir in "${EROOT}"usr/$(get_libdir)/python*; do
+					if [[ -d "${pylibdir}/site-packages/${pymod}" ]]; then
+						python_mod_cleanup "${pylibdir#${EROOT%/}}/site-packages/${pymod}"
+					fi
+				done
+			done
+		fi
+	fi
+}
+
+# @FUNCTION: distutils_get_intermediate_installation_image
+# @DESCRIPTION:
+# Print path to intermediate installation image.
+#
+# This function can be used only in distutils_src_install_pre_hook() and distutils_src_install_post_hook().
+distutils_get_intermediate_installation_image() {
+	if [[ "${EBUILD_PHASE}" != "install" ]]; then
+		die "${FUNCNAME}() can be used only in src_install() phase"
+	fi
+
+	if ! _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+	fi
+
+	_python_check_python_pkg_setup_execution
+
+	# FUNCNAME[1] is the direct caller; only the install hooks may call this.
+	if [[ ! "${FUNCNAME[1]}" =~ ^distutils_src_install_(pre|post)_hook$ ]]; then
+		die "${FUNCNAME}() can be used only in distutils_src_install_pre_hook() and distutils_src_install_post_hook()"
+	fi
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# Matches the per-ABI --root used by distutils_src_install().
+	echo "${T}/images/${PYTHON_ABI}"
+}
diff --git a/eclass/elisp-common.eclass b/eclass/elisp-common.eclass
new file mode 100644
index 000000000000..15177886d6bf
--- /dev/null
+++ b/eclass/elisp-common.eclass
@@ -0,0 +1,415 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: elisp-common.eclass
+# @MAINTAINER:
+# Gentoo GNU Emacs project <emacs@gentoo.org>
+# @AUTHOR:
+# Matthew Kennedy <mkennedy@gentoo.org>
+# Jeremy Maitin-Shepard <jbms@attbi.com>
+# Mamoru Komachi <usata@gentoo.org>
+# Christian Faulhammer <fauli@gentoo.org>
+# Ulrich Müller <ulm@gentoo.org>
+# @BLURB: Emacs-related installation utilities
+# @DESCRIPTION:
+#
+# Usually you want to use this eclass for (optional) GNU Emacs support
+# of your package. This is NOT for XEmacs!
+#
+# Many of the steps here are sometimes done by the build system of your
+# package (especially compilation), so this is mainly for standalone
+# elisp files you gathered from somewhere else.
+#
+# When relying on the emacs USE flag, you need to add
+#
+# @CODE
+# emacs? ( virtual/emacs )
+# @CODE
+#
+# to your DEPEND/RDEPEND line and use the functions provided here to
+# bring the files to the correct locations.
+#
+# If your package requires a minimum Emacs version, e.g. Emacs 24, then
+# the dependency should be on >=virtual/emacs-24 instead. Because the
+# user can select the Emacs executable with eselect, you should also
+# make sure that the active Emacs version is sufficient. This can be
+# tested with function elisp-need-emacs(), which would typically be
+# called from pkg_setup(), as in the following example:
+#
+# @CODE
+# elisp-need-emacs 24 || die "Emacs version too low"
+# @CODE
+#
+# Please note that such tests should be limited to packages that are
+# known to fail with lower Emacs versions; the standard case is to
+# depend on virtual/emacs without version.
+#
+# @ROFF .SS
+# src_compile() usage:
+#
+# An elisp file is compiled by the elisp-compile() function defined
+# here and simply takes the source files as arguments. The case of
+# interdependent elisp files is also supported, since the current
+# directory is added to the load-path which makes sure that all files
+# are loadable.
+#
+# @CODE
+# elisp-compile *.el
+# @CODE
+#
+# Function elisp-make-autoload-file() can be used to generate a file
+# with autoload definitions for the lisp functions. It takes the output
+# file name (default: "${PN}-autoloads.el") and a list of directories
+# (default: working directory) as its arguments. Use of this function
+# requires that the elisp source files contain magic ";;;###autoload"
+# comments. See the Emacs Lisp Reference Manual (node "Autoload") for
+# a detailed explanation.
+#
+# @ROFF .SS
+# src_install() usage:
+#
+# The resulting compiled files (.elc) should be put in a subdirectory of
+# /usr/share/emacs/site-lisp/ which is named after the first argument
+# of elisp-install(). The following parameters are the files to be put
+# in that directory. Usually the subdirectory should be ${PN}, you can
+# choose something else, but remember to tell elisp-site-file-install()
+# (see below) the change, as it defaults to ${PN}.
+#
+# @CODE
+# elisp-install ${PN} *.el *.elc
+# @CODE
+#
+# To let the Emacs support be activated by Emacs on startup, you need
+# to provide a site file (shipped in ${FILESDIR}) which contains the
+# startup code (have a look in the documentation of your software).
+# Normally this would look like this:
+#
+# @CODE
+# (add-to-list 'load-path "@SITELISP@")
+# (add-to-list 'auto-mode-alist '("\\.csv\\'" . csv-mode))
+# (autoload 'csv-mode "csv-mode" "Major mode for csv files." t)
+# @CODE
+#
+# If your Emacs support files are installed in a subdirectory of
+# /usr/share/emacs/site-lisp/ (which is strongly recommended), you need
+# to extend Emacs' load-path as shown in the first non-comment line.
+# The elisp-site-file-install() function of this eclass will replace
+# "@SITELISP@" and "@SITEETC@" by the actual paths.
+#
+# The next line tells Emacs to load the mode opening a file ending
+# with ".csv" and load functions depending on the context and needed
+# features. Be careful though. Commands such as "load-library" or "require"
+# bloat the editor as they are loaded on every startup. When having
+# many Emacs support files, users may be annoyed by the start-up time.
+# Also avoid keybindings as they might interfere with the user's
+# settings. Give a hint in pkg_postinst(), which should be enough.
+# The guiding principle is that emerging your package should not by
+# itself cause a change of standard Emacs behaviour.
+#
+# The naming scheme for this site-init file matches the shell pattern
+# "[1-8][0-9]*-gentoo*.el", where the two digits at the beginning define
+# the loading order (numbers below 10 or above 89 are reserved for
+# internal use). So if your initialisation depends on another Emacs
+# package, your site file's number must be higher! If there are no such
+# interdependencies then the number should be 50. Otherwise, numbers
+# divisible by 10 are preferred.
+#
+# Best practice is to define a SITEFILE variable in the global scope of
+# your ebuild (e.g., right after S or RDEPEND):
+#
+# @CODE
+# SITEFILE="50${PN}-gentoo.el"
+# @CODE
+#
+# Which is then installed by
+#
+# @CODE
+# elisp-site-file-install "${FILESDIR}/${SITEFILE}"
+# @CODE
+#
+# in src_install(). Any characters after the "-gentoo" part and before
+# the extension will be stripped from the destination file's name.
+# For example, a file "50${PN}-gentoo-${PV}.el" will be installed as
+# "50${PN}-gentoo.el". If your subdirectory is not named ${PN}, give
+# the differing name as second argument.
+#
+# @ROFF .SS
+# pkg_postinst() / pkg_postrm() usage:
+#
+# After that you need to recreate the start-up file of Emacs after
+# emerging and unmerging by using
+#
+# @CODE
+# pkg_postinst() {
+# elisp-site-regen
+# }
+#
+# pkg_postrm() {
+# elisp-site-regen
+# }
+# @CODE
+#
+# When having optional Emacs support, you should prepend "use emacs &&"
+# to above calls of elisp-site-regen().
+# Don't use "has_version virtual/emacs"! When unmerging the state of
+# the emacs USE flag is taken from the package database and not from the
+# environment, so it is no problem when you unset USE=emacs between
+# merge and unmerge of a package.
+
+# @ECLASS-VARIABLE: SITELISP
+# @DESCRIPTION:
+# Directory where packages install Emacs Lisp files.
+SITELISP=/usr/share/emacs/site-lisp
+
+# @ECLASS-VARIABLE: SITEETC
+# @DESCRIPTION:
+# Directory where packages install miscellaneous (not Lisp) files.
+SITEETC=/usr/share/emacs/etc
+
+# @ECLASS-VARIABLE: EMACS
+# @DESCRIPTION:
+# Path of Emacs executable.
+EMACS=${EPREFIX}/usr/bin/emacs
+
+# @ECLASS-VARIABLE: EMACSFLAGS
+# @DESCRIPTION:
+# Flags for executing Emacs in batch mode.
+# These work for Emacs versions 18-24, so don't change them.
+EMACSFLAGS="-batch -q --no-site-file"
+
+# @ECLASS-VARIABLE: BYTECOMPFLAGS
+# @DESCRIPTION:
+# Emacs flags used for byte-compilation in elisp-compile().
+BYTECOMPFLAGS="-L ."
+
+# @FUNCTION: elisp-emacs-version
+# @RETURN: exit status of Emacs
+# @DESCRIPTION:
+# Output version of currently active Emacs.
+# Prints the version string on stdout; on failure an eerror message is
+# shown and a non-zero status is returned.
+
+elisp-emacs-version() {
+	local version ret
+	# The following will work for at least versions 18-24.
+	echo "(princ emacs-version)" >"${T}"/emacs-version.el
+	version=$(
+		# EMACS could be a microemacs variant that ignores the -batch
+		# option and would therefore hang, waiting for user interaction.
+		# Redirecting stdin and unsetting TERM and DISPLAY will cause
+		# most of them to exit with an error.
+		unset TERM DISPLAY
+		${EMACS} ${EMACSFLAGS} -l "${T}"/emacs-version.el </dev/null
+	)
+	ret=$?
+	rm -f "${T}"/emacs-version.el
+	if [[ ${ret} -ne 0 ]]; then
+		eerror "elisp-emacs-version: Failed to run ${EMACS}"
+		return ${ret}
+	fi
+	if [[ -z ${version} ]]; then
+		eerror "elisp-emacs-version: Could not determine Emacs version"
+		return 1
+	fi
+	echo "${version}"
+}
+
+# @FUNCTION: elisp-need-emacs
+# @USAGE: <version>
+# @RETURN: 0 if true, 1 if false, 2 if trouble
+# @DESCRIPTION:
+# Test if the eselected Emacs version is at least the major version
+# of GNU Emacs specified as argument.
+
+elisp-need-emacs() {
+	local need_emacs=$1 have_emacs
+	have_emacs=$(elisp-emacs-version) || return 2
+	einfo "Emacs version: ${have_emacs}"
+	# XEmacs reports version strings containing "XEmacs" or "Lucid".
+	if [[ ${have_emacs} =~ XEmacs|Lucid ]]; then
+		eerror "This package needs GNU Emacs."
+		return 1
+	fi
+	# Compare major versions only: strip everything after the first dot.
+	if ! [[ ${have_emacs%%.*} -ge ${need_emacs%%.*} ]]; then
+		eerror "This package needs at least Emacs ${need_emacs%%.*}."
+		eerror "Use \"eselect emacs\" to select the active version."
+		return 1
+	fi
+	return 0
+}
+
+# @FUNCTION: elisp-compile
+# @USAGE: <list of elisp files>
+# @DESCRIPTION:
+# Byte-compile Emacs Lisp files.
+#
+# This function uses GNU Emacs to byte-compile all ".el" specified by
+# its arguments. The resulting byte-code (".elc") files are placed in
+# the same directory as their corresponding source file.
+#
+# The current directory is added to the load-path. This will ensure
+# that interdependent Emacs Lisp files are visible between themselves,
+# in case they require or load one another.
+
+elisp-compile() {
+	ebegin "Compiling GNU Emacs Elisp files"
+	# BYTECOMPFLAGS ("-L .") puts the current directory on the load-path.
+	${EMACS} ${EMACSFLAGS} ${BYTECOMPFLAGS} -f batch-byte-compile "$@"
+	eend $? "elisp-compile: batch-byte-compile failed" || die
+}
+
+# @FUNCTION: elisp-make-autoload-file
+# @USAGE: [output file] [list of directories]
+# @DESCRIPTION:
+# Generate a file with autoload definitions for the lisp functions.
+
+elisp-make-autoload-file() {
+	# ${page} is a form-feed section separator. ${null} expands to nothing;
+	# it splits the literal "Local Variables:" token, presumably so this
+	# eclass file itself is not parsed as containing a local-variable
+	# block - TODO confirm.
+	local f="${1:-${PN}-autoloads.el}" null="" page=$'\f'
+	shift
+	ebegin "Generating autoload file for GNU Emacs"
+
+	# Write the template header/footer of the autoload file.
+	cat >"${f}" <<-EOF
+	;;; ${f##*/} --- autoloads for ${PN}
+
+	;;; Commentary:
+	;; Automatically generated by elisp-common.eclass
+	;; DO NOT EDIT THIS FILE
+
+	;;; Code:
+	${page}
+	;; Local ${null}Variables:
+	;; version-control: never
+	;; no-byte-compile: t
+	;; no-update-autoloads: t
+	;; End:
+
+	;;; ${f##*/} ends here
+	EOF
+
+	# batch-update-autoloads scans the given directories (default ".") for
+	# ;;;###autoload cookies and writes the definitions into ${f}.
+	${EMACS} ${EMACSFLAGS} \
+		--eval "(setq make-backup-files nil)" \
+		--eval "(setq generated-autoload-file (expand-file-name \"${f}\"))" \
+		-f batch-update-autoloads "${@-.}"
+
+	eend $? "elisp-make-autoload-file: batch-update-autoloads failed" || die
+}
+
+# @FUNCTION: elisp-install
+# @USAGE: <subdirectory> <list of files>
+# @DESCRIPTION:
+# Install files in SITELISP directory.
+# $1 - subdirectory under ${SITELISP}; remaining arguments - files to install.
+
+elisp-install() {
+	local subdir="$1"
+	shift
+	ebegin "Installing Elisp files for GNU Emacs support"
+	( # subshell to avoid pollution of calling environment
+		insinto "${SITELISP}/${subdir}"
+		doins "$@"
+	)
+	eend $? "elisp-install: doins failed" || die
+}
+
+# @FUNCTION: elisp-site-file-install
+# @USAGE: <site-init file> [subdirectory]
+# @DESCRIPTION:
+# Install Emacs site-init file in SITELISP directory. Automatically
+# inserts a standard comment header with the name of the package (unless
+# it is already present). Tokens @SITELISP@ and @SITEETC@ are replaced
+# by the path to the package's subdirectory in SITELISP and SITEETC,
+# respectively.
+
+elisp-site-file-install() {
+	local sf="${1##*/}" my_pn="${2:-${PN}}" ret
+	local header=";;; ${PN} site-lisp configuration"
+
+	# Warn about non-conforming file names, then strip anything between
+	# "-gentoo" and ".el" (e.g. "50foo-gentoo-1.0.el" -> "50foo-gentoo.el").
+	[[ ${sf} == [0-9][0-9]*-gentoo*.el ]] \
+		|| ewarn "elisp-site-file-install: bad name of site-init file"
+	[[ ${sf%-gentoo*.el} != "${sf}" ]] && sf="${sf%-gentoo*.el}-gentoo.el"
+	sf="${T}/${sf}"
+	ebegin "Installing site initialisation file for GNU Emacs"
+	[[ $1 = "${sf}" ]] || cp "$1" "${sf}"
+	# First sed expression: prepend ${header} unless an early comment line
+	# already mentions ${PN} (case-insensitively); the remaining expressions
+	# substitute the @SITELISP@ and @SITEETC@ placeholder tokens.
+	sed -i -e "1{:x;/^\$/{n;bx;};/^;.*${PN}/I!s:^:${header}\n\n:;1s:^:\n:;}" \
+		-e "s:@SITELISP@:${EPREFIX}${SITELISP}/${my_pn}:g" \
+		-e "s:@SITEETC@:${EPREFIX}${SITEETC}/${my_pn}:g;\$q" "${sf}"
+	( # subshell to avoid pollution of calling environment
+		insinto "${SITELISP}/site-gentoo.d"
+		doins "${sf}"
+	)
+	ret=$?
+	rm -f "${sf}"
+	eend ${ret} "elisp-site-file-install: doins failed" || die
+}
+
+# @FUNCTION: elisp-site-regen
+# @DESCRIPTION:
+# Regenerate the site-gentoo.el file, based on packages' site
+# initialisation files in the /usr/share/emacs/site-lisp/site-gentoo.d/
+# directory.
+
+elisp-site-regen() {
+	local sitelisp=${ROOT}${EPREFIX}${SITELISP}
+	local sf i ret=0 null="" page=$'\f'
+	local -a sflist
+
+	# Do not create the file during unmerge if it does not already exist.
+	if [[ ${EBUILD_PHASE} = *rm && ! -e ${sitelisp}/site-gentoo.el ]]; then
+		ewarn "Refusing to create site-gentoo.el in ${EBUILD_PHASE} phase."
+		return 0
+	fi
+
+	[[ -d ${sitelisp} ]] \
+		|| die "elisp-site-regen: Directory ${sitelisp} does not exist"
+
+	[[ -d ${T} ]] \
+		|| die "elisp-site-regen: Temporary directory ${T} does not exist"
+
+	ebegin "Regenerating site-gentoo.el for GNU Emacs (${EBUILD_PHASE})"
+
+	# Collect all readable site-init files, ordered by their numeric prefix.
+	for sf in "${sitelisp}"/site-gentoo.d/[0-9][0-9]*.el; do
+		[[ -r ${sf} ]] && sflist+=("${sf}")
+	done
+
+	# Assemble the new file in ${T} first; it replaces the old one only below.
+	cat <<-EOF >"${T}"/site-gentoo.el || ret=$?
+	;;; site-gentoo.el --- site initialisation for Gentoo-installed packages
+
+	;;; Commentary:
+	;; Automatically generated by elisp-common.eclass
+	;; DO NOT EDIT THIS FILE
+
+	;;; Code:
+	EOF
+	# Use sed instead of cat here, since files may miss a trailing newline.
+	sed '$q' "${sflist[@]}" </dev/null >>"${T}"/site-gentoo.el || ret=$?
+	cat <<-EOF >>"${T}"/site-gentoo.el || ret=$?
+
+	${page}
+	(provide 'site-gentoo)
+
+	;; Local ${null}Variables:
+	;; no-byte-compile: t
+	;; buffer-read-only: t
+	;; End:
+
+	;;; site-gentoo.el ends here
+	EOF
+
+	if [[ ${ret} -ne 0 ]]; then
+		eend ${ret} "elisp-site-regen: Writing site-gentoo.el failed."
+		die
+	elif cmp -s "${sitelisp}"/site-gentoo.el "${T}"/site-gentoo.el; then
+		# This prevents outputting unnecessary text when there
+		# was actually no change.
+		# A case is a remerge where we have doubled output.
+		rm -f "${T}"/site-gentoo.el
+		eend
+		einfo "... no changes."
+	else
+		mv "${T}"/site-gentoo.el "${sitelisp}"/site-gentoo.el
+		eend $? "elisp-site-regen: Replacing site-gentoo.el failed" || die
+		case ${#sflist[@]} in
+			0) [[ ${PN} = emacs-common-gentoo ]] \
+				|| ewarn "... Huh? No site initialisation files found." ;;
+			1) einfo "... ${#sflist[@]} site initialisation file included." ;;
+			*) einfo "... ${#sflist[@]} site initialisation files included." ;;
+		esac
+	fi
+
+	return 0
+}
diff --git a/eclass/elisp.eclass b/eclass/elisp.eclass
new file mode 100644
index 000000000000..6f1a6afdb140
--- /dev/null
+++ b/eclass/elisp.eclass
@@ -0,0 +1,204 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: elisp.eclass
+# @MAINTAINER:
+# Gentoo GNU Emacs project <emacs@gentoo.org>
+# @AUTHOR:
+# Matthew Kennedy <mkennedy@gentoo.org>
+# Jeremy Maitin-Shepard <jbms@attbi.com>
+# Christian Faulhammer <fauli@gentoo.org>
+# Ulrich Müller <ulm@gentoo.org>
+# @BLURB: Eclass for Emacs Lisp packages
+# @DESCRIPTION:
+#
+# This eclass is designed to install elisp files of Emacs related
+# packages into the site-lisp directory. The majority of elisp packages
+# will only need to define the standard ebuild variables (like SRC_URI)
+# and optionally SITEFILE for successful installation.
+#
+# Emacs support for other than pure elisp packages is handled by
+# elisp-common.eclass where you won't have a dependency on Emacs itself.
+# All elisp-* functions are documented there.
+#
+# If the package's source is a single (in whatever way) compressed elisp
+# file with the file name ${P}.el, then this eclass will move ${P}.el to
+# ${PN}.el in src_unpack().
+
+# @ECLASS-VARIABLE: NEED_EMACS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If you need anything different from Emacs 23, use the NEED_EMACS
+# variable before inheriting elisp.eclass. Set it to the major version
+# your package uses and the dependency will be adjusted.
+
+# @ECLASS-VARIABLE: ELISP_PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space separated list of patches to apply after unpacking the sources.
+# Patch files are searched for in the current working dir, WORKDIR, and
+# FILESDIR.
+
+# @ECLASS-VARIABLE: ELISP_REMOVE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space separated list of files to remove after unpacking the sources.
+
+# @ECLASS-VARIABLE: SITEFILE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Name of package's site-init file. The filename must match the shell
+# pattern "[1-8][0-9]*-gentoo.el"; numbers below 10 and above 89 are
+# reserved for internal use. "50${PN}-gentoo.el" is a reasonable choice
+# in most cases.
+
+# @ECLASS-VARIABLE: ELISP_TEXINFO
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space separated list of Texinfo sources. Respective GNU Info files
+# will be generated in src_compile() and installed in src_install().
+
+# @ECLASS-VARIABLE: DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# DOCS="blah.txt ChangeLog" is automatically used to install the given
+# files by dodoc in src_install().
+
+inherit elisp-common eutils
+
+case "${EAPI:-0}" in
+ 0|1) EXPORT_FUNCTIONS src_{unpack,compile,install} \
+ pkg_{setup,postinst,postrm} ;;
+ *) EXPORT_FUNCTIONS src_{unpack,prepare,configure,compile,install} \
+ pkg_{setup,postinst,postrm} ;;
+esac
+
+DEPEND=">=virtual/emacs-${NEED_EMACS:-23}"
+RDEPEND="${DEPEND}"
+
+# @FUNCTION: elisp_pkg_setup
+# @DESCRIPTION:
+# Test if the eselected Emacs version is sufficient to fulfil the major
+# version requirement of the NEED_EMACS variable.
+
+elisp_pkg_setup() {
+	elisp-need-emacs "${NEED_EMACS:-23}"
+	# elisp-need-emacs returns: 0 = sufficient, 1 = too low, 2 = undeterminable.
+	case $? in
+		0) ;;
+		1) die "Emacs version too low" ;;
+		*) die "Could not determine Emacs version" ;;
+	esac
+}
+
+# @FUNCTION: elisp_src_unpack
+# @DESCRIPTION:
+# Unpack the sources; also handle the case of a single *.el file in
+# WORKDIR for packages distributed that way. For EAPIs without
+# src_prepare, call elisp_src_prepare.
+
+elisp_src_unpack() {
+	[[ -n ${A} ]] && unpack ${A}
+	if [[ -f ${P}.el ]]; then
+		# the "simple elisp" case with a single *.el file in WORKDIR
+		mv ${P}.el ${PN}.el || die
+		[[ -d ${S} ]] || S=${WORKDIR}
+	fi
+
+	# EAPIs 0 and 1 have no src_prepare phase, so run it from here.
+	case "${EAPI:-0}" in
+		0|1) [[ -d ${S} ]] && cd "${S}"
+			elisp_src_prepare ;;
+	esac
+}
+
+# @FUNCTION: elisp_src_prepare
+# @DESCRIPTION:
+# Apply any patches listed in ELISP_PATCHES. Patch files are searched
+# for in the current working dir, WORKDIR, and FILESDIR.
+
+elisp_src_prepare() {
+	local patch
+	# Search order for each patch: CWD, then WORKDIR, then FILESDIR.
+	for patch in ${ELISP_PATCHES}; do
+		if [[ -f ${patch} ]]; then
+			epatch "${patch}"
+		elif [[ -f ${WORKDIR}/${patch} ]]; then
+			epatch "${WORKDIR}/${patch}"
+		elif [[ -f ${FILESDIR}/${patch} ]]; then
+			epatch "${FILESDIR}/${patch}"
+		else
+			die "Cannot find ${patch}"
+		fi
+	done
+
+	# apply any user patches
+	epatch_user
+
+	# Remove files the ebuild declared unwanted (see ELISP_REMOVE).
+	if [[ -n ${ELISP_REMOVE} ]]; then
+		rm ${ELISP_REMOVE} || die
+	fi
+}
+
+# @FUNCTION: elisp_src_configure
+# @DESCRIPTION:
+# Do nothing, because Emacs packages seldom bring a full build system.
+
+elisp_src_configure() { :; }
+
+# @FUNCTION: elisp_src_compile
+# @DESCRIPTION:
+# Call elisp-compile to byte-compile all Emacs Lisp (*.el) files.
+# If ELISP_TEXINFO lists any Texinfo sources, call makeinfo to generate
+# GNU Info files from them.
+
+elisp_src_compile() {
+	# Byte-compile every Emacs Lisp source in the current directory.
+	elisp-compile *.el
+	# Generate GNU Info documentation from any listed Texinfo sources.
+	if [[ -n ${ELISP_TEXINFO} ]]; then
+		makeinfo ${ELISP_TEXINFO} || die
+	fi
+}
+
+# @FUNCTION: elisp_src_install
+# @DESCRIPTION:
+# Call elisp-install to install all Emacs Lisp (*.el and *.elc) files.
+# If the SITEFILE variable specifies a site-init file, install it with
+# elisp-site-file-install. Also install any GNU Info files listed in
+# ELISP_TEXINFO and documentation listed in the DOCS variable.
+
+elisp_src_install() {
+	elisp-install ${PN} *.el *.elc
+	if [[ -n ${SITEFILE} ]]; then
+		elisp-site-file-install "${FILESDIR}/${SITEFILE}"
+	fi
+	if [[ -n ${ELISP_TEXINFO} ]]; then
+		# Strip directory components, then map the Texinfo names to the
+		# *.info* files generated in src_compile() and install them.
+		set -- ${ELISP_TEXINFO}
+		set -- ${@##*/}
+		doinfo ${@/%.*/.info*} || die
+	fi
+	if [[ -n ${DOCS} ]]; then
+		dodoc ${DOCS} || die
+	fi
+	# Create README.gentoo documentation if that helper is available.
+	if declare -f readme.gentoo_create_doc >/dev/null; then
+		readme.gentoo_create_doc
+	fi
+}
+
+# @FUNCTION: elisp_pkg_postinst
+# @DESCRIPTION:
+# Call elisp-site-regen, in order to collect the site initialisation for
+# all installed Emacs Lisp packages in the site-gentoo.el file.
+
+elisp_pkg_postinst() {
+	elisp-site-regen
+	# Print the optional README.gentoo message if that helper is available.
+	if declare -f readme.gentoo_print_elog >/dev/null; then
+		readme.gentoo_print_elog
+	fi
+}
+
+# @FUNCTION: elisp_pkg_postrm
+# @DESCRIPTION:
+# Call elisp-site-regen, in order to collect the site initialisation for
+# all installed Emacs Lisp packages in the site-gentoo.el file.
+
+elisp_pkg_postrm() {
+	# Drop this package's entry from site-gentoo.el after unmerge.
+	elisp-site-regen
+}
diff --git a/eclass/embassy.eclass b/eclass/embassy.eclass
new file mode 100644
index 000000000000..a8f8e85180fa
--- /dev/null
+++ b/eclass/embassy.eclass
@@ -0,0 +1,92 @@
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Id$

# Author Olivier Fisette <ribosome@gentoo.org>

# This eclass is used to install EMBASSY programs (EMBOSS add-ons).

# The inheriting ebuild should provide a "DESCRIPTION", "KEYWORDS" and, if
# necessary, add "(R|P)DEPEND"encies. Additionally, the inheriting ebuild's
# name must begin with "embassy-". Also, before inheriting, the ebuild should
# specify what version of EMBOSS is required by setting EBOV.

inherit eutils multilib

# The EMBASSY package name, retrieved from the inheriting ebuild's name
# by stripping the 8-character "embassy-" prefix.
EN=${PN:8}
# The full name and version of the EMBASSY package (excluding the Gentoo
# revision number); upstream distributes tarballs under the upper-case name.
EF="$(echo ${EN} | tr "[:lower:]" "[:upper:]")-${PV}"

HOMEPAGE="http://emboss.sourceforge.net/"
LICENSE="LGPL-2 GPL-2"
# Both the matching EMBOSS release and the add-on tarball are required,
# since the add-on is built inside the EMBOSS source tree.
SRC_URI="ftp://emboss.open-bio.org/pub/EMBOSS/EMBOSS-${EBOV}.tar.gz
	ftp://emboss.open-bio.org/pub/EMBOSS/${EF}.tar.gz"

SLOT="0"
IUSE="X png"

DEPEND="=sci-biology/emboss-${EBOV}*
	!<sci-biology/emboss-${EBOV}
	X? ( x11-libs/libX11 )
	png? ( sys-libs/zlib
		media-libs/libpng
		>=media-libs/gd-1.8
	)"

# Build directory: the add-on grafted into the EMBOSS tree by src_unpack.
S=${WORKDIR}/EMBOSS-${EBOV}/embassy/${EF}
+
# Unpack EMBOSS plus the EMBASSY add-on, graft the add-on into the
# EMBOSS tree, and copy the libtool archives of the already-installed
# EMBOSS libraries so the add-on links against them.
# Fix: the original left mkdir/mv/cp/cd unchecked, so a failed graft
# (e.g. missing .la file) went unnoticed until a confusing later error.
embassy_src_unpack() {
	unpack ${A}
	mkdir EMBOSS-${EBOV}/embassy || die
	mv ${EF} EMBOSS-${EBOV}/embassy/ || die
	cp /usr/$(get_libdir)/libplplot.la EMBOSS-${EBOV}/plplot/ || die
	cp /usr/$(get_libdir)/libeplplot.la EMBOSS-${EBOV}/plplot/ || die
	cp /usr/$(get_libdir)/libajax.la EMBOSS-${EBOV}/ajax/ || die
	cp /usr/$(get_libdir)/libajaxg.la EMBOSS-${EBOV}/ajax/ || die
	cp /usr/$(get_libdir)/libnucleus.la EMBOSS-${EBOV}/nucleus/ || die
	# Apply a package-specific patch when one is shipped in FILESDIR.
	if [ -e "${FILESDIR}"/${PF}.patch ]; then
		cd "${S}" || die
		epatch "${FILESDIR}"/${PF}.patch
	fi
}
+
# Configure and build the EMBASSY package inside the EMBOSS tree,
# honouring the X and png USE flags via --without-x/--without-pngdriver.
# NOTE(review): configure is invoked directly (not via econf) and the
# paths are rooted in ${ROOT} -- presumably intentional for this legacy
# eclass; confirm before changing.
embassy_src_compile() {
	local PREFIX="${ROOT}/usr"
	local EXTRA_CONF
	! use X && EXTRA_CONF="${EXTRA_CONF} --without-x"
	! use png && EXTRA_CONF="${EXTRA_CONF} --without-pngdriver"
	./configure \
		"--bindir=${PREFIX}/bin" \
		"--sbindir=${PREFIX}/sbin" \
		"--libexecdir=${PREFIX}/libexec" \
		"--sysconfdir=${ROOT}/etc" \
		"--sharedstatedir=${ROOT}/var" \
		"--localstatedir=${ROOT}/var" \
		"--libdir=${PREFIX}/$(get_libdir)" \
		"--includedir=${PREFIX}/include" \
		"--datarootdir=${PREFIX}/share" \
		"--datadir=${PREFIX}/share" \
		"--infodir=${PREFIX}/share/info" \
		"--localedir=${PREFIX}/share/locale" \
		"--mandir=${PREFIX}/share/man" \
		${EXTRA_CONF} || die
	emake || die "Before reporting this error as a bug, please make sure you compiled
	EMBOSS and the EMBASSY packages with the same \"USE\" flags. Failure to
	do so may prevent the compilation of some EMBASSY packages, or cause
	runtime problems with some EMBASSY programs. For example, if you
	compile EMBOSS with \"png\" support and then try to build DOMAINATRIX
	without \"png\" support, compilation will fail when linking the binaries."
}
+
# Install the package, then relocate data the build system puts under
# /usr/local/share to /usr/share.
# Fix: the original left the mv/rmdir relocation unchecked, so a failed
# move silently produced a broken image.
embassy_src_install() {
	emake DESTDIR="${D}" install || die "Install failed"
	dodoc AUTHORS ChangeLog NEWS README
	dodir /usr/share
	mv "${D}"/usr/local/share/* "${D}"/usr/share/ || die
	rmdir "${D}"/usr/local/share || die
	rmdir "${D}"/usr/local || die
}
+
# Wire the phase functions defined above into inheriting ebuilds.
EXPORT_FUNCTIONS src_unpack src_compile src_install
diff --git a/eclass/emboss-r1.eclass b/eclass/emboss-r1.eclass
new file mode 100644
index 000000000000..3a494b9b0686
--- /dev/null
+++ b/eclass/emboss-r1.eclass
@@ -0,0 +1,133 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: emboss-r1.eclass
+# @MAINTAINER:
+# sci-biology@gentoo.org
+# jlec@gentoo.org
+# ted.tanberry@gmail.com
+# @AUTHOR:
+# Original author: Author Olivier Fisette <ofisette@gmail.com>
+# Next gen author: Justin Lecher <jlec@gentoo.org>
+# Next gen author: Ted Tanberry <ted.tanberry@gmail.com>
+# @BLURB: Use this to easy install EMBOSS and EMBASSY programs (EMBOSS add-ons).
+# @DESCRIPTION:
+# The inheriting ebuild must set at least EAPI=5 and provide EBO_DESCRIPTION before the inherit line.
+# KEYWORDS should be set. Additionally "(R|P)DEPEND"encies and other standard
+# ebuild variables can be extended (FOO+=" bar").
+#
+# Example:
+#
+# EAPI="5"
+#
+# EBO_DESCRIPTION="applications from the CBS group"
+#
+# inherit emboss-r1
+
+# @ECLASS-VARIABLE: EBO_DESCRIPTION
+# @DESCRIPTION:
+# Should be set. Completes the generic description of the embassy module as follows:
+#
+# EMBOSS integrated version of ${EBO_DESCRIPTION},
+# e.g.
+# "EMBOSS integrated version of applications from the CBS group"
+#
+# Defaults to the upstream name of the module.
+
+# @ECLASS-VARIABLE: EBO_EXTRA_ECONF
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Extra config options passed to econf.
+
# Only EAPI 5 is supported by this eclass.
case ${EAPI:-0} in
	5) ;;
	*) die "this eclass doesn't support < EAPI 5" ;;
esac

inherit autotools-utils eutils flag-o-matic

HOMEPAGE="http://emboss.sourceforge.net/"
LICENSE="LGPL-2 GPL-2"

SLOT="0"
IUSE="mysql pdf png postgres static-libs X"

DEPEND="
	dev-libs/expat
	dev-libs/libpcre:3
	sci-libs/plplot
	sys-libs/zlib
	mysql? ( virtual/mysql )
	pdf? ( media-libs/libharu )
	png? ( media-libs/gd[png] )
	postgres? ( dev-db/postgresql:= )
	X? ( x11-libs/libXt )"
RDEPEND="${DEPEND}"

# Extra setup for EMBASSY add-on packages (ebuilds named embassy-*).
if [[ ${PN} == embassy-* ]]; then
	EMBASSY_PACKAGE=yes
	# The EMBASSY package name: strip the 8-character "embassy-" prefix.
	EN=${PN:8}
	# The full name and version of the EMBASSY package (excluding the Gentoo
	# revision number); upstream tarballs use the upper-case form.
	EF=$(echo ${EN} | tr "[:lower:]" "[:upper:]")-${PV}
	: ${EBO_DESCRIPTION:=${EN}}
	DESCRIPTION="EMBOSS integrated version of ${EBO_DESCRIPTION}"
	SRC_URI="ftp://emboss.open-bio.org/pub/EMBOSS/${EF}.tar.gz -> embassy-${EN}-${PVR}.tar.gz"
	# Add-ons must be built against an EMBOSS with matching USE flags.
	DEPEND+=" >=sci-biology/emboss-6.6.0[mysql=,pdf=,png=,postgres=,static-libs=,X=]"

	S="${WORKDIR}"/${EF}
fi
+
# @FUNCTION: emboss-r1_src_prepare
# @DESCRIPTION:
# Does the following things
#
# 1. Renames configure.in to configure.ac, if present
# 2. Applies ${PATCHES[@]} and runs autotools via autotools-utils.eclass
#

emboss-r1_src_prepare() {
	# Modern autotools insist on the configure.ac spelling.
	[[ -e configure.in ]] && { mv configure.{in,ac} || die; }

	autotools-utils_src_prepare
}
+
# @FUNCTION: emboss-r1_src_configure
# @DESCRIPTION:
# runs econf (via autotools-utils) with the following options.
#
# $(use_with X x)
# $(use_with png pngdriver)
# $(use_with pdf hpdf)
# $(use_with mysql mysql)
# $(use_with postgres postgresql)
# $(use_enable static-libs static)
# --enable-large
# --without-java
# --enable-systemlibs
# ${EBO_EXTRA_ECONF}

emboss-r1_src_configure() {
	local myeconfargs=(
		$(use_with X x)
		$(use_with png pngdriver "${EPREFIX}/usr")
		$(use_with pdf hpdf "${EPREFIX}/usr")
		$(use_with mysql mysql "${EPREFIX}/usr/bin/mysql_config")
		$(use_with postgres postgresql "${EPREFIX}/usr/bin/pg_config")
		--enable-large
		--without-java
		--enable-systemlibs
		${EBO_EXTRA_ECONF}
	)

	# EMBASSY add-ons compile against the installed EMBOSS headers.
	[[ ${EMBASSY_PACKAGE} == yes ]] && \
		append-cppflags "-I${EPREFIX}/usr/include/emboss"

	autotools-utils_src_configure
}
+
# Wire the phase functions defined above into inheriting ebuilds.
EXPORT_FUNCTIONS src_prepare src_configure
diff --git a/eclass/emul-linux-x86.eclass b/eclass/emul-linux-x86.eclass
new file mode 100644
index 000000000000..80f6faf0129c
--- /dev/null
+++ b/eclass/emul-linux-x86.eclass
@@ -0,0 +1,96 @@
# Copyright 1999-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Id$

#
# Original Author: Mike Doty <kingtaco@gentoo.org>
# Adapted from emul-libs.eclass
# Purpose: Providing a template for the app-emulation/emul-linux-* packages
#

inherit eutils multilib

case "${EAPI:-0}" in
	3|4|5)
		EXPORT_FUNCTIONS src_prepare src_install
		;;
	*) die "EAPI=${EAPI} is not supported" ;;
esac

DESCRIPTION="Provides precompiled 32bit libraries"
#HOMEPAGE="http://amd64.gentoo.org/emul/content.xml"
HOMEPAGE="http://dev.gentoo.org/~pacho/emul.html"
SRC_URI="http://dev.gentoo.org/~pacho/emul/${P}.tar.xz"

IUSE="+development"

# These packages ship prebuilt binaries: never strip them, and exempt
# them from the usual QA checks on prebuilt objects.
RESTRICT="strip"
S=${WORKDIR}

QA_PREBUILT="*"

SLOT="0"

DEPEND=">=sys-apps/findutils-4.2.26"
RDEPEND=""
+
emul-linux-x86_src_prepare() {
	# Prune everything except shared objects, versioned libraries and
	# headers.  Paths matching the ALLOWED regex are kept: env.d entries
	# always, plus pkgconfig files when USE=development is enabled.
	ALLOWED=${ALLOWED:-^${S}/etc/env.d}
	use development && ALLOWED="${ALLOWED}|/usr/lib32/pkgconfig"
	find "${S}" ! -type d ! '(' -name '*.so' -o -name '*.so.[0-9]*' -o -name '*.h' ')' | egrep -v "${ALLOWED}" | xargs -d $'\n' rm -f || die 'failed to remove everything but *.so*'
}
+
# Install the prebuilt 32bit libraries into the image, renaming config
# snippets, fixing up the lib32 -> $(get_abi_LIBDIR x86) layout, and
# dropping files that the multilib framework now provides natively.
emul-linux-x86_src_install() {
	# Suffix env.d/revdep-rebuild snippets with "-emul" so they cannot
	# collide with the native packages' files of the same name.
	for dir in etc/env.d etc/revdep-rebuild ; do
		if [[ -d "${S}"/${dir} ]] ; then
			for f in "${S}"/${dir}/* ; do
				mv -f "$f"{,-emul}
			done
		fi
	done

	# remove void directories
	# Fix: the original used `2&>/dev/null`, which bash parses as a
	# literal argument "2" to rmdir plus an &> redirection of BOTH
	# streams; `2>/dev/null` silences only stderr as intended.
	find "${S}" -depth -type d -print0 | xargs -0 rmdir 2>/dev/null

	cp -pPR "${S}"/* "${ED}"/ || die "copying files failed!"

	# Do not hardcode lib32, bug #429726
	local x86_libdir=$(get_abi_LIBDIR x86)
	if [[ ${x86_libdir} != "lib32" ]] ; then
		ewarn "Moving lib32/ to ${x86_libdir}/; some libs might not work"
		mv "${D}"/usr/lib32 "${D}"/usr/${x86_libdir} || die
		if [[ -d ${D}/lib32 ]] ; then
			mv "${D}"/lib32 "${D}"/${x86_libdir} || die
		fi

		pushd "${D}"/usr/${x86_libdir} >/dev/null

		# Fix linker script paths.
		local ldscripts
		if ldscripts=( $(grep -ls '^GROUP.*/lib32/' *.so) ) ; then
			sed -i \
				-e "s:/lib32/:/${x86_libdir}/:" \
				"${ldscripts[@]}" || die
		fi

		# Rewrite symlinks (if need be).
		local sym tgt
		while read sym ; do
			tgt=$(readlink "${sym}")
			ln -sf "${tgt/lib32/${x86_libdir}}" "${sym}" || die
		done < <(find -xtype l)

		popd >/dev/null
	fi

	# Since header wrapping is added as part of gx86-multilib support,
	# all packages involved install their own copies of i686* headers
	# when built with abi_x86_32.
	if [[ -d "${D}"/usr/include ]] && use abi_x86_32; then
		rm -r "${D}"/usr/include || die
	fi
	# The same goes for ${CHOST}- multilib tool prefixing.
	if path_exists "${D}"/usr/bin/i686-pc-linux-gnu-* && use abi_x86_32; then
		rm "${D}"/usr/bin/i686-pc-linux-gnu-* || die
	fi
}
diff --git a/eclass/enlightenment.eclass b/eclass/enlightenment.eclass
new file mode 100644
index 000000000000..1b04f467a762
--- /dev/null
+++ b/eclass/enlightenment.eclass
@@ -0,0 +1,199 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: enlightenment.eclass
+# @MAINTAINER:
+# enlightenment@gentoo.org
+# @BLURB: simplify enlightenment package management
+
+if [[ -z ${_ENLIGHTENMENT_ECLASS} ]]; then
+_ENLIGHTENMENT_ECLASS=1
+
+inherit eutils libtool
+
+# @ECLASS-VARIABLE: E_PYTHON
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# if defined, the package is based on Python/distutils
+
+# @ECLASS-VARIABLE: E_CYTHON
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# if defined, the package is Cython bindings (implies E_PYTHON)
+
+# @ECLASS-VARIABLE: E_ECONF
+# @DESCRIPTION:
+# Array of flags to pass to econf (obsoletes MY_ECONF)
+E_ECONF=()
+
+# E_STATE's:
+# release [default]
+# KEYWORDS arch
+# SRC_URI $P.tar.gz
+# S $WORKDIR/$P
+#
+# snap $PV has .200##### datestamp or .### counter
+# KEYWORDS ~arch
+# SRC_URI $P.tar.bz2
+# S $WORKDIR/$P
+#
+# live $PV has a 9999 marker
+# KEYWORDS ""
+# SRC_URI svn/etc... up
+# S $WORKDIR/$E_S_APPEND
+#
+# Overrides:
+# KEYWORDS EKEY_STATE
+# SRC_URI EURI_STATE
+# S EURI_STATE
+
E_LIVE_SERVER_DEFAULT_SVN="http://svn.enlightenment.org/svn/e/trunk"
E_LIVE_SERVER_DEFAULT_GIT="git://git.enlightenment.org"

# Decide the package state (release/snap/live) from PV and E_SNAP_DATE,
# and set up the matching VCS eclass for live ebuilds.
E_STATE="release"
if [[ ${PV} == *9999* ]] ; then
	if [[ ${EGIT_URI_APPEND} ]] ; then
		E_LIVE_SERVER=${E_LIVE_SERVER:-${E_LIVE_SERVER_DEFAULT_GIT}}
		# NOTE(review): this default is redundant -- the branch is only
		# entered when EGIT_URI_APPEND is already non-empty.
		EGIT_URI_APPEND=${EGIT_URI_APPEND:-${PN}}
		EGIT_PROJECT="enlightenment/${EGIT_SUB_PROJECT}/${EGIT_URI_APPEND}"
		EGIT_REPO_URI=${EGIT_SERVER:-${E_LIVE_SERVER_DEFAULT_GIT}}/${EGIT_SUB_PROJECT}/${EGIT_URI_APPEND}.git
		E_S_APPEND=${EGIT_URI_APPEND}
		E_LIVE_SOURCE="git"
		inherit git-2
	else
		E_LIVE_SERVER=${E_LIVE_SERVER:-${E_LIVE_SERVER_DEFAULT_SVN}}

		ESVN_URI_APPEND=${ESVN_URI_APPEND:-${PN}}
		ESVN_PROJECT="enlightenment/${ESVN_SUB_PROJECT}"
		ESVN_REPO_URI=${ESVN_SERVER:-${E_LIVE_SERVER_DEFAULT_SVN}}/${ESVN_SUB_PROJECT}/${ESVN_URI_APPEND}
		E_S_APPEND=${ESVN_URI_APPEND}
		E_LIVE_SOURCE="svn"
		inherit subversion
	fi
	E_STATE="live"
	WANT_AUTOTOOLS="yes"

elif [[ -n ${E_SNAP_DATE} ]] ; then
	E_STATE="snap"
else
	E_STATE="release"
fi

# Parse requested python state (E_CYTHON implies E_PYTHON)
: ${E_PYTHON:=${E_CYTHON}}
if [[ -n ${E_PYTHON} ]] ; then
	PYTHON_DEPEND="2"

	inherit python
fi

if [[ ${WANT_AUTOTOOLS} == "yes" ]] ; then
	WANT_AUTOCONF=${E_WANT_AUTOCONF:-latest}
	WANT_AUTOMAKE=${E_WANT_AUTOMAKE:-latest}
	inherit autotools
fi

# src_prepare/src_configure only exist as phases in EAPI >= 2.
ENLIGHTENMENT_EXPF="src_unpack src_compile src_install"
case "${EAPI:-0}" in
2|3|4|5) ENLIGHTENMENT_EXPF+=" src_prepare src_configure" ;;
*) ;;
esac
EXPORT_FUNCTIONS ${ENLIGHTENMENT_EXPF}

DESCRIPTION="A DR17 production"
HOMEPAGE="http://www.enlightenment.org/"
if [[ -z ${SRC_URI} ]] ; then
	case ${EURI_STATE:-${E_STATE}} in
		release) SRC_URI="mirror://sourceforge/enlightenment/${P}.tar.gz";;
		snap) SRC_URI="http://download.enlightenment.org/snapshots/${E_SNAP_DATE}/${P}.tar.bz2";;
		live) SRC_URI="";;
	esac
fi

LICENSE="BSD"
SLOT="0"
case ${EKEY_STATE:-${E_STATE}} in
	release) KEYWORDS="alpha amd64 arm hppa ia64 ~mips ppc ppc64 sh sparc x86 ~amd64-fbsd ~x86-fbsd ~amd64-linux ~x86-linux ~ppc-macos ~x86-macos ~x86-interix ~x86-solaris ~x64-solaris";;
	snap) KEYWORDS="~alpha ~amd64 ~arm ~hppa ~ia64 ~mips ~ppc ~ppc64 ~sh ~sparc ~x86 ~amd64-fbsd ~x86-fbsd ~amd64-linux ~x86-linux ~ppc-macos ~x86-macos ~x86-interix ~x86-solaris ~x64-solaris";;
	live) KEYWORDS="";;
esac
IUSE="nls doc"

DEPEND="doc? ( app-doc/doxygen )
	${E_PYTHON:+>=dev-python/setuptools-0.6_rc9}
	${E_CYTHON:+>=dev-python/cython-0.12.1}"
RDEPEND="nls? ( sys-devel/gettext )"

case ${EURI_STATE:-${E_STATE}} in
	release) S=${WORKDIR}/${P};;
	snap) S=${WORKDIR}/${P};;
	live) S=${WORKDIR}/${E_S_APPEND};;
esac
+
# Unpack the sources: tarball for release/snap ebuilds, VCS checkout
# for live ones.  EAPIs without a src_prepare phase run the prepare
# logic from here instead.
enlightenment_src_unpack() {
	if [[ ${E_STATE} != "live" ]] ; then
		unpack ${A}
	else
		case ${E_LIVE_SOURCE} in
			svn) subversion_src_unpack;;
			git) git-2_src_unpack;;
			*) die "eek!";;
		esac
	fi
	if ! has src_prepare ${ENLIGHTENMENT_EXPF} ; then
		cd "${S}" || die
		enlightenment_src_prepare
	fi
}
+
enlightenment_src_prepare() {
	# Allow user patches from /etc/portage/patches.
	epatch_user
	# Make the upstream doc generator executable if it has content.
	[[ -s gendoc ]] && chmod a+rx gendoc
	if [[ ${WANT_AUTOTOOLS} == "yes" ]] ; then
		[[ -d po ]] && eautopoint -f
		# autotools require README, when README.in is around, but README
		# is created later in configure step
		[[ -f README.in ]] && touch README
		export SVN_REPO_PATH=${ESVN_WC_PATH}
		eautoreconf
	fi
	epunt_cxx
	elibtoolize
}
+
enlightenment_src_configure() {
	# gstreamer sucks, work around it doing stupid stuff
	export GST_REGISTRY="${S}/registry.xml"
	# Only pass the static-libs switch to packages that declare the flag.
	if has static-libs ${IUSE} ; then
		E_ECONF+=( $(use_enable static-libs static) )
	fi

	econf ${MY_ECONF} "${E_ECONF[@]}"
}
+
enlightenment_src_compile() {
	# In EAPIs without a src_configure phase, configure from here.
	has src_configure ${ENLIGHTENMENT_EXPF} || enlightenment_src_configure

	V=1 emake || die

	if use doc ; then
		if [[ -x ./gendoc ]] ; then
			./gendoc || die
		elif emake -j1 -n doc >&/dev/null ; then
			# Dry-run probe: only build "doc" if the target exists.
			V=1 emake doc || die
		fi
	fi
}
+
enlightenment_src_install() {
	V=1 emake install DESTDIR="${D}" || die
	# Strip any stray VCS metadata that slipped into the image.
	find "${D}" '(' -name CVS -o -name .svn -o -name .git ')' -type d -exec rm -rf '{}' \; 2>/dev/null
	for d in AUTHORS ChangeLog NEWS README TODO ${EDOCS}; do
		[[ -f ${d} ]] && dodoc ${d}
	done
	use doc && [[ -d doc ]] && dohtml -r doc/*
	# Honour USE=static-libs where the package declares the flag.
	if has static-libs ${IUSE} ; then
		use static-libs || find "${D}" -name '*.la' -exec rm -f {} +
	fi
}
+
+fi
diff --git a/eclass/eutils.eclass b/eclass/eutils.eclass
new file mode 100644
index 000000000000..fecd375a6239
--- /dev/null
+++ b/eclass/eutils.eclass
@@ -0,0 +1,1794 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: eutils.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: many extra (but common) functions that are used in ebuilds
+# @DESCRIPTION:
+# The eutils eclass contains a suite of functions that complement
+# the ones that ebuild.sh already contain. The idea is that the functions
+# are not required in all ebuilds but enough utilize them to have a common
+# home rather than having multiple ebuilds implementing the same thing.
+#
+# Due to the nature of this eclass, some functions may have maintainers
+# different from the overall eclass!
+
+if [[ -z ${_EUTILS_ECLASS} ]]; then
+_EUTILS_ECLASS=1
+
+inherit multilib toolchain-funcs
+
if has "${EAPI:-0}" 0 1 2; then

# @FUNCTION: epause
# @USAGE: [seconds]
# @DESCRIPTION:
# Sleep for the specified number of seconds (default of 5 seconds). Useful when
# printing a message the user should probably be reading and often used in
# conjunction with the ebeep function. If the EPAUSE_IGNORE env var is set,
# don't wait at all. Defined in EAPIs 0 1 and 2.
epause() {
	[[ -z ${EPAUSE_IGNORE} ]] && sleep ${1:-5}
}

# @FUNCTION: ebeep
# @USAGE: [number of beeps]
# @DESCRIPTION:
# Issue the specified number of beeps (default of 5 beeps). Useful when
# printing a message the user should probably be reading and often used in
# conjunction with the epause function. If the EBEEP_IGNORE env var is set,
# don't beep at all. Defined in EAPIs 0 1 and 2.
ebeep() {
	local n
	if [[ -z ${EBEEP_IGNORE} ]] ; then
		for ((n=1 ; n <= ${1:-5} ; n++)) ; do
			echo -ne "\a"
			# Both "0.1" and "0,1" are tried with errors silenced --
			# presumably so one form works regardless of the locale's
			# decimal separator; TODO confirm before simplifying.
			sleep 0.1 &>/dev/null ; sleep 0,1 &>/dev/null
			echo -ne "\a"
			sleep 1
		done
	fi
}

else

# In EAPI >= 3 these helpers are gone; leave QA warnings behind for
# any ebuild that still calls them.
ebeep() {
	ewarn "QA Notice: ebeep is not defined in EAPI=${EAPI}, please file a bug at http://bugs.gentoo.org"
}

epause() {
	ewarn "QA Notice: epause is not defined in EAPI=${EAPI}, please file a bug at http://bugs.gentoo.org"
}

fi
+
# @FUNCTION: eqawarn
# @USAGE: [message]
# @DESCRIPTION:
# Proxy to ewarn for package managers that don't provide eqawarn and use the PM
# implementation if available. Reuses PORTAGE_ELOG_CLASSES as set by the dev
# profile.
if ! declare -F eqawarn >/dev/null ; then
	eqawarn() {
		# Only warn when the "qa" elog class is enabled; the trailing
		# ':' forces a zero exit status either way.
		has qa ${PORTAGE_ELOG_CLASSES} && ewarn "$@"
		:
	}
fi
+
# @FUNCTION: ecvs_clean
# @USAGE: [list of dirs]
# @DESCRIPTION:
# Recursively strip CVS metadata (CVS/ directories and .cvs* files)
# from the given directories. Defaults to $PWD.
ecvs_clean() {
	local -a roots=( "$@" )
	[[ -z ${roots[*]} ]] && roots=( . )
	find "${roots[@]}" -type d -name 'CVS' -prune -print0 | xargs -0 rm -rf
	find "${roots[@]}" -type f -name '.cvs*' -print0 | xargs -0 rm -rf
}
+
# @FUNCTION: esvn_clean
# @USAGE: [list of dirs]
# @DESCRIPTION:
# Recursively strip .svn directories from the given directories.
# Defaults to $PWD.
esvn_clean() {
	local -a roots=( "$@" )
	[[ -z ${roots[*]} ]] && roots=( . )
	find "${roots[@]}" -type d -name '.svn' -prune -print0 | xargs -0 rm -rf
}
+
# @FUNCTION: estack_push
# @USAGE: <stack> [items to push]
# @DESCRIPTION:
# Push any number of items onto the named stack. The stack name must
# form a valid variable name (stick to alphanumerics).
#
# The following code snippet will echo 5, then 4, then 3, then ...
# @CODE
# estack_push mystack 1 2 3 4 5
# while estack_pop mystack i ; do
#	echo "${i}"
# done
# @CODE
estack_push() {
	(( $# )) || die "estack_push: incorrect # of arguments"
	local _estack_name="_ESTACK_$1_"
	shift
	# Append every remaining argument to the backing array in one go.
	eval "${_estack_name}+=( \"\$@\" )"
}
+
# @FUNCTION: estack_pop
# @USAGE: <stack> [variable]
# @DESCRIPTION:
# Pop a single item off the specified stack. If a variable is specified,
# the popped item is stored there. If no more items are available, return
# 1, else return 0. See estack_push for more info.
estack_pop() {
	[[ $# -eq 0 || $# -gt 2 ]] && die "estack_pop: incorrect # of arguments"

	# We use the fugly _estack_xxx var names to avoid collision with
	# passing back the return value. If we used "local i" and the
	# caller ran `estack_pop ... i`, we'd end up setting the local
	# copy of "i" rather than the caller's copy. The _estack_xxx
	# garbage is preferable to using $1/$2 everywhere as that is a
	# bit harder to read.
	local _estack_name="_ESTACK_$1_" ; shift
	# With no [variable] arg this second shift is a harmless no-op failure.
	local _estack_retvar=$1 ; shift
	eval local _estack_i=\${#${_estack_name}\[@\]}
	# Don't warn -- let the caller interpret this as a failure
	# or as normal behavior (akin to `shift`)
	[[ $(( --_estack_i )) -eq -1 ]] && return 1

	if [[ -n ${_estack_retvar} ]] ; then
		eval ${_estack_retvar}=\"\${${_estack_name}\[${_estack_i}\]}\"
	fi
	# Drop the popped slot from the backing array.
	eval unset ${_estack_name}\[${_estack_i}\]
}
+
# @FUNCTION: evar_push
# @USAGE: <variable to save> [more vars to save]
# @DESCRIPTION:
# Temporarily stash one or more variables -- value and set/unset state --
# so they can be restored later with evar_pop. Arrays are not supported
# at this time.
#
# This is meant for variables where using `local` does not work (such as
# exported variables, or only temporarily changing things in a func).
#
# For example:
# @CODE
# evar_push LC_ALL
# export LC_ALL=C
# ... do some stuff that needs LC_ALL=C set ...
# evar_pop
#
# # You can also save/restore more than one var at a time
# evar_push BUTTERFLY IN THE SKY
# ... do stuff with the vars ...
# evar_pop # This restores just one var, SKY
# ... do more stuff ...
# evar_pop 3 # This pops the remaining 3 vars
# @CODE
evar_push() {
	local name snapshot
	for name ; do
		if [[ ${!name+set} == "set" ]] ; then
			snapshot=${!name}
		else
			# Sentinel marking "was unset"; evar_pop recognises it.
			snapshot="unset_76fc3c462065bb4ca959f939e6793f94"
		fi
		estack_push evar "${name}" "${snapshot}"
	done
}
+
# @FUNCTION: evar_push_set
# @USAGE: <variable to save> [new value to store]
# @DESCRIPTION:
# Shortcut: save a variable with evar_push and then set it to the given
# value. If no value is specified, the var is unset instead.
evar_push_set() {
	local name=$1
	evar_push ${name}
	if [[ $# -eq 1 ]] ; then
		unset ${name}
	elif [[ $# -eq 2 ]] ; then
		printf -v "${name}" '%s' "$2"
	else
		die "${FUNCNAME}: incorrect # of args: $*"
	fi
}
+
# @FUNCTION: evar_pop
# @USAGE: [number of vars to restore]
# @DESCRIPTION:
# Restore the variables to the state saved with the corresponding
# evar_push call. See that function for more details.
evar_pop() {
	local cnt=${1:-bad}
	case $# in
	0) cnt=1 ;;
	1) isdigit "${cnt}" || die "${FUNCNAME}: first arg must be a number: $*" ;;
	*) die "${FUNCNAME}: only accepts one arg: $*" ;;
	esac

	local var val
	while (( cnt-- )) ; do
		# Items were pushed as (name, value) pairs; pop in reverse.
		estack_pop evar val || die "${FUNCNAME}: unbalanced push"
		estack_pop evar var || die "${FUNCNAME}: unbalanced push"
		# The magic sentinel means the var was unset when pushed.
		[[ ${val} == "unset_76fc3c462065bb4ca959f939e6793f94" ]] \
			&& unset ${var} \
			|| printf -v "${var}" '%s' "${val}"
	done
}
+
# @FUNCTION: eshopts_push
# @USAGE: [options to `set` or `shopt`]
# @DESCRIPTION:
# Save the current shell options on a stack, then apply the given
# arguments. Lets code flip options (e.g. globbing) without breaking
# callers that assume the default state; restore with eshopts_pop.
#
# If the first argument is '-s' or '-u', the args are handed to `shopt`
# rather than `set`, as some options are only reachable that way.
#
# A common example is to disable shell globbing so that special meaning/care
# may be used with variables/arguments to custom functions. That would be:
# @CODE
# eshopts_push -o noglob
# for x in ${foo} ; do
#	if ...some check... ; then
#		eshopts_pop
#		return 0
#	fi
# done
# eshopts_pop
# @CODE
eshopts_push() {
	if [[ $1 != -[su] ]] ; then
		# `set` mode: snapshot the single-letter flags from $-.
		estack_push eshopts $-
		[[ $# -eq 0 ]] && return 0
		set "$@" || die "${FUNCNAME}: bad options to set: $*"
	else
		# `shopt` mode: `shopt -p` emits re-executable restore code.
		estack_push eshopts "$(shopt -p)"
		[[ $# -eq 0 ]] && return 0
		shopt "$@" || die "${FUNCNAME}: bad options to shopt: $*"
	fi
}
+
# @FUNCTION: eshopts_pop
# @USAGE:
# @DESCRIPTION:
# Restore the shell options to the state saved with the corresponding
# eshopts_push call. See that function for more details.
eshopts_pop() {
	local s
	estack_pop eshopts s || die "${FUNCNAME}: unbalanced push"
	if [[ ${s} == "shopt -"* ]] ; then
		# `shopt -p` output is itself executable restore code.
		eval "${s}" || die "${FUNCNAME}: sanity: invalid shopt options: ${s}"
	else
		# Clear all currently-set single-letter flags, then re-apply
		# the saved set.
		set +$- || die "${FUNCNAME}: sanity: invalid shell settings: $-"
		set -${s} || die "${FUNCNAME}: sanity: unable to restore saved shell settings: ${s}"
	fi
}
+
# @FUNCTION: eumask_push
# @USAGE: <new umask>
# @DESCRIPTION:
# Set the umask to the new value specified while saving the previous
# value onto a stack. Useful for temporarily changing the umask.
# Restore with eumask_pop.
eumask_push() {
	estack_push eumask "$(umask)"
	umask "$@" || die "${FUNCNAME}: bad options to umask: $*"
}
+
# @FUNCTION: eumask_pop
# @USAGE:
# @DESCRIPTION:
# Restore the umask saved by the matching eumask_push call.
eumask_pop() {
	[[ $# -eq 0 ]] || die "${FUNCNAME}: we take no options"
	local s
	estack_pop eumask s || die "${FUNCNAME}: unbalanced push"
	umask ${s} || die "${FUNCNAME}: sanity: could not restore umask: ${s}"
}
+
# @FUNCTION: isdigit
# @USAGE: <number> [more numbers]
# @DESCRIPTION:
# Return true only if every argument consists solely of decimal digits
# (an empty or missing value counts as non-numeric).
isdigit() {
	local tok
	for tok ; do
		case ${tok:-bad} in
			*[!0-9]*) return 1 ;;
		esac
	done
	return 0
}
+
# @VARIABLE: EPATCH_SOURCE
# @DESCRIPTION:
# Default directory to search for patches (also used as a fallback
# search dir when epatch is given a bare file name).
EPATCH_SOURCE="${WORKDIR}/patch"
# @VARIABLE: EPATCH_SUFFIX
# @DESCRIPTION:
# Default extension for patches (do not prefix the period yourself).
EPATCH_SUFFIX="patch.bz2"
# @VARIABLE: EPATCH_OPTS
# @DESCRIPTION:
# Options to pass to patch. Meant for ebuild/package-specific tweaking
# such as forcing the patch level (-p#) or fuzz (-F#) factor. Note that
# for single patch tweaking, you can also pass flags directly to epatch.
EPATCH_OPTS=""
# @VARIABLE: EPATCH_COMMON_OPTS
# @DESCRIPTION:
# Common options to pass to `patch`. You probably should never need to
# change these. If you do, please discuss it with base-system first to
# be sure.
# @CODE
# -g0 - keep RCS, ClearCase, Perforce and SCCS happy #24571
# --no-backup-if-mismatch - do not leave .orig files behind
# -E - automatically remove empty files
# @CODE
EPATCH_COMMON_OPTS="-g0 -E --no-backup-if-mismatch"
# @VARIABLE: EPATCH_EXCLUDE
# @DESCRIPTION:
# List of patches not to apply. Note this is only file names,
# and not the full path. Globs accepted.
EPATCH_EXCLUDE=""
# @VARIABLE: EPATCH_SINGLE_MSG
# @DESCRIPTION:
# Change the printed message for a single patch.
EPATCH_SINGLE_MSG=""
# @VARIABLE: EPATCH_MULTI_MSG
# @DESCRIPTION:
# Change the printed message for multiple patches.
EPATCH_MULTI_MSG="Applying various patches (bugfixes/updates) ..."
# @VARIABLE: EPATCH_FORCE
# @DESCRIPTION:
# Only require patches to match EPATCH_SUFFIX rather than the extended
# ??_${ARCH}_foo.${EPATCH_SUFFIX} arch naming style.
EPATCH_FORCE="no"
# @VARIABLE: EPATCH_USER_EXCLUDE
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of patches not to apply. Note this is only file names,
# and not the full path. Globs accepted.
+
+# @FUNCTION: epatch
+# @USAGE: [options] [patches] [dirs of patches]
+# @DESCRIPTION:
+# epatch is designed to greatly simplify the application of patches. It can
+# process patch files directly, or directories of patches. The patches may be
+# compressed (bzip/gzip/etc...) or plain text. You generally need not specify
+# the -p option as epatch will automatically attempt -p0 to -p4 until things
+# apply successfully.
+#
+# If you do not specify any patches/dirs, then epatch will default to the
+# directory specified by EPATCH_SOURCE.
+#
+# Any options specified that start with a dash will be passed down to patch
+# for this specific invocation. As soon as an arg w/out a dash is found, then
+# arg processing stops.
+#
+# When processing directories, epatch will apply all patches that match:
+# @CODE
+# if ${EPATCH_FORCE} != "yes"
+# ??_${ARCH}_foo.${EPATCH_SUFFIX}
+# else
+# *.${EPATCH_SUFFIX}
+# @CODE
+# The leading ?? are typically numbers used to force consistent patch ordering.
+# The arch field is used to apply patches only for the host architecture with
+# the special value of "all" means apply for everyone. Note that using values
+# other than "all" is highly discouraged -- you should apply patches all the
+# time and let architecture details be detected at configure/compile time.
+#
+# If EPATCH_SUFFIX is empty, then no period before it is implied when searching
+# for patches to apply.
+#
+# Refer to the other EPATCH_xxx variables for more customization of behavior.
+epatch() {
+ _epatch_draw_line() {
+ # create a line of same length as input string
+ [[ -z $1 ]] && set "$(printf "%65s" '')"
+ echo "${1//?/=}"
+ }
+
+ unset P4CONFIG P4PORT P4USER # keep perforce at bay #56402
+
+ # First process options. We localize the EPATCH_OPTS setting
+ # from above so that we can pass it on in the loop below with
+ # any additional values the user has specified.
+ local EPATCH_OPTS=( ${EPATCH_OPTS[*]} )
+ while [[ $# -gt 0 ]] ; do
+ case $1 in
+ -*) EPATCH_OPTS+=( "$1" ) ;;
+ *) break ;;
+ esac
+ shift
+ done
+
+ # Let the rest of the code process one user arg at a time --
+ # each arg may expand into multiple patches, and each arg may
+ # need to start off with the default global EPATCH_xxx values
+ if [[ $# -gt 1 ]] ; then
+ local m
+ for m in "$@" ; do
+ epatch "${m}"
+ done
+ return 0
+ fi
+
+ local SINGLE_PATCH="no"
+ # no args means process ${EPATCH_SOURCE}
+ [[ $# -eq 0 ]] && set -- "${EPATCH_SOURCE}"
+
+ if [[ -f $1 ]] ; then
+ SINGLE_PATCH="yes"
+ set -- "$1"
+ # Use the suffix from the single patch (localize it); the code
+ # below will find the suffix for us
+ local EPATCH_SUFFIX=$1
+
+ elif [[ -d $1 ]] ; then
+ # We have to force sorting to C so that the wildcard expansion is consistent #471666.
+ evar_push_set LC_COLLATE C
+ # Some people like to make dirs of patches w/out suffixes (vim).
+ set -- "$1"/*${EPATCH_SUFFIX:+."${EPATCH_SUFFIX}"}
+ evar_pop
+
+ elif [[ -f ${EPATCH_SOURCE}/$1 ]] ; then
+ # Re-use EPATCH_SOURCE as a search dir
+ epatch "${EPATCH_SOURCE}/$1"
+ return $?
+
+ else
+ # sanity check ... if it isn't a dir or file, wtf man ?
+ [[ $# -ne 0 ]] && EPATCH_SOURCE=$1
+ echo
+ eerror "Cannot find \$EPATCH_SOURCE! Value for \$EPATCH_SOURCE is:"
+ eerror
+ eerror " ${EPATCH_SOURCE}"
+ eerror " ( ${EPATCH_SOURCE##*/} )"
+ echo
+ die "Cannot find \$EPATCH_SOURCE!"
+ fi
+
+ # Now that we know we're actually going to apply something, merge
+ # all of the patch options back in to a single variable for below.
+ EPATCH_OPTS="${EPATCH_COMMON_OPTS} ${EPATCH_OPTS[*]}"
+
+ local PIPE_CMD
+ case ${EPATCH_SUFFIX##*\.} in
+ xz) PIPE_CMD="xz -dc" ;;
+ lzma) PIPE_CMD="lzma -dc" ;;
+ bz2) PIPE_CMD="bzip2 -dc" ;;
+ gz|Z|z) PIPE_CMD="gzip -dc" ;;
+ ZIP|zip) PIPE_CMD="unzip -p" ;;
+ *) ;;
+ esac
+
+ [[ ${SINGLE_PATCH} == "no" ]] && einfo "${EPATCH_MULTI_MSG}"
+
+ local x
+ for x in "$@" ; do
+ # If the patch dir given contains subdirs, or our EPATCH_SUFFIX
+ # didn't match anything, ignore continue on
+ [[ ! -f ${x} ]] && continue
+
+ local patchname=${x##*/}
+
+ # Apply single patches, or forced sets of patches, or
+ # patches with ARCH dependant names.
+ # ???_arch_foo.patch
+ # Else, skip this input altogether
+ local a=${patchname#*_} # strip the ???_
+ a=${a%%_*} # strip the _foo.patch
+ if ! [[ ${SINGLE_PATCH} == "yes" || \
+ ${EPATCH_FORCE} == "yes" || \
+ ${a} == all || \
+ ${a} == ${ARCH} ]]
+ then
+ continue
+ fi
+
+ # Let people filter things dynamically
+ if [[ -n ${EPATCH_EXCLUDE}${EPATCH_USER_EXCLUDE} ]] ; then
+ # let people use globs in the exclude
+ eshopts_push -o noglob
+
+ local ex
+ for ex in ${EPATCH_EXCLUDE} ; do
+ if [[ ${patchname} == ${ex} ]] ; then
+ einfo " Skipping ${patchname} due to EPATCH_EXCLUDE ..."
+ eshopts_pop
+ continue 2
+ fi
+ done
+
+ for ex in ${EPATCH_USER_EXCLUDE} ; do
+ if [[ ${patchname} == ${ex} ]] ; then
+ einfo " Skipping ${patchname} due to EPATCH_USER_EXCLUDE ..."
+ eshopts_pop
+ continue 2
+ fi
+ done
+
+ eshopts_pop
+ fi
+
+ if [[ ${SINGLE_PATCH} == "yes" ]] ; then
+ if [[ -n ${EPATCH_SINGLE_MSG} ]] ; then
+ einfo "${EPATCH_SINGLE_MSG}"
+ else
+ einfo "Applying ${patchname} ..."
+ fi
+ else
+ einfo " ${patchname} ..."
+ fi
+
+ # Handle aliased patch command #404447 #461568
+ local patch="patch"
+ eval $(alias patch 2>/dev/null | sed 's:^alias ::')
+
+ # most of the time, there will only be one run per unique name,
+ # but if there are more, make sure we get unique log filenames
+ local STDERR_TARGET="${T}/${patchname}.out"
+ if [[ -e ${STDERR_TARGET} ]] ; then
+ STDERR_TARGET="${T}/${patchname}-$$.out"
+ fi
+
+ printf "***** %s *****\nPWD: %s\nPATCH TOOL: %s -> %s\nVERSION INFO:\n%s\n\n" \
+ "${patchname}" \
+ "${PWD}" \
+ "${patch}" \
+ "$(type -P "${patch}")" \
+ "$(${patch} --version)" \
+ > "${STDERR_TARGET}"
+
+ # Decompress the patch if need be
+ local count=0
+ local PATCH_TARGET
+ if [[ -n ${PIPE_CMD} ]] ; then
+ PATCH_TARGET="${T}/$$.patch"
+ echo "PIPE_COMMAND: ${PIPE_CMD} ${x} > ${PATCH_TARGET}" >> "${STDERR_TARGET}"
+
+ if ! (${PIPE_CMD} "${x}" > "${PATCH_TARGET}") >> "${STDERR_TARGET}" 2>&1 ; then
+ echo
+ eerror "Could not extract patch!"
+ #die "Could not extract patch!"
+ count=5
+ break
+ fi
+ else
+ PATCH_TARGET=${x}
+ fi
+
+ # Check for absolute paths in patches. If sandbox is disabled,
+ # people could (accidently) patch files in the root filesystem.
+ # Or trigger other unpleasantries #237667. So disallow -p0 on
+ # such patches.
+ local abs_paths=$(egrep -n '^[-+]{3} /' "${PATCH_TARGET}" | awk '$2 != "/dev/null" { print }')
+ if [[ -n ${abs_paths} ]] ; then
+ count=1
+ printf "NOTE: skipping -p0 due to absolute paths in patch:\n%s\n" "${abs_paths}" >> "${STDERR_TARGET}"
+ fi
+ # Similar reason, but with relative paths.
+ local rel_paths=$(egrep -n '^[-+]{3} [^ ]*[.][.]/' "${PATCH_TARGET}")
+ if [[ -n ${rel_paths} ]] ; then
+ echo
+ eerror "Rejected Patch: ${patchname} !"
+ eerror " ( ${PATCH_TARGET} )"
+ eerror
+ eerror "Your patch uses relative paths '../':"
+ eerror "${rel_paths}"
+ echo
+ die "you need to fix the relative paths in patch"
+ fi
+
+ # Dynamically detect the correct -p# ... i'm lazy, so shoot me :/
+ local patch_cmd
+ while [[ ${count} -lt 5 ]] ; do
+ patch_cmd="${patch} -p${count} ${EPATCH_OPTS}"
+
+ # Generate some useful debug info ...
+ (
+ _epatch_draw_line "***** ${patchname} *****"
+ echo
+ echo "PATCH COMMAND: ${patch_cmd} < '${PATCH_TARGET}'"
+ echo
+ _epatch_draw_line "***** ${patchname} *****"
+ ${patch_cmd} --dry-run -f < "${PATCH_TARGET}" 2>&1
+ ret=$?
+ echo
+ echo "patch program exited with status ${ret}"
+ exit ${ret}
+ ) >> "${STDERR_TARGET}"
+
+ if [ $? -eq 0 ] ; then
+ (
+ _epatch_draw_line "***** ${patchname} *****"
+ echo
+ echo "ACTUALLY APPLYING ${patchname} ..."
+ echo
+ _epatch_draw_line "***** ${patchname} *****"
+ ${patch_cmd} < "${PATCH_TARGET}" 2>&1
+ ret=$?
+ echo
+ echo "patch program exited with status ${ret}"
+ exit ${ret}
+ ) >> "${STDERR_TARGET}"
+
+ if [ $? -ne 0 ] ; then
+ echo
+ eerror "A dry-run of patch command succeeded, but actually"
+ eerror "applying the patch failed!"
+ #die "Real world sux compared to the dreamworld!"
+ count=5
+ fi
+ break
+ fi
+
+ : $(( count++ ))
+ done
+
+ # if we had to decompress the patch, delete the temp one
+ if [[ -n ${PIPE_CMD} ]] ; then
+ rm -f "${PATCH_TARGET}"
+ fi
+
+ if [[ ${count} -ge 5 ]] ; then
+ echo
+ eerror "Failed Patch: ${patchname} !"
+ eerror " ( ${PATCH_TARGET} )"
+ eerror
+ eerror "Include in your bugreport the contents of:"
+ eerror
+ eerror " ${STDERR_TARGET}"
+ echo
+ die "Failed Patch: ${patchname}!"
+ fi
+
+ # if everything worked, delete the full debug patch log
+ rm -f "${STDERR_TARGET}"
+
+ # then log away the exact stuff for people to review later
+ cat <<-EOF >> "${T}/epatch.log"
+ PATCH: ${x}
+ CMD: ${patch_cmd}
+ PWD: ${PWD}
+
+ EOF
+ eend 0
+ done
+
+ [[ ${SINGLE_PATCH} == "no" ]] && einfo "Done with patching"
+ : # everything worked
+}
+
+# @FUNCTION: epatch_user
+# @USAGE:
+# @DESCRIPTION:
+# Applies user-provided patches to the source tree. The patches are
+# taken from /etc/portage/patches/<CATEGORY>/<P-PR|P|PN>[:SLOT]/, where the first
+# of these three directories to exist will be the one to use, ignoring
+# any more general directories which might exist as well. They must end
+# in ".patch" to be applied.
+#
+# User patches are intended for quick testing of patches without ebuild
+# modifications, as well as for permanent customizations a user might
+# desire. Obviously, there can be no official support for arbitrarily
+# patched ebuilds. So whenever a build log in a bug report mentions that
+# user patches were applied, the user should be asked to reproduce the
+# problem without these.
+#
+# Not all ebuilds do call this function, so placing patches in the
+# stated directory might or might not work, depending on the package and
+# the eclasses it inherits and uses. It is safe to call the function
+# repeatedly, so it is always possible to add a call at the ebuild
+# level. The first call is the time when the patches will be
+# applied.
+#
+# Ideally, this function should be called after gentoo-specific patches
+# have been applied, so that their code can be modified as well, but
+# before calls to e.g. eautoreconf, as the user patches might affect
+# autotool input files as well.
+epatch_user() {
+	[[ $# -ne 0 ]] && die "epatch_user takes no options"
+
+	# Allow multiple calls to this function; ignore all but the first
+	local applied="${T}/epatch_user.log"
+	[[ -e ${applied} ]] && return 2
+
+	# don't clobber any EPATCH vars that the parent might want
+	local EPATCH_SOURCE check base=${PORTAGE_CONFIGROOT%/}/etc/portage/patches
+	# Most specific name first: ${P}-${PR}, then ${P}, then ${PN}, each
+	# optionally with a :${SLOT} suffix; the first existing dir wins.
+	for check in ${CATEGORY}/{${P}-${PR},${P},${PN}}{,:${SLOT}}; do
+		# Prefer a CTARGET-qualified dir, then CHOST, then the bare path.
+		EPATCH_SOURCE=${base}/${CTARGET}/${check}
+		[[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${base}/${CHOST}/${check}
+		[[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${base}/${check}
+		if [[ -d ${EPATCH_SOURCE} ]] ; then
+			# EPATCH_FORCE=yes: user patch names need not follow the
+			# ???_arch_foo.patch convention epatch normally enforces.
+			EPATCH_SOURCE=${EPATCH_SOURCE} \
+			EPATCH_SUFFIX="patch" \
+			EPATCH_FORCE="yes" \
+			EPATCH_MULTI_MSG="Applying user patches from ${EPATCH_SOURCE} ..." \
+			epatch
+			echo "${EPATCH_SOURCE}" > "${applied}"
+			# Register the death hook (once) so die() output mentions
+			# that user patches were in play.
+			has epatch_user_death_notice ${EBUILD_DEATH_HOOKS} || EBUILD_DEATH_HOOKS+=" epatch_user_death_notice"
+			return 0
+		fi
+	done
+	echo "none" > "${applied}"
+	return 1
+}
+# @FUNCTION: epatch_user_death_notice
+# @INTERNAL
+# @DESCRIPTION:
+# Include an explicit notice in the die message itself that user patches were
+# applied to this build.
+epatch_user_death_notice() {
+	# Appended to EBUILD_DEATH_HOOKS by epatch_user; runs when die() fires.
+	ewarn "!!! User patches were applied to this build!"
+}
+
+# @FUNCTION: emktemp
+# @USAGE: [temp dir]
+# @DESCRIPTION:
+# Cheap replacement for when debianutils (and thus mktemp)
+# does not exist on the users system.
+emktemp() {
+	# -d requests a temporary directory instead of a file.
+	local exe="touch"
+	[[ $1 == -d ]] && exe="mkdir" && shift
+	local topdir=$1
+
+	if [[ -z ${topdir} ]] ; then
+		# Default to ${T} when set, otherwise fall back to /tmp.
+		[[ -z ${T} ]] \
+			&& topdir="/tmp" \
+			|| topdir=${T}
+	fi
+
+	if ! type -P mktemp > /dev/null ; then
+		# system lacks `mktemp` so we have to fake it: probe random
+		# names until one does not exist (inherently racy, but this is
+		# only a last-resort fallback).
+		local tmp=/
+		while [[ -e ${tmp} ]] ; do
+			tmp=${topdir}/tmp.${RANDOM}.${RANDOM}.${RANDOM}
+		done
+		# Create it; the second attempt with -p covers mkdir paths.
+		${exe} "${tmp}" || ${exe} -p "${tmp}"
+		echo "${tmp}"
+	else
+		# the args here will give slightly wierd names on BSD,
+		# but should produce a usable file on all userlands
+		if [[ ${exe} == "touch" ]] ; then
+			TMPDIR="${topdir}" mktemp -t tmp.XXXXXXXXXX
+		else
+			TMPDIR="${topdir}" mktemp -dt tmp.XXXXXXXXXX
+		fi
+	fi
+}
+
+# @FUNCTION: edos2unix
+# @USAGE: <file> [more files ...]
+# @DESCRIPTION:
+# A handy replacement for dos2unix, recode, fixdos, etc... This allows you
+# to remove all of these text utilities from DEPEND variables because this
+# is a script based solution. Just give it a list of files to convert and
+# they will all be changed from the DOS CRLF format to the UNIX LF format.
+edos2unix() {
+	[[ $# -eq 0 ]] && return 0
+	# Strip the trailing CR from every line, editing the files in place.
+	sed -i 's/\r$//' -- "$@" || die
+}
+
+# @FUNCTION: make_desktop_entry
+# @USAGE: make_desktop_entry(<command>, [name], [icon], [type], [fields])
+# @DESCRIPTION:
+# Make a .desktop file.
+#
+# @CODE
+# binary: what command does the app run with ?
+# name: the name that will show up in the menu
+# icon: the icon to use in the menu entry
+# this can be relative (to /usr/share/pixmaps) or
+# a full path to an icon
+# type: what kind of application is this?
+# for categories:
+# http://standards.freedesktop.org/menu-spec/latest/apa.html
+# if unset, function tries to guess from package's category
+# fields: extra fields to append to the desktop file; a printf string
+# @CODE
+make_desktop_entry() {
+	[[ -z $1 ]] && die "make_desktop_entry: You must specify the executable"
+
+	local exec=${1}
+	local name=${2:-${PN}}
+	local icon=${3:-${PN}}
+	local type=${4}
+	local fields=${5}
+
+	if [[ -z ${type} ]] ; then
+		# No explicit type: map the package's CATEGORY (catmaj-catmin,
+		# e.g. app-editors) onto a freedesktop.org menu category.
+		local catmaj=${CATEGORY%%-*}
+		local catmin=${CATEGORY##*-}
+		case ${catmaj} in
+			app)
+				case ${catmin} in
+					accessibility) type="Utility;Accessibility";;
+					admin) type=System;;
+					antivirus) type=System;;
+					arch) type="Utility;Archiving";;
+					backup) type="Utility;Archiving";;
+					cdr) type="AudioVideo;DiscBurning";;
+					dicts) type="Office;Dictionary";;
+					doc) type=Documentation;;
+					editors) type="Utility;TextEditor";;
+					emacs) type="Development;TextEditor";;
+					emulation) type="System;Emulator";;
+					laptop) type="Settings;HardwareSettings";;
+					office) type=Office;;
+					pda) type="Office;PDA";;
+					vim) type="Development;TextEditor";;
+					xemacs) type="Development;TextEditor";;
+				esac
+				;;
+
+			dev)
+				type="Development"
+				;;
+
+			games)
+				case ${catmin} in
+					action|fps) type=ActionGame;;
+					arcade) type=ArcadeGame;;
+					board) type=BoardGame;;
+					emulation) type=Emulator;;
+					kids) type=KidsGame;;
+					puzzle) type=LogicGame;;
+					roguelike) type=RolePlaying;;
+					rpg) type=RolePlaying;;
+					simulation) type=Simulation;;
+					sports) type=SportsGame;;
+					strategy) type=StrategyGame;;
+				esac
+				type="Game;${type}"
+				;;
+
+			gnome)
+				type="Gnome;GTK"
+				;;
+
+			kde)
+				type="KDE;Qt"
+				;;
+
+			mail)
+				type="Network;Email"
+				;;
+
+			media)
+				case ${catmin} in
+					gfx)
+						type=Graphics
+						;;
+					*)
+						case ${catmin} in
+							radio) type=Tuner;;
+							sound) type=Audio;;
+							tv) type=TV;;
+							video) type=Video;;
+						esac
+						type="AudioVideo;${type}"
+						;;
+				esac
+				;;
+
+			net)
+				case ${catmin} in
+					dialup) type=Dialup;;
+					ftp) type=FileTransfer;;
+					im) type=InstantMessaging;;
+					irc) type=IRCClient;;
+					mail) type=Email;;
+					news) type=News;;
+					nntp) type=News;;
+					p2p) type=FileTransfer;;
+					voip) type=Telephony;;
+				esac
+				type="Network;${type}"
+				;;
+
+			sci)
+				case ${catmin} in
+					astro*) type=Astronomy;;
+					bio*) type=Biology;;
+					calc*) type=Calculator;;
+					chem*) type=Chemistry;;
+					elec*) type=Electronics;;
+					geo*) type=Geology;;
+					math*) type=Math;;
+					physics) type=Physics;;
+					visual*) type=DataVisualization;;
+				esac
+				type="Education;Science;${type}"
+				;;
+
+			sys)
+				type="System"
+				;;
+
+			www)
+				case ${catmin} in
+					client) type=WebBrowser;;
+				esac
+				type="Network;${type}"
+				;;
+
+			*)
+				type=
+				;;
+		esac
+	fi
+	# Suffix the file name with the SLOT (sans subslot) so that slotted
+	# versions of a package do not clobber each other's entries.
+	local slot=${SLOT%/*}
+	if [[ ${slot} == "0" ]] ; then
+		local desktop_name="${PN}"
+	else
+		local desktop_name="${PN}-${slot}"
+	fi
+	# Flatten spaces/slashes/colons in the exec string into '_' to get a
+	# filesystem-safe file name.
+	local desktop="${T}/$(echo ${exec} | sed 's:[[:space:]/:]:_:g')-${desktop_name}.desktop"
+	#local desktop=${T}/${exec%% *:-${desktop_name}}.desktop
+
+	# Don't append another ";" when a valid category value is provided.
+	type=${type%;}${type:+;}
+
+	# The Icon Theme Specification forbids file extensions on relative
+	# icon values; strip a trailing .xpm/.png/.svg (extglob) and warn.
+	eshopts_push -s extglob
+	if [[ -n ${icon} && ${icon} != /* ]] && [[ ${icon} == *.xpm || ${icon} == *.png || ${icon} == *.svg ]]; then
+		ewarn "As described in the Icon Theme Specification, icon file extensions are not"
+		ewarn "allowed in .desktop files if the value is not an absolute path."
+		icon=${icon%.@(xpm|png|svg)}
+	fi
+	eshopts_pop
+
+	cat <<-EOF > "${desktop}"
+	[Desktop Entry]
+	Name=${name}
+	Type=Application
+	Comment=${DESCRIPTION}
+	Exec=${exec}
+	TryExec=${exec%% *}
+	Icon=${icon}
+	Categories=${type}
+	EOF
+
+	if [[ ${fields:-=} != *=* ]] ; then
+		# 5th arg used to be value to Path=
+		ewarn "make_desktop_entry: update your 5th arg to read Path=${fields}"
+		fields="Path=${fields}"
+	fi
+	[[ -n ${fields} ]] && printf '%b\n' "${fields}" >> "${desktop}"
+
+	(
+		# wrap the env here so that the 'insinto' call
+		# doesn't corrupt the env of the caller
+		insinto /usr/share/applications
+		doins "${desktop}"
+	) || die "installing desktop file failed"
+}
+
+# @FUNCTION: _eutils_eprefix_init
+# @INTERNAL
+# @DESCRIPTION:
+# Initialized prefix variables for EAPI<3.
+_eutils_eprefix_init() {
+	# EAPIs 0-2 predate the prefix variables; synthesize ED/EPREFIX/EROOT
+	# from D/ROOT so the rest of the eclass can use them uniformly.
+	has "${EAPI:-0}" 0 1 2 && : ${ED:=${D}} ${EPREFIX:=} ${EROOT:=${ROOT}}
+}
+
+# @FUNCTION: validate_desktop_entries
+# @USAGE: [directories]
+# @MAINTAINER:
+# Carsten Lohrke <carlo@gentoo.org>
+# @DESCRIPTION:
+# Validate desktop entries using desktop-file-utils
+validate_desktop_entries() {
+	_eutils_eprefix_init
+	if [[ -x "${EPREFIX}"/usr/bin/desktop-file-validate ]] ; then
+		einfo "Checking desktop entry validity"
+		local directories=""
+		# Scan the default dir plus any caller-supplied extra dirs, but
+		# only those that actually exist inside the image (${ED}).
+		# NOTE(review): $@, ${directories}, ${FILE} and ${temp} are
+		# expanded unquoted, and d/FILE are not declared local — relies
+		# on paths containing no whitespace/globs; verify before reuse.
+		for d in /usr/share/applications $@ ; do
+			[[ -d ${ED}${d} ]] && directories="${directories} ${ED}${d}"
+		done
+		if [[ -n ${directories} ]] ; then
+			# Drop "warning:" lines and reformat errors as "<file>: <msg>".
+			for FILE in $(find ${directories} -name "*\.desktop" \
+							-not -path '*.hidden*' | sort -u 2>/dev/null)
+			do
+				local temp=$(desktop-file-validate ${FILE} | grep -v "warning:" | \
+								sed -e "s|error: ||" -e "s|${FILE}:|--|g" )
+				[[ -n $temp ]] && elog ${temp/--/${FILE/${ED}/}:}
+			done
+		fi
+		echo ""
+	else
+		einfo "Passing desktop entry validity check. Install dev-util/desktop-file-utils, if you want to help to improve Gentoo."
+	fi
+}
+
+# @FUNCTION: make_session_desktop
+# @USAGE: <title> <command> [command args...]
+# @DESCRIPTION:
+# Make a GDM/KDM Session file. The title is the file to execute to start the
+# Window Manager. The command is the name of the Window Manager.
+#
+# You can set the name of the file via the ${wm} variable.
+make_session_desktop() {
+	[[ -z $1 ]] && eerror "$0: You must specify the title" && return 1
+	[[ -z $2 ]] && eerror "$0: You must specify the command" && return 1
+
+	local title=$1
+	local command=$2
+	# File name is overridable via ${wm}; defaults to ${PN}.desktop.
+	local desktop=${T}/${wm:-${PN}}.desktop
+	shift 2
+
+	# Any remaining args are appended to the Exec= line.
+	cat <<-EOF > "${desktop}"
+	[Desktop Entry]
+	Name=${title}
+	Comment=This session logs you into ${title}
+	Exec=${command} $*
+	TryExec=${command}
+	Type=XSession
+	EOF
+
+	(
+	# wrap the env here so that the 'insinto' call
+	# doesn't corrupt the env of the caller
+	insinto /usr/share/xsessions
+	doins "${desktop}"
+	)
+}
+
+# @FUNCTION: domenu
+# @USAGE: <menus>
+# @DESCRIPTION:
+# Install the list of .desktop menu files into the appropriate directory
+# (/usr/share/applications).
+domenu() {
+	(
+	# wrap the env here so that the 'insinto' call
+	# doesn't corrupt the env of the caller
+	local i j ret=0
+	insinto /usr/share/applications
+	for i in "$@" ; do
+		if [[ -f ${i} ]] ; then
+			doins "${i}"
+			((ret+=$?))
+		elif [[ -d ${i} ]] ; then
+			# Directory argument: install every *.desktop inside it.
+			for j in "${i}"/*.desktop ; do
+				doins "${j}"
+				((ret+=$?))
+			done
+		else
+			# Neither file nor directory: count it as a failure.
+			((++ret))
+		fi
+	done
+	# The subshell's exit status is the accumulated failure count.
+	exit ${ret}
+	)
+}
+
+# @FUNCTION: newmenu
+# @USAGE: <menu> <newname>
+# @DESCRIPTION:
+# Like all other new* functions, install the specified menu as newname.
+newmenu() {
+	(
+	# wrap the env here so that the 'insinto' call
+	# doesn't corrupt the env of the caller
+	insinto /usr/share/applications
+	newins "$@"
+	)
+}
+
+# @FUNCTION: _iconins
+# @INTERNAL
+# @DESCRIPTION:
+# function for use in doicon and newicon
+_iconins() {
+	(
+	# wrap the env here so that the 'insinto' call
+	# doesn't corrupt the env of the caller
+	local funcname=$1; shift
+	local size dir
+	local context=apps
+	local theme=hicolor
+
+	while [[ $# -gt 0 ]] ; do
+		case $1 in
+		-s|--size)
+			# Accept either "48" or "48x48"; normalized to NxN below.
+			if [[ ${2%%x*}x${2%%x*} == "$2" ]] ; then
+				size=${2%%x*}
+			else
+				size=${2}
+			fi
+			case ${size} in
+			16|22|24|32|36|48|64|72|96|128|192|256|512)
+				size=${size}x${size};;
+			scalable)
+				;;
+			*)
+				eerror "${size} is an unsupported icon size!"
+				exit 1;;
+			esac
+			shift 2;;
+		-t|--theme)
+			theme=${2}
+			shift 2;;
+		-c|--context)
+			context=${2}
+			shift 2;;
+		*)
+			# First non-option arg: choose the destination directory
+			# (pixmaps when no --size was given), then for doicon
+			# install each file/directory argument as it is seen.
+			if [[ -z ${size} ]] ; then
+				insinto /usr/share/pixmaps
+			else
+				insinto /usr/share/icons/${theme}/${size}/${context}
+			fi
+
+			if [[ ${funcname} == doicon ]] ; then
+				if [[ -f $1 ]] ; then
+					doins "${1}"
+				elif [[ -d $1 ]] ; then
+					# nullglob: skip silently if no png/svg match.
+					shopt -s nullglob
+					doins "${1}"/*.{png,svg}
+					shopt -u nullglob
+				else
+					eerror "${1} is not a valid file/directory!"
+					exit 1
+				fi
+			else
+				# newicon: stop parsing; remaining args go to newins.
+				break
+			fi
+			shift 1;;
+		esac
+	done
+	if [[ ${funcname} == newicon ]] ; then
+		newins "$@"
+	fi
+	) || die
+}
+
+# @FUNCTION: doicon
+# @USAGE: [options] <icons>
+# @DESCRIPTION:
+# Install icon into the icon directory /usr/share/icons or into
+# /usr/share/pixmaps if "--size" is not set.
+# This is useful in conjunction with creating desktop/menu files.
+#
+# @CODE
+# options:
+# -s, --size
+# !!! must specify to install into /usr/share/icons/... !!!
+# size of the icon, like 48 or 48x48
+# supported icon sizes are:
+# 16 22 24 32 36 48 64 72 96 128 192 256 scalable
+# -c, --context
+# defaults to "apps"
+# -t, --theme
+# defaults to "hicolor"
+#
+# icons: list of icons
+#
+# example 1: doicon foobar.png fuqbar.svg suckbar.png
+# results in: insinto /usr/share/pixmaps
+# doins foobar.png fuqbar.svg suckbar.png
+#
+# example 2: doicon -s 48 foobar.png fuqbar.png blobbar.png
+# results in: insinto /usr/share/icons/hicolor/48x48/apps
+# doins foobar.png fuqbar.png blobbar.png
+# @CODE
+doicon() {
+	# Shared implementation with newicon; _iconins dispatches on the name.
+	_iconins ${FUNCNAME} "$@"
+}
+
+# @FUNCTION: newicon
+# @USAGE: [options] <icon> <newname>
+# @DESCRIPTION:
+# Like doicon, install the specified icon as newname.
+#
+# @CODE
+# example 1: newicon foobar.png NEWNAME.png
+# results in: insinto /usr/share/pixmaps
+# newins foobar.png NEWNAME.png
+#
+# example 2: newicon -s 48 foobar.png NEWNAME.png
+# results in: insinto /usr/share/icons/hicolor/48x48/apps
+# newins foobar.png NEWNAME.png
+# @CODE
+newicon() {
+	# Shared implementation with doicon; _iconins dispatches on the name.
+	_iconins ${FUNCNAME} "$@"
+}
+
+# @FUNCTION: strip-linguas
+# @USAGE: [<allow LINGUAS>|<-i|-u> <directories of .po files>]
+# @DESCRIPTION:
+# Make sure that LINGUAS only contains languages that
+# a package can support. The first form allows you to
+# specify a list of LINGUAS. The -i builds a list of po
+# files found in all the directories and uses the
+# intersection of the lists. The -u builds a list of po
+# files found in all the directories and uses the union
+# of the lists.
+strip-linguas() {
+	local ls newls nols
+	if [[ $1 == "-i" ]] || [[ $1 == "-u" ]] ; then
+		local op=$1; shift
+		# Seed the language list from the first directory's .po files.
+		ls=$(find "$1" -name '*.po' -exec basename {} .po ';'); shift
+		local d f
+		for d in "$@" ; do
+			if [[ ${op} == "-u" ]] ; then
+				newls=${ls}
+			else
+				newls=""
+			fi
+			for f in $(find "$d" -name '*.po' -exec basename {} .po ';') ; do
+				if [[ ${op} == "-i" ]] ; then
+					# intersection: keep only langs present in both lists
+					has ${f} ${ls} && newls="${newls} ${f}"
+				else
+					# union: add langs not already collected
+					has ${f} ${ls} || newls="${newls} ${f}"
+				fi
+			done
+			ls=${newls}
+		done
+	else
+		# First form: an explicit allow-list was passed in directly.
+		ls="$@"
+	fi
+
+	nols=""
+	newls=""
+	# Split LINGUAS into supported (newls) and unsupported (nols).
+	for f in ${LINGUAS} ; do
+		if has ${f} ${ls} ; then
+			newls="${newls} ${f}"
+		else
+			nols="${nols} ${f}"
+		fi
+	done
+	[[ -n ${nols} ]] \
+		&& einfo "Sorry, but ${PN} does not support the LINGUAS:" ${nols}
+	# ${newls:1} drops the leading space accumulated above.
+	export LINGUAS=${newls:1}
+}
+
+# @FUNCTION: preserve_old_lib
+# @USAGE: <libs to preserve> [more libs]
+# @DESCRIPTION:
+# These functions are useful when a lib in your package changes ABI SONAME.
+# An example might be from libogg.so.0 to libogg.so.1. Removing libogg.so.0
+# would break packages that link against it. Most people get around this
+# by using the portage SLOT mechanism, but that is not always a relevant
+# solution, so instead you can call this from pkg_preinst. See also the
+# preserve_old_lib_notify function.
+preserve_old_lib() {
+	_eutils_eprefix_init
+	if [[ ${EBUILD_PHASE} != "preinst" ]] ; then
+		eerror "preserve_old_lib() must be called from pkg_preinst() only"
+		die "Invalid preserve_old_lib() usage"
+	fi
+	[[ -z $1 ]] && die "Usage: preserve_old_lib <library to preserve> [more libraries to preserve]"
+
+	# let portage worry about it
+	has preserve-libs ${FEATURES} && return 0
+
+	local lib dir
+	for lib in "$@" ; do
+		# Nothing to preserve if the old lib is not installed.
+		[[ -e ${EROOT}/${lib} ]] || continue
+		dir=${lib%/*}
+		dodir ${dir} || die "dodir ${dir} failed"
+		# Copy the currently-installed lib into the new image so it
+		# survives the upgrade; touch refreshes its timestamp.
+		cp "${EROOT}"/${lib} "${ED}"/${lib} || die "cp ${lib} failed"
+		touch "${ED}"/${lib}
+	done
+}
+
+# @FUNCTION: preserve_old_lib_notify
+# @USAGE: <libs to notify> [more libs]
+# @DESCRIPTION:
+# Spit helpful messages about the libraries preserved by preserve_old_lib.
+preserve_old_lib_notify() {
+	if [[ ${EBUILD_PHASE} != "postinst" ]] ; then
+		eerror "preserve_old_lib_notify() must be called from pkg_postinst() only"
+		die "Invalid preserve_old_lib_notify() usage"
+	fi
+
+	# let portage worry about it
+	has preserve-libs ${FEATURES} && return 0
+
+	_eutils_eprefix_init
+
+	# 'notice' ensures the explanatory header is printed only once.
+	local lib notice=0
+	for lib in "$@" ; do
+		[[ -e ${EROOT}/${lib} ]] || continue
+		if [[ ${notice} -eq 0 ]] ; then
+			notice=1
+			ewarn "Old versions of installed libraries were detected on your system."
+			ewarn "In order to avoid breaking packages that depend on these old libs,"
+			ewarn "the libraries are not being removed. You need to run revdep-rebuild"
+			ewarn "in order to remove these old dependencies. If you do not have this"
+			ewarn "helper program, simply emerge the 'gentoolkit' package."
+			ewarn
+		fi
+		# One ready-to-paste command per preserved library.
+		ewarn "  # revdep-rebuild --library '${lib}' && rm '${lib}'"
+	done
+}
+
+# @FUNCTION: built_with_use
+# @USAGE: [--hidden] [--missing <action>] [-a|-o] <DEPEND ATOM> <List of USE flags>
+# @DESCRIPTION:
+#
+# Deprecated: Use EAPI 2 use deps in DEPEND|RDEPEND and with has_version calls.
+#
+# A temporary hack until portage properly supports DEPENDing on USE
+# flags being enabled in packages. This will check to see if the specified
+# DEPEND atom was built with the specified list of USE flags. The
+# --missing option controls the behavior if called on a package that does
+# not actually support the defined USE flags (aka listed in IUSE).
+# The default is to abort (call die). The -a and -o flags control
+# the requirements of the USE flags. They correspond to "and" and "or"
+# logic. So the -a flag means all listed USE flags must be enabled
+# while the -o flag means at least one of the listed IUSE flags must be
+# enabled. The --hidden option is really for internal use only as it
+# means the USE flag we're checking is hidden expanded, so it won't be found
+# in IUSE like normal USE flags.
+#
+# Remember that this function isn't terribly intelligent so order of optional
+# flags matter.
+built_with_use() {
+	_eutils_eprefix_init
+	# --hidden: the flag is USE_EXPAND-style and will not appear in IUSE.
+	local hidden="no"
+	if [[ $1 == "--hidden" ]] ; then
+		hidden="yes"
+		shift
+	fi
+
+	# --missing: what to do when the package does not list the flag in
+	# IUSE — return true, return false, or die (the default).
+	local missing_action="die"
+	if [[ $1 == "--missing" ]] ; then
+		missing_action=$2
+		shift ; shift
+		case ${missing_action} in
+			true|false|die) ;;
+			*) die "unknown action '${missing_action}'";;
+		esac
+	fi
+
+	# -a (default): all flags must be set; -o: any one flag suffices.
+	local opt=$1
+	[[ ${opt:0:1} = "-" ]] && shift || opt="-a"
+
+	local PKG=$(best_version $1)
+	[[ -z ${PKG} ]] && die "Unable to resolve $1 to an installed package"
+	shift
+
+	# Installed-package metadata recorded by portage in the VDB.
+	local USEFILE=${EROOT}/var/db/pkg/${PKG}/USE
+	local IUSEFILE=${EROOT}/var/db/pkg/${PKG}/IUSE
+
+	# if the IUSE file doesn't exist, the read will error out, we need to handle
+	# this gracefully
+	if [[ ! -e ${USEFILE} ]] || [[ ! -e ${IUSEFILE} && ${hidden} == "no" ]] ; then
+		case ${missing_action} in
+			true) return 0;;
+			false) return 1;;
+			die) die "Unable to determine what USE flags $PKG was built with";;
+		esac
+	fi
+
+	if [[ ${hidden} == "no" ]] ; then
+		local IUSE_BUILT=( $(<"${IUSEFILE}") )
+		# Don't check USE_EXPAND #147237
+		local expand
+		for expand in $(echo ${USE_EXPAND} | tr '[:upper:]' '[:lower:]') ; do
+			if [[ $1 == ${expand}_* ]] ; then
+				expand=""
+				break
+			fi
+		done
+		if [[ -n ${expand} ]] ; then
+			# Strip IUSE default markers (+/-) before the membership test.
+			if ! has $1 ${IUSE_BUILT[@]#[-+]} ; then
+				case ${missing_action} in
+					true) return 0;;
+					false) return 1;;
+					die) die "$PKG does not actually support the $1 USE flag!";;
+				esac
+			fi
+		fi
+	fi
+
+	local USE_BUILT=$(<${USEFILE})
+	# -o: succeed on the first enabled flag; -a: fail on the first
+	# disabled flag; if the loop completes, -a succeeds and -o fails.
+	while [[ $# -gt 0 ]] ; do
+		if [[ ${opt} = "-o" ]] ; then
+			has $1 ${USE_BUILT} && return 0
+		else
+			has $1 ${USE_BUILT} || return 1
+		fi
+		shift
+	done
+	[[ ${opt} = "-a" ]]
+}
+
+# @FUNCTION: epunt_cxx
+# @USAGE: [dir to scan]
+# @DESCRIPTION:
+# Many configure scripts wrongly bail when a C++ compiler could not be
+# detected. If dir is not specified, then it defaults to ${S}.
+#
+# http://bugs.gentoo.org/73450
+epunt_cxx() {
+	local dir=$1
+	[[ -z ${dir} ]] && dir=${S}
+	ebegin "Removing useless C++ checks"
+	local f p any_found
+	# Try each nocxx ELT patch against every configure script found;
+	# the first patch that applies wins for that file.
+	while IFS= read -r -d '' f; do
+		for p in "${PORTDIR}"/eclass/ELT-patches/nocxx/*.patch ; do
+			if patch --no-backup-if-mismatch -p1 "${f}" "${p}" >/dev/null ; then
+				any_found=1
+				break
+			fi
+		done
+	done < <(find "${dir}" -name configure -print0)
+
+	# QA notice when no configure script needed the workaround.
+	if [[ -z ${any_found} ]]; then
+		eqawarn "epunt_cxx called unnecessarily (no C++ checks to punt)."
+	fi
+	eend 0
+}
+
+# @FUNCTION: make_wrapper
+# @USAGE: <wrapper> <target> [chdir] [libpaths] [installpath]
+# @DESCRIPTION:
+# Create a shell wrapper script named wrapper in installpath
+# (defaults to the bindir) to execute target (default of wrapper) by
+# first optionally setting LD_LIBRARY_PATH to the colon-delimited
+# libpaths followed by optionally changing directory to chdir.
+make_wrapper() {
+	_eutils_eprefix_init
+	local wrapper=$1 bin=$2 chdir=$3 libdir=$4 path=$5
+	local tmpwrapper=$(emktemp)
+
+	# Generate the wrapper script body into the temp file.
+	(
+	echo '#!/bin/sh'
+	[[ -n ${chdir} ]] && printf 'cd "%s"\n' "${EPREFIX}${chdir}"
+	if [[ -n ${libdir} ]] ; then
+		local var
+		# Darwin uses DYLD_LIBRARY_PATH rather than LD_LIBRARY_PATH.
+		if [[ ${CHOST} == *-darwin* ]] ; then
+			var=DYLD_LIBRARY_PATH
+		else
+			var=LD_LIBRARY_PATH
+		fi
+		# Append to the var if already set, otherwise create it.
+		cat <<-EOF
+		if [ "\${${var}+set}" = "set" ] ; then
+			export ${var}="\${${var}}:${EPREFIX}${libdir}"
+		else
+			export ${var}="${EPREFIX}${libdir}"
+		fi
+		EOF
+	fi
+	# We don't want to quote ${bin} so that people can pass complex
+	# things as ${bin} ... "./someprog --args"
+	printf 'exec %s "$@"\n' "${bin/#\//${EPREFIX}/}"
+	) > "${tmpwrapper}"
+	chmod go+rx "${tmpwrapper}"
+
+	# Install into the requested path, or the default bindir.
+	if [[ -n ${path} ]] ; then
+		(
+		exeinto "${path}"
+		newexe "${tmpwrapper}" "${wrapper}"
+		) || die
+	else
+		newbin "${tmpwrapper}" "${wrapper}" || die
+	fi
+}
+
+# @FUNCTION: path_exists
+# @USAGE: [-a|-o] <paths>
+# @DESCRIPTION:
+# Check if the specified paths exist. Works for all types of paths
+# (files/dirs/etc...). The -a and -o flags control the requirements
+# of the paths. They correspond to "and" and "or" logic. So the -a
+# flag means all the paths must exist while the -o flag means at least
+# one of the paths must exist. The default behavior is "and". If no
+# paths are specified, then the return value is "false".
+path_exists() {
+	local opt=$1
+	[[ ${opt} == -[ao] ]] && shift || opt="-a"
+
+	# no paths -> return false
+	# same behavior as: [[ -e "" ]]
+	[[ $# -eq 0 ]] && return 1
+
+	# r counts how many of the given paths do NOT exist.
+	local p r=0
+	for p in "$@" ; do
+		[[ -e ${p} ]]
+		: $(( r += $? ))
+	done
+
+	# -a: succeed only when no path was missing (r == 0).
+	# -o: succeed unless every path was missing (r < $#).
+	case ${opt} in
+		-a) return $(( r != 0 )) ;;
+		-o) return $(( r == $# )) ;;
+	esac
+}
+
+# @FUNCTION: in_iuse
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Determines whether the given flag is in IUSE. Strips IUSE default prefixes
+# as necessary.
+#
+# Note that this function should not be used in the global scope.
+in_iuse() {
+	debug-print-function ${FUNCNAME} "${@}"
+	[[ ${#} -eq 1 ]] || die "Invalid args to ${FUNCNAME}()"
+
+	local flag=${1}
+	local liuse=( ${IUSE} )
+
+	# Strip the IUSE default markers (+/-) before the membership test.
+	has "${flag}" "${liuse[@]#[+-]}"
+}
+
+# @FUNCTION: use_if_iuse
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Return true if the given flag is in USE and IUSE.
+#
+# Note that this function should not be used in the global scope.
+use_if_iuse() {
+	# False when the flag is not in IUSE at all; otherwise defer to use().
+	in_iuse $1 || return 1
+	use $1
+}
+
+# @FUNCTION: usex
+# @USAGE: <USE flag> [true output] [false output] [true suffix] [false suffix]
+# @DESCRIPTION:
+# Proxy to declare usex for package managers or EAPIs that do not provide it
+# and use the package manager implementation when available (i.e. EAPI >= 5).
+# If USE flag is set, echo [true output][true suffix] (defaults to "yes"),
+# otherwise echo [false output][false suffix] (defaults to "no").
+# Only define the fallback for EAPIs 0-4; EAPI 5+ provides usex itself.
+if has "${EAPI:-0}" 0 1 2 3 4; then
+	usex() { use "$1" && echo "${2-yes}$4" || echo "${3-no}$5" ; } #382963
+fi
+
+# @FUNCTION: prune_libtool_files
+# @USAGE: [--all|--modules]
+# @DESCRIPTION:
+# Locate unnecessary libtool files (.la) and libtool static archives
+# (.a) and remove them from installation image.
+#
+# By default, .la files are removed whenever the static linkage can
+# either be performed using pkg-config or doesn't introduce additional
+# flags.
+#
+# If '--modules' argument is passed, .la files for modules (plugins) are
+# removed as well. This is usually useful when the package installs
+# plugins and the plugin loader does not use .la files.
+#
+# If '--all' argument is passed, all .la files are removed without
+# performing any heuristic on them. You shouldn't ever use that,
+# and instead report a bug in the algorithm instead.
+#
+# The .a files are only removed whenever corresponding .la files state
+# that they should not be linked to, i.e. whenever these files
+# correspond to plugins.
+#
+# Note: if your package installs both static libraries and .pc files
+# which use variable substitution for -l flags, you need to add
+# pkg-config to your DEPEND.
+prune_libtool_files() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local removing_all removing_modules opt
+	_eutils_eprefix_init
+	for opt; do
+		case "${opt}" in
+			--all)
+				removing_all=1
+				removing_modules=1
+				;;
+			--modules)
+				removing_modules=1
+				;;
+			*)
+				die "Invalid argument to ${FUNCNAME}(): ${opt}"
+		esac
+	done
+
+	local f
+	# Candidates are collected first and removed in one batch at the end.
+	local queue=()
+	while IFS= read -r -d '' f; do # for all .la files
+		local archivefile=${f/%.la/.a}
+
+		# The following check is done by libtool itself.
+		# It helps us avoid removing random files which match '*.la',
+		# see bug #468380.
+		if ! sed -n -e '/^# Generated by .*libtool/q0;4q1' "${f}"; then
+			continue
+		fi
+
+		[[ ${f} != ${archivefile} ]] || die 'regex sanity check failed'
+		local reason= pkgconfig_scanned=
+		local snotlink=$(sed -n -e 's:^shouldnotlink=::p' "${f}")
+
+		if [[ ${snotlink} == yes ]]; then
+
+			# Remove static libs we're not supposed to link against.
+			if [[ -f ${archivefile} ]]; then
+				einfo "Removing unnecessary ${archivefile#${D%/}} (static plugin)"
+				queue+=( "${archivefile}" )
+			fi
+
+			# The .la file may be used by a module loader, so avoid removing it
+			# unless explicitly requested.
+			if [[ ${removing_modules} ]]; then
+				reason='module'
+			fi
+
+		else
+
+			# Remove .la files when:
+			# - user explicitly wants us to remove all .la files,
+			# - respective static archive doesn't exist,
+			# - they are covered by a .pc file already,
+			# - they don't provide any new information (no libs & no flags).
+
+			if [[ ${removing_all} ]]; then
+				reason='requested'
+			elif [[ ! -f ${archivefile} ]]; then
+				reason='no static archive'
+			elif [[ ! $(sed -nre \
+					"s/^(dependency_libs|inherited_linker_flags)='(.*)'$/\2/p" \
+					"${f}") ]]; then
+				reason='no libs & flags'
+			else
+				# Lazily build the .pc-covered list once, on first need.
+				if [[ ! ${pkgconfig_scanned} ]]; then
+					# Create a list of all .pc-covered libs.
+					local pc_libs=()
+					if [[ ! ${removing_all} ]]; then
+						local pc
+						local tf=${T}/prune-lt-files.pc
+						local pkgconf=$(tc-getPKG_CONFIG)
+
+						while IFS= read -r -d '' pc; do # for all .pc files
+							local arg libs
+
+							# Use pkg-config if available (and works),
+							# fallback to sed.
+							if ${pkgconf} --exists "${pc}" &>/dev/null; then
+								sed -e '/^Requires:/d' "${pc}" > "${tf}"
+								libs=$(${pkgconf} --libs "${tf}")
+							else
+								libs=$(sed -ne 's/^Libs://p' "${pc}")
+							fi
+
+							for arg in ${libs}; do
+								if [[ ${arg} == -l* ]]; then
+									if [[ ${arg} == '*$*' ]]; then
+										eqawarn "${FUNCNAME}: variable substitution likely failed in ${pc}"
+										eqawarn "(arg: ${arg})"
+										eqawarn "Most likely, you need to add virtual/pkgconfig to DEPEND."
+									fi
+
+									pc_libs+=( lib${arg#-l}.la )
+								fi
+							done
+						done < <(find "${D}" -type f -name '*.pc' -print0)
+
+						rm -f "${tf}"
+					fi
+
+					pkgconfig_scanned=1
+				fi # pkgconfig_scanned
+
+				has "${f##*/}" "${pc_libs[@]}" && reason='covered by .pc'
+			fi # removal due to .pc
+
+		fi # shouldnotlink==no
+
+		if [[ ${reason} ]]; then
+			einfo "Removing unnecessary ${f#${D%/}} (${reason})"
+			queue+=( "${f}" )
+		fi
+	done < <(find "${ED}" -xtype f -name '*.la' -print0)
+
+	if [[ ${queue[@]} ]]; then
+		rm -f "${queue[@]}"
+	fi
+}
+
+# @FUNCTION: einstalldocs
+# @DESCRIPTION:
+# Install documentation using DOCS and HTML_DOCS.
+#
+# If DOCS is declared and non-empty, all files listed in it are
+# installed. The files must exist, otherwise the function will fail.
+# In EAPI 4 and subsequent EAPIs DOCS may specify directories as well,
+# in other EAPIs using directories is unsupported.
+#
+# If DOCS is not declared, the files matching patterns given
+# in the default EAPI implementation of src_install will be installed.
+# If this is undesired, DOCS can be set to empty value to prevent any
+# documentation from being installed.
+#
+# If HTML_DOCS is declared and non-empty, all files and/or directories
+# listed in it are installed as HTML docs (using dohtml).
+#
+# Both DOCS and HTML_DOCS can either be an array or a whitespace-
+# separated list. Whenever directories are allowed, '<directory>/.' may
+# be specified in order to install all files within the directory
+# without creating a sub-directory in docdir.
+#
+# Passing additional options to dodoc and dohtml is not supported.
+# If you needed such a thing, you need to call those helpers explicitly.
+einstalldocs() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# dodoc only supports -r (recursive/directories) from EAPI 4 on.
+	local dodoc_opts=-r
+	has ${EAPI} 0 1 2 3 && dodoc_opts=
+
+	if ! declare -p DOCS &>/dev/null ; then
+		# DOCS not declared at all: install the default doc files,
+		# skipping any that are missing or empty (-s).
+		local d
+		for d in README* ChangeLog AUTHORS NEWS TODO CHANGES \
+				THANKS BUGS FAQ CREDITS CHANGELOG ; do
+			if [[ -s ${d} ]] ; then
+				dodoc "${d}" || die
+			fi
+		done
+	elif [[ $(declare -p DOCS) == "declare -a"* ]] ; then
+		# DOCS declared as an array.
+		if [[ ${DOCS[@]} ]] ; then
+			dodoc ${dodoc_opts} "${DOCS[@]}" || die
+		fi
+	else
+		# DOCS declared as a whitespace-separated string.
+		if [[ ${DOCS} ]] ; then
+			dodoc ${dodoc_opts} ${DOCS} || die
+		fi
+	fi
+
+	# Same array-vs-string handling for HTML_DOCS, installed via dohtml.
+	if [[ $(declare -p HTML_DOCS 2>/dev/null) == "declare -a"* ]] ; then
+		if [[ ${HTML_DOCS[@]} ]] ; then
+			dohtml -r "${HTML_DOCS[@]}" || die
+		fi
+	else
+		if [[ ${HTML_DOCS} ]] ; then
+			dohtml -r ${HTML_DOCS} || die
+		fi
+	fi
+
+	return 0
+}
+
+check_license() { die "you no longer need this as portage supports ACCEPT_LICENSE itself"; }
+
+# @FUNCTION: optfeature
+# @USAGE: <short description> <package atom to match> [other atoms]
+# @DESCRIPTION:
+# Print out a message suggesting an optional package (or packages) which
+# provide the described functionality
+#
+# The following snippet would suggest app-misc/foo for optional foo support,
+# app-misc/bar or app-misc/baz[bar] for optional bar support
+# and either both app-misc/a and app-misc/b or app-misc/c for alphabet support.
+# @CODE
+# optfeature "foo support" app-misc/foo
+# optfeature "bar support" app-misc/bar app-misc/baz[bar]
+# optfeature "alphabet support" "app-misc/a app-misc/b" app-misc/c
+# @CODE
+optfeature() {
+	debug-print-function ${FUNCNAME} "$@"
+	local i j msg
+	local desc=$1
+	local flag=0
+	shift
+	# Each remaining argument is an alternative: a space-separated group
+	# of atoms that must ALL be installed. flag=1 once any alternative
+	# is fully satisfied.
+	for i; do
+		for j in ${i}; do
+			if has_version "${j}"; then
+				flag=1
+			else
+				flag=0
+				break
+			fi
+		done
+		if [[ ${flag} -eq 1 ]]; then
+			break
+		fi
+	done
+	if [[ ${flag} -eq 0 ]]; then
+		# Nothing satisfied: print one suggestion line per alternative.
+		for i; do
+			msg=" "
+			for j in ${i}; do
+				msg+=" ${j} and"
+			done
+			# Chop the trailing " and" (4 chars) before appending desc.
+			msg="${msg:0: -4} for ${desc}"
+			elog "${msg}"
+		done
+	fi
+}
+
+fi
diff --git a/eclass/fcaps.eclass b/eclass/fcaps.eclass
new file mode 100644
index 000000000000..046043c031e7
--- /dev/null
+++ b/eclass/fcaps.eclass
@@ -0,0 +1,217 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: fcaps.eclass
+# @MAINTAINER:
+# Constanze Hausner <constanze@gentoo.org>
+# base-system@gentoo.org
+# @BLURB: function to set POSIX file-based capabilities
+# @DESCRIPTION:
+# This eclass provides a function to set file-based capabilities on binaries.
+# This is not the same as USE=caps which controls runtime capability changes,
+# often via packages like libcap.
+#
+# Due to probable capability-loss on moving or copying, this happens in
+# pkg_postinst-phase (at least for now).
+#
+# @EXAMPLE:
+# You can manually set the caps on ping and ping6 by doing:
+# @CODE
+# pkg_postinst() {
+# fcaps cap_net_raw bin/ping bin/ping6
+# }
+# @CODE
+#
+# Or set it via the global ebuild var FILECAPS:
+# @CODE
+# FILECAPS=(
+# cap_net_raw bin/ping bin/ping6
+# )
+# @CODE
+
+# Standard one-shot inclusion guard for eclasses.
+if [[ -z ${_FCAPS_ECLASS} ]]; then
+_FCAPS_ECLASS=1
+
+# filecaps defaults to enabled so capability setting is attempted by default.
+IUSE="+filecaps"
+
+# We can't use libcap-ng atm due to #471414.
+DEPEND="filecaps? ( sys-libs/libcap )"
+
+# @ECLASS-VARIABLE: FILECAPS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array of fcap arguments to use to automatically execute fcaps. See that
+# function for more details.
+#
+# All args are consumed until the '--' marker is found. So if you have:
+# @CODE
+# FILECAPS=( moo cow -- fat cat -- chubby penguin )
+# @CODE
+#
+# This will end up executing:
+# @CODE
+# fcaps moo cow
+# fcaps fat cat
+# fcaps chubby penguin
+# @CODE
+#
+# Note: If you override pkg_postinst, you must call fcaps_pkg_postinst yourself.
+
+# @FUNCTION: fcaps
+# @USAGE: [-o <owner>] [-g <group>] [-m <mode>] [-M <caps mode>] <capabilities> <file[s]>
+# @DESCRIPTION:
+# Sets the specified capabilities on the specified files.
+#
+# The caps option takes the form as expected by the cap_from_text(3) man page.
+# If no action is specified, then "=ep" will be used as a default.
+#
+# If the file is a relative path (e.g. bin/foo rather than /bin/foo), then the
+# appropriate path var ($D/$ROOT/etc...) will be prefixed based on the current
+# ebuild phase.
+#
+# The caps mode (default 711) is used to set the permission on the file if
+# capabilities were properly set on the file.
+#
+# If the system is unable to set capabilities, it will use the specified user,
+# group, and mode (presumably to make the binary set*id). The defaults there
+# are root:0 and 4711. Otherwise, the ownership and permissions will be
+# unchanged.
+fcaps() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Process the user options first.
+	local owner='root'
+	local group='0'
+	local mode='4711'
+	local caps_mode='711'
+
+	while [[ $# -gt 0 ]] ; do
+		case $1 in
+		-o) owner=$2; shift;;
+		-g) group=$2; shift;;
+		-m) mode=$2; shift;;
+		-M) caps_mode=$2; shift;;
+		*) break;;
+		esac
+		shift
+	done
+
+	[[ $# -lt 2 ]] && die "${FUNCNAME}: wrong arg count"
+
+	# First non-option arg is the capability string; default the action
+	# to "=ep" when the caller gave bare capability names.
+	local caps=$1
+	[[ ${caps} == *[-=+]* ]] || caps+="=ep"
+	shift
+
+	# Pick the path prefix appropriate for the current ebuild phase.
+	local root
+	case ${EBUILD_PHASE} in
+	compile|install|preinst)
+		root=${ED:-${D}}
+		;;
+	postinst)
+		root=${EROOT:-${ROOT}}
+		;;
+	esac
+
+	# Process every file!
+	local file
+	for file ; do
+		[[ ${file} != /* ]] && file="${root}${file}"
+
+		if use filecaps ; then
+			# Try to set capabilities.  Ignore errors when the
+			# fs doesn't support it, but abort on all others.
+			debug-print "${FUNCNAME}: setting caps '${caps}' on '${file}'"
+
+			# If everything goes well, we don't want the file to be readable
+			# by people.
+			chmod ${caps_mode} "${file}" || die
+
+			# Set/verify funcs for sys-libs/libcap.
+			_libcap()        { setcap "${caps}" "${file}" ; }
+			_libcap_verify() { setcap -v "${caps}" "${file}" >/dev/null ; }
+
+			# Set/verify funcs for sys-libs/libcap-ng.
+			# Note: filecap only supports =ep mode.
+			# It also expects a different form:
+			#  setcap cap_foo,cap_bar
+			#  filecap foo bar
+			_libcap_ng() {
+				# Strip the "=ep" action and the "cap_" prefixes to
+				# translate setcap syntax into filecap syntax.
+				local caps=",${caps%=ep}"
+				filecap "${file}" "${caps//,cap_}"
+			}
+			_libcap_ng_verify() {
+				# libcap-ng has a crappy interface
+				local rcaps icaps caps=",${caps%=ep}"
+				# Parse filecap's fixed-width output: skip the file-name
+				# column, split the comma list, and sort for comparison.
+				rcaps=$(filecap "${file}" | \
+					sed -nr \
+						-e "s:^.{${#file}} +::" \
+						-e 's:, +:\n:g' \
+						-e 2p | \
+					LC_ALL=C sort)
+				[[ ${PIPESTATUS[0]} -eq 0 ]] || return 1
+				icaps=$(echo "${caps//,cap_}" | LC_ALL=C sort)
+				[[ ${rcaps} == ${icaps} ]]
+			}
+
+			local out cmd notfound=0
+			for cmd in _libcap _libcap_ng ; do
+				if ! out=$(LC_ALL=C ${cmd} 2>&1) ; then
+					case ${out} in
+					*"command not found"*)
+						: $(( ++notfound ))
+						continue
+						;;
+					*"Operation not supported"*)
+						local fstype=$(stat -f -c %T "${file}")
+						ewarn "Could not set caps on '${file}' due to missing filesystem support:"
+						ewarn "* enable XATTR support for '${fstype}' in your kernel (if configurable)"
+						ewarn "* mount the fs with the user_xattr option (if not the default)"
+						ewarn "* enable the relevant FS_SECURITY option (if configurable)"
+						break
+						;;
+					*)
+						eerror "Setting caps '${caps}' on file '${file}' failed:"
+						eerror "${out}"
+						die "could not set caps"
+						;;
+					esac
+				else
+					# Sanity check that everything took.
+					${cmd}_verify || die "Checking caps '${caps}' on '${file}' failed"
+
+					# Everything worked.  Move on to the next file.
+					continue 2
+				fi
+			done
+			# Warn once per emerge when neither libcap nor libcap-ng is present.
+			if [[ ${notfound} -eq 2 ]] && [[ -z ${_FCAPS_WARNED} ]] ; then
+				_FCAPS_WARNED="true"
+				ewarn "Could not find cap utils; make sure libcap or libcap-ng is available."
+			fi
+		fi
+
+		# If we're still here, setcaps failed.  Fall back to set*id perms.
+		debug-print "${FUNCNAME}: setting owner/mode on '${file}'"
+		chown "${owner}:${group}" "${file}" || die
+		chmod ${mode} "${file}" || die
+	done
+}
+
+# @FUNCTION: fcaps_pkg_postinst
+# @DESCRIPTION:
+# Process the FILECAPS array.
+fcaps_pkg_postinst() {
+	local arg args=()
+	# Appending a trailing "--" sentinel flushes the final group, so the
+	# last entry in FILECAPS does not need its own terminator.
+	for arg in "${FILECAPS[@]}" "--" ; do
+		if [[ ${arg} == "--" ]] ; then
+			fcaps "${args[@]}"
+			args=()
+		else
+			args+=( "${arg}" )
+		fi
+	done
+}
+
+EXPORT_FUNCTIONS pkg_postinst
+
+fi
diff --git a/eclass/fdo-mime.eclass b/eclass/fdo-mime.eclass
new file mode 100644
index 000000000000..8e0b56c792cb
--- /dev/null
+++ b/eclass/fdo-mime.eclass
@@ -0,0 +1,38 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: fdo-mime.eclass
+# @MAINTAINER:
+# freedesktop-bugs@gentoo.org
+# @AUTHOR:
+# Original author: foser <foser@gentoo.org>
+# @BLURB: Utility eclass to update the desktop mime info as laid out in the freedesktop specs & implementations
+
+# @FUNCTION: fdo-mime_desktop_database_update
+# @DESCRIPTION:
+# Updates the desktop database.
+# Generates a list of mimetypes linked to applications that can handle them
fdo-mime_desktop_database_update() {
+	# Old EAPIs without prefix support: normalize EPREFIX/EROOT ourselves.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	if [ -x "${EPREFIX}/usr/bin/update-desktop-database" ]
+	then
+		einfo "Updating desktop mime database ..."
+		# NOTE(review): no '/' between ${EROOT} and usr -- assumes EROOT
+		# always ends in a slash; confirm against PMS variable semantics.
+		"${EPREFIX}/usr/bin/update-desktop-database" -q "${EROOT}usr/share/applications"
+	fi
+}
+
+# @FUNCTION: fdo-mime_mime_database_update
+# @DESCRIPTION:
+# Update the mime database.
+# Creates a general list of mime types from several sources
+fdo-mime_mime_database_update() {
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	if [ -x "${EPREFIX}/usr/bin/update-mime-database" ]
+	then
+		einfo "Updating shared mime info database ..."
+		"${EPREFIX}/usr/bin/update-mime-database" "${EROOT}usr/share/mime"
+	fi
+}
diff --git a/eclass/findlib.eclass b/eclass/findlib.eclass
new file mode 100644
index 000000000000..e2d9b8fb7a1c
--- /dev/null
+++ b/eclass/findlib.eclass
@@ -0,0 +1,59 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: findlib.eclass
+# @MAINTAINER:
+# ml@gentoo.org
+# @AUTHOR:
+# Original author: Matthieu Sozeau <mattam@gentoo.org> (retired)
+# @BLURB: ocamlfind (a.k.a. findlib) eclass
+# @DESCRIPTION:
+# ocamlfind (a.k.a. findlib) eclass
+
+
+
+# From this findlib version there is proper stublibs support.
+DEPEND=">=dev-ml/findlib-1.0.4-r1"
+[[ ${FINDLIB_USE} ]] && DEPEND="${FINDLIB_USE}? ( ${DEPEND} )"
+
+# Die unless the ocamlfind executable is present under ${EPREFIX}.
+# Called before any findlib-based install step.
+check_ocamlfind() {
+	if [ ! -x "${EPREFIX}"/usr/bin/ocamlfind ]
+	then
+		eerror "In findlib.eclass: could not find the ocamlfind executable"
+		eerror "Please report this bug on gentoo's bugzilla, assigning to ml@gentoo.org"
+		# Fix typo in the fatal error message ("executabled" -> "executable").
+		die "ocamlfind executable not found"
+	fi
+}
+
+# @FUNCTION: findlib_src_preinst
+# @DESCRIPTION:
+# Prepare the image for a findlib installation.
+# We use the stublibs style, so no ld.conf needs to be
+# updated when a package installs C shared libraries.
+findlib_src_preinst() {
+	# Old EAPIs without prefix support: normalize EPREFIX/ED ourselves.
+	# NOTE(review): the two lines use different negation styles
+	# ("! use prefix" vs "use !prefix") -- same effect, worth unifying.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	has "${EAPI:-0}" 0 1 2 && use !prefix && ED="${D}"
+	check_ocamlfind
+
+	# destdir is the ocaml sitelib
+	local destdir=`ocamlfind printconf destdir`
+
+	# strip off prefix
+	destdir=${destdir#${EPREFIX}}
+
+	dodir ${destdir} || die "dodir failed"
+	# Redirect ocamlfind installs into the image directory.
+	export OCAMLFIND_DESTDIR=${ED}${destdir}
+
+	# stublibs style
+	dodir ${destdir}/stublibs || die "dodir failed"
+	export OCAMLFIND_LDCONF=ignore
+}
+
+# @FUNCTION: findlib_src_install
+# @DESCRIPTION:
+# Install with a properly setup findlib
+findlib_src_install() {
+	findlib_src_preinst
+	# Extra args are forwarded to make, before the "install" target.
+	make DESTDIR="${D}" "$@" install || die "make failed"
+}
diff --git a/eclass/fixheadtails.eclass b/eclass/fixheadtails.eclass
new file mode 100644
index 000000000000..2bac496375b5
--- /dev/null
+++ b/eclass/fixheadtails.eclass
@@ -0,0 +1,44 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: fixheadtails.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @AUTHOR:
+# Original author John Mylchreest <johnm@gentoo.org>
+# @BLURB: functions to replace obsolete head/tail with POSIX compliant ones
+
+DEPEND=">=sys-apps/sed-4"
+
+# Rewrite one file in place, converting obsolete "head -N" / "tail ±N"
+# syntax into the POSIX "head -n N" / "tail -n N" / "tail -c N" forms.
+_do_sed_fix() {
+	einfo " - fixed $1"
+	sed -i \
+		-e 's/head \+-\([0-9]\)/head -n \1/g' \
+		-e 's/tail \+\([-+][0-9]\+\)c/tail -c \1/g' \
+		-e 's/tail \+\([-+][0-9]\)/tail -n \1/g' ${1} || \
+	die "sed ${1} failed"
+}
+
+# @FUNCTION: ht_fix_file
+# @USAGE: <files>
+# @DESCRIPTION:
+# Fix all the specified files.
+ht_fix_file() {
+	local i
+	einfo "Replacing obsolete head/tail with POSIX compliant ones"
+	for i in "$@" ; do
+		_do_sed_fix "$i"
+	done
+}
+
+# @FUNCTION: ht_fix_all
+# @DESCRIPTION:
+# Find and fix all files in the current directory as needed.
+ht_fix_all() {
+	local MATCHES
+	# Grep recursively for old-style usages; -s suppresses unreadable-file noise.
+	MATCHES=$(grep -l -s -i -R -e "head -[ 0-9]" -e "tail [+-][ 0-9]" * | sort -u)
+	[[ -n ${MATCHES} ]] \
+		&& ht_fix_file ${MATCHES} \
+		|| einfo "No need for ht_fix_all anymore !"
+}
diff --git a/eclass/flag-o-matic.eclass b/eclass/flag-o-matic.eclass
new file mode 100644
index 000000000000..6a9acfb919ae
--- /dev/null
+++ b/eclass/flag-o-matic.eclass
@@ -0,0 +1,673 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: flag-o-matic.eclass
+# @MAINTAINER:
+# toolchain@gentoo.org
+# @BLURB: common functions to manipulate and query toolchain flags
+# @DESCRIPTION:
+# This eclass contains a suite of functions to help developers sanely
+# and safely manage toolchain flags in their builds.
+
+if [[ -z ${_FLAG_O_MATIC_ECLASS} ]]; then
+_FLAG_O_MATIC_ECLASS=1
+
+inherit eutils toolchain-funcs multilib
+
+# Return all the flag variables that our high level funcs operate on.
+# Brace expansion yields the variable *names* (CFLAGS, CPPFLAGS, ...),
+# not their values.
+all-flag-vars() {
+	echo {C,CPP,CXX,CCAS,F,FC,LD}FLAGS
+}
+
+# {C,CPP,CXX,CCAS,F,FC,LD}FLAGS that we allow in strip-flags
+# Note: shell globs and character lists are allowed
+setup-allowed-flags() {
+	ALLOWED_FLAGS="-pipe"
+	ALLOWED_FLAGS+=" -O -O1 -O2 -Os -Og -mcpu -march -mtune"
+	ALLOWED_FLAGS+=" -fstack-protector* -fsanitize=*"
+	ALLOWED_FLAGS+=" -fbounds-check -fbounds-checking -fno-strict-overflow"
+	ALLOWED_FLAGS+=" -fno-PIE -fno-pie -nopie -fno-unit-at-a-time"
+	# Fix: "gstabs" was missing its leading dash -- a bare "gstabs" token can
+	# never match the real compiler flag, so -gstabs was getting stripped.
+	ALLOWED_FLAGS+=" -g -g[0-9] -ggdb -ggdb[0-9] -gdwarf-* -gstabs -gstabs+"
+	ALLOWED_FLAGS+=" -fno-ident -fpermissive -frecord-gcc-switches"
+	ALLOWED_FLAGS+=" -fdiagnostics*"
+	ALLOWED_FLAGS+=" -W* -w"
+
+	# allow a bunch of flags that negate features / control ABI
+	ALLOWED_FLAGS+=" -fno-stack-protector* -fabi-version=* \
+		-fno-strict-aliasing -fno-bounds-check -fno-bounds-checking -fstrict-overflow \
+		-fno-omit-frame-pointer -fno-builtin*"
+	ALLOWED_FLAGS+=" -mregparm -mno-app-regs -mapp-regs -mno-mmx -mno-sse \
+		-mno-sse2 -mno-sse3 -mno-ssse3 -mno-sse4 -mno-sse4.1 -mno-sse4.2 \
+		-mno-avx -mno-aes -mno-pclmul -mno-sse4a -mno-3dnow -mno-popcnt \
+		-mno-abm -mips1 -mips2 -mips3 -mips4 -mips32 -mips64 -mips16 -mplt \
+		-msoft-float -mno-soft-float -mhard-float -mno-hard-float -mfpu \
+		-mieee -mieee-with-inexact -mschedule -mfloat-gprs -mspe -mno-spe \
+		-mtls-direct-seg-refs -mno-tls-direct-seg-refs -mflat -mno-flat \
+		-mno-faster-structs -mfaster-structs -m32 -m64 -mx32 -mabi \
+		-mlittle-endian -mbig-endian -EL -EB -fPIC -mlive-g0 -mcmodel \
+		-mstack-bias -mno-stack-bias -msecure-plt -m*-toc -mfloat-abi \
+		-mfix-r10000 -mno-fix-r10000 -D* -U*"
+
+	# 4.5
+	ALLOWED_FLAGS+=" -mno-fma4 -mno-movbe -mno-xop -mno-lwp"
+	# 4.6
+	ALLOWED_FLAGS+=" -mno-fsgsbase -mno-rdrnd -mno-f16c -mno-bmi -mno-tbm"
+	# 4.7
+	ALLOWED_FLAGS+=" -mno-avx2 -mno-bmi2 -mno-fma -mno-lzcnt"
+	# 4.8
+	ALLOWED_FLAGS+=" -mno-fxsr -mno-rtm -mno-xsave -mno-xsaveopt"
+	# 4.9
+	ALLOWED_FLAGS+=" -mno-avx512cd -mno-avx512er -mno-avx512f -mno-avx512pf -mno-sha"
+
+	# CPPFLAGS and LDFLAGS
+	ALLOWED_FLAGS+=" -I* -L* -R* -Wl,*"
+
+	export ALLOWED_FLAGS
+	return 0
+}
+
+# inverted filters for hardened compiler.  This is trying to unpick
+# the hardened compiler defaults.
+_filter-hardened() {
+	local f
+	for f in "$@" ; do
+		case "${f}" in
+			# Ideally we should only concern ourselves with PIE flags,
+			# not -fPIC or -fpic, but too many places filter -fPIC without
+			# thinking about -fPIE.
+			-fPIC|-fpic|-fPIE|-fpie|-Wl,pie|-pie)
+				gcc-specs-pie || continue
+				is-flagq -nopie || append-flags -nopie;;
+			-fstack-protector)
+				gcc-specs-ssp || continue
+				is-flagq -fno-stack-protector || append-flags $(test-flags -fno-stack-protector);;
+			-fstack-protector-all)
+				gcc-specs-ssp-to-all || continue
+				is-flagq -fno-stack-protector-all || append-flags $(test-flags -fno-stack-protector-all);;
+			-fno-strict-overflow)
+				gcc-specs-nostrict || continue
+				is-flagq -fstrict-overflow || append-flags $(test-flags -fstrict-overflow);;
+		esac
+	done
+}
+
+# Remove occurrences of strings from variable given in $1
+# Strings removed are matched as globs, so for example
+# '-O*' would remove -O1, -O2 etc.
+_filter-var() {
+	local f x var=$1 new=()
+	shift
+
+	for f in ${!var} ; do
+		for x in "$@" ; do
+			# Note this should work with globs like -O*
+			[[ ${f} == ${x} ]] && continue 2
+		done
+		new+=( "${f}" )
+	done
+	# eval is required for assigning through a dynamic variable name.
+	eval export ${var}=\""${new[*]}"\"
+}
+
+# @FUNCTION: filter-flags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Remove particular <flags> from {C,CPP,CXX,CCAS,F,FC,LD}FLAGS.  Accepts shell globs.
+filter-flags() {
+	# Compensate for hardened-compiler defaults before stripping the flag.
+	_filter-hardened "$@"
+	local v
+	for v in $(all-flag-vars) ; do
+		_filter-var ${v} "$@"
+	done
+	return 0
+}
+
+# @FUNCTION: filter-lfs-flags
+# @DESCRIPTION:
+# Remove flags that enable Large File Support.
+filter-lfs-flags() {
+	[[ $# -ne 0 ]] && die "filter-lfs-flags takes no arguments"
+	# http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html
+	# _LARGEFILE_SOURCE: enable support for new LFS funcs (ftello/etc...)
+	# _LARGEFILE64_SOURCE: enable support for 64bit variants (off64_t/fseeko64/etc...)
+	# _FILE_OFFSET_BITS: default to 64bit variants (off_t is defined as off64_t)
+	filter-flags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
+}
+
+# @FUNCTION: filter-ldflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Remove particular <flags> from LDFLAGS.  Accepts shell globs.
+filter-ldflags() {
+	_filter-var LDFLAGS "$@"
+	return 0
+}
+
+# @FUNCTION: append-cppflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to the current CPPFLAGS.
+append-cppflags() {
+	[[ $# -eq 0 ]] && return 0
+	export CPPFLAGS+=" $*"
+	return 0
+}
+
+# @FUNCTION: append-cflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to the current CFLAGS.  If a flag might not be supported
+# with different compilers (or versions), then use test-flags-CC like so:
+# @CODE
+# append-cflags $(test-flags-CC -funky-flag)
+# @CODE
+append-cflags() {
+	[[ $# -eq 0 ]] && return 0
+	# Do not do automatic flag testing ourselves. #417047
+	export CFLAGS+=" $*"
+	return 0
+}
+
+# @FUNCTION: append-cxxflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to the current CXXFLAGS.  If a flag might not be supported
+# with different compilers (or versions), then use test-flags-CXX like so:
+# @CODE
+# append-cxxflags $(test-flags-CXX -funky-flag)
+# @CODE
+append-cxxflags() {
+	[[ $# -eq 0 ]] && return 0
+	# Do not do automatic flag testing ourselves. #417047
+	export CXXFLAGS+=" $*"
+	return 0
+}
+
+# @FUNCTION: append-fflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to the current {F,FC}FLAGS.  If a flag might not be supported
+# with different compilers (or versions), then use test-flags-F77 like so:
+# @CODE
+# append-fflags $(test-flags-F77 -funky-flag)
+# @CODE
+append-fflags() {
+	[[ $# -eq 0 ]] && return 0
+	# Do not do automatic flag testing ourselves. #417047
+	# Both the F77 and F9x flag variables get the addition.
+	export FFLAGS+=" $*"
+	export FCFLAGS+=" $*"
+	return 0
+}
+
+# @FUNCTION: append-lfs-flags
+# @DESCRIPTION:
+# Add flags that enable Large File Support.
+append-lfs-flags() {
+	[[ $# -ne 0 ]] && die "append-lfs-flags takes no arguments"
+	# see comments in filter-lfs-flags func for meaning of these
+	append-cppflags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
+}
+
+# @FUNCTION: append-ldflags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to the current LDFLAGS.  Warns (QA) when a -l library
+# link instruction is passed; libraries belong in LIBS, not LDFLAGS.
+append-ldflags() {
+	[[ $# -eq 0 ]] && return 0
+	local flag
+	for flag in "$@"; do
+		[[ ${flag} == -l* ]] && \
+			eqawarn "Appending a library link instruction (${flag}); libraries to link to should not be passed through LDFLAGS"
+	done
+
+	export LDFLAGS="${LDFLAGS} $*"
+	return 0
+}
+
+# @FUNCTION: append-flags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Add extra <flags> to your current {C,CXX,F,FC}FLAGS.
+append-flags() {
+	[[ $# -eq 0 ]] && return 0
+	# QA checks: preprocessor and linker flags have dedicated helpers.
+	case " $* " in
+	*' '-[DIU]*) eqawarn 'please use append-cppflags for preprocessor flags' ;;
+	*' '-L*|\
+	*' '-Wl,*)   eqawarn 'please use append-ldflags for linker flags' ;;
+	esac
+	append-cflags "$@"
+	append-cxxflags "$@"
+	append-fflags "$@"
+	return 0
+}
+
+# @FUNCTION: replace-flags
+# @USAGE: <old> <new>
+# @DESCRIPTION:
+# Replace the <old> flag with <new>.  Accepts shell globs for <old>.
+replace-flags() {
+	[[ $# != 2 ]] && die "Usage: replace-flags <old flag> <new flag>"
+
+	local f var new
+	for var in $(all-flag-vars) ; do
+		# Looping over the flags instead of using a global
+		# substitution ensures that we're working with flag atoms.
+		# Otherwise globs like -O* have the potential to wipe out the
+		# list of flags.
+		new=()
+		for f in ${!var} ; do
+			# Note this should work with globs like -O*
+			[[ ${f} == ${1} ]] && f=${2}
+			new+=( "${f}" )
+		done
+		eval export ${var}=\""${new[*]}"\"
+	done
+
+	return 0
+}
+
+# @FUNCTION: replace-cpu-flags
+# @USAGE: <old> <new>
+# @DESCRIPTION:
+# Replace cpu flags (like -march/-mcpu/-mtune) that select the <old> cpu
+# with flags that select the <new> cpu.  Accepts shell globs for <old>.
+replace-cpu-flags() {
+	# The last positional arg is the new cpu; all preceding args are olds.
+	local newcpu="$#" ; newcpu="${!newcpu}"
+	while [ $# -gt 1 ] ; do
+		# quote to make sure that no globbing is done (particularly on
+		# ${1}, the old cpu pattern) prior to calling replace-flags
+		replace-flags "-march=${1}" "-march=${newcpu}"
+		replace-flags "-mcpu=${1}" "-mcpu=${newcpu}"
+		replace-flags "-mtune=${1}" "-mtune=${newcpu}"
+		shift
+	done
+	return 0
+}
+
+# Check whether glob $2 matches any word in the (scalar or array)
+# variable named by $1.  The eval handles both cases via [*] expansion.
+_is_flagq() {
+	local x var
+	eval var=\""\${$1[*]}"\"
+	for x in ${var} ; do
+		[[ ${x} == $2 ]] && return 0
+	done
+	return 1
+}
+
+# @FUNCTION: is-flagq
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is in {C,CXX,F,FC}FLAGS, else returns shell false.  Accepts shell globs.
+is-flagq() {
+	[[ -n $2 ]] && die "Usage: is-flag <flag>"
+
+	local var
+	for var in $(all-flag-vars) ; do
+		_is_flagq ${var} "$1" && return 0
+	done
+	return 1
+}
+
+# @FUNCTION: is-flag
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Echoes "true" if flag is set in {C,CXX,F,FC}FLAGS.  Accepts shell globs.
+is-flag() {
+	is-flagq "$@" && echo true
+}
+
+# @FUNCTION: is-ldflagq
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is in LDFLAGS, else returns shell false.  Accepts shell globs.
+is-ldflagq() {
+	[[ -n $2 ]] && die "Usage: is-ldflag <flag>"
+	_is_flagq LDFLAGS $1
+}
+
+# @FUNCTION: is-ldflag
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Echoes "true" if flag is set in LDFLAGS.  Accepts shell globs.
+is-ldflag() {
+	is-ldflagq "$@" && echo true
+}
+
+# @FUNCTION: filter-mfpmath
+# @USAGE: <math types>
+# @DESCRIPTION:
+# Remove specified math types from the fpmath flag.  For example, if the user
+# has -mfpmath=sse,386, running `filter-mfpmath sse` will leave the user with
+# -mfpmath=386.
+filter-mfpmath() {
+	local orig_mfpmath new_math prune_math
+
+	# save the original -mfpmath flag
+	orig_mfpmath=$(get-flag -mfpmath)
+	# get the value of the current -mfpmath flag
+	new_math=$(get-flag mfpmath)
+	# convert "both" to something we can filter
+	new_math=${new_math/both/387,sse}
+	# Normalize separators to spaces and pad with spaces for exact matching.
+	new_math=" ${new_math//[,+]/ } "
+	# figure out which math values are to be removed
+	prune_math=""
+	for prune_math in "$@" ; do
+		new_math=${new_math/ ${prune_math} / }
+	done
+	# Collapse whitespace, then rejoin the survivors with commas.
+	new_math=$(echo ${new_math})
+	new_math=${new_math// /,}
+
+	if [[ -z ${new_math} ]] ; then
+		# if all of the user specified math values are slated for
+		# removal, then we just filter the flag entirely
+		filter-flags ${orig_mfpmath}
+	else
+		# if we only want to filter some of the user specified
+		# math values, then we replace the current flag
+		replace-flags ${orig_mfpmath} -mfpmath=${new_math}
+	fi
+	return 0
+}
+
+# @FUNCTION: strip-flags
+# @DESCRIPTION:
+# Strip *FLAGS of everything except known good/safe flags.  This runs over all
+# flags returned by all_flag_vars().
+strip-flags() {
+	local x y var
+
+	setup-allowed-flags
+
+	set -f	# disable pathname expansion
+
+	for var in $(all-flag-vars) ; do
+		local new=()
+
+		for x in ${!var} ; do
+			# Compare only the part before '=' so "-march=foo" matches "-march".
+			local flag=${x%%=*}
+			for y in ${ALLOWED_FLAGS} ; do
+				# ${flag%%${y}} is empty iff the glob ${y} matches the whole flag.
+				if [[ -z ${flag%%${y}} ]] ; then
+					new+=( "${x}" )
+					break
+				fi
+			done
+		done
+
+		# In case we filtered out all optimization flags fallback to -O2
+		if _is_flagq ${var} "-O*" && ! _is_flagq new "-O*" ; then
+			new+=( -O2 )
+		fi
+
+		if [[ ${!var} != "${new[*]}" ]] ; then
+			einfo "strip-flags: ${var}: changed '${!var}' to '${new[*]}'"
+		fi
+		eval export ${var}=\""${new[*]}"\"
+	done
+
+	set +f	# re-enable pathname expansion
+
+	return 0
+}
+
+# Probe whether compiler $1 (CC/CXX/F77/FC) accepts flag $3 when compiling
+# language $2.  Returns shell true iff the flag is accepted.
+test-flag-PROG() {
+	local comp=$1
+	local lang=$2
+	local flag=$3
+
+	[[ -z ${comp} || -z ${flag} ]] && return 1
+
+	local cmdline=(
+		$(tc-get${comp})
+		# Clang will warn about unknown gcc flags but exit 0.
+		# Need -Werror to force it to exit non-zero.
+		-Werror
+		# Use -c so we can test the assembler as well.
+		-c -o /dev/null
+	)
+	# Prefer compiling an empty program from stdin; fall back to compiling
+	# /dev/null directly for toolchains that reject "-x<lang> -".
+	if "${cmdline[@]}" -x${lang} - </dev/null >/dev/null 2>&1 ; then
+		"${cmdline[@]}" "${flag}" -x${lang} - </dev/null >/dev/null 2>&1
+	else
+		"${cmdline[@]}" "${flag}" -c -o /dev/null /dev/null >/dev/null 2>&1
+	fi
+}
+
+# @FUNCTION: test-flag-CC
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is supported by the C compiler, else returns shell false.
+test-flag-CC() { test-flag-PROG "CC" c "$1"; }
+
+# @FUNCTION: test-flag-CXX
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is supported by the C++ compiler, else returns shell false.
+test-flag-CXX() { test-flag-PROG "CXX" c++ "$1"; }
+
+# @FUNCTION: test-flag-F77
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is supported by the Fortran 77 compiler, else returns shell false.
+test-flag-F77() { test-flag-PROG "F77" f77 "$1"; }
+
+# @FUNCTION: test-flag-FC
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Returns shell true if <flag> is supported by the Fortran 90 compiler, else returns shell false.
+test-flag-FC() { test-flag-PROG "FC" f95 "$1"; }
+
+# Filter the given flags down to those compiler $1 accepts, echoing the
+# survivors.  Exit status is true iff at least one flag survived.
+test-flags-PROG() {
+	local comp=$1
+	local flags=()
+	local x
+
+	shift
+
+	[[ -z ${comp} ]] && return 1
+
+	for x ; do
+		test-flag-${comp} "${x}" && flags+=( "${x}" )
+	done
+
+	echo "${flags[*]}"
+
+	# Just bail if we dont have any flags
+	[[ ${#flags[@]} -gt 0 ]]
+}
+
+# @FUNCTION: test-flags-CC
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Returns shell true if <flags> are supported by the C compiler, else returns shell false.
+test-flags-CC() { test-flags-PROG "CC" "$@"; }
+
+# @FUNCTION: test-flags-CXX
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Returns shell true if <flags> are supported by the C++ compiler, else returns shell false.
+test-flags-CXX() { test-flags-PROG "CXX" "$@"; }
+
+# @FUNCTION: test-flags-F77
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Returns shell true if <flags> are supported by the Fortran 77 compiler, else returns shell false.
+test-flags-F77() { test-flags-PROG "F77" "$@"; }
+
+# @FUNCTION: test-flags-FC
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Returns shell true if <flags> are supported by the Fortran 90 compiler, else returns shell false.
+test-flags-FC() { test-flags-PROG "FC" "$@"; }
+
+# @FUNCTION: test-flags
+# @USAGE: <flags>
+# @DESCRIPTION:
+# Short-hand that should hopefully work for both C and C++ compiler, but
+# it's really only present due to the append-flags() abomination.
+test-flags() { test-flags-CC "$@"; }
+
+# @FUNCTION: test_version_info
+# @USAGE: <version>
+# @DESCRIPTION:
+# Returns shell true if the current C compiler version matches <version>, else returns shell false.
+# Accepts shell globs.
+test_version_info() {
+	# Substring/glob match against the full `$CC --version` banner.
+	if [[ $($(tc-getCC) --version 2>&1) == *$1* ]]; then
+		return 0
+	else
+		return 1
+	fi
+}
+
+# @FUNCTION: strip-unsupported-flags
+# @DESCRIPTION:
+# Strip {C,CXX,F,FC}FLAGS of any flags not supported by the active toolchain.
+strip-unsupported-flags() {
+	export CFLAGS=$(test-flags-CC ${CFLAGS})
+	export CXXFLAGS=$(test-flags-CXX ${CXXFLAGS})
+	export FFLAGS=$(test-flags-F77 ${FFLAGS})
+	export FCFLAGS=$(test-flags-FC ${FCFLAGS})
+}
+
+# @FUNCTION: get-flag
+# @USAGE: <flag>
+# @DESCRIPTION:
+# Find and echo the value for a particular flag.  Accepts shell globs.
+get-flag() {
+	local f var findflag="$1"
+
+	# this code looks a little flaky but seems to work for
+	# everything we want ...
+	# for example, if CFLAGS="-march=i686":
+	#	`get-flag -march` == "-march=i686"
+	#	`get-flag march` == "i686"
+	for var in $(all-flag-vars) ; do
+		for f in ${!var} ; do
+			# Substring test: deleting ${findflag} changes ${f} iff it occurs.
+			if [ "${f/${findflag}}" != "${f}" ] ; then
+				printf "%s\n" "${f/-${findflag}=}"
+				return 0
+			fi
+		done
+	done
+	return 1
+}
+
+# @FUNCTION: has_m64
+# @DESCRIPTION:
+# This doesn't test if the flag is accepted, it tests if the flag actually
+# WORKS.  Non-multilib gcc will take both -m32 and -m64.  If the flag works
+# return code is 0, else the return code is 1.
+has_m64() {
+	eqawarn "${FUNCNAME}: don't use this anymore"
+
+	# this doesnt test if the flag is accepted, it tests if the flag
+	# actually -WORKS-. non-multilib gcc will take both -m32 and -m64!
+	# please dont replace this function with test_flag in some future
+	# clean-up!
+
+	local temp="$(emktemp)"
+	echo "int main() { return(0); }" > "${temp}".c
+	# NOTE(review): MY_CC is not declared local, and the -o output file from
+	# the second emktemp is never removed -- deprecated function, left as-is.
+	MY_CC=$(tc-getCC)
+	${MY_CC/ .*/} -m64 -o "$(emktemp)" "${temp}".c > /dev/null 2>&1
+	local ret=$?
+	rm -f "${temp}".c
+	[[ ${ret} != 1 ]] && return 0
+	return 1
+}
+
+# Removed API: aborts the build if still used.
+has_m32() {
+	die "${FUNCNAME}: don't use this anymore"
+}
+
+# @FUNCTION: replace-sparc64-flags
+# @DESCRIPTION:
+# Sets mcpu to v8 and uses the original value as mtune if none specified.
+replace-sparc64-flags() {
+	local SPARC64_CPUS="ultrasparc3 ultrasparc v9"
+	# NOTE(review): loop variable x is not declared local and leaks into
+	# the caller's scope.
+
+	if [ "${CFLAGS/mtune}" != "${CFLAGS}" ]; then
+		# An explicit -mtune already exists: only downgrade -mcpu.
+		for x in ${SPARC64_CPUS}; do
+			CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8}"
+		done
+	else
+		# No -mtune: preserve the old cpu choice as the tuning target.
+		for x in ${SPARC64_CPUS}; do
+			CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"
+		done
+	fi
+
+	if [ "${CXXFLAGS/mtune}" != "${CXXFLAGS}" ]; then
+		for x in ${SPARC64_CPUS}; do
+			CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8}"
+		done
+	else
+		for x in ${SPARC64_CPUS}; do
+			CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"
+		done
+	fi
+
+	export CFLAGS CXXFLAGS
+}
+
+# @FUNCTION: append-libs
+# @USAGE: <libs>
+# @DESCRIPTION:
+# Add extra <libs> to the current LIBS.  All arguments should be prefixed with
+# either -l or -L.  For compatibility, if arguments are not prefixed as
+# options, they are given a -l prefix automatically.
+append-libs() {
+	[[ $# -eq 0 ]] && return 0
+	local flag
+	for flag in "$@"; do
+		# ${flag// } removes all spaces; empty result means a blank arg.
+		if [[ -z "${flag// }" ]]; then
+			eqawarn "Appending an empty argument to LIBS is invalid! Skipping."
+			continue
+		fi
+		case $flag in
+		-[lL]*)
+			export LIBS="${LIBS} ${flag}"
+			;;
+		-*)
+			eqawarn "Appending non-library to LIBS (${flag}); Other linker flags should be passed via LDFLAGS"
+			export LIBS="${LIBS} ${flag}"
+			;;
+		*)
+			export LIBS="${LIBS} -l${flag}"
+		esac
+	done
+
+	return 0
+}
+
+# @FUNCTION: raw-ldflags
+# @USAGE: [flags]
+# @DESCRIPTION:
+# Turn C style ldflags (-Wl,-foo) into straight ldflags - the results
+# are suitable for passing directly to 'ld'; note LDFLAGS is usually passed
+# to gcc where it needs the '-Wl,'.
+#
+# If no flags are specified, then default to ${LDFLAGS}.
+raw-ldflags() {
+	local x input="$@"
+	[[ -z ${input} ]] && input=${LDFLAGS}
+	# Rebuild the positional parameters with the translated flags.
+	set --
+	for x in ${input} ; do
+		case ${x} in
+		-Wl,*)
+			x=${x#-Wl,}
+			# Each comma inside a -Wl, group separates distinct ld args.
+			set -- "$@" ${x//,/ }
+			;;
+		*)	# Assume it's a compiler driver flag, so throw it away #441808
+			;;
+		esac
+	done
+	echo "$@"
+}
+
+# @FUNCTION: no-as-needed
+# @RETURN: Flag to disable asneeded behavior for use with append-ldflags.
+no-as-needed() {
+	# Only GNU ld understands --no-as-needed; print nothing for other linkers.
+	case $($(tc-getLD) -v 2>&1 </dev/null) in
+	*GNU*)	# GNU ld
+		echo "-Wl,--no-as-needed" ;;
+	esac
+}
+
+fi
diff --git a/eclass/font-ebdftopcf.eclass b/eclass/font-ebdftopcf.eclass
new file mode 100644
index 000000000000..864cfdfc010b
--- /dev/null
+++ b/eclass/font-ebdftopcf.eclass
@@ -0,0 +1,46 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Author: Robin H. Johnson <robbat2@gentoo.org>
+
+# font-ebdftopcf.eclass
+# Eclass to make PCF font generator from BDF uniform and optimal
+# The manpage for this eclass is in media-gfx/ebdftopcf.
+
+# inherit this eclass after font.eclass
+
+# if USE="-X", this eclass is basically a no-op, since bdftopcf requires Xorg.
+IUSE="X"
+
+# Variable declarations
+DEPEND="X? ( media-gfx/ebdftopcf )"
+RDEPEND=""
+
+use X && FONT_SUFFIX="pcf.gz"
+use X || FONT_SUFFIX="bdf"
+
+#
+# Public functions
+#
+# Build PCF files from the given BDF files via the ebdftopcf makefile.
+# Extra bdftopcf options can be passed through BDFTOPCF_PARAMS.
+ebdftopcf() {
+	local bdffiles
+	bdffiles="$@"
+	[ -z "$bdffiles" ] && die "No BDF files specified."
+	emake -f "${EPREFIX}"/usr/share/ebdftopcf/Makefile.ebdftopcf \
+		BDFFILES="${bdffiles}" \
+		BDFTOPCF_PARAMS="${BDFTOPCF_PARAMS}" \
+		|| die "Failed to build PCF files"
+}
+
+#
+# Public inheritable functions
+#
+# Default src_compile: convert every *.bdf in the tree unless the ebuild
+# pre-set BDFFILES.  No-op when USE=X is disabled.
+font-ebdftopcf_src_compile() {
+	if use X; then
+		[ -z "${BDFFILES}" ] && BDFFILES="$(find . -name '*.bdf')"
+		ebdftopcf ${BDFFILES}
+	fi
+}
+
+EXPORT_FUNCTIONS src_compile
diff --git a/eclass/font.eclass b/eclass/font.eclass
new file mode 100644
index 000000000000..94a18c056097
--- /dev/null
+++ b/eclass/font.eclass
@@ -0,0 +1,250 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: font.eclass
+# @MAINTAINER:
+# fonts@gentoo.org
+# @BLURB: Eclass to make font installation uniform
+
+inherit eutils
+
+EXPORT_FUNCTIONS pkg_setup src_install pkg_postinst pkg_postrm
+
+# @ECLASS-VARIABLE: FONT_SUFFIX
+# @DEFAULT_UNSET
+# @REQUIRED
+# @DESCRIPTION:
+# Space delimited list of font suffixes to install.
+FONT_SUFFIX=${FONT_SUFFIX:-}
+
+# @ECLASS-VARIABLE: FONT_S
+# @REQUIRED
+# @DESCRIPTION:
+# Space delimited list of directories containing the fonts.
+FONT_S=${FONT_S:-${S}}
+
+# @ECLASS-VARIABLE: FONT_PN
+# @DESCRIPTION:
+# Font name (ie. last part of FONTDIR).
+FONT_PN=${FONT_PN:-${PN}}
+
+# @ECLASS-VARIABLE: FONTDIR
+# @DESCRIPTION:
+# Full path to installation directory.
+FONTDIR=${FONTDIR:-/usr/share/fonts/${FONT_PN}}
+
+# @ECLASS-VARIABLE: FONT_CONF
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array containing fontconfig conf files to install.
+FONT_CONF=( "" )
+
+# @ECLASS-VARIABLE: DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space delimited list of docs to install.
+# We always install these:
+# COPYRIGHT README{,.txt} NEWS AUTHORS BUGS ChangeLog FONTLOG.txt
+DOCS=${DOCS:-}
+
+IUSE="X"
+
+DEPEND="X? (
+ x11-apps/mkfontdir
+ media-fonts/encodings
+ )"
+RDEPEND=""
+
+# @FUNCTION: font_xfont_config
+# @DESCRIPTION:
+# Generate Xorg font files (mkfontscale/mkfontdir).
+# Optional $1 is a source directory; its ${S} prefix is stripped to map it
+# onto the matching subdirectory of ${FONTDIR} inside the image.
+font_xfont_config() {
+	local dir_name
+	if has X ${IUSE//+} && use X ; then
+		dir_name="${1:-${FONT_PN}}"
+		ebegin "Creating fonts.scale & fonts.dir in ${dir_name##*/}"
+		# Remove stale index files before regenerating them.
+		rm -f "${ED}${FONTDIR}/${1//${S}/}"/{fonts.{dir,scale},encodings.dir}
+		mkfontscale "${ED}${FONTDIR}/${1//${S}/}"
+		mkfontdir \
+			-e ${EPREFIX}/usr/share/fonts/encodings \
+			-e ${EPREFIX}/usr/share/fonts/encodings/large \
+			"${ED}${FONTDIR}/${1//${S}/}"
+		eend $?
+		# fonts.alias is looked up in the current working directory —
+		# presumably the caller has cd'd into the font source dir.
+		if [[ -e fonts.alias ]] ; then
+			doins fonts.alias
+		fi
+	fi
+}
+
+# @FUNCTION: font_fontconfig
+# @DESCRIPTION:
+# Install fontconfig conf files given in FONT_CONF.
+# Files that do not exist are silently skipped.
+font_fontconfig() {
+	local conffile
+	if [[ -n ${FONT_CONF[@]} ]]; then
+		insinto /etc/fonts/conf.avail/
+		for conffile in "${FONT_CONF[@]}"; do
+			[[ -e ${conffile} ]] && doins ${conffile}
+		done
+	fi
+}
+
+# @FUNCTION: font_cleanup_dirs
+# @DESCRIPTION:
+# Remove font directories containing only generated files.
+# Walks every directory under ${EROOT}usr/share/fonts; a directory whose
+# regular entries are all auto-generated index files is purged of those
+# files and then rmdir'd if empty. Directories holding any real font or
+# other file are left untouched.
+font_cleanup_dirs() {
+	local genfiles="encodings.dir fonts.alias fonts.cache-1 fonts.dir fonts.scale"
+	# fonts.alias isn't generated but it's a special case (see below).
+	local d f g generated candidate otherfile
+
+	ebegin "Cleaning up font directories"
+	find -L "${EROOT}"usr/share/fonts/ -type d -print0 | while read -d $'\0' d; do
+		candidate=false
+		otherfile=false
+		for f in "${d}"/*; do
+			generated=false
+			# make sure this is a file and not a subdir
+			[[ -e ${f} || -L ${f} ]] || continue
+			for g in ${genfiles}; do
+				if [[ ${f##*/} == ${g} ]]; then
+					# this is a generated file
+					generated=true
+					break
+				fi
+			done
+			# if the file is a generated file then we know this is a font dir (as
+			# opposed to something like encodings or util) and a candidate for
+			# removal. if it's not generated then it's an "otherfile".
+			${generated} && candidate=true || otherfile=true
+			# if the directory is both a candidate for removal and contains at
+			# least one "otherfile" then don't remove it.
+			[[ ${candidate} == ${otherfile} ]] && break
+		done
+		# if in the end we only have generated files, purge the directory.
+		if [[ ${candidate} == true && ${otherfile} == false ]]; then
+			# we don't want to remove fonts.alias files that were installed by
+			# media-fonts/font-alias. any other fonts.alias files will have
+			# already been unmerged with their packages.
+			for g in ${genfiles}; do
+				[[ ${g} != fonts.alias && ( -e ${d}/${g} || -L ${d}/${g} ) ]] \
+					&& rm "${d}"/${g}
+			done
+			# if there's nothing left remove the directory
+			find "${d}" -maxdepth 0 -type d -empty -exec rmdir '{}' \;
+		fi
+	done
+	eend 0
+}
+
+# @FUNCTION: font_pkg_setup
+# @DESCRIPTION:
+# The font pkg_setup function.
+# Collision protection and Prefix compat for eapi < 3.
+font_pkg_setup() {
+	# Prefix compat
+	# EAPI 0-2 do not define EPREFIX/ED/EROOT, so synthesize them from
+	# D/ROOT on non-prefix systems to let the rest of the eclass use them.
+	case ${EAPI:-0} in
+		0|1|2)
+			if ! use prefix; then
+				EPREFIX=
+				ED=${D}
+				EROOT=${ROOT}
+				# EROOT is expected to carry a trailing slash.
+				[[ ${EROOT} = */ ]] || EROOT+="/"
+			fi
+			;;
+	esac
+
+	# make sure we get no collisions
+	# setup is not the nicest place, but preinst doesn't cut it
+	[[ -e "${EROOT}/${FONTDIR}/fonts.cache-1" ]] && rm -f "${EROOT}/${FONTDIR}/fonts.cache-1"
+}
+
+# @FUNCTION: font_src_install
+# @DESCRIPTION:
+# The font src_install function.
+# Installs every FONT_SUFFIX font from each FONT_S directory into FONTDIR,
+# generates the X font indexes, installs fontconfig files and docs.
+font_src_install() {
+	local dir suffix commondoc
+
+	# Word-split FONT_S into the positional parameters to count entries.
+	set -- ${FONT_S:-${S}}
+	if [[ $# -gt 1 ]]; then
+		# if we have multiple FONT_S elements then we want to recreate the dir
+		# structure
+		for dir in ${FONT_S}; do
+			pushd "${dir}" > /dev/null
+			# Strip the ${S} prefix so the sub-path is recreated under FONTDIR.
+			insinto "${FONTDIR}/${dir//${S}/}"
+			for suffix in ${FONT_SUFFIX}; do
+				doins *.${suffix}
+			done
+			font_xfont_config "${dir}"
+			popd > /dev/null
+		done
+	else
+		pushd "${FONT_S}" > /dev/null
+		insinto "${FONTDIR}"
+		for suffix in ${FONT_SUFFIX}; do
+			doins *.${suffix}
+		done
+		font_xfont_config
+		popd > /dev/null
+	fi
+
+	font_fontconfig
+
+	[[ -n ${DOCS} ]] && { dodoc ${DOCS} || die "docs installation failed" ; }
+
+	# install common docs
+	# -s: only install docs that exist and are non-empty.
+	for commondoc in COPYRIGHT README{,.txt} NEWS AUTHORS BUGS ChangeLog FONTLOG.txt; do
+		[[ -s ${commondoc} ]] && dodoc ${commondoc}
+	done
+}
+
+# @FUNCTION: font_pkg_postinst
+# @DESCRIPTION:
+# The font pkg_postinst function.
+# Fixes font file permissions, lists installed fontconfig files and
+# refreshes the global fontconfig cache when installing into /.
+font_pkg_postinst() {
+	# unreadable font files = fontconfig segfaults
+	find "${EROOT}"usr/share/fonts/ -type f '!' -perm 0644 -print0 \
+		| xargs -0 chmod -v 0644 2>/dev/null
+
+	if [[ -n ${FONT_CONF[@]} ]]; then
+		local conffile
+		echo
+		elog "The following fontconfig configuration files have been installed:"
+		elog
+		for conffile in "${FONT_CONF[@]}"; do
+			# Only report files that actually made it into conf.avail.
+			if [[ -e ${EROOT}etc/fonts/conf.avail/$(basename ${conffile}) ]]; then
+				elog "  $(basename ${conffile})"
+			fi
+		done
+		elog
+		elog "Use \`eselect fontconfig\` to enable/disable them."
+		echo
+	fi
+
+	# Cache update only makes sense on the live system (ROOT == /).
+	if has_version media-libs/fontconfig && [[ ${ROOT} == / ]]; then
+		ebegin "Updating global fontcache"
+		fc-cache -fs
+		eend $?
+	else
+		einfo "Skipping fontcache update (media-libs/fontconfig is not installed or ROOT != /)"
+	fi
+}
+
+# @FUNCTION: font_pkg_postrm
+# @DESCRIPTION:
+# The font pkg_postrm function.
+# Cleans up orphaned font directories, then repeats the permission fix
+# and cache refresh done in pkg_postinst for the remaining fonts.
+font_pkg_postrm() {
+	font_cleanup_dirs
+
+	# unreadable font files = fontconfig segfaults
+	find "${EROOT}"usr/share/fonts/ -type f '!' -perm 0644 -print0 \
+		| xargs -0 chmod -v 0644 2>/dev/null
+
+	if has_version media-libs/fontconfig && [[ ${ROOT} == / ]]; then
+		ebegin "Updating global fontcache"
+		fc-cache -fs
+		eend $?
+	else
+		einfo "Skipping fontcache update (media-libs/fontconfig is not installed or ROOT != /)"
+	fi
+}
diff --git a/eclass/fortran-2.eclass b/eclass/fortran-2.eclass
new file mode 100644
index 000000000000..03fb46225790
--- /dev/null
+++ b/eclass/fortran-2.eclass
@@ -0,0 +1,256 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: fortran-2.eclass
+# @MAINTAINER:
+# jlec@gentoo.org
+# sci@gentoo.org
+# @AUTHOR:
+# Author Justin Lecher <jlec@gentoo.org>
+# Test functions provided by Sebastien Fabbro and Kacper Kowalik
+# @BLURB: Simplify fortran compiler management
+# @DESCRIPTION:
+# If you need a fortran compiler, then you should be inheriting this eclass.
+# In case you only need optional support, please export FORTRAN_NEEDED before
+# inheriting the eclass.
+#
+# The eclass tests for working fortran compilers
+# and exports the variables FC and F77.
+# Optionally, it checks for extended capabilities based on
+# the variable options selected in the ebuild
+# The only phase function exported is fortran-2_pkg_setup.
+# @EXAMPLE:
+# FORTRAN_NEEDED="lapack fortran"
+#
+# inherit fortran-2
+#
+# FORTRAN_NEED_OPENMP=1
+
+# @ECLASS-VARIABLE: FORTRAN_NEED_OPENMP
+# @DESCRIPTION:
+# Set to "1" in order to automatically have the eclass abort if the fortran
+# compiler lacks openmp support.
+: ${FORTRAN_NEED_OPENMP:=0}
+
+# @ECLASS-VARIABLE: FORTRAN_STANDARD
+# @DESCRIPTION:
+# Set this, if a special dialect needs to be supported.
+# Generally not needed as default is sufficient.
+#
+# Valid settings are any combination of: 77 90 95 2003
+: ${FORTRAN_STANDARD:=77}
+
+# @ECLASS-VARIABLE: FORTRAN_NEEDED
+# @DESCRIPTION:
+# If your package has an optional fortran support, set this variable
+# to the space separated list of USE triggering the fortran
+# dependency.
+#
+# e.g. FORTRAN_NEEDED=lapack would result in
+#
+# DEPEND="lapack? ( virtual/fortran )"
+#
+# If unset, we always depend on virtual/fortran.
+: ${FORTRAN_NEEDED:=always}
+
+inherit eutils toolchain-funcs
+
+for _f_use in ${FORTRAN_NEEDED}; do
+ case ${_f_use} in
+ always)
+ DEPEND+=" virtual/fortran"
+ break
+ ;;
+ no)
+ break
+ ;;
+ *)
+ DEPEND+=" ${_f_use}? ( virtual/fortran )"
+ ;;
+ esac
+done
+RDEPEND="${DEPEND}"
+
+# @FUNCTION: _fortran_write_testsuite
+# @INTERNAL
+# @DESCRIPTION:
+# writes fortran test code
+# Creates minimal conftest sources ${T}/test-fortran.{f,f90,f03}, one per
+# dialect; the f03 file uses a procedure pointer to require F2003 support.
+_fortran_write_testsuite() {
+	local filebase=${T}/test-fortran
+
+	# f77 code
+	cat <<- EOF > "${filebase}.f"
+	end
+	EOF
+
+	# f90/95 code
+	cat <<- EOF > "${filebase}.f90"
+	end
+	EOF
+
+	# f2003 code
+	cat <<- EOF > "${filebase}.f03"
+	procedure(), pointer :: p
+	end
+	EOF
+}
+
+# @FUNCTION: _fortran_compile_test
+# @USAGE: <compiler> [dialect]
+# @INTERNAL
+# @DESCRIPTION:
+# Takes fortran compiler as first argument and dialect as second.
+# Checks whether the passed fortran compiler speaks the fortran dialect
+# by compiling the matching testsuite file; returns the compiler's status.
+_fortran_compile_test() {
+	local filebase=${T}/test-fortran
+	local fcomp=${1}
+	local fdia=${2}
+	# Empty dialect selects the plain .f (F77) source.
+	local fcode=${filebase}.f${fdia}
+	local ret
+
+	[[ $# -lt 1 ]] && \
+		die "_fortran_compile_test() needs at least one argument"
+
+	# Lazily generate the test sources on first use.
+	[[ -f ${fcode} ]] || _fortran_write_testsuite
+
+	${fcomp} "${fcode}" -o "${fcode}.x" \
+		>> "${T}"/_fortran_compile_test.log 2>&1
+	ret=$?
+
+	rm -f "${fcode}.x"
+	return ${ret}
+}
+
+# @FUNCTION: _fortran-has-openmp
+# @RETURN: return code of the compiler
+# @INTERNAL
+# @DESCRIPTION:
+# See if the fortran supports OpenMP.
+# Tries the OpenMP enabling flag of several vendors in turn and stops at
+# the first one that compiles the probe successfully.
+_fortran-has-openmp() {
+	local flag
+	local filebase=${T}/test-fc-openmp
+	local fcode=${filebase}.f
+	local ret
+	local _fc=$(tc-getFC)
+
+	cat <<- EOF > "${fcode}"
+	call omp_get_num_threads
+	end
+	EOF
+
+	# gfortran, Sun/Oracle, Intel, PGI, and IBM XL spellings respectively.
+	for flag in -fopenmp -xopenmp -openmp -mp -omp -qsmp=omp; do
+		${_fc} ${flag} "${fcode}" -o "${fcode}.x" \
+			&>> "${T}"/_fortran_compile_test.log
+		ret=$?
+		# Zero status means the flag worked; keep that status.
+		(( ${ret} )) || break
+	done
+
+	rm -f "${fcode}.x"
+	return ${ret}
+}
+
+# @FUNCTION: _fortran_die_msg
+# @INTERNAL
+# @DESCRIPTION:
+# Detailed description how to handle fortran support
+# Prints remediation hints and aborts the build via die.
+_fortran_die_msg() {
+	echo
+	eerror "Please install currently selected gcc version with USE=fortran."
+	eerror "If you intend to use a different compiler then gfortran, please"
+	eerror "set FC variable accordingly and take care that the necessary"
+	eerror "fortran dialects are supported."
+	echo
+	die "Currently no working fortran compiler is available"
+}
+
+# @FUNCTION: _fortran_test_function
+# @INTERNAL
+# @DESCRIPTION:
+# Internal test function for working fortran compiler.
+# It is called in fortran-2_pkg_setup.
+# Verifies each dialect in FORTRAN_STANDARD, exports F77/FC, and
+# optionally enforces OpenMP support (FORTRAN_NEED_OPENMP=1).
+_fortran_test_function() {
+	local dialect
+
+	# NOTE(review): F77 defaults to $(tc-getFC) rather than tc-getF77 —
+	# looks deliberate (same driver handles both), but confirm.
+	: ${F77:=$(tc-getFC)}
+
+	: ${FORTRAN_STANDARD:=77}
+	for dialect in ${FORTRAN_STANDARD}; do
+		case ${dialect} in
+			77) _fortran_compile_test $(tc-getF77) || \
+				_fortran_die_msg ;;
+			90|95) _fortran_compile_test $(tc-getFC) 90 || \
+				_fortran_die_msg ;;
+			2003) _fortran_compile_test $(tc-getFC) 03 || \
+				_fortran_die_msg ;;
+			2008) die "Future" ;;
+			*) die "${dialect} is not a Fortran dialect." ;;
+		esac
+	done
+
+	tc-export F77 FC
+	einfo "Using following Fortran compiler:"
+	einfo "  F77: ${F77}"
+	einfo "  FC:  ${FC}"
+
+	if [[ ${FORTRAN_NEED_OPENMP} == 1 ]]; then
+		if _fortran-has-openmp; then
+			einfo "${FC} has OPENMP support"
+		else
+			die "Please install current gcc with USE=openmp or set the FC variable to a compiler that supports OpenMP"
+		fi
+	fi
+}
+
+# @FUNCTION: _fortran-2_pkg_setup
+# @INTERNAL
+# @DESCRIPTION:
+# _The_ fortran-2_pkg_setup() code
+# Runs the compiler test when fortran is required (always, or via an
+# enabled USE flag from FORTRAN_NEEDED); otherwise unsets FC/F77.
+_fortran-2_pkg_setup() {
+	for _f_use in ${FORTRAN_NEEDED}; do
+		case ${_f_use} in
+			always)
+				# One successful test is enough; stop iterating.
+				_fortran_test_function && break
+				;;
+			no)
+				einfo "Forcing fortran support off"
+				break
+				;;
+			*)
+				if use ${_f_use}; then
+					_fortran_test_function && break
+				else
+					# Flag disabled: make sure no stale compiler leaks in.
+					unset FC
+					unset F77
+				fi
+				;;
+		esac
+	done
+}
+
+
+# @FUNCTION: fortran-2_pkg_setup
+# @DESCRIPTION:
+# Setup functionality,
+# checks for a valid fortran compiler and optionally for its openmp support.
+fortran-2_pkg_setup() {
+	case ${EAPI:-0} in
+		0|1|2|3)
+			# Deprecation notice for old EAPIs; the checks still run.
+			eqawarn "Support for EAPI < 4 will be removed from the"
+			eqawarn "fortran-2.eclass after 2013-09-30."
+			eqawarn "Please migrate your package to a higher EAPI"
+			eqawarn "or file a bug at https://bugs.gentoo.org"
+			_fortran-2_pkg_setup ;;
+		4|5)
+			# Binary packages are never compiled, so skip the compiler test.
+			if [[ ${MERGE_TYPE} != binary ]]; then
+				_fortran-2_pkg_setup
+			fi
+			;;
+	esac
+}
+
+case ${EAPI:-0} in
+ 0|1|2|3|4|5) EXPORT_FUNCTIONS pkg_setup ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
diff --git a/eclass/fox.eclass b/eclass/fox.eclass
new file mode 100644
index 000000000000..6ac04c205c15
--- /dev/null
+++ b/eclass/fox.eclass
@@ -0,0 +1,230 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: fox.eclass
+# @MAINTAINER:
+# mabi@gentoo.org
+# @BLURB: Functionality required by the FOX Toolkit and its applications
+# @DESCRIPTION:
+# This eclass allows building SLOT-able FOX Toolkit installations
+# (x11-libs/fox: headers, libs, and docs), which are by design
+# parallel-installable, while installing only one version of the utils
+# (dev-util/reswrap) and apps (app-editors/adie, sci-calculators/calculator,
+# x11-misc/pathfinder, and x11-misc/shutterbug).
+#
+# Version numbering follows the kernel-style odd-even minor version
+# designation. Even-number minor versions are API stable, which patch
+# releases aimed mostly at the library; apps generally won't need to be
+# bumped for a patch release.
+#
+# Odd-number versions are development branches with their own SLOT and
+# are API unstable; changes are made to the apps, and likely need to be
+# bumped together with the library.
+#
+# Here are sample [R]DEPENDs for the fox apps
+# 1.6: 'x11-libs/fox:1.6'
+# 1.7: '~x11-libs/fox-${PV}'
+#
+# EAPI phase trickery borrowed from enlightenment.eclass
+
+inherit autotools versionator
+
+
+FOX_EXPF="src_unpack src_compile src_install pkg_postinst"
+case "${EAPI:-0}" in
+ 2|3|4|5) FOX_EXPF+=" src_prepare src_configure" ;;
+ *) ;;
+esac
+EXPORT_FUNCTIONS ${FOX_EXPF}
+
+# @ECLASS-VARIABLE: FOX_PV
+# @DESCRIPTION:
+# The version of the FOX Toolkit provided or required by the package
+: ${FOX_PV:=${PV}}
+
+# @ECLASS-VARIABLE: FOXVER
+# @INTERNAL
+# @DESCRIPTION:
+# The major.minor version of FOX_PV, usually acts as $SLOT and is used in
+# building the applications
+FOXVER=$(get_version_component_range 1-2 ${FOX_PV})
+
+# @ECLASS-VARIABLE: FOX_APPS
+# @INTERNAL
+# @DESCRIPTION:
+# The applications originally packaged in the FOX Toolkit
+FOX_APPS="adie calculator pathfinder shutterbug"
+
+# @ECLASS-VARIABLE: FOXCONF
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this to add additional configuration options during src_configure
+
+DESCRIPTION="C++ based Toolkit for developing Graphical User Interfaces easily and effectively"
+HOMEPAGE="http://www.fox-toolkit.org/"
+SRC_URI="ftp://ftp.fox-toolkit.org/pub/fox-${FOX_PV}.tar.gz"
+
+IUSE="debug doc profile"
+
+if [[ ${PN} != fox ]] ; then
+ FOX_COMPONENT="${FOX_COMPONENT:-${PN}}"
+fi
+
+if [[ -z ${FOX_COMPONENT} ]] ; then
+ DOXYGEN_DEP="doc? ( app-doc/doxygen )"
+fi
+
+if [[ ${PN} != reswrap ]] ; then
+ RESWRAP_DEP="dev-util/reswrap"
+fi
+
+DEPEND="${DOXYGEN_DEP}
+ ${RESWRAP_DEP}
+ >=sys-apps/sed-4"
+
+S="${WORKDIR}/fox-${FOX_PV}"
+
+# Exported src_unpack: unpack the fox tarball and, on EAPIs without a
+# src_prepare phase, run the preparation step here.
+fox_src_unpack() {
+	unpack ${A}
+	cd "${S}"
+
+	# EAPI 0/1 have no src_prepare; call it manually in that case.
+	has src_prepare ${FOX_EXPF} || fox_src_prepare
+}
+
+# Exported src_prepare: patch the autotools build so that CXXFLAGS are
+# respected, binaries are not stripped, bundled apps/utils are skipped,
+# and app builds use the installed reswrap/headers/libFOX; then rerun
+# autotools.
+fox_src_prepare() {
+	# fox changed from configure.in to configure.am in 1.6.38
+	local confFile="configure.ac"
+	[[ -r "configure.in" ]] && confFile="configure.in"
+
+	# Respect system CXXFLAGS
+	sed -i -e 's:CXXFLAGS=""::' $confFile || die "sed ${confFile} error"
+
+	# don't strip binaries
+	sed -i -e '/LDFLAGS="-s ${LDFLAGS}"/d' $confFile || die "sed ${confFile} error"
+
+	# don't build apps from top-level (i.e. x11-libs/fox)
+	# utils == reswrap
+	local d
+	for d in ${FOX_APPS} utils windows ; do
+		sed -i -e "s:${d}::" Makefile.am || die "sed Makefile.am error"
+	done
+
+	# use the installed reswrap for everything else
+	for d in ${FOX_APPS} chart controlpanel tests ; do
+		# Some of these dirs only exist in certain releases.
+		[[ -d ${d} ]] &&
+		(sed -i -e 's:$(top_builddir)/utils/reswrap:reswrap:' \
+			${d}/Makefile.am || die "sed ${d}/Makefile.am error")
+	done
+
+	# use the installed headers and library for apps
+	for d in ${FOX_APPS} ; do
+		sed -i \
+			-e "s:-I\$(top_srcdir)/include -I\$(top_builddir)/include:-I\$(includedir)/fox-${FOXVER}:" \
+			-e 's:$(top_builddir)/src/libFOX:-lFOX:' \
+			-e 's:$(top_builddir)/lib/libFOX:-lFOX:' \
+			-e 's:\.la::' \
+			${d}/Makefile.am || die "sed ${d}/Makefile.am error"
+	done
+
+	eautoreconf
+}
+
+# Exported src_configure: pick debug vs release mode from USE=debug and
+# run econf with FOXCONF plus the profiling switch.
+fox_src_configure() {
+	use debug && FOXCONF+=" --enable-debug" \
+		|| FOXCONF+=" --enable-release"
+
+	econf ${FOXCONF} \
+		$(use_with profile profiling)
+}
+
+
+# Exported src_compile: build only the selected FOX_COMPONENT subtree
+# (the whole library when FOX_COMPONENT is empty), plus doxygen docs
+# for the library with USE=doc.
+fox_src_compile() {
+	# EAPI 0/1 have no src_configure; call it manually in that case.
+	has src_configure ${FOX_EXPF} || fox_src_configure
+
+	cd "${S}/${FOX_COMPONENT}"
+	emake || die "compile error"
+
+	# build class reference docs (FOXVER >= 1.2)
+	if use doc && [[ -z ${FOX_COMPONENT} ]] ; then
+		emake -C "${S}"/doc docs || die "doxygen error"
+	fi
+}
+
+# Exported src_install: install the component, create desktop entries for
+# the known apps, install docs, and rename fox-config to a slotted name.
+fox_src_install() {
+	cd "${S}/${FOX_COMPONENT}"
+
+	# html/art/screenshots dirs are redirected under /usr/share/doc/${PF}.
+	emake install \
+		DESTDIR="${D}" \
+		htmldir=/usr/share/doc/${PF}/html \
+		artdir=/usr/share/doc/${PF}/html/art \
+		screenshotsdir=/usr/share/doc/${PF}/html/screenshots \
+		|| die "install error"
+
+	# create desktop menu items for apps
+	case ${FOX_COMPONENT} in
+		adie)
+			newicon big_gif.gif adie.gif
+			make_desktop_entry adie "Adie Text Editor" adie.gif
+			;;
+		calculator)
+			newicon bigcalc.gif foxcalc.gif
+			make_desktop_entry calculator "FOX Calculator" foxcalc.gif
+			;;
+		pathfinder)
+			newicon iconpath.gif pathfinder.gif
+			make_desktop_entry PathFinder "PathFinder" pathfinder.gif "FileManager"
+			;;
+		shutterbug)
+			doicon shutterbug.gif
+			make_desktop_entry shutterbug "ShutterBug" shutterbug.gif "Graphics"
+			;;
+	esac
+
+	for doc in ADDITIONS AUTHORS LICENSE_ADDENDUM README TRACING ; do
+		[ -f $doc ] && dodoc $doc
+	done
+
+	# remove documentation if USE=-doc
+	use doc || rm -fr "${D}/usr/share/doc/${PF}/html"
+
+	# install class reference docs if USE=doc
+	if use doc && [[ -z ${FOX_COMPONENT} ]] ; then
+		dohtml -r "${S}/doc/ref"
+	fi
+
+	# slot fox-config
+	# Rename so several FOX SLOTs can coexist; the fox-wrapper package
+	# dispatches to the right versioned script.
+	if [[ -f ${D}/usr/bin/fox-config ]] ; then
+		mv "${D}/usr/bin/fox-config" "${D}/usr/bin/fox-${FOXVER}-config" \
+			|| die "failed to install fox-config"
+	fi
+}
+
+# Exported pkg_postinst: for the library itself (FOX_COMPONENT empty),
+# explain the SLOT layout and how to query the installed version
+# (pkg-config for >=1.7.25, the versioned fox-config script otherwise).
+fox_pkg_postinst() {
+	if [ -z "${FOX_COMPONENT}" ] ; then
+		echo
+		einfo "Multiple versions of the FOX Toolkit library may now be installed"
+		einfo "in parallel SLOTs on the same system."
+		einfo
+		einfo "The reswrap utility and the applications included in the FOX Toolkit"
+		einfo "(adie, calculator, pathfinder, shutterbug) are now available as"
+		einfo "separate ebuilds."
+		echo
+
+		if version_is_at_least "1.7.25"; then
+			einfo "Fox versions after 1.7.25 ships a pkg-config file called fox17.pc"
+			einfo "instead of the previous fox-config tool."
+			einfo "You now get all info via pkg-config:"
+			einfo
+			einfo "pkg-config fox17 --libs (etc.)"
+		else
+			einfo "The fox-config script has been installed as fox-${FOXVER}-config."
+			einfo "The fox-wrapper package is used to direct calls to fox-config"
+			einfo "to the correct versioned script, based on the WANT_FOX variable."
+			einfo "For example:"
+			einfo
+			einfo "  WANT_FOX=\"${FOXVER}\" fox-config <options>"
+		fi
+		einfo
+	fi
+}
diff --git a/eclass/freebsd.eclass b/eclass/freebsd.eclass
new file mode 100644
index 000000000000..583d59e30591
--- /dev/null
+++ b/eclass/freebsd.eclass
@@ -0,0 +1,267 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Diego Pettenò <flameeyes@gentoo.org>
+
+inherit versionator eutils flag-o-matic bsdmk
+
+# Drop patch level from ${PV}
+MY_PV=${PV/_p*}
+PLEVEL=${PV##*_p}
+
+LICENSE="BSD"
+HOMEPAGE="http://www.freebsd.org/"
+
+# Define global package names
+LIB="freebsd-lib-${PV}"
+BIN="freebsd-bin-${PV}"
+CONTRIB="freebsd-contrib-${PV}"
+SHARE="freebsd-share-${PV}"
+UBIN="freebsd-ubin-${PV}"
+USBIN="freebsd-usbin-${PV}"
+CRYPTO="freebsd-crypto-${PV}"
+LIBEXEC="freebsd-libexec-${PV}"
+SBIN="freebsd-sbin-${PV}"
+GNU="freebsd-gnu-${PV}"
+ETC="freebsd-etc-${PV}"
+SYS="freebsd-sys-${PV}"
+INCLUDE="freebsd-include-${PV}"
+RESCUE="freebsd-rescue-${PV}"
+CDDL="freebsd-cddl-${PV}"
+SECURE="freebsd-secure-${PV}"
+
+# Release version (5.3, 5.4, 6.0, etc)
+RV="$(get_version_component_range 1-2 ${MY_PV})"
+
+# SVN ebuild support.
+# 9.1.0.9999 --> release/9.1.0
+# 9.1.9999 --> releng/9.1
+# 9.9999 --> stable/9
+# 9999 --> head
+#
+# svn revision can be specified by patch level:
+# freebsd-lib-9.9999_p247000 --> set svn -r 247000
+
+if [[ ${MY_PV} == *9999* ]]; then
+ inherit subversion
+
+ # Set SVN revision using patch level.
+ [[ ${PV} == *_p* ]] && ESVN_REVISION="${PLEVEL}"
+
+ case ${MY_PV%.9999} in
+ *.*.*) BRANCH="release";;
+ *.*) BRANCH="releng" ;;
+ 9999) BRANCH="head" ;;
+ *) BRANCH="stable" ;;
+ esac
+
+ if [[ ${BRANCH} == head ]] ; then
+ SVN_SUB_URI="${BRANCH}"
+ else
+ SVN_SUB_URI="${BRANCH}/${MY_PV%.9999}"
+ fi
+
+ ESVN_REPO_URI="svn://svn.freebsd.org/base/${SVN_SUB_URI}"
+ ESVN_PROJECT="freebsd-${BRANCH}"
+fi
+
+# use the original source code.
+if [[ ${MY_PV} != *9999* ]] && version_is_at_least 10.0 ${RV} ; then
+ DL_PV=${MY_PV/_rc/-RC}
+ DL_PV=${DL_PV/_beta/-BETA}
+ DL_PV=${DL_PV/_alpha/-ALPHA}
+ if [[ ${DL_PV} == ${MY_PV} ]]; then
+ DL_PV="${DL_PV}-RELEASE"
+ fi
+ SRC_URI="mirror://freebsd/releases/i386/${DL_PV}/src.txz -> freebsd-src-${MY_PV}.tar.xz"
+fi
+
+IUSE="profile"
+
+#unalias -a
+alias install-info='/usr/bin/bsdinstall-info'
+
+EXPORT_FUNCTIONS src_compile src_install src_unpack
+
+# doperiodic <kind> <file> ...
+# Install the given files as executable (mode 0755) FreeBSD periodic(8)
+# scripts under /etc/periodic/<kind>.
+doperiodic() {
+	local kind=$1
+	shift
+
+	( # dont want to pollute calling env
+		insinto /etc/periodic/${kind}
+		insopts -m 0755
+		doins "$@"
+	)
+}
+
+# Print the bmake command to use, forcing the FreeBSD mk files when the
+# build host is not itself FreeBSD (cross-compile case).
+freebsd_get_bmake() {
+	local bmake
+	bmake=$(get_bmake)
+	[[ ${CBUILD} == *-freebsd* ]] || bmake="${bmake} -m /usr/share/mk/freebsd"
+
+	echo "${bmake}"
+}
+
+# Generates a patch SRC_URI or DISTDIR of upstream.
+# With -s, print local DISTDIR paths for the UPSTREAM_PATCHES entries;
+# otherwise print "URL -> renamed-file" pairs for SRC_URI.
+# Returns 1 when UPSTREAM_PATCHES is empty.
+freebsd_upstream_patches() {
+	local opt=$1
+	[[ ${#UPSTREAM_PATCHES[@]} -eq 0 ]] && return 1
+	for x in "${UPSTREAM_PATCHES[@]}"
+	do
+		# Flatten "dir/file:rev" into a distfile-safe name.
+		local out=${PN}-${x/\//-}
+		out=${out/:/}
+		if [[ ${opt} == -s ]] ; then
+			echo "${DISTDIR}/${out}"
+		else
+			echo "https://security.freebsd.org/patches/${x} -> ${out}"
+		fi
+	done
+}
+
+# Apply PATCHES (array or whitespace-separated string), any fetched
+# upstream security patches, and finally user patches.
+freebsd_do_patches() {
+	# NOTE(review): a one-element PATCHES array takes the string branch,
+	# where the unquoted ${PATCHES} expands to its first element — works,
+	# but only for paths without whitespace; confirm intended.
+	if [[ ${#PATCHES[@]} -gt 1 ]] ; then
+		for x in "${PATCHES[@]}"; do
+			epatch "${x}"
+		done
+	else
+		for x in ${PATCHES} ; do
+			epatch "${x}"
+		done
+	fi
+	[[ ${#UPSTREAM_PATCHES[@]} -gt 0 ]] && epatch $(freebsd_upstream_patches -s)
+	epatch_user
+}
+
+# Rewrite library names in all Makefiles to the ones Gentoo provides
+# (ncurses instead of termcap, libfl instead of libl, ncursesw instead
+# of cursesw, expat instead of bsdxml).
+freebsd_rename_libraries() {
+	ebegin "Renaming libraries"
+	# We don't use libtermcap, we use libncurses
+	find "${S}" -name Makefile -print0 | xargs -0 \
+		sed -i -e 's:-ltermcap:-lncurses:g; s:{LIBTERMCAP}:{LIBNCURSES}:g'
+	# flex provides libfl, not libl
+	find "${S}" -name Makefile -print0 | xargs -0 \
+		sed -i -e 's:-ll$:-lfl:g; s:-ll :-lfl :g; s:{LIBL}:{LIBFL}:g'
+	# ncurses provides libncursesw not libcursesw
+	find "${S}" -name Makefile -print0 | xargs -0 \
+		sed -i -e 's:-lcursesw:-lncursesw:g'
+	# we use expat instead of bsdxml
+	find "${S}" -name Makefile -print0 | xargs -0 \
+		sed -i -e 's:-lbsdxml:-lexpat:g'
+
+	eend $?
+}
+
+# Exported src_unpack: fetch/unpack the sources (SVN checkout for *9999*,
+# partial src.txz extraction for >=10.0, plain tarballs otherwise), then
+# apply subdir removals, patches, and library renames.
+freebsd_src_unpack() {
+	if [[ ${MY_PV} == *9999* ]]; then
+		S="${WORKDIR}" subversion_src_unpack
+
+		# When share/mk exists in ${WORKDIR}, it is used on FreeBSD 10.0
+		# Removed "${WORKDIR}"/share/mk/*.mk, use to force /usr/share/mk.
+		if [[ ${PN} != freebsd-mk-defs ]] ; then
+			[[ -e "${WORKDIR}"/share/mk ]] && rm -rf "${WORKDIR}"/share/mk/*.mk
+		fi
+	else
+		if version_is_at_least 10.0 ${RV} ; then
+			local tarball="freebsd-src-${MY_PV}.tar.xz"
+			local topdir="usr/src/"
+			local extractlist=()
+			# Only extract the subtrees listed in EXTRACTONLY.
+			for i in ${EXTRACTONLY} ; do
+				extractlist+=( ${topdir}${i} )
+			done
+			ebegin "Unpacking parts of ${tarball} to ${WORKDIR}"
+			cd "${WORKDIR}" || die
+			# --strip-components=2 drops the usr/src/ prefix.
+			tar -xJpf "${DISTDIR}/${tarball}" --strip-components=2 "${extractlist[@]}" 2> /dev/null || die "tar extract command failed"
+			cd - || die
+		else
+			for f in ${A} ; do
+				[[ ${f} == *.tar.* ]] && unpack ${f}
+			done
+		fi
+	fi
+	cd "${S}"
+
+	dummy_mk ${REMOVE_SUBDIRS}
+
+	freebsd_do_patches
+	freebsd_rename_libraries
+
+	# Starting from FreeBSD 9.2, its install command supports the -l option and
+	# they now use it. Emulate it if we are on a system that does not have it.
+	if version_is_at_least 9.2 ${RV} && ! has_version '>=sys-freebsd/freebsd-ubin-9.2_beta1' ; then
+		export INSTALL_LINK="ln -f"
+		export INSTALL_SYMLINK="ln -fs"
+	fi
+}
+
+# Exported src_compile: set profiling/compression make knobs, pick the
+# right bmake, pre-create the objdir if requested, then hand off to
+# bsdmk_src_compile.
+freebsd_src_compile() {
+	# Profiling needs frame pointers; without USE=profile, skip profiled libs.
+	use profile && filter-flags "-fomit-frame-pointer"
+	use profile || mymakeopts="${mymakeopts} NO_PROFILE= "
+
+	mymakeopts="${mymakeopts} NO_MANCOMPRESS= NO_INFOCOMPRESS= NO_FSCHG="
+
+	# Make sure to use FreeBSD definitions while crosscompiling
+	[[ -z "${BMAKE}" ]] && BMAKE="$(freebsd_get_bmake)"
+
+	# Create objdir if MAKEOBJDIRPREFIX is defined, so that we can make out of
+	# tree builds easily.
+	if [[ -n "${MAKEOBJDIRPREFIX}" ]] ; then
+		mkmake obj || die
+	fi
+
+	bsdmk_src_compile "$@"
+}
+
+# Helper function to make a multilib build with FreeBSD Makefiles.
+# Usage:
+# MULTIBUILD_VARIANTS=( $(get_all_abis) )
+# multibuild_foreach_variant freebsd_multilib_multibuild_wrapper my_function
+#
+# Important note: To use this function you _have_ to:
+# - inherit multilib.eclass and multibuild.eclass
+# - set MULTIBUILD_VARIANTS
+
+freebsd_multilib_multibuild_wrapper() {
+	# Get the ABI from multibuild.eclass
+	# This assumes MULTIBUILD_VARIANTS contains only valid ABIs.
+	local ABI=${MULTIBUILD_VARIANT}
+
+	# First, save the variables: CFLAGS, CXXFLAGS, LDFLAGS, LDADD and mymakeopts.
+	for i in CFLAGS CXXFLAGS LDFLAGS LDADD mymakeopts ; do
+		export ${i}_SAVE="${!i}"
+	done
+
+	# Setup the variables specific to this ABI.
+	multilib_toolchain_setup "${ABI}"
+
+	local target="$(tc-arch-kernel ${CHOST})"
+	mymakeopts="${mymakeopts} TARGET=${target} MACHINE=${target} MACHINE_ARCH=${target} SHLIBDIR=/usr/$(get_libdir) LIBDIR=/usr/$(get_libdir)"
+	# Non-default ABIs are built as 32-bit compat libraries.
+	if [ "${ABI}" != "${DEFAULT_ABI}" ] ; then
+		mymakeopts="${mymakeopts} COMPAT_32BIT="
+	fi
+
+	einfo "Building for ABI=${ABI} and TARGET=${target}"
+
+	# Per-variant object dir keeps the ABIs' build trees separate.
+	export MAKEOBJDIRPREFIX="${BUILD_DIR}"
+	if [ ! -d "${MAKEOBJDIRPREFIX}" ] ; then
+		mkdir "${MAKEOBJDIRPREFIX}" || die "Could not create ${MAKEOBJDIRPREFIX}."
+	fi
+
+	CTARGET="${CHOST}" "$@"
+
+	# Restore the variables now.
+	for i in CFLAGS CXXFLAGS LDFLAGS LDADD mymakeopts ; do
+		ii="${i}_SAVE"
+		export ${i}="${!ii}"
+	done
+}
+
+# Exported src_install: mirror the compile-time make knobs and delegate
+# the actual install to bsdmk_src_install.
+freebsd_src_install() {
+	use profile || mymakeopts="${mymakeopts} NO_PROFILE= "
+
+	mymakeopts="${mymakeopts} NO_MANCOMPRESS= NO_INFOCOMPRESS= NO_FSCHG="
+
+	[[ -z "${BMAKE}" ]] && BMAKE="$(freebsd_get_bmake)"
+
+	bsdmk_src_install
+}
diff --git a/eclass/freedict.eclass b/eclass/freedict.eclass
new file mode 100644
index 000000000000..dc67316b7e6c
--- /dev/null
+++ b/eclass/freedict.eclass
@@ -0,0 +1,50 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: freedict.eclass
+# @MAINTAINER:
+# maintainer-needed@gentoo.org
+# @AUTHOR:
+# Original author: Seemant Kulleen
+# @BLURB: Ease the installation of freedict translation dictionaries
+# @DESCRIPTION:
+# This eclass exists to ease the installation of freedict translation
+# dictionaries. The only variables which need to be defined in the actual
+# ebuilds are FORLANG and TOLANG for the source and target languages,
+# respectively.
+
+# @ECLASS-VARIABLE: FORLANG
+# @DESCRIPTION:
+# Please see above for a description.
+
+# @ECLASS-VARIABLE: TOLANG
+# @DESCRIPTION:
+# Please see above for a description.
+
+inherit eutils multilib
+
+IUSE=""
+
+MY_P=${PN/freedict-/}
+
+S="${WORKDIR}"
+DESCRIPTION="Freedict for language translation from ${FORLANG} to ${TOLANG}"
+HOMEPAGE="http://www.freedict.de"
+SRC_URI="http://freedict.sourceforge.net/download/linux/${MY_P}.tar.gz"
+
+SLOT="0"
+LICENSE="GPL-2"
+
+DEPEND="app-text/dictd"
+
+# @FUNCTION: freedict_src_install
+# @DESCRIPTION:
+# The freedict src_install function, which is exported
+# Installs the compressed dict database and its index where dictd
+# expects them.
+freedict_src_install() {
+	insinto /usr/$(get_libdir)/dict
+	doins ${MY_P}.dict.dz
+	doins ${MY_P}.index
+}
+
+EXPORT_FUNCTIONS src_install
diff --git a/eclass/games-mods.eclass b/eclass/games-mods.eclass
new file mode 100644
index 000000000000..7cefa45b0fbb
--- /dev/null
+++ b/eclass/games-mods.eclass
@@ -0,0 +1,319 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Variables to specify in an ebuild which uses this eclass:
+# GAME - (doom3, quake4 or ut2004, etc), unless ${PN} starts with e.g. "doom3-"
+# MOD_DESC - Description for the mod
+# MOD_NAME - Creates a command-line wrapper and desktop icon for the mod
+# MOD_DIR - Subdirectory name for the mod, if applicable
+# MOD_ICON - Custom icon for the mod, instead of the default
+
+inherit eutils games
+
+EXPORT_FUNCTIONS src_install pkg_postinst
+
+[[ -z ${GAME} ]] && GAME=${PN%%-*}
+
+case ${GAME} in
+ doom3)
+ GAME_PKGS="games-fps/doom3"
+ GAME_DIRS=( "${GAMES_PREFIX_OPT}"/doom3 )
+ GAME_NAME="Doom 3"
+ GAME_BIN="doom3"
+ GAME_ICON="doom3"
+ DED_PKGS=""
+ DED_BIN="doom3-ded"
+ DED_OPTS="+set dedicated 1 +exec server.cfg"
+ DED_CFG_DIR=".doom3"
+ SELECT_MOD="+set fs_game "
+ ;;
+ enemy-territory)
+ GAME_PKGS="games-fps/enemy-territory"
+ GAME_DIRS=( "${GAMES_PREFIX_OPT}"/enemy-territory )
+ GAME_NAME="Enemy Territory"
+ GAME_BIN="et"
+ GAME_ICON="ET"
+ DED_PKGS=""
+ DED_BIN="et-ded"
+ DED_OPTS="+set dedicated 1 +exec server.cfg"
+ DED_CFG_DIR=".etwolf"
+ SELECT_MOD="+set fs_game "
+ ;;
+ quake3)
+ GAME_PKGS="games-fps/quake3 games-fps/quake3-bin"
+ GAME_DIRS=( "${GAMES_DATADIR}"/quake3 "${GAMES_PREFIX_OPT}"/quake3 )
+ GAME_NAME="Quake III"
+ GAME_BIN="quake3"
+ GAME_ICON="quake3"
+ DED_PKGS=""
+ DED_BIN="quake3-ded"
+ DED_OPTS="+set dedicated 1 +exec server.cfg"
+ DED_CFG_DIR=".q3a"
+ SELECT_MOD="+set fs_game "
+ ;;
+ quake4)
+ GAME_PKGS="games-fps/quake4-bin"
+ GAME_DIRS=( "${GAMES_PREFIX_OPT}"/quake4 )
+ GAME_NAME="Quake 4"
+ GAME_BIN="quake4"
+ GAME_ICON="/usr/share/pixmaps/quake4.bmp"
+ DED_PKGS=""
+ DED_BIN="quake4-ded"
+ DED_OPTS="+set dedicated 1 +exec server.cfg"
+ DED_CFG_DIR=".quake4"
+ SELECT_MOD="+set fs_game "
+ ;;
+ ut2003)
+ GAME_PKGS="games-fps/ut2003"
+ GAME_DIRS=( "${GAMES_PREFIX_OPT}"/ut2003 )
+ GAME_NAME="UT2003"
+ GAME_BIN="ut2003"
+ GAME_ICON="ut2003"
+ DED_PKGS=""
+ DED_BIN="ucc"
+ DED_OPTS=""
+ DED_CFG_DIR=""
+ SELECT_MOD="-mod="
+ ;;
+ ut2004)
+ GAME_PKGS="games-fps/ut2004"
+ GAME_DIRS=( "${GAMES_PREFIX_OPT}"/{ut2004,ut2004-ded} )
+ GAME_NAME="UT2004"
+ GAME_BIN="ut2004"
+ GAME_ICON="ut2004"
+ DED_PKGS="games-server/ut2004-ded"
+ DED_BIN="ut2004-ded"
+ DED_OPTS=""
+ DED_CFG_DIR=""
+ SELECT_MOD="-mod="
+ ;;
+ *)
+ eerror "This game is either not supported or you must set the GAME"
+ eerror "variable to the proper game."
+ die "games-mods.eclass: unsupported GAME"
+ ;;
+esac
+
+MOD_BIN="${GAME_BIN}-${PN/${GAME}-}"
+MOD_DED_BIN="${MOD_BIN}-ded"
+
+# games-mods_get_rdepend [--ded]
+# Echoes the RDEPEND atoms for the underlying game package(s); with --ded
+# the dedicated-server packages (DED_PKGS) are included as well.  Under
+# EAPI 2 the atoms carry [dedicated]/[opengl] USE dependencies.  When more
+# than one candidate package exists, the set is wrapped in an any-of
+# "|| ( ... )" group.  Output goes to stdout (used via $(...) in RDEPEND).
+games-mods_get_rdepend() {
+	local pkgs
+
+	if [[ ${1} == "--ded" ]] ; then
+		pkgs=( ${DED_PKGS} ${GAME_PKGS} )
+	else
+		pkgs=( ${GAME_PKGS} )
+	fi
+
+	[[ ${#pkgs[@]} -gt 1 ]] && echo -n "|| ( "
+
+	case ${EAPI:-0} in
+		0|1) echo -n "${pkgs[@]}" ;;
+		2)
+			local p
+			if [[ ${1} == "--ded" ]] ; then
+				echo -n "${DED_PKGS}"
+				for p in ${GAME_PKGS} ; do
+					echo -n " ${p}[dedicated]"
+				done
+			else
+				for p in ${GAME_PKGS} ; do
+					echo -n " || ( ${p}[opengl] ${p}[-dedicated] )"
+				done
+			fi
+			;;
+	esac
+
+	[[ ${#pkgs[@]} -gt 1 ]] && echo -n " )"
+}
+
+DESCRIPTION="${GAME_NAME} ${MOD_NAME} - ${MOD_DESC}"
+
+SLOT="0"
+IUSE="dedicated opengl"
+RESTRICT="mirror strip"
+
+DEPEND="app-arch/unzip"
+RDEPEND="dedicated? ( $(games-mods_get_rdepend --ded) )
+ opengl? ( $(games-mods_get_rdepend) )
+ !dedicated? ( !opengl? ( $(games-mods_get_rdepend) ) )"
+
+S=${WORKDIR}
+
+INS_DIR=${GAMES_DATADIR}/${GAME}
+
+# games-mods_use_opengl
+# Returns 0 when the client (opengl) part of the mod should be installed:
+# either USE=opengl is set, or USE=dedicated is NOT set (opengl is the
+# default).  Always returns 1 when MOD_DIR is unset.
+games-mods_use_opengl() {
+	[[ -z ${MOD_DIR} ]] && return 1
+
+	if use opengl || ! use dedicated ; then
+		# Use opengl by default
+		return 0
+	fi
+
+	return 1
+}
+
+# games-mods_use_dedicated
+# Returns 0 when the dedicated-server part of the mod should be installed
+# (USE=dedicated); always returns 1 when MOD_DIR is unset.
+games-mods_use_dedicated() {
+	[[ -z ${MOD_DIR} ]] && return 1
+
+	use dedicated && return 0 || return 1
+}
+
+# games-mods_dosyms
+# Mirror the image's ${INS_DIR} contents into every entry of GAME_DIRS[]
+# via symlinks: the MOD_DIR subdirectory is linked as a whole, everything
+# else is linked file-by-file.  Dies on any unexpected entry.
+games-mods_dosyms() {
+	# We are installing everything for these mods into ${INS_DIR},
+	# ${GAMES_DATADIR}/${GAME} in most cases, and symlinking it
+	# into ${GAMES_PREFIX_OPT}/${GAME} for each game. This should
+	# allow us to support both binary and source-based games easily.
+	local dir
+	for dir in "${GAME_DIRS[@]}" ; do
+		# Skip the install dir itself; nothing to link onto itself.
+		[[ -z ${dir} || ${INS_DIR} == ${dir} ]] && continue
+		pushd "${D}/${INS_DIR}" > /dev/null || die "pushd failed"
+		local i
+		for i in * ; do
+			if [[ -d ${i} ]] ; then
+				if [[ ${i} == ${MOD_DIR} ]] ; then
+					# The mod's own dir is linked wholesale.
+					dosym "${INS_DIR}/${i}" "${dir}/${i}" \
+						|| die "dosym ${i} failed"
+				else
+					# Shared dirs: link each regular file so the
+					# target dir can still hold other content.
+					local f
+					while read f ; do
+						dosym "${INS_DIR}/${f}" "${dir}/${f}" \
+							|| die "dosym ${f} failed"
+					done < <(find "${i}" -type f)
+				fi
+			elif [[ -f ${i} ]] ; then
+				dosym "${INS_DIR}/${i}" "${dir}/${i}" \
+					|| die "dosym ${i} failed"
+			else
+				die "${i} shouldn't be there"
+			fi
+		done
+		popd > /dev/null || die "popd failed"
+	done
+}
+
+# games-mods_make_initd
+# Generate and install an OpenRC init script named ${MOD_DED_BIN} that
+# start-stop-daemon's the dedicated server.  The here-doc is unquoted, so
+# ${MOD_DED_BIN}, ${GAMES_BINDIR} etc. expand at generation time while the
+# backslash-escaped \$ forms are left for the init script's runtime
+# (reading the ..._user/_group/_home/_opts conf.d variables).
+# NOTE(review): ${PORTDIR}/header.txt is expanded unquoted inside the
+# command substitution — fine for standard whitespace-free portage paths,
+# but worth confirming.
+games-mods_make_initd() {
+	cat <<EOF > "${T}"/${MOD_DED_BIN}
+#!/sbin/runscript
+$(head -n 2 ${PORTDIR}/header.txt)
+# Generated by games-mods.eclass
+
+depend() {
+	need net
+}
+
+start() {
+	ebegin "Starting ${MOD_DED_BIN}"
+	start-stop-daemon --start --quiet --background --make-pidfile \\
+		--pidfile /var/run/${MOD_DED_BIN}.pid \\
+		--chuid \${${MOD_DED_BIN//-/_}_user}:\${${MOD_DED_BIN//-/_}_group} \\
+		--env HOME="\${${MOD_DED_BIN//-/_}_home}" \\
+		--exec "${GAMES_BINDIR}/${MOD_DED_BIN}" \\
+		-- \${${MOD_DED_BIN//-/_}_opts}
+	eend \$?
+}
+
+stop() {
+	ebegin "Stopping ${MOD_DED_BIN}"
+	start-stop-daemon --stop \\
+		--pidfile /var/run/${MOD_DED_BIN}.pid
+	eend \$?
+}
+EOF
+
+	doinitd "${T}"/${MOD_DED_BIN} || die "doinitd failed"
+}
+
+# games-mods_make_confd
+# Generate and install the matching conf.d file defining the
+# ${MOD_DED_BIN//-/_}_user/_group/_home/_opts variables read by the init
+# script from games-mods_make_initd.  The <<- form strips leading tabs.
+games-mods_make_confd() {
+	cat <<-EOF > "${T}"/${MOD_DED_BIN}
+	# User and group the server should run as
+	${MOD_DED_BIN//-/_}_user="${GAMES_USER_DED}"
+	${MOD_DED_BIN//-/_}_group="${GAMES_GROUP}"
+
+	# Directory to use for HOME
+	${MOD_DED_BIN//-/_}_home="${GAMES_PREFIX}"
+
+	# Any extra options you want to pass to the dedicated server
+	${MOD_DED_BIN//-/_}_opts=""
+	EOF
+
+	doconfd "${T}"/${MOD_DED_BIN} || die "doconfd failed"
+}
+
+# games-mods_src_install
+# Exported src_install: for the opengl/client side it installs the mod
+# icon, a launcher wrapper and a desktop entry; then it copies the whole
+# work dir into INS_DIR and symlinks it into the game dirs; for the
+# dedicated side it installs server.cfg (if shipped), a server wrapper
+# and the generated init/conf.d files; finally fixes perms.
+games-mods_src_install() {
+	if games-mods_use_opengl ; then
+		if [[ -n ${MOD_ICON} ]] ; then
+			# Install custom icon
+			local ext=${MOD_ICON##*.}
+			if [[ -f ${MOD_ICON} ]] ; then
+				newicon "${MOD_ICON}" ${PN}.${ext} || die "newicon failed"
+			else
+				newicon ${MOD_DIR}/"${MOD_ICON}" ${PN}.${ext} \
+					|| die "newicon failed"
+			fi
+			# bmp/ico cannot be referenced by icon name; use full path.
+			case ${ext} in
+				bmp|ico)
+					MOD_ICON=/usr/share/pixmaps/${PN}.${ext}
+					;;
+				*)
+					MOD_ICON=${PN}
+					;;
+			esac
+		else
+			# Use the game's standard icon
+			MOD_ICON=${GAME_ICON}
+		fi
+
+		games_make_wrapper ${MOD_BIN} "${GAME_BIN} ${SELECT_MOD}${MOD_DIR}"
+		make_desktop_entry ${MOD_BIN} "${GAME_NAME} - ${MOD_NAME}" "${MOD_ICON}"
+		# Since only quake3 has both a binary and a source-based install,
+		# we only look for quake3 here.
+		# NOTE(review): make_desktop_entry below sits OUTSIDE the
+		# has_version guard, so a -bin desktop entry is created even when
+		# quake3-bin is not installed — looks unintentional, confirm.
+		case ${GAME} in
+			quake3)
+				if has_version games-fps/quake3-bin ; then
+					games_make_wrapper ${GAME_BIN}-bin-${PN/${GAME}-} \
+						"${GAME_BIN}-bin ${SELECT_MOD}${MOD_DIR}"
+				fi
+				make_desktop_entry ${GAME_BIN}-bin-${PN/${GAME}-} \
+					"${GAME_NAME} - ${MOD_NAME} (binary)" "${MOD_ICON}"
+				;;
+		esac
+	fi
+
+	# We expect anything not wanted to have been deleted by the ebuild
+	insinto "${INS_DIR}"
+	doins -r * || die "doins -r failed"
+	games-mods_dosyms
+
+	if games-mods_use_dedicated ; then
+		if [[ -f ${FILESDIR}/server.cfg ]] ; then
+			insinto "${GAMES_SYSCONFDIR}"/${GAME}/${MOD_DIR}
+			doins "${FILESDIR}"/server.cfg || die "doins server.cfg failed"
+			# Symlink the config into the server user's HOME config dir.
+			dosym "${GAMES_SYSCONFDIR}"/${GAME}/${MOD_DIR}/server.cfg \
+				"${GAMES_PREFIX}"/${DED_CFG_DIR}/${MOD_DIR}/server.cfg \
+				|| die "dosym server.cfg failed"
+		fi
+		games_make_wrapper ${MOD_DED_BIN} \
+			"\"${GAMES_BINDIR}/${DED_BIN}\" ${SELECT_MOD}${MOD_DIR} ${DED_OPTS}"
+		games-mods_make_initd
+		games-mods_make_confd
+	fi
+
+	prepgamesdirs
+}
+
+# games-mods_pkg_postinst
+# Exported pkg_postinst: runs the standard games postinst, then prints
+# how to launch the mod client and/or dedicated server.
+games-mods_pkg_postinst() {
+	games_pkg_postinst
+	if games-mods_use_opengl ; then
+		elog "To play this mod run:"
+		elog " ${MOD_BIN}"
+	fi
+	if games-mods_use_dedicated ; then
+		elog "To launch a dedicated server run:"
+		elog " ${MOD_DED_BIN}"
+		elog "To launch the server at startup run:"
+		elog " rc-update add ${MOD_DED_BIN} default"
+	fi
+}
diff --git a/eclass/games.eclass b/eclass/games.eclass
new file mode 100644
index 000000000000..9c07fc3b9ce0
--- /dev/null
+++ b/eclass/games.eclass
@@ -0,0 +1,391 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: games
+# @MAINTAINER:
+# Games team <games@gentoo.org>
+# @BLURB: Standardizing the install of games.
+# @DESCRIPTION:
+# This eclass makes sure that games are consistently handled in gentoo.
+# It installs game files by default in FHS-compatible directories
+# like /usr/share/games and sets more restrictive permissions in order
+# to avoid some security bugs.
+#
+# The installation directories as well as the user and group files are
+# installed as can be controlled by the user. See the variables like
+# GAMES_BINDIR, GAMES_USER etc. below. These are NOT supposed to be set
+# by ebuilds!
+#
+# For a general guide on writing games ebuilds, see:
+# https://wiki.gentoo.org/wiki/Project:Games/Ebuild_howto
+
+
+if [[ -z ${_GAMES_ECLASS} ]]; then
+_GAMES_ECLASS=1
+
+inherit base multilib toolchain-funcs eutils user
+
+case ${EAPI:-0} in
+ 0|1) EXPORT_FUNCTIONS pkg_setup src_compile pkg_preinst pkg_postinst ;;
+ 2|3|4|5) EXPORT_FUNCTIONS pkg_setup src_configure src_compile pkg_preinst pkg_postinst ;;
+ *) die "no support for EAPI=${EAPI} yet" ;;
+esac
+
+if [[ ${CATEGORY}/${PN} != "games-misc/games-envd" ]] ; then
+ # environment file
+ RDEPEND="games-misc/games-envd"
+fi
+
+# @ECLASS-VARIABLE: GAMES_PREFIX
+# @DESCRIPTION:
+# Prefix where to install games, mostly used by GAMES_BINDIR. Games data should
+# still go into GAMES_DATADIR. May be set by the user.
+GAMES_PREFIX=${GAMES_PREFIX:-/usr/games}
+
+# @ECLASS-VARIABLE: GAMES_PREFIX_OPT
+# @DESCRIPTION:
+# Prefix where to install precompiled/blob games, usually followed by
+# package name. May be set by the user.
+GAMES_PREFIX_OPT=${GAMES_PREFIX_OPT:-/opt}
+
+# @ECLASS-VARIABLE: GAMES_DATADIR
+# @DESCRIPTION:
+# Base directory where to install game data files, usually followed by
+# package name. May be set by the user.
+GAMES_DATADIR=${GAMES_DATADIR:-/usr/share/games}
+
+# @ECLASS-VARIABLE: GAMES_DATADIR_BASE
+# @DESCRIPTION:
+# Similar to GAMES_DATADIR, but only used when a package auto appends 'games'
+# to the path. May be set by the user.
+GAMES_DATADIR_BASE=${GAMES_DATADIR_BASE:-/usr/share}
+
+# @ECLASS-VARIABLE: GAMES_SYSCONFDIR
+# @DESCRIPTION:
+# Where to install global games configuration files, usually followed by
+# package name. May be set by the user.
+GAMES_SYSCONFDIR=${GAMES_SYSCONFDIR:-/etc/games}
+
+# @ECLASS-VARIABLE: GAMES_STATEDIR
+# @DESCRIPTION:
+# Where to install/store global variable game data, usually followed by
+# package name. May be set by the user.
+GAMES_STATEDIR=${GAMES_STATEDIR:-/var/games}
+
+# @ECLASS-VARIABLE: GAMES_LOGDIR
+# @DESCRIPTION:
+# Where to store global game log files, usually followed by
+# package name. May be set by the user.
+GAMES_LOGDIR=${GAMES_LOGDIR:-/var/log/games}
+
+# @ECLASS-VARIABLE: GAMES_BINDIR
+# @DESCRIPTION:
+# Where to install the game binaries. May be set by the user. This is in PATH.
+GAMES_BINDIR=${GAMES_BINDIR:-${GAMES_PREFIX}/bin}
+
+# @ECLASS-VARIABLE: GAMES_ENVD
+# @INTERNAL
+# @DESCRIPTION:
+# The games environment file name which sets games specific LDPATH and PATH.
+GAMES_ENVD="90games"
+
+# @ECLASS-VARIABLE: GAMES_USER
+# @DESCRIPTION:
+# The USER who owns all game files and usually has write permissions.
+# May be set by the user.
+GAMES_USER=${GAMES_USER:-root}
+
+# @ECLASS-VARIABLE: GAMES_USER_DED
+# @DESCRIPTION:
+# The USER who owns all game files related to the dedicated server part
+# of a package. May be set by the user.
+GAMES_USER_DED=${GAMES_USER_DED:-games}
+
+# @ECLASS-VARIABLE: GAMES_GROUP
+# @DESCRIPTION:
+# The GROUP that owns all game files and usually does not have
+# write permissions. May be set by the user.
+# If you want games world-executable, then you can at least set this variable
+# to 'users' which is almost the same.
+GAMES_GROUP=${GAMES_GROUP:-games}
+
+# @FUNCTION: games_get_libdir
+# @DESCRIPTION:
+# Gets the directory where to install games libraries. This is in LDPATH.
+# Echoes e.g. /usr/games/lib64 (GAMES_PREFIX plus the active libdir).
+games_get_libdir() {
+	echo ${GAMES_PREFIX}/$(get_libdir)
+}
+
+# @FUNCTION: egamesconf
+# @USAGE: [<args>...]
+# @DESCRIPTION:
+# Games equivalent to 'econf' for autotools based build systems. It passes
+# the necessary games specific directories automatically.
+egamesconf() {
+	# handle verbose build log pre-EAPI5
+	local _gamesconf
+	if has "${EAPI:-0}" 0 1 2 3 4 ; then
+		# Only pass the flag if the configure script knows it.
+		if grep -q -s disable-silent-rules "${ECONF_SOURCE:-.}"/configure ; then
+			_gamesconf="--disable-silent-rules"
+		fi
+	fi
+
+	# bug 493954
+	if grep -q -s datarootdir "${ECONF_SOURCE:-.}"/configure ; then
+		_gamesconf="${_gamesconf} --datarootdir=/usr/share"
+	fi
+
+	# ${_gamesconf} is intentionally unquoted: it expands to zero, one or
+	# two whitespace-separated options.
+	econf \
+		--prefix="${GAMES_PREFIX}" \
+		--libdir="$(games_get_libdir)" \
+		--datadir="${GAMES_DATADIR}" \
+		--sysconfdir="${GAMES_SYSCONFDIR}" \
+		--localstatedir="${GAMES_STATEDIR}" \
+		${_gamesconf} \
+		"$@"
+}
+
+# @FUNCTION: gameswrapper
+# @USAGE: <command> [<args>...]
+# @INTERNAL
+# @DESCRIPTION:
+# Wraps an install command like dobin, dolib etc, so that
+# it has GAMES_PREFIX as prefix.
+gameswrapper() {
+	# dont want to pollute calling env
+	# (the subshell keeps both the 'into' destination change and the
+	# helper variable 'cmd' from leaking into the caller)
+	(
+		into "${GAMES_PREFIX}"
+		cmd=$1
+		shift
+		${cmd} "$@"
+	)
+}
+
+# The helpers below mirror the standard do*/new* install helpers but
+# install relative to GAMES_PREFIX via gameswrapper().  The expansion
+# ${FUNCNAME/games} strips "games" from the helper's own name to recover
+# the underlying command (e.g. dogamesbin -> dobin).
+
+# @FUNCTION: dogamesbin
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Install one or more games binaries.
+dogamesbin() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: dogamessbin
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Install one or more games system binaries.
+dogamessbin() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: dogameslib
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Install one or more games libraries.
+dogameslib() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: dogameslib.a
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Install one or more static games libraries.
+dogameslib.a() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: dogameslib.so
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Install one or more shared games libraries.
+dogameslib.so() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: newgamesbin
+# @USAGE: <path> <newname>
+# @DESCRIPTION:
+# Install one games binary with a new name.
+newgamesbin() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: newgamessbin
+# @USAGE: <path> <newname>
+# @DESCRIPTION:
+# Install one system games binary with a new name.
+newgamessbin() { gameswrapper ${FUNCNAME/games} "$@"; }
+
+# @FUNCTION: games_make_wrapper
+# @USAGE: <wrapper> <target> [chdir] [libpaths] [installpath]
+# @DESCRIPTION:
+# Create a shell wrapper script named wrapper in installpath
+# (defaults to the games bindir) to execute target (default of wrapper) by
+# first optionally setting LD_LIBRARY_PATH to the colon-delimited
+# libpaths followed by optionally changing directory to chdir.
+games_make_wrapper() { gameswrapper ${FUNCNAME/games_} "$@"; }
+
+# @FUNCTION: gamesowners
+# @USAGE: [<args excluding owner/group>...] <path>...
+# @DESCRIPTION:
+# Run 'chown' with the given args on the given files. Owner and
+# group are GAMES_USER and GAMES_GROUP and must not be passed
+# as args.
+gamesowners() { chown ${GAMES_USER}:${GAMES_GROUP} "$@"; }
+
+# @FUNCTION: gamesperms
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Run 'chmod' with games specific permissions on the given files.
+gamesperms() { chmod u+rw,g+r-w,o-rwx "$@"; }
+
+# @FUNCTION: prepgamesdirs
+# @DESCRIPTION:
+# Fix all permissions/owners of files in games related directories,
+# usually called at the end of src_install().  Extra directories may be
+# passed as arguments.  Dirs become 750, files 750-ish (state files keep
+# group read-only), the top-level trees themselves stay root:root 755,
+# and any setuid-root file aborts the merge.
+prepgamesdirs() {
+	local dir f mode
+	for dir in \
+		"${GAMES_PREFIX}" "${GAMES_PREFIX_OPT}" "${GAMES_DATADIR}" \
+		"${GAMES_SYSCONFDIR}" "${GAMES_STATEDIR}" "$(games_get_libdir)" \
+		"${GAMES_BINDIR}" "$@"
+	do
+		[[ ! -d ${D}/${dir} ]] && continue
+		# Subshell with all output suppressed: chown/chmod noise on
+		# partially-populated trees is deliberately ignored.
+		(
+		gamesowners -R "${D}/${dir}"
+		find "${D}/${dir}" -type d -print0 | xargs -0 chmod 750
+		mode=o-rwx,g+r,g-w
+		[[ ${dir} = ${GAMES_STATEDIR} ]] && mode=o-rwx,g+r
+		find "${D}/${dir}" -type f -print0 | xargs -0 chmod $mode
+
+		# common trees should not be games owned #264872 #537580
+		fowners root:root "${dir}"
+		fperms 755 "${dir}"
+		if [[ ${dir} == "${GAMES_PREFIX}" \
+			|| ${dir} == "${GAMES_PREFIX_OPT}" ]] ; then
+			for d in $(get_libdir) bin ; do
+				# check if dirs exist to avoid "nonfatal" option
+				if [[ -e ${D}/${dir}/${d} ]] ; then
+					fowners root:root "${dir}/${d}"
+					fperms 755 "${dir}/${d}"
+				fi
+			done
+		fi
+		) &>/dev/null
+
+		# NOTE(review): '-perm +4000' is the old GNU findutils spelling;
+		# modern findutils only accept '-perm /4000' — verify against the
+		# supported find version.
+		f=$(find "${D}/${dir}" -perm +4000 -a -uid 0 2>/dev/null)
+		if [[ -n ${f} ]] ; then
+			eerror "A game was detected that is setuid root!"
+			eerror "${f}"
+			die "refusing to merge a setuid root game"
+		fi
+	done
+	[[ -d ${D}/${GAMES_BINDIR} ]] || return 0
+	find "${D}/${GAMES_BINDIR}" -maxdepth 1 -type f -exec chmod 750 '{}' \;
+}
+
+# @FUNCTION: games_pkg_setup
+# @DESCRIPTION:
+# Export some toolchain specific variables and create games related groups
+# and users. This function is exported as pkg_setup().
+# Fixed IDs: group 35, GAMES_USER uid 35, GAMES_USER_DED uid 36.
+games_pkg_setup() {
+	tc-export CC CXX LD AR RANLIB
+
+	enewgroup "${GAMES_GROUP}" 35
+	[[ ${GAMES_USER} != "root" ]] \
+		&& enewuser "${GAMES_USER}" 35 -1 "${GAMES_PREFIX}" "${GAMES_GROUP}"
+	[[ ${GAMES_USER_DED} != "root" ]] \
+		&& enewuser "${GAMES_USER_DED}" 36 /bin/bash "${GAMES_PREFIX}" "${GAMES_GROUP}"
+
+	# Dear portage team, we are so sorry. Lots of love, games team.
+	# See Bug #61680
+	# (force a usable shell on the dedicated user; GNU userland only)
+	[[ ${USERLAND} != "GNU" ]] && return 0
+	[[ $(egetshell "${GAMES_USER_DED}") == "/bin/false" ]] \
+		&& usermod -s /bin/bash "${GAMES_USER_DED}"
+}
+
+# @FUNCTION: games_src_configure
+# @DESCRIPTION:
+# Runs egamesconf if there is an executable configure file.
+# This function is exported as src_configure().
+games_src_configure() {
+	[[ -x "${ECONF_SOURCE:-.}"/configure ]] && egamesconf
+}
+
+# @FUNCTION: games_src_compile
+# @DESCRIPTION:
+# Runs base_src_make(). This function is exported as src_compile().
+# For EAPI 0/1 (no separate src_configure phase) it runs the configure
+# step itself first.
+games_src_compile() {
+	case ${EAPI:-0} in
+		0|1) games_src_configure ;;
+	esac
+	base_src_make
+}
+
+# @FUNCTION: games_pkg_preinst
+# @DESCRIPTION:
+# Synchronizes GAMES_STATEDIR of the ebuild image with the live filesystem:
+# any state file already present on the system overwrites the image copy,
+# so existing scores/saves survive the merge.
+games_pkg_preinst() {
+	local f
+
+	while read f ; do
+		if [[ -e ${ROOT}/${GAMES_STATEDIR}/${f} ]] ; then
+			cp -p \
+				"${ROOT}/${GAMES_STATEDIR}/${f}" \
+				"${D}/${GAMES_STATEDIR}/${f}" \
+				|| die "cp failed"
+			# make the date match the rest of the install
+			touch "${D}/${GAMES_STATEDIR}/${f}"
+		fi
+	done < <(find "${D}/${GAMES_STATEDIR}" -type f -printf '%P\n' 2>/dev/null)
+}
+
+# @FUNCTION: games_pkg_postinst
+# @DESCRIPTION:
+# Prints some warnings and infos, also related to games groups.
+# Setting GAMES_SHOW_WARNING to any non-empty value suppresses the output
+# (the message is shown only while the variable is empty/unset).
+games_pkg_postinst() {
+	if [[ -z "${GAMES_SHOW_WARNING}" ]] ; then
+		ewarn "Remember, in order to play games, you have to"
+		ewarn "be in the '${GAMES_GROUP}' group."
+		echo
+		# Per-platform group-management hint.
+		case ${CHOST} in
+			*-darwin*) ewarn "Just run 'niutil -appendprop / /groups/games users <USER>'";;
+			*-freebsd*|*-dragonfly*) ewarn "Just run 'pw groupmod ${GAMES_GROUP} -m <USER>'";;
+			*) ewarn "Just run 'gpasswd -a <USER> ${GAMES_GROUP}', then have <USER> re-login.";;
+		esac
+		echo
+		einfo "For more info about Gentoo gaming in general, see our website:"
+		einfo " http://games.gentoo.org/"
+		echo
+	fi
+}
+
+# @FUNCTION: games_ut_unpack
+# @USAGE: <directory or file to unpack>
+# @DESCRIPTION:
+# Unpack .uz2 files for UT2003/UT2004 via uz2unpack.  A file argument is
+# decompressed in place (keeping the .uz2); a directory argument has each
+# top-level *.uz2 decompressed and the compressed original removed.
+games_ut_unpack() {
+	local ut_unpack="$1"
+	local f=
+
+	if [[ -z ${ut_unpack} ]] ; then
+		die "You must provide an argument to games_ut_unpack"
+	fi
+	if [[ -f ${ut_unpack} ]] ; then
+		uz2unpack "${ut_unpack}" "${ut_unpack%.uz2}" \
+			|| die "uncompressing file ${ut_unpack}"
+	fi
+	if [[ -d ${ut_unpack} ]] ; then
+		while read f ; do
+			uz2unpack "${ut_unpack}/${f}" "${ut_unpack}/${f%.uz2}" \
+				|| die "uncompressing file ${f}"
+			rm -f "${ut_unpack}/${f}" || die "deleting compressed file ${f}"
+		done < <(find "${ut_unpack}" -maxdepth 1 -name '*.uz2' -printf '%f\n' 2>/dev/null)
+	fi
+}
+
+# @FUNCTION: games_umod_unpack
+# @USAGE: <file to unpack>
+# @DESCRIPTION:
+# Unpacks .umod/.ut2mod/.ut4mod files for UT/UT2003/UT2004.
+# Don't forget to set 'dir' and 'Ddir'.
+# Copies the engine's ucc-bin plus supporting System files from ${dir}
+# into ${Ddir}, runs "ucc-bin umodunpack" there, then removes the
+# temporary files again.  Relies on the caller-provided globals dir
+# (installed game dir) and Ddir (image destination).
+games_umod_unpack() {
+	local umod=$1
+	mkdir -p "${Ddir}"/System
+	cp "${dir}"/System/{ucc-bin,{Manifest,Def{ault,User}}.ini,{Engine,Core,zlib,ogg,vorbis}.so,{Engine,Core}.int} "${Ddir}"/System
+	cd "${Ddir}"/System
+	UT_DATA_PATH=${Ddir}/System ./ucc-bin umodunpack -x "${S}/${umod}" -nohomedir &> /dev/null \
+		|| die "uncompressing file ${umod}"
+	rm -f "${Ddir}"/System/{ucc-bin,{Manifest,Def{ault,User},User,UT200{3,4}}.ini,{Engine,Core,zlib,ogg,vorbis}.so,{Engine,Core}.int,ucc.log} &>/dev/null \
+		|| die "Removing temporary files"
+}
+
+fi
diff --git a/eclass/gdesklets.eclass b/eclass/gdesklets.eclass
new file mode 100644
index 000000000000..4b2bb888bfd9
--- /dev/null
+++ b/eclass/gdesklets.eclass
@@ -0,0 +1,165 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License, v2 or later
+# $Id$
+
+# @ECLASS: gdesklets.eclass
+# @MAINTAINER:
+# gdesklets@gentoo.org
+# @AUTHOR:
+# Original author: Joe Sapp <nixphoeni@gentoo.org>
+# Original author: Mike Gardiner <obz@gentoo.org>
+# @BLURB: Installation functions for Desklets and Controls supported by gDesklets
+# @DESCRIPTION:
+# The gdesklets eclass provides a simple way to create ebuilds for
+# globally installing desktop applets ("Desklets") and supporting code
+# ("Controls") used in the gDesklets framework (provided by
+# gnome-extra/gdesklets-core)
+#
+# This eclass assumes a package following the instructions at
+# http://gdesklets.de/index.php?q=node/2 . Specifically, the package
+# should be a Desklet or Control ONLY (and *not* a Sensor). You
+# technically could have an ebuild that works around this limitation,
+# but no new packages should be added to the tree that do this (mainly
+# for ease of maintenance).
+
+# @ECLASS-VARIABLE: DESKLET_NAME
+# @DESCRIPTION:
+# *Optional* The name of the Desklet, if the package is one. The
+# default is to assume a Desklet with the name being ${PN} without the
+# "desklet-" prefix.
+
+# @ECLASS-VARIABLE: CONTROL_NAME
+# @DESCRIPTION:
+# *Optional* The name of the Control, if the package is one.
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# Anything (like a README) that should be dodoc'd.
+
+# @ECLASS-VARIABLE: SLOT
+# @DESCRIPTION:
+# Set only if the package is a Control and it provides a different
+# interface (i.e. expands to a different install directory) than a
+# previous version.
+
+inherit eutils multilib python
+
+if [[ -n "${CONTROL_NAME}" ]]; then
+ debug-print "Looking for a Control named \"${CONTROL_NAME}\""
+ MY_PN="${CONTROL_NAME}"
+ SRC_URI="http://gdesklets.de/files/controls/${MY_PN}/${MY_PN}-${PV}.tar.gz"
+ unset DESKLET_NAME
+else # [[ -n "${DESKLET_NAME}" ]]; then
+ # Assume an unset DESKLET_NAME means the name is ${PN} without
+ # the "desklet-" prefix
+ [[ -z "${DESKLET_NAME}" ]] && DESKLET_NAME="${PN#desklet-}"
+ debug-print "Looking for a Desklet named \"${DESKLET_NAME}\""
+ MY_PN="${DESKLET_NAME}"
+ SRC_URI="http://gdesklets.de/files/desklets/${MY_PN}/${MY_PN}-${PV}.tar.gz"
+fi
+
+MY_P="${MY_PN}-${PV}"
+S="${WORKDIR}/${MY_PN}"
+
+SLOT="0"
+# Ebuild writer shouldn't need to touch these (except maybe RDEPEND)
+IUSE=""
+RDEPEND=">=gnome-extra/gdesklets-core-0.36.1-r3"
+
+GDESKLETS_INST_DIR="${ROOT}usr/$(get_libdir)/gdesklets"
+
+# @FUNCTION: gdesklets_src_install
+# @DESCRIPTION:
+# Installs a Desklet or Control depending on which is set of
+# CONTROL_NAME or DESKLET_NAME
+gdesklets_src_install() {
+
+	debug-print-function $FUNCNAME $*
+
+	# Disable compilation of included python modules (for Controls)
+	python_disable_pyc
+
+	# Avoid sandbox violations caused by misbehaving packages (bug #128289)
+	addwrite "${ROOT}/root/.gnome2"
+
+	# Both Displays and Controls only need to be readable
+	insopts -m0744
+
+	debug-print-section docs_install
+
+	# Install some docs if so requested (and then delete them so they
+	# don't get copied into the installation directory)
+	# NOTE(review): the trailing backslash after 'rm -f ${DOCS}' continues
+	# the line, so "debug-print" and its arguments become extra arguments
+	# to rm -f — the debug message is never printed.  Looks like a bug;
+	# probably '&& \' was intended.
+	[[ -n "${DOCS}" ]] && dodoc ${DOCS} && \
+		rm -f ${DOCS} \
+		debug-print "Installed and deleted ${DOCS}"
+	# LICENSE doesn't need to get installed if it exists
+	find . -name LICENSE -delete
+
+	if [[ -n "${DESKLET_NAME}" ]]; then
+
+		debug-print-section display_install
+
+		# Base installation directory for displays from this desklet
+		INSDIR="${GDESKLETS_INST_DIR}/Displays/${DESKLET_NAME}"
+
+		debug-print "Installing into ${INSDIR}"
+		debug-print "Exiting Display-specific installation code"
+
+	elif [[ -n "${CONTROL_NAME}" ]]; then
+
+		debug-print-section control_install
+
+		# Unique name for this Control and its interface
+		CTRL_DIRNAME=$( "${GDESKLETS_INST_DIR}/gdesklets-control-getid" `pwd` 2> /dev/null )
+		einfo "Installing Control ${CTRL_DIRNAME}"
+
+		# Base installation directory for this Control
+		INSDIR="${GDESKLETS_INST_DIR}/Controls/${CTRL_DIRNAME}"
+		debug-print "Installing into ${INSDIR}"
+
+		# Mercilessly delete all existing compiled code
+		find . -iname '*.py[co]' -delete
+
+		debug-print "Exiting Control-specific installation code"
+
+	else
+		die "nothing to install, is the ebuild written correctly?"
+	fi
+
+	debug-print-section common_install
+
+	# Create the proper subdirectory in the global Controls or
+	# Displays directory
+	dodir "${INSDIR}"
+	insinto "${INSDIR}"
+	doins -r *
+
+}
+
+# @FUNCTION: gdesklets_pkg_postinst
+# @DESCRIPTION:
+# Marks the Control for rebuilding on Python version change and
+# compiles the Python code or display a useful message to the user,
+# depending on which of CONTROL_NAME or DESKLET_NAME is set.
+gdesklets_pkg_postinst() {
+
+	# The only time compilation of python modules should occur is
+	# for Controls, since Displays are run from inside the sandbox
+	# (and therefore can't be compiled).
+	if [[ -n "${CONTROL_NAME}" ]]; then
+
+		# Recompute the Control's unique dir name from the live install.
+		CTRL_DIRNAME=$( "${GDESKLETS_INST_DIR}/gdesklets-control-getid" `pwd` 2> /dev/null )
+		python_need_rebuild
+		python_mod_optimize "${GDESKLETS_INST_DIR}/Controls/${CTRL_DIRNAME}"
+
+	else
+
+		einfo "Each user can now add this desklet to their desktop through the"
+		einfo "gDesklets shell or the command line (.display files can be"
+		einfo "found in ${GDESKLETS_INST_DIR}/Displays/${DESKLET_NAME})."
+
+	fi
+
+}
+
+EXPORT_FUNCTIONS src_install pkg_postinst
diff --git a/eclass/ghc-package.eclass b/eclass/ghc-package.eclass
new file mode 100644
index 000000000000..f71116bd50f2
--- /dev/null
+++ b/eclass/ghc-package.eclass
@@ -0,0 +1,339 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ghc-package.eclass
+# @MAINTAINER:
+# "Gentoo's Haskell Language team" <haskell@gentoo.org>
+# @AUTHOR:
+# Original Author: Andres Loeh <kosmikus@gentoo.org>
+# @BLURB: This eclass helps with the Glasgow Haskell Compiler's package configuration utility.
+# @DESCRIPTION:
+# Helper eclass to handle ghc installation/upgrade/deinstallation process.
+
+inherit versionator
+
# @FUNCTION: ghc-getghc
# @DESCRIPTION:
# Prints the full path of the ghc executable found on PATH
# (empty output and non-zero status when ghc is not installed).
ghc-getghc() {
	local ghc_binary="ghc"
	type -P "${ghc_binary}"
}
+
# @FUNCTION: ghc-getghcpkg
# @DESCRIPTION:
# Internal function; prints the full path of the ghc-pkg
# executable found on PATH.
ghc-getghcpkg() {
	local pkg_tool="ghc-pkg"
	type -P "${pkg_tool}"
}
+
# @FUNCTION: ghc-getghcpkgbin
# @DESCRIPTION:
# returns the name of the ghc-pkg binary (ghc-pkg
# itself usually is a shell script, and we have to
# bypass the script under certain circumstances);
# for Cabal, we add an empty global package config file,
# because for some reason the global package file
# must be specified
#
# Output is "<path-to-ghc-pkg> <global-db-flag>" on one line; callers
# split it via word expansion.  ${T} is the portage temp dir.
ghc-getghcpkgbin() {
	if version_is_at_least "7.9.20141222" "$(ghc-version)"; then
		# ghc-7.10 stopped supporting single-file database
		local empty_db="${T}/empty.conf.d" ghc_pkg="$(ghc-libdir)/bin/ghc-pkg"
		# create the (reusable) empty db directory only once per build
		if [[ ! -d ${empty_db} ]]; then
			"${ghc_pkg}" init "${empty_db}" || die "Failed to initialize empty global db"
		fi
		echo "$(ghc-libdir)/bin/ghc-pkg" "--global-package-db=${empty_db}"

	elif version_is_at_least "7.7.20121101" "$(ghc-version)"; then
		# the ghc-pkg executable changed name in ghc 6.10, as it no longer needs
		# the wrapper script with the static flags
		# was moved to bin/ subtree by:
		# http://www.haskell.org/pipermail/cvs-ghc/2012-September/076546.html
		echo '[]' > "${T}/empty.conf"
		echo "$(ghc-libdir)/bin/ghc-pkg" "--global-package-db=${T}/empty.conf"

	elif version_is_at_least "7.5.20120516" "$(ghc-version)"; then
		# single-file empty db, but the new --global-package-db spelling
		echo '[]' > "${T}/empty.conf"
		echo "$(ghc-libdir)/ghc-pkg" "--global-package-db=${T}/empty.conf"

	else
		# oldest ghc versions use the --global-conf spelling
		echo '[]' > "${T}/empty.conf"
		echo "$(ghc-libdir)/ghc-pkg" "--global-conf=${T}/empty.conf"
	fi
}
+
# @FUNCTION: ghc-version
# @DESCRIPTION:
# returns upstream version of ghc
# as reported by '--numeric-version'
# Examples: "7.10.2", "7.9.20141222"
# The result is memoised in _GHC_VERSION_CACHE to avoid spawning
# ghc repeatedly.
_GHC_VERSION_CACHE=""
ghc-version() {
	[[ -n "${_GHC_VERSION_CACHE}" ]] \
		|| _GHC_VERSION_CACHE=$($(ghc-getghc) --numeric-version)
	echo "${_GHC_VERSION_CACHE}"
}
+
# @FUNCTION: ghc-pm-version
# @DESCRIPTION:
# returns package manager(PM) version of ghc
# as reported by '$(best_version)'
# Examples: "PM:7.10.2", "PM:7.10.2_rc1", "PM:7.8.4-r4"
# Memoised in _GHC_PM_VERSION_CACHE.
_GHC_PM_VERSION_CACHE=""
ghc-pm-version() {
	if [[ -z "${_GHC_PM_VERSION_CACHE}" ]]; then
		local best_ghc
		best_ghc=$(best_version dev-lang/ghc)
		# strip the "dev-lang/ghc-" prefix, keep revision/suffix intact
		_GHC_PM_VERSION_CACHE="PM:${best_ghc#dev-lang/ghc-}"
	fi
	echo "${_GHC_PM_VERSION_CACHE}"
}
+
# @FUNCTION: ghc-cabal-version
# @DESCRIPTION:
# return version of the Cabal library bundled with ghc
ghc-cabal-version() {
	if version_is_at_least "7.9.20141222" "$(ghc-version)"; then
		# outputs in format: 'version: 1.18.1.5'
		# 'set --' splits that into $1="version:" and $2=the version
		set -- `$(ghc-getghcpkg) --package-db=$(ghc-libdir)/package.conf.d.initial field Cabal version`
		echo "$2"
	else
		# glob the bundled Cabal directory; NOTE(review): assumes exactly
		# one Cabal-* entry in the libdir -- confirm for multi-slot setups
		local cabal_package=`echo "$(ghc-libdir)"/Cabal-*`
		# /path/to/ghc/Cabal-${VER} -> ${VER}
		echo "${cabal_package/*Cabal-/}"
	fi
}
+
# @FUNCTION: ghc-sanecabal
# @DESCRIPTION:
# check if a standalone Cabal version is available for the
# currently used ghc; takes minimal version of Cabal as
# an optional argument
# $1 - minimal Cabal version (defaults to "1.0.1")
# Returns 0 when a cabal-* hint file in $(ghc-confdir) satisfies it.
ghc-sanecabal() {
	local f
	local version
	if [[ -z "$1" ]]; then version="1.0.1"; else version="$1"; fi
	# iterate over cabal-<ver> hint files; the -f test also filters out
	# the literal unmatched glob when no such file exists
	for f in $(ghc-confdir)/cabal-*; do
		[[ -f "${f}" ]] && version_is_at_least "${version}" "${f#*cabal-}" && return
	done
	return 1
}
# @FUNCTION: ghc-is-dynamic
# @DESCRIPTION:
# checks if ghc is built against dynamic libraries
# binaries linked against GHC library (and using plugin loading)
# have to be linked the same way:
# https://ghc.haskell.org/trac/ghc/ticket/10301
ghc-is-dynamic() {
	local ghc_info
	ghc_info=$($(ghc-getghc) --info)
	grep "GHC Dynamic" <<<"${ghc_info}" | grep -q "YES"
}
+
# @FUNCTION: ghc-supports-shared-libraries
# @DESCRIPTION:
# checks if ghc is built with support for building
# shared libraries (aka '-dynamic' option)
ghc-supports-shared-libraries() {
	local rts_info
	rts_info=$($(ghc-getghc) --info)
	grep "RTS ways" <<<"${rts_info}" | grep -q "dyn"
}
+
# @FUNCTION: ghc-supports-threaded-runtime
# @DESCRIPTION:
# checks if ghc is built with support for threaded
# runtime (aka '-threaded' option)
ghc-supports-threaded-runtime() {
	local rts_info
	rts_info=$($(ghc-getghc) --info)
	grep "RTS ways" <<<"${rts_info}" | grep -q "thr"
}
+
# @FUNCTION: ghc-supports-smp
# @DESCRIPTION:
# checks if ghc is built with support for multiple cores runtime
ghc-supports-smp() {
	local ghc_info
	ghc_info=$($(ghc-getghc) --info)
	grep "Support SMP" <<<"${ghc_info}" | grep -q "YES"
}
+
# @FUNCTION: ghc-supports-interpreter
# @DESCRIPTION:
# checks if ghc has interpreter mode (aka GHCi)
# It usually means that ghc supports for template haskell.
ghc-supports-interpreter() {
	local ghc_info
	ghc_info=$($(ghc-getghc) --info)
	grep "Have interpreter" <<<"${ghc_info}" | grep -q "YES"
}
+
# @FUNCTION: ghc-supports-parallel-make
# @DESCRIPTION:
# checks if ghc has support for '--make -j' mode
# The option was introduced in ghc-7.8-rc1.
ghc-supports-parallel-make() {
	local ghc_info
	ghc_info=$($(ghc-getghc) --info)
	grep "Support parallel --make" <<<"${ghc_info}" | grep -q "YES"
}
+
# @FUNCTION: ghc-extractportageversion
# @DESCRIPTION:
# extract the version of a portage-installed package
# $1 - package atom (e.g. dev-lang/ghc); prints the bare upstream
# version with the Gentoo revision (-rN) and _pre suffix stripped.
ghc-extractportageversion() {
	local pkg
	local version
	# Fix: quote "$1" so the atom is not subject to word splitting
	# or accidental glob expansion.
	pkg="$(best_version "$1")"
	version="${pkg#$1-}"       # strip "category/name-" prefix
	version="${version%-r*}"   # strip Gentoo revision suffix
	version="${version%_pre*}" # strip _pre snapshot suffix
	echo "${version}"
}
+
# @FUNCTION: ghc-libdir
# @DESCRIPTION:
# returns the library directory as reported by 'ghc --print-libdir';
# memoised in _GHC_LIBDIR_CACHE to avoid repeated ghc invocations.
_GHC_LIBDIR_CACHE=""
ghc-libdir() {
	[[ -n "${_GHC_LIBDIR_CACHE}" ]] \
		|| _GHC_LIBDIR_CACHE=$($(ghc-getghc) --print-libdir)
	echo "${_GHC_LIBDIR_CACHE}"
}
+
# @FUNCTION: ghc-confdir
# @DESCRIPTION:
# returns the (Gentoo) library configuration directory, we
# store here a hint for 'haskell-updater' about packages
# installed for old ghc versions and current ones.
ghc-confdir() {
	printf '%s/gentoo\n' "$(ghc-libdir)"
}
+
# @FUNCTION: ghc-package-db
# @DESCRIPTION:
# returns the global package database directory
# (package.conf.d inside the ghc libdir)
ghc-package-db() {
	printf '%s/package.conf.d\n' "$(ghc-libdir)"
}
+
# @FUNCTION: ghc-localpkgconfd
# @DESCRIPTION:
# returns the name of the local (package-specific)
# package configuration file, derived from ${PF}
ghc-localpkgconfd() {
	printf '%s.conf.d\n' "${PF}"
}
+
# @FUNCTION: ghc-package-exists
# @DESCRIPTION:
# tests if a ghc package exists ($1 - package id);
# all ghc-pkg output is discarded, only the status matters
ghc-package-exists() {
	local pkg_id=$1
	$(ghc-getghcpkg) describe "${pkg_id}" > /dev/null 2>&1
}
+
# @FUNCTION: check-for-collisions
# @DESCRIPTION:
# makes sure no packages
# have the same version as initial package setup
# $1 - path to the local package db to validate against the
# package.conf.d.initial snapshot shipped with ghc.
check-for-collisions() {
	local localpkgconf=$1
	local checked_pkg
	local initial_pkg_db="$(ghc-libdir)/package.conf.d.initial"

	for checked_pkg in `$(ghc-getghcpkgbin) -f "${localpkgconf}" list --simple-output`
	do
		# should return empty output
		# NOTE(review): ${initial_pkg_db} is unquoted here -- relies on a
		# space-free libdir path; confirm.
		local collided=`$(ghc-getghcpkgbin) -f ${initial_pkg_db} list --simple-output "${checked_pkg}"`

		if [[ -n ${collided} ]]; then
			eerror "Cabal package '${checked_pkg}' is shipped with '$(ghc-pm-version)' ('$(ghc-version)')."
			eerror "Ebuild author forgot an entry in CABAL_CORE_LIB_GHC_PV='${CABAL_CORE_LIB_GHC_PV}'."
			eerror "Found in ${initial_pkg_db}."
			die
		fi
	done
}
+
# @FUNCTION: ghc-install-pkg
# @DESCRIPTION:
# moves the local (package-specific) package configuration
# file to its final destination
# $1 - path to the package registration file produced by Cabal
ghc-install-pkg() {
	local pkg_config_file=$1
	local localpkgconf="${T}/$(ghc-localpkgconfd)"
	local pkg_path pkg pkg_db="${D}/$(ghc-package-db)" hint_db="${D}/$(ghc-confdir)"

	# register into a scratch db first so the result can be validated
	# before anything touches the image (${D})
	$(ghc-getghcpkgbin) init "${localpkgconf}" || die "Failed to initialize empty local db"
	# NOTE(review): ${pkg} is still unset in this error message --
	# probably meant ${pkg_config_file}; confirm.
	$(ghc-getghcpkgbin) -f "${localpkgconf}" update - --force \
		< "${pkg_config_file}" || die "failed to register ${pkg}"

	check-for-collisions "${localpkgconf}"

	# copy the validated .conf files into the image's global package db
	mkdir -p "${pkg_db}" || die
	for pkg_path in "${localpkgconf}"/*.conf; do
		pkg=$(basename "${pkg_path}")
		cp "${pkg_path}" "${pkg_db}/${pkg}" || die
	done

	# keep a world-readable copy as a hint for haskell-updater
	mkdir -p "${hint_db}" || die
	cp "${pkg_config_file}" "${hint_db}/${PF}.conf" || die
	chmod 0644 "${hint_db}/${PF}.conf" || die
}
+
# @FUNCTION: ghc-recache-db
# @DESCRIPTION:
# updates 'package.cache' binary cache for registered '*.conf'
# packages
ghc-recache-db() {
	einfo "Recaching GHC package DB"
	$(ghc-getghcpkg) recache
}
+
# @FUNCTION: ghc-register-pkg
# @DESCRIPTION:
# registers all packages in the local (package-specific)
# package configuration file
# Since the .conf files are already copied into the global db by
# ghc-install-pkg, registration reduces to rebuilding the cache.
ghc-register-pkg() {
	ghc-recache-db
}
+
# @FUNCTION: ghc-reregister
# @DESCRIPTION:
# re-adds all available .conf files to the global
# package conf file, to be used on a ghc reinstallation
# (delegates to ghc-recache-db, which picks up every *.conf present)
ghc-reregister() {
	ghc-recache-db
}
+
# @FUNCTION: ghc-unregister-pkg
# @DESCRIPTION:
# unregisters a package configuration file
# (the .conf file removal itself is handled by the package manager;
# only the binary cache needs rebuilding here)
ghc-unregister-pkg() {
	ghc-recache-db
}
+
# @FUNCTION: ghc-pkgdeps
# @DESCRIPTION:
# exported function: loads a package dependency in a form
# cabal_package version
# $1 - package id passed to 'ghc-pkg describe'
ghc-pkgdeps() {
	# sed pipeline:
	#   1. keep only the lines from "depends" up to the next "field:" header
	#   2. split "name-<version>-<abi-hash>" into "name <version>"
	#   3. drop the leading "depends:" field name
	echo $($(ghc-getghcpkg) describe "${1}") \
		| sed \
			-e '/depends/,/^.*:/ !d' \
			-e 's/\(.*\)-\(.*\)-\(.*\)/\1 \2/' \
			-e 's/^.*://g'
}
+
# @FUNCTION: ghc-package_pkg_postinst
# @DESCRIPTION:
# updates package.cache after package install
# (exported phase; simply rebuilds the binary package cache)
ghc-package_pkg_postinst() {
	ghc-recache-db
}
+
# @FUNCTION: ghc-package_pkg_prerm
# @DESCRIPTION:
# Deprecated no-op phase; kept only to warn ebuild authors that the
# cache update now belongs in haskell-cabal_pkg_postrm().
ghc-package_pkg_prerm() {
	ewarn "ghc-package.eclass: 'ghc-package_pkg_prerm()' is a noop"
	ewarn "ghc-package.eclass: consider 'haskell-cabal_pkg_postrm()' instead"
}
+
# @FUNCTION: ghc-package_pkg_postrm
# @DESCRIPTION:
# updates package.cache after package deinstall
ghc-package_pkg_postrm() {
	ghc-recache-db
}
diff --git a/eclass/git-2.eclass b/eclass/git-2.eclass
new file mode 100644
index 000000000000..eae6ba1f4ccc
--- /dev/null
+++ b/eclass/git-2.eclass
@@ -0,0 +1,604 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: git-2.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# Donnie Berkholz <dberkholz@gentoo.org>
+# @BLURB: Eclass for fetching and unpacking git repositories.
+# @DESCRIPTION:
+# Eclass for easing maintenance of live ebuilds using git as a remote repository.
+# The eclass supports working with git submodules and branching.
+#
+# This eclass is DEPRECATED. Please use git-r3 instead.
+
+# This eclass support all EAPIs
+EXPORT_FUNCTIONS src_unpack
+
+DEPEND="dev-vcs/git"
+
+# @ECLASS-VARIABLE: EGIT_SOURCEDIR
+# @DESCRIPTION:
+# This variable specifies destination where the cloned
+# data are copied to.
+#
+# EGIT_SOURCEDIR="${S}"
+
+# @ECLASS-VARIABLE: EGIT_STORE_DIR
+# @DESCRIPTION:
+# Storage directory for git sources.
+#
+# EGIT_STORE_DIR="${DISTDIR}/egit-src"
+
+# @ECLASS-VARIABLE: EGIT_HAS_SUBMODULES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable enables support for git submodules in our
+# checkout. Also this makes the checkout to be non-bare for now.
+
+# @ECLASS-VARIABLE: EGIT_OPTIONS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Variable specifying additional options for fetch command.
+
+# @ECLASS-VARIABLE: EGIT_MASTER
+# @DESCRIPTION:
+# Variable for specifying master branch.
+# Useful when upstream doesn't have a master branch or names it differently.
+#
+# EGIT_MASTER="master"
+
+# @ECLASS-VARIABLE: EGIT_PROJECT
+# @DESCRIPTION:
+# Variable specifying name for the folder where we check out the git
+# repository. Value of this variable should be unique in the
+# EGIT_STORE_DIR as otherwise you would override another repository.
+#
+# EGIT_PROJECT="${EGIT_REPO_URI##*/}"
+
+# @ECLASS-VARIABLE: EGIT_DIR
+# @DESCRIPTION:
+# Directory where we want to store the git data.
+# This variable should not be overridden.
+#
+# EGIT_DIR="${EGIT_STORE_DIR}/${EGIT_PROJECT}"
+
+# @ECLASS-VARIABLE: EGIT_REPO_URI
+# @REQUIRED
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# URI for the repository
+# e.g. http://foo, git://bar
+#
+# It can be overriden via env using packagename_LIVE_REPO
+# variable.
+#
+# Supports multiple values:
+# EGIT_REPO_URI="git://a/b.git http://c/d.git"
+
+# @ECLASS-VARIABLE: EVCS_OFFLINE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable prevents performance of any online
+# operations.
+
+# @ECLASS-VARIABLE: EGIT_BRANCH
+# @DESCRIPTION:
+# Variable containing branch name we want to check out.
+# It can be overriden via env using packagename_LIVE_BRANCH
+# variable.
+#
+# EGIT_BRANCH="${EGIT_MASTER}"
+
+# @ECLASS-VARIABLE: EGIT_COMMIT
+# @DESCRIPTION:
+# Variable containing commit hash/tag we want to check out.
+# It can be overriden via env using packagename_LIVE_COMMIT
+# variable.
+#
+# EGIT_COMMIT="${EGIT_BRANCH}"
+
+# @ECLASS-VARIABLE: EGIT_REPACK
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable specifies that repository will be repacked to
+# save space. However this can take a REALLY LONG time with VERY big
+# repositories.
+
+# @ECLASS-VARIABLE: EGIT_PRUNE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable enables pruning all loose objects on each fetch.
+# This is useful if upstream rewinds and rebases branches often.
+
+# @ECLASS-VARIABLE: EGIT_NONBARE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable specifies that all checkouts will be done using
+# non bare repositories. This is useful if you can't operate with bare
+# checkouts for some reason.
+
+# @ECLASS-VARIABLE: EGIT_NOUNPACK
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable bans unpacking of ${A} content into the srcdir.
+# Default behaviour is to unpack ${A} content.
+
# @FUNCTION: git-2_init_variables
# @INTERNAL
# @DESCRIPTION:
# Internal function initializing all git variables.
# We define it in function scope so user can define
# all the variables before and after inherit.
git-2_init_variables() {
	debug-print-function ${FUNCNAME} "$@"

	local esc_pn liverepo livebranch livecommit
	# ${PN} with -/+ mapped to _ so it can be used in a variable name
	esc_pn=${PN//[-+]/_}

	: ${EGIT_SOURCEDIR="${S}"}

	: ${EGIT_STORE_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/egit-src"}

	: ${EGIT_HAS_SUBMODULES:=}

	: ${EGIT_OPTIONS:=}

	: ${EGIT_MASTER:=master}

	# per-package env override: ${PN}_LIVE_REPO wins over EGIT_REPO_URI
	liverepo=${esc_pn}_LIVE_REPO
	EGIT_REPO_URI=${!liverepo:-${EGIT_REPO_URI}}
	[[ ${EGIT_REPO_URI} ]] || die "EGIT_REPO_URI must have some value"

	: ${EVCS_OFFLINE:=}

	# per-package env override for the branch (unsupported, hence the QA warn)
	livebranch=${esc_pn}_LIVE_BRANCH
	[[ ${!livebranch} ]] && ewarn "QA: using \"${esc_pn}_LIVE_BRANCH\" variable, you won't get any support"
	EGIT_BRANCH=${!livebranch:-${EGIT_BRANCH:-${EGIT_MASTER}}}

	# per-package env override for the commit; defaults to the branch
	livecommit=${esc_pn}_LIVE_COMMIT
	[[ ${!livecommit} ]] && ewarn "QA: using \"${esc_pn}_LIVE_COMMIT\" variable, you won't get any support"
	EGIT_COMMIT=${!livecommit:-${EGIT_COMMIT:-${EGIT_BRANCH}}}

	: ${EGIT_REPACK:=}

	: ${EGIT_PRUNE:=}
}
+
# @FUNCTION: git-2_submodules
# @INTERNAL
# @DESCRIPTION:
# Internal function wrapping the submodule initialisation and update.
# No-op unless EGIT_HAS_SUBMODULES is set; returns 1 when offline,
# since submodule operations need network access.
git-2_submodules() {
	debug-print-function ${FUNCNAME} "$@"
	if [[ ${EGIT_HAS_SUBMODULES} ]]; then
		if [[ ${EVCS_OFFLINE} ]]; then
			# for submodules operations we need to be online
			debug-print "${FUNCNAME}: not updating submodules in offline mode"
			return 1
		fi

		# NOTE(review): the message logs "$1" but the pushd below uses
		# EGIT_DIR -- confirm callers pass EGIT_DIR as $1.
		debug-print "${FUNCNAME}: working in \"${1}\""
		pushd "${EGIT_DIR}" > /dev/null

		debug-print "${FUNCNAME}: git submodule init"
		git submodule init || die
		debug-print "${FUNCNAME}: git submodule sync"
		git submodule sync || die
		debug-print "${FUNCNAME}: git submodule update"
		git submodule update || die

		popd > /dev/null
	fi
}
+
# @FUNCTION: git-2_branch
# @INTERNAL
# @DESCRIPTION:
# Internal function that changes branch for the repo based on EGIT_COMMIT and
# EGIT_BRANCH variables.
# Checks out branch-${EGIT_BRANCH} from origin, or tree-${EGIT_COMMIT}
# when a specific commit/tag was requested.
git-2_branch() {
	debug-print-function ${FUNCNAME} "$@"

	# Fix: branchname/src were declared twice ('local branchname src'
	# followed by a second 'local' with initializers); declare them once.
	local branchname=branch-${EGIT_BRANCH} src=origin/${EGIT_BRANCH}

	debug-print "${FUNCNAME}: working in \"${EGIT_SOURCEDIR}\""
	pushd "${EGIT_SOURCEDIR}" > /dev/null

	# a specific commit (or tag) overrides the plain branch checkout
	if [[ ${EGIT_COMMIT} != ${EGIT_BRANCH} ]]; then
		branchname=tree-${EGIT_COMMIT}
		src=${EGIT_COMMIT}
	fi
	debug-print "${FUNCNAME}: git checkout -b ${branchname} ${src}"
	git checkout -b ${branchname} ${src} \
		|| die "${FUNCNAME}: changing the branch failed"

	popd > /dev/null
}
+
# @FUNCTION: git-2_gc
# @INTERNAL
# @DESCRIPTION:
# Internal function running garbage collector on checked out tree.
# Only acts when EGIT_REPACK or EGIT_PRUNE is set.
git-2_gc() {
	debug-print-function ${FUNCNAME} "$@"

	local args

	if [[ ${EGIT_REPACK} || ${EGIT_PRUNE} ]]; then
		pushd "${EGIT_DIR}" > /dev/null
		ebegin "Garbage collecting the repository"
		# pass --prune only when requested; plain 'git gc' covers repack
		[[ ${EGIT_PRUNE} ]] && args='--prune'
		debug-print "${FUNCNAME}: git gc ${args}"
		git gc ${args}
		eend $?
		popd > /dev/null
	fi
}
+
# @FUNCTION: git-2_prepare_storedir
# @INTERNAL
# @DESCRIPTION:
# Internal function preparing directory where we are going to store SCM
# repository.
# Also computes and exports EGIT_DIR, and detects clones left behind by
# the old git.eclass so they can be reused once and then removed.
git-2_prepare_storedir() {
	debug-print-function ${FUNCNAME} "$@"

	local clone_dir

	# initial clone, we have to create master git storage directory and play
	# nicely with sandbox
	if [[ ! -d ${EGIT_STORE_DIR} ]]; then
		debug-print "${FUNCNAME}: Creating git main storage directory"
		addwrite /
		mkdir -m 775 -p "${EGIT_STORE_DIR}" \
			|| die "${FUNCNAME}: can't mkdir \"${EGIT_STORE_DIR}\""
	fi

	# allow writing into EGIT_STORE_DIR
	addwrite "${EGIT_STORE_DIR}"

	# calculate git.eclass store dir for data
	# We will try to clone the old repository,
	# and we will remove it if we don't need it anymore.
	EGIT_OLD_CLONE=
	if [[ ${EGIT_STORE_DIR} == */egit-src ]]; then
		# old git.eclass kept clones under .../git-src instead of .../egit-src
		local old_store_dir=${EGIT_STORE_DIR/%egit-src/git-src}
		local old_location=${old_store_dir}/${EGIT_PROJECT:-${PN}}

		if [[ -d ${old_location} ]]; then
			EGIT_OLD_CLONE=${old_location}
			# required to remove the old clone
			addwrite "${old_store_dir}"
		fi
	fi

	# calculate the proper store dir for data
	# If user didn't specify the EGIT_DIR, we check if he did specify
	# the EGIT_PROJECT or get the folder name from EGIT_REPO_URI.
	EGIT_REPO_URI=${EGIT_REPO_URI%/}
	if [[ ! ${EGIT_DIR} ]]; then
		if [[ ${EGIT_PROJECT} ]]; then
			clone_dir=${EGIT_PROJECT}
		else
			# derive the directory name from the last URI path component
			local strippeduri=${EGIT_REPO_URI%/.git}
			clone_dir=${strippeduri##*/}
		fi
		EGIT_DIR=${EGIT_STORE_DIR}/${clone_dir}

		# prepend the old clone as a local "remote" so it is used once
		if [[ ${EGIT_OLD_CLONE} && ! -d ${EGIT_DIR} ]]; then
			elog "${FUNCNAME}: ${CATEGORY}/${PF} will be cloned from old location."
			elog "It will be necessary to rebuild the package to fetch updates."
			EGIT_REPO_URI="${EGIT_OLD_CLONE} ${EGIT_REPO_URI}"
		fi
	fi
	export EGIT_DIR=${EGIT_DIR}
	debug-print "${FUNCNAME}: Storing the repo into \"${EGIT_DIR}\"."
}
+
# @FUNCTION: git-2_move_source
# @INTERNAL
# @DESCRIPTION:
# Internal function moving sources from the EGIT_DIR to EGIT_SOURCEDIR dir.
# MOVE_COMMAND is set by git-2_migrate_repository ('git clone -l -s -n'
# for bare checkouts, 'cp -pPR .' for non-bare ones).
git-2_move_source() {
	debug-print-function ${FUNCNAME} "$@"

	debug-print "${FUNCNAME}: ${MOVE_COMMAND} \"${EGIT_DIR}\" \"${EGIT_SOURCEDIR}\""
	pushd "${EGIT_DIR}" > /dev/null
	mkdir -p "${EGIT_SOURCEDIR}" \
		|| die "${FUNCNAME}: failed to create ${EGIT_SOURCEDIR}"
	${MOVE_COMMAND} "${EGIT_SOURCEDIR}" \
		|| die "${FUNCNAME}: sync to \"${EGIT_SOURCEDIR}\" failed"
	popd > /dev/null
}
+
# @FUNCTION: git-2_initial_clone
# @INTERNAL
# @DESCRIPTION:
# Internal function running initial clone on specified repo_uri.
# Tries every URI listed in EGIT_REPO_URI in order; the first one
# that clones successfully is recorded in EGIT_REPO_URI_SELECTED.
# Dies when none of the URIs work.
git-2_initial_clone() {
	debug-print-function ${FUNCNAME} "$@"

	local repo_uri

	EGIT_REPO_URI_SELECTED=""
	for repo_uri in ${EGIT_REPO_URI}; do
		debug-print "${FUNCNAME}: git clone ${EGIT_LOCAL_OPTIONS} \"${repo_uri}\" \"${EGIT_DIR}\""
		if git clone ${EGIT_LOCAL_OPTIONS} "${repo_uri}" "${EGIT_DIR}"; then
			# global variable containing the repo_name we will be using
			debug-print "${FUNCNAME}: EGIT_REPO_URI_SELECTED=\"${repo_uri}\""
			EGIT_REPO_URI_SELECTED="${repo_uri}"
			break
		fi
	done

	[[ ${EGIT_REPO_URI_SELECTED} ]] \
		|| die "${FUNCNAME}: can't fetch from ${EGIT_REPO_URI}"
}
+
# @FUNCTION: git-2_update_repo
# @INTERNAL
# @DESCRIPTION:
# Internal function running update command on specified repo_uri.
# Tries every URI in EGIT_REPO_URI in order; the first one that
# updates successfully is recorded in EGIT_REPO_URI_SELECTED.
# Dies when none of the URIs work.
git-2_update_repo() {
	debug-print-function ${FUNCNAME} "$@"

	# Fix: 'x' (branch-cleanup loop variable below) was not declared
	# local and leaked into the global environment.
	local repo_uri x

	if [[ ${EGIT_LOCAL_NONBARE} ]]; then
		# checkout master branch and drop all other local branches
		git checkout ${EGIT_MASTER} || die "${FUNCNAME}: can't checkout master branch ${EGIT_MASTER}"
		for x in $(git branch | grep -v "* ${EGIT_MASTER}" | tr '\n' ' '); do
			debug-print "${FUNCNAME}: git branch -D ${x}"
			git branch -D ${x} > /dev/null
		done
	fi

	EGIT_REPO_URI_SELECTED=""
	for repo_uri in ${EGIT_REPO_URI}; do
		# git urls might change, so reset it
		git config remote.origin.url "${repo_uri}"

		debug-print "${EGIT_UPDATE_CMD}"
		if ${EGIT_UPDATE_CMD} > /dev/null; then
			# global variable containing the repo_name we will be using
			debug-print "${FUNCNAME}: EGIT_REPO_URI_SELECTED=\"${repo_uri}\""
			EGIT_REPO_URI_SELECTED="${repo_uri}"
			break
		fi
	done

	[[ ${EGIT_REPO_URI_SELECTED} ]] \
		|| die "${FUNCNAME}: can't update from ${EGIT_REPO_URI}"
}
+
# @FUNCTION: git-2_fetch
# @INTERNAL
# @DESCRIPTION:
# Internal function fetching repository from EGIT_REPO_URI and storing it in
# specified EGIT_STORE_DIR.
# Handles three cases: first clone, offline reuse, and online update.
# Exports EGIT_VERSION with the commit the checkout ends up at.
git-2_fetch() {
	debug-print-function ${FUNCNAME} "$@"

	local oldsha cursha repo_type

	# UPSTREAM_BRANCH and EGIT_LOCAL_NONBARE are set up beforehand by
	# git-2_migrate_repository
	[[ ${EGIT_LOCAL_NONBARE} ]] && repo_type="non-bare repository" || repo_type="bare repository"

	if [[ ! -d ${EGIT_DIR} ]]; then
		# no local storage yet: do the initial clone
		git-2_initial_clone
		pushd "${EGIT_DIR}" > /dev/null
		cursha=$(git rev-parse ${UPSTREAM_BRANCH})
		echo "GIT NEW clone -->"
		echo "   repository: ${EGIT_REPO_URI_SELECTED}"
		echo "   at the commit: ${cursha}"

		popd > /dev/null
	elif [[ ${EVCS_OFFLINE} ]]; then
		# offline: reuse whatever is in the store without network access
		pushd "${EGIT_DIR}" > /dev/null
		cursha=$(git rev-parse ${UPSTREAM_BRANCH})
		echo "GIT offline update -->"
		echo "   repository: $(git config remote.origin.url)"
		echo "   at the commit: ${cursha}"
		popd > /dev/null
	else
		# online update of the existing store
		pushd "${EGIT_DIR}" > /dev/null
		oldsha=$(git rev-parse ${UPSTREAM_BRANCH})
		git-2_update_repo
		cursha=$(git rev-parse ${UPSTREAM_BRANCH})

		# fetch updates
		echo "GIT update -->"
		echo "   repository: ${EGIT_REPO_URI_SELECTED}"
		# write out message based on the revisions
		if [[ "${oldsha}" != "${cursha}" ]]; then
			echo "   updating from commit: ${oldsha}"
			echo "   to commit: ${cursha}"
		else
			echo "   at the commit: ${cursha}"
		fi

		# print nice statistic of what was changed
		git --no-pager diff --stat ${oldsha}..${UPSTREAM_BRANCH}
		popd > /dev/null
	fi
	# export the version the repository is at
	export EGIT_VERSION="${cursha}"
	# log the repo state
	[[ ${EGIT_COMMIT} != ${EGIT_BRANCH} ]] \
		&& echo "   commit: ${EGIT_COMMIT}"
	echo "   branch: ${EGIT_BRANCH}"
	echo "   storage directory: \"${EGIT_DIR}\""
	echo "   checkout type: ${repo_type}"

	# Cleanup after git.eclass
	if [[ ${EGIT_OLD_CLONE} ]]; then
		einfo "${FUNCNAME}: removing old clone in ${EGIT_OLD_CLONE}."
		rm -rf "${EGIT_OLD_CLONE}"
	fi
}
+
# @FUNCTION: git-2_bootstrap
# @INTERNAL
# @DESCRIPTION:
# Internal function that runs bootstrap command on unpacked source.
git-2_bootstrap() {
	debug-print-function ${FUNCNAME} "$@"

	# @ECLASS-VARIABLE: EGIT_BOOTSTRAP
	# @DESCRIPTION:
	# Command to be executed after checkout and clone of the specified
	# repository.
	# May name an executable file inside the checkout, or be an
	# arbitrary shell command to eval in ${EGIT_SOURCEDIR}.
	if [[ ${EGIT_BOOTSTRAP} ]]; then
		pushd "${EGIT_SOURCEDIR}" > /dev/null
		einfo "Starting bootstrap"

		if [[ -f ${EGIT_BOOTSTRAP} ]]; then
			# we have file in the repo which we should execute
			debug-print "${FUNCNAME}: bootstraping with file \"${EGIT_BOOTSTRAP}\""

			if [[ -x ${EGIT_BOOTSTRAP} ]]; then
				eval "./${EGIT_BOOTSTRAP}" \
					|| die "${FUNCNAME}: bootstrap script failed"
			else
				eerror "\"${EGIT_BOOTSTRAP}\" is not executable."
				eerror "Report upstream, or bug ebuild maintainer to remove bootstrap command."
				die "\"${EGIT_BOOTSTRAP}\" is not executable"
			fi
		else
			# we execute some system command
			debug-print "${FUNCNAME}: bootstraping with commands \"${EGIT_BOOTSTRAP}\""

			eval "${EGIT_BOOTSTRAP}" \
				|| die "${FUNCNAME}: bootstrap commands failed"
		fi

		einfo "Bootstrap finished"
		popd > /dev/null
	fi
}
+
# @FUNCTION: git-2_migrate_repository
# @INTERNAL
# @DESCRIPTION:
# Internal function migrating between bare and normal checkout repository.
# This is based on usage of EGIT_SUBMODULES, at least until they
# start to work with bare checkouts sanely.
# This function also set some global variables that differ between
# bare and non-bare checkout.
git-2_migrate_repository() {
	debug-print-function ${FUNCNAME} "$@"

	local bare returnstate

	# first find out if we have submodules
	# or user explicitly wants us to use non-bare clones
	if ! [[ ${EGIT_HAS_SUBMODULES} || ${EGIT_NONBARE} ]]; then
		bare=1
	fi

	# test if we already have some repo and if so find out if we have
	# to migrate the data
	if [[ -d ${EGIT_DIR} ]]; then
		if [[ ${bare} && -d ${EGIT_DIR}/.git ]]; then
			debug-print "${FUNCNAME}: converting \"${EGIT_DIR}\" to bare copy"

			# keep only the .git metadata, dropping the worktree
			ebegin "Converting \"${EGIT_DIR}\" from non-bare to bare copy"
			mv "${EGIT_DIR}/.git" "${EGIT_DIR}.bare"
			export GIT_DIR="${EGIT_DIR}.bare"
			git config core.bare true > /dev/null
			returnstate=$?
			unset GIT_DIR
			rm -rf "${EGIT_DIR}"
			mv "${EGIT_DIR}.bare" "${EGIT_DIR}"
			eend ${returnstate}
		elif [[ ! ${bare} && ! -d ${EGIT_DIR}/.git ]]; then
			debug-print "${FUNCNAME}: converting \"${EGIT_DIR}\" to non-bare copy"

			# local clone of the bare repo recreates a worktree
			ebegin "Converting \"${EGIT_DIR}\" from bare to non-bare copy"
			git clone -l "${EGIT_DIR}" "${EGIT_DIR}.nonbare" > /dev/null
			returnstate=$?
			rm -rf "${EGIT_DIR}"
			mv "${EGIT_DIR}.nonbare" "${EGIT_DIR}"
			eend ${returnstate}
		fi
	fi
	# NOTE(review): returnstate is unset here (evaluates as 0) when no
	# conversion was attempted above -- confirm that is intended.
	if [[ ${returnstate} -ne 0 ]]; then
		debug-print "${FUNCNAME}: converting \"${EGIT_DIR}\" failed, removing to start from scratch"

		# migration failed, remove the EGIT_DIR to play it safe
		einfo "Migration failed, removing \"${EGIT_DIR}\" to start from scratch."
		rm -rf "${EGIT_DIR}"
	fi

	# set various options to work with both targets
	if [[ ${bare} ]]; then
		debug-print "${FUNCNAME}: working in bare repository for \"${EGIT_DIR}\""
		EGIT_LOCAL_OPTIONS+="${EGIT_OPTIONS} --bare"
		MOVE_COMMAND="git clone -l -s -n ${EGIT_DIR// /\\ }"
		EGIT_UPDATE_CMD="git fetch -t -f -u origin ${EGIT_BRANCH}:${EGIT_BRANCH}"
		UPSTREAM_BRANCH="${EGIT_BRANCH}"
		EGIT_LOCAL_NONBARE=
	else
		debug-print "${FUNCNAME}: working in bare repository for non-bare \"${EGIT_DIR}\""
		MOVE_COMMAND="cp -pPR ."
		EGIT_LOCAL_OPTIONS="${EGIT_OPTIONS}"
		EGIT_UPDATE_CMD="git pull -f -u ${EGIT_OPTIONS}"
		UPSTREAM_BRANCH="origin/${EGIT_BRANCH}"
		EGIT_LOCAL_NONBARE="true"
	fi
}
+
# @FUNCTION: git-2_cleanup
# @INTERNAL
# @DESCRIPTION:
# Internal function cleaning up all the global variables
# that are not required after the unpack has been done.
git-2_cleanup() {
	debug-print-function ${FUNCNAME} "$@"

	# Only variables that are GLOBAL, defined by the eclass and NOT
	# subject to change by the user (like EGIT_PROJECT) are unset here.
	# If the ebuild writer pollutes his environment it is his problem
	# only.
	local scratch_var
	for scratch_var in \
			EGIT_DIR \
			MOVE_COMMAND \
			EGIT_LOCAL_OPTIONS \
			EGIT_UPDATE_CMD \
			UPSTREAM_BRANCH \
			EGIT_LOCAL_NONBARE; do
		unset "${scratch_var}"
	done
}
+
# @FUNCTION: git-2_src_unpack
# @DESCRIPTION:
# Default git src_unpack function.
# Runs the whole fetch/checkout pipeline in order, then unpacks any
# regular SRC_URI distfiles unless EGIT_NOUNPACK is set.
git-2_src_unpack() {
	debug-print-function ${FUNCNAME} "$@"

	git-2_init_variables
	git-2_prepare_storedir
	git-2_migrate_repository
	git-2_fetch "$@"
	git-2_gc
	git-2_submodules
	git-2_move_source
	git-2_branch
	git-2_bootstrap
	git-2_cleanup
	echo ">>> Unpacked to ${EGIT_SOURCEDIR}"

	# Users can specify some SRC_URI and we should
	# unpack the files too.
	if [[ ! ${EGIT_NOUNPACK} ]]; then
		if has ${EAPI:-0} 0 1; then
			# EAPI 0/1 have no default_src_unpack to call
			[[ ${A} ]] && unpack ${A}
		else
			default_src_unpack
		fi
	fi
}
diff --git a/eclass/git-r3.eclass b/eclass/git-r3.eclass
new file mode 100644
index 000000000000..0ee70bc4a51a
--- /dev/null
+++ b/eclass/git-r3.eclass
@@ -0,0 +1,988 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: git-r3.eclass
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @BLURB: Eclass for fetching and unpacking git repositories.
+# @DESCRIPTION:
+# Third generation eclass for easing maintenance of live ebuilds using
+# git as remote repository.
+
+# Only EAPIs 0-5 are supported; die early on anything newer/unknown.
+case "${EAPI:-0}" in
+	0|1|2|3|4|5)
+		;;
+	*)
+		die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+		;;
+esac
+
+# Inherit guard: _GIT_R3 is set to 1 at the end of this eclass.
+if [[ ! ${_GIT_R3} ]]; then
+
+inherit eutils
+
+fi
+
+# EXPORT_FUNCTIONS must run on every inherit, outside the guard.
+EXPORT_FUNCTIONS src_unpack
+
+if [[ ! ${_GIT_R3} ]]; then
+
+# git-2.eclass sets _INHERITED_BY_GIT_2 and provides its own DEPEND;
+# avoid overriding it in that case.
+if [[ ! ${_INHERITED_BY_GIT_2} ]]; then
+	DEPEND=">=dev-vcs/git-1.8.2.1"
+fi
+
+# @ECLASS-VARIABLE: EGIT_CLONE_TYPE
+# @DESCRIPTION:
+# Type of clone that should be used against the remote repository.
+# This can be either of: 'mirror', 'single+tags', 'single', 'shallow'.
+#
+# This is intended to be set by user in make.conf. Ebuilds are supposed
+# to set EGIT_MIN_CLONE_TYPE if necessary instead.
+#
+# The 'mirror' type clones all remote branches and tags with complete
+# history and all notes. EGIT_COMMIT can specify any commit hash.
+# Upstream-removed branches and tags are purged from the local clone
+# while fetching. This mode is suitable for cloning the local copy
+# for development or hosting a local git mirror. However, clones
+# of repositories with large diverged branches may quickly grow large.
+#
+# The 'single+tags' type clones the requested branch and all tags
+# in the repository. All notes are fetched as well. EGIT_COMMIT
+# can safely specify hashes throughout the current branch and all tags.
+# No purging of old references is done (if you often switch branches,
+# you may need to remove stale branches yourself). This mode is intended
+# mostly for use with broken git servers such as Google Code that fail
+# to fetch tags along with the branch in 'single' mode.
+#
+# The 'single' type clones only the requested branch or tag. Tags
+# referencing commits throughout the branch history are fetched as well,
+# and all notes. EGIT_COMMIT can safely specify only hashes
+# in the current branch. No purging of old references is done (if you
+# often switch branches, you may need to remove stale branches
+# yourself). This mode is suitable for general use.
+#
+# The 'shallow' type clones only the newest commit on requested branch
+# or tag. EGIT_COMMIT can only specify tags, and since the history is
+# unavailable calls like 'git describe' will not reference prior tags.
+# No purging of old references is done. This mode is intended mostly for
+# embedded systems with limited disk space.
+: ${EGIT_CLONE_TYPE:=single}
+
+# @ECLASS-VARIABLE: EGIT_MIN_CLONE_TYPE
+# @DESCRIPTION:
+# 'Minimum' clone type supported by the ebuild. Takes same values
+# as EGIT_CLONE_TYPE. When user sets a type that's 'lower' (that is,
+# later on the list) than EGIT_MIN_CLONE_TYPE, the eclass uses
+# EGIT_MIN_CLONE_TYPE instead.
+#
+# This variable is intended to be used by ebuilds only. Users are
+# supposed to set EGIT_CLONE_TYPE instead.
+#
+# A common case is to use 'single' whenever the build system requires
+# access to full branch history, or 'single+tags' when Google Code
+# or a similar remote is used that does not support shallow clones
+# and fetching tags along with commits. Please use sparingly, and to fix
+# fatal errors rather than 'non-pretty versions'.
+: ${EGIT_MIN_CLONE_TYPE:=shallow}
+
+# @ECLASS-VARIABLE: EGIT3_STORE_DIR
+# @DESCRIPTION:
+# Storage directory for git sources.
+#
+# This is intended to be set by user in make.conf. Ebuilds must not set
+# it.
+#
+# EGIT3_STORE_DIR=${DISTDIR}/git3-src
+
+# @ECLASS-VARIABLE: EGIT_MIRROR_URI
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# 'Top' URI to a local git mirror. If specified, the eclass will try
+# to fetch from the local mirror instead of using the remote repository.
+#
+# The mirror needs to follow EGIT3_STORE_DIR structure. The directory
+# created by eclass can be used for that purpose.
+#
+# Example:
+# @CODE
+# EGIT_MIRROR_URI="git://mirror.lan/"
+# @CODE
+
+# @ECLASS-VARIABLE: EGIT_REPO_URI
+# @REQUIRED
+# @DESCRIPTION:
+# URIs to the repository, e.g. git://foo, https://foo. If multiple URIs
+# are provided, the eclass will consider them as fallback URIs to try
+# if the first URI does not work. For supported URI syntaxes, read up
+# the manpage for git-clone(1).
+#
+# It can be overridden via env using ${PN}_LIVE_REPO variable.
+#
+# Can be a whitespace-separated list or an array.
+#
+# Example:
+# @CODE
+# EGIT_REPO_URI="git://a/b.git https://c/d.git"
+# @CODE
+
+# @ECLASS-VARIABLE: EVCS_OFFLINE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty, this variable prevents any online operations.
+
+# @ECLASS-VARIABLE: EVCS_UMASK
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this variable to a custom umask. This is intended to be set by
+# users. By setting this to something like 002, it can make life easier
+# for people who do development as non-root (but are in the portage
+# group), and then switch over to building with FEATURES=userpriv.
+# Or vice-versa. Shouldn't be a security issue here as anyone who has
+# portage group write access already can screw the system over in more
+# creative ways.
+
+# @ECLASS-VARIABLE: EGIT_BRANCH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The branch name to check out. If unset, the upstream default (HEAD)
+# will be used.
+#
+# It can be overridden via env using ${PN}_LIVE_BRANCH variable.
+
+# @ECLASS-VARIABLE: EGIT_COMMIT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The tag name or commit identifier to check out. If unset, newest
+# commit from the branch will be used. If set, EGIT_BRANCH will
+# be ignored.
+#
+# It can be overridden via env using ${PN}_LIVE_COMMIT variable.
+
+# @ECLASS-VARIABLE: EGIT_CHECKOUT_DIR
+# @DESCRIPTION:
+# The directory to check the git sources out to.
+#
+# EGIT_CHECKOUT_DIR=${WORKDIR}/${P}
+
+# @FUNCTION: _git-r3_env_setup
+# @INTERNAL
+# @DESCRIPTION:
+# Set the eclass variables as necessary for operation. This can involve
+# setting EGIT_* to defaults or ${PN}_LIVE_* variables.
+_git-r3_env_setup() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# check the clone type
+	case "${EGIT_CLONE_TYPE}" in
+		mirror|single+tags|single|shallow)
+			;;
+		*)
+			die "Invalid EGIT_CLONE_TYPE=${EGIT_CLONE_TYPE}"
+	esac
+	# Upgrade the user's EGIT_CLONE_TYPE when the ebuild declares a
+	# 'higher' minimum (order, lowest first:
+	# shallow < single < single+tags < mirror).
+	case "${EGIT_MIN_CLONE_TYPE}" in
+		shallow)
+			;;
+		single)
+			if [[ ${EGIT_CLONE_TYPE} == shallow ]]; then
+				einfo "git-r3: ebuild needs to be cloned in '\e[1msingle\e[22m' mode, adjusting"
+				EGIT_CLONE_TYPE=single
+			fi
+			;;
+		single+tags)
+			if [[ ${EGIT_CLONE_TYPE} == shallow || ${EGIT_CLONE_TYPE} == single ]]; then
+				einfo "git-r3: ebuild needs to be cloned in '\e[1msingle+tags\e[22m' mode, adjusting"
+				EGIT_CLONE_TYPE=single+tags
+			fi
+			;;
+		mirror)
+			if [[ ${EGIT_CLONE_TYPE} != mirror ]]; then
+				einfo "git-r3: ebuild needs to be cloned in '\e[1mmirror\e[22m' mode, adjusting"
+				EGIT_CLONE_TYPE=mirror
+			fi
+			;;
+		*)
+			die "Invalid EGIT_MIN_CLONE_TYPE=${EGIT_MIN_CLONE_TYPE}"
+	esac
+
+	# Per-package user overrides: ${PN} with '-' and '+' mapped to '_'
+	# forms the variable prefix, e.g. foo_bar_LIVE_REPO.
+	local esc_pn livevar
+	esc_pn=${PN//[-+]/_}
+
+	livevar=${esc_pn}_LIVE_REPO
+	EGIT_REPO_URI=${!livevar:-${EGIT_REPO_URI}}
+	[[ ${!livevar} ]] \
+		&& ewarn "Using ${livevar}, no support will be provided"
+
+	livevar=${esc_pn}_LIVE_BRANCH
+	EGIT_BRANCH=${!livevar:-${EGIT_BRANCH}}
+	[[ ${!livevar} ]] \
+		&& ewarn "Using ${livevar}, no support will be provided"
+
+	livevar=${esc_pn}_LIVE_COMMIT
+	EGIT_COMMIT=${!livevar:-${EGIT_COMMIT}}
+	[[ ${!livevar} ]] \
+		&& ewarn "Using ${livevar}, no support will be provided"
+
+	# Migration helpers. Remove them when git-2 is removed.
+	# Each dies loudly when a removed git-2 variable is still set.
+
+	if [[ ${EGIT_SOURCEDIR} ]]; then
+		eerror "EGIT_SOURCEDIR has been replaced by EGIT_CHECKOUT_DIR. While updating"
+		eerror "your ebuild, please check whether the variable is necessary at all"
+		eerror "since the default has been changed from \${S} to \${WORKDIR}/\${P}."
+		eerror "Therefore, proper setting of S may be sufficient."
+		die "EGIT_SOURCEDIR has been replaced by EGIT_CHECKOUT_DIR."
+	fi
+
+	if [[ ${EGIT_MASTER} ]]; then
+		eerror "EGIT_MASTER has been removed. Instead, the upstream default (HEAD)"
+		eerror "is used by the eclass. Please remove the assignment or use EGIT_BRANCH"
+		eerror "as necessary."
+		die "EGIT_MASTER has been removed."
+	fi
+
+	if [[ ${EGIT_HAS_SUBMODULES} ]]; then
+		eerror "EGIT_HAS_SUBMODULES has been removed. The eclass no longer needs"
+		eerror "to switch the clone type in order to support submodules and therefore"
+		eerror "submodules are detected and fetched automatically."
+		die "EGIT_HAS_SUBMODULES is no longer necessary."
+	fi
+
+	if [[ ${EGIT_PROJECT} ]]; then
+		eerror "EGIT_PROJECT has been removed. Instead, the eclass determines"
+		eerror "the local clone path using path in canonical EGIT_REPO_URI."
+		eerror "If the current algorithm causes issues for you, please report a bug."
+		die "EGIT_PROJECT is no longer necessary."
+	fi
+
+	if [[ ${EGIT_BOOTSTRAP} ]]; then
+		eerror "EGIT_BOOTSTRAP has been removed. Please create proper src_prepare()"
+		eerror "instead."
+		die "EGIT_BOOTSTRAP has been removed."
+	fi
+
+	if [[ ${EGIT_NOUNPACK} ]]; then
+		eerror "EGIT_NOUNPACK has been removed. The eclass no longer calls default"
+		eerror "unpack function. If necessary, please declare proper src_unpack()."
+		die "EGIT_NOUNPACK has been removed."
+	fi
+}
+
+# @FUNCTION: _git-r3_set_gitdir
+# @USAGE: <repo-uri>
+# @INTERNAL
+# @DESCRIPTION:
+# Obtain the local repository path and set it as GIT_DIR. Creates
+# a new repository if necessary.
+#
+# <repo-uri> may be used to compose the path. It should therefore be
+# a canonical URI to the repository.
+_git-r3_set_gitdir() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Strip the protocol and hostname, e.g. git://host/a/b -> a/b.
+	local repo_name=${1#*://*/}
+
+	# strip the trailing slash
+	repo_name=${repo_name%/}
+
+	# strip common prefixes to make paths more likely to match
+	# e.g. git://X/Y.git vs https://X/git/Y.git
+	# (but just one of the prefixes)
+	case "${repo_name}" in
+		# gnome.org... who else?
+		browse/*) repo_name=${repo_name#browse/};;
+		# cgit can proxy requests to git
+		cgit/*) repo_name=${repo_name#cgit/};;
+		# pretty common
+		git/*) repo_name=${repo_name#git/};;
+		# gentoo.org
+		gitroot/*) repo_name=${repo_name#gitroot/};;
+		# google code, sourceforge
+		p/*) repo_name=${repo_name#p/};;
+		# kernel.org
+		pub/scm/*) repo_name=${repo_name#pub/scm/};;
+	esac
+	# ensure a .git suffix, same reason
+	repo_name=${repo_name%.git}.git
+	# now replace all the slashes
+	repo_name=${repo_name//\//_}
+
+	local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
+	: ${EGIT3_STORE_DIR:=${distdir}/git3-src}
+
+	GIT_DIR=${EGIT3_STORE_DIR}/${repo_name}
+
+	if [[ ! -d ${EGIT3_STORE_DIR} ]]; then
+		(
+			# subshell keeps the broad 'addwrite /' from leaking
+			# into the rest of the build
+			addwrite /
+			mkdir -p "${EGIT3_STORE_DIR}" || die
+		) || die "Unable to create ${EGIT3_STORE_DIR}"
+	fi
+
+	addwrite "${EGIT3_STORE_DIR}"
+	if [[ ! -d ${GIT_DIR} ]]; then
+		local saved_umask
+		# honor the user-requested umask while creating the clone
+		if [[ ${EVCS_UMASK} ]]; then
+			saved_umask=$(umask)
+			umask "${EVCS_UMASK}" || die "Bad options to umask: ${EVCS_UMASK}"
+		fi
+		mkdir "${GIT_DIR}" || die
+		# git init picks up GIT_DIR (exported by the caller)
+		git init --bare || die
+		if [[ ${saved_umask} ]]; then
+			umask "${saved_umask}" || die
+		fi
+	fi
+}
+
+# @FUNCTION: _git-r3_set_submodules
+# @USAGE: <file-contents>
+# @INTERNAL
+# @DESCRIPTION:
+# Parse .gitmodules contents passed as <file-contents>
+# as in "$(cat .gitmodules)"). Composes a 'submodules' array that
+# contains in order (name, URL, path) for each submodule.
+_git-r3_set_submodules() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local data=${1}
+
+	# ( name url path ... )
+	submodules=()
+
+	local l
+	# Iterate over 'git config -l' output (the .gitmodules data is fed
+	# to git config via /dev/fd/0, i.e. stdin), one key=value per line.
+	while read l; do
+		# submodule.<path>.path=<path>
+		# submodule.<path>.url=<url>
+		[[ ${l} == submodule.*.url=* ]] || continue
+
+		l=${l#submodule.}
+		local subname=${l%%.url=*}
+
+		# skip modules that have 'update = none', bug #487262.
+		local upd=$(echo "${data}" | git config -f /dev/fd/0 \
+			submodule."${subname}".update)
+		[[ ${upd} == none ]] && continue
+
+		submodules+=(
+			"${subname}"
+			"$(echo "${data}" | git config -f /dev/fd/0 \
+				submodule."${subname}".url || die)"
+			"$(echo "${data}" | git config -f /dev/fd/0 \
+				submodule."${subname}".path || die)"
+		)
+	done < <(echo "${data}" | git config -f /dev/fd/0 -l || die)
+}
+
+# @FUNCTION: _git-r3_set_subrepos
+# @USAGE: <submodule-uri> <parent-repo-uri>...
+# @INTERNAL
+# @DESCRIPTION:
+# Create 'subrepos' array containing absolute (canonical) submodule URIs
+# for the given <submodule-uri>. If the URI is relative, URIs will be
+# constructed using all <parent-repo-uri>s. Otherwise, this single URI
+# will be placed in the array.
+_git-r3_set_subrepos() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local suburl=${1}
+	subrepos=( "${@:2}" )
+
+	if [[ ${suburl} == ./* || ${suburl} == ../* ]]; then
+		# relative URI: resolve './' and '../' against each parent URI
+		# drop all possible trailing slashes for consistency
+		subrepos=( "${subrepos[@]%%/}" )
+
+		while true; do
+			if [[ ${suburl} == ./* ]]; then
+				# './' refers to the parent itself; just strip it
+				suburl=${suburl:2}
+			elif [[ ${suburl} == ../* ]]; then
+				suburl=${suburl:3}
+
+				# XXX: correctness checking
+
+				# drop the last path component
+				subrepos=( "${subrepos[@]%/*}" )
+				# and then the trailing slashes, again
+				subrepos=( "${subrepos[@]%%/}" )
+			else
+				break
+			fi
+		done
+
+		# append the preprocessed path to the preprocessed URIs
+		subrepos=( "${subrepos[@]/%//${suburl}}")
+	else
+		# absolute URI: use it as-is
+		subrepos=( "${suburl}" )
+	fi
+}
+
+
+# @FUNCTION: _git-r3_is_local_repo
+# @USAGE: <repo-uri>
+# @INTERNAL
+# @DESCRIPTION:
+# Determine whether the given URI specifies a local (on-disk)
+# repository. Returns true (0) for file:// URIs and absolute paths.
+_git-r3_is_local_repo() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local uri=${1}
+
+	# NOTE: with no argument, uri is empty and the test is false.
+	[[ ${uri} == file://* || ${uri} == /* ]]
+}
+
+# @FUNCTION: _git-r3_find_head
+# @USAGE: <head-ref>
+# @INTERNAL
+# @DESCRIPTION:
+# Given a ref to which remote HEAD was fetched, try to find
+# a branch matching the commit. Expects 'git show-ref'
+# or 'git ls-remote' output on stdin. Prints the matching ref name
+# on stdout, or dies when no branch matches.
+_git-r3_find_head() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local head_ref=${1}
+	local head_hash=$(git rev-parse --verify "${1}" || die)
+	local matching_ref
+
+	# TODO: some transports support peeking at symbolic remote refs
+	# find a way to use that rather than guessing
+
+	# (based on guess_remote_head() in git-1.9.0/remote.c)
+	# stdin provides '<hash> <ref>' pairs, one per line
+	local h ref
+	while read h ref; do
+		# look for matching head
+		if [[ ${h} == ${head_hash} ]]; then
+			# either take the first matching ref, or master if it is there
+			if [[ ! ${matching_ref} || ${ref} == refs/heads/master ]]; then
+				matching_ref=${ref}
+			fi
+		fi
+	done
+
+	if [[ ! ${matching_ref} ]]; then
+		die "Unable to find a matching branch for remote HEAD (${head_hash})"
+	fi
+
+	echo "${matching_ref}"
+}
+
+# @FUNCTION: git-r3_fetch
+# @USAGE: [<repo-uri> [<remote-ref> [<local-id>]]]
+# @DESCRIPTION:
+# Fetch new commits to the local clone of repository.
+#
+# <repo-uri> specifies the repository URIs to fetch from, as a space-
+# -separated list. The first URI will be used as repository group
+# identifier and therefore must be used consistently. When not
+# specified, defaults to ${EGIT_REPO_URI}.
+#
+# <remote-ref> specifies the remote ref or commit id to fetch.
+# It is preferred to use 'refs/heads/<branch-name>' for branches
+# and 'refs/tags/<tag-name>' for tags. Other options are 'HEAD'
+# for upstream default branch and hexadecimal commit SHA1. Defaults
+# to the first of EGIT_COMMIT, EGIT_BRANCH or literal 'HEAD' that
+# is set to a non-null value.
+#
+# <local-id> specifies the local branch identifier that will be used to
+# locally store the fetch result. It should be unique to multiple
+# fetches within the repository that can be performed at the same time
+# (including parallel merges). It defaults to ${CATEGORY}/${PN}/${SLOT%/*}.
+# This default should be fine unless you are fetching multiple trees
+# from the same repository in the same ebuild.
+#
+# The fetch operation will affect the EGIT_STORE only. It will not touch
+# the working copy, nor export any environment variables.
+# If the repository contains submodules, they will be fetched
+# recursively.
+git-r3_fetch() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# nothing to do when offline operation was requested
+	[[ ${EVCS_OFFLINE} ]] && return
+
+	local repos
+	if [[ ${1} ]]; then
+		repos=( ${1} )
+	elif [[ $(declare -p EGIT_REPO_URI) == "declare -a"* ]]; then
+		# EGIT_REPO_URI declared as an array
+		repos=( "${EGIT_REPO_URI[@]}" )
+	else
+		# whitespace-separated list
+		repos=( ${EGIT_REPO_URI} )
+	fi
+
+	local branch=${EGIT_BRANCH:+refs/heads/${EGIT_BRANCH}}
+	local remote_ref=${2:-${EGIT_COMMIT:-${branch:-HEAD}}}
+	local local_id=${3:-${CATEGORY}/${PN}/${SLOT%/*}}
+	local local_ref=refs/git-r3/${local_id}/__main__
+
+	[[ ${repos[@]} ]] || die "No URI provided and EGIT_REPO_URI unset"
+
+	local -x GIT_DIR
+	_git-r3_set_gitdir "${repos[0]}"
+
+	# prepend the local mirror if applicable
+	if [[ ${EGIT_MIRROR_URI} ]]; then
+		repos=(
+			"${EGIT_MIRROR_URI%/}/${GIT_DIR##*/}"
+			"${repos[@]}"
+		)
+	fi
+
+	# try to fetch from the remote
+	local r success saved_umask
+	if [[ ${EVCS_UMASK} ]]; then
+		saved_umask=$(umask)
+		umask "${EVCS_UMASK}" || die "Bad options to umask: ${EVCS_UMASK}"
+	fi
+	for r in "${repos[@]}"; do
+		einfo "Fetching \e[1m${r}\e[22m ..."
+
+		local fetch_command=( git fetch "${r}" )
+		local clone_type=${EGIT_CLONE_TYPE}
+
+		if [[ ${r} == https://* ]] && ! ROOT=/ has_version 'dev-vcs/git[curl]'; then
+			eerror "git-r3: fetching from https:// requested. In order to support https,"
+			eerror "dev-vcs/git needs to be built with USE=curl. Example solution:"
+			eerror
+			eerror "	echo dev-vcs/git curl >> /etc/portage/package.use"
+			eerror "	emerge -1v dev-vcs/git"
+			die "dev-vcs/git built with USE=curl required."
+		fi
+
+		if [[ ${r} == https://code.google.com/* ]]; then
+			# Google Code has special magic on top of git that:
+			# 1) can't handle shallow clones at all,
+			# 2) fetches duplicately when tags are pulled in with branch
+			# so automatically switch to single+tags mode.
+			if [[ ${clone_type} == shallow ]]; then
+				einfo "	Google Code does not support shallow clones"
+				einfo "	using \e[1mEGIT_CLONE_TYPE=single+tags\e[22m"
+				clone_type=single+tags
+			elif [[ ${clone_type} == single ]]; then
+				einfo "	git-r3: Google Code does not send tags properly in 'single' mode"
+				einfo "	using \e[1mEGIT_CLONE_TYPE=single+tags\e[22m"
+				clone_type=single+tags
+			fi
+		fi
+
+		if [[ ${clone_type} == mirror ]]; then
+			fetch_command+=(
+				--prune
+				# mirror the remote branches as local branches
+				"+refs/heads/*:refs/heads/*"
+				# pull tags explicitly in order to prune them properly
+				"+refs/tags/*:refs/tags/*"
+				# notes in case something needs them
+				"+refs/notes/*:refs/notes/*"
+				# and HEAD in case we need the default branch
+				# (we keep it in refs/git-r3 since otherwise --prune interferes)
+				"+HEAD:refs/git-r3/HEAD"
+			)
+		else # single or shallow
+			# fetch_l/fetch_r: the local and remote sides of the refspec
+			local fetch_l fetch_r
+
+			if [[ ${remote_ref} == HEAD ]]; then
+				# HEAD
+				fetch_l=HEAD
+			elif [[ ${remote_ref} == refs/heads/* ]]; then
+				# regular branch
+				fetch_l=${remote_ref}
+			else
+				# tag or commit...
+				# let ls-remote figure it out
+				local tagref=$(git ls-remote "${r}" "refs/tags/${remote_ref}")
+
+				# if it was a tag, ls-remote obtained a hash
+				if [[ ${tagref} ]]; then
+					# tag
+					fetch_l=refs/tags/${remote_ref}
+				else
+					# commit
+					# so we need to fetch the branch
+					if [[ ${branch} ]]; then
+						fetch_l=${branch}
+					else
+						fetch_l=HEAD
+					fi
+
+					# fetching by commit in shallow mode? can't do.
+					if [[ ${clone_type} == shallow ]]; then
+						clone_type=single
+					fi
+				fi
+			fi
+
+			if [[ ${fetch_l} == HEAD ]]; then
+				fetch_r=refs/git-r3/HEAD
+			else
+				fetch_r=${fetch_l}
+			fi
+
+			fetch_command+=(
+				"+${fetch_l}:${fetch_r}"
+			)
+
+			if [[ ${clone_type} == single+tags ]]; then
+				fetch_command+=(
+					# pull tags explicitly as requested
+					"+refs/tags/*:refs/tags/*"
+				)
+			fi
+		fi
+
+		if [[ ${clone_type} == shallow ]]; then
+			# NOTE(review): _git-r3_is_local_repo takes a <repo-uri>
+			# argument but none is passed here, so it always tests an
+			# empty URI and returns false — likely meant to be
+			# `_git-r3_is_local_repo "${r}"`; confirm, otherwise the
+			# bug #491260 workaround below never triggers.
+			if _git-r3_is_local_repo; then
+				# '--depth 1' causes sandbox violations with local repos
+				# bug #491260
+				clone_type=single
+			elif [[ ! $(git rev-parse --quiet --verify "${fetch_r}") ]]
+			then
+				# use '--depth 1' when fetching a new branch
+				fetch_command+=( --depth 1 )
+			fi
+		else # non-shallow mode
+			if [[ -f ${GIT_DIR}/shallow ]]; then
+				fetch_command+=( --unshallow )
+			fi
+		fi
+
+		# log the exact command to stderr, then run it
+		set -- "${fetch_command[@]}"
+		echo "${@}" >&2
+		if "${@}"; then
+			if [[ ${clone_type} == mirror ]]; then
+				# find remote HEAD and update our HEAD properly
+				git symbolic-ref HEAD \
+					"$(_git-r3_find_head refs/git-r3/HEAD \
+						< <(git show-ref --heads || die))" \
+						|| die "Unable to update HEAD"
+			else # single or shallow
+				if [[ ${fetch_l} == HEAD ]]; then
+					# find out what branch we fetched as HEAD
+					local head_branch=$(_git-r3_find_head \
+						refs/git-r3/HEAD \
+						< <(git ls-remote --heads "${r}" || die))
+
+					# and move it to its regular place
+					git update-ref --no-deref "${head_branch}" \
+						refs/git-r3/HEAD \
+						|| die "Unable to sync HEAD branch ${head_branch}"
+					git symbolic-ref HEAD "${head_branch}" \
+						|| die "Unable to update HEAD"
+				fi
+			fi
+
+			# now let's see what the user wants from us
+			local full_remote_ref=$(
+				git rev-parse --verify --symbolic-full-name "${remote_ref}"
+			)
+
+			if [[ ${full_remote_ref} ]]; then
+				# when we are given a ref, create a symbolic ref
+				# so that we preserve the actual argument
+				set -- git symbolic-ref "${local_ref}" "${full_remote_ref}"
+			else
+				# otherwise, we were likely given a commit id
+				set -- git update-ref --no-deref "${local_ref}" "${remote_ref}"
+			fi
+
+			echo "${@}" >&2
+			if ! "${@}"; then
+				die "Referencing ${remote_ref} failed (wrong ref?)."
+			fi
+
+			success=1
+			break
+		fi
+	done
+	if [[ ${saved_umask} ]]; then
+		umask "${saved_umask}" || die
+	fi
+	[[ ${success} ]] || die "Unable to fetch from any of EGIT_REPO_URI"
+
+	# submodules can reference commits in any branch
+	# always use the 'clone' mode to accommodate that, bug #503332
+	local EGIT_CLONE_TYPE=mirror
+
+	# recursively fetch submodules
+	if git cat-file -e "${local_ref}":.gitmodules &>/dev/null; then
+		local submodules
+		_git-r3_set_submodules \
+			"$(git cat-file -p "${local_ref}":.gitmodules || die)"
+
+		while [[ ${submodules[@]} ]]; do
+			local subname=${submodules[0]}
+			local url=${submodules[1]}
+			local path=${submodules[2]}
+
+			# use only submodules for which path does exist
+			# (this is on par with 'git submodule'), bug #551100
+			# note: git cat-file does not work for submodules
+			if [[ $(git ls-tree -d "${local_ref}" "${path}") ]]
+			then
+				local commit=$(git rev-parse "${local_ref}:${path}" || die)
+
+				if [[ ! ${commit} ]]; then
+					die "Unable to get commit id for submodule ${subname}"
+				fi
+
+				local subrepos
+				_git-r3_set_subrepos "${url}" "${repos[@]}"
+
+				git-r3_fetch "${subrepos[*]}" "${commit}" "${local_id}/${subname}"
+			fi
+
+			submodules=( "${submodules[@]:3}" ) # shift
+		done
+	fi
+}
+
+# @FUNCTION: git-r3_checkout
+# @USAGE: [<repo-uri> [<checkout-path> [<local-id>]]]
+# @DESCRIPTION:
+# Check the previously fetched tree to the working copy.
+#
+# <repo-uri> specifies the repository URIs, as a space-separated list.
+# The first URI will be used as repository group identifier
+# and therefore must be used consistently with git-r3_fetch.
+# The remaining URIs are not used and therefore may be omitted.
+# When not specified, defaults to ${EGIT_REPO_URI}.
+#
+# <checkout-path> specifies the path to place the checkout. It defaults
+# to ${EGIT_CHECKOUT_DIR} if set, otherwise to ${WORKDIR}/${P}.
+#
+# <local-id> needs to specify the local identifier that was used
+# for respective git-r3_fetch.
+#
+# The checkout operation will write to the working copy, and export
+# the repository state into the environment. If the repository contains
+# submodules, they will be checked out recursively.
+git-r3_checkout() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local repos
+	if [[ ${1} ]]; then
+		repos=( ${1} )
+	elif [[ $(declare -p EGIT_REPO_URI) == "declare -a"* ]]; then
+		repos=( "${EGIT_REPO_URI[@]}" )
+	else
+		repos=( ${EGIT_REPO_URI} )
+	fi
+
+	local out_dir=${2:-${EGIT_CHECKOUT_DIR:-${WORKDIR}/${P}}}
+	local local_id=${3:-${CATEGORY}/${PN}/${SLOT%/*}}
+
+	local -x GIT_DIR
+	_git-r3_set_gitdir "${repos[0]}"
+
+	einfo "Checking out \e[1m${repos[0]}\e[22m to \e[1m${out_dir}\e[22m ..."
+
+	# the fetch stage must have created the __main__ ref
+	if ! git cat-file -e refs/git-r3/"${local_id}"/__main__; then
+		if [[ ${EVCS_OFFLINE} ]]; then
+			die "No local clone of ${repos[0]}. Unable to work with EVCS_OFFLINE."
+		else
+			die "Logic error: no local clone of ${repos[0]}. git-r3_fetch not used?"
+		fi
+	fi
+	# symbolic ref name when the fetch recorded a branch/tag,
+	# empty when it pinned a raw commit id
+	local remote_ref=$(
+		git symbolic-ref --quiet refs/git-r3/"${local_id}"/__main__
+	)
+	local new_commit_id=$(
+		git rev-parse --verify refs/git-r3/"${local_id}"/__main__
+	)
+
+	# helper performing the actual checkout into ${out_dir}
+	# NOTE(review): nested function definition; it stays defined after
+	# this call returns — presumably harmless, but confirm.
+	git-r3_sub_checkout() {
+		local orig_repo=${GIT_DIR}
+		local -x GIT_DIR=${out_dir}/.git
+		local -x GIT_WORK_TREE=${out_dir}
+
+		mkdir -p "${out_dir}" || die
+
+		# use git init+fetch instead of clone since the latter doesn't like
+		# non-empty directories.
+
+		git init --quiet || die
+		# setup 'alternates' to avoid copying objects
+		echo "${orig_repo}/objects" > "${GIT_DIR}"/objects/info/alternates || die
+		# now copy the refs
+		# [htn]* safely catches heads, tags, notes without complaining
+		# on non-existing ones, and omits internal 'git-r3' ref
+		cp -R "${orig_repo}"/refs/[htn]* "${GIT_DIR}"/refs/ || die
+
+		# (no need to copy HEAD, we will set it via checkout)
+
+		if [[ -f ${orig_repo}/shallow ]]; then
+			cp "${orig_repo}"/shallow "${GIT_DIR}"/ || die
+		fi
+
+		set -- git checkout --quiet
+		if [[ ${remote_ref} ]]; then
+			set -- "${@}" "${remote_ref#refs/heads/}"
+		else
+			set -- "${@}" "${new_commit_id}"
+		fi
+		# log the exact command, then run it
+		echo "${@}" >&2
+		"${@}" || die "git checkout ${remote_ref:-${new_commit_id}} failed"
+	}
+	git-r3_sub_checkout
+
+	local old_commit_id=$(
+		git rev-parse --quiet --verify refs/git-r3/"${local_id}"/__old__
+	)
+	if [[ ! ${old_commit_id} ]]; then
+		echo "GIT NEW branch -->"
+		echo "   repository:               ${repos[0]}"
+		echo "   at the commit:            ${new_commit_id}"
+	else
+		# diff against previous revision
+		echo "GIT update -->"
+		echo "   repository:               ${repos[0]}"
+		# write out message based on the revisions
+		if [[ "${old_commit_id}" != "${new_commit_id}" ]]; then
+			echo "   updating from commit:     ${old_commit_id}"
+			echo "   to commit:                ${new_commit_id}"
+
+			git --no-pager diff --stat \
+				${old_commit_id}..${new_commit_id}
+		else
+			echo "   at the commit:            ${new_commit_id}"
+		fi
+	fi
+	# remember the current commit as __old__ for the next sync
+	git update-ref --no-deref refs/git-r3/"${local_id}"/{__old__,__main__} || die
+
+	# recursively checkout submodules
+	if [[ -f ${out_dir}/.gitmodules ]]; then
+		local submodules
+		_git-r3_set_submodules \
+			"$(<"${out_dir}"/.gitmodules)"
+
+		while [[ ${submodules[@]} ]]; do
+			local subname=${submodules[0]}
+			local url=${submodules[1]}
+			local path=${submodules[2]}
+
+			# use only submodules for which path does exist
+			# (this is on par with 'git submodule'), bug #551100
+			if [[ -d ${out_dir}/${path} ]]; then
+				local subrepos
+				_git-r3_set_subrepos "${url}" "${repos[@]}"
+
+				git-r3_checkout "${subrepos[*]}" "${out_dir}/${path}" \
+					"${local_id}/${subname}"
+			fi
+
+			submodules=( "${submodules[@]:3}" ) # shift
+		done
+	fi
+
+	# keep this *after* submodules
+	export EGIT_DIR=${GIT_DIR}
+	export EGIT_VERSION=${new_commit_id}
+}
+
+# @FUNCTION: git-r3_peek_remote_ref
+# @USAGE: [<repo-uri> [<remote-ref>]]
+# @DESCRIPTION:
+# Peek the reference in the remote repository and print the matching
+# (newest) commit SHA1.
+#
+# <repo-uri> specifies the repository URIs to fetch from, as a space-
+# -separated list. When not specified, defaults to ${EGIT_REPO_URI}.
+#
+# <remote-ref> specifies the remote ref to peek. It is preferred to use
+# 'refs/heads/<branch-name>' for branches and 'refs/tags/<tag-name>'
+# for tags. Alternatively, 'HEAD' may be used for upstream default
+# branch. Defaults to the first of EGIT_COMMIT, EGIT_BRANCH or literal
+# 'HEAD' that is set to a non-null value.
+#
+# The operation will be done purely on the remote, without using local
+# storage. If commit SHA1 is provided as <remote-ref>, the function will
+# fail due to limitations of git protocol.
+#
+# On success, the function returns 0 and writes hexadecimal commit SHA1
+# to stdout. On failure, the function returns 1.
+git-r3_peek_remote_ref() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local repos
+	if [[ ${1} ]]; then
+		repos=( ${1} )
+	elif [[ $(declare -p EGIT_REPO_URI) == "declare -a"* ]]; then
+		repos=( "${EGIT_REPO_URI[@]}" )
+	else
+		repos=( ${EGIT_REPO_URI} )
+	fi
+
+	local branch=${EGIT_BRANCH:+refs/heads/${EGIT_BRANCH}}
+	local remote_ref=${2:-${EGIT_COMMIT:-${branch:-HEAD}}}
+
+	[[ ${repos[@]} ]] || die "No URI provided and EGIT_REPO_URI unset"
+
+	local r success
+	for r in "${repos[@]}"; do
+		# einfo goes to stderr so stdout carries only the SHA1
+		einfo "Peeking \e[1m${remote_ref}\e[22m on \e[1m${r}\e[22m ..." >&2
+
+		local is_branch lookup_ref
+		if [[ ${remote_ref} == refs/heads/* || ${remote_ref} == HEAD ]]
+		then
+			is_branch=1
+			lookup_ref=${remote_ref}
+		else
+			# ls-remote by commit is going to fail anyway,
+			# so we may as well pass refs/tags/ABCDEF...
+			lookup_ref=refs/tags/${remote_ref}
+		fi
+
+		# split on whitespace
+		local ref=(
+			$(git ls-remote "${r}" "${lookup_ref}")
+		)
+
+		# first field of the first matching line is the commit SHA1
+		if [[ ${ref[0]} ]]; then
+			echo "${ref[0]}"
+			return 0
+		fi
+	done
+
+	return 1
+}
+
+# Fetch-only stage: set up the environment and update the local clone
+# without touching the working copy.
+git-r3_src_fetch() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# warn git-2 users about the separate store directory
+	if [[ ! ${EGIT3_STORE_DIR} && ${EGIT_STORE_DIR} ]]; then
+		ewarn "You have set EGIT_STORE_DIR but not EGIT3_STORE_DIR. Please consider"
+		ewarn "setting EGIT3_STORE_DIR for git-r3.eclass. It is recommended to use"
+		ewarn "a different directory than EGIT_STORE_DIR to ease removing old clones"
+		ewarn "when git-2 eclass becomes deprecated."
+	fi
+
+	_git-r3_env_setup
+	git-r3_fetch
+}
+
+# Default src_unpack: fetch the repository and check it out into
+# ${EGIT_CHECKOUT_DIR} (default ${WORKDIR}/${P}).
+git-r3_src_unpack() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	_git-r3_env_setup
+	git-r3_src_fetch
+	git-r3_checkout
+}
+
+# https://bugs.gentoo.org/show_bug.cgi?id=482666
+# Return 0 (success) when the remote has moved past the installed
+# EGIT_VERSION (exported by git-r3_checkout), non-zero otherwise.
+git-r3_pkg_needrebuild() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local new_commit_id=$(git-r3_peek_remote_ref)
+	[[ ${new_commit_id} && ${EGIT_VERSION} ]] || die "Lookup failed"
+
+	if [[ ${EGIT_VERSION} != ${new_commit_id} ]]; then
+		einfo "Update from \e[1m${EGIT_VERSION}\e[22m to \e[1m${new_commit_id}\e[22m"
+	else
+		einfo "Local and remote at \e[1m${EGIT_VERSION}\e[22m"
+	fi
+
+	[[ ${EGIT_VERSION} != ${new_commit_id} ]]
+}
+
+# 'export' locally until this gets into EAPI
+pkg_needrebuild() { git-r3_pkg_needrebuild; }
+
+# mark the eclass as loaded (checked by the inherit guards above)
+_GIT_R3=1
+fi
diff --git a/eclass/gkrellm-plugin.eclass b/eclass/gkrellm-plugin.eclass
new file mode 100644
index 000000000000..207f3ca05271
--- /dev/null
+++ b/eclass/gkrellm-plugin.eclass
@@ -0,0 +1,83 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+#
+# Original Author: Jim Ramsay <lack@gentoo.org>
+#
+# Purpose:
+# Provides common methods used by (almost) all gkrellm plugins:
+# - Sets up default dependencies
+# - Adds pkg_setup check to ensure gkrellm was built with USE="X" (bug
+# 167227)
+# - Provides utility routines in lieu of hard-coding the plugin directories.
+# - Provides the most common src_install method to avoid code duplication.
+#
+# Utility Routines:
+# gkrellm-plugin_dir - Returns the gkrellm-2 plugin directory
+# gkrellm-plugin_server_dir - Returns the gkrellm-2 server plugin directory
+#
+# Environment:
+# For src_install:
+# PLUGIN_SO - The name of the plugin's .so file which will be installed in
+# the plugin dir. Defaults to "${PN}.so".
+# PLUGIN_DOCS - An optional list of docs to be installed. Defaults to
+# unset.
+# PLUGIN_SERVER_SO - The name of the plugin's server plugin .so portion.
+# Defaults to unset.
+# Important: This will also cause the pkg_setup check to be skipped, so
+# you need to check 'build_with_use app-admin/gkrellm X' in your
+# src_compile and only compile the GUI portion if that returns true. (see
+# x11-plugins/gkrelltop as an example)
+#
+# Changelog:
+# 12 March 2007: Jim Ramsay <lack@gentoo.org>
+# - Added server plugin support
+# 09 March 2007: Jim Ramsay <lack@gentoo.org>
+# - Initial commit
+#
+
+inherit multilib eutils
+
+RDEPEND="=app-admin/gkrellm-2*"
+DEPEND="${RDEPEND}
+ virtual/pkgconfig"
+
+# Print the gkrellm-2 GUI plugin installation directory.
+gkrellm-plugin_dir() {
+	echo /usr/$(get_libdir)/gkrellm2/plugins
+}
+
+# Print the gkrellm-2 server (gkrellmd) plugin installation directory.
+gkrellm-plugin_server_dir() {
+	echo /usr/$(get_libdir)/gkrellm2/plugins-gkrellmd
+}
+
+# Die unless gkrellm was built with the X frontend (bug 167227).
+# Server-only plugins (PLUGIN_SERVER_SO set) skip this check.
+gkrellm-plugin_pkg_setup() {
+	if [[ -z "${PLUGIN_SERVER_SO}" ]] &&
+	   ! built_with_use app-admin/gkrellm X; then
+		eerror "This plugin requires the X frontend of gkrellm."
+		eerror "Please re-emerge app-admin/gkrellm with USE=\"X\""
+		die "Please re-emerge app-admin/gkrellm with USE=\"X\""
+	fi
+}
+
+# Default src_install: install the GUI and/or server plugin .so files
+# plus the common documentation set and any PLUGIN_DOCS.
+gkrellm-plugin_src_install() {
+	if built_with_use app-admin/gkrellm X; then
+		insinto $(gkrellm-plugin_dir)
+		doins ${PLUGIN_SO:-${PN}.so} || die "Plugin shared library was not installed"
+	fi
+
+	if [[ -n "${PLUGIN_SERVER_SO}" ]]; then
+		insinto $(gkrellm-plugin_server_dir)
+		doins ${PLUGIN_SERVER_SO} || die "Server plugin shared library was not installed"
+	fi
+
+	# NOTE(review): DDOCS is not declared local and leaks into the
+	# ebuild environment after src_install.
+	DDOCS="README* Change* AUTHORS FAQ TODO INSTALL"
+
+	# install each default doc that exists and is non-empty
+	for doc in ${DDOCS}; do
+		[ -s "$doc" ] && dodoc $doc
+	done
+
+	# NOTE(review): when PLUGIN_DOCS is unset, this trailing test makes
+	# the function return non-zero — confirm no caller treats that as
+	# a failure.
+	[ -n "${PLUGIN_DOCS}" ] && dodoc ${PLUGIN_DOCS}
+}
+
+EXPORT_FUNCTIONS pkg_setup src_install
diff --git a/eclass/gnat.eclass b/eclass/gnat.eclass
new file mode 100644
index 000000000000..6b786bcdffa0
--- /dev/null
+++ b/eclass/gnat.eclass
@@ -0,0 +1,457 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Author: George Shapovalov <george@gentoo.org>
+# Belongs to: ada herd <ada@gentoo.org>
+#
+# This eclass provides the framework for ada lib installation with the split and
+# SLOTted gnat compilers (gnat-xxx, gnatbuild.eclass). Each lib gets built once
+# for every installed gnat compiler. Activation of a particular binary module is
+# performed by eselect-gnat, when the active compiler gets switched
+#
+# The ebuilds should define the lib_compile and lib_install functions that are
+# called from the (exported) gnat_src_compile function of eclass. These
+# functions should operate similarly to the standard src_compile and
+# src_install. The only difference, that they should use $SL variable instead of
+# $S (this is where the working copy of source is held) and $DL instead of $D as
+# its installation point.
+
+inherit flag-o-matic eutils multilib
+
+# The environment is set locally in src_compile and src_install functions
+# by the common code sourced here and in gnat-eselect module.
+# This is the standard location for this code (belongs to eselect-gnat,
+# since eselect should work even in the absence of portage tree and we can
+# guarantee to some extent presence of gnat-eselect when anything gnat-related
+# gets processed. See #192505)
+#
+# Note!
+# It may not be safe to source this at top level. Only source inside local
+# functions!
+GnatCommon="/usr/share/gnat/lib/gnat-common.bash"
+
+# !!NOTE!!
+# src_install should not be exported!
+# Instead gnat_src_install should be explicitly called from within src_install.
+EXPORT_FUNCTIONS pkg_setup pkg_postinst src_compile
+
+DESCRIPTION="Common procedures for building Ada libs using split gnat compilers"
+
+# make sure we have an appropriately recent eselect-gnat installed, as we are
+# using some common code here.
+DEPEND=">=app-eselect/eselect-gnat-1.3"
+
+
+# ----------------------------------
+# Globals
+
+# Lib install locations
+#
+# Gnat profile dependent files go under ${LibTop}/${Gnat_Profile}/${PN}
+# and common files go under SpecsDir, DataDir..
+# In order not to pollute PATH and LDPATH attempt should be made to install
+# binaries and what makes sense for individual packages under
+# ${AdalibLibTop}/${Gnat_Profile}/bin
+PREFIX=/usr
+AdalibSpecsDir=${PREFIX}/include/ada
+AdalibDataDir=${PREFIX}/share/ada
+AdalibLibTop=${PREFIX}/$(get_libdir)/ada
+
+# build-time locations
+# SL is a "localized" S, - location where sources are copied for
+# profile-specific build
+SL=${WORKDIR}/LocalSource
+
+# DL* are "localized destinations" where ARCH/SLOT dependent stuff should be
+# installed in lib_install. There are three:
+#
+DL=${WORKDIR}/LocalDest
+# a generic location for the lib (.a, .so) files
+#
+DLbin=${WORKDIR}/LocalBinDest
+# binaries that should be in the PATH, will be moved to common Ada bin dir
+#
+DLgpr=${WORKDIR}/LocalGPRDest
+# gpr's should go here.
+
+# file containing environment formed by gnat-eselect (build-time)
+BuildEnv=${WORKDIR}/BuildEnv
+
+# environment for installed lib. Profile-specific stuff should use %DL% as a top
+# of their location. This (%DL%) will be substituted with a proper location upon
+# install
+LibEnv=${WORKDIR}/LibEnv
+
+
# env file prepared by gnat.eselect only lists new settings for env vars;
# we need to change that to prepend, rather than replace, for PATH.
#
# $1 - the file to expand in place.  It should contain only VAR=value
#      lines; blank lines and '#' comments are skipped.
expand_BuildEnv() {
	local line EnvVar
	# start from a clean temp file: a stale $1.tmp left by an earlier run
	# would otherwise be appended to
	: > "$1.tmp"
	# read line-wise instead of word-splitting $(cat $1): this keeps
	# values containing spaces intact and skips comments whole, where the
	# original emitted one bogus 'export' per whitespace-separated word
	while IFS= read -r line || [[ -n ${line} ]]; do
		[[ -z ${line} || ${line} == \#* ]] && continue
		EnvVar=${line%%=*}
		if [[ "${EnvVar}" == "PATH" ]] ; then
			# PATH is prepended to the caller's PATH, not replaced
			echo "export ${line}:\${${EnvVar}}" >> "$1.tmp"
		else
			echo "export ${line}" >> "$1.tmp"
		fi
	done < "$1"
	mv "$1.tmp" "$1"
}
+
+
+# ------------------------------------
+# Dependency processing related stuff
+
# A simple wrapper to get the relevant part of the DEPEND
# params:
#  $1 - should contain dependency specification analogous to DEPEND,
#    if omitted, DEPEND is processed
#
# Prints the matched standard spec truncated to 8 chars (e.g. "ada-2005");
# prints nothing and returns 1 when no virtual/ada dep is found.
get_ada_dep() {
	# all locals: the original leaked DEP and fn into the global scope
	local DEP fn TempStr
	[[ -z "$1" ]] && DEP="${DEPEND}" || DEP="$1"
	for fn in ${DEP}; do # here ${DEP} should *not* be quoted: split into atoms
		# match the full "virtual/ada" string, as simply "ada" is a common
		# part of ${PN}, even for some packages under dev-ada
		[[ ${fn} == *virtual/ada* ]] && TempStr=${fn/*virtual\//}
	done
#	debug-print-function $FUNCNAME "TempStr=${TempStr:0:8}"
	[[ -n ${TempStr} ]] && echo "${TempStr:0:8}"
}
+
# This function is used to check whether the requested gnat profile matches the
# requested Ada standard.
# !!ATTN!!
# This must match dependencies as specified in virtual/ada !!!
#
# params:
#  $1 - the requested gnat profile in usual form (e.g. x86_64-pc-linux-gnu-gnat-gcc-4.1)
#  $2 - Ada standard specification, as would be specified in DEPEND.
#    Valid values: ada-1995, ada-2005, ada
#
# Returns 0 when the profile can serve the standard, 1 otherwise (also for
# unknown compilers or a misspelled standard).
#
# This used to treat ada-1995 and ada alike, but some packages (still
# requested by users) no longer compile with new compilers (not the
# standard issue, but rather compiler becoming stricter most of the time).
# Plus there are some "intermediary versions", not fully 2005 compliant
# but already causing problems. Therefore, now we do exact matching.
belongs_to_standard() {
#	debug-print-function $FUNCNAME $*
	. ${GnatCommon} || die "failed to source gnat-common lib"
	local GnatSlot=$(get_gnat_SLOT $1)
	# strip the dot so SLOTs compare as plain integers below (e.g. 4.2 -> 42)
	local ReducedSlot=${GnatSlot//\./}
	#
	if [[ $2 == 'ada' ]] ; then
#		debug-print-function "ada or ada-1995 match"
		return 0 # no restrictions imposed
	elif [[ "$2" == 'ada-1995' ]] ; then
		# Ada95: gcc backends up to 4.2, ACT gpl releases before 41
		if [[ $(get_gnat_Pkg $1) == "gcc" ]]; then
#			debug-print-function "got gcc profile, GnatSlot=${ReducedSlot}"
			[[ ${ReducedSlot} -le "42" ]] && return 0 || return 1
		elif [[ $(get_gnat_Pkg $1) == "gpl" ]]; then
#			debug-print-function "got gpl profile, GnatSlot=${ReducedSlot}"
			[[ ${ReducedSlot} -lt "41" ]] && return 0 || return 1
		else
			return 1 # unknown compiler encountered
		fi
	elif [[ "$2" == 'ada-2005' ]] ; then
		# Ada2005: gcc backends from 4.3 on, ACT gpl releases from 41 on
		if [[ $(get_gnat_Pkg $1) == "gcc" ]]; then
#			debug-print-function "got gcc profile, GnatSlot=${ReducedSlot}"
			[[ ${ReducedSlot} -ge "43" ]] && return 0 || return 1
		elif [[ $(get_gnat_Pkg $1) == "gpl" ]]; then
#			debug-print-function "got gpl profile, GnatSlot=${ReducedSlot}"
			[[ ${ReducedSlot} -ge "41" ]] && return 0 || return 1
		else
			return 1 # unknown compiler encountered
		fi
	else
		return 1 # unknown standard requested, check spelling!
	fi
}
+
+
+# ------------------------------------
+# Helpers
+#
+
+
# The purpose of this one is to remove all parts of the env entry specific to a
# given lib. Useful when some lib wants to act differently upon detecting
# itself installed..
#
# params:
#  $1 - name of env var to process (passed by NAME, not value)
#  $2 (opt) - name of the lib to filter out (defaults to ${PN})
#
# Prints the filtered ':'-separated value; kept entries retain a leading ':'.
filter_env_var() {
	# intentionally unquoted: split the named var's value on ':'
	local entries=(${!1//:/ })
	local libName=${2:-${PN}}
	# all locals: the original leaked 'entry' into the global scope
	local env_str entry
	for entry in "${entries[@]}" ; do
		# this simply checks if ${libName} is a substring of the ${entry},
		# should work fine with all the present libs
		if [[ ${entry/${libName}/} == ${entry} ]] ; then
			env_str="${env_str}:${entry}"
		fi
	done
	# quoted so the result cannot undergo accidental glob expansion
	echo "${env_str}"
}
+
# A simpler helper, for the libs that need to extract the active gnat
# location.  Prints the first ':'-separated entry of the named env var.
# Relies on the (presently true) convention that gnat's entries precede
# those of the other installed libs.
#
# params:
#  $1 - name of env var to process (passed by NAME, not value)
get_gnat_value() {
	local -a parts=(${!1//:/ })
	printf '%s\n' "${parts[0]}"
}
+
+
# Returns a name of active gnat profile. Performs some validity checks. No input
# parameters, analyzes the system setup directly.
# Prints nothing when no profile is active; dies when multiple profiles are
# active or the active one has no matching specs file installed.
get_active_profile() {
	# get common code and settings
	. ${GnatCommon} || die "failed to source gnat-common lib"

	local profiles=( $(get_env_list) )

	# NOTE(review): the '*' is inside the quotes, so this compares against
	# the literal string "<MARKER>*" rather than glob-matching - presumably
	# a pattern match was intended; confirm against the eselect-gnat module
	# before changing.
	if [[ ${profiles[@]} == "${MARKER}*" ]]; then
		return
		# returning empty string
	fi

	if (( 1 == ${#profiles[@]} )); then
		# strip the marker prefix to obtain the bare profile name
		local active=${profiles[0]#${MARKER}}
	else
		die "${ENVDIR} contains multiple gnat profiles, please cleanup!"
	fi

	# the profile is only valid if the corresponding gnat is installed
	if [[ -f ${SPECSDIR}/${active} ]]; then
		echo ${active}
	else
		die "The profile active in ${ENVDIR} does not correspond to any installed gnat!"
	fi
}
+
+
+
+# ------------------------------------
+# Functions
+
# Checks the gnat backend SLOT and filters flags correspondingly.
# To be called from src_compile for each profile, before actual compilation.
# Exports ADACFLAGS (derived from CFLAGS when unset), ADAMAKEFLAGS and
# ADABINDFLAGS for the subsequent gnatmake/gnatbind invocations.
# Parameters:
#  $1 - gnat profile, e.g. x86_64-pc-linux-gnu-gnat-gcc-3.4
gnat_filter_flags() {
	debug-print-function $FUNCNAME $*

	# We only need to filter so severely if backends < 3.4 is detected, which
	# means basically gnat-3.15
	GnatProfile=$1
	if [ -z ${GnatProfile} ]; then
		# should not get here!
		die "please specify a valid gnat profile for flag stripping!"
	fi

	# strip everything up to the last '-', leaving the SLOT (e.g. "3.4")
	local GnatSLOT="${GnatProfile//*-/}"
	# NOTE(review): '<' inside [[ ]] is a lexicographic comparison; adequate
	# for the 3.x/4.x SLOT values handled here, but not general versions
	if [[ ${GnatSLOT} < 3.4 ]] ; then
		filter-mfpmath sse 387

		filter-flags -mmmx -msse -mfpmath -frename-registers \
			-fprefetch-loop-arrays -falign-functions=4 -falign-jumps=4 \
			-falign-loops=4 -msse2 -frerun-loop-opt -maltivec -mabi=altivec \
			-fsigned-char -fno-strict-aliasing -pipe

		# old backends choke on -Os and do not know the newer -march
		# names: map them to conservative equivalents
		export ADACFLAGS=${ADACFLAGS:-${CFLAGS}}
		export ADACFLAGS=${ADACFLAGS//-Os/-O2}
		export ADACFLAGS=${ADACFLAGS//pentium-mmx/i586}
		export ADACFLAGS=${ADACFLAGS//pentium[234]/i686}
		export ADACFLAGS=${ADACFLAGS//k6-[23]/k6}
		export ADACFLAGS=${ADACFLAGS//athlon-tbird/i686}
		export ADACFLAGS=${ADACFLAGS//athlon-4/i686}
		export ADACFLAGS=${ADACFLAGS//athlon-[xm]p/i686}
		# gcc-2.8.1 has no amd64 support, so the following two are safe
		export ADACFLAGS=${ADACFLAGS//athlon64/i686}
		export ADACFLAGS=${ADACFLAGS//athlon/i686}
	else
		export ADACFLAGS=${ADACFLAGS:-${CFLAGS}}
	fi

	export ADAMAKEFLAGS=${ADAMAKEFLAGS:-"-cargs ${ADACFLAGS} -margs"}
	export ADABINDFLAGS=${ADABINDFLAGS:-""}
}
+
# Verify that every profile listed as "primary" is actually installed,
# then export the standard Ada tool variables (ADAC/ADAMAKE/ADABIND).
gnat_pkg_setup() {
	debug-print-function $FUNCNAME $*

	local profile

	# sanity check: every primary compiler must have its specs file
	. ${GnatCommon} || die "failed to source gnat-common lib"
	for profile in $(< ${PRIMELIST}); do
		[[ -f ${SPECSDIR}/${profile} ]] && continue
		elog "The ${profile} Ada compiler profile is specified as primary, but is not installed."
		elog "Please rectify the situation before emerging Ada library!"
		elog "Please either install again all the missing compilers listed"
		elog "as primary, or edit /etc/ada/primary_compilers and update the"
		elog "list of primary compilers there."
		einfo ""
		ewarn "If you do the latter, please don't forget to rebuild all"
		ewarn "affected libs!"
		die "Primary compiler is missing"
	done

	export ADAC=${ADAC:-gnatgcc}
	export ADAMAKE=${ADAMAKE:-gnatmake}
	export ADABIND=${ADABIND:-gnatbind}
}
+
+
# Refresh the gnat env configuration so the freshly installed library is
# picked up, and tell the user how to activate the settings immediately.
gnat_pkg_postinst() {
	local line
	einfo "Updating gnat configuration to pick up ${PN} library..."
	eselect gnat update
	for line in \
		"The environment has been set up to make gnat automatically find files" \
		"for the installed library. In order to immediately activate these" \
		"settings please run:"
	do
		elog "${line}"
	done
	elog
	#elog "env-update"
	elog "source /etc/profile"
	einfo
	einfo "Otherwise the settings will become active next time you login"
}
+
+
+
+
# standard lib_compile plug, adapted from base.eclass.  Processes its
# arguments as sections: configure, make, or all (= configure + make).
# Runs inside ${SL}; ebuilds normally override this callback.
lib_compile() {
	debug-print-function $FUNCNAME $*
	[ -z "$1" ] && lib_compile all

	cd ${SL}

	local section
	for section in "$@"; do
		case ${section} in
			configure)
				debug-print-section configure
				econf || die "died running econf, $FUNCNAME:configure"
				;;
			make)
				debug-print-section make
				emake || die "died running emake, $FUNCNAME:make"
				;;
			all)
				debug-print-section all
				lib_compile configure make
				;;
		esac
	done
}
+
# Cycles through installed gnat profiles and calls lib_compile and then
# lib_install in turn.
# Use this function to build/install profile-specific binaries. The code
# building/installing common stuff (docs, etc) can go before/after, as needed,
# so that it is called only once..
#
# lib_compile and lib_install are passed the active gnat profile name - may be used or
# discarded as needed..
#
# Dies when no primary compiler is configured or any per-profile step fails.
gnat_src_compile() {
	debug-print-function $FUNCNAME $*

	# We source the eselect-gnat module and use its functions directly, instead of
	# duplicating code or trying to violate sandbox in some way..
	. ${GnatCommon} || die "failed to source gnat-common lib"

	compilers=( $(find_primary_compilers ) )
	if [[ -n ${compilers[@]} ]] ; then
		local i
		local AdaDep=$(get_ada_dep)
		for (( i = 0 ; i < ${#compilers[@]} ; i = i + 1 )) ; do
			# only build for profiles matching the lib's Ada standard dep;
			# belongs_to_standard prints nothing, so 'if $( ... )' reduces
			# to the substitution's exit status
			if $(belongs_to_standard ${compilers[${i}]} ${AdaDep}); then
				einfo "compiling for gnat profile ${compilers[${i}]}"

				# copy sources
				mkdir "${DL}" "${DLbin}" "${DLgpr}"
				cp -dpR "${S}" "${SL}"

				# setup environment
				# As eselect-gnat also manages the libs, this will ensure the right
				# lib profiles are activated too (in case we depend on some Ada lib)
				generate_envFile ${compilers[${i}]} ${BuildEnv} && \
				expand_BuildEnv "${BuildEnv}" && \
				. "${BuildEnv}" || die "failed to switch to ${compilers[${i}]}"
				# many libs (notably xmlada and gtkada) do not like to see
				# themselves installed. Need to strip them from ADA_*_PATH
				# NOTE: this should not be done in pkg_setup, as we setup
				# environment right above
				export ADA_INCLUDE_PATH=$(filter_env_var ADA_INCLUDE_PATH)
				export ADA_OBJECTS_PATH=$(filter_env_var ADA_OBJECTS_PATH)

				# call compilation callback
				cd "${SL}"
				gnat_filter_flags ${compilers[${i}]}
				lib_compile ${compilers[${i}]} || die "failed compiling for ${compilers[${i}]}"

				# call install callback
				cd "${SL}"
				lib_install ${compilers[${i}]} || die "failed installing profile-specific part for ${compilers[${i}]}"
				# move installed and cleanup; gnat_src_install later picks
				# up the per-profile ${DL*}-<profile> trees
				mv "${DL}" "${DL}-${compilers[${i}]}"
				mv "${DLbin}" "${DLbin}-${compilers[${i}]}"
				mv "${DLgpr}" "${DLgpr}-${compilers[${i}]}"
				rm -rf "${SL}"
			else
				einfo "skipping gnat profile ${compilers[${i}]}"
			fi
		done
	else
		ewarn "Please note!"
		elog "Treatment of installed Ada compilers has recently changed!"
		elog "Libs are now being built only for \"primary\" compilers."
		elog "Please list gnat profiles (as reported by \"eselect gnat list\")"
		elog "that you want to regularly use (i.e., not just for testing)"
		elog "in ${PRIMELIST}, one per line."
		die "please make sure you have at least one gnat compiler installed and set as primary!"
	fi
}
+
+
# This function simply moves gnat-profile-specific stuff into proper locations.
# Use src_install in ebuild to install the rest of the package.
# For each matching primary compiler it copies the DL*-<profile> trees
# produced by gnat_src_compile into the image and writes a per-profile specs
# file with the %DL*% placeholders substituted by the final install paths.
gnat_src_install() {
	debug-print-function $FUNCNAME $*

	# prep lib specs directory
	. ${GnatCommon} || die "failed to source gnat-common lib"
	dodir ${SPECSDIR}/${PN}

	compilers=( $(find_primary_compilers) )
	if [[ -n ${compilers[@]} ]] ; then
		local i
		local AdaDep=$(get_ada_dep)
		for (( i = 0 ; i < ${#compilers[@]} ; i = i + 1 )) ; do
			if $(belongs_to_standard ${compilers[${i}]} ${AdaDep}); then
				debug-print-section "installing for gnat profile ${compilers[${i}]}"

				local DLlocation=${AdalibLibTop}/${compilers[${i}]}
				dodir ${DLlocation}
				cp -dpR "${DL}-${compilers[${i}]}" "${D}/${DLlocation}/${PN}"
				cp -dpR "${DLbin}-${compilers[${i}]}" "${D}/${DLlocation}"/bin
				cp -dpR "${DLgpr}-${compilers[${i}]}" "${D}/${DLlocation}"/gpr
				# create profile-specific specs file
				cp ${LibEnv} "${D}/${SPECSDIR}/${PN}/${compilers[${i}]}"
				# substitute the %DL*% placeholders with the real locations
				sed -i -e "s:%DL%:${DLlocation}/${PN}:g" "${D}/${SPECSDIR}/${PN}/${compilers[${i}]}"
				sed -i -e "s:%DLbin%:${DLlocation}/bin:g" "${D}/${SPECSDIR}/${PN}/${compilers[${i}]}"
				sed -i -e "s:%DLgpr%:${DLlocation}/gpr:g" "${D}/${SPECSDIR}/${PN}/${compilers[${i}]}"
			else
				einfo "skipping gnat profile ${compilers[${i}]}"
			fi
		done
	else
		die "please make sure you have at least one gnat compiler installed!"
	fi
}
diff --git a/eclass/gnatbuild.eclass b/eclass/gnatbuild.eclass
new file mode 100644
index 000000000000..da8986a6cc8e
--- /dev/null
+++ b/eclass/gnatbuild.eclass
@@ -0,0 +1,795 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Authors: George Shapovalov <george@gentoo.org>
+# Steve Arnold <nerdboy@gentoo.org>
+#
+# Belongs to: ada herd <ada@gentoo.org>
+#
+# Notes:
+# HOMEPAGE and LICENSE are set in appropriate ebuild, as
+# gnat is developed by FSF and AdaCore "in parallel"
+#
+# The following vars can be set in ebuild before inheriting this eclass. They
+# will be respected:
+# SLOT
+# BOOT_SLOT - where old bootstrap is used as it works fine
+
+#WANT_AUTOMAKE="1.8"
+#WANT_AUTOCONF="2.1"
+
+inherit eutils versionator toolchain-funcs flag-o-matic multilib autotools \
+ libtool fixheadtails gnuconfig pax-utils
+
+EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_postrm src_unpack src_compile src_install
+
+IUSE="nls"
+# multilib is supported via profiles now, multilib usevar is deprecated
+
+DEPEND=">=app-eselect/eselect-gnat-1.3
+ sys-devel/bc
+"
+
+RDEPEND="app-eselect/eselect-gnat"
+
+# Note!
+# It may not be safe to source this at top level. Only source inside local
+# functions!
+GnatCommon="/usr/share/gnat/lib/gnat-common.bash"
+
+#---->> globals and SLOT <<----
+
+# just a check, this location seems to vary too much, easier to track it in
+# ebuild
+#[ -z "${GNATSOURCE}" ] && die "please set GNATSOURCE in ebuild! (before inherit)"
+
+# versioning
+# because of gnatpro/gnatgpl we need to track both gcc and gnat versions
+
+# these simply default to $PV
+GNATMAJOR=$(get_version_component_range 1)
+GNATMINOR=$(get_version_component_range 2)
+GNATBRANCH=$(get_version_component_range 1-2)
+GNATRELEASE=$(get_version_component_range 1-3)
+# this one is for the gnat-gpl which is versioned by gcc backend and ACT version
+# number added on top
+ACT_Ver=$(get_version_component_range 4)
+
+# GCCVER and SLOT logic
+#
+# I better define vars for package names, as there was discussion on proper
+# naming and it may change
+PN_GnatGCC="gnat-gcc"
+PN_GnatGpl="gnat-gpl"
+
+# ATTN! GCCVER stands for the provided backend gcc, not the one on the system
+# so tc-* functions are of no use here. The present versioning scheme makes
+# GCCVER basically a part of PV, but *this may change*!!
+#
+# GCCVER can be set in the ebuild.
+[[ -z ${GCCVER} ]] && GCCVER="${GNATRELEASE}"
+
+
+# finally extract GCC version strings
+GCCMAJOR=$(get_version_component_range 1 "${GCCVER}")
+GCCMINOR=$(get_version_component_range 2 "${GCCVER}")
+GCCBRANCH=$(get_version_component_range 1-2 "${GCCVER}")
+GCCRELEASE=$(get_version_component_range 1-3 "${GCCVER}")
+
+# SLOT logic, make it represent gcc backend, as this is what matters most
+# There are some special cases, so we allow it to be defined in the ebuild
+# ATTN!! If you set SLOT in the ebuild, don't forget to make sure that
+# BOOT_SLOT is also set properly!
+[[ -z ${SLOT} ]] && SLOT="${GCCBRANCH}"
+
+# possible future crosscompilation support
+export CTARGET=${CTARGET:-${CHOST}}
+
# True (exit 0) when we are building for a target other than the host.
is_crosscompile() {
	! [[ ${CHOST} == ${CTARGET} ]]
}
+
+# Bootstrap CTARGET and SLOT logic. For now BOOT_TARGET=CHOST is "guaranteed" by
+# profiles, so mostly watch out for the right SLOT used in the bootstrap.
+# As above, with SLOT, it may need to be defined in the ebuild
+BOOT_TARGET=${CTARGET}
+[[ -z ${BOOT_SLOT} ]] && BOOT_SLOT=${SLOT}
+
+# set our install locations
+PREFIX=${GNATBUILD_PREFIX:-/usr} # not sure we need this hook, but may be..
+LIBPATH=${PREFIX}/$(get_libdir)/${PN}/${CTARGET}/${SLOT}
+LIBEXECPATH=${PREFIX}/libexec/${PN}/${CTARGET}/${SLOT}
+INCLUDEPATH=${LIBPATH}/include
+BINPATH=${PREFIX}/${CTARGET}/${PN}-bin/${SLOT}
+DATAPATH=${PREFIX}/share/${PN}-data/${CTARGET}/${SLOT}
+# ATTN! the one below should match the path defined in eselect-gnat module
+CONFIG_PATH="/usr/share/gnat/eselect"
+gnat_profile="${CTARGET}-${PN}-${SLOT}"
+gnat_config_file="${CONFIG_PATH}/${gnat_profile}"
+
+
+# ebuild globals
+if [[ ${PN} == "${PN_GnatPro}" ]] && [[ ${GNATMAJOR} == "3" ]]; then
+ DEPEND="x86? ( >=app-shells/tcsh-6.0 )"
+fi
+S="${WORKDIR}/gcc-${GCCVER}"
+
+# bootstrap globals, common to src_unpack and src_compile
+GNATBOOT="${WORKDIR}/usr"
+GNATBUILD="${WORKDIR}/build"
+
+# necessary for detecting lib locations and creating env.d entry
+#XGCC="${GNATBUILD}/gcc/xgcc -B${GNATBUILD}/gcc"
+
+#----<< globals and SLOT >>----
+
+# set SRC_URI's in ebuilds for now
+
+#----<< support checks >>----
+# skipping this section - do not care about hardened/multilib for now
+
+#---->> specs + env.d logic <<----
+# TODO!!!
+# set MANPATH, etc..
+#----<< specs + env.d logic >>----
+
+
+#---->> some helper functions <<----
# True (0) when this build should produce multilib libraries: only for
# gcc backends >= 3 on 64-bit targets with a multilib profile or USE flag.
is_multilib() {
	# NOTE(review): '<' is a lexicographic comparison; fine for the
	# single-digit GCCMAJOR values handled here, wrong for majors >= 10
	[[ ${GCCMAJOR} < 3 ]] && return 1
	case ${CTARGET} in
		mips64*|powerpc64*|s390x*|sparc64*|x86_64*)
			has_multilib_profile || use multilib ;;
		*)	false ;;
	esac
}
+
+# adapted from toolchain,
+# left only basic multilib functionality and cut off mips stuff
+
# Dump the just-built compiler's default spec strings into
# ${WORKDIR}/build/vanilla.specs for later reference.
create_specs_file() {
	local build_dir="${WORKDIR}/build"
	einfo "Creating a vanilla gcc specs file"
	"${build_dir}"/gcc/xgcc -dumpspecs > "${build_dir}"/vanilla.specs
}
+
+
# eselect stuff taken straight from toolchain.eclass and greatly simplified.
# Appends one per-ABI section to the eselect config file in the image.
#
# $1 - config file path (relative to ${D})
# $2 - ABI name
add_profile_eselect_conf() {
	local gnat_config_file=$1
	local abi=$2
	local var ctarget_val
	local out="${D}/${gnat_config_file}"

	echo >> "${out}"
	if ! is_multilib ; then
		echo " ctarget=${CTARGET}" >> "${out}"
	else
		echo "[${abi}]" >> "${out}"
		# prefer CTARGET_<abi>, then CHOST_<abi>, then plain CTARGET
		var="CTARGET_${abi}"
		if [[ -n ${!var} ]] ; then
			ctarget_val=${!var}
		else
			var="CHOST_${abi}"
			if [[ -n ${!var} ]] ; then
				ctarget_val=${!var}
			else
				ctarget_val=${CTARGET}
			fi
		fi
		echo " ctarget=${ctarget_val}" >> "${out}"
	fi

	var="CFLAGS_${abi}"
	if [[ -n ${!var} ]] ; then
		echo " cflags=${!var}" >> "${out}"
	fi
}
+
+
# Create the eselect-gnat config file describing this compiler build:
# writes the [global] section, then delegates the per-ABI sections to
# add_profile_eselect_conf.
create_eselect_conf() {
	local abi

	dodir ${CONFIG_PATH}

	{
		echo "[global]"
		echo " version=${CTARGET}-${SLOT}"
		echo " binpath=${BINPATH}"
		echo " libexecpath=${LIBEXECPATH}"
		echo " ldpath=${LIBPATH}"
		echo " manpath=${DATAPATH}/man"
		echo " infopath=${DATAPATH}/info"
		echo " bin_prefix=${CTARGET}"
	} > "${D}/${gnat_config_file}"

	for abi in $(get_all_abis) ; do
		# Pass the image-relative path here: add_profile_eselect_conf
		# prepends ${D} itself, so passing "${D}/${gnat_config_file}" (as
		# before) doubled the ${D} prefix and wrote the per-ABI sections
		# outside the image.
		add_profile_eselect_conf "${gnat_config_file}" "${abi}"
	done
}
+
+
+
# Decide whether pkg_postinst should activate the just-installed profile.
# Returns 0 to switch, 1 to keep the user's current selection.
should_we_eselect_gnat() {
	# we only want to switch compilers if installing to / or /tmp/stage1root
	[[ ${ROOT} == "/" ]] || return 1

	# if the current config is invalid, we definitely want a new one
	# Note: due to bash quirkiness, the following must not be 1 line
	local curr_config
	curr_config=$(eselect gnat show | grep ${CTARGET} | awk '{ print $1 }') || return 0
	[[ -z ${curr_config} ]] && return 0

	# The logic is basically "try to keep the same profile if possible"

	# NOTE(review): returning 0 (= do switch) when the current config
	# already equals this package's profile is harmless (re-selects the
	# same profile), but double-check it matches the comment's intent.
	if [[ ${curr_config} == ${CTARGET}-${PN}-${SLOT} ]] ; then
		return 0
	else
		elog "The current gcc config appears valid, so it will not be"
		elog "automatically switched for you. If you would like to"
		elog "switch to the newly installed gcc version, do the"
		elog "following:"
		echo
		elog "eselect gnat set <profile>"
		echo
		ebeep
		return 1
	fi
}
+
# active compiler selection, called from pkg_postinst: make the freshly
# built profile the active gnat and tell the user about the Gentoo Ada setup.
do_gnat_config() {
	eselect gnat set ${CTARGET}-${PN}-${SLOT} &> /dev/null

	local line
	elog "The following gnat profile has been activated:"
	elog "${CTARGET}-${PN}-${SLOT}"
	for line in \
		"" \
		"The compiler has been installed as gnatgcc, and the coverage testing" \
		"tool as gnatgcov." \
		"" \
		"Ada handling in Gentoo allows you to have multiple gnat variants" \
		"installed in parallel and automatically manage Ada libs." \
		"Please take a look at the Ada project page for some documentation:" \
		"http://www.gentoo.org/proj/en/prog_lang/ada/index.xml"
	do
		elog "${line}"
	done
}
+
+
# Taken straight from the toolchain.eclass. Only removed the "obsolete hunk"
#
# The purpose of this DISGUSTING gcc multilib hack is to allow 64bit libs
# to live in lib instead of lib64 where they belong, with 32bit libraries
# in lib32. This hack has been around since the beginning of the amd64 port,
# and we're only now starting to fix everything that's broken. Eventually
# this should go away.
#
# Travis Tilley <lv@gentoo.org> (03 Sep 2004)
#
disgusting_gcc_multilib_HACK() {
	local config
	local libdirs
	if has_multilib_profile ; then
		case $(tc-arch) in
		amd64)
			config="i386/t-linux64"
			# (the trailing backslash merely joins this line with ';;')
			libdirs="../$(get_abi_LIBDIR amd64) ../$(get_abi_LIBDIR x86)" \
			;;
		ppc64)
			config="rs6000/t-linux64"
			libdirs="../$(get_abi_LIBDIR ppc64) ../$(get_abi_LIBDIR ppc)" \
			;;
		esac
	else
		die "Your profile is no longer supported by portage."
	fi

	einfo "updating multilib directories to be: ${libdirs}"
	# point MULTILIB_OSDIRNAMES in the backend's target makefile fragment
	# at our per-ABI libdirs
	sed -i -e "s:^MULTILIB_OSDIRNAMES.*:MULTILIB_OSDIRNAMES = ${libdirs}:" "${S}"/gcc/config/${config}
}
+
+
+#---->> pkg_* <<----
# Prepare the environment before configuring gnat.
gnatbuild_pkg_setup() {
	debug-print-function ${FUNCNAME} $@

	# cross builds need the multilib vars a profile would normally provide
	is_crosscompile && multilib_env ${CTARGET}

	# we dont want to use the installed compiler's specs to build gnat!
	unset GCC_SPECS
}
+
# Post-install: activate or refresh the gnat selection and make sure the
# primary compiler list is not left empty.
gnatbuild_pkg_postinst() {
	# either activate the new profile or just refresh the env entries
	if should_we_eselect_gnat; then
		do_gnat_config
	else
		eselect gnat update
	fi

	# If the primary compiler list is missing or empty, seed it with this
	# profile so users are not left without active compilers (libs are
	# built for primaries only).
	elog
	. ${GnatCommon} || die "failed to source common code"
	if [[ ! -s ${PRIMELIST} ]]; then
		mkdir -p ${SETTINGSDIR}
		echo "${gnat_profile}" > ${PRIMELIST}
		elog "The list of primary compilers was empty and got assigned ${gnat_profile}."
	fi
	elog "Please edit ${PRIMELIST} and list there gnat profiles intended"
	elog "for common use, one per line."
}
+
+
# Post-removal cleanup and user guidance.
gnatbuild_pkg_postrm() {
	# "eselect gnat update" now removes the env.d file if the corresponding
	# gnat profile was unmerged
	eselect gnat update

	local line
	for line in \
		"If you just unmerged the last gnat in this SLOT, your active gnat" \
		"profile got unset. Please check what eselect gnat show tells you" \
		"and set the desired profile"
	do
		elog "${line}"
	done
}
+#---->> pkg_* <<----
+
+#---->> src_* <<----
+
# common unpack stuff
# Processes its arguments as sections: base_unpack (unpack + patch the gcc
# sources), common_prep (prep the gcc/ada tree), or all (default, = both).
gnatbuild_src_unpack() {
	debug-print-function ${FUNCNAME} $@
	[ -z "$1" ] && gnatbuild_src_unpack all

	while [ "$1" ]; do
		case $1 in
			base_unpack)
				unpack ${A}
				# the bootstrap gnat1 needs PaX emutramp ('E') marking
				pax-mark E $(find ${GNATBOOT} -name gnat1)

				cd "${S}"
				# patching gcc sources, following the toolchain
				# first, the common patches
				if [[ -d "${FILESDIR}"/patches ]] && [[ ! -z $(ls "${FILESDIR}"/patches/*.patch 2>/dev/null) ]] ; then
					EPATCH_MULTI_MSG="Applying common Gentoo patches ..." \
					epatch "${FILESDIR}"/patches/*.patch
				fi
				#
				# then per SLOT
				if [[ -d "${FILESDIR}"/patches/${SLOT} ]] && [[ ! -z $(ls "${FILESDIR}"/patches/${SLOT}/*.patch 2>/dev/null) ]] ; then
					EPATCH_MULTI_MSG="Applying SLOT-specific Gentoo patches ..." \
					epatch "${FILESDIR}"/patches/${SLOT}/*.patch
				fi
				# Replacing obsolete head/tail with POSIX compliant ones
				ht_fix_file */configure

#				if ! is_crosscompile && is_multilib && \
#					[[ ( $(tc-arch) == "amd64" || $(tc-arch) == "ppc64" ) && -z ${SKIP_MULTILIB_HACK} ]] ; then
#					disgusting_gcc_multilib_HACK || die "multilib hack failed"
#				fi

				# Fixup libtool to correctly generate .la files with portage
				cd "${S}"
				elibtoolize --portage --shallow --no-uclibc

				gnuconfig_update
				# update configure files
				einfo "Fixing misc issues in configure files"
				for f in $(grep -l 'autoconf version 2.13' $(find "${S}" -name configure)) ; do
					ebegin "  Updating ${f}"
					patch "${f}" "${FILESDIR}"/gcc-configure-LANG.patch >& "${T}"/configure-patch.log \
						|| eerror "Please file a bug about this"
					eend $?
				done

#				this is only needed for gnat-gpl-4.1 and breaks for gnat-gcc, so
#				this block was moved to corresponding ebuild
#				pushd "${S}"/gnattools &> /dev/null
#					eautoconf
#				popd &> /dev/null
				;;

			common_prep)
				# Prepare the gcc source directory
				cd "${S}/gcc"
				# refresh timestamps so make does not try to regenerate these
				touch cstamp-h.in
				touch ada/[es]info.h
				touch ada/nmake.ad[bs]
				# set the compiler name to gnatgcc
				for i in `find ada/ -name '*.ad[sb]'`; do \
					sed -i -e "s/\"gcc\"/\"gnatgcc\"/g" ${i}; \
				done
				# add -fPIC flag to shared libs for 3.4* backend
				if [ "3.4" == "${GCCBRANCH}" ] ; then
					cd ada
					epatch "${FILESDIR}"/gnat-Make-lang.in.patch
				fi

				# gcc 4.3 sources seem to have a common omission of $(DESTDIR),
				# that leads to make install trying to rm -f file on live system.
				# As we do not need this rm, we simply remove the whole line
				if [ "4.3" == "${GCCBRANCH}" ] ; then
					sed -i -e "/\$(RM) \$(bindir)/d" "${S}"/gcc/ada/Make-lang.in
				fi

				mkdir -p "${GNATBUILD}"
				;;

			all)
				gnatbuild_src_unpack base_unpack common_prep
				;;
		esac
		shift
	done
}
+
+# it would be nice to split configure and make steps
+# but both need to operate inside specially tuned environment
+# so just do sections for now (as in eclass section of handbook)
+# sections are: configure, make-tools, bootstrap,
+# gnatlib_and_tools, gnatlib-shared
+gnatbuild_src_compile() {
+ debug-print-function ${FUNCNAME} $@
+ if [[ -z "$1" ]]; then
+ gnatbuild_src_compile all
+ return $?
+ fi
+
+ if [[ "all" == "$1" ]]
+ then # specialcasing "all" to avoid scanning sources unnecessarily
+ gnatbuild_src_compile configure make-tools \
+ bootstrap gnatlib_and_tools gnatlib-shared
+
+ else
+ # Set some paths to our bootstrap compiler.
+ export PATH="${GNATBOOT}/bin:${PATH}"
+ # !ATTN! the bootstrap compilers have a very simplystic structure,
+ # so many paths are not identical to the installed ones.
+ # Plus it was simplified even more in new releases.
+ if [[ ${BOOT_SLOT} > 4.1 ]] ; then
+ GNATLIB="${GNATBOOT}/lib"
+ else
+ GNATLIB="${GNATBOOT}/lib/gnatgcc/${BOOT_TARGET}/${BOOT_SLOT}"
+ fi
+
+ export CC="${GNATBOOT}/bin/gnatgcc"
+ # CPATH is supposed to be applied for any language, thus
+ # superceding either of C/CPLUS/OBJC_INCLUDE_PATHs
+ export CPATH="${GNATLIB}/include"
+ #export INCLUDE_DIR="${GNATLIB}/include"
+ #export C_INCLUDE_PATH="${GNATLIB}/include"
+ #export CPLUS_INCLUDE_PATH="${GNATLIB}/include"
+ export LIB_DIR="${GNATLIB}"
+ export LDFLAGS="-L${GNATLIB}"
+
+ # additional vars from gnuada and elsewhere
+ #export LD_RUN_PATH="${LIBPATH}"
+ export LIBRARY_PATH="${GNATLIB}"
+ #export LD_LIBRARY_PATH="${GNATLIB}"
+# export COMPILER_PATH="${GNATBOOT}/bin/"
+
+ export ADA_OBJECTS_PATH="${GNATLIB}/adalib"
+ export ADA_INCLUDE_PATH="${GNATLIB}/adainclude"
+
+# einfo "CC=${CC},
+# ADA_INCLUDE_PATH=${ADA_INCLUDE_PATH},
+# LDFLAGS=${LDFLAGS},
+# PATH=${PATH}"
+
+ while [ "$1" ]; do
+ case $1 in
+ configure)
+ debug-print-section configure
+ # Configure gcc
+ local confgcc
+
+ # some cross-compile logic from toolchain
+ confgcc="${confgcc} --host=${CHOST}"
+ if is_crosscompile || tc-is-cross-compiler ; then
+ confgcc="${confgcc} --target=${CTARGET}"
+ fi
+ [[ -n ${CBUILD} ]] && confgcc="${confgcc} --build=${CBUILD}"
+
+ # Native Language Support
+ if use nls ; then
+ confgcc="${confgcc} --enable-nls --without-included-gettext"
+ else
+ confgcc="${confgcc} --disable-nls"
+ fi
+
+ if version_is_at_least 4.6 ; then
+ confgcc+=( $(use_enable lto) )
+ else
+ confgcc+=( --disable-lto )
+ fi
+
+ # reasonably sane globals (from toolchain)
+ # also disable mudflap and ssp
+ confgcc="${confgcc} \
+ --with-system-zlib \
+ --disable-checking \
+ --disable-werror \
+ --disable-libgomp \
+ --disable-libmudflap \
+ --disable-libssp \
+ --disable-libunwind-exceptions"
+
+ if in_iuse openmp ; then
+ # Make sure target has pthreads support. #326757 #335883
+ # There shouldn't be a chicken&egg problem here as openmp won't
+ # build without a C library, and you can't build that w/out
+ # already having a compiler ...
+ if ! is_crosscompile || \
+ $(tc-getCPP ${CTARGET}) -E - <<<"#include <pthread.h>" >& /dev/null
+ then
+ case $(tc-arch) in
+ arm)
+ confgcc+=( --disable-libgomp )
+ ;;
+ *)
+ confgcc+=( $(use_enable openmp libgomp) )
+ ;;
+ esac
+ else
+ # Force disable as the configure script can be dumb #359855
+ confgcc+=( --disable-libgomp )
+ fi
+ else
+ # For gcc variants where we don't want openmp (e.g. kgcc)
+ confgcc+=( --disable-libgomp )
+ fi
+
+ # ACT's gnat-gpl does not like libada for whatever reason..
+ if version_is_at_least 4.2 ; then
+ confgcc="${confgcc} --enable-libada"
+# else
+# einfo "ACT's gnat-gpl does not like libada, disabling"
+# confgcc="${confgcc} --disable-libada"
+ fi
+
+ # set some specifics available in later versions
+ if version_is_at_least 4.3 ; then
+ einfo "setting gnat thread model"
+ confgcc="${confgcc} --enable-threads=gnat"
+ confgcc="${confgcc} --enable-shared=boehm-gc,ada,libada"
+ else
+ confgcc="${confgcc} --enable-threads=posix"
+ confgcc="${confgcc} --enable-shared"
+ fi
+
+ # multilib support
+ if is_multilib ; then
+ confgcc="${confgcc} --enable-multilib"
+ else
+ confgcc="${confgcc} --disable-multilib"
+ fi
+
+ # __cxa_atexit is "essential for fully standards-compliant handling of
+ # destructors", but apparently requires glibc.
+ if [[ ${CTARGET} == *-gnu* ]] ; then
+ confgcc="${confgcc} --enable-__cxa_atexit"
+ confgcc="${confgcc} --enable-clocale=gnu"
+ fi
+
+ einfo "confgcc=${confgcc}"
+
+ # need to strip graphite flags or we'll get the
+ # dreaded C compiler cannot create executables...
+ # error.
+ strip-flags -floop-interchange -floop-strip-mine -floop-block
+
+ cd "${GNATBUILD}"
+ CFLAGS="${CFLAGS}" CXXFLAGS="${CXXFLAGS}" "${S}"/configure \
+ --prefix=${PREFIX} \
+ --bindir=${BINPATH} \
+ --includedir=${INCLUDEPATH} \
+ --libdir="${LIBPATH}" \
+ --libexecdir="${LIBEXECPATH}" \
+ --datadir=${DATAPATH} \
+ --mandir=${DATAPATH}/man \
+ --infodir=${DATAPATH}/info \
+ --program-prefix=gnat \
+ --enable-languages="c,ada" \
+ --with-gcc \
+ ${confgcc} || die "configure failed"
+ ;;
+
+ make-tools)
+ debug-print-section make-tools
+ # Compile helper tools
+ cd "${GNATBOOT}"
+ cp "${S}"/gcc/ada/xtreeprs.adb .
+ cp "${S}"/gcc/ada/xsinfo.adb .
+ cp "${S}"/gcc/ada/xeinfo.adb .
+ cp "${S}"/gcc/ada/xnmake.adb .
+ cp "${S}"/gcc/ada/xutil.ad{s,b} .
+ if (( ${GNATMINOR} > 5 )) ; then
+ cp "${S}"/gcc/ada/einfo.ad{s,b} .
+ cp "${S}"/gcc/ada/csinfo.adb .
+ cp "${S}"/gcc/ada/ceinfo.adb .
+ fi
+ gnatmake xtreeprs && \
+ gnatmake xsinfo && \
+ gnatmake xeinfo && \
+ gnatmake xnmake || die "building helper tools"
+ ;;
+
+ bootstrap)
+ debug-print-section bootstrap
+ # and, finally, the build itself
+ cd "${GNATBUILD}"
+ emake bootstrap || die "bootstrap failed"
+ ;;
+
+ gnatlib_and_tools)
+ debug-print-section gnatlib_and_tools
+ einfo "building gnatlib_and_tools"
+ cd "${GNATBUILD}"
+ emake -j1 -C gcc gnatlib_and_tools || \
+ die "gnatlib_and_tools failed"
+ ;;
+
+ gnatlib-shared)
+ debug-print-section gnatlib-shared
+ einfo "building shared lib"
+ cd "${GNATBUILD}"
+ rm -f gcc/ada/rts/*.{o,ali} || die
+ #otherwise make tries to reuse already compiled (without -fPIC) objs..
+ emake -j1 -C gcc gnatlib-shared LIBRARY_VERSION="${GCCBRANCH}" || \
+ die "gnatlib-shared failed"
+ ;;
+
+ esac
+ shift
+ done # while
+ fi # "all" == "$1"
+}
+# -- end gnatbuild_src_compile
+
+
+# @FUNCTION: gnatbuild_src_install
+# @USAGE: gnatbuild_src_install [install|move_libs|cleanup|prep_env|all]...
+# @DESCRIPTION:
+# Install-phase dispatcher.  Each argument names a sub-step, executed in
+# order; calling with no argument is equivalent to "all", which runs
+# install -> move_libs -> cleanup -> prep_env.
+gnatbuild_src_install() {
+	debug-print-function ${FUNCNAME} $@
+
+	# No explicit step requested: run the whole sequence.
+	if [[ -z "$1" ]] ; then
+		gnatbuild_src_install all
+		return $?
+	fi
+
+	while [ "$1" ]; do
+		case $1 in
+			install) # runs provided make install
+				debug-print-section install
+
+				# Looks like we need an access to the bootstrap compiler here too
+				# as gnat apparently wants to compile something during the installation
+				# The spotted offender was xgnatugn, used to process gnat_ugn_urw.texi,
+				# during preparation of the docs.
+				export PATH="${GNATBOOT}/bin:${PATH}"
+				# NOTE(review): [[ > ]] compares lexicographically; adequate for
+				# the 3.x/4.x slot strings in use but not a numeric comparison.
+				if [[ ${BOOT_SLOT} > 4.1 ]] ; then
+					GNATLIB="${GNATBOOT}/lib"
+				else
+					GNATLIB="${GNATBOOT}/lib/gnatgcc/${BOOT_TARGET}/${BOOT_SLOT}"
+				fi
+
+				# Point the environment at the bootstrap compiler and its
+				# headers/libraries for anything compiled during install.
+				export CC="${GNATBOOT}/bin/gnatgcc"
+				export INCLUDE_DIR="${GNATLIB}/include"
+				export C_INCLUDE_PATH="${GNATLIB}/include"
+				export CPLUS_INCLUDE_PATH="${GNATLIB}/include"
+				export LIB_DIR="${GNATLIB}"
+				export LDFLAGS="-L${GNATLIB}"
+				export ADA_OBJECTS_PATH="${GNATLIB}/adalib"
+				export ADA_INCLUDE_PATH="${GNATLIB}/adainclude"
+
+				# Do not allow symlinks in /usr/lib/gcc/${CHOST}/${MY_PV}/include as
+				# this can break the build.
+				for x in "${GNATBUILD}"/gcc/include/* ; do
+					if [ -L ${x} ] ; then
+						rm -f ${x}
+					fi
+				done
+				# Remove generated headers, as they can cause things to break
+				# (ncurses, openssl, etc). (from toolchain.eclass)
+				for x in $(find "${WORKDIR}"/build/gcc/include/ -name '*.h') ; do
+					grep -q 'It has been auto-edited by fixincludes from' "${x}" \
+						&& rm -f "${x}"
+				done
+
+
+				cd "${GNATBUILD}"
+				make DESTDIR="${D}" install || die
+
+				if use doc ; then
+					# bc prints 1 when the version comparison holds
+					if (( $(bc <<< "${GNATBRANCH} > 4.3") )) ; then
+						#make a convenience info link
+						elog "Yay! Math is good."
+						dosym gnat_ugn.info ${DATAPATH}/info/gnat.info
+					fi
+				fi
+				;;
+
+			move_libs)
+				debug-print-section move_libs
+
+				# first we need to remove some stuff to make moving easier
+				rm -rf "${D}${LIBPATH}"/{32,include,libiberty.a}
+				# gcc insists on installing libs in its own place
+				mv "${D}${LIBPATH}/gcc/${CTARGET}/${GCCRELEASE}"/* "${D}${LIBPATH}"
+				mv "${D}${LIBEXECPATH}/gcc/${CTARGET}/${GCCRELEASE}"/* "${D}${LIBEXECPATH}"
+
+				# libgcc_s and, with gcc>=4.0, other libs get installed in multilib specific locations by gcc
+				# we pull everything together to simplify working environment
+				if has_multilib_profile ; then
+					case $(tc-arch) in
+						amd64)
+							mv "${D}${LIBPATH}"/../$(get_abi_LIBDIR amd64)/* "${D}${LIBPATH}"
+							mv "${D}${LIBPATH}"/../$(get_abi_LIBDIR x86)/* "${D}${LIBPATH}"/32
+							;;
+						ppc64)
+							# not supported yet, will have to be adjusted when we
+							# actually build gnat for that arch
+							;;
+					esac
+				fi
+
+				# force gnatgcc to use its own specs - versions prior to 3.4.6 read specs
+				# from system gcc location. Do the simple wrapper trick for now
+				# !ATTN! change this if eselect-gnat starts to follow eselect-compiler
+				if [[ ${GCCVER} < 3.4.6 ]] ; then
+					# gcc 4.1 uses builtin specs. What about 4.0?
+					cd "${D}${BINPATH}"
+					mv gnatgcc gnatgcc_2wrap
+					cat > gnatgcc << EOF
+#! /bin/bash
+# wrapper to cause gnatgcc read appropriate specs and search for the right .h
+# files (in case no matching gcc is installed)
+BINDIR=\$(dirname \$0)
+# The paths in the next line have to be absolute, as gnatgcc may be called from
+# any location
+\${BINDIR}/gnatgcc_2wrap -specs="${LIBPATH}/specs" -I"${LIBPATH}/include" \$@
+EOF
+					chmod a+x gnatgcc
+				fi
+
+				# earlier gnat's generate some Makefile's at generic location, need to
+				# move to avoid collisions
+				[ -f "${D}${PREFIX}"/share/gnat/Makefile.generic ] &&
+					mv "${D}${PREFIX}"/share/gnat/Makefile.* "${D}${DATAPATH}"
+
+				# use gid of 0 because some stupid ports don't have
+				# the group 'root' set to gid 0 (toolchain.eclass)
+				chown -R root:0 "${D}${LIBPATH}"
+				;;
+
+			cleanup)
+				debug-print-section cleanup
+
+				rm -rf "${D}${LIBPATH}"/{gcc,install-tools,../lib{32,64}}
+				rm -rf "${D}${LIBEXECPATH}"/{gcc,install-tools}
+
+				# this one is installed by gcc and is a duplicate even here anyway
+				rm -f "${D}${BINPATH}/${CTARGET}-gcc-${GCCRELEASE}"
+
+				# remove duplicate docs
+				rm -f "${D}${DATAPATH}"/info/{dir,gcc,cpp}*
+				rm -rf "${D}${DATAPATH}"/man/man7/
+
+				# fix .la path for lto plugin
+				if use lto ; then
+					sed -i -e \
+						"/libdir=/c\libdir='${LIBEXECPATH}'" \
+						"${D}${LIBEXECPATH}"/liblto_plugin.la \
+						|| die "sed update of .la file failed!"
+				fi
+
+				# add config directory (bug 440660)
+				keepdir /etc/ada
+				;;
+
+			prep_env)
+				# instead of putting junk under /etc/env.d/gnat we recreate env files as
+				# needed with eselect
+				create_eselect_conf
+				;;
+
+			all)
+				gnatbuild_src_install install move_libs cleanup prep_env
+				;;
+		esac
+		shift
+	done # while
+}
+# -- end gnatbuild_src_install
diff --git a/eclass/gnome-games.eclass b/eclass/gnome-games.eclass
new file mode 100644
index 000000000000..e7f7ab14c921
--- /dev/null
+++ b/eclass/gnome-games.eclass
@@ -0,0 +1,116 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnome-games
+# @MAINTAINER:
+# Gnome team <gnome@gentoo.org>
+# @AUTHOR:
+# Author: Pacho Ramos <pacho@gentoo.org>
+# @BLURB: An eclass to build gnome-games.
+# @DESCRIPTION:
+# An eclass to build gnome-games using proper phases from gnome2 and
+# games eclasses.
+
+case "${EAPI:-0}" in
+ 0|1)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 2|3|4|5)
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+inherit autotools games gnome2
+
+EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
+
+if [[ ! ${_GNOME_GAMES} ]]; then
+
+DEPEND=">=dev-util/intltool-0.50.2-r1"
+RDEPEND="!gnome-extra/gnome-games"
+
+# @FUNCTION: gnome-games_pkg_setup
+# @DESCRIPTION:
+# pkg_setup: delegates to games_pkg_setup from games.eclass.
+gnome-games_pkg_setup() {
+	debug-print-function ${FUNCNAME} "${@}"
+	games_pkg_setup
+}
+
+# @FUNCTION: gnome-games_src_prepare
+# @DESCRIPTION:
+# src_prepare: re-run intltoolize and autoreconf before the usual
+# gnome2 preparation.
+gnome-games_src_prepare() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Fix intltoolize broken file:
+	# https://bugs.launchpad.net/intltool/+bug/398571
+	intltoolize --force --copy --automake || die "intltoolize failed"
+	eautoreconf
+
+	gnome2_src_prepare
+}
+
+# @FUNCTION: gnome-games_src_configure
+# @DESCRIPTION:
+# src_configure: run gnome2_src_configure with the games-specific
+# prefix/libdir/sysconfdir overrides from games.eclass; any extra
+# arguments from the ebuild are passed through.
+gnome-games_src_configure() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_src_configure \
+		--prefix="${GAMES_PREFIX}" \
+		--libdir="$(games_get_libdir)" \
+		--sysconfdir="${GAMES_SYSCONFDIR}" \
+		--localstatedir=/var \
+		--localedir=/usr/share/locale \
+		"$@"
+}
+
+# @FUNCTION: gnome-games_src_compile
+# @DESCRIPTION:
+# src_compile: plain delegation to gnome2_src_compile.
+gnome-games_src_compile() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_src_compile
+}
+
+# @FUNCTION: gnome-games_src_install
+# @DESCRIPTION:
+# src_install: gnome2 install followed by prepgamesdirs to fix
+# ownership/permissions of the games directories.
+gnome-games_src_install() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_src_install
+	prepgamesdirs
+}
+
+# @FUNCTION: gnome-games_pkg_preinst
+# @DESCRIPTION:
+# pkg_preinst: run the gnome2 hook first, then the games one.
+gnome-games_pkg_preinst() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_pkg_preinst
+	games_pkg_preinst
+}
+
+# @FUNCTION: gnome-games_pkg_postinst
+# @DESCRIPTION:
+# pkg_postinst: run the gnome2 hook first, then the games one.
+# (Header previously mislabeled this function as pkg_preinst.)
+gnome-games_pkg_postinst() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_pkg_postinst
+	games_pkg_postinst
+}
+
+# @FUNCTION: gnome-games_pkg_postrm
+# @DESCRIPTION:
+# pkg_postrm: plain delegation to gnome2_pkg_postrm.
+gnome-games_pkg_postrm() {
+	debug-print-function ${FUNCNAME} "${@}"
+	gnome2_pkg_postrm
+}
+
+_GNOME_GAMES=1
+fi
diff --git a/eclass/gnome-python-common-r1.eclass b/eclass/gnome-python-common-r1.eclass
new file mode 100644
index 000000000000..b80c17a481b7
--- /dev/null
+++ b/eclass/gnome-python-common-r1.eclass
@@ -0,0 +1,106 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnome-python-common-r1
+# @MAINTAINER:
+# GNOME team <gnome@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on the work of: Arun raghaven <ford_prefect@gentoo.org>
+# which in turn was based on the work of Jim Ramsay <lack@gentoo.org>
+# @BLURB: Common functionality for building gnome-python* bindings
+# @DESCRIPTION:
+# This eclass provides python-r1 support for the GNOME2 library Python
+# bindings.
+
+: ${GNOME_ORG_MODULE:=gnome-python}
+: ${GNOME_TARBALL_SUFFIX:=bz2}
+: ${GNOME2_LA_PUNT:=yes}
+: ${GCONF_DEBUG:=no}
+
+# @ECLASS-VARIABLE: G_PY_BINDINGS
+# @DESCRIPTION:
+# The actual '--enable-<binding>' name. If multiple bindings are to
+# be enabled, must be an array.
+: ${G_PY_BINDINGS:=${PN%-python}}
+
+# @ECLASS-VARIABLE: EXAMPLES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The set of example files to be installed if the 'examples' USE flag
+# is set.
+
+case "${EAPI:-0}" in
+ 0|1|2|3|4)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 5)
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+inherit eutils gnome2 python-r1
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
+
+HOMEPAGE="http://pygtk.org/"
+
+RESTRICT="${RESTRICT} test"
+
+DOCS="AUTHORS ChangeLog NEWS README"
+
+if [[ ${GNOME_ORG_MODULE} != "gnome-python" ]]; then
+ DOCS="${DOCS} MAINTAINERS"
+fi
+
+RDEPEND="~dev-python/${GNOME_ORG_MODULE}-base-${PV}
+ ${PYTHON_DEPS}"
+DEPEND="${RDEPEND}
+ virtual/pkgconfig"
+
+REQUIRED_USE=${PYTHON_REQUIRED_USE}
+
+# src_prepare: run gnome2 preparation, then strip the pkg-config
+# install rules from Makefile.in since the .pc file is shipped by the
+# matching gnome-python*-base package.
+gnome-python-common-r1_src_prepare() {
+	gnome2_src_prepare
+
+	# The .pc file is installed by respective gnome-python*-base package
+	sed -i '/^pkgconfig_DATA/d' Makefile.in || die "sed failed"
+	sed -i '/^pkgconfigdir/d' Makefile.in || die "sed failed"
+#
+# python_copy_sources
+}
+
+# src_configure: disable all bindings, then enable only those listed in
+# G_PY_BINDINGS, configuring once per enabled Python implementation
+# (out-of-source builds via ECONF_SOURCE).
+gnome-python-common-r1_src_configure() {
+	local myconf=(
+		--disable-allbindings
+		"${G_PY_BINDINGS[@]/#/--enable-}"
+	)
+
+	ECONF_SOURCE=${S} \
+	python_parallel_foreach_impl \
+		gnome2_src_configure "${myconf[@]}" "${@}"
+}
+
+# src_compile: run the default compile routine for every Python implementation.
+gnome-python-common-r1_src_compile() {
+	python_foreach_impl default
+}
+
+# src_test: run the default test routine for every Python implementation
+# (note RESTRICT adds "test" above, so this rarely runs in practice).
+gnome-python-common-r1_src_test() {
+	python_foreach_impl default
+}
+
+# Do a regular gnome2 src_install and then install examples if required.
+# Set the variable EXAMPLES to provide the set of examples to be installed.
+# (to install a directory recursively, specify it with a trailing '/' - for
+# example, foo/bar/)
+gnome-python-common-r1_src_install() {
+	python_foreach_impl gnome2_src_install
+
+	# Only install examples when the ebuild declares the flag AND it is set.
+	if in_iuse examples && use examples; then
+		docinto examples
+		dodoc -r "${EXAMPLES[@]}"
+	fi
+}
diff --git a/eclass/gnome.org.eclass b/eclass/gnome.org.eclass
new file mode 100644
index 000000000000..eb84e1b0a073
--- /dev/null
+++ b/eclass/gnome.org.eclass
@@ -0,0 +1,49 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnome.org.eclass
+# @MAINTAINER:
+# gnome@gentoo.org
+# @AUTHOR:
+# Authors: Spidler <spidler@gentoo.org> with help of carparski.
+# eclass variable additions and documentation: Gilles Dartiguelongue <eva@gentoo.org>
+# @BLURB: Helper eclass for gnome.org hosted archives
+# @DESCRIPTION:
+# Provide a default SRC_URI for tarball hosted on gnome.org mirrors.
+
+inherit versionator
+
+# @ECLASS-VARIABLE: GNOME_TARBALL_SUFFIX
+# @DESCRIPTION:
+# Most projects hosted on gnome.org mirrors provide tarballs as tar.bz2 or
+# tar.xz. This eclass defaults to bz2 for EAPI 0, 1, 2, 3 and defaults to xz for
+# everything else. This is because the gnome mirrors are moving to only have xz
+# tarballs for new releases.
+if has "${EAPI:-0}" 0 1 2 3; then
+ : ${GNOME_TARBALL_SUFFIX:="bz2"}
+else
+ : ${GNOME_TARBALL_SUFFIX:="xz"}
+fi
+
+# Even though xz-utils are in @system, they must still be added to DEPEND; see
+# http://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
+if [[ ${GNOME_TARBALL_SUFFIX} == "xz" ]]; then
+ DEPEND="${DEPEND} app-arch/xz-utils"
+fi
+
+# @ECLASS-VARIABLE: GNOME_ORG_MODULE
+# @DESCRIPTION:
+# Name of the module as hosted on gnome.org mirrors.
+# Leave unset if package name matches module name.
+: ${GNOME_ORG_MODULE:=$PN}
+
+# @ECLASS-VARIABLE: GNOME_ORG_PVP
+# @INTERNAL
+# @DESCRIPTION:
+# Major and minor numbers of the version number.
+: ${GNOME_ORG_PVP:=$(get_version_component_range 1-2)}
+
+SRC_URI="mirror://gnome/sources/${GNOME_ORG_MODULE}/${GNOME_ORG_PVP}/${GNOME_ORG_MODULE}-${PV}.tar.${GNOME_TARBALL_SUFFIX}"
+
+S="${WORKDIR}/${GNOME_ORG_MODULE}-${PV}"
diff --git a/eclass/gnome2-utils.eclass b/eclass/gnome2-utils.eclass
new file mode 100644
index 000000000000..7d3fc386ce0c
--- /dev/null
+++ b/eclass/gnome2-utils.eclass
@@ -0,0 +1,502 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnome2-utils.eclass
+# @MAINTAINER:
+# gnome@gentoo.org
+# @BLURB: Auxiliary functions commonly used by Gnome packages.
+# @DESCRIPTION:
+# This eclass provides a set of auxiliary functions needed by most Gnome
+# packages. It may be used by non-Gnome packages as needed for handling various
+# Gnome stack related functions such as:
+# * Gtk+ icon cache management
+# * GSettings schemas management
+# * GConf schemas management
+# * scrollkeeper (old Gnome help system) management
+
+inherit multilib
+
+case "${EAPI:-0}" in
+ 0|1|2|3|4|5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+# @ECLASS-VARIABLE: GCONFTOOL_BIN
+# @INTERNAL
+# @DESCRIPTION:
+# Path to gconftool-2
+: ${GCONFTOOL_BIN:="/usr/bin/gconftool-2"}
+
+# @ECLASS-VARIABLE: SCROLLKEEPER_DIR
+# @INTERNAL
+# @DESCRIPTION:
+# Directory where scrollkeeper-update should do its work
+: ${SCROLLKEEPER_DIR:="/var/lib/scrollkeeper"}
+
+# @ECLASS-VARIABLE: SCROLLKEEPER_UPDATE_BIN
+# @INTERNAL
+# @DESCRIPTION:
+# Path to scrollkeeper-update
+: ${SCROLLKEEPER_UPDATE_BIN:="/usr/bin/scrollkeeper-update"}
+
+# @ECLASS-VARIABLE: GTK_UPDATE_ICON_CACHE
+# @INTERNAL
+# @DESCRIPTION:
+# Path to gtk-update-icon-cache
+: ${GTK_UPDATE_ICON_CACHE:="/usr/bin/gtk-update-icon-cache"}
+
+# @ECLASS-VARIABLE: GLIB_COMPILE_SCHEMAS
+# @INTERNAL
+# @DESCRIPTION:
+# Path to glib-compile-schemas
+: ${GLIB_COMPILE_SCHEMAS:="/usr/bin/glib-compile-schemas"}
+
+# @ECLASS-VARIABLE: GNOME2_ECLASS_SCHEMAS
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of GConf schemas provided by the package
+
+# @ECLASS-VARIABLE: GNOME2_ECLASS_ICONS
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of icons provided by the package
+
+# @ECLASS-VARIABLE: GNOME2_ECLASS_SCROLLS
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of scrolls (documentation files) provided by the package
+
+# @ECLASS-VARIABLE: GNOME2_ECLASS_GLIB_SCHEMAS
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of GSettings schemas provided by the package
+
+# @ECLASS-VARIABLE: GNOME2_ECLASS_GDK_PIXBUF_LOADERS
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of gdk-pixbuf loaders provided by the package
+
+DEPEND=">=sys-apps/sed-4"
+
+
+# @FUNCTION: gnome2_environment_reset
+# @DESCRIPTION:
+# Reset various variables inherited from root's environment to a reasonable
+# default for ebuilds to help avoid access violations and test failures.
+gnome2_environment_reset() {
+	# Respected by >=glib-2.30.1-r1
+	export G_HOME="${T}"
+
+	# GST_REGISTRY is to work around gst utilities trying to read/write /root
+	export GST_REGISTRY="${T}/registry.xml"
+
+	# XXX: code for resetting XDG_* directories should probably be moved into
+	# a separate function in a non-gnome eclass
+	export XDG_DATA_HOME="${T}/.local/share"
+	export XDG_CONFIG_HOME="${T}/.config"
+	export XDG_CACHE_HOME="${T}/.cache"
+	export XDG_RUNTIME_DIR="${T}/run"
+	mkdir -p "${XDG_DATA_HOME}" "${XDG_CONFIG_HOME}" "${XDG_CACHE_HOME}" \
+		"${XDG_RUNTIME_DIR}"
+	# This directory needs to be owned by the user, and chmod 0700
+	# http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+	chmod 0700 "${XDG_RUNTIME_DIR}"
+}
+
+# @FUNCTION: gnome2_gconf_savelist
+# @DESCRIPTION:
+# Find the GConf schemas that are about to be installed and save their location
+# in the GNOME2_ECLASS_SCHEMAS environment variable.
+# This function should be called from pkg_preinst.
+gnome2_gconf_savelist() {
+	# EAPI < 3 has no ED; fall back to D for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	pushd "${ED}" &> /dev/null
+	# Relative paths (no leading /) so they can be re-anchored at EROOT later.
+	export GNOME2_ECLASS_SCHEMAS=$(find 'etc/gconf/schemas/' -name '*.schemas' 2> /dev/null)
+	popd &> /dev/null
+}
+
+# @FUNCTION: gnome2_gconf_install
+# @DESCRIPTION:
+# Applies any schema files installed by the current ebuild to Gconf's database
+# using gconftool-2.
+# This function should be called from pkg_postinst.
+gnome2_gconf_install() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local updater="${EROOT}${GCONFTOOL_BIN}"
+
+	if [[ ! -x "${updater}" ]]; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z "${GNOME2_ECLASS_SCHEMAS}" ]]; then
+		debug-print "No GNOME 2 GConf schemas found"
+		return
+	fi
+
+	# We are ready to install the GCONF Scheme now
+	unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
+	# Rewrite the default source path so it points inside ${ROOT}.
+	export GCONF_CONFIG_SOURCE="$("${updater}" --get-default-source | sed "s;:/;:${ROOT};")"
+
+	einfo "Installing GNOME 2 GConf schemas"
+
+	local F
+	# GNOME2_ECLASS_SCHEMAS holds whitespace-separated relative paths saved
+	# by gnome2_gconf_savelist; word-splitting here is intentional.
+	for F in ${GNOME2_ECLASS_SCHEMAS}; do
+		if [[ -e "${EROOT}${F}" ]]; then
+			debug-print "Installing schema: ${F}"
+			"${updater}" --makefile-install-rule "${EROOT}${F}" 1>/dev/null
+		fi
+	done
+
+	# have gconf reload the new schemas
+	pids=$(pgrep -x gconfd-2)
+	if [[ $? == 0 ]] ; then
+		ebegin "Reloading GConf schemas"
+		kill -HUP ${pids}
+		eend $?
+	fi
+}
+
+# @FUNCTION: gnome2_gconf_uninstall
+# @DESCRIPTION:
+# Removes schema files previously installed by the current ebuild from Gconf's
+# database.  Mirror image of gnome2_gconf_install.
+gnome2_gconf_uninstall() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local updater="${EROOT}${GCONFTOOL_BIN}"
+
+	if [[ ! -x "${updater}" ]]; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z "${GNOME2_ECLASS_SCHEMAS}" ]]; then
+		debug-print "No GNOME 2 GConf schemas found"
+		return
+	fi
+
+	unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
+	# Rewrite the default source path so it points inside ${ROOT}.
+	export GCONF_CONFIG_SOURCE="$("${updater}" --get-default-source | sed "s;:/;:${ROOT};")"
+
+	einfo "Uninstalling GNOME 2 GConf schemas"
+
+	local F
+	# Intentional word-splitting of the saved schema list.
+	for F in ${GNOME2_ECLASS_SCHEMAS}; do
+		if [[ -e "${EROOT}${F}" ]]; then
+			debug-print "Uninstalling gconf schema: ${F}"
+			"${updater}" --makefile-uninstall-rule "${EROOT}${F}" 1>/dev/null
+		fi
+	done
+
+	# have gconf reload the new schemas
+	pids=$(pgrep -x gconfd-2)
+	if [[ $? == 0 ]] ; then
+		ebegin "Reloading GConf schemas"
+		kill -HUP ${pids}
+		eend $?
+	fi
+}
+
+# @FUNCTION: gnome2_icon_savelist
+# @DESCRIPTION:
+# Find the icons that are about to be installed and save their location
+# in the GNOME2_ECLASS_ICONS environment variable.
+# This function should be called from pkg_preinst.
+gnome2_icon_savelist() {
+	# EAPI < 3 has no ED; fall back to D for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	pushd "${ED}" &> /dev/null
+	# Only first-level theme directories under usr/share/icons, as
+	# relative paths for later re-anchoring at EROOT.
+	export GNOME2_ECLASS_ICONS=$(find 'usr/share/icons' -maxdepth 1 -mindepth 1 -type d 2> /dev/null)
+	popd &> /dev/null
+}
+
+# @FUNCTION: gnome2_icon_cache_update
+# @DESCRIPTION:
+# Updates Gtk+ icon cache files under /usr/share/icons if the current ebuild
+# have installed anything under that location.
+# This function should be called from pkg_postinst and pkg_postrm.
+gnome2_icon_cache_update() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local updater="${EROOT}${GTK_UPDATE_ICON_CACHE}"
+
+	if [[ ! -x "${updater}" ]] ; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z "${GNOME2_ECLASS_ICONS}" ]]; then
+		debug-print "No icon cache to update"
+		return
+	fi
+
+	ebegin "Updating icons cache"
+
+	local retval=0
+	local fails=( )
+
+	# GNOME2_ECLASS_ICONS is a whitespace-separated list saved by
+	# gnome2_icon_savelist; word-splitting here is intentional.
+	for dir in ${GNOME2_ECLASS_ICONS}
+	do
+		if [[ -f "${EROOT}${dir}/index.theme" ]] ; then
+			local rv=0
+
+			"${updater}" -qf "${EROOT}${dir}"
+			rv=$?
+
+			if [[ ! $rv -eq 0 ]] ; then
+				debug-print "Updating cache failed on ${EROOT}${dir}"
+
+				# Add to the list of failures
+				fails[$(( ${#fails[@]} + 1 ))]="${EROOT}${dir}"
+
+				retval=2
+			fi
+		elif [[ $(ls "${EROOT}${dir}") = "icon-theme.cache" ]]; then
+			# Clear stale cache files after theme uninstallation
+			rm "${EROOT}${dir}/icon-theme.cache"
+		fi
+
+		if [[ -z $(ls "${EROOT}${dir}") ]]; then
+			# Clear empty theme directories after theme uninstallation
+			rmdir "${EROOT}${dir}"
+		fi
+	done
+
+	eend ${retval}
+
+	for f in "${fails[@]}" ; do
+		eerror "Failed to update cache with icon $f"
+	done
+}
+
+# @FUNCTION: gnome2_omf_fix
+# @USAGE: gnome2_omf_fix [extra makefiles...]
+# @DESCRIPTION:
+# Workaround applied to Makefile rules in order to remove redundant
+# calls to scrollkeeper-update and sandbox violations.
+# Any extra arguments are treated as additional makefiles to patch.
+# This function should be called from src_prepare.
+gnome2_omf_fix() {
+	local omf_makefiles filename
+
+	omf_makefiles="$@"
+
+	if [[ -f ${S}/omf.make ]] ; then
+		omf_makefiles="${omf_makefiles} ${S}/omf.make"
+	fi
+
+	if [[ -f ${S}/gnome-doc-utils.make ]] ; then
+		omf_makefiles="${omf_makefiles} ${S}/gnome-doc-utils.make"
+	fi
+
+	# testing fixing of all makefiles found
+	# The sort is important to ensure .am is listed before the respective .in for
+	# maintainer mode regeneration not kicking in due to .am being newer than .in
+	for filename in $(find "${S}" -name "Makefile.in" -o -name "Makefile.am" |sort) ; do
+		# FIX: append the discovered makefile itself; this previously read
+		# "$(unknown)", invoking a nonexistent command instead of collecting
+		# the find results, so no Makefile.in/.am was ever patched here.
+		omf_makefiles="${omf_makefiles} ${filename}"
+	done
+
+	ebegin "Fixing OMF Makefiles"
+
+	local retval=0
+	local fails=( )
+
+	# Intentional word-splitting of the accumulated path list.
+	for omf in ${omf_makefiles} ; do
+		# Neutralize scrollkeeper-update invocations (sandbox violation source).
+		sed -i -e 's:scrollkeeper-update:true:' "${omf}"
+		retval=$?
+
+		if [[ $retval -ne 0 ]] ; then
+			debug-print "updating of ${omf} failed"
+
+			# Add to the list of failures
+			fails[$(( ${#fails[@]} + 1 ))]=$omf
+
+			retval=2
+		fi
+	done
+
+	eend $retval
+
+	for f in "${fails[@]}" ; do
+		eerror "Failed to update OMF Makefile $f"
+	done
+}
+
+# @FUNCTION: gnome2_scrollkeeper_savelist
+# @DESCRIPTION:
+# Find the scrolls that are about to be installed and save their location
+# in the GNOME2_ECLASS_SCROLLS environment variable.
+# This function should be called from pkg_preinst.
+gnome2_scrollkeeper_savelist() {
+	# EAPI < 3 has no ED; fall back to D for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	pushd "${ED}" &> /dev/null
+	# Relative paths, re-anchored at EROOT by gnome2_scrollkeeper_update.
+	export GNOME2_ECLASS_SCROLLS=$(find 'usr/share/omf' -type f -name "*.omf" 2> /dev/null)
+	popd &> /dev/null
+}
+
+# @FUNCTION: gnome2_scrollkeeper_update
+# @DESCRIPTION:
+# Updates the global scrollkeeper database.
+# This function should be called from pkg_postinst and pkg_postrm.
+gnome2_scrollkeeper_update() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local updater="${EROOT}${SCROLLKEEPER_UPDATE_BIN}"
+
+	if [[ ! -x "${updater}" ]] ; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z "${GNOME2_ECLASS_SCROLLS}" ]]; then
+		debug-print "No scroll cache to update"
+		return
+	fi
+
+	# -q quiet, -p alternate database directory inside EROOT.
+	ebegin "Updating scrollkeeper database ..."
+	"${updater}" -q -p "${EROOT}${SCROLLKEEPER_DIR}"
+	eend $?
+}
+
+# @FUNCTION: gnome2_schemas_savelist
+# @DESCRIPTION:
+# Find if there is any GSettings schema to install and save the list in
+# GNOME2_ECLASS_GLIB_SCHEMAS variable.
+# This function should be called from pkg_preinst.
+gnome2_schemas_savelist() {
+	# EAPI < 3 has no ED; fall back to D for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	pushd "${ED}" &>/dev/null
+	export GNOME2_ECLASS_GLIB_SCHEMAS=$(find 'usr/share/glib-2.0/schemas' -name '*.gschema.xml' 2>/dev/null)
+	popd &>/dev/null
+}
+
+# @FUNCTION: gnome2_schemas_update
+# @USAGE: gnome2_schemas_update
+# @DESCRIPTION:
+# Updates GSettings schemas if GNOME2_ECLASS_GLIB_SCHEMAS has some.
+# Extra arguments are forwarded to glib-compile-schemas.
+# This function should be called from pkg_postinst and pkg_postrm.
+gnome2_schemas_update() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local updater="${EROOT}${GLIB_COMPILE_SCHEMAS}"
+
+	if [[ ! -x ${updater} ]]; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z ${GNOME2_ECLASS_GLIB_SCHEMAS} ]]; then
+		debug-print "No GSettings schemas to update"
+		return
+	fi
+
+	# ${EROOT%/} strips a trailing slash so the path is not doubled.
+	ebegin "Updating GSettings schemas"
+	${updater} --allow-any-name "$@" "${EROOT%/}/usr/share/glib-2.0/schemas" &>/dev/null
+	eend $?
+}
+
+# @FUNCTION: gnome2_gdk_pixbuf_savelist
+# @DESCRIPTION:
+# Find if there is any gdk-pixbuf loader to install and save the list in
+# GNOME2_ECLASS_GDK_PIXBUF_LOADERS variable.
+# This function should be called from pkg_preinst.
+gnome2_gdk_pixbuf_savelist() {
+	# EAPI < 3 has no ED; fall back to D for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	pushd "${ED}" 1>/dev/null
+	# usr/lib* glob covers both lib and lib64 layouts.
+	export GNOME2_ECLASS_GDK_PIXBUF_LOADERS=$(find usr/lib*/gdk-pixbuf-2.0 -type f 2>/dev/null)
+	popd 1>/dev/null
+}
+
+# @FUNCTION: gnome2_gdk_pixbuf_update
+# @USAGE: gnome2_gdk_pixbuf_update
+# @DESCRIPTION:
+# Updates gdk-pixbuf loader cache if GNOME2_ECLASS_GDK_PIXBUF_LOADERS has some.
+# This function should be called from pkg_postinst and pkg_postrm.
+gnome2_gdk_pixbuf_update() {
+	# EAPI < 3 has no EROOT; fall back to ROOT for non-prefix systems.
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	# Prefer the CHOST-prefixed query tool; fall back to the plain name.
+	local updater="${EROOT}/usr/bin/${CHOST}-gdk-pixbuf-query-loaders"
+
+	if [[ ! -x ${updater} ]]; then
+		updater="${EROOT}/usr/bin/gdk-pixbuf-query-loaders"
+	fi
+
+	if [[ ! -x ${updater} ]]; then
+		debug-print "${updater} is not executable"
+		return
+	fi
+
+	if [[ -z ${GNOME2_ECLASS_GDK_PIXBUF_LOADERS} ]]; then
+		debug-print "gdk-pixbuf loader cache does not need an update"
+		return
+	fi
+
+	# Generate the cache into a temp file, then copy it into place so the
+	# live cache is never seen half-written.
+	ebegin "Updating gdk-pixbuf loader cache"
+	local tmp_file=$(mktemp -t tmp.XXXXXXXXXX_gdkpixbuf)
+	${updater} 1> "${tmp_file}" &&
+	chmod 0644 "${tmp_file}" &&
+	cp -f "${tmp_file}" "${EROOT}usr/$(get_libdir)/gdk-pixbuf-2.0/2.10.0/loaders.cache" &&
+	rm "${tmp_file}" # don't replace this with mv, required for SELinux support
+	eend $?
+}
+
+# @FUNCTION: gnome2_query_immodules_gtk2
+# @USAGE: gnome2_query_immodules_gtk2
+# @DESCRIPTION:
+# Updates gtk2 immodules/gdk-pixbuf loaders listing.
+gnome2_query_immodules_gtk2() {
+	# Prefer the CHOST-prefixed tool; fall back to the plain name.
+	local updater=${EPREFIX}/usr/bin/${CHOST}-gtk-query-immodules-2.0
+	[[ ! -x ${updater} ]] && updater=${EPREFIX}/usr/bin/gtk-query-immodules-2.0
+
+	"${updater}" --update-cache
+}
+
+# @FUNCTION: gnome2_query_immodules_gtk3
+# @USAGE: gnome2_query_immodules_gtk3
+# @DESCRIPTION:
+# Updates gtk3 immodules/gdk-pixbuf loaders listing.
+gnome2_query_immodules_gtk3() {
+	# Prefer the CHOST-prefixed tool; fall back to the plain name.
+	local updater=${EPREFIX}/usr/bin/${CHOST}-gtk-query-immodules-3.0
+	[[ ! -x ${updater} ]] && updater=${EPREFIX}/usr/bin/gtk-query-immodules-3.0
+
+	"${updater}" --update-cache
+}
+
+# @FUNCTION: gnome2_disable_deprecation_warning
+# @DESCRIPTION:
+# Disable deprecation warnings commonly found in glib based packages.
+# Should be called from src_prepare.
+gnome2_disable_deprecation_warning() {
+	local retval=0
+	local fails=( )
+	local makefile
+
+	ebegin "Disabling deprecation warnings"
+	# The sort is important to ensure .am is listed before the respective .in for
+	# maintainer mode regeneration not kicking in due to .am being newer than .in
+	while read makefile ; do
+		# Only touch files that actually use the deprecation defines.
+		if ! grep -qE "(DISABLE_DEPRECATED|GSEAL_ENABLE)" "${makefile}"; then
+			continue
+		fi
+
+		# NOTE(review): '-i' appears twice (before -e and before the file);
+		# redundant but harmless with GNU sed.
+		LC_ALL=C sed -r -i \
+			-e 's:-D[A-Z_]+_DISABLE_DEPRECATED:$(NULL):g' \
+			-e 's:-DGSEAL_ENABLE+[A-Z_]:$(NULL):g' \
+			-i "${makefile}"
+
+		if [[ $? -ne 0 ]]; then
+			# Add to the list of failures
+			fails+=( "${makefile}" )
+			retval=2
+		fi
+	done < <(find "${S}" -name "Makefile.in" \
+		-o -name "Makefile.am" -o -name "Makefile.decl" \
+		| sort; echo configure)
+# TODO: sedding configure.ac can trigger maintainer mode; bug #439602
+#	-o -name "configure.ac" -o -name "configure.in" \
+#	| sort; echo configure)
+	eend ${retval}
+
+	for makefile in "${fails[@]}" ; do
+		ewarn "Failed to disable deprecation warnings in ${makefile}"
+	done
+}
diff --git a/eclass/gnome2.eclass b/eclass/gnome2.eclass
new file mode 100644
index 000000000000..d48b5a6f0bbd
--- /dev/null
+++ b/eclass/gnome2.eclass
@@ -0,0 +1,263 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnome2.eclass
+# @MAINTAINER:
+# gnome@gentoo.org
+# @BLURB: Provides phases for Gnome/Gtk+ based packages.
+# @DESCRIPTION:
+# Exports portage base functions used by ebuilds written for packages using the
+# GNOME framework. For additional functions, see gnome2-utils.eclass.
+
+inherit eutils fdo-mime libtool gnome.org gnome2-utils
+
# Only EAPI 4 and 5 are supported; export the full set of phase functions
# so inheriting ebuilds get GNOME-aware defaults for every phase.
case "${EAPI:-0}" in
	4|5)
		EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
		;;
	*) die "EAPI=${EAPI} is not supported" ;;
esac

# @ECLASS-VARIABLE: G2CONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Extra configure opts passed to econf
G2CONF=${G2CONF:-""}

# @ECLASS-VARIABLE: GNOME2_LA_PUNT
# @DESCRIPTION:
# Should we delete ALL the .la files?
# NOT to be used without due consideration.
# EAPI 4 defaults to "no" (keep .la files); later EAPIs default to empty,
# which gnome2_src_install maps to prune_libtool_files' default behavior.
if has ${EAPI:-0} 4; then
	GNOME2_LA_PUNT=${GNOME2_LA_PUNT:-"no"}
else
	GNOME2_LA_PUNT=${GNOME2_LA_PUNT:-""}
fi

# @ECLASS-VARIABLE: ELTCONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Extra options passed to elibtoolize
ELTCONF=${ELTCONF:-""}

# @ECLASS-VARIABLE: DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# String containing documents passed to dodoc command.

# @ECLASS-VARIABLE: GCONF_DEBUG
# @DEFAULT_UNSET
# @DESCRIPTION:
# Whether to handle debug or not.
# Some gnome applications support various levels of debugging (yes, no, minimum,
# etc), but using --disable-debug also removes g_assert which makes debugging
# harder. This variable should be set to yes for such packages for the eclass
# to handle it properly. It will enable minimal debug with USE=-debug.
# Note that this is most commonly found in configure.ac as GNOME_DEBUG_CHECK.


# Expose USE=debug unless the ebuild opted out with GCONF_DEBUG=no.
if [[ ${GCONF_DEBUG} != "no" ]]; then
	IUSE="debug"
fi
+
# @FUNCTION: gnome2_src_unpack
# @DESCRIPTION:
# Stub function for old EAPI: unpacks every distfile in ${A} and enters
# the source directory ${S}.
gnome2_src_unpack() {
	unpack ${A}
	cd "${S}"
}
+
# @FUNCTION: gnome2_src_prepare
# @DESCRIPTION:
# Prepare environment for build, fix build of scrollkeeper documentation,
# run elibtoolize.
gnome2_src_prepare() {
	# Prevent assorted access violations and test failures
	gnome2_environment_reset

	# Prevent scrollkeeper access violations
	gnome2_omf_fix

	# Disable all deprecation warnings
	gnome2_disable_deprecation_warning

	# Run libtoolize; ELTCONF is intentionally unquoted so an ebuild can
	# pass several options in one string.
	elibtoolize ${ELTCONF}
}
+
# @FUNCTION: gnome2_src_configure
# @DESCRIPTION:
# Gnome specific configure handling: probes the generated configure script
# for supported switches and prepends matching --disable/--enable options
# to G2CONF, then runs econf. Extra arguments are forwarded to econf.
gnome2_src_configure() {
	# Update the GNOME configuration options
	if [[ ${GCONF_DEBUG} != 'no' ]] ; then
		if use debug ; then
			G2CONF="--enable-debug=yes ${G2CONF}"
		fi
	fi

	# Starting with EAPI=5, we consider packages installing gtk-doc to be
	# handled by adding DEPEND="dev-util/gtk-doc-am" which provides tools to
	# relink URLs in documentation to already installed documentation.
	# This decision also greatly helps with constantly broken doc generation.
	# Remember to drop 'doc' USE flag from your package if it was only used to
	# rebuild docs.
	# Preserve old behavior for older EAPI.
	if grep -q "enable-gtk-doc" "${ECONF_SOURCE:-.}"/configure ; then
		if has ${EAPI:-0} 4 && in_iuse doc ; then
			G2CONF="$(use_enable doc gtk-doc) ${G2CONF}"
		else
			G2CONF="--disable-gtk-doc ${G2CONF}"
		fi
	fi

	# Pass --disable-maintainer-mode when needed
	if grep -q "^[[:space:]]*AM_MAINTAINER_MODE(\[enable\])" \
		"${ECONF_SOURCE:-.}"/configure.*; then
		G2CONF="--disable-maintainer-mode ${G2CONF}"
	fi

	# Pass --disable-scrollkeeper when possible
	if grep -q "disable-scrollkeeper" "${ECONF_SOURCE:-.}"/configure; then
		G2CONF="--disable-scrollkeeper ${G2CONF}"
	fi

	# Pass --disable-silent-rules when possible (not needed for eapi5), bug #429308
	if has ${EAPI:-0} 4; then
		if grep -q "disable-silent-rules" "${ECONF_SOURCE:-.}"/configure; then
			G2CONF="--disable-silent-rules ${G2CONF}"
		fi
	fi

	# Pass --disable-schemas-install when possible
	if grep -q "disable-schemas-install" "${ECONF_SOURCE:-.}"/configure; then
		G2CONF="--disable-schemas-install ${G2CONF}"
	fi

	# Pass --disable-schemas-compile when possible
	if grep -q "disable-schemas-compile" "${ECONF_SOURCE:-.}"/configure; then
		G2CONF="--disable-schemas-compile ${G2CONF}"
	fi

	# Pass --enable-compile-warnings=minimum as we don't want -Werror* flags, bug #471336
	if grep -q "enable-compile-warnings" "${ECONF_SOURCE:-.}"/configure; then
		G2CONF="--enable-compile-warnings=minimum ${G2CONF}"
	fi

	# Pass --docdir with proper directory, bug #482646
	if grep -q "^ *--docdir=" "${ECONF_SOURCE:-.}"/configure; then
		# Fixed quoting: the original placed ${EPREFIX} outside the
		# double quotes ("--docdir="${EPREFIX}"/..."), word-splitting
		# the option when the prefix contains whitespace.
		G2CONF="--docdir=${EPREFIX}/usr/share/doc/${PF} ${G2CONF}"
	fi

	# Avoid sandbox violations caused by gnome-vfs (bug #128289 and #345659)
	addwrite "$(unset HOME; echo ~)/.gnome2"

	econf ${G2CONF} "$@"
}
+
# @FUNCTION: gnome2_src_compile
# @DESCRIPTION:
# Only default src_compile for now: simply runs emake.
gnome2_src_compile() {
	emake
}
+
# @FUNCTION: gnome2_src_install
# @DESCRIPTION:
# Gnome specific install. Handles typical GConf and scrollkeeper setup
# in packages and removal of .la files if requested
gnome2_src_install() {
	# if this is not present, scrollkeeper-update may segfault and
	# create bogus directories in /var/lib/
	local sk_tmp_dir="/var/lib/scrollkeeper"
	dodir "${sk_tmp_dir}" || die "dodir failed"

	# we must delay gconf schema installation due to sandbox
	export GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL="1"

	debug-print "Installing with 'make install'"
	# NOTE(review): the trailing space inside the quoted
	# scrollkeeper_localstate_dir assignment becomes part of the value
	# make receives — looks unintentional, confirm before changing.
	emake DESTDIR="${D}" "scrollkeeper_localstate_dir=${ED}${sk_tmp_dir} " "$@" install || die "install failed"

	unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL

	# Handle documentation as 'default' for eapi5 and newer, bug #373131
	if has ${EAPI:-0} 4; then
		# Manual document installation; DOCS is a whitespace-separated
		# string here, hence the deliberately unquoted expansion.
		if [[ -n "${DOCS}" ]]; then
			dodoc ${DOCS} || die "dodoc failed"
		fi
	else
		einstalldocs
	fi

	# Do not keep /var/lib/scrollkeeper because:
	# 1. The scrollkeeper database is regenerated at pkg_postinst()
	# 2. ${ED}/var/lib/scrollkeeper contains only indexes for the current pkg
	# thus it makes no sense if pkg_postinst ISN'T run for some reason.
	rm -rf "${ED}${sk_tmp_dir}"
	rmdir "${ED}/var/lib" 2>/dev/null
	rmdir "${ED}/var" 2>/dev/null

	# Make sure this one doesn't get in the portage db
	rm -fr "${ED}/usr/share/applications/mimeinfo.cache"

	# Delete all .la files
	if has ${EAPI:-0} 4; then
		if [[ "${GNOME2_LA_PUNT}" != "no" ]]; then
			ebegin "Removing .la files"
			if ! use_if_iuse static-libs ; then
				find "${D}" -name '*.la' -exec rm -f {} + || die "la file removal failed"
			fi
			eend
		fi
	else
		# EAPI 5+: delegate .la policy to prune_libtool_files.
		case "${GNOME2_LA_PUNT}" in
			yes) prune_libtool_files --modules;;
			no) ;;
			*) prune_libtool_files;;
		esac
	fi
}
+
# @FUNCTION: gnome2_pkg_preinst
# @DESCRIPTION:
# Finds Icons, GConf and GSettings schemas for later handling in pkg_postinst
gnome2_pkg_preinst() {
	# Snapshot what the new image ships so pkg_postinst can refresh the
	# matching system caches/databases.
	gnome2_gconf_savelist
	gnome2_icon_savelist
	gnome2_schemas_savelist
	gnome2_scrollkeeper_savelist
	gnome2_gdk_pixbuf_savelist
}
+
# @FUNCTION: gnome2_pkg_postinst
# @DESCRIPTION:
# Handle scrollkeeper, GConf, GSettings, Icons, desktop and mime
# database updates.
gnome2_pkg_postinst() {
	# Apply the lists recorded by gnome2_pkg_preinst.
	gnome2_gconf_install
	fdo-mime_desktop_database_update
	fdo-mime_mime_database_update
	gnome2_icon_cache_update
	gnome2_schemas_update
	gnome2_scrollkeeper_update
	gnome2_gdk_pixbuf_update
}
+
+# # FIXME Handle GConf schemas removal
+#gnome2_pkg_prerm() {
+# gnome2_gconf_uninstall
+#}
+
# @FUNCTION: gnome2_pkg_postrm
# @DESCRIPTION:
# Handle scrollkeeper, GSettings, Icons, desktop and mime database updates.
gnome2_pkg_postrm() {
	# Unlike pkg_postinst, no GConf install or gdk-pixbuf update here.
	fdo-mime_desktop_database_update
	fdo-mime_mime_database_update
	gnome2_icon_cache_update
	gnome2_schemas_update
	gnome2_scrollkeeper_update
}
diff --git a/eclass/gnuconfig.eclass b/eclass/gnuconfig.eclass
new file mode 100644
index 000000000000..a0d6e1743b56
--- /dev/null
+++ b/eclass/gnuconfig.eclass
@@ -0,0 +1,99 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# THIS ECLASS IS DEAD: It has been integrated into portage
+#
+# Author: Will Woods <wwoods@gentoo.org>
+#
+# This eclass is used to automatically update files that typically come with
+# automake to the newest version available on the system. The most common use
+# of this is to update config.guess and config.sub when configure dies from
+# misguessing your canonical system name (CHOST). It can also be used to update
+# other files that come with automake, e.g. depcomp, mkinstalldirs, etc.
+#
+# usage: gnuconfig_update [file1 file2 ...]
+# if called without arguments, config.guess and config.sub will be updated.
+# All files in the source tree ($S) with the given name(s) will be replaced
+# with the newest available versions chosen from the list of locations in
+# gnuconfig_findnewest(), below.
+#
+# gnuconfig_update should generally be called from src_unpack()
+
+
+DEPEND="sys-devel/gnuconfig"
+
# Entry point: delegates to gnuconfig_do_update. With no file arguments the
# historical default (config.sub and config.guess) is refreshed. An absolute
# path given as the first argument overrides the search root (default ${S}).
gnuconfig_update() {
	# QA warning kept from the original, still disabled — some packages
	# (binutils, gcc, glibc) continue to call this eclass:
	#	echo
	#	ewarn "QA Notice: Please stop using me, portage updates files for you."
	#	echo

	# Dynamically scoped: read by gnuconfig_do_update.
	local startdir=${S}

	# A leading absolute path selects an alternative search root.
	if [[ $1 == /* ]] ; then
		startdir=$1
		shift
	fi

	if [[ $# -gt 0 ]] ; then
		gnuconfig_do_update "$@"
	else
		gnuconfig_do_update config.sub config.guess
	fi
}
+
+# Copy the newest available version of specified files over any old ones in the
+# source dir. This function shouldn't be called directly - use gnuconfig_update
+#
# Note that since bash uses dynamic scoping, startdir is available here from
+# the gnuconfig_update function
gnuconfig_do_update() {
	local configsubs_dir target targetlist file

	# Guard against direct invocation without file arguments.
	[[ $# -eq 0 ]] && die "do not call gnuconfig_do_update; use gnuconfig_update"

	configsubs_dir=$(gnuconfig_findnewest)
	einfo "Using GNU config files from ${configsubs_dir}"
	for file in "$@" ; do
		if [[ ! -r ${configsubs_dir}/${file} ]] ; then
			eerror "Can't read ${configsubs_dir}/${file}, skipping.."
			continue
		fi
		# NOTE(review): iterating over $(find ...) word-splits the
		# result; assumes no whitespace in paths below ${startdir} —
		# confirm.
		targetlist=$(find "${startdir}" -name "${file}")
		if [[ -n ${targetlist} ]] ; then
			for target in ${targetlist} ; do
				# Replace symlinks with real files before copying.
				[[ -L ${target} ]] && rm -f "${target}"
				einfo " Updating ${target/$startdir\//}"
				cp -f "${configsubs_dir}/${file}" "${target}"
				eend $?
			done
		else
			ewarn " No ${file} found in ${startdir}, skipping ..."
		fi
	done

	return 0
}
+
# Locate the directory holding the most recent config.sub/config.guess pair
# by comparing the 'timestamp' lines of every known config.sub copy; print
# that directory on stdout.
gnuconfig_findnewest() {
	local candidates=(
		"${EPREFIX}"/usr/share/misc/config.sub
		"${EPREFIX}"/usr/share/gnuconfig/config.sub
		"${EPREFIX}"/usr/share/automake*/config.sub
		"${EPREFIX}"/usr/share/libtool/config.sub
	)
	# grep -s: stay quiet about missing files; sort newest-first on the
	# quoted timestamp field; keep only the directory of the first hit.
	grep -s '^timestamp' "${candidates[@]}" \
		| sort -r -n -t\' -k2 \
		| sed -n '1{s,/config.sub:.*$,,;p;q}'
}
diff --git a/eclass/gnustep-2.eclass b/eclass/gnustep-2.eclass
new file mode 100644
index 000000000000..95a13095b08e
--- /dev/null
+++ b/eclass/gnustep-2.eclass
@@ -0,0 +1,27 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnustep-2.eclass
+# @MAINTAINER:
+# GNUstep Herd <gnustep@gentoo.org>
+# @BLURB: eclass for GNUstep Apps, Frameworks, and Bundles build
+# @DESCRIPTION:
+# This eclass sets up GNUstep environment to properly install
+# GNUstep packages
+
+inherit gnustep-base
+
+DEPEND=">=gnustep-base/gnustep-make-2.0
+ virtual/gnustep-back"
+RDEPEND="${DEPEND}
+ debug? ( !<sys-devel/gdb-6.0 )"
+
+# The following gnustep-based EXPORT_FUNCTIONS are available:
+# * gnustep-base_pkg_setup
+# * gnustep-base_src_unpack (EAPI 0|1 only)
+# * gnustep-base_src_prepare (EAPI>=2 only)
+# * gnustep-base_src_configure (EAPI>=2 only)
+# * gnustep-base_src_compile
+# * gnustep-base_src_install
+# * gnustep-base_pkg_postinst
diff --git a/eclass/gnustep-base.eclass b/eclass/gnustep-base.eclass
new file mode 100644
index 000000000000..aea1f10d96e4
--- /dev/null
+++ b/eclass/gnustep-base.eclass
@@ -0,0 +1,268 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gnustep-base.eclass
+# @MAINTAINER:
+# GNUstep Herd <gnustep@gentoo.org>
# @BLURB: Internal handling of GNUstep packages
+# @DESCRIPTION:
+# Inner gnustep eclass, should only be inherited directly by gnustep-base
+# packages
+
+inherit eutils flag-o-matic
+
+# IUSE variables across all GNUstep packages
+# "debug": enable code for debugging
+# "doc": build and install documentation, if available
+IUSE="debug doc"
+
+# packages needed to build any base gnustep package
+GNUSTEP_CORE_DEPEND="doc? ( virtual/texi2dvi dev-tex/latex2html app-text/texi2html )"
+
+# New layout is used when ${EPREFIX}/usr/share/GNUstep/Makefiles exists
+# Where to install GNUstep (with old layout)
+GNUSTEP_PREFIX="${EPREFIX}/usr/GNUstep"
+
+# GNUstep environment array
+typeset -a GS_ENV
+
# Ebuild function overrides

# pkg_setup: sanitise compiler flags known to break Objective-C builds
# on old gcc versions.
gnustep-base_pkg_setup() {
	if test_version_info 3.3 ; then
		strip-unsupported-flags
	elif test_version_info 3.4 ; then
		# strict-aliasing is known to break obj-c stuff in gcc-3.4*
		filter-flags -fstrict-aliasing
	fi

	# known to break ObjC (bug 86089)
	filter-flags -fomit-frame-pointer
}
+
# src_unpack for EAPI 0/1: unpack, enter ${S}, then run the shared prepare
# step manually (newer EAPIs export src_prepare separately).
gnustep-base_src_unpack() {
	unpack ${A}
	cd "${S}"

	gnustep-base_src_prepare
}
+
# Strip hard-coded /usr include/library paths from a top-level GNUmakefile.
gnustep-base_src_prepare() {
	if [[ -f ./GNUmakefile ]] ; then
		# Kill stupid includes that are simply overdone or useless on normal
		# Gentoo, but (may) cause major headaches on Prefixed Gentoo. If this
		# only removes a part of a path it's good that it bails out, as we want
		# to know when they use some direct include.
		ebegin "Cleaning paths from GNUmakefile"
		sed -i \
			-e 's|-I/usr/X11R6/include/\?||g' \
			-e 's|-I/usr/include/\?||g' \
			-e 's|-L/usr/X11R6/lib/\?||g' \
			-e 's|-L/usr/lib/\?||g' \
			GNUmakefile
		eend $?
	fi
}
+
# Set up the GNUstep build environment, then run econf when the package
# actually ships an executable configure script.
gnustep-base_src_configure() {
	egnustep_env

	# Not every GNUstep package uses autoconf; skip silently otherwise.
	[[ -x ./configure ]] || return 0
	econf || die "configure failed"
}
+
gnustep-base_src_compile() {
	egnustep_env
	# EAPI 0/1 have no separate configure phase; run it from compile.
	case ${EAPI:-0} in
		0|1) gnustep-base_src_configure ;;
	esac

	egnustep_make
}
+
gnustep-base_src_install() {
	egnustep_env
	egnustep_install
	if use doc ; then
		# NOTE(review): egnustep_env is re-run before the doc build —
		# presumably to refresh GS_ENV; confirm it is required.
		egnustep_env
		egnustep_doc
	fi
	egnustep_install_config
}
+
# Tell the user how to run the per-package configuration helper, but only
# when the ebuild actually defines gnustep_config_script.
gnustep-base_pkg_postinst() {
	if [[ $(type -t gnustep_config_script) != "function" ]]; then
		return 0
	fi

	local script_dir=${GNUSTEP_SYSTEM_TOOLS}/Gentoo
	# The new filesystem layout installs the helper straight into /usr/bin.
	[[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]] && script_dir="/usr/bin"

	elog "To use this package, as *user* you should run:"
	elog " ${script_dir}/config-${PN}.sh"
}
+
# Clean/reset an ebuild to the installed GNUstep environment
egnustep_env() {
	# Deliberately not 'local': read by the sourced GNUstep.sh below.
	GNUSTEP_SH_EXPORT_ALL_VARIABLES="true"

	# Makefiles path: new layout lives under /usr/share, old layout under
	# ${GNUSTEP_PREFIX}.
	local GS_MAKEFILES
	if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
		GS_MAKEFILES=${EPREFIX}/usr/share/GNUstep/Makefiles
	else
		GS_MAKEFILES=${GNUSTEP_PREFIX}/System/Library/Makefiles
	fi
	if [[ -f ${GS_MAKEFILES}/GNUstep.sh ]] ; then
		# Reset GNUstep variables
		source "${GS_MAKEFILES}"/GNUstep-reset.sh
		source "${GS_MAKEFILES}"/GNUstep.sh

		# Create compilation GNUstep.conf if it does not exist yet
		if [[ ! -f ${WORKDIR}/GNUstep.conf ]]; then
			cp "${EPREFIX}"/etc/GNUstep/GNUstep.conf "${WORKDIR}" \
				|| die "GNUstep.conf copy failed"
			# Redirect the GNUSTEP_USER_* dirs into the sandboxed WORKDIR.
			sed -e "s#\(GNUSTEP_USER_.*DIR.*=\)#\1${WORKDIR}/#" \
				-i "${WORKDIR}"/GNUstep.conf || die "GNUstep.conf sed failed"
		fi


		if [[ ! -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
			# Set rpath in ldflags when available
			case ${CHOST} in
				*-linux-gnu|*-solaris*)
					is-ldflagq -Wl,-rpath="${GNUSTEP_SYSTEM_LIBRARIES}" \
						|| append-ldflags \
							-Wl,-rpath="${GNUSTEP_SYSTEM_LIBRARIES}"
					;;
			esac
		fi

		# Set up env vars for make operations; consumed by egnustep_make
		# and egnustep_install.
		GS_ENV=( AUXILIARY_LDFLAGS="${LDFLAGS}" \
			ADDITIONAL_NATIVE_LIB_DIRS="${GNUSTEP_SYSTEM_LIBRARIES}" \
			DESTDIR="${D}" \
			HOME="${T}" \
			GNUSTEP_CONFIG_FILE="${WORKDIR}"/GNUstep.conf \
			GNUSTEP_INSTALLATION_DOMAIN=SYSTEM \
			TAR_OPTIONS="${TAR_OPTIONS} --no-same-owner" \
			messages=yes )

		use doc \
			&& GS_ENV=( "${GS_ENV[@]}" VARTEXFONTS="${T}"/fonts )

		use debug \
			&& GS_ENV=( "${GS_ENV[@]}" "debug=yes" ) \
			|| GS_ENV=( "${GS_ENV[@]}" "debug=no" )

		# About 20 gnustep packages still use EAPI 0
		if built_with_use --missing false gnustep-base/gnustep-make libobjc2;
		then
			# Set clang for packages that do not respect gnustep-make
			# settings (gnustep-base's configure for example)
			export CC=clang CXX=clang CPP="clang -E" LD="clang"
		fi

		return 0
	fi
	die "gnustep-make not installed!"
}
+
# Make utilizing GNUstep Makefiles.
# Any arguments are forwarded verbatim to emake.
egnustep_make() {
	if [[ -f ./Makefile || -f ./makefile || -f ./GNUmakefile ]] ; then
		# Fixed: "$@" instead of unquoted ${*}, which word-split and
		# glob-expanded caller arguments containing whitespace.
		emake "$@" "${GS_ENV[@]}" all || die "package make failed"
		return 0
	fi
	die "no Makefile found"
}
+
# Make-install utilizing GNUstep Makefiles.
# Any arguments are forwarded verbatim to emake.
egnustep_install() {
	if [[ ! -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
		# avoid problems due to our "weird" prefix, make sure it exists
		mkdir -p "${D}"${GNUSTEP_SYSTEM_TOOLS}
	fi
	# Fixed: [[ -f ./[mM]akefile ]] performs no pathname expansion inside
	# [[ ]], so "Makefile"/"makefile" were never detected; test each name
	# explicitly (matching egnustep_make). Also "$@" instead of ${*}.
	if [[ -f ./Makefile || -f ./makefile || -f ./GNUmakefile ]] ; then
		emake "$@" "${GS_ENV[@]}" install || die "package install failed"
		return 0
	fi
	die "no Makefile found"
}
+
# Make and install docs using GNUstep Makefiles
egnustep_doc() {
	if [[ -d "${S}"/Documentation ]] ; then
		# Check documentation presence
		pushd "${S}"/Documentation || die
		# Fixed: [[ -f ./[mM]akefile ]] performs no pathname expansion
		# inside [[ ]], so "Makefile"/"makefile" were never detected;
		# test each name explicitly.
		if [[ -f ./Makefile || -f ./makefile || -f ./GNUmakefile ]] ; then
			emake "${GS_ENV[@]}" all || die "doc make failed"
			emake "${GS_ENV[@]}" install || die "doc install failed"
		fi
		popd || die
	fi
}
+
# Generate and install the per-package configuration helper
# (config-${PN}.sh), assembled from the ebuild's gnustep_config_script.
egnustep_install_config() {
	# No-op unless the ebuild defines gnustep_config_script.
	[[ $(type -t gnustep_config_script) != "function" ]] && return 0

	local cfile=config-${PN}.sh

	# Quoted 'EOF': the helper functions below are written literally and
	# only expanded when the user later runs the generated script.
	cat << 'EOF' > "${T}"/${cfile}
#!/usr/bin/env bash
gnustep_append_default() {
	if [[ -z $1 || -z $2 || -z $3 ]]; then
		echo "warning: invalid script invocation"
		return
	fi
	dom=$1
	key=$2
	val=$3
	cur=$(defaults read ${dom} ${key}) 2> /dev/null
	if [[ -z $cur ]] ; then
		echo " * setting ${dom} ${key}"
		defaults write ${dom} ${key} "( ${val} )"
	elif [[ ${cur} != *${val}* ]] ; then
		echo " * adding ${val} to ${dom} ${key}"
		echo "${cur%)\'}, \"${val}\" )'" | defaults write
	else
		echo " * ${val} already present in ${dom} ${key}"
	fi
}

gnustep_set_default() {
	if [[ -z $1 || -z $2 || -z $3 ]]; then
		echo "warning: invalid script invocation"
		return
	fi
	dom=$1
	key=$2
	val=$3
	echo " * setting ${dom} ${key}"
	defaults write ${dom} ${key} ${val}
}

EOF

	echo "echo \"Applying ${P} default configuration ...\"" >> "${T}"/${cfile}

	# Append the ebuild-supplied body line by line.
	gnustep_config_script | \
	while read line ; do
		echo "${line}" >> "${T}"/${cfile}
	done
	echo 'echo "done"' >> "${T}"/${cfile}

	# New layout installs into /usr/bin, old layout into the (unprefixed)
	# GNUstep system tools dir.
	if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
		exeinto /usr/bin
	else
		exeinto ${GNUSTEP_SYSTEM_TOOLS#${EPREFIX}}/Gentoo
	fi
	doexe "${T}"/${cfile}
}
+
+case ${EAPI:-0} in
+ 0|1) EXPORT_FUNCTIONS pkg_setup src_unpack src_compile src_install pkg_postinst ;;
+ *) EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_compile src_install pkg_postinst ;;
+esac
diff --git a/eclass/go-mono.eclass b/eclass/go-mono.eclass
new file mode 100644
index 000000000000..81ee68e5b5e8
--- /dev/null
+++ b/eclass/go-mono.eclass
@@ -0,0 +1,137 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: go-mono.eclass
+# @MAINTAINER:
+# dotnet@gentoo.org
+# @BLURB: Common functionality for go-mono.org apps
+# @DESCRIPTION:
+# Common functionality needed by all go-mono.org apps.
+
inherit base versionator mono

# Preview/RC tarballs are fetched from the mono build server.
PRE_URI="http://mono.ximian.com/monobuild/preview/sources"

# The git repository name differs for mono-debugger.
GIT_PN="${PN/mono-debugger/debugger}"

ESVN_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/svn-src/mono"

# NOTE(review): self-assignment — presumably only documents that ebuilds
# may pre-set a branch suffix before inheriting; confirm.
GO_MONO_SUB_BRANCH=${GO_MONO_SUB_BRANCH}

# Release-type detection: _rc/_pre snapshots fetch renamed tarballs from
# PRE_URI; PV 9999 tracks git master; x.y.9999 tracks a stable git branch;
# anything else is a plain release tarball.
if [[ "${PV%_rc*}" != "${PV}" ]]
then
	GO_MONO_P="${P%_rc*}"
	SRC_URI="${PRE_URI}/${PN}/${GO_MONO_P}.tar.bz2 -> ${P}.tar.bz2"
	S="${WORKDIR}/${GO_MONO_P}"
elif [[ "${PV%_pre*}" != "${PV}" ]]
then
	GO_MONO_P="${P%_pre*}"
	SRC_URI="${PRE_URI}/${PN}/${GO_MONO_P}.tar.bz2 -> ${P}.tar.bz2"
	S="${WORKDIR}/${GO_MONO_P}"
elif [[ "${PV}" == "9999" ]]
then
	GO_MONO_P=${P}
	EGIT_REPO_URI="http://github.com/mono/${GIT_PN}.git"
	SRC_URI=""
	inherit autotools git
elif [[ "${PV%.9999}" != "${PV}" ]]
then
	GO_MONO_P=${P}
	EGIT_REPO_URI="http://github.com/mono/${GIT_PN}.git"
	EGIT_BRANCH="mono-$(get_version_component_range 1)-$(get_version_component_range 2)${GO_MONO_SUB_BRANCH}"
	SRC_URI=""
	inherit autotools git
else
	GO_MONO_P=${P}
	SRC_URI="http://download.mono-project.com/sources/${PN}/${P}.tar.bz2"
fi


# Packages that must not gain a dev-lang/mono dependency (mono itself and
# its bootstrap companions).
NO_MONO_DEPEND=( "dev-lang/mono" "dev-dotnet/libgdiplus" "dev-dotnet/gluezilla" )

if [[ "$(get_version_component_range 3)" != "9999" ]]
then
	GO_MONO_REL_PV="$(get_version_component_range 1-2)"

else
	GO_MONO_REL_PV="${PV}"
fi

if ! has "${CATEGORY}/${PN}" "${NO_MONO_DEPEND[@]}"
then
	RDEPEND=">=dev-lang/mono-${GO_MONO_REL_PV}"
	DEPEND="${RDEPEND}"
fi

DEPEND="${DEPEND}
	virtual/pkgconfig
	userland_GNU? ( >=sys-apps/findutils-4.4.0 )"
+
# @FUNCTION: go-mono_src_unpack
# @DESCRIPTION:
# Runs default(); live (9999 / x.y.9999) ebuilds additionally run
# git_src_unpack.
go-mono_src_unpack() {
	default
	if [[ "${PV%.9999}" != "${PV}" || "${PV}" == "9999" ]]
	then
		git_src_unpack
	fi
}
+
# @FUNCTION: go-mono_src_prepare
# @DESCRIPTION:
# Runs autopatch from base.eclass, if PATCHES is set. Live ebuilds are
# additionally re-autoreconfed unless EAUTOBOOTSTRAP=no.
go-mono_src_prepare() {
	base_src_prepare
	if [[ "${PV%.9999}" != "${PV}" || "${PV}" == "9999" ]]
	then
		# Fixed: the original ended with '[[ ... ]] && eautoreconf',
		# which made the phase return 1 when EAUTOBOOTSTRAP=no.
		if [[ "$EAUTOBOOTSTRAP" != "no" ]] ; then
			eautoreconf
		fi
	fi
}
+
# @FUNCTION: go-mono_src_configure
# @DESCRIPTION:
# Runs econf, disabling static libraries and dependency-tracking; extra
# arguments are forwarded to econf.
go-mono_src_configure() {
	local -a opts=(
		--disable-dependency-tracking
		--disable-static
	)
	econf "${opts[@]}" "$@"
}
+
# @FUNCTION: go-mono_src_compile
# @DESCRIPTION:
# Runs emake, forwarding any extra arguments.
go-mono_src_compile() {
	emake "$@" || die "emake failed"
}
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# Insert path of docs you want installed. If more than one,
+# consider using an array.
+
+# @FUNCTION: go-mono_src_install
+# @DESCRIPTION:
# Runs emake, installs common doc files, if DOCS is
+# set, installs those. Gets rid of .la files.
go-mono_src_install () {
	# NOTE(review): -j1 suggests parallel install is unsafe for these
	# packages — confirm before changing.
	emake -j1 DESTDIR="${D}" "$@" install || die "install failed"
	mono_multilib_comply
	local commondoc=( AUTHORS ChangeLog README TODO )
	for docfile in "${commondoc[@]}"
	do
		[[ -e "${docfile}" ]] && dodoc "${docfile}"
	done
	# NOTE(review): [[ "${DOCS[@]}" ]] behaves oddly for multi-element
	# arrays; appears to assume DOCS is a string or one-element array —
	# confirm.
	if [[ "${DOCS[@]}" ]]
	then
		dodoc "${DOCS[@]}" || die "dodoc DOCS failed"
	fi
	find "${D}" -name '*.la' -exec rm -rf '{}' '+' || die "la removal failed"
}
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install
diff --git a/eclass/golang-base.eclass b/eclass/golang-base.eclass
new file mode 100644
index 000000000000..0c54266e389b
--- /dev/null
+++ b/eclass/golang-base.eclass
@@ -0,0 +1,78 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
# @ECLASS: golang-base.eclass
+# @MAINTAINER:
+# William Hubbs <williamh@gentoo.org>
+# @BLURB: Eclass that provides base functions for Go packages.
+# @DESCRIPTION:
+# This eclass provides base functions for software written in the Go
+# programming language; it also provides the build-time dependency on
+# dev-lang/go.
+
# Only EAPI 5 is supported by this eclass.
case "${EAPI:-0}" in
	5)
		;;
	*)
		die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
		;;
esac

# Inclusion guard (the matching 'fi' closes the eclass body).
if [[ -z ${_GOLANG_BASE} ]]; then

_GOLANG_BASE=1

# The Go toolchain is needed at build time; Go's *.a archives must never
# be stripped, hence the STRIP_MASK.
DEPEND=">=dev-lang/go-1.4.2:="
STRIP_MASK="*.a"
+
+# @ECLASS-VARIABLE: EGO_PN
+# @REQUIRED
+# @DESCRIPTION:
+# This is the import path for the go package to build. Please emerge
+# dev-lang/go and read "go help importpath" for syntax.
+#
+# Example:
+# @CODE
+# EGO_PN=github.com/user/package
+# @CODE
+
# @FUNCTION: ego_pn_check
# @DESCRIPTION:
# Die unless the ebuild has set EGO_PN (the Go import path).
ego_pn_check() {
	if [[ -z "${EGO_PN}" ]]; then
		die "${ECLASS}.eclass: EGO_PN is not set"
	fi
	return 0
}
+
# @FUNCTION: get_golibdir
# @DESCRIPTION:
# Print the non-prefixed library directory where Go packages
# should be installed.
get_golibdir() {
	printf '%s\n' /usr/lib/go-gentoo
}
+
# @FUNCTION: get_golibdir_gopath
# @DESCRIPTION:
# Print the EPREFIX-prefixed Go library directory, suitable for inclusion
# in GOPATH.
get_golibdir_gopath() {
	local dir
	dir=$(get_golibdir)
	echo "${EPREFIX}${dir}"
}
+
# @FUNCTION: golang_install_pkgs
# @DESCRIPTION:
# Install Go packages.
# This function assumes that $cwd is a Go workspace: its pkg/ and src/
# trees are installed under $(get_golibdir).
golang_install_pkgs() {
	debug-print-function ${FUNCNAME} "$@"

	ego_pn_check
	insinto "$(get_golibdir)"
	insopts -m0644 -p # preserve timestamps for bug 551486
	doins -r pkg src
}
+
+fi
diff --git a/eclass/golang-build.eclass b/eclass/golang-build.eclass
new file mode 100644
index 000000000000..6f5a1578ed04
--- /dev/null
+++ b/eclass/golang-build.eclass
@@ -0,0 +1,71 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: golang-build.eclass
+# @MAINTAINER:
+# William Hubbs <williamh@gentoo.org>
+# @BLURB: Eclass for compiling go packages.
+# @DESCRIPTION:
+# This eclass provides default src_compile, src_test and src_install
+# functions for software written in the Go programming language.
+
+inherit golang-base
+
+case "${EAPI:-0}" in
+ 5)
+ ;;
+ *)
+ die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
+ ;;
+esac
+
+EXPORT_FUNCTIONS src_compile src_install src_test
+
+if [[ -z ${_GOLANG_BUILD} ]]; then
+
+_GOLANG_BUILD=1
+
+# @ECLASS-VARIABLE: EGO_PN
+# @REQUIRED
+# @DESCRIPTION:
+# This is the import path for the go package(s) to build. Please emerge
+# dev-lang/go and read "go help importpath" for syntax.
+#
+# Example:
+# @CODE
+# EGO_PN=github.com/user/package
+# @CODE
+
# Compile EGO_PN with "go build" inside a GOPATH made of the work dir plus
# the system Go library dir; the exact command is echoed to the build log.
golang-build_src_compile() {
	debug-print-function ${FUNCNAME} "$@"

	ego_pn_check
	local -a cmd=(
		env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)"
		go build -v -work -x "${EGO_PN}"
	)
	echo "${cmd[@]}"
	"${cmd[@]}" || die
}
+
# Install with "go install" into the temporary GOPATH, then copy the
# resulting pkg/ and src/ trees into the system Go library directory.
golang-build_src_install() {
	debug-print-function ${FUNCNAME} "$@"

	ego_pn_check
	# Echo the exact command before running it, for build-log transparency.
	set -- env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)" \
		go install -v -work -x "${EGO_PN}"
	echo "$@"
	"$@" || die
	golang_install_pkgs
}
+
# Run the package's tests with "go test" inside the temporary GOPATH.
golang-build_src_test() {
	debug-print-function ${FUNCNAME} "$@"

	ego_pn_check
	# Echo the exact command before running it, for build-log transparency.
	set -- env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)" \
		go test -v -work -x "${EGO_PN}"
	echo "$@"
	"$@" || die
}
+
+fi
diff --git a/eclass/golang-vcs-snapshot.eclass b/eclass/golang-vcs-snapshot.eclass
new file mode 100644
index 000000000000..1cf5a56e7160
--- /dev/null
+++ b/eclass/golang-vcs-snapshot.eclass
@@ -0,0 +1,56 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: golang-vcs-snapshot.eclass
+# @MAINTAINER:
+# William Hubbs <williamh@gentoo.org>
+# @BLURB: support eclass for unpacking VCS snapshot tarballs for
+# software written in the Go programming language
+# @DESCRIPTION:
+# This eclass provides a convenience src_unpack() which unpacks the
+# first tarball mentioned in SRC_URI to its appropriate location in
+# ${WORKDIR}/${P}, treating ${WORKDIR}/${P} as a go workspace.
+#
+# The location where the tarball is extracted is defined as
+# ${WORKDIR}/${P}/src/${EGO_PN}.
+#
+# The typical use case is VCS snapshots coming from github, bitbucket
+# and similar services.
+#
+# Please note that this eclass currently handles only tarballs
+# (.tar.gz), but support for more formats may be added in the future.
+#
+# @EXAMPLE:
+#
+# @CODE
+# EGO_PN=github.com/user/package
+# inherit golang-vcs-snapshot
+#
+# SRC_URI="http://github.com/example/${PN}/tarball/v${PV} -> ${P}.tar.gz"
+# @CODE
+#
+# The above example will extract the tarball to
+# ${WORKDIR}/${P}/src/github.com/user/package
+
+inherit golang-base
+
+case ${EAPI:-0} in
+ 5) ;;
+ *) die "${ECLASS} API in EAPI ${EAPI} not yet established."
+esac
+
+EXPORT_FUNCTIONS src_unpack
+
# @FUNCTION: golang-vcs-snapshot_src_unpack
# @DESCRIPTION:
# Extract the first archive from ${A} to the appropriate location for GOPATH.
golang-vcs-snapshot_src_unpack() {
	local x
	ego_pn_check
	# ${A} is deliberately unquoted: word-split and take only the first
	# distfile.
	set -- ${A}
	x="$1"
	# Strip the tarball's top-level directory so the sources land directly
	# under src/${EGO_PN}.
	mkdir -p "${WORKDIR}/${P}/src/${EGO_PN%/*}" || die
	tar -C "${WORKDIR}/${P}/src/${EGO_PN%/*}" -x --strip-components 1 \
		-f "${DISTDIR}/${x}" || die
}
diff --git a/eclass/golang-vcs.eclass b/eclass/golang-vcs.eclass
new file mode 100644
index 000000000000..2fe3a848b251
--- /dev/null
+++ b/eclass/golang-vcs.eclass
@@ -0,0 +1,153 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: golang-vcs.eclass
+# @MAINTAINER:
+# William Hubbs <williamh@gentoo.org>
+# @BLURB: Eclass for fetching and unpacking go repositories.
+# @DESCRIPTION:
+# This eclass is written to ease the maintenance of live ebuilds
+# of software written in the Go programming language.
+
+inherit eutils golang-base
+
+case "${EAPI:-0}" in
+ 5)
+ ;;
+ *)
+ die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
+ ;;
+esac
+
+EXPORT_FUNCTIONS src_unpack
+
+if [[ -z ${_GOLANG_VCS} ]]; then
+
+_GOLANG_VCS=1
+
+# @ECLASS-VARIABLE: EGO_PN
+# @REQUIRED
+# @DESCRIPTION:
+# This is the import path for the go package(s). Please emerge dev-lang/go
+# and read "go help importpath" for syntax.
+#
+# Example:
+# @CODE
+# EGO_PN="github.com/user/package"
+# EGO_PN="github.com/user1/package1 github.com/user2/package2"
+# @CODE
+
+# @ECLASS-VARIABLE: EGO_SRC
+# @DESCRIPTION:
+# This is the Go upstream repository which will be copied to
+# ${WORKDIR}/${P}.
+# If it isn't set, it defaults to the first word of ${EGO_PN}.
+# This should be set if you are retrieving a repository that includes
+# multiple packages, e.g. golang.org/x/tools.
+#
+# Example:
+# @CODE
+# EGO_PN="github.com/user/repository/..."
+# EGO_SRC="github.com/user/repository"
+# @CODE
+
+# @ECLASS-VARIABLE: EGO_STORE_DIR
+# @DESCRIPTION:
+# Storage directory for Go sources.
+#
+# This is intended to be set by the user in make.conf. Ebuilds must not set
+# it.
+#
+# EGO_STORE_DIR=${DISTDIR}/go-src
+
+# @ECLASS-VARIABLE: EVCS_OFFLINE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty, this variable prevents any online operations.
+
+# @ECLASS-VARIABLE: EVCS_UMASK
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Set this variable to a custom umask. This is intended to be set by
+# users. By setting this to something like 002, it can make life easier
+# for people who do development as non-root (but are in the portage
+# group) and use FEATURES=userpriv.
+
# @FUNCTION: _golang-vcs_env_setup
# @INTERNAL
# @DESCRIPTION:
# Create EGO_STORE_DIR if necessary and prepare ${WORKDIR}/${P}/src.
_golang-vcs_env_setup() {
	debug-print-function ${FUNCNAME} "$@"

	local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
	# Default store location; user-overridable (e.g. via make.conf).
	: ${EGO_STORE_DIR:=${distdir}/go-src}

	[[ -n ${EVCS_UMASK} ]] && eumask_push $EVCS_UMASK

	if [[ ! -d ${EGO_STORE_DIR} ]]; then
		(
			# Subshell so 'addwrite /' does not leak to the caller.
			addwrite /
			mkdir -p "${EGO_STORE_DIR}"
		) || die "${ECLASS}: unable to create ${EGO_STORE_DIR}"
	fi

	addwrite "${EGO_STORE_DIR}"

	[[ -n ${EVCS_UMASK} ]] && eumask_pop
	mkdir -p "${WORKDIR}/${P}/src" ||
		die "${ECLASS}: unable to create ${WORKDIR}/${P}"
	# EGO_SRC defaults to the first import path listed in EGO_PN.
	if [ -z "${EGO_SRC}" ]; then
		set -- ${EGO_PN}
		EGO_SRC="$1"
	fi
	return 0
}
+
# @FUNCTION: _golang-vcs_fetch
# @INTERNAL
# @DESCRIPTION:
# Retrieve the EGO_PN go package along with its dependencies, then copy
# the EGO_SRC tree from the store into the work directory.
_golang-vcs_fetch() {
	debug-print-function ${FUNCNAME} "$@"

	ego_pn_check

	if [[ -z ${EVCS_OFFLINE} ]]; then
		[[ -n ${EVCS_UMASK} ]] && eumask_push ${EVCS_UMASK}

		# Each command is echoed before execution so the build log shows
		# the exact invocation.
		set -- env GOPATH="${EGO_STORE_DIR}" go get -d -t -u -v -x "${EGO_PN}"
		echo "$@"
		"$@" || die
		# The above dies if you pass repositories in EGO_PN instead of
		# packages, e.g. golang.org/x/tools instead of golang.org/x/tools/cmd/vet.
		# This is being discussed in the following upstream issue:
		# https://github.com/golang/go/issues/11090

		[[ -n ${EVCS_UMASK} ]] && eumask_pop
	fi
	set -- mkdir -p "${WORKDIR}/${P}/src/${EGO_SRC%/*}"
	echo "$@"
	"$@" || die "Unable to create ${WORKDIR}/${P}/src"
	set -- cp -r "${EGO_STORE_DIR}/src/${EGO_SRC}" \
		"${WORKDIR}/${P}/src/${EGO_SRC%/*}"
	echo "$@"
	"$@" || die "Unable to copy sources to ${WORKDIR}/${P}"
	return 0
}
+
# Fetch (or update) the sources into EGO_STORE_DIR and copy them into the
# work directory.
golang-vcs_src_fetch() {
	debug-print-function ${FUNCNAME} "$@"

	_golang-vcs_env_setup
	_golang-vcs_fetch
}
+
# Exported src_unpack: for live ebuilds, "unpacking" is fetching.
golang-vcs_src_unpack() {
	debug-print-function ${FUNCNAME} "$@"

	golang-vcs_src_fetch
}
+
+fi
diff --git a/eclass/gpe.eclass b/eclass/gpe.eclass
new file mode 100644
index 000000000000..e2146138532d
--- /dev/null
+++ b/eclass/gpe.eclass
@@ -0,0 +1,115 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gpe.eclass
+# @MAINTAINER:
+# gpe@gentoo.org
+# @AUTHOR:
+# Original Authors:
+# Rene Wagner <rw@handhelds.org>
+# Ned Ludd <solar@gentoo.org>
+# Angelo Arrifano <miknix@gentoo.org>
+# EAPI->EAPI2 patch: loki_val
+# multiple suggestions: Betelgeuse
+# Based on gnome2.eclass and gpe.bbclass (the latter from OpenEmbedded)
+# @BLURB: Provides common functionality for the G Palmtop Environment.
+
+inherit libtool toolchain-funcs
+
+# EAPI 0/1 lack the src_prepare/src_configure phases, so export fewer hooks.
+case "${EAPI:-0}" in
+	0|1)
+		EXPORT_FUNCTIONS src_unpack src_compile src_install
+		;;
+	*)
+		EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install
+		;;
+esac
+
+# @ECLASS-VARIABLE: ELTCONF
+# @DESCRIPTION:
+# Extra options passed to elibtoolize on gnome2 eclass.
+ELTCONF=""
+
+# @ECLASS-VARIABLE: GPE_DOCS
+# @DESCRIPTION:
+# Documentation files to be installed with dodoc.
+GPE_DOCS=""
+
+# GPE_MIRROR: base URL for GPE source tarballs (overridable by the ebuild).
+# GPE_TARBALL_SUFFIX: compression suffix of the upstream tarball.
+[[ -z "${GPE_MIRROR}" ]] && GPE_MIRROR="http://gpe.linuxtogo.org/download/source"
+[[ -z "${GPE_TARBALL_SUFFIX}" ]] && GPE_TARBALL_SUFFIX="gz"
+SRC_URI="${GPE_MIRROR}/${P}.tar.${GPE_TARBALL_SUFFIX}"
+
+HOMEPAGE="http://gpe.linuxtogo.org"
+
+IUSE="nls"
+GPECONF="${GPECONF} --enable-debug=no --disable-debug"
+
+RDEPEND=""
+DEPEND="
+	>=dev-util/intltool-0.29
+	virtual/pkgconfig"
+
+# @FUNCTION: gpe_src_unpack
+# @DESCRIPTION:
+# Unpacks and applies some required patches for GPE.
+gpe_src_unpack() {
+	unpack ${A}
+	cd "${S}"
+	# EAPI 0/1 have no src_prepare phase, so run the preparation here.
+	has "${EAPI:-0}" 0 1 && gpe_src_prepare "$@"
+}
+
+# Do not call, use gpe_src_unpack() instead.
+gpe_src_prepare() {
+	# Loop variable kept local so it does not leak into the ebuild scope.
+	local file
+	# let portage handle stripping.
+	# sort is needed, see #272161 .
+	for file in $(find . -name 'Makefile*' | sort) ; do
+		sed -i -e s/'install -s'/'install'/g \
+			-e s/'install -Ds'/'install -D'/g \
+			-e 's/$(INSTALL) -s/$(INSTALL) /g' \
+			-e 's;strip ;#strip ;g' \
+			"${file}" \
+			|| die "Sedding ${file} failed."
+	done
+	# Only libtoolize autoconf-based packages.
+	[[ -f configure ]] && elibtoolize
+}
+
+# @FUNCTION: gpe_src_configure
+# @DESCRIPTION:
+# Configures a GPE package in a cross-compile aware environment.
+gpe_src_configure() {
+	# Export the target C compiler so cross-compiles pick it up.
+	tc-export CC
+	# Some GPE packages ship no configure script; skip econf then.
+	[[ -f configure ]] && econf "$@" ${GPECONF}
+}
+
+# @FUNCTION: gpe_src_compile
+# @DESCRIPTION:
+# (Cross-)Compiles a GPE package.
+gpe_src_compile() {
+	tc-export CC
+	# EAPI 0/1 have no src_configure phase, so configure from here.
+	has "${EAPI:-0}" 0 1 && gpe_src_configure "$@"
+	emake PREFIX=/usr || die "emake failed"
+}
+
+# @FUNCTION: gpe_src_install
+# @DESCRIPTION:
+# Installs a GPE package in the correct way.
+gpe_src_install() {
+	# make/einstall flag for native-language support.
+	local use_nls=yes
+
+	use nls || use_nls=no
+
+	if [[ -f configure ]]; then
+		einstall "$@" || die "einstall failed"
+	else
+		# No configure script: install via plain make; STRIP=true leaves
+		# stripping to portage.
+		emake STRIP=true DESTDIR="${D}" PREFIX=/usr \
+			ENABLE_NLS=${use_nls} "$@" install || die "emake install failed"
+	fi
+
+	use nls || rm -rf "${D}"/usr/share/locale
+
+	# manual document installation
+	if [[ -n "${GPE_DOCS}" ]]; then
+		dodoc ${GPE_DOCS} || die "dodoc failed"
+	fi
+}
diff --git a/eclass/gst-plugins-bad.eclass b/eclass/gst-plugins-bad.eclass
new file mode 100644
index 000000000000..6340714cd5e5
--- /dev/null
+++ b/eclass/gst-plugins-bad.eclass
@@ -0,0 +1,43 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gst-plugins-bad.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Manages build for individual ebuild for gst-plugins-bad.
+# @DESCRIPTION:
+# See gst-plugins10.eclass documentation.
+
+GST_ORG_MODULE="gst-plugins-bad"
+
+inherit eutils gst-plugins10
+
+case "${EAPI:-0}" in
+	1|2|3|4|5)
+		;;
+	0)
+		die "EAPI=\"${EAPI}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+
+
+# Split-plugin ebuilds only (PN differs from the upstream module name).
+if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
+# -bad-0.10.20 uses orc optionally instead of liboil unconditionally.
+# While <0.10.20 configure always check for liboil, it is used only by
+# non-split plugins in gst/ (legacyresample and mpegdemux), so we only
+# builddep for all old packages, and have a RDEPEND in old versions of
+# media-libs/gst-plugins-bad
+	if [[ ${SLOT} = "0.10" ]] && ! version_is_at_least "0.10.20"; then
+		DEPEND="${DEPEND} >=dev-libs/liboil-0.3.8"
+	fi
+fi
+
diff --git a/eclass/gst-plugins-base.eclass b/eclass/gst-plugins-base.eclass
new file mode 100644
index 000000000000..aa48a4a18eb8
--- /dev/null
+++ b/eclass/gst-plugins-base.eclass
@@ -0,0 +1,31 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gst-plugins-base.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Manages build for individual ebuild for gst-plugins-base.
+# @DESCRIPTION:
+# See gst-plugins10.eclass documentation.
+
+GST_ORG_MODULE="gst-plugins-base"
+
+inherit gst-plugins10
+
+case "${EAPI:-0}" in
+	1|2|3|4|5)
+		;;
+	0)
+		die "EAPI=\"${EAPI}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+
diff --git a/eclass/gst-plugins-good.eclass b/eclass/gst-plugins-good.eclass
new file mode 100644
index 000000000000..6b8990dba135
--- /dev/null
+++ b/eclass/gst-plugins-good.eclass
@@ -0,0 +1,42 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gst-plugins-good.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Manages build for individual ebuild for gst-plugins-good.
+# @DESCRIPTION:
+# See gst-plugins10.eclass documentation.
+
+GST_ORG_MODULE="gst-plugins-good"
+
+inherit eutils gst-plugins10
+
+case "${EAPI:-0}" in
+	1|2|3|4|5)
+		;;
+	0)
+		die "EAPI=\"${EAPI}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+
+
+# Split-plugin ebuilds only (PN differs from the upstream module name).
+if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
+# -good-0.10.24 uses orc optionally instead of liboil unconditionally.
+# While <0.10.24 configure always checks for liboil, it is linked to only by
+# non-split plugins in gst/, so we only builddep for all old packages, and have
+# a RDEPEND in old versions of media-libs/gst-plugins-good
+	if [[ ${SLOT} = "0.10" ]] && ! version_is_at_least "0.10.24"; then
+		DEPEND="${DEPEND} >=dev-libs/liboil-0.3.8"
+	fi
+fi
+
diff --git a/eclass/gst-plugins-ugly.eclass b/eclass/gst-plugins-ugly.eclass
new file mode 100644
index 000000000000..dece79fdae1f
--- /dev/null
+++ b/eclass/gst-plugins-ugly.eclass
@@ -0,0 +1,31 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gst-plugins-ugly.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Manages build for individual ebuild for gst-plugins-ugly.
+# @DESCRIPTION:
+# See gst-plugins10.eclass documentation.
+
+GST_ORG_MODULE="gst-plugins-ugly"
+
+inherit gst-plugins10
+
+case "${EAPI:-0}" in
+	1|2|3|4|5)
+		;;
+	0)
+		die "EAPI=\"${EAPI}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+
diff --git a/eclass/gst-plugins10.eclass b/eclass/gst-plugins10.eclass
new file mode 100644
index 000000000000..0a6b0db79b0c
--- /dev/null
+++ b/eclass/gst-plugins10.eclass
@@ -0,0 +1,299 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gst-plugins10.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Manages build for individual ebuild for gst-plugins.
+# @DESCRIPTION:
+# Eclass to make external gst-plugins emergable on a per-plugin basis and
+# to solve the problem with gst-plugins generating far too much unneeded
+# dependencies.
+#
+# GStreamer consuming applications should depend on the specific plugins they
+# need as defined in their source code.
+#
+# In case of spider usage, obtain recommended plugins to use from Gentoo
+# developers responsible for gstreamer <gstreamer@gentoo.org> or the application
+# developer.
+
+inherit eutils multilib toolchain-funcs versionator
+
+# Select which phase functions to export based on what the EAPI offers.
+GST_EXPF=""
+case "${EAPI:-0}" in
+	2|3|4|5)
+		GST_EXPF="src_configure src_compile src_install"
+		;;
+	1)
+		GST_EXPF="src_compile src_install"
+		;;
+	0)
+		die "EAPI=\"${EAPI:-0}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+EXPORT_FUNCTIONS ${GST_EXPF}
+
+# @ECLASS-VARIABLE: GST_LA_PUNT
+# @DESCRIPTION:
+# Should we delete all the .la files?
+# NOT to be used without due consideration.
+# Defaults to no for EAPI < 5.
+if has "${EAPI:-0}" 0 1 2 3; then
+	: ${GST_LA_PUNT:="no"}
+else
+	: ${GST_LA_PUNT:="yes"}
+fi
+
+# @ECLASS-VARIABLE: GST_PLUGINS_BUILD
+# @DESCRIPTION:
+# Defines the plugins to be built.
+# May be set by an ebuild and contain more than one identifier, space
+# separated (only src_configure can handle multiple plugins at this time).
+: ${GST_PLUGINS_BUILD:=${PN/gst-plugins-/}}
+
+# @ECLASS-VARIABLE: GST_PLUGINS_BUILD_DIR
+# @DESCRIPTION:
+# Actual build directory of the plugin.
+# Most often the same as the configure switch name.
+: ${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}
+
+# @ECLASS-VARIABLE: GST_TARBALL_SUFFIX
+# @DESCRIPTION:
+# Most projects hosted on gstreamer.freedesktop.org mirrors provide tarballs as
+# tar.bz2 or tar.xz. This eclass defaults to bz2 for EAPI 0, 1, 2, 3 and
+# defaults to xz for everything else. This is because the gstreamer mirrors
+# are moving to only have xz tarballs for new releases.
+if has "${EAPI:-0}" 0 1 2 3; then
+	: ${GST_TARBALL_SUFFIX:="bz2"}
+else
+	: ${GST_TARBALL_SUFFIX:="xz"}
+fi
+
+# Even though xz-utils are in @system, they must still be added to DEPEND; see
+# http://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
+if [[ ${GST_TARBALL_SUFFIX} == "xz" ]]; then
+	DEPEND="${DEPEND} app-arch/xz-utils"
+fi
+
+# @ECLASS-VARIABLE: GST_ORG_MODULE
+# @DESCRIPTION:
+# Name of the module as hosted on gstreamer.freedesktop.org mirrors.
+# Leave unset if package name matches module name.
+: ${GST_ORG_MODULE:=$PN}
+
+# @ECLASS-VARIABLE: GST_ORG_PVP
+# @INTERNAL
+# @DESCRIPTION:
+# Major and minor numbers of the version number.
+: ${GST_ORG_PVP:=$(get_version_component_range 1-2)}
+
+
+# NOTE(review): BUILD_GST_PLUGINS looks like a typo for GST_PLUGINS_BUILD
+# (the variable defined above) — verify before relying on DESCRIPTION.
+DESCRIPTION="${BUILD_GST_PLUGINS} plugin for gstreamer"
+HOMEPAGE="http://gstreamer.freedesktop.org/"
+SRC_URI="http://gstreamer.freedesktop.org/src/${GST_ORG_MODULE}/${GST_ORG_MODULE}-${PV}.tar.${GST_TARBALL_SUFFIX}"
+
+LICENSE="GPL-2"
+case ${GST_ORG_PVP} in
+	0.10) SLOT="0.10" ;;
+	1.*) SLOT="1.0" ;;
+	*) die "Unkown gstreamer release."
+esac
+
+S="${WORKDIR}/${GST_ORG_MODULE}-${PV}"
+
+RDEPEND="
+	>=dev-libs/glib-2.6:2
+	media-libs/gstreamer:${SLOT}
+"
+DEPEND="
+	>=sys-apps/sed-4
+	virtual/pkgconfig
+"
+
+if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
+	# Do not run test phase for individual plugin ebuilds.
+	RESTRICT="test"
+	RDEPEND="${RDEPEND} >=media-libs/${GST_ORG_MODULE}-${PV}:${SLOT}"
+else
+	IUSE="nls"
+	DEPEND="${DEPEND} nls? ( >=sys-devel/gettext-0.17 )"
+fi
+
+#if [[ ${SLOT} == "0.10" ]]; then
+# XXX: verify with old ebuilds.
+#	DEPEND="${DEPEND} dev-libs/liboil"
+#fi
+
+DEPEND="${DEPEND} ${RDEPEND}"
+
+# @FUNCTION: gst-plugins10_get_plugins
+# @INTERNAL
+# @DESCRIPTION:
+# Get the list of plugins requiring external dependencies.
+gst-plugins10_get_plugins() {
+	# Must be called from src_prepare/src_configure
+	# Harvest AG_GST_CHECK_FEATURE(<NAME>, ...) entries from configure.* and
+	# lowercase them — these are the plugin names toggled by --enable-*.
+	GST_PLUGINS_LIST=$(sed -rn 's/^AG_GST_CHECK_FEATURE\((\w+),.*/ \1 /p' \
+		"${S}"/configure.* | LC_ALL='C' tr '[:upper:]' '[:lower:]')
+}
+
+# @FUNCTION: gst-plugins10_find_plugin_dir
+# @USAGE: gst-plugins10_find_plugin_dir [<build_dir>]
+# @INTERNAL
+# @DESCRIPTION:
+# Finds plugin build directory and cd to it.
+# Defaults to ${GST_PLUGINS_BUILD_DIR} if argument is not provided
+gst-plugins10_find_plugin_dir() {
+	local build_dir=${1:-${GST_PLUGINS_BUILD_DIR}}
+
+	# Plugins live either under ext/ (external deps) or sys/ (system deps).
+	if [[ ! -d ${S}/ext/${build_dir} ]]; then
+		if [[ ! -d ${S}/sys/${build_dir} ]]; then
+			ewarn "No such plugin directory"
+			die "no such plugin directory: ${build_dir}"
+		fi
+		einfo "Building system plugin in ${build_dir}..."
+		cd "${S}"/sys/${build_dir} || die
+	else
+		einfo "Building external plugin in ${build_dir}..."
+		cd "${S}"/ext/${build_dir} || die
+	fi
+}
+
+# @FUNCTION: gst-plugins10_system_link
+# @USAGE: gst-plugins10_system_link gst-libs/gst/audio:gstreamer-audio [...]
+# @DESCRIPTION:
+# Walks through makefiles in order to make sure build will link against system
+# librairies.
+# Takes a list of path fragments and corresponding pkgconfig libraries
+# separated by colon (:). Will replace the path fragment by the output of
+# pkgconfig.
+gst-plugins10_system_link() {
+	# plugin_dir is now local too — it previously leaked into global scope.
+	local plugin_dir directory libs pkgconfig pc tuple
+	pkgconfig=$(tc-getPKG_CONFIG)
+
+	for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
+		# cd's into the plugin's build directory.
+		gst-plugins10_find_plugin_dir ${plugin_dir}
+
+		# Each argument is "<path-fragment>:<pkgconfig-name>".
+		for tuple in "$@" ; do
+			directory="$(echo ${tuple} | cut -f1 -d':')"
+			pc="$(echo ${tuple} | cut -f2 -d':')-${SLOT}"
+			libs="$(${pkgconfig} --libs-only-l ${pc})"
+			sed -e "s:\$(top_builddir)/${directory}/.*\.la:${libs}:" \
+				-i Makefile.am Makefile.in || die
+		done
+	done
+}
+
+# @FUNCTION: gst-plugins10_remove_unversioned_binaries
+# @INTERNAL
+# @DESCRIPTION:
+# Remove the unversioned binaries gstreamer provides to prevent file collision
+# with other slots. DEPRECATED
+gst-plugins10_remove_unversioned_binaries() {
+	local gst_bins
+
+	# Binaries are installed as <name>-<SLOT>; remove the unversioned
+	# duplicates to avoid file collisions between slots.
+	cd "${D}"/usr/bin || die
+	for gst_bins in *-${SLOT} ; do
+		# Glob may not match anything; skip the literal pattern.
+		[[ -e ${gst_bins} ]] || continue
+		rm "${gst_bins/-${SLOT}/}"
+		einfo "Removed ${gst_bins/-${SLOT}/}"
+	done
+}
+
+# @FUNCTION: gst-plugins10_src_configure
+# @DESCRIPTION:
+# Handles logic common to configuring gstreamer plugins
+gst-plugins10_src_configure() {
+	local plugin gst_conf
+
+	# Options unknown to very old automake/autoconf are only added for
+	# the EAPIs whose supported versions understand them.
+	if has ${EAPI:-0} 0 1 2 3 ; then
+		gst_conf="${gst_conf} --disable-dependency-tracking"
+	fi
+
+	if has ${EAPI:-0} 0 1 2 3 4 ; then
+		gst_conf="${gst_conf} --disable-silent-rules"
+	fi
+
+	gst-plugins10_get_plugins
+
+	# Enable only the requested plugins, disable every other feature.
+	for plugin in ${GST_PLUGINS_LIST} ; do
+		if has ${plugin} ${GST_PLUGINS_BUILD} ; then
+			gst_conf="${gst_conf} --enable-${plugin}"
+		else
+			gst_conf="${gst_conf} --disable-${plugin}"
+		fi
+	done
+
+	if grep -q "ORC_CHECK" configure.* ; then
+		if in_iuse orc ; then
+			gst_conf="${gst_conf} $(use_enable orc)"
+		else
+			gst_conf="${gst_conf} --disable-orc"
+		fi
+	fi
+
+	if grep -q "AM_MAINTAINER_MODE" configure.* ; then
+		gst_conf="${gst_conf} --disable-maintainer-mode"
+	fi
+
+	if grep -q "disable-schemas-compile" configure ; then
+		gst_conf="${gst_conf} --disable-schemas-compile"
+	fi
+
+	if [[ ${PN} == ${GST_ORG_MODULE} ]]; then
+		gst_conf="${gst_conf} $(use_enable nls)"
+	fi
+
+	einfo "Configuring to build ${GST_PLUGINS_BUILD} plugin(s) ..."
+	# gst_conf stays unquoted on purpose (word-splitting into options);
+	# caller arguments are quoted so spaces inside them survive.
+	econf \
+		--with-package-name="Gentoo GStreamer ebuild" \
+		--with-package-origin="http://www.gentoo.org" \
+		${gst_conf} "$@"
+}
+
+# @FUNCTION: gst-plugins10_src_compile
+# @DESCRIPTION:
+# Compiles requested gstreamer plugin.
+gst-plugins10_src_compile() {
+	local plugin_dir
+
+	# EAPI 0/1 have no src_configure phase; configure from here instead.
+	has ${EAPI:-0} 0 1 && gst-plugins10_src_configure "$@"
+
+	for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
+		# cd's into the plugin's build directory.
+		gst-plugins10_find_plugin_dir ${plugin_dir}
+
+		if has "${EAPI:-0}" 0 1 2 3 ; then
+			emake || die
+		else
+			default
+		fi
+	done
+}
+
+# @FUNCTION: gst-plugins10_src_install
+# @DESCRIPTION:
+# Installs requested gstreamer plugin.
+gst-plugins10_src_install() {
+	local plugin_dir
+
+	for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
+		# cd's into the plugin's build directory.
+		gst-plugins10_find_plugin_dir ${plugin_dir}
+
+		if has "${EAPI:-0}" 0 1 2 3 ; then
+			emake install DESTDIR="${D}" || die
+			[[ -e README ]] && dodoc README
+		else
+			default
+		fi
+	done
+
+	# Optionally drop libtool archives for the installed modules.
+	[[ ${GST_LA_PUNT} = "yes" ]] && prune_libtool_files --modules
+}
+
diff --git a/eclass/gstreamer.eclass b/eclass/gstreamer.eclass
new file mode 100644
index 000000000000..aef8bf57a344
--- /dev/null
+++ b/eclass/gstreamer.eclass
@@ -0,0 +1,279 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gstreamer.eclass
+# @MAINTAINER:
+# gstreamer@gentoo.org
+# @AUTHOR:
+# Michał Górny <mgorny@gentoo.org>
+# Gilles Dartiguelongue <eva@gentoo.org>
+# Saleem Abdulrasool <compnerd@gentoo.org>
+# foser <foser@gentoo.org>
+# zaheerm <zaheerm@gentoo.org>
+# @BLURB: Helps building core & split gstreamer plugins.
+# @DESCRIPTION:
+# Eclass to make external gst-plugins emergable on a per-plugin basis
+# and to solve the problem with gst-plugins generating far too much
+# unneeded dependencies.
+#
+# GStreamer consuming applications should depend on the specific plugins
+# they need as defined in their source code. Usually you can find that
+# out by grepping the source tree for 'factory_make'. If it uses playbin
+# plugin, consider adding media-plugins/gst-plugins-meta dependency, but
+# also list any packages that provide explicitly requested plugins.
+
+inherit eutils multilib multilib-minimal toolchain-funcs versionator
+
+case "${EAPI:-0}" in
+	5)
+		;;
+	0|1|2|3|4)
+		die "EAPI=\"${EAPI:-0}\" is not supported anymore"
+		;;
+	*)
+		die "EAPI=\"${EAPI}\" is not supported yet"
+		;;
+esac
+
+# @ECLASS-VARIABLE: GST_PLUGINS_BUILD
+# @DESCRIPTION:
+# Defines the plugins to be built.
+# May be set by an ebuild and contain more than one identifier, space
+# separated (only src_configure can handle multiple plugins at this time).
+: ${GST_PLUGINS_BUILD:=${PN/gst-plugins-/}}
+
+# @ECLASS-VARIABLE: GST_PLUGINS_BUILD_DIR
+# @DESCRIPTION:
+# Actual build directory of the plugin.
+# Most often the same as the configure switch name.
+: ${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}
+
+# @ECLASS-VARIABLE: GST_TARBALL_SUFFIX
+# @DESCRIPTION:
+# Most projects hosted on gstreamer.freedesktop.org mirrors provide
+# tarballs as tar.bz2 or tar.xz. This eclass defaults to xz. This is
+# because the gstreamer mirrors are moving to only have xz tarballs for
+# new releases.
+: ${GST_TARBALL_SUFFIX:="xz"}
+
+# Even though xz-utils are in @system, they must still be added to DEPEND; see
+# http://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
+if [[ ${GST_TARBALL_SUFFIX} == "xz" ]]; then
+	DEPEND="${DEPEND} app-arch/xz-utils"
+fi
+
+# @ECLASS-VARIABLE: GST_ORG_MODULE
+# @DESCRIPTION:
+# Name of the module as hosted on gstreamer.freedesktop.org mirrors.
+# Leave unset if package name matches module name.
+: ${GST_ORG_MODULE:=$PN}
+
+# @ECLASS-VARIABLE: GST_ORG_PVP
+# @INTERNAL
+# @DESCRIPTION:
+# Major and minor numbers of the version number.
+: ${GST_ORG_PVP:=$(get_version_component_range 1-2)}
+
+
+# NOTE(review): BUILD_GST_PLUGINS looks like a typo for GST_PLUGINS_BUILD
+# (the variable defined above) — verify before relying on DESCRIPTION.
+DESCRIPTION="${BUILD_GST_PLUGINS} plugin for gstreamer"
+HOMEPAGE="http://gstreamer.freedesktop.org/"
+SRC_URI="http://gstreamer.freedesktop.org/src/${GST_ORG_MODULE}/${GST_ORG_MODULE}-${PV}.tar.${GST_TARBALL_SUFFIX}"
+
+LICENSE="GPL-2"
+case ${GST_ORG_PVP} in
+	0.10) SLOT="0.10"; GST_MIN_PV="0.10.36-r2" ;;
+	1.*) SLOT="1.0"; GST_MIN_PV="1.2.4-r1" ;;
+	*) die "Unkown gstreamer release."
+esac
+
+S="${WORKDIR}/${GST_ORG_MODULE}-${PV}"
+
+RDEPEND="
+	>=dev-libs/glib-2.38.2-r1:2[${MULTILIB_USEDEP}]
+	>=media-libs/gstreamer-${GST_MIN_PV}:${SLOT}[${MULTILIB_USEDEP}]
+"
+DEPEND="
+	>=sys-apps/sed-4
+	>=virtual/pkgconfig-0-r1[${MULTILIB_USEDEP}]
+"
+
+# Export common multilib phases.
+multilib_src_configure() { gstreamer_multilib_src_configure; }
+
+if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
+	# Do not run test phase for individual plugin ebuilds.
+	RESTRICT="test"
+	RDEPEND="${RDEPEND}
+		>=media-libs/${GST_ORG_MODULE}-${PV}:${SLOT}[${MULTILIB_USEDEP}]"
+
+	# Export multilib phases used for split builds.
+	multilib_src_compile() { gstreamer_multilib_src_compile; }
+	multilib_src_install() { gstreamer_multilib_src_install; }
+	multilib_src_install_all() { gstreamer_multilib_src_install_all; }
+else
+	IUSE="nls"
+	DEPEND="${DEPEND} nls? ( >=sys-devel/gettext-0.17 )"
+fi
+
+if [[ ${SLOT} == "0.10" ]]; then
+	RDEPEND="${RDEPEND}
+		abi_x86_32? (
+			!app-emulation/emul-linux-x86-gstplugins[-abi_x86_32(-)]
+		)"
+fi
+
+DEPEND="${DEPEND} ${RDEPEND}"
+
+# @FUNCTION: gstreamer_environment_reset
+# @INTERNAL
+# @DESCRIPTION:
+# Clean up environment for clean builds.
+# >=dev-lang/orc-0.4.23 rely on environment variables to find a place to
+# allocate files to mmap.
+gstreamer_environment_reset() {
+	# >=dev-lang/orc-0.4.23 consults XDG_RUNTIME_DIR for a place to
+	# allocate mmap'd scratch files; point it at a private dir under ${T}.
+	local run_dir="${T}/run"
+
+	export XDG_RUNTIME_DIR="${run_dir}"
+	mkdir -p "${run_dir}"
+	# The basedir spec requires this directory to be user-owned, mode 0700:
+	# http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
+	chmod 0700 "${run_dir}"
+}
+
+# @FUNCTION: gstreamer_get_plugins
+# @INTERNAL
+# @DESCRIPTION:
+# Get the list of plugins requiring external dependencies.
+gstreamer_get_plugins() {
+	# Must be called from src_prepare/src_configure
+	# Harvest AG_GST_CHECK_FEATURE(<NAME>, ...) entries from configure.* and
+	# lowercase them — these are the plugin names toggled by --enable-*.
+	GST_PLUGINS_LIST=$(sed -rn 's/^AG_GST_CHECK_FEATURE\((\w+),.*/ \1 /p' \
+		"${ECONF_SOURCE:-${S}}"/configure.* | LC_ALL='C' tr '[:upper:]' '[:lower:]')
+}
+
+# @FUNCTION: gstreamer_get_plugin_dir
+# @USAGE: gstreamer_get_plugin_dir [<build_dir>]
+# @INTERNAL
+# @DESCRIPTION:
+# Finds plugin build directory and output it.
+# Defaults to ${GST_PLUGINS_BUILD_DIR} if argument is not provided
+gstreamer_get_plugin_dir() {
+	local build_dir=${1:-${GST_PLUGINS_BUILD_DIR}}
+
+	# Plugins live either under ext/ (external deps) or sys/ (system deps).
+	if [[ ! -d ${S}/ext/${build_dir} ]]; then
+		if [[ ! -d ${S}/sys/${build_dir} ]]; then
+			ewarn "No such plugin directory"
+			die
+		fi
+		# einfo goes to stderr here: stdout is this function's return value.
+		einfo "Building system plugin in ${build_dir}..." >&2
+		echo sys/${build_dir}
+	else
+		einfo "Building external plugin in ${build_dir}..." >&2
+		echo ext/${build_dir}
+	fi
+}
+
+# @FUNCTION: gstreamer_system_link
+# @USAGE: gstreamer_system_link gst-libs/gst/audio:gstreamer-audio [...]
+# @DESCRIPTION:
+# Walks through makefiles in order to make sure build will link against system
+# librairies.
+# Takes a list of path fragments and corresponding pkgconfig libraries
+# separated by colon (:). Will replace the path fragment by the output of
+# pkgconfig.
+gstreamer_system_link() {
+	# plugin_dir is now local too — it previously leaked into global scope.
+	local plugin_dir pdir directory libs pkgconfig pc tuple
+	pkgconfig=$(tc-getPKG_CONFIG)
+
+	for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
+		pdir=$(gstreamer_get_plugin_dir ${plugin_dir})
+
+		# Each argument is "<path-fragment>:<pkgconfig-name>".
+		for tuple in "$@" ; do
+			directory=${tuple%:*}
+			pc=${tuple#*:}-${SLOT}
+			libs="$(${pkgconfig} --libs-only-l ${pc} || die)"
+			sed -e "s:\$(top_builddir)/${directory}/.*\.la:${libs}:" \
+				-i "${pdir}"/Makefile.{am,in} || die
+		done
+	done
+}
+
+# @FUNCTION: gstreamer_multilib_src_configure
+# @DESCRIPTION:
+# Handles logic common to configuring gstreamer plugins
+gstreamer_multilib_src_configure() {
+	local plugin gst_conf=() ECONF_SOURCE=${ECONF_SOURCE:-${S}}
+
+	gstreamer_get_plugins
+	gstreamer_environment_reset
+
+	# Enable only the requested plugins, disable every other feature.
+	for plugin in ${GST_PLUGINS_LIST} ; do
+		if has ${plugin} ${GST_PLUGINS_BUILD} ; then
+			gst_conf+=( --enable-${plugin} )
+		else
+			gst_conf+=( --disable-${plugin} )
+		fi
+	done
+
+	# Only pass orc switches when configure actually knows about orc.
+	if grep -q "ORC_CHECK" "${ECONF_SOURCE}"/configure.* ; then
+		if in_iuse orc ; then
+			gst_conf+=( $(use_enable orc) )
+		else
+			gst_conf+=( --disable-orc )
+		fi
+	fi
+
+	if grep -q "AM_MAINTAINER_MODE" "${ECONF_SOURCE}"/configure.* ; then
+		gst_conf+=( --disable-maintainer-mode )
+	fi
+
+	if grep -q "disable-schemas-compile" "${ECONF_SOURCE}"/configure ; then
+		gst_conf+=( --disable-schemas-compile )
+	fi
+
+	# nls is only an IUSE of the full (non-split) module ebuild.
+	if [[ ${PN} == ${GST_ORG_MODULE} ]]; then
+		gst_conf+=( $(use_enable nls) )
+	fi
+
+	einfo "Configuring to build ${GST_PLUGINS_BUILD} plugin(s) ..."
+	econf \
+		--with-package-name="Gentoo GStreamer ebuild" \
+		--with-package-origin="http://www.gentoo.org" \
+		"${gst_conf[@]}" "${@}"
+}
+
+# @FUNCTION: gstreamer_multilib_src_compile
+# @DESCRIPTION:
+# Compiles requested gstreamer plugin.
+gstreamer_multilib_src_compile() {
+	# Build every plugin listed in GST_PLUGINS_BUILD_DIR in turn.
+	local subdir build_path
+	for subdir in ${GST_PLUGINS_BUILD_DIR} ; do
+		build_path=$(gstreamer_get_plugin_dir ${subdir})
+		emake -C "${build_path}"
+	done
+}
+
+# @FUNCTION: gstreamer_multilib_src_install
+# @DESCRIPTION:
+# Installs requested gstreamer plugin.
+gstreamer_multilib_src_install() {
+	# Install every plugin listed in GST_PLUGINS_BUILD_DIR into ${D}.
+	local subdir build_path
+	for subdir in ${GST_PLUGINS_BUILD_DIR} ; do
+		build_path=$(gstreamer_get_plugin_dir ${subdir})
+		emake -C "${build_path}" DESTDIR="${D}" install
+	done
+}
+
+# @FUNCTION: gstreamer_multilib_src_install_all
+# @DESCRIPTION:
+# Installs documentation for requested gstreamer plugin, and removes .la
+# files.
+gstreamer_multilib_src_install_all() {
+	# Ship each plugin's README when present, then drop .la files.
+	local subdir readme
+	for subdir in ${GST_PLUGINS_BUILD_DIR} ; do
+		readme="$(gstreamer_get_plugin_dir ${subdir})/README"
+		[[ -e ${readme} ]] && dodoc "${readme}"
+	done
+
+	prune_libtool_files --modules
+}
diff --git a/eclass/gtk-sharp-module.eclass b/eclass/gtk-sharp-module.eclass
new file mode 100644
index 000000000000..5479fd0abc01
--- /dev/null
+++ b/eclass/gtk-sharp-module.eclass
@@ -0,0 +1,547 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: gtk-sharp-module.eclass
+# @MAINTAINER:
+# dotnet@gentoo.org
+# @BLURB: Manages the modules of the gtk-, gnome-, and gnome-desktop-sharp tarballs
+# @DESCRIPTION:
+# This eclass provides phase functions and helper functions for the modules
+# of the gtk-sharp, gnome-sharp and gnome-desktop-sharp tarballs.
+# PLEASE TAKE NOTE: ONLY FOR EAPI-2 EBUILDS
+
+# These tarballs bundle their own autotools; do not pull any in.
+WANT_AUTOMAKE=none
+WANT_AUTOCONF=none
+
+inherit eutils mono multilib libtool autotools base versionator
+
+case ${EAPI:-0} in
+	2|3|4|5) ;;
+	*) die "Unknown EAPI." ;;
+esac
+
+# @ECLASS-VARIABLE: GTK_SHARP_MODULE
+# @DESCRIPTION:
+# The name of the Gtk# module.
+# Default value: ${PN/-sharp/}
+GTK_SHARP_MODULE=${GTK_SHARP_MODULE:=${PN/-sharp/}}
+
+# @ECLASS-VARIABLE: GTK_SHARP_MODULE_DIR
+# @DESCRIPTION:
+# The subdirectory of S in which GTK_SHARP_MODULE is installed.
+# Default value: ${PN/-sharp/}
+GTK_SHARP_MODULE_DIR=${GTK_SHARP_MODULE_DIR:=${PN/-sharp/}}
+
+# @ECLASS-VARIABLE: GTK_SHARP_REQUIRED_VERSION
+# @DESCRIPTION:
+# The version of the gtk-sharp tarball this package requires.
+# Optional.
+GTK_SHARP_REQUIRED_VERSION="${GTK_SHARP_REQUIRED_VERSION}"
+
+# @ECLASS-VARIABLE: gapi_users_list
+# @DESCRIPTION:
+# List of modules that use one of gapi2-codegen, gapi2-fixup or gapi2-parser
+# No ebuild-serviceable parts inside.
+gapi_users_list="art gnome gnomevfs ${gnome_desktop_sharp_module_list} atk gtk gdk glade pango"
+
+# @ECLASS-VARIABLE: PV_MAJOR
+# @DESCRIPTION:
+# The first two components of the PV variable.
+PV_MAJOR=$(get_version_component_range 1-2)
+
+# @FUNCTION: add_bdepend
+# @USAGE: <package atom>
+# @DESCRIPTION:
+# Adds to the DEPEND variable
+add_bdepend() {
+ [[ ${#@} -eq 1 ]] || die "${FUNCNAME} needs ONE (1) argument"
+ DEPEND="${DEPEND} $@"
+}
+
+# @FUNCTION: add_rdepend
+# @USAGE: <package atom>
+# @DESCRIPTION:
+# Adds to the RDEPEND variable
+add_rdepend() {
+ [[ ${#@} -eq 1 ]] || die "${FUNCNAME} needs ONE (1) argument"
+ RDEPEND="${RDEPEND} $@"
+}
+
+# @FUNCTION: add_depend
+# @USAGE: <package atom>
+# @DESCRIPTION:
+# Adds to the DEPEND and RDEPEND variables
+add_depend() {
+ [[ ${#@} -eq 1 ]] || die "${FUNCNAME} needs ONE (1) argument"
+ DEPEND="${DEPEND} $@"
+ RDEPEND="${RDEPEND} $@"
+}
+
+# @ECLASS-VARIABLE: TARBALL
+# @DESCRIPTION:
+# The GtkSharp modules are currently divided into three separate tarball
+# distributions. The TARBALL variable holds the name of the tarball
+# to which GTK_SHARP_MODULE belongs.
+case ${GTK_SHARP_MODULE} in
+	glib|glade|gtk|gdk|atk|pango|gtk-dotnet|gtk-gapi|gtk-docs)
+		TARBALL="gtk-sharp"
+		case ${PVR} in
+			2.12.*)
+				SRC_URI="mirror://gentoo/${TARBALL}-2.12.7.patch.bz2"
+				#Upstream: https://bugzilla.novell.com/show_bug.cgi?id=$bugno
+				#Upstream bug #470390 for the gtk-sharp-2.12.7.patch
+				PATCHES=(
+					"${WORKDIR}/${TARBALL}-2.12.7.patch"
+				)
+				EAUTORECONF="YES"
+				add_bdepend "=sys-devel/automake-1.10*"
+				add_bdepend ">=sys-devel/autoconf-2.61"
+				;;
+		esac
+		;;
+	art|gnome|gnomevfs|gconf)
+		TARBALL="gnome-sharp"
+		add_depend "=dev-dotnet/gtk-sharp-${GTK_SHARP_REQUIRED_VERSION}*"
+		# NOTE(review): quoting "${gapi_users_list}" passes the whole list as
+		# one word to 'has', so this only matches an exact full-string
+		# comparison — verify the intended membership test.
+		has "${GTK_SHARP_MODULE}" "${gapi_users_list}" && \
+			add_bdepend "=dev-dotnet/gtk-sharp-gapi-${GTK_SHARP_REQUIRED_VERSION}*"
+		case ${PVR} in
+			2.24.1*)
+				SRC_URI="mirror://gentoo/${TARBALL}-2.24.1.patch.bz2"
+				# Upstream bug: https://bugzilla.novell.com/show_bug.cgi?id=483251
+				PATCHES=(
+					"${WORKDIR}/${TARBALL}-2.24.1.patch"
+				)
+				EAUTORECONF="YES"
+				add_bdepend "=sys-devel/automake-1.10*"
+				add_bdepend ">=sys-devel/autoconf-2.61"
+				;;
+			2.24.2*)
+				add_depend ">=dev-lang/mono-2.7"
+				;;
+		esac
+		;;
+	gnome-desktop|gnome-print|gnome-panel|gtkhtml|gtksourceview|nautilusburn|rsvg|vte|wnck)
+		TARBALL="gnome-desktop-sharp"
+		add_depend "=dev-dotnet/gtk-sharp-${GTK_SHARP_REQUIRED_VERSION}*"
+		add_depend "=dev-dotnet/gnome-sharp-2.24*"
+		add_depend "gnome-base/gnome-desktop:2"
+		add_bdepend "=dev-dotnet/gtk-sharp-gapi-${GTK_SHARP_REQUIRED_VERSION}*"
+		;;
+	*)
+		eerror "Huh? Sonny boy, looks like your GTK_SHARP_MODULE is not on the approved list. BAILING!"
+		die "How did we get here!!?"
+		;;
+esac
+
+# Per-package dependency wiring, keyed on the full package name (${PF}).
+case ${PF} in
+	#gtk-sharp tarball
+	gtk-sharp-docs*)
+		add_depend ">=dev-lang/mono-2.0"
+		;;
+	gtk-sharp-gapi*)
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		add_depend "dev-perl/XML-LibXML"
+		;;
+	gtk-sharp-*)
+		add_bdepend "~dev-dotnet/gtk-sharp-gapi-${PV}"
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "~dev-dotnet/atk-sharp-${PV}"
+		add_depend "~dev-dotnet/gdk-sharp-${PV}"
+		add_depend "~dev-dotnet/pango-sharp-${PV}"
+		;;
+	gdk-sharp-*)
+		add_bdepend "~dev-dotnet/gtk-sharp-gapi-${PV}"
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "x11-libs/gtk+:2"
+		add_depend "~dev-dotnet/pango-sharp-${PV}"
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		;;
+	atk-sharp-*)
+		add_bdepend "~dev-dotnet/gtk-sharp-gapi-${PV}"
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "dev-libs/atk"
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		;;
+	glib-sharp-*)
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		add_depend "dev-libs/glib:2"
+		;;
+	pango-sharp-*)
+		add_bdepend "~dev-dotnet/gtk-sharp-gapi-${PV}"
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "x11-libs/pango"
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		;;
+	gtk-dotnet-*)
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "~dev-dotnet/gdk-sharp-${PV}"
+		add_depend "~dev-dotnet/pango-sharp-${PV}"
+		add_depend "~dev-dotnet/gtk-sharp-${PV}"
+		add_depend "dev-lang/mono[-minimal]"
+		add_rdepend "!<=dev-dotnet/gtk-sharp-2.12.7:2"
+		;;
+	glade-sharp-*)
+		add_bdepend "~dev-dotnet/gtk-sharp-gapi-${PV}"
+		add_depend "~dev-dotnet/glib-sharp-${PV}"
+		add_depend "~dev-dotnet/atk-sharp-${PV}"
+		add_depend "~dev-dotnet/gdk-sharp-${PV}"
+		add_depend "~dev-dotnet/gtk-sharp-${PV}"
+		add_depend "~dev-dotnet/pango-sharp-${PV}"
+		add_depend ">=gnome-base/libglade-2.3.6:2.0"
+		;;
+	#gnome-sharp tarball
+	art-sharp-*)
+		add_depend ">=media-libs/libart_lgpl-2.3.20"
+		;;
+	gnome-sharp-*)
+		add_depend ">=gnome-base/libgnomeui-${PV_MAJOR}"
+		add_depend "~dev-dotnet/gnomevfs-sharp-${PV}"
+		add_depend "~dev-dotnet/art-sharp-${PV}"
+		add_depend ">=gnome-base/libgnomecanvas-${GNOMECANVAS_REQUIRED_VERSION}"
+		add_depend ">=x11-libs/gtk+-2.14.0:2"
+		;;
+	gconf-sharp-*)
+		add_depend ">=gnome-base/gconf-${PV_MAJOR}:2"
+		add_depend "=dev-dotnet/glade-sharp-${GTK_SHARP_REQUIRED_VERSION}*"
+		add_depend "~dev-dotnet/gnome-sharp-${PV}"
+		add_depend "~dev-dotnet/art-sharp-${PV}"
+		;;
+	gnomevfs-sharp-*)
+		add_depend ">=gnome-base/gnome-vfs-${PV_MAJOR}:2"
+		;;
+	#gnome-desktop-sharp tarball
+	gnome-desktop-sharp-*)
+		# NOTE: libgnome-desktop-2.so has been known to make binary-
+		# incompatible changes, requiring .so bumps. gnome-desktop-sharp
+		# is locked to a specific .so.n version, so strict dependencies
+		# may be required in the future (as it has in the past).
+		add_depend ">=gnome-base/gnome-desktop-${PV_MAJOR}:2"
+		;;
+	gnome-panel-sharp-*)
+		add_depend ">=gnome-base/gnome-panel-${PV_MAJOR}"
+		;;
+	gnome-print-sharp-*)
+		add_depend "gnome-base/libgnomeprint:2.2"
+		add_depend "gnome-base/libgnomeprintui:2.2"
+		;;
+	gtkhtml-sharp-*)
+		#NOTE: gtkhtml dependency must follow gtkhtml-sharp version.
+		#i.e. gtkhtml-sharp-2.24.0 >=gtkhtml-3.24
+		#     gtkhtml-sharp-2.16.0 >=gtkhtml-3.16
+		# See bug 249540 for unpleasant side effects.
+		# Maps sharp major.minor (2.x) to gtkhtml (x+1).x, per the rule above.
+		add_depend ">=gnome-extra/gtkhtml-$(($(get_version_component_range 1) + 1 )).$(get_version_component_range 2):3.14"
+		;;
+	gtksourceview-sharp-*)
+		add_depend ">=x11-libs/gtksourceview-${GTKSOURCEVIEW_REQUIRED_VERSION}:2.0"
+		;;
+	nautilusburn-sharp-*)
+		add_depend ">=gnome-extra/nautilus-cd-burner-2.24.0"
+		;;
+	rsvg-sharp-*)
+		add_depend ">=gnome-base/librsvg-${RSVG_REQUIRED_VERSION}:2"
+		;;
+	vte-sharp-*)
+		add_depend ">=x11-libs/vte-${VTE_REQUIRED_VERSION}:0"
+		;;
+	wnck-sharp-*)
+		add_depend ">=x11-libs/libwnck-${PV_MAJOR}:1"
+		;;
+esac
+
# Default ebuild metadata for all gtk-sharp modules; an ebuild may override
# any of these after inheriting the eclass.
# @ECLASS-VARIABLE: DESCRIPTION
# @DESCRIPTION:
# Default value: GtkSharp's ${GTK_SHARP_MODULE} module of the ${TARBALL} tarball
DESCRIPTION="GtkSharp's ${GTK_SHARP_MODULE} module of the ${TARBALL} tarball"
# @ECLASS-VARIABLE: HOMEPAGE
# @DESCRIPTION:
# Default value: http://www.mono-project.com/GtkSharp
HOMEPAGE="http://www.mono-project.com/GtkSharp"
# @ECLASS-VARIABLE: LICENSE
# @DESCRIPTION:
# Default value: LGPL-2.1
LICENSE="LGPL-2.1"

# Dependencies shared by every module, regardless of which tarball it is in.
add_depend ">=dev-lang/mono-2.0.1"
add_bdepend ">=sys-apps/sed-4"
add_bdepend "virtual/pkgconfig"
add_bdepend ">=app-shells/bash-3.1"

IUSE="debug"
# @ECLASS-VARIABLE: S
# @DESCRIPTION:
# Default value: ${WORKDIR}/${TARBALL}-${PV}
S="${WORKDIR}/${TARBALL}-${PV}"
# @ECLASS-VARIABLE: SRC_URI
# @DESCRIPTION:
# Default value: mirror://gnome/sources/${TARBALL}/${PV_MAJOR}/${TARBALL}-${PV}.tar.bz2
# NOTE: this appends to (does not replace) any SRC_URI already set.
SRC_URI="${SRC_URI}
	mirror://gnome/sources/${TARBALL}/${PV_MAJOR}/${TARBALL}-${PV}.tar.bz2"
+
# @FUNCTION: get_sharp_apis
# @USAGE: <type> <pkgconfig-package>
# @RETURN: .NET API files
# @DESCRIPTION:
# Given a valid pkg-config package, will return a list of API xml files.
# <type> can be either --prefixed or --bare. If prefixed, each API file
# will be prefixed with -I:
get_sharp_apis() {
	(( $# == 2 )) || die "${FUNCNAME} needs two arguments"
	# get_sharp_assemblies inspects FUNCNAME and, when called from here,
	# switches from .dll (pkg-config --libs) to .xml (--cflags) lookup.
	get_sharp_assemblies "$@"
}
+
# @FUNCTION: get_sharp_assemblies
# @USAGE: <type> <pkgconfig-package>
# @RETURN: .NET .dll assemblies
# @DESCRIPTION:
# Given a valid pkg-config package, will return a list of .dll assemblies.
# <type> can be either --prefixed or --bare. If prefixed, each .dll file
# will be prefixed with -r:
get_sharp_assemblies() {
	(( $# == 2 )) || die "${FUNCNAME} needs two arguments"
	local token query=libs tag="-r:"
	local -a results=()

	# When invoked via get_sharp_apis, look up API xml files instead.
	if [[ "${FUNCNAME[1]}" = "get_sharp_apis" ]]; then
		query=cflags
		tag="-I:"
	fi

	# Strip the compiler prefix (-r:/-I:) from each pkg-config token.
	for token in $(pkg-config --${query} ${2} 2> /dev/null); do
		results+=( ${token#-?:} )
	done

	case ${1} in
		--bare)
			;;
		--prefixed)
			local idx
			for idx in "${!results[@]}"; do
				results[idx]="${tag}${results[idx]}"
			done
			;;
		*)
			die "${FUNCNAME}: Unknown parameter"
			;;
	esac
	echo "${results[@]}"
}
+
# @FUNCTION: phase_hook
# @USAGE: <prefix>
# @DESCRIPTION:
# Looks for functions named <prefix>_caller_suffix and executes them.
# _caller_suffix is the calling function with the prefix
# gtk-sharp-module removed.
phase_hook() {
	(( $# == 1 )) || die "${FUNCNAME} needs one argument"
	# FUNCNAME[1] is the caller, e.g. gtk-sharp-module_src_prepare,
	# so the hook becomes <prefix>_src_prepare.
	local hook="${1}${FUNCNAME[1]#gtk-sharp-module}"
	if [[ "$(type -t ${hook})" = "function" ]]
	then
		ebegin "Phase-hook: Running ${hook}"
		${hook}
		eend 0
	fi
}
+
# @FUNCTION: ac_path_prog_override
# @USAGE: <PROG> [path]
# @DESCRIPTION:
# Override AC_PATH_PROG() autoconf macros. Path will be set to " " if
# not specified.
ac_path_prog_override() {
	if (( $# < 1 || $# > 2 ))
	then
		eerror "${FUNCNAME[0]} requires at least one parameter and takes at most two:"
		eerror "AC_PATH_PROG(PARAM1, param2)"
		die "${FUNCNAME[0]} requires at least one parameter and takes at most two:"
	fi
	# configure caches AC_PATH_PROG results in ac_cv_path_<PROG>;
	# pre-seeding the cache variable skips the lookup entirely.
	export ac_cv_path_${1}="${2:- }"
}
+
+
# @FUNCTION: pkg_check_modules_override
# @USAGE: <GROUP> [package1] [package2]
# @DESCRIPTION:
# Will export the appropriate variables to override PKG_CHECK_MODULES autoconf
# macros, with the string " " by default. If packages are specified, they will
# be looked up with pkg-config and the appropriate LIBS and CFLAGS substituted.
# LIBS and CFLAGS can also be specified per-package with the following syntax:
# @CODE
# package=LIBS%CFLAGS
# @CODE
# = and % have no effect unless both are specified.
# Here is an example:
# @CODE
# pkg_check_modules_override GASH "gtk+-2.0=-jule%" gobject-2.0
# @CODE
# The above example will do:
# export GASH_CFLAGS+=" -jule"
# export GASH_LIBS+=" "
# export GASH_CFLAGS+=" $(pkg-config --cflags gobject-2.0)"
# export GASH_LIBS+=" $(pkg-config --libs gobject-2.0)"
#
# NOTE: If a package is not found, the string " " will be inserted in place of
# <GROUP>_CFLAGS and <GROUP>_LIBS
pkg_check_modules_override() {
	local package
	local package_cflag_libs
	local group="${1}"
	local packages="${*:2}"

	# Validate before touching the environment, so that a missing GROUP
	# does not export bogus "_CFLAGS"/"_LIBS" variables.
	if [[ ${#@} -lt 1 ]]
	then
		eerror "${FUNCNAME[0]} requires at least one parameter: GROUP"
		eerror "PKG_CHECK_MODULES(GROUP, package1 package2 etc)"
		die "${FUNCNAME[0]} requires at least one parameter: GROUP"
	fi

	export ${group}_CFLAGS=" "
	export ${group}_LIBS=" "

	for package in $packages
	do
		# "pkg=LIBS%CFLAGS" form: use the literal values, skip pkg-config.
		if [[ ${package/=} != ${package} && ${package/\%} != ${package} ]]
		then
			package_cflag_libs=${package##*=}
			export ${group}_CFLAGS+=" ${package_cflag_libs%%\%*}"
			export ${group}_LIBS+=" ${package_cflag_libs##*\%}"
		else
			if pkg-config --exists $package
			then
				export ${group}_CFLAGS+=" $(pkg-config --cflags $package)"
				export ${group}_LIBS+=" $(pkg-config --libs $package)"
			else
				# Unknown package: keep the harmless " " default.
				export ${group}_CFLAGS+=" "
				export ${group}_LIBS+=" "
			fi
		fi
	done
}
+
# @FUNCTION: gtk-sharp-tarball-post_src_prepare
# @DESCRIPTION:
# Runs a M-m-m-monster sed on GTK_SHARP_MODULE_DIR to convert references to
# local assemblies to the installed ones. Is only called by src_prepare when
# $GTK_SHARP_MODULE is a member of $gtk_sharp_module_list.
gtk-sharp-tarball-post_src_prepare() {
	# EAPI 2 without the prefix USE flag has no EPREFIX; blank it so the
	# tool paths below expand to plain /usr/bin.
	has "${EAPI:-0}" 2 && ! use prefix && EPREFIX=
	# NOTE(review): cd is unchecked -- if the module dir were missing, the
	# sed would run over the wrong tree; presumably the tarball always has it.
	cd "${S}/${GTK_SHARP_MODULE_DIR}"
	# Rewrite every Makefile.in: point in-tree api .xml files and sibling
	# .dll assemblies at the system copies installed by the already-built
	# gtk-sharp packages, and use the system gapi2 tools instead of the
	# ones built in this tree.
	sed -i \
		-e "s; \$(srcdir)/../glib/glib-api.xml; $(get_sharp_apis --bare glib-sharp-2.0);" \
		-e "s; ../pango/pango-api.xml; $(get_sharp_apis --bare pango-sharp-2.0);" \
		-e "s; ../atk/atk-api.xml; $(get_sharp_apis --bare atk-sharp-2.0);" \
		-e "s; ../gdk/gdk-api.xml; $(get_sharp_apis --bare gdk-sharp-2.0);" \
		-e "s; ../gtk/gtk-api.xml; $(get_sharp_apis --bare gtk-sharp-2.0);" \
		-e "s; \.\./glib/glib-sharp.dll; $(get_sharp_assemblies --bare glib-sharp-2.0);g" \
		-e "s; \.\./pango/pango-sharp.dll; $(get_sharp_assemblies --bare pango-sharp-2.0);g" \
		-e "s; \.\./atk/atk-sharp.dll; $(get_sharp_assemblies --bare atk-sharp-2.0);g" \
		-e "s; \.\./gdk/gdk-sharp.dll; $(get_sharp_assemblies --bare gdk-sharp-2.0);g" \
		-e "s; \.\./gtk/gtk-sharp.dll; $(get_sharp_assemblies --bare gtk-sharp-2.0);g" \
		-e "s;\$(RUNTIME) \$(top_builddir)/parser/gapi-fixup.exe;${EPREFIX}/usr/bin/gapi2-fixup;" \
		-e "s;\$(RUNTIME) \$(top_builddir)/generator/gapi_codegen.exe;${EPREFIX}/usr/bin/gapi2-codegen;" \
		-e "s:\$(SYMBOLS) \$(top_builddir)/parser/gapi-fixup.exe:\$(SYMBOLS):" \
		-e "s:\$(INCLUDE_API) \$(top_builddir)/generator/gapi_codegen.exe:\$(INCLUDE_API):" \
		$(find . -name Makefile.in) || die "failed to fix ${TARBALL}-tarball makefiles"
}
+
# @FUNCTION: gnome-sharp-tarball-post_src_prepare
# @DESCRIPTION:
# Runs a M-m-m-monster sed on GTK_SHARP_MODULE_DIR to convert references to
# local assemblies to the installed ones. Is only called by src_prepare when
# $GTK_SHARP_MODULE is a member of $gnome_sharp_module_list.
gnome-sharp-tarball-post_src_prepare() {
	# NOTE(review): cd is unchecked -- assumes the module dir exists.
	cd "${S}/${GTK_SHARP_MODULE_DIR}"
	# Same idea as the gtk-sharp variant: replace in-tree api files and
	# sibling assemblies in every Makefile.in with the installed copies.
	sed -i \
		-e "s; ../gnomevfs/gnome-vfs-api.xml; $(get_sharp_apis --bare gnome-vfs-sharp-2.0);" \
		-e "s; ../art/art-api.xml; $(get_sharp_apis --bare art-sharp-2.0);" \
		-e "s; \.\./art/art-sharp.dll; $(get_sharp_assemblies --bare art-sharp-2.0);g" \
		-e "s; \.\./gnomevfs/gnome-vfs-sharp.dll; $(get_sharp_assemblies --bare gnome-vfs-sharp-2.0);g" \
		-e "s;/r:\$(top_builddir)/art/art-sharp.dll;$(get_sharp_assemblies --prefixed art-sharp-2.0);" \
		-e "s;/r:\$(top_builddir)/gnome/gnome-sharp.dll;$(get_sharp_assemblies --prefixed gnome-sharp-2.0);" \
		$(find . -name Makefile.in) || die "failed to fix ${TARBALL}-tarball makefiles"
}
+
# @FUNCTION: gtk-sharp-module_src_prepare
# @DESCRIPTION:
# Runs autopatch from base.eclass, eautoreconf if EAUTORECONF is set to any
# value.
# Contains a phase_hook, runs very last.
# phase_hook prefix trigger: ${TARBALL}-tarball-post
# Is exported.
gtk-sharp-module_src_prepare() {
	base_src_prepare
# @ECLASS-VARIABLE: EAUTORECONF
# @DESCRIPTION:
# If set, EAUTORECONF will be run during src_prepare.
	[[ ${EAUTORECONF} ]] && eautoreconf
	# Runs e.g. gtk-sharp-tarball-post_src_prepare (the monster seds above).
	phase_hook ${TARBALL}-tarball-post
	elibtoolize
}
+
# @FUNCTION: gtk-sharp-tarball_src_configure
# @DESCRIPTION:
# Sets some environment variables that will allow us to make the dependencies
# for each ebuild be only its own dependencies, without patching configure.
# Is only called by gtk-sharp-module_src_configure when $GTK_SHARP_MODULE
# is a member of $gtk_sharp_module_list.
gtk-sharp-tarball_src_configure() {
	# Pre-seed each PKG_CHECK_MODULES(GROUP, pkg) result the configure
	# script would otherwise probe for.
	local entry
	for entry in \
		GLIB:gobject-2.0 \
		GIO:gio-2.0 \
		PANGO:pango \
		ATK:atk \
		GTK:gtk+-2.0 \
		GLADE:libglade-2.0
	do
		pkg_check_modules_override "${entry%%:*}" "${entry#*:}"
	done
}
+
# @FUNCTION: gnome-sharp-tarball_src_configure
# @DESCRIPTION:
# Sets some environment variables that will allow us to make the dependencies
# for each ebuild be only its own dependencies. Without patching configure.
# Is only called by gtk-sharp-module_src_configure when $GTK_SHARP_MODULE
# is a member of $gnome_sharp_module_list.
gnome-sharp-tarball_src_configure() {
	# EAPI 2 without the prefix USE flag has no EPREFIX; blank it so the
	# gapi2 tool paths below point at /usr/bin.
	has "${EAPI:-0}" 2 && ! use prefix && EPREFIX=
	pkg_check_modules_override GLADESHARP glade-sharp-2.0
	pkg_check_modules_override GAPI gapi-2.0
	# Force configure to use the system gapi2 tools instead of probing.
	ac_path_prog_override GAPI_PARSER "${EPREFIX}"/usr/bin/gapi2-parser
	ac_path_prog_override GAPI_CODEGEN "${EPREFIX}"/usr/bin/gapi2-codegen
	ac_path_prog_override GAPI_FIXUP "${EPREFIX}"/usr/bin/gapi2-fixup
}
+
# @FUNCTION: gtk-sharp-module_src_configure
# @USAGE: [econf-arguments]
# @DESCRIPTION:
# Calls econf with some default values.
# Contains a phase_hook, run before econf.
# phase_hook prefix trigger: ${TARBALL}-tarball
# Is exported.
gtk-sharp-module_src_configure() {
	# Gives gtk-sharp-tarball_src_configure / gnome-sharp-tarball_src_configure
	# a chance to export their PKG_CHECK_MODULES/AC_PATH_PROG overrides.
	phase_hook ${TARBALL}-tarball
	# NOTE(review): ${@} is deliberately unquoted -- callers of this era may
	# pass whole flag strings relying on word splitting; confirm before
	# tightening the quoting.
	econf --disable-static \
		--disable-dependency-tracking \
		--disable-maintainer-mode \
		$(use debug &&echo "--enable-debug" ) \
		${@} || die "econf failed"
}
+
# @FUNCTION: gtk-sharp-module_src_compile
# @DESCRIPTION:
# Calls emake in the subdir of the module.
# Sets CSC=/usr/bin/gmcs. Deletes top_srcdir Makefiles to prevent recursing in
# case we missed some dll references.
# Is exported.
gtk-sharp-module_src_compile() {
	# Drop the top-level makefiles so a missed ../foo.dll reference cannot
	# trigger a recursive build of sibling modules.
	rm -f "${S}"/Makefile* &> /dev/null
	# Abort if the module dir is missing instead of running emake in the
	# wrong directory (the cd was previously unchecked).
	cd "${S}/${GTK_SHARP_MODULE_DIR}" || die "cd to ${GTK_SHARP_MODULE_DIR} failed"
	emake CSC=/usr/bin/gmcs || die "emake failed"
}
+
# @FUNCTION: gtk-sharp-module_src_install
# @DESCRIPTION:
# Installs the module. Fixes up lib paths so they're multilib-safe.
# Gets rid of .la files.
# Is exported.
gtk-sharp-module_src_install() {
	# Abort if the module dir is missing (the cd was previously unchecked).
	cd "${S}/${GTK_SHARP_MODULE_DIR}" || die "cd to ${GTK_SHARP_MODULE_DIR} failed"
	emake DESTDIR="${D}" install || die "emake install failed"
	mono_multilib_comply
	find "${D}" -type f -name '*.la' -exec rm -rf '{}' '+' || die "la removal failed"
	# Sanity check: a successful install produces at least a few files.
	[[ $(find "${D}" -type f | wc -l) -lt 3 ]] && die "Too few files. This smells like a failed install."
	# Previously the function returned the (failed) status of the sanity
	# check above on the success path; return success explicitly.
	return 0
}
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install
diff --git a/eclass/haskell-cabal.eclass b/eclass/haskell-cabal.eclass
new file mode 100644
index 000000000000..47a9e1631a93
--- /dev/null
+++ b/eclass/haskell-cabal.eclass
@@ -0,0 +1,774 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: haskell-cabal.eclass
+# @MAINTAINER:
+# Haskell herd <haskell@gentoo.org>
+# @AUTHOR:
+# Original author: Andres Loeh <kosmikus@gentoo.org>
+# Original author: Duncan Coutts <dcoutts@gentoo.org>
+# @BLURB: for packages that make use of the Haskell Common Architecture for Building Applications and Libraries (cabal)
+# @DESCRIPTION:
+# Basic instructions:
+#
+# Before inheriting the eclass, set CABAL_FEATURES to
+# reflect the tools and features that the package makes
+# use of.
+#
+# Currently supported features:
+# haddock -- for documentation generation
+# hscolour -- generation of colourised sources
+# hoogle -- generation of documentation search index
+# alex -- lexer/scanner generator
+# happy -- parser generator
+# c2hs -- C interface generator
+# cpphs -- C preprocessor clone written in Haskell
+# profile -- if package supports to build profiling-enabled libraries
+# bootstrap -- only used for the cabal package itself
+# bin -- the package installs binaries
+# lib -- the package installs libraries
+# nocabaldep -- don't add dependency on cabal.
+# only used for packages that _must_ not pull the dependency
+# on cabal, but still use this eclass (e.g. haskell-updater).
+# ghcdeps -- constraint dependency on package to ghc onces
+# only used for packages that use libghc internally and _must_
+# not pull upper versions
+# test-suite -- add support for cabal test-suites (introduced in Cabal-1.8)
+
+inherit eutils ghc-package multilib multiprocessing
+
+# @ECLASS-VARIABLE: CABAL_EXTRA_CONFIGURE_FLAGS
+# @DESCRIPTION:
+# User-specified additional parameters passed to 'setup configure'.
+# example: /etc/portage/make.conf:
+# CABAL_EXTRA_CONFIGURE_FLAGS="--enable-shared --enable-executable-dynamic"
+: ${CABAL_EXTRA_CONFIGURE_FLAGS:=}
+
+# @ECLASS-VARIABLE: CABAL_EXTRA_BUILD_FLAGS
+# @DESCRIPTION:
+# User-specified additional parameters passed to 'setup build'.
+# example: /etc/portage/make.conf: CABAL_EXTRA_BUILD_FLAGS=-v
+: ${CABAL_EXTRA_BUILD_FLAGS:=}
+
+# @ECLASS-VARIABLE: GHC_BOOTSTRAP_FLAGS
+# @DESCRIPTION:
+# User-specified additional parameters for ghc when building
+# _only_ 'setup' binary bootstrap.
+# example: /etc/portage/make.conf: GHC_BOOTSTRAP_FLAGS=-dynamic to make
+# linking 'setup' faster.
+: ${GHC_BOOTSTRAP_FLAGS:=}
+
+# @ECLASS-VARIABLE: CABAL_DEBUG_LOOSENING
+# @DESCRIPTION:
+# Show debug output for 'cabal_chdeps' function if set.
+# Needs working 'diff'.
+: ${CABAL_DEBUG_LOOSENING:=}
+
+HASKELL_CABAL_EXPF="pkg_setup src_compile src_test src_install pkg_postinst pkg_postrm"
+
+# 'dev-haskell/cabal' passes those options with ./configure-based
+# configuration, but most packages don't need/don't accept it:
+# #515362, #515362
+QA_CONFIGURE_OPTIONS+=" --with-compiler --with-hc --with-hc-pkg --with-gcc"
+
+case "${EAPI:-0}" in
+ 2|3|4|5) HASKELL_CABAL_EXPF+=" src_configure" ;;
+ *) ;;
+esac
+
+EXPORT_FUNCTIONS ${HASKELL_CABAL_EXPF}
+
# Translate the ebuild-supplied CABAL_FEATURES list into CABAL_USE_* /
# CABAL_* switch variables; unrecognized entries accumulate in
# CABAL_UNKNOWN and are reported in pkg_setup.
# NOTE(review): 'feature' is not declared local here (file scope), so it
# leaks into the ebuild environment.
for feature in ${CABAL_FEATURES}; do
	case ${feature} in
		haddock)    CABAL_USE_HADDOCK=yes;;
		hscolour)   CABAL_USE_HSCOLOUR=yes;;
		hoogle)     CABAL_USE_HOOGLE=yes;;
		alex)       CABAL_USE_ALEX=yes;;
		happy)      CABAL_USE_HAPPY=yes;;
		c2hs)       CABAL_USE_C2HS=yes;;
		cpphs)      CABAL_USE_CPPHS=yes;;
		profile)    CABAL_USE_PROFILE=yes;;
		bootstrap)  CABAL_BOOTSTRAP=yes;;
		bin)        CABAL_HAS_BINARIES=yes;;
		lib)        CABAL_HAS_LIBRARIES=yes;;
		nocabaldep) CABAL_FROM_GHC=yes;;
		ghcdeps)    CABAL_GHC_CONSTRAINT=yes;;
		test-suite) CABAL_TEST_SUITE=yes;;
		*) CABAL_UNKNOWN="${CABAL_UNKNOWN} ${feature}";;
	esac
done
+
# Per-feature IUSE/DEPEND wiring, driven by the switches set above.
if [[ -n "${CABAL_USE_HADDOCK}" ]]; then
	IUSE="${IUSE} doc"
	# don't require depend on itself to build docs.
	# ebuild bootstraps docs from just built binary
	[[ ${CATEGORY}/${PN} = "dev-haskell/haddock" ]] || DEPEND="${DEPEND} doc? ( dev-haskell/haddock )"
fi

if [[ -n "${CABAL_USE_HSCOLOUR}" ]]; then
	IUSE="${IUSE} hscolour"
	DEPEND="${DEPEND} hscolour? ( dev-haskell/hscolour )"
fi

if [[ -n "${CABAL_USE_HOOGLE}" ]]; then
	# enabled only in ::haskell
	CABAL_USE_HOOGLE=
fi

# Build-tool features add unconditional build-time dependencies.
if [[ -n "${CABAL_USE_ALEX}" ]]; then
	DEPEND="${DEPEND} dev-haskell/alex"
fi

if [[ -n "${CABAL_USE_HAPPY}" ]]; then
	DEPEND="${DEPEND} dev-haskell/happy"
fi

if [[ -n "${CABAL_USE_C2HS}" ]]; then
	DEPEND="${DEPEND} dev-haskell/c2hs"
fi

if [[ -n "${CABAL_USE_CPPHS}" ]]; then
	DEPEND="${DEPEND} dev-haskell/cpphs"
fi

if [[ -n "${CABAL_USE_PROFILE}" ]]; then
	IUSE="${IUSE} profile"
fi

if [[ -n "${CABAL_TEST_SUITE}" ]]; then
	IUSE="${IUSE} test"
fi

# We always use a standalone version of Cabal, rather than the one that comes
# with GHC. But of course we can't depend on cabal when building cabal itself.
if [[ -z ${CABAL_MIN_VERSION} ]]; then
	CABAL_MIN_VERSION=1.1.4
fi
if [[ -z "${CABAL_BOOTSTRAP}" && -z "${CABAL_FROM_GHC}" ]]; then
	DEPEND="${DEPEND} >=dev-haskell/cabal-${CABAL_MIN_VERSION}"
fi
+
# returns the version of cabal currently in use.
# Rarely it's handy to pin cabal version from outside.
: ${_CABAL_VERSION_CACHE:=""}
cabal-version() {
	# Resolve once, then serve every later call from the cache.
	if [[ -z "${_CABAL_VERSION_CACHE}" ]]; then
		if [[ -n "${CABAL_BOOTSTRAP}" ]]; then
			# Bootstrapping cabal itself: its version is our own ${PV}.
			_CABAL_VERSION_CACHE="${PV}"
		elif [[ -n "${CABAL_FROM_GHC}" ]]; then
			# Use the Cabal library bundled with the active ghc.
			_CABAL_VERSION_CACHE="$(ghc-cabal-version)"
		else
			# Ask portage (not ghc) so that only portage-installed
			# cabal versions are picked up.
			_CABAL_VERSION_CACHE="$(ghc-extractportageversion dev-haskell/cabal)"
		fi
	fi
	echo "${_CABAL_VERSION_CACHE}"
}
+
# Compiles the package's Setup.(l)hs into a ./setup driver binary using the
# installed Cabal library; extra arguments are passed through to ghc.
# Dies when no Setup module exists or dev-haskell/cabal is broken for the
# active ghc.
cabal-bootstrap() {
	local setupmodule
	local cabalpackage
	local setup_bootstrap_args=()

	if [[ -f "${S}/Setup.lhs" ]]; then
		setupmodule="${S}/Setup.lhs"
	elif [[ -f "${S}/Setup.hs" ]]; then
		setupmodule="${S}/Setup.hs"
	else
		die "No Setup.lhs or Setup.hs found"
	fi

	if [[ -z "${CABAL_BOOTSTRAP}" && -z "${CABAL_FROM_GHC}" ]] && ! ghc-sanecabal "${CABAL_MIN_VERSION}"; then
		eerror "The package dev-haskell/cabal is not correctly installed for"
		eerror "the currently active version of ghc ($(ghc-version)). Please"
		eerror "run haskell-updater or re-build dev-haskell/cabal."
		die "cabal is not correctly installed"
	fi

	# We build the setup program using the latest version of
	# cabal that we have installed
	cabalpackage=Cabal-$(cabal-version)
	einfo "Using cabal-$(cabal-version)."

	# NOTE: $(ghc-supports-*) expands to nothing; the construct is used
	# only for the helper's exit status.
	if $(ghc-supports-threaded-runtime); then
		# Cabal has a bug that deadlocks non-threaded RTS:
		# https://bugs.gentoo.org/537500
		# https://github.com/haskell/cabal/issues/2398
		setup_bootstrap_args+=(-threaded)
	fi

	# Helper: assemble the full ghc command line (appending caller args)
	# and run it, producing ./setup.
	make_setup() {
		set -- -package "${cabalpackage}" --make "${setupmodule}" \
			"${setup_bootstrap_args[@]}" \
			${HCFLAGS} \
			${GHC_BOOTSTRAP_FLAGS} \
			"$@" \
			-o setup
		echo $(ghc-getghc) "$@"
		$(ghc-getghc) "$@"
	}
	if $(ghc-supports-shared-libraries); then
		# # some custom build systems might use external libraries,
		# # for which we don't have shared libs, so keep static fallback
		# bug #411789, http://hackage.haskell.org/trac/ghc/ticket/5743#comment:3
		# http://hackage.haskell.org/trac/ghc/ticket/7062
		# http://hackage.haskell.org/trac/ghc/ticket/3072
		# ghc does not set RPATH for extralibs, thus we do it ourselves by hands
		einfo "Prepending $(ghc-libdir) to LD_LIBRARY_PATH"
		if [[ ${CHOST} != *-darwin* ]]; then
			LD_LIBRARY_PATH="$(ghc-libdir)${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
			export LD_LIBRARY_PATH
		else
			DYLD_LIBRARY_PATH="$(ghc-libdir)${DYLD_LIBRARY_PATH:+:}${DYLD_LIBRARY_PATH}"
			export DYLD_LIBRARY_PATH
		fi
		# Try a dynamic setup first and verify it actually runs;
		# fall back to a static build if either step fails.
		{ make_setup -dynamic "$@" && ./setup --help >/dev/null; } ||
		make_setup "$@" || die "compiling ${setupmodule} failed"
	else
		make_setup "$@" || die "compiling ${setupmodule} failed"
	fi
}
+
# Replaces any custom Setup.(l)hs in <dir> (default ${S}) with the stock
# 'Simple' build-type driver.
cabal-mksetup() {
	local target_dir=${1:-${S}}
	local setup_hs=${target_dir}/Setup.hs

	rm -vf "${target_dir}"/Setup.{lhs,hs}
	elog "Creating 'Setup.hs' for 'Simple' build type."

	printf '%s\n' 'import Distribution.Simple; main = defaultMainWithHooks defaultUserHooks' \
		> "${setup_hs}" || die "failed to create default Setup.hs"
}
+
# Runs './setup hscolour' (echoing the command first) to colourise sources.
cabal-hscolour() {
	echo ./setup hscolour "$@"
	./setup hscolour "$@" || die "setup hscolour failed"
}
+
# Runs './setup haddock' (echoing the command first) to build API docs.
cabal-haddock() {
	echo ./setup haddock "$@"
	./setup haddock "$@" || die "setup haddock failed"
}
+
# hoogle without doc: nothing to index, so only warn.
cabal-hoogle() {
	ewarn "hoogle USE flag requires doc USE flag, building without hoogle"
}
+
# Runs haddock with hyperlinked (hscoloured) sources.
cabal-hscolour-haddock() {
	# --hyperlink-source implies calling 'setup hscolour'
	set -- haddock --hyperlink-source
	echo ./setup "$@"
	# Previously the flag was appended a second time on the actual
	# invocation ('"$@" --hyperlink-source'), so the executed command
	# diverged from the echoed one; pass "$@" alone.
	./setup "$@" || die "setup haddock --hyperlink-source failed"
}
+
# Runs haddock with hoogle index generation enabled.
cabal-hoogle-haddock() {
	echo ./setup haddock --hoogle
	./setup haddock --hoogle || die "setup haddock --hoogle failed"
}
+
# All three doc features: hyperlinked sources plus a hoogle index
# (two haddock invocations, one per feature set).
cabal-hoogle-hscolour-haddock() {
	cabal-hscolour-haddock
	cabal-hoogle-haddock
}
+
# hoogle+hscolour without doc: hoogle needs haddock, so warn and only
# run hscolour.
cabal-hoogle-hscolour() {
	ewarn "hoogle USE flag requires doc USE flag, building without hoogle"
	cabal-hscolour
}
+
# Dies, pointing the user at haskell-updater, when any package names are
# passed after the breakage-type label; no-op otherwise.
cabal-die-if-nonempty() {
	local breakage_kind=$1
	shift

	(( $# == 0 )) && return 0
	eerror "Detected ${breakage_kind} packages: ${@}"
	die "//==-- Please, run 'haskell-updater' to fix ${breakage_kind} packages --==//"
}
+
# Pretty-prints up to 20 lines of 'ghc-pkg check' output (minus known-noise
# haddock/include-dirs warnings), then dies via cabal-die-if-nonempty when
# broken packages are present.
cabal-show-brokens() {
	elog "ghc-pkg check: 'checking for other broken packages:'"
	# pretty-printer ('egrep' is deprecated; use 'grep -E')
	$(ghc-getghcpkg) check 2>&1 \
		| grep -E -v '^Warning: haddock-(html|interfaces): ' \
		| grep -E -v '^Warning: include-dirs: ' \
		| head -n 20

	cabal-die-if-nonempty 'broken' \
		$($(ghc-getghcpkg) check --simple-output)
}
+
# Dies via cabal-die-if-nonempty when haskell-updater reports outdated
# packages.
# NOTE(review): hardcodes /usr/sbin/haskell-updater -- confirm the install
# location if haskell-updater ever moves to /usr/bin.
cabal-show-old() {
	cabal-die-if-nonempty 'outdated' \
		$("${EPREFIX}"/usr/sbin/haskell-updater --quiet --upgrade --list-only)
}
+
# Diagnostic wrapper: report broken, then outdated packages (either may
# die first with a haskell-updater hint), then die with the given message.
cabal-show-brokens-and-die() {
	cabal-show-brokens
	cabal-show-old

	die "$@"
}
+
# Assembles the full './setup configure' command line (tool paths, doc/
# profiling/test options, ghc flags from HCFLAGS/CFLAGS/LDFLAGS, install
# dirs) and runs it; extra arguments are appended. Dies with diagnostics
# on failure.
cabal-configure() {
	local cabalconf=()
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=

	if [[ -n "${CABAL_USE_HADDOCK}" ]] && use doc; then
		# We use the bundled with GHC version if exists
		# Haddock is very picky about index files
		# it generates for ghc's base and other packages.
		local p=${EPREFIX}/usr/bin/haddock-ghc-$(ghc-version)
		if [[ -f $p ]]; then
			cabalconf+=(--with-haddock="${p}")
		else
			cabalconf+=(--with-haddock=${EPREFIX}/usr/bin/haddock)
		fi
	fi
	if [[ -n "${CABAL_USE_PROFILE}" ]] && use profile; then
		cabalconf+=(--enable-library-profiling)
	fi
	if [[ -n "${CABAL_USE_ALEX}" ]]; then
		cabalconf+=(--with-alex=${EPREFIX}/usr/bin/alex)
	fi

	if [[ -n "${CABAL_USE_HAPPY}" ]]; then
		cabalconf+=(--with-happy=${EPREFIX}/usr/bin/happy)
	fi

	if [[ -n "${CABAL_USE_C2HS}" ]]; then
		cabalconf+=(--with-c2hs=${EPREFIX}/usr/bin/c2hs)
	fi
	if [[ -n "${CABAL_USE_CPPHS}" ]]; then
		cabalconf+=(--with-cpphs=${EPREFIX}/usr/bin/cpphs)
	fi
	if [[ -n "${CABAL_TEST_SUITE}" ]]; then
		cabalconf+=($(use_enable test tests))
	fi

	if [[ -n "${CABAL_GHC_CONSTRAINT}" ]]; then
		cabalconf+=($(cabal-constraint "ghc"))
	fi

	local option
	for option in ${HCFLAGS}
	do
		cabalconf+=(--ghc-option="$option")
	done

	# parallel on all available cores
	if ghc-supports-parallel-make; then
		local max_jobs=$(makeopts_jobs)

		# limit to very small value, as parallelism
		# helps slightly, but makes things severely worse
		# when amount of threads is Very Large.
		[[ ${max_jobs} -gt 4 ]] && max_jobs=4

		cabalconf+=(--ghc-option=-j"$max_jobs")
	fi

	# Building GHCi libs on ppc64 causes "TOC overflow".
	if use ppc64; then
		cabalconf+=(--disable-library-for-ghci)
	fi

	# currently cabal does not respect CFLAGS and LDFLAGS on it's own (bug #333217)
	# so translate LDFLAGS to ghc parameters (without filtering)
	local flag
	for flag in $CFLAGS; do cabalconf+=(--ghc-option="-optc$flag"); done
	for flag in $LDFLAGS; do cabalconf+=(--ghc-option="-optl$flag"); done

	# disable executable stripping for the executables, as portage will
	# strip by itself, and pre-stripping gives a QA warning.
	# cabal versions previous to 1.4 does not strip executables, and does
	# not accept the flag.
	# this fixes numerous bugs, amongst them;
	# bug #251881, bug #251882, bug #251884, bug #251886, bug #299494
	cabalconf+=(--disable-executable-stripping)

	cabalconf+=(--docdir="${EPREFIX}"/usr/share/doc/${PF})
	# As of Cabal 1.2, configure is quite quiet. For diagnostic purposes
	# it's better if the configure chatter is in the build logs:
	cabalconf+=(--verbose)

	# We build shared version of our Cabal where ghc ships it's shared
	# version of it. We will link ./setup as dynamic binary againt Cabal later.
	[[ ${CATEGORY}/${PN} == "dev-haskell/cabal" ]] && \
		$(ghc-supports-shared-libraries) && \
			cabalconf+=(--enable-shared)

	if $(ghc-supports-shared-libraries); then
		# Experimental support for dynamically linked binaries.
		# We are enabling it since 7.10.1_rc3
		if version_is_at_least "7.10.0.20150316" "$(ghc-version)"; then
			# we didn't enable it before as it was not stable on all arches
			cabalconf+=(--enable-shared)
			# Known to break on ghc-7.8/Cabal-1.18
			# https://ghc.haskell.org/trac/ghc/ticket/9625
			cabalconf+=(--enable-executable-dynamic)
		fi
	fi

	# --sysconfdir appeared in Cabal-1.18+
	if ./setup configure --help | grep -q -- --sysconfdir; then
		cabalconf+=(--sysconfdir="${EPREFIX}"/etc)
	fi

	# appeared in Cabal-1.18+ (see '--disable-executable-stripping')
	if ./setup configure --help | grep -q -- --disable-library-stripping; then
		cabalconf+=(--disable-library-stripping)
	fi

	# NOTE: --prefix used to be passed twice with the same value; the
	# duplicate has been dropped.
	set -- configure \
		--ghc --prefix="${EPREFIX}"/usr \
		--with-compiler="$(ghc-getghc)" \
		--with-hc-pkg="$(ghc-getghcpkg)" \
		--libdir="${EPREFIX}"/usr/$(get_libdir) \
		--libsubdir=${P}/ghc-$(ghc-version) \
		--datadir="${EPREFIX}"/usr/share/ \
		--datasubdir=${P}/ghc-$(ghc-version) \
		"${cabalconf[@]}" \
		${CABAL_CONFIGURE_FLAGS} \
		${CABAL_EXTRA_CONFIGURE_FLAGS} \
		"$@"
	echo ./setup "$@"
	./setup "$@" || cabal-show-brokens-and-die "setup configure failed"
}
+
# Runs './setup build' with any user-configured extra flags; dies on failure.
cabal-build() {
	# CABAL_EXTRA_BUILD_FLAGS is user-supplied and deliberately word-split.
	local -a build_cmd=( build ${CABAL_EXTRA_BUILD_FLAGS} "$@" )
	echo ./setup "${build_cmd[@]}"
	./setup "${build_cmd[@]}" \
		|| die "setup build failed"
}
+
# Runs './setup copy' to stage the built files into ${D}; dies on failure.
cabal-copy() {
	# EAPIs 0-2 without the prefix USE flag have no ED; fall back to D.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && ED=${D}

	set -- copy --destdir="${D}" "$@"
	echo ./setup "$@"
	./setup "$@" || die "setup copy failed"

	# cabal is a bit eager about creating dirs,
	# so remove them if they are empty
	rmdir "${ED}/usr/bin" 2> /dev/null
}
+
# Generates the library's ghc-pkg conf file via './setup register' and hands
# it to ghc-install-pkg. No-op for packages without libraries.
cabal-pkg() {
	# This does not actually register since we're using true instead
	# of ghc-pkg. So it just leaves the .conf file and we can
	# register that ourselves (if it exists).

	if [[ -n ${CABAL_HAS_LIBRARIES} ]]; then
		# Newer cabal can generate a package conf for us.
		# Fail loudly here rather than letting ghc-install-pkg act on
		# a conf file that was never written.
		./setup register --gen-pkg-config="${T}/${P}.conf" || die "setup register failed"
		ghc-install-pkg "${T}/${P}.conf"
	fi
}
+
# Some cabal libs are bundled along with some versions of ghc
# eg filepath-1.0 comes with ghc-6.6.1
# by putting CABAL_CORE_LIB_GHC_PV="6.6.1" in an ebuild we are declaring that
# when building with this version of ghc, the ebuild is a dummy that is it will
# install no files since the package is already included with ghc.
# However portage still records the dependency and we can upgrade the package
# to a later one that's not included with ghc.
# You can also put a space separated list, eg CABAL_CORE_LIB_GHC_PV="6.6 6.6.1".
# Those versions are taken as-is from ghc `--numeric-version`.
# Package manager versions are also supported:
# CABAL_CORE_LIB_GHC_PV="7.10.* PM:7.8.4-r1".
cabal-is-dummy-lib() {
	local bin_ghc_version=$(ghc-version)
	local pm_ghc_version=$(ghc-pm-version)
	# 'version' was previously undeclared and leaked into global scope.
	local version

	# Entries are glob patterns; the RHS is intentionally unquoted.
	for version in ${CABAL_CORE_LIB_GHC_PV}; do
		[[ "${bin_ghc_version}" == ${version} ]] && return 0
		[[ "${pm_ghc_version}" == ${version} ]] && return 0
	done

	return 1
}
+
# exported function: check if cabal is correctly installed for
# the currently active ghc (we cannot guarantee this with portage)
# Also emits QA notices for inconsistent CABAL_FEATURES declarations.
haskell-cabal_pkg_setup() {
	if [[ -n ${CABAL_HAS_LIBRARIES} ]]; then
		# Libraries are loaded by ghc at runtime, so ghc belongs in RDEPEND.
		[[ ${RDEPEND} == *dev-lang/ghc* ]] || eqawarn "QA Notice: A library does not have runtime dependency on dev-lang/ghc."
	fi
	if [[ -z "${CABAL_HAS_BINARIES}" ]] && [[ -z "${CABAL_HAS_LIBRARIES}" ]]; then
		eqawarn "QA Notice: Neither bin nor lib are in CABAL_FEATURES."
	fi
	if [[ -n "${CABAL_UNKNOWN}" ]]; then
		eqawarn "QA Notice: Unknown entry in CABAL_FEATURES: ${CABAL_UNKNOWN}"
	fi
	if cabal-is-dummy-lib; then
		einfo "${P} is included in ghc-${CABAL_CORE_LIB_GHC_PV}, nothing to install."
	fi
}
+
# Exported src_configure (EAPI 2+): bootstrap ./setup, then run
# 'setup configure' with any extra arguments. No-op for dummy libs.
haskell-cabal_src_configure() {
	cabal-is-dummy-lib && return

	pushd "${S}" > /dev/null

	cabal-bootstrap

	cabal-configure "$@"

	popd > /dev/null
}
+
# exported function: nice alias
cabal_src_configure() {
	haskell-cabal_src_configure "$@"
}
+
# exported function: cabal-style bootstrap configure and compile
cabal_src_compile() {
	# it's a common mistake when one bumps ebuild to EAPI="2" (and upper)
	# and forgets to separate src_compile() to src_configure()/src_compile().
	# Such error leads to default src_configure and we lose all passed flags.
	if ! has "${EAPI:-0}" 0 1; then
		local opt
		for opt in "$@"; do
			[[ ${opt} == --flags=* ]] && \
				eqawarn "QA Notice: Cabal option '${opt}' has effect only in src_configure()"
		done
	fi

	cabal-is-dummy-lib && return

	# Old EAPIs have no separate src_configure phase; run it from here.
	has src_configure ${HASKELL_CABAL_EXPF} || haskell-cabal_src_configure "$@"
	cabal-build

	# Build the documentation helper name from the enabled doc features:
	# a cabal-* helper exists for every combination, joined by '-'
	# (e.g. cabal-hoogle-hscolour-haddock).
	local -a doc_steps=()
	[[ -n "${CABAL_USE_HOOGLE}" ]] && use hoogle && doc_steps+=( hoogle )
	[[ -n "${CABAL_USE_HSCOLOUR}" ]] && use hscolour && doc_steps+=( hscolour )
	[[ -n "${CABAL_USE_HADDOCK}" ]] && use doc && doc_steps+=( haddock )

	if (( ${#doc_steps[@]} > 0 )); then
		local helper_name
		helper_name=$(IFS='-'; echo "cabal-${doc_steps[*]}")
		${helper_name}
	fi
}
+
# Exported src_compile: run the cabal-style compile from ${S}.
haskell-cabal_src_compile() {
	pushd "${S}" > /dev/null

	cabal_src_compile "$@"

	popd > /dev/null
}
+
# Exported src_test: run './setup test' (Cabal-1.8+ test-suites) from ${S};
# skipped for dummy libs.
haskell-cabal_src_test() {
	pushd "${S}" > /dev/null

	if cabal-is-dummy-lib; then
		einfo ">>> No tests for dummy library: ${CATEGORY}/${PF}"
	else
		einfo ">>> Test phase [cabal test]: ${CATEGORY}/${PF}"
		set -- test "$@"
		echo ./setup "$@"
		./setup "$@" || die "cabal test failed"
	fi

	popd > /dev/null
}
+
# exported function: cabal-style copy and register
cabal_src_install() {
	# EAPIs 0-2 without the prefix USE flag have no EPREFIX variable.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=

	if ! cabal-is-dummy-lib; then
		cabal-copy
		cabal-pkg
	fi

	# create a dummy local package conf file for haskell-updater
	# if it does not exist (dummy libraries and binaries w/o libraries)
	local ghc_confdir_with_prefix="$(ghc-confdir)"
	# remove EPREFIX
	dodir ${ghc_confdir_with_prefix#${EPREFIX}}
	local hint_db="${D}/$(ghc-confdir)"
	local hint_file="${hint_db}/${PF}.conf"
	mkdir -p "${hint_db}" || die
	touch "${hint_file}" || die
}
+
# Exported src_install: run the cabal-style install from ${S}.
haskell-cabal_src_install() {
	pushd "${S}" > /dev/null

	cabal_src_install

	popd > /dev/null
}
+
# Exported pkg_postinst: delegate ghc package-db refresh to ghc-package.eclass.
haskell-cabal_pkg_postinst() {
	ghc-package_pkg_postinst
}
+
# Exported pkg_postrm: delegate ghc package-db refresh to ghc-package.eclass.
haskell-cabal_pkg_postrm() {
	ghc-package_pkg_postrm
}
+
# @FUNCTION: cabal_flag
# @DESCRIPTION:
# ebuild.sh:use_enable() taken as base
#
# Usage examples:
#
# CABAL_CONFIGURE_FLAGS=$(cabal_flag gui)
# leads to "--flags=gui" or "--flags=-gui" (useflag 'gui')
#
# CABAL_CONFIGURE_FLAGS=$(cabal_flag gtk gui)
# also leads to "--flags=gui" or " --flags=-gui" (useflag 'gtk')
#
cabal_flag() {
	if [[ -z "$1" ]]; then
		echo "!!! cabal_flag() called without a parameter." >&2
		echo "!!! cabal_flag() <USEFLAG> [<cabal_flagname>]" >&2
		return 1
	fi

	# Cabal flag name defaults to the USE flag name.
	local cabal_name=${2:-$1}
	local sign=""

	use "$1" || sign="-"
	echo "--flags=${sign}${cabal_name}"

	return 0
}
+
# @FUNCTION: cabal_chdeps
# @DESCRIPTION:
# Allows easier patching of $CABAL_FILE (${S}/${PN}.cabal by default)
# depends
#
# Accepts argument list as pairs of substitutions: <from-string> <to-string>...
#
# Dies on error.
#
# Usage examples:
#
# src_prepare() {
#    cabal_chdeps \
#        'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7' \
#        'containers ==0.4.*' 'containers >= 0.4 && < 0.6'
#}
# or
# src_prepare() {
#    CABAL_FILE=${S}/${MY_PN}.cabal cabal_chdeps \
#        'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7'
#    CABAL_FILE=${S}/${MY_PN}-tools.cabal cabal_chdeps \
#        'base == 3.*' 'base >= 4.2 && < 4.7'
#}
#
cabal_chdeps() {
	local cabal_fn=${MY_PN:-${PN}}.cabal
	local cf=${CABAL_FILE:-${S}/${cabal_fn}}
	# from_pat/to_str were previously assigned without 'local' and leaked
	# into global scope.
	local from_pat # pattern to replace
	local to_str   # replacement string
	local orig_c # c - contents
	local new_c

	[[ -f $cf ]] || die "cabal file '$cf' does not exist"

	orig_c=$(< "$cf")

	while :; do
		from_pat=$1
		to_str=$2

		[[ -n ${from_pat} ]] || break
		# BUGFIX: the message used ${from_str}, which was never set.
		[[ -n ${to_str} ]] || die "'${from_pat}' does not have 'to' part"

		einfo "CHDEP: '${from_pat}' -> '${to_str}'"

		# escape pattern-like symbols
		from_pat=${from_pat//\*/\\*}
		from_pat=${from_pat//\[/\\[}

		new_c=${orig_c//${from_pat}/${to_str}}

		if [[ -n $CABAL_DEBUG_LOOSENING ]]; then
			echo "${orig_c}" >"${T}/${cf}".pre
			echo "${new_c}" >"${T}/${cf}".post
			diff -u "${T}/${cf}".{pre,post}
		fi

		[[ "${orig_c}" == "${new_c}" ]] && die "no trigger for '${from_pat}'"
		orig_c=${new_c}
		shift
		shift
	done

	# orig_c holds the fully-substituted contents -- and the untouched
	# original when no pairs were given; writing it back (instead of the
	# possibly-unset new_c) can never truncate the cabal file.
	echo "${orig_c}" > "$cf" ||
		die "failed to update"
}
+
# @FUNCTION: cabal-constraint
# @DESCRIPTION:
# Allows to set constraint to the libraries that are
# used by specified package
cabal-constraint() {
	# Reads "name version" pairs and emits one --constraint per pair.
	# NOTE(review): the redirection treats ghc-pkgdeps output as a *file
	# path* to read from -- confirm ghc-pkgdeps prints a path and not the
	# pairs themselves (otherwise this needs: done < <(ghc-pkgdeps ${1})).
	while read p v ; do
		echo "--constraint \"$p == $v\""
	done < $(ghc-pkgdeps ${1})
}
+
# @FUNCTION: replace-hcflags
# @USAGE: <old> <new>
# @DESCRIPTION:
# Replace the <old> flag with <new> in HCFLAGS. Accepts shell globs for <old>.
# The implementation is picked from flag-o-matic.eclass:replace-flags()
replace-hcflags() {
	(( $# == 2 )) || die "Usage: replace-hcflags <old flag> <new flag>"

	local current
	local -a rebuilt=()
	for current in ${HCFLAGS} ; do
		# Unquoted RHS so globs like -O* match as patterns.
		if [[ ${current} == ${1} ]]; then
			einfo "HCFLAGS: replacing '${current}' to '${2}'"
			current=${2}
		fi
		rebuilt+=( "${current}" )
	done
	export HCFLAGS="${rebuilt[*]}"

	return 0
}
diff --git a/eclass/horde.eclass b/eclass/horde.eclass
new file mode 100644
index 000000000000..4d084d85c0a7
--- /dev/null
+++ b/eclass/horde.eclass
@@ -0,0 +1,184 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Help manage the horde project http://www.horde.org/
+#
+# Author: Mike Frysinger <vapier@gentoo.org>
+# CVS additions by Chris Aniszczyk <zx@mea-culpa.net>
+# SNAP additions by Jonathan Polansky <jpolansky@lsit.ucsb.edu>
+#
+# This eclass provides generic functions to make the writing of horde
+# ebuilds fairly trivial since there are many horde applications and
+# they all share the same basic install process.
+
+# EHORDE_SNAP
+# This variable tracks whether the user is using a snapshot version
+#
+# EHORDE_SNAP_BRANCH
+# You set this via the ebuild to whatever branch you wish to grab a
+# snapshot of. Typically this is 'HEAD' or 'RELENG'.
+#
+# EHORDE_CVS
+# This variable tracks whether the user is using a cvs version
+
+inherit webapp eutils
+# live packages additionally need the cvs eclass
+[[ ${PN} != ${PN/-cvs} ]] && inherit cvs
+
+IUSE="vhosts"
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_install pkg_postinst
+
+# HORDE_PN: upstream application name (the "horde-" prefix stripped)
+[[ -z ${HORDE_PN} ]] && HORDE_PN="${PN/horde-}"
+[[ -z ${HORDE_MAJ} ]] && HORDE_MAJ=""
+
+# Classify the package into one of three source flavours:
+# CVS checkout (-cvs), dated snapshot (-snap) or release tarball.
+EHORDE_CVS="false"
+EHORDE_SNAP="false"
+if [[ ${PN} != ${PN/-cvs} ]] ; then
+	EHORDE_CVS="true"
+	HORDE_PN=${HORDE_PN/-cvs}
+
+	ECVS_SERVER="anoncvs.horde.org:/repository"
+	ECVS_MODULE="${HORDE_PN}"
+	ECVS_TOP_DIR="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/cvs-src/${PN}"
+	ECVS_USER="cvsread"
+	ECVS_PASS="horde"
+
+	SRC_URI=""
+	S=${WORKDIR}/${HORDE_PN}
+
+elif [[ ${PN} != ${PN/-snap} ]] ; then
+	EHORDE_SNAP="true"
+	EHORDE_SNAP_BRANCH=${EHORDE_SNAP_BRANCH:-HEAD}
+	# PV is expected to be a date stamp: YYYYMMDD -> YYYY-MM-DD
+	SNAP_PV=${PV:0:4}-${PV:4:2}-${PV:6:2}
+
+	HORDE_PN=${HORDE_PN/-snap}
+
+	SRC_URI="http://ftp.horde.org/pub/snaps/${SNAP_PV}/${HORDE_PN}-${EHORDE_SNAP_BRANCH}-${SNAP_PV}.tar.gz"
+	S=${WORKDIR}/${HORDE_PN}
+
+else
+	SRC_URI="http://ftp.horde.org/pub/${HORDE_PN}/${HORDE_PN}${HORDE_MAJ}-${PV/_/-}.tar.gz"
+	S=${WORKDIR}/${HORDE_PN}${HORDE_MAJ}-${PV/_/-}
+fi
+HOMEPAGE="http://www.horde.org/${HORDE_PN}"
+
+LICENSE="LGPL-2"
+
+# INSTALL_DIR is used by webapp.eclass when USE=-vhosts
+INSTALL_DIR="/horde"
+# framework packages install into /horde itself, apps into /horde/<app>
+[[ ${HORDE_PN} != "horde" && ${HORDE_PN} != "horde-groupware" && ${HORDE_PN} != "horde-webmail" ]] && INSTALL_DIR="${INSTALL_DIR}/${HORDE_PN}"
+
+# "." makes the application loops below also process the top directory
+HORDE_APPLICATIONS="${HORDE_APPLICATIONS} ."
+
+# pkg_setup: run webapp checks and verify the installed dev-lang/php
+# was built with the USE flags listed in HORDE_PHP_FEATURES.
+horde_pkg_setup() {
+	webapp_pkg_setup
+
+	if [[ ! -z ${HORDE_PHP_FEATURES} ]] ; then
+		local param
+		# a leading "-o" means "any one of the flags", not "all"
+		if [[ ${HORDE_PHP_FEATURES:0:2} = "-o" ]] ; then
+			param="-o"
+			HORDE_PHP_FEATURES=${HORDE_PHP_FEATURES:2}
+		fi
+		# NOTE(review): built_with_use is the legacy pre-USE-deps
+		# helper from eutils -- confirm it is still provided here
+		if ! built_with_use ${param} dev-lang/php ${HORDE_PHP_FEATURES} ; then
+			echo
+			if [[ ${param} == "-o" ]] ; then
+				eerror "You MUST re-emerge php with at least one of"
+			else
+				eerror "You MUST re-emerge php with all of"
+			fi
+			eerror "the following options in your USE:"
+			eerror "    ${HORDE_PHP_FEATURES}"
+			die "current php install cannot support ${HORDE_PN}"
+		fi
+	fi
+}
+
+# src_unpack: fetch from CVS or unpack the tarball, apply any
+# EHORDE_PATCHES, and neuter each application's test.php.
+horde_src_unpack() {
+	if [[ ${EHORDE_CVS} = "true" ]] ; then
+		cvs_src_unpack
+	else
+		unpack ${A}
+	fi
+	cd "${S}"
+
+	[[ -n ${EHORDE_PATCHES} ]] && epatch ${EHORDE_PATCHES}
+
+	# test.php exposes configuration details; make it unreadable
+	for APP in ${HORDE_APPLICATIONS}
+	do
+		[[ -f ${APP}/test.php ]] && chmod 000 ${APP}/test.php
+	done
+}
+
+# src_install: copy the whole source tree into the webapp htdocs dir
+# and register config files with webapp.eclass.
+horde_src_install() {
+	webapp_src_preinst
+
+	local destdir=${MY_HTDOCSDIR}
+
+	# Work-around when dealing with CVS sources
+	[[ ${EHORDE_CVS} = "true" ]] && cd ${HORDE_PN}
+
+	# Install docs and then delete them (except for CREDITS which
+	# many horde apps include in their help page #121003)
+	dodoc README docs/*
+	mv docs/CREDITS "${T}"/
+	rm -rf COPYING LICENSE README docs/*
+	mv "${T}"/CREDITS docs/
+
+	dodir ${destdir}
+	cp -r . "${D}"/${destdir}/ || die "install files"
+
+	# every shipped *.dist config whose live counterpart exists is
+	# registered as a config file; conf.php must be server-writable
+	for APP in ${HORDE_APPLICATIONS}
+	do
+		for DISTFILE in ${APP}/config/*.dist
+		do
+			if [[ -f ${DISTFILE/.dist/} ]] ; then
+				webapp_configfile "${MY_HTDOCSDIR}"/${DISTFILE/.dist/}
+			fi
+		done
+		if [[ -f ${APP}/config/conf.php ]] ; then
+			webapp_serverowned "${MY_HTDOCSDIR}"/${APP}/config/conf.php
+			webapp_configfile "${MY_HTDOCSDIR}"/${APP}/config/conf.php
+		fi
+	done
+
+	[[ -n ${HORDE_RECONFIG} ]] && webapp_hook_script ${HORDE_RECONFIG}
+	[[ -n ${HORDE_POSTINST} ]] && webapp_postinst_txt en ${HORDE_POSTINST}
+
+	webapp_src_install
+}
+
+# pkg_postinst: point the admin at docs, security notes and the manual
+# configuration steps still required.
+horde_pkg_postinst() {
+	# single-bracket test on purpose: INSTALL* must glob-expand here
+	if [ -e ${ROOT}/usr/share/doc/${PF}/INSTALL* ] ; then
+		elog "Please read the INSTALL file in /usr/share/doc/${PF}."
+	fi
+
+	einfo "Before this package will work, you have to setup the configuration files."
+	einfo "Please review the config/ subdirectory of ${HORDE_PN} in the webroot."
+
+	if [ -e ${ROOT}/usr/share/doc/${PF}/SECURITY* ] ; then
+		ewarn
+		ewarn "Users are HIGHLY recommended to consult the SECURITY guide in"
+		ewarn "/usr/share/doc/${PF} before going into production with Horde."
+	fi
+
+	# applications (not the framework itself) need a registry entry
+	if [[ ${HORDE_PN} != "horde" && ${HORDE_PN} != "horde-groupware" && ${HORDE_PN} != "horde-webmail" ]] ; then
+		ewarn
+		ewarn "Make sure ${HORDE_PN} is accounted for in Horde's root"
+		ewarn "   config/registry.php"
+	fi
+
+	if [[ ${EHORDE_CVS} = "true" ]] ; then
+		ewarn
+		ewarn "Use these CVS versions at your own risk."
+		ewarn "They tend to break things when working with the non CVS versions of horde."
+	fi
+
+	if use vhosts ; then
+		ewarn
+		ewarn "When installing Horde into a vhost dir, you will need to use the"
+		ewarn "-d option so that it is installed into the proper location."
+	fi
+
+	webapp_pkg_postinst
+}
diff --git a/eclass/intel-sdp.eclass b/eclass/intel-sdp.eclass
new file mode 100644
index 000000000000..13667173de36
--- /dev/null
+++ b/eclass/intel-sdp.eclass
@@ -0,0 +1,521 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: intel-sdp.eclass
+# @MAINTAINER:
+# Justin Lecher <jlec@gentoo.org>
+# Sci Team <sci@gentoo.org>
+# @BLURB: Handling of Intel's Software Development Products package management
+
+# @ECLASS-VARIABLE: INTEL_DID
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The package download ID from Intel.
+# To find out its value, see the links to download in
+# https://registrationcenter.intel.com/RegCenter/MyProducts.aspx
+#
+# e.g. 2504
+#
+# Must be defined before inheriting the eclass
+
+# @ECLASS-VARIABLE: INTEL_DPN
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The package name to download from Intel.
+# To find out its value, see the links to download in
+# https://registrationcenter.intel.com/RegCenter/MyProducts.aspx
+#
+# e.g. parallel_studio_xe
+#
+# Must be defined before inheriting the eclass
+
+# @ECLASS-VARIABLE: INTEL_DPV
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The package download version from Intel.
+# To find out its value, see the links to download in
+# https://registrationcenter.intel.com/RegCenter/MyProducts.aspx
+#
+# e.g. 2011_sp1_update2
+#
+# Must be defined before inheriting the eclass
+
+# @ECLASS-VARIABLE: INTEL_TARX
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The package extension.
+# To find out its value, see the links to download in
+# https://registrationcenter.intel.com/RegCenter/MyProducts.aspx
+#
+# e.g. tar.gz
+#
+# Must be defined before inheriting the eclass
+: ${INTEL_TARX:=tgz}
+
+# @ECLASS-VARIABLE: INTEL_SUBDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The package sub-directory where it will end-up in /opt/intel
+# To find out its value, you have to do a raw install from the Intel tar ball
+
+# @ECLASS-VARIABLE: INTEL_SKIP_LICENSE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Possibility to skip the mandatory check for licenses. Only set this if there
+# is really no fix.
+
+# @ECLASS-VARIABLE: INTEL_RPMS_DIR
+# @DESCRIPTION:
+# Main subdirectory which contains the rpms to extract.
+: ${INTEL_RPMS_DIR:=rpm}
+
+# @ECLASS-VARIABLE: INTEL_X86
+# @DESCRIPTION:
+# 32bit arch in rpm names
+#
+# e.g. i486
+: ${INTEL_X86:=i486}
+
+# @ECLASS-VARIABLE: INTEL_BIN_RPMS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Functional name of rpm without any version/arch tag
+#
+# e.g. compilerprof
+#
+# if the rpm is located in a directory different to INTEL_RPMS_DIR you can
+# specify the full path
+#
+# e.g. CLI_install/rpm/intel-vtune-amplifier-xe-cli
+
+# @ECLASS-VARIABLE: INTEL_DAT_RPMS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Functional name of rpm of common data which are arch free
+# without any version tag
+#
+# e.g. openmp
+#
+# if the rpm is located in a directory different to INTEL_RPMS_DIR you can
+# specify the full path
+#
+# e.g. CLI_install/rpm/intel-vtune-amplifier-xe-cli-common
+
+# @ECLASS-VARIABLE: INTEL_SINGLE_ARCH
+# @DESCRIPTION:
+# Unset, if only the multilib package will be provided by intel
+: ${INTEL_SINGLE_ARCH:=true}
+
+# @ECLASS-VARIABLE: INTEL_SDP_DB
+# @DESCRIPTION:
+# Full path to intel registry db
+INTEL_SDP_DB="${EROOT%/}"/opt/intel/intel-sdp-products.db
+
+inherit check-reqs eutils multilib versionator
+
+# split ${PV} into its four components once; reused all over below
+_INTEL_PV1=$(get_version_component_range 1)
+_INTEL_PV2=$(get_version_component_range 2)
+_INTEL_PV3=$(get_version_component_range 3)
+_INTEL_PV4=$(get_version_component_range 4)
+_INTEL_URI="http://registrationcenter-download.intel.com/irc_nas/${INTEL_DID}/${INTEL_DPN}"
+
+# pick the tarball matching the target arch; the suffix-less tarball
+# is the combined multilib one
+if [ ${INTEL_SINGLE_ARCH} == true ]; then
+	SRC_URI="
+	amd64? ( multilib? ( ${_INTEL_URI}_${INTEL_DPV}.${INTEL_TARX} ) )
+	amd64? ( !multilib? ( ${_INTEL_URI}_${INTEL_DPV}_intel64.${INTEL_TARX} ) )
+	x86? ( ${_INTEL_URI}_${INTEL_DPV}_ia32.${INTEL_TARX} )"
+else
+	SRC_URI="${_INTEL_URI}_${INTEL_DPV}.${INTEL_TARX}"
+fi
+
+LICENSE="Intel-SDP"
+# Future work, #394411
+#SLOT="${_INTEL_PV1}.${_INTEL_PV2}"
+SLOT="0"
+IUSE="examples multilib"
+
+RESTRICT="mirror"
+
+RDEPEND=""
+DEPEND="app-arch/rpm2targz"
+
+# Strip the "_update*" and "_sp*" suffixes to get the product year,
+# e.g. 2011_sp1_update2 -> 2011.  The second strip must operate on the
+# already-stripped value, not on INTEL_DPV again, which silently threw
+# away the first strip for "<year>_update*" versions without "_sp".
+_INTEL_SDP_YEAR=${INTEL_DPV%_update*}
+_INTEL_SDP_YEAR=${_INTEL_SDP_YEAR%_sp*}
+
+# @ECLASS-VARIABLE: INTEL_SDP_DIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Full rootless path to installation dir
+INTEL_SDP_DIR="opt/intel/${INTEL_SUBDIR}-${_INTEL_SDP_YEAR:-${_INTEL_PV1}}.${_INTEL_PV3}.${_INTEL_PV4}"
+
+# @ECLASS-VARIABLE: INTEL_SDP_EDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Full rooted path to installation dir
+INTEL_SDP_EDIR="${EROOT%/}/${INTEL_SDP_DIR}"
+
+S="${WORKDIR}"
+
+QA_PREBUILT="${INTEL_SDP_DIR}/*"
+
+# @ECLASS-VARIABLE: INTEL_ARCH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Intels internal names of the arches; will be set at runtime accordingly
+#
+# e.g. amd64-multilib -> INTEL_ARCH="intel64 ia32"
+
+# @FUNCTION: _isdp_link_eclipse_plugins
+# @INTERNAL
+# @DESCRIPTION:
+# Creating necessary links to use intel compiler with eclipse
+_isdp_link_eclipse_plugins() {
+	local c f
+	pushd ${INTEL_SDP_DIR}/eclipse_support > /dev/null
+	# each bundled cdtXY dir targets eclipse 3.(X-1), e.g. cdt40 -> 3.3
+	for c in cdt*; do
+		local cv=${c#cdt} ev=3.$(( ${cv:0:1} - 1))
+		if has_version "dev-util/eclipse-sdk:${ev}"; then
+			einfo "Linking eclipse (v${ev}) plugin cdt (v${cv})"
+			for f in cdt${cv}/eclipse/features/*; do
+				dodir /usr/$(get_libdir)/eclipse-${ev}/features
+				dosym "${INTEL_SDP_EDIR}"/eclipse_support/${f} \
+					/usr/$(get_libdir)/eclipse-${ev}/features/ || die
+			done
+			for f in cdt${cv}/eclipse/plugins/*; do
+				dodir /usr/$(get_libdir)/eclipse-${ev}/plugins
+				dosym "${INTEL_SDP_EDIR}"/eclipse_support/${f} \
+					/usr/$(get_libdir)/eclipse-${ev}/plugins/ || die
+			done
+		fi
+	done
+	popd > /dev/null
+}
+
+# @FUNCTION: _isdp_big-warning
+# @USAGE: [pre-check | test-failed]
+# @INTERNAL
+# @DESCRIPTION:
+# warn user that we really require a license
+_isdp_big-warning() {
+ case ${1} in
+ pre-check )
+ echo ""
+ ewarn "License file not found!"
+ ;;
+
+ test-failed )
+ echo
+ ewarn "Function test failed. Most probably due to an invalid license."
+ ewarn "This means you already tried to bypass the license check once."
+ ;;
+ esac
+
+ echo ""
+ ewarn "Make sure you have received an Intel license."
+ ewarn "To receive a non-commercial license, you need to register at:"
+ ewarn "https://software.intel.com/en-us/qualify-for-free-software"
+ ewarn "Install the license file into ${INTEL_SDP_EDIR}/licenses/"
+
+ case ${1} in
+ pre-check )
+ ewarn "before proceeding with installation of ${P}"
+ echo ""
+ ;;
+ * )
+ echo ""
+ ;;
+ esac
+}
+
+# @FUNCTION: _isdp_version_test
+# @INTERNAL
+# @DESCRIPTION:
+# Testing for valid license by asking for version information of the compiler
+_isdp_version_test() {
+	local comp comp_full arch warn
+	# map the package name to its compiler driver binary
+	case ${PN} in
+		ifc )
+			debug-print "Testing ifort"
+			comp=ifort
+			;;
+		icc )
+			debug-print "Testing icc"
+			comp=icc
+			;;
+		*)
+			die "${PN} is not supported for testing"
+			;;
+	esac
+
+	for arch in ${INTEL_ARCH}; do
+		# in src_install the image lives under ${ED}; after the merge
+		# the compiler sits at its final rooted location
+		case ${EBUILD_PHASE} in
+			install )
+				comp_full="${ED}/${INTEL_SDP_DIR}/bin/${arch}/${comp}"
+				;;
+			postinst )
+				comp_full="${INTEL_SDP_EDIR}/bin/${arch}/${comp}"
+				;;
+			* )
+				ewarn "Compile test not supported in ${EBUILD_PHASE}"
+				continue
+				;;
+		esac
+
+		debug-print "LD_LIBRARY_PATH=\"${INTEL_SDP_EDIR}/bin/${arch}/\" \"${comp_full}\" -V"
+
+		# "-V" just prints version info; it fails without a license
+		LD_LIBRARY_PATH="${INTEL_SDP_EDIR}/bin/${arch}/" "${comp_full}" -V &>/dev/null
+		[[ $? -ne 0 ]] && warn=yes
+	done
+	[[ "${warn}" == "yes" ]] && _isdp_big-warning test-failed
+}
+
+# @FUNCTION: _isdp_run-test
+# @INTERNAL
+# @DESCRIPTION:
+# Test if installed compiler is working
+_isdp_run-test() {
+	# honour the license-check opt-out
+	[[ -n ${INTEL_SKIP_LICENSE} ]] && return 0
+
+	if [[ ${PN} == ifc || ${PN} == icc ]]; then
+		_isdp_version_test
+	else
+		debug-print "No test available for ${PN}"
+	fi
+}
+
+# @FUNCTION: intel-sdp_pkg_pretend
+# @DESCRIPTION:
+# @CODE
+# * Check that the user has a (valid) license file before going on.
+# * Check for space requirements being fulfilled
+# @CODE
+intel-sdp_pkg_pretend() {
+	local warn=1 dir dirs ret arch a p
+
+	: ${CHECKREQS_DISK_BUILD:=256M}
+	check-reqs_pkg_pretend
+
+	if [[ -z ${INTEL_SKIP_LICENSE} ]]; then
+		# a value containing '@' denotes a port@host license server
+		if echo ${INTEL_LICENSE_FILE} | grep -q @; then
+			einfo "Looks like you are using following license server:"
+			einfo "   ${INTEL_LICENSE_FILE}"
+			return 0
+		fi
+
+		dirs=(
+			"${INTEL_SDP_EDIR}/licenses"
+			"${INTEL_SDP_EDIR}/Licenses"
+			"${EPREFIX}/opt/intel/licenses"
+			)
+		# warn stays 1 unless a *lic file is found in one of the dirs
+		for dir in "${dirs[@]}" ; do
+			ebegin "Checking for a license in: ${dir}"
+			#maybe use nullglob or [[ $(echo ${dir/*lic) != "${dir}/*lic" ]]
+			[[ $( ls "${dir}"/*lic 2>/dev/null ) ]]; ret=$?
+			eend ${ret}
+			if [[ ${ret} == "0" ]]; then
+				warn=${ret}
+				break
+			fi
+		done
+		if [[ ${warn} == "1" ]]; then
+			_isdp_big-warning pre-check
+			die "Could not find license file"
+		fi
+	else
+		eqawarn "The ebuild doesn't check for presents of a proper intel license!"
+		eqawarn "This shouldn't be done unless there is a serious reason."
+	fi
+}
+
+# @FUNCTION: intel-sdp_pkg_setup
+# @DESCRIPTION:
+# Setting up and sorting some internal variables
+intel-sdp_pkg_setup() {
+	local arch a p
+
+	# map Gentoo arch/ABI to Intel's rpm arch and directory names
+	if use x86; then
+		arch=${INTEL_X86}
+		INTEL_ARCH="ia32"
+	elif use amd64; then
+		arch=x86_64
+		INTEL_ARCH="intel64"
+		if has_multilib_profile; then
+			arch="x86_64 ${INTEL_X86}"
+			INTEL_ARCH="intel64 ia32"
+		fi
+	fi
+	INTEL_RPMS=()
+	INTEL_RPMS_FULL=()
+	# INTEL_BIN_RPMS may be declared either as an array or as a
+	# whitespace-separated string; normalize to an array
+	if [[ $(declare -p INTEL_BIN_RPMS) = "declare -a "* ]] ; then
+		_INTEL_BIN_RPMS=( ${INTEL_BIN_RPMS[@]} )
+	else
+		read -r -d '' -a _INTEL_BIN_RPMS <<<"${INTEL_BIN_RPMS}"
+	fi
+	# bare names get the standard intel-*.rpm layout per arch; names
+	# carrying a path are collected separately with their full location
+	for p in "${_INTEL_BIN_RPMS[@]}"; do
+		for a in ${arch}; do
+			if [ ${p} == $(basename ${p}) ]; then
+				INTEL_RPMS+=( intel-${p}-${_INTEL_PV4}-${_INTEL_PV1}.${_INTEL_PV2}-${_INTEL_PV3}.${a}.rpm )
+			else
+				INTEL_RPMS_FULL+=( ${p}-${_INTEL_PV4}-${_INTEL_PV1}.${_INTEL_PV2}-${_INTEL_PV3}.${a}.rpm )
+			fi
+		done
+	done
+	if [[ $(declare -p INTEL_DAT_RPMS) = "declare -a "* ]] ; then
+		_INTEL_DAT_RPMS=( ${INTEL_DAT_RPMS[@]} )
+	else
+		read -r -d '' -a _INTEL_DAT_RPMS <<<"${INTEL_DAT_RPMS}"
+	fi
+	# arch-independent data rpms are always .noarch
+	for p in "${_INTEL_DAT_RPMS[@]}"; do
+		if [ ${p} == $(basename ${p}) ]; then
+			INTEL_RPMS+=( intel-${p}-${_INTEL_PV4}-${_INTEL_PV1}.${_INTEL_PV2}-${_INTEL_PV3}.noarch.rpm )
+		else
+			INTEL_RPMS_FULL+=( ${p}-${_INTEL_PV4}-${_INTEL_PV1}.${_INTEL_PV2}-${_INTEL_PV3}.noarch.rpm )
+		fi
+	done
+}
+
+# @FUNCTION: intel-sdp_src_unpack
+# @DESCRIPTION:
+# Unpacking necessary rpms from tarball, extract them and rearrange the output.
+intel-sdp_src_unpack() {
+	# NOTE(review): 'rpmdir' below is missing from this local list and
+	# leaks into global scope; 'subdir' is declared but never used
+	local l r subdir rb t list=() debug_list
+
+	for t in ${A}; do
+		# collect the rpm members to pull out of tarball ${t};
+		# ${t%%.*} is the tarball's top-level directory name
+		for r in "${INTEL_RPMS[@]}"; do
+			rpmdir=${t%%.*}/${INTEL_RPMS_DIR}
+			list+=( ${rpmdir}/${r} )
+		done
+
+		for r in "${INTEL_RPMS_FULL[@]}"; do
+			list+=( ${t%%.*}/${r} )
+		done
+
+		debug_list="$(IFS=$'\n'; echo ${list[@]} )"
+
+		debug-print "Adding to decompression list:"
+		debug-print ${debug_list}
+
+		tar xvf "${DISTDIR}"/${t} ${list[@]} &> "${T}"/rpm-extraction.log
+
+		for r in ${list[@]}; do
+			rb=$(basename ${r})
+			l=.${rb}_$(date +'%d%m%y_%H%M%S').log
+			einfo "Unpacking ${rb}"
+			# rewrite each member's leading "." to the prefix-relative
+			# EROOT while logging the extracted file list to ${l}
+			rpm2tar -O ${r} | tar xvf - | sed -e \
+				"s:^\.:${EROOT#/}:g" > ${l}; assert "unpacking ${r} failed"
+			mv ${l} opt/intel/ || die "failed moving extract log file"
+		done
+	done
+
+	mv opt/intel/* ${INTEL_SDP_DIR} || die "mv to INTEL_SDP_DIR failed"
+}
+
+# @FUNCTION: intel-sdp_src_install
+# @DESCRIPTION:
+# Install everything
+intel-sdp_src_install() {
+	# drop Intel's own uninstaller, useless under portage
+	if path_exists "${INTEL_SDP_DIR}"/uninstall*; then
+		ebegin "Cleaning out uninstall information"
+		find "${INTEL_SDP_DIR}"/uninstall* -delete || die
+		eend
+	fi
+
+	if path_exists "${INTEL_SDP_DIR}"/Documentation; then
+		dodoc -r "${INTEL_SDP_DIR}"/Documentation/*
+
+		ebegin "Cleaning out documentation"
+		find "${INTEL_SDP_DIR}"/Documentation -delete || die
+		eend
+	fi
+
+	if path_exists "${INTEL_SDP_DIR}"/Samples; then
+		if use examples ; then
+			insinto /usr/share/${P}/examples/
+			doins -r "${INTEL_SDP_DIR}"/Samples/*
+		fi
+		ebegin "Cleaning out examples"
+		find "${INTEL_SDP_DIR}"/Samples -delete || die
+		eend
+	fi
+
+	# only packages declaring an 'eclipse' USE flag keep the plugins
+	if path_exists "${INTEL_SDP_DIR}"/eclipse_support; then
+		if has eclipse ${IUSE} && use eclipse; then
+			_isdp_link_eclipse_plugins
+		else
+			ebegin "Cleaning out eclipse plugin"
+			find "${INTEL_SDP_DIR}"/eclipse_support -delete || die
+			eend
+		fi
+	fi
+
+	if path_exists "${INTEL_SDP_DIR}"/man; then
+		path_exists "${INTEL_SDP_DIR}"/man/en_US/man1/* && \
+			doman "${INTEL_SDP_DIR}"/man/en_US/man1/*
+		path_exists "${INTEL_SDP_DIR}"/man/man1/* && \
+			doman "${INTEL_SDP_DIR}"/man/man1/*
+		has linguas_ja ${IUSE} && use linguas_ja && \
+			doman -i18n=ja_JP "${INTEL_SDP_DIR}"/man/ja_JP/man1/*
+
+		find "${INTEL_SDP_DIR}"/man -delete || die
+	fi
+
+	ebegin "Tagging ${PN}"
+	# replace Intel's <...DIR> placeholders in all *sh scripts with
+	# the final rooted installation path
+	find opt -name \*sh -type f -exec sed -i \
+		-e "s:<.*DIR>:${INTEL_SDP_EDIR}:g" \
+		'{}' + || die
+	eend
+
+	[[ -d "${ED}" ]] || dodir /
+	mv opt "${ED}"/ || die "moving files failed"
+
+	dodir "${INTEL_SDP_DIR}"/licenses /opt/intel/ism/rm
+	keepdir "${INTEL_SDP_DIR}"/licenses /opt/intel/ism/rm
+}
+
+# @FUNCTION: intel-sdp_pkg_postinst
+# @DESCRIPTION:
+# Add things to intel database
+intel-sdp_pkg_postinst() {
+	# add product registry to intel "database"
+	local l r
+	# INTEL_RPMS is an array (see intel-sdp_pkg_setup); the previous
+	# bare ${INTEL_RPMS} expanded to its first element only, so all
+	# remaining rpms were never registered
+	for r in "${INTEL_RPMS[@]}"; do
+		l="$(ls -1 ${EROOT%/}/opt/intel/.${r}_*.log | head -n 1)"
+		echo >> ${INTEL_SDP_DB} \
+			"<:${r%-${_INTEL_PV4}*}-${_INTEL_PV4}:${r}:${INTEL_SDP_EDIR}:${l}:>"
+	done
+	_isdp_run-test
+
+	if [[ ${PN} = icc ]] && has_version ">=dev-util/ccache-3.1.9-r2" ; then
+		#add ccache links as icc might get installed after ccache
+		"${EROOT}"/usr/bin/ccache-config --install-links
+	fi
+}
+
+# @FUNCTION: intel-sdp_pkg_postrm
+# @DESCRIPTION:
+# Sanitize intel database
+intel-sdp_pkg_postrm() {
+	# remove from intel "database"
+	if [[ -e ${INTEL_SDP_DB} ]]; then
+		local r
+		# INTEL_RPMS is an array; the previous bare ${INTEL_RPMS}
+		# expanded to the first element only, leaving stale registry
+		# entries for every other rpm
+		for r in "${INTEL_RPMS[@]}"; do
+			sed -i \
+				-e "/${r}/d" \
+				${INTEL_SDP_DB}
+		done
+	fi
+
+	if [[ ${PN} = icc ]] && has_version ">=dev-util/ccache-3.1.9-r2" && [[ -z ${REPLACED_BY_VERSION} ]]; then
+		# --remove-links would remove all links, --install-links updates them
+		"${EROOT}"/usr/bin/ccache-config --install-links
+	fi
+}
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_install pkg_postinst pkg_postrm pkg_pretend
+# pkg_pretend requires EAPI 4; refuse anything older or unknown
+case "${EAPI:-0}" in
+	0|1|2|3)die "EAPI=${EAPI} is not supported anymore" ;;
+	4|5) ;;
+	*) die "EAPI=${EAPI} is not supported" ;;
+esac
diff --git a/eclass/java-ant-2.eclass b/eclass/java-ant-2.eclass
new file mode 100644
index 000000000000..982e3eef7659
--- /dev/null
+++ b/eclass/java-ant-2.eclass
@@ -0,0 +1,522 @@
+# eclass for ant based Java packages
+#
+# Copyright (c) 2004-2005, Thomas Matthijs <axxo@gentoo.org>
+# Copyright (c) 2004-2011, Gentoo Foundation
+# Changes:
+# May 2007:
+# Made bsfix make one pass for all things and add some global targets for
+# setting up the whole thing. Contributed by kiorky
+# (kiorky@cryptelium.net).
+# December 2006:
+# I pretty much rewrote the logic of the bsfix functions
+# and xml-rewrite.py because they were so slow
+# Petteri Räty (betelgeuse@gentoo.org)
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+# @ECLASS: java-ant-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# kiorky (kiorky@cryptelium.net), Petteri Räty (betelgeuse@gentoo.org)
+# @BLURB: eclass for ant based Java packages
+# @DESCRIPTION:
+# Eclass for Ant-based Java packages. Provides support for both automatic and
+# manual manipulation of build.xml files. Should be inherited after java-pkg-2
+# or java-pkg-opt-2 eclass.
+
+inherit java-utils-2 multilib
+
+# This eclass provides functionality for Java packages which use
+# ant to build. In particular, it will attempt to fix build.xml files, so that
+# they use the appropriate 'target' and 'source' attributes.
+
+# @ECLASS-VARIABLE: WANT_ANT_TASKS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An $IFS separated list of ant tasks.
+# Ebuild can specify this variable before inheriting java-ant-2 eclass to
+# determine ANT_TASKS it needs. They will be automatically translated to
+# DEPEND variable and ANT_TASKS variable. JAVA_PKG_FORCE_ANT_TASKS can override
+# ANT_TASKS set by WANT_ANT_TASKS, but not the DEPEND due to caching.
+# Ebuilds that need to depend conditionally on certain tasks and specify them
+# differently for different eant calls can't use this simplified approach.
+# You also cannot specify version or anything else than ant-*.
+#
+# @CODE
+# WANT_ANT_TASKS="ant-junit ant-trax"
+# @CODE
+
+#The implementation of dependencies is handled by java-utils-2.eclass
+#WANT_ANT_TASKS
+
+# @ECLASS-VARIABLE: JAVA_ANT_DISABLE_ANT_CORE_DEP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Setting this variable non-empty before inheriting java-ant-2 disables adding
+# dev-java/ant-core into DEPEND.
+if [[ -z "${JAVA_ANT_DISABLE_ANT_CORE_DEP}" ]]; then
+	JAVA_ANT_E_DEPEND="${JAVA_ANT_E_DEPEND} >=dev-java/ant-core-1.8.2"
+fi
+
+# add ant tasks specified in WANT_ANT_TASKS to DEPEND
+# NOTE(review): 'local' at eclass top level only works because portage
+# sources eclasses from inside a function -- confirm before reusing
+local ANT_TASKS_DEPEND;
+# assignment kept separate from the declaration so $? below reflects
+# the command substitution's exit status, not local's
+ANT_TASKS_DEPEND="$(java-pkg_ant-tasks-depend)"
+# check that java-pkg_ant-tasks-depend didn't fail
+if [[ $? != 0 ]]; then
+	eerror "${ANT_TASKS_DEPEND}"
+	die "java-pkg_ant-tasks-depend() failed"
+fi
+
+# We need some tools from javatoolkit. We also need portage 2.1 for phase hooks
+# and ant dependencies constructed above. Python is there for
+# java-ant_remove-taskdefs
+JAVA_ANT_E_DEPEND="${JAVA_ANT_E_DEPEND}
+	${ANT_TASKS_DEPEND}
+	${JAVA_PKG_PORTAGE_DEP}
+	>=dev-java/javatoolkit-0.3.0-r2"
+
+# this eclass must be inherited after java-pkg-2 or java-pkg-opt-2
+# if it's java-pkg-opt-2, ant dependencies are pulled based on USE flag
+if has java-pkg-opt-2 ${INHERITED}; then
+	JAVA_ANT_E_DEPEND="${JAVA_PKG_OPT_USE}? ( ${JAVA_ANT_E_DEPEND} )"
+elif ! has java-pkg-2 ${INHERITED}; then
+	eerror "java-ant-2 eclass can only be inherited AFTER java-pkg-2 or java-pkg-opt-2"
+fi
+
+DEPEND="${JAVA_ANT_E_DEPEND}"
+
+# @ECLASS-VARIABLE: JAVA_PKG_BSFIX
+# @DESCRIPTION:
+# Should we attempt to 'fix' ant build files to include the source/target
+# attributes when calling javac?
+JAVA_PKG_BSFIX=${JAVA_PKG_BSFIX:-"on"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_ALL
+# @DESCRIPTION:
+# If we're fixing build files, should we try to fix all the ones we can find?
+JAVA_PKG_BSFIX_ALL=${JAVA_PKG_BSFIX_ALL:-"yes"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_NAME
+# @DESCRIPTION:
+# Filename of build files to fix/search for
+JAVA_PKG_BSFIX_NAME=${JAVA_PKG_BSFIX_NAME:-"build.xml"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_TARGET_TAGS
+# @DESCRIPTION:
+# Targets to fix the 'target' attribute in
+# (see java-ant_bsfix_files: these tags get target=${want_target})
+JAVA_PKG_BSFIX_TARGET_TAGS=${JAVA_PKG_BSFIX_TARGET_TAGS:-"javac xjavac javac.preset"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_SOURCE_TAGS
+# @DESCRIPTION:
+# Targets to fix the 'source' attribute in
+# (see java-ant_bsfix_files: these tags get source=${want_source})
+JAVA_PKG_BSFIX_SOURCE_TAGS=${JAVA_PKG_BSFIX_SOURCE_TAGS:-"javadoc javac xjavac javac.preset"}
+
+# @ECLASS-VARIABLE: JAVA_ANT_CLASSPATH_TAGS
+# @DESCRIPTION:
+# Targets to add the classpath attribute to
+# NOTE(review): plain assignment (no :- default), so a value set by the
+# ebuild before 'inherit' is overwritten -- confirm this is intended
+JAVA_ANT_CLASSPATH_TAGS="javac xjavac"
+
+# @ECLASS-VARIABLE: JAVA_ANT_IGNORE_SYSTEM_CLASSES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# When set, <available> Ant tasks are rewritten to ignore Ant's runtime classpath.
+
+# EAPIs 0/1 have no src_configure phase to export
+case "${EAPI:-0}" in
+	0|1) : ;;
+	*) EXPORT_FUNCTIONS src_configure ;;
+esac
+
+# @FUNCTION: java-ant-2_src_configure
+# @DESCRIPTION:
+# src_configure rewrites the build.xml files automatically, unless EAPI is undefined, 0 or 1.
+java-ant-2_src_configure() {
+	# with optional java support, skip rewriting when the flag is off
+	if has java-pkg-opt-2 ${INHERITED}; then
+		use ${JAVA_PKG_OPT_USE} || return
+	fi
+
+	# already done once (eant invokes us unless Portage already ran it)
+	[[ -e "${T}/java-ant-2_src_configure-run" ]] && return
+
+	if [[ "${JAVA_ANT_IGNORE_SYSTEM_CLASSES}" ]]; then
+		java-ant_ignore-system-classes "${S}/build.xml"
+	fi
+
+	java-ant_bsfix
+	touch "${T}/java-ant-2_src_configure-run"
+}
+
+# @FUNCTION: java-ant_bsfix
+# @INTERNAL
+# @DESCRIPTION:
+# Attempts to fix build files.
+#
+# @CODE
+# Affected by variables:
+#	JAVA_PKG_BSFIX
+#	JAVA_PKG_BSFIX_ALL
+#	JAVA_PKG_BSFIX_NAME,
+# @CODE
+java-ant_bsfix() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ "${JAVA_PKG_BSFIX}" != "on" ]] && return
+	if ! java-pkg_needs-vm; then
+		echo "QA Notice: Package is using java-ant, but doesn't depend on a Java VM"
+	fi
+
+	pushd "${S}" >/dev/null
+
+	local find_args=""
+	[[ "${JAVA_PKG_BSFIX_ALL}" == "yes" ]] || find_args="-maxdepth 1"
+
+	find_args="${find_args} -type f ( -name ${JAVA_PKG_BSFIX_NAME// / -o -name } )"
+
+	# This voodoo is done for paths with spaces
+	# (each filename arrives on its own line, then gets single-quoted
+	# so the later 'eval' keeps it as one word)
+	local bsfix_these
+	while read line; do
+		[[ -z ${line} ]] && continue
+		bsfix_these="${bsfix_these} '${line}'"
+	done <<-EOF
+		$(find . ${find_args})
+	EOF
+
+	[[ "${bsfix_these// /}" ]] && eval java-ant_bsfix_files ${bsfix_these}
+
+	popd > /dev/null
+}
+
+# Die with a hint about the well-known dev-python/pyxml breakage when
+# that package is installed; otherwise die plainly.
+_bsfix_die() {
+	if ! has_version dev-python/pyxml; then
+		die ${1}
+	fi
+
+	# pyxml present: the rewriter failure is likely its fault
+	eerror "If the output above contains:"
+	eerror "ImportError:"
+	eerror "/usr/lib/python2.4/site-packages/_xmlplus/parsers/pyexpat.so:"
+	eerror "undefined symbol: PyUnicodeUCS2_DecodeUTF8"
+	eerror "Try re-emerging dev-python/pyxml"
+	die ${1} " Look at the eerror message above"
+}
+
+# @FUNCTION: java-ant_bsfix_files
+# @USAGE: <path/to/first/build.xml> [path/to/second.build.xml ...]
+# @DESCRIPTION:
+# Attempts to fix named build files.
+#
+# @CODE
+# Affected by variables:
+# JAVA_PKG_BSFIX_SOURCE_TAGS
+# JAVA_PKG_BSFIX_TARGET_TAGS
+# JAVA_ANT_REWRITE_CLASSPATH
+# JAVA_ANT_JAVADOC_INPUT_DIRS: Where we can find java sources for javadoc
+# input. Can be a space separated list of
+# directories
+# JAVA_ANT_BSFIX_EXTRA_ARGS: You can use this to pass extra variables to the
+# rewriter if you know what you are doing.
+# @CODE
+#
+# If JAVA_ANT_JAVADOC_INPUT_DIRS is set, we will turn on the adding of a basic
+# javadoc target to the ant's build.xml with the javadoc xml-rewriter feature.
+# Then we will set EANT DOC TARGET to the added javadoc target
+# NOTE: the variable JAVA_ANT_JAVADOC_OUTPUT_DIR points where we will
+# generate the javadocs. This is a read-only variable, don't change it.
+
+# When changing this function, make sure that it works with paths with spaces in
+# them.
+java-ant_bsfix_files() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ ${#} = 0 ]] && die "${FUNCNAME} called without arguments"
+
+	# source/target levels come from the active Java VM configuration
+	local want_source="$(java-pkg_get-source)"
+	local want_target="$(java-pkg_get-target)"
+
+	debug-print "${FUNCNAME}: target: ${want_target} source: ${want_source}"
+
+	if [ -z "${want_source}" -o -z "${want_target}" ]; then
+		eerror "Could not find valid -source/-target values"
+		eerror "Please file a bug about this on bugs.gentoo.org"
+		die "Could not find valid -source/-target values"
+	else
+		local files
+
+		for file in "${@}"; do
+			debug-print "${FUNCNAME}: ${file}"
+
+			if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
+				cp "${file}" "${file}.orig" || die "failed to copy ${file}"
+			fi
+
+			if [[ ! -w "${file}" ]]; then
+				chmod u+w "${file}" || die "chmod u+w ${file} failed"
+			fi
+
+			# single-quoted for the evals below (paths with spaces)
+			files="${files} -f '${file}'"
+		done
+
+		# for javadoc target and all in one pass, we need the new rewriter.
+		local rewriter3="/usr/share/javatoolkit/xml-rewrite-3.py"
+		if [[ ! -f ${rewriter3} ]]; then
+			rewriter3="/usr/$(get_libdir)/javatoolkit/bin/xml-rewrite-3.py"
+		fi
+
+		local rewriter4="/usr/$(get_libdir)/javatoolkit/bin/build-xml-rewrite"
+
+		# three rewriter generations; try build-xml-rewrite first,
+		# then fall back to xml-rewrite-2.py, then xml-rewrite-3.py
+		if [[ -x ${rewriter4} && ${JAVA_ANT_ENCODING} ]]; then
+			[[ ${JAVA_ANT_REWRITE_CLASSPATH} ]] && local gcp="-g"
+			[[ ${JAVA_ANT_ENCODING} ]] && local enc="-e ${JAVA_ANT_ENCODING}"
+			eval echo "cElementTree rewriter"
+			debug-print "${rewriter4} extra args: ${gcp} ${enc}"
+			${rewriter4} ${gcp} ${enc} \
+				-c "${JAVA_PKG_BSFIX_SOURCE_TAGS}" source ${want_source} \
+				-c "${JAVA_PKG_BSFIX_TARGET_TAGS}" target ${want_target} \
+				"${@}" || die "build-xml-rewrite failed"
+		elif [[ ! -f ${rewriter3} ]]; then
+			debug-print "Using second generation rewriter"
+			eval echo "Rewriting source attributes"
+			eval xml-rewrite-2.py ${files} \
+				-c -e ${JAVA_PKG_BSFIX_SOURCE_TAGS// / -e } \
+				-a source -v ${want_source} || _bsfix_die "xml-rewrite2 failed: ${file}"
+
+			eval echo "Rewriting target attributes"
+			eval xml-rewrite-2.py ${files} \
+				-c -e ${JAVA_PKG_BSFIX_TARGET_TAGS// / -e } \
+				-a target -v ${want_target} || _bsfix_die "xml-rewrite2 failed: ${file}"
+
+			eval echo "Rewriting nowarn attributes"
+			eval xml-rewrite-2.py ${files} \
+				-c -e ${JAVA_PKG_BSFIX_TARGET_TAGS// / -e } \
+				-a nowarn -v yes || _bsfix_die "xml-rewrite2 failed: ${file}"
+
+			if [[ ${JAVA_ANT_REWRITE_CLASSPATH} ]]; then
+				eval echo "Adding gentoo.classpath to javac tasks"
+				eval xml-rewrite-2.py ${files} \
+					-c -e javac -e xjavac -a classpath -v \
+					'\${gentoo.classpath}' \
+					|| _bsfix_die "xml-rewrite2 failed"
+			fi
+		else
+			debug-print "Using third generation rewriter"
+			eval echo "Rewriting attributes"
+			local bsfix_extra_args=""
+			# WARNING KEEP THE ORDER, ESPECIALLY FOR CHANGED ATTRIBUTES!
+			if [[ -n ${JAVA_ANT_REWRITE_CLASSPATH} ]]; then
+				local cp_tags="${JAVA_ANT_CLASSPATH_TAGS// / -e }"
+				bsfix_extra_args="${bsfix_extra_args} -g -e ${cp_tags}"
+				bsfix_extra_args="${bsfix_extra_args} -a classpath -v '\${gentoo.classpath}'"
+			fi
+			if [[ -n ${JAVA_ANT_JAVADOC_INPUT_DIRS} ]]; then
+				if [[ -n ${JAVA_ANT_JAVADOC_OUTPUT_DIR} ]]; then
+					die "Do not define JAVA_ANT_JAVADOC_OUTPUT_DIR!"
+				fi
+				# Where will our generated javadoc go.
+				readonly JAVA_ANT_JAVADOC_OUTPUT_DIR="${WORKDIR}/gentoo_javadoc"
+				mkdir -p "${JAVA_ANT_JAVADOC_OUTPUT_DIR}" || die
+
+				if has doc ${IUSE}; then
+					if use doc; then
+						if [[ -z ${EANT_DOC_TARGET} ]]; then
+							EANT_DOC_TARGET="gentoojavadoc"
+						else
+							die "You can't use javadoc adding and set EANT_DOC_TARGET too."
+						fi
+
+						for dir in ${JAVA_ANT_JAVADOC_INPUT_DIRS};do
+							if [[ ! -d ${dir} ]]; then
+								eerror "This dir: ${dir} doesnt' exists"
+								die "You must specify directories for javadoc input/output dirs."
+							fi
+						done
+						bsfix_extra_args="${bsfix_extra_args} --javadoc --source-directory "
+						# filter third/double spaces
+						# NOTE(review): as rendered here these two
+						# substitutions strip ALL spaces -- presumably
+						# the originals collapsed multi-space runs;
+						# verify against the upstream eclass
+						JAVA_ANT_JAVADOC_INPUT_DIRS=${JAVA_ANT_JAVADOC_INPUT_DIRS// /}
+						JAVA_ANT_JAVADOC_INPUT_DIRS=${JAVA_ANT_JAVADOC_INPUT_DIRS// /}
+						bsfix_extra_args="${bsfix_extra_args} ${JAVA_ANT_JAVADOC_INPUT_DIRS// / --source-directory }"
+						bsfix_extra_args="${bsfix_extra_args} --output-directory ${JAVA_ANT_JAVADOC_OUTPUT_DIR}"
+					fi
+				else
+					die "You need to have doc in IUSE when using JAVA_ANT_JAVADOC_INPUT_DIRS"
+				fi
+			fi
+
+			[[ -n ${JAVA_ANT_BSFIX_EXTRA_ARGS} ]] \
+				&& bsfix_extra_args="${bsfix_extra_args} ${JAVA_ANT_BSFIX_EXTRA_ARGS}"
+
+			debug-print "bsfix_extra_args: ${bsfix_extra_args}"
+
+			eval ${rewriter3} ${files} \
+				-c --source-element ${JAVA_PKG_BSFIX_SOURCE_TAGS// / --source-element } \
+				--source-attribute source --source-value ${want_source} \
+				--target-element ${JAVA_PKG_BSFIX_TARGET_TAGS// / --target-element } \
+				--target-attribute target --target-value ${want_target} \
+				--target-attribute nowarn --target-value yes \
+				${bsfix_extra_args} \
+				|| _bsfix_die "xml-rewrite2 failed: ${file}"
+		fi
+
+		if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
+			for file in "${@}"; do
+				diff -NurbB "${file}.orig" "${file}"
+			done
+		fi
+	fi
+	return 0 # so that the 1 for diff doesn't get reported
+}
+
+
+# @FUNCTION: java-ant_bsfix_one
+# @USAGE: <path/to/build.xml>
+# @DESCRIPTION:
+# Attempts to fix named build file.
+#
+# @CODE
+# Affected by variables:
+# JAVA_PKG_BSFIX_SOURCE_TAGS
+# JAVA_PKG_BSFIX_TARGET_TAGS
+# @CODE
+java-ant_bsfix_one() {
+	debug-print-function ${FUNCNAME} $*
+
+	# A missing argument is an ebuild error: report it loudly and abort.
+	if [ -z "${1}" ]; then
+		eerror "${FUNCNAME} needs one argument"
+		die "${FUNCNAME} needs one argument"
+	fi
+
+	# Delegate to the multi-file worker with the single given build file.
+	java-ant_bsfix_files "${1}"
+}
+
+# @FUNCTION: java-ant_rewrite-classpath
+# @USAGE: [path/to/build.xml]
+# @DESCRIPTION:
+# Adds 'classpath="${gentoo.classpath}"' to specified build file.
+#
+# Affected by:
+# JAVA_ANT_CLASSPATH_TAGS
+#
+# Parameter defaults to build.xml when not specified
+java-ant_rewrite-classpath() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Default to build.xml; only a single file can be rewritten per call.
+	local file="${1:-build.xml}"
+	[[ ${#} -gt 1 ]] && die "${FUNCNAME} currently can only rewrite one file."
+
+	echo "Adding gentoo.classpath to ${file}"
+	debug-print "java-ant_rewrite-classpath: ${file}"
+
+	# Keep a pristine copy around for the optional debug diff below.
+	cp "${file}" "${file}.orig" || die "failed to copy ${file}"
+
+	# Unpacked sources may be read-only; fail loudly if we cannot make the
+	# build file writable instead of letting the rewriter fail obscurely.
+	chmod u+w "${file}" || die "failed to make ${file} writable"
+
+	java-ant_xml-rewrite -f "${file}" --change \
+		-e ${JAVA_ANT_CLASSPATH_TAGS// / -e } -a classpath -v '${gentoo.classpath}'
+
+	# Show the rewrite result when the ebuild is run with debugging enabled.
+	if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
+		diff -NurbB "${file}.orig" "${file}"
+	fi
+}
+
+# @FUNCTION: java-ant_remove-taskdefs
+# @USAGE: [--name NAME] [path/to/build.xml]
+# @DESCRIPTION:
+# Removes (named) taskdef elements from the build.xml file.
+# When --name NAME is specified, only remove taskdef with name NAME. Otherwise,
+# all taskdefs are removed.
+# The file to rewrite defaults to build.xml when not specified.
+java-ant_remove-taskdefs() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Unconditional abort: this function is banned (bug #479838).
+	# NOTE(review): everything below the die is intentionally unreachable
+	# and kept only for historical reference.
+	die "${FUNCNAME} has been banned, see bug #479838."
+
+	local task_name
+	if [[ "${1}" == --name ]]; then
+		task_name="${2}"
+		shift 2
+	fi
+	local file="${1:-build.xml}"
+	echo "Removing taskdefs from ${file}"
+	# Rewrite the build file in place via minidom, dropping every taskdef
+	# element (or only the one matching --name NAME when given).
+	python <<EOF
+import sys
+from xml.dom.minidom import parse
+dom = parse("${file}")
+for elem in dom.getElementsByTagName('taskdef'):
+	if (len("${task_name}") == 0 or elem.getAttribute("name") == "${task_name}"):
+		elem.parentNode.removeChild(elem)
+		elem.unlink()
+f = open("${file}", "w")
+dom.writexml(f)
+f.close()
+EOF
+	[[ $? != 0 ]] && die "Removing taskdefs failed"
+}
+
+# @FUNCTION: java-ant_ignore-system-classes
+# @USAGE: [path/to/build.xml]
+# @DESCRIPTION:
+# Makes the available task ignore classes in the system classpath
+# Parameter defaults to build.xml when not specified
+java-ant_ignore-system-classes() {
+	debug-print-function ${FUNCNAME} $*
+	local file=${1:-build.xml}
+	echo "Changing ignoresystemclasses to true for available tasks in ${file}"
+	# Force ignoresystemclasses="true" on every <available> task so the
+	# check does not accidentally resolve classes from the system classpath.
+	java-ant_xml-rewrite -f "${file}" --change \
+		-e available -a ignoresystemclasses -v "true"
+}
+
+# @FUNCTION: java-ant_xml-rewrite
+# @USAGE: <xml rewriter arguments>
+# @DESCRIPTION:
+# Run the right xml-rewrite binary with the given arguments
+java-ant_xml-rewrite() {
+	# Known install locations of the rewriter from dev-java/javatoolkit;
+	# prefer the PATH-installed one, fall back to the libdir copy.
+	local gen2="/usr/bin/xml-rewrite-2.py"
+	local gen2_1="/usr/$(get_libdir)/javatoolkit/bin/xml-rewrite-2.py"
+	# gen1 is deprecated
+	if [[ -x "${gen2}" ]]; then
+		${gen2} "${@}" || die "${gen2} failed"
+	elif [[ -x "${gen2_1}" ]]; then
+		${gen2_1} "${@}" || die "${gen2_1} failed"
+	else
+		eerror "No binary for rewriting found."
+		eerror "Do you have dev-java/javatoolkit installed?"
+		die "xml-rewrite not found"
+	fi
+}
+
+# @FUNCTION: java-ant_rewrite-bootclasspath
+# @USAGE: <version> [path/to/build.xml] [prepend] [append]
+# @DESCRIPTION:
+# Adds bootclasspath to javac-like tasks in build.xml filled with jars of a
+# bootclasspath package of given version.
+#
+# @CODE
+# Affected by:
+# JAVA_PKG_BSFIX_TARGET_TAGS - the tags of javac tasks
+#
+# Parameters:
+# $1 - the version of bootclasspath (e.g. 1.5), 'auto' for bootclasspath
+# of the current JDK
+# $2 - path to desired build.xml file, defaults to 'build.xml'
+# $3 - (optional) what to prepend the bootclasspath with (to override)
+# $4 - (optional) what to append to the bootclasspath
+# @CODE
+java-ant_rewrite-bootclasspath() {
+	debug-print-function ${FUNCNAME} $*
+
+	local version="${1}"
+	# Use :- so an explicitly empty second argument also falls back to
+	# build.xml, consistent with the other rewriter functions above.
+	local file="${2:-build.xml}"
+	local extra_before="${3}"
+	local extra_after="${4}"
+
+	# Resolve the jars making up the requested bootclasspath version
+	# ('auto' resolves against the current JDK).
+	local bcp="$(java-pkg_get-bootclasspath "${version}")"
+
+	# Optional entries to prepend (override) or append to the path.
+	if [[ -n "${extra_before}" ]]; then
+		bcp="${extra_before}:${bcp}"
+	fi
+	if [[ -n "${extra_after}" ]]; then
+		bcp="${bcp}:${extra_after}"
+	fi
+
+	java-ant_xml-rewrite -f "${file}" -c -e ${JAVA_PKG_BSFIX_TARGET_TAGS// / -e } \
+		-a bootclasspath -v "${bcp}"
+}
diff --git a/eclass/java-mvn-src.eclass b/eclass/java-mvn-src.eclass
new file mode 100644
index 000000000000..da60cf8ea8e0
--- /dev/null
+++ b/eclass/java-mvn-src.eclass
@@ -0,0 +1,65 @@
+# Eclass for Java packages from bare sources exported by Maven
+#
+# Copyright (c) 2004-2011, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+inherit java-pkg-simple
+
+# -----------------------------------------------------------------------------
+# @eclass-begin
+# @eclass-summary Eclass for Java packages from bare sources exported by Maven
+#
+# This class is intended to build pure Java packages from the sources exported
+# from the source:jar goal of Maven 2. These archives contain bare Java source
+# files, with no build instructions or additional resource files. They are
+# unsuitable for packages that require resources besides compiled class files.
+# The benefit is that for artifacts developed with Maven, these source files
+# are often released together with binary packages, whereas the full build
+# environment might be contained in some revision control system or not
+# available at all.
+# -----------------------------------------------------------------------------
+
+# -----------------------------------------------------------------------------
+# @variable-external GROUP_ID
+# @variable-default ${PN}
+#
+# The groupId of the artifact, in dotted notation.
+# -----------------------------------------------------------------------------
+: ${GROUP_ID:=${PN}}
+
+# -----------------------------------------------------------------------------
+# @variable-external ARTIFACT_ID
+# @variable-default ${PN}
+#
+# The artifactId of the artifact.
+# -----------------------------------------------------------------------------
+: ${ARTIFACT_ID:=${PN}}
+
+# -----------------------------------------------------------------------------
+# @variable-external MAVEN2_REPOSITORIES
+# @variable-default http://repo2.maven.org/maven2 http://download.java.net/maven/2
+#
+# The repositories to search for the artifacts. Must follow Maven2 layout.
+# -----------------------------------------------------------------------------
+: ${MAVEN2_REPOSITORIES:="http://repo2.maven.org/maven2 http://download.java.net/maven/2"}
+
+# -----------------------------------------------------------------------------
+# @variable-internal RELATIVE_SRC_URI
+#
+# The path of the source artifact relative to the root of the repository.
+# Will be set by the eclass to follow Maven 2 repository layout.
+# -----------------------------------------------------------------------------
+RELATIVE_SRC_URI=${GROUP_ID//./\/}/${ARTIFACT_ID}/${PV}/${ARTIFACT_ID}-${PV}-sources.jar
+
+# Look for the source jar in all listed repositories: SRC_URI ends up with
+# one candidate URI per repository in MAVEN2_REPOSITORIES.
+for repo in ${MAVEN2_REPOSITORIES}; do
+	SRC_URI="${SRC_URI} ${repo}/${RELATIVE_SRC_URI}"
+done
+# Do not leak the loop variable into the ebuild environment.
+unset repo
+
+# ------------------------------------------------------------------------------
+# @eclass-end
+# ------------------------------------------------------------------------------
diff --git a/eclass/java-osgi.eclass b/eclass/java-osgi.eclass
new file mode 100644
index 000000000000..7e13ed2edd5f
--- /dev/null
+++ b/eclass/java-osgi.eclass
@@ -0,0 +1,292 @@
+# Base eclass for Java packages that needs to be OSGi compliant
+#
+# Copyright (c) 2007, Jean-Noël Rivasseau <elvanor@gmail.com>
+# Copyright (c) 2007-2011, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+# -----------------------------------------------------------------------------
+# @eclass-begin
+# @eclass-shortdesc Java OSGi eclass
+# @eclass-maintainer java@gentoo.org
+#
+# This eclass provides functionality which is used by
+# packages that need to be OSGi compliant. This means
+# that the generated jars will have special headers in their manifests.
+# Currently this is used only by Eclipse-3.3 - later
+# we could extend this so that Gentoo Java system would be
+# fully OSGi compliant.
+#
+# -----------------------------------------------------------------------------
+
+inherit java-utils-2
+
+# We define _OSGI_T so that it does not contain a slash at the end.
+# According to Paludis guys, there is currently a proposal for EAPIs that
+# would require all variables to end with a slash.
+
+_OSGI_T="${T/%\//}"
+
+# must get Diego to commit something like this to portability.eclass
+_canonicalise() {
+	# Resolve the given path(s) to a canonical absolute path using
+	# whichever of realpath/readlink is available on this system.
+	if type -p realpath > /dev/null; then
+		realpath "${@}"
+	elif type -p readlink > /dev/null; then
+		readlink -f "${@}"
+	else
+		# can't die, subshell
+		eerror "No readlink nor realpath found, cannot canonicalise"
+		# Return failure so callers do not silently proceed with an
+		# empty path (previously this fell through with status 0).
+		return 1
+	fi
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function _java-osgi_plugin
+#
+# This is an internal function, not to be called directly.
+#
+# @example
+# _java-osgi_plugin "JSch"
+#
+# @param $1 - bundle name
+#
+# ------------------------------------------------------------------------------
+
+_java-osgi_plugin() {
+	# We hardcode Gentoo as the vendor name
+
+	# Write the localization keys referenced from the generated manifest
+	# (%bundleName/%vendorName via "Bundle-Localization: plugin").
+	# Note: <<- strips leading tabs from the here-doc body.
+	cat > "${_OSGI_T}/tmp_jar/plugin.properties" <<-EOF
+		bundleName="${1}"
+		vendorName="Gentoo"
+	EOF
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function _java-osgi_makejar
+#
+# This is an internal function, not to be called directly.
+#
+# @example
+# _java-osgi_makejar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
+#
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 - bundle symbolic name
+# @param $3 - bundle name
+# @param $4 - export-package header
+#
+# ------------------------------------------------------------------------------
+
+_java-osgi_makejar() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	(( ${#} < 4 )) && die "Four arguments are needed for _java-osgi_makejar()"
+
+	# Resolve the jar path before we cd away; $1 may be relative.
+	local absoluteJarPath="$(_canonicalise ${1})"
+	local jarName="$(basename ${1})"
+
+	# tmp_jar is a per-jar scratch area; osgi/ accumulates the rebuilt jars.
+	mkdir "${_OSGI_T}/tmp_jar" || die "Unable to create directory ${_OSGI_T}/tmp_jar"
+	[[ -d "${_OSGI_T}/osgi" ]] || mkdir "${_OSGI_T}/osgi" || die "Unable to create directory ${_OSGI_T}/osgi"
+
+	cd "${_OSGI_T}/tmp_jar" && jar xf "${absoluteJarPath}" && cd - > /dev/null \
+		|| die "Unable to uncompress correctly the original jar"
+
+	# Minimal OSGi manifest; %bundleName/%vendorName are resolved from the
+	# plugin.properties file written by _java-osgi_plugin below.
+	cat > "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" <<-EOF
+		Manifest-Version: 1.0
+		Bundle-ManifestVersion: 2
+		Bundle-Name: %bundleName
+		Bundle-Vendor: %vendorName
+		Bundle-Localization: plugin
+		Bundle-SymbolicName: ${2}
+		Bundle-Version: ${PV}
+		Export-Package: ${4}
+	EOF
+
+	_java-osgi_plugin "${3}"
+
+	# Repack the extracted tree around the new manifest, then clean up.
+	jar cfm "${_OSGI_T}/osgi/${jarName}" "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" \
+		-C "${_OSGI_T}/tmp_jar/" . > /dev/null || die "Unable to recreate the OSGi compliant jar"
+	rm -rf "${_OSGI_T}/tmp_jar"
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function java-osgi_dojar
+#
+# Rewrites a jar and produces an OSGi-compliant jar from arguments given on the command line.
+# The arguments given correspond to the minimal set of headers
+# that must be present on a Manifest file of an OSGi package.
+# If you need more headers, you should use the *-fromfile functions below,
+# that create the Manifest from a file.
+# It will call java-pkg_dojar at the end.
+#
+# @example
+# java-osgi_dojar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
+#
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 - bundle symbolic name
+# @param $3 - bundle name
+# @param $4 - export-package-header
+#
+# ------------------------------------------------------------------------------
+
+java-osgi_dojar() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Repackage the jar with OSGi headers, then install the rebuilt copy.
+	local jar_name
+	jar_name="$(basename ${1})"
+	_java-osgi_makejar "${@}"
+	java-pkg_dojar "${_OSGI_T}/osgi/${jar_name}"
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function java-osgi_newjar
+#
+# Rewrites a jar and produces an OSGi-compliant jar.
+# The arguments given correspond to the minimal set of headers
+# that must be present on a Manifest file of an OSGi package.
+# If you need more headers, you should use the *-fromfile functions below,
+# that create the Manifest from a file.
+# It will call java-pkg_newjar at the end.
+#
+# @example
+# java-osgi_newjar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
+#
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 (optional) - name of the target jar. It will default to package name if not specified.
+# @param $3 - bundle symbolic name
+# @param $4 - bundle name
+# @param $5 - export-package header
+#
+# ------------------------------------------------------------------------------
+
+java-osgi_newjar() {
+	debug-print-function ${FUNCNAME} "$@"
+	# Quote the argument so jar paths containing whitespace survive
+	# basename (consistent with java-osgi_dojar-fromfile below).
+	local jarName="$(basename "${1}")"
+
+	# With five arguments, $2 is the target name for java-pkg_newjar;
+	# otherwise java-pkg_newjar falls back to its default naming.
+	if (( ${#} > 4 )); then
+		_java-osgi_makejar "${1}" "${3}" "${4}" "${5}"
+		java-pkg_newjar "${_OSGI_T}/osgi/${jarName}" "${2}"
+	else
+		_java-osgi_makejar "$@"
+		java-pkg_newjar "${_OSGI_T}/osgi/${jarName}"
+	fi
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function _java-osgi_makejar-fromfile
+#
+# This is an internal function, not to be called directly.
+#
+# @example
+# _java-osgi_makejar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "JSch" 1
+#
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 - path to the Manifest file
+# @param $3 - bundle name
+# @param $4 - automatic version rewriting (0 or 1)
+#
+# ------------------------------------------------------------------------------
+
+_java-osgi_makejar-fromfile() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	((${#} < 4)) && die "Four arguments are needed for _java-osgi_makejar-fromfile()"
+
+	# Resolve the jar path before we cd away; quote so paths with
+	# whitespace are handled correctly.
+	local absoluteJarPath="$(_canonicalise "${1}")"
+	local jarName="$(basename "${1}")"
+
+	# tmp_jar is a per-jar scratch area; osgi/ accumulates the rebuilt jars.
+	mkdir "${_OSGI_T}/tmp_jar" || die "Unable to create directory ${_OSGI_T}/tmp_jar"
+	[[ -d "${_OSGI_T}/osgi" ]] || mkdir "${_OSGI_T}/osgi" || die "Unable to create directory ${_OSGI_T}/osgi"
+
+	cd "${_OSGI_T}/tmp_jar" && jar xf "${absoluteJarPath}" && cd - > /dev/null \
+		|| die "Unable to uncompress correctly the original jar"
+
+	[[ -e "${2}" ]] || die "Manifest file ${2} not found"
+
+	# We automatically change the version if automatic version rewriting is on
+	if (( ${4} )); then
+		# sed reads the manifest directly (no useless cat pipeline).
+		sed "s/Bundle-Version:.*/Bundle-Version: ${PV}/" "${2}" > \
+			"${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF"
+	else
+		cat "${2}" > "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF"
+	fi
+
+	_java-osgi_plugin "${3}"
+
+	# Repack the extracted tree around the new manifest, then clean up.
+	jar cfm "${_OSGI_T}/osgi/${jarName}" "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" \
+		-C "${_OSGI_T}/tmp_jar/" . > /dev/null || die "Unable to recreate the OSGi compliant jar"
+	rm -rf "${_OSGI_T}/tmp_jar"
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function java-osgi_newjar-fromfile()
+#
+# This function produces an OSGi compliant jar from a given manifest file.
+# The Manifest Bundle-Version header will be replaced by the current version
+# of the package, unless the --no-auto-version option is given.
+# It will call java-pkg_newjar at the end.
+#
+# @example
+# java-osgi_newjar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "Standard Widget Toolkit for GTK 2.0"
+#
+# @param $opt
+# --no-auto-version - This option disables automatic rewriting of the
+#	version in the Manifest file
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 (optional) - name of the target jar. It will default to package name if not specified.
+# @param $3 - path to the Manifest file
+# @param $4 - bundle name
+#
+# ------------------------------------------------------------------------------
+
+java-osgi_newjar-fromfile() {
+	debug-print-function ${FUNCNAME} "$@"
+	# Bundle-Version rewriting is on unless --no-auto-version is given.
+	local versionRewriting=1
+
+	if [[ "${1}" == "--no-auto-version" ]]; then
+		versionRewriting=0
+		shift
+	fi
+	local jarName="$(basename ${1})"
+
+	# With four (post-option) arguments, $2 is the target jar name for
+	# java-pkg_newjar; otherwise its default naming applies.
+	if (( ${#} > 3 )); then
+		_java-osgi_makejar-fromfile "${1}" "${3}" "${4}" "${versionRewriting}"
+		java-pkg_newjar "${_OSGI_T}/osgi/${jarName}" "${2}"
+	else
+		_java-osgi_makejar-fromfile "$@" "${versionRewriting}"
+		java-pkg_newjar "${_OSGI_T}/osgi/${jarName}"
+	fi
+}
+
+# -----------------------------------------------------------------------------
+# @ebuild-function java-osgi_dojar-fromfile()
+#
+# This function produces an OSGi compliant jar from a given manifest file.
+# The Manifest Bundle-Version header will be replaced by the current version
+# of the package, unless the --no-auto-version option is given.
+# It will call java-pkg_dojar at the end.
+#
+# @example
+# java-osgi_dojar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "Standard Widget Toolkit for GTK 2.0"
+#
+# @param $opt
+# --no-auto-version - This option disables automatic rewriting of the
+# version in the Manifest file
+# @param $1 - name of jar to repackage with OSGi
+# @param $2 - path to the Manifest file
+# @param $3 - bundle name
+#
+# ------------------------------------------------------------------------------
+
+java-osgi_dojar-fromfile() {
+	debug-print-function ${FUNCNAME} "$@"
+	# Bundle-Version rewriting is on unless --no-auto-version is given.
+	local versionRewriting=1
+
+	if [[ "${1}" == "--no-auto-version" ]]; then
+		versionRewriting=0
+		shift
+	fi
+	local jarName="$(basename ${1})"
+
+	# Rebuild the jar from the given manifest, then install it.
+	_java-osgi_makejar-fromfile "$@" "${versionRewriting}"
+	java-pkg_dojar "${_OSGI_T}/osgi/${jarName}"
+}
diff --git a/eclass/java-pkg-2.eclass b/eclass/java-pkg-2.eclass
new file mode 100644
index 000000000000..a0028c8200d7
--- /dev/null
+++ b/eclass/java-pkg-2.eclass
@@ -0,0 +1,158 @@
+# Eclass for Java packages
+#
+# Copyright (c) 2004-2005, Thomas Matthijs <axxo@gentoo.org>
+# Copyright (c) 2004-2015, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+# @ECLASS: java-pkg-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Thomas Matthijs <axxo@gentoo.org>
+# @BLURB: Eclass for Java Packages
+# @DESCRIPTION:
+# This eclass should be inherited for pure Java packages, or by packages which
+# need to use Java.
+
+inherit java-utils-2
+
+# @ECLASS-VARIABLE: JAVA_PKG_IUSE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Use JAVA_PKG_IUSE instead of IUSE for doc, source and examples so that
+# the eclass can automatically add the needed dependencies for the java-pkg_do*
+# functions.
+IUSE="${JAVA_PKG_IUSE}"
+
+# Java packages need java-config, and a fairly new release of Portage.
+# JAVA_PKG_E_DEPEND is defined in java-utils.eclass.
+DEPEND="${JAVA_PKG_E_DEPEND}"
+
+# Nothing special for RDEPEND... just the same as DEPEND.
+RDEPEND="${DEPEND}"
+
+# Commons packages follow the same rules so do it here
+if [[ ${CATEGORY} = dev-java && ${PN} = commons-* ]]; then
+ HOMEPAGE="http://commons.apache.org/${PN#commons-}/"
+ SRC_URI="mirror://apache/${PN/-///}/source/${P}-src.tar.gz"
+fi
+
+case "${EAPI:-0}" in
+ 0|1) EXPORT_FUNCTIONS pkg_setup src_compile pkg_preinst ;;
+ *) EXPORT_FUNCTIONS pkg_setup src_prepare src_compile pkg_preinst ;;
+esac
+
+# @FUNCTION: java-pkg-2_pkg_setup
+# @DESCRIPTION:
+# pkg_setup initializes the Java environment
+
+java-pkg-2_pkg_setup() {
+	# Initialize the Java build environment via java-utils-2.
+	java-pkg_init
+}
+
+
+# @FUNCTION: java-pkg-2_src_prepare
+# @DESCRIPTION:
+# wrapper for java-utils-2_src_prepare
+
+java-pkg-2_src_prepare() {
+	# Thin wrapper: all preparation logic lives in java-utils-2.
+	java-utils-2_src_prepare
+}
+
+
+# @FUNCTION: java-pkg-2_src_compile
+# @DESCRIPTION:
+# Default src_compile for java packages
+#
+# @CODE
+# Variables:
+# EANT_BUILD_XML - controls the location of the build.xml (default: ./build.xml)
+# EANT_FILTER_COMPILER - Calls java-pkg_filter-compiler with the value
+# EANT_BUILD_TARGET - the ant target/targets to execute (default: jar)
+# EANT_DOC_TARGET - the target to build extra docs under the doc use flag
+# (default: javadoc; declare empty to disable completely)
+# EANT_GENTOO_CLASSPATH - @see eant documentation in java-utils-2.eclass
+# EANT_EXTRA_ARGS - extra arguments to pass to eant
+# EANT_ANT_TASKS - modifies the ANT_TASKS variable in the eant environment
+# @CODE
+
+java-pkg-2_src_compile() {
+	# Note: := assigns the default into EANT_BUILD_XML/EANT_BUILD_TARGET
+	# as a side effect, so later uses see the defaulted value.
+	if [[ -e "${EANT_BUILD_XML:=build.xml}" ]]; then
+		[[ "${EANT_FILTER_COMPILER}" ]] && \
+			java-pkg_filter-compiler ${EANT_FILTER_COMPILER}
+		local antflags="${EANT_BUILD_TARGET:=jar}"
+		# Add the doc target only when the ebuild exposes a doc USE flag
+		# and EANT_DOC_TARGET is non-empty (empty disables docs).
+		if has doc ${IUSE} && [[ -n "${EANT_DOC_TARGET=javadoc}" ]]; then
+			antflags="${antflags} $(use_doc ${EANT_DOC_TARGET})"
+		fi
+		local tasks
+		# EANT_ANT_TASKS extends (not replaces) any existing ANT_TASKS.
+		[[ ${EANT_ANT_TASKS} ]] && tasks="${ANT_TASKS} ${EANT_ANT_TASKS}"
+		ANT_TASKS="${tasks:-${ANT_TASKS}}" \
+			eant ${antflags} -f "${EANT_BUILD_XML}" ${EANT_EXTRA_ARGS} "${@}"
+	else
+		echo "${FUNCNAME}: ${EANT_BUILD_XML} not found so nothing to do."
+	fi
+}
+
+# @FUNCTION: java-pkg-2_src_test
+# @DESCRIPTION:
+# src_test, not exported.
+
+java-pkg-2_src_test() {
+	[[ -e "${EANT_BUILD_XML:=build.xml}" ]] || return
+
+	# Run only if a test target was requested explicitly, or the build
+	# file declares a target named "test" (flattened so the regex matches
+	# across line breaks).
+	if [[ ${EANT_TEST_TARGET} ]] || < "${EANT_BUILD_XML}" tr -d "\n" | grep -Eq "<target\b[^>]*\bname=[\"']test[\"']"; then
+		local opts task_re junit_re pkg
+
+		if [[ ${EANT_TEST_JUNIT_INTO} ]]; then
+			java-pkg_jar-from --into "${EANT_TEST_JUNIT_INTO}" junit
+		fi
+
+		if [[ ${EANT_TEST_GENTOO_CLASSPATH} ]]; then
+			EANT_GENTOO_CLASSPATH="${EANT_TEST_GENTOO_CLASSPATH}"
+		fi
+
+		ANT_TASKS=${EANT_TEST_ANT_TASKS:-${ANT_TASKS:-${EANT_ANT_TASKS}}}
+
+		# Detect which test helper the ebuild depends on: ant-junit(4)
+		# (with optional version/slot) takes precedence over plain junit.
+		task_re="\bdev-java/ant-junit(4)?(-[^:]+)?(:\S+)\b"
+		junit_re="\bdev-java/junit(-[^:]+)?(:\S+)\b"
+
+		if [[ ${DEPEND} =~ ${task_re} ]]; then
+			# Rebuild the ant-junit package atom (with slot, sans :0).
+			pkg="ant-junit${BASH_REMATCH[1]}${BASH_REMATCH[3]}"
+			pkg="${pkg%:0}"
+
+			if [[ ${ANT_TASKS} && "${ANT_TASKS}" != none ]]; then
+				ANT_TASKS="${ANT_TASKS} ${pkg}"
+			else
+				ANT_TASKS="${pkg}"
+			fi
+		elif [[ ${DEPEND} =~ ${junit_re} ]]; then
+			# Plain junit: pass its jar location and add it to the
+			# Gentoo classpath instead of the ANT_TASKS mechanism.
+			pkg="junit${BASH_REMATCH[2]}"
+			pkg="${pkg%:0}"
+
+			opts="-Djunit.jar=\"$(java-pkg_getjar ${pkg} junit.jar)\""
+
+			if [[ ${EANT_GENTOO_CLASSPATH} ]]; then
+				EANT_GENTOO_CLASSPATH+=",${pkg}"
+			else
+				EANT_GENTOO_CLASSPATH="${pkg}"
+			fi
+		fi
+
+		eant ${opts} -f "${EANT_BUILD_XML}" \
+			${EANT_EXTRA_ARGS} ${EANT_TEST_EXTRA_ARGS} ${EANT_TEST_TARGET:-test}
+
+	else
+		echo "${FUNCNAME}: No test target in ${EANT_BUILD_XML}"
+	fi
+}
+
+# @FUNCTION: java-pkg-2_pkg_preinst
+# @DESCRIPTION:
+# wrapper for java-utils-2_pkg_preinst
+
+java-pkg-2_pkg_preinst() {
+	# Thin wrapper: all preinst logic lives in java-utils-2.
+	java-utils-2_pkg_preinst
+}
diff --git a/eclass/java-pkg-opt-2.eclass b/eclass/java-pkg-opt-2.eclass
new file mode 100644
index 000000000000..9902837017c2
--- /dev/null
+++ b/eclass/java-pkg-opt-2.eclass
@@ -0,0 +1,67 @@
+# Eclass for optional Java packages
+#
+# Copyright (c) 2004-2005, Thomas Matthijs <axxo@gentoo.org>
+# Copyright (c) 2004-2011, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# Major changes:
+# 20070805:
+# Removed phase hooks because Portage does proper env saving now.
+# <betelgeuse@gentoo.org>
+#
+# $Id$
+
+# @ECLASS: java-pkg-opt-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Thomas Matthijs <axxo@gentoo.org>
+# @BLURB: Eclass for package with optional Java support
+# @DESCRIPTION:
+# Inherit this eclass instead of java-pkg-2 if you only need optional Java
+# support.
+
+inherit java-utils-2
+
+# @ECLASS-VARIABLE: JAVA_PKG_OPT_USE
+# @DESCRIPTION:
+# USE flag to control if optional Java stuff is built. Defaults to 'java'.
+JAVA_PKG_OPT_USE=${JAVA_PKG_OPT_USE:-java}
+
+DEPEND="${JAVA_PKG_OPT_USE}? ( ${JAVA_PKG_E_DEPEND} )"
+RDEPEND="${DEPEND}"
+
+# See java-pkg-2.eclass for JAVA_PKG_IUSE documentation
+IUSE="${JAVA_PKG_IUSE} ${JAVA_PKG_OPT_USE}"
+
+case "${EAPI:-0}" in
+ 0|1) EXPORT_FUNCTIONS pkg_setup pkg_preinst ;;
+ *) EXPORT_FUNCTIONS pkg_setup src_prepare pkg_preinst ;;
+esac
+
+# @FUNCTION: java-pkg-opt-2_pkg_setup
+# @DESCRIPTION:
+# default pkg_setup, wrapper for java-utils-2_pkg_init
+
+java-pkg-opt-2_pkg_setup() {
+	# Initialize Java only when the optional USE flag is enabled.
+	# NOTE(review): returns non-zero when the flag is off — confirm the
+	# package manager tolerates that for this phase.
+	use ${JAVA_PKG_OPT_USE} && java-pkg_init
+}
+
+
+# @FUNCTION: java-pkg-opt-2_src_prepare
+# @DESCRIPTION:
+# default src_prepare, wrapper for java-utils-2_src_prepare
+
+java-pkg-opt-2_src_prepare() {
+	# Run the shared Java preparation only when the USE flag is enabled.
+	use ${JAVA_PKG_OPT_USE} && java-utils-2_src_prepare
+}
+
+
+# @FUNCTION: java-pkg-opt-2_pkg_preinst
+# @DESCRIPTION:
+# default pkg_preinst, wrapper for java-utils-2_pkg_preinst
+
+java-pkg-opt-2_pkg_preinst() {
+	# Run the shared Java preinst only when the USE flag is enabled.
+	use ${JAVA_PKG_OPT_USE} && java-utils-2_pkg_preinst
+}
diff --git a/eclass/java-pkg-simple.eclass b/eclass/java-pkg-simple.eclass
new file mode 100644
index 000000000000..d5309bb105be
--- /dev/null
+++ b/eclass/java-pkg-simple.eclass
@@ -0,0 +1,209 @@
+# Eclass for simple bare-source Java packages
+#
+# Copyright (c) 2004-2015, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+inherit java-utils-2
+
+if ! has java-pkg-2 ${INHERITED}; then
+ eerror "java-pkg-simple eclass can only be inherited AFTER java-pkg-2"
+fi
+
+# -----------------------------------------------------------------------------
+# @eclass-begin
+# @eclass-summary Eclass for Java sources without build instructions
+#
+# This class is intended to build pure Java packages from Java sources
+# without the use of any build instructions shipped with the sources.
+# There is no support for resources besides the generated class files,
+# or for generating source files, or for controlling the META-INF of
+# the resulting jar, although these issues may be addressed by an
+# ebuild by putting corresponding files into the target directory
+# before calling the src_compile function of this eclass.
+# -----------------------------------------------------------------------------
+
+EXPORT_FUNCTIONS src_compile src_install
+
+# We are only interested in finding all java source files, wherever they may be.
+S="${WORKDIR}"
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVA_GENTOO_CLASSPATH
+# @variable-default ""
+#
+# Comma or space separated list of java packages to include in the
+# class path. The packages will also be registered as runtime
+# dependencies of this new package. Dependencies will be calculated
+# transitively. See "java-config -l" for appropriate package names.
+# -----------------------------------------------------------------------------
+# JAVA_GENTOO_CLASSPATH
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVA_CLASSPATH_EXTRA
+# @variable-default ""
+#
+# Extra list of colon separated path elements to be put on the
+# classpath when compiling sources.
+# -----------------------------------------------------------------------------
+# JAVA_CLASSPATH_EXTRA
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVA_SRC_DIR
+# @variable-default ""
+#
+# Directories relative to ${S} which contain the sources of the
+# application. The default of "" will be treated mostly as ${S}
+# itself. For the generated source package (if source is listed in
+# ${JAVA_PKG_IUSE}), it is important that these directories are
+# actually the roots of the corresponding source trees.
+# -----------------------------------------------------------------------------
+# JAVA_SRC_DIR
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVA_ENCODING
+# @variable-default UTF-8
+#
+# The character encoding used in the source files
+# -----------------------------------------------------------------------------
+: ${JAVA_ENCODING:=UTF-8}
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVAC_ARGS
+# @variable-default ""
+#
+# Additional arguments to be passed to javac
+# -----------------------------------------------------------------------------
+# JAVAC_ARGS
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVADOC_ARGS
+# @variable-default ""
+#
+# Additional arguments to be passed to javadoc
+# -----------------------------------------------------------------------------
+# JAVADOC_ARGS
+
+# -----------------------------------------------------------------------------
+# @variable-external JAVA_JAR_FILENAME
+# @variable-default ${PN}.jar
+#
+# The name of the jar file to create and install
+# -----------------------------------------------------------------------------
+: ${JAVA_JAR_FILENAME:=${PN}.jar}
+
+# ------------------------------------------------------------------------------
+# @eclass-src_compile
+#
+# src_compile for simple bare source java packages. Finds all *.java
+# sources in ${JAVA_SRC_DIR}, compiles them with the classpath
+# calculated from ${JAVA_GENTOO_CLASSPATH}, and packages the resulting
+# classes to ${JAVA_JAR_FILENAME}.
+#
+# variables:
+# JAVA_GENTOO_CLASSPATH - list java packages to put on the classpath.
+# JAVA_ENCODING - encoding of source files, used by javac and javadoc
+# JAVA_SRC_DIR - directories containing source files, relative to ${S}
+# JAVAC_ARGS - additional arguments to be passed to javac
+# JAVADOC_ARGS - additional arguments to be passed to javadoc
+# ------------------------------------------------------------------------------
+java-pkg-simple_src_compile() {
+	local sources=sources.lst classes=target/classes apidoc=target/api
+
+	# gather sources
+	find ${JAVA_SRC_DIR:-*} -name \*.java > ${sources}
+	mkdir -p ${classes} || die "Could not create target directory"
+
+	# compile
+	local classpath="${JAVA_CLASSPATH_EXTRA}" dependency
+	for dependency in ${JAVA_GENTOO_CLASSPATH}; do
+		classpath="${classpath}:$(java-pkg_getjars ${dependency})" \
+			|| die "getjars failed for ${dependency}"
+	done
+	# Normalize the classpath: collapse empty elements and strip any
+	# leading/trailing colon left over from concatenation.
+	while [[ $classpath = *::* ]]; do classpath="${classpath//::/:}"; done
+	classpath=${classpath%:}
+	classpath=${classpath#:}
+	debug-print "CLASSPATH=${classpath}"
+	# The @file syntax feeds the source list to javac/javadoc.
+	java-pkg-simple_verbose-cmd \
+		ejavac -d ${classes} -encoding ${JAVA_ENCODING} \
+			${classpath:+-classpath ${classpath}} ${JAVAC_ARGS} \
+			@${sources}
+
+	# javadoc
+	if has doc ${JAVA_PKG_IUSE} && use doc; then
+		mkdir -p ${apidoc}
+		java-pkg-simple_verbose-cmd \
+			ejavadoc -d ${apidoc} \
+				-encoding ${JAVA_ENCODING} -docencoding UTF-8 -charset UTF-8 \
+				${classpath:+-classpath ${classpath}} ${JAVADOC_ARGS:- -quiet} \
+				@${sources} || die "javadoc failed"
+	fi
+
+	# package
+	# Use the manifest from the target tree when the ebuild provided one.
+	local jar_args="cf ${JAVA_JAR_FILENAME}"
+	if [[ -e ${classes}/META-INF/MANIFEST.MF ]]; then
+		jar_args="cfm ${JAVA_JAR_FILENAME} ${classes}/META-INF/MANIFEST.MF"
+	fi
+	java-pkg-simple_verbose-cmd \
+		jar ${jar_args} -C ${classes} . || die "jar failed"
+}
+
+# ------------------------------------------------------------------------------
+# @eclass-src_install
+#
+# src_install for simple single jar java packages. Simply packages the
+# contents from the target directory and installs it as
+# ${JAVA_JAR_FILENAME}. If the file target/META-INF/MANIFEST.MF exists,
+# it is used as the manifest of the created jar.
+# ------------------------------------------------------------------------------
+java-pkg-simple_src_install() {
+	# Only the source list and apidoc directory from src_compile are
+	# reused here (the unused 'classes' local was dropped).
+	local sources=sources.lst apidoc=target/api
+
+	# main jar
+	java-pkg-simple_verbose-cmd \
+		java-pkg_dojar ${JAVA_JAR_FILENAME}
+
+	# javadoc
+	if has doc ${JAVA_PKG_IUSE} && use doc; then
+		java-pkg-simple_verbose-cmd \
+			java-pkg_dojavadoc ${apidoc}
+	fi
+
+	# dosrc
+	if has source ${JAVA_PKG_IUSE} && use source; then
+		local srcdirs=""
+		if [[ ${JAVA_SRC_DIR} ]]; then
+			# Install the children of each declared source root so the
+			# package layout matches the source tree roots.
+			local parent child
+			for parent in ${JAVA_SRC_DIR}; do
+				for child in ${parent}/*; do
+					srcdirs="${srcdirs} ${child}"
+				done
+			done
+		else
+			# take all directories actually containing any sources
+			srcdirs="$(cut -d/ -f1 ${sources} | sort -u)"
+		fi
+		java-pkg-simple_verbose-cmd \
+			java-pkg_dosrc ${srcdirs}
+	fi
+}
+
+# ------------------------------------------------------------------------------
+# @internal-function java-pkg-simple_verbose-cmd
+#
+# Print a command before executing it. To give user some feedback
+# about what is going on, where the time is being spent, and also to
+# help debugging ebuilds.
+#
+# @param $@ - command to be called and its arguments
+# ------------------------------------------------------------------------------
+java-pkg-simple_verbose-cmd() {
+	# Log the command before running it. printf is used instead of echo
+	# so commands starting with '-n'/'-e' or containing backslashes are
+	# printed verbatim.
+	printf '%s\n' "$*"
+	"$@"
+}
+
+# ------------------------------------------------------------------------------
+# @eclass-end
+# ------------------------------------------------------------------------------
diff --git a/eclass/java-utils-2.eclass b/eclass/java-utils-2.eclass
new file mode 100644
index 000000000000..0a1a8a38b470
--- /dev/null
+++ b/eclass/java-utils-2.eclass
@@ -0,0 +1,2826 @@
+# Base eclass for Java packages
+#
+# Copyright (c) 2004-2005, Thomas Matthijs <axxo@gentoo.org>
+# Copyright (c) 2004, Karl Trygve Kalleberg <karltk@gentoo.org>
+# Copyright (c) 2004-2015, Gentoo Foundation
+#
+# Licensed under the GNU General Public License, v2
+#
+# $Id$
+
+# @ECLASS: java-utils-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Thomas Matthijs <axxo@gentoo.org>, Karl Trygve Kalleberg <karltk@gentoo.org>
+# @BLURB: Base eclass for Java packages
+# @DESCRIPTION:
+# This eclass provides functionality which is used by java-pkg-2.eclass,
+# java-pkg-opt-2.eclass and java-ant-2 eclass, as well as from ebuilds.
+#
+# This eclass should not be inherited this directly from an ebuild. Instead,
+# you should inherit java-pkg-2 for Java packages or java-pkg-opt-2 for packages
+# that have optional Java support. In addition you can inherit java-ant-2 for
+# Ant-based packages.
+inherit eutils versionator multilib
+
+IUSE="elibc_FreeBSD"
+
+# Make sure we use java-config-2
+export WANT_JAVA_CONFIG="2"
+
+# @VARIABLE: JAVA_PKG_PORTAGE_DEP
+# @INTERNAL
+# @DESCRIPTION:
+# The version of portage we need to function properly. Previously it was
+# portage with phase hooks support but now we use a version with proper env
+# saving. For EAPI 2 we have new enough stuff so let's have cleaner deps.
+has "${EAPI}" 0 1 && JAVA_PKG_PORTAGE_DEP=">=sys-apps/portage-2.1.2.7"
+
+# @VARIABLE: JAVA_PKG_E_DEPEND
+# @INTERNAL
+# @DESCRIPTION:
+# This is a convience variable to be used from the other java eclasses. This is
+# the version of java-config we want to use. Usually the latest stable version
+# so that ebuilds can use new features without depending on specific versions.
+JAVA_PKG_E_DEPEND=">=dev-java/java-config-2.2.0 ${JAVA_PKG_PORTAGE_DEP}"
+has source ${JAVA_PKG_IUSE} && JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} source? ( app-arch/zip )"
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_BOOTCLASSPATH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The version of bootclasspath the package needs to work. Translates to a proper
+# dependency. The bootclasspath can then be obtained by java-ant_rewrite-bootclasspath
+if [[ -n "${JAVA_PKG_WANT_BOOTCLASSPATH}" ]]; then
+ if [[ "${JAVA_PKG_WANT_BOOTCLASSPATH}" == "1.5" ]]; then
+ JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} >=dev-java/gnu-classpath-0.98-r1:0.98"
+ else
+ eerror "Unknown value of JAVA_PKG_WANT_BOOTCLASSPATH"
+ # since die in global scope doesn't work, this will make repoman fail
+ JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} BAD_JAVA_PKG_WANT_BOOTCLASSPATH"
+ fi
+fi
+
+# @ECLASS-VARIABLE: JAVA_PKG_ALLOW_VM_CHANGE
+# @DESCRIPTION:
+# Allow this eclass to change the active VM?
+# If your system VM isn't sufficient for the package, the build will fail
+# instead of trying to switch to another VM.
+#
+# Overriding the default can be useful for testing specific VMs locally, but
+# should not be used in the final ebuild.
+JAVA_PKG_ALLOW_VM_CHANGE=${JAVA_PKG_ALLOW_VM_CHANGE:="yes"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_VM
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Explicitly set a particular VM to use. If its not valid, it'll fall back to
+# whatever /etc/java-config-2/build/jdk.conf would elect to use.
+#
+# Should only be used for testing and debugging.
+#
+# Example: use sun-jdk-1.5 to emerge foo:
+# @CODE
+# JAVA_PKG_FORCE_VM=sun-jdk-1.5 emerge foo
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_BUILD_VM
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A list of VM handles to choose a build VM from. If the list contains the
+# currently active VM use that one, otherwise step through the list till a
+# usable/installed VM is found.
+#
+# This allows to use an explicit list of JDKs in DEPEND instead of a virtual.
+# Users of this variable must make sure at least one of the listed handles is
+# covered by DEPEND.
+# Requires JAVA_PKG_WANT_SOURCE and JAVA_PKG_WANT_TARGET to be set as well.
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_SOURCE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Specify a non-standard Java source version for compilation (via javac -source
+# parameter or Ant equivalent via build.xml rewriting done by java-ant-2 eclass).
+# Normally this is determined from the jdk version specified in DEPEND.
+# See java-pkg_get-source function below.
+#
+# Should generally only be used for testing and debugging.
+#
+# Use 1.4 source to emerge baz
+# @CODE
+# JAVA_PKG_WANT_SOURCE=1.4 emerge baz
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_WANT_TARGET
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Same as JAVA_PKG_WANT_SOURCE (see above) but for javac -target parameter,
+# which affects the version of generated bytecode.
+# Normally this is determined from the jre/jdk version specified in RDEPEND.
+# See java-pkg_get-target function below.
+#
+# Should generally only be used for testing and debugging.
+#
+# emerge bar to be compatible with 1.3
+# @CODE
+# JAVA_PKG_WANT_TARGET=1.3 emerge bar
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_RM_FILES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An array containing a list of files to remove. If defined, this array will be
+# automatically handed over to java-pkg_rm_files for processing during the
+# src_prepare phase.
+#
+# @CODE
+# JAVA_RM_FILES=(
+# path/to/File1.java
+# DELETEME.txt
+# )
+# @CODE
+
+# @VARIABLE: JAVA_PKG_COMPILER_DIR
+# @INTERNAL
+# @DESCRIPTION:
+# Directory where compiler settings are saved, without trailing slash.
+# You probably shouldn't touch this variable except local testing.
+JAVA_PKG_COMPILER_DIR=${JAVA_PKG_COMPILER_DIR:="/usr/share/java-config-2/compiler"}
+
+# @VARIABLE: JAVA_PKG_COMPILERS_CONF
+# @INTERNAL
+# @DESCRIPTION:
+# Path to file containing information about which compiler to use.
+# Can be overloaded, but it should be overloaded only for local testing.
+JAVA_PKG_COMPILERS_CONF=${JAVA_PKG_COMPILERS_CONF:="/etc/java-config-2/build/compilers.conf"}
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_COMPILER
+# @INTERNAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Explicitly set a list of compilers to choose from. This is normally read from
+# JAVA_PKG_COMPILERS_CONF.
+#
+# Useful for local testing.
+#
+# Use jikes and javac, in that order
+# @CODE
+# JAVA_PKG_FORCE_COMPILER="jikes javac"
+# @CODE
+
+# @ECLASS-VARIABLE: JAVA_PKG_FORCE_ANT_TASKS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# An $IFS separated list of ant tasks. Can be set in environment before calling
+# emerge/ebuild to override variables set in ebuild, mainly for testing before
+# putting the resulting (WANT_)ANT_TASKS into ebuild. Affects only ANT_TASKS in
+# eant() call, not the dependencies specified in WANT_ANT_TASKS.
+#
+# @CODE
+# JAVA_PKG_FORCE_ANT_TASKS="ant-junit ant-trax" \
+# ebuild foo.ebuild compile
+# @CODE
+
+# TODO document me
+JAVA_PKG_QA_VIOLATIONS=0
+
+# @FUNCTION: java-pkg_doexamples
+# @USAGE: [--subdir <subdir>] <file1/dir1> [<file2> ...]
+# @DESCRIPTION:
+# Installs given arguments to /usr/share/doc/${PF}/examples
+# If you give it only one parameter and it is a directory it will install
+# everything in that directory to the examples directory.
+#
+# @CODE
+# Parameters:
+# --subdir - If the examples need a certain directory structure
+# $* - list of files to install
+#
+# Examples:
+# java-pkg_doexamples demo
+# java-pkg_doexamples demo/* examples/*
+# @CODE
+java-pkg_doexamples() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+	java-pkg_check-phase install
+	java-pkg_init_paths_
+
+	local dest=/usr/share/doc/${PF}/examples
+	if [[ ${1} == --subdir ]]; then
+		local dest=${dest}/${2}
+		dodir ${dest}
+		shift 2
+	fi
+
+	# Single directory argument: install its *contents*; otherwise install
+	# every given file/dir as-is.
+	if [[ ${#} = 1 && -d ${1} ]]; then
+		( # dont want to pollute calling env
+			insinto "${dest}"
+			doins -r ${1}/*
+		) || die "Installing examples failed"
+	else
+		( # dont want to pollute calling env
+			insinto "${dest}"
+			doins -r "$@"
+		) || die "Installing examples failed"
+	fi
+
+	# Let's make a symlink to the directory we have everything else under
+	dosym "${dest}" "${JAVA_PKG_SHAREPATH}/examples" || die
+}
+
+# @FUNCTION: java-pkg_addres
+# @USAGE: <jar> <dir> [<find arguments> ...]
+# @DESCRIPTION:
+# Adds resource files to an existing jar.
+# It is important that the directory given is actually the root of the
+# corresponding resource tree. The target directory as well as
+# sources.lst, MANIFEST.MF, *.class, *.jar, and *.java files are
+# automatically excluded. Symlinks are always followed. Additional
+# arguments are passed through to find.
+#
+# @CODE
+# java-pkg_addres ${PN}.jar resources ! -name "*.html"
+# @CODE
+#
+# @param $1 - jar file
+# @param $2 - resource tree directory
+# @param $* - arguments to pass to find
+java-pkg_addres() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ ${#} -lt 2 ]] && die "at least two arguments needed"
+
+	# Resolve the jar to an absolute path so it stays valid after pushd.
+	local jar=$(realpath "$1" || die "realpath $1 failed")
+	local dir="$2"
+	shift 2
+
+	# Follow symlinks (-L); exclude build output, sources.lst, MANIFEST.MF
+	# and class/jar/java files; any remaining args are extra find filters.
+	pushd "${dir}" > /dev/null || die "pushd ${dir} failed"
+	find -L -type f ! -path "./target/*" ! -path "./sources.lst" ! -name "MANIFEST.MF" ! -regex ".*\.\(class\|jar\|java\)" "${@}" -print0 | xargs -0 jar uf "${jar}" || die "jar failed"
+	popd > /dev/null || die "popd failed"
+}
+
+# @FUNCTION: java-pkg_rm_files
+# @USAGE: java-pkg_rm_files File1.java File2.java ...
+# @DESCRIPTION:
+# Remove unneeded files in ${S}.
+#
+# Every now and then, you'll run into situations whereby a file needs removing,
+# be it a unit test or a regular java class.
+#
+# You can use this function by either:
+# - calling it yourself in java_prepare() and feeding java-pkg_rm_files with
+# the list of files you wish to remove.
+# - defining an array in the ebuild named JAVA_RM_FILES with the list of files
+# you wish to remove.
+#
+# Both ways work and it is left to the developer's preference. If the
+# JAVA_RM_FILES array is defined, it will be automatically handed over to
+# java-pkg_rm_files during the src_prepare phase.
+#
+# See java-utils-2_src_prepare.
+#
+# @CODE
+# java-pkg_rm_files File1.java File2.java
+# @CODE
+#
+# @param $* - list of files to remove.
+java-pkg_rm_files() {
+	debug-print-function ${FUNCNAME} $*
+	# NOTE: IFS is the two-character string '\n' (not a real newline);
+	# harmless here since "$@" is quoted, kept for compatibility.
+	# 'filename' is declared local so the loop does not leak a global.
+	local IFS="\n" filename
+	for filename in "$@"; do
+		# Paths are relative; the -f test runs against the CWD while the
+		# removal targets ${S} -- callers are expected to run from ${S}.
+		[[ ! -f "${filename}" ]] && die "${filename} is not a regular file. Aborting."
+		einfo "Removing unneeded file ${filename}"
+		rm -f "${S}/${filename}" || die "cannot remove ${filename}"
+		eend $?
+	done
+}
+
+# @FUNCTION: java-pkg_dojar
+# @USAGE: <jar1> [<jar2> ...]
+# @DESCRIPTION:
+# Installs any number of jars.
+# Jar's will be installed into /usr/share/${PN}(-${SLOT})/lib/ by default.
+# You can use java-pkg_jarinto to change this path.
+# You should never install a jar with a package version in the filename.
+# Instead, use java-pkg_newjar defined below.
+#
+# @CODE
+# java-pkg_dojar dist/${PN}.jar dist/${PN}-core.jar
+# @CODE
+#
+# @param $* - list of jars to install
+java-pkg_dojar() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+	java-pkg_check-phase install
+	java-pkg_init_paths_
+
+	# Create JARDEST if it doesn't exist
+	dodir ${JAVA_PKG_JARDEST}
+
+	local jar
+	# for each jar
+	for jar in "${@}"; do
+		local jar_basename=$(basename "${jar}")
+
+		# QA: jar names should not carry ${PV}; use java-pkg_newjar instead
+		java-pkg_check-versioned-jar ${jar_basename}
+
+		# check if it exists
+		if [[ -e "${jar}" ]] ; then
+			# Don't overwrite if jar has already been installed with the same
+			# name
+			local dest="${D}${JAVA_PKG_JARDEST}/${jar_basename}"
+			if [[ -e "${dest}" ]]; then
+				ewarn "Overwriting ${dest}"
+			fi
+
+			# install it into JARDEST if it's a non-symlink
+			if [[ ! -L "${jar}" ]] ; then
+				#but first check class version when in strict mode.
+				is-java-strict && java-pkg_verify-classes "${jar}"
+
+				INSDESTTREE="${JAVA_PKG_JARDEST}" \
+					doins "${jar}" || die "failed to install ${jar}"
+				java-pkg_append_ JAVA_PKG_CLASSPATH "${JAVA_PKG_JARDEST}/${jar_basename}"
+				debug-print "installed ${jar} to ${D}${JAVA_PKG_JARDEST}"
+			# make a symlink to the original jar if it's symlink
+			else
+				# TODO use dosym, once we find something that could use it
+				# -nichoj
+				ln -s "$(readlink "${jar}")" "${D}${JAVA_PKG_JARDEST}/${jar_basename}"
+				debug-print "${jar} is a symlink, linking accordingly"
+			fi
+		else
+			die "${jar} does not exist"
+		fi
+	done
+
+	# Persist the updated JAVA_PKG_CLASSPATH into package.env.
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_regjar
+# @USAGE: </path/to/installed/jar>
+# @DESCRIPTION:
+# Records an already installed (in ${D}) jar in the package.env
+# This would mostly be used if the package has make or a custom script to
+# install things.
+#
+# WARNING:
+# if you want to use shell expansion, you have to use ${D}/... as the 'for'
+# loop in this function will not be able to expand the path, here's an example:
+#
+# @CODE
+# java-pkg_regjar ${D}/opt/my-java/lib/*.jar
+# @CODE
+#
+
+# TODO should we be making sure the jar is present on ${D} or wherever?
+java-pkg_regjar() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-pkg_check-phase install
+
+	[[ ${#} -lt 1 ]] && die "at least one argument needed"
+
+	java-pkg_init_paths_
+
+	local jar jar_dir jar_file
+	for jar in "${@}"; do
+		# TODO use java-pkg_check-versioned-jar
+		if [[ -e "${jar}" || -e "${D}${jar}" ]]; then
+			[[ -d "${jar}" || -d "${D}${jar}" ]] \
+				&& die "Called ${FUNCNAME} on a directory $*"
+
+			#check that class version correct when in strict mode
+			is-java-strict && java-pkg_verify-classes "${jar}"
+
+			# nelchael: we should strip ${D} in this case too, here's why:
+			# imagine such call:
+			# java-pkg_regjar ${D}/opt/java/*.jar
+			# such call will fall into this case (-e ${jar}) and will
+			# record paths with ${D} in package.env
+			java-pkg_append_ JAVA_PKG_CLASSPATH "${jar#${D}}"
+		else
+			# An unexpanded glob lands here; hint at the likely cause.
+			if [[ ${jar} = *\** ]]; then
+				eerror "The argument ${jar} to ${FUNCNAME}"
+				eerror "has * in it. If you want it to glob in"
+				eerror '${D} add ${D} to the argument.'
+			fi
+			debug-print "${jar} or ${D}${jar} not found"
+			die "${jar} does not exist"
+		fi
+	done
+
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_newjar
+# @USAGE: <path/to/oldname.jar> [<newname.jar>]
+# @DESCRIPTION:
+# Installs a jar with a new name (defaults to $PN.jar)
+#
+# For example, installs a versioned jar without the version
+java-pkg_newjar() {
+	debug-print-function ${FUNCNAME} $*
+
+	local original_jar="${1}"
+	local new_jar="${2:-${PN}.jar}"
+	local new_jar_dest="${T}/${new_jar}"
+
+	[[ -z ${original_jar} ]] && die "Must specify a jar to install"
+	[[ ! -f ${original_jar} ]] \
+		&& die "${original_jar} does not exist or is not a file!"
+
+	# Stage a renamed copy in ${T}, then install it like any other jar.
+	rm -f "${new_jar_dest}" || die "Failed to remove ${new_jar_dest}"
+	cp "${original_jar}" "${new_jar_dest}" \
+		|| die "Failed to copy ${original_jar} to ${new_jar_dest}"
+	java-pkg_dojar "${new_jar_dest}"
+}
+
+# @FUNCTION: java-pkg_addcp
+# @USAGE: <classpath>
+# @DESCRIPTION:
+# Add something to the package's classpath. For jars, you should use dojar,
+# newjar, or regjar. This is typically used to add directories to the classpath.
+# The parameters of this function are appended to JAVA_PKG_CLASSPATH
+java-pkg_addcp() {
+	# Append the given entries to the recorded classpath and flush the
+	# result to package.env immediately.
+	java-pkg_append_ JAVA_PKG_CLASSPATH "${@}"
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_doso
+# @USAGE: <path/to/file1.so> [...]
+# @DESCRIPTION:
+# Installs any number of JNI libraries
+# They will be installed into /usr/lib by default, but java-pkg_sointo
+# can be used change this path
+#
+# @CODE
+# Example:
+# java-pkg_doso *.so
+# @CODE
+java-pkg_doso() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-pkg_check-phase install
+
+	[[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"
+
+	java-pkg_init_paths_
+
+	local lib
+	# for each lib
+	for lib in "$@" ; do
+		# if the lib exists...
+		if [[ -e "${lib}" ]] ; then
+			# install if it isn't a symlink
+			if [[ ! -L "${lib}" ]] ; then
+				INSDESTTREE="${JAVA_PKG_LIBDEST}" \
+					INSOPTIONS="-m0755" \
+					doins "${lib}" || die "failed to install ${lib}"
+				# record the *directory* (not the file) in package.env
+				java-pkg_append_ JAVA_PKG_LIBRARY "${JAVA_PKG_LIBDEST}"
+				debug-print "Installing ${lib} to ${JAVA_PKG_LIBDEST}"
+			# otherwise make a symlink to the symlink's origin
+			else
+				dosym "$(readlink "${lib}")" "${JAVA_PKG_LIBDEST}/${lib##*/}"
+				debug-print "${lib} is a symlink, linking accordantly"
+			fi
+		# otherwise die
+		else
+			die "${lib} does not exist"
+		fi
+	done
+
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_regso
+# @USAGE: <file1.so> [...]
+# @DESCRIPTION:
+# Registers an already installed JNI library in package.env.
+#
+# @CODE
+# Parameters:
+# $@ - JNI libraries to register
+#
+# Example:
+# java-pkg_regso *.so /path/*.so
+# @CODE
+java-pkg_regso() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-pkg_check-phase install
+
+	[[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"
+
+	java-pkg_init_paths_
+
+	local lib target_dir
+	for lib in "$@" ; do
+		# Check the absolute path of the lib
+		if [[ -e "${lib}" ]] ; then
+			target_dir="$(java-pkg_expand_dir_ ${lib})"
+			java-pkg_append_ JAVA_PKG_LIBRARY "/${target_dir#${D}}"
+		# Check the path of the lib relative to ${D}
+		elif [[ -e "${D}${lib}" ]]; then
+			target_dir="$(java-pkg_expand_dir_ ${D}${lib})"
+			java-pkg_append_ JAVA_PKG_LIBRARY "${target_dir}"
+		else
+			die "${lib} does not exist"
+		fi
+	done
+
+	# Persist JAVA_PKG_LIBRARY into package.env.
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_jarinto
+# @USAGE: </path/to/install/jars/into>
+# @DESCRIPTION:
+# Changes the path jars are installed into via subsequent java-pkg_dojar calls.
+java-pkg_jarinto() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Affects every subsequent java-pkg_dojar call in this ebuild.
+	JAVA_PKG_JARDEST="${1}"
+}
+
+# @FUNCTION: java-pkg_sointo
+# @USAGE: </path/to/install/sofiles/into>
+# @DESCRIPTION:
+# Changes the path that JNI libraries are installed into via subsequent
+# java-pkg_doso calls.
+java-pkg_sointo() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Affects every subsequent java-pkg_doso call in this ebuild.
+	JAVA_PKG_LIBDEST="${1}"
+}
+
+# @FUNCTION: java-pkg_dohtml
+# @USAGE: <path/to/javadoc/documentation> [...]
+# @DESCRIPTION:
+# Install Javadoc HTML documentation. Usage of java-pkg_dojavadoc is preferred.
+#
+# @CODE
+# java-pkg_dohtml dist/docs/
+# @CODE
+java-pkg_dohtml() {
+	debug-print-function ${FUNCNAME} $*
+
+	[[ ${#} -lt 1 ]] && die "At least one argument required for ${FUNCNAME}"
+
+	# from /usr/lib/portage/bin/dohtml -h
+	# -f Set list of allowed extensionless file names.
+	# package-list is an extensionless javadoc index file that must be kept.
+	dohtml -f package-list "$@"
+
+	# this probably shouldn't be here but it provides
+	# a reasonable way to catch # docs for all of the
+	# old ebuilds.
+	java-pkg_recordjavadoc
+}
+
+# @FUNCTION: java-pkg_dojavadoc
+# @USAGE: [--symlink destination] <path/to/javadocs/root>
+# @DESCRIPTION:
+# Installs javadoc documentation. This should be controlled by the doc use flag.
+#
+# @CODE
+# Parameters:
+# $1: optional --symlink creates to symlink like this for html
+# documentation bundles.
+# $2: - The javadoc root directory.
+#
+# Examples:
+# java-pkg_dojavadoc docs/api
+# java-pkg_dojavadoc --symlink apidocs docs/api
+# @CODE
+java-pkg_dojavadoc() {
+	debug-print-function ${FUNCNAME} $*
+
+	# For html documentation bundles that link to Javadoc
+	local symlink
+	if [[ ${1} = --symlink ]]; then
+		symlink=${2}
+		shift 2
+	fi
+
+	local dir="$1"
+	local dest=/usr/share/doc/${PF}/html
+
+	# QA checks
+
+	java-pkg_check-phase install
+	java-pkg_init_paths_
+
+	[[ -z "${dir}" ]] && die "Must specify a directory!"
+	[[ ! -d "${dir}" ]] && die "${dir} does not exist, or isn't a directory!"
+	if [[ ! -e "${dir}/index.html" ]]; then
+		local msg="No index.html in javadoc directory"
+		ewarn "${msg}"
+		# missing index.html is only fatal in strict mode
+		is-java-strict && die "${msg}"
+	fi
+
+	if [[ -e ${D}/${dest}/api ]]; then
+		eerror "${dest} already exists. Will not overwrite."
+		die "${dest}"
+	fi
+
+	# Renaming to match our directory layout
+
+	local dir_to_install="${dir}"
+	if [[ "$(basename "${dir}")" != "api" ]]; then
+		dir_to_install="${T}/api"
+		# TODO use doins
+		cp -r "${dir}" "${dir_to_install}" || die "cp failed"
+	fi
+
+	# Actual installation
+
+	java-pkg_dohtml -r "${dir_to_install}"
+
+	# Let's make a symlink to the directory we have everything else under
+	dosym ${dest}/api "${JAVA_PKG_SHAREPATH}/api" || die
+
+	if [[ ${symlink} ]]; then
+		debug-print "symlinking ${dest}/{api,${symlink}}"
+		# brace expansion: dosym ${dest}/api ${dest}/${symlink}
+		dosym ${dest}/{api,${symlink}} || die
+	fi
+}
+
+# @FUNCTION: java-pkg_dosrc
+# @USAGE: <path/to/sources> [...]
+# @DESCRIPTION:
+# Installs a zip containing the source for a package, so it can be used
+# from IDEs like Eclipse and NetBeans.
+# Ebuild needs to DEPEND on app-arch/zip to use this. It also should be controlled by USE=source.
+#
+# @CODE
+# Example:
+# java-pkg_dosrc src/*
+# @CODE
+
+# TODO change so it the arguments it takes are the base directories containing
+# source -nichoj
+#
+# TODO should we be able to handle multiple calls to dosrc? -nichoj
+#
+# TODO maybe we can take an existing zip/jar? -nichoj
+#
+# FIXME apparently this fails if you give it an empty directories
+java-pkg_dosrc() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-pkg_check-phase install
+
+	# (the original performed this argument check twice, once with the
+	# legacy [ ] form; the redundant copy has been dropped)
+	[[ ${#} -lt 1 ]] && die "At least one argument needed"
+
+	# QA: using this helper requires app-arch/zip in DEPEND
+	if ! [[ ${DEPEND} = *app-arch/zip* ]]; then
+		local msg="${FUNCNAME} called without app-arch/zip in DEPEND"
+		java-pkg_announce-qa-violation ${msg}
+	fi
+
+	java-pkg_init_paths_
+
+	# Collect the *.java files of every given directory into one archive.
+	local zip_name="${PN}-src.zip"
+	local zip_path="${T}/${zip_name}"
+	local dir
+	for dir in "${@}"; do
+		local dir_parent=$(dirname "${dir}")
+		local dir_name=$(basename "${dir}")
+		pushd ${dir_parent} > /dev/null || die "problem entering ${dir_parent}"
+		zip -q -r ${zip_path} ${dir_name} -i '*.java'
+		local result=$?
+		# 12 means zip has nothing to do
+		if [[ ${result} != 12 && ${result} != 0 ]]; then
+			die "failed to zip ${dir_name}"
+		fi
+		popd >/dev/null
+	done
+
+	# Install the zip
+	INSDESTTREE=${JAVA_PKG_SOURCESPATH} \
+		doins ${zip_path} || die "Failed to install source"
+
+	# Record the archive location for package.env.
+	JAVA_SOURCES="${JAVA_PKG_SOURCESPATH}/${zip_name}"
+	java-pkg_do_write_
+}
+
+# @FUNCTION: java-pkg_dolauncher
+# @USAGE: <filename> [options]
+# @DESCRIPTION:
+# Make a wrapper script to launch/start this package
+# If necessary, the wrapper will switch to the appropriate VM.
+#
+# Can be called without parameters if the package installs only one jar
+# that has the Main-class attribute set. The wrapper will be named ${PN}.
+#
+# @CODE
+# Parameters:
+# $1 - filename of launcher to create
+# $2 - options, as follows:
+# --main the.main.class.to.start
+# --jar /the/jar/to/launch.jar or just <name>.jar
+# --java_args 'Extra arguments to pass to java'
+# --pkg_args 'Extra arguments to pass to the package'
+# --pwd Directory the launcher changes to before executing java
+# -into Directory to install the launcher to, instead of /usr/bin
+# -pre Prepend contents of this file to the launcher
+# @CODE
+java-pkg_dolauncher() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-pkg_check-phase install
+	java-pkg_init_paths_
+
+	# Without arguments the launcher is simply named after the package.
+	if [[ ${#} = 0 ]]; then
+		local name="${PN}"
+	else
+		local name="${1}"
+		shift
+	fi
+
+	# TODO rename to launcher
+	local target="${T}/${name}"
+	local var_tmp="${T}/launcher_variables_tmp"
+	local target_dir pre
+
+	# Process the rest of the arguments
+	while [[ -n "${1}" && -n "${2}" ]]; do
+		local var="${1}" value="${2}"
+		if [[ "${var:0:2}" == "--" ]]; then
+			# '--foo bar' becomes gjl_foo="bar", both in this shell and
+			# in the generated launcher script via ${var_tmp}.
+			local var=${var:2}
+			echo "gjl_${var}=\"${value}\"" >> "${var_tmp}"
+			local gjl_${var}="${value}"
+		elif [[ "${var}" == "-into" ]]; then
+			target_dir="${value}"
+		elif [[ "${var}" == "-pre" ]]; then
+			pre="${value}"
+		fi
+		shift 2
+	done
+
+	# Test if no --jar and --main arguments were given and
+	# in that case check if the package only installs one jar
+	# and use that jar.
+	if [[ -z "${gjl_jar}" && -z "${gjl_main}" ]]; then
+		local cp="${JAVA_PKG_CLASSPATH}"
+		# no ':' in the classpath and it ends in .jar => exactly one jar
+		if [[ "${cp/:}" = "${cp}" && "${cp%.jar}" != "${cp}" ]]; then
+			echo "gjl_jar=\"${JAVA_PKG_CLASSPATH}\"" >> "${var_tmp}"
+		else
+			local msg="Not enough information to create a launcher given."
+			msg="${msg} Please give --jar or --main argument to ${FUNCNAME}."
+			die "${msg}"
+		fi
+	fi
+
+	# Write the actual script
+	echo "#!/bin/bash" > "${target}"
+	if [[ -n "${pre}" ]]; then
+		if [[ -f "${pre}" ]]; then
+			cat "${pre}" >> "${target}"
+		else
+			die "-pre specified file '${pre}' does not exist"
+		fi
+	fi
+	echo "gjl_package=${JAVA_PKG_NAME}" >> "${target}"
+	cat "${var_tmp}" >> "${target}"
+	rm -f "${var_tmp}"
+	# the shared launcher library performs the VM selection and exec
+	echo "source /usr/share/java-config-2/launcher/launcher.bash" >> "${target}"
+
+	if [[ -n "${target_dir}" ]]; then
+		DESTTREE="${target_dir}" dobin "${target}"
+		local ret=$?
+		return ${ret}
+	else
+		dobin "${target}"
+	fi
+}
+
+# @FUNCTION: java-pkg_dowar
+# @DESCRIPTION:
+# Install war files.
+# TODO document
+java-pkg_dowar() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Check for arguments
+	[[ ${#} -lt 1 ]] && die "At least one argument needed"
+	java-pkg_check-phase install
+
+	java-pkg_init_paths_
+
+	local war
+	for war in $* ; do
+		local warpath
+		# TODO evaluate if we want to handle symlinks differently -nichoj
+		# Check for symlink
+		if [[ -L "${war}" ]] ; then
+			cp "${war}" "${T}"
+			# NOTE(review): no '/' between ${T} and the basename -- this only
+			# resolves correctly if ${T} ends with a slash; verify.
+			warpath="${T}$(basename "${war}")"
+		# Check for directory
+		# TODO evaluate if we want to handle directories differently -nichoj
+		elif [[ -d "${war}" ]] ; then
+			echo "dowar: warning, skipping directory ${war}"
+			continue
+		else
+			warpath="${war}"
+		fi
+
+		# Install those files like you mean it
+		INSOPTIONS="-m 0644" \
+			INSDESTTREE=${JAVA_PKG_WARDEST} \
+			doins ${warpath}
+	done
+}
+
+# @FUNCTION: java-pkg_recordjavadoc
+# @INTERNAL
+# @DESCRIPTION:
+# Scan for JavaDocs, and record their existence in the package.env file
+
+# TODO make sure this in the proper section
+java-pkg_recordjavadoc()
+{
+	debug-print-function ${FUNCNAME} $*
+	# the find statement is important
+	# as some packages include multiple trees of javadoc
+	# (-printf '%h:' yields a colon-separated list of containing dirs)
+	JAVADOC_PATH="$(find ${D}/usr/share/doc/ -name allclasses-frame.html -printf '%h:')"
+	# remove $D - TODO: check this is ok with all cases of the above
+	JAVADOC_PATH="${JAVADOC_PATH//${D}}"
+	if [[ -n "${JAVADOC_PATH}" ]] ; then
+		debug-print "javadocs found in ${JAVADOC_PATH%:}"
+		# flush the recorded paths into package.env
+		java-pkg_do_write_
+	else
+		debug-print "No javadocs found"
+	fi
+}
+
+
+# @FUNCTION: java-pkg_jar-from
+# @USAGE: [--build-only] [--with-dependencies] [--virtual] [--into dir] <package> [<package.jar>] [<destination.jar>]
+# @DESCRIPTION:
+# Makes a symlink to a jar from a certain package
+# A lot of java packages include dependencies in a lib/ directory
+# You can use this function to replace these bundled dependencies.
+# The dependency is recorded into package.env DEPEND line, unless "--build-only"
+# is passed as the very first argument, for jars that have to be present only
+# at build time and are not needed on runtime (junit testing etc).
+#
+# @CODE
+# Example: get all jars from xerces slot 2
+# java-pkg_jar-from xerces-2
+#
+# Example: get a specific jar from xerces slot 2
+# java-pkg_jar-from xerces-2 xml-apis.jar
+#
+# Example: get a specific jar from xerces slot 2, and name it differently
+# java-pkg_jar-from xerces-2 xml-apis.jar xml.jar
+#
+# Example: get junit.jar which is needed only for building
+# java-pkg_jar-from --build-only junit junit.jar
+# @CODE
+#
+# @CODE
+# Parameters
+# --build-only - makes the jar(s) not added into package.env DEPEND line.
+# (assumed automatically when called inside src_test)
+# --with-dependencies - get jars also from requested package's dependencies
+# transitively.
+# --virtual - Packages passed to this function are to be handled as virtuals
+# and will not have individual jar dependencies recorded.
+# --into $dir - symlink jar(s) into $dir (must exist) instead of .
+# $1 - Package to get jars from, or comma-separated list of packages in
+# case other parameters are not used.
+# $2 - jar from package. If not specified, all jars will be used.
+# $3 - When a single jar is specified, destination filename of the
+# symlink. Defaults to the name of the jar.
+# @CODE
+
+# TODO could probably be cleaned up a little
+java-pkg_jar-from() {
+	debug-print-function ${FUNCNAME} $*
+
+	local build_only=""
+	local destdir="."
+	local deep=""
+	local virtual=""
+	local record_jar=""
+
+	# inside src_test dependencies are implicitly build-only
+	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"
+
+	while [[ "${1}" == --* ]]; do
+		if [[ "${1}" = "--build-only" ]]; then
+			build_only="build"
+		elif [[ "${1}" = "--with-dependencies" ]]; then
+			deep="--with-dependencies"
+		elif [[ "${1}" = "--virtual" ]]; then
+			virtual="true"
+		elif [[ "${1}" = "--into" ]]; then
+			destdir="${2}"
+			shift
+		else
+			die "java-pkg_jar-from called with unknown parameter: ${1}"
+		fi
+		shift
+	done
+
+	local target_pkg="${1}" target_jar="${2}" destjar="${3}"
+
+	[[ -z ${target_pkg} ]] && die "Must specify a package"
+
+	# EAPI 1 used '-' instead of ':' in slotted package names
+	if [[ "${EAPI}" == "1" ]]; then
+		target_pkg="${target_pkg//:/-}"
+	fi
+
+	# default destjar to the target jar
+	[[ -z "${destjar}" ]] && destjar="${target_jar}"
+
+	local error_msg="There was a problem getting the classpath for ${target_pkg}."
+	local classpath
+	classpath="$(java-config ${deep} --classpath=${target_pkg})"
+	[[ $? != 0 ]] && die ${error_msg}
+
+	# When we have commas this function is called to bring jars from multiple
+	# packages. This affects recording of dependencies performed later
+	# which expects one package only, so we do it here.
+	if [[ ${target_pkg} = *,* ]]; then
+		for pkg in ${target_pkg//,/ }; do
+			java-pkg_ensure-dep "${build_only}" "${pkg}"
+			[[ -z "${build_only}" ]] && java-pkg_record-jar_ "${pkg}"
+		done
+		# setting this disables further record-jar_ calls later
+		record_jar="true"
+	else
+		java-pkg_ensure-dep "${build_only}" "${target_pkg}"
+	fi
+
+	# Record the entire virtual as a dependency so that
+	# no jars are missed.
+	if [[ -z "${build_only}" && -n "${virtual}" ]]; then
+		java-pkg_record-jar_ "${target_pkg}"
+		# setting this disables further record-jars_ calls later
+		record_jar="true"
+	fi
+
+	pushd ${destdir} > /dev/null \
+		|| die "failed to change directory to ${destdir}"
+
+	local jar
+	for jar in ${classpath//:/ }; do
+		local jar_name=$(basename "${jar}")
+		if [[ ! -f "${jar}" ]] ; then
+			debug-print "${jar} from ${target_pkg} does not exist"
+			die "Installation problems with jars in ${target_pkg} - is it installed?"
+		fi
+		# If no specific target jar was indicated, link it
+		if [[ -z "${target_jar}" ]] ; then
+			# NOTE(review): ${target_jar} is empty in this branch, so this
+			# -f test can never fire -- was ${jar_name} intended?
+			[[ -f "${target_jar}" ]] && rm "${target_jar}"
+			# links ${jar} into the current dir under its basename
+			ln -snf "${jar}" \
+				|| die "Failed to make symlink from ${jar} to ${jar_name}"
+			if [[ -z "${record_jar}" ]]; then
+				if [[ -z "${build_only}" ]]; then
+					java-pkg_record-jar_ "${target_pkg}" "${jar}"
+				else
+					java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
+				fi
+			fi
+		# otherwise, if the current jar is the target jar, link it
+		elif [[ "${jar_name}" == "${target_jar}" ]] ; then
+			[[ -f "${destjar}" ]] && rm "${destjar}"
+			ln -snf "${jar}" "${destjar}" \
+				|| die "Failed to make symlink from ${jar} to ${destjar}"
+			if [[ -z "${record_jar}" ]]; then
+				if [[ -z "${build_only}" ]]; then
+					java-pkg_record-jar_ "${target_pkg}" "${jar}"
+				else
+					java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
+				fi
+			fi
+			popd > /dev/null
+			return 0
+		fi
+	done
+	popd > /dev/null
+	# if no target was specified, we're ok
+	if [[ -z "${target_jar}" ]] ; then
+		return 0
+	# otherwise, die bitterly
+	else
+		die "Failed to find ${target_jar:-jar} in ${target_pkg}"
+	fi
+}
+
+# @FUNCTION: java-pkg_jarfrom
+# @DESCRIPTION:
+# See java-pkg_jar-from
+java-pkg_jarfrom() {
+	# Backwards-compatible alias; forwards all arguments unchanged.
+	java-pkg_jar-from "$@"
+}
+
# @FUNCTION: java-pkg_getjars
# @USAGE: [--build-only] [--with-dependencies] <package1>[,<package2>...]
# @DESCRIPTION:
# Get the classpath provided by any number of packages
# Among other things, this can be passed to 'javac -classpath' or 'ant -lib'.
# The providing packages are recorded as dependencies into package.env DEPEND
# line, unless "--build-only" is passed as the very first argument, for jars
# that have to be present only at build time and are not needed on runtime
# (junit testing etc).
#
# @CODE
# Example: Get the classpath for xerces-2 and xalan,
# java-pkg_getjars xerces-2,xalan
#
# Example Return:
# /usr/share/xerces-2/lib/xml-apis.jar:/usr/share/xerces-2/lib/xmlParserAPIs.jar:/usr/share/xalan/lib/xalan.jar
#
#
# Parameters:
# --build-only - makes the jar(s) not added into package.env DEPEND line.
#   (assumed automatically when called inside src_test)
# --with-dependencies - get jars also from requested package's dependencies
#   transitively.
# $1 - list of packages to get jars from
#   (passed to java-config --classpath)
# @CODE
java-pkg_getjars() {
	debug-print-function ${FUNCNAME} $*

	local build_only=""
	local deep=""

	# Jars fetched during src_test are never runtime dependencies.
	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"

	while [[ "${1}" == --* ]]; do
		if [[ "${1}" = "--build-only" ]]; then
			build_only="build"
		elif [[ "${1}" = "--with-dependencies" ]]; then
			deep="--with-dependencies"
		else
			# Fixed: this message used to blame java-pkg_jar-from.
			die "java-pkg_getjars called with unknown parameter: ${1}"
		fi
		shift
	done

	[[ ${#} -ne 1 ]] && die "${FUNCNAME} takes only one argument besides --*"


	local pkgs="${1}"

	# EAPI 1 has no ':' slot dependencies; slots are encoded with '-'.
	if [[ "${EAPI}" == "1" ]]; then
		pkgs="${pkgs//:/-}"
	fi

	# Declaration split from assignment so $? below reflects java-config,
	# not 'local'. Also fixes 'jars' leaking into the global scope.
	local jars
	jars="$(java-config ${deep} --classpath=${pkgs})"
	[[ $? != 0 ]] && die "java-config --classpath=${pkgs} failed"
	debug-print "${pkgs}:${jars}"

	# First verify every requested package is a declared dependency ...
	local pkg
	for pkg in ${pkgs//,/ }; do
		java-pkg_ensure-dep "${build_only}" "${pkg}"
	done

	# ... then record them into package.env.
	for pkg in ${pkgs//,/ }; do
		if [[ -z "${build_only}" ]]; then
			java-pkg_record-jar_ "${pkg}"
		else
			java-pkg_record-jar_ --build-only "${pkg}"
		fi
	done

	echo "${jars}"
}
+
# @FUNCTION: java-pkg_getjar
# @USAGE: [--build-only] [--virtual] <package> <jarfile>
# @DESCRIPTION:
# Get the complete path of a single jar from a package
# The providing package is recorded as runtime dependency into package.env
# DEPEND line, unless "--build-only" is passed as the very first argument, for
# jars that have to be present only at build time and are not needed on runtime
# (junit testing etc).
#
# @CODE
# Example:
# java-pkg_getjar xerces-2 xml-apis.jar
# returns
# /usr/share/xerces-2/lib/xml-apis.jar
#
# Parameters:
# --build-only - makes the jar not added into package.env DEPEND line.
# --virtual - Packages passed to this function are to be handled as virtuals
#   and will not have individual jar dependencies recorded.
# $1 - package to use
# $2 - jar to get
# @CODE
java-pkg_getjar() {
	debug-print-function ${FUNCNAME} $*

	local build_only=""
	local virtual=""
	local record_jar=""

	# src_test jars are build-time only.
	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"

	while [[ "${1}" == --* ]]; do
		if [[ "${1}" = "--build-only" ]]; then
			build_only="build"
		elif [[ "${1}" == "--virtual" ]]; then
			virtual="true"
		else
			die "java-pkg_getjar called with unknown parameter: ${1}"
		fi
		shift
	done

	[[ ${#} -ne 2 ]] && die "${FUNCNAME} takes only two arguments besides --*"

	local pkg="${1}" target_jar="${2}" jar

	# EAPI 1 slot encoding.
	if [[ "${EAPI}" == "1" ]]; then
		pkg="${pkg//:/-}"
	fi

	[[ -z ${pkg} ]] && die "Must specify package to get a jar from"
	[[ -z ${target_jar} ]] && die "Must specify jar to get"

	local error_msg="Could not find classpath for ${pkg}. Are you sure its installed?"
	local classpath
	classpath=$(java-config --classpath=${pkg})
	# Fixed: quote the message so die receives it as a single argument
	# instead of word-split fragments.
	[[ $? != 0 ]] && die "${error_msg}"

	java-pkg_ensure-dep "${build_only}" "${pkg}"

	# Record the package(Virtual) as a dependency and then set record_jar
	# so that individual jars are not recorded.
	if [[ -n "${virtual}" ]]; then
		if [[ -z "${build_only}" ]]; then
			java-pkg_record-jar_ "${pkg}"
		else
			java-pkg_record-jar_ --build-only "${pkg}"
		fi
		record_jar="true"
	fi

	for jar in ${classpath//:/ }; do
		if [[ ! -f "${jar}" ]] ; then
			die "Installation problem with jar ${jar} in ${pkg} - is it installed?"
		fi

		# ${jar##*/} == basename without forking a subshell, and safe
		# for paths containing whitespace.
		if [[ "${jar##*/}" == "${target_jar}" ]] ; then
			# Only record jars that aren't build-only
			if [[ -z "${record_jar}" ]]; then
				if [[ -z "${build_only}" ]]; then
					java-pkg_record-jar_ "${pkg}" "${jar}"
				else
					java-pkg_record-jar_ --build-only "${pkg}" "${jar}"
				fi
			fi
			echo "${jar}"
			return 0
		fi
	done

	die "Could not find ${target_jar} in ${pkg}"
	return 1
}
+
# @FUNCTION: java-pkg_register-dependency
# @USAGE: <package>[,<package2>...] [<jarfile>]
# @DESCRIPTION:
# Registers runtime dependency on a package, list of packages, or a single jar
# from a package, into package.env DEPEND line. Can only be called in
# src_install phase.
# Intended for binary packages where you don't need to symlink the jars or get
# their classpath during build. As such, the dependencies only need to be
# specified in ebuild's RDEPEND, and should be omitted in DEPEND.
#
# @CODE
# Parameters:
# $1 - comma-separated list of packages, or a single package
# $2 - if param $1 is a single package, optionally specify the jar
#   to depend on
#
# Examples:
# Record the dependency on whole xerces-2 and xalan,
# java-pkg_register-dependency xerces-2,xalan
#
# Record the dependency on ant.jar from ant-core
# java-pkg_register-dependency ant-core ant.jar
# @CODE
#
# Note: Passing both list of packages as the first parameter AND specifying the
# jar as the second is not allowed and will cause the function to die. We assume
# that there's more chance one passes such combination as a mistake, than that
# there are more packages providing identically named jar without class
# collisions.
java-pkg_register-dependency() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"

	local pkgs="${1}"
	local jar="${2}"

	[[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"

	# EAPI 1 slot encoding.
	if [[ "${EAPI}" == "1" ]]; then
		pkgs="${pkgs//:/-}"
	fi

	if [[ -z "${jar}" ]]; then
		# Fixed: loop variable is now local instead of leaking into
		# the global scope.
		local pkg
		for pkg in ${pkgs//,/ }; do
			java-pkg_ensure-dep runtime "${pkg}"
			java-pkg_record-jar_ "${pkg}"
		done
	else
		[[ ${pkgs} == *,* ]] && \
			die "${FUNCNAME} called with both package list and jar name"
		java-pkg_ensure-dep runtime "${pkgs}"
		java-pkg_record-jar_ "${pkgs}" "${jar}"
	fi

	java-pkg_do_write_
}
+
# @FUNCTION: java-pkg_register-optional-dependency
# @USAGE: <package>[,<package2>...] [<jarfile>]
# @DESCRIPTION:
# Registers optional runtime dependency on a package, list of packages, or a
# single jar from a package, into package.env OPTIONAL_DEPEND line. Can only be
# called in src_install phase.
# Intended for packages that can use other packages when those are in classpath.
# Will be put on classpath by launcher if they are installed. Typical case is
# JDBC implementations for various databases. It's better than having USE flag
# for each implementation triggering hard dependency.
#
# @CODE
# Parameters:
# $1 - comma-separated list of packages, or a single package
# $2 - if param $1 is a single package, optionally specify the jar to depend on
#
# Example:
# Record the optional dependency on some jdbc providers
# java-pkg_register-optional-dependency jdbc-jaybird,jtds-1.2,jdbc-mysql
# @CODE
#
# Note: Passing both list of packages as the first parameter AND specifying the
# jar as the second is not allowed and will cause the function to die. We assume
# that there's more chance one passes such combination as a mistake, than that
# there are more packages providing identically named jar without class
# collisions.
java-pkg_register-optional-dependency() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"

	local pkgs="${1}"
	local jar="${2}"

	[[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"

	# EAPI 1 slot encoding.
	if [[ "${EAPI}" == "1" ]]; then
		pkgs="${pkgs//:/-}"
	fi

	if [[ -z "${jar}" ]]; then
		# Fixed: loop variable is now local instead of leaking into
		# the global scope.
		local pkg
		for pkg in ${pkgs//,/ }; do
			java-pkg_record-jar_ --optional "${pkg}"
		done
	else
		[[ ${pkgs} == *,* ]] && \
			die "${FUNCNAME} called with both package list and jar name"
		java-pkg_record-jar_ --optional "${pkgs}" "${jar}"
	fi

	java-pkg_do_write_
}
+
# @FUNCTION: java-pkg_register-environment-variable
# @USAGE: <name> <value>
# @DESCRIPTION:
# Register an arbitrary environment variable into package.env. The gjl launcher
# for this package or any package depending on this will export it into the
# environment before executing the java command.
# Must only be called in src_install phase.

# File collecting NAME="value" lines; consumed by java-pkg_do_write_.
JAVA_PKG_EXTRA_ENV="${T}/java-pkg-extra-env"
# Space-separated list of names registered so far.
JAVA_PKG_EXTRA_ENV_VARS=""
java-pkg_register-environment-variable() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} != 2 ]] && die "${FUNCNAME} takes two arguments"

	# Fixed: quote the redirection target; ${T} may contain spaces.
	echo "${1}=\"${2}\"" >> "${JAVA_PKG_EXTRA_ENV}"
	JAVA_PKG_EXTRA_ENV_VARS="${JAVA_PKG_EXTRA_ENV_VARS} ${1}"

	java-pkg_do_write_
}
+
# @FUNCTION: java-pkg_get-bootclasspath
# @USAGE: <version>
# @DESCRIPTION:
# Returns classpath of a given bootclasspath-providing package version.
#
# @param $1 - the version of bootclasspath (e.g. 1.5), 'auto' for bootclasspath
# of the current JDK
java-pkg_get-bootclasspath() {
	local ver="${1}" bcp

	case "${ver}" in
		auto)
			# Ask the active JDK for its bootclasspath.
			bcp="$(java-config -g BOOTCLASSPATH)"
			;;
		1.5)
			# 1.5 bootclasspath is provided by GNU Classpath.
			bcp="$(java-pkg_getjars --build-only gnu-classpath-0.98)"
			;;
		*)
			eerror "unknown parameter of java-pkg_get-bootclasspath"
			die "unknown parameter of java-pkg_get-bootclasspath"
			;;
	esac

	echo "${bcp}"
}
+
+
+# This function reads stdin, and based on that input, figures out how to
+# populate jars from the filesystem.
# Need to figure out a good way of making use of this, i.e. be able to use a
# string that was built instead of stdin
+# NOTE: this isn't quite ready for primetime.
+#java-pkg_populate-jars() {
+# local line
+#
+# read line
+# while [[ -n "${line}" ]]; do
+# # Ignore comments
+# [[ ${line%%#*} == "" ]] && continue
+#
+# # get rid of any spaces
+# line="${line// /}"
+#
+# # format: path=jarinfo
+# local path=${line%%=*}
+# local jarinfo=${line##*=}
+#
+# # format: jar@package
+# local jar=${jarinfo%%@*}.jar
+# local package=${jarinfo##*@}
+# if [[ -n ${replace_only} ]]; then
+# [[ ! -f $path ]] && die "No jar exists at ${path}"
+# fi
+# if [[ -n ${create_parent} ]]; then
+# local parent=$(dirname ${path})
+# mkdir -p "${parent}"
+# fi
+# java-pkg_jar-from "${package}" "${jar}" "${path}"
+#
+# read line
+# done
+#}
+
# @FUNCTION: java-pkg_find-normal-jars
# @USAGE: [<path/to/directory>]
# @DESCRIPTION:
# Find the files with suffix .jar file in the given directory (default: $WORKDIR)
# and print one path per line.
# Returns 0 if at least one jar was found, non-zero otherwise.
java-pkg_find-normal-jars() {
	local dir="${1:-${WORKDIR}}"
	local jar found

	# Fixed: read NUL-delimited output instead of word-splitting
	# $(find ...), so paths containing whitespace are handled correctly;
	# 'jar' is also local now instead of leaking.
	while IFS= read -r -d '' jar; do
		echo "${jar}"
		found="true"
	done < <(find "${dir}" -name "*.jar" -type f -print0)

	[[ "${found}" ]]
}
+
# @FUNCTION: java-pkg_ensure-no-bundled-jars
# @DESCRIPTION:
# Try to locate bundled jar files in ${WORKDIR} and die if found.
# This function should be called after WORKDIR has been populated with symlink
# to system jar files or bundled jars removed.
java-pkg_ensure-no-bundled-jars() {
	debug-print-function ${FUNCNAME} $*

	# Declaration split from assignment so 'local' does not mask the
	# command's exit status.
	local bundled_jars
	bundled_jars=$(java-pkg_find-normal-jars)
	if [[ -n ${bundled_jars} ]]; then
		echo "Bundled jars found:"
		local jar
		for jar in ${bundled_jars}; do
			# NOTE(review): ${jar/./} deletes the first '.' in the
			# path (presumably a leading './') — confirm intent.
			echo "$(pwd)${jar/./}"
		done
		die "Bundled jars found!"
	fi
}
+
# @FUNCTION: java-pkg_ensure-vm-version-sufficient
# @INTERNAL
# @DESCRIPTION:
# Checks if we have a sufficient VM and dies if we don't.
java-pkg_ensure-vm-version-sufficient() {
	debug-print-function ${FUNCNAME} $*

	if ! java-pkg_is-vm-version-sufficient; then
		# Fixed typo in debug output: "suffient" -> "sufficient".
		debug-print "VM is not sufficient"
		eerror "Current Java VM cannot build this package"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM cannot build this package"
	fi
}
+
# @FUNCTION: java-pkg_is-vm-version-sufficient
# @INTERNAL
# @DESCRIPTION:
# @RETURN: zero - VM is sufficient; non-zero - VM is not sufficient
java-pkg_is-vm-version-sufficient() {
	debug-print-function ${FUNCNAME} $*

	# The function's exit status is exactly depend-java-query's.
	depend-java-query --is-sufficient "${DEPEND}" > /dev/null
}
+
# @FUNCTION: java-pkg_ensure-vm-version-eq
# @INTERNAL
# @DESCRIPTION:
# Die if the current VM is not equal to the argument passed.
#
# @param $@ - Desired VM version to ensure
java-pkg_ensure-vm-version-eq() {
	debug-print-function ${FUNCNAME} $*

	# Fixed: quote "$@" (was unquoted, inconsistent with the -ge
	# variant) and the "suffient" typo in the debug message.
	if ! java-pkg_is-vm-version-eq "$@" ; then
		debug-print "VM is not sufficient"
		eerror "This package requires a Java VM version = $@"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM too old"
	fi
}
+
# @FUNCTION: java-pkg_is-vm-version-eq
# @USAGE: <version>
# @INTERNAL
# @RETURN: zero - VM versions are equal; non-zero - VM version are not equal
java-pkg_is-vm-version-eq() {
	debug-print-function ${FUNCNAME} $*

	local needed_version="$@"

	[[ -z "${needed_version}" ]] && die "need an argument"

	local vm_version="$(java-pkg_get-vm-version)"

	# Only the major.minor components take part in the comparison.
	vm_version="$(get_version_component_range 1-2 "${vm_version}")"
	needed_version="$(get_version_component_range 1-2 "${needed_version}")"

	if [[ -z "${vm_version}" ]]; then
		debug-print "Could not get JDK version from DEPEND"
		return 1
	fi

	if [[ "${vm_version}" == "${needed_version}" ]]; then
		debug-print "Detected a JDK(${vm_version}) = ${needed_version}"
		return 0
	fi

	debug-print "Detected a JDK(${vm_version}) != ${needed_version}"
	return 1
}
+
# @FUNCTION: java-pkg_ensure-vm-version-ge
# @INTERNAL
# @DESCRIPTION:
# Die if the current VM is not greater than the desired version
#
# @param $@ - VM version to compare current to
java-pkg_ensure-vm-version-ge() {
	debug-print-function ${FUNCNAME} $*

	if ! java-pkg_is-vm-version-ge "$@" ; then
		# Fixed typo in debug output: "suffient" -> "sufficient".
		debug-print "vm is not sufficient"
		eerror "This package requires a Java VM version >= $@"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM too old"
	fi
}
+
# @FUNCTION: java-pkg_is-vm-version-ge
# @INTERNAL
# @DESCRIPTION:
# @CODE
# Parameters:
# $@ - VM version to compare current VM to
# @CODE
# @RETURN: zero - current VM version is greater than checked version;
# non-zero - current VM version is not greater than checked version
java-pkg_is-vm-version-ge() {
	debug-print-function ${FUNCNAME} $*

	local needed_version=$@
	local vm_version=$(java-pkg_get-vm-version)

	# No detectable VM version means we cannot satisfy the check.
	if [[ -z "${vm_version}" ]]; then
		debug-print "Could not get JDK version from DEPEND"
		return 1
	fi

	if version_is_at_least "${needed_version}" "${vm_version}"; then
		debug-print "Detected a JDK(${vm_version}) >= ${needed_version}"
		return 0
	fi

	debug-print "Detected a JDK(${vm_version}) < ${needed_version}"
	return 1
}
+
# @FUNCTION: java-pkg_set-current-vm
# @INTERNAL
# @DESCRIPTION:
# Export the given VM handle as GENTOO_VM for later queries.
java-pkg_set-current-vm() {
	export GENTOO_VM="${1}"
}
+
# @FUNCTION: java-pkg_get-current-vm
# @INTERNAL
# @DESCRIPTION:
# Echo the handle of the currently selected VM (GENTOO_VM).
java-pkg_get-current-vm() {
	# Quoted (was unquoted) so the value is emitted verbatim, without
	# word splitting or globbing.
	echo "${GENTOO_VM}"
}
+
# @FUNCTION: java-pkg_current-vm-matches
# @INTERNAL
# @DESCRIPTION:
# Succeeds if the current VM handle is among the given arguments.
java-pkg_current-vm-matches() {
	# Exit status is that of 'has' itself.
	has $(java-pkg_get-current-vm) ${@}
}
+
# @FUNCTION: java-pkg_get-source
# @DESCRIPTION:
# Determines what source version should be used, for passing to -source.
# Unless you want to break things you probably shouldn't set _WANT_SOURCE
#
# @RETURN: string - Either the lowest possible source, or JAVA_PKG_WANT_SOURCE
java-pkg_get-source() {
	local src="${JAVA_PKG_WANT_SOURCE}"
	# Fall back to the lowest version satisfying the dependencies.
	[[ -n "${src}" ]] || src="$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")"
	echo ${src}
}
+
# @FUNCTION: java-pkg_get-target
# @DESCRIPTION:
# Determines what target version should be used, for passing to -target.
# If you don't care about lower versions, you can set _WANT_TARGET to the
# version of your JDK.
#
# @RETURN: string - Either the lowest possible target, or JAVA_PKG_WANT_TARGET
java-pkg_get-target() {
	local tgt="${JAVA_PKG_WANT_TARGET}"
	if [[ -z "${tgt}" ]]; then
		# Fall back to the lowest version satisfying the dependencies.
		tgt="$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")"
	fi
	echo ${tgt}
}
+
# @FUNCTION: java-pkg_get-javac
# @DESCRIPTION:
# Returns the compiler executable
java-pkg_get-javac() {
	debug-print-function ${FUNCNAME} $*

	# Resolve the configured compiler; sets GENTOO_COMPILER.
	java-pkg_init-compiler_
	local compiler="${GENTOO_COMPILER}"

	local compiler_executable
	if [[ "${compiler}" = "javac" ]]; then
		# nothing fancy needs to be done for javac
		compiler_executable="javac"
	else
		# for everything else, try to determine from an env file

		local compiler_env="/usr/share/java-config-2/compiler/${compiler}"
		if [[ -f ${compiler_env} ]]; then
			# Probe the env file's JAVAC in a subshell; our own
			# JAVAC is saved and restored around the probe.
			local old_javac=${JAVAC}
			unset JAVAC
			# try to get value of JAVAC
			compiler_executable="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${JAVAC})"
			# NOTE(review): if JAVAC was originally unset this
			# exports it as an empty string instead — confirm ok.
			export JAVAC=${old_javac}

			if [[ -z ${compiler_executable} ]]; then
				die "JAVAC is empty or undefined in ${compiler_env}"
			fi

			# check that it's executable
			if [[ ! -x ${compiler_executable} ]]; then
				die "${compiler_executable} doesn't exist, or isn't executable"
			fi
		else
			die "Could not find environment file for ${compiler}"
		fi
	fi
	echo ${compiler_executable}
}
+
# @FUNCTION: java-pkg_javac-args
# @DESCRIPTION:
# If an ebuild uses javac directly, instead of using ejavac, it should call this
# to know what -source/-target to use.
#
# @RETURN: string - arguments to pass to javac, complete with -target and -source
java-pkg_javac-args() {
	debug-print-function ${FUNCNAME} $*

	local want_source="$(java-pkg_get-source)"
	local want_target="$(java-pkg_get-target)"

	debug-print "want source: ${want_source}"
	debug-print "want target: ${want_target}"

	# Both values are mandatory; bail out if either lookup came back empty.
	if [[ -z "${want_source}" || -z "${want_target}" ]]; then
		die "Could not find valid -source/-target values for javac"
	fi

	echo "-source ${want_source} -target ${want_target}"
}
+
# @FUNCTION: java-pkg_get-jni-cflags
# @DESCRIPTION:
# Echos the CFLAGS for JNI compilations
java-pkg_get-jni-cflags() {
	# Platform-specific JNI headers live in a subdirectory of include/.
	local platform="linux"
	use elibc_FreeBSD && platform="freebsd"

	# TODO do a check that the directories are valid
	local flags="-I${JAVA_HOME}/include -I${JAVA_HOME}/include/${platform}"

	echo ${flags}
}
+
# @FUNCTION: java-pkg_ensure-gcj
# @DESCRIPTION:
# Removed; intentionally fatal so stale ebuilds fail loudly.
java-pkg_ensure-gcj() {
	# was enforcing sys-devel/gcc[gcj]
	die "${FUNCNAME} was removed. Use use-deps available as of EAPI 2 instead. #261562"
}
+
# @FUNCTION: java-pkg_ensure-test
# @DESCRIPTION:
# Removed; intentionally fatal so stale ebuilds fail loudly.
java-pkg_ensure-test() {
	# was enforcing USE=test if FEATURES=test
	# Fixed typo in the message: "mangers" -> "managers".
	die "${FUNCNAME} was removed. Package managers handle this already. #278965"
}
+
# @FUNCTION: java-pkg_register-ant-task
# @USAGE: [--version x.y] [<name>]
# @DESCRIPTION:
# Register this package as ant task, so that ant will load it when no specific
# ANT_TASKS are specified. Note that even without this registering, all packages
# specified in ANT_TASKS will be loaded. Mostly used by the actual ant tasks
# packages, but can be also used by other ebuilds that used to symlink their
# .jar into /usr/share/ant-core/lib to get autoloaded, for backwards
# compatibility.
#
# @CODE
# Parameters
# --version x.y Register only for ant version x.y (otherwise for any ant
# version). Used by the ant-* packages to prevent loading of mismatched
# ant-core ant tasks after core was updated, before the tasks are updated,
# without a need for blockers.
# $1 Name to register as. Defaults to JAVA_PKG_NAME ($PN[-$SLOT])
# @CODE
java-pkg_register-ant-task() {
	local TASKS_DIR="tasks"

	# check for --version x.y parameters
	while [[ -n "${1}" && -n "${2}" ]]; do
		local var="${1#--}"
		local val="${2}"
		if [[ "${var}" == "version" ]]; then
			TASKS_DIR="tasks-${val}"
		else
			# Fixed: the message used to name a nonexistent
			# "java-pkg_register-ant-tasks" (trailing 's').
			die "Unknown parameter passed to java-pkg_register-ant-task: ${1} ${2}"
		fi
		shift 2
	done

	local TASK_NAME="${1:-${JAVA_PKG_NAME}}"

	# The marker file's mere existence registers the task.
	dodir "/usr/share/ant/${TASKS_DIR}"
	touch "${D}/usr/share/ant/${TASKS_DIR}/${TASK_NAME}"
}
+
# @FUNCTION: java-pkg_ant-tasks-depend
# @INTERNAL
# @DESCRIPTION:
# Translates the WANT_ANT_TASKS variable into valid dependencies.
java-pkg_ant-tasks-depend() {
	debug-print-function ${FUNCNAME} ${WANT_ANT_TASKS}

	if [[ -n "${WANT_ANT_TASKS}" ]]; then
		# Fixed: DEP's loop variable 'i' is now local.
		local DEP="" i
		for i in ${WANT_ANT_TASKS}
		do
			if [[ ${i} = ant-* ]]; then
				DEP="${DEP}dev-java/${i} "
			elif [[ ${i} = */*:* ]]; then
				DEP="${DEP}${i} "
			else
				# Fixed: report on stderr so the message is not
				# captured into the dependency string by $(...)
				# callers.
				echo "Invalid atom in WANT_ANT_TASKS: ${i}" >&2
				return 1
			fi
		done
		echo ${DEP}
		return 0
	else
		return 0
	fi
}
+
+
+# @FUNCTION: ejunit_
+# @INTERNAL
+# @DESCRIPTION:
+# Internal Junit wrapper function. Makes it easier to run the tests and checks for
+# dev-java/junit in DEPEND. Launches the tests using junit.textui.TestRunner.
+# @CODE
+# Parameters:
+# $1 - junit package (junit or junit-4)
+# $2 - -cp or -classpath
+# $3 - classpath; junit and recorded dependencies get appended
+# $@ - the rest of the parameters are passed to java
+# @CODE
+ejunit_() {
+ debug-print-function ${FUNCNAME} $*
+
+ local pkgs
+ if [[ -f ${JAVA_PKG_DEPEND_FILE} ]]; then
+ for atom in $(cat ${JAVA_PKG_DEPEND_FILE} | tr : ' '); do
+ pkgs=${pkgs},$(echo ${atom} | sed -re "s/^.*@//")
+ done
+ fi
+
+ local junit=${1}
+ shift 1
+
+ local cp=$(java-pkg_getjars --with-dependencies ${junit}${pkgs})
+ if [[ ${1} = -cp || ${1} = -classpath ]]; then
+ cp="${2}:${cp}"
+ shift 2
+ else
+ cp=".:${cp}"
+ fi
+
+ local runner=junit.textui.TestRunner
+ if [[ "${junit}" == "junit-4" ]] ; then
+ runner=org.junit.runner.JUnitCore
+ fi
+ debug-print "Calling: java -cp \"${cp}\" -Djava.awt.headless=true ${runner} ${@}"
+ java -cp "${cp}" -Djava.awt.headless=true ${runner} "${@}" || die "Running junit failed"
+}
+
# @FUNCTION: ejunit
# @DESCRIPTION:
# Junit wrapper function. Makes it easier to run the tests and checks for
# dev-java/junit in DEPEND. Launches the tests using org.junit.runner.JUnitCore.
#
# @CODE
# Parameters:
# $1 - -cp or -classpath
# $2 - classpath; junit and recorded dependencies get appended
# $@ - the rest of the parameters are passed to java
#
# Examples:
# ejunit -cp build/classes org.blinkenlights.jid3.test.AllTests
# ejunit org.blinkenlights.jid3.test.AllTests
# ejunit org.blinkenlights.jid3.test.FirstTest org.blinkenlights.jid3.test.SecondTest
# @CODE
ejunit() {
	debug-print-function ${FUNCNAME} $*
	# Delegate to the shared runner with the junit-3 package.
	ejunit_ "junit" "${@}"
}
+
# @FUNCTION: ejunit4
# @DESCRIPTION:
# Junit4 wrapper function. Makes it easier to run the tests and checks for
# dev-java/junit:4 in DEPEND. Launches the tests using junit.textui.TestRunner.
#
# @CODE
# Parameters:
# $1 - -cp or -classpath
# $2 - classpath; junit and recorded dependencies get appended
# $@ - the rest of the parameters are passed to java
#
# Examples:
# ejunit4 -cp build/classes org.blinkenlights.jid3.test.AllTests
# ejunit4 org.blinkenlights.jid3.test.AllTests
# ejunit4 org.blinkenlights.jid3.test.FirstTest \
# org.blinkenlights.jid3.test.SecondTest
# @CODE
ejunit4() {
	debug-print-function ${FUNCNAME} $*
	# Delegate to the shared runner with the junit-4 package.
	ejunit_ "junit-4" "${@}"
}
+
# @FUNCTION: java-utils-2_src_prepare
# @DESCRIPTION:
# src_prepare Searches for bundled jars
# Don't call directly, but via java-pkg-2_src_prepare!
java-utils-2_src_prepare() {
	# Run the ebuild-supplied java_prepare hook if one is defined.
	java-pkg_func-exists java_prepare && java_prepare

	# Remove any files the ebuild listed in the JAVA_RM_FILES array.
	if [[ ${JAVA_RM_FILES[@]} ]]; then
		debug-print "$FUNCNAME: removing unneeded files"
		java-pkg_rm_files "${JAVA_RM_FILES[@]}"
	fi

	# In strict mode, report any bundled jars/classes left in WORKDIR.
	if is-java-strict; then
		echo "Searching for bundled jars:"
		java-pkg_find-normal-jars || echo "None found."
		echo "Searching for bundled classes (no output if none found):"
		find "${WORKDIR}" -name "*.class"
		echo "Search done."
	fi
}
+
# @FUNCTION: java-utils-2_pkg_preinst
# @DESCRIPTION:
# pkg_preinst Searches for missing and unneeded dependencies
# Don't call directly, but via java-pkg-2_pkg_preinst!
java-utils-2_pkg_preinst() {
	# Dependency analysis only runs in strict mode.
	is-java-strict || return 0

	# Nothing to analyse without a package.env; ant-tasks are exempt.
	if [[ ! -e "${JAVA_PKG_ENV}" ]] || has ant-tasks ${INHERITED}; then
		return
	fi

	if has_version dev-java/java-dep-check; then
		# GENTOO_VM is cleared so the checker picks its own VM.
		local output=$(GENTOO_VM= java-dep-check --image "${D}" "${JAVA_PKG_ENV}")
		[[ ${output} ]] && ewarn "${output}"
	else
		eerror "Install dev-java/java-dep-check for dependency checking"
	fi
}
+
# @FUNCTION: eant
# @USAGE: <ant_build_target(s)>
# @DESCRIPTION:
# Ant wrapper function. Will use the appropriate compiler, based on user-defined
# compiler. Will also set proper ANT_TASKS from the variable ANT_TASKS,
# variables:
#
# @CODE
# Variables:
# EANT_GENTOO_CLASSPATH - calls java-pkg_getjars for the value and adds to the
# gentoo.classpath property. Be sure to call java-ant_rewrite-classpath in src_unpack.
# EANT_NEEDS_TOOLS - add tools.jar to the gentoo.classpath. Should only be used
# for build-time purposes, the dependency is not recorded to
# package.env!
# ANT_TASKS - used to determine ANT_TASKS before calling Ant.
# @CODE
eant() {
	debug-print-function ${FUNCNAME} $*

	# Rewrite build.xml classpaths when invoked during src_compile.
	if [[ ${EBUILD_PHASE} = compile ]]; then
		java-ant-2_src_configure
	fi

	if ! has java-ant-2 ${INHERITED}; then
		local msg="You should inherit java-ant-2 when using eant"
		java-pkg_announce-qa-violation "${msg}"
	fi

	# Never fetch from the network; ignore the system CLASSPATH.
	local antflags="-Dnoget=true -Dmaven.mode.offline=true -Dbuild.sysclasspath=ignore"

	java-pkg_init-compiler_
	local compiler="${GENTOO_COMPILER}"

	# Read ANT_BUILD_COMPILER from the compiler env file in a subshell so
	# sourcing it cannot pollute this shell's environment.
	local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
	local build_compiler="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER})"
	if [[ "${compiler}" != "javac" && -z "${build_compiler}" ]]; then
		die "ANT_BUILD_COMPILER undefined in ${compiler_env}"
	fi

	if [[ ${compiler} != "javac" ]]; then
		antflags="${antflags} -Dbuild.compiler=${build_compiler}"
		# Figure out any extra stuff to put on the classpath for compilers aside
		# from javac
		# ANT_BUILD_COMPILER_DEPS should be something that could be passed to
		# java-config -p
		local build_compiler_deps="$(source ${JAVA_PKG_COMPILER_DIR}/${compiler} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER_DEPS})"
		if [[ -n ${build_compiler_deps} ]]; then
			antflags="${antflags} -lib $(java-config -p ${build_compiler_deps})"
		fi
	fi

	# QA: -lib bypasses dependency tracking; fatal under JAVA_PKG_STRICT.
	for arg in "${@}"; do
		if [[ ${arg} = -lib ]]; then
			if is-java-strict; then
				eerror "You should not use the -lib argument to eant because it will fail"
				eerror "with JAVA_PKG_STRICT. Please use for example java-pkg_jar-from"
				eerror "or ant properties to make dependencies available."
				eerror "For ant tasks use WANT_ANT_TASKS or ANT_TASKS from."
				eerror "split ant (>=dev-java/ant-core-1.7)."
				die "eant -lib is deprecated/forbidden"
			else
				echo "eant -lib is deprecated. Turn JAVA_PKG_STRICT on for"
				echo "more info."
			fi
		fi
	done

	# parse WANT_ANT_TASKS for atoms
	local want_ant_tasks
	for i in ${WANT_ANT_TASKS}; do
		if [[ ${i} = */*:* ]]; then
			# Reduce "cat/pkg:slot" to "pkg-slot" (":0" is dropped).
			i=${i#*/}
			i=${i%:0}
			want_ant_tasks+="${i/:/-} "
		else
			want_ant_tasks+="${i} "
		fi
	done
	# default ANT_TASKS to WANT_ANT_TASKS, if ANT_TASKS is not set explicitly
	ANT_TASKS="${ANT_TASKS:-${want_ant_tasks% }}"

	# override ANT_TASKS with JAVA_PKG_FORCE_ANT_TASKS if it's set
	ANT_TASKS="${JAVA_PKG_FORCE_ANT_TASKS:-${ANT_TASKS}}"

	# if ant-tasks is not set by ebuild or forced, use none
	ANT_TASKS="${ANT_TASKS:-none}"

	# at this point, ANT_TASKS should be "all", "none" or explicit list
	if [[ "${ANT_TASKS}" == "all" ]]; then
		einfo "Using all available ANT_TASKS"
	elif [[ "${ANT_TASKS}" == "none" ]]; then
		einfo "Disabling all optional ANT_TASKS"
	else
		einfo "Using following ANT_TASKS: ${ANT_TASKS}"
	fi

	export ANT_TASKS

	[[ -n ${JAVA_PKG_DEBUG} ]] && antflags="${antflags} --execdebug -debug"
	[[ -n ${PORTAGE_QUIET} ]] && antflags="${antflags} -q"

	local gcp="${EANT_GENTOO_CLASSPATH}"
	local getjarsarg=""

	if [[ ${EBUILD_PHASE} = "test" ]]; then
		antflags="${antflags} -DJunit.present=true"
		getjarsarg="--with-dependencies"

		# Pull in the matching ant-junit task atom for the test phase.
		local re="\bant-junit4?([-:]\S+)?\b"
		[[ ${ANT_TASKS} =~ ${re} ]] && gcp+=" ${BASH_REMATCH[0]}"
	else
		antflags="${antflags} -Dmaven.test.skip=true"
	fi

	local cp

	for atom in ${gcp}; do
		cp+=":$(java-pkg_getjars ${getjarsarg} ${atom})"
	done

	[[ ${EANT_NEEDS_TOOLS} ]] && cp+=":$(java-config --tools)"
	[[ ${EANT_GENTOO_CLASSPATH_EXTRA} ]] && cp+=":${EANT_GENTOO_CLASSPATH_EXTRA}"

	# ${cp#:} strips the leading ':' accumulated above.
	if [[ ${cp#:} ]]; then
		# It seems ant does not like single quotes around ${cp}
		antflags="${antflags} -Dgentoo.classpath=\"${cp#:}\""
	fi

	[[ -n ${JAVA_PKG_DEBUG} ]] && echo ant ${antflags} "${@}"
	debug-print "Calling ant (GENTOO_VM: ${GENTOO_VM}): ${antflags} ${@}"
	ant ${antflags} "${@}" || die "eant failed"
}
+
# @FUNCTION: ejavac
# @USAGE: <javac_arguments>
# @DESCRIPTION:
# Javac wrapper function. Will use the appropriate compiler, based on
# /etc/java-config/compilers.conf
ejavac() {
	debug-print-function ${FUNCNAME} $*

	local compiler_executable javac_args
	compiler_executable=$(java-pkg_get-javac)
	javac_args="$(java-pkg_javac-args)"

	# Echo the full command line first when debugging is requested.
	[[ -n ${JAVA_PKG_DEBUG} ]] && echo ${compiler_executable} ${javac_args} "${@}"
	${compiler_executable} ${javac_args} "${@}" || die "ejavac failed"
}
+
# @FUNCTION: ejavadoc
# @USAGE: <javadoc_arguments>
# @DESCRIPTION:
# javadoc wrapper function. Will set some flags based on the VM version
# due to strict javadoc rules in 1.8.
ejavadoc() {
	debug-print-function ${FUNCNAME} $*

	local javadoc_args=""

	# javadoc 1.8+ fails builds on minor doc-comment issues; relax it.
	java-pkg_is-vm-version-ge "1.8" && javadoc_args="-Xdoclint:none"

	javadoc ${javadoc_args} "${@}" || die "ejavadoc failed"
}
+
# @FUNCTION: java-pkg_filter-compiler
# @USAGE: <compiler(s)_to_filter>
# @DESCRIPTION:
# Used to prevent the use of some compilers. Should be used in src_compile.
# Basically, it just appends onto JAVA_PKG_FILTER_COMPILER
java-pkg_filter-compiler() {
	# Append (space-separated) to the running filter list.
	JAVA_PKG_FILTER_COMPILER+=" $@"
}
+
# @FUNCTION: java-pkg_force-compiler
# @USAGE: <compiler(s)_to_force>
# @DESCRIPTION:
# Used to force the use of particular compilers. Should be used in src_compile.
# A common use of this would be to force ecj-3.1 to be used on amd64, to avoid
# OutOfMemoryErrors that may come up.
java-pkg_force-compiler() {
	# Overwrites (not appends) any previously forced compiler list.
	JAVA_PKG_FORCE_COMPILER="$@"
}
+
# @FUNCTION: use_doc
# @DESCRIPTION:
#
# Helper function for getting ant to build javadocs. If the user has USE=doc,
# then 'javadoc' or the argument are returned. Otherwise, there is no return.
#
# The output of this should be passed to ant.
# @CODE
# Parameters:
# $@ - Option value to return. Defaults to 'javadoc'
#
# Examples:
# build javadocs by calling 'javadoc' target
# eant $(use_doc)
#
# build javadocs by calling 'apidoc' target
# eant $(use_doc apidoc)
# @CODE
# @RETURN string - Name of the target to create javadocs
use_doc() {
	local target="${@:-javadoc}"
	# Status stays non-zero when USE=doc is disabled (no output either).
	use doc && echo ${target}
}
+
+
# @FUNCTION: java-pkg_init
# @INTERNAL
# @DESCRIPTION:
# The purpose of this function, as the name might imply, is to initialize the
# Java environment. It ensures that that there aren't any environment variables
# that'll muss things up. It initializes some variables, which are used
# internally. And most importantly, it'll switch the VM if necessary.
#
# This shouldn't be used directly. Instead, java-pkg and java-pkg-opt will
# call it during each of the phases of the merge process.
java-pkg_init() {
	debug-print-function ${FUNCNAME} $*

	# Don't set up build environment if installing from binary. #206024 #258423
	[[ "${MERGE_TYPE}" == "binary" ]] && return
	# Also try Portage's nonstandard EMERGE_FROM for old EAPIs, if it doesn't
	# work nothing is lost.
	has ${EAPI:-0} 0 1 2 3 && [[ "${EMERGE_FROM}" == "binary" ]] && return

	# Start from a clean slate; the selected VM provides these later.
	unset JAVAC
	unset JAVA_HOME

	# Sanity check that java-config itself is runnable before relying on it.
	java-config --help >/dev/null || {
		eerror ""
		eerror "Can't run java-config --help"
		eerror "Have you upgraded python recently but haven't"
		eerror "run python-updater yet?"
		die "Can't run java-config --help"
	}

	# People do all kinds of weird things.
	# http://forums.gentoo.org/viewtopic-p-3943166.html
	local silence="${SILENCE_JAVA_OPTIONS_WARNING}"
	local accept="${I_WANT_GLOBAL_JAVA_OPTIONS}"
	if [[ -n ${_JAVA_OPTIONS} && -z ${accept} && -z ${silence} ]]; then
		ewarn "_JAVA_OPTIONS changes what java -version outputs at least for"
		ewarn "sun-jdk vms and and as such break configure scripts that"
		ewarn "use it (for example app-office/openoffice) so we filter it out."
		ewarn "Use SILENCE_JAVA_OPTIONS_WARNING=true in the environment (use"
		ewarn "make.conf for example) to silence this warning or"
		ewarn "I_WANT_GLOBAL_JAVA_OPTIONS to not filter it."
	fi

	if [[ -z ${accept} ]]; then
		# export _JAVA_OPTIONS= doesn't work because it will show up in java
		# -version output
		unset _JAVA_OPTIONS
		# phase hooks make this run many times without this
		I_WANT_GLOBAL_JAVA_OPTIONS="true"
	fi

	if java-pkg_func-exists ant_src_unpack; then
		java-pkg_announce-qa-violation "Using old ant_src_unpack. Should be src_unpack"
	fi

	# Pick (and possibly switch to) the build VM, then prefer its binaries.
	java-pkg_switch-vm
	PATH=${JAVA_HOME}/bin:${PATH}

	# TODO we will probably want to set JAVAC and JAVACFLAGS

	# Do some QA checks
	java-pkg_check-jikes

	# Can't use unset here because Portage does not save the unset
	# see https://bugs.gentoo.org/show_bug.cgi?id=189417#c11

	# When users have crazy classpaths some packages can fail to compile.
	# and everything should work with empty CLASSPATH.
	# This also helps prevent unexpected dependencies on random things
	# from the CLASSPATH.
	export CLASSPATH=

	# Unset external ANT_ stuff
	export ANT_TASKS=
	export ANT_OPTS=
	export ANT_RESPECT_JAVA_HOME=
}
+
+# @FUNCTION: java-pkg_init-compiler_
+# @INTERNAL
+# @DESCRIPTION:
+# This function attempts to figure out what compiler should be used. It does
+# this by reading the file at JAVA_PKG_COMPILERS_CONF, and checking the
+# COMPILERS variable defined there.
+# This can be overridden by a list in JAVA_PKG_FORCE_COMPILER
+#
+# It will go through the list of compilers, and verify that it supports the
+# target and source that are needed. If it is not suitable, then the next
+# compiler is checked. When JAVA_PKG_FORCE_COMPILER is defined, this checking
+# isn't done.
+#
+# Once which compiler to use has been figured out, it is set to
+# GENTOO_COMPILER.
+#
+# If you hadn't guessed, JAVA_PKG_FORCE_COMPILER is for testing only.
+#
+# If the user doesn't define anything in JAVA_PKG_COMPILERS_CONF, or no
+# suitable compiler was found there, then the default is to use javac provided
+# by the current VM.
+#
+#
+# @RETURN name of the compiler to use
+java-pkg_init-compiler_() {
+	debug-print-function ${FUNCNAME} $*
+
+	if [[ -n ${GENTOO_COMPILER} ]]; then
+		debug-print "GENTOO_COMPILER already set"
+		return
+	fi
+
+	local compilers;
+	if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
+		# Source the config in a subshell so only COMPILERS leaks out.
+		compilers="$(source ${JAVA_PKG_COMPILERS_CONF} 1>/dev/null 2>&1; echo ${COMPILERS})"
+	else
+		compilers=${JAVA_PKG_FORCE_COMPILER}
+	fi
+
+	debug-print "Read \"${compilers}\" from ${JAVA_PKG_COMPILERS_CONF}"
+
+	# Figure out if we should announce what compiler we're using
+	local compiler
+	for compiler in ${compilers}; do
+		debug-print "Checking ${compiler}..."
+		# javac should always be alright
+		if [[ ${compiler} = "javac" ]]; then
+			debug-print "Found javac... breaking"
+			export GENTOO_COMPILER="javac"
+			break
+		fi
+
+		# Filtered compilers are skipped -- unless the user forced one.
+		if has ${compiler} ${JAVA_PKG_FILTER_COMPILER}; then
+			if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
+				einfo "Filtering ${compiler}"
+				continue
+			fi
+		fi
+
+		# for non-javac, we need to make sure it supports the right target and
+		# source
+		local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
+		if [[ -f ${compiler_env} ]]; then
+			local desired_target="$(java-pkg_get-target)"
+			local desired_source="$(java-pkg_get-source)"
+
+
+			# Verify that the compiler supports target
+			local supported_target=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_TARGET})
+			if ! has ${desired_target} ${supported_target}; then
+				ewarn "${compiler} does not support -target ${desired_target}, skipping"
+				continue
+			fi
+
+			# Verify that the compiler supports source
+			local supported_source=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_SOURCE})
+			if ! has ${desired_source} ${supported_source}; then
+				ewarn "${compiler} does not support -source ${desired_source}, skipping"
+				continue
+			fi
+
+			# if you get here, then the compiler should be good to go
+			export GENTOO_COMPILER="${compiler}"
+			break
+		else
+			ewarn "Could not find configuration for ${compiler}, skipping"
+			ewarn "Perhaps it is not installed?"
+			continue
+		fi
+	done
+
+	# If it hasn't been defined already, default to javac
+	if [[ -z ${GENTOO_COMPILER} ]]; then
+		if [[ -n ${compilers} ]]; then
+			einfo "No suitable compiler found: defaulting to JDK default for compilation"
+		else
+			# probably don't need to notify users about the default.
+			:;#einfo "Defaulting to javac for compilation"
+		fi
+		# Honour a system-wide GENTOO_COMPILER from java-config if present.
+		if java-config -g GENTOO_COMPILER 2> /dev/null; then
+			export GENTOO_COMPILER=$(java-config -g GENTOO_COMPILER)
+		else
+			export GENTOO_COMPILER=javac
+		fi
+	else
+		einfo "Using ${GENTOO_COMPILER} for compilation"
+	fi
+
+}
+
+# @FUNCTION: java-pkg_init_paths_
+# @INTERNAL
+# @DESCRIPTION:
+# Initializes some variables that will be used. These variables are mostly used
+# to determine where things will eventually get installed.
+java-pkg_init_paths_() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Slotted packages install under a SLOT-suffixed name; for sub-slots
+	# ("x/y") only the part before the slash counts.
+	if [[ "${SLOT%/*}" == "0" ]] ; then
+		JAVA_PKG_NAME="${PN}"
+	else
+		JAVA_PKG_NAME="${PN}-${SLOT%/*}"
+	fi
+
+	JAVA_PKG_SHAREPATH="${DESTTREE}/share/${JAVA_PKG_NAME}"
+	JAVA_PKG_SOURCESPATH="${JAVA_PKG_SHAREPATH}/sources/"
+	JAVA_PKG_ENV="${D}${JAVA_PKG_SHAREPATH}/package.env"
+	JAVA_PKG_VIRTUALS_PATH="${DESTTREE}/share/java-config-2/virtuals"
+	JAVA_PKG_VIRTUAL_PROVIDER="${D}/${JAVA_PKG_VIRTUALS_PATH}/${JAVA_PKG_NAME}"
+
+	# Ebuild-provided destinations win; otherwise fall back to defaults.
+	[[ -z "${JAVA_PKG_JARDEST}" ]] && JAVA_PKG_JARDEST="${JAVA_PKG_SHAREPATH}/lib"
+	[[ -z "${JAVA_PKG_LIBDEST}" ]] && JAVA_PKG_LIBDEST="${DESTTREE}/$(get_libdir)/${JAVA_PKG_NAME}"
+	[[ -z "${JAVA_PKG_WARDEST}" ]] && JAVA_PKG_WARDEST="${JAVA_PKG_SHAREPATH}/webapps"
+
+	# TODO maybe only print once?
+	debug-print "JAVA_PKG_SHAREPATH: ${JAVA_PKG_SHAREPATH}"
+	debug-print "JAVA_PKG_ENV: ${JAVA_PKG_ENV}"
+	debug-print "JAVA_PKG_JARDEST: ${JAVA_PKG_JARDEST}"
+	debug-print "JAVA_PKG_LIBDEST: ${JAVA_PKG_LIBDEST}"
+	debug-print "JAVA_PKG_WARDEST: ${JAVA_PKG_WARDEST}"
+}
+
+# @FUNCTION: java-pkg_do_write_
+# @INTERNAL
+# @DESCRIPTION:
+# Writes the package.env out to disk.
+#
+# TODO change to do-write, to match everything else
+java-pkg_do_write_() {
+	debug-print-function ${FUNCNAME} $*
+	java-pkg_init_paths_
+	# Create directory for package.env
+	dodir "${JAVA_PKG_SHAREPATH}"
+	# Only write package.env if there is actually something to record.
+	if [[ -n "${JAVA_PKG_CLASSPATH}" || -n "${JAVA_PKG_LIBRARY}" || -f \
+		"${JAVA_PKG_DEPEND_FILE}" || -f \
+		"${JAVA_PKG_OPTIONAL_DEPEND_FILE}" ]]; then
+		# Create package.env
+		# The subshell groups all echoes into one redirection to the file.
+		(
+			echo "DESCRIPTION=\"${DESCRIPTION}\""
+			echo "GENERATION=\"2\""
+			echo "SLOT=\"${SLOT}\""
+			echo "CATEGORY=\"${CATEGORY}\""
+			echo "PVR=\"${PVR}\""
+
+			[[ -n "${JAVA_PKG_CLASSPATH}" ]] && echo "CLASSPATH=\"${JAVA_PKG_CLASSPATH}\""
+			[[ -n "${JAVA_PKG_LIBRARY}" ]] && echo "LIBRARY_PATH=\"${JAVA_PKG_LIBRARY}\""
+			[[ -n "${JAVA_PROVIDE}" ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\""
+			# Dependency files hold one entry per line; join them with ':'.
+			[[ -f "${JAVA_PKG_DEPEND_FILE}" ]] \
+				&& echo "DEPEND=\"$(sort -u "${JAVA_PKG_DEPEND_FILE}" | tr '\n' ':')\""
+			[[ -f "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" ]] \
+				&& echo "OPTIONAL_DEPEND=\"$(sort -u "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" | tr '\n' ':')\""
+			# Record the first virtual/jre or virtual/jdk atom found in *DEPEND.
+			echo "VM=\"$(echo ${RDEPEND} ${DEPEND} | sed -e 's/ /\n/g' | sed -n -e '/virtual\/\(jre\|jdk\)/ { p;q }')\"" # TODO cleanup !
+			[[ -f "${JAVA_PKG_BUILD_DEPEND_FILE}" ]] \
+				&& echo "BUILD_DEPEND=\"$(sort -u "${JAVA_PKG_BUILD_DEPEND_FILE}" | tr '\n' ':')\""
+		) > "${JAVA_PKG_ENV}"
+
+		# register target/source
+		local target="$(java-pkg_get-target)"
+		local source="$(java-pkg_get-source)"
+		[[ -n ${target} ]] && echo "TARGET=\"${target}\"" >> "${JAVA_PKG_ENV}"
+		[[ -n ${source} ]] && echo "SOURCE=\"${source}\"" >> "${JAVA_PKG_ENV}"
+
+		# register javadoc info
+		[[ -n ${JAVADOC_PATH} ]] && echo "JAVADOC_PATH=\"${JAVADOC_PATH}\"" \
+			>> ${JAVA_PKG_ENV}
+		# register source archives
+		[[ -n ${JAVA_SOURCES} ]] && echo "JAVA_SOURCES=\"${JAVA_SOURCES}\"" \
+			>> ${JAVA_PKG_ENV}
+
+
+		echo "MERGE_VM=\"${GENTOO_VM}\"" >> "${JAVA_PKG_ENV}"
+		[[ -n ${GENTOO_COMPILER} ]] && echo "MERGE_COMPILER=\"${GENTOO_COMPILER}\"" >> "${JAVA_PKG_ENV}"
+
+		# extra env variables
+		if [[ -n "${JAVA_PKG_EXTRA_ENV_VARS}" ]]; then
+			cat "${JAVA_PKG_EXTRA_ENV}" >> "${JAVA_PKG_ENV}" || die
+			# nested echo to remove leading/trailing spaces
+			echo "ENV_VARS=\"$(echo ${JAVA_PKG_EXTRA_ENV_VARS})\"" \
+				>> "${JAVA_PKG_ENV}" || die
+		fi
+
+		# Strip unnecessary leading and trailing colons
+		# TODO try to cleanup if possible
+		sed -e "s/=\":/=\"/" -e "s/:\"$/\"/" -i "${JAVA_PKG_ENV}" || die "Did you forget to call java_init ?"
+	else
+		debug-print "JAVA_PKG_CLASSPATH, JAVA_PKG_LIBRARY, JAVA_PKG_DEPEND_FILE"
+		debug-print "or JAVA_PKG_OPTIONAL_DEPEND_FILE not defined so can't"
+		debug-print "write package.env."
+	fi
+}
+
+# @FUNCTION: java-pkg_record-jar_
+# @INTERNAL
+# @DESCRIPTION:
+# Record an (optional) dependency to the package.env
+# @CODE
+# Parameters:
+# --optional - record dependency as optional
+# --build-only - record dependency as build_only
+# $1 - package to record
+# $2 - (optional) jar of package to record
+# @CODE
+# Per-kind scratch files under ${T}; java-pkg_do_write_ folds them into
+# package.env at install time.
+JAVA_PKG_DEPEND_FILE="${T}/java-pkg-depend"
+JAVA_PKG_OPTIONAL_DEPEND_FILE="${T}/java-pkg-optional-depend"
+JAVA_PKG_BUILD_DEPEND_FILE="${T}/java-pkg-build-depend"
+
+java-pkg_record-jar_() {
+	debug-print-function ${FUNCNAME} $*
+
+	local depend_file="${JAVA_PKG_DEPEND_FILE}"
+	case "${1}" in
+		"--optional") depend_file="${JAVA_PKG_OPTIONAL_DEPEND_FILE}"; shift;;
+		"--build-only") depend_file="${JAVA_PKG_BUILD_DEPEND_FILE}"; shift;;
+	esac
+
+	# Entries are either "pkg" or "jarname@pkg".
+	local pkg=${1} jar=${2} append
+	if [[ -z "${jar}" ]]; then
+		append="${pkg}"
+	else
+		append="$(basename ${jar})@${pkg}"
+	fi
+
+	echo "${append}" >> "${depend_file}"
+}
+
+# @FUNCTION: java-pkg_append_
+# @INTERNAL
+# @DESCRIPTION:
+# Appends a value to a colon-separated variable, avoiding duplicates.
+#
+# @CODE
+# Parameters:
+# $1 variable name to modify
+# $2 value to append
+#
+# Examples:
+#	java-pkg_append_ CLASSPATH foo.jar
+# @CODE
+java-pkg_append_() {
+	debug-print-function ${FUNCNAME} $*
+
+	local var="${1}" value="${2}"
+	if [[ -z "${!var}" ]] ; then
+		export ${var}="${value}"
+	else
+		# Walk the existing colon-separated entries; only append if the
+		# value is not already present.
+		local oldIFS=${IFS} cur haveit
+		IFS=':'
+		for cur in ${!var}; do
+			if [[ ${cur} == ${value} ]]; then
+				haveit="yes"
+				break
+			fi
+		done
+		[[ -z ${haveit} ]] && export ${var}="${!var}:${value}"
+		IFS=${oldIFS}
+	fi
+}
+
+# @FUNCTION: java-pkg_expand_dir_
+# @INTERNAL
+# @DESCRIPTION:
+# Gets the full path of the file/directory's parent.
+# @CODE
+# Parameters:
+# $1 - file/directory to find parent directory for
+# @CODE
+# @RETURN: path to $1's parent directory
+java-pkg_expand_dir_() {
+	# cd into the parent and let pwd resolve it to an absolute path.
+	pushd "$(dirname "${1}")" >/dev/null 2>&1
+	pwd
+	popd >/dev/null 2>&1
+}
+
+# @FUNCTION: java-pkg_func-exists
+# @INTERNAL
+# @DESCRIPTION:
+# Does the indicated function exist?
+# @RETURN: 0 - function is declared, 1 - function is undeclared
+java-pkg_func-exists() {
+	# declare -F exits non-zero when no such function is defined.
+	declare -F ${1} > /dev/null
+}
+
+# @FUNCTION: java-pkg_setup-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Sets up the environment for a specific VM: registers vendor-specific
+# sandbox predictions so reads of these paths don't trip the sandbox.
+java-pkg_setup-vm() {
+	debug-print-function ${FUNCNAME} $*
+
+	local vendor="$(java-pkg_get-vm-vendor)"
+	if [[ "${vendor}" == "sun" ]] && java-pkg_is-vm-version-ge "1.5" ; then
+		addpredict "/dev/random"
+	elif [[ "${vendor}" == "ibm" ]]; then
+		addpredict "/proc/self/maps"
+		addpredict "/proc/cpuinfo"
+		addpredict "/proc/self/coredump_filter"
+	elif [[ "${vendor}" == "oracle" ]]; then
+		addpredict "/dev/random"
+		addpredict "/proc/self/coredump_filter"
+	elif [[ "${vendor}" == icedtea* ]] && java-pkg_is-vm-version-ge "1.7" ; then
+		# Unquoted RHS: icedtea* is a deliberate glob match on the vendor.
+		addpredict "/dev/random"
+		addpredict "/proc/self/coredump_filter"
+	elif [[ "${vendor}" == "jrockit" ]]; then
+		addpredict "/proc/cpuinfo"
+	fi
+}
+
+# @FUNCTION: java-pkg_needs-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Does the current package depend on virtual/jdk or does it set
+# JAVA_PKG_WANT_BUILD_VM?
+#
+# @RETURN: 0 - Package depends on virtual/jdk; 1 - Package does not depend on virtual/jdk
+java-pkg_needs-vm() {
+	debug-print-function ${FUNCNAME} $*
+
+	# sed deletes every line NOT matching virtual/jdk; non-empty output
+	# means the dep string mentions it.
+	if [[ -n "$(echo ${JAVA_PKG_NV_DEPEND:-${DEPEND}} | sed -e '\:virtual/jdk:!d')" ]]; then
+		return 0
+	fi
+
+	[[ -n "${JAVA_PKG_WANT_BUILD_VM}" ]] && return 0
+
+	return 1
+}
+
+# @FUNCTION: java-pkg_get-current-vm
+# @INTERNAL
+# @RETURN - The current VM being used (java-config -f prints the handle)
+java-pkg_get-current-vm() {
+	java-config -f
+}
+
+# @FUNCTION: java-pkg_get-vm-vendor
+# @INTERNAL
+# @RETURN - The vendor of the current VM
+java-pkg_get-vm-vendor() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Handles look like "vendor-version"; strip everything from the
+	# first '-' to keep just the vendor part.
+	local vm="$(java-pkg_get-current-vm)"
+	vm="${vm/-*/}"
+	echo "${vm}"
+}
+
+# @FUNCTION: java-pkg_get-vm-version
+# @INTERNAL
+# @RETURN - The version of the current VM
+java-pkg_get-vm-version() {
+	debug-print-function ${FUNCNAME} $*
+
+	java-config -g PROVIDES_VERSION
+}
+
+# @FUNCTION: java-pkg_build-vm-from-handle
+# @INTERNAL
+# @DESCRIPTION:
+# Selects a build vm from a list of vm handles. First checks for the system-vm
+# being usable, then steps through the listed handles till a suitable vm is
+# found.
+#
+# @RETURN - VM handle of an available JDK
+java-pkg_build-vm-from-handle() {
+	debug-print-function ${FUNCNAME} "$*"
+
+	# Prefer the currently-selected VM if it is one of the wanted handles.
+	local vm
+	vm=$(java-pkg_get-current-vm 2>/dev/null)
+	if [[ $? -eq 0 ]]; then
+		if has ${vm} ${JAVA_PKG_WANT_BUILD_VM}; then
+			echo ${vm}
+			return 0
+		fi
+	fi
+
+	# Otherwise take the first wanted handle that java-config can select.
+	for vm in ${JAVA_PKG_WANT_BUILD_VM}; do
+		if java-config-2 --select-vm=${vm} 2>/dev/null; then
+			echo ${vm}
+			return 0
+		fi
+	done
+
+	eerror "${FUNCNAME}: No vm found for handles: ${JAVA_PKG_WANT_BUILD_VM}"
+	return 1
+}
+
+# @FUNCTION: java-pkg_switch-vm
+# @INTERNAL
+# @DESCRIPTION:
+# Switch VM if we're allowed to (controlled by JAVA_PKG_ALLOW_VM_CHANGE), and
+# verify that the current VM is sufficient.
+# Setup the environment for the VM being used.
+java-pkg_switch-vm() {
+	debug-print-function ${FUNCNAME} $*
+
+	if java-pkg_needs-vm; then
+		# Use the VM specified by JAVA_PKG_FORCE_VM
+		if [[ -n "${JAVA_PKG_FORCE_VM}" ]]; then
+			# If you're forcing the VM, I hope you know what you're doing...
+			debug-print "JAVA_PKG_FORCE_VM used: ${JAVA_PKG_FORCE_VM}"
+			export GENTOO_VM="${JAVA_PKG_FORCE_VM}"
+		# if we're allowed to switch the vm...
+		elif [[ "${JAVA_PKG_ALLOW_VM_CHANGE}" == "yes" ]]; then
+			# if there is an explicit list of handles to choose from
+			if [[ -n "${JAVA_PKG_WANT_BUILD_VM}" ]]; then
+				debug-print "JAVA_PKG_WANT_BUILD_VM used: ${JAVA_PKG_WANT_BUILD_VM}"
+				GENTOO_VM=$(java-pkg_build-vm-from-handle)
+				if [[ $? != 0 ]]; then
+					eerror "${FUNCNAME}: No VM found for handles: ${JAVA_PKG_WANT_BUILD_VM}"
+					die "${FUNCNAME}: Failed to determine VM for building"
+				fi
+				# JAVA_PKG_WANT_SOURCE and JAVA_PKG_WANT_TARGET are required as
+				# they can't be deduced from handles.
+				if [[ -z "${JAVA_PKG_WANT_SOURCE}" ]]; then
+					eerror "JAVA_PKG_WANT_BUILD_VM specified but not JAVA_PKG_WANT_SOURCE"
+					die "Specify JAVA_PKG_WANT_SOURCE"
+				fi
+				if [[ -z "${JAVA_PKG_WANT_TARGET}" ]]; then
+					eerror "JAVA_PKG_WANT_BUILD_VM specified but not JAVA_PKG_WANT_TARGET"
+					die "Specify JAVA_PKG_WANT_TARGET"
+				fi
+			# otherwise determine a vm from dep string
+			else
+				debug-print "depend-java-query: NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
+				GENTOO_VM="$(depend-java-query --get-vm "${JAVA_PKG_NV_DEPEND:-${DEPEND}}")"
+				if [[ -z "${GENTOO_VM}" || "${GENTOO_VM}" == "None" ]]; then
+					eerror "Unable to determine VM for building from dependencies:"
+					echo "NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
+					die "Failed to determine VM for building."
+				fi
+			fi
+			export GENTOO_VM
+		# otherwise just make sure the current VM is sufficient
+		else
+			java-pkg_ensure-vm-version-sufficient
+		fi
+		debug-print "Using: $(java-config -f)"
+
+		java-pkg_setup-vm
+
+		# Export the toolchain variables for the chosen VM.
+		export JAVA=$(java-config --java)
+		export JAVAC=$(java-config --javac)
+		JAVACFLAGS="$(java-pkg_javac-args)"
+		[[ -n ${JAVACFLAGS_EXTRA} ]] && JAVACFLAGS="${JAVACFLAGS_EXTRA} ${JAVACFLAGS}"
+		export JAVACFLAGS
+
+		export JAVA_HOME="$(java-config -g JAVA_HOME)"
+		export JDK_HOME=${JAVA_HOME}
+
+		#TODO If you know a better solution let us know.
+		java-pkg_append_ LD_LIBRARY_PATH "$(java-config -g LDPATH)"
+
+		local tann="${T}/announced-vm"
+		# With the hooks we should only get here once from pkg_setup but better safe than sorry
+		# if people have for example modified eclasses some where
+		if [[ -n "${JAVA_PKG_DEBUG}" ]] || [[ ! -f "${tann}" ]] ; then
+			einfo "Using: $(java-config -f)"
+			[[ ! -f "${tann}" ]] && touch "${tann}"
+		fi
+
+	else
+		[[ -n "${JAVA_PKG_DEBUG}" ]] && ewarn "!!! This package inherits java-pkg but doesn't depend on a JDK. -bin or broken dependency!!!"
+	fi
+}
+
+# @FUNCTION: java-pkg_die
+# @INTERNAL
+# @DESCRIPTION:
+# Enhanced die for Java packages, which displays some information that may be
+# useful for debugging bugs on bugzilla.
+#register_die_hook java-pkg_die
+# Register once as a death hook so the diagnostics print on any die.
+if ! has java-pkg_die ${EBUILD_DEATH_HOOKS}; then
+	EBUILD_DEATH_HOOKS="${EBUILD_DEATH_HOOKS} java-pkg_die"
+fi
+
+java-pkg_die() {
+	echo "!!! When you file a bug report, please include the following information:" >&2
+	echo "GENTOO_VM=${GENTOO_VM}  CLASSPATH=\"${CLASSPATH}\" JAVA_HOME=\"${JAVA_HOME}\"" >&2
+	echo "JAVACFLAGS=\"${JAVACFLAGS}\" COMPILER=\"${GENTOO_COMPILER}\"" >&2
+	echo "and of course, the output of emerge --info =${P}" >&2
+}
+
+
+# TODO document
+# List jars in the source directory, ${S}
+# Debug aid: with JAVA_PKG_DEBUG set, lists linked jars, regular jars and
+# class files under ${S}, with ${WORKDIR} rewritten to the literal
+# "${WORKDIR}" for readable output.
+java-pkg_jar-list() {
+	if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
+		einfo "Linked Jars"
+		find "${S}" -type l -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+		einfo "Jars"
+		find "${S}" -type f -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+		einfo "Classes"
+		find "${S}" -type f -name '*.class' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR},"
+	fi
+}
+
+# @FUNCTION: java-pkg_verify-classes
+# @INTERNAL
+# @DESCRIPTION:
+# Verify that the classes were compiled for the right source / target. Dies if
+# not.
+# @CODE
+# $1 (optional) - the file to check, otherwise checks whole ${D}
+# @CODE
+java-pkg_verify-classes() {
+	#$(find ${D} -type f -name '*.jar' -o -name '*.class')
+
+	# Prefer the script on PATH; fall back to the javatoolkit libdir copy.
+	local version_verify="/usr/bin/class-version-verify.py"
+
+	if [[ ! -x "${version_verify}" ]]; then
+		version_verify="/usr/$(get_libdir)/javatoolkit/bin/class-version-verify.py"
+	fi
+
+	if [[ ! -x "${version_verify}" ]]; then
+		ewarn "Unable to perform class version checks as"
+		ewarn "class-version-verify.py is unavailable"
+		ewarn "Please install dev-java/javatoolkit."
+		return
+	fi
+
+	local target=$(java-pkg_get-target)
+	local result
+	local log="${T}/class-version-verify.log"
+	if [[ -n "${1}" ]]; then
+		${version_verify} -v -t ${target} "${1}" > "${log}"
+		result=$?
+	else
+		ebegin "Verifying java class versions (target: ${target})"
+		${version_verify} -v -t ${target} -r "${D}" > "${log}"
+		result=$?
+		eend ${result}
+	fi
+	[[ -n ${JAVA_PKG_DEBUG} ]] && cat "${log}"
+	if [[ ${result} != 0 ]]; then
+		eerror "Incorrect bytecode version found"
+		[[ -n "${1}" ]] && eerror "in file: ${1}"
+		eerror "See ${log} for more details."
+		die "Incorrect bytecode found"
+	fi
+}
+
+# @FUNCTION: java-pkg_ensure-dep
+# @INTERNAL
+# @DESCRIPTION:
+# Check that a package being used in jarfrom, getjars and getjar is contained
+# within DEPEND or RDEPEND with the correct SLOT. See this mail for details:
+# https://archives.gentoo.org/gentoo-dev/message/dcb644f89520f4bbb61cc7bbe45fdf6e
+# @CODE
+# Parameters:
+# $1 - empty - check both vars; "runtime" or "build" - check only
+#      RDEPEND, resp. DEPEND
+# $2 - Package name and slot.
+# @CODE
+java-pkg_ensure-dep() {
+	debug-print-function ${FUNCNAME} $*
+
+	local limit_to="${1}"
+	local target_pkg="${2}"
+	local dev_error=""
+
+	# Transform into a regular expression to look for a matching package
+	# and SLOT. SLOTs don't have to be numeric so foo-bar could either
+	# mean foo-bar:0 or foo:bar. So you want to get your head around the
+	# line below?
+	#
+	# * The target package first has any dots escaped, e.g. foo-1.2
+	#   becomes foo-1\.2.
+	#
+	# * sed then looks at the component following the last - or :
+	#   character, or the whole string if there is no - or :
+	#   character. It uses this to build a new regexp with two
+	#   significant branches.
+	#
+	# * The first checks for the whole target package string, optionally
+	#   followed by a version number, and then :0.
+	#
+	# * The second checks for the first part of the target package
+	#   string, optionally followed by a version number, followed by the
+	#   aforementioned component, treating that as a SLOT.
+	#
+	local stripped_pkg=/$(sed -r 's/[-:]?([^-:]+)$/(\0(-[^:]+)?:0|(-[^:]+)?:\1)/' <<< "${target_pkg//./\\.}")\\b
+
+	debug-print "Matching against: ${stripped_pkg}"
+
+	# Uncomment the lines below once we've dealt with more of these
+	# otherwise we'll be tempted to turn JAVA_PKG_STRICT off while
+	# getting hit with a wave of bug reports. :(
+
+	if [[ ${limit_to} != runtime && ! ( "${DEPEND}" =~ $stripped_pkg ) ]]; then
+		dev_error="The ebuild is attempting to use ${target_pkg}, which is not "
+		dev_error+="declared with a SLOT in DEPEND."
+#		if is-java-strict; then
+#			die "${dev_error}"
+#		else
+			eqawarn "java-pkg_ensure-dep: ${dev_error}"
+#			eerror "Because you have ${target_pkg} installed,"
+#			eerror "the package will build without problems, but please"
+#			eerror "report this to http://bugs.gentoo.org."
+#		fi
+	elif [[ ${limit_to} != build && ! ( "${RDEPEND}${PDEPEND}" =~ ${stripped_pkg} ) ]]; then
+		dev_error="The ebuild is attempting to use ${target_pkg}, which is not "
+		dev_error+="declared with a SLOT in [RP]DEPEND and --build-only wasn't given."
+#		if is-java-strict; then
+#			die "${dev_error}"
+#		else
+			eqawarn "java-pkg_ensure-dep: ${dev_error}"
+#			eerror "The package will build without problems, but may fail to run"
+#			eerror "if you don't have ${target_pkg} installed,"
+#			eerror "so please report this to http://bugs.gentoo.org."
+#		fi
+	fi
+}
+
+# QA helper: warn when a function is called outside its intended src_ phase.
+java-pkg_check-phase() {
+	local phase=${1}
+	# FUNCNAME[1] is the caller being checked, not this helper.
+	local funcname=${FUNCNAME[1]}
+	if [[ ${EBUILD_PHASE} != ${phase} ]]; then
+		local msg="${funcname} used outside of src_${phase}"
+		java-pkg_announce-qa-violation "${msg}"
+	fi
+}
+
+# QA helper: jars should be installed unversioned so revbumps don't break
+# classpaths.
+java-pkg_check-versioned-jar() {
+	local jar=${1}
+
+	if [[ ${jar} =~ ${PV} ]]; then
+		java-pkg_announce-qa-violation "installing versioned jar '${jar}'"
+	fi
+}
+
+# QA helper: the jikes compiler USE flag is deprecated.
+java-pkg_check-jikes() {
+	if has jikes ${IUSE}; then
+		java-pkg_announce-qa-violation "deprecated USE flag 'jikes' in IUSE"
+	fi
+}
+
+# Print a QA notice, bump the violation counter, and die under
+# JAVA_PKG_STRICT unless --nodie was given.
+java-pkg_announce-qa-violation() {
+	local nodie
+	if [[ ${1} == "--nodie" ]]; then
+		nodie="true"
+		shift
+	fi
+	echo "Java QA Notice: $@" >&2
+	increment-qa-violations
+	[[ -z "${nodie}" ]] && is-java-strict && die "${@}"
+}
+
+# Count QA violations; exported so the total survives across phase hooks.
+increment-qa-violations() {
+	let "JAVA_PKG_QA_VIOLATIONS+=1"
+	export JAVA_PKG_QA_VIOLATIONS
+}
+
+# True when the user opted in to strict Java QA via JAVA_PKG_STRICT.
+is-java-strict() {
+	[[ -n ${JAVA_PKG_STRICT} ]]
+	return $?
+}
diff --git a/eclass/java-virtuals-2.eclass b/eclass/java-virtuals-2.eclass
new file mode 100644
index 000000000000..0c92018c7bab
--- /dev/null
+++ b/eclass/java-virtuals-2.eclass
@@ -0,0 +1,55 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: java-virtuals-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @AUTHOR:
+# Original Author: Alistair John Bush <ali_bush@gentoo.org>
+# @BLURB: Java virtuals eclass
+# @DESCRIPTION:
+# To provide a default (and only) src_install function for ebuilds in the
+# java-virtuals category.
+
+inherit java-utils-2
+
+DEPEND=">=dev-java/java-config-2.2.0"
+RDEPEND="${DEPEND}"
+
+S="${WORKDIR}"
+
+EXPORT_FUNCTIONS src_install
+
+# @FUNCTION: java-virtuals-2_src_install
+# @DESCRIPTION:
+# default src_install: just write the virtual's provider file.
+
+java-virtuals-2_src_install() {
+	java-virtuals-2_do_write
+}
+
+# @FUNCTION: java-virtuals-2_do_write
+# @INTERNAL
+# @DESCRIPTION:
+# Writes the virtual env file out to disk.
+
+java-virtuals-2_do_write() {
+	java-pkg_init_paths_
+
+	dodir "${JAVA_PKG_VIRTUALS_PATH}"
+	# Group all echoes into a single redirection to the provider file.
+	{
+		if [[ -n "${JAVA_VIRTUAL_PROVIDES}" ]]; then
+			echo "PROVIDERS=\"${JAVA_VIRTUAL_PROVIDES}\""
+		fi
+
+		if [[ -n "${JAVA_VIRTUAL_VM}" ]]; then
+			echo "VM=\"${JAVA_VIRTUAL_VM}\""
+		fi
+
+		if [[ -n "${JAVA_VIRTUAL_VM_CLASSPATH}" ]]; then
+			echo "VM_CLASSPATH=\"${JAVA_VIRTUAL_VM_CLASSPATH}\""
+		fi
+		# ${VAR=FALSE} assigns FALSE only if JAVA_VIRTUAL_MULTI is unset
+		# (an empty-but-set value is kept as-is).
+		echo "MULTI_PROVIDER=\"${JAVA_VIRTUAL_MULTI=FALSE}\""
+	} > "${JAVA_PKG_VIRTUAL_PROVIDER}"
+}
diff --git a/eclass/java-vm-2.eclass b/eclass/java-vm-2.eclass
new file mode 100644
index 000000000000..eb94451c7c39
--- /dev/null
+++ b/eclass/java-vm-2.eclass
@@ -0,0 +1,396 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: java-vm-2.eclass
+# @MAINTAINER:
+# java@gentoo.org
+# @BLURB: Java Virtual Machine eclass
+# @DESCRIPTION:
+# This eclass provides functionality which assists with installing
+# virtual machines, and ensures that they are recognized by java-config.
+
+inherit eutils fdo-mime multilib pax-utils prefix
+
+EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_prerm pkg_postrm
+
+RDEPEND="
+ >=dev-java/java-config-2.2.0
+ app-eselect/eselect-java"
+DEPEND="${RDEPEND}"
+has "${EAPI}" 0 1 && DEPEND="${DEPEND} >=sys-apps/portage-2.1"
+
+export WANT_JAVA_CONFIG=2
+
+
+# @ECLASS-VARIABLE: JAVA_VM_CONFIG_DIR
+# @INTERNAL
+# @DESCRIPTION:
+# Where to place the vm env file.
+JAVA_VM_CONFIG_DIR="/usr/share/java-config-2/vm"
+
+# @ECLASS-VARIABLE: JAVA_VM_DIR
+# @INTERNAL
+# @DESCRIPTION:
+# Base directory for vm links.
+JAVA_VM_DIR="/usr/lib/jvm"
+
+# @ECLASS-VARIABLE: JAVA_VM_SYSTEM
+# @INTERNAL
+# @DESCRIPTION:
+# Link for system-vm
+JAVA_VM_SYSTEM="/etc/java-config-2/current-system-vm"
+
+# @ECLASS-VARIABLE: JAVA_VM_BUILD_ONLY
+# @DESCRIPTION:
+# Set to YES to mark a vm as build-only.
+JAVA_VM_BUILD_ONLY="${JAVA_VM_BUILD_ONLY:-FALSE}"
+
+
+# @FUNCTION: java-vm-2_pkg_setup
+# @DESCRIPTION:
+# default pkg_setup
+#
+# Initialize vm handle.
+
+java-vm-2_pkg_setup() {
+	# Slotted VMs get a SLOT-suffixed handle, e.g. icedtea-7.
+	if [[ "${SLOT}" != "0" ]]; then
+		VMHANDLE=${PN}-${SLOT}
+	else
+		VMHANDLE=${PN}
+	fi
+}
+
+
+# @FUNCTION: java-vm-2_pkg_postinst
+# @DESCRIPTION:
+# default pkg_postinst
+#
+# Set the generation-2 system VM and Java plugin, if it isn't set or the
+# setting is invalid. Also update mime database.
+
+java-vm-2_pkg_postinst() {
+	# Old EAPIs have no EROOT; emulate it (prefix is empty there).
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EROOT=${ROOT}
+	# Note that we cannot rely on java-config here, as it will silently recognize
+	# e.g. icedtea6-bin as valid system VM if icedtea6 is set but invalid (e.g. due
+	# to the migration to icedtea-6)
+	if [[ ! -L "${EROOT}${JAVA_VM_SYSTEM}" ]]; then
+		java_set_default_vm_
+	else
+		# NOTE(review): current_vm is resolved against ${ROOT}, not
+		# ${EROOT} -- confirm this is intentional for prefix systems.
+		local current_vm_path=$(readlink "${EROOT}${JAVA_VM_SYSTEM}")
+		local current_vm=$(basename "${ROOT}${current_vm_path}")
+		if [[ ! -L "${EROOT}${JAVA_VM_DIR}/${current_vm}" ]]; then
+			java_set_default_vm_
+		fi
+	fi
+
+	java-vm_check-nsplugin
+	java_mozilla_clean_
+	fdo-mime_desktop_database_update
+}
+
+
+# @FUNCTION: java-vm_check-nsplugin
+# @INTERNAL
+# @DESCRIPTION:
+# Check if the nsplugin needs updating
+
+java-vm_check-nsplugin() {
+	# 32-bit emulation VMs install their plugin under lib32.
+	local libdir
+	if [[ ${VMHANDLE} =~ emul-linux-x86 ]]; then
+		libdir=lib32
+	else
+		libdir=lib
+	fi
+
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EPREFIX=
+
+	# Install a default nsplugin if we don't already have one
+	if in_iuse nsplugin && use nsplugin; then
+		if [[ ! -f "${ROOT}${EPREFIX}"/usr/${libdir}/nsbrowser/plugins/javaplugin.so ]]; then
+			einfo "No system nsplugin currently set."
+			java-vm_set-nsplugin
+		else
+			einfo "System nsplugin is already set, not changing it."
+		fi
+		einfo "You can change nsplugin with eselect java-nsplugin."
+	fi
+}
+
+
+# @FUNCTION: java-vm_set-nsplugin
+# @INTERNAL
+# @DESCRIPTION:
+# Set the nsplugin implementation.
+
+java-vm_set-nsplugin() {
+	# On amd64 eselect java-nsplugin distinguishes 32bit and 64bit slots.
+	local extra_args
+	if use amd64; then
+		if [[ ${VMHANDLE} =~ emul-linux-x86 ]]; then
+			extra_args="32bit"
+		else
+			extra_args="64bit"
+		fi
+		einfo "Setting ${extra_args} nsplugin to ${VMHANDLE}"
+	else
+		einfo "Setting nsplugin to ${VMHANDLE}..."
+	fi
+	eselect java-nsplugin set ${extra_args} ${VMHANDLE}
+}
+
+
+# @FUNCTION: java-vm-2_pkg_prerm
+# @DESCRIPTION:
+# default pkg_prerm
+#
+# Warn user if removing system-vm.
+
+java-vm-2_pkg_prerm() {
+	# Although REPLACED_BY_VERSION is EAPI=4, we shouldn't need to check EAPI for this use case
+	# GENTOO_VM is cleared so java-config reports the real system VM.
+	if [[ "$(GENTOO_VM="" java-config -f 2>/dev/null)" == "${VMHANDLE}" && -z "${REPLACED_BY_VERSION}" ]]; then
+		ewarn "It appears you are removing your system-vm!"
+		ewarn "Please run java-config -L to list available VMs,"
+		ewarn "then use java-config -S to set a new system-vm!"
+	fi
+}
+
+
+# @FUNCTION: java-vm-2_pkg_postrm
+# @DESCRIPTION:
+# default pkg_postrm
+#
+# Update mime database.
+
+java-vm-2_pkg_postrm() {
+	fdo-mime_desktop_database_update
+}
+
+
+# @FUNCTION: java_set_default_vm_
+# @INTERNAL
+# @DESCRIPTION:
+# Set system-vm to this package's VMHANDLE and tell the user.
+
+java_set_default_vm_() {
+	java-config-2 --set-system-vm="${VMHANDLE}"
+
+	einfo " ${P} set as the default system-vm."
+}
+
+
+# @FUNCTION: get_system_arch
+# @DESCRIPTION:
+# Get Java specific arch name.
+
+get_system_arch() {
+ local sarch
+ sarch=$(echo ${ARCH} | sed -e s/[i]*.86/i386/ -e s/x86_64/amd64/ -e s/sun4u/sparc/ -e s/sparc64/sparc/ -e s/arm.*/arm/ -e s/sa110/arm/)
+ if [ -z "${sarch}" ]; then
+ sarch=$(uname -m | sed -e s/[i]*.86/i386/ -e s/x86_64/amd64/ -e s/sun4u/sparc/ -e s/sparc64/sparc/ -e s/arm.*/arm/ -e s/sa110/arm/)
+ fi
+ echo ${sarch}
+}
+
+
+# @FUNCTION: set_java_env
+# @DESCRIPTION:
+# Installs a vm env file.
+
+# TODO rename to something more evident, like install_env_file
+set_java_env() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Old EAPIs have no ED/EPREFIX; emulate them (prefix is empty there).
+	if has ${EAPI:-0} 0 1 2 && ! use prefix ; then
+		ED="${D}"
+		EPREFIX=""
+	fi
+
+	local platform="$(get_system_arch)"
+	local env_file="${ED}${JAVA_VM_CONFIG_DIR}/${VMHANDLE}"
+	local old_env_file="${ED}/etc/env.d/java/20${P}"
+	if [[ ${1} ]]; then
+		local source_env_file="${1}"
+	else
+		local source_env_file="${FILESDIR}/${VMHANDLE}.env"
+	fi
+
+	if [[ ! -f ${source_env_file} ]]; then
+		die "Unable to find the env file: ${source_env_file}"
+	fi
+
+	dodir ${JAVA_VM_CONFIG_DIR}
+	# Substitute @TOKEN@ placeholders; the last expression appends the
+	# platform-specific JRE lib subdirs to any LDPATH ending in lib/.
+	sed \
+		-e "s/@P@/${P}/g" \
+		-e "s/@PN@/${PN}/g" \
+		-e "s/@PV@/${PV}/g" \
+		-e "s/@PF@/${PF}/g" \
+		-e "s/@SLOT@/${SLOT}/g" \
+		-e "s/@PLATFORM@/${platform}/g" \
+		-e "s/@LIBDIR@/$(get_libdir)/g" \
+		-e "/^LDPATH=.*lib\\/\\\"/s|\"\\(.*\\)\"|\"\\1${platform}/:\\1${platform}/server/\"|" \
+		< "${source_env_file}" \
+		> "${env_file}" || die "sed failed"
+
+	(
+		echo "VMHANDLE=\"${VMHANDLE}\""
+		echo "BUILD_ONLY=\"${JAVA_VM_BUILD_ONLY}\""
+	) >> "${env_file}"
+
+	eprefixify ${env_file}
+
+	[[ -n ${JAVA_PROVIDE} ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\"" >> ${env_file}
+
+	# Source the generated file in a subshell just to extract JAVA_HOME.
+	local java_home=$(source "${env_file}"; echo ${JAVA_HOME})
+	[[ -z ${java_home} ]] && die "No JAVA_HOME defined in ${env_file}"
+
+	# Make the symlink
+	dodir "${JAVA_VM_DIR}"
+	dosym ${java_home#${EPREFIX}} ${JAVA_VM_DIR}/${VMHANDLE} \
+		|| die "Failed to make VM symlink at ${JAVA_VM_DIR}/${VMHANDLE}"
+}
+
+
+# @FUNCTION: java-vm_set-pax-markings
+# @DESCRIPTION:
+# Set PaX markings on all JDK/JRE executables to allow code-generation on
+# the heap by the JIT compiler.
+#
+# The markings need to be set prior to the first invocation of the the freshly
+# built / installed VM. Be it before creating the Class Data Sharing archive or
+# generating cacerts. Otherwise a PaX enabled kernel will kill the VM.
+# Bug #215225 #389751
+#
+# @CODE
+# Parameters:
+# $1 - JDK/JRE base directory.
+#
+# Examples:
+#	java-vm_set-pax-markings "${S}"
+#	java-vm_set-pax-markings "${ED}"/opt/${P}
+# @CODE
+
+java-vm_set-pax-markings() {
+	debug-print-function ${FUNCNAME} "$*"
+	[[ $# -ne 1 ]] && die "${FUNCNAME}: takes exactly one argument"
+	[[ ! -f "${1}"/bin/java ]] \
+		&& die "${FUNCNAME}: argument needs to be JDK/JRE base directory"
+
+	local executables=( "${1}"/bin/* )
+	[[ -d "${1}"/jre ]] && executables+=( "${1}"/jre/bin/* )
+
+	# "C" ensures a PaX header is created; "m" disables MPROTECT, which
+	# is usually sufficient for the JIT.  Declared once -- the original
+	# re-declared the variable with a second 'local ...+=' on append.
+	local pax_markings="Cm"
+	# On x86 for heap sizes over 700MB disable SEGMEXEC and PAGEEXEC as well.
+	use x86 && pax_markings+="sp"
+
+	pax-mark ${pax_markings} $(list-paxables "${executables[@]}")
+}
+
+
+# @FUNCTION: java-vm_revdep-mask
+# @DESCRIPTION:
+# Installs a revdep-rebuild control file which SEARCH_DIR_MASK set to the path
+# where the VM is installed. Prevents pointless rebuilds - see bug #177925.
+# Also gives a notice to the user.
+#
+# @CODE
+# Parameters:
+# $1 - Path of the VM (defaults to /opt/${P} if not set)
+#
+# Examples:
+#	java-vm_revdep-mask
+#	java-vm_revdep-mask /path/to/jdk/
+#
+# @CODE
+
+java-vm_revdep-mask() {
+	# Old EAPIs have no ED/EPREFIX; emulate them (prefix is empty there).
+	if has ${EAPI:-0} 0 1 2 && ! use prefix; then
+		ED="${D}"
+		EPREFIX=
+	fi
+
+	local VMROOT="${1-"${EPREFIX}"/opt/${P}}"
+
+	dodir /etc/revdep-rebuild/
+	echo "SEARCH_DIRS_MASK=\"${VMROOT}\""> "${ED}/etc/revdep-rebuild/61-${VMHANDLE}"
+}
+
+
+# @FUNCTION: java-vm_sandbox-predict
+# @DESCRIPTION:
+# Install a sandbox control file. Specified paths won't cause a sandbox
+# violation if opened read write but no write takes place. See bug 388937#c1
+#
+# @CODE
+# Examples:
+#	java-vm_sandbox-predict /dev/random /proc/self/coredump_filter
+# @CODE
+
+java-vm_sandbox-predict() {
+	debug-print-function ${FUNCNAME} "$*"
+	[[ -z "${1}" ]] && die "${FUNCNAME} takes at least one argument"
+
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+
+	local path path_arr=("$@")
+	# subshell this to prevent IFS bleeding out dependent on bash version.
+	# could use local, which *should* work, but that requires a lot of testing.
+	path=$(IFS=":"; echo "${path_arr[*]}")
+	dodir /etc/sandbox.d
+	echo "SANDBOX_PREDICT=\"${path}\"" > "${ED}/etc/sandbox.d/20${VMHANDLE}" \
+		|| die "Failed to write sandbox control file"
+}
+
+
+# @FUNCTION: java_get_plugin_dir_
+# @INTERNAL
+# @DESCRIPTION:
+# Get the java plugin dir (browser plugin directory under the prefix).
+
+java_get_plugin_dir_() {
+	has ${EAPI:-0} 0 1 2 && ! use prefix && EPREFIX=
+	echo "${EPREFIX}"/usr/$(get_libdir)/nsbrowser/plugins
+}
+
+
+# @FUNCTION: install_mozilla_plugin
+# @DESCRIPTION:
+# Register a netscape java-plugin.
+# $1 - installed plugin path; $2 - (optional) variant name appended to
+# the symlink name.
+
+install_mozilla_plugin() {
+	local plugin="${1}"
+	local variant="${2}"
+
+	has ${EAPI:-0} 0 1 2 && ! use prefix && ED="${D}"
+	if [[ ! -f "${ED}/${plugin}" ]]; then
+		die "Cannot find mozilla plugin at ${ED}/${plugin}"
+	fi
+
+	if [[ -n "${variant}" ]]; then
+		variant="-${variant}"
+	fi
+
+	local plugin_dir="/usr/share/java-config-2/nsplugin"
+	dodir "${plugin_dir}"
+	dosym "${plugin}" "${plugin_dir}/${VMHANDLE}${variant}-javaplugin.so"
+}
+
+
+# @FUNCTION: java_mozilla_clean_
+# @INTERNAL
+# @DESCRIPTION:
+# Because previously some ebuilds installed symlinks outside of pkg_install
+# and are left behind, which forces you to manually remove them to select the
+# jdk/jre you want to use for java
+
+java_mozilla_clean_() {
+	# Declare the loop variable local so it does not leak into the ebuild
+	# environment, and quote expansions against unusual paths.
+	local plugin_dir file
+	plugin_dir=$(java_get_plugin_dir_)
+	# rm -f silently ignores the unexpanded pattern when nothing matches.
+	for file in "${plugin_dir}"/javaplugin_* "${plugin_dir}"/libjavaplugin*; do
+		rm -f "${file}"
+	done
+}
diff --git a/eclass/kde4-base.eclass b/eclass/kde4-base.eclass
new file mode 100644
index 000000000000..5a1f2490a439
--- /dev/null
+++ b/eclass/kde4-base.eclass
@@ -0,0 +1,922 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: kde4-base.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: This eclass provides functions for kde 4.X ebuilds
+# @DESCRIPTION:
+# The kde4-base.eclass provides support for building KDE4 based ebuilds
+# and KDE4 applications.
+#
+# NOTE: KDE 4 ebuilds currently support EAPI 5. This will be
+# reviewed over time as new EAPI versions are approved.
+
+if [[ -z ${_KDE4_BASE_ECLASS} ]]; then
+_KDE4_BASE_ECLASS=1
+
+# @ECLASS-VARIABLE: KDE_SELINUX_MODULE
+# @DESCRIPTION:
+# If set to "none", do nothing.
+# For any other value, add selinux to IUSE, and depending on that useflag
+# add a dependency on sec-policy/selinux-${KDE_SELINUX_MODULE} to (R)DEPEND
+: ${KDE_SELINUX_MODULE:=none}
+
+# @ECLASS-VARIABLE: VIRTUALDBUS_TEST
+# @DESCRIPTION:
+# If defined, launch and use a private dbus session during src_test.
+
+# @ECLASS-VARIABLE: VIRTUALX_REQUIRED
+# @DESCRIPTION:
+# For proper description see virtualx.eclass manpage.
+# Here we redefine default value to be manual, if your package needs virtualx
+# for tests you should proceed with setting VIRTUALX_REQUIRED=test.
+: ${VIRTUALX_REQUIRED:=manual}
+
+inherit kde4-functions toolchain-funcs fdo-mime flag-o-matic gnome2-utils virtualx versionator eutils multilib
+
+if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${KDE_SCM} in
+ svn) inherit subversion ;;
+ git) inherit git-r3 ;;
+ esac
+fi
+
+# @ECLASS-VARIABLE: CMAKE_REQUIRED
+# @DESCRIPTION:
+# Specify if cmake buildsystem is being used. Possible values are 'always' and 'never'.
+# Please note that if it's set to 'never' you need to explicitly override following phases:
+# src_configure, src_compile, src_test and src_install.
+# Defaults to 'always'.
+: ${CMAKE_REQUIRED:=always}
+if [[ ${CMAKE_REQUIRED} = always ]]; then
+ buildsystem_eclass="cmake-utils"
+ export_fns="src_configure src_compile src_test src_install"
+fi
+
+# @ECLASS-VARIABLE: KDE_MINIMAL
+# @DESCRIPTION:
+# This variable is used when KDE_REQUIRED is set, to specify required KDE minimal
+# version for apps to work. Currently defaults to 4.4
+# One may override this variable to raise version requirements.
+# Note that it is fixed to ${PV} for kde-base packages.
+KDE_MINIMAL="${KDE_MINIMAL:-4.4}"
+
+# Set slot for KDEBASE known packages
+case ${KDEBASE} in
+ kde-base)
+ SLOT=4/$(get_version_component_range 1-2)
+ KDE_MINIMAL="${PV}"
+ ;;
+ kdevelop)
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ # @ECLASS-VARIABLE: KDEVELOP_VERSION
+ # @DESCRIPTION:
+ # Specifies KDevelop version. Default is 4.0.0 for tagged packages and 9999 for live packages.
+ # Applies to KDEBASE=kdevelop only.
+ KDEVELOP_VERSION="${KDEVELOP_VERSION:-4.9999}"
+ # @ECLASS-VARIABLE: KDEVPLATFORM_VERSION
+ # @DESCRIPTION:
+ # Specifies KDevplatform version. Default is 1.0.0 for tagged packages and 9999 for live packages.
+ # Applies to KDEBASE=kdevelop only.
+ KDEVPLATFORM_VERSION="${KDEVPLATFORM_VERSION:-4.9999}"
+ else
+ case ${PN} in
+ kdevelop)
+ KDEVELOP_VERSION=${PV}
+ KDEVPLATFORM_VERSION="$(($(get_major_version)-3)).$(get_after_major_version)"
+ ;;
+ kdevplatform|kdevelop-php*|kdevelop-python)
+ KDEVELOP_VERSION="$(($(get_major_version)+3)).$(get_after_major_version)"
+ KDEVPLATFORM_VERSION=${PV}
+ ;;
+ *)
+ KDEVELOP_VERSION="${KDEVELOP_VERSION:-4.0.0}"
+ KDEVPLATFORM_VERSION="${KDEVPLATFORM_VERSION:-1.0.0}"
+ esac
+ fi
+ SLOT="4"
+ ;;
+esac
+
+inherit ${buildsystem_eclass}
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare ${export_fns} pkg_preinst pkg_postinst pkg_postrm
+
+unset buildsystem_eclass
+unset export_fns
+
+# @ECLASS-VARIABLE: DECLARATIVE_REQUIRED
+# @DESCRIPTION:
+# Is qtdeclarative required? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+DECLARATIVE_REQUIRED="${DECLARATIVE_REQUIRED:-never}"
+
+# @ECLASS-VARIABLE: QTHELP_REQUIRED
+# @DESCRIPTION:
+# Is qthelp required? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+QTHELP_REQUIRED="${QTHELP_REQUIRED:-never}"
+
+# @ECLASS-VARIABLE: OPENGL_REQUIRED
+# @DESCRIPTION:
+# Is qtopengl required? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+OPENGL_REQUIRED="${OPENGL_REQUIRED:-never}"
+
+# @ECLASS-VARIABLE: MULTIMEDIA_REQUIRED
+# @DESCRIPTION:
+# Is qtmultimedia required? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+MULTIMEDIA_REQUIRED="${MULTIMEDIA_REQUIRED:-never}"
+
+# @ECLASS-VARIABLE: CPPUNIT_REQUIRED
+# @DESCRIPTION:
+# Is cppunit required for tests? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+CPPUNIT_REQUIRED="${CPPUNIT_REQUIRED:-never}"
+
+# @ECLASS-VARIABLE: KDE_REQUIRED
+# @DESCRIPTION:
+# Is kde required? Possible values are 'always', 'optional' and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'always'
+# If set to 'always' or 'optional', KDE_MINIMAL may be overridden as well.
+# Note that for kde-base packages this variable is fixed to 'always'.
+KDE_REQUIRED="${KDE_REQUIRED:-always}"
+
+# @ECLASS-VARIABLE: KDE_HANDBOOK
+# @DESCRIPTION:
+# Set to enable handbook in application. Possible values are 'always', 'optional'
+# (handbook USE flag) and 'never'.
+# This variable must be set before inheriting any eclasses. Defaults to 'never'.
+# It adds default handbook dirs for kde-base packages to KMEXTRA and in any case it
+# ensures buildtime and runtime dependencies.
+KDE_HANDBOOK="${KDE_HANDBOOK:-never}"
+
+# @ECLASS-VARIABLE: KDE_LINGUAS_LIVE_OVERRIDE
+# @DESCRIPTION:
+# Set this variable if you want your live package to manage its
+# translations. (Most KDE ebuilds do not ship documentation
+# and translations in live ebuilds.)
+if [[ ${KDE_BUILD_TYPE} == live && -z ${KDE_LINGUAS_LIVE_OVERRIDE} ]]; then
+	# Kdebase actually provides the handbooks even for live stuff
+ [[ ${KDEBASE} == kde-base ]] || KDE_HANDBOOK=never
+ KDE_LINGUAS=""
+fi
+
+# Setup packages inheriting this eclass
+case ${KDEBASE} in
+ kde-base)
+ HOMEPAGE="http://www.kde.org/"
+ LICENSE="GPL-2"
+ if [[ ${KDE_BUILD_TYPE} = live && -z ${I_KNOW_WHAT_I_AM_DOING} ]]; then
+ # Disable tests for live ebuilds by default
+ RESTRICT+=" test"
+ fi
+
+ # This code is to prevent portage from searching GENTOO_MIRRORS for
+ # packages that will never be mirrored. (As they only will ever be in
+ # the overlay).
+ case ${PV} in
+ *9999* | 4.?.[6-9]? | 4.??.[6-9]? | ??.?.[6-9]? | ??.??.[6-9]?)
+ RESTRICT+=" mirror"
+ ;;
+ esac
+ ;;
+ kdevelop)
+ HOMEPAGE="http://www.kdevelop.org/"
+ LICENSE="GPL-2"
+ ;;
+esac
+
+# @ECLASS-VARIABLE: QT_MINIMAL
+# @DESCRIPTION:
+# Determine version of qt we enforce as minimal for the package.
+QT_MINIMAL="${QT_MINIMAL:-4.8.5}"
+
+# Declarative dependencies
+qtdeclarativedepend="
+ >=dev-qt/qtdeclarative-${QT_MINIMAL}:4
+"
+case ${DECLARATIVE_REQUIRED} in
+ always)
+ COMMONDEPEND+=" ${qtdeclarativedepend}"
+ ;;
+ optional)
+ IUSE+=" declarative"
+ COMMONDEPEND+=" declarative? ( ${qtdeclarativedepend} )"
+ ;;
+ *) ;;
+esac
+unset qtdeclarativedepend
+
+# QtHelp dependencies
+qthelpdepend="
+ >=dev-qt/qthelp-${QT_MINIMAL}:4
+"
+case ${QTHELP_REQUIRED} in
+ always)
+ COMMONDEPEND+=" ${qthelpdepend}"
+ ;;
+ optional)
+ IUSE+=" qthelp"
+ COMMONDEPEND+=" qthelp? ( ${qthelpdepend} )"
+ ;;
+esac
+unset qthelpdepend
+
+# OpenGL dependencies
+qtopengldepend="
+ >=dev-qt/qtopengl-${QT_MINIMAL}:4
+"
+case ${OPENGL_REQUIRED} in
+ always)
+ COMMONDEPEND+=" ${qtopengldepend}"
+ ;;
+ optional)
+ IUSE+=" opengl"
+ COMMONDEPEND+=" opengl? ( ${qtopengldepend} )"
+ ;;
+ *) ;;
+esac
+unset qtopengldepend
+
+# MultiMedia dependencies
+qtmultimediadepend="
+ >=dev-qt/qtmultimedia-${QT_MINIMAL}:4
+"
+case ${MULTIMEDIA_REQUIRED} in
+ always)
+ COMMONDEPEND+=" ${qtmultimediadepend}"
+ ;;
+ optional)
+ IUSE+=" multimedia"
+ COMMONDEPEND+=" multimedia? ( ${qtmultimediadepend} )"
+ ;;
+ *) ;;
+esac
+unset qtmultimediadepend
+
+# CppUnit dependencies
+cppuintdepend="
+ dev-util/cppunit
+"
+case ${CPPUNIT_REQUIRED} in
+ always)
+ DEPEND+=" ${cppuintdepend}"
+ ;;
+ optional)
+ IUSE+=" test"
+ DEPEND+=" test? ( ${cppuintdepend} )"
+ ;;
+ *) ;;
+esac
+unset cppuintdepend
+
+# KDE dependencies
+# Qt accessibility classes are needed in various places, bug 325461
+kdecommondepend="
+ dev-lang/perl
+ >=dev-qt/qt3support-${QT_MINIMAL}:4[accessibility]
+ >=dev-qt/qtcore-${QT_MINIMAL}:4[qt3support,ssl]
+ >=dev-qt/qtdbus-${QT_MINIMAL}:4
+ >=dev-qt/designer-${QT_MINIMAL}:4
+ >=dev-qt/qtgui-${QT_MINIMAL}:4[accessibility,dbus(+)]
+ >=dev-qt/qtscript-${QT_MINIMAL}:4
+ >=dev-qt/qtsql-${QT_MINIMAL}:4[qt3support]
+ >=dev-qt/qtsvg-${QT_MINIMAL}:4
+ >=dev-qt/qttest-${QT_MINIMAL}:4
+ >=dev-qt/qtwebkit-${QT_MINIMAL}:4
+"
+
+if [[ ${PN} != kdelibs ]]; then
+ kdecommondepend+=" $(add_kdebase_dep kdelibs)"
+ if [[ ${KDEBASE} = kdevelop ]]; then
+ if [[ ${PN} != kdevplatform ]]; then
+ # @ECLASS-VARIABLE: KDEVPLATFORM_REQUIRED
+ # @DESCRIPTION:
+ # Specifies whether kdevplatform is required. Possible values are 'always' (default) and 'never'.
+ # Applies to KDEBASE=kdevelop only.
+ KDEVPLATFORM_REQUIRED="${KDEVPLATFORM_REQUIRED:-always}"
+ case ${KDEVPLATFORM_REQUIRED} in
+ always)
+ kdecommondepend+="
+ >=dev-util/kdevplatform-${KDEVPLATFORM_VERSION}:4
+ "
+ ;;
+ *) ;;
+ esac
+ fi
+ fi
+fi
+
+kdedepend="
+ dev-util/automoc
+ virtual/pkgconfig
+ !aqua? (
+ >=x11-libs/libXtst-1.1.0
+ x11-proto/xf86vidmodeproto
+ )
+"
+
+kderdepend=""
+
+if [[ ${CATEGORY} == kde-apps ]]; then
+ kderdepend+=" !kde-base/${PN}"
+fi
+
+# all packages need oxygen icons for the basic iconset
+if [[ ${PN} != oxygen-icons ]]; then
+ kderdepend+=" kde-apps/oxygen-icons"
+fi
+
+# add a dependency on kde-l10n
+if [[ ${KDEBASE} != "kde-base" && -n ${KDE_LINGUAS} ]]; then
+ for _lingua in ${KDE_LINGUAS}; do
+ # if our package has linguas, pull in kde-l10n with selected lingua enabled,
+ # but only for selected ones.
+ # this can't be done on one line because if user doesn't use any localisation
+ # then he is probably not interested in kde-l10n at all.
+ kderdepend+="
+ linguas_${_lingua}? ( $(add_kdeapps_dep kde4-l10n "linguas_${_lingua}(+)") )
+ "
+ done
+ unset _lingua
+fi
+
+kdehandbookdepend="
+ app-text/docbook-xml-dtd:4.2
+ app-text/docbook-xsl-stylesheets
+"
+kdehandbookrdepend="
+ $(add_kdebase_dep kdelibs 'handbook')
+"
+case ${KDE_HANDBOOK} in
+ always)
+ kdedepend+=" ${kdehandbookdepend}"
+ [[ ${PN} != kdelibs ]] && kderdepend+=" ${kdehandbookrdepend}"
+ ;;
+ optional)
+ IUSE+=" +handbook"
+ kdedepend+=" handbook? ( ${kdehandbookdepend} )"
+ [[ ${PN} != kdelibs ]] && kderdepend+=" handbook? ( ${kdehandbookrdepend} )"
+ ;;
+ *) ;;
+esac
+unset kdehandbookdepend kdehandbookrdepend
+
+case ${KDE_SELINUX_MODULE} in
+ none) ;;
+ *)
+ IUSE+=" selinux"
+ kderdepend+=" selinux? ( sec-policy/selinux-${KDE_SELINUX_MODULE} )"
+ ;;
+esac
+
+# We always need the aqua useflag because otherwise we cannot refer to it inside
+# add_kdebase_dep. This was always kind of a bug, but came to light with EAPI=5
+# (where referring to a use flag not in IUSE masks the ebuild).
+# The only alternative would be to prohibit using add_kdebase_dep if KDE_REQUIRED=never
+IUSE+=" aqua"
+
+case ${KDE_REQUIRED} in
+ always)
+ [[ -n ${kdecommondepend} ]] && COMMONDEPEND+=" ${kdecommondepend}"
+ [[ -n ${kdedepend} ]] && DEPEND+=" ${kdedepend}"
+ [[ -n ${kderdepend} ]] && RDEPEND+=" ${kderdepend}"
+ ;;
+ optional)
+ IUSE+=" kde"
+ [[ -n ${kdecommondepend} ]] && COMMONDEPEND+=" kde? ( ${kdecommondepend} )"
+ [[ -n ${kdedepend} ]] && DEPEND+=" kde? ( ${kdedepend} )"
+ [[ -n ${kderdepend} ]] && RDEPEND+=" kde? ( ${kderdepend} )"
+ ;;
+ *) ;;
+esac
+
+unset kdecommondepend kdedepend kderdepend
+
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: COMMONDEPEND is ${COMMONDEPEND}"
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: DEPEND (only) is ${DEPEND}"
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: RDEPEND (only) is ${RDEPEND}"
+
+# Accumulate dependencies set by this eclass
+DEPEND+=" ${COMMONDEPEND}"
+RDEPEND+=" ${COMMONDEPEND}"
+unset COMMONDEPEND
+
+# Fetch section - If the ebuild's category is not 'kde-base' and if it is not a
+# kdevelop ebuild, the URI should be set in the ebuild itself
+_calculate_src_uri() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local _kmname _kmname_pv
+
+ # we calculate URI only for known KDEBASE modules
+ [[ -n ${KDEBASE} ]] || return
+
+ # calculate tarball module name
+ if [[ -n ${KMNAME} ]]; then
+ _kmname="${KMNAME}"
+ else
+ _kmname=${PN}
+ fi
+ _kmname_pv="${_kmname}-${PV}"
+ case ${KDEBASE} in
+ kde-base)
+ case ${PV} in
+ 4.4.11.1)
+ # KDEPIM 4.4, special case
+ # TODO: Remove this part when KDEPIM 4.4 gets out of the tree
+ SRC_URI="mirror://kde/stable/kdepim-${PV}/src/${_kmname_pv}.tar.bz2" ;;
+ 4.4.20*)
+ # KDEPIM 4.4 no-akonadi branch, special case
+ # TODO: Remove this part when KDEPIM 4.4 gets out of the tree
+ SRC_URI="http://dev.gentoo.org/~dilfridge/distfiles/${_kmname_pv}.tar.xz" ;;
+ 4.?.[6-9]? | 4.??.[6-9]?)
+ # Unstable KDE SC releases
+ SRC_URI="mirror://kde/unstable/${PV}/src/${_kmname_pv}.tar.xz" ;;
+ 4.11.14)
+ # Part of 4.14 actually, sigh. Not stable for next release!
+ SRC_URI="mirror://kde/stable/4.14.3/src/${_kmname_pv}.tar.xz" ;;
+ 4.11.19)
+ # Part of 15.04.1 actually, sigh. Not stable for next release!
+ SRC_URI="mirror://kde/stable/applications/15.04.1/src/${_kmname_pv}.tar.xz" ;;
+ 4.11.21)
+ # Part of 15.04.3 actually, sigh. Not stable for next release!
+ SRC_URI="mirror://kde/stable/applications/15.04.3/src/${_kmname_pv}.tar.xz" ;;
+ 4.14.3)
+ # Last SC release
+ SRC_URI="mirror://kde/stable/${PV}/src/${_kmname_pv}.tar.xz" ;;
+ 4.14.8)
+ # Part of 15.04.1 actually, sigh. Not stable for next release!
+ SRC_URI="mirror://kde/stable/applications/15.04.1/src/${_kmname_pv}.tar.xz" ;;
+ 4.14.10)
+ # Part of 15.04.3 actually, sigh. Not stable for next release!
+ SRC_URI="mirror://kde/stable/applications/15.04.3/src/${_kmname_pv}.tar.xz" ;;
+ ??.?.[6-9]? | ??.??.[4-9]?)
+ # Unstable KDE Applications releases
+ SRC_URI="mirror://kde/unstable/applications/${PV}/src/${_kmname}-${PV}.tar.xz" ;;
+ *)
+ # Stable KDE Applications releases
+ SRC_URI="mirror://kde/stable/applications/${PV}/src/${_kmname}-${PV}.tar.xz"
+ ;;
+ esac
+ ;;
+ kdevelop|kdevelop-php*|kdevplatform)
+ case ${KDEVELOP_VERSION} in
+ 4.[123].[6-9]*) SRC_URI="mirror://kde/unstable/kdevelop/${KDEVELOP_VERSION}/src/${P}.tar.xz" ;;
+ *) SRC_URI="mirror://kde/stable/kdevelop/${KDEVELOP_VERSION}/src/${P}.tar.xz" ;;
+ esac
+ ;;
+ esac
+}
+
+_calculate_live_repo() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ SRC_URI=""
+ case ${KDE_SCM} in
+ svn)
+ # Determine branch URL based on live type
+ local branch_prefix
+ case ${PV} in
+ 9999*)
+ # trunk
+ branch_prefix="trunk/KDE"
+ ;;
+ *)
+ # branch
+ branch_prefix="branches/KDE/$(get_kde_version)"
+ # @ECLASS-VARIABLE: ESVN_PROJECT_SUFFIX
+					# @DESCRIPTION:
+ # Suffix appended to ESVN_PROJECT depending on fetched branch.
+ # Defaults is empty (for -9999 = trunk), and "-${PV}" otherwise.
+ ESVN_PROJECT_SUFFIX="-${PV}"
+ ;;
+ esac
+ # @ECLASS-VARIABLE: ESVN_MIRROR
+ # @DESCRIPTION:
+ # This variable allows easy overriding of default kde mirror service
+ # (anonsvn) with anything else you might want to use.
+ ESVN_MIRROR=${ESVN_MIRROR:=svn://anonsvn.kde.org/home/kde}
+ # Split ebuild, or extragear stuff
+ if [[ -n ${KMNAME} ]]; then
+ ESVN_PROJECT="${KMNAME}${ESVN_PROJECT_SUFFIX}"
+ if [[ -z ${KMNOMODULE} ]] && [[ -z ${KMMODULE} ]]; then
+ KMMODULE="${PN}"
+ fi
+ # Split kde-base/ ebuilds: (they reside in trunk/KDE)
+ case ${KMNAME} in
+ kdebase-*)
+ ESVN_REPO_URI="${ESVN_MIRROR}/${branch_prefix}/kdebase/${KMNAME#kdebase-}"
+ ;;
+ kdelibs-*)
+ ESVN_REPO_URI="${ESVN_MIRROR}/${branch_prefix}/kdelibs/${KMNAME#kdelibs-}"
+ ;;
+ kdereview*)
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${KMNAME}/${KMMODULE}"
+ ;;
+ kdesupport)
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${KMNAME}/${KMMODULE}"
+ ESVN_PROJECT="${PN}${ESVN_PROJECT_SUFFIX}"
+ ;;
+ kde*)
+ ESVN_REPO_URI="${ESVN_MIRROR}/${branch_prefix}/${KMNAME}"
+ ;;
+ extragear*|playground*)
+ # Unpack them in toplevel dir, so that they won't conflict with kde4-meta
+ # build packages from same svn location.
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${KMNAME}/${KMMODULE}"
+ ESVN_PROJECT="${PN}${ESVN_PROJECT_SUFFIX}"
+ ;;
+ *)
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${KMNAME}/${KMMODULE}"
+ ;;
+ esac
+ else
+ # kdelibs, kdepimlibs
+ ESVN_REPO_URI="${ESVN_MIRROR}/${branch_prefix}/${PN}"
+ ESVN_PROJECT="${PN}${ESVN_PROJECT_SUFFIX}"
+ fi
+ # @ECLASS-VARIABLE: ESVN_UP_FREQ
+ # @DESCRIPTION:
+			# This variable is used for specifying the timeout between svn syncs
+ # for kde-base modules. Does not affect misc apps.
+ # Default value is 1 hour.
+ [[ ${KDEBASE} = kde-base ]] && ESVN_UP_FREQ=${ESVN_UP_FREQ:-1}
+ ;;
+ git)
+ local _kmname
+ # @ECLASS-VARIABLE: EGIT_MIRROR
+ # @DESCRIPTION:
+ # This variable allows easy overriding of default kde mirror service
+ # (anongit) with anything else you might want to use.
+ EGIT_MIRROR=${EGIT_MIRROR:=git://anongit.kde.org}
+
+ # @ECLASS-VARIABLE: EGIT_REPONAME
+ # @DESCRIPTION:
+ # This variable allows overriding of default repository
+			# name. Specify only if this differs from PN and KMNAME.
+ if [[ -n ${EGIT_REPONAME} ]]; then
+				# the repository name and kmname differ
+ _kmname=${EGIT_REPONAME}
+ elif [[ -n ${KMNAME} ]]; then
+ _kmname=${KMNAME}
+ else
+ _kmname=${PN}
+ fi
+
+ # default branching
+ [[ ${PV} != 4.9999* && ${PV} != 9999 && ${KDEBASE} == kde-base ]] && \
+ EGIT_BRANCH="KDE/$(get_kde_version)"
+
+ # Applications branching
+ [[ ${PV} == ??.??.49.9999 && ${KDEBASE} == kde-base ]] && \
+ EGIT_BRANCH="Applications/$(get_kde_version)"
+
+ # default repo uri
+ EGIT_REPO_URI+=( "${EGIT_MIRROR}/${_kmname}" )
+
+ debug-print "${FUNCNAME}: Repository: ${EGIT_REPO_URI}"
+ debug-print "${FUNCNAME}: Branch: ${EGIT_BRANCH}"
+ ;;
+ esac
+}
+
+case ${KDE_BUILD_TYPE} in
+ live) _calculate_live_repo ;;
+ *) _calculate_src_uri ;;
+esac
+
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: SRC_URI is ${SRC_URI}"
+
+# @ECLASS-VARIABLE: PREFIX
+# @DESCRIPTION:
+# Set the installation PREFIX for non kde-base applications. It defaults to /usr.
+# kde-base packages go into KDE4 installation directory (/usr).
+# No matter the PREFIX, package will be built against KDE installed in /usr.
+
+# @FUNCTION: kde4-base_pkg_setup
+# @DESCRIPTION:
+# Do some basic settings
+kde4-base_pkg_setup() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ if has handbook ${IUSE} || has "+handbook" ${IUSE} && [ "${KDE_HANDBOOK}" != optional ] ; then
+ eqawarn "Handbook support is enabled via KDE_HANDBOOK=optional in the ebuild."
+ eqawarn "Please do not just set IUSE=handbook, as this leads to dependency errors."
+ fi
+
+ # Don't set KDEHOME during compilation, it will cause access violations
+ unset KDEHOME
+
+ # Check if gcc compiler is fresh enough.
+ # In theory should be in pkg_pretend but we check it only for kdelibs there
+ # and for others we do just quick scan in pkg_setup because pkg_pretend
+ # executions consume quite some time (ie. when merging 300 packages at once will cause 300 checks)
+ if [[ ${MERGE_TYPE} != binary ]]; then
+ [[ $(gcc-major-version) -lt 4 ]] || \
+ ( [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -le 6 ]] ) \
+ && die "Sorry, but gcc-4.6 and earlier wont work for some KDE packages."
+ fi
+
+ KDEDIR=/usr
+ : ${PREFIX:=/usr}
+ EKDEDIR=${EPREFIX}/usr
+
+ # Point to correct QT plugins path
+ QT_PLUGIN_PATH="${EPREFIX}/usr/$(get_libdir)/kde4/plugins/"
+
+ # Fix XDG collision with sandbox
+ export XDG_CONFIG_HOME="${T}"
+}
+
+# @FUNCTION: kde4-base_src_unpack
+# @DESCRIPTION:
+# This function unpacks the source tarballs for KDE4 applications.
+kde4-base_src_unpack() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${KDE_SCM} in
+ svn)
+ subversion_src_unpack
+ ;;
+ git)
+ git-r3_src_unpack
+ ;;
+ esac
+ else
+ unpack ${A}
+ fi
+}
+
+# @FUNCTION: kde4-base_src_prepare
+# @DESCRIPTION:
+# General pre-configure and pre-compile function for KDE4 applications.
+# It also handles translations if KDE_LINGUAS is defined. See KDE_LINGUAS and
+# enable_selected_linguas() and enable_selected_doc_linguas()
+# in kde4-functions.eclass(5) for further details.
+kde4-base_src_prepare() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # enable handbook and linguas only when not using live ebuild
+
+ # Only enable selected languages, used for KDE extragear apps.
+ if [[ -n ${KDE_LINGUAS} ]]; then
+ enable_selected_linguas
+ fi
+
+ # Enable/disable handbooks for kde4-base packages
+	# kde-l10n inherits kde4-base but is a metapackage, so no check for doc
+ # kdelibs inherits kde4-base but handle installing the handbook itself
+ if ! has kde4-meta ${INHERITED} && in_iuse handbook; then
+ if [[ ${KDEBASE} == kde-base ]]; then
+ if [[ ${PN} != kde-l10n && ${PN} != kdepim-l10n && ${PN} != kdelibs ]] && use !handbook; then
+ # documentation in kde4-functions
+ : ${KDE_DOC_DIRS:=doc}
+ local dir
+ for dir in ${KDE_DOC_DIRS}; do
+ sed -e "\!^[[:space:]]*add_subdirectory[[:space:]]*([[:space:]]*${dir}[[:space:]]*)!s/^/#DONOTCOMPILE /" \
+ -e "\!^[[:space:]]*ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*${dir}[[:space:]]*)!s/^/#DONOTCOMPILE /" \
+ -e "\!^[[:space:]]*macro_optional_add_subdirectory[[:space:]]*([[:space:]]*${dir}[[:space:]]*)!s/^/#DONOTCOMPILE /" \
+ -e "\!^[[:space:]]*MACRO_OPTIONAL_ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*${dir}[[:space:]]*)!s/^/#DONOTCOMPILE /" \
+ -i CMakeLists.txt || die "failed to comment out handbook"
+ done
+ fi
+ else
+ enable_selected_doc_linguas
+ fi
+ fi
+
+ # SCM bootstrap
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${KDE_SCM} in
+ svn) subversion_src_prepare ;;
+ esac
+ fi
+
+ # Apply patches, cmake-utils does the job already
+ cmake-utils_src_prepare
+
+ # Save library dependencies
+ if [[ -n ${KMSAVELIBS} ]] ; then
+ save_library_dependencies
+ fi
+
+ # Inject library dependencies
+ if [[ -n ${KMLOADLIBS} ]] ; then
+ load_library_dependencies
+ fi
+
+ # Hack for manuals relying on outdated DTD, only outside kde-base/...
+ if [[ -z ${KDEBASE} ]]; then
+ find "${S}" -name "*.docbook" \
+ -exec sed -i -r \
+ -e 's:-//KDE//DTD DocBook XML V4\.1(\..)?-Based Variant V1\.[01]//EN:-//KDE//DTD DocBook XML V4.2-Based Variant V1.1//EN:g' {} + \
+ || die 'failed to fix DocBook variant version'
+ fi
+}
+
+# @FUNCTION: kde4-base_src_configure
+# @DESCRIPTION:
+# Function for configuring the build of KDE4 applications.
+kde4-base_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # Build tests in src_test only, where we override this value
+ local cmakeargs=(-DKDE4_BUILD_TESTS=OFF)
+
+ if use_if_iuse debug; then
+ # Set "real" debug mode
+ CMAKE_KDE_BUILD_TYPE="Debugfull"
+ else
+ # Handle common release builds
+ append-cppflags -DQT_NO_DEBUG
+ fi
+
+ # Set distribution name
+ [[ ${PN} = kdelibs ]] && cmakeargs+=(-DKDE_DISTRIBUTION_TEXT=Gentoo)
+
+ # Here we set the install prefix
+ tc-is-cross-compiler || cmakeargs+=(-DCMAKE_INSTALL_PREFIX="${EPREFIX}${PREFIX}")
+
+ # Use colors
+ QTEST_COLORED=1
+
+ # Shadow existing installations
+ unset KDEDIRS
+
+ #qmake -query QT_INSTALL_LIBS unavailable when cross-compiling
+ tc-is-cross-compiler && cmakeargs+=(-DQT_LIBRARY_DIR=${ROOT}/usr/$(get_libdir)/qt4)
+ #kde-config -path data unavailable when cross-compiling
+ tc-is-cross-compiler && cmakeargs+=(-DKDE4_DATA_DIR=${ROOT}/usr/share/apps/)
+
+ # sysconf needs to be /etc, not /usr/etc
+ cmakeargs+=(-DSYSCONF_INSTALL_DIR="${EPREFIX}"/etc)
+
+ if [[ $(declare -p mycmakeargs 2>&-) != "declare -a mycmakeargs="* ]]; then
+ if [[ ${mycmakeargs} ]]; then
+ eqawarn "mycmakeargs should always be declared as an array, not a string"
+ fi
+ mycmakeargs=(${mycmakeargs})
+ fi
+
+ mycmakeargs=("${cmakeargs[@]}" "${mycmakeargs[@]}")
+
+ cmake-utils_src_configure
+}
+
+# @FUNCTION: kde4-base_src_compile
+# @DESCRIPTION:
+# General function for compiling KDE4 applications.
+kde4-base_src_compile() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ cmake-utils_src_compile "$@"
+}
+
+# @FUNCTION: kde4-base_src_test
+# @DESCRIPTION:
+# Function for testing KDE4 applications.
+kde4-base_src_test() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local kded4_pid
+
+ _test_runner() {
+ if [[ -n "${VIRTUALDBUS_TEST}" ]]; then
+ export $(dbus-launch)
+ kded4 2>&1 > /dev/null &
+ kded4_pid=$!
+ fi
+
+ cmake-utils_src_test
+ }
+
+ # When run as normal user during ebuild development with the ebuild command, the
+ # kde tests tend to access the session DBUS. This however is not possible in a real
+ # emerge or on the tinderbox.
+ # > make sure it does not happen, so bad tests can be recognized and disabled
+ unset DBUS_SESSION_BUS_ADDRESS DBUS_SESSION_BUS_PID
+
+ # Override this value, set in kde4-base_src_configure()
+ mycmakeargs+=(-DKDE4_BUILD_TESTS=ON)
+ cmake-utils_src_configure
+ kde4-base_src_compile
+
+ if [[ ${VIRTUALX_REQUIRED} == always || ${VIRTUALX_REQUIRED} == test ]]; then
+ # check for sanity if anyone already redefined VIRTUALX_COMMAND from the default
+ if [[ ${VIRTUALX_COMMAND} != emake ]]; then
+ # surprise- we are already INSIDE virtualmake!!!
+ debug-print "QA Notice: This version of kde4-base.eclass includes the virtualx functionality."
+ debug-print " You may NOT set VIRTUALX_COMMAND or call virtualmake from the ebuild."
+ debug-print " Setting VIRTUALX_REQUIRED is completely sufficient. See the"
+ debug-print " kde4-base.eclass docs for details... Applying workaround."
+ _test_runner
+ else
+ VIRTUALX_COMMAND="_test_runner" virtualmake
+ fi
+ else
+ _test_runner
+ fi
+
+ if [ -n "${kded4_pid}" ] ; then
+ kill ${kded4_pid}
+ fi
+
+ if [ -n "${DBUS_SESSION_BUS_PID}" ] ; then
+ kill ${DBUS_SESSION_BUS_PID}
+ fi
+}
+
+# @FUNCTION: kde4-base_src_install
+# @DESCRIPTION:
+# Function for installing KDE4 applications.
+kde4-base_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ if [[ -n ${KMSAVELIBS} ]] ; then
+ install_library_dependencies
+ fi
+
+ # Install common documentation of KDE4 applications
+ local doc
+ if ! has kde4-meta ${INHERITED}; then
+ for doc in "${S}"/{AUTHORS,CHANGELOG,ChangeLog*,README*,NEWS,TODO,HACKING}; do
+ [[ -f ${doc} && -s ${doc} ]] && dodoc "${doc}"
+ done
+ for doc in "${S}"/*/{AUTHORS,CHANGELOG,ChangeLog*,README*,NEWS,TODO,HACKING}; do
+ [[ -f ${doc} && -s ${doc} ]] && newdoc "${doc}" "$(basename $(dirname ${doc})).$(basename ${doc})"
+ done
+ fi
+
+ cmake-utils_src_install
+
+ # We don't want ${PREFIX}/share/doc/HTML to be compressed,
+ # because then khelpcenter can't find the docs
+ [[ -d ${ED}/${PREFIX}/share/doc/HTML ]] &&
+ docompress -x ${PREFIX}/share/doc/HTML
+}
+
+# @FUNCTION: kde4-base_pkg_preinst
+# @DESCRIPTION:
+# Function storing icon caches
+kde4-base_pkg_preinst() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_savelist
+ if [[ ${KDE_BUILD_TYPE} == live && ${KDE_SCM} == svn ]]; then
+ subversion_pkg_preinst
+ fi
+}
+
+# @FUNCTION: kde4-base_pkg_postinst
+# @DESCRIPTION:
+# Function to rebuild the KDE System Configuration Cache after an application has been installed.
+kde4-base_pkg_postinst() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_cache_update
+ fdo-mime_desktop_database_update
+ fdo-mime_mime_database_update
+ buildsycoca
+
+ if [[ -z ${I_KNOW_WHAT_I_AM_DOING} ]]; then
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ echo
+ einfo "WARNING! This is an experimental live ebuild of ${CATEGORY}/${PN}"
+ einfo "Use it at your own risk."
+ einfo "Do _NOT_ file bugs at bugs.gentoo.org because of this ebuild!"
+ echo
+ fi
+ # for all 3rd party soft tell user that he SHOULD install kdebase-startkde or kdebase-runtime-meta
+ if [[ ${KDEBASE} != kde-base ]] && \
+ ! has_version 'kde-apps/kdebase-runtime-meta' && \
+ ! has_version 'kde-base/kdebase-startkde'; then
+ if [[ ${KDE_REQUIRED} == always ]] || ( [[ ${KDE_REQUIRED} == optional ]] && use kde ); then
+ echo
+ ewarn "WARNING! Your system configuration contains neither \"kde-base/kdebase-runtime-meta\""
+ ewarn "nor \"kde-base/kdebase-startkde\". You need one of above."
+ ewarn "With this setting you are unsupported by KDE team."
+ ewarn "All missing features you report for misc packages will be probably ignored or closed as INVALID."
+ fi
+ fi
+ fi
+}
+
+# @FUNCTION: kde4-base_pkg_postrm
+# @DESCRIPTION:
+# Function to rebuild the KDE System Configuration Cache after an application has been removed.
+kde4-base_pkg_postrm() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_cache_update
+ fdo-mime_desktop_database_update
+ fdo-mime_mime_database_update
+ buildsycoca
+}
+
+fi
diff --git a/eclass/kde4-functions.eclass b/eclass/kde4-functions.eclass
new file mode 100644
index 000000000000..f7a3bda221e4
--- /dev/null
+++ b/eclass/kde4-functions.eclass
@@ -0,0 +1,413 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: kde4-functions.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: Common ebuild functions for KDE 4 packages
+# @DESCRIPTION:
+# This eclass contains all functions shared by the different eclasses,
+# for KDE 4 ebuilds.
+
+if [[ -z ${_KDE4_FUNCTIONS_ECLASS} ]]; then
+_KDE4_FUNCTIONS_ECLASS=1
+
+inherit versionator
+
+# @ECLASS-VARIABLE: EAPI
+# @DESCRIPTION:
+# Currently kde4 eclasses support EAPI 5.
+case ${EAPI} in
+ 5) : ;;
+ *) die "EAPI=${EAPI:-0} is not supported" ;;
+esac
+
+# @ECLASS-VARIABLE: KDE_OVERRIDE_MINIMAL
+# @DESCRIPTION:
+# For use only in very few well-defined cases; normally it should be unset.
+# If this variable is set, all calls to add_kdebase_dep return a dependency on
+# at least this version, independent of the version of the package itself.
+# If you know exactly that one specific NEW KDE component builds and runs fine
+# with all the rest of KDE at an OLDER version, you can set this old version here.
+# Warning- may lead to general instability and kill your pet targh.
+
+# @ECLASS-VARIABLE: KDEBASE
+# @DESCRIPTION:
+# This gets set to a non-zero value when a package is considered a kde or
+# kdevelop ebuild.
+if [[ ${CATEGORY} = kde-base || ${CATEGORY} = kde-apps ]]; then
+ debug-print "${ECLASS}: KDEBASE ebuild recognized"
+ KDEBASE=kde-base
+elif [[ ${KMNAME-${PN}} = kdevelop ]]; then
+ KDEBASE=kdevelop
+fi
+
+debug-print "${ECLASS}: ${KDEBASE} ebuild recognized"
+
+# determine the build type
+if [[ ${PV} = *9999* ]]; then
+ KDE_BUILD_TYPE="live"
+else
+ KDE_BUILD_TYPE="release"
+fi
+export KDE_BUILD_TYPE
+
+# Set reponame and SCM for modules that have fully migrated to git
+# (hack - it's here because it needs to be before SCM inherits from kde4-base)
+if [[ ${KDE_BUILD_TYPE} == live ]]; then
+ case "${KMNAME}" in
+ kdebase-workspace)
+ EGIT_REPONAME=${EGIT_REPONAME:=kde-workspace}
+ ;;
+ kdebase-runtime)
+ EGIT_REPONAME=${EGIT_REPONAME:=kde-runtime}
+ ;;
+ esac
+fi
+
+# @ECLASS-VARIABLE: KDE_SCM
+# @DESCRIPTION:
+# If this is a live package, this variable selects which SCM it uses.
+# Defaults to git.
+KDE_SCM="${KDE_SCM:-git}"
+case ${KDE_SCM} in
+ svn|git) ;;
+ *) die "KDE_SCM: ${KDE_SCM} is not supported" ;;
+esac
+
+# @ECLASS-VARIABLE: KDE_LINGUAS
+# @DESCRIPTION:
+# This is a whitespace-separated list of translations this ebuild supports.
+# These translations are automatically added to IUSE. Therefore ebuilds must set
+# this variable before inheriting any eclasses. To enable only selected
+# translations, ebuilds must call enable_selected_linguas(). kde4-{base,meta}.eclass does
+# this for you.
+#
+# Example: KDE_LINGUAS="de en_GB nl"
+if [[ ${KDE_BUILD_TYPE} != live || -n ${KDE_LINGUAS_LIVE_OVERRIDE} ]]; then
+ for _lingua in ${KDE_LINGUAS}; do
+ IUSE="${IUSE} linguas_${_lingua}"
+ done
+fi
+
+# @FUNCTION: buildsycoca
+# @DESCRIPTION:
+# Function to rebuild the KDE System Configuration Cache.
+# All KDE ebuilds should run this in pkg_postinst and pkg_postrm.
+buildsycoca() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# We no longer need to run kbuildsycoca4, as kded does that automatically, as needed
+
+	# fix permission for some directories
+	# NOTE(review): loop variable 'x' is not declared local and leaks into the
+	# caller's environment — confirm whether that is intentional.
+	for x in usr/share/{config,kde4}; do
+		# NOTE(review): DIRS is assigned but never read anywhere in this function.
+		DIRS=${EROOT}usr
+		# NOTE(review): 'break' aborts the whole loop, so a missing first
+		# directory skips the permission check on the second one as well;
+		# 'continue' looks like the intended behavior — confirm.
+		[[ -d "${EROOT}${x}" ]] || break # nothing to do if directory does not exist
+		# fixes Bug 318237
+		if use userland_BSD ; then
+			[[ $(stat -f %p "${EROOT}${x}") != 40755 ]]
+			local stat_rtn="$?"
+		else
+			[[ $(stat --format=%a "${EROOT}${x}") != 755 ]]
+			local stat_rtn=$?
+		fi
+		# stat_rtn == 1 means the permissions already equal 755; anything else
+		# means the mode differs and gets repaired below.
+		if [[ $stat_rtn != 1 ]] ; then
+			ewarn "QA Notice:"
+			ewarn "Package ${PN} is breaking ${EROOT}${x} permissions."
+			ewarn "Please report this issue to gentoo bugzilla."
+			einfo "Permissions will get adjusted automatically now."
+			find "${EROOT}${x}" -type d -print0 | xargs -0 chmod 755
+		fi
+	done
+}
+
+# @FUNCTION: comment_all_add_subdirectory
+# @USAGE: [list of directory names]
+# @DESCRIPTION:
+# Recursively comment all add_subdirectory instructions in listed directories,
+# except those in cmake/.
+comment_all_add_subdirectory() {
+	# Prefix every (macro_optional_)add_subdirectory line in all CMakeLists.txt
+	# below the given directories with the "#DONOTCOMPILE " marker, which other
+	# helpers in this eclass later strip selectively to re-enable subdirs.
+	# CMakeLists.txt under ./cmake are deliberately left untouched.
+	find "$@" -name CMakeLists.txt -print0 | grep -vFzZ "./cmake" | \
+		xargs -0 sed -i \
+			-e '/^[[:space:]]*add_subdirectory/s/^/#DONOTCOMPILE /' \
+			-e '/^[[:space:]]*ADD_SUBDIRECTORY/s/^/#DONOTCOMPILE /' \
+			-e '/^[[:space:]]*macro_optional_add_subdirectory/s/^/#DONOTCOMPILE /' \
+			-e '/^[[:space:]]*MACRO_OPTIONAL_ADD_SUBDIRECTORY/s/^/#DONOTCOMPILE /' \
+		|| die "${LINENO}: Initial sed died"
+}
+
+# @FUNCTION: enable_selected_linguas
+# @DESCRIPTION:
+# Enable translations based on LINGUAS settings and translations supported by
+# the package (see KDE_LINGUAS). By default, translations are found in "${S}"/po
+# but this default can be overridden by defining KDE_LINGUAS_DIR.
+enable_selected_linguas() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local x
+
+	# if there is no linguas defined we enable everything
+	# NOTE(review): '! $(env | grep -q ...)' relies on the command substitution's
+	# exit status being propagated through an empty command; plain
+	# '! env | grep -q "^LINGUAS="' would express the same test directly.
+	if ! $(env | grep -q "^LINGUAS="); then
+		return 0
+	fi
+
+	# @ECLASS-VARIABLE: KDE_LINGUAS_DIR
+	# @DESCRIPTION:
+	# Specified folder where application translations are located.
+	# Can be defined as array of folders where translations are located.
+	# Note that space separated list of dirs is not supported.
+	# Default value is set to "po".
+	# NOTE(review): '2>/dev/null 2>&1' — the second redirection overrides the
+	# first; '2>/dev/null' alone appears to be what was meant.
+	if [[ "$(declare -p KDE_LINGUAS_DIR 2>/dev/null 2>&1)" == "declare -a"* ]]; then
+		debug-print "$FUNCNAME: we have these subfolders defined: ${KDE_LINGUAS_DIR}"
+		for x in ${KDE_LINGUAS_DIR[@]}; do
+			_enable_selected_linguas_dir ${x}
+		done
+	else
+		KDE_LINGUAS_DIR=${KDE_LINGUAS_DIR:="po"}
+		_enable_selected_linguas_dir ${KDE_LINGUAS_DIR}
+	fi
+}
+
+# @FUNCTION: enable_selected_doc_linguas
+# @DESCRIPTION:
+# Enable only selected linguas enabled doc folders.
+enable_selected_doc_linguas() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# @ECLASS-VARIABLE: KDE_DOC_DIRS
+	# @DESCRIPTION:
+	# Variable specifying whitespace separated patterns for documentation locations.
+	# Default is "doc/%lingua"
+	KDE_DOC_DIRS=${KDE_DOC_DIRS:='doc/%lingua'}
+	local linguas
+	for pattern in ${KDE_DOC_DIRS}; do
+
+		local handbookdir=`dirname ${pattern}`
+		local translationdir=`basename ${pattern}`
+		# No %lingua placeholder in the pattern: treat the whole pattern as the
+		# handbook directory itself.
+		[[ ${handbookdir} = '.' ]] && handbookdir=${translationdir} && translationdir=
+		[[ -d ${handbookdir} ]] || die 'wrong doc dir specified'
+
+		if ! use handbook; then
+			# Disable whole directory
+			sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${handbookdir}[[:space:]]*)/s/^/#DONOTCOMPILE /" \
+				-e "/ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*${handbookdir}[[:space:]]*)/s/^/#DONOTCOMPILE /" \
+				-i CMakeLists.txt || die 'failed to comment out all handbooks'
+		else
+			# if there is no linguas defined we enable everything (i.e. comment out nothing)
+			if ! $(env | grep -q "^LINGUAS="); then
+				return 0
+			fi
+
+			# Disable subdirectories recursively
+			comment_all_add_subdirectory "${handbookdir}"
+
+			# In certain packages, the default handbook is en_US instead of the usual en. Since there is no en_US 'translation',
+			# it makes no sense to add to KDE_LINGUAS which causes this type of handbook to not be installed.
+			if [[ -d "${handbookdir}/en_US" && ! -d "${handbookdir}/en" ]]; then
+				mv "${handbookdir}/en_US" "${handbookdir}/en" || die
+				# NOTE(review): unlike the other sed calls here, this one has no
+				# '|| die' — confirm whether a silent failure is acceptable.
+				sed -e "s/en_US/en/" -i "${handbookdir}/CMakeLists.txt"
+			fi
+
+			# Add requested translations
+			local lingua
+			for lingua in en ${KDE_LINGUAS}; do
+				if [[ ${lingua} = en ]] || use linguas_${lingua}; then
+					if [[ -d ${handbookdir}/${translationdir//%lingua/${lingua}} ]]; then
+						sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${translationdir//%lingua/${lingua}}/s/^#DONOTCOMPILE //" \
+							-e "/ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*${translationdir//%lingua/${lingua}}/s/^#DONOTCOMPILE //" \
+							-i "${handbookdir}"/CMakeLists.txt && ! has ${lingua} ${linguas} && linguas="${linguas} ${lingua}"
+					fi
+				fi
+			done
+		fi
+
+	done
+	[[ -n "${linguas}" ]] && einfo "Enabling handbook translations:${linguas}"
+}
+
+# Functions handling KMLOADLIBS and KMSAVELIBS
+
+# @FUNCTION: save_library_dependencies
+# @DESCRIPTION:
+# Add exporting CMake dependencies for current package
+save_library_dependencies() {
+	local depsfile="${T}/${PN}"
+
+	# Ask CMake to write its exported library dependencies to ${depsfile};
+	# the actual file is produced later, when the configure phase runs.
+	ebegin "Saving library dependencies in ${depsfile##*/}"
+	echo "EXPORT_LIBRARY_DEPENDENCIES(\"${depsfile}\")" >> "${S}/CMakeLists.txt" || \
+		die "Failed to save the library dependencies."
+	eend $?
+}
+
+# @FUNCTION: install_library_dependencies
+# @DESCRIPTION:
+# Install generated CMake library dependencies to /var/lib/kde
+install_library_dependencies() {
+	local depsfile="${T}/${PN}"
+
+	# Install the dependency file generated via save_library_dependencies so
+	# that dependent packages can load it from /var/lib/kde at build time.
+	ebegin "Installing library dependencies as ${depsfile##*/}"
+	insinto /var/lib/kde
+	doins "${depsfile}" || die "Failed to install library dependencies."
+	eend $?
+}
+
+# @FUNCTION: load_library_dependencies
+# @DESCRIPTION:
+# Inject specified library dependencies in current package
+load_library_dependencies() {
+	local pn i depsfile
+	ebegin "Injecting library dependencies from '${KMLOADLIBS}'"
+
+	# Insert one INCLUDE() per listed package at the top of the package's
+	# CMakeLists.txt; ${i} is the sed line number the INCLUDE is inserted at,
+	# so the includes appear in KMLOADLIBS order.
+	i=0
+	for pn in ${KMLOADLIBS} ; do
+		((i++))
+		depsfile="${EPREFIX}/var/lib/kde/${pn}"
+		# Fall back to the slotted filename (e.g. foo:4.x) before giving up.
+		[[ -r ${depsfile} ]] || depsfile="${EPREFIX}/var/lib/kde/${pn}:$(get_kde_version)"
+		[[ -r ${depsfile} ]] || die "Depsfile '${depsfile}' not accessible. You probably need to reinstall ${pn}."
+		sed -i -e "${i}iINCLUDE(\"${depsfile}\")" "${S}/CMakeLists.txt" || \
+			die "Failed to include library dependencies for ${pn}"
+	done
+	eend $?
+}
+
+# @FUNCTION: add_kdeapps_dep
+# @DESCRIPTION:
+# Create proper dependency for kde-apps/ dependencies.
+# This takes 1 to 3 arguments. The first being the package name, the optional
+# second is additional USE flags to append, and the optional third is the
+# version to use instead of the automatic version (use sparingly).
+# The output of this should be added directly to DEPEND/RDEPEND, and may be
+# wrapped in a USE conditional (but not an || conditional without an extra set
+# of parentheses).
+add_kdeapps_dep() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local ver
+
+	# Version precedence: explicit 3rd arg > KDE_OVERRIDE_MINIMAL >
+	# KDE_MINIMAL (for non-kde-base packages) > raw KDE version for
+	# stable-live > ${PV}.
+	if [[ -n ${3} ]]; then
+		ver=${3}
+	elif [[ -n ${KDE_OVERRIDE_MINIMAL} ]]; then
+		ver=${KDE_OVERRIDE_MINIMAL}
+	elif [[ ${KDEBASE} != kde-base ]]; then
+		ver=${KDE_MINIMAL}
+	# if building stable-live version depend just on the raw KDE version
+	# to allow merging packages against more stable basic stuff
+	elif [[ ${PV} == *.9999 ]]; then
+		ver=$(get_kde_version)
+	else
+		ver=${PV}
+	fi
+
+	[[ -z ${1} ]] && die "Missing parameter"
+
+	#FIXME
+	# Drop aqua= from kf5 packages
+	# Emits a dependency atom on stdout, meant to be appended to (R)DEPEND.
+	echo " >=kde-apps/${1}-${ver}:4[aqua=${2:+,${2}}]"
+}
+
+# @FUNCTION: add_kdebase_dep
+# @DESCRIPTION:
+# Create proper dependency for kde-base/ dependencies.
+# This takes 1 to 3 arguments. The first being the package name, the optional
+# second is additional USE flags to append, and the optional third is the
+# version to use instead of the automatic version (use sparingly).
+# The output of this should be added directly to DEPEND/RDEPEND, and may be
+# wrapped in a USE conditional (but not an || conditional without an extra set
+# of parentheses).
+add_kdebase_dep() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local ver
+
+	# Version precedence mirrors add_kdeapps_dep, with a fixed 4.14.3
+	# fallback for kde-apps packages and live (*9999) versions.
+	if [[ -n ${3} ]]; then
+		ver=${3}
+	elif [[ -n ${KDE_OVERRIDE_MINIMAL} ]]; then
+		ver=${KDE_OVERRIDE_MINIMAL}
+	elif [[ -n ${KDE_MINIMAL} ]]; then
+		ver=${KDE_MINIMAL}
+	# if building live version depend on the final release since there will
+	# not be any more major development. this solves dep errors as not all
+	# packages have kde-base live versions now
+
+	# depend on the last sane released version where the normal >=${PV} dep
+	# is not possible
+	elif [[ ${CATEGORY} == kde-apps || ${PV} == *9999 ]]; then
+		ver=4.14.3
+	else
+		ver=${PV}
+	fi
+
+	[[ -z ${1} ]] && die "Missing parameter"
+
+	# Emits a dependency atom on stdout, meant to be appended to (R)DEPEND.
+	echo " >=kde-base/${1}-${ver}:4[aqua=${2:+,${2}}]"
+}
+
+# local function to enable specified translations for specified directory
+# used from kde4-functions_enable_selected_linguas function
+_enable_selected_linguas_dir() {
+	local lingua linguas sr_mess wp
+	local dir=${1}
+
+	[[ -d ${dir} ]] || die "linguas dir \"${dir}\" does not exist"
+	comment_all_add_subdirectory "${dir}"
+	pushd "${dir}" > /dev/null
+
+	# fix all various crazy sr@Latn variations
+	# This part is only a convenience for upstream tarballs, so nothing dies
+	# if any step of it fails.
+	sr_mess="sr@latn sr@latin sr@Latin"
+	for wp in ${sr_mess}; do
+		[[ -e ${wp}.po ]] && mv "${wp}.po" "sr@Latn.po"
+		if [[ -d ${wp} ]]; then
+			# move dir and fix cmakelists
+			mv "${wp}" "sr@Latn"
+			sed -i \
+				-e "s:${wp}:sr@Latn:g" \
+				CMakeLists.txt
+		fi
+	done
+
+	# Park every known translation catalog aside ...
+	for lingua in ${KDE_LINGUAS}; do
+		if [[ -e ${lingua}.po ]]; then
+			mv "${lingua}.po" "${lingua}.po.old"
+		fi
+	done
+
+	# ... then restore only the catalogs/subdirs the user selected via LINGUAS,
+	# uncommenting the matching add_subdirectory lines again.
+	for lingua in ${KDE_LINGUAS}; do
+		if use linguas_${lingua} ; then
+			if [[ -d ${lingua} ]]; then
+				linguas="${linguas} ${lingua}"
+				sed -e "/add_subdirectory([[:space:]]*${lingua}[[:space:]]*)[[:space:]]*$/ s/^#DONOTCOMPILE //" \
+					-e "/ADD_SUBDIRECTORY([[:space:]]*${lingua}[[:space:]]*)[[:space:]]*$/ s/^#DONOTCOMPILE //" \
+					-i CMakeLists.txt || die "Sed to uncomment linguas_${lingua} failed."
+			fi
+			if [[ -e ${lingua}.po.old ]]; then
+				linguas="${linguas} ${lingua}"
+				mv "${lingua}.po.old" "${lingua}.po"
+			fi
+		fi
+	done
+	[[ -n ${linguas} ]] && echo ">>> Enabling languages: ${linguas}"
+
+	popd > /dev/null
+}
+
+# @FUNCTION: get_kde_version
+# @DESCRIPTION:
+# Translates an ebuild version into a major.minor KDE SC
+# release version. If no version is specified, ${PV} is used.
+get_kde_version() {
+	local ver=${1:-${PV}}
+	local major=$(get_major_version ${ver})
+	local minor=$(get_version_component_range 2 ${ver})
+	local micro=$(get_version_component_range 3 ${ver})
+	# A bare "9999" maps to "live"; otherwise a micro component >= 50 is a
+	# pre-release of the NEXT minor version, so round the minor up.
+	if [[ ${ver} == 9999 ]]; then
+		echo live
+	else
+		(( micro < 50 )) && echo ${major}.${minor} || echo ${major}.$((minor + 1))
+	fi
+}
+
+fi
diff --git a/eclass/kde4-meta-pkg.eclass b/eclass/kde4-meta-pkg.eclass
new file mode 100644
index 000000000000..7b735c905e9e
--- /dev/null
+++ b/eclass/kde4-meta-pkg.eclass
@@ -0,0 +1,24 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: kde4-meta-pkg.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: This eclass contains boilerplate for kde 4.X meta packages
+# @DESCRIPTION:
+# This eclass should only be used for defining meta packages for KDE4.
+
+if [[ -z ${_KDE4_META_PKG_ECLASS} ]]; then
+_KDE4_META_PKG_ECLASS=1
+
+inherit kde4-functions
+
+HOMEPAGE="http://www.kde.org/"
+
+LICENSE="metapackage"
+IUSE="aqua"
+
+SLOT=4
+
+fi
diff --git a/eclass/kde4-meta.eclass b/eclass/kde4-meta.eclass
new file mode 100644
index 000000000000..d3aae4e5ae36
--- /dev/null
+++ b/eclass/kde4-meta.eclass
@@ -0,0 +1,630 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: kde4-meta.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: Eclass for writing "split" KDE packages.
+# @DESCRIPTION:
+# This eclass provides all necessary functions for writing split KDE ebuilds.
+#
+# You must define KMNAME to use this eclass, and do so before inheriting it. All other variables are optional.
+# Do not include the same item in more than one of KMMODULE, KMMEXTRA, KMCOMPILEONLY, KMEXTRACTONLY.
+
+if [[ -z ${_KDE4_META_ECLASS} ]]; then
+_KDE4_META_ECLASS=1
+
+[[ -z ${KMNAME} ]] && die "kde4-meta.eclass inherited but KMNAME not defined - broken ebuild"
+
+inherit kde4-base
+
+KDEMETA_EXPF="pkg_setup src_unpack src_prepare src_configure src_compile src_test src_install pkg_preinst pkg_postinst pkg_postrm"
+EXPORT_FUNCTIONS ${KDEMETA_EXPF}
+
+# Add dependencies that all packages in a certain module share.
+case ${KMNAME} in
+ kdepim|kdepim-runtime)
+ case ${PN} in
+ akregator|kaddressbook|kjots|kmail|knode|knotes|korganizer|ktimetracker)
+ IUSE+=" +kontact"
+ RDEPEND+=" kontact? ( $(add_kdebase_dep kontact) )"
+ ;;
+ esac
+ ;;
+esac
+
+DEPEND+=" ${COMMONDEPEND}"
+RDEPEND+=" ${COMMONDEPEND}"
+unset COMMONDEPEND
+
+debug-print "line ${LINENO} ${ECLASS}: DEPEND ${DEPEND} - after metapackage-specific dependencies"
+debug-print "line ${LINENO} ${ECLASS}: RDEPEND ${RDEPEND} - after metapackage-specific dependencies"
+
+# Useful to build kde4-meta style stuff from extragear/playground (plasmoids etc)
+case ${KDE_BUILD_TYPE} in
+ live)
+ if [[ ${KDE_SCM} == svn ]]; then
+ case ${KMNAME} in
+ extragear*|playground*)
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${KMNAME}"
+ ESVN_PROJECT="${KMNAME}${ESVN_PROJECT_SUFFIX}"
+ ;;
+ esac
+ fi
+ ;;
+esac
+
+# @ECLASS-VARIABLE: KMNAME
+# @DESCRIPTION:
+# Name of the parent-module (e.g. kdebase, kdepim, ...). You _must_ set it
+# _before_ inheriting this eclass, (unlike the other parameters), since it's
+# used to set $SRC_URI.
+
+# @ECLASS-VARIABLE: KMMODULE
+# @DESCRIPTION:
+# Specify exactly one subdirectory of $KMNAME here. Defaults to $PN.
+# The subdirectory listed here is treated exactly like items in $KMEXTRA.
+#
+# Example: The ebuild name of "kdebase/l10n" is kde-base/kdebase-l10n, because
+# just 'l10n' would be too confusing. Hence it sets KMMODULE="l10n".
+
+# @ECLASS-VARIABLE: KMNOMODULE
+# @DESCRIPTION:
+# If set to "true", $KMMODULE doesn't have to be defined.
+#
+# Example usage: If you're installing subdirectories of a package, like plugins,
+# you mark the top subdirectory (containing the package) as $KMEXTRACTONLY, and
+# set KMNOMODULE="true".
+if [[ -z ${KMMODULE} ]] && [[ ${KMNOMODULE} != true ]]; then
+ KMMODULE=${PN}
+fi
+
+# @ECLASS-VARIABLE: KMEXTRA
+# @DESCRIPTION:
+# All subdirectories listed here will be extracted, compiled & installed.
+# $KMMODULE is always added to $KMEXTRA.
+# If KDE_HANDBOOK is 'always' or 'optional' and handbook USE-flag is set, and if this
+# directory exists, then "doc/$KMMODULE" is added to $KMEXTRA. If there's additional
+# documentation in different subdirectories, it should be added to KMEXTRA manually.
+
+# @ECLASS-VARIABLE: KMCOMPILEONLY
+# @DESCRIPTION:
+# All subdirectories listed here will be extracted & compiled, but not installed.
+
+# TODO: better formulation may be needed
+# @ECLASS-VARIABLE: KMEXTRACTONLY
+# @DESCRIPTION:
+# All subdirectories listed here will be extracted, but neither compiled nor installed.
+# This can be used to avoid compilation in a subdirectory of a directory in $KMMODULE or $KMEXTRA
+
+# @ECLASS-VARIABLE: KMTARPARAMS
+# @DESCRIPTION:
+# Specify extra parameters to pass to tar, in kde4-meta_src_extract.
+# '-xpf -j' are passed to tar by default.
+
+# @FUNCTION: kde4-meta_pkg_setup
+# @DESCRIPTION:
+# Currently calls its equivalent in kde4-base.eclass(5) and checks the gcc version.
+# Use this one in split ebuilds.
+kde4-meta_pkg_setup() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: all setup work happens in kde4-base.eclass.
+	kde4-base_pkg_setup
+}
+
+# @FUNCTION: kde4-meta_src_unpack
+# @DESCRIPTION:
+# This function unpacks the source for split ebuilds.
+# Further processing is done in kde4-meta_src_extract.
+kde4-meta_src_unpack() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# For live ebuilds fetch the working copy first; the actual selective
+	# extraction into ${S} is done by kde4-meta_src_extract in both cases.
+	if [[ ${KDE_BUILD_TYPE} = live ]]; then
+		case "${KDE_SCM}" in
+			svn)
+				S="${WORKDIR}/${P}"
+				mkdir -p "${S}"
+				# Restrict the svn module to checkout/update only.
+				ESVN_RESTRICT="export" subversion_src_unpack
+				subversion_wc_info
+				subversion_bootstrap
+				;;
+			git)
+				git-r3_src_unpack
+				;;
+		esac
+	fi
+	kde4-meta_src_extract
+}
+
+# @FUNCTION: kde4-meta_src_extract
+# @DESCRIPTION:
+# A function to extract the source for a split KDE ebuild.
+# Also see KMMODULE, KMNOMODULE, KMEXTRA, KMCOMPILEONLY, KMEXTRACTONLY and
+# KMTARPARAMS.
+kde4-meta_src_extract() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	if [[ ${KDE_BUILD_TYPE} = live ]]; then
+		# Export working copy to ${S}
+		einfo "Exporting parts of working copy to ${S}"
+		kde4-meta_create_extractlists
+
+		case ${KDE_SCM} in
+			svn)
+				local rsync_options subdir targetdir wc_path escm
+
+				rsync_options="--group --links --owner --perms --quiet --exclude=.svn/ --exclude=.git/"
+				wc_path="${ESVN_WC_PATH}"
+				# NOTE(review): "{ESVN}" is a literal string (no leading $) used
+				# purely as a label in the die messages below — confirm whether
+				# "ESVN" was intended.
+				escm="{ESVN}"
+
+				# Copy ${KMNAME} non-recursively (toplevel files)
+				rsync ${rsync_options} "${wc_path}"/* "${S}" \
+					|| die "${escm}: can't export toplevel files to '${S}'."
+				# Copy cmake directory
+				if [[ -d "${wc_path}/cmake" ]]; then
+					rsync --recursive ${rsync_options} "${wc_path}/cmake" "${S}" \
+						|| die "${escm}: can't export cmake files to '${S}'."
+				fi
+				# Copy all subdirectories
+				for subdir in $(_list_needed_subdirectories); do
+					targetdir=""
+					# Missing doc/ subdirs are non-fatal (KMEXTRA_NONFATAL).
+					if [[ $subdir = doc/* && ! -e "$wc_path/$subdir" ]]; then
+						continue
+					fi
+
+					[[ ${subdir%/} = */* ]] && targetdir=${subdir%/} && targetdir=${targetdir%/*} && mkdir -p "${S}/${targetdir}"
+					rsync --recursive ${rsync_options} "${wc_path}/${subdir%/}" "${S}/${targetdir}" \
+						|| die "${escm}: can't export subdirectory '${subdir}' to '${S}/${targetdir}'."
+				done
+				;;
+		esac
+	else
+		local abort tarball tarfile f extractlist postfix
+
+		# Pick tarball compression by version.
+		# NOTE(review): '=~ 4.4.11' is a regex match with unescaped dots, so it
+		# also matches e.g. "4.4911" — confirm a plain '==' was not intended.
+		if [[ ${PV} =~ 4.4.11 ]]; then
+			postfix="bz2"
+			KMTARPARAMS+=" --bzip2"
+		else
+			postfix="xz"
+			KMTARPARAMS+=" --xz"
+		fi
+
+		tarball="${KMNAME}-${PV}.tar.${postfix}"
+
+		# Full path to source tarball
+		tarfile="${DISTDIR}/${tarball}"
+
+		# Detect real toplevel dir from tarball name - it will be used upon extraction
+		# and in _list_needed_subdirectories
+		# NOTE(review): topdir is deliberately NOT local — it is read by
+		# _list_needed_subdirectories and unset at the end of this function.
+		topdir="${tarball%.tar.*}/"
+
+		ebegin "Unpacking parts of ${tarball} to ${WORKDIR}"
+
+		kde4-meta_create_extractlists
+
+		# Always extract the shared CMake scaffolding plus whatever the
+		# KM* variables request.
+		for f in cmake/ CMakeLists.txt ConfigureChecks.cmake config.h.cmake
+		do
+			extractlist+=" ${topdir}${f}"
+		done
+		extractlist+=" $(_list_needed_subdirectories)"
+
+		pushd "${WORKDIR}" > /dev/null
+
+		# @ECLASS-VARIABLE: KDE4_STRICTER
+		# @DESCRIPTION:
+		# Print out all issues found executing tar / kmextract files
+		# Set on if you want to find issues in kde-base ebuild unpack sequences
+		[[ -n ${KDE4_STRICTER} ]] && echo 'tar -xpf "${tarfile}" ${KMTARPARAMS} ${extractlist}'
+		# Partial failures are expected (optional entries may be absent).
+		tar -xpf "${tarfile}" ${KMTARPARAMS} ${extractlist} 2> /dev/null || echo "tar extract command failed at least partially - continuing anyway"
+
+		# Default $S is based on $P; rename the extracted directory to match $S if necessary
+		if [[ ${KMNAME} != ${PN} ]]; then
+			mv ${topdir} ${P} || die "Died while moving \"${topdir}\" to \"${P}\""
+		fi
+
+		popd > /dev/null
+
+		eend $?
+
+		# Under KDE4_STRICTER, verify that every fatal entry really landed in ${S}.
+		if [[ -n ${KDE4_STRICTER} ]]; then
+			for f in $(_list_needed_subdirectories fatal); do
+				if [[ ! -e ${S}/${f#*/} ]]; then
+					eerror "'${f#*/}' is missing"
+					abort=true
+				fi
+			done
+			[[ -n ${abort} ]] && die "There were missing files."
+		fi
+
+		# We don't need it anymore
+		unset topdir
+	fi
+}
+
+# @FUNCTION: kde4-meta_create_extractlists
+# @DESCRIPTION:
+# Create lists of files and subdirectories to extract.
+# Also see descriptions of KMMODULE, KMNOMODULE, KMEXTRA, KMCOMPILEONLY,
+# KMEXTRACTONLY and KMTARPARAMS.
+kde4-meta_create_extractlists() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Add default handbook locations
+	# FIXME - legacy code - remove when 4.4.5 is gone or preferably port 4.4.5.
+	# NOTE(review): '<' inside [[ ]] is a lexicographic string comparison, not
+	# a version comparison; it happens to work for "4.x" version strings here.
+	if [[ $(get_kde_version) < 4.5 ]] && use_if_iuse handbook && [[ -z ${KMNOMODULE} ]]; then
+		# We use the basename of $KMMODULE because $KMMODULE can contain
+		# the path to the module subdirectory.
+		KMEXTRA_NONFATAL+="
+			doc/${KMMODULE##*/}"
+	fi
+
+	# Add default handbook locations
+	if [[ -z ${KMNOMODULE} ]] && ( [[ ${KDE_HANDBOOK} == always ]] || ( [[ ${KDE_HANDBOOK} == optional ]] && use handbook ) ); then
+		KMEXTRA_NONFATAL+=" doc/${KMMODULE##*/}"
+	fi
+
+	# Add some CMake-files to KMEXTRACTONLY.
+	# Note that this actually doesn't include KMEXTRA handling.
+	# In those cases you should care to add the relevant files to KMEXTRACTONLY
+	case ${KMNAME} in
+		kde-baseapps)
+			KMEXTRACTONLY+="
+				CTestConfig.cmake
+				config-apps.h.cmake
+				ConfigureChecks.cmake"
+			;;
+		kde-runtime)
+			KMEXTRACTONLY+="
+				cmake/modules/
+				CTestConfig.cmake
+				config-runtime.h.cmake"
+			;;
+		kde-workspace)
+			KMEXTRACTONLY+="
+				cmake/modules/
+				config-unix.h.cmake
+				ConfigureChecks.cmake
+				config-workspace.h.cmake
+				config-X11.h.cmake
+				startkde.cmake
+				KDE4WorkspaceConfig.cmake.in"
+			;;
+		kdepim)
+			# Everything in kdepim needs libkdepim's sources around, except
+			# libkdepim itself (which already ships them as its module).
+			if [[ ${PN} != libkdepim ]]; then
+				KMEXTRACTONLY+="
+					libkdepim/"
+			fi
+			KMEXTRACTONLY+="
+				config-enterprise.h.cmake
+				kleopatra/ConfigureChecks.cmake
+				CTestCustom.cmake
+				kdepim-version.h.cmake
+				kdepim-version.h"
+			if use_if_iuse kontact; then
+				KMEXTRA+="
+					kontact/plugins/${PLUGINNAME:-${PN}}/"
+			fi
+			;;
+	esac
+
+	debug-print "line ${LINENO} ${ECLASS} ${FUNCNAME}: KMEXTRACTONLY ${KMEXTRACTONLY}"
+}
+
+# Print the full list of tarball/WC entries to extract, one per word.
+# With argument "fatal", KMEXTRA_NONFATAL entries are omitted.
+# Relies on the caller-scoped ${topdir} variable (set in kde4-meta_src_extract)
+# to prefix entries when extracting from a tarball.
+_list_needed_subdirectories() {
+	local i j kmextra kmextra_expanded kmmodule_expanded kmcompileonly_expanded extractlist
+
+	# We expand KMEXTRA by adding CMakeLists.txt files
+	# (each entry also needs the CMakeLists.txt of every ancestor directory).
+	kmextra="${KMEXTRA}"
+	[[ ${1} != fatal ]] && kmextra+=" ${KMEXTRA_NONFATAL}"
+	for i in ${kmextra}; do
+		kmextra_expanded+=" ${i}"
+		j=$(dirname ${i})
+		while [[ ${j} != "." ]]; do
+			kmextra_expanded+=" ${j}/CMakeLists.txt";
+			j=$(dirname ${j})
+		done
+	done
+
+	# Expand KMMODULE
+	if [[ -n ${KMMODULE} ]]; then
+		kmmodule_expanded="${KMMODULE}"
+		j=$(dirname ${KMMODULE})
+		while [[ ${j} != "." ]]; do
+			kmmodule_expanded+=" ${j}/CMakeLists.txt";
+			j=$(dirname ${j})
+		done
+	fi
+
+	# Expand KMCOMPILEONLY
+	for i in ${KMCOMPILEONLY}; do
+		kmcompileonly_expanded+=" ${i}"
+		j=$(dirname ${i})
+		while [[ ${j} != "." ]]; do
+			kmcompileonly_expanded+=" ${j}/CMakeLists.txt";
+			j=$(dirname ${j})
+		done
+	done
+
+	debug-print "line ${LINENO} ${ECLASS} ${FUNCNAME} - kmextra_expanded: ${kmextra_expanded}"
+	debug-print "line ${LINENO} ${ECLASS} ${FUNCNAME} - kmmodule_expanded: ${kmmodule_expanded}"
+	debug-print "line ${LINENO} ${ECLASS} ${FUNCNAME} - kmcompileonly_expanded: ${kmcompileonly_expanded}"
+
+	# Create final list of stuff to extract
+	# We append topdir only when specified (usually for tarballs)
+	for i in ${kmmodule_expanded} ${kmextra_expanded} ${kmcompileonly_expanded} \
+			${KMEXTRACTONLY}
+	do
+		extractlist+=" ${topdir}${i}"
+	done
+
+	echo ${extractlist}
+}
+
+# @FUNCTION: kde4-meta_src_prepare
+# @DESCRIPTION:
+# Meta-package build system configuration handling - commenting out targets, etc..
+kde4-meta_src_prepare() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Adjust the CMake tree for the split build first, then run the normal
+	# kde4-base preparation on the result.
+	kde4-meta_change_cmakelists
+	kde4-base_src_prepare
+}
+
+# @FUNCTION: _change_cmakelists_parent_dirs
+# @DESCRIPTION:
+# Adjust CMakeLists.txt to shadow subdirectories
+# that are not required for the build.
+_change_cmakelists_parent_dirs() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Walk from ${S}/${1} up to ${S}, re-enabling (un-commenting) the
+	# add_subdirectory entry for each path component in its parent's
+	# CMakeLists.txt so CMake descends into the kept subtree.
+	local _olddir _dir
+	_dir="${S}"/${1}
+	until [[ ${_dir} == ${S} ]]; do
+		_olddir=$(basename "${_dir}")
+		_dir=$(dirname "${_dir}")
+		debug-print "${LINENO}: processing ${_dir} CMakeLists.txt searching for ${_olddir}"
+		if [[ -f ${_dir}/CMakeLists.txt ]]; then
+			sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${_olddir}[[:space:]]*)/s/#DONOTCOMPILE //g" \
+				-e "/ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*${_olddir}[[:space:]]*)/s/#DONOTCOMPILE //g" \
+				-i ${_dir}/CMakeLists.txt || die "${LINENO}: died in ${FUNCNAME} while processing ${_dir}"
+		fi
+	done
+}
+
+# @FUNCTION: kde4-meta_change_cmakelists
+# @DESCRIPTION:
+# Adjust CMakeLists.txt to comply to our splitting.
+kde4-meta_change_cmakelists() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	pushd "${S}" > /dev/null
+
+	# Disable everything first, then selectively re-enable the parts this
+	# split package actually builds (KMMODULE/KMEXTRA/KMCOMPILEONLY).
+	comment_all_add_subdirectory ./
+
+	# Restore "add_subdirectory( cmake )" in ${S}/CMakeLists.txt
+	if [[ -f CMakeLists.txt ]]; then
+		sed -e '/add_subdirectory[[:space:]]*([[:space:]]*cmake[[:space:]]*)/s/^#DONOTCOMPILE //' \
+			-e '/ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*cmake[[:space:]]*)/s/^#DONOTCOMPILE //' \
+			-i CMakeLists.txt || die "${LINENO}: cmake sed died"
+	fi
+
+	# Restore "add_subdirectory( ${ ..." (this is done in kdesdk)
+	if [[ -f CMakeLists.txt ]]; then
+		sed -e '/add_subdirectory[[:space:]]*([[:space:]]*\${/s/^#DONOTCOMPILE //' \
+			-e '/ADD_SUBDIRECTORY[[:space:]]*([[:space:]]*\${/s/^#DONOTCOMPILE //' \
+			-i CMakeLists.txt || die "${LINENO}: cmake sed died"
+	fi
+
+	if [[ -z ${KMNOMODULE} ]]; then
+		# Restore "add_subdirectory" in $KMMODULE subdirectories
+		find "${S}"/${KMMODULE} -name CMakeLists.txt -print0 | \
+			xargs -0 sed -i -e 's/^#DONOTCOMPILE //g' || \
+			die "${LINENO}: died in KMMODULE section"
+		_change_cmakelists_parent_dirs ${KMMODULE}
+	fi
+
+	local i
+
+	# KMEXTRACTONLY section - Some ebuilds need to comment out some subdirs in KMMODULE and they use KMEXTRACTONLY
+	for i in ${KMEXTRACTONLY}; do
+		if [[ -d ${i} && -f ${i}/../CMakeLists.txt ]]; then
+			sed -e "/([[:space:]]*$(basename $i)[[:space:]]*)/s/^/#DONOTCOMPILE /" \
+				-i ${i}/../CMakeLists.txt || \
+				die "${LINENO}: sed died while working in the KMEXTRACTONLY section while processing ${i}"
+		fi
+	done
+
+	# KMCOMPILEONLY
+	for i in ${KMCOMPILEONLY}; do
+		debug-print "${LINENO}: KMCOMPILEONLY, processing ${i}"
+		# Uncomment "add_subdirectory" instructions inside $KMCOMPILEONLY, then comment "install" instructions.
+		find "${S}"/${i} -name CMakeLists.txt -print0 | \
+			xargs -0 sed -i \
+				-e 's/^#DONOTCOMPILE //g' \
+				-e '/install(.*)/I{s/^/#DONOTINSTALL /;}' \
+				-e '/^install(/,/)/I{s/^/#DONOTINSTALL /;}' \
+				-e '/kde4_install_icons(.*)/I{s/^/#DONOTINSTALL /;}' || \
+			die "${LINENO}: sed died in the KMCOMPILEONLY section while processing ${i}"
+		_change_cmakelists_parent_dirs ${i}
+	done
+
+	# KMEXTRA section
+	for i in ${KMEXTRA}; do
+		debug-print "${LINENO}: KMEXTRA section, processing ${i}"
+		find "${S}"/${i} -name CMakeLists.txt -print0 | \
+			xargs -0 sed -i -e 's/^#DONOTCOMPILE //g' || \
+			die "${LINENO}: sed died uncommenting add_subdirectory instructions in KMEXTRA section while processing ${i}"
+		_change_cmakelists_parent_dirs ${i}
+	done
+	# KMEXTRA_NONFATAL section - same as KMEXTRA, but a missing dir is ok.
+	for i in ${KMEXTRA_NONFATAL}; do
+		if [[ -d "${S}"/${i} ]]; then
+			find "${S}"/${i} -name CMakeLists.txt -print0 | \
+				xargs -0 sed -i -e 's/^#DONOTCOMPILE //g' || \
+				die "${LINENO}: sed died uncommenting add_subdirectory instructions in KMEXTRA section while processing ${i}"
+			_change_cmakelists_parent_dirs ${i}
+		fi
+	done
+
+	# Per-module tweaks: collision protection and dependency relaxation.
+	case ${KMNAME} in
+		kde-workspace)
+			# COLLISION PROTECT section
+			# Install the startkde script just once, as a part of kde-base/kdebase-startkde,
+			# not as a part of every package.
+			if [[ ${PN} != kdebase-startkde && -f CMakeLists.txt ]]; then
+				# The startkde script moved to kdebase-workspace for KDE4 versions > 3.93.0.
+				sed -e '/startkde/s/^/#DONOTINSTALL /' \
+					-i CMakeLists.txt || die "${LINENO}: sed died in the kdebase-startkde collision prevention section"
+			fi
+			# Remove workspace target prefix in order to get direct linking to workspace libs
+			sed -e '/set(KDE4WORKSPACE_TARGET_PREFIX/s/^/#OVERRIDE /' \
+				-i CMakeLists.txt || die "${LINENO}: sed died in KDE4WORKSPACE_TARGET_PREFIX removal section"
+			# Strip EXPORT feature section from workspace for KDE4 versions > 4.1.82
+			if [[ ${PN} != libkworkspace ]]; then
+				sed -e '/install(FILES ${CMAKE_CURRENT_BINARY_DIR}\/KDE4WorkspaceConfig.cmake/,/^[[:space:]]*FILE KDE4WorkspaceLibraryTargets.cmake )[[:space:]]*^/d' \
+					-i CMakeLists.txt || die "${LINENO}: sed died in kde-workspace strip config install and fix EXPORT section"
+			fi
+			# <KDE/4.11
+			if [[ ${PN} != plasma-workspace ]]; then
+				sed -e '/KActivities/s/REQUIRED//' \
+					-i CMakeLists.txt || die "${LINENO}: sed died in kde-workspace dep reduction section"
+			fi
+			sed -e '/QImageBlitz/s/REQUIRED//' \
+				-i CMakeLists.txt || die "${LINENO}: sed died in kde-workspace dep reduction section 2"
+
+			# >=KDE/4.11
+			sed -e 's/TYPE REQUIRED/TYPE OPTIONAL/' -e 's/XCB REQUIRED/XCB/' -e 's/X11 REQUIRED/X11/' \
+				-e 's/message(FATAL_ERROR/message(/' -i CMakeLists.txt \
+				|| die "${LINENO}: sed died in kde-workspace dep reduction section"
+			if [[ "${PN}" != "kwin" ]]; then
+				sed -i -e "/^    macro_log_feature(OPENGL_OR_ES_FOUND/s/TRUE/FALSE/" \
+					"${S}"/CMakeLists.txt || die "${LINENO}: sed died removing kde-workspace opengl dependency"
+			fi
+			;;
+		kde-runtime)
+			sed -e 's/TYPE REQUIRED/TYPE OPTIONAL/' -e '/LibGcrypt/s/REQUIRED//' -i CMakeLists.txt \
+				|| die "${LINENO}: sed died in kde-runtime dep reduction section"
+
+			# COLLISION PROTECT section
+			# Only install the kde4 script as part of kde-base/kdebase-data
+			if [[ ${PN} != kdebase-data && -f CMakeLists.txt ]]; then
+				sed -e '/^install(PROGRAMS[[:space:]]*[^[:space:]]*\/kde4[[:space:]]/s/^/#DONOTINSTALL /' \
+					-i CMakeLists.txt || die "Sed to exclude bin/kde4 failed"
+			fi
+			;;
+		kdenetwork)
+			# Disable hardcoded kdepimlibs check
+			sed -e 's/find_package(KdepimLibs REQUIRED)/macro_optional_find_package(KdepimLibs)/' \
+				-i CMakeLists.txt || die "failed to disable hardcoded checks"
+			;;
+		kdepim)
+			# Disable hardcoded checks
+			sed -r -e 's/TYPE REQUIRED/TYPE OPTIONAL/' -e '/find_package\(KdepimLibs/s/REQUIRED//' \
+				-e '/find_package\((KdepimLibs|Boost|QGpgme|Akonadi|ZLIB|Strigi|SharedDesktopOntologies|Soprano|Nepomuk)/{/macro_optional_/!s/find/macro_optional_&/}' \
+				-e '/macro_log_feature\((Boost|QGPGME|Akonadi|ZLIB|STRIGI|SHAREDDESKTOPONTOLOGIES|Soprano|Nepomuk)_FOUND/s/ TRUE / FALSE /' \
+				-e 's/if[[:space:]]*([[:space:]]*BUILD_.*)[[:space:]]*/if(1) # &/' \
+				-e 's/if[[:space:]]*([[:space:]]*[[:alnum:]]*_FOUND[[:space:]]*)[[:space:]]*$/if(1) # &/' \
+				-i CMakeLists.txt || die "failed to disable hardcoded checks"
+			# Disable broken or redundant build logic
+			if use_if_iuse kontact || [[ ${PN} = kontact ]]; then
+				sed -e 's/if[[:space:]]*([[:space:]]*BUILD_.*)[[:space:]]*$/if(1) # &/' \
+					-e 's/if[[:space:]]*([[:space:]]*[[:alnum:]]*_FOUND[[:space:]]*)[[:space:]]*$/if(1) # &/' \
+					-i kontact/plugins/CMakeLists.txt || die 'failed to override build logic'
+			fi
+			case ${PV} in
+				4.4*)
+					case ${PN} in
+						kalarm|kmailcvt|kontact|korganizer|korn)
+							# NOTE(review): 'sed -n ... p' combined with -i rewrites
+							# kmail/CMakeLists.txt keeping only the matched lines —
+							# confirm this aggressive filtering is intended.
+							sed -n -e '/qt4_generate_dbus_interface(.*org\.kde\.kmail\.\(kmail\|mailcomposer\)\.xml/p' \
+								-e '/add_custom_target(kmail_xml /,/)/p' \
+								-i kmail/CMakeLists.txt || die "uncommenting xml failed"
+							_change_cmakelists_parent_dirs kmail
+							;;
+					esac
+					;;
+			esac
+			;;
+	esac
+
+	popd > /dev/null
+}
+
+# @FUNCTION: kde4-meta_src_configure
+# @DESCRIPTION:
+# Currently just calls its equivalent in kde4-base.eclass(5). Use this one in split
+# ebuilds.
+kde4-meta_src_configure() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: delegates to the kde4-base implementation.
+	kde4-base_src_configure
+}
+
+# @FUNCTION: kde4-meta_src_compile
+# @DESCRIPTION:
+# General function for compiling split KDE4 applications.
+# Overrides kde4-base_src_compile.
+kde4-meta_src_compile() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: delegates to the kde4-base implementation, forwarding args.
+	kde4-base_src_compile "$@"
+}
+
+# @FUNCTION: kde4-meta_src_test
+# @DESCRIPTION:
+# Currently just calls its equivalent in kde4-base.eclass(5) if
+# I_KNOW_WHAT_I_AM_DOING is set. Use this in split ebuilds.
+kde4-meta_src_test() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Tests are opt-in for split packages: run them only when the user has
+	# explicitly set I_KNOW_WHAT_I_AM_DOING.
+	if [[ $I_KNOW_WHAT_I_AM_DOING ]]; then
+		kde4-base_src_test
+	else
+		einfo "Tests disabled"
+	fi
+}
+
+# @FUNCTION: kde4-meta_src_install
+# @DESCRIPTION:
+# Function for installing KDE4 split applications.
+kde4-meta_src_install() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Search ${S}/${KMMODULE} and install common documentation files found
+	# (only regular, non-empty files are installed).
+	local doc
+	for doc in "${S}/${KMMODULE}"/{AUTHORS,CHANGELOG,ChangeLog*,README*,NEWS,TODO,HACKING}; do
+		[[ -f "${doc}" ]] && [[ -s "${doc}" ]] && dodoc "${doc}"
+	done
+
+	kde4-base_src_install
+}
+
+# @FUNCTION: kde4-meta_pkg_preinst
+# @DESCRIPTION:
+# Invoke its equivalent in kde4-base.eclass.
+kde4-meta_pkg_preinst() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: delegates to the kde4-base implementation.
+	kde4-base_pkg_preinst
+}
+
+# @FUNCTION: kde4-meta_pkg_postinst
+# @DESCRIPTION:
+# Invoke kbuildsycoca4.
+kde4-meta_pkg_postinst() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: delegates to the kde4-base implementation.
+	kde4-base_pkg_postinst
+}
+
+# @FUNCTION: kde4-meta_pkg_postrm
+# @DESCRIPTION:
+# Currently just calls its equivalent in kde4-base.eclass(5). Use this in split
+# ebuilds.
+kde4-meta_pkg_postrm() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Thin wrapper: delegates to the kde4-base implementation.
+	kde4-base_pkg_postrm
+}
+
+fi
diff --git a/eclass/kde5-functions.eclass b/eclass/kde5-functions.eclass
new file mode 100644
index 000000000000..6f6f037b4625
--- /dev/null
+++ b/eclass/kde5-functions.eclass
@@ -0,0 +1,239 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: kde5-functions.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: Common ebuild functions for KDE 5 packages
+# @DESCRIPTION:
+# This eclass contains all functions shared by the different eclasses,
+# for KDE 5 ebuilds.
+
+if [[ -z ${_KDE5_FUNCTIONS_ECLASS} ]]; then
+_KDE5_FUNCTIONS_ECLASS=1
+
+inherit toolchain-funcs versionator
+
+# @ECLASS-VARIABLE: EAPI
+# @DESCRIPTION:
+# Currently EAPI 5 is supported.
+case ${EAPI} in
+ 5) ;;
+ *) die "EAPI=${EAPI:-0} is not supported" ;;
+esac
+
+# @ECLASS-VARIABLE: FRAMEWORKS_MINIMAL
+# @DESCRIPTION:
+# Minimal Frameworks version to require for the package.
+: ${FRAMEWORKS_MINIMAL:=5.11.0}
+
+# @ECLASS-VARIABLE: PLASMA_MINIMAL
+# @DESCRIPTION:
+# Minimal Plasma version to require for the package.
+: ${PLASMA_MINIMAL:=5.3.1}
+
+# @ECLASS-VARIABLE: KDE_APPS_MINIMAL
+# @DESCRIPTION:
+# Minimal KDE Applications version to require for the package.
+: ${KDE_APPS_MINIMAL:=14.12.0}
+
+# @ECLASS-VARIABLE: KDEBASE
+# @DESCRIPTION:
+# This gets set to a non-zero value when a package is considered a kde or
+# kdevelop ebuild.
+if [[ ${CATEGORY} = kde-base ]]; then
+ KDEBASE=kde-base
+elif [[ ${CATEGORY} = kde-frameworks ]]; then
+ KDEBASE=kde-frameworks
+elif [[ ${KMNAME-${PN}} = kdevelop ]]; then
+ KDEBASE=kdevelop
+fi
+
+debug-print "${ECLASS}: ${KDEBASE} ebuild recognized"
+
+# @ECLASS-VARIABLE: KDE_SCM
+# @DESCRIPTION:
+# SCM to use if this is a live ebuild.
+: ${KDE_SCM:=git}
+
+case ${KDE_SCM} in
+ svn|git) ;;
+ *) die "KDE_SCM: ${KDE_SCM} is not supported" ;;
+esac
+
+# determine the build type
+if [[ ${PV} = *9999* ]]; then
+ KDE_BUILD_TYPE="live"
+else
+ KDE_BUILD_TYPE="release"
+fi
+export KDE_BUILD_TYPE
+
+# @FUNCTION: _check_gcc_version
+# @INTERNAL
+# @DESCRIPTION:
+# Determine if the current GCC version is acceptable, otherwise die.
+_check_gcc_version() {
+ if [[ ${MERGE_TYPE} != binary ]]; then
+ local version=$(gcc-version)
+ local major=${version%.*}
+ local minor=${version#*.}
+
+ [[ ${major} -lt 4 ]] || \
+ ( [[ ${major} -eq 4 && ${minor} -lt 8 ]] ) \
+ && die "Sorry, but gcc-4.8 or later is required for KDE 5."
+ fi
+}
+
+# @FUNCTION: _add_kdecategory_dep
+# @INTERNAL
+# @DESCRIPTION:
+# Implementation of add_plasma_dep and add_frameworks_dep.
+_add_kdecategory_dep() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local category=${1}
+ local package=${2}
+ local use=${3}
+ local version=${4}
+ local slot=
+
+ if [[ -n ${use} ]] ; then
+ local use="[${use}]"
+ fi
+
+ if [[ -n ${version} ]] ; then
+ local operator=">="
+ local version="-$(get_version_component_range 1-3 ${version})"
+ fi
+
+ if [[ ${SLOT} = 4 || ${SLOT} = 5 ]] && ! has kde5-meta-pkg ${INHERITED} ; then
+ slot=":${SLOT}"
+ fi
+
+ echo " ${operator}${category}/${package}${version}${slot}${use}"
+}
+
+# @FUNCTION: add_frameworks_dep
+# @USAGE: <package> [USE flags] [minimum version]
+# @DESCRIPTION:
+# Create proper dependency for kde-frameworks/ dependencies.
+# This takes 1 to 3 arguments. The first being the package name, the optional
+# second is additional USE flags to append, and the optional third is the
+# version to use instead of the automatic version (use sparingly).
+# The output of this should be added directly to DEPEND/RDEPEND, and may be
+# wrapped in a USE conditional (but not an || conditional without an extra set
+# of parentheses).
+add_frameworks_dep() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local version
+
+ if [[ -n ${3} ]]; then
+ version=${3}
+ elif [[ ${CATEGORY} = kde-frameworks ]]; then
+ version=$(get_version_component_range 1-2)
+ elif [[ -z "${version}" ]] ; then
+ version=${FRAMEWORKS_MINIMAL}
+ fi
+
+ _add_kdecategory_dep kde-frameworks "${1}" "${2}" "${version}"
+}
+
+# @FUNCTION: add_plasma_dep
+# @USAGE: <package> [USE flags] [minimum version]
+# @DESCRIPTION:
+# Create proper dependency for kde-base/ dependencies.
+# This takes 1 to 3 arguments. The first being the package name, the optional
+# second is additional USE flags to append, and the optional third is the
+# version to use instead of the automatic version (use sparingly).
+# The output of this should be added directly to DEPEND/RDEPEND, and may be
+# wrapped in a USE conditional (but not an || conditional without an extra set
+# of parentheses).
+add_plasma_dep() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local version
+
+ if [[ -n ${3} ]]; then
+ version=${3}
+ elif [[ ${CATEGORY} = kde-plasma ]]; then
+ version=${PV}
+ elif [[ -z "${version}" ]] ; then
+ version=${PLASMA_MINIMAL}
+ fi
+
+ _add_kdecategory_dep kde-plasma "${1}" "${2}" "${version}"
+}
+
+# @FUNCTION: add_kdeapps_dep
+# @USAGE: <package> [USE flags] [minimum version]
+# @DESCRIPTION:
+# Create proper dependency for kde-apps/ dependencies.
+# This takes 1 to 3 arguments. The first being the package name, the optional
+# second is additional USE flags to append, and the optional third is the
+# version to use instead of the automatic version (use sparingly).
+# The output of this should be added directly to DEPEND/RDEPEND, and may be
+# wrapped in a USE conditional (but not an || conditional without an extra set
+# of parentheses).
+add_kdeapps_dep() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local version
+
+ if [[ -n ${3} ]]; then
+ version=${3}
+ elif [[ ${CATEGORY} = kde-apps ]]; then
+ version=${PV}
+ elif [[ -z "${version}" ]] ; then
+ # In KDE applications world, 5.9999 > yy.mm.x
+ [[ ${PV} = 5.9999 ]] && version=5.9999 || version=${KDE_APPS_MINIMAL}
+ fi
+
+ _add_kdecategory_dep kde-apps "${1}" "${2}" "${version}"
+}
+
+# @FUNCTION: get_kde_version
+# @DESCRIPTION:
+# Translates an ebuild version into a major.minor KDE SC
+# release version. If no version is specified, ${PV} is used.
+get_kde_version() {
+ local ver=${1:-${PV}}
+ local major=$(get_major_version ${ver})
+ local minor=$(get_version_component_range 2 ${ver})
+ local micro=$(get_version_component_range 3 ${ver})
+ if [[ ${ver} == 9999 ]]; then
+ echo live
+ else
+ (( micro < 50 )) && echo ${major}.${minor} || echo ${major}.$((minor + 1))
+ fi
+}
+
+# @FUNCTION: punt_bogus_dep
+# @USAGE: <prefix> <dependency>
+# @DESCRIPTION:
+# Removes a specified dependency from a find_package call with multiple components.
+punt_bogus_dep() {
+ local prefix=${1}
+ local dep=${2}
+
+ pcregrep -Mn "(?s)find_package\s*\(\s*${prefix}.[^)]*?${dep}.*?\)" CMakeLists.txt > "${T}/bogus${dep}"
+
+ # pcregrep returns non-zero on no matches/error
+ if [[ $? != 0 ]] ; then
+ return
+ fi
+
+ local length=$(wc -l "${T}/bogus${dep}" | cut -d " " -f 1)
+ local first=$(head -n 1 "${T}/bogus${dep}" | cut -d ":" -f 1)
+ local last=$(( ${length} + ${first} - 1))
+
+ sed -e "${first},${last}s/${dep}//" -i CMakeLists.txt || die
+
+ if [[ ${length} = 1 ]] ; then
+ sed -e "/find_package\s*(\s*${prefix}\s*REQUIRED\s*COMPONENTS\s*)/d" -i CMakeLists.txt || die
+ fi
+}
+
+fi
diff --git a/eclass/kde5.eclass b/eclass/kde5.eclass
new file mode 100644
index 000000000000..718bd988f778
--- /dev/null
+++ b/eclass/kde5.eclass
@@ -0,0 +1,553 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: kde5.eclass
+# @MAINTAINER:
+# kde@gentoo.org
+# @BLURB: Support eclass for KDE 5-related packages.
+# @DESCRIPTION:
+# The kde5.eclass provides support for building KDE 5-related packages.
+
+if [[ -z ${_KDE5_ECLASS} ]]; then
+_KDE5_ECLASS=1
+
+# @ECLASS-VARIABLE: VIRTUALX_REQUIRED
+# @DESCRIPTION:
+# For proper description see virtualx.eclass manpage.
+# Here we redefine default value to be manual, if your package needs virtualx
+# for tests you should proceed with setting VIRTUALX_REQUIRED=test.
+: ${VIRTUALX_REQUIRED:=manual}
+
+inherit kde5-functions fdo-mime flag-o-matic gnome2-utils versionator virtualx eutils cmake-utils
+
+if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${KDE_SCM} in
+ svn) inherit subversion ;;
+ git) inherit git-r3 ;;
+ esac
+fi
+
+EXPORT_FUNCTIONS pkg_pretend pkg_setup src_unpack src_prepare src_configure src_compile src_test src_install pkg_preinst pkg_postinst pkg_postrm
+
+# @ECLASS-VARIABLE: QT_MINIMAL
+# @DESCRIPTION:
+# Minimal Qt version to require for the package.
+: ${QT_MINIMAL:=5.4.1}
+
+# @ECLASS-VARIABLE: KDE_AUTODEPS
+# @DESCRIPTION:
+# If set to "false", do nothing.
+# For any other value, add a dependency on dev-qt/qtcore:5 and kde-frameworks/extra-cmake-modules:5.
+: ${KDE_AUTODEPS:=true}
+
+# @ECLASS-VARIABLE: KDE_BLOCK_SLOT4
+# @DESCRIPTION:
+# This variable is used when KDE_AUTODEPS is set.
+# If set to "true", add RDEPEND block on kde-{base,apps}/${PN}:4
+: ${KDE_BLOCK_SLOT4:=true}
+
+# @ECLASS-VARIABLE: KDE_DEBUG
+# @DESCRIPTION:
+# If set to "false", unconditionally build with -DNDEBUG.
+# Otherwise, add debug to IUSE to control building with that flag.
+: ${KDE_DEBUG:=true}
+
+# @ECLASS-VARIABLE: KDE_DOXYGEN
+# @DESCRIPTION:
+# If set to "false", do nothing.
+# Otherwise, add "doc" to IUSE, add appropriate dependencies, and generate and
+# install API documentation.
+if [[ ${CATEGORY} = kde-frameworks ]]; then
+ : ${KDE_DOXYGEN:=true}
+else
+ : ${KDE_DOXYGEN:=false}
+fi
+
+# @ECLASS-VARIABLE: KDE_EXAMPLES
+# @DESCRIPTION:
+# If set to "false", unconditionally ignore a top-level examples subdirectory.
+# Otherwise, add "examples" to IUSE to toggle adding that subdirectory.
+: ${KDE_EXAMPLES:=false}
+
+# @ECLASS-VARIABLE: KDE_HANDBOOK
+# @DESCRIPTION:
+# If set to "false", do nothing.
+# Otherwise, add "+handbook" to IUSE, add the appropriate dependency, and
+# generate and install KDE handbook.
+: ${KDE_HANDBOOK:=false}
+
+# @ECLASS-VARIABLE: KDE_TEST
+# @DESCRIPTION:
+# If set to "false", do nothing.
+# For any other value, add test to IUSE and add a dependency on dev-qt/qttest:5.
+if [[ ${CATEGORY} = kde-frameworks ]]; then
+ : ${KDE_TEST:=true}
+else
+ : ${KDE_TEST:=false}
+fi
+
+# @ECLASS-VARIABLE: KDE_PUNT_BOGUS_DEPS
+# @DESCRIPTION:
+# If set to "false", do nothing.
+# For any other value, do black magic to make hardcoded-but-optional dependencies
+# optional again. An upstream solution is preferable and this is a last resort.
+: ${KDE_PUNT_BOGUS_DEPS:=false}
+
+# @ECLASS-VARIABLE: KDE_SELINUX_MODULE
+# @DESCRIPTION:
+# If set to "none", do nothing.
+# For any other value, add selinux to IUSE, and depending on that useflag
+# add a dependency on sec-policy/selinux-${KDE_SELINUX_MODULE} to (R)DEPEND.
+: ${KDE_SELINUX_MODULE:=none}
+
+if [[ ${KDEBASE} = kdevelop ]]; then
+ HOMEPAGE="http://www.kdevelop.org/"
+else
+ HOMEPAGE="http://www.kde.org/"
+fi
+
+LICENSE="GPL-2"
+
+if [[ ${CATEGORY} = kde-frameworks ]]; then
+ SLOT=5/$(get_version_component_range 1-2)
+else
+ SLOT=5
+fi
+
+case ${KDE_AUTODEPS} in
+ false) ;;
+ *)
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${CATEGORY} in
+ kde-frameworks)
+ FRAMEWORKS_MINIMAL=9999
+ ;;
+ kde-plasma)
+ FRAMEWORKS_MINIMAL=9999
+ ;;
+ *) ;;
+ esac
+ fi
+
+ DEPEND+=" $(add_frameworks_dep extra-cmake-modules)"
+ RDEPEND+=" >=kde-frameworks/kf-env-3"
+ COMMONDEPEND+=" >=dev-qt/qtcore-${QT_MINIMAL}:5"
+
+ if [[ ${CATEGORY} = kde-plasma ]]; then
+ RDEPEND+="
+ !kde-apps/kde4-l10n[-minimal(-)]
+ !kde-base/kde-l10n:4[-minimal(-)]
+ "
+ fi
+
+ if [[ ${KDE_BLOCK_SLOT4} = true && ${CATEGORY} = kde-apps ]]; then
+ RDEPEND+=" !kde-apps/${PN}:4"
+ fi
+ ;;
+esac
+
+case ${KDE_DOXYGEN} in
+ false) ;;
+ *)
+ IUSE+=" doc"
+ DEPEND+=" doc? (
+ $(add_frameworks_dep kapidox)
+ app-doc/doxygen
+ )"
+ ;;
+esac
+
+case ${KDE_DEBUG} in
+ false) ;;
+ *)
+ IUSE+=" debug"
+ ;;
+esac
+
+case ${KDE_EXAMPLES} in
+ false) ;;
+ *)
+ IUSE+=" examples"
+ ;;
+esac
+
+case ${KDE_HANDBOOK} in
+ false) ;;
+ *)
+ IUSE+=" +handbook"
+ DEPEND+=" handbook? ( $(add_frameworks_dep kdoctools) )"
+ ;;
+esac
+
+case ${KDE_TEST} in
+ false) ;;
+ *)
+ IUSE+=" test"
+ DEPEND+=" test? ( >=dev-qt/qttest-${QT_MINIMAL}:5 )"
+ ;;
+esac
+
+case ${KDE_SELINUX_MODULE} in
+ none) ;;
+ *)
+ IUSE+=" selinux"
+ RDEPEND+=" selinux? ( sec-policy/selinux-${KDE_SELINUX_MODULE} )"
+ ;;
+esac
+
+DEPEND+=" ${COMMONDEPEND} dev-util/desktop-file-utils"
+RDEPEND+=" ${COMMONDEPEND}"
+unset COMMONDEPEND
+
+if [[ -n ${KMNAME} && ${KMNAME} != ${PN} && ${KDE_BUILD_TYPE} = release ]]; then
+ S=${WORKDIR}/${KMNAME}-${PV}
+fi
+
+# Determine fetch location for released tarballs
+_calculate_src_uri() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ local _kmname
+
+ if [[ -n ${KMNAME} ]]; then
+ _kmname=${KMNAME}
+ else
+ _kmname=${PN}
+ fi
+
+ case ${PN} in
+ kdelibs4support | \
+ khtml | \
+ kjs | \
+ kjsembed | \
+ kmediaplayer | \
+ kross | \
+ krunner)
+ _kmname="portingAids/${_kmname}"
+ ;;
+ esac
+
+ DEPEND+=" app-arch/xz-utils"
+
+ case ${CATEGORY} in
+ kde-apps)
+ case ${PV} in
+ ??.?.[6-9]? | ??.??.[6-9]? )
+ SRC_URI="mirror://kde/unstable/applications/${PV}/src/${_kmname}-${PV}.tar.xz"
+ RESTRICT+=" mirror"
+ ;;
+ *)
+ SRC_URI="mirror://kde/stable/applications/${PV}/src/${_kmname}-${PV}.tar.xz" ;;
+ esac
+ ;;
+ kde-frameworks)
+ SRC_URI="mirror://kde/stable/frameworks/${PV%.*}/${_kmname}-${PV}.tar.xz" ;;
+ kde-plasma)
+ case ${PV} in
+ 5.?.[6-9]? )
+ # Plasma 5 beta releases
+ SRC_URI="mirror://kde/unstable/plasma/${PV}/${_kmname}-${PV}.tar.xz"
+ RESTRICT+=" mirror"
+ ;;
+ *)
+ # Plasma 5 stable releases
+ SRC_URI="mirror://kde/stable/plasma/${PV}/${_kmname}-${PV}.tar.xz" ;;
+ esac
+ ;;
+ esac
+}
+
+# Determine fetch location for live sources
+_calculate_live_repo() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ SRC_URI=""
+
+ case ${KDE_SCM} in
+ svn)
+ # @ECLASS-VARIABLE: ESVN_MIRROR
+ # @DESCRIPTION:
+ # This variable allows easy overriding of default kde mirror service
+ # (anonsvn) with anything else you might want to use.
+ ESVN_MIRROR=${ESVN_MIRROR:=svn://anonsvn.kde.org/home/kde}
+
+ local branch_prefix="KDE"
+
+ if [[ -n ${KMNAME} ]]; then
+ branch_prefix="${KMNAME}"
+ fi
+
+ ESVN_REPO_URI="${ESVN_MIRROR}/trunk/${branch_prefix}/${PN}"
+ ;;
+ git)
+ # @ECLASS-VARIABLE: EGIT_MIRROR
+ # @DESCRIPTION:
+ # This variable allows easy overriding of default kde mirror service
+ # (anongit) with anything else you might want to use.
+ EGIT_MIRROR=${EGIT_MIRROR:=git://anongit.kde.org}
+
+ local _kmname
+
+ # @ECLASS-VARIABLE: EGIT_REPONAME
+ # @DESCRIPTION:
+ # This variable allows overriding of default repository
+			# name. Specify only if this differs from PN and KMNAME.
+ if [[ -n ${EGIT_REPONAME} ]]; then
+				# the repository and kmname differ
+ _kmname=${EGIT_REPONAME}
+ elif [[ -n ${KMNAME} ]]; then
+ _kmname=${KMNAME}
+ else
+ _kmname=${PN}
+ fi
+
+ if [[ ${PV} != 9999 && ${CATEGORY} = kde-plasma ]]; then
+ EGIT_BRANCH="Plasma/$(get_version_component_range 1-2)"
+ fi
+
+ EGIT_REPO_URI="${EGIT_MIRROR}/${_kmname}"
+ ;;
+ esac
+}
+
+case ${KDE_BUILD_TYPE} in
+ live) _calculate_live_repo ;;
+ *) _calculate_src_uri ;;
+esac
+
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: SRC_URI is ${SRC_URI}"
+
+# @FUNCTION: kde5_pkg_pretend
+# @DESCRIPTION:
+# Do some basic settings
+kde5_pkg_pretend() {
+ debug-print-function ${FUNCNAME} "$@"
+ _check_gcc_version
+}
+
+# @FUNCTION: kde5_pkg_setup
+# @DESCRIPTION:
+# Do some basic settings
+kde5_pkg_setup() {
+ debug-print-function ${FUNCNAME} "$@"
+ _check_gcc_version
+}
+
+# @FUNCTION: kde5_src_unpack
+# @DESCRIPTION:
+# Function for unpacking KDE 5.
+kde5_src_unpack() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ if [[ ${KDE_BUILD_TYPE} = live ]]; then
+ case ${KDE_SCM} in
+ svn)
+ subversion_src_unpack
+ ;;
+ git)
+ git-r3_src_unpack
+ ;;
+ esac
+ else
+ default
+ fi
+}
+
+# @FUNCTION: kde5_src_prepare
+# @DESCRIPTION:
+# Function for preparing the KDE 5 sources.
+kde5_src_prepare() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # only build examples when required
+ if ! use_if_iuse examples || ! use examples ; then
+ comment_add_subdirectory examples
+ fi
+
+ # only enable handbook when required
+ if ! use_if_iuse handbook ; then
+ comment_add_subdirectory doc
+ fi
+
+ # enable only the requested translations
+ # when required
+ if [[ ${KDE_BUILD_TYPE} = release ]] ; then
+ if [[ -d po ]] ; then
+ pushd po > /dev/null
+ for lang in *; do
+ if ! has ${lang} ${LINGUAS} ; then
+ if [[ ${lang} != CMakeLists.txt ]] ; then
+ rm -rf ${lang}
+ fi
+ if [[ -e CMakeLists.txt ]] ; then
+ comment_add_subdirectory ${lang}
+ fi
+ fi
+ done
+ popd > /dev/null
+ fi
+
+ if [[ ${KDE_HANDBOOK} = true ]] ; then
+ pushd doc > /dev/null
+ for lang in *; do
+ if ! has ${lang} ${LINGUAS} ; then
+ comment_add_subdirectory ${lang}
+ fi
+ done
+ popd > /dev/null
+ fi
+ else
+ rm -rf po
+ fi
+
+ # in frameworks, tests = manual tests so never
+ # build them
+ if [[ ${CATEGORY} = kde-frameworks ]]; then
+ comment_add_subdirectory tests
+ fi
+
+ if [[ ${CATEGORY} = kde-frameworks || ${CATEGORY} = kde-plasma || ${CATEGORY} = kde-apps ]] ; then
+ # only build unit tests when required
+ if ! use_if_iuse test ; then
+ comment_add_subdirectory autotests
+ comment_add_subdirectory tests
+ fi
+ fi
+
+ case ${KDE_PUNT_BOGUS_DEPS} in
+ false) ;;
+ *)
+ if ! use_if_iuse test ; then
+ punt_bogus_dep Qt5 Test
+ fi
+ if ! use_if_iuse handbook ; then
+ punt_bogus_dep KF5 DocTools
+ fi
+ ;;
+ esac
+
+ cmake-utils_src_prepare
+}
+
+# @FUNCTION: kde5_src_configure
+# @DESCRIPTION:
+# Function for configuring the build of KDE 5.
+kde5_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # we rely on cmake-utils.eclass to append -DNDEBUG too
+ if ! use_if_iuse debug; then
+ append-cppflags -DQT_NO_DEBUG
+ fi
+
+ local cmakeargs
+
+ if ! use_if_iuse test ; then
+ cmakeargs+=( -DBUILD_TESTING=OFF )
+ fi
+
+ # install mkspecs in the same directory as qt stuff
+ cmakeargs+=(-DKDE_INSTALL_USE_QT_SYS_PATHS=ON)
+
+ # allow the ebuild to override what we set here
+ mycmakeargs=("${cmakeargs[@]}" "${mycmakeargs[@]}")
+
+ cmake-utils_src_configure
+}
+
+# @FUNCTION: kde5_src_compile
+# @DESCRIPTION:
+# Function for compiling KDE 5.
+kde5_src_compile() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ cmake-utils_src_compile "$@"
+
+ # Build doxygen documentation if applicable
+ if use_if_iuse doc ; then
+ kgenapidox . || die
+ fi
+}
+
+# @FUNCTION: kde5_src_test
+# @DESCRIPTION:
+# Function for testing KDE 5.
+kde5_src_test() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ _test_runner() {
+ if [[ -n "${VIRTUALDBUS_TEST}" ]]; then
+ export $(dbus-launch)
+ fi
+
+ cmake-utils_src_test
+ }
+
+ # When run as normal user during ebuild development with the ebuild command, the
+ # kde tests tend to access the session DBUS. This however is not possible in a real
+ # emerge or on the tinderbox.
+ # > make sure it does not happen, so bad tests can be recognized and disabled
+ unset DBUS_SESSION_BUS_ADDRESS DBUS_SESSION_BUS_PID
+
+ if [[ ${VIRTUALX_REQUIRED} = always || ${VIRTUALX_REQUIRED} = test ]]; then
+ VIRTUALX_COMMAND="_test_runner" virtualmake
+ else
+ _test_runner
+ fi
+
+ if [[ -n "${DBUS_SESSION_BUS_PID}" ]] ; then
+ kill ${DBUS_SESSION_BUS_PID}
+ fi
+}
+
+# @FUNCTION: kde5_src_install
+# @DESCRIPTION:
+# Function for installing KDE 5.
+kde5_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # Install doxygen documentation if applicable
+ if use_if_iuse doc ; then
+ dodoc -r apidocs/html
+ fi
+
+ cmake-utils_src_install
+
+ # We don't want ${PREFIX}/share/doc/HTML to be compressed,
+ # because then khelpcenter can't find the docs
+ if [[ -d ${ED}/${PREFIX}/share/doc/HTML ]]; then
+ docompress -x ${PREFIX}/share/doc/HTML
+ fi
+}
+
+# @FUNCTION: kde5_pkg_preinst
+# @DESCRIPTION:
+# Function storing icon caches
+kde5_pkg_preinst() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_savelist
+}
+
+# @FUNCTION: kde5_pkg_postinst
+# @DESCRIPTION:
+# Function to rebuild the KDE System Configuration Cache after an application has been installed.
+kde5_pkg_postinst() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_cache_update
+ fdo-mime_desktop_database_update
+}
+
+# @FUNCTION: kde5_pkg_postrm
+# @DESCRIPTION:
+# Function to rebuild the KDE System Configuration Cache after an application has been removed.
+kde5_pkg_postrm() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ gnome2_icon_cache_update
+ fdo-mime_desktop_database_update
+}
+
+fi
diff --git a/eclass/kernel-2.eclass b/eclass/kernel-2.eclass
new file mode 100644
index 000000000000..4f4861365af1
--- /dev/null
+++ b/eclass/kernel-2.eclass
@@ -0,0 +1,1347 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Description: kernel.eclass rewrite for a clean base regarding the 2.6
+# series of kernel with back-compatibility for 2.4
+#
+# Original author: John Mylchreest <johnm@gentoo.org>
+# Maintainer: kernel-misc@gentoo.org
+#
+# Please direct your bugs to the current eclass maintainer :)
+
+# added functionality:
+# unipatch - a flexible, singular method to extract, add and remove patches.
+
+# A Couple of env vars are available to effect usage of this eclass
+# These are as follows:
+#
+# K_USEPV - When setting the EXTRAVERSION variable, it should
+# add PV to the end.
+#						this is useful for things like wolk. IE:
+# EXTRAVERSION would be something like : -wolk-4.19-r1
+# K_NOSETEXTRAVERSION - if this is set then EXTRAVERSION will not be
+# automatically set within the kernel Makefile
+# K_NOUSENAME - if this is set then EXTRAVERSION will not include the
+# first part of ${PN} in EXTRAVERSION
+# K_NOUSEPR - if this is set then EXTRAVERSION will not include the
+# anything based on ${PR}.
+# K_PREPATCHED - if the patchset is prepatched (ie: mm-sources,
+# ck-sources, ac-sources) it will use PR (ie: -r5) as
+# the patchset version for
+# and not use it as a true package revision
+# K_EXTRAEINFO		- this is a new-line separated list of einfo displays in
+# postinst and can be used to carry additional postinst
+# messages
+# K_EXTRAELOG - same as K_EXTRAEINFO except using elog instead of einfo
+# K_EXTRAEWARN - same as K_EXTRAEINFO except using ewarn instead of einfo
+# K_SYMLINK			- if this is set, then forcibly create symlink anyway
+#
+# K_BASE_VER - for git-sources, declare the base version this patch is
+# based off of.
+# K_DEFCONFIG - Allow specifying a different defconfig target.
+# If length zero, defaults to "defconfig".
+# K_WANT_GENPATCHES - Apply genpatches to kernel source. Provide any
+# combination of "base", "extras" or "experimental".
+# K_EXP_GENPATCHES_PULL - If set, we pull "experimental" regardless of the USE FLAG
+# but expect the ebuild maintainer to use K_EXP_GENPATCHES_LIST.
+# K_EXP_GENPATCHES_NOUSE - If set, no USE flag will be provided for "experimental";
+# as a result the user cannot choose to apply those patches.
+# K_EXP_GENPATCHES_LIST - A list of patches to pick from "experimental" to apply when
+# the USE flag is unset and K_EXP_GENPATCHES_PULL is set.
+# K_GENPATCHES_VER - The version of the genpatches tarball(s) to apply.
+# A value of "5" would apply genpatches-2.6.12-5 to
+# my-sources-2.6.12.ebuild
+# K_SECURITY_UNSUPPORTED- If set, this kernel is unsupported by Gentoo Security
+# K_DEBLOB_AVAILABLE - A value of "0" will disable all of the optional deblob
+# code. If empty, will be set to "1" if deblobbing is
+# possible. Test ONLY for "1".
+# K_DEBLOB_TAG    - This will be the version of deblob script. It's an upstream SVN tag
+#			such as -gnu or -gnu1.
+# K_PREDEBLOBBED - This kernel was already deblobbed elsewhere.
+# If false, either optional deblobbing will be available
+# or the license will note the inclusion of freedist
+# code.
+# K_LONGTERM - If set, the eclass will search for the kernel source
+# in the long term directories on the upstream servers
+# as the location has been changed by upstream
+# K_KDBUS_AVAILABLE - If set, the ebuild contains the option of installing the
+# kdbus patch. This patch is not installed without the 'kdbus'
+# and 'experimental' use flags.
+# H_SUPPORTEDARCH - this should be a space separated list of ARCH's which
+# can be supported by the headers ebuild
+
+# UNIPATCH_LIST		- space delimited list of patches to be applied to the
+# kernel
+# UNIPATCH_EXCLUDE	- an additional var to support exclusion based completely
+# on "<passedstring>*" and not "<passedno#>_*"
+# - this should _NOT_ be used from the ebuild as this is
+# reserved for end users passing excludes from the cli
+# UNIPATCH_DOCS		- space delimited list of docs to be installed to
+# the doc dir
+# UNIPATCH_STRICTORDER - if this is set places patches into directories of
+# order, so they are applied in the order passed
+
+# Changing any other variable in this eclass is not supported; you can request
+# for additional variables to be added by contacting the current maintainer.
+# If you do change them, there is a chance that we will not fix resulting bugs;
+# that of course does not mean we're not willing to help.
+
+PYTHON_COMPAT=( python{2_6,2_7} )
+
+inherit eutils toolchain-funcs versionator multilib python-any-r1
+EXPORT_FUNCTIONS pkg_setup src_unpack src_compile src_test src_install pkg_preinst pkg_postinst pkg_postrm
+
+# Added by Daniel Ostrow <dostrow@gentoo.org>
+# This is an ugly hack to get around an issue with a 32-bit userland on ppc64.
+# I will remove it when I come up with something more reasonable.
+[[ ${PROFILE_ARCH} == "ppc64" ]] && CHOST="powerpc64-${CHOST#*-}"
+
+export CTARGET=${CTARGET:-${CHOST}}
+if [[ ${CTARGET} == ${CHOST} && ${CATEGORY/cross-} != ${CATEGORY} ]]; then
+ export CTARGET=${CATEGORY/cross-}
+fi
+
+HOMEPAGE="http://www.kernel.org/ http://www.gentoo.org/ ${HOMEPAGE}"
+: ${LICENSE:="GPL-2"}
+
+# This is the latest KV_PATCH of the deblob tool available from the
+# libre-sources upstream. If you bump this, you MUST regenerate the Manifests
+# for ALL kernel-2 consumer packages where deblob is available.
+: ${DEBLOB_MAX_VERSION:=38}
+
+# No need to run scanelf/strip on kernel sources/headers (bug #134453).
+RESTRICT="binchecks strip"
+
+# set LINUX_HOSTCFLAGS if not already set
+: ${LINUX_HOSTCFLAGS:="-Wall -Wstrict-prototypes -Os -fomit-frame-pointer -I${S}/include"}
+
+# debugging functions
+#==============================================================
+# this function exists only to help debug kernel-2.eclass
+# if you are adding new functionality in, put a call to it
+# at the start of src_unpack, or during SRC_URI/dep generation.
+debug-print-kernel2-variables() {
+ for v in PVR CKV OKV KV KV_FULL KV_MAJOR KV_MINOR KV_PATCH RELEASETYPE \
+ RELEASE UNIPATCH_LIST_DEFAULT UNIPATCH_LIST_GENPATCHES \
+ UNIPATCH_LIST S KERNEL_URI K_WANT_GENPATCHES ; do
+ debug-print "${v}: ${!v}"
+ done
+}
+
+#Eclass functions only from here onwards ...
+#==============================================================
+handle_genpatches() {
+ local tarball
+ [[ -z ${K_WANT_GENPATCHES} || -z ${K_GENPATCHES_VER} ]] && return 1
+
+ debug-print "Inside handle_genpatches"
+ local OKV_ARRAY
+ IFS="." read -r -a OKV_ARRAY <<<"${OKV}"
+
+ # for > 3.0 kernels, handle genpatches tarball name
+ # genpatches for 3.0 and 3.0.1 might be named
+ # genpatches-3.0-1.base.tar.xz and genpatches-3.0-2.base.tar.xz
+ # respectively. Handle this.
+
+ for i in ${K_WANT_GENPATCHES} ; do
+ if [[ ${KV_MAJOR} -ge 3 ]]; then
+ if [[ ${#OKV_ARRAY[@]} -ge 3 ]]; then
+ tarball="genpatches-${KV_MAJOR}.${KV_MINOR}-${K_GENPATCHES_VER}.${i}.tar.xz"
+ else
+ tarball="genpatches-${KV_MAJOR}.${KV_PATCH}-${K_GENPATCHES_VER}.${i}.tar.xz"
+ fi
+ else
+ tarball="genpatches-${OKV}-${K_GENPATCHES_VER}.${i}.tar.xz"
+ fi
+
+ local use_cond_start="" use_cond_end=""
+
+ if [[ "${i}" == "experimental" && -z ${K_EXP_GENPATCHES_PULL} && -z ${K_EXP_GENPATCHES_NOUSE} ]] ; then
+ use_cond_start="experimental? ( "
+ use_cond_end=" )"
+
+ if use experimental ; then
+ UNIPATCH_LIST_GENPATCHES+=" ${DISTDIR}/${tarball}"
+ debug-print "genpatches tarball: $tarball"
+ fi
+ else
+ UNIPATCH_LIST_GENPATCHES+=" ${DISTDIR}/${tarball}"
+ debug-print "genpatches tarball: $tarball"
+ fi
+ GENPATCHES_URI+=" ${use_cond_start}mirror://gentoo/${tarball}${use_cond_end}"
+ done
+}
+
+detect_version() {
+ # this function will detect and set
+ # - OKV: Original Kernel Version (2.6.0/2.6.0-test11)
+ # - KV: Kernel Version (2.6.0-gentoo/2.6.0-test11-gentoo-r1)
+ # - EXTRAVERSION: The additional version appended to OKV (-gentoo/-gentoo-r1)
+
+ # We've already run, so nothing to do here.
+ [[ -n ${KV_FULL} ]] && return 0
+
+ # CKV is used as a comparison kernel version, which is used when
+	# PV doesn't reflect the genuine kernel version.
+ # this gets set to the portage style versioning. ie:
+ # CKV=2.6.11_rc4
+ CKV=${CKV:-${PV}}
+ OKV=${OKV:-${CKV}}
+ OKV=${OKV/_beta/-test}
+ OKV=${OKV/_rc/-rc}
+ OKV=${OKV/-r*}
+ OKV=${OKV/_p*}
+
+ KV_MAJOR=$(get_version_component_range 1 ${OKV})
+ # handle if OKV is X.Y or X.Y.Z (e.g. 3.0 or 3.0.1)
+ local OKV_ARRAY
+ IFS="." read -r -a OKV_ARRAY <<<"${OKV}"
+
+ # if KV_MAJOR >= 3, then we have no more KV_MINOR
+ #if [[ ${KV_MAJOR} -lt 3 ]]; then
+ if [[ ${#OKV_ARRAY[@]} -ge 3 ]]; then
+ KV_MINOR=$(get_version_component_range 2 ${OKV})
+ KV_PATCH=$(get_version_component_range 3 ${OKV})
+ if [[ ${KV_MAJOR}${KV_MINOR}${KV_PATCH} -ge 269 ]]; then
+ KV_EXTRA=$(get_version_component_range 4- ${OKV})
+ KV_EXTRA=${KV_EXTRA/[-_]*}
+ else
+ KV_PATCH=$(get_version_component_range 3- ${OKV})
+ fi
+ else
+ KV_PATCH=$(get_version_component_range 2 ${OKV})
+ KV_EXTRA=$(get_version_component_range 3- ${OKV})
+ KV_EXTRA=${KV_EXTRA/[-_]*}
+ fi
+
+ debug-print "KV_EXTRA is ${KV_EXTRA}"
+
+ KV_PATCH=${KV_PATCH/[-_]*}
+
+ local v n=0 missing
+ #if [[ ${KV_MAJOR} -lt 3 ]]; then
+ if [[ ${#OKV_ARRAY[@]} -ge 3 ]]; then
+ for v in CKV OKV KV_{MAJOR,MINOR,PATCH} ; do
+ [[ -z ${!v} ]] && n=1 && missing="${missing}${v} ";
+ done
+ else
+ for v in CKV OKV KV_{MAJOR,PATCH} ; do
+ [[ -z ${!v} ]] && n=1 && missing="${missing}${v} ";
+ done
+ fi
+
+ [[ $n -eq 1 ]] && \
+ eerror "Missing variables: ${missing}" && \
+ die "Failed to extract kernel version (try explicit CKV in ebuild)!"
+ unset v n missing
+
+# if [[ ${KV_MAJOR} -ge 3 ]]; then
+ if [[ ${#OKV_ARRAY[@]} -lt 3 ]]; then
+ KV_PATCH_ARR=(${KV_PATCH//\./ })
+
+ # at this point 031412, Linus is putting all 3.x kernels in a
+ # 3.x directory, may need to revisit when 4.x is released
+ KERNEL_BASE_URI="mirror://kernel/linux/kernel/v${KV_MAJOR}.x"
+
+ [[ -n "${K_LONGTERM}" ]] &&
+ KERNEL_BASE_URI="${KERNEL_BASE_URI}/longterm/v${KV_MAJOR}.${KV_PATCH_ARR}"
+ else
+ #KERNEL_BASE_URI="mirror://kernel/linux/kernel/v${KV_MAJOR}.0"
+ #KERNEL_BASE_URI="mirror://kernel/linux/kernel/v${KV_MAJOR}.${KV_MINOR}"
+ if [[ ${KV_MAJOR} -ge 3 ]]; then
+ KERNEL_BASE_URI="mirror://kernel/linux/kernel/v${KV_MAJOR}.x"
+ else
+ KERNEL_BASE_URI="mirror://kernel/linux/kernel/v${KV_MAJOR}.${KV_MINOR}"
+ fi
+
+ [[ -n "${K_LONGTERM}" ]] &&
+ #KERNEL_BASE_URI="${KERNEL_BASE_URI}/longterm"
+ KERNEL_BASE_URI="${KERNEL_BASE_URI}/longterm/v${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}"
+ fi
+
+ debug-print "KERNEL_BASE_URI is ${KERNEL_BASE_URI}"
+
+ if [[ ${#OKV_ARRAY[@]} -ge 3 ]] && [[ ${KV_MAJOR} -ge 3 ]]; then
+ # handle non genpatch using sources correctly
+ if [[ -z ${K_WANT_GENPATCHES} && -z ${K_GENPATCHES_VER} && ${KV_PATCH} -gt 0 ]]; then
+ KERNEL_URI="${KERNEL_BASE_URI}/patch-${OKV}.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${CKV}.xz"
+ fi
+ KERNEL_URI="${KERNEL_URI} ${KERNEL_BASE_URI}/linux-${KV_MAJOR}.${KV_MINOR}.tar.xz"
+ else
+ KERNEL_URI="${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ fi
+
+ RELEASE=${CKV/${OKV}}
+ RELEASE=${RELEASE/_beta}
+ RELEASE=${RELEASE/_rc/-rc}
+ RELEASE=${RELEASE/_pre/-pre}
+ # We cannot trivally call kernel_is here, because it calls us to detect the
+ # version
+ #kernel_is ge 2 6 && RELEASE=${RELEASE/-pre/-git}
+ [ $(($KV_MAJOR * 1000 + ${KV_MINOR:-0})) -ge 2006 ] && RELEASE=${RELEASE/-pre/-git}
+ RELEASETYPE=${RELEASE//[0-9]}
+
+ # Now we know that RELEASE is the -rc/-git
+ # and RELEASETYPE is the same but with its numerics stripped
+ # we can work on better sorting EXTRAVERSION.
+ # first of all, we add the release
+ EXTRAVERSION="${RELEASE}"
+ debug-print "0 EXTRAVERSION:${EXTRAVERSION}"
+ [[ -n ${KV_EXTRA} ]] && [[ ${KV_MAJOR} -lt 3 ]] && EXTRAVERSION=".${KV_EXTRA}${EXTRAVERSION}"
+
+ debug-print "1 EXTRAVERSION:${EXTRAVERSION}"
+ if [[ -n "${K_NOUSEPR}" ]]; then
+ # Don't add anything based on PR to EXTRAVERSION
+ debug-print "1.0 EXTRAVERSION:${EXTRAVERSION}"
+ elif [[ -n ${K_PREPATCHED} ]]; then
+ debug-print "1.1 EXTRAVERSION:${EXTRAVERSION}"
+ EXTRAVERSION="${EXTRAVERSION}-${PN/-*}${PR/r}"
+ elif [[ "${ETYPE}" = "sources" ]]; then
+ debug-print "1.2 EXTRAVERSION:${EXTRAVERSION}"
+ # For some sources we want to use the PV in the extra version
+ # This is because upstream releases with a completely different
+ # versioning scheme.
+ case ${PN/-*} in
+ wolk) K_USEPV=1;;
+ vserver) K_USEPV=1;;
+ esac
+
+ [[ -z "${K_NOUSENAME}" ]] && EXTRAVERSION="${EXTRAVERSION}-${PN/-*}"
+ [[ -n "${K_USEPV}" ]] && EXTRAVERSION="${EXTRAVERSION}-${PV//_/-}"
+ [[ -n "${PR//r0}" ]] && EXTRAVERSION="${EXTRAVERSION}-${PR}"
+ fi
+ debug-print "2 EXTRAVERSION:${EXTRAVERSION}"
+
+ # The only messing around which should actually effect this is for KV_EXTRA
+ # since this has to limit OKV to MAJ.MIN.PAT and strip EXTRA off else
+ # KV_FULL evaluates to MAJ.MIN.PAT.EXT.EXT after EXTRAVERSION
+
+ if [[ -n ${KV_EXTRA} ]]; then
+ if [[ -n ${KV_MINOR} ]]; then
+ OKV="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}"
+ else
+ OKV="${KV_MAJOR}.${KV_PATCH}"
+ fi
+ KERNEL_URI="${KERNEL_BASE_URI}/patch-${CKV}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${CKV}.xz"
+ fi
+
+ # We need to set this using OKV, but we need to set it before we do any
+ # messing around with OKV based on RELEASETYPE
+ KV_FULL=${OKV}${EXTRAVERSION}
+
+ # we will set this for backwards compatibility.
+ S=${WORKDIR}/linux-${KV_FULL}
+ KV=${KV_FULL}
+
+ # -rc-git pulls can be achieved by specifying CKV
+ # for example:
+ # CKV="2.6.11_rc3_pre2"
+ # will pull:
+ # linux-2.6.10.tar.xz & patch-2.6.11-rc3.xz & patch-2.6.11-rc3-git2.xz
+
+ if [[ ${KV_MAJOR}${KV_MINOR} -eq 26 ]]; then
+
+ if [[ ${RELEASETYPE} == -rc ]] || [[ ${RELEASETYPE} == -pre ]]; then
+ OKV="${KV_MAJOR}.${KV_MINOR}.$((${KV_PATCH} - 1))"
+ KERNEL_URI="${KERNEL_BASE_URI}/testing/patch-${CKV//_/-}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${CKV//_/-}.xz"
+ fi
+
+ if [[ ${RELEASETYPE} == -git ]]; then
+ KERNEL_URI="${KERNEL_BASE_URI}/snapshots/patch-${OKV}${RELEASE}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${OKV}${RELEASE}.xz"
+ fi
+
+ if [[ ${RELEASETYPE} == -rc-git ]]; then
+ OKV="${KV_MAJOR}.${KV_MINOR}.$((${KV_PATCH} - 1))"
+ KERNEL_URI="${KERNEL_BASE_URI}/snapshots/patch-${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${RELEASE}.xz
+ ${KERNEL_BASE_URI}/testing/patch-${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${RELEASE/-git*}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${RELEASE/-git*}.xz ${DISTDIR}/patch-${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${RELEASE}.xz"
+ fi
+ else
+ KV_PATCH_ARR=(${KV_PATCH//\./ })
+
+ # the different majorminor versions have different patch start versions
+ OKV_DICT=(["2"]="${KV_MAJOR}.$((${KV_PATCH_ARR} - 1))" ["3"]="2.6.39" ["4"]="3.19")
+
+ if [[ ${RELEASETYPE} == -rc ]] || [[ ${RELEASETYPE} == -pre ]]; then
+ OKV=${K_BASE_VER:-$OKV_DICT["${KV_MAJOR}"]}
+ KERNEL_URI="${KERNEL_BASE_URI}/testing/patch-${CKV//_/-}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${CKV//_/-}.xz"
+ fi
+
+ if [[ ${RELEASETYPE} == -git ]]; then
+ KERNEL_URI="${KERNEL_BASE_URI}/snapshots/patch-${OKV}${RELEASE}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${OKV}${RELEASE}.xz"
+ fi
+
+ if [[ ${RELEASETYPE} == -rc-git ]]; then
+ OKV=${K_BASE_VER:-$OKV_DICT["${KV_MAJOR}"]}
+ KERNEL_URI="${KERNEL_BASE_URI}/snapshots/patch-${KV_MAJOR}.${KV_PATCH}${RELEASE}.xz
+ ${KERNEL_BASE_URI}/testing/patch-${KV_MAJOR}.${KV_PATCH}${RELEASE/-git*}.xz
+ ${KERNEL_BASE_URI}/linux-${OKV}.tar.xz"
+
+ UNIPATCH_LIST_DEFAULT="${DISTDIR}/patch-${KV_MAJOR}.${KV_PATCH}${RELEASE/-git*}.xz ${DISTDIR}/patch-${KV_MAJOR}.${KV_PATCH}${RELEASE}.xz"
+ fi
+
+
+ fi
+
+ debug-print-kernel2-variables
+
+ handle_genpatches
+}
+
+# Note: duplicated in linux-info.eclass
+# Usage: kernel_is [lt|gt|le|ge|eq] <major> [minor] [patch]
+# Compare the detected kernel version (KV_MAJOR/KV_MINOR/KV_PATCH) against
+# the given version; with no operator the comparison defaults to equality.
+# Returns shell true/false.
+kernel_is() {
+	# ALL of these should be set before we can safely continue this function.
+	# some of the sources have in the past had only one set.
+	local v n=0
+	for v in OKV KV_{MAJOR,MINOR,PATCH} ; do [[ -z ${!v} ]] && n=1 ; done
+	[[ $n -eq 1 ]] && detect_version
+	unset v n
+
+	# Now we can continue
+	local operator test value
+
+	case ${1#-} in
+	  lt) operator="-lt"; shift;;
+	  gt) operator="-gt"; shift;;
+	  le) operator="-le"; shift;;
+	  ge) operator="-ge"; shift;;
+	  eq) operator="-eq"; shift;;
+	   *) operator="-eq";;
+	esac
+	[[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
+
+	# Pack MAJOR/MINOR/PATCH into one comparable integer (8 bits each);
+	# omitted arguments default to the current component, so e.g.
+	# "kernel_is 2 6" matches any 2.6.x kernel.
+	: $(( test = (KV_MAJOR << 16) + (KV_MINOR << 8) + KV_PATCH ))
+	: $(( value = (${1:-${KV_MAJOR}} << 16) + (${2:-${KV_MINOR}} << 8) + ${3:-${KV_PATCH}} ))
+	[ ${test} ${operator} ${value} ]
+}
+
+# Convenience wrapper: true when the detected kernel is a 2.4 kernel.
+kernel_is_2_4() {
+	kernel_is 2 4
+}
+
+# Convenience wrapper: true for a 2.6 kernel; deliberately also accepts
+# 2.5 development trees (treated the same as 2.6 by this eclass).
+kernel_is_2_6() {
+	kernel_is 2 6 || kernel_is 2 5
+}
+
+# Capture the sources type and set DEPENDs
+if [[ ${ETYPE} == sources ]]; then
+ DEPEND="!build? (
+ sys-apps/sed
+ >=sys-devel/binutils-2.11.90.0.31
+ )"
+ RDEPEND="!build? (
+ >=sys-libs/ncurses-5.2
+ sys-devel/make
+ dev-lang/perl
+ sys-devel/bc
+ )"
+
+ SLOT="${PVR}"
+ DESCRIPTION="Sources based on the Linux Kernel."
+ IUSE="symlink build"
+
+ if [[ -n ${K_KDBUS_AVAILABLE} ]]; then
+ IUSE="${IUSE} kdbus"
+ fi
+
+ # Bug #266157, deblob for libre support
+ if [[ -z ${K_PREDEBLOBBED} ]] ; then
+ # Bug #359865, force a call to detect_version if needed
+ kernel_is ge 2 6 27 && \
+ [[ -z "${K_DEBLOB_AVAILABLE}" ]] && \
+ kernel_is le 2 6 ${DEBLOB_MAX_VERSION} && \
+ K_DEBLOB_AVAILABLE=1
+ if [[ ${K_DEBLOB_AVAILABLE} == "1" ]] ; then
+ IUSE="${IUSE} deblob"
+
+ # Reflect that kernels contain firmware blobs unless otherwise
+ # stripped
+ LICENSE="${LICENSE} !deblob? ( freedist )"
+
+ DEPEND+=" deblob? ( ${PYTHON_DEPS} )"
+
+ if [[ -n KV_MINOR ]]; then
+ DEBLOB_PV="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}"
+ else
+ DEBLOB_PV="${KV_MAJOR}.${KV_PATCH}"
+ fi
+
+ if [[ ${KV_MAJOR} -ge 3 ]]; then
+ DEBLOB_PV="${KV_MAJOR}.${KV_MINOR}"
+ fi
+
+ # deblob svn tag, default is -gnu, to change, use K_DEBLOB_TAG in ebuild
+ K_DEBLOB_TAG=${K_DEBLOB_TAG:--gnu}
+ DEBLOB_A="deblob-${DEBLOB_PV}"
+ DEBLOB_CHECK_A="deblob-check-${DEBLOB_PV}"
+ DEBLOB_HOMEPAGE="http://www.fsfla.org/svn/fsfla/software/linux-libre/releases/tags"
+ DEBLOB_URI_PATH="${DEBLOB_PV}${K_DEBLOB_TAG}"
+ if ! has "${EAPI:-0}" 0 1 ; then
+ DEBLOB_CHECK_URI="${DEBLOB_HOMEPAGE}/${DEBLOB_URI_PATH}/deblob-check -> ${DEBLOB_CHECK_A}"
+ else
+ DEBLOB_CHECK_URI="mirror://gentoo/${DEBLOB_CHECK_A}"
+ fi
+
+ DEBLOB_URI="${DEBLOB_HOMEPAGE}/${DEBLOB_URI_PATH}/${DEBLOB_A}"
+ HOMEPAGE="${HOMEPAGE} ${DEBLOB_HOMEPAGE}"
+
+ KERNEL_URI="${KERNEL_URI}
+ deblob? (
+ ${DEBLOB_URI}
+ ${DEBLOB_CHECK_URI}
+ )"
+ else
+ # We have no way to deblob older kernels, so just mark them as
+ # tainted with non-libre materials.
+ LICENSE="${LICENSE} freedist"
+ fi
+ fi
+
+elif [[ ${ETYPE} == headers ]]; then
+ DESCRIPTION="Linux system headers"
+
+ # Since we should NOT honour KBUILD_OUTPUT in headers
+ # lets unset it here.
+ unset KBUILD_OUTPUT
+
+ SLOT="0"
+else
+ eerror "Unknown ETYPE=\"${ETYPE}\", must be \"sources\" or \"headers\""
+ die "Unknown ETYPE=\"${ETYPE}\", must be \"sources\" or \"headers\""
+fi
+
+# Cross-compile support functions
+#==============================================================
+# Echo the header install root: /usr/include for a native build, or the
+# /usr/${CTARGET}/usr/include sysroot when cross-compiling.
+kernel_header_destdir() {
+	[[ ${CTARGET} == ${CHOST} ]] \
+		&& echo /usr/include \
+		|| echo /usr/${CTARGET}/usr/include
+}
+
+# True when doing a headers-only cross build (USE=crosscompile_opts_headers-only
+# with CHOST != CTARGET); used to skip steps that need a target C compiler.
+cross_pre_c_headers() {
+	use crosscompile_opts_headers-only && [[ ${CHOST} != ${CTARGET} ]]
+}
+
+# Populate and export xmakeopts (ARCH= and, when cross-building,
+# CROSS_COMPILE=) which every make invocation in this eclass appends.
+env_setup_xmakeopts() {
+	# Kernel ARCH != portage ARCH
+	export KARCH=$(tc-arch-kernel)
+
+	# When cross-compiling, we need to set the ARCH/CROSS_COMPILE
+	# variables properly or bad things happen !
+	xmakeopts="ARCH=${KARCH}"
+	if [[ ${CTARGET} != ${CHOST} ]] && ! cross_pre_c_headers ; then
+		xmakeopts="${xmakeopts} CROSS_COMPILE=${CTARGET}-"
+	elif type -p ${CHOST}-ar > /dev/null ; then
+		# native build, but a CHOST-prefixed toolchain exists: prefer it
+		xmakeopts="${xmakeopts} CROSS_COMPILE=${CHOST}-"
+	fi
+	export xmakeopts
+}
+
+# Unpack functions
+#==============================================================
+# Prepare a freshly unpacked 2.4 tree: mrproper, symlinks, then
+# autogenerate include/linux/version.h.
+unpack_2_4() {
+	# this file is required for other things to build properly,
+	# so we autogenerate it
+	make -s mrproper ${xmakeopts} || die "make mrproper failed"
+	make -s symlinks ${xmakeopts} || die "make symlinks failed"
+	make -s include/linux/version.h ${xmakeopts} || die "make include/linux/version.h failed"
+	echo ">>> version.h compiled successfully."
+}
+
+unpack_2_6() {
+ # this file is required for other things to build properly, so we
+ # autogenerate it ... generate a .config to keep version.h build from
+ # spitting out an annoying warning
+ make -s mrproper ${xmakeopts} 2>/dev/null \
+ || die "make mrproper failed"
+
+ # quick fix for bug #132152 which triggers when it cannot include linux
+ # headers (ie, we have not installed it yet)
+ if ! make -s defconfig ${xmakeopts} &>/dev/null 2>&1 ; then
+ touch .config
+ eerror "make defconfig failed."
+ eerror "assuming you dont have any headers installed yet and continuing"
+ epause 5
+ fi
+
+ make -s include/linux/version.h ${xmakeopts} 2>/dev/null \
+ || die "make include/linux/version.h failed"
+ rm -f .config >/dev/null
+}
+
+# Unpack the kernel tarball into ${WORKDIR} and normalise the resulting
+# directory name to linux-${KV_FULL}, then strip editor backup files.
+universal_unpack() {
+	debug-print "Inside universal_unpack"
+
+	local OKV_ARRAY
+	IFS="." read -r -a OKV_ARRAY <<<"${OKV}"
+
+	cd "${WORKDIR}"
+	# >=3.x releases with a patch component ship tarballs named after
+	# MAJOR.MINOR only (the patch level is distributed as a patch)
+	if [[ ${#OKV_ARRAY[@]} -ge 3 ]] && [[ ${KV_MAJOR} -ge 3 ]]; then
+		unpack linux-${KV_MAJOR}.${KV_MINOR}.tar.xz
+	else
+		unpack linux-${OKV}.tar.xz
+	fi
+
+	if [[ -d "linux" ]]; then
+		debug-print "Moving linux to linux-${KV_FULL}"
+		mv linux linux-${KV_FULL} \
+			|| die "Unable to move source tree to ${KV_FULL}."
+	elif [[ "${OKV}" != "${KV_FULL}" ]]; then
+		if [[ ${#OKV_ARRAY[@]} -ge 3 ]] && [[ ${KV_MAJOR} -ge 3 ]] &&
+			[[ "${ETYPE}" = "sources" ]]; then
+			debug-print "moving linux-${KV_MAJOR}.${KV_MINOR} to linux-${KV_FULL} "
+			mv linux-${KV_MAJOR}.${KV_MINOR} linux-${KV_FULL} \
+				|| die "Unable to move source tree to ${KV_FULL}."
+		else
+			debug-print "moving linux-${OKV} to linux-${KV_FULL} "
+			mv linux-${OKV} linux-${KV_FULL} \
+				|| die "Unable to move source tree to ${KV_FULL}."
+		fi
+	elif [[ ${#OKV_ARRAY[@]} -ge 3 ]] && [[ ${KV_MAJOR} -ge 3 ]]; then
+		mv linux-${KV_MAJOR}.${KV_MINOR} linux-${KV_FULL} \
+			|| die "Unable to move source tree to ${KV_FULL}."
+	fi
+	cd "${S}"
+
+	# remove all backup files
+	find . -iname "*~" -exec rm {} \; 2> /dev/null
+
+}
+
+# Stamp the computed ${EXTRAVERSION} into the toplevel kernel Makefile.
+unpack_set_extraversion() {
+	cd "${S}"
+	sed -i -e "s:^\(EXTRAVERSION =\).*:\1 ${EXTRAVERSION}:" Makefile
+	cd "${OLDPWD}"
+}
+
+# Should be done after patches have been applied
+# Otherwise patches that modify the same area of Makefile will fail
+# Uncomment the INSTALL_PATH export in the kernel Makefile.
+unpack_fix_install_path() {
+	cd "${S}"
+	sed -i -e 's:#export\tINSTALL_PATH:export\tINSTALL_PATH:' Makefile
+}
+
+# Compile Functions
+#==============================================================
+# Configure/prepare the tree just enough that sanitised headers can be
+# installed.  Only kernels older than 2.6.18 (no headers_install target)
+# need any of this work.
+compile_headers() {
+	env_setup_xmakeopts
+
+	# if we couldnt obtain HOSTCFLAGS from the Makefile,
+	# then set it to something sane
+	local HOSTCFLAGS=$(getfilevar HOSTCFLAGS "${S}"/Makefile)
+	HOSTCFLAGS=${HOSTCFLAGS:--Wall -Wstrict-prototypes -O2 -fomit-frame-pointer}
+
+	if kernel_is 2 4; then
+		yes "" | make oldconfig ${xmakeopts}
+		echo ">>> make oldconfig complete"
+		make dep ${xmakeopts}
+	elif kernel_is 2 6; then
+		# 2.6.18 introduces headers_install which means we dont need any
+		# of this crap anymore :D
+		kernel_is ge 2 6 18 && return 0
+
+		# autoconf.h isnt generated unless it already exists. plus, we have
+		# no guarantee that any headers are installed on the system...
+		[[ -f ${ROOT}/usr/include/linux/autoconf.h ]] \
+			|| touch include/linux/autoconf.h
+
+		# if K_DEFCONFIG isn't set, force to "defconfig"
+		# needed by mips
+		if [[ -z ${K_DEFCONFIG} ]]; then
+			if [[ $(KV_to_int ${KV}) -ge $(KV_to_int 2.6.16) ]]; then
+				case ${CTARGET} in
+					powerpc64*) K_DEFCONFIG="ppc64_defconfig";;
+					powerpc*) K_DEFCONFIG="pmac32_defconfig";;
+					*) K_DEFCONFIG="defconfig";;
+				esac
+			else
+				K_DEFCONFIG="defconfig"
+			fi
+		fi
+
+		# if there arent any installed headers, then there also isnt an asm
+		# symlink in /usr/include/, and make defconfig will fail, so we have
+		# to force an include path with $S.
+		HOSTCFLAGS="${HOSTCFLAGS} -I${S}/include/"
+		ln -sf asm-${KARCH} "${S}"/include/asm
+		cross_pre_c_headers && return 0
+
+		make ${K_DEFCONFIG} HOSTCFLAGS="${HOSTCFLAGS}" ${xmakeopts} || die "defconfig failed (${K_DEFCONFIG})"
+		# re-run oldconfig only when the target-specific tweak touched .config
+		if compile_headers_tweak_config ; then
+			yes "" | make oldconfig HOSTCFLAGS="${HOSTCFLAGS}" ${xmakeopts} || die "2nd oldconfig failed"
+		fi
+		make prepare HOSTCFLAGS="${HOSTCFLAGS}" ${xmakeopts} || die "prepare failed"
+		make prepare-all HOSTCFLAGS="${HOSTCFLAGS}" ${xmakeopts} || die "prepare failed"
+	fi
+}
+
+# Returns 0 if .config was modified for the target (caller must re-run
+# oldconfig), 1 if left untouched.
+compile_headers_tweak_config() {
+	# some targets can be very very picky, so let's finesse the
+	# .config based upon any info we may have
+	case ${CTARGET} in
+	sh*)
+		# select the exact SuperH CPU family from the tuple, e.g. sh4 -> CONFIG_CPU_SH4
+		sed -i '/CONFIG_CPU_SH/d' .config
+		echo "CONFIG_CPU_SH${CTARGET:2:1}=y" >> .config
+		return 0;;
+	esac
+
+	# no changes, so lets do nothing
+	return 1
+}
+
+# install functions
+#==============================================================
+# Normalise ownership and permissions of the unpacked tree before install.
+install_universal() {
+	# Fix silly permissions in tarball
+	cd "${WORKDIR}"
+	chown -R 0:0 * >& /dev/null
+	chmod -R a+r-w+X,u+w *
+	cd ${OLDPWD}
+}
+
+# Install sanitised kernel headers into the destdir; >=2.6.18 kernels use
+# the upstream headers_install target, older ones are copied manually.
+install_headers() {
+	local ddir=$(kernel_header_destdir)
+
+	# 2.6.18 introduces headers_install which means we dont need any
+	# of this crap anymore :D
+	if kernel_is ge 2 6 18 ; then
+		env_setup_xmakeopts
+		emake headers_install INSTALL_HDR_PATH="${D}"/${ddir}/.. ${xmakeopts} || die
+
+		# let other packages install some of these headers
+		rm -rf "${D}"/${ddir}/scsi #glibc/uclibc/etc...
+		return 0
+	fi
+
+	# Do not use "linux/*" as that can cause problems with very long
+	# $S values where the cmdline to cp is too long
+	pushd "${S}" >/dev/null
+	dodir ${ddir}/linux
+	cp -pPR "${S}"/include/linux "${D}"/${ddir}/ || die
+	rm -rf "${D}"/${ddir}/linux/modules
+
+	dodir ${ddir}/asm
+	cp -pPR "${S}"/include/asm/* "${D}"/${ddir}/asm
+
+	# asm-generic only exists on 2.6 trees
+	if kernel_is 2 6 ; then
+		dodir ${ddir}/asm-generic
+		cp -pPR "${S}"/include/asm-generic/* "${D}"/${ddir}/asm-generic
+	fi
+
+	# clean up
+	find "${D}" -name '*.orig' -exec rm -f {} \;
+
+	popd >/dev/null
+}
+
+# Install the kernel source tree into /usr/src, collecting any patchset
+# documentation into patches.txt and installing UNIPATCH_DOCS via dodoc.
+install_sources() {
+	local file
+
+	cd "${S}"
+	dodir /usr/src
+	echo ">>> Copying sources ..."
+
+	# concatenate any "docs" directory shipped by the patchset
+	file="$(find ${WORKDIR} -iname "docs" -type d)"
+	if [[ -n ${file} ]]; then
+		for file in $(find ${file} -type f); do
+			echo "${file//*docs\/}" >> "${S}"/patches.txt
+			echo "===================================================" >> "${S}"/patches.txt
+			cat ${file} >> "${S}"/patches.txt
+			echo "===================================================" >> "${S}"/patches.txt
+			echo "" >> "${S}"/patches.txt
+		done
+	fi
+
+	if [[ ! -f ${S}/patches.txt ]]; then
+		# patches.txt is empty so lets use our ChangeLog
+		[[ -f ${FILESDIR}/../ChangeLog ]] && \
+			echo "Please check the ebuild ChangeLog for more details." \
+			> "${S}"/patches.txt
+	fi
+
+	mv ${WORKDIR}/linux* "${D}"/usr/src
+
+	if [[ -n "${UNIPATCH_DOCS}" ]] ; then
+		for i in ${UNIPATCH_DOCS}; do
+			dodoc "${T}"/${i}
+		done
+	fi
+}
+
+# pkg_preinst functions
+#==============================================================
+# Remove stale linux/asm symlinks so the header install can replace them
+# with real directories.
+preinst_headers() {
+	local ddir=$(kernel_header_destdir)
+	[[ -L ${ddir}/linux ]] && rm ${ddir}/linux
+	[[ -L ${ddir}/asm ]] && rm ${ddir}/asm
+}
+
+# pkg_postinst functions
+#==============================================================
+# Post-install handling for ETYPE=sources: manage the /usr/src/linux
+# symlink, create /sys on 2.6, and emit any informational messages the
+# ebuild requested via K_EXTRA{EINFO,ELOG,EWARN}.
+postinst_sources() {
+	local MAKELINK=0
+
+	# if we have USE=symlink, then force K_SYMLINK=1
+	use symlink && K_SYMLINK=1
+
+	# if we're using a deblobbed kernel, it's not supported
+	[[ $K_DEBLOB_AVAILABLE == 1 ]] && \
+		use deblob && \
+		K_SECURITY_UNSUPPORTED=deblob
+
+	# if we are to forcably symlink, delete it if it already exists first.
+	# (note: '>' inside [[ ]] is a lexical comparison; fine for the 0/1
+	# values K_SYMLINK actually takes)
+	if [[ ${K_SYMLINK} > 0 ]]; then
+		[[ -h ${ROOT}usr/src/linux ]] && rm ${ROOT}usr/src/linux
+		MAKELINK=1
+	fi
+
+	# if the link doesnt exist, lets create it
+	[[ ! -h ${ROOT}usr/src/linux ]] && MAKELINK=1
+
+	if [[ ${MAKELINK} == 1 ]]; then
+		cd "${ROOT}"usr/src
+		ln -sf linux-${KV_FULL} linux
+		cd ${OLDPWD}
+	fi
+
+	# Don't forget to make directory for sysfs
+	[[ ! -d ${ROOT}sys ]] && kernel_is 2 6 && mkdir ${ROOT}sys
+
+	echo
+	elog "If you are upgrading from a previous kernel, you may be interested"
+	elog "in the following document:"
+	elog "  - General upgrade guide: https://wiki.gentoo.org/wiki/Kernel/Upgrade"
+	echo
+
+	# if K_EXTRAEINFO is set then lets display it now
+	if [[ -n ${K_EXTRAEINFO} ]]; then
+		echo ${K_EXTRAEINFO} | fmt |
+		while read -s ELINE; do einfo "${ELINE}"; done
+	fi
+
+	# if K_EXTRAELOG is set then lets display it now
+	if [[ -n ${K_EXTRAELOG} ]]; then
+		echo ${K_EXTRAELOG} | fmt |
+		while read -s ELINE; do elog "${ELINE}"; done
+	fi
+
+	# if K_EXTRAEWARN is set then lets display it now
+	if [[ -n ${K_EXTRAEWARN} ]]; then
+		echo ${K_EXTRAEWARN} | fmt |
+		while read -s ELINE; do ewarn "${ELINE}"; done
+	fi
+
+	# optionally display security unsupported message
+	#  Start with why
+	if [[ -n ${K_SECURITY_UNSUPPORTED} ]]; then
+		ewarn "${PN} is UNSUPPORTED by Gentoo Security."
+	fi
+	#  And now the general message.
+	if [[ -n ${K_SECURITY_UNSUPPORTED} ]]; then
+		ewarn "This means that it is likely to be vulnerable to recent security issues."
+		ewarn "For specific information on why this kernel is unsupported, please read:"
+		ewarn "http://www.gentoo.org/proj/en/security/kernel.xml"
+	fi
+
+	# warn sparc users that they need to do cross-compiling with >= 2.6.25(bug #214765)
+	KV_MAJOR=$(get_version_component_range 1 ${OKV})
+	KV_MINOR=$(get_version_component_range 2 ${OKV})
+	KV_PATCH=$(get_version_component_range 3 ${OKV})
+	if [[ "$(tc-arch)" = "sparc" ]]; then
+		if [[ $(gcc-major-version) -lt 4 && $(gcc-minor-version) -lt 4 ]]; then
+			if [[ ${KV_MAJOR} -ge 3 || ${KV_MAJOR}.${KV_MINOR}.${KV_PATCH} > 2.6.24 ]] ; then
+				echo
+				elog "NOTE: Since 2.6.25 the kernel Makefile has changed in a way that"
+				elog "you now need to do"
+				elog "  make CROSS_COMPILE=sparc64-unknown-linux-gnu-"
+				elog "instead of just"
+				elog "  make"
+				elog "to compile the kernel. For more information please browse to"
+				elog "https://bugs.gentoo.org/show_bug.cgi?id=214765"
+				echo
+			fi
+		fi
+	fi
+}
+
+# pkg_setup functions
+#==============================================================
+# Die unless the current arch is listed in H_SUPPORTEDARCH (defaults to
+# the package-name prefix, e.g. "mips" for mips-headers).
+setup_headers() {
+	[[ -z ${H_SUPPORTEDARCH} ]] && H_SUPPORTEDARCH=${PN/-*/}
+	for i in ${H_SUPPORTEDARCH}; do
+		[[ $(tc-arch) == "${i}" ]] && H_ACCEPT_ARCH="yes"
+	done
+
+	if [[ ${H_ACCEPT_ARCH} != "yes" ]]; then
+		echo
+		eerror "This version of ${PN} does not support $(tc-arch)."
+		eerror "Please merge the appropriate sources, in most cases"
+		eerror "(but not all) this will be called $(tc-arch)-headers."
+		die "Package unsupported for $(tc-arch)"
+	fi
+}
+
+# unipatch
+#==============================================================
+# unipatch <list of tarballs/patches[:patchlevel]>
+# Unpack any patchset tarballs into ${WORKDIR}/patches, decompress single
+# patches, drop excluded patch numbers (UNIPATCH_EXCLUDE/UNIPATCH_DROP),
+# then apply everything, auto-probing -p0..-p4 via dry-runs.
+unipatch() {
+	local i x y z extention PIPE_CMD UNIPATCH_DROP KPATCH_DIR PATCH_DEPTH ELINE
+	local STRICT_COUNT PATCH_LEVEL myLC_ALL myLANG
+
+	# set to a standard locale to ensure sorts are ordered properly.
+	myLC_ALL="${LC_ALL}"
+	myLANG="${LANG}"
+	LC_ALL="C"
+	LANG=""
+
+	[ -z "${KPATCH_DIR}" ] && KPATCH_DIR="${WORKDIR}/patches/"
+	[ ! -d ${KPATCH_DIR} ] && mkdir -p ${KPATCH_DIR}
+
+	# We're gonna need it when doing patches with a predefined patchlevel
+	eshopts_push -s extglob
+
+	# This function will unpack all passed tarballs, add any passed patches, and remove any passed patchnumbers
+	# usage can be either via an env var or by params
+	# although due to the nature we pass this within this eclass
+	# it shall be by param only.
+	# -z "${UNIPATCH_LIST}" ] && UNIPATCH_LIST="${@}"
+	UNIPATCH_LIST="${@}"
+
+	#unpack any passed tarballs
+	for i in ${UNIPATCH_LIST}; do
+		if echo ${i} | grep -qs -e "\.tar" -e "\.tbz" -e "\.tgz" ; then
+			if [ -n "${UNIPATCH_STRICTORDER}" ]; then
+				# zero-pad a per-tarball counter to six digits so the
+				# subdirectories sort in the order the tarballs were passed
+				unset z
+				STRICT_COUNT=$((10#${STRICT_COUNT} + 1))
+				for((y=0; y<$((6 - ${#STRICT_COUNT})); y++));
+					do z="${z}0";
+				done
+				PATCH_ORDER="${z}${STRICT_COUNT}"
+
+				mkdir -p "${KPATCH_DIR}/${PATCH_ORDER}"
+				pushd "${KPATCH_DIR}/${PATCH_ORDER}" >/dev/null
+				unpack ${i##*/}
+				popd >/dev/null
+			else
+				pushd "${KPATCH_DIR}" >/dev/null
+				unpack ${i##*/}
+				popd >/dev/null
+			fi
+
+			[[ ${i} == *:* ]] && echo ">>> Strict patch levels not currently supported for tarballed patchsets"
+		else
+			extention=${i/*./}
+			extention=${extention/:*/}
+			PIPE_CMD=""
+			case ${extention} in
+				     xz) PIPE_CMD="xz -dc";;
+				   lzma) PIPE_CMD="lzma -dc";;
+				    bz2) PIPE_CMD="bzip2 -dc";;
+				 patch*) PIPE_CMD="cat";;
+				   diff) PIPE_CMD="cat";;
+				 gz|Z|z) PIPE_CMD="gzip -dc";;
+				ZIP|zip) PIPE_CMD="unzip -p";;
+				      *) UNIPATCH_DROP="${UNIPATCH_DROP} ${i/:*/}";;
+			esac
+
+			# an explicit "file.patch:N" suffix pins the -p level (extglob)
+			PATCH_LEVEL=${i/*([^:])?(:)}
+			i=${i/:*/}
+			x=${i/*\//}
+			x=${x/\.${extention}/}
+
+			if [ -n "${PIPE_CMD}" ]; then
+				if [ ! -r "${i}" ]; then
+					echo
+					eerror "FATAL: unable to locate:"
+					eerror "${i}"
+					eerror "for read-only. The file either has incorrect permissions"
+					eerror "or does not exist."
+					die Unable to locate ${i}
+				fi
+
+				if [ -n "${UNIPATCH_STRICTORDER}" ]; then
+					unset z
+					STRICT_COUNT=$((10#${STRICT_COUNT} + 1))
+					for((y=0; y<$((6 - ${#STRICT_COUNT})); y++));
+						do z="${z}0";
+					done
+					PATCH_ORDER="${z}${STRICT_COUNT}"
+
+					mkdir -p ${KPATCH_DIR}/${PATCH_ORDER}/
+					$(${PIPE_CMD} ${i} > ${KPATCH_DIR}/${PATCH_ORDER}/${x}.patch${PATCH_LEVEL}) || die "uncompressing patch failed"
+				else
+					$(${PIPE_CMD} ${i} > ${KPATCH_DIR}/${x}.patch${PATCH_LEVEL}) || die "uncompressing patch failed"
+				fi
+			fi
+		fi
+
+		# If experimental was not chosen by the user, drop experimental patches not in K_EXP_GENPATCHES_LIST.
+		if [[ "${i}" == *"genpatches-"*".experimental."* && -n ${K_EXP_GENPATCHES_PULL} ]] ; then
+			if [[ -z ${K_EXP_GENPATCHES_NOUSE} ]] && use experimental; then
+				continue
+			fi
+
+			local j
+			for j in ${KPATCH_DIR}/*/50*_*.patch*; do
+				for k in ${K_EXP_GENPATCHES_LIST} ; do
+					[[ "$(basename ${j})" == ${k}* ]] && continue 2
+				done
+				UNIPATCH_DROP+=" $(basename ${j})"
+			done
+		else
+			# NOTE(review): ${tarball} is never assigned in this function;
+			# it presumably leaks in from handle_genpatches() — verify.
+			UNIPATCH_LIST_GENPATCHES+=" ${DISTDIR}/${tarball}"
+			debug-print "genpatches tarball: $tarball"
+
+			# check gcc version < 4.9.X uses patch 5000 and = 4.9.X uses patch 5010
+			if [[ $(gcc-major-version) -eq 4 ]] && [[ $(gcc-minor-version) -ne 9 ]]; then
+				# drop 5000_enable-additional-cpu-optimizations-for-gcc-4.9.patch
+				if [[ $UNIPATCH_DROP != *"5010_enable-additional-cpu-optimizations-for-gcc-4.9.patch"* ]]; then
+					UNIPATCH_DROP+=" 5010_enable-additional-cpu-optimizations-for-gcc-4.9.patch"
+				fi
+			else
+				if [[ $UNIPATCH_DROP != *"5000_enable-additional-cpu-optimizations-for-gcc.patch"* ]]; then
+					#drop 5000_enable-additional-cpu-optimizations-for-gcc.patch
+					UNIPATCH_DROP+=" 5000_enable-additional-cpu-optimizations-for-gcc.patch"
+				fi
+			fi
+
+			# if kdbus use flag is not set, drop the kdbus patch
+			if [[ $UNIPATCH_DROP != *"5015_kdbus*.patch"* ]]; then
+				if ! has kdbus ${IUSE} || ! use kdbus; then
+					UNIPATCH_DROP="${UNIPATCH_DROP} 5015_kdbus*.patch"
+				fi
+			fi
+		fi
+	done
+
+	#populate KPATCH_DIRS so we know where to look to remove the excludes
+	x=${KPATCH_DIR}
+	KPATCH_DIR=""
+	for i in $(find ${x} -type d | sort -n); do
+		KPATCH_DIR="${KPATCH_DIR} ${i}"
+	done
+
+	# do not apply fbcondecor patch to sparc/sparc64 as it breaks boot
+	# bug #272676
+	if [[ "$(tc-arch)" = "sparc" || "$(tc-arch)" = "sparc64" ]]; then
+		if [[ ${KV_MAJOR} -ge 3 || ${KV_MAJOR}.${KV_MINOR}.${KV_PATCH} > 2.6.28 ]]; then
+			UNIPATCH_DROP="${UNIPATCH_DROP} *_fbcondecor-0.9.6.patch"
+			echo
+			ewarn "fbcondecor currently prevents sparc/sparc64 from booting"
+			ewarn "for kernel versions >= 2.6.29. Removing fbcondecor patch."
+			ewarn "See https://bugs.gentoo.org/show_bug.cgi?id=272676 for details"
+			echo
+		fi
+	fi
+
+	#so now lets get rid of the patchno's we want to exclude
+	UNIPATCH_DROP="${UNIPATCH_EXCLUDE} ${UNIPATCH_DROP}"
+	for i in ${UNIPATCH_DROP}; do
+		ebegin "Excluding Patch #${i}"
+		for x in ${KPATCH_DIR}; do rm -f ${x}/${i}* 2>/dev/null; done
+		eend $?
+	done
+
+	# and now, finally, we patch it :)
+	for x in ${KPATCH_DIR}; do
+		for i in $(find ${x} -maxdepth 1 -iname "*.patch*" -or -iname "*.diff*" | sort -n); do
+			STDERR_T="${T}/${i/*\//}"
+			STDERR_T="${STDERR_T/.patch*/.err}"
+
+			# a numeric suffix after ".patch" pins the -p depth
+			[ -z ${i/*.patch*/} ] && PATCH_DEPTH=${i/*.patch/}
+			#[ -z ${i/*.diff*/} ] && PATCH_DEPTH=${i/*.diff/}
+
+			if [ -z "${PATCH_DEPTH}" ]; then PATCH_DEPTH=0; fi
+
+			####################################################################
+			# IMPORTANT: This is temporary code to support Linux git 3.15_rc1! #
+			#                                                                  #
+			# The patch contains a removal of a symlink, followed by addition  #
+			# of a file with the same name as the symlink in the same          #
+			# location; this causes the dry-run to fail, filed bug #507656.    #
+			#                                                                  #
+			# https://bugs.gentoo.org/show_bug.cgi?id=507656                   #
+			####################################################################
+			if [[ ${PN} == "git-sources" ]] ; then
+				if [[ ${KV_MAJOR}${KV_PATCH} -ge 315 && ${RELEASETYPE} == -rc ]] ; then
+					ebegin "Applying ${i/*\//} (-p1)"
+					if [ $(patch -p1 --no-backup-if-mismatch -f < ${i} >> ${STDERR_T}) "$?" -eq 0 ]; then
+						eend 0
+						rm ${STDERR_T}
+						break
+					else
+						eend 1
+						eerror "Failed to apply patch ${i/*\//}"
+						eerror "Please attach ${STDERR_T} to any bug you may post."
+						eshopts_pop
+						die "Failed to apply ${i/*\//} on patch depth 1."
+					fi
+				fi
+			fi
+			####################################################################
+
+			# probe -p0 .. -p4 with dry-runs, then apply at the first depth
+			# whose dry-run succeeds
+			while [ ${PATCH_DEPTH} -lt 5 ]; do
+				echo "Attempting Dry-run:" >> ${STDERR_T}
+				echo "cmd: patch -p${PATCH_DEPTH} --no-backup-if-mismatch --dry-run -f < ${i}" >> ${STDERR_T}
+				echo "=======================================================" >> ${STDERR_T}
+				if [ $(patch -p${PATCH_DEPTH} --no-backup-if-mismatch --dry-run -f < ${i} >> ${STDERR_T}) $? -eq 0 ]; then
+					ebegin "Applying ${i/*\//} (-p${PATCH_DEPTH})"
+					echo "Attempting patch:" > ${STDERR_T}
+					echo "cmd: patch -p${PATCH_DEPTH} --no-backup-if-mismatch -f < ${i}" >> ${STDERR_T}
+					echo "=======================================================" >> ${STDERR_T}
+					if [ $(patch -p${PATCH_DEPTH} --no-backup-if-mismatch -f < ${i} >> ${STDERR_T}) "$?" -eq 0 ]; then
+						eend 0
+						rm ${STDERR_T}
+						break
+					else
+						eend 1
+						eerror "Failed to apply patch ${i/*\//}"
+						eerror "Please attach ${STDERR_T} to any bug you may post."
+						eshopts_pop
+						die "Failed to apply ${i/*\//} on patch depth ${PATCH_DEPTH}."
+					fi
+				else
+					PATCH_DEPTH=$((${PATCH_DEPTH} + 1))
+				fi
+			done
+			if [ ${PATCH_DEPTH} -eq 5 ]; then
+				eerror "Failed to dry-run patch ${i/*\//}"
+				eerror "Please attach ${STDERR_T} to any bug you may post."
+				eshopts_pop
+				die "Unable to dry-run patch on any patch depth lower than 5."
+			fi
+		done
+	done
+
+	# When genpatches is used, we want to install 0000_README which documents
+	# the patches that were used; such that the user can see them, bug #301478.
+	if [[ ! -z ${K_WANT_GENPATCHES} ]] ; then
+		UNIPATCH_DOCS="${UNIPATCH_DOCS} 0000_README"
+	fi
+
+	# When files listed in UNIPATCH_DOCS are found in KPATCH_DIR's, we copy it
+	# to the temporary directory and remember them in UNIPATCH_DOCS to install
+	# them during the install phase.
+	local tmp
+	for x in ${KPATCH_DIR}; do
+		for i in ${UNIPATCH_DOCS}; do
+			if [[ -f "${x}/${i}" ]] ; then
+				tmp="${tmp} ${i}"
+				cp -f "${x}/${i}" "${T}"/
+			fi
+		done
+	done
+	UNIPATCH_DOCS="${tmp}"
+
+	# clean up KPATCH_DIR's - fixes bug #53610
+	for x in ${KPATCH_DIR}; do rm -Rf ${x}; done
+
+	LC_ALL="${myLC_ALL}"
+	LANG="${myLANG}"
+	eshopts_pop
+}
+
+# getfilevar accepts 2 vars as follows:
+# getfilevar <VARIABLE> <CONFIGFILE>
+# pulled from linux-info
+# Prints the value of make variable $1 as evaluated by a make run against
+# file $2 (so ?= / := / include logic is honoured).
+
+getfilevar() {
+	local workingdir basefname basedname xarch=$(tc-arch-kernel)
+
+	# NOTE(review): this guard only errors when BOTH $1 is empty AND $2 is
+	# missing; a single bad argument falls through — matches the
+	# linux-info.eclass original, verify before tightening.
+	if [[ -z ${1} ]] && [[ ! -f ${2} ]]; then
+		echo -e "\n"
+		eerror "getfilevar requires 2 variables, with the second a valid file."
+		eerror "   getfilevar <VARIABLE> <CONFIGFILE>"
+	else
+		workingdir=${PWD}
+		basefname=$(basename ${2})
+		basedname=$(dirname ${2})
+		# ARCH must be unset so the Makefile's own default is evaluated
+		unset ARCH
+
+		cd ${basedname}
+		echo -e "include ${basefname}\ne:\n\t@echo \$(${1})" | \
+			make ${BUILD_FIXES} -s -f - e 2>/dev/null
+		cd ${workingdir}
+
+		ARCH=${xarch}
+	fi
+}
+
+detect_arch() {
+	# This function sets ARCH_URI and ARCH_PATCH
+	# with the necessary info for the arch specific compatibility
+	# patchsets.
+
+	local ALL_ARCH LOOP_ARCH COMPAT_URI i
+
+	# COMPAT_URI is the contents of ${ARCH}_URI
+	# ARCH_URI is the URI for all the ${ARCH}_URI patches
+	# ARCH_PATCH is ARCH_URI broken into files for UNIPATCH
+
+	ARCH_URI=""
+	ARCH_PATCH=""
+	ALL_ARCH="ALPHA AMD64 ARM HPPA IA64 M68K MIPS PPC PPC64 S390 SH SPARC X86"
+
+	for LOOP_ARCH in ${ALL_ARCH}; do
+		# indirect expansion: read e.g. ${SPARC_URI} set by the ebuild
+		COMPAT_URI="${LOOP_ARCH}_URI"
+		COMPAT_URI="${!COMPAT_URI}"
+
+		[[ -n ${COMPAT_URI} ]] && \
+			ARCH_URI="${ARCH_URI} $(echo ${LOOP_ARCH} | tr '[:upper:]' '[:lower:]')? ( ${COMPAT_URI} )"
+
+		if [[ ${LOOP_ARCH} == "$(echo $(tc-arch-kernel) | tr '[:lower:]' '[:upper:]')" ]]; then
+			for i in ${COMPAT_URI}; do
+				ARCH_PATCH="${ARCH_PATCH} ${DISTDIR}/${i/*\//}"
+			done
+		fi
+	done
+}
+
+# Rewrite the given header files in place: prefix u8/u16/... with __ and
+# wrap inline/asm/volatile in __..__ so they are safe for userspace,
+# skipping the *_TYPES_H guard blocks.
+headers___fix() {
+	# Voodoo to partially fix broken upstream headers.
+	# note: do not put inline/asm/volatile together (breaks "inline asm volatile")
+	sed -i \
+		-e '/^\#define.*_TYPES_H/{:loop n; bloop}' \
+		-e 's:\<\([us]\(8\|16\|32\|64\)\)\>:__\1:g' \
+		-e "s/\([[:space:]]\)inline\([[:space:](]\)/\1__inline__\2/g" \
+		-e "s/\([[:space:]]\)asm\([[:space:](]\)/\1__asm__\2/g" \
+		-e "s/\([[:space:]]\)volatile\([[:space:](]\)/\1__volatile__\2/g" \
+		"$@"
+}
+
+# common functions
+#==============================================================
+# Exported src_unpack: unpack, run unipatch + user patches, set
+# EXTRAVERSION/INSTALL_PATH, and stage deblob scripts when requested.
+kernel-2_src_unpack() {
+	universal_unpack
+	debug-print "Doing unipatch"
+
+	[[ -n ${UNIPATCH_LIST} || -n ${UNIPATCH_LIST_DEFAULT} || -n ${UNIPATCH_LIST_GENPATCHES} ]] && \
+		unipatch "${UNIPATCH_LIST_DEFAULT} ${UNIPATCH_LIST_GENPATCHES} ${UNIPATCH_LIST}"
+
+	debug-print "Doing premake"
+
+	# allow ebuilds to massage the source tree after patching but before
+	# we run misc `make` functions below
+	[[ $(type -t kernel-2_hook_premake) == "function" ]] && kernel-2_hook_premake
+
+	debug-print "Doing epatch_user"
+	epatch_user
+
+	debug-print "Doing unpack_set_extraversion"
+
+	[[ -z ${K_NOSETEXTRAVERSION} ]] && unpack_set_extraversion
+	unpack_fix_install_path
+
+	# Setup xmakeopts and cd into sourcetree.
+	env_setup_xmakeopts
+	cd "${S}"
+
+	# We dont need a version.h for anything other than headers
+	# at least, I should hope we dont. If this causes problems
+	# take out the if/fi block and inform me please.
+	# unpack_2_6 should now be 2.6.17 safe anyways
+	if [[ ${ETYPE} == headers ]]; then
+		kernel_is 2 4 && unpack_2_4
+		kernel_is 2 6 && unpack_2_6
+	fi
+
+	if [[ $K_DEBLOB_AVAILABLE == 1 ]] && use deblob ; then
+		cp "${DISTDIR}/${DEBLOB_A}" "${T}" || die "cp ${DEBLOB_A} failed"
+		cp "${DISTDIR}/${DEBLOB_CHECK_A}" "${T}/deblob-check" || die "cp ${DEBLOB_CHECK_A} failed"
+		chmod +x "${T}/${DEBLOB_A}" "${T}/deblob-check" || die "chmod deblob scripts failed"
+	fi
+
+	# fix a problem on ppc where TOUT writes to /usr/src/linux breaking sandbox
+	# only do this for kernel < 2.6.27 since this file does not exist in later
+	# kernels
+	# NOTE(review): '<' inside [[ ]] is a lexicographic string comparison,
+	# not a version comparison — confirm it holds for all KV values used.
+	if [[ -n ${KV_MINOR} && ${KV_MAJOR}.${KV_MINOR}.${KV_PATCH} < 2.6.27 ]] ; then
+		sed -i \
+			-e 's|TOUT	:= .tmp_gas_check|TOUT	:= $(T).tmp_gas_check|' \
+			"${S}"/arch/ppc/Makefile
+	else
+		sed -i \
+			-e 's|TOUT	:= .tmp_gas_check|TOUT	:= $(T).tmp_gas_check|' \
+			"${S}"/arch/powerpc/Makefile
+	fi
+}
+
+# Exported src_compile: prepare headers (ETYPE=headers) and run the
+# deblob script when USE=deblob is active.
+kernel-2_src_compile() {
+	cd "${S}"
+	[[ ${ETYPE} == headers ]] && compile_headers
+
+	if [[ $K_DEBLOB_AVAILABLE == 1 ]] && use deblob ; then
+		echo ">>> Running deblob script ..."
+		python_setup
+		sh "${T}/${DEBLOB_A}" --force || die "Deblob script failed to run!!!"
+	fi
+}
+
+# if you leave it to the default src_test, it will run make to
+# find whether test/check targets are present; since "make test"
+# actually produces a few support files, they are installed even
+# though the package is binchecks-restricted.
+#
+# Avoid this altogether by making the function moot.
+kernel-2_src_test() { :; }
+
+# Exported pkg_preinst: only header packages need pre-install cleanup.
+kernel-2_pkg_preinst() {
+	[[ ${ETYPE} == headers ]] && preinst_headers
+}
+
+# Exported src_install: common permission fixups, then the ETYPE-specific
+# install routine.
+kernel-2_src_install() {
+	install_universal
+	[[ ${ETYPE} == headers ]] && install_headers
+	[[ ${ETYPE} == sources ]] && install_sources
+}
+
+# Exported pkg_postinst: only source packages need post-install handling.
+kernel-2_pkg_postinst() {
+	[[ ${ETYPE} == sources ]] && postinst_sources
+}
+
+# Exported pkg_setup: warn about gcc-4 on 2.4 kernels, export the kernel
+# ABI, and validate arch support for header packages.
+kernel-2_pkg_setup() {
+	if kernel_is 2 4; then
+		if [[ $(gcc-major-version) -ge 4 ]] ; then
+			echo
+			ewarn "Be warned !! >=sys-devel/gcc-4.0.0 isn't supported with linux-2.4!"
+			ewarn "Either switch to another gcc-version (via gcc-config) or use a"
+			ewarn "newer kernel that supports gcc-4."
+			echo
+			ewarn "Also be aware that bugreports about gcc-4 not working"
+			ewarn "with linux-2.4 based ebuilds will be closed as INVALID!"
+			echo
+			epause 10
+		fi
+	fi
+
+	ABI="${KERNEL_ABI}"
+	[[ ${ETYPE} == headers ]] && setup_headers
+	[[ ${ETYPE} == sources ]] && echo ">>> Preparing to unpack ..."
+}
+
+# Exported pkg_postrm: tell the user that modified files under the old
+# source directory are intentionally left behind.
+kernel-2_pkg_postrm() {
+	# This warning only makes sense for kernel sources.
+	[[ ${ETYPE} == headers ]] && return 0
+
+	# If there isn't anything left behind, then don't complain.
+	[[ -e ${ROOT}usr/src/linux-${KV_FULL} ]] || return 0
+	echo
+	ewarn "Note: Even though you have successfully unmerged "
+	ewarn "your kernel package, directories in kernel source location: "
+	ewarn "${ROOT}usr/src/linux-${KV_FULL}"
+	ewarn "with modified files will remain behind. By design, package managers"
+	ewarn "will not remove these modified files and the directories they reside in."
+	echo
+}
diff --git a/eclass/l10n.eclass b/eclass/l10n.eclass
new file mode 100644
index 000000000000..a7a6a26bb65a
--- /dev/null
+++ b/eclass/l10n.eclass
@@ -0,0 +1,119 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: l10n.eclass
+# @MAINTAINER:
+# Ben de Groot <yngwin@gentoo.org>
+# @BLURB: convenience functions to handle localizations
+# @DESCRIPTION:
+# The l10n (localization) eclass offers a number of functions to more
+# conveniently handle localizations (translations) offered by packages.
+# These are meant to prevent code duplication for such boring tasks as
+# determining the cross-section between the user's set LINGUAS and what
+# is offered by the package; and generating the right list of linguas_*
+# USE flags.
+
+# @ECLASS-VARIABLE: PLOCALES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Variable listing the locales for which localizations are offered by
+# the package. Check profiles/desc/linguas.desc to see if the locales
+# are listed there. Add any missing ones there.
+#
+# Example: PLOCALES="cy de el_GR en_US pt_BR vi zh_CN"
+
+# @ECLASS-VARIABLE: PLOCALE_BACKUP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# In some cases the package fails when none of the offered PLOCALES are
+# selected by the user. In that case this variable should be set to a
+# default locale (usually 'en' or 'en_US') as backup.
+#
+# Example: PLOCALE_BACKUP="en_US"
+
# Add a linguas_* USE flag for every locale the package offers, so the
# user can select translations via LINGUAS.
# NOTE(review): the loop variable 'u' leaks into global scope after the
# eclass is sourced -- presumably harmless, but confirm before relying
# on 'u' elsewhere.
for u in ${PLOCALES}; do
	IUSE+=" linguas_${u}"
done
+
+# @FUNCTION: l10n_for_each_locale_do
+# @USAGE: <function>
+# @DESCRIPTION:
+# Convenience function for processing localizations. The parameter should
+# be a function (defined in the consuming eclass or ebuild) which takes
+# an individual localization as (last) parameter.
+#
+# Example: l10n_for_each_locale_do install_locale
l10n_for_each_locale_do() {
	# Invoke the supplied command once per enabled locale, passing the
	# locale as the final argument.
	local enabled_locale
	for enabled_locale in $(l10n_get_locales); do
		"${@}" ${enabled_locale} \
			|| die "failed to process enabled ${enabled_locale} locale"
	done
}
+
+# @FUNCTION: l10n_for_each_disabled_locale_do
+# @USAGE: <function>
+# @DESCRIPTION:
+# Complementary to l10n_for_each_locale_do, this function will process
+# locales that are disabled. This could be used for example to remove
+# locales from a Makefile, to prevent them from being built needlessly.
l10n_for_each_disabled_locale_do() {
	# Invoke the supplied command once per disabled locale, passing the
	# locale as the final argument.
	local disabled_locale
	for disabled_locale in $(l10n_get_locales disabled); do
		"${@}" ${disabled_locale} \
			|| die "failed to process disabled ${disabled_locale} locale"
	done
}
+
+# @FUNCTION: l10n_find_plocales_changes
+# @USAGE: <translations dir> <filename pre pattern> <filename post pattern>
+# @DESCRIPTION:
+# Ebuild maintenance helper function to find changes in package offered
+# locales when doing a version bump. This could be added for example to
+# src_prepare
+#
+# Example: l10n_find_plocales_changes "${S}/src/translations" "${PN}_" '.ts'
l10n_find_plocales_changes() {
	[[ $# -ne 3 ]] && die "Exactly 3 arguments are needed!"
	einfo "Looking in ${1} for new locales ..."
	pushd "${1}" >/dev/null || die "Cannot access ${1}"
	local current= x=
	# Derive the locale from each matching filename by stripping the
	# given prefix ($2) and suffix ($3); quoting the patterns keeps any
	# glob characters in them literal.
	for x in ${2}*${3} ; do
		x=${x#"${2}"}
		x=${x%"${3}"}
		current+="${x} "
	done
	popd >/dev/null
	# ${current%[[:space:]]} drops the single trailing space added above.
	# NOTE(review): the right-hand side of != is unquoted, making this a
	# pattern match; with plain locale lists it behaves like string
	# equality -- confirm before quoting it.
	if [[ ${PLOCALES} != ${current%[[:space:]]} ]] ; then
		einfo "There are changes in locales! This ebuild should be updated to:"
		einfo "PLOCALES=\"${current%[[:space:]]}\""
	else
		einfo "Done"
	fi
}
+
+# @FUNCTION: l10n_get_locales
+# @USAGE: [disabled]
+# @DESCRIPTION:
+# Determine which LINGUAS USE flags the user has enabled that are offered
+# by the package, as listed in PLOCALES, and return them. In case no locales
+# are selected, fall back on PLOCALE_BACKUP. When the disabled argument is
+# given, return the disabled useflags instead of the enabled ones.
l10n_get_locales() {
	# Split PLOCALES into enabled and disabled sets according to the
	# user's linguas_* USE flags, then print the requested set.
	local locale on off wanted
	for locale in ${PLOCALES}; do
		if use linguas_${locale}; then
			on+="${locale} "
		else
			off+="${locale} "
		fi
	done
	if [[ ${1} == disabled ]]; then
		wanted=${off}
	else
		# Nothing enabled: fall back on PLOCALE_BACKUP (may be empty).
		wanted=${on:-$PLOCALE_BACKUP}
	fi
	printf "%s" "${wanted}"
}
diff --git a/eclass/latex-package.eclass b/eclass/latex-package.eclass
new file mode 100644
index 000000000000..0acfcc95616c
--- /dev/null
+++ b/eclass/latex-package.eclass
@@ -0,0 +1,238 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: latex-package.eclass
+# @MAINTAINER:
+# TeX team <tex@gentoo.org>
+# @AUTHOR:
+# Matthew Turk <satai@gentoo.org>
+# Martin Ehmsen <ehmsen@gentoo.org>
+# @BLURB: An eclass for easy installation of LaTeX packages
+# @DESCRIPTION:
+# This eClass is designed to be easy to use and implement. The vast majority of
+# LaTeX packages will only need to define SRC_URI (and sometimes S) for a
+# successful installation. If fonts need to be installed, then the variable
+# SUPPLIER must also be defined.
+#
+# However, those packages that contain subdirectories must process each
+# subdirectory individually. For example, a package that contains directories
+# DIR1 and DIR2 must call latex-package_src_compile() and
+# latex-package_src_install() in each directory, as shown here:
+#
+# src_compile() {
+# cd ${S}
+# cd DIR1
+# latex-package_src_compile
+# cd ..
+# cd DIR2
+# latex-package_src_compile
+# }
+#
+# src_install() {
+# cd ${S}
+# cd DIR1
+# latex-package_src_install
+# cd ..
+# cd DIR2
+# latex-package_src_install
+# }
+#
+# The eClass automatically takes care of rehashing TeX's cache (ls-lR) after
+# installation and after removal, as well as creating final documentation from
+# TeX files that come with the source. Note that we break TeX layout standards
+# by placing documentation in /usr/share/doc/${PN}
+#
+# For examples of basic installations, check out dev-tex/aastex and
+# dev-tex/leaflet .
+#
+# NOTE: The CTAN "directory grab" function creates files with different MD5
+# signatures EVERY TIME. For this reason, if you are grabbing from the CTAN,
+# you must either grab each file individually, or find a place to mirror an
+# archive of them. (iBiblio)
+#
+# It inherits base.
+
# latex-package.eclass builds on the generic helpers from base.eclass.
inherit base

# A TeX distribution is needed at runtime; texinfo provides texi2dvi,
# used to build the bundled documentation.
RDEPEND="virtual/latex-base"
DEPEND="${RDEPEND}
	>=sys-apps/texinfo-4.2-r5"
HOMEPAGE="http://www.tug.org/"
SRC_URI="ftp://tug.ctan.org/macros/latex/"
S=${WORKDIR}/${P}
# Root of the site-wide texmf tree that everything is installed into.
TEXMF="/usr/share/texmf-site"

# @ECLASS-VARIABLE: SUPPLIER
# @DESCRIPTION:
# This refers to the font supplier; it should be overridden (see eclass
# DESCRIPTION above)
SUPPLIER="misc"
+
+# @FUNCTION: latex-package_has_tetex3
+# @RETURN: true if at least one of (>=tetex-3 or >=ptex-3.1.8 or >=texlive-core-2007) is installed, else false
+# @DESCRIPTION:
+# It is often used to know if the current TeX installation supports gentoo's
+# texmf-update or if the package has to do it the old way
latex-package_has_tetex_3() {
	# Succeed when any sufficiently modern TeX distribution is
	# installed; the function's exit status is the answer.
	has_version '>=app-text/tetex-3' \
		|| has_version '>=app-text/ptex-3.1.8' \
		|| has_version '>=app-text/texlive-core-2007'
}
+
+# @FUNCTION: latex-package_src_doinstall
+# @USAGE: [ module ]
+# @DESCRIPTION:
+# [module] can be one or more of: sh, sty, cls, fd, clo, def, cfg, dvi, ps, pdf,
+# tex, dtx, tfm, vf, afm, pfb, ttf, bst, styles, doc, fonts, bin, or all.
+# If [module] is not given, all is assumed.
+# It installs the files found in the current directory to the standard locations
+# for a TeX installation
latex-package_src_doinstall() {
	debug-print function $FUNCNAME $*
	# This actually follows the directions for a "single-user" system
	# at http://www.ctan.org/installationadvice/ modified for gentoo.
	# NOTE(review): with no arguments this calls latex-package_src_install,
	# which itself re-enters this function with "all" and additionally
	# installs DOCS -- presumably 'latex-package_src_doinstall all' was
	# meant; confirm before changing.
	[ -z "$1" ] && latex-package_src_install all

	# Each remaining argument names a file class to install from the
	# current directory to its TeX-standard location.
	# NOTE(review): the backtick find loops word-split their output, so
	# filenames containing whitespace are not handled.
	while [ "$1" ]; do
		case $1 in
			"sh")
				# Executable scripts go straight into PATH.
				for i in `find . -maxdepth 1 -type f -name "*.${1}"`
				do
					dobin $i || die "dobin $i failed"
				done
				;;
			"sty" | "cls" | "fd" | "clo" | "def" | "cfg")
				# LaTeX input files for this package.
				for i in `find . -maxdepth 1 -type f -name "*.${1}"`
				do
					insinto ${TEXMF}/tex/latex/${PN}
					doins $i || die "doins $i failed"
				done
				;;
			"dvi" | "ps" | "pdf")
				# Pre-built documentation: install under /usr/share/doc
				# (deliberately breaking TeX layout standards, see eclass
				# DESCRIPTION) and symlink into the texmf doc tree.
				for i in `find . -maxdepth 1 -type f -name "*.${1}"`
				do
					insinto /usr/share/doc/${PF}
					doins $i || die "doins $i failed"
					dosym /usr/share/doc/${PF}/$(basename ${i}) ${TEXMF}/doc/latex/${PN}/${i}
					case "${EAPI:-0}" in
						0|1|2|3) ;;
						*)
							# prevent compression of symlink target
							docompress -x /usr/share/doc/${PF}/$(basename ${i})
							;;
					esac
				done
				;;
			"tex" | "dtx")
				# Build documentation from the TeX sources; failures are
				# silenced by design (texi2dvi output is discarded).
				for i in `find . -maxdepth 1 -type f -name "*.${1}"`
				do
					einfo "Making documentation: $i"
					texi2dvi -q -c --language=latex $i &> /dev/null
				done
				;;
			"tfm" | "vf" | "afm")
				# Font metric files, sorted by supplier.
				for i in `find . -maxdepth 1 -type f -name "*.${1}"`
				do
					insinto ${TEXMF}/fonts/${1}/${SUPPLIER}/${PN}
					doins $i || die "doins $i failed"
				done
				;;
			"pfb")
				# Type 1 outline fonts.
				for i in `find . -maxdepth 1 -type f -name "*.pfb"`
				do
					insinto ${TEXMF}/fonts/type1/${SUPPLIER}/${PN}
					doins $i || die "doins $i failed"
				done
				;;
			"ttf")
				# TrueType fonts.
				for i in `find . -maxdepth 1 -type f -name "*.ttf"`
				do
					insinto ${TEXMF}/fonts/truetype/${SUPPLIER}/${PN}
					doins $i || die "doins $i failed"
				done
				;;
			"bst")
				# BibTeX style files.
				for i in `find . -maxdepth 1 -type f -name "*.bst"`
				do
					insinto ${TEXMF}/bibtex/bst/${PN}
					doins $i || die "doins $i failed"
				done
				;;
			# The remaining targets are convenience aggregates that
			# recurse with the concrete file classes above.
			"styles")
				latex-package_src_doinstall sty cls fd clo def cfg bst
				;;
			"doc")
				latex-package_src_doinstall tex dtx dvi ps pdf
				;;
			"fonts")
				latex-package_src_doinstall tfm vf afm pfb ttf
				;;
			"bin")
				latex-package_src_doinstall sh
				;;
			"all")
				latex-package_src_doinstall styles fonts bin doc
				;;
		esac
		shift
	done
}
+
+# @FUNCTION: latex-package_src_compile
+# @DESCRIPTION:
+# Calls latex for each *.ins in the current directory in order to generate the
+# relevant files that will be installed
# Run latex on every *.ins driver file in the current directory to
# unpack the package's .sty/.cls files.  Output is discarded; errors
# surface later when the generated files are missing.
latex-package_src_compile() {
	debug-print function $FUNCNAME $*
	local ins
	# The old form  `find \`pwd\` ...`  word-split the unquoted command
	# substitutions, breaking on working directories (or .ins names)
	# containing whitespace.  Read find's output line by line instead.
	while IFS= read -r ins ; do
		einfo "Extracting from ${ins}"
		latex --interaction=batchmode "${ins}" &> /dev/null
	done < <(find "$(pwd)" -maxdepth 1 -type f -name "*.ins")
}
+
+# @FUNCTION: latex-package_src_install
+# @DESCRIPTION:
+# Installs the package
latex-package_src_install() {
	# Install every component of the package, then any extra
	# documentation files the ebuild listed in DOCS.
	debug-print function $FUNCNAME $*
	latex-package_src_doinstall all
	if [[ -n ${DOCS} ]] ; then
		dodoc ${DOCS}
	fi
}
+
+# @FUNCTION: latex-package_pkg_postinst
+# @DESCRIPTION:
+# Calls latex-package_rehash to ensure the TeX installation is consistent with
+# the kpathsea database
latex-package_pkg_postinst() {
	# Refresh the kpathsea database so the newly installed files are
	# visible to TeX.
	debug-print function $FUNCNAME $*
	latex-package_rehash
}
+
+# @FUNCTION: latex-package_pkg_postrm
+# @DESCRIPTION:
+# Calls latex-package_rehash to ensure the TeX installation is consistent with
+# the kpathsea database
latex-package_pkg_postrm() {
	# Refresh the kpathsea database so the removed files disappear
	# from TeX's view as well.
	debug-print function $FUNCNAME $*
	latex-package_rehash
}
+
+# @FUNCTION: latex-package_rehash
+# @DESCRIPTION:
+# Rehashes the kpathsea database, according to the current TeX installation
latex-package_rehash() {
	debug-print function $FUNCNAME $*
	# texmf-update only exists on modern TeX installs; older ones must
	# fall back to plain texconfig.
	if ! latex-package_has_tetex_3 ; then
		texconfig rehash
	else
		texmf-update
	fi
}
+
+EXPORT_FUNCTIONS src_compile src_install pkg_postinst pkg_postrm
diff --git a/eclass/leechcraft.eclass b/eclass/leechcraft.eclass
new file mode 100644
index 000000000000..b9fc73b4350e
--- /dev/null
+++ b/eclass/leechcraft.eclass
@@ -0,0 +1,85 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: leechcraft.eclass
+# @MAINTAINER:
+# leechcraft@gentoo.org
+# @AUTHOR:
+# 0xd34df00d@gmail.com
+# NightNord@niifaq.ru
+# @BLURB: Common functions and setup utilities for the LeechCraft app
+# @DESCRIPTION:
+# The leechcraft eclass contains a common set of functions and steps
+# needed to build LeechCraft core or its plugins.
+#
+# Though this eclass seems to be small at the moment, it seems like a
+# good idea to make all plugins inherit from it, since all plugins
+# have mostly the same configuring/build process.
+#
+# Thanks to Andrian Nord <NightNord@niifaq.ru> for the original eclass.
+#
+# Only EAPI >1 supported
+
# Reject unsupported EAPIs up front (only 4 and 5 are handled).
case ${EAPI:-0} in
	4|5) ;;
	# Fixed typo in the user-facing message: "mantainer" -> "maintainer".
	0|1|2|3) die "EAPI not supported, bug ebuild maintainer" ;;
	*) die "Unknown EAPI, bug eclass maintainers" ;;
esac
+
inherit cmake-utils toolchain-funcs versionator

# Live (9999) ebuilds build from the upstream git repository; release
# ebuilds fetch an xz tarball instead.
if [[ ${PV} == 9999 ]]; then
	EGIT_REPO_URI="git://github.com/0xd34df00d/leechcraft.git
	https://github.com/0xd34df00d/leechcraft.git"
	EGIT_PROJECT="leechcraft"

	inherit git-2
else
	DEPEND="app-arch/xz-utils"
	SRC_URI="http://dist.leechcraft.org/LeechCraft/${PV}/leechcraft-${PV}.tar.xz"
	S="${WORKDIR}/leechcraft-${PV}"
fi

HOMEPAGE="http://leechcraft.org/"
LICENSE="Boost-1.0"

# @ECLASS-VARIABLE: LEECHCRAFT_PLUGIN_CATEGORY
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set this to the category of the plugin, if any.
: ${LEECHCRAFT_PLUGIN_CATEGORY:=}

# Plugins live under src/plugins/[<category>/]<name> with the "lc-"
# package-name prefix stripped; lc-core itself builds from src/.
if [[ "${LEECHCRAFT_PLUGIN_CATEGORY}" ]]; then
	CMAKE_USE_DIR="${S}"/src/plugins/${LEECHCRAFT_PLUGIN_CATEGORY}/${PN#lc-}
elif [[ ${PN} != lc-core ]]; then
	CMAKE_USE_DIR="${S}"/src/plugins/${PN#lc-}
else
	CMAKE_USE_DIR="${S}"/src
fi

EXPORT_FUNCTIONS "pkg_pretend"
+
+# @FUNCTION: leechcraft_pkg_pretend
+# @DESCRIPTION:
+# Determine active compiler version and refuse to build
+# if it is not satisfied at least to minimal version,
+# supported by upstream developers
leechcraft_pkg_pretend() {
	debug-print-function ${FUNCNAME} "$@"

	# 0.5.85 and later requires at least gcc 4.6
	# 'A || (B) && die' groups as '(A || B) && die' -- && and || have
	# equal precedence and associate left -- so we die when the compiler
	# is older than 4.6.  Skipped for binary merges: nothing is compiled.
	if [[ ${MERGE_TYPE} != binary ]]; then
		[[ $(gcc-major-version) -lt 4 ]] || \
		( [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 6 ]] ) \
			&& die "Sorry, but gcc 4.6 or higher is required."
	fi
	if version_is_at_least 0.6.66 || ( [[ ${PN} == lc-monocle ]] && version_is_at_least 0.6.65 ); then
		# 0.6.65 monocle and all later plugins require at least gcc 4.8
		if [[ ${MERGE_TYPE} != binary ]]; then
			[[ $(gcc-major-version) -lt 4 ]] || \
			( [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 8 ]] ) \
				&& die "Sorry, but gcc 4.8 or higher is required."
		fi
	fi
}
diff --git a/eclass/libtool.eclass b/eclass/libtool.eclass
new file mode 100644
index 000000000000..090c65769830
--- /dev/null
+++ b/eclass/libtool.eclass
@@ -0,0 +1,513 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: libtool.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: quickly update bundled libtool code
+# @DESCRIPTION:
+# This eclass patches ltmain.sh distributed with libtoolized packages with the
+# relink and portage patch among others
+#
+# Note, this eclass does not require libtool as it only applies patches to
+# generated libtool files. We do not run the libtoolize program because that
+# requires a regeneration of the main autotool files in order to work properly.
+
+if [[ -z ${_LIBTOOL_ECLASS} ]]; then
+_LIBTOOL_ECLASS=1
+
+# If an overlay has eclass overrides, but doesn't actually override the
+# libtool.eclass, we'll have ECLASSDIR pointing to the active overlay's
+# eclass/ dir, but libtool.eclass is still in the main Gentoo tree. So
+# add a check to locate the ELT-patches/ regardless of what's going on.
+_LIBTOOL_ECLASSDIR_LOCAL=${BASH_SOURCE[0]%/*}
elt_patch_dir() {
	# Prefer the active overlay's ELT-patches directory; if the overlay
	# overrides eclasses but not this one, fall back to the directory
	# next to where this eclass actually lives.
	local candidate="${ECLASSDIR}/ELT-patches"
	[[ -d ${candidate} ]] || candidate="${_LIBTOOL_ECLASSDIR_LOCAL}/ELT-patches"
	echo "${candidate}"
}
+
+inherit multilib toolchain-funcs
+
+#
+# See if we can apply $2 on $1, and if so, do it
+#
ELT_try_and_apply_patch() {
	local ret=0
	local file=$1
	local patch=$2
	local src=$3
	local disp="${src} patch"
	local log="${T}/elibtool.log"

	# Start the log once per run; every subsequent attempt is appended.
	if [[ -z ${_ELT_NOTED_TMP} ]] ; then
		_ELT_NOTED_TMP=true
		printf 'temp patch: %s\n' "${patch}" > "${log}"
	fi
	printf '\nTrying %s\n' "${disp}" >> "${log}"

	if [[ ! -e ${file} ]] ; then
		echo "File not found: ${file}" >> "${log}"
		return 1
	fi

	# Save file for permission restoration.  `patch` sometimes resets things.
	# Ideally we'd want 'stat -c %a', but stat is highly non portable and we are
	# guaranteed to have GNU find, so use that instead.
	local perms="$(find ${file} -maxdepth 0 -printf '%m')"
	# We only support patchlevel of 0 - why worry if its static patches?
	# Dry-run first so a failing patch leaves the file untouched.
	if patch -p0 --dry-run "${file}" "${patch}" >> "${log}" 2>&1 ; then
		einfo "  Applying ${disp} ..."
		patch -p0 -g0 --no-backup-if-mismatch "${file}" "${patch}" >> "${log}" 2>&1
		ret=$?
		# Record which patch sets got applied; elibtoolize consults this.
		export ELT_APPLIED_PATCHES="${ELT_APPLIED_PATCHES} ${src}"
	else
		ret=1
	fi
	chmod "${perms}" "${file}"

	return "${ret}"
}
+
+#
+# Get string version of ltmain.sh or ltconfig (passed as $1)
+#
ELT_libtool_version() {
	# Print the VERSION= value found in an ltmain.sh/ltconfig script
	# ($1), or "0" when none is present.  The subshell keeps the eval'd
	# assignment from leaking into the caller's environment.
	(
	unset VERSION
	eval $(grep -e '^[[:space:]]*VERSION=' "$1")
	echo "${VERSION:-0}"
	)
}
+
+#
+# Run through the patches in $2 and see if any
+# apply to $1 ...
+#
ELT_walk_patches() {
	# Try each patch of set $2 against file $1 (newest first) and stop
	# at the first one that applies; returns 0 on success, 1 otherwise.
	local patch tmp
	local ret=1
	local file=$1
	local patch_set=$2
	local patch_dir="$(elt_patch_dir)/${patch_set}"
	local rem_int_dep=$3

	[[ -z ${patch_set} ]] && return 1
	[[ ! -d ${patch_dir} ]] && return 1

	# Allow patches to use @GENTOO_LIBDIR@ replacements
	local sed_args=( -e "s:@GENTOO_LIBDIR@:$(get_libdir):g" )
	if [[ -n ${rem_int_dep} ]] ; then
		# replace @REM_INT_DEP@ with what was passed
		# to --remove-internal-dep
		sed_args+=( -e "s|@REM_INT_DEP@|${rem_int_dep}|g" )
	fi

	pushd "$(elt_patch_dir)" >/dev/null || die

	# Go through the patches in reverse order (newer version to older)
	for patch in $(find "${patch_set}" -maxdepth 1 -type f | LC_ALL=C sort -r) ; do
		tmp="${T}/libtool-elt.patch"
		sed "${sed_args[@]}" "${patch}" > "${tmp}" || die
		if ELT_try_and_apply_patch "${file}" "${tmp}" "${patch}" ; then
			# Break to unwind w/popd rather than return directly
			ret=0
			break
		fi
	done

	popd >/dev/null
	return ${ret}
}
+
+# @FUNCTION: elibtoolize
+# @USAGE: [dirs] [--portage] [--reverse-deps] [--patch-only] [--remove-internal-dep=xxx] [--shallow] [--no-uclibc]
+# @DESCRIPTION:
+# Apply a smorgasbord of patches to bundled libtool files. This function
+# should always be safe to run. If no directories are specified, then
+# ${S} will be searched for appropriate files.
+#
+# If the --shallow option is used, then only ${S}/ltmain.sh will be patched.
+#
+# The other options should be avoided in general unless you know what's going on.
elibtoolize() {
	local x
	local dirs=()
	local do_portage="no"
	local do_reversedeps="no"
	local do_only_patches="no"
	local do_uclibc="yes"
	local deptoremove=
	local do_shallow="no"
	local force="false"
	# Baseline patch sets; options and CHOST add more below.
	local elt_patches="install-sh ltmain portage relink max_cmd_len sed test tmp cross as-needed target-nm"

	# Parse options; anything that is not a flag is a directory to scan.
	for x in "$@" ; do
		case ${x} in
		--portage)
			# Only apply portage patch, and don't
			# 'libtoolize --copy --force' if all patches fail.
			do_portage="yes"
			;;
		--reverse-deps)
			# Apply the reverse-deps patch
			# http://bugzilla.gnome.org/show_bug.cgi?id=75635
			do_reversedeps="yes"
			elt_patches+=" fix-relink"
			;;
		--patch-only)
			# Do not run libtoolize if none of the patches apply ..
			do_only_patches="yes"
			;;
		--remove-internal-dep=*)
			# We will replace @REM_INT_DEP@ with what is needed
			# in ELT_walk_patches() ...
			deptoremove=${x#--remove-internal-dep=}

			# Add the patch for this ...
			[[ -n ${deptoremove} ]] && elt_patches+=" rem-int-dep"
			;;
		--shallow)
			# Only patch the ltmain.sh in ${S}
			do_shallow="yes"
			;;
		--no-uclibc)
			do_uclibc="no"
			;;
		--force)
			force="true"
			;;
		-*)
			eerror "Invalid elibtoolize option: ${x}"
			die "elibtoolize called with ${x} ??"
			;;
		*) dirs+=( "${x}" )
		esac
	done

	[[ ${do_uclibc} == "yes" ]] && elt_patches+=" uclibc-conf uclibc-ltconf"

	# Host-specific patch sets.
	case ${CHOST} in
	*-aix*) elt_patches+=" hardcode aixrtl" ;; #213277
	*-darwin*) elt_patches+=" darwin-ltconf darwin-ltmain darwin-conf" ;;
	*-solaris*) elt_patches+=" sol2-conf sol2-ltmain" ;;
	*-freebsd*) elt_patches+=" fbsd-conf fbsd-ltconf" ;;
	*-hpux*) elt_patches+=" hpux-conf deplibs hc-flag-ld hardcode hardcode-relink relink-prog no-lc" ;;
	*-irix*) elt_patches+=" irix-ltmain" ;;
	*-mint*) elt_patches+=" mint-conf" ;;
	esac

	if $(tc-getLD) --version 2>&1 | grep -qs 'GNU gold'; then
		elt_patches+=" gold-conf"
	fi

	# Find out what dirs to scan.
	if [[ ${do_shallow} == "yes" ]] ; then
		[[ ${#dirs[@]} -ne 0 ]] && die "Using --shallow with explicit dirs doesn't make sense"
		[[ -f ${S}/ltmain.sh || -f ${S}/configure ]] && dirs+=( "${S}" )
	else
		[[ ${#dirs[@]} -eq 0 ]] && dirs+=( "${S}" )
		dirs=( $(find "${dirs[@]}" '(' -name ltmain.sh -o -name configure ')' -printf '%h\n' | sort -u) )
	fi

	local d p ret
	for d in "${dirs[@]}" ; do
		export ELT_APPLIED_PATCHES=

		# Skip dirs we already processed unless --force was given.
		if [[ -f ${d}/.elibtoolized ]] ; then
			${force} || continue
		fi

		local outfunc="einfo"
		[[ -f ${d}/.elibtoolized ]] && outfunc="ewarn"
		${outfunc} "Running elibtoolize in: ${d#${WORKDIR}/}/"
		if [[ ${outfunc} == "ewarn" ]] ; then
			ewarn "  We've already been run in this tree; you should"
			ewarn "  avoid this if possible (perhaps by filing a bug)"
		fi

		# patching ltmain.sh
		[[ -f ${d}/ltmain.sh ]] &&
		for p in ${elt_patches} ; do
			ret=0

			case ${p} in
			portage)
				# Stupid test to see if its already applied ...
				if ! grep -qs 'We do not want portage' "${d}/ltmain.sh" ; then
					ELT_walk_patches "${d}/ltmain.sh" "${p}"
					ret=$?
				fi
				;;
			rem-int-dep)
				ELT_walk_patches "${d}/ltmain.sh" "${p}" "${deptoremove}"
				ret=$?
				;;
			fix-relink)
				# Do not apply if we do not have the relink patch applied ...
				if grep -qs 'inst_prefix_dir' "${d}/ltmain.sh" ; then
					ELT_walk_patches "${d}/ltmain.sh" "${p}"
					ret=$?
				fi
				;;
			max_cmd_len)
				# Do not apply if $max_cmd_len is not used ...
				if grep -qs 'max_cmd_len' "${d}/ltmain.sh" ; then
					ELT_walk_patches "${d}/ltmain.sh" "${p}"
					ret=$?
				fi
				;;
			as-needed)
				ELT_walk_patches "${d}/ltmain.sh" "${p}"
				ret=$?
				;;
			uclibc-ltconf)
				# Newer libtoolize clears ltconfig, as not used anymore
				if [[ -s ${d}/ltconfig ]] ; then
					ELT_walk_patches "${d}/ltconfig" "${p}"
					ret=$?
				fi
				;;
			fbsd-ltconf)
				if [[ -s ${d}/ltconfig ]] ; then
					ELT_walk_patches "${d}/ltconfig" "${p}"
					ret=$?
				fi
				;;
			darwin-ltconf)
				# Newer libtoolize clears ltconfig, as not used anymore
				if [[ -s ${d}/ltconfig ]] ; then
					ELT_walk_patches "${d}/ltconfig" "${p}"
					ret=$?
				fi
				;;
			darwin-ltmain)
				# special case to avoid false positives (failing to apply
				# ltmain.sh path message), newer libtools have this patch
				# built in, so not much to patch around then
				if [[ -e ${d}/ltmain.sh ]] && \
				   ! grep -qs 'verstring="-compatibility_version' "${d}/ltmain.sh" ; then
					ELT_walk_patches "${d}/ltmain.sh" "${p}"
					ret=$?
				fi
				;;
			install-sh)
				ELT_walk_patches "${d}/install-sh" "${p}"
				ret=$?
				;;
			cross)
				if tc-is-cross-compiler ; then
					ELT_walk_patches "${d}/ltmain.sh" "${p}"
					ret=$?
				fi
				;;
			*)
				ELT_walk_patches "${d}/ltmain.sh" "${p}"
				ret=$?
				;;
			esac

			# Failure handling: some patch sets are critical, others
			# only matter on particular hosts.
			if [[ ${ret} -ne 0 ]] ; then
				case ${p} in
				relink)
					local version=$(ELT_libtool_version "${d}/ltmain.sh")
					# Critical patch, but could not be applied ...
					# FIXME: Still need a patch for ltmain.sh > 1.4.0
					if ! grep -qs 'inst_prefix_dir' "${d}/ltmain.sh" && \
					   [[ $(VER_to_int "${version}") -ge $(VER_to_int "1.4.0") ]] ; then
						ewarn "  Could not apply relink.patch!"
					fi
					;;
				portage)
					# Critical patch - for this one we abort, as it can really
					# cause breakage without it applied!
					if [[ ${do_portage} == "yes" ]] ; then
						# Stupid test to see if its already applied ...
						if ! grep -qs 'We do not want portage' "${d}/ltmain.sh" ; then
							echo
							eerror "Portage patch requested, but failed to apply!"
							eerror "Please file a bug report to add a proper patch."
							die "Portage patch requested, but failed to apply!"
						fi
					else
						if grep -qs 'We do not want portage' "${d}/ltmain.sh" ; then
						#	ewarn "  Portage patch seems to be already applied."
						#	ewarn "  Please verify that it is not needed."
							:
						else
							local version=$(ELT_libtool_version "${d}"/ltmain.sh)
							echo
							eerror "Portage patch failed to apply (ltmain.sh version ${version})!"
							eerror "Please file a bug report to add a proper patch."
							die "Portage patch failed to apply!"
						fi
						# We do not want to run libtoolize ...
						ELT_APPLIED_PATCHES="portage"
					fi
					;;
				darwin-*)
					[[ ${CHOST} == *"-darwin"* ]] && ewarn "  Darwin patch set '${p}' failed to apply!"
					;;
				esac
			fi
		done

		# makes sense for ltmain.sh patches only
		[[ -f ${d}/ltmain.sh ]] &&
		if [[ -z ${ELT_APPLIED_PATCHES} ]] ; then
			if [[ ${do_portage} == "no" && \
				  ${do_reversedeps} == "no" && \
				  ${do_only_patches} == "no" && \
				  ${deptoremove} == "" ]]
			then
				ewarn "Cannot apply any patches, please file a bug about this"
				die
			fi
		fi

		# patching configure
		[[ -f ${d}/configure ]] &&
		for p in ${elt_patches} ; do
			ret=0

			case ${p} in
			uclibc-conf)
				if grep -qs 'Transform linux' "${d}/configure" ; then
					ELT_walk_patches "${d}/configure" "${p}"
					ret=$?
				fi
				;;
			fbsd-conf)
				if grep -qs 'version_type=freebsd-' "${d}/configure" ; then
					ELT_walk_patches "${d}/configure" "${p}"
					ret=$?
				fi
				;;
			darwin-conf)
				if grep -qs '&& echo \.so ||' "${d}/configure" ; then
					ELT_walk_patches "${d}/configure" "${p}"
					ret=$?
				fi
				;;
			aixrtl|hpux-conf)
				ret=1
				local subret=0
				# apply multiple patches as often as they match
				while [[ $subret -eq 0 ]]; do
					subret=1
					if [[ -e ${d}/configure ]]; then
						ELT_walk_patches "${d}/configure" "${p}"
						subret=$?
					fi
					if [[ $subret -eq 0 ]]; then
						# have at least one patch succeeded.
						ret=0
					fi
				done
				;;
			mint-conf|gold-conf|sol2-conf)
				ELT_walk_patches "${d}/configure" "${p}"
				ret=$?
				;;
			target-nm)
				ELT_walk_patches "${d}/configure" "${p}"
				ret=$?
				;;
			*)
				# ltmain.sh patches are applied above
				;;
			esac

			if [[ ${ret} -ne 0 ]] ; then
				case ${p} in
				uclibc-*)
					[[ ${CHOST} == *-uclibc ]] && ewarn "  uClibc patch set '${p}' failed to apply!"
					;;
				fbsd-*)
					if [[ ${CHOST} == *-freebsd* ]] ; then
						if [[ -z $(grep 'Handle Gentoo/FreeBSD as it was Linux' \
							"${d}/configure" 2>/dev/null) ]]; then
							eerror "  FreeBSD patch set '${p}' failed to apply!"
							die "FreeBSD patch set '${p}' failed to apply!"
						fi
					fi
					;;
				darwin-*)
					[[ ${CHOST} == *"-darwin"* ]] && ewarn "  Darwin patch set '${p}' failed to apply!"
					;;
				esac
			fi
		done

		# Stale cached libtool scripts must be regenerated by configure.
		rm -f "${d}/libtool"

		# Mark the directory so we do not patch it a second time.
		> "${d}/.elibtoolized"
	done
}
+
+uclibctoolize() { die "Use elibtoolize"; }
+darwintoolize() { die "Use elibtoolize"; }
+
+# char *VER_major(string)
+#
+# Return the Major (X of X.Y.Z) version
+#
VER_major() {
	[[ -z $1 ]] && return 1

	# Everything up to the first non-digit is the major component.
	local ver="$*"
	echo "${ver%%[^[:digit:]]*}"
}
+
+# char *VER_minor(string)
+#
+# Return the Minor (Y of X.Y.Z) version
+#
VER_minor() {
	[[ -z $1 ]] && return 1

	# Drop the major component, then keep the leading digits.
	local ver="$*"
	ver=${ver#*.}
	echo "${ver%%[^[:digit:]]*}"
}
+
+# char *VER_micro(string)
+#
+# Return the Micro (Z of X.Y.Z) version.
+#
VER_micro() {
	[[ -z $1 ]] && return 1

	# Drop the major.minor components, then keep the leading digits.
	local ver="$*"
	ver=${ver#*.*.}
	echo "${ver%%[^[:digit:]]*}"
}
+
+# int VER_to_int(string)
+#
+# Convert a string type version (2.4.0) to an int (132096)
+# for easy comparing of versions ...
+#
VER_to_int() {
	[[ -z $1 ]] && return 1

	# Pack X.Y.Z into a single comparable integer: X*65536 + Y*256 + Z.
	local major minor micro packed
	major=$(VER_major "$1")
	minor=$(VER_minor "$1")
	micro=$(VER_micro "$1")
	packed=$(( major * 65536 + minor * 256 + micro ))

	# We make version 1.0.0 (== 65536) the minimum version we will
	# handle as a sanity check ... if its less, we fail ...
	if (( packed < 65536 )) ; then
		echo 1
		return 1
	fi

	echo "${packed}"
	return 0
}
+
+fi
diff --git a/eclass/linux-info.eclass b/eclass/linux-info.eclass
new file mode 100644
index 000000000000..a913af29aead
--- /dev/null
+++ b/eclass/linux-info.eclass
@@ -0,0 +1,923 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: linux-info.eclass
+# @MAINTAINER:
+# kernel-misc@gentoo.org
+# @AUTHOR:
+# Original author: John Mylchreest <johnm@gentoo.org>
+# @BLURB: eclass used for accessing kernel related information
+# @DESCRIPTION:
+# This eclass is used as a central eclass for accessing kernel
+# related information for source or binary already installed.
+# It is vital for linux-mod.eclass to function correctly, and is split
+# out so that any ebuild behaviour "templates" are abstracted out
+# using additional eclasses.
+#
+# "kernel config" in this file means:
+# The .config of the currently installed sources is used as the first
+# preference, with a fall-back to bundled config (/proc/config.gz) if available.
+#
+# Before using any of the config-handling functions in this eclass, you must
+# ensure that one of the following functions has been called (in order of
+# preference), otherwise you will get bugs like #364041:
+# linux-info_pkg_setup
+# linux-info_get_any_version
+# get_version
+# get_running_version
+
+# A couple of env vars are available to affect usage of this eclass
+# These are as follows:
+
+# @ECLASS-VARIABLE: KERNEL_DIR
+# @DESCRIPTION:
+# A string containing the directory of the target kernel sources. The default value is
+# "/usr/src/linux"
+
+# @ECLASS-VARIABLE: CONFIG_CHECK
+# @DESCRIPTION:
+# A string containing a list of .config options to check for before
+# proceeding with the install.
+#
+# e.g.: CONFIG_CHECK="MTRR"
+#
+# You can also check that an option doesn't exist by
+# prepending it with an exclamation mark (!).
+#
+# e.g.: CONFIG_CHECK="!MTRR"
+#
+# To simply warn about a missing option, prepend a '~'.
+# It may be combined with '!'.
+#
+# In general, most checks should be non-fatal. The only time fatal checks should
+# be used is for building kernel modules or cases that a compile will fail
+# without the option.
+#
+# This is to allow usage of binary kernels, and minimal systems without kernel
+# sources.
+
+# @ECLASS-VARIABLE: ERROR_<CFG>
+# @DESCRIPTION:
+# A string containing the error message to display when the check against CONFIG_CHECK
+# fails. <CFG> should reference the appropriate option used in CONFIG_CHECK.
+#
+# e.g.: ERROR_MTRR="MTRR exists in the .config but shouldn't!!"
+
+# @ECLASS-VARIABLE: KBUILD_OUTPUT
+# @DESCRIPTION:
+# A string passed on commandline, or set from the kernel makefile. It contains the directory
+# which is to be used as the kernel object directory.
+
+# There are also a couple of variables which are set by this, and shouldn't be
+# set by hand. These are as follows:
+
+# @ECLASS-VARIABLE: KV_FULL
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the full kernel version. ie: 2.6.9-gentoo-johnm-r1
+
+# @ECLASS-VARIABLE: KV_MAJOR
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel major version. ie: 2
+
+# @ECLASS-VARIABLE: KV_MINOR
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel minor version. ie: 6
+
+# @ECLASS-VARIABLE: KV_PATCH
+# @DESCRIPTION:
+# A read-only variable. It's an integer containing the kernel patch version. ie: 9
+
+# @ECLASS-VARIABLE: KV_EXTRA
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel EXTRAVERSION. ie: -gentoo
+
+# @ECLASS-VARIABLE: KV_LOCAL
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel LOCALVERSION concatenation. ie: -johnm
+
+# @ECLASS-VARIABLE: KV_DIR
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel source directory, will be null if
+# KERNEL_DIR is invalid.
+
+# @ECLASS-VARIABLE: KV_OUT_DIR
+# @DESCRIPTION:
+# A read-only variable. It's a string containing the kernel object directory, will be KV_DIR unless
+# KBUILD_OUTPUT is used. This should be used for referencing .config.
+
+# And to ensure all the weirdness with crosscompile
+inherit toolchain-funcs versionator
+
+EXPORT_FUNCTIONS pkg_setup
+
+# Overwritable environment vars
+# ---------------------------------------
+KERNEL_DIR="${KERNEL_DIR:-${ROOT}usr/src/linux}"
+
+
+# Bug fixes
+# fix to bug #75034
+case ${ARCH} in
+ ppc) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
+ ppc64) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
+esac
+
+# @FUNCTION: set_arch_to_kernel
+# @DESCRIPTION:
+# Set the env ARCH to match what the kernel expects.
+set_arch_to_kernel() { export ARCH=$(tc-arch-kernel); }
+# @FUNCTION: set_arch_to_portage
+# @DESCRIPTION:
+# Set the env ARCH to match what portage expects.
+set_arch_to_portage() { export ARCH=$(tc-arch); }
+
+# qeinfo "Message"
+# -------------------
+# qeinfo is a quiet einfo call when EBUILD_PHASE
+# should not have visible output.
+qout() {
+ local outputmsg type
+ type=${1}
+ shift
+ outputmsg="${@}"
+ case "${EBUILD_PHASE}" in
+ depend) unset outputmsg;;
+ clean) unset outputmsg;;
+ preinst) unset outputmsg;;
+ esac
+ [ -n "${outputmsg}" ] && ${type} "${outputmsg}"
+}
+
+qeinfo() { qout einfo "${@}" ; }
+qewarn() { qout ewarn "${@}" ; }
+qeerror() { qout eerror "${@}" ; }
+
+# File Functions
+# ---------------------------------------
+
+# @FUNCTION: getfilevar
+# @USAGE: variable configfile
+# @RETURN: the value of the variable
+# @DESCRIPTION:
+# It detects the value of the variable defined in the file configfile. This is
+# done by including the configfile, and printing the variable with Make.
+# It WILL break if your makefile has missing dependencies!
+getfilevar() {
+ local ERROR basefname basedname myARCH="${ARCH}" M="${S}"
+ ERROR=0
+
+ [ -z "${1}" ] && ERROR=1
+ [ ! -f "${2}" ] && ERROR=1
+
+ if [ "${ERROR}" = 1 ]
+ then
+ echo -e "\n"
+ eerror "getfilevar requires 2 variables, with the second a valid file."
+ eerror " getfilevar <VARIABLE> <CONFIGFILE>"
+ else
+ basefname="$(basename ${2})"
+ basedname="$(dirname ${2})"
+ unset ARCH
+
+ # We use nonfatal because we want the caller to take care of things #373151
+ [[ ${EAPI:-0} == [0123] ]] && nonfatal() { "$@"; }
+ case ${EBUILD_PHASE_FUNC} in
+ pkg_info|pkg_nofetch|pkg_pretend) M="${T}" ;;
+ esac
+ echo -e "e:\\n\\t@echo \$(${1})\\ninclude ${basefname}" | \
+ nonfatal emake -C "${basedname}" M="${M}" ${BUILD_FIXES} -s -f - 2>/dev/null
+
+ ARCH=${myARCH}
+ fi
+}
+
+# @FUNCTION: getfilevar_noexec
+# @USAGE: variable configfile
+# @RETURN: the value of the variable
+# @DESCRIPTION:
+# It detects the value of the variable defined in the file configfile.
+# This is done with sed matching an expression only. If the variable is defined,
+# you will run into problems. See getfilevar for those cases.
+getfilevar_noexec() {
+ local ERROR basefname basedname mycat myARCH="${ARCH}"
+ ERROR=0
+ mycat='cat'
+
+ [ -z "${1}" ] && ERROR=1
+ [ ! -f "${2}" ] && ERROR=1
+ [ "${2%.gz}" != "${2}" ] && mycat='zcat'
+
+ if [ "${ERROR}" = 1 ]
+ then
+ echo -e "\n"
+ eerror "getfilevar_noexec requires 2 variables, with the second a valid file."
+ eerror " getfilevar_noexec <VARIABLE> <CONFIGFILE>"
+ else
+ ${mycat} "${2}" | \
+ sed -n \
+ -e "/^[[:space:]]*${1}[[:space:]]*:\\?=[[:space:]]*\(.*\)\$/{
+ s,^[^=]*[[:space:]]*=[[:space:]]*,,g ;
+ s,[[:space:]]*\$,,g ;
+ p
+ }"
+ fi
+}
+
+# @ECLASS-VARIABLE: _LINUX_CONFIG_EXISTS_DONE
+# @INTERNAL
+# @DESCRIPTION:
+# This is only set if one of the linux_config_*exists functions has been called.
+# We use it for a QA warning that the check for a config has not been performed,
+# as linux_chkconfig* in non-legacy mode WILL return an undefined value if no
+# config is available at all.
+_LINUX_CONFIG_EXISTS_DONE=
+
+linux_config_qa_check() {
+ local f="$1"
+ if [ -z "${_LINUX_CONFIG_EXISTS_DONE}" ]; then
+ ewarn "QA: You called $f before any linux_config_exists!"
+ ewarn "QA: The return value of $f will NOT guaranteed later!"
+ fi
+}
+
+# @FUNCTION: linux_config_src_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists in a build directory otherwise false
+linux_config_src_exists() {
+ export _LINUX_CONFIG_EXISTS_DONE=1
+ [[ -n ${KV_OUT_DIR} && -s ${KV_OUT_DIR}/.config ]]
+}
+
+# @FUNCTION: linux_config_bin_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists in /proc, otherwise false
+linux_config_bin_exists() {
+ export _LINUX_CONFIG_EXISTS_DONE=1
+ [[ -s /proc/config.gz ]]
+}
+
+# @FUNCTION: linux_config_exists
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true if .config exists otherwise false
+#
+# This function MUST be checked before using any of the linux_chkconfig_*
+# functions.
+linux_config_exists() {
+ linux_config_src_exists || linux_config_bin_exists
+}
+
+# @FUNCTION: linux_config_path
+# @DESCRIPTION:
+# Echo the name of the config file to use. If none are found,
+# then return false.
+linux_config_path() {
+ if linux_config_src_exists; then
+ echo "${KV_OUT_DIR}/.config"
+ elif linux_config_bin_exists; then
+ echo "/proc/config.gz"
+ else
+ return 1
+ fi
+}
+
+# @FUNCTION: require_configured_kernel
+# @DESCRIPTION:
+# This function verifies that the current kernel is configured (it checks against the existence of .config)
+# otherwise it dies.
+require_configured_kernel() {
+ if ! linux_config_src_exists; then
+ qeerror "Could not find a usable .config in the kernel source directory."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a configured set of Linux sources."
+ qeerror "If you are using KBUILD_OUTPUT, please set the environment var so that"
+ qeerror "it points to the necessary object directory so that it might find .config."
+ die "Kernel not configured; no .config found in ${KV_OUT_DIR}"
+ fi
+}
+
+# @FUNCTION: linux_chkconfig_present
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=y or CONFIG_<option>=m is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_present() {
+ linux_config_qa_check linux_chkconfig_present
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == [my] ]]
+}
+
+# @FUNCTION: linux_chkconfig_module
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=m is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_module() {
+ linux_config_qa_check linux_chkconfig_module
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == m ]]
+}
+
+# @FUNCTION: linux_chkconfig_builtin
+# @USAGE: option
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks that CONFIG_<option>=y is present in the current kernel .config
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_builtin() {
+ linux_config_qa_check linux_chkconfig_builtin
+ [[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == y ]]
+}
+
+# @FUNCTION: linux_chkconfig_string
+# @USAGE: option
+# @RETURN: CONFIG_<option>
+# @DESCRIPTION:
+# It prints the CONFIG_<option> value of the current kernel .config (it requires a configured kernel).
+# If linux_config_exists returns false, the results of this are UNDEFINED. You
+# MUST call linux_config_exists first.
+linux_chkconfig_string() {
+ linux_config_qa_check linux_chkconfig_string
+ getfilevar_noexec "CONFIG_$1" "$(linux_config_path)"
+}
+
+# Versioning Functions
+# ---------------------------------------
+
+# @FUNCTION: kernel_is
+# @USAGE: [-lt -gt -le -ge -eq] major_number [minor_number patch_number]
+# @RETURN: true or false
+# @DESCRIPTION:
+# It returns true when the current kernel version satisfies the comparison against the passed version.
+# -eq is the default comparison.
+#
+# @CODE
+# For Example where KV = 2.6.9
+# kernel_is 2 4 returns false
+# kernel_is 2 returns true
+# kernel_is 2 6 returns true
+# kernel_is 2 6 8 returns false
+# kernel_is 2 6 9 returns true
+# @CODE
+
+# Note: duplicated in kernel-2.eclass
+kernel_is() {
+ # if we haven't determined the version yet, we need to.
+ linux-info_get_any_version
+
+ # Now we can continue
+ local operator test value
+
+ case ${1#-} in
+ lt) operator="-lt"; shift;;
+ gt) operator="-gt"; shift;;
+ le) operator="-le"; shift;;
+ ge) operator="-ge"; shift;;
+ eq) operator="-eq"; shift;;
+ *) operator="-eq";;
+ esac
+ [[ $# -gt 3 ]] && die "Error in kernel-2_kernel_is(): too many parameters"
+
+ : $(( test = (KV_MAJOR << 16) + (KV_MINOR << 8) + KV_PATCH ))
+ : $(( value = (${1:-${KV_MAJOR}} << 16) + (${2:-${KV_MINOR}} << 8) + ${3:-${KV_PATCH}} ))
+ [ ${test} ${operator} ${value} ]
+}
+
+get_localversion() {
+ local lv_list i x
+
+ # ignore files with ~ in it.
+ for i in $(ls ${1}/localversion* 2>/dev/null); do
+ [[ -n ${i//*~*} ]] && lv_list="${lv_list} ${i}"
+ done
+
+ for i in ${lv_list}; do
+ x="${x}$(<${i})"
+ done
+ x=${x/ /}
+ echo ${x}
+}
+
+# Check if the Makefile is valid for direct parsing.
+# Check status results:
+# - PASS, use 'getfilevar' to extract values
+# - FAIL, use 'getfilevar_noexec' to extract values
+# The check may fail if:
+# - make is not present
+# - corruption exists in the kernel makefile
+get_makefile_extract_function() {
+ local a='' b='' mkfunc='getfilevar'
+ a="$(getfilevar VERSION ${KERNEL_MAKEFILE})"
+ b="$(getfilevar_noexec VERSION ${KERNEL_MAKEFILE})"
+ [[ "${a}" != "${b}" ]] && mkfunc='getfilevar_noexec'
+ echo "${mkfunc}"
+}
+
+# internal variable, so we know to only print the warning once
+get_version_warning_done=
+
+# @FUNCTION: get_version
+# @DESCRIPTION:
+# It gets the version of the kernel inside KERNEL_DIR and populates the KV_FULL variable
+# (if KV_FULL is already set it does nothing).
+#
+# The kernel version variables (KV_MAJOR, KV_MINOR, KV_PATCH, KV_EXTRA and KV_LOCAL) are also set.
+#
+# The KV_DIR is set using the KERNEL_DIR env var, the KV_DIR_OUT is set using a valid
+# KBUILD_OUTPUT (in a decreasing priority list, we look for the env var, makefile var or the
+# symlink /lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build).
+get_version() {
+ local tmplocal
+
+ # no need to execute this twice assuming KV_FULL is populated.
+ # we can force by unsetting KV_FULL
+ [ -n "${KV_FULL}" ] && return 0
+
+	# if we don't know KV_FULL, then we need to.
+	# make sure KV_DIR isn't set since we need to work it out via KERNEL_DIR
+ unset KV_DIR
+
+ # KV_DIR will contain the full path to the sources directory we should use
+ [ -z "${get_version_warning_done}" ] && \
+ qeinfo "Determining the location of the kernel source code"
+ [ -h "${KERNEL_DIR}" ] && KV_DIR="$(readlink -f ${KERNEL_DIR})"
+ [ -d "${KERNEL_DIR}" ] && KV_DIR="${KERNEL_DIR}"
+
+ if [ -z "${KV_DIR}" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qewarn "Unable to find kernel sources at ${KERNEL_DIR}"
+ #qeinfo "This package requires Linux sources."
+ if [ "${KERNEL_DIR}" == "/usr/src/linux" ] ; then
+ qeinfo "Please make sure that ${KERNEL_DIR} points at your running kernel, "
+ qeinfo "(or the kernel you wish to build against)."
+ qeinfo "Alternatively, set the KERNEL_DIR environment variable to the kernel sources location"
+ else
+ qeinfo "Please ensure that the KERNEL_DIR environment variable points at full Linux sources of the kernel you wish to compile against."
+ fi
+ fi
+ return 1
+ fi
+
+ # See if the kernel dir is actually an output dir. #454294
+ if [ -z "${KBUILD_OUTPUT}" -a -L "${KERNEL_DIR}/source" ]; then
+ KBUILD_OUTPUT=${KERNEL_DIR}
+ KERNEL_DIR=$(readlink -f "${KERNEL_DIR}/source")
+ KV_DIR=${KERNEL_DIR}
+ fi
+
+ if [ -z "${get_version_warning_done}" ]; then
+ qeinfo "Found kernel source directory:"
+ qeinfo " ${KV_DIR}"
+ fi
+
+ if [ ! -s "${KV_DIR}/Makefile" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qeerror "Could not find a Makefile in the kernel source directory."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources"
+ fi
+ return 1
+ fi
+
+ # OK so now we know our sources directory, but they might be using
+ # KBUILD_OUTPUT, and we need this for .config and localversions-*
+ # so we better find it eh?
+ # do we pass KBUILD_OUTPUT on the CLI?
+ local OUTPUT_DIR=${KBUILD_OUTPUT}
+
+ # keep track of it
+ KERNEL_MAKEFILE="${KV_DIR}/Makefile"
+
+ if [[ -z ${OUTPUT_DIR} ]]; then
+ # Decide the function used to extract makefile variables.
+ local mkfunc=$(get_makefile_extract_function "${KERNEL_MAKEFILE}")
+
+ # And if we didn't pass it, we can take a nosey in the Makefile.
+ OUTPUT_DIR=$(${mkfunc} KBUILD_OUTPUT "${KERNEL_MAKEFILE}")
+ fi
+
+ # And contrary to existing functions I feel we shouldn't trust the
+ # directory name to find version information as this seems insane.
+ # So we parse ${KERNEL_MAKEFILE}. We should be able to trust that
+ # the Makefile is simple enough to use the noexec extract function.
+ # This has been true for every release thus far, and it's faster
+ # than using make to evaluate the Makefile every time.
+ KV_MAJOR=$(getfilevar_noexec VERSION "${KERNEL_MAKEFILE}")
+ KV_MINOR=$(getfilevar_noexec PATCHLEVEL "${KERNEL_MAKEFILE}")
+ KV_PATCH=$(getfilevar_noexec SUBLEVEL "${KERNEL_MAKEFILE}")
+ KV_EXTRA=$(getfilevar_noexec EXTRAVERSION "${KERNEL_MAKEFILE}")
+
+ if [ -z "${KV_MAJOR}" -o -z "${KV_MINOR}" -o -z "${KV_PATCH}" ]
+ then
+ if [ -z "${get_version_warning_done}" ]; then
+ get_version_warning_done=1
+ qeerror "Could not detect kernel version."
+ qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources."
+ fi
+ return 1
+ fi
+
+ # and in newer versions we can also pull LOCALVERSION if it is set.
+ # but before we do this, we need to find if we use a different object directory.
+ # This *WILL* break if the user is using localversions, but we assume it was
+ # caught before this if they are.
+ if [[ -z ${OUTPUT_DIR} ]] ; then
+ # Try to locate a kernel that is most relevant for us.
+ for OUTPUT_DIR in "${SYSROOT}" "${ROOT}" "" ; do
+ OUTPUT_DIR+="/lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build"
+ if [[ -e ${OUTPUT_DIR} ]] ; then
+ break
+ fi
+ done
+ fi
+
+ [ -h "${OUTPUT_DIR}" ] && KV_OUT_DIR="$(readlink -f ${OUTPUT_DIR})"
+ [ -d "${OUTPUT_DIR}" ] && KV_OUT_DIR="${OUTPUT_DIR}"
+ if [ -n "${KV_OUT_DIR}" ];
+ then
+ qeinfo "Found kernel object directory:"
+ qeinfo " ${KV_OUT_DIR}"
+ fi
+ # and if we STILL have not got it, then we better just set it to KV_DIR
+ KV_OUT_DIR="${KV_OUT_DIR:-${KV_DIR}}"
+
+ # Grab the kernel release from the output directory.
+ # TODO: we MUST detect kernel.release being out of date, and 'return 1' from
+ # this function.
+ if [ -s "${KV_OUT_DIR}"/include/config/kernel.release ]; then
+ KV_LOCAL=$(<"${KV_OUT_DIR}"/include/config/kernel.release)
+ elif [ -s "${KV_OUT_DIR}"/.kernelrelease ]; then
+ KV_LOCAL=$(<"${KV_OUT_DIR}"/.kernelrelease)
+ else
+ KV_LOCAL=
+ fi
+
+ # KV_LOCAL currently contains the full release; discard the first bits.
+ tmplocal=${KV_LOCAL#${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}}
+
+ # If the updated local version was not changed, the tree is not prepared.
+ # Clear out KV_LOCAL in that case.
+ # TODO: this does not detect a change in the localversion part between
+ # kernel.release and the value that would be generated.
+ if [ "$KV_LOCAL" = "$tmplocal" ]; then
+ KV_LOCAL=
+ else
+ KV_LOCAL=$tmplocal
+ fi
+
+ # And we should set KV_FULL to the full expanded version
+ KV_FULL="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}${KV_LOCAL}"
+
+ qeinfo "Found sources for kernel version:"
+ qeinfo " ${KV_FULL}"
+
+ return 0
+}
+
+# @FUNCTION: get_running_version
+# @DESCRIPTION:
+# It gets the version of the current running kernel and the result is the same as get_version() if the
+# function can find the sources.
+get_running_version() {
+ KV_FULL=$(uname -r)
+
+ if [[ -f ${ROOT}/lib/modules/${KV_FULL}/source/Makefile && -f ${ROOT}/lib/modules/${KV_FULL}/build/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/source)
+ KBUILD_OUTPUT=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/build)
+ unset KV_FULL
+ get_version
+ return $?
+ elif [[ -f ${ROOT}/lib/modules/${KV_FULL}/source/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/source)
+ unset KV_FULL
+ get_version
+ return $?
+ elif [[ -f ${ROOT}/lib/modules/${KV_FULL}/build/Makefile ]]; then
+ KERNEL_DIR=$(readlink -f ${ROOT}/lib/modules/${KV_FULL}/build)
+ unset KV_FULL
+ get_version
+ return $?
+ else
+ # This handles a variety of weird kernel versions. Make sure to update
+ # tests/linux-info_get_running_version.sh if you want to change this.
+ local kv_full=${KV_FULL//[-+_]*}
+ KV_MAJOR=$(get_version_component_range 1 ${kv_full})
+ KV_MINOR=$(get_version_component_range 2 ${kv_full})
+ KV_PATCH=$(get_version_component_range 3 ${kv_full})
+ KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}${KV_PATCH:+.${KV_PATCH}}}"
+ : ${KV_PATCH:=0}
+ fi
+ return 0
+}
+
+# This next function is named with the eclass prefix to avoid conflicts with
+# some old versionator-like eclass functions.
+
+# @FUNCTION: linux-info_get_any_version
+# @DESCRIPTION:
+# This attempts to find the version of the sources, and otherwise falls back to
+# the version of the running kernel.
+linux-info_get_any_version() {
+ get_version
+ if [[ $? -ne 0 ]]; then
+ ewarn "Unable to calculate Linux Kernel version for build, attempting to use running version"
+ get_running_version
+ fi
+}
+
+
+# ebuild check functions
+# ---------------------------------------
+
+# @FUNCTION: check_kernel_built
+# @DESCRIPTION:
+# This function verifies that the current kernel sources have been already prepared otherwise it dies.
+check_kernel_built() {
+ # if we haven't determined the version yet, we need to
+ require_configured_kernel
+ get_version
+
+ local versionh_path
+ if kernel_is -ge 3 7; then
+ versionh_path="include/generated/uapi/linux/version.h"
+ else
+ versionh_path="include/linux/version.h"
+ fi
+
+ if [ ! -f "${KV_OUT_DIR}/${versionh_path}" ]
+ then
+ eerror "These sources have not yet been prepared."
+ eerror "We cannot build against an unprepared tree."
+ eerror "To resolve this, please type the following:"
+ eerror
+ eerror "# cd ${KV_DIR}"
+ eerror "# make oldconfig"
+ eerror "# make modules_prepare"
+ eerror
+ eerror "Then please try merging this module again."
+ die "Kernel sources need compiling first"
+ fi
+}
+
+# @FUNCTION: check_modules_supported
+# @DESCRIPTION:
+# This function verifies that the current kernel support modules (it checks CONFIG_MODULES=y) otherwise it dies.
+check_modules_supported() {
+	# if we haven't determined the version yet, we need to.
+ require_configured_kernel
+ get_version
+
+ if ! linux_chkconfig_builtin "MODULES"; then
+ eerror "These sources do not support loading external modules."
+ eerror "to be able to use this module please enable \"Loadable modules support\""
+ eerror "in your kernel, recompile and then try merging this module again."
+ die "No support for external modules in ${KV_FULL} config"
+ fi
+}
+
+# @FUNCTION: check_extra_config
+# @DESCRIPTION:
+# It checks the kernel config options specified by CONFIG_CHECK. It dies only when a required config option (i.e.
+# the prefix ~ is not used) doesn't satisfy the directive.
+check_extra_config() {
+ local config negate die error reworkmodulenames
+ local soft_errors_count=0 hard_errors_count=0 config_required=0
+ # store the value of the QA check, because otherwise we won't catch usages
+ # after if check_extra_config is called AND other direct calls are done
+ # later.
+ local old_LINUX_CONFIG_EXISTS_DONE="${_LINUX_CONFIG_EXISTS_DONE}"
+
+ # if we haven't determined the version yet, we need to
+ linux-info_get_any_version
+
+ # Determine if we really need a .config. The only time when we don't need
+ # one is when all of the CONFIG_CHECK options are prefixed with "~".
+ for config in ${CONFIG_CHECK}; do
+ if [[ "${config:0:1}" != "~" ]]; then
+ config_required=1
+ break
+ fi
+ done
+
+ if [[ ${config_required} == 0 ]]; then
+ # In the case where we don't require a .config, we can now bail out
+ # if the user has no .config as there is nothing to do. Otherwise
+ # code later will cause a failure due to missing .config.
+ if ! linux_config_exists; then
+ ewarn "Unable to check for the following kernel config options due"
+ ewarn "to absence of any configured kernel sources or compiled"
+ ewarn "config:"
+ for config in ${CONFIG_CHECK}; do
+ local_error="ERROR_${config#\~}"
+ msg="${!local_error}"
+ if [[ "x${msg}" == "x" ]]; then
+ local_error="WARNING_${config#\~}"
+ msg="${!local_error}"
+ fi
+ ewarn " - ${config#\~}${msg:+ - }${msg}"
+ done
+ ewarn "You're on your own to make sure they are set if needed."
+ export LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+ return 0
+ fi
+ else
+ require_configured_kernel
+ fi
+
+ einfo "Checking for suitable kernel configuration options..."
+
+ for config in ${CONFIG_CHECK}
+ do
+ # if we specify any fatal, ensure we honor them
+ die=1
+ error=0
+ negate=0
+ reworkmodulenames=0
+
+ if [[ ${config:0:1} == "~" ]]; then
+ die=0
+ config=${config:1}
+ elif [[ ${config:0:1} == "@" ]]; then
+ die=0
+ reworkmodulenames=1
+ config=${config:1}
+ fi
+ if [[ ${config:0:1} == "!" ]]; then
+ negate=1
+ config=${config:1}
+ fi
+
+ if [[ ${negate} == 1 ]]; then
+ linux_chkconfig_present ${config} && error=2
+ elif [[ ${reworkmodulenames} == 1 ]]; then
+ local temp_config="${config//*:}" i n
+ config="${config//:*}"
+ if linux_chkconfig_present ${config}; then
+ for i in ${MODULE_NAMES}; do
+ n="${i//${temp_config}}"
+ [[ -z ${n//\(*} ]] && \
+ MODULE_IGNORE="${MODULE_IGNORE} ${temp_config}"
+ done
+ error=2
+ fi
+ else
+ linux_chkconfig_present ${config} || error=1
+ fi
+
+ if [[ ${error} > 0 ]]; then
+ local report_func="eerror" local_error
+ local_error="ERROR_${config}"
+ local_error="${!local_error}"
+
+ if [[ -z "${local_error}" ]]; then
+ # using old, deprecated format.
+ local_error="${config}_ERROR"
+ local_error="${!local_error}"
+ fi
+ if [[ ${die} == 0 && -z "${local_error}" ]]; then
+ #soft errors can be warnings
+ local_error="WARNING_${config}"
+ local_error="${!local_error}"
+ if [[ -n "${local_error}" ]] ; then
+ report_func="ewarn"
+ fi
+ fi
+
+ if [[ -z "${local_error}" ]]; then
+ [[ ${error} == 1 ]] \
+ && local_error="is not set when it should be." \
+ || local_error="should not be set. But it is."
+ local_error="CONFIG_${config}:\t ${local_error}"
+ fi
+ if [[ ${die} == 0 ]]; then
+ ${report_func} " ${local_error}"
+ soft_errors_count=$[soft_errors_count + 1]
+ else
+ ${report_func} " ${local_error}"
+ hard_errors_count=$[hard_errors_count + 1]
+ fi
+ fi
+ done
+
+ if [[ ${hard_errors_count} > 0 ]]; then
+ eerror "Please check to make sure these options are set correctly."
+ eerror "Failure to do so may cause unexpected problems."
+ eerror "Once you have satisfied these options, please try merging"
+ eerror "this package again."
+ export LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+ die "Incorrect kernel configuration options"
+ elif [[ ${soft_errors_count} > 0 ]]; then
+ ewarn "Please check to make sure these options are set correctly."
+ ewarn "Failure to do so may cause unexpected problems."
+ else
+ eend 0
+ fi
+ export LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
+}
+
+check_zlibinflate() {
+ # if we haven't determined the version yet, we need to
+ require_configured_kernel
+ get_version
+
+ # although I restructured this code - I really really really dont support it!
+
+ # bug #27882 - zlib routines are only linked into the kernel
+ # if something compiled into the kernel calls them
+ #
+ # plus, for the cloop module, it appears that there's no way
+ # to get cloop.o to include a static zlib if CONFIG_MODVERSIONS
+ # is on
+
+ local INFLATE
+ local DEFLATE
+
+ einfo "Determining the usability of ZLIB_INFLATE support in your kernel"
+
+ ebegin "checking ZLIB_INFLATE"
+ linux_chkconfig_builtin CONFIG_ZLIB_INFLATE
+ eend $?
+ [ "$?" != 0 ] && die
+
+ ebegin "checking ZLIB_DEFLATE"
+ linux_chkconfig_builtin CONFIG_ZLIB_DEFLATE
+ eend $?
+ [ "$?" != 0 ] && die
+
+ local LINENO_START
+ local LINENO_END
+ local SYMBOLS
+ local x
+
+ LINENO_END="$(grep -n 'CONFIG_ZLIB_INFLATE y' ${KV_DIR}/lib/Config.in | cut -d : -f 1)"
+ LINENO_START="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | grep -n 'if \[' | tail -n 1 | cut -d : -f 1)"
+ (( LINENO_AMOUNT = $LINENO_END - $LINENO_START ))
+ (( LINENO_END = $LINENO_END - 1 ))
+ SYMBOLS="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | tail -n $LINENO_AMOUNT | sed -e 's/^.*\(CONFIG_[^\" ]*\).*/\1/g;')"
+
+ # okay, now we have a list of symbols
+ # we need to check each one in turn, to see whether it is set or not
+ for x in $SYMBOLS ; do
+ if [ "${!x}" = "y" ]; then
+ # we have a winner!
+ einfo "${x} ensures zlib is linked into your kernel - excellent"
+ return 0
+ fi
+ done
+
+ eerror
+ eerror "This kernel module requires ZLIB library support."
+ eerror "You have enabled zlib support in your kernel, but haven't enabled"
+ eerror "enabled any option that will ensure that zlib is linked into your"
+ eerror "kernel."
+ eerror
+ eerror "Please ensure that you enable at least one of these options:"
+ eerror
+
+ for x in $SYMBOLS ; do
+ eerror " * $x"
+ done
+
+ eerror
+ eerror "Please remember to recompile and install your kernel, and reboot"
+ eerror "into your new kernel before attempting to load this kernel module."
+
+ die "Kernel doesn't include zlib support"
+}
+
+################################
+# Default pkg_setup
+# Also used when inheriting linux-mod to force a get_version call
+# @FUNCTION: linux-info_pkg_setup
+# @DESCRIPTION:
+# Force a get_version() call when inherited from linux-mod.eclass and then check if the kernel is configured
+# to support the options specified in CONFIG_CHECK (if not null)
+linux-info_pkg_setup() {
+ linux-info_get_any_version
+
+ if kernel_is 2 4; then
+ if [ "$( gcc-major-version )" -eq "4" ] ; then
+ echo
+ ewarn "Be warned !! >=sys-devel/gcc-4.0.0 isn't supported with"
+ ewarn "linux-2.4 (or modules building against a linux-2.4 kernel)!"
+ echo
+ ewarn "Either switch to another gcc-version (via gcc-config) or use a"
+ ewarn "newer kernel that supports gcc-4."
+ echo
+ ewarn "Also be aware that bugreports about gcc-4 not working"
+ ewarn "with linux-2.4 based ebuilds will be closed as INVALID!"
+ echo
+ epause 10
+ fi
+ fi
+
+ [ -n "${CONFIG_CHECK}" ] && check_extra_config;
+}
diff --git a/eclass/linux-mod.eclass b/eclass/linux-mod.eclass
new file mode 100644
index 000000000000..b2e9cd2e4c41
--- /dev/null
+++ b/eclass/linux-mod.eclass
@@ -0,0 +1,741 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: linux-mod.eclass
+# @MAINTAINER:
+# kernel-misc@gentoo.org
+# @AUTHOR:
+# John Mylchreest <johnm@gentoo.org>,
+# Stefan Schweizer <genstef@gentoo.org>
+# @BLURB: It provides the functionality required to install external modules against a kernel source tree.
+# @DESCRIPTION:
+# This eclass is used to interface with linux-info.eclass in such a way
+# to provide the functionality and initial functions
+# required to install external modules against a kernel source
+# tree.
+
+# A couple of env vars are available to affect usage of this eclass
+# These are as follows:
+
+# @ECLASS-VARIABLE: MODULES_OPTIONAL_USE
+# @DESCRIPTION:
+# A string containing the USE flag to use for making this eclass optional
+# The recommended non-empty value is 'modules'
+
+# @ECLASS-VARIABLE: KERNEL_DIR
+# @DESCRIPTION:
+# A string containing the directory of the target kernel sources. The default value is
+# "/usr/src/linux"
+
+# @ECLASS-VARIABLE: ECONF_PARAMS
+# @DESCRIPTION:
+# It's a string containing the parameters to pass to econf.
+# If this is not set, then econf isn't run.
+
+# @ECLASS-VARIABLE: BUILD_PARAMS
+# @DESCRIPTION:
+# It's a string with the parameters to pass to emake.
+
+# @ECLASS-VARIABLE: BUILD_TARGETS
+# @DESCRIPTION:
+# It's a string with the build targets to pass to make. The default value is "clean module"
+
+# @ECLASS-VARIABLE: MODULE_NAMES
+# @DESCRIPTION:
+# It's a string containing the modules to be built automatically using the default
+# src_compile/src_install. It will only make ${BUILD_TARGETS} once in any directory.
+#
+# The structure of each MODULE_NAMES entry is as follows:
+#
+# modulename(libdir:srcdir:objdir)
+#
+# where:
+#
+# modulename = name of the module file excluding the .ko
+# libdir = place in system modules directory where module is installed (by default it's misc)
+# srcdir = place for ebuild to cd to before running make (by default it's ${S})
+# objdir = place the .ko and objects are located after make runs (by default it's set to srcdir)
+#
+# To get an idea of how these variables are used, here's a few lines
+# of code from around line 540 in this eclass:
+#
+# einfo "Installing ${modulename} module"
+# cd ${objdir} || die "${objdir} does not exist"
+# insinto /lib/modules/${KV_FULL}/${libdir}
+# doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
+#
+# For example:
+# MODULE_NAMES="module_pci(pci:${S}/pci:${S}) module_usb(usb:${S}/usb:${S})"
+#
+# what this would do is
+#
+# cd "${S}"/pci
+# make ${BUILD_PARAMS} ${BUILD_TARGETS}
+# cd "${S}"
+# insinto /lib/modules/${KV_FULL}/pci
+# doins module_pci.${KV_OBJ}
+#
+# cd "${S}"/usb
+# make ${BUILD_PARAMS} ${BUILD_TARGETS}
+# cd "${S}"
+# insinto /lib/modules/${KV_FULL}/usb
+# doins module_usb.${KV_OBJ}
+
+# There is also support for automated modprobe.d file generation.
+# This can be explicitly enabled by setting any of the following variables.
+
+# @ECLASS-VARIABLE: MODULESD_<modulename>_ENABLED
+# @DESCRIPTION:
+# This is used to disable the modprobe.d file generation otherwise the file will be
+# always generated (unless no MODULESD_<modulename>_* variable is provided). Set to "no" to disable
+# the generation of the file and the installation of the documentation.
+
+# @ECLASS-VARIABLE: MODULESD_<modulename>_EXAMPLES
+# @DESCRIPTION:
+# This is a bash array containing a list of examples which should
+# be used. If you want us to try and take a guess set this to "guess".
+#
+# For each array_component it's added an options line in the modprobe.d file
+#
+# options array_component
+#
+# where array_component is "<modulename> options" (see modprobe.conf(5))
+
+# @ECLASS-VARIABLE: MODULESD_<modulename>_ALIASES
+# @DESCRIPTION:
+# This is a bash array containing a list of associated aliases.
+#
+# For each array_component it's added an alias line in the modprobe.d file
+#
+# alias array_component
+#
+# where array_component is "wildcard <modulename>" (see modprobe.conf(5))
+
+# @ECLASS-VARIABLE: MODULESD_<modulename>_ADDITIONS
+# @DESCRIPTION:
+# This is a bash array containing a list of additional things to
+# add to the bottom of the file. This can be absolutely anything.
+# Each entry is a new line.
+
+# @ECLASS-VARIABLE: MODULESD_<modulename>_DOCS
+# @DESCRIPTION:
+# This is a string list which contains the full path to any associated
+# documents for <modulename>. These files are installed in the live tree.
+
+# @ECLASS-VARIABLE: KV_OBJ
+# @DESCRIPTION:
+# It's a read-only variable. It contains the extension of the kernel modules.
+
+inherit eutils linux-info multilib
+EXPORT_FUNCTIONS pkg_setup pkg_preinst pkg_postinst src_install src_compile pkg_postrm
+
+IUSE="kernel_linux ${MODULES_OPTIONAL_USE}"
+SLOT="0"
+RDEPEND="${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (} kernel_linux? ( virtual/modutils ) ${MODULES_OPTIONAL_USE:+)}"
+DEPEND="${RDEPEND}
+ ${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (}
+ sys-apps/sed
+ kernel_linux? ( virtual/linux-sources )
+ ${MODULES_OPTIONAL_USE:+)}"
+
+# eclass utilities
+# ----------------------------------
+
+check_vermagic() {
+ debug-print-function ${FUNCNAME} $*
+
+ local curr_gcc_ver=$(gcc -dumpversion)
+ local tmpfile old_chost old_gcc_ver result=0
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ tmpfile=`find "${KV_DIR}/" -iname "*.o.cmd" -exec grep usr/lib/gcc {} \; -quit`
+ tmpfile=${tmpfile//*usr/lib}
+ tmpfile=${tmpfile//\/include*}
+ old_chost=${tmpfile//*gcc\/}
+ old_chost=${old_chost//\/*}
+ old_gcc_ver=${tmpfile//*\/}
+
+ if [[ -z ${old_gcc_ver} || -z ${old_chost} ]]; then
+ ewarn ""
+ ewarn "Unable to detect what version of GCC was used to compile"
+ ewarn "the kernel. Build will continue, but you may experience problems."
+ elif [[ ${curr_gcc_ver} != ${old_gcc_ver} ]]; then
+ ewarn ""
+ ewarn "The version of GCC you are using (${curr_gcc_ver}) does"
+ ewarn "not match the version of GCC used to compile the"
+ ewarn "kernel (${old_gcc_ver})."
+ result=1
+ elif [[ ${CHOST} != ${old_chost} ]]; then
+ ewarn ""
+ ewarn "The current CHOST (${CHOST}) does not match the chost"
+ ewarn "used when compiling the kernel (${old_chost})."
+ result=1
+ fi
+
+ if [[ ${result} -gt 0 ]]; then
+ ewarn ""
+ ewarn "Build will not continue, because you will experience problems."
+ ewarn "To fix this either change the version of GCC you wish to use"
+ ewarn "to match the kernel, or recompile the kernel first."
+ die "GCC Version Mismatch."
+ fi
+}
+
+# @FUNCTION: use_m
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks if the kernel version is greater than 2.6.5.
+use_m() {
+ debug-print-function ${FUNCNAME} $*
+
+	# if we haven't determined the version yet, we need to.
+ get_version;
+
+ # if the kernel version is greater than 2.6.6 then we should use
+ # M= instead of SUBDIRS=
+ [ ${KV_MAJOR} -eq 3 ] && return 0
+ [ ${KV_MAJOR} -eq 2 -a ${KV_MINOR} -gt 5 -a ${KV_PATCH} -gt 5 ] && \
+ return 0 || return 1
+}
+
+# @FUNCTION: convert_to_m
+# @USAGE: /path/to/the/file
+# @DESCRIPTION:
+# It converts a file (e.g. a makefile) to use M= instead of SUBDIRS=
+convert_to_m() {
+ debug-print-function ${FUNCNAME} $*
+
+ if use_m
+ then
+ [ ! -f "${1}" ] && \
+ die "convert_to_m() requires a filename as an argument"
+ ebegin "Converting ${1/${WORKDIR}\//} to use M= instead of SUBDIRS="
+ sed -i 's:SUBDIRS=:M=:g' "${1}"
+ eend $?
+ fi
+}
+
+# internal function
+#
+# FUNCTION: update_depmod
+# DESCRIPTION:
+# It updates the modules.dep file for the current kernel.
+update_depmod() {
+ debug-print-function ${FUNCNAME} $*
+
+	# if we haven't determined the version yet, we need to.
+ get_version;
+
+ ebegin "Updating module dependencies for ${KV_FULL}"
+ if [ -r "${KV_OUT_DIR}"/System.map ]
+ then
+ depmod -ae -F "${KV_OUT_DIR}"/System.map -b "${ROOT}" ${KV_FULL}
+ eend $?
+ else
+ ewarn
+ ewarn "${KV_OUT_DIR}/System.map not found."
+ ewarn "You must manually update the kernel module dependencies using depmod."
+ eend 1
+ ewarn
+ fi
+}
+
+# internal function
+#
+# FUNCTION: move_old_moduledb
+# DESCRIPTION:
+# It updates the location of the database used by the module-rebuild utility.
+move_old_moduledb() {
+ debug-print-function ${FUNCNAME} $*
+
+ local OLDDIR="${ROOT}"/usr/share/module-rebuild/
+ local NEWDIR="${ROOT}"/var/lib/module-rebuild/
+
+ if [[ -f "${OLDDIR}"/moduledb ]]; then
+ [[ ! -d "${NEWDIR}" ]] && mkdir -p "${NEWDIR}"
+ [[ ! -f "${NEWDIR}"/moduledb ]] && \
+ mv "${OLDDIR}"/moduledb "${NEWDIR}"/moduledb
+ rm -f "${OLDDIR}"/*
+ rmdir "${OLDDIR}"
+ fi
+}
+
+# internal function
+#
+# FUNCTION: update_moduledb
+# DESCRIPTION:
+# It adds the package to the /var/lib/module-rebuild/moduledb database used by the module-rebuild utility.
+update_moduledb() {
+ debug-print-function ${FUNCNAME} $*
+
+ local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild/
+ move_old_moduledb
+
+ if [[ ! -f "${MODULEDB_DIR}"/moduledb ]]; then
+ [[ ! -d "${MODULEDB_DIR}" ]] && mkdir -p "${MODULEDB_DIR}"
+ touch "${MODULEDB_DIR}"/moduledb
+ fi
+
+ if ! grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
+ einfo "Adding module to moduledb."
+ echo "a:1:${CATEGORY}/${PN}-${PVR}" >> "${MODULEDB_DIR}"/moduledb
+ fi
+}
+
+# internal function
+#
+# FUNCTION: remove_moduledb
+# DESCRIPTION:
+# It removes the package from the /var/lib/module-rebuild/moduledb database used by
+# the module-rebuild utility.
+remove_moduledb() {
+ debug-print-function ${FUNCNAME} $*
+
+ local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild/
+ move_old_moduledb
+
+ if grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
+ einfo "Removing ${CATEGORY}/${PN}-${PVR} from moduledb."
+ sed -i -e "/.*${CATEGORY}\/${PN}-${PVR}.*/d" "${MODULEDB_DIR}"/moduledb
+ fi
+}
+
+# @FUNCTION: set_kvobj
+# @DESCRIPTION:
+# It sets the KV_OBJ variable.
+set_kvobj() {
+ debug-print-function ${FUNCNAME} $*
+
+ if kernel_is ge 2 6
+ then
+ KV_OBJ="ko"
+ else
+ KV_OBJ="o"
+ fi
+ # Do we really need to know this?
+ # Lets silence it.
+ # einfo "Using KV_OBJ=${KV_OBJ}"
+}
+
+get-KERNEL_CC() {
+ debug-print-function ${FUNCNAME} $*
+
+ if [[ -n ${KERNEL_CC} ]] ; then
+ echo "${KERNEL_CC}"
+ return
+ fi
+
+ local kernel_cc
+ if [ -n "${KERNEL_ABI}" ]; then
+ # In future, an arch might want to define CC_$ABI
+ #kernel_cc="$(get_abi_CC)"
+ #[ -z "${kernel_cc}" ] &&
+ kernel_cc="$(tc-getCC $(ABI=${KERNEL_ABI} get_abi_CHOST))"
+ else
+ kernel_cc=$(tc-getCC)
+ fi
+ echo "${kernel_cc}"
+}
+
+# internal function
+#
+# FUNCTION:
+# USAGE: /path/to/the/modulename_without_extension
+# RETURN: A file in /etc/modprobe.d
+# DESCRIPTION:
+# This function will generate and install the necessary modprobe.d file from the
+# information contained in the modules exported parms.
+# (see the variables MODULESD_<modulename>_ENABLED, MODULESD_<modulename>_EXAMPLES,
+# MODULESD_<modulename>_ALIASES, MODULESD_<modulename>_ADDITIONS and MODULESD_<modulename>_DOCS).
+#
+# At the end the documentation specified with MODULESD_<modulename>_DOCS is installed.
+generate_modulesd() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ local currm_path currm currm_t t myIFS myVAR
+ local module_docs module_enabled module_aliases \
+ module_additions module_examples module_modinfo module_opts
+
+ for currm_path in ${@}
+ do
+ currm=${currm_path//*\/}
+ currm=$(echo ${currm} | tr '[:lower:]' '[:upper:]')
+ currm_t=${currm}
+ while [[ -z ${currm_t//*-*} ]]; do
+ currm_t=${currm_t/-/_}
+ done
+
+ module_docs="$(eval echo \${MODULESD_${currm_t}_DOCS})"
+ module_enabled="$(eval echo \${MODULESD_${currm_t}_ENABLED})"
+ module_aliases="$(eval echo \${#MODULESD_${currm_t}_ALIASES[*]})"
+ module_additions="$(eval echo \${#MODULESD_${currm_t}_ADDITIONS[*]})"
+ module_examples="$(eval echo \${#MODULESD_${currm_t}_EXAMPLES[*]})"
+
+ [[ ${module_aliases} -eq 0 ]] && unset module_aliases
+ [[ ${module_additions} -eq 0 ]] && unset module_additions
+ [[ ${module_examples} -eq 0 ]] && unset module_examples
+
+		# If we specify we don't want it, then let's exit, otherwise we assume
+		# that if it's set, we do want it.
+ [[ ${module_enabled} == no ]] && return 0
+
+ # unset any unwanted variables.
+ for t in ${!module_*}
+ do
+ [[ -z ${!t} ]] && unset ${t}
+ done
+
+ [[ -z ${!module_*} ]] && return 0
+
+ # OK so now if we have got this far, then we know we want to continue
+ # and generate the modprobe.d file.
+ module_modinfo="$(modinfo -p ${currm_path}.${KV_OBJ})"
+ module_config="${T}/modulesd-${currm}"
+
+ ebegin "Preparing file for modprobe.d"
+ #-----------------------------------------------------------------------
+ echo "# modprobe.d configuration file for ${currm}" >> "${module_config}"
+ #-----------------------------------------------------------------------
+ [[ -n ${module_docs} ]] && \
+ echo "# For more information please read:" >> "${module_config}"
+ for t in ${module_docs}
+ do
+ echo "# ${t//*\/}" >> "${module_config}"
+ done
+ echo >> "${module_config}"
+
+ #-----------------------------------------------------------------------
+ if [[ ${module_aliases} -gt 0 ]]
+ then
+ echo "# Internal Aliases - Do not edit" >> "${module_config}"
+ echo "# ------------------------------" >> "${module_config}"
+
+ for((t=0; t<${module_aliases}; t++))
+ do
+ echo "alias $(eval echo \${MODULESD_${currm}_ALIASES[$t]})" \
+ >> "${module_config}"
+ done
+ echo '' >> "${module_config}"
+ fi
+
+ #-----------------------------------------------------------------------
+ if [[ -n ${module_modinfo} ]]
+ then
+ echo >> "${module_config}"
+ echo "# Configurable module parameters" >> "${module_config}"
+ echo "# ------------------------------" >> "${module_config}"
+ myIFS="${IFS}"
+ IFS="$(echo -en "\n\b")"
+
+ for t in ${module_modinfo}
+ do
+ myVAR="$(echo ${t#*:} | grep -o "[^ ]*[0-9][ =][^ ]*" | tail -1 | grep -o "[0-9]")"
+ if [[ -n ${myVAR} ]]
+ then
+ module_opts="${module_opts} ${t%%:*}:${myVAR}"
+ fi
+ echo -e "# ${t%%:*}:\t${t#*:}" >> "${module_config}"
+ done
+ IFS="${myIFS}"
+ echo '' >> "${module_config}"
+ fi
+
+ #-----------------------------------------------------------------------
+ if [[ $(eval echo \${MODULESD_${currm}_ALIASES[0]}) == guess ]]
+ then
+ # So lets do some guesswork eh?
+ if [[ -n ${module_opts} ]]
+ then
+ echo "# For Example..." >> "${module_config}"
+ echo "# --------------" >> "${module_config}"
+ for t in ${module_opts}
+ do
+ echo "# options ${currm} ${t//:*}=${t//*:}" >> "${module_config}"
+ done
+ echo '' >> "${module_config}"
+ fi
+ elif [[ ${module_examples} -gt 0 ]]
+ then
+ echo "# For Example..." >> "${module_config}"
+ echo "# --------------" >> "${module_config}"
+ for((t=0; t<${module_examples}; t++))
+ do
+ echo "options $(eval echo \${MODULESD_${currm}_EXAMPLES[$t]})" \
+ >> "${module_config}"
+ done
+ echo '' >> "${module_config}"
+ fi
+
+ #-----------------------------------------------------------------------
+ if [[ ${module_additions} -gt 0 ]]
+ then
+ for((t=0; t<${module_additions}; t++))
+ do
+ echo "$(eval echo \${MODULESD_${currm}_ADDITIONS[$t]})" \
+ >> "${module_config}"
+ done
+ echo '' >> "${module_config}"
+ fi
+
+ #-----------------------------------------------------------------------
+
+ # then we install it
+ insinto /etc/modprobe.d
+ newins "${module_config}" "${currm_path//*\/}.conf"
+
+ # and install any documentation we might have.
+ [[ -n ${module_docs} ]] && dodoc ${module_docs}
+ done
+ eend 0
+ return 0
+}
+
+# internal function
+#
+# FUNCTION: find_module_params
+# USAGE: A string "NAME(LIBDIR:SRCDIR:OBJDIR)"
+# RETURN: The string "modulename:NAME libdir:LIBDIR srcdir:SRCDIR objdir:OBJDIR"
+# DESCRIPTION:
+# Analyze the specification NAME(LIBDIR:SRCDIR:OBJDIR) of one module as described in MODULE_NAMES.
+find_module_params() {
+ debug-print-function ${FUNCNAME} $*
+
+ local matched_offset=0 matched_opts=0 test="${@}" temp_var result
+ local i=0 y=0 z=0
+
+ for((i=0; i<=${#test}; i++))
+ do
+ case ${test:${i}:1} in
+ \() matched_offset[0]=${i};;
+ \:) matched_opts=$((${matched_opts} + 1));
+ matched_offset[${matched_opts}]="${i}";;
+ \)) matched_opts=$((${matched_opts} + 1));
+ matched_offset[${matched_opts}]="${i}";;
+ esac
+ done
+
+ for((i=0; i<=${matched_opts}; i++))
+ do
+		# i = offset we're working on
+ # y = last offset
+ # z = current offset - last offset
+ # temp_var = temporary name
+ case ${i} in
+ 0) tempvar=${test:0:${matched_offset[0]}};;
+ *) y=$((${matched_offset[$((${i} - 1))]} + 1))
+ z=$((${matched_offset[${i}]} - ${matched_offset[$((${i} - 1))]}));
+ z=$((${z} - 1))
+ tempvar=${test:${y}:${z}};;
+ esac
+
+ case ${i} in
+ 0) result="${result} modulename:${tempvar}";;
+ 1) result="${result} libdir:${tempvar}";;
+ 2) result="${result} srcdir:${tempvar}";;
+ 3) result="${result} objdir:${tempvar}";;
+ esac
+ done
+
+ echo ${result}
+}
+
+# default ebuild functions
+# --------------------------------
+
+# @FUNCTION: linux-mod_pkg_setup
+# @DESCRIPTION:
+# It checks the CONFIG_CHECK options (see linux-info.eclass(5)), verifies that the kernel is
+# configured, verifies that the sources are prepared, verifies that the modules support is builtin
+# in the kernel and sets the object extension KV_OBJ.
+linux-mod_pkg_setup() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ local is_bin="${MERGE_TYPE}"
+
+ # If we are installing a binpkg, take a different path.
+ # use MERGE_TYPE if available (eapi>=4); else use non-PMS EMERGE_FROM (eapi<4)
+ if has ${EAPI} 0 1 2 3; then
+ is_bin=${EMERGE_FROM}
+ fi
+
+ if [[ ${is_bin} == binary ]]; then
+ linux-mod_pkg_setup_binary
+ return
+ fi
+
+ linux-info_pkg_setup;
+ require_configured_kernel
+ check_kernel_built;
+ strip_modulenames;
+ [[ -n ${MODULE_NAMES} ]] && check_modules_supported
+ set_kvobj;
+ # Commented out with permission from johnm until a fixed version for arches
+ # who intentionally use different kernel and userland compilers can be
+ # introduced - Jason Wever <weeve@gentoo.org>, 23 Oct 2005
+ #check_vermagic;
+}
+
+# @FUNCTION: linux-mod_pkg_setup_binary
+# @DESCRIPTION:
+# Perform all kernel option checks non-fatally, as the .config and
+# /proc/config.gz might not be present. Do not do anything that requires kernel
+# sources.
+linux-mod_pkg_setup_binary() {
+ debug-print-function ${FUNCNAME} $*
+ local new_CONFIG_CHECK
+ # ~ needs always to be quoted, else bash expands it.
+ for config in $CONFIG_CHECK ; do
+ optional='~'
+ [[ ${config:0:1} == "~" ]] && optional=''
+ new_CONFIG_CHECK="${new_CONFIG_CHECK} ${optional}${config}"
+ done
+ export CONFIG_CHECK="${new_CONFIG_CHECK}"
+ linux-info_pkg_setup;
+}
+
+strip_modulenames() {
+ debug-print-function ${FUNCNAME} $*
+
+ local i
+ for i in ${MODULE_IGNORE}; do
+ MODULE_NAMES=${MODULE_NAMES//${i}(*}
+ done
+}
+
+# @FUNCTION: linux-mod_src_compile
+# @DESCRIPTION:
+# It compiles all the modules specified in MODULE_NAMES. For each module the econf command is
+# executed only if ECONF_PARAMS is defined, the name of the target is specified by BUILD_TARGETS
+# while the options are in BUILD_PARAMS (all the modules share these variables). The compilation
+# happens inside ${srcdir}.
+#
+# Look at the description of these variables for more details.
+linux-mod_src_compile() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ local modulename libdir srcdir objdir i n myABI="${ABI}"
+ set_arch_to_kernel
+ ABI="${KERNEL_ABI}"
+
+ BUILD_TARGETS=${BUILD_TARGETS:-clean module}
+ strip_modulenames;
+ cd "${S}"
+ touch Module.symvers
+ for i in ${MODULE_NAMES}
+ do
+ unset libdir srcdir objdir
+ for n in $(find_module_params ${i})
+ do
+ eval ${n/:*}=${n/*:/}
+ done
+ libdir=${libdir:-misc}
+ srcdir=${srcdir:-${S}}
+ objdir=${objdir:-${srcdir}}
+
+ if [ ! -f "${srcdir}/.built" ];
+ then
+ cd "${srcdir}"
+ ln -s "${S}"/Module.symvers Module.symvers
+ einfo "Preparing ${modulename} module"
+ if [[ -n ${ECONF_PARAMS} ]]
+ then
+ econf ${ECONF_PARAMS} || \
+ die "Unable to run econf ${ECONF_PARAMS}"
+ fi
+
+ # This looks messy, but it is needed to handle multiple variables
+ # being passed in the BUILD_* stuff where the variables also have
+			# spaces that must be preserved. If we don't do this, then the stuff
+ # inside the variables gets used as targets for Make, which then
+ # fails.
+ eval "emake HOSTCC=\"$(tc-getBUILD_CC)\" \
+ CROSS_COMPILE=${CHOST}- \
+ LDFLAGS=\"$(get_abi_LDFLAGS)\" \
+ ${BUILD_FIXES} \
+ ${BUILD_PARAMS} \
+ ${BUILD_TARGETS} " \
+ || die "Unable to emake HOSTCC="$(tc-getBUILD_CC)" CROSS_COMPILE=${CHOST}- LDFLAGS="$(get_abi_LDFLAGS)" ${BUILD_FIXES} ${BUILD_PARAMS} ${BUILD_TARGETS}"
+ cd "${OLDPWD}"
+ touch "${srcdir}"/.built
+ fi
+ done
+
+ set_arch_to_portage
+ ABI="${myABI}"
+}
+
+# @FUNCTION: linux-mod_src_install
+# @DESCRIPTION:
+# It installs the modules specified in MODULE_NAMES. The modules should be inside the ${objdir}
+# directory and they are installed inside /lib/modules/${KV_FULL}/${libdir}.
+#
+# The modprobe.d configuration file is automatically generated if the
+# MODULESD_<modulename>_* variables are defined. The only way to stop this process is by
+# setting MODULESD_<modulename>_ENABLED=no. At the end the documentation specified via
+# MODULESD_<modulename>_DOCS is also installed.
+#
+# Look at the description of these variables for more details.
+linux-mod_src_install() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ local modulename libdir srcdir objdir i n
+
+ strip_modulenames;
+ for i in ${MODULE_NAMES}
+ do
+ unset libdir srcdir objdir
+ for n in $(find_module_params ${i})
+ do
+ eval ${n/:*}=${n/*:/}
+ done
+ libdir=${libdir:-misc}
+ srcdir=${srcdir:-${S}}
+ objdir=${objdir:-${srcdir}}
+
+ einfo "Installing ${modulename} module"
+ cd "${objdir}" || die "${objdir} does not exist"
+ insinto /lib/modules/${KV_FULL}/${libdir}
+ doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
+ cd "${OLDPWD}"
+
+ generate_modulesd "${objdir}/${modulename}"
+ done
+}
+
+# @FUNCTION: linux-mod_pkg_preinst
+# @DESCRIPTION:
+# It checks what to do after having merged the package.
+linux-mod_pkg_preinst() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ [ -d "${D}lib/modules" ] && UPDATE_DEPMOD=true || UPDATE_DEPMOD=false
+ [ -d "${D}lib/modules" ] && UPDATE_MODULEDB=true || UPDATE_MODULEDB=false
+}
+
+# @FUNCTION: linux-mod_pkg_postinst
+# @DESCRIPTION:
+# It executes /sbin/depmod and adds the package to the /var/lib/module-rebuild/moduledb
+# database (if ${D}/lib/modules is created)
+linux-mod_pkg_postinst() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+
+ ${UPDATE_DEPMOD} && update_depmod;
+ ${UPDATE_MODULEDB} && update_moduledb;
+}
+
+# @FUNCTION: linux-mod_pkg_postrm
+# @DESCRIPTION:
+# It removes the package from the /var/lib/module-rebuild/moduledb database but it doesn't
+# call /sbin/depmod because the modules are still installed.
+linux-mod_pkg_postrm() {
+ debug-print-function ${FUNCNAME} $*
+ [ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
+ remove_moduledb;
+}
diff --git a/eclass/makeedit.eclass b/eclass/makeedit.eclass
new file mode 100644
index 000000000000..dad378fb8c35
--- /dev/null
+++ b/eclass/makeedit.eclass
@@ -0,0 +1,37 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: makeedit.eclass
+# @AUTHOR:
+# Spider
+# @BLURB: An eclass to replace some flags in makefiles
+# @DESCRIPTION:
+#
+# @CODE
+# To use this eclass, do 2 things:
+# 1. append-flags "$MAKEEDIT_FLAGS". If you filter-flags, make sure to do
+# the append-flags afterward, otherwise you'll lose them.
+# 2. after running configure or econf, call edit_makefiles to remove
+# extraneous CFLAGS from your Makefiles.
+# @CODE
+#
+# This combination should reduce the RAM requirements of your build, and maybe
+# even speed it up a bit.
+
+
+MAKEEDIT_FLAGS="-Wno-return-type -w"
+
+# @FUNCTION: edit_makefiles
+# @DESCRIPTION:
+# Removes some flags in makefiles
+edit_makefiles() {
+ # We already add "-Wno-return-type -w" to compiler flags, so
+ # no need to replace "-Wall" and "-Wreturn-type" with them.
+ einfo "Parsing Makefiles ..."
+ find . \( -iname makefile -o -name \*.mk -o -name GNUmakefile \) -print0 | \
+ xargs -0 sed -i \
+ -e 's:-Wall::g' \
+ -e 's:-Wreturn-type::g' \
+ -e 's:-pedantic::g'
+}
diff --git a/eclass/mercurial.eclass b/eclass/mercurial.eclass
new file mode 100644
index 000000000000..f58335a4ce75
--- /dev/null
+++ b/eclass/mercurial.eclass
@@ -0,0 +1,197 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mercurial.eclass
+# @MAINTAINER:
+# Christoph Junghans <ottxor@gentoo.org>
+# Dirkjan Ochtman <djc@gentoo.org>
+# @AUTHOR:
+# Next gen author: Krzysztof Pawlik <nelchael@gentoo.org>
+# Original author: Aron Griffis <agriffis@gentoo.org>
+# @BLURB: This eclass provides generic mercurial fetching functions
+# @DESCRIPTION:
+# This eclass provides generic mercurial fetching functions. To fetch sources
+# from mercurial repository just set EHG_REPO_URI to correct repository URI. If
+# you need to share single repository between several ebuilds set EHG_PROJECT to
+# project name in all of them.
+
+inherit eutils
+
+EXPORT_FUNCTIONS src_unpack
+
+DEPEND="dev-vcs/mercurial"
+
+# @ECLASS-VARIABLE: EHG_REPO_URI
+# @DESCRIPTION:
+# Mercurial repository URI.
+
+# @ECLASS-VARIABLE: EHG_REVISION
+# @DESCRIPTION:
+# Create working directory for specified revision, defaults to default.
+#
+# EHG_REVISION is passed as a value for --updaterev parameter, so it can be more
+# than just a revision, please consult `hg help revisions' for more details.
+: ${EHG_REVISION:="default"}
+
+# @ECLASS-VARIABLE: EHG_STORE_DIR
+# @DESCRIPTION:
+# Mercurial sources store directory. Users may override this in /etc/portage/make.conf
+[[ -z "${EHG_STORE_DIR}" ]] && EHG_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/hg-src"
+
+# @ECLASS-VARIABLE: EHG_PROJECT
+# @DESCRIPTION:
+# Project name.
+#
+# This variable defaults to $PN, but can be changed to allow repository sharing
+# between several ebuilds.
+[[ -z "${EHG_PROJECT}" ]] && EHG_PROJECT="${PN}"
+
+# @ECLASS-VARIABLE: EHG_CHECKOUT_DIR
+# @DESCRIPTION:
+# The directory to check the hg sources out to.
+#
+# EHG_CHECKOUT_DIR=${S}
+
+# @ECLASS-VARIABLE: EHG_QUIET
+# @DESCRIPTION:
+# Suppress some extra noise from mercurial, set it to 'ON' to be quiet.
+: ${EHG_QUIET:="OFF"}
+[[ "${EHG_QUIET}" == "ON" ]] && EHG_QUIET_CMD_OPT="--quiet"
+
+# @ECLASS-VARIABLE: EHG_CLONE_CMD
+# @DESCRIPTION:
+# Command used to perform initial repository clone.
+[[ -z "${EHG_CLONE_CMD}" ]] && EHG_CLONE_CMD="hg clone ${EHG_QUIET_CMD_OPT} --pull --noupdate"
+
+# @ECLASS-VARIABLE: EHG_PULL_CMD
+# @DESCRIPTION:
+# Command used to update repository.
+[[ -z "${EHG_PULL_CMD}" ]] && EHG_PULL_CMD="hg pull ${EHG_QUIET_CMD_OPT}"
+
+# @ECLASS-VARIABLE: EHG_OFFLINE
+# @DESCRIPTION:
+# Set this variable to a non-empty value to disable the automatic updating of
+# a mercurial source tree. This is intended to be set outside the ebuild by
+# users.
+EHG_OFFLINE="${EHG_OFFLINE:-${EVCS_OFFLINE}}"
+
+# @FUNCTION: mercurial_fetch
+# @USAGE: [repository_uri] [module] [sourcedir]
+# @DESCRIPTION:
+# Clone or update repository.
+#
+# If repository URI is not passed it defaults to EHG_REPO_URI, if module is
+# empty it defaults to basename of EHG_REPO_URI, sourcedir defaults to
+# EHG_CHECKOUT_DIR, which defaults to S.
+
+mercurial_fetch() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+ EHG_REPO_URI=${1-${EHG_REPO_URI}}
+ [[ -z "${EHG_REPO_URI}" ]] && die "EHG_REPO_URI is empty"
+
+ local module="${2-$(basename "${EHG_REPO_URI}")}"
+ local sourcedir="${3:-${EHG_CHECKOUT_DIR:-${S}}}"
+
+ # Should be set but blank to prevent using $HOME/.hgrc
+ export HGRCPATH=
+
+ # Check ${EHG_STORE_DIR} directory:
+ addwrite "$(dirname "${EHG_STORE_DIR}")" || die "addwrite failed"
+ if [[ ! -d "${EHG_STORE_DIR}" ]]; then
+ mkdir -p "${EHG_STORE_DIR}" || die "failed to create ${EHG_STORE_DIR}"
+ chmod -f g+rw "${EHG_STORE_DIR}" || \
+ die "failed to chown ${EHG_STORE_DIR}"
+ fi
+
+ # Create project directory:
+ mkdir -p "${EHG_STORE_DIR}/${EHG_PROJECT}" || \
+ die "failed to create ${EHG_STORE_DIR}/${EHG_PROJECT}"
+ chmod -f g+rw "${EHG_STORE_DIR}/${EHG_PROJECT}" || \
+ echo "Warning: failed to chmod g+rw ${EHG_PROJECT}"
+ cd "${EHG_STORE_DIR}/${EHG_PROJECT}" || \
+ die "failed to cd to ${EHG_STORE_DIR}/${EHG_PROJECT}"
+
+ # Clone/update repository:
+ if [[ ! -d "${module}" ]]; then
+ einfo "Cloning ${EHG_REPO_URI} to ${EHG_STORE_DIR}/${EHG_PROJECT}/${module}"
+ ${EHG_CLONE_CMD} "${EHG_REPO_URI}" "${module}" || {
+ rm -rf "${module}"
+ die "failed to clone ${EHG_REPO_URI}"
+ }
+ cd "${module}"
+ elif [[ -z "${EHG_OFFLINE}" ]]; then
+ einfo "Updating ${EHG_STORE_DIR}/${EHG_PROJECT}/${module} from ${EHG_REPO_URI}"
+ cd "${module}" || die "failed to cd to ${module}"
+ ${EHG_PULL_CMD} "${EHG_REPO_URI}" || die "update failed"
+ fi
+
+ # Checkout working copy:
+ einfo "Creating working directory in ${sourcedir} (target revision: ${EHG_REVISION})"
+ mkdir -p "${sourcedir}" || die "failed to create ${sourcedir}"
+ hg clone \
+ ${EHG_QUIET_CMD_OPT} \
+ --updaterev="${EHG_REVISION}" \
+ "${EHG_STORE_DIR}/${EHG_PROJECT}/${module}" \
+ "${sourcedir}" || die "hg clone failed"
+ # An exact revision helps a lot for testing purposes, so have some output...
+ # id num branch
+ # fd6e32d61721 6276 default
+ local HG_REVDATA=($(hg identify -b -i "${sourcedir}"))
+ export HG_REV_ID=${HG_REVDATA[0]}
+ local HG_REV_BRANCH=${HG_REVDATA[1]}
+ einfo "Work directory: ${sourcedir} global id: ${HG_REV_ID} (was ${EHG_REVISION} branch: ${HG_REV_BRANCH}"
+}
+
+# @FUNCTION: mercurial_bootstrap
+# @INTERNAL
+# @DESCRIPTION:
+# Internal function that runs bootstrap command on unpacked source.
+mercurial_bootstrap() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ # @ECLASS-VARIABLE: EHG_BOOTSTRAP
+ # @DESCRIPTION:
+ # Command to be executed after checkout and clone of the specified
+ # repository.
+ if [[ ${EHG_BOOTSTRAP} ]]; then
+ pushd "${S}" > /dev/null
+ einfo "Starting bootstrap"
+
+ if [[ -f ${EHG_BOOTSTRAP} ]]; then
+ # we have file in the repo which we should execute
+ debug-print "${FUNCNAME}: bootstraping with file \"${EHG_BOOTSTRAP}\""
+
+ if [[ -x ${EHG_BOOTSTRAP} ]]; then
+ eval "./${EHG_BOOTSTRAP}" \
+ || die "${FUNCNAME}: bootstrap script failed"
+ else
+ eerror "\"${EHG_BOOTSTRAP}\" is not executable."
+ eerror "Report upstream, or bug ebuild maintainer to remove bootstrap command."
+ die "\"${EHG_BOOTSTRAP}\" is not executable"
+ fi
+ else
+ # we execute some system command
+ debug-print "${FUNCNAME}: bootstraping with commands \"${EHG_BOOTSTRAP}\""
+
+ eval "${EHG_BOOTSTRAP}" \
+ || die "${FUNCNAME}: bootstrap commands failed"
+ fi
+
+ einfo "Bootstrap finished"
+ popd > /dev/null
+ fi
+}
+
+# @FUNCTION: mercurial_src_unpack
+# @DESCRIPTION:
+# The mercurial src_unpack function, which will be exported.
+function mercurial_src_unpack {
+ debug-print-function ${FUNCNAME} "$@"
+
+ mercurial_fetch
+ mercurial_bootstrap
+}
diff --git a/eclass/mono-env.eclass b/eclass/mono-env.eclass
new file mode 100644
index 000000000000..1fb351fd1fd9
--- /dev/null
+++ b/eclass/mono-env.eclass
@@ -0,0 +1,45 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mono-env.eclass
+# @MAINTAINER:
+# dotnet@gentoo.org
+# @BLURB: Set environment variables commonly used by dotnet packages.
+# @DESCRIPTION:
+# Set environment variables commonly used by dotnet packages.
+
+SRC_URI="http://download.mono-project.com/sources/${PN}/${P}.tar.bz2"
+
+EXPORT_FUNCTIONS pkg_setup
+
+if [[ ! ${_MONO_ENV} ]]; then
+
+mono-env_pkg_setup() {
+	# >=mono-0.92 needs shared memory when mcs is used with -pkg:foo-sharp;
+	# point the shared dir at ${T} so ${T}/.wapi is used during install.
+	export MONO_SHARED_DIR="${T}"
+
+	# Keep the mono registry and XDG data inside the build's temp dir,
+	# as other dotnet packages expect these locations to be writable.
+	export MONO_REGISTRY_PATH="${T}/registry"
+	export XDG_DATA_HOME="${T}/data"
+
+	# mono, nant and many other dotnet packages are known to fail to build
+	# unless LC_ALL is set to C, so force it for everything mono-related
+	# (see bugs #146424, #149817).
+	export LC_ALL=C
+
+	# Without this, monodevelop-using applications try to create their
+	# config files under the user's home directory.
+	export XDG_CONFIG_HOME="${T}"
+
+	# Bug 83020: access violations arise when emerging mono-related
+	# packages with MONO_AOT_CACHE set, so make sure it is unset.
+	unset MONO_AOT_CACHE
+
+	# mono libs can live on /usr/lib as they are not arch specific.
+	QA_MULTILIB_PATHS="usr/lib/"
+}
+
+_MONO_ENV=1
+fi
diff --git a/eclass/mono.eclass b/eclass/mono.eclass
new file mode 100644
index 000000000000..3fe51054b30e
--- /dev/null
+++ b/eclass/mono.eclass
@@ -0,0 +1,81 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mono.eclass
+# @MAINTAINER:
+# dotnet@gentoo.org
+# @BLURB: common settings and functions for mono and dotnet related packages
+# @DESCRIPTION:
+# The mono eclass contains common environment settings that are useful for
+# dotnet packages. Currently, it provides no functions, just exports
+# MONO_SHARED_DIR and sets LC_ALL in order to prevent errors during compilation
+# of dotnet packages.
+
+inherit multilib
+
+# >=mono-0.92 versions using mcs -pkg:foo-sharp require shared memory, so we set the
+# shared dir to ${T} so that ${T}/.wapi can be used during the install process.
+export MONO_SHARED_DIR="${T}"
+
+# Building mono, nant and many other dotnet packages is known to fail if LC_ALL
+# variable is not set to C. To prevent this all mono related packages will be
+# build with LC_ALL=C (see bugs #146424, #149817)
+export LC_ALL=C
+
+# Monodevelop-using applications need this to be set or they will try to create config
+# files in the user's ~ dir.
+
+export XDG_CONFIG_HOME="${T}"
+
+# Fix bug 83020:
+# "Access Violations Arise When Emerging Mono-Related Packages with MONO_AOT_CACHE"
+
+unset MONO_AOT_CACHE
+
+# Install an assembly into the Global Assembly Cache via gacutil.
+# $1 - path to the assembly to install
+# $2 - (optional) GAC package name; defaults to ${GACPN}, then ${PN}
+egacinstall() {
+	# EAPIs 0-2 have no ED, so fall back to D outside prefix.
+	# NOTE(review): this assigns the global ED, affecting later phases too.
+	use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
+	gacutil -i "${1}" \
+		-root "${ED}"/usr/$(get_libdir) \
+		-gacdir /usr/$(get_libdir) \
+		-package ${2:-${GACPN:-${PN}}} \
+		|| die "installing ${1} into the Global Assembly Cache failed"
+}
+
+# Move anything installed into /usr/lib into the real libdir, then fix
+# pkgconfig files and shell wrapper scripts that hardcode /lib paths.
+mono_multilib_comply() {
+	use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
+	local dir exe finddirs=() mv_command=${mv_command:-mv}
+	if [[ -d "${ED}/usr/lib" && "$(get_libdir)" != "lib" ]]
+	then
+		if ! [[ -d "${ED}"/usr/"$(get_libdir)" ]]
+		then
+			mkdir "${ED}"/usr/"$(get_libdir)" || die "Couldn't mkdir ${ED}/usr/$(get_libdir)"
+		fi
+		${mv_command} "${ED}"/usr/lib/* "${ED}"/usr/"$(get_libdir)"/ || die "Moving files into correct libdir failed"
+		rm -rf "${ED}"/usr/lib
+		for dir in "${ED}"/usr/"$(get_libdir)"/pkgconfig "${ED}"/usr/share/pkgconfig
+		do
+
+			if [[ -d "${dir}" && "$(find "${dir}" -name '*.pc')" != "" ]]
+			then
+				pushd "${dir}" &> /dev/null
+				sed -i -r -e 's:/(lib)([^a-zA-Z0-9]|$):/'"$(get_libdir)"'\2:g' \
+					*.pc \
+					|| die "Sedding some sense into pkgconfig files failed."
+				# popd takes no directory argument; passing "${dir}" made
+				# it fail and leave the directory stack unbalanced.
+				popd &> /dev/null
+			fi
+		done
+		if [[ -d "${ED}/usr/bin" ]]
+		then
+			for exe in "${ED}/usr/bin"/*
+			do
+				if [[ "$(file "${exe}")" == *"shell script text"* ]]
+				then
+					sed -r -i -e ":/lib(/|$): s:/lib(/|$):/$(get_libdir)\1:" \
+						"${exe}" || die "Sedding some sense into ${exe} failed"
+				fi
+			done
+		fi
+
+	fi
+}
diff --git a/eclass/mount-boot.eclass b/eclass/mount-boot.eclass
new file mode 100644
index 000000000000..e8bd9f63bef6
--- /dev/null
+++ b/eclass/mount-boot.eclass
@@ -0,0 +1,157 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mount-boot.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: functions for packages that install files into /boot
+# @DESCRIPTION:
+# This eclass is really only useful for bootloaders.
+#
+# If the live system has a separate /boot partition configured, then this
+# function tries to ensure that it's mounted in rw mode, exiting with an
+# error if it can't. It does nothing if /boot isn't a separate partition.
+
+EXPORT_FUNCTIONS pkg_pretend pkg_preinst pkg_postinst pkg_prerm pkg_postrm
+
+# @FUNCTION: mount-boot_is_disabled
+# @INTERNAL
+# @DESCRIPTION:
+# Detect whether the current environment/build settings are such that we do not
+# want to mess with any mounts.
+mount-boot_is_disabled() {
+ # Since this eclass only deals with /boot, skip things when ROOT is active.
+ if [[ "${ROOT:-/}" != "/" ]] ; then
+ return 0
+ fi
+
+ # If we're only building a package, then there's no need to check things.
+ if [[ "${MERGE_TYPE}" == "buildonly" ]] ; then
+ return 0
+ fi
+
+ # The user wants us to leave things be.
+ if [[ -n ${DONT_MOUNT_BOOT} ]] ; then
+ return 0
+ fi
+
+ # OK, we want to handle things ourselves.
+ return 1
+}
+
+# @FUNCTION: mount-boot_check_status
+# @INTERNAL
+# @DESCRIPTION:
+# Figure out what kind of work we need to do in order to have /boot be sane.
+# Return values are:
+# 0 - Do nothing at all!
+# 1 - It's mounted, but is currently ro, so need to remount rw.
+# 2 - It's not mounted, so need to mount it rw.
+mount-boot_check_status() {
+	# Get out fast if possible.
+	mount-boot_is_disabled && return 0
+
+	# note that /dev/BOOT is in the Gentoo default /etc/fstab file
+	# fstabstate: non-empty iff /etc/fstab lists a /boot mount point
+	# (comment lines and the /dev/BOOT placeholder are skipped).
+	local fstabstate=$(awk '!/^#|^[[:blank:]]+#|^\/dev\/BOOT/ {print $2}' /etc/fstab | egrep "^/boot$" )
+	# procstate: non-empty iff /boot is currently mounted per /proc/mounts.
+	local procstate=$(awk '$2 ~ /^\/boot$/ {print $2}' /proc/mounts)
+	# proc_ro: non-empty iff the mounted /boot carries the "ro" option.
+	local proc_ro=$(awk '{ print $2 " ," $4 "," }' /proc/mounts | sed -n '/\/boot .*,ro,/p')
+
+	if [ -n "${fstabstate}" ] && [ -n "${procstate}" ] ; then
+		if [ -n "${proc_ro}" ] ; then
+			echo
+			einfo "Your boot partition, detected as being mounted at /boot, is read-only."
+			einfo "It will be remounted in read-write mode temporarily."
+			return 1
+		else
+			echo
+			einfo "Your boot partition was detected as being mounted at /boot."
+			einfo "Files will be installed there for ${PN} to function correctly."
+			return 0
+		fi
+	elif [ -n "${fstabstate}" ] && [ -z "${procstate}" ] ; then
+		echo
+		einfo "Your boot partition was not mounted at /boot, so it will be automounted for you."
+		einfo "Files will be installed there for ${PN} to function correctly."
+		return 2
+	else
+		echo
+		einfo "Assuming you do not have a separate /boot partition."
+		return 0
+	fi
+}
+
+mount-boot_pkg_pretend() {
+ # Get out fast if possible.
+ mount-boot_is_disabled && return 0
+
+ elog "To avoid automounting and auto(un)installing with /boot,"
+ elog "just export the DONT_MOUNT_BOOT variable."
+ mount-boot_check_status
+}
+
+mount-boot_mount_boot_partition() {
+	# Ask mount-boot_check_status what work is needed and act on it,
+	# leaving a marker file behind so the umount step can undo exactly
+	# what we did here.
+	mount-boot_check_status
+	case $? in
+	0)	# Nothing to do.
+		;;
+	1)	# Mounted read-only: flip it to read-write.
+		if ! mount -o remount,rw /boot ; then
+			echo
+			eerror "Unable to remount in rw mode. Please do it manually!"
+			die "Can't remount in rw mode. Please do it manually!"
+		fi
+		touch /boot/.e.remount
+		;;
+	2)	# Not mounted: mount it read-write ourselves.
+		if ! mount /boot -o rw ; then
+			echo
+			eerror "Cannot automatically mount your /boot partition."
+			eerror "Your boot partition has to be mounted rw before the installation"
+			eerror "can continue. ${PN} needs to install important files there."
+			die "Please mount your /boot partition manually!"
+		fi
+		touch /boot/.e.mount
+		;;
+	esac
+}
+
+mount-boot_pkg_preinst() {
+ # Handle older EAPIs.
+ case ${EAPI:-0} in
+ [0-3]) mount-boot_pkg_pretend ;;
+ esac
+
+ mount-boot_mount_boot_partition
+}
+
+mount-boot_pkg_prerm() {
+	# Touch /boot/.keep both before and after ensuring /boot is mounted
+	# rw; errors are deliberately ignored since /boot may still be ro or
+	# absent at this point.  NOTE(review): the first touch appears to act
+	# as a writability probe before the mount step — confirm intent.
+	touch "${ROOT}"/boot/.keep 2>/dev/null
+	mount-boot_mount_boot_partition
+	touch "${ROOT}"/boot/.keep 2>/dev/null
+}
+
+mount-boot_umount_boot_partition() {
+	# Skip entirely when the eclass should not manage mounts.
+	mount-boot_is_disabled && return 0
+
+	if [[ -e /boot/.e.remount ]] ; then
+		# We remounted an already-mounted /boot rw; restore ro.
+		einfo "Automatically remounting /boot as ro as it was previously."
+		rm -f /boot/.e.remount
+		mount -o remount,ro /boot
+	elif [[ -e /boot/.e.mount ]] ; then
+		# We mounted /boot ourselves; unmount it again.
+		einfo "Automatically unmounting /boot as it was previously."
+		rm -f /boot/.e.mount
+		umount /boot
+	fi
+}
+
+mount-boot_pkg_postinst() {
+	# Undo any mount/remount performed during pkg_preinst.
+	mount-boot_umount_boot_partition
+}
+
+mount-boot_pkg_postrm() {
+	# Undo any mount/remount performed during pkg_prerm.
+	mount-boot_umount_boot_partition
+}
diff --git a/eclass/mozconfig-3.eclass b/eclass/mozconfig-3.eclass
new file mode 100644
index 000000000000..a0a01a19910f
--- /dev/null
+++ b/eclass/mozconfig-3.eclass
@@ -0,0 +1,104 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# mozconfig.eclass: the new mozilla.eclass
+
+inherit multilib flag-o-matic mozcoreconf-2
+
+# use-flags common among all mozilla ebuilds
+IUSE="+alsa +dbus debug libnotify startup-notification system-sqlite wifi"
+
+# XXX: GConf is used for setting the default browser
+# revisit to make it optional with GNOME 3
+# pango[X] is needed for pangoxft.h
+# freedesktop-icon-theme is needed for bug 341697
+RDEPEND="app-arch/zip
+ app-arch/unzip
+ >=app-text/hunspell-1.2
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.8[X]
+ >=x11-libs/gtk+-2.8.6:2
+ >=x11-libs/pango-1.10.1[X]
+ virtual/jpeg:0
+ alsa? ( media-libs/alsa-lib )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=dev-libs/dbus-glib-0.72 )
+ libnotify? ( >=x11-libs/libnotify-0.4 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ wifi? ( net-wireless/wireless-tools )"
+DEPEND="${RDEPEND}"
+
+# Write the common mozilla configure options via the mozconfig helpers.
+# Call from src_configure() after the mozconfig has been initialized.
+mozconfig_config() {
+	mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+	# Official branding may only be used on non-bindist builds.
+	if has bindist ${IUSE}; then
+		mozconfig_use_enable !bindist official-branding
+		if [[ ${PN} == firefox ]] && use bindist ; then
+			mozconfig_annotate '' --with-branding=browser/branding/aurora
+		fi
+	fi
+
+	# Options that only apply to older mozilla versions.
+	# NOTE(review): $(mozversion_is_new_enough) runs the helper in a
+	# command substitution and executes its *output*; this only behaves
+	# as intended while the helper prints nothing — confirm.
+	if ! $(mozversion_is_new_enough) ; then
+		mozconfig_use_enable alsa ogg
+		mozconfig_use_enable alsa wave
+		mozconfig_use_enable libnotify
+		mozconfig_use_enable debug debugger-info-modules
+		if has +ipc ${IUSE}; then
+			mozconfig_use_enable ipc
+		fi
+		if [[ ${PN} != thunderbird ]] ; then
+			mozconfig_annotate 'places' --enable-storage --enable-places --enable-places_bookmarks
+			mozconfig_annotate '' --enable-oji --enable-mathml
+			mozconfig_annotate 'broken' --disable-mochitest
+		fi
+		if use system-sqlite; then
+			mozconfig_annotate '' --with-sqlite-prefix="${EPREFIX}"/usr
+		fi
+		if use amd64 || use x86 || use arm || use sparc; then
+			mozconfig_annotate '' --enable-tracejit
+		fi
+	fi
+
+	mozconfig_use_enable dbus
+	mozconfig_use_enable debug
+	mozconfig_use_enable debug tests
+	if ! use debug ; then
+		mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+	fi
+	mozconfig_use_enable startup-notification
+	mozconfig_use_enable system-sqlite
+	mozconfig_use_enable wifi necko-wifi
+
+	# Media support: newer versions require ogg/wave/system-libvpx;
+	# older ones key off the webm and alsa use flags.
+	if $(mozversion_is_new_enough) ; then
+		mozconfig_annotate 'required' --enable-ogg
+		mozconfig_annotate 'required' --enable-wave
+		mozconfig_annotate 'required' --with-system-libvpx
+	elif has +webm ${IUSE} && use webm; then
+		if ! use alsa; then
+			echo "Enabling alsa support due to webm request"
+			mozconfig_annotate '+webm -alsa' --enable-ogg
+			mozconfig_annotate '+webm -alsa' --enable-wave
+			mozconfig_annotate '+webm' --enable-webm
+			mozconfig_annotate '+webm' --with-system-libvpx
+		else
+			mozconfig_use_enable webm
+			mozconfig_annotate '+webm' --with-system-libvpx
+		fi
+	else
+		mozconfig_annotate '' --disable-webm
+		mozconfig_annotate '' --disable-system-libvpx
+	fi
+
+	# These are enabled by default in all mozilla applications
+	mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+	mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+	mozconfig_annotate '' --enable-system-hunspell
+	mozconfig_annotate '' --disable-gnomevfs
+	mozconfig_annotate '' --disable-gnomeui
+	mozconfig_annotate '' --enable-gio
+	mozconfig_annotate '' --disable-crashreporter
+}
diff --git a/eclass/mozconfig-v5.31.eclass b/eclass/mozconfig-v5.31.eclass
new file mode 100644
index 000000000000..3c96d5001392
--- /dev/null
+++ b/eclass/mozconfig-v5.31.eclass
@@ -0,0 +1,218 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v5.31.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF31 and newer, v5
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer pulseaudio startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.14:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.10:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ >=sys-libs/zlib-1.2.3
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ virtual/libffi
+ gstreamer? ( media-plugins/gst-plugins-meta:1.0[ffmpeg] )
+ x11-libs/libX11
+ x11-libs/libXext
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.4.2:3[secure-delete,debug=] )
+"
+
+# firefox-31.0-patches-0.3 and above carry a patch making newer libvpx compatible
+case ${PATCHFF##*31.0-patches-} in
+ 0.3) RDEPEND+=" system-libvpx? ( >=media-libs/libvpx-1.3.0 )" ;;
+ *) RDEPEND+=" system-libvpx? ( =media-libs/libvpx-1.3.0* )" ;;
+esac
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v5.31
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+	# Migrated from mozcoreconf-2
+	mozconfig_annotate 'system_libs' \
+		--with-system-zlib \
+		--enable-pango \
+		--enable-svg
+
+	mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+	# Official branding may only be used on non-bindist builds.
+	if has bindist ${IUSE}; then
+		mozconfig_use_enable !bindist official-branding
+		if [[ ${PN} == firefox ]] && use bindist ; then
+			mozconfig_annotate '' --with-branding=browser/branding/aurora
+		fi
+	fi
+
+	mozconfig_use_enable debug
+	mozconfig_use_enable debug tests
+
+	if ! use debug ; then
+		mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+	fi
+
+	mozconfig_use_enable startup-notification
+
+	if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+		# wifi pulls in dbus so manage both here
+		mozconfig_use_enable wifi necko-wifi
+		if use wifi && ! use dbus; then
+			echo "Enabling dbus support due to wifi request"
+			mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+		else
+			mozconfig_use_enable dbus
+		fi
+	else
+		mozconfig_use_enable dbus
+		mozconfig_annotate 'disabled' --disable-necko-wifi
+	fi
+
+	mozconfig_annotate 'required' --enable-ogg
+	mozconfig_annotate 'required' --enable-wave
+
+	# Optional JIT support, only when the ebuild opted in (see
+	# MOZCONFIG_OPTIONAL_JIT at the top of this eclass).
+	if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+		mozconfig_use_enable jit ion
+		mozconfig_use_enable jit yarr-jit
+	fi
+
+	# These are enabled by default in all mozilla applications
+	mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+	mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+	mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+	mozconfig_annotate '' --enable-system-hunspell
+	mozconfig_annotate '' --disable-gnomevfs
+	mozconfig_annotate '' --disable-gnomeui
+	mozconfig_annotate '' --enable-gio
+	mozconfig_annotate '' --disable-crashreporter
+	mozconfig_annotate '' --with-system-png
+	mozconfig_annotate '' --enable-system-ffi
+	mozconfig_annotate '' --disable-gold
+	mozconfig_annotate '' --disable-gconf
+
+	# We must force-enable jemalloc 3 through .mozconfig
+	echo "export MOZ_JEMALLOC=1" >> "${S}"/.mozconfig || die
+	mozconfig_annotate '' --enable-jemalloc
+	mozconfig_annotate '' --enable-replace-malloc
+
+	mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+	mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+	if use gstreamer; then
+		mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+	else
+		mozconfig_annotate '' --disable-gstreamer
+	fi
+	mozconfig_use_enable pulseaudio
+
+	mozconfig_use_enable system-cairo
+	mozconfig_use_enable system-sqlite
+	mozconfig_use_with system-jpeg
+	mozconfig_use_with system-icu
+	mozconfig_use_enable system-icu intl-api
+	mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozconfig-v5.34.eclass b/eclass/mozconfig-v5.34.eclass
new file mode 100644
index 000000000000..aa739660f17e
--- /dev/null
+++ b/eclass/mozconfig-v5.34.eclass
@@ -0,0 +1,232 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v5.34.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF34 and newer, v5
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer pulseaudio selinux startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.14:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.13:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ >=sys-libs/zlib-1.2.3
+ virtual/libffi
+ gstreamer? (
+ >=media-libs/gstreamer-1.2.3:1.0
+ >=media-libs/gst-plugins-base-1.2.3:1.0
+ >=media-libs/gst-plugins-good-1.2.3:1.0
+ >=media-plugins/gst-plugins-libav-1.1.0_pre20130128-r1:1.0
+ )
+ x11-libs/libX11
+ x11-libs/libXcomposite
+ x11-libs/libXdamage
+ x11-libs/libXext
+ x11-libs/libXfixes
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.6:3[secure-delete,debug=] )
+ system-libvpx? ( =media-libs/libvpx-1.3.0*[postproc] )
+"
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+RDEPEND+="
+ selinux? ( sec-policy/selinux-mozilla )"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v5.33
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+	# Migrated from mozcoreconf-2
+	mozconfig_annotate 'system_libs' \
+		--with-system-zlib \
+		--enable-pango \
+		--enable-svg \
+		--with-system-bz2
+
+	mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+	# Official branding may only be used on non-bindist builds.
+	if has bindist ${IUSE}; then
+		mozconfig_use_enable !bindist official-branding
+		if [[ ${PN} == firefox ]] && use bindist ; then
+			mozconfig_annotate '' --with-branding=browser/branding/aurora
+		fi
+	fi
+
+	mozconfig_use_enable debug
+	mozconfig_use_enable debug tests
+
+	if ! use debug ; then
+		mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+	else
+		mozconfig_annotate 'enabled by Gentoo' --enable-debug-symbols
+	fi
+
+	mozconfig_use_enable startup-notification
+
+	if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+		# wifi pulls in dbus so manage both here
+		mozconfig_use_enable wifi necko-wifi
+		if use wifi && ! use dbus; then
+			echo "Enabling dbus support due to wifi request"
+			mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+		else
+			mozconfig_use_enable dbus
+		fi
+	else
+		mozconfig_use_enable dbus
+		mozconfig_annotate 'disabled' --disable-necko-wifi
+	fi
+
+	# These are forced-on for webm support
+	mozconfig_annotate 'required' --enable-ogg
+	mozconfig_annotate 'required' --enable-wave
+
+	# Optional JIT support, only when the ebuild opted in (see
+	# MOZCONFIG_OPTIONAL_JIT at the top of this eclass).
+	if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+		mozconfig_use_enable jit ion
+		mozconfig_use_enable jit yarr-jit
+	fi
+
+	# These are enabled by default in all mozilla applications
+	mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+	mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+	mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+	mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+	mozconfig_annotate 'Gentoo default' --enable-system-hunspell
+	mozconfig_annotate '' --disable-gnomevfs
+	mozconfig_annotate '' --disable-gnomeui
+	mozconfig_annotate '' --enable-gio
+	mozconfig_annotate '' --disable-crashreporter
+	mozconfig_annotate 'Gentoo default' --with-system-png
+	mozconfig_annotate '' --enable-system-ffi
+	mozconfig_annotate 'Gentoo default to honor system linker' --disable-gold
+	mozconfig_annotate '' --disable-gconf
+
+	# Use jemalloc unless libc is not glibc >= 2.4
+	# at this time the minimum glibc in the tree is 2.9 so we should be safe.
+	if use elibc_glibc; then
+		# We must force-enable jemalloc 3 via .mozconfig
+		echo "export MOZ_JEMALLOC3=1" >> "${S}"/.mozconfig || die
+		mozconfig_annotate '' --enable-jemalloc
+		mozconfig_annotate '' --enable-replace-malloc
+	fi
+
+	mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+	mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+	if use gstreamer; then
+		mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+	else
+		mozconfig_annotate '' --disable-gstreamer
+	fi
+	mozconfig_use_enable pulseaudio
+
+	mozconfig_use_enable system-cairo
+	mozconfig_use_enable system-sqlite
+	mozconfig_use_with system-jpeg
+	mozconfig_use_with system-icu
+	mozconfig_use_enable system-icu intl-api
+	mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozconfig-v5.36.eclass b/eclass/mozconfig-v5.36.eclass
new file mode 100644
index 000000000000..03159aceb1d6
--- /dev/null
+++ b/eclass/mozconfig-v5.36.eclass
@@ -0,0 +1,232 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v5.36.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF36 and newer, v5
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer pulseaudio selinux startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.18:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.14:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ >=sys-libs/zlib-1.2.3
+ >=virtual/libffi-3.0.10
+ gstreamer? (
+ >=media-libs/gstreamer-1.2.3:1.0
+ >=media-libs/gst-plugins-base-1.2.3:1.0
+ >=media-libs/gst-plugins-good-1.2.3:1.0
+ >=media-plugins/gst-plugins-libav-1.1.0_pre20130128-r1:1.0
+ )
+ x11-libs/libX11
+ x11-libs/libXcomposite
+ x11-libs/libXdamage
+ x11-libs/libXext
+ x11-libs/libXfixes
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.7.4:3[secure-delete,debug=] )
+ system-libvpx? ( >=media-libs/libvpx-1.3.0[postproc] )
+"
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+RDEPEND+="
+ selinux? ( sec-policy/selinux-mozilla )"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v5.36
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+ # Migrated from mozcoreconf-2
+ mozconfig_annotate 'system_libs' \
+ --with-system-zlib \
+ --enable-pango \
+ --enable-svg \
+ --with-system-bz2
+
+ mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+ if has bindist ${IUSE}; then
+ mozconfig_use_enable !bindist official-branding
+ if [[ ${PN} == firefox ]] && use bindist ; then
+ mozconfig_annotate '' --with-branding=browser/branding/aurora
+ fi
+ fi
+
+ mozconfig_use_enable debug
+ mozconfig_use_enable debug tests
+
+ if ! use debug ; then
+ mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+ else
+ mozconfig_annotate 'enabled by Gentoo' --enable-debug-symbols
+ fi
+
+ mozconfig_use_enable startup-notification
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+ # wifi pulls in dbus so manage both here
+ mozconfig_use_enable wifi necko-wifi
+ if use wifi && ! use dbus; then
+ echo "Enabling dbus support due to wifi request"
+ mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+ else
+ mozconfig_use_enable dbus
+ fi
+ else
+ mozconfig_use_enable dbus
+ mozconfig_annotate 'disabled' --disable-necko-wifi
+ fi
+
+ # These are forced-on for webm support
+ mozconfig_annotate 'required' --enable-ogg
+ mozconfig_annotate 'required' --enable-wave
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ mozconfig_use_enable jit ion
+ mozconfig_use_enable jit yarr-jit
+ fi
+
+ # These are enabled by default in all mozilla applications
+ mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+ mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate 'Gentoo default' --enable-system-hunspell
+ mozconfig_annotate '' --disable-gnomevfs
+ mozconfig_annotate '' --disable-gnomeui
+ mozconfig_annotate '' --enable-gio
+ mozconfig_annotate '' --disable-crashreporter
+ mozconfig_annotate 'Gentoo default' --with-system-png
+ mozconfig_annotate '' --enable-system-ffi
+ mozconfig_annotate 'Gentoo default to honor system linker' --disable-gold
+ mozconfig_annotate '' --disable-gconf
+
+ # Use jemalloc unless libc is not glibc >= 2.4
+ # at this time the minimum glibc in the tree is 2.9 so we should be safe.
+ if use elibc_glibc; then
+ # We must force-enable jemalloc 3 via .mozconfig
+ echo "export MOZ_JEMALLOC3=1" >> "${S}"/.mozconfig || die
+ mozconfig_annotate '' --enable-jemalloc
+ mozconfig_annotate '' --enable-replace-malloc
+ fi
+
+ mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+ mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+ if use gstreamer; then
+ mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+ else
+ mozconfig_annotate '' --disable-gstreamer
+ fi
+ mozconfig_use_enable pulseaudio
+
+ mozconfig_use_enable system-cairo
+ mozconfig_use_enable system-sqlite
+ mozconfig_use_with system-jpeg
+ mozconfig_use_with system-icu
+ mozconfig_use_enable system-icu intl-api
+ mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozconfig-v5.38.eclass b/eclass/mozconfig-v5.38.eclass
new file mode 100644
index 000000000000..53e53b0618bc
--- /dev/null
+++ b/eclass/mozconfig-v5.38.eclass
@@ -0,0 +1,231 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v5.38.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF38 and newer, v5
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer +jemalloc3 pulseaudio selinux startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.18:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.16:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ >=sys-libs/zlib-1.2.3
+ >=virtual/libffi-3.0.10
+ gstreamer? (
+ >=media-libs/gstreamer-1.2.3:1.0
+ >=media-libs/gst-plugins-base-1.2.3:1.0
+ >=media-libs/gst-plugins-good-1.2.3:1.0
+ >=media-plugins/gst-plugins-libav-1.1.0_pre20130128-r1:1.0
+ )
+ x11-libs/libX11
+ x11-libs/libXcomposite
+ x11-libs/libXdamage
+ x11-libs/libXext
+ x11-libs/libXfixes
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.8.2:3[secure-delete,debug=] )
+ system-libvpx? ( >=media-libs/libvpx-1.3.0[postproc] )
+"
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+RDEPEND+="
+ selinux? ( sec-policy/selinux-mozilla )"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v5.38
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+ # Migrated from mozcoreconf-2
+ mozconfig_annotate 'system_libs' \
+ --with-system-zlib \
+ --enable-pango \
+ --enable-svg \
+ --with-system-bz2
+
+ mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+ if has bindist ${IUSE}; then
+ mozconfig_use_enable !bindist official-branding
+ if [[ ${PN} == firefox ]] && use bindist ; then
+ mozconfig_annotate '' --with-branding=browser/branding/aurora
+ fi
+ fi
+
+ mozconfig_use_enable debug
+ mozconfig_use_enable debug tests
+
+ if ! use debug ; then
+ mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+ else
+ mozconfig_annotate 'enabled by Gentoo' --enable-debug-symbols
+ fi
+
+ mozconfig_use_enable startup-notification
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+ # wifi pulls in dbus so manage both here
+ mozconfig_use_enable wifi necko-wifi
+ if use wifi && ! use dbus; then
+ echo "Enabling dbus support due to wifi request"
+ mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+ else
+ mozconfig_use_enable dbus
+ fi
+ else
+ mozconfig_use_enable dbus
+ mozconfig_annotate 'disabled' --disable-necko-wifi
+ fi
+
+ # These are forced-on for webm support
+ mozconfig_annotate 'required' --enable-ogg
+ mozconfig_annotate 'required' --enable-wave
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ mozconfig_use_enable jit ion
+ fi
+
+ # These are enabled by default in all mozilla applications
+ mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+ mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate 'Gentoo default' --enable-system-hunspell
+ mozconfig_annotate '' --disable-gnomevfs
+ mozconfig_annotate '' --disable-gnomeui
+ mozconfig_annotate '' --enable-gio
+ mozconfig_annotate '' --disable-crashreporter
+ mozconfig_annotate 'Gentoo default' --with-system-png
+ mozconfig_annotate '' --enable-system-ffi
+ mozconfig_annotate 'Gentoo default to honor system linker' --disable-gold
+ mozconfig_annotate '' --disable-gconf
+
+ # Use jemalloc unless libc is not glibc >= 2.4
+ # at this time the minimum glibc in the tree is 2.9 so we should be safe.
+ if use elibc_glibc && use jemalloc3; then
+ # We must force-enable jemalloc 3 via .mozconfig
+ echo "export MOZ_JEMALLOC3=1" >> "${S}"/.mozconfig || die
+ mozconfig_annotate '' --enable-jemalloc
+ mozconfig_annotate '' --enable-replace-malloc
+ fi
+
+ mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+ mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+ if use gstreamer; then
+ mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+ else
+ mozconfig_annotate '' --disable-gstreamer
+ fi
+ mozconfig_use_enable pulseaudio
+
+ mozconfig_use_enable system-cairo
+ mozconfig_use_enable system-sqlite
+ mozconfig_use_with system-jpeg
+ mozconfig_use_with system-icu
+ mozconfig_use_enable system-icu intl-api
+ mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozconfig-v6.38.eclass b/eclass/mozconfig-v6.38.eclass
new file mode 100644
index 000000000000..367a8a734b83
--- /dev/null
+++ b/eclass/mozconfig-v6.38.eclass
@@ -0,0 +1,239 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v6.38.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF38 and newer, v6
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer gstreamer-0 +jemalloc3 pulseaudio selinux startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.18:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.16:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ >=sys-libs/zlib-1.2.3
+ >=virtual/libffi-3.0.10
+ gstreamer? (
+ >=media-libs/gstreamer-1.4.5:1.0
+ >=media-libs/gst-plugins-base-1.4.5:1.0
+ >=media-libs/gst-plugins-good-1.4.5:1.0
+ >=media-plugins/gst-plugins-libav-1.4.5:1.0
+ )
+ gstreamer-0? (
+ media-plugins/gst-plugins-meta:0.10[ffmpeg]
+ )
+ x11-libs/libX11
+ x11-libs/libXcomposite
+ x11-libs/libXdamage
+ x11-libs/libXext
+ x11-libs/libXfixes
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.8.2:3[secure-delete,debug=] )
+ system-libvpx? ( >=media-libs/libvpx-1.3.0[postproc] )
+"
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+RDEPEND+="
+ selinux? ( sec-policy/selinux-mozilla )"
+
+# only one of gstreamer and gstreamer-0 can be enabled at a time, so set REQUIRED_USE to signify this
+REQUIRED_USE="?? ( gstreamer gstreamer-0 )"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v6.38
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+ # Migrated from mozcoreconf-2
+ mozconfig_annotate 'system_libs' \
+ --with-system-zlib \
+ --enable-pango \
+ --enable-svg \
+ --with-system-bz2
+
+ mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+ if has bindist ${IUSE}; then
+ mozconfig_use_enable !bindist official-branding
+ if [[ ${PN} == firefox ]] && use bindist ; then
+ mozconfig_annotate '' --with-branding=browser/branding/aurora
+ fi
+ fi
+
+ mozconfig_use_enable debug
+ mozconfig_use_enable debug tests
+
+ if ! use debug ; then
+ mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+ else
+ mozconfig_annotate 'enabled by Gentoo' --enable-debug-symbols
+ fi
+
+ mozconfig_use_enable startup-notification
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+ # wifi pulls in dbus so manage both here
+ mozconfig_use_enable wifi necko-wifi
+ if use wifi && ! use dbus; then
+ echo "Enabling dbus support due to wifi request"
+ mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+ else
+ mozconfig_use_enable dbus
+ fi
+ else
+ mozconfig_use_enable dbus
+ mozconfig_annotate 'disabled' --disable-necko-wifi
+ fi
+
+ # These are forced-on for webm support
+ mozconfig_annotate 'required' --enable-ogg
+ mozconfig_annotate 'required' --enable-wave
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ mozconfig_use_enable jit ion
+ fi
+
+ # These are enabled by default in all mozilla applications
+ mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+ mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate 'Gentoo default' --enable-system-hunspell
+ mozconfig_annotate '' --disable-gnomevfs
+ mozconfig_annotate '' --disable-gnomeui
+ mozconfig_annotate '' --enable-gio
+ mozconfig_annotate '' --disable-crashreporter
+ mozconfig_annotate 'Gentoo default' --with-system-png
+ mozconfig_annotate '' --enable-system-ffi
+ mozconfig_annotate 'Gentoo default to honor system linker' --disable-gold
+ mozconfig_annotate '' --disable-gconf
+
+ # Use jemalloc unless libc is not glibc >= 2.4
+ # at this time the minimum glibc in the tree is 2.9 so we should be safe.
+ if use elibc_glibc && use jemalloc3; then
+ # We must force-enable jemalloc 3 via .mozconfig
+ echo "export MOZ_JEMALLOC3=1" >> "${S}"/.mozconfig || die
+ mozconfig_annotate '' --enable-jemalloc
+ mozconfig_annotate '' --enable-replace-malloc
+ fi
+
+ mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+ mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+ if use gstreamer ; then
+ mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+ elif use gstreamer-0 ; then
+ mozconfig_annotate '+gstreamer-0' --enable-gstreamer=0.10
+ else
+ mozconfig_annotate '' --disable-gstreamer
+ fi
+ mozconfig_use_enable pulseaudio
+
+ mozconfig_use_enable system-cairo
+ mozconfig_use_enable system-sqlite
+ mozconfig_use_with system-jpeg
+ mozconfig_use_with system-icu
+ mozconfig_use_enable system-icu intl-api
+ mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozconfig-v6.39.eclass b/eclass/mozconfig-v6.39.eclass
new file mode 100644
index 000000000000..909b65d38269
--- /dev/null
+++ b/eclass/mozconfig-v6.39.eclass
@@ -0,0 +1,240 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozconfig-v6.39.eclass
+# @MAINTAINER:
+# mozilla team <mozilla@gentoo.org>
+# @BLURB: the new mozilla common configuration eclass for FF39 and newer, v6
+# @DESCRIPTION:
+# This eclass is used in mozilla ebuilds (firefox, thunderbird, seamonkey)
+# to provide a single common place for the common mozilla engine components.
+#
+# The eclass provides all common dependencies as well as common use flags.
+#
+# Some use flags which may be optional in particular mozilla packages can be
+# supported through setting eclass variables.
+#
+# This eclass inherits mozconfig helper functions as defined in mozcoreconf-v3,
+# and so ebuilds inheriting this eclass do not need to inherit that.
+
+inherit multilib flag-o-matic toolchain-funcs mozcoreconf-v3
+
+case ${EAPI} in
+ 0|1|2|3|4) die "EAPI=${EAPI} not supported"
+esac
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_WIFI
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional necko-wifi support via IUSE="wifi". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if necko-wifi support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# @ECLASS-VARIABLE: MOZCONFIG_OPTIONAL_JIT
+# @DESCRIPTION:
+# Set this variable before the inherit line, when an ebuild needs to provide
+# optional JIT support via IUSE="jit". Currently this would include
+# ebuilds for firefox, and potentially seamonkey.
+#
+# Leave the variable UNSET if optional jit support should not be available.
+# Set the variable to "enabled" if the use flag should be enabled by default.
+# Set the variable to any value if the use flag should exist but not be default-enabled.
+
+# use-flags common among all mozilla ebuilds
+IUSE="${IUSE} dbus debug gstreamer gstreamer-0 +jemalloc3 pulseaudio selinux startup-notification system-cairo system-icu system-jpeg system-sqlite system-libvpx"
+
+# some notes on deps:
+# gtk:2 minimum is technically 2.10 but gio support (enabled by default) needs 2.14
+# media-libs/mesa needs to be 10.2 or above due to a bug with flash+vdpau
+
+RDEPEND=">=app-text/hunspell-1.2
+ dev-libs/atk
+ dev-libs/expat
+ >=dev-libs/libevent-1.4.7
+ >=x11-libs/cairo-1.10[X]
+ >=x11-libs/gtk+-2.18:2
+ x11-libs/gdk-pixbuf
+ >=x11-libs/pango-1.22.0
+ >=media-libs/libpng-1.6.16:0=[apng]
+ >=media-libs/mesa-10.2:*
+ media-libs/fontconfig
+ >=media-libs/freetype-2.4.10
+ kernel_linux? ( media-libs/alsa-lib )
+ pulseaudio? ( media-sound/pulseaudio )
+ virtual/freedesktop-icon-theme
+ dbus? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72 )
+ startup-notification? ( >=x11-libs/startup-notification-0.8 )
+ >=dev-libs/glib-2.26:2
+ >=sys-libs/zlib-1.2.3
+ >=virtual/libffi-3.0.10
+ gstreamer? (
+ >=media-libs/gstreamer-1.4.5:1.0
+ >=media-libs/gst-plugins-base-1.4.5:1.0
+ >=media-libs/gst-plugins-good-1.4.5:1.0
+ >=media-plugins/gst-plugins-libav-1.4.5:1.0
+ )
+ gstreamer-0? (
+ >=media-libs/gstreamer-0.10.25:0.10
+ media-plugins/gst-plugins-meta:0.10[ffmpeg]
+ )
+ x11-libs/libX11
+ x11-libs/libXcomposite
+ x11-libs/libXdamage
+ x11-libs/libXext
+ x11-libs/libXfixes
+ x11-libs/libXrender
+ x11-libs/libXt
+ system-cairo? ( >=x11-libs/cairo-1.12[X] >=x11-libs/pixman-0.19.2 )
+ system-icu? ( >=dev-libs/icu-51.1:= )
+ system-jpeg? ( >=media-libs/libjpeg-turbo-1.2.1 )
+ system-sqlite? ( >=dev-db/sqlite-3.8.9:3[secure-delete,debug=] )
+ system-libvpx? ( >=media-libs/libvpx-1.3.0[postproc] )
+"
+
+if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_WIFI} = "enabled" ]]; then
+ IUSE+=" +wifi"
+ else
+ IUSE+=" wifi"
+ fi
+ RDEPEND+="
+ wifi? ( >=sys-apps/dbus-0.60
+ >=dev-libs/dbus-glib-0.72
+ net-wireless/wireless-tools )"
+fi
+if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ if [[ ${MOZCONFIG_OPTIONAL_JIT} = "enabled" ]]; then
+ IUSE+=" +jit"
+ else
+ IUSE+=" jit"
+ fi
+fi
+
+DEPEND="app-arch/zip
+ app-arch/unzip
+ >=sys-devel/binutils-2.16.1
+ ${RDEPEND}"
+
+RDEPEND+="
+ selinux? ( sec-policy/selinux-mozilla )"
+
+# only one of gstreamer and gstreamer-0 can be enabled at a time, so set REQUIRED_USE to signify this
+REQUIRED_USE="?? ( gstreamer gstreamer-0 )"
+
+# @FUNCTION: mozconfig_config
+# @DESCRIPTION:
+# Set common configure options for mozilla packages.
+# Call this within src_configure() phase, after mozconfig_init
+#
+# Example:
+#
+# inherit mozconfig-v6.39
+#
+# src_configure() {
+# mozconfig_init
+# mozconfig_config
+# # ... misc ebuild-unique settings via calls to
+# # ... mozconfig_{annotate,use_with,use_enable}
+# mozconfig_final
+# }
+
+mozconfig_config() {
+ # Migrated from mozcoreconf-2
+ mozconfig_annotate 'system_libs' \
+ --with-system-zlib \
+ --enable-pango \
+ --enable-svg \
+ --with-system-bz2
+
+ mozconfig_annotate '' --enable-default-toolkit=cairo-gtk2
+
+ if has bindist ${IUSE}; then
+ mozconfig_use_enable !bindist official-branding
+ if [[ ${PN} == firefox ]] && use bindist ; then
+ mozconfig_annotate '' --with-branding=browser/branding/aurora
+ fi
+ fi
+
+ mozconfig_use_enable debug
+ mozconfig_use_enable debug tests
+
+ if ! use debug ; then
+ mozconfig_annotate 'disabled by Gentoo' --disable-debug-symbols
+ else
+ mozconfig_annotate 'enabled by Gentoo' --enable-debug-symbols
+ fi
+
+ mozconfig_use_enable startup-notification
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_WIFI} ]] ; then
+ # wifi pulls in dbus so manage both here
+ mozconfig_use_enable wifi necko-wifi
+ if use wifi && ! use dbus; then
+ echo "Enabling dbus support due to wifi request"
+ mozconfig_annotate 'dbus required by necko-wifi' --enable-dbus
+ else
+ mozconfig_use_enable dbus
+ fi
+ else
+ mozconfig_use_enable dbus
+ mozconfig_annotate 'disabled' --disable-necko-wifi
+ fi
+
+ # These are forced-on for webm support
+ mozconfig_annotate 'required' --enable-ogg
+ mozconfig_annotate 'required' --enable-wave
+
+ if [[ -n ${MOZCONFIG_OPTIONAL_JIT} ]]; then
+ mozconfig_use_enable jit ion
+ fi
+
+ # These are enabled by default in all mozilla applications
+ mozconfig_annotate '' --with-system-nspr --with-nspr-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --with-system-nss --with-nss-prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --x-includes="${EPREFIX}"/usr/include --x-libraries="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate '' --with-system-libevent="${EPREFIX}"/usr
+ mozconfig_annotate '' --prefix="${EPREFIX}"/usr
+ mozconfig_annotate '' --libdir="${EPREFIX}"/usr/$(get_libdir)
+ mozconfig_annotate 'Gentoo default' --enable-system-hunspell
+ mozconfig_annotate '' --disable-gnomevfs
+ mozconfig_annotate '' --disable-gnomeui
+ mozconfig_annotate '' --enable-gio
+ mozconfig_annotate '' --disable-crashreporter
+ mozconfig_annotate 'Gentoo default' --with-system-png
+ mozconfig_annotate '' --enable-system-ffi
+ mozconfig_annotate 'Gentoo default to honor system linker' --disable-gold
+ mozconfig_annotate '' --disable-gconf
+
+ # Use jemalloc unless libc is not glibc >= 2.4
+ # at this time the minimum glibc in the tree is 2.9 so we should be safe.
+ if use elibc_glibc && use jemalloc3; then
+ # We must force-enable jemalloc 3 via .mozconfig
+ echo "export MOZ_JEMALLOC3=1" >> "${S}"/.mozconfig || die
+ mozconfig_annotate '' --enable-jemalloc
+ mozconfig_annotate '' --enable-replace-malloc
+ fi
+
+ mozconfig_annotate '' --target="${CTARGET:-${CHOST}}"
+ mozconfig_annotate '' --build="${CTARGET:-${CHOST}}"
+
+ if use gstreamer ; then
+ mozconfig_annotate '+gstreamer' --enable-gstreamer=1.0
+ elif use gstreamer-0 ; then
+ mozconfig_annotate '+gstreamer-0' --enable-gstreamer=0.10
+ else
+ mozconfig_annotate '' --disable-gstreamer
+ fi
+ mozconfig_use_enable pulseaudio
+
+ mozconfig_use_enable system-cairo
+ mozconfig_use_enable system-sqlite
+ mozconfig_use_with system-jpeg
+ mozconfig_use_with system-icu
+ mozconfig_use_enable system-icu intl-api
+ mozconfig_use_with system-libvpx
+}
diff --git a/eclass/mozcoreconf-2.eclass b/eclass/mozcoreconf-2.eclass
new file mode 100644
index 000000000000..d7008b955087
--- /dev/null
+++ b/eclass/mozcoreconf-2.eclass
@@ -0,0 +1,274 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# mozcoreconf-2.eclass : core options for mozilla
+# inherit mozconfig-2 if you need USE flags
+
+PYTHON_COMPAT=( python2_7 )
+PYTHON_REQ_USE='threads,sqlite'
+
+inherit multilib flag-o-matic python-any-r1 versionator
+
+IUSE="${IUSE} custom-cflags custom-optimization"
+
+RDEPEND="x11-libs/libXrender
+ x11-libs/libXt
+ >=sys-libs/zlib-1.1.4"
+
+DEPEND="${RDEPEND}
+ virtual/pkgconfig
+ ${PYTHON_DEPS}"
+
+# mozconfig_annotate: add an annotated line to .mozconfig
+#
+# Example:
+# mozconfig_annotate "building on ultrasparc" --enable-js-ultrasparc
+# => ac_add_options --enable-js-ultrasparc # building on ultrasparc
+mozconfig_annotate() {
+ declare reason=$1 x ; shift
+ [[ $# -gt 0 ]] || die "mozconfig_annotate missing flags for ${reason}\!"
+ for x in ${*}; do
+ echo "ac_add_options ${x} # ${reason}" >>.mozconfig
+ done
+}
+
+# mozconfig_use_enable: add a line to .mozconfig based on a USE-flag
+#
+# Example:
+# mozconfig_use_enable truetype freetype2
+# => ac_add_options --enable-freetype2 # +truetype
+mozconfig_use_enable() {
+ declare flag=$(use_enable "$@")
+ mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
+}
+
+# mozconfig_use_with: add a line to .mozconfig based on a USE-flag
+#
+# Example:
+# mozconfig_use_with kerberos gss-api /usr/$(get_libdir)
+# => ac_add_options --with-gss-api=/usr/lib # +kerberos
+mozconfig_use_with() {
+ declare flag=$(use_with "$@")
+ mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
+}
+
+# mozconfig_use_extension: enable or disable an extension based on a USE-flag
+#
+# Example:
+# mozconfig_use_extension gnome gnomevfs
+# => ac_add_options --enable-extensions=gnomevfs
+mozconfig_use_extension() {
+ declare minus=$(use $1 || echo -)
+ mozconfig_annotate "${minus:-+}$1" --enable-extensions=${minus}${2}
+}
+
+mozversion_is_new_enough() {
+ case ${PN} in
+ firefox|thunderbird)
+ if [[ $(get_version_component_range 1) -ge 17 ]] ; then
+ return 0
+ fi
+ ;;
+ seamonkey)
+ if [[ $(get_version_component_range 1) -eq 2 ]] && [[ $(get_version_component_range 2) -ge 14 ]] ; then
+ return 0
+ fi
+ ;;
+ esac
+
+ return 1
+}
+
+moz_pkgsetup() {
+ # Ensure we use C locale when building
+ export LANG="C"
+ export LC_ALL="C"
+ export LC_MESSAGES="C"
+ export LC_CTYPE="C"
+
+	# Ensure that we have a sane build environment
+ export MOZILLA_CLIENT=1
+ export BUILD_OPT=1
+ export NO_STATIC_LIB=1
+ export USE_PTHREADS=1
+ export ALDFLAGS=${LDFLAGS}
+ # ensure MOZCONFIG is not defined
+ eval unset MOZCONFIG
+
+ # nested configure scripts in mozilla products generate unrecognized options
+ # false positives when toplevel configure passes downwards.
+ export QA_CONFIGURE_OPTIONS=".*"
+
+ if [[ $(gcc-major-version) -eq 3 ]]; then
+ ewarn "Unsupported compiler detected, DO NOT file bugs for"
+ ewarn "outdated compilers. Bugs opened with gcc-3 will be closed"
+ ewarn "invalid."
+ fi
+
+ python-any-r1_pkg_setup
+}
+
+mozconfig_init() {
+ declare enable_optimize pango_version myext x
+ declare XUL=$([[ ${PN} == xulrunner ]] && echo true || echo false)
+ declare FF=$([[ ${PN} == firefox ]] && echo true || echo false)
+ declare SM=$([[ ${PN} == seamonkey ]] && echo true || echo false)
+ declare TB=$([[ ${PN} == thunderbird ]] && echo true || echo false)
+
+ ####################################
+ #
+ # Setup the initial .mozconfig
+ # See http://www.mozilla.org/build/configure-build.html
+ #
+ ####################################
+
+ case ${PN} in
+ *xulrunner)
+ cp xulrunner/config/mozconfig .mozconfig \
+ || die "cp xulrunner/config/mozconfig failed" ;;
+ *firefox)
+ cp browser/config/mozconfig .mozconfig \
+ || die "cp browser/config/mozconfig failed" ;;
+ seamonkey)
+ # Must create the initial mozconfig to enable application
+ : >.mozconfig || die "initial mozconfig creation failed"
+ mozconfig_annotate "" --enable-application=suite ;;
+ *thunderbird)
+ # Must create the initial mozconfig to enable application
+ : >.mozconfig || die "initial mozconfig creation failed"
+ mozconfig_annotate "" --enable-application=mail ;;
+ esac
+
+ ####################################
+ #
+ # CFLAGS setup and ARCH support
+ #
+ ####################################
+
+ # Set optimization level
+ if [[ ${ARCH} == hppa ]]; then
+ mozconfig_annotate "more than -O0 causes a segfault on hppa" --enable-optimize=-O0
+ elif [[ ${ARCH} == x86 ]]; then
+ mozconfig_annotate "less then -O2 causes a segfault on x86" --enable-optimize=-O2
+ elif use custom-optimization || [[ ${ARCH} =~ (alpha|ia64) ]]; then
+ # Set optimization level based on CFLAGS
+ if is-flag -O0; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O0
+ elif [[ ${ARCH} == ppc ]] && has_version '>=sys-libs/glibc-2.8'; then
+ mozconfig_annotate "more than -O1 segfaults on ppc with glibc-2.8" --enable-optimize=-O1
+ elif is-flag -O3; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O3
+ elif is-flag -O1; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O1
+ elif is-flag -Os; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-Os
+ else
+ mozconfig_annotate "Gentoo's default optimization" --enable-optimize=-O2
+ fi
+ else
+ # Enable Mozilla's default
+ mozconfig_annotate "mozilla default" --enable-optimize
+ fi
+
+ # Strip optimization so it does not end up in compile string
+ filter-flags '-O*'
+
+ # Strip over-aggressive CFLAGS
+ use custom-cflags || strip-flags
+
+ # Additional ARCH support
+ case "${ARCH}" in
+ alpha)
+ # Historically we have needed to add -fPIC manually for 64-bit.
+ # Additionally, alpha should *always* build with -mieee for correct math
+ # operation
+ append-flags -fPIC -mieee
+ ;;
+
+ ia64)
+ # Historically we have needed to add this manually for 64-bit
+ append-flags -fPIC
+ ;;
+
+ ppc64)
+ append-flags -fPIC -mminimal-toc
+ ;;
+ esac
+
+ # Go a little faster; use less RAM
+ append-flags "$MAKEEDIT_FLAGS"
+
+ ####################################
+ #
+ # mozconfig setup
+ #
+ ####################################
+
+ mozconfig_annotate system_libs \
+ --with-system-jpeg \
+ --with-system-zlib \
+ --enable-pango \
+ --enable-system-cairo
+ if ! $(mozversion_is_new_enough) ; then
+ mozconfig_annotate system-libs --enable-svg
+ fi
+
+ mozconfig_annotate disable_update_strip \
+ --disable-pedantic \
+ --disable-updater \
+ --disable-strip \
+ --disable-install-strip
+ if ! $(mozversion_is_new_enough) ; then
+ mozconfig_annotate disable_update_strip \
+ --disable-installer \
+ --disable-strip-libs
+ fi
+
+ if [[ ${PN} != seamonkey ]]; then
+ mozconfig_annotate basic_profile \
+ --disable-profilelocking
+ if ! $(mozversion_is_new_enough) ; then
+ mozconfig_annotate basic_profile \
+ --enable-single-profile \
+ --disable-profilesharing
+ fi
+ fi
+
+ # Here is a strange one...
+ if is-flag '-mcpu=ultrasparc*' || is-flag '-mtune=ultrasparc*'; then
+ mozconfig_annotate "building on ultrasparc" --enable-js-ultrasparc
+ fi
+
+ # Currently --enable-elf-dynstr-gc only works for x86,
+ # thanks to Jason Wever <weeve@gentoo.org> for the fix.
+ if use x86 && [[ ${enable_optimize} != -O0 ]]; then
+ mozconfig_annotate "${ARCH} optimized build" --enable-elf-dynstr-gc
+ fi
+
+ # jemalloc won't build with older glibc
+ ! has_version ">=sys-libs/glibc-2.4" && mozconfig_annotate "we have old glibc" --disable-jemalloc
+}
+
+# mozconfig_final: display a table describing all configuration options paired
+# with reasons, then clean up extensions list
+mozconfig_final() {
+ declare ac opt hash reason
+ echo
+ echo "=========================================================="
+ echo "Building ${PF} with the following configuration"
+ grep ^ac_add_options .mozconfig | while read ac opt hash reason; do
+ [[ -z ${hash} || ${hash} == \# ]] \
+ || die "error reading mozconfig: ${ac} ${opt} ${hash} ${reason}"
+ printf " %-30s %s\n" "${opt}" "${reason:-mozilla.org default}"
+ done
+ echo "=========================================================="
+ echo
+
+ # Resolve multiple --enable-extensions down to one
+ declare exts=$(sed -n 's/^ac_add_options --enable-extensions=\([^ ]*\).*/\1/p' \
+ .mozconfig | xargs)
+ sed -i '/^ac_add_options --enable-extensions/d' .mozconfig
+ echo "ac_add_options --enable-extensions=${exts// /,}" >> .mozconfig
+}
+
diff --git a/eclass/mozcoreconf-v3.eclass b/eclass/mozcoreconf-v3.eclass
new file mode 100644
index 000000000000..01c14ee90797
--- /dev/null
+++ b/eclass/mozcoreconf-v3.eclass
@@ -0,0 +1,261 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozcoreconf-v3.eclass
+# @MAINTAINER:
+# Mozilla team <mozilla@gentoo.org>
+# @BLURB: core options and configuration functions for mozilla
+# @DESCRIPTION:
+#
+# inherit mozconfig-v5.* or above for mozilla configuration support
+
+if [[ ! ${_MOZCORECONF_V3} ]]; then
+
+PYTHON_COMPAT=( python2_7 )
+PYTHON_REQ_USE='threads,sqlite'
+
+inherit multilib flag-o-matic python-any-r1 versionator
+
+IUSE="${IUSE} custom-cflags custom-optimization"
+
+DEPEND="virtual/pkgconfig
+ ${PYTHON_DEPS}"
+
+# @FUNCTION: mozconfig_annotate
+# @DESCRIPTION:
+# add an annotated line to .mozconfig
+#
+# Example:
+# mozconfig_annotate "building on ultrasparc" --enable-js-ultrasparc
+# => ac_add_options --enable-js-ultrasparc # building on ultrasparc
+mozconfig_annotate() {
+ declare reason=$1 x ; shift
+ [[ $# -gt 0 ]] || die "mozconfig_annotate missing flags for ${reason}\!"
+ for x in ${*}; do
+ echo "ac_add_options ${x} # ${reason}" >>.mozconfig
+ done
+}
+
+# @FUNCTION: mozconfig_use_enable
+# @DESCRIPTION:
+# add a line to .mozconfig based on a USE-flag
+#
+# Example:
+# mozconfig_use_enable truetype freetype2
+# => ac_add_options --enable-freetype2 # +truetype
+mozconfig_use_enable() {
+ declare flag=$(use_enable "$@")
+ mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
+}
+
+# @FUNCTION: mozconfig_use_with
+# @DESCRIPTION:
+# add a line to .mozconfig based on a USE-flag
+#
+# Example:
+# mozconfig_use_with kerberos gss-api /usr/$(get_libdir)
+# => ac_add_options --with-gss-api=/usr/lib # +kerberos
+mozconfig_use_with() {
+ declare flag=$(use_with "$@")
+ mozconfig_annotate "$(use $1 && echo +$1 || echo -$1)" "${flag}"
+}
+
+# @FUNCTION: mozconfig_use_extension
+# @DESCRIPTION:
+# enable or disable an extension based on a USE-flag
+#
+# Example:
+# mozconfig_use_extension gnome gnomevfs
+# => ac_add_options --enable-extensions=gnomevfs
+mozconfig_use_extension() {
+ declare minus=$(use $1 || echo -)
+ mozconfig_annotate "${minus:-+}$1" --enable-extensions=${minus}${2}
+}
+
+moz_pkgsetup() {
+ # Ensure we use C locale when building
+ export LANG="C"
+ export LC_ALL="C"
+ export LC_MESSAGES="C"
+ export LC_CTYPE="C"
+
+	# Ensure that we have a sane build environment
+ export MOZILLA_CLIENT=1
+ export BUILD_OPT=1
+ export NO_STATIC_LIB=1
+ export USE_PTHREADS=1
+ export ALDFLAGS=${LDFLAGS}
+ # ensure MOZCONFIG is not defined
+ eval unset MOZCONFIG
+
+ # nested configure scripts in mozilla products generate unrecognized options
+ # false positives when toplevel configure passes downwards.
+ export QA_CONFIGURE_OPTIONS=".*"
+
+ if [[ $(gcc-major-version) -eq 3 ]]; then
+ ewarn "Unsupported compiler detected, DO NOT file bugs for"
+ ewarn "outdated compilers. Bugs opened with gcc-3 will be closed"
+ ewarn "invalid."
+ fi
+
+ python-any-r1_pkg_setup
+}
+
+# @FUNCTION: mozconfig_init
+# @DESCRIPTION:
+# Initialize mozilla configuration and populate with core settings.
+# This should be called in src_configure before any other mozconfig_* functions.
+mozconfig_init() {
+ declare enable_optimize pango_version myext x
+ declare XUL=$([[ ${PN} == xulrunner ]] && echo true || echo false)
+ declare FF=$([[ ${PN} == firefox ]] && echo true || echo false)
+ declare SM=$([[ ${PN} == seamonkey ]] && echo true || echo false)
+ declare TB=$([[ ${PN} == thunderbird ]] && echo true || echo false)
+
+ ####################################
+ #
+ # Setup the initial .mozconfig
+ # See http://www.mozilla.org/build/configure-build.html
+ #
+ ####################################
+
+ case ${PN} in
+ *xulrunner)
+ cp xulrunner/config/mozconfig .mozconfig \
+ || die "cp xulrunner/config/mozconfig failed" ;;
+ *firefox)
+ cp browser/config/mozconfig .mozconfig \
+ || die "cp browser/config/mozconfig failed" ;;
+ seamonkey)
+ # Must create the initial mozconfig to enable application
+ : >.mozconfig || die "initial mozconfig creation failed"
+ mozconfig_annotate "" --enable-application=suite ;;
+ *thunderbird)
+ # Must create the initial mozconfig to enable application
+ : >.mozconfig || die "initial mozconfig creation failed"
+ mozconfig_annotate "" --enable-application=mail ;;
+ esac
+
+ ####################################
+ #
+ # CFLAGS setup and ARCH support
+ #
+ ####################################
+
+ # Set optimization level
+ if [[ ${ARCH} == hppa ]]; then
+ mozconfig_annotate "more than -O0 causes a segfault on hppa" --enable-optimize=-O0
+ elif [[ ${ARCH} == x86 ]]; then
+ mozconfig_annotate "less then -O2 causes a segfault on x86" --enable-optimize=-O2
+ elif use custom-optimization || [[ ${ARCH} =~ (alpha|ia64) ]]; then
+ # Set optimization level based on CFLAGS
+ if is-flag -O0; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O0
+ elif [[ ${ARCH} == ppc ]] && has_version '>=sys-libs/glibc-2.8'; then
+ mozconfig_annotate "more than -O1 segfaults on ppc with glibc-2.8" --enable-optimize=-O1
+ elif is-flag -O3; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O3
+ elif is-flag -O1; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-O1
+ elif is-flag -Os; then
+ mozconfig_annotate "from CFLAGS" --enable-optimize=-Os
+ else
+ mozconfig_annotate "Gentoo's default optimization" --enable-optimize=-O2
+ fi
+ else
+ # Enable Mozilla's default
+ mozconfig_annotate "mozilla default" --enable-optimize
+ fi
+
+ # Strip optimization so it does not end up in compile string
+ filter-flags '-O*'
+
+ # Strip over-aggressive CFLAGS
+ use custom-cflags || strip-flags
+
+ # Additional ARCH support
+ case "${ARCH}" in
+ alpha)
+ # Historically we have needed to add -fPIC manually for 64-bit.
+ # Additionally, alpha should *always* build with -mieee for correct math
+ # operation
+ append-flags -fPIC -mieee
+ ;;
+
+ ia64)
+ # Historically we have needed to add this manually for 64-bit
+ append-flags -fPIC
+ ;;
+
+ ppc64)
+ append-flags -fPIC -mminimal-toc
+ ;;
+ esac
+
+ # Go a little faster; use less RAM
+ append-flags "$MAKEEDIT_FLAGS"
+
+ ####################################
+ #
+ # mozconfig setup
+ #
+ ####################################
+
+ mozconfig_annotate disable_update_strip \
+ --disable-pedantic \
+ --disable-updater \
+ --disable-strip \
+ --disable-install-strip \
+ --disable-installer \
+ --disable-strip-libs
+
+ if [[ ${PN} != seamonkey ]]; then
+ mozconfig_annotate basic_profile \
+ --disable-profilelocking \
+ --enable-single-profile \
+ --disable-profilesharing
+ fi
+
+ # Here is a strange one...
+ if is-flag '-mcpu=ultrasparc*' || is-flag '-mtune=ultrasparc*'; then
+ mozconfig_annotate "building on ultrasparc" --enable-js-ultrasparc
+ fi
+
+ # Currently --enable-elf-dynstr-gc only works for x86,
+ # thanks to Jason Wever <weeve@gentoo.org> for the fix.
+ if use x86 && [[ ${enable_optimize} != -O0 ]]; then
+ mozconfig_annotate "${ARCH} optimized build" --enable-elf-dynstr-gc
+ fi
+
+ # jemalloc won't build with older glibc
+ ! has_version ">=sys-libs/glibc-2.4" && mozconfig_annotate "we have old glibc" --disable-jemalloc
+}
+
+# @FUNCTION: mozconfig_final
+# @DESCRIPTION:
+# Display a table describing all configuration options paired
+# with reasons, then clean up extensions list.
+# This should be called in src_configure at the end of all other mozconfig_* functions.
+mozconfig_final() {
+ declare ac opt hash reason
+ echo
+ echo "=========================================================="
+ echo "Building ${PF} with the following configuration"
+ grep ^ac_add_options .mozconfig | while read ac opt hash reason; do
+ [[ -z ${hash} || ${hash} == \# ]] \
+ || die "error reading mozconfig: ${ac} ${opt} ${hash} ${reason}"
+ printf " %-30s %s\n" "${opt}" "${reason:-mozilla.org default}"
+ done
+ echo "=========================================================="
+ echo
+
+ # Resolve multiple --enable-extensions down to one
+ declare exts=$(sed -n 's/^ac_add_options --enable-extensions=\([^ ]*\).*/\1/p' \
+ .mozconfig | xargs)
+ sed -i '/^ac_add_options --enable-extensions/d' .mozconfig
+ echo "ac_add_options --enable-extensions=${exts// /,}" >> .mozconfig
+}
+
+_MOZCORECONF_V3=1
+fi
diff --git a/eclass/mozextension.eclass b/eclass/mozextension.eclass
new file mode 100644
index 000000000000..40a8ac00c588
--- /dev/null
+++ b/eclass/mozextension.eclass
@@ -0,0 +1,88 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: mozextension.eclass
+# @MAINTAINER:
+# Mozilla team <mozilla@gentoo.org>
+# @BLURB: Install extensions for use in mozilla products.
+#
+if [[ ! ${_MOZEXTENSION} ]]; then
+
+# @ECLASS-VARIABLE: MOZEXTENSION_TARGET
+# @DESCRIPTION:
+# This variable allows the installation path for xpi_install
+# to be overridden from the default app-global extensions path.
+# Default is empty, which installs to predetermined hard-coded
+# paths specified in the eclass.
+: ${MOZEXTENSION_TARGET:=""}
+
+inherit eutils
+
+DEPEND="app-arch/unzip"
+
+mozversion_extension_location() {
+ case ${PN} in
+ firefox|firefox-bin)
+ if [[ $(get_version_component_range 1) -ge 21 ]] ; then
+ return 0
+ fi
+ ;;
+ esac
+
+ return 1
+}
+
+xpi_unpack() {
+ local xpi xpiname srcdir
+
+ # Not gonna use ${A} as we are looking for a specific option being passed to function
+ # You must specify which xpi to use
+ [[ -z "$*" ]] && die "Nothing passed to the $FUNCNAME command. please pass which xpi to unpack"
+
+ for xpi in "$@"; do
+ einfo "Unpacking ${xpi} to ${PWD}"
+ xpiname=$(basename ${xpi%.*})
+
+ if [[ "${xpi:0:2}" != "./" ]] && [[ "${xpi:0:1}" != "/" ]] ; then
+ srcdir="${DISTDIR}/"
+ fi
+
+ [[ -s "${srcdir}${xpi}" ]] || die "${xpi} does not exist"
+
+ case "${xpi##*.}" in
+ ZIP|zip|jar|xpi)
+ mkdir "${WORKDIR}/${xpiname}" && \
+ unzip -qo "${srcdir}${xpi}" -d "${WORKDIR}/${xpiname}" || die "failed to unpack ${xpi}"
+ ;;
+ *)
+ einfo "unpack ${xpi}: file format not recognized. Ignoring."
+ ;;
+ esac
+ done
+}
+
+
+xpi_install() {
+ local emid
+
+ # You must tell xpi_install which xpi to use
+ [[ ${#} -ne 1 ]] && die "$FUNCNAME takes exactly one argument, please specify an xpi to unpack"
+
+ x="${1}"
+ cd ${x}
+ # determine id for extension
+ emid="$(sed -n -e '/install-manifest/,$ { /em:id/!d; s/.*[\">]\([^\"<>]*\)[\"<].*/\1/; p; q }' "${x}"/install.rdf)" \
+ || die "failed to determine extension id"
+ if [[ -n ${MOZEXTENSION_TARGET} ]]; then
+ insinto "${MOZILLA_FIVE_HOME}"/${MOZEXTENSION_TARGET%/}/${emid}
+ elif $(mozversion_extension_location) ; then
+ insinto "${MOZILLA_FIVE_HOME}"/browser/extensions/${emid}
+ else
+ insinto "${MOZILLA_FIVE_HOME}"/extensions/${emid}
+ fi
+ doins -r "${x}"/* || die "failed to copy extension"
+}
+
+_MOZEXTENSION=1
+fi
diff --git a/eclass/mozilla-launcher.eclass b/eclass/mozilla-launcher.eclass
new file mode 100644
index 000000000000..0d7063859638
--- /dev/null
+++ b/eclass/mozilla-launcher.eclass
@@ -0,0 +1,123 @@
+# Copyright 1999-2009 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+inherit nsplugins multilib
+
+if [[ ${PN: -4} != "-bin" ]] ; then
+ IUSE="moznopango"
+fi
+
+# update_mozilla_launcher_symlinks
+# --------------------------------
+# Create or remove the following symlinks in /usr/bin:
+#
+# firefox -> firefox-bin
+# thunderbird -> thunderbird-bin
+# mozilla -> mozilla-bin
+# sunbird -> sunbird-bin
+# seamonkey -> seamonkey-bin
+#
+# The symlinks are removed if they're found to be dangling. They are
+# created according to the following rules:
+#
+# - If there's a -bin symlink in /usr/bin, and no corresponding
+# non-bin symlink, then create one.
+#
+# - Can't do this in src_install otherwise it overwrites the one
+# for the non-bin package.
+#
+# - Link to the -bin symlink so it's easier to detect when to
+# remove the symlink.
+#
+# NOTE: This eclass does *not* manage the launcher stubs in /usr/bin except
+# when a -bin package is installed and the corresponding from-source
+# package is not installed. The usual stubs are actually installed in
+# src_install so they are included in the package inventory.
+#
+update_mozilla_launcher_symlinks() {
+ local f browsers="mozilla firefox thunderbird sunbird seamonkey"
+ cd "${ROOT}"/usr/bin
+
+ # Remove launcher symlinks that no longer apply
+
+ for f in ${browsers}; do
+ if [[ -L ${f} && ! -f ${f} ]]; then
+ einfo "Removing dangling ${f} launcher"
+ rm -f ${f}
+ fi
+ done
+
+ # Create new symlinks
+
+ for f in ${browsers}; do
+ if [[ -e ${f}-bin && ! -e ${f} ]]; then
+ einfo "Adding link from ${f}-bin to ${f}"
+ ln -s ${f}-bin ${f}
+ fi
+ done
+}
+
+# install_mozilla_launcher_stub name libdir
+# -----------------------------------------
+# Install a stub called /usr/bin/$name that executes mozilla-launcher
+#
+# Note: $PLUGINS_DIR comes from nsplugins (specifically the deprecated section).
+#
+install_mozilla_launcher_stub() {
+ [[ -n $2 ]] || die "install_launcher_stub requires two arguments"
+ declare name=$1
+ declare libdir=$2
+
+ # If we use xulrunner, the name of the binary should be the same
+ if [[ ${name: -3} == "xul" ]]; then
+ name=${name/xul/}
+ declare appname=xulrunner
+ declare xulparams="export XUL_PARAMS=${libdir}/application.ini"
+ declare libdir="/usr/$(get_libdir)/xulrunner-1.9"
+ else
+ declare appname=${name}
+ fi
+
+ dodir /usr/bin
+
+ if [[ ${PN: -4} == "-bin" ]] || ! use moznopango; then
+ cat <<EOF >"${D}"/usr/bin/${name}
+#!/bin/sh
+#
+# Stub script to run mozilla-launcher. We used to use a symlink here
+# but OOo brokenness makes it necessary to use a stub instead:
+# http://bugs.gentoo.org/show_bug.cgi?id=78890
+
+export MOZILLA_LAUNCHER=${appname}
+export MOZILLA_LIBDIR=${libdir}
+export MOZ_PLUGIN_PATH=\${MOZ_PLUGIN_PATH:-/usr/$(get_libdir)/$PLUGINS_DIR}
+${xulparams}
+exec /usr/libexec/mozilla-launcher "\$@"
+EOF
+ else
+ cat <<EOF >"${D}"/usr/bin/${name}
+#!/bin/sh
+#
+# Stub script to run mozilla-launcher. We used to use a symlink here
+# but OOo brokenness makes it necessary to use a stub instead:
+# http://bugs.gentoo.org/show_bug.cgi?id=78890
+
+export MOZILLA_LAUNCHER=${appname}
+export MOZILLA_LIBDIR=${libdir}
+export MOZ_PLUGIN_PATH=\${MOZ_PLUGIN_PATH:-/usr/$(get_libdir)/$PLUGINS_DIR}
+export MOZ_DISABLE_PANGO=1
+${xulparams}
+exec /usr/libexec/mozilla-launcher "\$@"
+EOF
+ fi
+ chmod 0755 "${D}"/usr/bin/${name}
+}
+
+warn_mozilla_launcher_stub() {
+ elog "Not all locales support the disabling of pango."
+ elog "If your locale does not support disabling pango,"
+ elog "please open a bug report on http://bugs.gentoo.org"
+ elog "Then we can filter around the problem with those"
+ elog "specific locales."
+}
diff --git a/eclass/mozlinguas.eclass b/eclass/mozlinguas.eclass
new file mode 100644
index 000000000000..4bd0a8d1810b
--- /dev/null
+++ b/eclass/mozlinguas.eclass
@@ -0,0 +1,315 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mozlinguas.eclass
+# @MAINTAINER:
+# mozilla@gentoo.org
+# @AUTHOR:
+# Nirbheek Chauhan <nirbheek@gentoo.org>
+# Ian Stakenvicius <axs@gentoo.org>
+# @BLURB: Handle language packs for mozilla products
+# @DESCRIPTION:
+# Sets IUSE according to MOZ_LANGS (language packs available). Also exports
+# src_unpack, src_compile and src_install for use in ebuilds, and provides
+# supporting functions for langpack generation and installation.
+
+inherit mozextension
+
+case "${EAPI:-0}" in
+ 0|1)
+ die "EAPI ${EAPI:-0} does not support the '->' SRC_URI operator";;
+ 2|3|4|5)
+ EXPORT_FUNCTIONS src_unpack src_compile src_install;;
+ *)
+ die "EAPI ${EAPI} is not supported, contact eclass maintainers";;
+esac
+
+# @ECLASS-VARIABLE: MOZ_LANGS
+# @DESCRIPTION:
+# Array containing the list of language pack xpis available for
+# this release. The list can be updated with scripts/get_langs.sh from the
+# mozilla overlay.
+: ${MOZ_LANGS:=()}
+
+# @ECLASS-VARIABLE: MOZ_PV
+# @DESCRIPTION:
+# Ebuild package version converted to equivalent upstream version.
+# Defaults to ${PV}, and should be overridden for alphas, betas, and RCs
+: ${MOZ_PV:="${PV}"}
+
+# @ECLASS-VARIABLE: MOZ_PN
+# @DESCRIPTION:
+# Ebuild package name converted to equivalent upstream name.
+# Defaults to ${PN}, and should be overridden for binary ebuilds.
+: ${MOZ_PN:="${PN}"}
+
+# @ECLASS-VARIABLE: MOZ_P
+# @DESCRIPTION:
+# Ebuild package name + version converted to upstream equivalent.
+# Defaults to ${MOZ_PN}-${MOZ_PV}
+: ${MOZ_P:="${MOZ_PN}-${MOZ_PV}"}
+
+# @ECLASS-VARIABLE: MOZ_FTP_URI
+# @DESCRIPTION:
+# The ftp URI prefix for the release tarballs and language packs.
+: ${MOZ_FTP_URI:=""}
+
+# @ECLASS-VARIABLE: MOZ_HTTP_URI
+# @DESCRIPTION:
+# The http URI prefix for the release tarballs and language packs.
+: ${MOZ_HTTP_URI:=""}
+
+# @ECLASS-VARIABLE: MOZ_LANGPACK_PREFIX
+# @DESCRIPTION:
+# The relative path up to the lang code in the langpack file URI.
+# Defaults to ${MOZ_PV}/linux-i686/xpi/
+: ${MOZ_LANGPACK_PREFIX:="${MOZ_PV}/linux-i686/xpi/"}
+
+# @ECLASS-VARIABLE: MOZ_LANGPACK_SUFFIX
+# @DESCRIPTION:
+# The suffix after the lang code in the langpack file URI.
+# Defaults to '.xpi'
+: ${MOZ_LANGPACK_SUFFIX:=".xpi"}
+
+# @ECLASS-VARIABLE: MOZ_LANGPACK_UNOFFICIAL
+# @DESCRIPTION:
+# The status of the langpack, used to differentiate within
+# Manifests and on Gentoo mirrors as to when the langpacks are
+# generated officially by Mozilla or if they were generated
+# unofficially by others (ie the Gentoo mozilla team). When
+# this var is set, the distfile will have a .unofficial.xpi
+# suffix.
+: ${MOZ_LANGPACK_UNOFFICIAL:=""}
+
+# @ECLASS-VARIABLE: MOZ_GENERATE_LANGPACKS
+# @DESCRIPTION:
+# This flag specifies whether or not the langpacks should be
+# generated directly during the build process, rather than
+# being downloaded and installed from upstream pre-built
+# extensions. Primarily it supports pre-release builds.
+# Defaults to empty.
+: ${MOZ_GENERATE_LANGPACKS:=""}
+
+# @ECLASS-VARIABLE: MOZ_L10N_SOURCEDIR
+# @DESCRIPTION:
+# The path that l10n sources can be found at, once unpacked.
+# Defaults to ${WORKDIR}/l10n-sources
+: ${MOZ_L10N_SOURCEDIR:="${WORKDIR}/l10n-sources"}
+
+# @ECLASS-VARIABLE: MOZ_L10N_URI_PREFIX
+# @DESCRIPTION:
+# The full URI prefix of the distfile for each l10n locale. The
+# AB_CD and MOZ_L10N_URI_SUFFIX will be appended to this to complete the
+# SRC_URI when MOZ_GENERATE_LANGPACKS is set. If empty, nothing will
+# be added to SRC_URI.
+# Defaults to empty.
+: ${MOZ_L10N_URI_PREFIX:=""}
+
+# @ECLASS-VARIABLE: MOZ_L10N_URI_SUFFIX
+# @DESCRIPTION:
+# The suffix of l10n source distfiles.
+# Defaults to '.tar.xz'
+: ${MOZ_L10N_URI_SUFFIX:=".tar.xz"}
+
+# Add linguas_* to IUSE according to available language packs
+# No language packs for alphas and betas
+if ! [[ -n ${MOZ_GENERATE_LANGPACKS} ]] ; then
+ if ! [[ ${PV} =~ alpha|beta ]] || { [[ ${PN} == seamonkey ]] && ! [[ ${PV} =~ alpha ]] ; } ; then
+ [[ -z ${MOZ_FTP_URI} ]] && [[ -z ${MOZ_HTTP_URI} ]] && die "No URI set to download langpacks, please set one of MOZ_{FTP,HTTP}_URI"
+ for x in "${MOZ_LANGS[@]}" ; do
+ # en and en_US are handled internally
+ if [[ ${x} == en ]] || [[ ${x} == en-US ]]; then
+ continue
+ fi
+ SRC_URI+=" linguas_${x/-/_}? ("
+ [[ -n ${MOZ_FTP_URI} ]] && SRC_URI+="
+ ${MOZ_FTP_URI}/${MOZ_LANGPACK_PREFIX}${x}${MOZ_LANGPACK_SUFFIX} -> ${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}.xpi"
+ [[ -n ${MOZ_HTTP_URI} ]] && SRC_URI+="
+ ${MOZ_HTTP_URI}/${MOZ_LANGPACK_PREFIX}${x}${MOZ_LANGPACK_SUFFIX} -> ${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}.xpi"
+ SRC_URI+=" )"
+ IUSE+=" linguas_${x/-/_}"
+ # We used to do some magic if specific/generic locales were missing, but
+ # we stopped doing that due to bug 325195.
+ done
+ fi
+else
+ for x in "${MOZ_LANGS[@]}" ; do
+ # en and en_US are handled internally
+ if [[ ${x} == en ]] || [[ ${x} == en-US ]]; then
+ continue
+ fi
+# Do NOT grab l10n sources from hg tip at this time, since it is a moving target
+# if [[ ${PV} =~ alpha ]]; then
+# # Please note that this URI is not deterministic - digest breakage could occur
+# SRC_URI+=" linguas_${x/-/_}? ( http://hg.mozilla.org/releases/l10n/mozilla-aurora/ach/archive/tip.tar.bz2 -> ${MOZ_P}-l10n-${x}.tar.bz2 )"
+# elif [[ ${PV} =~ beta ]] && ! [[ ${PN} == seamonkey ]]; then
+# # Please note that this URI is not deterministic - digest breakage could occur
+# SRC_URI+=" linguas_${x/-/_}? ( http://hg.mozilla.org/releases/l10n/mozilla-beta/ach/archive/tip.tar.bz2 -> ${MOZ_P}-l10n-${x}.tar.bz2 )"
+# elif [[ -n ${MOZ_L10N_URI_PREFIX} ]]; then
+ if [[ -n ${MOZ_L10N_URI_PREFIX} ]]; then
+ SRC_URI+=" linguas_${x/-/_}? ( ${MOZ_L10N_URI_PREFIX}${x}${MOZ_L10N_URI_SUFFIX} )"
+ fi
+ IUSE+=" linguas_${x/-/_}"
+ done
+fi
+unset x
+
+# @FUNCTION: mozlinguas_export
+# @INTERNAL
+# @DESCRIPTION:
+# Generate the list of language packs called "mozlinguas"
+# This list is used to unpack and install the xpi language packs
+mozlinguas_export() {
+ if [[ ${PN} == seamonkey ]] ; then
+ [[ ${PV} =~ alpha ]] && ! [[ -n ${MOZ_GENERATE_LANGPACKS} ]] && return
+ else
+ [[ ${PV} =~ alpha|beta ]] && ! [[ -n ${MOZ_GENERATE_LANGPACKS} ]] && return
+ fi
+ local lingua
+ mozlinguas=()
+ for lingua in ${LINGUAS}; do
+ if has ${lingua} en en_US; then
+ # For mozilla products, en and en_US are handled internally
+ continue
+ # If this language is supported by ${P},
+ elif has ${lingua} "${MOZ_LANGS[@]//-/_}"; then
+ # Add the language to mozlinguas, if it isn't already there
+ has ${lingua//_/-} "${mozlinguas[@]}" || mozlinguas+=(${lingua//_/-})
+ continue
+ # For each short lingua that isn't in MOZ_LANGS,
+ # We used to add *all* long MOZ_LANGS to the mozlinguas list,
+ # but we stopped doing that due to bug 325195.
+ else
+ :
+ fi
+ ewarn "Sorry, but ${P} does not support the ${lingua} locale"
+ done
+}
+
+# @FUNCTION: mozlinguas_src_unpack
+# @DESCRIPTION:
+# Unpack xpi language packs according to the user's LINGUAS settings
+mozlinguas_src_unpack() {
+ local x
+ if ! [[ -n ${MOZ_GENERATE_LANGPACKS} ]]; then
+ mozlinguas_export
+ for x in "${mozlinguas[@]}"; do
+ # FIXME: Add support for unpacking xpis to portage
+ xpi_unpack "${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}.xpi"
+ done
+ if [[ "${mozlinguas[*]}" != "" && "${mozlinguas[*]}" != "en" ]]; then
+ einfo "Selected language packs (first will be default): ${mozlinguas[*]}"
+ fi
+ fi
+}
+
+# @FUNCTION: mozlinguas_mozconfig
+# @DESCRIPTION:
+# if applicable, add the necessary flag to .mozconfig to support
+# the generation of locales. Note that this function requires
+# mozconfig_annotate to already be declared via an inherit of
+# mozconfig or mozcoreconf.
+mozlinguas_mozconfig() {
+ if [[ -n ${MOZ_GENERATE_LANGPACKS} ]]; then
+ if declare -f mozconfig_annotate >/dev/null ; then
+ mozconfig_annotate 'for building locales' --with-l10n-base=${MOZ_L10N_SOURCEDIR}
+ else
+ die "Could not configure l10n-base, mozconfig_annotate not declared -- missing inherit?"
+ fi
+ fi
+}
+
+# @FUNCTION: mozlinguas_src_compile
+# @DESCRIPTION:
+# if applicable, build the selected locales.
+mozlinguas_src_compile() {
+ if [[ -n ${MOZ_GENERATE_LANGPACKS} ]]; then
+ # leverage BUILD_OBJ_DIR if set otherwise assume PWD.
+ local x y targets=( "langpack" ) localedir="${BUILD_OBJ_DIR:-.}"
+ case ${PN} in
+ *firefox)
+ localedir+="/browser/locales"
+ ;;
+ seamonkey)
+ localedir+="/suite/locales"
+ ;;
+ *thunderbird)
+ localedir+="/mail/locales"
+ targets+=( "calendar-langpack" )
+ ;;
+ *) die "Building locales for ${PN} is not supported."
+ esac
+ pushd "${localedir}" > /dev/null || die
+ mozlinguas_export
+ for x in "${mozlinguas[@]}"; do for y in "${targets[@]}"; do
+ emake ${y}-${x} LOCALE_MERGEDIR="./${y}-${x}"
+ done; done
+ popd > /dev/null || die
+ fi
+}
+
+# @FUNCTION: mozlinguas_xpistage_langpacks
+# @DESCRIPTION:
+# Add extra langpacks to the xpi-stage dir for prebuilt plugins
+#
+# First argument is the path to the extension
+# Second argument is the prefix of the source (same as first if unspecified)
+# Remaining arguments are the modules in the extension that are localized
+# (basename of first if unspecified)
+#
+# Example - installing extra langpacks for lightning:
+# src_install() {
+# ... # general installation steps
+# mozlinguas_xpistage_langpacks \
+# "${BUILD_OBJ_DIR}"/dist/xpi-stage/lightning \
+# "${WORKDIR}"/lightning \
+# lightning calendar
+# ... # proceed with installation from the xpi-stage dir
+# }
+
+mozlinguas_xpistage_langpacks() {
+ local l c modpath="${1}" srcprefix="${1}" modules=( "${1##*/}" )
+ shift
+ if [[ -n ${1} ]] ; then srcprefix="${1}" ; shift ; fi
+ if [[ -n ${1} ]] ; then modules=( $@ ) ; fi
+
+ mozlinguas_export
+ mkdir -p "${modpath}/chrome" || die
+ for l in "${mozlinguas[@]}"; do for c in "${modules[@]}" ; do
+ if [[ -e "${srcprefix}-${l}/chrome/${c}-${l}" ]]; then
+ cp -RLp -t "${modpath}/chrome" "${srcprefix}-${l}/chrome/${c}-${l}" || die
+ grep "locale ${c} ${l} chrome/" "${srcprefix}-${l}/chrome.manifest" \
+ >>"${modpath}/chrome.manifest" || die
+ elif [[ -e "${srcprefix}/chrome/${c}-${l}" ]]; then
+ cp -RLp -t "${modpath}/chrome" "${srcprefix}/chrome/${c}-${l}" || die
+ grep "locale ${c} ${l} chrome/" "${srcprefix}/chrome.manifest" \
+ >>"${modpath}/chrome.manifest" || die
+ else
+ ewarn "Locale ${l} was not found for ${c}, skipping."
+ fi
+ done; done
+}
+
+# @FUNCTION: mozlinguas_src_install
+# @DESCRIPTION:
+# Install xpi language packs according to the user's LINGUAS settings
+# NOTE - uses ${BUILD_OBJ_DIR} or PWD if unset, for source-generated langpacks
+mozlinguas_src_install() {
+ local x
+ mozlinguas_export
+ if [[ -n ${MOZ_GENERATE_LANGPACKS} ]]; then
+ local repopath="${WORKDIR}/${PN}-generated-langpacks"
+ mkdir -p "${repopath}"
+ pushd "${BUILD_OBJ_DIR:-.}"/dist/*/xpi > /dev/null || die
+ for x in "${mozlinguas[@]}"; do
+ cp "${MOZ_P}.${x}.langpack.xpi" \
+ "${repopath}/${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}.xpi" || die
+ xpi_unpack "${repopath}/${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}.xpi"
+ done
+ popd > /dev/null || die
+ fi
+ for x in "${mozlinguas[@]}"; do
+ xpi_install "${WORKDIR}/${MOZ_P}-${x}${MOZ_LANGPACK_UNOFFICIAL:+.unofficial}"
+ done
+}
diff --git a/eclass/multibuild.eclass b/eclass/multibuild.eclass
new file mode 100644
index 000000000000..b7269d267025
--- /dev/null
+++ b/eclass/multibuild.eclass
@@ -0,0 +1,269 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: multibuild
+# @MAINTAINER:
+# Michał Górny <mgorny@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# @BLURB: A generic eclass for building multiple variants of packages.
+# @DESCRIPTION:
+# The multibuild eclass aims to provide a generic framework for building
+# multiple 'variants' of a package (e.g. multilib, Python
+# implementations).
+
+case "${EAPI:-0}" in
+ 0|1|2|3)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 4|5)
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ! ${_MULTIBUILD} ]]; then
+
+# @ECLASS-VARIABLE: MULTIBUILD_VARIANTS
+# @DESCRIPTION:
+# An array specifying all enabled variants which multibuild_foreach*
+# can execute the process for.
+#
+# In ebuild, it can be set in global scope. Eclasses should set it
+# locally in function scope to support nesting properly.
+#
+# Example:
+# @CODE
+# python_foreach_impl() {
+# local MULTIBUILD_VARIANTS=( python{2_5,2_6,2_7} ... )
+# multibuild_foreach_variant python_compile
+# }
+# @CODE
+
+# @ECLASS-VARIABLE: MULTIBUILD_VARIANT
+# @DESCRIPTION:
+# The current variant which the function was executed for.
+#
+# Example value:
+# @CODE
+# python2_6
+# @CODE
+
+# @ECLASS-VARIABLE: MULTIBUILD_ID
+# @DESCRIPTION:
+# The unique identifier for a multibuild run. In a simple run, it is
+# equal to MULTIBUILD_VARIANT. In a nested multibuild environment, it
+# contains the complete selection tree.
+#
+# It can be used to create variant-unique directories and files.
+#
+# Example value:
+# @CODE
+# amd64-double
+# @CODE
+
+# @ECLASS-VARIABLE: BUILD_DIR
+# @DESCRIPTION:
+# The current build directory. In global scope, it is supposed
+# to contain an 'initial' build directory. If unset, ${S} is used.
+#
+# multibuild_foreach_variant() sets BUILD_DIR locally
+# to variant-specific build directories based on the initial value
+# of BUILD_DIR.
+#
+# Example value:
+# @CODE
+# ${WORKDIR}/foo-1.3-python2_6
+# @CODE
+
+# @FUNCTION: multibuild_foreach_variant
+# @USAGE: [<argv>...]
+# @DESCRIPTION:
+# Run the passed command repeatedly for each of the enabled package
+# variants.
+#
+# Each of the runs will have variant-specific BUILD_DIR set, and output
+# teed to a separate log in ${T}.
+#
+# The function returns 0 if all commands return 0, or the first non-zero
+# exit status otherwise. However, it performs all the invocations
+# nevertheless. It is preferred to call 'die' inside of the passed
+# function.
+multibuild_foreach_variant() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${MULTIBUILD_VARIANTS} ]] \
+ || die "MULTIBUILD_VARIANTS need to be set"
+
+ local bdir=${BUILD_DIR:-${S}}
+
+ # Avoid writing outside WORKDIR if S=${WORKDIR}.
+ [[ ${bdir%%/} == ${WORKDIR%%/} ]] && bdir=${WORKDIR}/build
+
+ local prev_id=${MULTIBUILD_ID:+${MULTIBUILD_ID}-}
+ local ret=0 lret=0 v
+
+ debug-print "${FUNCNAME}: initial build_dir = ${bdir}"
+
+ for v in "${MULTIBUILD_VARIANTS[@]}"; do
+ local MULTIBUILD_VARIANT=${v}
+ local MULTIBUILD_ID=${prev_id}${v}
+ local BUILD_DIR=${bdir%%/}-${v}
+
+ _multibuild_run() {
+ # find the first non-private command
+ local i=1
+ while [[ ${!i} == _* ]]; do
+ (( i += 1 ))
+ done
+
+ [[ ${i} -le ${#} ]] && einfo "${v}: running ${@:${i}}"
+ "${@}"
+ }
+
+ _multibuild_run "${@}" \
+ > >(exec tee -a "${T}/build-${MULTIBUILD_ID}.log") 2>&1
+ lret=${?}
+ done
+ [[ ${ret} -eq 0 && ${lret} -ne 0 ]] && ret=${lret}
+
+ return ${ret}
+}
+
+# @FUNCTION: multibuild_parallel_foreach_variant
+# @USAGE: [<argv>...]
+# @DESCRIPTION:
+# Run the passed command repeatedly for each of the enabled package
+# variants. This used to run the commands in parallel but now it's
+# just a deprecated alias to multibuild_foreach_variant.
+#
+# The function returns 0 if all commands return 0, or the first non-zero
+# exit status otherwise. However, it performs all the invocations
+# nevertheless. It is preferred to call 'die' inside of the passed
+# function.
+multibuild_parallel_foreach_variant() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ multibuild_foreach_variant "${@}"
+}
+
+# @FUNCTION: multibuild_for_best_variant
+# @USAGE: [<argv>...]
+# @DESCRIPTION:
+# Run the passed command once, for the best of the enabled package
+# variants.
+#
+# The run will have a proper, variant-specific BUILD_DIR set, and output
+# teed to a separate log in ${T}.
+#
+# The function returns command exit status.
+multibuild_for_best_variant() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${MULTIBUILD_VARIANTS} ]] \
+ || die "MULTIBUILD_VARIANTS need to be set"
+
+ # bash-4.1 can't handle negative subscripts
+ local MULTIBUILD_VARIANTS=(
+ "${MULTIBUILD_VARIANTS[$(( ${#MULTIBUILD_VARIANTS[@]} - 1 ))]}"
+ )
+ multibuild_foreach_variant "${@}"
+}
+
+# @FUNCTION: multibuild_copy_sources
+# @DESCRIPTION:
+# Create per-variant copies of source tree. The source tree is assumed
+# to be in ${BUILD_DIR}, or ${S} if the former is unset. The copies will
+# be placed in directories matching BUILD_DIRs used by
+# multibuild_foreach_variant().
+multibuild_copy_sources() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local _MULTIBUILD_INITIAL_BUILD_DIR=${BUILD_DIR:-${S}}
+
+ einfo "Will copy sources from ${_MULTIBUILD_INITIAL_BUILD_DIR}"
+
+ local cp_args=()
+ if cp --reflink=auto --version &>/dev/null; then
+ # enable reflinking if possible to make this faster
+ cp_args+=( --reflink=auto )
+ fi
+
+ _multibuild_create_source_copy() {
+ einfo "${MULTIBUILD_VARIANT}: copying to ${BUILD_DIR}"
+ cp -pr "${cp_args[@]}" \
+ "${_MULTIBUILD_INITIAL_BUILD_DIR}" "${BUILD_DIR}" || die
+ }
+
+ multibuild_foreach_variant _multibuild_create_source_copy
+}
+
+# @FUNCTION: run_in_build_dir
+# @USAGE: <argv>...
+# @DESCRIPTION:
+# Run the given command in the directory pointed by BUILD_DIR.
+run_in_build_dir() {
+ debug-print-function ${FUNCNAME} "${@}"
+ local ret
+
+ [[ ${#} -ne 0 ]] || die "${FUNCNAME}: no command specified."
+ [[ ${BUILD_DIR} ]] || die "${FUNCNAME}: BUILD_DIR not set."
+
+ mkdir -p "${BUILD_DIR}" || die
+ pushd "${BUILD_DIR}" >/dev/null || die
+ "${@}"
+ ret=${?}
+ popd >/dev/null || die
+
+ return ${ret}
+}
+
+# @FUNCTION: multibuild_merge_root
+# @USAGE: <src-root> <dest-root>
+# @DESCRIPTION:
+# Merge the directory tree (fake root) from <src-root> to <dest-root>
+# (the real root). Both directories have to be real, absolute paths
+# (i.e. including ${D}). Source root will be removed.
+multibuild_merge_root() {
+ local src=${1}
+ local dest=${2}
+
+ local ret
+
+ if use userland_BSD; then
+ # Most of BSD variants fail to copy broken symlinks, #447370
+ # also, they do not support --version
+
+ tar -C "${src}" -f - -c . \
+ | tar -x -f - -C "${dest}"
+ [[ ${PIPESTATUS[*]} == '0 0' ]]
+ ret=${?}
+ else
+ local cp_args=()
+
+ if cp -a --version &>/dev/null; then
+ cp_args+=( -a )
+ else
+ cp_args+=( -P -R -p )
+ fi
+
+ if cp --reflink=auto --version &>/dev/null; then
+ # enable reflinking if possible to make this faster
+ cp_args+=( --reflink=auto )
+ fi
+
+ cp "${cp_args[@]}" "${src}"/. "${dest}"/
+ ret=${?}
+ fi
+
+ if [[ ${ret} -ne 0 ]]; then
+ die "${MULTIBUILD_VARIANT:-(unknown)}: merging image failed."
+ fi
+
+ rm -rf "${src}"
+}
+
+_MULTIBUILD=1
+fi
diff --git a/eclass/multilib-build.eclass b/eclass/multilib-build.eclass
new file mode 100644
index 000000000000..ca0fd5444111
--- /dev/null
+++ b/eclass/multilib-build.eclass
@@ -0,0 +1,659 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: multilib-build.eclass
+# @MAINTAINER:
+# gx86-multilib team <multilib@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# @BLURB: flags and utility functions for building multilib packages
+# @DESCRIPTION:
+# The multilib-build.eclass exports USE flags and utility functions
+# necessary to build packages for multilib in a clean and uniform
+# manner.
+#
+# Please note that dependency specifications for multilib-capable
+# dependencies shall use the USE dependency string in ${MULTILIB_USEDEP}
+# to properly request multilib enabled.
+
+if [[ ! ${_MULTILIB_BUILD} ]]; then
+
+# EAPI=4 is required for meaningful MULTILIB_USEDEP.
+case ${EAPI:-0} in
+ 4|5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+inherit multibuild multilib
+
+# @ECLASS-VARIABLE: _MULTILIB_FLAGS
+# @INTERNAL
+# @DESCRIPTION:
+# The list of multilib flags and corresponding ABI values. If the same
+# flag is reused for multiple ABIs (e.g. x86 on Linux&FreeBSD), multiple
+# ABIs may be separated by commas.
+#
+# Please contact the multilib team before modifying this list. This way
+# we can ensure that all preliminary work is done and the multilib
+# support can be extended safely.
+_MULTILIB_FLAGS=(
+ abi_x86_32:x86,x86_fbsd,x86_freebsd,x86_linux,x86_macos,x86_solaris
+ abi_x86_64:amd64,amd64_fbsd,x64_freebsd,amd64_linux,x64_macos,x64_solaris
+ abi_x86_x32:x32
+ abi_mips_n32:n32
+ abi_mips_n64:n64
+ abi_mips_o32:o32
+ abi_ppc_32:ppc,ppc_aix,ppc_macos
+ abi_ppc_64:ppc64
+ abi_s390_32:s390
+ abi_s390_64:s390x
+)
+
+# @ECLASS-VARIABLE: MULTILIB_COMPAT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of multilib ABIs supported by the ebuild. If unset, defaults to
+# all ABIs supported by the eclass.
+#
+# This variable is intended for use in prebuilt multilib packages that
+# can provide binaries only for a limited set of ABIs. If ABIs need to
+# be limited due to a bug in source code, package.use.mask is to be used
+# instead. Along with MULTILIB_COMPAT, KEYWORDS should contain '-*'.
+#
+# Note that setting this variable effectively disables support for all
+# other ABIs, including other architectures. For example, specifying
+# abi_x86_{32,64} disables support for MIPS as well.
+#
+# The value of MULTILIB_COMPAT determines the value of IUSE. If set, it
+# also enables REQUIRED_USE constraints.
+#
+# Example use:
+# @CODE
+# # Upstream provides binaries for x86 & amd64 only
+# MULTILIB_COMPAT=( abi_x86_{32,64} )
+# @CODE
+
+# @ECLASS-VARIABLE: MULTILIB_USEDEP
+# @DESCRIPTION:
+# The USE-dependency to be used on dependencies (libraries) needing
+# to support multilib as well.
+#
+# Example use:
+# @CODE
+# RDEPEND="dev-libs/libfoo[${MULTILIB_USEDEP}]
+# net-libs/libbar[ssl,${MULTILIB_USEDEP}]"
+# @CODE
+
+# @ECLASS-VARIABLE: MULTILIB_ABI_FLAG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The complete ABI name. Resembles the USE flag name.
+#
+# This is set within multilib_foreach_abi(),
+# multilib_parallel_foreach_abi() and multilib-minimal sub-phase
+# functions.
+#
+# It may be null (empty) when the build is done on an ABI not controlled
+# by a USE flag (e.g. on non-multilib arch or when using multilib
+# portage). The build will always be done for a single ABI then.
+#
+# Example value:
+# @CODE
+# abi_x86_64
+# @CODE
+
+_multilib_build_set_globals() {
+ local flags=( "${_MULTILIB_FLAGS[@]%:*}" )
+
+ if [[ ${MULTILIB_COMPAT[@]} ]]; then
+ # Validate MULTILIB_COMPAT and filter out the flags.
+ local f
+ for f in "${MULTILIB_COMPAT[@]}"; do
+ if ! has "${f}" "${flags[@]}"; then
+ die "Invalid value in MULTILIB_COMPAT: ${f}"
+ fi
+ done
+
+ flags=( "${MULTILIB_COMPAT[@]}" )
+
+ REQUIRED_USE="|| ( ${flags[*]} )"
+ fi
+
+ local usedeps=${flags[@]/%/(-)?}
+
+ IUSE=${flags[*]}
+ MULTILIB_USEDEP=${usedeps// /,}
+}
+_multilib_build_set_globals
+
+# @FUNCTION: multilib_get_enabled_abis
+# @DESCRIPTION:
+# Return the ordered list of enabled ABIs if multilib builds
+# are enabled. The best (most preferred) ABI will come last.
+#
+# If multilib is disabled, the default ABI will be returned
+# in order to enforce consistent testing with multilib code.
+multilib_get_enabled_abis() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local pairs=( $(multilib_get_enabled_abi_pairs) )
+ echo "${pairs[@]#*.}"
+}
+
+# @FUNCTION: multilib_get_enabled_abi_pairs
+# @DESCRIPTION:
+# Return the ordered list of enabled <use-flag>.<ABI> pairs
+# if multilib builds are enabled. The best (most preferred)
+# ABI will come last.
+#
+# If multilib is disabled, the default ABI will be returned
+# along with empty <use-flag>.
+multilib_get_enabled_abi_pairs() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local abis=( $(get_all_abis) )
+
+ local abi i found
+ for abi in "${abis[@]}"; do
+ for i in "${_MULTILIB_FLAGS[@]}"; do
+ local m_abis=${i#*:} m_abi
+ local m_flag=${i%:*}
+
+ # split on ,; we can't switch IFS for function scope because
+ # paludis is broken (bug #486592), and switching it locally
+ # for the split is more complex than cheating like this
+ for m_abi in ${m_abis//,/ }; do
+ if [[ ${m_abi} == ${abi} ]] \
+ && { [[ ! "${MULTILIB_COMPAT[@]}" ]] || has "${m_flag}" "${MULTILIB_COMPAT[@]}"; } \
+ && use "${m_flag}"
+ then
+ echo "${m_flag}.${abi}"
+ found=1
+ break 2
+ fi
+ done
+ done
+ done
+
+ if [[ ! ${found} ]]; then
+ # ${ABI} can be used to override the fallback (multilib-portage),
+ # ${DEFAULT_ABI} is the safe fallback.
+ local abi=${ABI:-${DEFAULT_ABI}}
+
+ debug-print "${FUNCNAME}: no ABIs enabled, fallback to ${abi}"
+ debug-print "${FUNCNAME}: ABI=${ABI}, DEFAULT_ABI=${DEFAULT_ABI}"
+ echo ".${abi}"
+ fi
+}
+
+# @FUNCTION: _multilib_multibuild_wrapper
+# @USAGE: <argv>...
+# @INTERNAL
+# @DESCRIPTION:
+# Initialize the environment for ABI selected for multibuild.
+_multilib_multibuild_wrapper() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local ABI=${MULTIBUILD_VARIANT#*.}
+ local MULTILIB_ABI_FLAG=${MULTIBUILD_VARIANT%.*}
+
+ multilib_toolchain_setup "${ABI}"
+ "${@}"
+}
+
+# @FUNCTION: multilib_foreach_abi
+# @USAGE: <argv>...
+# @DESCRIPTION:
+# If multilib support is enabled, sets the toolchain up for each
+# supported ABI along with the ABI variable and correct BUILD_DIR,
+# and runs the given commands with them.
+#
+# If multilib support is disabled, it just runs the commands. No setup
+# is done.
+multilib_foreach_abi() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
+ multibuild_foreach_variant _multilib_multibuild_wrapper "${@}"
+}
+
+# @FUNCTION: multilib_parallel_foreach_abi
+# @USAGE: <argv>...
+# @DESCRIPTION:
+# If multilib support is enabled, sets the toolchain up for each
+# supported ABI along with the ABI variable and correct BUILD_DIR,
+# and runs the given commands with them.
+#
+# If multilib support is disabled, it just runs the commands. No setup
+# is done.
+#
+# This function used to run multiple commands in parallel. Now it's just
+# a deprecated alias to multilib_foreach_abi.
+multilib_parallel_foreach_abi() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
+ multibuild_foreach_variant _multilib_multibuild_wrapper "${@}"
+}
+
+# @FUNCTION: multilib_for_best_abi
+# @USAGE: <argv>...
+# @DESCRIPTION:
+# Runs the given command with setup for the 'best' (usually native) ABI.
+multilib_for_best_abi() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ eqawarn "QA warning: multilib_for_best_abi() function is deprecated and should"
+ eqawarn "not be used. The multilib_is_native_abi() check may be used instead."
+
+ local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
+
+ multibuild_for_best_variant _multilib_multibuild_wrapper "${@}"
+}
+
+# @FUNCTION: multilib_check_headers
+# @DESCRIPTION:
+# Check whether the header files are consistent between ABIs.
+#
+# This function needs to be called after each ABI's installation phase.
+# It obtains the header file checksums and compares them with previous
+# runs (if any). Dies if header files differ.
+multilib_check_headers() {
+ _multilib_header_cksum() {
+ [[ -d ${ED}usr/include ]] && \
+ find "${ED}"usr/include -type f \
+ -exec cksum {} + | sort -k2
+ }
+
+ local cksum=$(_multilib_header_cksum)
+ local cksum_file=${T}/.multilib_header_cksum
+
+ if [[ -f ${cksum_file} ]]; then
+ local cksum_prev=$(< "${cksum_file}")
+
+ if [[ ${cksum} != ${cksum_prev} ]]; then
+ echo "${cksum}" > "${cksum_file}.new"
+
+ eerror "Header files have changed between ABIs."
+
+ if type -p diff &>/dev/null; then
+ eerror "$(diff -du "${cksum_file}" "${cksum_file}.new")"
+ else
+ eerror "Old checksums in: ${cksum_file}"
+ eerror "New checksums in: ${cksum_file}.new"
+ fi
+
+ die "Header checksum mismatch, aborting."
+ fi
+ else
+ echo "${cksum}" > "${cksum_file}"
+ fi
+}
+
+# @FUNCTION: multilib_copy_sources
+# @DESCRIPTION:
+# Create a single copy of the package sources for each enabled ABI.
+#
+# The sources are always copied from initial BUILD_DIR (or S if unset)
+# to ABI-specific build directory matching BUILD_DIR used by
+# multilib_foreach_abi().
+multilib_copy_sources() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local MULTIBUILD_VARIANTS=( $(multilib_get_enabled_abi_pairs) )
+ multibuild_copy_sources
+}
+
+# @ECLASS-VARIABLE: MULTILIB_WRAPPED_HEADERS
+# @DESCRIPTION:
+# A list of headers to wrap for multilib support. The listed headers
+# will be moved to a non-standard location and replaced with a file
+# including them conditionally to current ABI.
+#
+# This variable has to be a bash array. Paths shall be relative to
+# installation root (${ED}), and name regular files. Recursive wrapping
+# is not supported.
+#
+# Please note that header wrapping is *discouraged*. It is preferred to
+# install all headers in a subdirectory of libdir and use pkg-config to
+# locate the headers. Some C preprocessors will not work with wrapped
+# headers.
+#
+# Example:
+# @CODE
+# MULTILIB_WRAPPED_HEADERS=(
+# /usr/include/foobar/config.h
+# )
+# @CODE
+
+# @ECLASS-VARIABLE: MULTILIB_CHOST_TOOLS
+# @DESCRIPTION:
+# A list of tool executables to preserve for each multilib ABI.
+# The listed executables will be renamed to ${CHOST}-${basename},
+# and the native variant will be symlinked to the generic name.
+#
+# This variable has to be a bash array. Paths shall be relative to
+# installation root (${ED}), and name regular files or symbolic
+# links to regular files. Recursive wrapping is not supported.
+#
+# If symbolic link is passed, both symlink path and symlink target
+# will be changed. As a result, the symlink target is expected
+# to be wrapped as well (either by listing in MULTILIB_CHOST_TOOLS
+# or externally).
+#
+# Please note that tool wrapping is *discouraged*. It is preferred to
+# install pkg-config files for each ABI, and require reverse
+# dependencies to use that.
+#
+# Packages that search for tools properly (e.g. using AC_PATH_TOOL
+# macro) will find the wrapper executables automatically. Other packages
+# will need explicit override of tool paths.
+#
+# Example:
+# @CODE
+# MULTILIB_CHOST_TOOLS=(
+# /usr/bin/foo-config
+# )
+# @CODE
+
+# @FUNCTION: multilib_prepare_wrappers
+# @USAGE: [<install-root>]
+# @DESCRIPTION:
+# Perform the preparation of all kinds of wrappers for the current ABI.
+# This function shall be called once per each ABI, after installing
+# the files to be wrapped.
+#
+# Takes an optional custom <install-root> from which files will be
+# used. If no root is specified, uses ${ED}.
+#
+# The files to be wrapped are specified using separate variables,
+# e.g. MULTILIB_WRAPPED_HEADERS. Those variables shall not be changed
+# between the successive calls to multilib_prepare_wrappers
+# and multilib_install_wrappers.
+#
+# After all wrappers are prepared, multilib_install_wrappers shall
+# be called to commit them to the installation tree.
+multilib_prepare_wrappers() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -le 1 ]] || die "${FUNCNAME}: too many arguments"
+
+ local root=${1:-${ED}}
+ local f
+
+ if [[ ${COMPLETE_MULTILIB} == yes ]]; then
+ # symlink '${CHOST}-foo -> foo' to support abi-wrapper while
+ # keeping ${CHOST}-foo calls correct.
+
+ for f in "${MULTILIB_CHOST_TOOLS[@]}"; do
+ # drop leading slash if it's there
+ f=${f#/}
+
+ local dir=${f%/*}
+ local fn=${f##*/}
+
+ ln -s "${fn}" "${root}/${dir}/${CHOST}-${fn}" || die
+ done
+
+ return
+ fi
+
+ for f in "${MULTILIB_CHOST_TOOLS[@]}"; do
+ # drop leading slash if it's there
+ f=${f#/}
+
+ local dir=${f%/*}
+ local fn=${f##*/}
+
+ if [[ -L ${root}/${f} ]]; then
+ # rewrite the symlink target
+ local target=$(readlink "${root}/${f}")
+ local target_dir
+ local target_fn=${target##*/}
+
+ [[ ${target} == */* ]] && target_dir=${target%/*}
+
+ ln -f -s "${target_dir+${target_dir}/}${CHOST}-${target_fn}" \
+ "${root}/${f}" || die
+ fi
+
+ mv "${root}/${f}" "${root}/${dir}/${CHOST}-${fn}" || die
+
+ # symlink the native one back
+ if multilib_is_native_abi; then
+ ln -s "${CHOST}-${fn}" "${root}/${f}" || die
+ fi
+ done
+
+ if [[ ${MULTILIB_WRAPPED_HEADERS[@]} ]]; then
+ # If abi_flag is unset, then header wrapping is unsupported on
+ # this ABI. This means the arch doesn't support multilib at all
+ # -- in this case, the headers are not wrapped and everything
+ # works as expected.
+
+ if [[ ${MULTILIB_ABI_FLAG} ]]; then
+ for f in "${MULTILIB_WRAPPED_HEADERS[@]}"; do
+ # drop leading slash if it's there
+ f=${f#/}
+
+ if [[ ${f} != usr/include/* ]]; then
+ die "Wrapping headers outside of /usr/include is not supported at the moment."
+ fi
+ # and then usr/include
+ f=${f#usr/include}
+
+ local dir=${f%/*}
+
+			# Some ABIs may install fewer files than others.
+ if [[ -f ${root}/usr/include${f} ]]; then
+ local wrapper=${ED}/tmp/multilib-include${f}
+
+ if [[ ! -f ${ED}/tmp/multilib-include${f} ]]; then
+ dodir "/tmp/multilib-include${dir}"
+ # a generic template
+ cat > "${wrapper}" <<_EOF_
+/* This file is auto-generated by multilib-build.eclass
+ * as a multilib-friendly wrapper. For the original content,
+ * please see the files that are #included below.
+ */
+
+#if defined(__x86_64__) /* amd64 */
+# if defined(__ILP32__) /* x32 ABI */
+# error "abi_x86_x32 not supported by the package."
+# else /* 64-bit ABI */
+# error "abi_x86_64 not supported by the package."
+# endif
+#elif defined(__i386__) /* plain x86 */
+# error "abi_x86_32 not supported by the package."
+#elif defined(__mips__)
+# if(_MIPS_SIM == _ABIN32) /* n32 */
+# error "abi_mips_n32 not supported by the package."
+# elif(_MIPS_SIM == _ABI64) /* n64 */
+# error "abi_mips_n64 not supported by the package."
+# elif(_MIPS_SIM == _ABIO32) /* o32 */
+# error "abi_mips_o32 not supported by the package."
+# endif
+#elif defined(__sparc__)
+# if defined(__arch64__)
+# error "abi_sparc_64 not supported by the package."
+# else
+# error "abi_sparc_32 not supported by the package."
+# endif
+#elif defined(__s390__)
+# if defined(__s390x__)
+# error "abi_s390_64 not supported by the package."
+# else
+# error "abi_s390_32 not supported by the package."
+# endif
+#elif defined(__powerpc__)
+# if defined(__powerpc64__)
+# error "abi_ppc_64 not supported by the package."
+# else
+# error "abi_ppc_32 not supported by the package."
+# endif
+#elif defined(SWIG) /* https://sourceforge.net/p/swig/bugs/799/ */
+# error "Native ABI not supported by the package."
+#else
+# error "No ABI matched, please report a bug to bugs.gentoo.org"
+#endif
+_EOF_
+ fi
+
+ if ! grep -q "${MULTILIB_ABI_FLAG} " "${wrapper}"
+ then
+ die "Flag ${MULTILIB_ABI_FLAG} not listed in wrapper template. Please report a bug to https://bugs.gentoo.org."
+ fi
+
+ # $CHOST shall be set by multilib_toolchain_setup
+ dodir "/tmp/multilib-include/${CHOST}${dir}"
+ mv "${root}/usr/include${f}" "${ED}/tmp/multilib-include/${CHOST}${dir}/" || die
+
+ # Note: match a space afterwards to avoid collision potential.
+ sed -e "/${MULTILIB_ABI_FLAG} /s&error.*&include <${CHOST}${f}>&" \
+ -i "${wrapper}" || die
+
+ # Needed for swig.
+ if multilib_is_native_abi; then
+ sed -e "/Native ABI/s&error.*&include <${CHOST}${f}>&" \
+ -i "${wrapper}" || die
+ fi
+ fi
+ done
+ fi
+ fi
+}
+
+# @FUNCTION: multilib_install_wrappers
+# @USAGE: [<install-root>]
+# @DESCRIPTION:
+# Install the previously-prepared wrappers. This function shall
+# be called once, after all wrappers were prepared.
+#
+# Takes an optional custom <install-root> to which the wrappers will be
+# installed. If no root is specified, uses ${ED}. There is no need to
+# use the same root as when preparing the wrappers.
+#
+# The files to be wrapped are specified using separate variables,
+# e.g. MULTILIB_WRAPPED_HEADERS. Those variables shall not be changed
+# between the calls to multilib_prepare_wrappers
+# and multilib_install_wrappers.
+multilib_install_wrappers() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -le 1 ]] || die "${FUNCNAME}: too many arguments"
+
+ [[ ${COMPLETE_MULTILIB} == yes ]] && return
+
+ local root=${1:-${ED}}
+
+ if [[ -d "${ED}"/tmp/multilib-include ]]; then
+ multibuild_merge_root \
+ "${ED}"/tmp/multilib-include "${root}"/usr/include
+ # it can fail if something else uses /tmp
+ rmdir "${ED}"/tmp &>/dev/null
+ fi
+}
+
+# @FUNCTION: multilib_is_native_abi
+# @DESCRIPTION:
+# Determine whether the currently built ABI is the profile native.
+# Return true status (0) if that is true, otherwise false (1).
+multilib_is_native_abi() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -eq 0 ]] || die "${FUNCNAME}: too many arguments"
+
+ [[ ${COMPLETE_MULTILIB} == yes || ${ABI} == ${DEFAULT_ABI} ]]
+}
+
+# @FUNCTION: multilib_build_binaries
+# @DESCRIPTION:
+# Deprecated synonym for multilib_is_native_abi
+multilib_build_binaries() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ eqawarn "QA warning: multilib_build_binaries is deprecated. Please use the equivalent"
+ eqawarn "multilib_is_native_abi function instead."
+
+ multilib_is_native_abi "${@}"
+}
+
+# @FUNCTION: multilib_native_use_with
+# @USAGE: <flag> [<opt-name> [<opt-value>]]
+# @DESCRIPTION:
+# Output --with configure option alike use_with if USE <flag> is enabled
+# and executables are being built (multilib_is_native_abi is true).
+# Otherwise, outputs --without configure option. Arguments are the same
+# as for use_with in the EAPI.
+multilib_native_use_with() {
+ if multilib_is_native_abi; then
+ use_with "${@}"
+ else
+ echo "--without-${2:-${1}}"
+ fi
+}
+
+# @FUNCTION: multilib_native_use_enable
+# @USAGE: <flag> [<opt-name> [<opt-value>]]
+# @DESCRIPTION:
+# Output --enable configure option alike use_enable if USE <flag>
+# is enabled and executables are being built (multilib_is_native_abi
+# is true). Otherwise, outputs --disable configure option. Arguments are
+# the same as for use_enable in the EAPI.
+multilib_native_use_enable() {
+ if multilib_is_native_abi; then
+ use_enable "${@}"
+ else
+ echo "--disable-${2:-${1}}"
+ fi
+}
+
+# @FUNCTION: multilib_native_enable
+# @USAGE: <opt-name> [<opt-value>]
+# @DESCRIPTION:
+# Output --enable configure option if executables are being built
+# (multilib_is_native_abi is true). Otherwise, output --disable configure
+# option.
+multilib_native_enable() {
+ if multilib_is_native_abi; then
+ echo "--enable-${1}${2+=${2}}"
+ else
+ echo "--disable-${1}"
+ fi
+}
+
+# @FUNCTION: multilib_native_with
+# @USAGE: <opt-name> [<opt-value>]
+# @DESCRIPTION:
+# Output --with configure option if executables are being built
+# (multilib_is_native_abi is true). Otherwise, output --without configure
+# option.
+multilib_native_with() {
+ if multilib_is_native_abi; then
+ echo "--with-${1}${2+=${2}}"
+ else
+ echo "--without-${1}"
+ fi
+}
+
+# @FUNCTION: multilib_native_usex
+# @USAGE: <flag> [<true1> [<false1> [<true2> [<false2>]]]]
+# @DESCRIPTION:
+# Output the concatenation of <true1> (or 'yes' if unspecified)
+# and <true2> if USE <flag> is enabled and executables are being built
+# (multilib_is_native_abi is true). Otherwise, output the concatenation
+# of <false1> (or 'no' if unspecified) and <false2>. Arguments
+# are the same as for usex in the EAPI.
+#
+# Note: in EAPI 4 you need to inherit eutils to use this function.
+multilib_native_usex() {
+ if multilib_is_native_abi; then
+ usex "${@}"
+ else
+ echo "${3-no}${5}"
+ fi
+}
+
+_MULTILIB_BUILD=1
+fi
diff --git a/eclass/multilib-minimal.eclass b/eclass/multilib-minimal.eclass
new file mode 100644
index 000000000000..a3b6d37dc93d
--- /dev/null
+++ b/eclass/multilib-minimal.eclass
@@ -0,0 +1,124 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: multilib-minimal.eclass
+# @MAINTAINER:
+# Julian Ospald <hasufell@gentoo.org>
+# @BLURB: wrapper for multilib builds providing convenient multilib_src_* functions
+# @DESCRIPTION:
+#
+# src_configure, src_compile, src_test and src_install are exported.
+#
+# Use multilib_src_* instead of src_* which runs this phase for
+# all enabled ABIs.
+#
+# multilib-minimal should _always_ go last in inherit order!
+#
+# If you want to use in-source builds, then you must run
+# multilib_copy_sources at the end of src_prepare!
+# Also make sure to set correct variables such as
+# ECONF_SOURCE=${S}
+#
+# If you need generic install rules, use multilib_src_install_all function.
+
+
+# EAPI=4 is required for meaningful MULTILIB_USEDEP.
+case ${EAPI:-0} in
+ 4|5) ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+
+inherit eutils multilib-build
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
+
+
+multilib-minimal_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ multilib-minimal_abi_src_configure() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ mkdir -p "${BUILD_DIR}" || die
+ pushd "${BUILD_DIR}" >/dev/null || die
+ if declare -f multilib_src_configure >/dev/null ; then
+ multilib_src_configure
+ else
+ default_src_configure
+ fi
+ popd >/dev/null || die
+ }
+
+ multilib_foreach_abi multilib-minimal_abi_src_configure
+}
+
+multilib-minimal_src_compile() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ multilib-minimal_abi_src_compile() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ pushd "${BUILD_DIR}" >/dev/null || die
+ if declare -f multilib_src_compile >/dev/null ; then
+ multilib_src_compile
+ else
+ default_src_compile
+ fi
+ popd >/dev/null || die
+ }
+
+ multilib_foreach_abi multilib-minimal_abi_src_compile
+}
+
+multilib-minimal_src_test() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ multilib-minimal_abi_src_test() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ pushd "${BUILD_DIR}" >/dev/null || die
+ if declare -f multilib_src_test >/dev/null ; then
+ multilib_src_test
+ else
+ default_src_test
+ fi
+ popd >/dev/null || die
+ }
+
+ multilib_foreach_abi multilib-minimal_abi_src_test
+}
+
+multilib-minimal_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ multilib-minimal_abi_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ pushd "${BUILD_DIR}" >/dev/null || die
+ if declare -f multilib_src_install >/dev/null ; then
+ multilib_src_install
+ else
+ # default_src_install will not work here as it will
+ # break handling of DOCS wrt #468092
+ # so we split up the emake and doc-install part
+ # this is synced with __eapi4_src_install
+ if [[ -f Makefile || -f GNUmakefile || -f makefile ]] ; then
+ emake DESTDIR="${D}" install
+ fi
+ fi
+
+ multilib_prepare_wrappers
+ multilib_check_headers
+ popd >/dev/null || die
+ }
+ multilib_foreach_abi multilib-minimal_abi_src_install
+ multilib_install_wrappers
+
+ if declare -f multilib_src_install_all >/dev/null ; then
+ multilib_src_install_all
+ else
+ einstalldocs
+ fi
+}
diff --git a/eclass/multilib.eclass b/eclass/multilib.eclass
new file mode 100644
index 000000000000..eb0ac1d24100
--- /dev/null
+++ b/eclass/multilib.eclass
@@ -0,0 +1,451 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: multilib.eclass
+# @MAINTAINER:
+# amd64@gentoo.org
+# toolchain@gentoo.org
+# @BLURB: This eclass is for all functions pertaining to handling multilib configurations.
+# @DESCRIPTION:
+# This eclass is for all functions pertaining to handling multilib configurations.
+
+if [[ -z ${_MULTILIB_ECLASS} ]]; then
+_MULTILIB_ECLASS=1
+
+inherit toolchain-funcs
+
+# Defaults:
+export MULTILIB_ABIS=${MULTILIB_ABIS:-"default"}
+export DEFAULT_ABI=${DEFAULT_ABI:-"default"}
+export CFLAGS_default
+export LDFLAGS_default
+export CHOST_default=${CHOST_default:-${CHOST}}
+export CTARGET_default=${CTARGET_default:-${CTARGET:-${CHOST_default}}}
+export LIBDIR_default=${CONF_LIBDIR:-"lib"}
+export KERNEL_ABI=${KERNEL_ABI:-${DEFAULT_ABI}}
+
# @FUNCTION: has_multilib_profile
# @DESCRIPTION:
# Return true if the current profile is a multilib profile and lists more than
# one abi in ${MULTILIB_ABIS}. When has_multilib_profile returns true, that
# profile should enable the 'multilib' use flag. This is so you can DEPEND on
# a package only for multilib or not multilib.
has_multilib_profile() {
	# True iff MULTILIB_ABIS is non-empty AND contains a space, i.e.
	# lists at least two ABIs.  [[ ]] replaces the obsolescent
	# `[ ... -a ... ]` form (same semantics, no word-splitting pitfalls).
	[[ -n ${MULTILIB_ABIS} && ${MULTILIB_ABIS} != "${MULTILIB_ABIS/ /}" ]]
}
+
# @FUNCTION: get_libdir
# @RETURN: the libdir for the selected ABI
# @DESCRIPTION:
# Print the library directory for the currently selected ABI.  Since
# portage 2.0.51 libraries may be installed into lib32/lib64 to
# accommodate multilib systems, so never hard-code "lib"; use
# $(get_libdir) instead.
#
# CONF_LIBDIR_OVERRIDE, when set, wins unconditionally; otherwise the
# profile-provided per-ABI value is used (see get_abi_LIBDIR, and the
# ABI/DEFAULT_ABI handling added by Jeremy Huddleston, 23 Dec 2004).
get_libdir() {
	local CONF_LIBDIR
	if [[ -z ${CONF_LIBDIR_OVERRIDE} ]] ; then
		get_abi_LIBDIR
	else
		# An explicit override always takes precedence.
		echo ${CONF_LIBDIR_OVERRIDE}
	fi
}
+
# @FUNCTION: get_abi_var
# @USAGE: <VAR> [ABI]
# @RETURN: returns the value of ${<VAR>_<ABI>} which should be set in make.defaults
# @INTERNAL
# @DESCRIPTION:
# ex:
#  CFLAGS=$(get_abi_var CFLAGS sparc32) # CFLAGS=-m32
#
# If <ABI> is not specified, ${ABI} is used; failing that ${DEFAULT_ABI};
# failing that the literal "default".  An unset ${<VAR>_<ABI>} yields an
# empty string.
get_abi_var() {
	# Resolve the ABI to look up, then dereference ${<VAR>_<ABI>}.
	local abi=${2:-${ABI:-${DEFAULT_ABI:-default}}}
	local ref="${1}_${abi}"
	echo ${!ref}
}
+
+# @FUNCTION: get_abi_CFLAGS
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var CFLAGS'
+get_abi_CFLAGS() { get_abi_var CFLAGS "$@"; }
+
+# @FUNCTION: get_abi_LDFLAGS
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var LDFLAGS'
+get_abi_LDFLAGS() { get_abi_var LDFLAGS "$@"; }
+
+# @FUNCTION: get_abi_CHOST
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var CHOST'
+get_abi_CHOST() { get_abi_var CHOST "$@"; }
+
+# @FUNCTION: get_abi_CTARGET
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var CTARGET'
+get_abi_CTARGET() { get_abi_var CTARGET "$@"; }
+
+# @FUNCTION: get_abi_FAKE_TARGETS
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var FAKE_TARGETS'
+get_abi_FAKE_TARGETS() { get_abi_var FAKE_TARGETS "$@"; }
+
+# @FUNCTION: get_abi_LIBDIR
+# @USAGE: [ABI]
+# @DESCRIPTION:
+# Alias for 'get_abi_var LIBDIR'
+get_abi_LIBDIR() { get_abi_var LIBDIR "$@"; }
+
# @FUNCTION: get_install_abis
# @DESCRIPTION:
# Return a list of the ABIs we want to install for with
# the last one in the list being the default.
# ABI_DENY filters ABIs out; ABI_ALLOW, when non-empty, additionally
# restricts the result to the ABIs it lists.  Ordering is preserved:
# non-default ABIs first (MULTILIB_ABIS order), the default ABI last.
get_install_abis() {
	local x order=""

	# Non-multilib tree: single pseudo-ABI.
	if [[ -z ${MULTILIB_ABIS} ]] ; then
		echo "default"
		return 0
	fi

	# Only packages that opted in (EMULTILIB_PKG=true) build for every
	# ABI; everything else builds for the default ABI only.
	if [[ ${EMULTILIB_PKG} == "true" ]] ; then
		for x in ${MULTILIB_ABIS} ; do
			if [[ ${x} != "${DEFAULT_ABI}" ]] ; then
				has ${x} ${ABI_DENY} || order="${order} ${x}"
			fi
		done
		# Default ABI is appended last so it "wins" install collisions.
		has ${DEFAULT_ABI} ${ABI_DENY} || order="${order} ${DEFAULT_ABI}"

		if [[ -n ${ABI_ALLOW} ]] ; then
			local ordera=""
			for x in ${order} ; do
				if has ${x} ${ABI_ALLOW} ; then
					ordera="${ordera} ${x}"
				fi
			done
			order=${ordera}
		fi
	else
		order=${DEFAULT_ABI}
	fi

	if [[ -z ${order} ]] ; then
		die "The ABI list is empty. Are you using a proper multilib profile? Perhaps your USE flags or MULTILIB_ABIS are too restrictive for this package."
	fi

	echo ${order}
	return 0
}
+
# @FUNCTION: get_all_abis
# @DESCRIPTION:
# Return the list of ABIs supported by this profile, default ABI last.
# With an optional prefix argument, read <prefix>_MULTILIB_ABIS and
# <prefix>_DEFAULT_ABI instead of the plain variables.
get_all_abis() {
	local abi result="" abis_var="MULTILIB_ABIS" def_var="DEFAULT_ABI"

	if [[ -n $1 ]] ; then
		abis_var="$1_${abis_var}"
		def_var="$1_${def_var}"
	fi

	# No multilib data at all -> single "default" ABI.
	if [[ -z ${!abis_var} ]] ; then
		echo "default"
		return 0
	fi

	# Emit every non-default ABI in declaration order...
	for abi in ${!abis_var}; do
		if [[ ${abi} != ${!def_var} ]] ; then
			result+="${result:+ }${abi}"
		fi
	done
	# ...then the default ABI last.
	result+="${result:+ }${!def_var}"

	echo ${result}
	return 0
}
+
# @FUNCTION: get_all_libdirs
# @DESCRIPTION:
# Returns a list of all the libdirs used by this profile, including
# ones an ebuild may not touch; plain "lib" is always in the list.
# (Output keeps the historical leading space.)
get_all_libdirs() {
	local dirs="" a

	for a in ${MULTILIB_ABIS}; do
		dirs="${dirs} $(get_abi_LIBDIR ${a})"
	done
	# Guarantee that plain "lib" appears exactly once.
	if [[ " ${dirs} " != *" lib "* ]] ; then
		dirs="${dirs} lib"
	fi

	echo "${dirs}"
}
+
# @FUNCTION: is_final_abi
# @DESCRIPTION:
# Return true if ${ABI} is the last ABI in the install list (or if the
# profile is not multilib at all).  Lets ebuilds detect the last (or
# only) pass through src_{unpack,compile,install}.
is_final_abi() {
	has_multilib_profile || return 0
	# Load the install list into $1..$N; ${!#} is the last element.
	set -- $(get_install_abis)
	[[ ${!#} == ${ABI} ]]
}
+
# @FUNCTION: number_abis
# @DESCRIPTION:
# echo the number of ABIs we will be installing for
number_abis() {
	# $( ) instead of legacy backticks; the word-splitting of the ABI
	# list into positional parameters is intentional.
	set -- $(get_install_abis)
	echo $#
}
+
# @FUNCTION: get_libname
# @USAGE: [version]
# @DESCRIPTION:
# Print the shared-library suffix {.so,.dylib,.dll,...} for the platform
# identified by CHOST, optionally combined with each supplied version.
#
# Example:
#    get_libname ${PV}
#    Returns: .so.${PV} (ELF) || .${PV}.dylib (MACH) || ...
get_libname() {
	local suffix ver
	case ${CHOST} in
		*-cygwin|mingw*|*-mingw*) suffix="dll";;
		*-darwin*)                suffix="dylib";;
		*-mint*)                  suffix="irrelevant";;
		hppa*-hpux*)              suffix="sl";;
		*)                        suffix="so";;
	esac

	# No versions requested: bare suffix.
	if [[ -z $* ]] ; then
		echo ".${suffix}"
		return
	fi
	# One line per requested version; Mach-O puts the version before
	# the suffix, MiNT ignores it entirely.
	for ver in "$@" ; do
		case ${CHOST} in
			*-darwin*) echo ".${ver}.${suffix}";;
			*-mint*)   echo ".${suffix}";;
			*)         echo ".${suffix}.${ver}";;
		esac
	done
}
+
# @FUNCTION: get_modname
# @USAGE:
# @DESCRIPTION:
# Returns modulename with proper suffix {.so,.bundle,etc} for the current
# platform identified by CHOST.
#
# Example:
#    libfoo$(get_modname)
#    Returns: libfoo.so (ELF) || libfoo.bundle (MACH) || ...
get_modname() {
	# (dropped unused `local ver=$1` — this function takes no args)
	local modname
	case ${CHOST} in
		*-darwin*) modname="bundle";;
		*)         modname="so";;
	esac

	echo ".${modname}"
}
+
# This is for the toolchain to setup profile variables when pulling in
# a crosscompiler (and thus they aren't set in the profile).
# Based on the CPU element of CTARGET, export the per-ABI CFLAGS/CHOST/
# CTARGET/LIBDIR variables and provide defaults for MULTILIB_ABIS and
# DEFAULT_ABI (pre-existing values of those two are left untouched).
multilib_env() {
	local CTARGET=${1:-${CTARGET}}
	# CPU is everything before the first '-' of the tuple.  The previous
	# pattern (%%*-) could never match a suffix of the tuple, leaving the
	# whole tuple in `cpu`, so e.g. `aarch64*be` below never matched.
	local cpu=${CTARGET%%-*}

	case ${cpu} in
		aarch64*)
			# Not possible to do multilib with aarch64 and a single toolchain.
			export CFLAGS_arm=${CFLAGS_arm-}
			case ${cpu} in
			aarch64*be) export CHOST_arm="armv8b-${CTARGET#*-}";;
			*)          export CHOST_arm="armv8l-${CTARGET#*-}";;
			esac
			CHOST_arm=${CHOST_arm/%-gnu/-gnueabi}
			export CTARGET_arm=${CHOST_arm}
			export LIBDIR_arm="lib"

			export CFLAGS_arm64=${CFLAGS_arm64-}
			export CHOST_arm64=${CTARGET}
			export CTARGET_arm64=${CHOST_arm64}
			export LIBDIR_arm64="lib64"

			: ${MULTILIB_ABIS=arm64}
			: ${DEFAULT_ABI=arm64}
			;;
		x86_64*)
			export CFLAGS_x86=${CFLAGS_x86--m32}
			export CHOST_x86=${CTARGET/x86_64/i686}
			CHOST_x86=${CHOST_x86/%-gnux32/-gnu}
			export CTARGET_x86=${CHOST_x86}
			# SYMLINK_LIB profiles make "lib" a symlink to lib64, so
			# 32-bit libs need their own lib32 directory.
			if [[ ${SYMLINK_LIB} == "yes" ]] ; then
				export LIBDIR_x86="lib32"
			else
				export LIBDIR_x86="lib"
			fi

			export CFLAGS_amd64=${CFLAGS_amd64--m64}
			export CHOST_amd64=${CTARGET/%-gnux32/-gnu}
			export CTARGET_amd64=${CHOST_amd64}
			export LIBDIR_amd64="lib64"

			export CFLAGS_x32=${CFLAGS_x32--mx32}
			export CHOST_x32=${CTARGET/%-gnu/-gnux32}
			export CTARGET_x32=${CHOST_x32}
			export LIBDIR_x32="libx32"

			# x32 tuples get the x32 ABI as the default.
			case ${CTARGET} in
				*-gnux32)
					: ${MULTILIB_ABIS=x32 amd64 x86}
					: ${DEFAULT_ABI=x32}
					;;
				*)
					: ${MULTILIB_ABIS=amd64 x86}
					: ${DEFAULT_ABI=amd64}
					;;
			esac
			;;
		mips64*)
			export CFLAGS_o32=${CFLAGS_o32--mabi=32}
			export CHOST_o32=${CTARGET/mips64/mips}
			export CTARGET_o32=${CHOST_o32}
			export LIBDIR_o32="lib"

			export CFLAGS_n32=${CFLAGS_n32--mabi=n32}
			export CHOST_n32=${CTARGET}
			export CTARGET_n32=${CHOST_n32}
			export LIBDIR_n32="lib32"

			export CFLAGS_n64=${CFLAGS_n64--mabi=64}
			export CHOST_n64=${CTARGET}
			export CTARGET_n64=${CHOST_n64}
			export LIBDIR_n64="lib64"

			: ${MULTILIB_ABIS=n64 n32 o32}
			: ${DEFAULT_ABI=n32}
			;;
		powerpc64*)
			export CFLAGS_ppc=${CFLAGS_ppc--m32}
			export CHOST_ppc=${CTARGET/powerpc64/powerpc}
			export CTARGET_ppc=${CHOST_ppc}
			export LIBDIR_ppc="lib"

			export CFLAGS_ppc64=${CFLAGS_ppc64--m64}
			export CHOST_ppc64=${CTARGET}
			export CTARGET_ppc64=${CHOST_ppc64}
			export LIBDIR_ppc64="lib64"

			: ${MULTILIB_ABIS=ppc64 ppc}
			: ${DEFAULT_ABI=ppc64}
			;;
		s390x*)
			export CFLAGS_s390=${CFLAGS_s390--m31} # the 31 is not a typo
			export CHOST_s390=${CTARGET/s390x/s390}
			export CTARGET_s390=${CHOST_s390}
			export LIBDIR_s390="lib"

			export CFLAGS_s390x=${CFLAGS_s390x--m64}
			export CHOST_s390x=${CTARGET}
			export CTARGET_s390x=${CHOST_s390x}
			export LIBDIR_s390x="lib64"

			: ${MULTILIB_ABIS=s390x s390}
			: ${DEFAULT_ABI=s390x}
			;;
		sparc64*)
			export CFLAGS_sparc32=${CFLAGS_sparc32--m32}
			export CHOST_sparc32=${CTARGET/sparc64/sparc}
			export CTARGET_sparc32=${CHOST_sparc32}
			export LIBDIR_sparc32="lib"

			export CFLAGS_sparc64=${CFLAGS_sparc64--m64}
			export CHOST_sparc64=${CTARGET}
			export CTARGET_sparc64=${CHOST_sparc64}
			export LIBDIR_sparc64="lib64"

			: ${MULTILIB_ABIS=sparc64 sparc32}
			: ${DEFAULT_ABI=sparc64}
			;;
		*)
			: ${MULTILIB_ABIS=default}
			: ${DEFAULT_ABI=default}
			;;
	esac

	export MULTILIB_ABIS DEFAULT_ABI
}
+
# @FUNCTION: multilib_toolchain_setup
# @USAGE: <ABI>
# @DESCRIPTION:
# Hide multilib details here for packages which are forced to be compiled for a
# specific ABI when run on another ABI (like x86-specific packages on amd64).
# Saves the current toolchain variables into _abi_saved_* on the first
# non-default switch and restores them on a later call.
multilib_toolchain_setup() {
	local v vv

	export ABI=$1

	# First restore any saved state we have laying around.
	if [[ ${_DEFAULT_ABI_SAVED} == "true" ]] ; then
		for v in CHOST CBUILD AS CC CXX F77 FC LD PKG_CONFIG_{LIBDIR,PATH} ; do
			vv="_abi_saved_${v}"
			# Restore ${v} only if it was actually set when saved,
			# otherwise make sure it is unset again.
			[[ ${!vv+set} == "set" ]] && export ${v}="${!vv}" || unset ${v}
			unset ${vv}
		done
		unset _DEFAULT_ABI_SAVED
	fi

	# We want to avoid the behind-the-back magic of gcc-config as it
	# screws up ccache and distcc. See #196243 for more info.
	if [[ ${ABI} != ${DEFAULT_ABI} ]] ; then
		# Back that multilib-ass up so we can restore it later
		for v in CHOST CBUILD AS CC CXX F77 FC LD PKG_CONFIG_{LIBDIR,PATH} ; do
			vv="_abi_saved_${v}"
			[[ ${!v+set} == "set" ]] && export ${vv}="${!v}" || unset ${vv}
		done
		export _DEFAULT_ABI_SAVED="true"

		# Set the CHOST native first so that we pick up the native
		# toolchain and not a cross-compiler by accident #202811.
		export CHOST=$(get_abi_CHOST ${DEFAULT_ABI})
		export CC="$(tc-getCC) $(get_abi_CFLAGS)"
		export CXX="$(tc-getCXX) $(get_abi_CFLAGS)"
		export F77="$(tc-getF77) $(get_abi_CFLAGS)"
		export FC="$(tc-getFC) $(get_abi_CFLAGS)"
		export LD="$(tc-getLD) $(get_abi_LDFLAGS)"
		# Now switch CHOST/CBUILD over to the requested ABI's tuple.
		export CHOST=$(get_abi_CHOST $1)
		export CBUILD=$(get_abi_CHOST $1)
		export PKG_CONFIG_LIBDIR=${EPREFIX}/usr/$(get_libdir)/pkgconfig
		export PKG_CONFIG_PATH=${EPREFIX}/usr/share/pkgconfig
	fi
}
+
+fi
diff --git a/eclass/multiprocessing.eclass b/eclass/multiprocessing.eclass
new file mode 100644
index 000000000000..534b35c56b96
--- /dev/null
+++ b/eclass/multiprocessing.eclass
@@ -0,0 +1,273 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: multiprocessing.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @AUTHOR:
+# Brian Harring <ferringb@gentoo.org>
+# Mike Frysinger <vapier@gentoo.org>
+# @BLURB: parallelization with bash (wtf?)
+# @DESCRIPTION:
+# The multiprocessing eclass contains a suite of functions that allow ebuilds
+# to quickly run things in parallel using shell code.
+#
+# It has two modes: pre-fork and post-fork. If you don't want to dive into any
+# more nuts & bolts, just use the pre-fork mode. For main threads that mostly
+# spawn children and then wait for them to finish, use the pre-fork mode. For
+# main threads that do a bit of processing themselves, use the post-fork mode.
+# You may mix & match them for longer computation loops.
+# @EXAMPLE:
+#
+# @CODE
+# # First initialize things:
+# multijob_init
+#
+# # Then hash a bunch of files in parallel:
+# for n in {0..20} ; do
+# multijob_child_init md5sum data.${n} > data.${n}.md5
+# done
+#
+# # Then wait for all the children to finish:
+# multijob_finish
+# @CODE
+
+if [[ -z ${_MULTIPROCESSING_ECLASS} ]]; then
+_MULTIPROCESSING_ECLASS=1
+
# @FUNCTION: bashpid
# @DESCRIPTION:
# Return the process id of the current sub shell.  This is to support bash
# versions older than 4.0 that lack $BASHPID support natively.  Simply do:
# echo ${BASHPID:-$(bashpid)}
#
# Note: Using this func in any other way than the one above is not supported.
bashpid() {
	# Running bashpid plainly will return incorrect results.  This func must
	# be run in a subshell of the current subshell to get the right pid.
	# i.e. This will show the wrong value:
	#   bashpid
	# But this will show the right value:
	#   (bashpid)
	# The forked `sh` sees the caller's (sub)shell as its parent ($PPID).
	sh -c 'echo ${PPID}'
}
+
# @FUNCTION: makeopts_jobs
# @USAGE: [${MAKEOPTS}]
# @DESCRIPTION:
# Extract the jobs count from the arguments (default: ${MAKEOPTS}) so
# non-make tools can be parallelized too.  The value is echoed, not
# returned, since bash normalizes return codes to [0, 255].  With no
# -j/--jobs flag, "1" (make's default) is printed; a bare -j (infinite
# jobs) is reported as 999 since infinity cannot be represented.
makeopts_jobs() {
	(( $# == 0 )) && set -- ${MAKEOPTS}
	local found
	# First expression: -jN / --jobs=N / --jobs N -> print N.
	# Second: a bare -j / --jobs -> print 999.
	# This assumes the first .* will be more greedy than the second .*
	# since POSIX doesn't specify a non-greedy match (i.e. ".*?").
	found=$(echo " $* " | sed -r -n \
		-e 's:.*[[:space:]](-j|--jobs[=[:space:]])[[:space:]]*([0-9]+).*:\2:p' \
		-e 's:.*[[:space:]](-j|--jobs)[[:space:]].*:999:p')
	echo ${found:-1}
}
+
# @FUNCTION: makeopts_loadavg
# @USAGE: [${MAKEOPTS}]
# @DESCRIPTION:
# Extract the -l/--load-average value from the arguments (default:
# ${MAKEOPTS}).  For make/ninja builds this limits new jobs by system
# load as well as by job count.  The result may be a floating-point
# number; check that your consumer supports that.  A bare flag (no
# limit) is reported as 999; no flag at all yields 1.
makeopts_loadavg() {
	(( $# == 0 )) && set -- ${MAKEOPTS}
	local found
	# First expression captures the (possibly fractional) numeric
	# argument; second maps a bare -l/--load-average to 999.
	found=$(echo " $* " | sed -r -n \
		-e 's:.*[[:space:]](-l|--load-average[=[:space:]])[[:space:]]*([0-9]+|[0-9]+\.[0-9]+)[^0-9.]*:\2:p' \
		-e 's:.*[[:space:]](-l|--load-average)[[:space:]].*:999:p')
	echo ${found:-1}
}
+
# @FUNCTION: multijob_init
# @USAGE: [${MAKEOPTS}]
# @DESCRIPTION:
# Setup the environment for executing code in parallel.
# You must call this before any other multijob function.
multijob_init() {
	# When something goes wrong, try to wait for all the children so we
	# don't leave any zombies around.
	has wait ${EBUILD_DEATH_HOOKS} || EBUILD_DEATH_HOOKS+=" wait "

	# Setup a pipe for children to write their pids to when they finish.
	# We have to allocate two fd's because POSIX has undefined behavior
	# when you open a FIFO for simultaneous read/write. #487056
	local pipe="${T}/multijob.pipe"
	mkfifo -m 600 "${pipe}"
	redirect_alloc_fd mj_write_fd "${pipe}"
	redirect_alloc_fd mj_read_fd "${pipe}"
	# Both ends are held open via fds now; drop the filesystem entry.
	rm -f "${pipe}"

	# See how many children we can fork based on the user's settings.
	# mj_max_jobs/mj_num_jobs (and the fds above) are deliberately
	# global state shared with the other multijob_* functions.
	mj_max_jobs=$(makeopts_jobs "$@")
	mj_num_jobs=0
}
+
+# @FUNCTION: multijob_child_init
+# @USAGE: [--pre|--post] [command to run in background]
+# @DESCRIPTION:
+# This function has two forms. You can use it to execute a simple command
+# in the background (and it takes care of everything else), or you must
+# call this first thing in your forked child process.
+#
+# The --pre/--post options allow you to select the child generation mode.
+#
+# @CODE
+# # 1st form: pass the command line as arguments:
+# multijob_child_init ls /dev
+# # Or if you want to use pre/post fork modes:
+# multijob_child_init --pre ls /dev
+# multijob_child_init --post ls /dev
+#
+# # 2nd form: execute multiple stuff in the background (post fork):
+# (
+# multijob_child_init
+# out=`ls`
+# if echo "${out}" | grep foo ; then
+# echo "YEAH"
+# fi
+# ) &
+# multijob_post_fork
+#
+# # 2nd form: execute multiple stuff in the background (pre fork):
+# multijob_pre_fork
+# (
+# multijob_child_init
+# out=`ls`
+# if echo "${out}" | grep foo ; then
+# echo "YEAH"
+# fi
+# ) &
+# @CODE
# (see the @USAGE/@CODE block above for the two calling conventions)
multijob_child_init() {
	local mode="pre"
	case $1 in
	--pre) mode="pre" ; shift ;;
	--post) mode="post"; shift ;;
	esac

	if [[ $# -eq 0 ]] ; then
		# 2nd form: we ARE the forked child.  On exit, report our pid
		# and exit status into the parent's status pipe.
		trap 'echo ${BASHPID:-$(bashpid)} $? >&'${mj_write_fd} EXIT
		trap 'exit 1' INT TERM
	else
		# 1st form: fork the given command ourselves, doing the
		# pre-fork or post-fork bookkeeping around it as requested.
		local ret
		[[ ${mode} == "pre" ]] && { multijob_pre_fork; ret=$?; }
		( multijob_child_init ; "$@" ) &
		[[ ${mode} == "post" ]] && { multijob_post_fork; ret=$?; }
		return ${ret}
	fi
}
+
# @FUNCTION: _multijob_fork
# @USAGE: <pre|post>
# @INTERNAL
# @DESCRIPTION:
# Do the actual book keeping.  When the parallelism limit is reached,
# reap one finished child and return its exit status; otherwise 0.
_multijob_fork() {
	[[ $# -eq 1 ]] || die "incorrect number of arguments"

	local ret=0
	# post: the child is already running, so count it before checking
	# the limit.
	[[ $1 == "post" ]] && : $(( ++mj_num_jobs ))
	if [[ ${mj_num_jobs} -ge ${mj_max_jobs} ]] ; then
		multijob_finish_one
		ret=$?
	fi
	# pre: make room first, then account for the child about to fork.
	[[ $1 == "pre" ]] && : $(( ++mj_num_jobs ))
	return ${ret}
}
+
+# @FUNCTION: multijob_pre_fork
+# @DESCRIPTION:
+# You must call this in the parent process before forking a child process.
+# If the parallel limit has been hit, it will wait for one child to finish
+# and return its exit status.
+multijob_pre_fork() { _multijob_fork pre "$@" ; }
+
+# @FUNCTION: multijob_post_fork
+# @DESCRIPTION:
+# You must call this in the parent process after forking a child process.
+# If the parallel limit has been hit, it will wait for one child to finish
+# and return its exit status.
+multijob_post_fork() { _multijob_fork post "$@" ; }
+
# @FUNCTION: multijob_finish_one
# @DESCRIPTION:
# Wait for a single process to exit and return its exit code.
# Blocks reading one "<pid> <status>" record from the status pipe,
# written by the EXIT trap installed in multijob_child_init.
multijob_finish_one() {
	[[ $# -eq 0 ]] || die "${FUNCNAME} takes no arguments"

	local pid ret
	read -r -u ${mj_read_fd} pid ret || die
	# One fewer child outstanding.
	: $(( --mj_num_jobs ))
	return ${ret}
}
+
# @FUNCTION: multijob_finish
# @DESCRIPTION:
# Wait for all pending processes to exit and return the bitwise or
# of all their exit codes.
multijob_finish() {
	local ret=0
	# Drain the status pipe, one record per outstanding child.
	while [[ ${mj_num_jobs} -gt 0 ]] ; do
		multijob_finish_one
		: $(( ret |= $? ))
	done
	# Let bash clean up its internal child tracking state.
	wait

	# Do this after reaping all the children.
	[[ $# -eq 0 ]] || die "${FUNCNAME} takes no arguments"

	# No need to hook anymore.
	EBUILD_DEATH_HOOKS=${EBUILD_DEATH_HOOKS/ wait / }

	return ${ret}
}
+
# @FUNCTION: redirect_alloc_fd
# @USAGE: <var> <file> [redirection]
# @DESCRIPTION:
# Find a free fd and redirect the specified file via it.  Store the new
# fd in the specified variable.  Useful for the cases where we don't care
# about the exact fd #.  The default redirection is read/write ("<>").
redirect_alloc_fd() {
	local var=$1 file=$2 redir=${3:-"<>"}

	# Make sure /dev/fd is sane on Linux hosts. #479656
	if [[ ! -L /dev/fd && ${CBUILD} == *linux* ]] ; then
		eerror "You're missing a /dev/fd symlink to /proc/self/fd."
		eerror "Please fix the symlink and check your boot scripts (udev/etc...)."
		die "/dev/fd is broken"
	fi

	if [[ $(( (BASH_VERSINFO[0] << 8) + BASH_VERSINFO[1] )) -ge $(( (4 << 8) + 1 )) ]] ; then
		# Newer bash provides this functionality: {var}<>file picks a
		# free fd itself and stores its number in ${var}.
		eval "exec {${var}}${redir}'${file}'"
	else
		# Need to provide the functionality ourselves.
		local fd=10
		while :; do
			# Make sure the fd isn't open.  It could be a char device,
			# or a symlink (possibly broken) to something else.
			if [[ ! -e /dev/fd/${fd} ]] && [[ ! -L /dev/fd/${fd} ]] ; then
				eval "exec ${fd}${redir}'${file}'" && break
			fi
			[[ ${fd} -gt 1024 ]] && die 'could not locate a free temp fd !?'
			: $(( ++fd ))
		done
		# Arithmetic assignment writes the fd number into the caller's
		# variable named by ${var}.
		: $(( ${var} = fd ))
	fi
}
+
+fi
diff --git a/eclass/myspell-r2.eclass b/eclass/myspell-r2.eclass
new file mode 100644
index 000000000000..ebca9c4028ec
--- /dev/null
+++ b/eclass/myspell-r2.eclass
@@ -0,0 +1,118 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: myspell-r2.eclass
+# @MAINTAINER:
+# maintainer-needed@gentoo.org
+# @AUTHOR:
+# Tomáš Chvátal <scarabeus@gentoo.org>
+# @BLURB: An eclass to ease the construction of ebuilds for myspell dicts
+# @DESCRIPTION:
+
+EXPORT_FUNCTIONS src_unpack src_install
+
+# @ECLASS-VARIABLE: MYSPELL_DICT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing list of all dictionary files.
+# MYSPELL_DICT=( "file.dic" "dir/file2.aff" )
+
+# @ECLASS-VARIABLE: MYSPELL_HYPH
+# @DESCRIPTION:
+# Array variable containing list of all hyphenation files.
+# MYSPELL_HYPH=( "file.dic" "dir/file2.dic" )
+
+# @ECLASS-VARIABLE: MYSPELL_THES
+# @DESCRIPTION:
+# Array variable containing list of all thesaurus files.
+# MYSPELL_THES=( "file.dat" "dir/file2.idx" )
+
+# Basically no extra deps needed.
+# Unzip is required for .oxt libreoffice extensions
+# which are just fancy zip files.
+DEPEND="app-arch/unzip"
+RDEPEND=""
+
+# by default this stuff does not have any folder in the pack
+S="${WORKDIR}"
+
# @FUNCTION: myspell-r2_src_unpack
# @DESCRIPTION:
# Unpack all variants of weird stuff.
# In our case .oxt packs (LibreOffice extensions — fancy zip files);
# anything else is handed to the standard unpack helper.
myspell-r2_src_unpack() {
	debug-print-function ${FUNCNAME} "$@"

	local f
	for f in ${A}; do
		case ${f} in
			*.oxt)
				# Quoting fixed: the original nested quotes left the
				# path outside the string, breaking on whitespace in
				# DISTDIR and garbling the message.
				echo ">>> Unpacking ${DISTDIR}/${f} to ${PWD}"
				unzip -qoj "${DISTDIR}/${f}"
				assert "failed unpacking ${DISTDIR}/${f}"
				;;
			*) unpack ${f} ;;
		esac
	done
}
+
# @FUNCTION: myspell-r2_src_install
# @DESCRIPTION:
# Install the dictionaries to the right places, and compatibility
# symlinks under the legacy /usr/share/myspell location.
myspell-r2_src_install() {
	debug-print-function ${FUNCNAME} "$@"

	local x target

	# Following the debian directory layout here.
	# DICT: /usr/share/hunspell
	# THES: /usr/share/mythes
	# HYPH: /usr/share/hyphen
	# We just need to copy the required files to proper places.

	# TODO: backcompat dosym remove when all dictionaries and libreoffice
	# ebuilds in tree use only the new paths

	# Very old installs have hunspell to be symlink to myspell.
	# This results in fcked up install/symlink stuff.
	if [[ -L "${EPREFIX}/usr/share/hunspell" ]] ; then
		eerror "\"${EPREFIX}/usr/share/hunspell\" is a symlink."
		eerror "Please remove it so it is created properly as folder"
		die "\"${EPREFIX}/usr/share/hunspell\" is a symlink."
	fi

	# Spelling dictionaries (basename only; sources may live in subdirs).
	insinto /usr/share/hunspell
	for x in "${MYSPELL_DICT[@]}"; do
		target="${x##*/}"
		newins "${x}" "${target}" || die
		dosym /usr/share/hunspell/"${target}" /usr/share/myspell/"${target}" || die
	done

	# Thesauri.
	insinto /usr/share/mythes
	for x in "${MYSPELL_THES[@]}"; do
		target="${x##*/}"
		newins "${x}" "${target}" || die
		dosym /usr/share/mythes/"${target}" /usr/share/myspell/"${target}" || die
	done

	# Hyphenation patterns.
	insinto /usr/share/hyphen
	for x in "${MYSPELL_HYPH[@]}"; do
		target="${x##*/}"
		newins "${x}" "${target}" || die
		dosym /usr/share/hyphen/"${target}" /usr/share/myspell/"${target}" || die
	done

	# Remove licenses as they suffix them with .txt too
	rm -rf COPYING*
	rm -rf LICENSE*
	rm -rf LICENCE*
	rm -rf license*
	rm -rf licence*
	# Readme and so on
	for x in *.txt README*; do
		if [[ -f ${x} ]]; then
			dodoc ${x} || die
		fi
	done
}
diff --git a/eclass/myspell.eclass b/eclass/myspell.eclass
new file mode 100644
index 000000000000..c692f37afc35
--- /dev/null
+++ b/eclass/myspell.eclass
@@ -0,0 +1,257 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Author: Kevin F. Quinn <kevquinn@gentoo.org>
+# Packages: app-dicts/myspell-*
+# Maintainer: maintainer-needed@gentoo.org
+
+inherit multilib
+
+EXPORT_FUNCTIONS src_install pkg_preinst pkg_postinst
+
+IUSE=""
+
+SLOT="0"
+
+# tar, gzip, bzip2 are included in the base profile, but not unzip
+DEPEND="app-arch/unzip"
+
+# Dictionaries don't have any runtime dependencies
+# Myspell dictionaries can be used by hunspell, openoffice and others
+RDEPEND=""
+
+# The destination directory for myspell dictionaries
+MYSPELL_DICTBASE="/usr/share/myspell"
+
+# Legacy variable for dictionaries installed before eselect-oodict existed
+# so has to remain for binpkg support. This variable is unmaintained -
+# if you have a problem with it, emerge app-eselect/eselect-oodict.
+# The location for openoffice softlinks
+MYSPELL_OOOBASE="/usr/lib/openoffice/share/dict/ooo"
+
+
# Split $1 on separator $2 into the array "fields" in the caller's
# scope.  This saves having to muck about with IFS all over the place:
# a function-local IFS does the splitting and leaves the global IFS
# untouched on return.
set_fields() {
	local IFS=$2
	fields=($1)
}
+
# language is the second element of the ebuild name
# myspell-<lang>-<version>
get_myspell_lang() {
	local parts
	IFS="-" read -r -a parts <<< "${P}"
	echo ${parts[1]}
}
+
# Map a dictionary.lst entry type to the file suffixes it needs;
# unknown types print nothing.
get_myspell_suffixes() {
	if [[ $1 == "DICT" ]] ; then
		echo ".aff .dic"
	elif [[ $1 == "HYPH" ]] ; then
		echo ".dic"
	elif [[ $1 == "THES" ]] ; then
		echo ".dat .idx"
	fi
}
+
# OOo dictionary files are held on the mirrors, rather than
# being fetched direct from the OOo site as upstream doesn't
# change the name when they rebuild the dictionaries.
# <lang>-<country>.zip becomes myspell-<lang>-<country>-version.zip
get_myspell_ooo_uri() {
	local files fields newfile filestem srcfile dict uris
	files=()
	uris=""
	for dict in \
		"${MYSPELL_SPELLING_DICTIONARIES[@]}" \
		"${MYSPELL_HYPHENATION_DICTIONARIES[@]}" \
		"${MYSPELL_THESAURUS_DICTIONARIES[@]}"; do
		set_fields "${dict}" ","
		# Field 4 is the archive filename; strip embedded spaces.
		newfile=${fields[4]// }
		# Skip archives already seen — several entries can share a zip.
		for file in "${files[@]}"; do
			[[ ${file} == ${newfile} ]] && continue 2
		done
		filestem=${newfile/.zip}
		files=("${files[@]}" "${newfile}")
		srcfile="myspell-${filestem}-${PV}.zip"
		# Space-join the mirror URIs.
		[[ -z ${uris} ]] &&
			uris="mirror://gentoo/${srcfile}" ||
			uris="${uris} mirror://gentoo/${srcfile}"
	done
	echo "${uris}"
}
+
+
+[[ -z ${SRC_URI} ]] && SRC_URI=$(get_myspell_ooo_uri)
+
+# Format of dictionary.lst files (from OOo standard
+# dictionary.lst file):
+#
+# List of All Dictionaries to be Loaded by OpenOffice
+# ---------------------------------------------------
+# Each Entry in the list have the following space delimited fields
+#
+# Field 0: Entry Type "DICT" - spellchecking dictionary
+# "HYPH" - hyphenation dictionary
+# "THES" - thesaurus files
+#
+# Field 1: Language code from Locale "en" or "de" or "pt" ...
+#
+# Field 2: Country Code from Locale "US" or "GB" or "PT"
+#
+# Field 3: Root name of file(s) "en_US" or "hyph_de" or "th_en_US"
+# (do not add extensions to the name)
+
+# Format of MYSPELL_[SPELLING|HYPHENATION|THESAURUS]_DICTIONARIES:
+#
+# Field 0: Language code
+# Field 1: Country code
+# Field 2: Root name of dictionary files
+# Field 3: Description
+# Field 4: Archive filename
+#
+# This format is from the available.lst, hyphavail.lst and
+# thesavail.lst files on the openoffice.org repository.
+
# Install the files listed in the MYSPELL_*_DICTIONARIES arrays and
# generate the per-language dictionary.lst.<lang> fragment consumed by
# pkg_postinst (field layout documented above).
myspell_src_install() {
	local filen fields entry dictlst
	cd "${WORKDIR}"
	# Install the dictionary, hyphenation and thesaurus files.
	# Create dictionary.lst.<lang> file containing the parts of
	# OOo's dictionary.lst file for this language, indicating
	# which dictionaries are relevant for each country variant
	# of the language.
	insinto ${MYSPELL_DICTBASE}
	dictlst="dictionary.lst.$(get_myspell_lang)"
	echo "# Autogenerated by ${CATEGORY}/${P}" > ${dictlst}
	for entry in "${MYSPELL_SPELLING_DICTIONARIES[@]}"; do
		set_fields "${entry}" ","
		# Field 2 is the file stem; DICT entries need .aff + .dic.
		echo "DICT ${fields[0]} ${fields[1]} ${fields[2]}" >> ${dictlst}
		doins ${fields[2]}.aff || die "Missing ${fields[2]}.aff"
		doins ${fields[2]}.dic || die "Missing ${fields[2]}.dic"
	done
	for entry in "${MYSPELL_HYPHENATION_DICTIONARIES[@]}"; do
		set_fields "${entry}" ","
		echo "HYPH ${fields[0]} ${fields[1]} ${fields[2]}" >> ${dictlst}
		doins ${fields[2]}.dic || die "Missing ${fields[2]}.dic"
	done
	for entry in "${MYSPELL_THESAURUS_DICTIONARIES[@]}"; do
		set_fields "${entry}" ","
		echo "THES ${fields[0]} ${fields[1]} ${fields[2]}" >> ${dictlst}
		doins ${fields[2]}.dat || die "Missing ${fields[2]}.dat"
		doins ${fields[2]}.idx || die "Missing ${fields[2]}.idx"
	done
	doins ${dictlst} || die "Failed to install ${dictlst}"
	# Install any txt files (usually README.txt) as documentation
	for filen in *.txt; do
		[[ -s ${filen} ]] && dodoc ${filen}
	done
}
+
+
# Add entries in dictionary.lst.<lang> to OOo dictionary.lst
# and create softlinks indicated by dictionary.lst.<lang>
myspell_pkg_postinst() {
	# Update for known applications
	if has_version ">=app-eselect/eselect-oodict-20060706"; then
		if has_version app-office/openoffice; then
			eselect oodict set myspell-$(get_myspell_lang)
		fi
		if has_version app-office/openoffice-bin; then
			# On AMD64, openoffice-bin is 32-bit so force ABI
			has_multilib_profile && ABI=x86
			eselect oodict set myspell-$(get_myspell_lang) --libdir $(get_libdir)
		fi
		return
	fi
	if has_version app-eselect/eselect-oodict; then
		eselect oodict set myspell-$(get_myspell_lang)
		return
	fi

	# Legacy code for dictionaries installed before eselect-oodict existed
	# so has to remain for binpkg support. This code is unmaintained -
	# if you have a problem with it, emerge app-eselect/eselect-oodict.
	[[ -d ${MYSPELL_OOOBASE} ]] || return
	# This stuff is here, not in src_install, as the softlinks are
	# deliberately _not_ listed in the package database.
	local dictlst entry fields prefix suffix suffixes filen
	# Note; can only reach this point if ${MYSPELL_DICTBASE}/${dictlst}
	# was successfully installed
	dictlst="dictionary.lst.$(get_myspell_lang)"
	while read entry; do
		fields=(${entry})
		# Skip comment lines in the fragment.
		[[ ${fields[0]:0:1} == "#" ]] && continue
		[[ -f ${MYSPELL_OOOBASE}/dictionary.lst ]] || \
			touch ${MYSPELL_OOOBASE}/dictionary.lst
		# Append the entry to OOo's dictionary.lst unless present.
		grep "^${fields[0]} ${fields[1]} ${fields[2]} " \
			${MYSPELL_OOOBASE}/dictionary.lst > /dev/null 2>&1 ||
			echo "${entry}" >> ${MYSPELL_OOOBASE}/dictionary.lst
		# Refresh the symlinks for each file suffix of this entry type.
		for suffix in $(get_myspell_suffixes ${fields[0]}); do
			filen="${fields[3]}${suffix}"
			[[ -h ${MYSPELL_OOOBASE}/${filen} ]] &&
				rm -f ${MYSPELL_OOOBASE}/${filen}
			[[ ! -f ${MYSPELL_OOOBASE}/${filen} ]] &&
				ln -s ${MYSPELL_DICTBASE}/${filen} \
					${MYSPELL_OOOBASE}/${filen}
		done
	done < ${MYSPELL_DICTBASE}/${dictlst}
}
+
+
# Remove softlinks and entries in dictionary.lst - uses
# dictionary.<lang>.lst from /usr/share/myspell
# Done in preinst (prerm happens after postinst, which overwrites
# the dictionary.<lang>.lst file)
myspell_pkg_preinst() {
	# Update for known applications
	if has_version ">=app-eselect/eselect-oodict-20060706"; then
		if has_version app-office/openoffice; then
			# When building from source, the default library path is correct
			eselect oodict unset myspell-$(get_myspell_lang)
		fi
		if has_version app-office/openoffice-bin; then
			# On AMD64, openoffice-bin is 32-bit, so get 32-bit library directory
			has_multilib_profile && ABI=x86
			eselect oodict unset myspell-$(get_myspell_lang) --libdir $(get_libdir)
		fi
		# NOTE(review): dropped a stray duplicate of the --libdir unset
		# that ran unconditionally here — it repeated the call above
		# when openoffice-bin was installed and passed a bogus --libdir
		# when neither flavour was (pkg_postinst has no such call).
		return
	fi
	# Previous versions of eselect-oodict didn't cater for -bin on amd64
	if has_version app-eselect/eselect-oodict; then
		eselect oodict unset myspell-$(get_myspell_lang)
		return
	fi

	# Legacy code for dictionaries installed before eselect-oodict existed
	# Don't delete this; needed for uninstalls and binpkg support.
	# This code is unmaintained - if you have a problem with it,
	# emerge app-eselect/eselect-oodict.
	local filen dictlst entry fields removeentry suffix
	dictlst="dictionary.lst.$(get_myspell_lang)"
	[[ -d ${MYSPELL_OOOBASE} ]] || return
	[[ -f ${MYSPELL_DICTBASE}/${dictlst} ]] || return
	while read entry; do
		fields=(${entry})
		[[ ${fields[0]:0:1} == "#" ]] && continue
		[[ ${fields[3]} == "" ]] && continue
		# Remove entry from dictionary.lst
		sed -i -e "/^${fields[0]} ${fields[1]} ${fields[2]} ${fields[3]}$/ { d }" \
			${MYSPELL_OOOBASE}/dictionary.lst
		# See if any other entries in dictionary.lst match the current
		# dictionary type and filename.  Redirection order fixed: the
		# old `2>&1 > /dev/null` sent stderr to the terminal, not to
		# /dev/null.
		grep "^${fields[0]} .* ${fields[3]}$" ${MYSPELL_OOOBASE}/dictionary.lst \
			> /dev/null 2>&1 && continue
		# If no other entries match, remove relevant symlinks
		for suffix in $(get_myspell_suffixes ${fields[0]}); do
			filen="${fields[3]}${suffix}"
			ewarn "Removing entry ${MYSPELL_OOOBASE}/${filen}"
			[[ -h ${MYSPELL_OOOBASE}/${filen} ]] &&
				rm -f ${MYSPELL_OOOBASE}/${filen}
		done
	done < ${MYSPELL_DICTBASE}/${dictlst}
}
diff --git a/eclass/mysql-cmake.eclass b/eclass/mysql-cmake.eclass
new file mode 100644
index 000000000000..8d026b0b4c3c
--- /dev/null
+++ b/eclass/mysql-cmake.eclass
@@ -0,0 +1,534 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mysql-cmake.eclass
+# @MAINTAINER:
+# Maintainers:
+# - MySQL Team <mysql-bugs@gentoo.org>
+# - Robin H. Johnson <robbat2@gentoo.org>
+# - Jorge Manuel B. S. Vicetto <jmbsvicetto@gentoo.org>
+# - Brian Evans <grknight@gentoo.org>
+# @BLURB: This eclass provides the support for cmake based mysql releases
+# @DESCRIPTION:
+# The mysql-cmake.eclass provides the support to build the mysql
+# ebuilds using the cmake build system. This eclass provides
+# the src_prepare, src_configure, src_compile, and src_install
+# phase hooks.
+
+inherit cmake-utils flag-o-matic multilib prefix eutils
+
+#
+# HELPER FUNCTIONS:
+#
+
+# @FUNCTION: mysql_cmake_disable_test
+# @DESCRIPTION:
+# Helper function to disable specific tests.
+mysql-cmake_disable_test() {
+
+ local rawtestname testname testsuite reason mysql_disabled_file mysql_disabled_dir
+ rawtestname="${1}" ; shift
+ reason="${@}"
+ ewarn "test '${rawtestname}' disabled: '${reason}'"
+
+ testsuite="${rawtestname/.*}"
+ testname="${rawtestname/*.}"
+ for mysql_disabled_file in \
+ ${S}/mysql-test/disabled.def \
+ ${S}/mysql-test/t/disabled.def ; do
+ [[ -f ${mysql_disabled_file} ]] && break
+ done
+ #mysql_disabled_file="${S}/mysql-test/t/disabled.def"
+ #einfo "rawtestname=${rawtestname} testname=${testname} testsuite=${testsuite}"
+ echo ${testname} : ${reason} >> "${mysql_disabled_file}"
+
+ if [[ ( -n ${testsuite} ) && ( ${testsuite} != "main" ) ]]; then
+ for mysql_disabled_file in \
+ ${S}/mysql-test/suite/${testsuite}/disabled.def \
+ ${S}/mysql-test/suite/${testsuite}/t/disabled.def \
+ FAILED ; do
+ [[ -f ${mysql_disabled_file} ]] && break
+ done
+ if [[ ${mysql_disabled_file} != "FAILED" ]]; then
+ echo "${testname} : ${reason}" >> "${mysql_disabled_file}"
+ else
+ for mysql_disabled_dir in \
+ ${S}/mysql-test/suite/${testsuite} \
+ ${S}/mysql-test/suite/${testsuite}/t \
+ FAILED ; do
+ [[ -d ${mysql_disabled_dir} ]] && break
+ done
+ if [[ ${mysql_disabled_dir} != "FAILED" ]]; then
+ echo "${testname} : ${reason}" >> "${mysql_disabled_dir}/disabled.def"
+ else
+ ewarn "Could not find testsuite disabled.def location for ${rawtestname}"
+ fi
+ fi
+ fi
+}
+
+# @FUNCTION: mysql-cmake_use_plugin
+# @DESCRIPTION:
+# Helper function to enable/disable plugins by use flags
+# cmake-utils_use_with is not enough as some references check WITH_ (0|1)
+# and some check WITHOUT_. Also, this can easily extend to non-storage plugins.
+mysql-cmake_use_plugin() {
+	# Both the USE flag ($1) and the plugin's cmake name ($2) are required.
+	[[ -z $2 ]] && die "mysql-cmake_use_plugin <USE flag> <flag name>"
+	local useflag=$1 plugin=$2
+	# Emit every spelling the various CMake checks look at (WITH_*, WITHOUT_*
+	# and PLUGIN_*) so the flag takes effect regardless of which one is read.
+	if use_if_iuse ${useflag} ; then
+		echo "-DWITH_${plugin}=1 -DPLUGIN_${plugin}=YES"
+	else
+		echo "-DWITHOUT_${plugin}=1 -DWITH_${plugin}=0 -DPLUGIN_${plugin}=NO"
+	fi
+}
+
+# @FUNCTION: configure_cmake_locale
+# @DESCRIPTION:
+# Helper function to configure locale cmake options
+configure_cmake_locale() {
+
+	# Charset/collation options only matter when a server is being built;
+	# minimal builds skip them entirely.
+	if use_if_iuse minimal ; then
+		:
+	elif ! in_iuse server || use_if_iuse server ; then
+		# User-forced charset/collation pair: honored, but unsupported.
+		if [[ ( -n ${MYSQL_DEFAULT_CHARSET} ) && ( -n ${MYSQL_DEFAULT_COLLATION} ) ]]; then
+			ewarn "You are using a custom charset of ${MYSQL_DEFAULT_CHARSET}"
+			ewarn "and a collation of ${MYSQL_DEFAULT_COLLATION}."
+			ewarn "You MUST file bugs without these variables set."
+
+			mycmakeargs+=(
+				-DDEFAULT_CHARSET=${MYSQL_DEFAULT_CHARSET}
+				-DDEFAULT_COLLATION=${MYSQL_DEFAULT_COLLATION}
+			)
+
+		# Default: utf8 unless USE=latin1 requests the upstream legacy default.
+		elif ! use latin1 ; then
+			mycmakeargs+=(
+				-DDEFAULT_CHARSET=utf8
+				-DDEFAULT_COLLATION=utf8_general_ci
+			)
+		else
+			mycmakeargs+=(
+				-DDEFAULT_CHARSET=latin1
+				-DDEFAULT_COLLATION=latin1_swedish_ci
+			)
+		fi
+	fi
+}
+
+# @FUNCTION: configure_cmake_minimal
+# @DESCRIPTION:
+# Helper function to configure minimal build
+configure_cmake_minimal() {
+
+	local engine
+	# Minimal build: no server, no embedded server, no sql-bench, and only
+	# the bare charset set.
+	mycmakeargs+=(
+		-DWITHOUT_SERVER=1
+		-DWITHOUT_EMBEDDED_SERVER=1
+		-DEXTRA_CHARSETS=none
+		-DINSTALL_SQLBENCHDIR=
+	)
+	# Disable every storage engine, first via the WITHOUT_* spelling ...
+	for engine in ARCHIVE BLACKHOLE CSV FEDERATED HEAP INNOBASE MYISAMMRG MYISAM PARTITION ; do
+		mycmakeargs+=( -DWITHOUT_${engine}_STORAGE_ENGINE=1 )
+	done
+	# ... then via the PLUGIN_* spelling checked by newer build files.
+	for engine in ARCHIVE BLACKHOLE CSV FEDERATED HEAP INNOBASE MYISAMMRG MYISAM PARTITION ; do
+		mycmakeargs+=( -DPLUGIN_${engine}=NO )
+	done
+}
+
+# @FUNCTION: configure_cmake_standard
+# @DESCRIPTION:
+# Helper function to configure standard build
+configure_cmake_standard() {
+
+	# Common options for a full (non-minimal) build.
+	mycmakeargs+=(
+		-DEXTRA_CHARSETS=all
+		-DMYSQL_USER=mysql
+		-DMYSQL_UNIX_ADDR=${EPREFIX}/var/run/mysqld/mysqld.sock
+		$(cmake-utils_use_disable !static SHARED)
+		$(cmake-utils_use_with debug)
+		$(cmake-utils_use_with embedded EMBEDDED_SERVER)
+		$(cmake-utils_use_with profiling)
+		$(cmake-utils_use_enable systemtap DTRACE)
+	)
+
+	if use static; then
+		mycmakeargs+=( -DWITH_PIC=1 )
+	fi
+
+	# Both alternative allocators turn SAFEMALLOC off (they are linked in
+	# during src_prepare).
+	if use jemalloc; then
+		mycmakeargs+=( -DWITH_SAFEMALLOC=OFF )
+	fi
+
+	if use tcmalloc; then
+		mycmakeargs+=( -DWITH_SAFEMALLOC=OFF )
+	fi
+
+	# Storage engines
+	mycmakeargs+=(
+		-DWITH_ARCHIVE_STORAGE_ENGINE=1
+		-DWITH_BLACKHOLE_STORAGE_ENGINE=1
+		-DWITH_CSV_STORAGE_ENGINE=1
+		-DWITH_HEAP_STORAGE_ENGINE=1
+		-DWITH_INNOBASE_STORAGE_ENGINE=1
+		-DWITH_MYISAMMRG_STORAGE_ENGINE=1
+		-DWITH_MYISAM_STORAGE_ENGINE=1
+		-DWITH_PARTITION_STORAGE_ENGINE=1
+	)
+
+	if in_iuse pbxt ; then
+		mycmakeargs+=( $(cmake-utils_use_with pbxt PBXT_STORAGE_ENGINE) )
+	fi
+
+	# MariaDB-specific plugins and engines.
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+
+		# Federated{,X} must be treated special otherwise they will not be built as plugins
+		if ! use extraengine ; then
+			mycmakeargs+=(
+				-DWITHOUT_FEDERATED_STORAGE_ENGINE=1
+				-DPLUGIN_FEDERATED=0
+				-DWITHOUT_FEDERATEDX_STORAGE_ENGINE=1
+				-DPLUGIN_FEDERATEDX=0 )
+		fi
+
+		mycmakeargs+=(
+			$(mysql-cmake_use_plugin oqgraph OQGRAPH)
+			$(mysql-cmake_use_plugin sphinx SPHINX)
+			$(mysql-cmake_use_plugin tokudb TOKUDB)
+			$(mysql-cmake_use_plugin pam AUTH_PAM)
+		)
+
+		if mysql_version_is_at_least 10.0.5 ; then
+			# CassandraSE needs Apache Thrift which is not in portage
+			mycmakeargs+=(
+				-DWITHOUT_CASSANDRA=1 -DWITH_CASSANDRA=0
+				-DPLUGIN_CASSANDRA=NO
+				$(mysql-cmake_use_plugin extraengine SEQUENCE)
+				$(mysql-cmake_use_plugin extraengine SPIDER)
+				$(mysql-cmake_use_plugin extraengine CONNECT)
+				-DCONNECT_WITH_MYSQL=1
+				$(cmake-utils_use xml CONNECT_WITH_LIBXML2)
+				$(cmake-utils_use odbc CONNECT_WITH_ODBC)
+			)
+		fi
+
+		# Mroonga is disabled whenever the flag is absent or unset.
+		if in_iuse mroonga ; then
+			use mroonga || mycmakeargs+=( -DWITHOUT_MROONGA=1 )
+		else
+			mycmakeargs+=( -DWITHOUT_MROONGA=1 )
+		fi
+
+		if in_iuse galera ; then
+			mycmakeargs+=( $(cmake-utils_use_with galera WSREP) )
+		fi
+
+		# InnoDB page compression backends (introduced in 10.1.1).
+		if mysql_version_is_at_least "10.1.1" ; then
+			mycmakeargs+=( $(cmake-utils_use_with innodb-lz4 INNODB_LZ4)
+				$(cmake-utils_use_with innodb-lzo INNODB_LZO) )
+		fi
+
+		if mysql_version_is_at_least "10.1.2" ; then
+			mycmakeargs+=( $(mysql-cmake_use_plugin cracklib CRACKLIB_PASSWORD_CHECK ) )
+		fi
+	else
+		mycmakeargs+=( $(cmake-utils_use_with extraengine FEDERATED_STORAGE_ENGINE) )
+	fi
+
+	if [[ ${PN} == "percona-server" ]]; then
+		mycmakeargs+=(
+			$(cmake-utils_use_with pam)
+		)
+	fi
+
+	if [[ ${PN} == "mysql-cluster" ]]; then
+		# TODO: This really should include the following options,
+		# but the memcached package doesn't install the files it seeks.
+		# -DWITH_BUNDLED_MEMCACHED=OFF
+		# -DMEMCACHED_HOME=${EPREFIX}/usr
+		mycmakeargs+=(
+			-DWITH_BUNDLED_LIBEVENT=OFF
+			$(cmake-utils_use_with java NDB_JAVA)
+		)
+	fi
+}
+
+#
+# EBUILD FUNCTIONS
+#
+
+# @FUNCTION: mysql-cmake_src_prepare
+# @DESCRIPTION:
+# Apply patches to the source code and remove unneeded bundled libs.
+mysql-cmake_src_prepare() {
+
+	debug-print-function ${FUNCNAME} "$@"
+
+	local i
+
+	# Abort early if the source directory is missing.
+	cd "${S}" || die
+
+	if [[ ${MY_EXTRAS_VER} != none ]]; then
+
+		# Apply the patches for this MySQL version
+		EPATCH_SUFFIX="patch"
+		mkdir -p "${EPATCH_SOURCE}" || die "Unable to create epatch directory"
+		# Clean out old items
+		rm -f "${EPATCH_SOURCE}"/*
+		# Now link in right patches
+		mysql_mv_patches
+		# And apply
+		epatch
+	fi
+
+	# last -fPIC fixup, per bug #305873
+	i="${S}"/storage/innodb_plugin/plug.in
+	[[ -f ${i} ]] && sed -i -e '/CFLAGS/s,-prefer-non-pic,,g' "${i}"
+
+	rm -f "scripts/mysqlbug"
+	if use jemalloc && ! ( [[ ${PN} == "mariadb" ]] && mysql_version_is_at_least "5.5.33" ); then
+		echo "TARGET_LINK_LIBRARIES(mysqld jemalloc)" >> "${S}/sql/CMakeLists.txt" || die
+	fi
+
+	if use tcmalloc; then
+		# Match the jemalloc branch above: abort if the append fails.
+		echo "TARGET_LINK_LIBRARIES(mysqld tcmalloc)" >> "${S}/sql/CMakeLists.txt" || die
+	fi
+
+	if in_iuse tokudb ; then
+		# Don't build bundled xz-utils
+		rm -f "${S}/storage/tokudb/ft-index/cmake_modules/TokuThirdParty.cmake"
+		touch "${S}/storage/tokudb/ft-index/cmake_modules/TokuThirdParty.cmake"
+		sed -i 's/ build_lzma//' "${S}/storage/tokudb/ft-index/ft/CMakeLists.txt" || die
+	fi
+
+	# Remove the bundled groonga if it exists
+	# There is no CMake flag, it simply checks for existance
+	if [[ -d "${S}"/storage/mroonga/vendor/groonga ]] ; then
+		rm -r "${S}"/storage/mroonga/vendor/groonga || die "could not remove packaged groonga"
+	fi
+
+	epatch_user
+}
+
+# @FUNCTION: mysql-cmake_src_configure
+# @DESCRIPTION:
+# Configure mysql to build the code for Gentoo respecting the use flags.
+mysql-cmake_src_configure() {
+
+	debug-print-function ${FUNCNAME} "$@"
+
+	CMAKE_BUILD_TYPE="RelWithDebInfo"
+
+	# debug hack wrt #497532
+	mycmakeargs=(
+		-DCMAKE_C_FLAGS_RELWITHDEBINFO="$(usex debug "" "-DNDEBUG")"
+		-DCMAKE_CXX_FLAGS_RELWITHDEBINFO="$(usex debug "" "-DNDEBUG")"
+		-DCMAKE_INSTALL_PREFIX=${EPREFIX}/usr
+		-DMYSQL_DATADIR=${EPREFIX}/var/lib/mysql
+		-DSYSCONFDIR=${EPREFIX}/etc/mysql
+		-DINSTALL_BINDIR=bin
+		-DINSTALL_DOCDIR=share/doc/${P}
+		-DINSTALL_DOCREADMEDIR=share/doc/${P}
+		-DINSTALL_INCLUDEDIR=include/mysql
+		-DINSTALL_INFODIR=share/info
+		-DINSTALL_LIBDIR=$(get_libdir)
+		-DINSTALL_ELIBDIR=$(get_libdir)/mysql
+		-DINSTALL_MANDIR=share/man
+		-DINSTALL_MYSQLDATADIR=${EPREFIX}/var/lib/mysql
+		-DINSTALL_MYSQLSHAREDIR=share/mysql
+		-DINSTALL_MYSQLTESTDIR=share/mysql/mysql-test
+		-DINSTALL_PLUGINDIR=$(get_libdir)/mysql/plugin
+		-DINSTALL_SBINDIR=sbin
+		-DINSTALL_SCRIPTDIR=share/mysql/scripts
+		-DINSTALL_SQLBENCHDIR=share/mysql
+		-DINSTALL_SUPPORTFILESDIR=${EPREFIX}/usr/share/mysql
+		-DWITH_COMMENT="Gentoo Linux ${PF}"
+		$(cmake-utils_use_with test UNIT_TESTS)
+		-DWITH_LIBEDIT=0
+		-DWITH_ZLIB=system
+		-DWITHOUT_LIBWRAP=1
+		-DENABLED_LOCAL_INFILE=1
+		$(cmake-utils_use_enable static-libs STATIC_LIBS)
+		-DWITH_SSL=$(usex ssl system bundled)
+		-DWITH_DEFAULT_COMPILER_OPTIONS=0
+		-DWITH_DEFAULT_FEATURE_SET=0
+	)
+
+	# Only packages with the bindist flag in IUSE toggle readline/BFD bits.
+	if in_iuse bindist ; then
+		mycmakeargs+=(
+			-DWITH_READLINE=$(usex bindist 1 0)
+			-DNOT_FOR_DISTRIBUTION=$(usex bindist 0 1)
+			$(usex bindist -DHAVE_BFD_H=0 '')
+		)
+	fi
+
+	mycmakeargs+=( -DWITH_EDITLINE=system )
+
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+		mycmakeargs+=(
+			-DWITH_JEMALLOC=$(usex jemalloc system)
+		)
+		mysql_version_is_at_least "10.0.9" && mycmakeargs+=( -DWITH_PCRE=system )
+	fi
+
+	configure_cmake_locale
+
+	# Branch into the minimal or standard option set (helpers above).
+	if use_if_iuse minimal ; then
+		configure_cmake_minimal
+	else
+		configure_cmake_standard
+	fi
+
+	# Bug #114895, bug #110149
+	filter-flags "-O" "-O[01]"
+
+	CXXFLAGS="${CXXFLAGS} -fno-strict-aliasing"
+	CXXFLAGS="${CXXFLAGS} -felide-constructors"
+	# Causes linkage failures. Upstream bug #59607 removes it
+	if ! mysql_version_is_at_least "5.6" ; then
+		CXXFLAGS="${CXXFLAGS} -fno-implicit-templates"
+	fi
+	# As of 5.7, exceptions and rtti are used!
+	if ! mysql_version_is_at_least "5.7" ; then
+		CXXFLAGS="${CXXFLAGS} -fno-exceptions -fno-rtti"
+	fi
+	export CXXFLAGS
+
+	# bug #283926, with GCC4.4, this is required to get correct behavior.
+	append-flags -fno-strict-aliasing
+
+	cmake-utils_src_configure
+}
+
+# @FUNCTION: mysql-cmake_src_compile
+# @DESCRIPTION:
+# Compile the mysql code.
+mysql-cmake_src_compile() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# The whole build is driven by cmake; simply delegate.
+	cmake-utils_src_compile
+}
+
+# @FUNCTION: mysql-cmake_src_install
+# @DESCRIPTION:
+# Install mysql.
+mysql-cmake_src_install() {
+
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Make sure the vars are correctly initialized
+	mysql_init_vars
+
+	cmake-utils_src_install
+
+	if ! in_iuse tools || use_if_iuse tools ; then
+		# Convenience links
+		einfo "Making Convenience links for mysqlcheck multi-call binary"
+		dosym "/usr/bin/mysqlcheck" "/usr/bin/mysqlanalyze"
+		dosym "/usr/bin/mysqlcheck" "/usr/bin/mysqlrepair"
+		dosym "/usr/bin/mysqlcheck" "/usr/bin/mysqloptimize"
+	fi
+
+	# Create a mariadb_config symlink
+	[[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && dosym "/usr/bin/mysql_config" "/usr/bin/mariadb_config"
+
+	# INSTALL_LAYOUT=STANDALONE causes cmake to create a /usr/data dir
+	rm -Rf "${ED}/usr/data"
+
+	# Various junk (my-*.cnf moved elsewhere)
+	einfo "Removing duplicate /usr/share/mysql files"
+
+	# Unless they explicitly specify USE=test, then do not install the
+	# testsuite. It DOES have a use to be installed, esp. when you want to do a
+	# validation of your database configuration after tuning it.
+	if ! use test ; then
+		rm -rf "${D}"/${MY_SHAREDSTATEDIR}/mysql-test
+	fi
+
+	# Configuration stuff
+	# Pick the my.cnf template matching this (mapped) major version.
+	case ${MYSQL_PV_MAJOR} in
+		5.[1-4]*) mysql_mycnf_version="5.1" ;;
+		5.5) mysql_mycnf_version="5.5" ;;
+		5.[6-9]|6*|7*|8*|9*|10*) mysql_mycnf_version="5.6" ;;
+	esac
+	einfo "Building default my.cnf (${mysql_mycnf_version})"
+	insinto "${MY_SYSCONFDIR#${EPREFIX}}"
+	[[ -f "${S}/scripts/mysqlaccess.conf" ]] && doins "${S}"/scripts/mysqlaccess.conf
+	mycnf_src="my.cnf-${mysql_mycnf_version}"
+	# Substitute the data dir into the template, then adjust for prefix,
+	# latin1 and EPREFIX before installing it as my.cnf.
+	sed -e "s!@DATADIR@!${MY_DATADIR}!g" \
+		"${FILESDIR}/${mycnf_src}" \
+		> "${TMPDIR}/my.cnf.ok" || die
+	use prefix && sed -i -r -e '/^user[[:space:]]*=[[:space:]]*mysql$/d' "${TMPDIR}/my.cnf.ok"
+	if use latin1 ; then
+		sed -i \
+			-e "/character-set/s|utf8|latin1|g" \
+			"${TMPDIR}/my.cnf.ok" || die
+	fi
+	eprefixify "${TMPDIR}/my.cnf.ok"
+	newins "${TMPDIR}/my.cnf.ok" my.cnf
+
+	# Minimal builds don't have the MySQL server
+	if use_if_iuse minimal ; then
+		:
+	elif ! in_iuse server || use_if_iuse server ; then
+		einfo "Creating initial directories"
+		# Empty directories ...
+		diropts "-m0750"
+		if [[ ${PREVIOUS_DATADIR} != "yes" ]] ; then
+			dodir "${MY_DATADIR#${EPREFIX}}"
+			keepdir "${MY_DATADIR#${EPREFIX}}"
+			chown -R mysql:mysql "${D}/${MY_DATADIR}"
+		fi
+
+		diropts "-m0755"
+		for folder in "${MY_LOGDIR#${EPREFIX}}" ; do
+			dodir "${folder}"
+			keepdir "${folder}"
+			chown -R mysql:mysql "${ED}/${folder}"
+		done
+	fi
+
+	# Minimal builds don't have the MySQL server
+	if use_if_iuse minimal ; then
+		:
+	elif ! in_iuse server || use_if_iuse server; then
+		einfo "Including support files and sample configurations"
+		docinto "support-files"
+		for script in \
+			"${S}"/support-files/my-*.cnf.sh \
+			"${S}"/support-files/magic \
+			"${S}"/support-files/ndb-config-2-node.ini.sh
+		do
+			[[ -f $script ]] && dodoc "${script}"
+		done
+
+		docinto "scripts"
+		# Only non-.sh scripts are documented here.
+		for script in "${S}"/scripts/mysql* ; do
+			[[ ( -f $script ) && ( ${script%.sh} == ${script} ) ]] && dodoc "${script}"
+		done
+	fi
+
+	#Remove mytop if perl is not selected
+	[[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && ! use perl \
+		&& rm -f "${ED}/usr/bin/mytop"
+
+	# Everything below only applies when the client libraries are installed.
+	in_iuse client-libs && ! use client-libs && return
+
+	# Percona has decided to rename libmysqlclient to libperconaserverclient
+	# Use a symlink to preserve linkages for those who don't use mysql_config
+	if [[ ${PN} == "percona-server" ]] && mysql_version_is_at_least "5.5.36" ; then
+		dosym libperconaserverclient.so /usr/$(get_libdir)/libmysqlclient.so
+		dosym libperconaserverclient.so /usr/$(get_libdir)/libmysqlclient_r.so
+		if use static-libs ; then
+			dosym libperconaserverclient.a /usr/$(get_libdir)/libmysqlclient.a
+			dosym libperconaserverclient.a /usr/$(get_libdir)/libmysqlclient_r.a
+		fi
+	fi
+}
diff --git a/eclass/mysql-multilib.eclass b/eclass/mysql-multilib.eclass
new file mode 100644
index 000000000000..9765aacee7ba
--- /dev/null
+++ b/eclass/mysql-multilib.eclass
@@ -0,0 +1,1113 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mysql-multilib.eclass
+# @MAINTAINER:
+# Maintainers:
+# - MySQL Team <mysql-bugs@gentoo.org>
+# - Robin H. Johnson <robbat2@gentoo.org>
+# - Jorge Manuel B. S. Vicetto <jmbsvicetto@gentoo.org>
+# - Brian Evans <grknight@gentoo.org>
+# @BLURB: This eclass provides most of the functions for mysql ebuilds
+# @DESCRIPTION:
+# The mysql-multilib.eclass is the base eclass to build the mysql and
+# alternative projects (mariadb and percona) ebuilds.
+# This eclass uses the mysql-cmake eclass for the
+# specific bits related to the build system.
+# It provides the src_unpack, src_prepare, src_configure, src_compile,
+# src_install, pkg_preinst, pkg_postinst, pkg_config and pkg_postrm
+# phase hooks.
+
+MYSQL_EXTRAS=""
+
+# @ECLASS-VARIABLE: MYSQL_EXTRAS_VER
+# @DESCRIPTION:
+# The version of the MYSQL_EXTRAS repo to use to build mysql
+# Use "none" to disable its use
+[[ ${MY_EXTRAS_VER} == "live" ]] && MYSQL_EXTRAS="git-r3"
+
+inherit eutils flag-o-matic ${MYSQL_EXTRAS} mysql-cmake mysql_fx versionator \
+	toolchain-funcs user cmake-utils multilib-minimal
+
+#
+# Supported EAPI versions and export functions
+#
+
+case "${EAPI:-0}" in
+	5) ;;
+	*) die "Unsupported EAPI: ${EAPI}" ;;
+esac
+
+EXPORT_FUNCTIONS pkg_pretend pkg_setup src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_config
+
+#
+# VARIABLES:
+#
+
+# @ECLASS-VARIABLE: MYSQL_CMAKE_NATIVE_DEFINES
+# @DESCRIPTION:
+# Add extra CMake arguments for native multilib builds
+
+# @ECLASS-VARIABLE: MYSQL_CMAKE_NONNATIVE_DEFINES
+# @DESCRIPTION:
+# Add extra CMake arguments for non-native multilib builds
+
+# @ECLASS-VARIABLE: MYSQL_CMAKE_EXTRA_DEFINES
+# @DESCRIPTION:
+# Add extra CMake arguments
+
+# Shorten the path because the socket path length must be shorter than 107 chars
+# and we will run a mysql server during test phase
+S="${WORKDIR}/mysql"
+
+# "latest" is pinned to a known-good snapshot of the extras repo.
+[[ ${MY_EXTRAS_VER} == "latest" ]] && MY_EXTRAS_VER="20090228-0714Z"
+if [[ ${MY_EXTRAS_VER} == "live" ]]; then
+	EGIT_REPO_URI="git://anongit.gentoo.org/proj/mysql-extras.git"
+	EGIT_CHECKOUT_DIR=${WORKDIR}/mysql-extras
+	EGIT_CLONE_TYPE=shallow
+fi
+
+# @ECLASS-VARIABLE: MYSQL_PV_MAJOR
+# @DESCRIPTION:
+# Upstream MySQL considers the first two parts of the version number to be the
+# major version. Upgrades that change major version should always run
+# mysql_upgrade.
+MYSQL_PV_MAJOR="$(get_version_component_range 1-2 ${PV})"
+
+# Cluster is a special case...
+if [[ "${PN}" == "mysql-cluster" ]]; then
+	case $PV in
+		7.2*) MYSQL_PV_MAJOR=5.5 ;;
+		7.3*) MYSQL_PV_MAJOR=5.6 ;;
+	esac
+fi
+
+# MariaDB has left the numbering schema but keeping compatibility
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+	case ${PV} in
+		10.0*) MYSQL_PV_MAJOR="5.6" ;;
+		10.1*) MYSQL_PV_MAJOR="5.6" ;;
+	esac
+fi
+
+# @ECLASS-VARIABLE: MYSQL_VERSION_ID
+# @DESCRIPTION:
+# MYSQL_VERSION_ID will be:
+# major * 10e6 + minor * 10e4 + micro * 10e2 + gentoo revision number, all [0..99]
+# This is an important part, because many of the choices the MySQL ebuild will do
+# depend on this variable.
+# In particular, the code below transforms a $PVR like "5.0.18-r3" in "5001803"
+# We also strip off upstream's trailing letter that they use to respin tarballs
+MYSQL_VERSION_ID=""
+tpv="${PV%[a-z]}"
+# Split the version into atoms; tpv[3] becomes the Gentoo -rN revision
+# (extracted from the part of PVR beyond PV).
+tpv=( ${tpv//[-._]/ } ) ; tpv[3]="${PVR:${#PV}}" ; tpv[3]="${tpv[3]##*-r}"
+for vatom in 0 1 2 3 ; do
+	# pad to length 2
+	tpv[${vatom}]="00${tpv[${vatom}]}"
+	MYSQL_VERSION_ID="${MYSQL_VERSION_ID}${tpv[${vatom}]:0-2}"
+done
+# strip leading "0" (otherwise it's considered an octal number by BASH)
+MYSQL_VERSION_ID=${MYSQL_VERSION_ID##"0"}
+
+# This eclass should only be used with at least mysql-5.5.35
+mysql_version_is_at_least "5.5.35" || die "This eclass should only be used with >=mysql-5.5.35"
+
+# Work out the default SERVER_URI correctly
+if [[ -z ${SERVER_URI} ]]; then
+	[[ -z ${MY_PV} ]] && MY_PV="${PV//_/-}"
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+		# Beginning with 5.5, MariaDB stopped putting beta, alpha or rc on their tarball names
+		mysql_version_is_at_least "5.5" && MARIA_FULL_PV=$(get_version_component_range 1-3) || \
+			MARIA_FULL_PV=$(replace_version_separator 3 '-' ${MY_PV})
+		MARIA_FULL_P="${PN}-${MARIA_FULL_PV}"
+		SERVER_URI="
+		http://ftp.osuosl.org/pub/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirror.jmu.edu/pub/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirrors.coreix.net/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirrors.syringanetworks.net/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirrors.fe.up.pt/pub/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirror2.hs-esslingen.de/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://ftp.osuosl.org/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirror.jmu.edu/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.coreix.net/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.syringanetworks.net/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.fe.up.pt/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirror2.hs-esslingen.de/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		"
+		# Galera tarballs unpack to the plain mariadb directory name.
+		if [[ ${PN} == "mariadb-galera" ]]; then
+			MY_SOURCEDIR="${PN%%-galera}-${MARIA_FULL_PV}"
+		fi
+	elif [[ ${PN} == "percona-server" ]]; then
+		PERCONA_PN="Percona-Server"
+		MIRROR_PV=$(get_version_component_range 1-2 ${PV})
+		MY_PV=$(get_version_component_range 1-3 ${PV})
+		PERCONA_RELEASE=$(get_version_component_range 4-5 ${PV})
+		PERCONA_RC=$(get_version_component_range 6 ${PV})
+		SERVER_URI="http://www.percona.com/redir/downloads/${PERCONA_PN}-${MIRROR_PV}/${PERCONA_PN}-${MY_PV}-${PERCONA_RC}${PERCONA_RELEASE}/source/tarball/${PN}-${MY_PV}-${PERCONA_RC}${PERCONA_RELEASE}.tar.gz"
+#		http://www.percona.com/redir/downloads/Percona-Server-5.5/LATEST/source/tarball/Percona-Server-5.5.30-rel30.2.tar.gz
+#		http://www.percona.com/redir/downloads/Percona-Server-5.6/Percona-Server-5.6.13-rc60.5/source/tarball/Percona-Server-5.6.13-rc60.5.tar.gz
+	else
+		if [[ "${PN}" == "mysql-cluster" ]] ; then
+			URI_DIR="MySQL-Cluster"
+			URI_FILE="mysql-cluster-gpl"
+		else
+			URI_DIR="MySQL"
+			URI_FILE="mysql"
+		fi
+		URI_A="${URI_FILE}-${MY_PV}.tar.gz"
+		MIRROR_PV=$(get_version_component_range 1-2 ${PV})
+		# Recently upstream switched to an archive site, and not on mirrors
+		SERVER_URI="http://downloads.mysql.com/archives/${URI_FILE}-${MIRROR_PV}/${URI_A}
+			mirror://mysql/Downloads/${URI_DIR}-${PV%.*}/${URI_A}"
+	fi
+fi
+
+# Define correct SRC_URIs
+SRC_URI="${SERVER_URI}"
+
+# Gentoo patches to MySQL
+if [[ ${MY_EXTRAS_VER} != "live" && ${MY_EXTRAS_VER} != "none" ]]; then
+	SRC_URI="${SRC_URI}
+		mirror://gentoo/mysql-extras-${MY_EXTRAS_VER}.tar.bz2
+		http://dev.gentoo.org/~robbat2/distfiles/mysql-extras-${MY_EXTRAS_VER}.tar.bz2
+		http://dev.gentoo.org/~jmbsvicetto/distfiles/mysql-extras-${MY_EXTRAS_VER}.tar.bz2
+		http://dev.gentoo.org/~grknight/distfiles/mysql-extras-${MY_EXTRAS_VER}.tar.bz2"
+fi
+
+DESCRIPTION="A fast, multi-threaded, multi-user SQL database server"
+HOMEPAGE="http://www.mysql.com/"
+if [[ ${PN} == "mariadb" ]]; then
+	HOMEPAGE="http://mariadb.org/"
+	DESCRIPTION="An enhanced, drop-in replacement for MySQL"
+fi
+if [[ ${PN} == "mariadb-galera" ]]; then
+	HOMEPAGE="http://mariadb.org/"
+	DESCRIPTION="An enhanced, drop-in replacement for MySQL with Galera Replication"
+fi
+if [[ ${PN} == "percona-server" ]]; then
+	HOMEPAGE="http://www.percona.com/software/percona-server"
+	DESCRIPTION="An enhanced, drop-in replacement for MySQL from the Percona team"
+fi
+LICENSE="GPL-2"
+SLOT="0/${SUBSLOT:=0}"
+
+IUSE="+community cluster debug embedded extraengine jemalloc latin1
+	+perl profiling selinux ssl systemtap static static-libs tcmalloc test"
+
+### Begin readline/libedit
+### If the world was perfect, we would use external libedit on both to have a similar experience
+### However libedit does not seem to support UTF-8 keyboard input
+
+# This probably could be simplified, but the syntax would have to be just right
+#if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && \
+#	mysql_check_version_range "5.5.37 to 10.0.13.99" ; then
+#	IUSE="bindist ${IUSE}"
+#elif [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] && \
+#	mysql_check_version_range "5.5.37 to 5.6.11.99" ; then
+#	IUSE="bindist ${IUSE}"
+#elif [[ ${PN} == "mysql-cluster" ]] && \
+#	mysql_check_version_range "7.2 to 7.2.99.99" ; then
+#	IUSE="bindist ${IUSE}"
+#fi
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+	IUSE="bindist ${IUSE}"
+	RESTRICT="${RESTRICT} !bindist? ( bindist )"
+fi
+
+### End readline/libedit
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+	IUSE="${IUSE} oqgraph pam sphinx tokudb"
+	# 5.5.33 and 10.0.5 add TokuDB. Authors strongly recommend jemalloc or perfomance suffers
+	mysql_version_is_at_least "10.0.5" && IUSE="${IUSE} odbc xml"
+	if [[ ${HAS_TOOLS_PATCH} ]] ; then
+		REQUIRED_USE="${REQUIRED_USE} !server? ( !oqgraph !sphinx ) tokudb? ( jemalloc )"
+	else
+		REQUIRED_USE="${REQUIRED_USE} minimal? ( !oqgraph !sphinx ) tokudb? ( jemalloc )"
+	fi
+	# MariaDB 10.1 introduces InnoDB/XtraDB compression with external libraries
+	# Choices are bzip2, lz4, lzma, lzo. bzip2 and lzma enabled by default as they are system libraries
+	mysql_version_is_at_least "10.1.1" && IUSE="${IUSE} innodb-lz4 innodb-lzo"
+
+	# 10.1.2 introduces a cracklib password checker
+	mysql_version_is_at_least "10.1.1" && IUSE="${IUSE} cracklib"
+fi
+
+# Galera-capable flavours: extra USE flags for state-snapshot-transfer methods.
+if [[ -n "${WSREP_REVISION}" ]]; then
+	if [[ ${PN} == "mariadb" ]]; then
+		IUSE="${IUSE} galera sst-rsync sst-xtrabackup"
+		REQUIRED_USE="${REQUIRED_USE} sst-rsync? ( galera ) sst-xtrabackup? ( galera )"
+	else
+		IUSE="${IUSE} +sst-rsync sst-xtrabackup"
+	fi
+fi
+
+if [[ ${PN} == "percona-server" ]]; then
+	IUSE="${IUSE} pam"
+fi
+
+# HAS_TOOLS_PATCH ebuilds replace USE=minimal with client-libs/server/tools.
+if [[ ${HAS_TOOLS_PATCH} ]] ; then
+	IUSE="${IUSE} client-libs +server +tools"
+	REQUIRED_USE="${REQUIRED_USE} !server? ( !extraengine !embedded ) server? ( tools ) || ( client-libs server tools )"
+else
+	IUSE="${IUSE} minimal"
+	REQUIRED_USE="${REQUIRED_USE} minimal? ( !extraengine !embedded )"
+fi
+
+REQUIRED_USE="
+	${REQUIRED_USE} tcmalloc? ( !jemalloc ) jemalloc? ( !tcmalloc )
+	static? ( !ssl )"
+
+#
+# DEPENDENCIES:
+#
+
+# Be warned, *DEPEND are version-dependent
+# These are used for both runtime and compiletime
+# MULTILIB_USEDEP only set for libraries used by the client library
+DEPEND="
+	ssl? ( >=dev-libs/openssl-1.0.0:0=[${MULTILIB_USEDEP},static-libs?] )
+	kernel_linux? (
+		sys-process/procps:0=
+		dev-libs/libaio:0=
+	)
+	>=sys-apps/sed-4
+	>=sys-apps/texinfo-4.7-r1
+	!dev-db/mariadb-native-client[mysqlcompat]
+	jemalloc? ( dev-libs/jemalloc:0= )
+	tcmalloc? ( dev-util/google-perftools:0= )
+	systemtap? ( >=dev-util/systemtap-1.3:0= )
+"
+
+# With HAS_TOOLS_PATCH, ssl/zlib only need multilib when client-libs is set.
+if [[ ${HAS_TOOLS_PATCH} ]] ; then
+	DEPEND+="
+		client-libs? (
+			ssl? ( >=dev-libs/openssl-1.0.0:0=[${MULTILIB_USEDEP},static-libs?] )
+			>=sys-libs/zlib-1.2.3:0=[${MULTILIB_USEDEP},static-libs?]
+		)
+		!client-libs? (
+			ssl? ( >=dev-libs/openssl-1.0.0:0=[static-libs?] )
+			>=sys-libs/zlib-1.2.3:0=[static-libs?]
+		)
+		tools? ( sys-libs/ncurses ) embedded? ( sys-libs/ncurses )
+	"
+else
+	DEPEND+="
+		ssl? ( >=dev-libs/openssl-1.0.0:0=[${MULTILIB_USEDEP},static-libs?] )
+		>=sys-libs/zlib-1.2.3:0=[${MULTILIB_USEDEP},static-libs?]
+		sys-libs/ncurses[${MULTILIB_USEDEP}]
+	"
+fi
+
+### Begin readline/libedit
+### If the world was perfect, we would use external libedit on both to have a similar experience
+### However libedit does not seem to support UTF-8 keyboard input
+
+# dev-db/mysql-5.6.12+ only works with dev-libs/libedit
+# mariadb 10.0.14 fixes libedit detection. changed to follow mysql
+# This probably could be simplified
+#if [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] && \
+#	mysql_version_is_at_least "5.6.12" ; then
+#	DEPEND="${DEPEND} dev-libs/libedit:0=[${MULTILIB_USEDEP}]"
+#elif [[ ${PN} == "mysql-cluster" ]] && mysql_version_is_at_least "7.3"; then
+#	DEPEND="${DEPEND} dev-libs/libedit:0=[${MULTILIB_USEDEP}]"
+#elif [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && \
+#	mysql_version_is_at_least "10.0.14" ; then
+#	DEPEND="${DEPEND} dev-libs/libedit:0=[${MULTILIB_USEDEP}]"
+#else
+#	DEPEND="${DEPEND} !bindist? ( >=sys-libs/readline-4.1:0=[${MULTILIB_USEDEP}] )"
+#fi
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+	# Readline is only used for the command-line and embedded example
+	if [[ ${HAS_TOOLS_PATCH} ]] ; then
+		DEPEND="${DEPEND} !bindist? ( tools? ( >=sys-libs/readline-4.1:0= ) embedded? ( >=sys-libs/readline-4.1:0= ) )"
+	else
+		DEPEND="${DEPEND} !bindist? ( >=sys-libs/readline-4.1:0=[${MULTILIB_USEDEP}] )"
+	fi
+fi
+
+### End readline/libedit
+
+# 5.7.5+ needs boost; the minimum version was raised again in 5.7.6.
+if [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] ; then
+	if mysql_version_is_at_least "5.7.6" ; then DEPEND="${DEPEND} >=dev-libs/boost-1.57.0:0=" ; else
+		mysql_version_is_at_least "5.7.5" && DEPEND="${DEPEND} >=dev-libs/boost-1.56.0:0="
+	fi
+fi
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+	# Bug 441700 MariaDB >=5.3 include custom mytop
+	if [[ ${HAS_TOOLS_PATCH} ]] ; then
+		DEPEND="${DEPEND} server? ( pam? ( virtual/pam:0= ) )"
+	else
+		DEPEND="${DEPEND} !minimal? ( pam? ( virtual/pam:0= ) )"
+	fi
+	DEPEND="${DEPEND}
+		oqgraph? ( >=dev-libs/boost-1.40.0:0= )
+		perl? ( !dev-db/mytop )"
+	if mysql_version_is_at_least "10.0.5" ; then
+		DEPEND="${DEPEND}
+			extraengine? (
+				odbc? ( dev-db/unixODBC:0= )
+				xml? ( dev-libs/libxml2:2= )
+			)
+		"
+	fi
+	mysql_version_is_at_least "10.0.7" && DEPEND="${DEPEND} oqgraph? ( dev-libs/judy:0= )"
+	mysql_version_is_at_least "10.0.9" && DEPEND="${DEPEND} >=dev-libs/libpcre-8.35:3="
+
+	mysql_version_is_at_least "10.1.1" && DEPEND="${DEPEND}
+		innodb-lz4? ( app-arch/lz4 )
+		innodb-lzo? ( dev-libs/lzo )
+	"
+
+	mysql_version_is_at_least "10.1.2" && DEPEND="${DEPEND} cracklib? ( sys-libs/cracklib:0= )"
+fi
+
+if [[ ${PN} == "percona-server" ]] ; then
+	if [[ ${HAS_TOOLS_PATCH} ]] ; then
+		DEPEND="${DEPEND} server? ( pam? ( virtual/pam:0= ) )"
+	else
+		DEPEND="${DEPEND} !minimal? ( pam? ( virtual/pam:0= ) )"
+	fi
+fi
+
+# Having different flavours at the same time is not a good idea
+for i in "mysql" "mariadb" "mariadb-galera" "percona-server" "mysql-cluster" ; do
+	[[ ${i} == ${PN} ]] ||
+	DEPEND="${DEPEND} !dev-db/${i}"
+done
+
+if [[ ${PN} == "mysql-cluster" ]] ; then
+	# TODO: This really should include net-misc/memcached
+	# but the package does not install the files it seeks.
+	mysql_version_is_at_least "7.2.3" && \
+		DEPEND="${DEPEND} dev-libs/libevent:0="
+fi
+
+# prefix: first need to implement something for #196294
+# TODO: check emul-linux-x86-db dep when it is multilib enabled
+RDEPEND="${DEPEND}
+	selinux? ( sec-policy/selinux-mysql )
+	abi_x86_32? ( !app-emulation/emul-linux-x86-db[-abi_x86_32(-)] )
+"
+
+if [[ ${HAS_TOOLS_PATCH} ]] ; then
+	RDEPEND="${RDEPEND}
+		server? ( !prefix? ( dev-db/mysql-init-scripts ) )
+		!client-libs? ( virtual/libmysqlclient )
+		!<virtual/mysql-5.6-r4"
+else
+	RDEPEND="${RDEPEND} !minimal? ( !prefix? ( dev-db/mysql-init-scripts ) )"
+fi
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+	# Bug 455016 Add dependencies of mytop
+	RDEPEND="${RDEPEND} perl? (
+		virtual/perl-Getopt-Long
+		dev-perl/TermReadKey
+		virtual/perl-Term-ANSIColor
+		virtual/perl-Time-HiRes ) "
+fi
+
+# @ECLASS-VARIABLE: WSREP_REVISION
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Version of the sys-cluster/galera API (major version in portage) to use for galera clustering
+
+if [[ -n "${WSREP_REVISION}" ]] ; then
+	# The wsrep API version must match between the ebuild and sys-cluster/galera.
+	# This will be indicated by WSREP_REVISION in the ebuild and the first number
+	# in the version of sys-cluster/galera
+	#
+	# lsof is required as of 5.5.38 and 10.0.11 for the rsync sst
+
+	GALERA_RDEPEND="sys-apps/iproute2
+		=sys-cluster/galera-${WSREP_REVISION}*
+	"
+	if [[ ${PN} == "mariadb" ]]; then
+		GALERA_RDEPEND="galera? ( ${GALERA_RDEPEND} )"
+	fi
+	RDEPEND="${RDEPEND} ${GALERA_RDEPEND}
+		sst-rsync? ( sys-process/lsof )
+		sst-xtrabackup? (
+			net-misc/socat[ssl]
+		)
+	"
+	# Causes a circular dependency if DBD-mysql is not already installed
+	PDEPEND="${PDEPEND} sst-xtrabackup? ( >=dev-db/xtrabackup-bin-2.2.4 )"
+fi
+
+if [[ ${PN} == "mysql-cluster" ]] ; then
+	mysql_version_is_at_least "7.2.9" && RDEPEND="${RDEPEND} java? ( >=virtual/jre-1.6 )" && \
+		DEPEND="${DEPEND} java? ( >=virtual/jdk-1.6 )"
+fi
+
+# compile-time-only
+# ncurses only needs multilib for compile time due to a binary that will be not installed
+DEPEND="${DEPEND}
+	virtual/yacc
+	static? ( sys-libs/ncurses[static-libs] )
+	>=dev-util/cmake-2.8.9
+"
+
+# Transition dep until all ebuilds have client-libs patch and USE
+if ! [[ ${HAS_TOOLS_PATCH} ]] ; then
+	DEPEND="${DEPEND} sys-libs/ncurses[${MULTILIB_USEDEP}]"
+fi
+
+# For other stuff to bring us in
+# dev-perl/DBD-mysql is needed by some scripts installed by MySQL
+PDEPEND="${PDEPEND} perl? ( >=dev-perl/DBD-mysql-2.9004 )
+	~virtual/mysql-${MYSQL_PV_MAJOR}"
+
+# my_config.h includes ABI specific data
+MULTILIB_WRAPPED_HEADERS=( /usr/include/mysql/my_config.h /usr/include/mysql/private/embedded_priv.h )
+
+[[ ${PN} == "mysql-cluster" ]] && \
+	MULTILIB_WRAPPED_HEADERS+=( /usr/include/mysql/storage/ndb/ndb_types.h )
+
+[[ ${PN} == "mariadb" ]] && mysql_version_is_at_least "10.1.1" && \
+	MULTILIB_WRAPPED_HEADERS+=( /usr/include/mysql/mysql_version.h )
+
+#
+# HELPER FUNCTIONS:
+#
+
+# @FUNCTION: mysql-multilib_disable_test
+# @USAGE: <test-name> <reason>
+# @DESCRIPTION:
+# Helper function to disable specific tests.
+mysql-multilib_disable_test() {
+	# Thin wrapper; the actual disabling logic lives in mysql-cmake.eclass.
+	mysql-cmake_disable_test "$@"
+}
+
+#
+# EBUILD FUNCTIONS
+#
+
+# @FUNCTION: mysql-multilib_pkg_pretend
+# @DESCRIPTION:
+# Perform some basic tests and tasks during pkg_pretend phase:
+# - reject gcc < 4.7 for source builds with USE=tokudb
+# - reject USE=cluster on anything but mysql-cluster
+mysql-multilib_pkg_pretend() {
+	# Compiler checks only matter when compiling from source, not for binpkgs.
+	if [[ ${MERGE_TYPE} != binary ]] ; then
+		# tokudb requires gcc-4.7 or later (see error text below).
+		if use_if_iuse tokudb && [[ $(gcc-major-version) -lt 4 || \
+			$(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 7 ]] ; then
+			eerror "${PN} with tokudb needs to be built with gcc-4.7 or later."
+			eerror "Please use gcc-config to switch to gcc-4.7 or later version."
+			die
+		fi
+	fi
+	if use_if_iuse cluster && [[ "${PN}" != "mysql-cluster" ]]; then
+		die "NDB Cluster support has been removed from all packages except mysql-cluster"
+	fi
+}
+
+# @FUNCTION: mysql-multilib_pkg_setup
+# @DESCRIPTION:
+# Perform some basic tests and tasks during pkg_setup phase:
+# die if FEATURES="test", USE="-minimal" and not using FEATURES="userpriv"
+# create new user and group for mysql
+# warn about deprecated features
+mysql-multilib_pkg_setup() {
+
+	if has test ${FEATURES} ; then
+		# Minimal builds have no server, so the test-suite restriction
+		# below does not apply to them.
+		if use_if_iuse minimal ; then
+			:
+		elif ! in_iuse server || use_if_iuse server ; then
+			if ! has userpriv ${FEATURES} ; then
+				eerror "Testing with FEATURES=-userpriv is no longer supported by upstream. Tests MUST be run as non-root."
+			fi
+		fi
+	fi
+
+	# This should come after all of the die statements
+	# Fixed uid/gid 60 is the historical Gentoo allocation for mysql.
+	enewgroup mysql 60 || die "problem adding 'mysql' group"
+	enewuser mysql 60 -1 /dev/null mysql || die "problem adding 'mysql' user"
+
+	# mysql-cluster >=7.2.9 has Java connectors; hand off to the java eclass.
+	if [[ ${PN} == "mysql-cluster" ]] ; then
+		mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_pkg_setup
+	fi
+}
+
+# @FUNCTION: mysql-multilib_src_unpack
+# @DESCRIPTION:
+# Unpack the source code
+mysql-multilib_src_unpack() {
+
+	# Initialize the proper variables first
+	mysql_init_vars
+
+	unpack ${A}
+	# Grab the patches
+	# For live extras, fetch mysql-extras via git-r3 into ${WORKDIR}/mysql-extras.
+	# S is temporarily pointed there only for the duration of this call.
+	[[ "${MY_EXTRAS_VER}" == "live" ]] && S="${WORKDIR}/mysql-extras" git-r3_src_unpack
+
+	# Normalize the unpacked tree to the short ${S} path (socket-path length).
+	mv -f "${WORKDIR}/${MY_SOURCEDIR}" "${S}"
+}
+
+# @FUNCTION: mysql-multilib_src_prepare
+# @DESCRIPTION:
+# Apply patches to the source code and remove unneeded bundled libs.
+mysql-multilib_src_prepare() {
+	# All patching is delegated to mysql-cmake.eclass.
+	mysql-cmake_src_prepare "$@"
+	if [[ ${PN} == "mysql-cluster" ]] ; then
+		mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_src_prepare
+	fi
+}
+
+
+# @FUNCTION: mysql-multilib_src_configure
+# @DESCRIPTION:
+# Configure mysql to build the code for Gentoo respecting the use flags.
+# Sets up CXXFLAGS, then hands per-ABI configuration to multilib-minimal.
+mysql-multilib_src_configure() {
+	# Bug #114895, bug #110149
+	filter-flags "-O" "-O[01]"
+
+	CXXFLAGS="${CXXFLAGS} -fno-strict-aliasing"
+	CXXFLAGS="${CXXFLAGS} -felide-constructors"
+	# Causes linkage failures. Upstream bug #59607 removes it
+	if ! mysql_version_is_at_least "5.6" ; then
+		CXXFLAGS="${CXXFLAGS} -fno-implicit-templates"
+	fi
+	# As of 5.7, exceptions are used!
+	if ! mysql_version_is_at_least "5.7" ; then
+		CXXFLAGS="${CXXFLAGS} -fno-exceptions -fno-rtti"
+	fi
+	export CXXFLAGS
+
+	# bug #283926, with GCC4.4, this is required to get correct behavior.
+	append-flags -fno-strict-aliasing
+
+	# bug 508724 mariadb cannot use ld.gold
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+		tc-ld-disable-gold
+	fi
+
+	# Runs multilib_src_configure (below) once per enabled ABI.
+	multilib-minimal_src_configure
+}
+
+# Per-ABI configure callback invoked by multilib-minimal_src_configure.
+# Builds the global mycmakeargs array and runs cmake; non-native ABIs are
+# skipped entirely when USE=-client-libs.
+multilib_src_configure() {
+
+	debug-print-function ${FUNCNAME} "$@"
+
+	CMAKE_BUILD_TYPE="RelWithDebInfo"
+
+	# Without client-libs there is nothing to build for non-native ABIs.
+	if ! multilib_is_native_abi && in_iuse client-libs ; then
+		if ! use client-libs ; then
+			ewarn "Skipping multilib build due to client-libs USE disabled"
+			return 0
+		fi
+	fi
+
+	# debug hack wrt #497532
+	mycmakeargs=(
+		-DCMAKE_C_FLAGS_RELWITHDEBINFO="$(usex debug "" "-DNDEBUG")"
+		-DCMAKE_CXX_FLAGS_RELWITHDEBINFO="$(usex debug "" "-DNDEBUG")"
+		-DCMAKE_INSTALL_PREFIX=${EPREFIX}/usr
+		-DMYSQL_DATADIR=${EPREFIX}/var/lib/mysql
+		-DSYSCONFDIR=${EPREFIX}/etc/mysql
+		-DINSTALL_BINDIR=bin
+		-DINSTALL_DOCDIR=share/doc/${PF}
+		-DINSTALL_DOCREADMEDIR=share/doc/${PF}
+		-DINSTALL_INCLUDEDIR=include/mysql
+		-DINSTALL_INFODIR=share/info
+		-DINSTALL_LIBDIR=$(get_libdir)
+		-DINSTALL_ELIBDIR=$(get_libdir)/mysql
+		-DINSTALL_MANDIR=share/man
+		-DINSTALL_MYSQLDATADIR=${EPREFIX}/var/lib/mysql
+		-DINSTALL_MYSQLSHAREDIR=share/mysql
+		-DINSTALL_MYSQLTESTDIR=share/mysql/mysql-test
+		-DINSTALL_PLUGINDIR=$(get_libdir)/mysql/plugin
+		-DINSTALL_SBINDIR=sbin
+		-DINSTALL_SCRIPTDIR=share/mysql/scripts
+		-DINSTALL_SQLBENCHDIR=share/mysql
+		-DINSTALL_SUPPORTFILESDIR=${EPREFIX}/usr/share/mysql
+		-DWITH_COMMENT="Gentoo Linux ${PF}"
+		$(cmake-utils_use_with test UNIT_TESTS)
+		-DWITH_LIBEDIT=0
+		-DWITH_ZLIB=system
+		-DWITHOUT_LIBWRAP=1
+		-DENABLED_LOCAL_INFILE=1
+		-DMYSQL_UNIX_ADDR=${EPREFIX}/var/run/mysqld/mysqld.sock
+		-DINSTALL_UNIX_ADDRDIR=${EPREFIX}/var/run/mysqld/mysqld.sock
+		-DWITH_SSL=$(usex ssl system bundled)
+		-DWITH_DEFAULT_COMPILER_OPTIONS=0
+		-DWITH_DEFAULT_FEATURE_SET=0
+	)
+
+	if in_iuse client-libs ; then
+		mycmakeargs+=( -DWITHOUT_CLIENTLIBS=$(usex client-libs 0 1) )
+	fi
+
+	# Tools are only ever built for the native ABI.
+	if in_iuse tools ; then
+		if multilib_is_native_abi ; then
+			mycmakeargs+=( -DWITHOUT_TOOLS=$(usex tools 0 1) )
+		else
+			mycmakeargs+=( -DWITHOUT_TOOLS=1 )
+		fi
+	fi
+
+	if in_iuse bindist ; then
+		# bfd.h is only used starting with 10.1 and can be controlled by NOT_FOR_DISTRIBUTION
+		if multilib_is_native_abi; then
+			mycmakeargs+=(
+				-DWITH_READLINE=$(usex bindist 1 0)
+				-DNOT_FOR_DISTRIBUTION=$(usex bindist 0 1)
+			)
+		elif ! in_iuse client-libs ; then
+			mycmakeargs+=(
+				-DWITH_READLINE=1
+				-DNOT_FOR_DISTRIBUTION=0
+			)
+		fi
+	fi
+
+	### TODO: make this system but issues with UTF-8 prevent it
+	mycmakeargs+=( -DWITH_EDITLINE=bundled )
+
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && multilib_is_native_abi ; then
+		mycmakeargs+=(
+			-DWITH_JEMALLOC=$(usex jemalloc system)
+		)
+
+		mysql_version_is_at_least "10.0.9" && mycmakeargs+=( -DWITH_PCRE=system )
+	fi
+
+	configure_cmake_locale
+
+	# Choose minimal vs standard configuration; non-native ABIs always
+	# get the minimal (client-only) configuration.
+	if use_if_iuse minimal ; then
+		configure_cmake_minimal
+	elif in_iuse server ; then
+		if multilib_is_native_abi && use server ; then
+			configure_cmake_standard
+		else
+			configure_cmake_minimal
+		fi
+	else
+		if multilib_is_native_abi ; then
+			configure_cmake_standard
+		else
+			configure_cmake_minimal
+		fi
+	fi
+
+	# systemtap only works on native ABI bug 530132
+	if multilib_is_native_abi; then
+		mycmakeargs+=( $(cmake-utils_use_enable systemtap DTRACE) )
+		[[ ${MYSQL_CMAKE_NATIVE_DEFINES} ]] && mycmakeargs+=( ${MYSQL_CMAKE_NATIVE_DEFINES} )
+	else
+		mycmakeargs+=( -DENABLE_DTRACE=0 )
+		[[ ${MYSQL_CMAKE_NONNATIVE_DEFINES} ]] && mycmakeargs+=( ${MYSQL_CMAKE_NONNATIVE_DEFINES} )
+	fi
+
+	[[ ${MYSQL_CMAKE_EXTRA_DEFINES} ]] && mycmakeargs+=( ${MYSQL_CMAKE_EXTRA_DEFINES} )
+
+	# Always build NDB with mysql-cluster for libndbclient
+	[[ ${PN} == "mysql-cluster" ]] && mycmakeargs+=(
+		-DWITH_NDBCLUSTER=1 -DWITH_PARTITION_STORAGE_ENGINE=1
+		-DWITHOUT_PARTITION_STORAGE_ENGINE=0 )
+
+	cmake-utils_src_configure
+}
+
+# Compile phase: record any extra cmake arguments, then build every ABI.
+mysql-multilib_src_compile() {
+	# Kept 'local' here on purpose: bash dynamic scoping makes _cmake_args
+	# visible inside multilib_src_compile, which is called below.
+	local _cmake_args=( "${@}" )
+
+	multilib-minimal_src_compile
+}
+
+# Per-ABI compile callback for multilib-minimal_src_compile.
+multilib_src_compile() {
+	# Mirror the skip logic from multilib_src_configure.
+	if ! multilib_is_native_abi && in_iuse client-libs ; then
+		if ! use client-libs ; then
+			ewarn "Skipping multilib build due to client-libs USE disabled"
+			return 0
+		fi
+	fi
+
+	# _cmake_args is inherited from mysql-multilib_src_compile's scope.
+	cmake-utils_src_compile "${_cmake_args[@]}"
+}
+
+
+# @FUNCTION: mysql-multilib_src_install
+# @DESCRIPTION:
+# Install mysql.
+mysql-multilib_src_install() {
+	if ! in_iuse client-libs || use_if_iuse client-libs ; then
+		# wrap the config script
+		MULTILIB_CHOST_TOOLS=( /usr/bin/mysql_config )
+	fi
+
+	# Without client-libs, skip the header-wrapping machinery and just
+	# run the per-ABI install directly for each ABI.
+	if in_iuse client-libs && ! use client-libs ; then
+		multilib_foreach_abi multilib_src_install
+	else
+		multilib-minimal_src_install
+	fi
+}
+
+# Per-ABI install callback for multilib-minimal_src_install.
+multilib_src_install() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Mirror the skip logic used during configure/compile.
+	if ! multilib_is_native_abi && in_iuse client-libs ; then
+		if ! use client-libs ; then
+			ewarn "Skipping multilib build due to client-libs USE disabled"
+			return 0
+		fi
+	fi
+
+	if multilib_is_native_abi; then
+		mysql-cmake_src_install
+	else
+		cmake-utils_src_install
+		# mariadb server builds additionally ship private sql headers.
+		if [[ "${PN}" == "mariadb" || "${PN}" == "mariadb-galera" ]] ; then
+			if use_if_iuse minimal ; then
+				:
+			elif use_if_iuse server || ! in_iuse server ; then
+				insinto /usr/include/mysql/private
+				doins "${S}"/sql/*.h
+			fi
+		fi
+	fi
+}
+
+# @FUNCTION: mysql-multilib_pkg_preinst
+# @DESCRIPTION:
+# Call java-pkg-opt-2 eclass when mysql-cluster is installed
+mysql-multilib_pkg_preinst() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	if [[ ${PN} == "mysql-cluster" ]] ; then
+		mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_pkg_preinst
+	fi
+}
+
+# @FUNCTION: mysql-multilib_pkg_postinst
+# @DESCRIPTION:
+# Run post-installation tasks:
+# create the dir for logfiles if non-existent
+# touch the logfiles and secure them
+# install scripts
+# issue required steps for optional features
+# issue deprecation warnings
+mysql-multilib_pkg_postinst() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Make sure the vars are correctly initialized
+	mysql_init_vars
+
+	# Check FEATURES="collision-protect" before removing this
+	[[ -d "${ROOT}${MY_LOGDIR}" ]] || install -d -m0750 -o mysql -g mysql "${ROOT}${MY_LOGDIR}"
+
+	# Secure the logfiles
+	touch "${ROOT}${MY_LOGDIR}"/mysql.{log,err}
+	chown mysql:mysql "${ROOT}${MY_LOGDIR}"/mysql*
+	chmod 0660 "${ROOT}${MY_LOGDIR}"/mysql*
+
+	# Minimal builds don't have the MySQL server
+	if use_if_iuse minimal ; then
+		:
+	elif ! in_iuse server || use_if_iuse server ; then
+		# Install sample configs and helper scripts as documentation.
+		docinto "support-files"
+		for script in \
+			support-files/my-*.cnf \
+			support-files/magic \
+			support-files/ndb-config-2-node.ini
+		do
+			[[ -f "${script}" ]] \
+				&& dodoc "${script}"
+		done
+
+		docinto "scripts"
+		for script in scripts/mysql* ; do
+			if [[ -f "${script}" && "${script%.sh}" == "${script}" ]]; then
+				dodoc "${script}"
+			fi
+		done
+
+		if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+			if use_if_iuse pam ; then
+				einfo
+				elog "This install includes the PAM authentication plugin."
+				elog "To activate and configure the PAM plugin, please read:"
+				elog "https://kb.askmonty.org/en/pam-authentication-plugin/"
+				einfo
+			fi
+		fi
+
+		einfo
+		elog "You might want to run:"
+		elog "\"emerge --config =${CATEGORY}/${PF}\""
+		elog "if this is a new install."
+		einfo
+
+		einfo
+		elog "If you are upgrading major versions, you should run the"
+		elog "mysql_upgrade tool."
+		einfo
+
+		if [[ ${PN} == "mariadb-galera" ]] ; then
+			einfo
+			elog "Be sure to edit the my.cnf file to activate your cluster settings."
+			elog "This should be done after running \"emerge --config =${CATEGORY}/${PF}\""
+			elog "The first time the cluster is activated, you should add"
+			elog "--wsrep-new-cluster to the options in /etc/conf.d/mysql for one node."
+			elog "This option should then be removed for subsequent starts."
+			einfo
+		fi
+	fi
+}
+
+# @FUNCTION: mysql-multilib_getopt
+# @USAGE: <section(s)> <option-name>
+# @DESCRIPTION:
+# Use my_print_defaults to extract specific config options.
+# Prints the full "--<option>=<value>" line(s) for the given section(s).
+mysql-multilib_getopt() {
+	local mypd="${EROOT}"/usr/bin/my_print_defaults
+	# NOTE(review): section/flag are intentionally not 'local' here (they
+	# leak into the caller's scope); $section is left unquoted so that a
+	# multi-word argument like 'client mysql' expands to several sections.
+	section="$1"
+	flag="--${2}="
+	"${mypd}" $section | sed -n "/^${flag}/p"
+}
+
+# @FUNCTION: mysql-multilib_getoptval
+# @USAGE: <section(s)> <option-name>
+# @DESCRIPTION:
+# Use my_print_defaults to extract specific config options.
+# Like mysql-multilib_getopt, but strips the "--<option>=" prefix and
+# prints only the value(s).
+mysql-multilib_getoptval() {
+	local mypd="${EROOT}"/usr/bin/my_print_defaults
+	# Unquoted $section allows multi-word section lists (e.g. 'client mysql').
+	section="$1"
+	flag="--${2}="
+	"${mypd}" $section | sed -n "/^${flag}/s,${flag},,gp"
+}
+
+# @FUNCTION: mysql-multilib_pkg_config
+# @DESCRIPTION:
+# Configure mysql environment: migrate MY_DATADIR if it changed, prompt
+# for (or read) the root password, run mysql_install_db, then start a
+# temporary server to set the root password and load timezone/help data.
+mysql-multilib_pkg_config() {
+
+	debug-print-function ${FUNCNAME} "$@"
+
+	local old_MY_DATADIR="${MY_DATADIR}"
+	local old_HOME="${HOME}"
+	# my_print_defaults needs to read stuff in $HOME/.my.cnf
+	export HOME=${EPREFIX}/root
+
+	# Make sure the vars are correctly initialized
+	mysql_init_vars
+
+	[[ -z "${MY_DATADIR}" ]] && die "Sorry, unable to find MY_DATADIR"
+	# The server presence is gated by USE=server (tools patch) or by
+	# USE=minimal (legacy layout).
+	if [[ ${HAS_TOOLS_PATCH} ]] ; then
+		if ! built_with_use ${CATEGORY}/${PN} server ; then
+			die "Minimal builds do NOT include the MySQL server"
+		fi
+	else
+		if built_with_use ${CATEGORY}/${PN} minimal ; then
+			die "Minimal builds do NOT include the MySQL server"
+		fi
+	fi
+
+	# If MY_DATADIR was changed since install, try to move the old datadir.
+	if [[ ( -n "${MY_DATADIR}" ) && ( "${MY_DATADIR}" != "${old_MY_DATADIR}" ) ]]; then
+		local MY_DATADIR_s="${ROOT}/${MY_DATADIR}"
+		MY_DATADIR_s="${MY_DATADIR_s%%/}"
+		local old_MY_DATADIR_s="${ROOT}/${old_MY_DATADIR}"
+		old_MY_DATADIR_s="${old_MY_DATADIR_s%%/}"
+
+		if [[ ( -d "${old_MY_DATADIR_s}" ) && ( "${old_MY_DATADIR_s}" != / ) ]]; then
+			if [[ -d "${MY_DATADIR_s}" ]]; then
+				ewarn "Both ${old_MY_DATADIR_s} and ${MY_DATADIR_s} exist"
+				ewarn "Attempting to use ${MY_DATADIR_s} and preserving ${old_MY_DATADIR_s}"
+			else
+				elog "Moving MY_DATADIR from ${old_MY_DATADIR_s} to ${MY_DATADIR_s}"
+				mv --strip-trailing-slashes -T "${old_MY_DATADIR_s}" "${MY_DATADIR_s}" \
+					|| die "Moving MY_DATADIR failed"
+			fi
+		else
+			ewarn "Previous MY_DATADIR (${old_MY_DATADIR_s}) does not exist"
+			if [[ -d "${MY_DATADIR_s}" ]]; then
+				ewarn "Attempting to use ${MY_DATADIR_s}"
+			else
+				eerror "New MY_DATADIR (${MY_DATADIR_s}) does not exist"
+				die "Configuration Failed! Please reinstall ${CATEGORY}/${PN}"
+			fi
+		fi
+	fi
+
+	local pwd1="a"
+	local pwd2="b"
+	local maxtry=15
+
+	if [ -z "${MYSQL_ROOT_PASSWORD}" ]; then
+		MYSQL_ROOT_PASSWORD="$(mysql-multilib_getoptval 'client mysql' password)"
+	fi
+	MYSQL_TMPDIR="$(mysql-multilib_getoptval mysqld tmpdir)"
+	# These are dir+prefix
+	MYSQL_RELAY_LOG="$(mysql-multilib_getoptval mysqld relay-log)"
+	MYSQL_RELAY_LOG=${MYSQL_RELAY_LOG%/*}
+	MYSQL_LOG_BIN="$(mysql-multilib_getoptval mysqld log-bin)"
+	MYSQL_LOG_BIN=${MYSQL_LOG_BIN%/*}
+
+	if [[ ! -d "${ROOT}"/$MYSQL_TMPDIR ]]; then
+		einfo "Creating MySQL tmpdir $MYSQL_TMPDIR"
+		install -d -m 770 -o mysql -g mysql "${EROOT}"/$MYSQL_TMPDIR
+	fi
+	if [[ ! -d "${ROOT}"/$MYSQL_LOG_BIN ]]; then
+		einfo "Creating MySQL log-bin directory $MYSQL_LOG_BIN"
+		install -d -m 770 -o mysql -g mysql "${EROOT}"/$MYSQL_LOG_BIN
+	fi
+	if [[ ! -d "${EROOT}"/$MYSQL_RELAY_LOG ]]; then
+		einfo "Creating MySQL relay-log directory $MYSQL_RELAY_LOG"
+		install -d -m 770 -o mysql -g mysql "${EROOT}"/$MYSQL_RELAY_LOG
+	fi
+
+	if [[ -d "${ROOT}/${MY_DATADIR}/mysql" ]] ; then
+		ewarn "You have already a MySQL database in place."
+		ewarn "(${ROOT}/${MY_DATADIR}/*)"
+		ewarn "Please rename or delete it if you wish to replace it."
+		die "MySQL database already exists!"
+	fi
+
+	# Bug #213475 - MySQL _will_ object strenuously if your machine is named
+	# localhost. Also causes weird failures.
+	[[ "${HOSTNAME}" == "localhost" ]] && die "Your machine must NOT be named localhost"
+
+	if [ -z "${MYSQL_ROOT_PASSWORD}" ]; then
+
+		einfo "Please provide a password for the mysql 'root' user now"
+		einfo "or through the ${HOME}/.my.cnf file."
+		ewarn "Avoid [\"'\\_%] characters in the password"
+		read -rsp "    >" pwd1 ; echo
+
+		einfo "Retype the password"
+		read -rsp "    >" pwd2 ; echo
+
+		if [[ "x$pwd1" != "x$pwd2" ]] ; then
+			die "Passwords are not the same"
+		fi
+		MYSQL_ROOT_PASSWORD="${pwd1}"
+		unset pwd1 pwd2
+	fi
+
+	local options
+	local sqltmp="$(emktemp)"
+
+	# Fix bug 446200. Don't reference host my.cnf, needs to come first,
+	# see http://bugs.mysql.com/bug.php?id=31312
+	use prefix && options="${options} '--defaults-file=${MY_SYSCONFDIR}/my.cnf'"
+
+	local help_tables="${ROOT}${MY_SHAREDSTATEDIR}/fill_help_tables.sql"
+	[[ -r "${help_tables}" ]] \
+		&& cp "${help_tables}" "${TMPDIR}/fill_help_tables.sql" \
+		|| touch "${TMPDIR}/fill_help_tables.sql"
+	help_tables="${TMPDIR}/fill_help_tables.sql"
+
+	# Figure out which options we need to disable to do the setup
+	# Probe mysqld --help output for supported options; only pass the
+	# corresponding --loose-skip-* flags that this server understands.
+	helpfile="${TMPDIR}/mysqld-help"
+	${EROOT}/usr/sbin/mysqld --verbose --help >"${helpfile}" 2>/dev/null
+	for opt in grant-tables host-cache name-resolve networking slave-start \
+		federated ssl log-bin relay-log slow-query-log external-locking \
+		ndbcluster log-slave-updates \
+		; do
+		optexp="--(skip-)?${opt}" optfull="--loose-skip-${opt}"
+		egrep -sq -- "${optexp}" "${helpfile}" && options="${options} ${optfull}"
+	done
+	# But some options changed names
+	egrep -sq external-locking "${helpfile}" && \
+		options="${options/skip-locking/skip-external-locking}"
+
+	use prefix || options="${options} --user=mysql"
+
+	# MySQL 5.6+ needs InnoDB
+	if [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] ; then
+		mysql_version_is_at_least "5.6" || options="${options} --loose-skip-innodb"
+	fi
+
+	einfo "Creating the mysql database and setting proper permissions on it ..."
+
+	# Now that /var/run is a tmpfs mount point, we need to ensure it exists before using it
+	PID_DIR="${EROOT}/var/run/mysqld"
+	if [[ ! -d "${PID_DIR}" ]]; then
+		mkdir -p "${PID_DIR}" || die "Could not create pid directory"
+		chown mysql:mysql "${PID_DIR}" || die "Could not set ownership on pid directory"
+		chmod 755 "${PID_DIR}" || die "Could not set permissions on pid directory"
+	fi
+
+	pushd "${TMPDIR}" &>/dev/null
+	#cmd="'${EROOT}/usr/share/mysql/scripts/mysql_install_db' '--basedir=${EPREFIX}/usr' ${options}"
+	cmd=${EROOT}usr/share/mysql/scripts/mysql_install_db
+	[[ -f ${cmd} ]] || cmd=${EROOT}usr/bin/mysql_install_db
+	cmd="'$cmd' '--basedir=${EPREFIX}/usr' ${options} '--datadir=${ROOT}/${MY_DATADIR}' '--tmpdir=${ROOT}/${MYSQL_TMPDIR}'"
+	einfo "Command: $cmd"
+	# eval is needed so the single-quoted arguments built above survive
+	# word splitting as single words.
+	eval $cmd \
+		>"${TMPDIR}"/mysql_install_db.log 2>&1
+	if [ $? -ne 0 ]; then
+		grep -B5 -A999 -i "ERROR" "${TMPDIR}"/mysql_install_db.log 1>&2
+		die "Failed to run mysql_install_db. Please review ${EPREFIX}/var/log/mysql/mysqld.err AND ${TMPDIR}/mysql_install_db.log"
+	fi
+	popd &>/dev/null
+	[[ -f "${ROOT}/${MY_DATADIR}/mysql/user.frm" ]] \
+		|| die "MySQL databases not installed"
+	chown -R mysql:mysql "${ROOT}/${MY_DATADIR}" 2>/dev/null
+	chmod 0750 "${ROOT}/${MY_DATADIR}" 2>/dev/null
+
+	# Filling timezones, see
+	# http://dev.mysql.com/doc/mysql/en/time-zone-support.html
+	"${EROOT}/usr/bin/mysql_tzinfo_to_sql" "${EROOT}/usr/share/zoneinfo" > "${sqltmp}" 2>/dev/null
+
+	if [[ -r "${help_tables}" ]] ; then
+		cat "${help_tables}" >> "${sqltmp}"
+	fi
+
+	local socket="${EROOT}/var/run/mysqld/mysqld${RANDOM}.sock"
+	local pidfile="${EROOT}/var/run/mysqld/mysqld${RANDOM}.pid"
+	local mysqld="${EROOT}/usr/sbin/mysqld \
+		${options} \
+		$(use prefix || echo --user=mysql) \
+		--log-warnings=0 \
+		--basedir=${EROOT}/usr \
+		--datadir=${ROOT}/${MY_DATADIR} \
+		--max_allowed_packet=8M \
+		--net_buffer_length=16K \
+		--default-storage-engine=MyISAM \
+		--socket=${socket} \
+		--pid-file=${pidfile}
+		--tmpdir=${ROOT}/${MYSQL_TMPDIR}"
+	#einfo "About to start mysqld: ${mysqld}"
+	ebegin "Starting mysqld"
+	einfo "Command ${mysqld}"
+	${mysqld} &
+	rc=$?
+	# Poll up to ${maxtry} seconds for the server socket to appear.
+	while ! [[ -S "${socket}" || "${maxtry}" -lt 1 ]] ; do
+		maxtry=$((${maxtry}-1))
+		echo -n "."
+		sleep 1
+	done
+	eend $rc
+
+	if ! [[ -S "${socket}" ]]; then
+		die "Completely failed to start up mysqld with: ${mysqld}"
+	fi
+
+	ebegin "Setting root password"
+	# Do this from memory, as we don't want clear text passwords in temp files
+	local sql="UPDATE mysql.user SET Password = PASSWORD('${MYSQL_ROOT_PASSWORD}') WHERE USER='root'"
+	"${EROOT}/usr/bin/mysql" \
+		--socket=${socket} \
+		-hlocalhost \
+		-e "${sql}"
+	eend $?
+
+	ebegin "Loading \"zoneinfo\", this step may require a few seconds"
+	"${EROOT}/usr/bin/mysql" \
+		--socket=${socket} \
+		-hlocalhost \
+		-uroot \
+		--password="${MYSQL_ROOT_PASSWORD}" \
+		mysql < "${sqltmp}"
+	rc=$?
+	# NOTE(review): '$?' on the next line is the status of the 'rc=$?'
+	# assignment above (always 0), so this eend always reports success;
+	# 'eend $rc' was almost certainly intended. The explicit $rc check
+	# just below still emits the warning on failure.
+	eend $?
+	[[ $rc -ne 0 ]] && ewarn "Failed to load zoneinfo!"
+
+	# Stop the server and cleanup
+	einfo "Stopping the server ..."
+	kill $(< "${pidfile}" )
+	rm -f "${sqltmp}"
+	# Reap the backgrounded mysqld started above (job %1).
+	wait %1
+	einfo "Done"
+}
diff --git a/eclass/mysql-v2.eclass b/eclass/mysql-v2.eclass
new file mode 100644
index 000000000000..b317be46be0d
--- /dev/null
+++ b/eclass/mysql-v2.eclass
@@ -0,0 +1,921 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mysql-v2.eclass
+# @MAINTAINER:
+# Maintainers:
+#	- MySQL Team <mysql-bugs@gentoo.org>
+#	- Robin H. Johnson <robbat2@gentoo.org>
+#	- Jorge Manuel B. S. Vicetto <jmbsvicetto@gentoo.org>
+#	- Brian Evans <grknight@gentoo.org>
+# @BLURB: This eclass provides most of the functions for mysql ebuilds
+# @DESCRIPTION:
+# The mysql-v2.eclass is the base eclass to build the mysql and
+# alternative projects (mariadb and percona) ebuilds.
+# This eclass uses the mysql-autotools and mysql-cmake eclasses for the
+# specific bits related to the build system.
+# It provides the src_unpack, src_prepare, src_configure, src_compile,
+# src_install, pkg_preinst, pkg_postinst, pkg_config and pkg_postrm
+# phase hooks.
+
+# @ECLASS-VARIABLE: BUILD
+# @DESCRIPTION:
+# Build type of the mysql version
+: ${BUILD:=autotools}
+
+# Select the build-system-specific eclass to inherit below.
+case ${BUILD} in
+	"cmake")
+		BUILD_INHERIT="mysql-cmake"
+		;;
+	"autotools")
+		BUILD_INHERIT="mysql-autotools"
+
+		WANT_AUTOCONF="latest"
+		WANT_AUTOMAKE="latest"
+		;;
+	*)
+		die "${BUILD} is not a valid build system for mysql"
+		;;
+esac
+
+MYSQL_EXTRAS=""
+
+# @ECLASS-VARIABLE: MY_EXTRAS_VER
+# @DESCRIPTION:
+# The version of the mysql-extras repo to use to build mysql
+# Use "none" to disable its use
+[[ ${MY_EXTRAS_VER} == "live" ]] && MYSQL_EXTRAS="git-r3"
+
+inherit eutils flag-o-matic gnuconfig ${MYSQL_EXTRAS} ${BUILD_INHERIT} mysql_fx versionator toolchain-funcs user
+
+#
+# Supported EAPI versions and export functions
+#
+
+case "${EAPI:-0}" in
+	4|5) ;;
+	*) die "Unsupported EAPI: ${EAPI}" ;;
+esac
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_config pkg_postrm
+
+#
+# VARIABLES:
+#
+
+# Shorten the path because the socket path length must be shorter than 107 chars
+# and we will run a mysql server during test phase
+S="${WORKDIR}/mysql"
+
+[[ ${MY_EXTRAS_VER} == "latest" ]] && MY_EXTRAS_VER="20090228-0714Z"
+if [[ ${MY_EXTRAS_VER} == "live" ]]; then
+	EGIT_REPO_URI="git://anongit.gentoo.org/proj/mysql-extras.git"
+	EGIT_CHECKOUT_DIR=${WORKDIR}/mysql-extras
+	EGIT_CLONE_TYPE=shallow
+fi
+
+# @ECLASS-VARIABLE: MYSQL_PV_MAJOR
+# @DESCRIPTION:
+# Upstream MySQL considers the first two parts of the version number to be the
+# major version. Upgrades that change major version should always run
+# mysql_upgrade.
+MYSQL_PV_MAJOR="$(get_version_component_range 1-2 ${PV})"
+
+# Cluster is a special case...
+# Map NDB cluster release series onto the MySQL server series they ship.
+if [[ ${PN} == "mysql-cluster" ]]; then
+	case ${PV} in
+		6.1*|7.0*|7.1*) MYSQL_PV_MAJOR=5.1 ;;
+		7.2*) MYSQL_PV_MAJOR=5.5 ;;
+		7.3*) MYSQL_PV_MAJOR=5.6 ;;
+	esac
+fi
+
+# MariaDB has left the numbering schema but keeping compatibility
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+	case ${PV} in
+		10.0*|10.1*) MYSQL_PV_MAJOR="5.6" ;;
+	esac
+fi
+
+# @ECLASS-VARIABLE: MYSQL_VERSION_ID
+# @DESCRIPTION:
+# MYSQL_VERSION_ID will be:
+# major * 10e6 + minor * 10e4 + micro * 10e2 + gentoo revision number, all [0..99]
+# This is an important part, because many of the choices the MySQL ebuild will do
+# depend on this variable.
+# In particular, the code below transforms a $PVR like "5.0.18-r3" in "5001803"
+# We also strip off upstream's trailing letter that they use to respin tarballs
+MYSQL_VERSION_ID=""
+tpv="${PV%[a-z]}"
+# tpv[0..2] = version components; tpv[3] = the "-rN" revision suffix taken
+# from the tail of PVR, reduced to just N.
+tpv=( ${tpv//[-._]/ } ) ; tpv[3]="${PVR:${#PV}}" ; tpv[3]="${tpv[3]##*-r}"
+for vatom in 0 1 2 3 ; do
+	# pad to length 2
+	tpv[${vatom}]="00${tpv[${vatom}]}"
+	MYSQL_VERSION_ID="${MYSQL_VERSION_ID}${tpv[${vatom}]:0-2}"
+done
+# strip leading "0" (otherwise it's considered an octal number by BASH)
+MYSQL_VERSION_ID=${MYSQL_VERSION_ID##"0"}
+
+# This eclass should only be used with at least mysql-5.1.50
+mysql_version_is_at_least "5.1.50" || die "This eclass should only be used with >=mysql-5.1.50"
+
+# @ECLASS-VARIABLE: XTRADB_VER
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Version of the XTRADB storage engine
+
+# @ECLASS-VARIABLE: PERCONA_VER
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Designation by PERCONA for a MySQL version to apply an XTRADB release
+
+# Work out the default SERVER_URI correctly
+if [[ -z ${SERVER_URI} ]]; then
+	[[ -z ${MY_PV} ]] && MY_PV="${PV//_/-}"
+	if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+		# Beginning with 5.5, MariaDB stopped putting beta, alpha or rc on their tarball names
+		mysql_version_is_at_least "5.5" && MARIA_FULL_PV=$(get_version_component_range 1-3) || \
+			MARIA_FULL_PV=$(replace_version_separator 3 '-' ${MY_PV})
+		MARIA_FULL_P="${PN}-${MARIA_FULL_PV}"
+		SERVER_URI="
+		http://ftp.osuosl.org/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://ftp.osuosl.org/pub/mariadb/${MARIA_FULL_P}/source/${MARIA_FULL_P}.tar.gz
+		http://mirror.jmu.edu/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.coreix.net/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.syringanetworks.net/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirrors.fe.up.pt/pub/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		http://mirror2.hs-esslingen.de/mariadb/${MARIA_FULL_P}/kvm-tarbake-jaunty-x86/${MARIA_FULL_P}.tar.gz
+		"
+		if [[ ${PN} == "mariadb-galera" ]]; then
+			MY_SOURCEDIR="${PN%%-galera}-${MARIA_FULL_PV}"
+		fi
+	elif [[ ${PN} == "percona-server" ]]; then
+		PERCONA_PN="Percona-Server"
+		MIRROR_PV=$(get_version_component_range 1-2 ${PV})
+		MY_PV=$(get_version_component_range 1-3 ${PV})
+		PERCONA_RELEASE=$(get_version_component_range 4-5 ${PV})
+		PERCONA_RC=$(get_version_component_range 6 ${PV})
+		SERVER_URI="http://www.percona.com/redir/downloads/${PERCONA_PN}-${MIRROR_PV}/${PERCONA_PN}-${MY_PV}-${PERCONA_RC}${PERCONA_RELEASE}/source/tarball/${PN}-${MY_PV}-${PERCONA_RC}${PERCONA_RELEASE}.tar.gz"
+#		http://www.percona.com/redir/downloads/Percona-Server-5.5/LATEST/source/tarball/Percona-Server-5.5.30-30.2.tar.gz
+#		http://www.percona.com/redir/downloads/Percona-Server-5.6/Percona-Server-5.6.13-rc60.5/source/tarball/Percona-Server-5.6.13-rc60.5.tar.gz
+	else
+		if [[ "${PN}" == "mysql-cluster" ]] ; then
+			URI_DIR="MySQL-Cluster"
+			URI_FILE="mysql-cluster-gpl"
+		else
+			URI_DIR="MySQL"
+			URI_FILE="mysql"
+		fi
+		URI_A="${URI_FILE}-${MY_PV}.tar.gz"
+		MIRROR_PV=$(get_version_component_range 1-2 ${PV})
+		# Recently upstream switched to an archive site, and not on mirrors
+		SERVER_URI="http://downloads.mysql.com/archives/${URI_FILE}-${MIRROR_PV}/${URI_A}
+			https://downloads.skysql.com/files/${URI_FILE}-${MIRROR_PV}/${URI_A}
+			mirror://mysql/Downloads/${URI_DIR}-${PV%.*}/${URI_A}"
+	fi
+fi
+
+DESCRIPTION="A fast, multi-threaded, multi-user SQL database server"
+HOMEPAGE="http://www.mysql.com/"
+if [[ ${PN} == "mariadb" ]]; then
+ HOMEPAGE="http://mariadb.org/"
+ DESCRIPTION="An enhanced, drop-in replacement for MySQL"
+fi
+if [[ ${PN} == "mariadb-galera" ]]; then
+ HOMEPAGE="http://mariadb.org/"
+ DESCRIPTION="An enhanced, drop-in replacement for MySQL with Galera Replication"
+fi
+if [[ ${PN} == "percona-server" ]]; then
+ HOMEPAGE="http://www.percona.com/software/percona-server"
+ DESCRIPTION="An enhanced, drop-in replacement for MySQL from the Percona team"
+fi
+LICENSE="GPL-2"
+SLOT="0"
+
+case "${BUILD}" in
+ "autotools")
+ IUSE="big-tables debug embedded minimal +perl selinux ssl static test"
+ ;;
+ "cmake")
+ IUSE="debug embedded minimal +perl selinux ssl static static-libs test"
+ ;;
+esac
+
+# Common IUSE
+IUSE="${IUSE} latin1 extraengine cluster max-idx-128 +community profiling"
+
+# This probably could be simplified, but the syntax would have to be just right
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && \
+ mysql_version_is_at_least "5.5" ; then
+ IUSE="bindist ${IUSE}"
+ RESTRICT="${RESTRICT} !bindist? ( bindist )"
+elif [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] && \
+ mysql_check_version_range "5.5.37 to 5.6.11.99" ; then
+ IUSE="bindist ${IUSE}"
+ RESTRICT="${RESTRICT} !bindist? ( bindist )"
+elif [[ ${PN} == "mysql-cluster" ]] && \
+ mysql_check_version_range "7.2 to 7.2.99.99" ; then
+ IUSE="bindist ${IUSE}"
+ RESTRICT="${RESTRICT} !bindist? ( bindist )"
+fi
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]]; then
+ mysql_check_version_range "5.1.38 to 5.3.99" && IUSE="${IUSE} libevent"
+ mysql_version_is_at_least "5.2" && IUSE="${IUSE} oqgraph" && \
+ REQUIRED_USE="${REQUIRED_USE} minimal? ( !oqgraph )"
+ mysql_version_is_at_least "5.2.5" && IUSE="${IUSE} sphinx" && \
+ REQUIRED_USE="${REQUIRED_USE} minimal? ( !sphinx )"
+ mysql_version_is_at_least "5.2.10" && IUSE="${IUSE} pam"
+ # 5.5.33 and 10.0.5 add TokuDB. Authors strongly recommend jemalloc or perfomance suffers
+ mysql_version_is_at_least "10.0.5" && IUSE="${IUSE} tokudb odbc xml" && \
+ REQUIRED_USE="${REQUIRED_USE} odbc? ( extraengine ) xml? ( extraengine ) tokudb? ( jemalloc )"
+ mysql_check_version_range "5.5.33 to 5.5.99" && IUSE="${IUSE} tokudb" && \
+ REQUIRED_USE="${REQUIRED_USE} tokudb? ( jemalloc )"
+fi
+
+if mysql_version_is_at_least "5.5"; then
+ REQUIRED_USE="${REQUIRED_USE} tcmalloc? ( !jemalloc ) jemalloc? ( !tcmalloc )"
+ IUSE="${IUSE} jemalloc tcmalloc"
+fi
+
+if mysql_version_is_at_least "5.5.7"; then
+ IUSE="${IUSE} systemtap"
+fi
+
+if [[ ${PN} == "percona-server" ]]; then
+ mysql_version_is_at_least "5.5.10" && IUSE="${IUSE} pam"
+fi
+
+REQUIRED_USE="${REQUIRED_USE} minimal? ( !cluster !extraengine !embedded ) static? ( !ssl )"
+
+#
+# DEPENDENCIES:
+#
+
+# Be warned, *DEPEND are version-dependent
+# These are used for both runtime and compiletime
+DEPEND="
+ ssl? ( >=dev-libs/openssl-0.9.6d:0 )
+ kernel_linux? ( sys-process/procps )
+ >=sys-apps/sed-4
+ >=sys-apps/texinfo-4.7-r1
+ >=sys-libs/zlib-1.2.3
+"
+# TODO: add this as a dep if it is moved from the overlay
+# !dev-db/mariadb-native-client[mysqlcompat]
+
+# dev-db/mysql-5.6.12+ only works with dev-libs/libedit
+# This probably could be simplified
+if [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] && \
+ mysql_version_is_at_least "5.6.12" ; then
+ DEPEND="${DEPEND} dev-libs/libedit"
+elif [[ ${PN} == "mysql-cluster" ]] && mysql_version_is_at_least "7.3"; then
+ DEPEND="${DEPEND} dev-libs/libedit"
+else
+ # Older flavours use readline; for 5.5+ only when not a bindist build.
+ if mysql_version_is_at_least "5.5" ; then
+ DEPEND="${DEPEND} !bindist? ( >=sys-libs/readline-4.1:0 )"
+ else
+ DEPEND="${DEPEND} >=sys-libs/readline-4.1:0"
+ fi
+fi
+
+# MariaDB-specific optional dependencies, matched to the IUSE set above.
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+ mysql_check_version_range "5.1.38 to 5.3.99" && DEPEND="${DEPEND} libevent? ( >=dev-libs/libevent-1.4 )"
+ mysql_version_is_at_least "5.2" && DEPEND="${DEPEND} oqgraph? ( >=dev-libs/boost-1.40.0 )"
+ mysql_version_is_at_least "5.2.10" && DEPEND="${DEPEND} !minimal? ( pam? ( virtual/pam ) )"
+ # Bug 441700 MariaDB >=5.3 include custom mytop
+ mysql_version_is_at_least "5.3" && DEPEND="${DEPEND} perl? ( !dev-db/mytop )"
+ if mysql_version_is_at_least "10.0.5" ; then
+ DEPEND="${DEPEND}
+ odbc? ( dev-db/unixODBC )
+ xml? ( dev-libs/libxml2 )
+ "
+ fi
+ mysql_version_is_at_least "10.0.7" && DEPEND="${DEPEND} oqgraph? ( dev-libs/judy )"
+ if mysql_version_is_at_least "10.0.9" ; then
+ DEPEND="${DEPEND} >=dev-libs/libpcre-8.35"
+ fi
+fi
+
+# Having different flavours at the same time is not a good idea
+for i in "mysql" "mariadb" "mariadb-galera" "percona-server" "mysql-cluster" ; do
+ [[ ${i} == ${PN} ]] ||
+ DEPEND="${DEPEND} !dev-db/${i}"
+done
+
+# 5.5.7+ builds gain allocator, systemtap and libaio dependencies; the
+# [static-libs?] USE-deps mirror this package's own static flag.
+if mysql_version_is_at_least "5.5.7" ; then
+ DEPEND="${DEPEND}
+ jemalloc? ( dev-libs/jemalloc[static-libs?] )
+ tcmalloc? ( dev-util/google-perftools )
+ >=sys-libs/zlib-1.2.3[static-libs?]
+ ssl? ( >=dev-libs/openssl-0.9.6d[static-libs?] )
+ systemtap? ( >=dev-util/systemtap-1.3 )
+ kernel_linux? ( dev-libs/libaio )
+ "
+fi
+
+if [[ ${PN} == "mysql-cluster" ]] ; then
+ # TODO: This really should include net-misc/memcached
+ # but the package does not install the files it seeks.
+ mysql_version_is_at_least "7.2.3" && \
+ DEPEND="${DEPEND} dev-libs/libevent"
+fi
+
+# prefix: first need to implement something for #196294
+RDEPEND="${DEPEND}
+ !minimal? ( !prefix? ( dev-db/mysql-init-scripts ) )
+ selinux? ( sec-policy/selinux-mysql )
+"
+
+if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+ # Bug 455016 Add dependencies of mytop
+ if mysql_version_is_at_least "5.3" ; then
+ RDEPEND="${RDEPEND}
+ perl? (
+ virtual/perl-Getopt-Long
+ dev-perl/TermReadKey
+ virtual/perl-Term-ANSIColor
+ virtual/perl-Time-HiRes
+ )
+ "
+ fi
+fi
+
+if [[ ${PN} == "mariadb-galera" ]] ; then
+ # The wsrep API version must match between the ebuild and sys-cluster/galera.
+ # This will be indicated by WSREP_REVISION in the ebuild and the first number
+ # in the version of sys-cluster/galera
+ RDEPEND="${RDEPEND}
+ =sys-cluster/galera-${WSREP_REVISION}*
+ "
+fi
+
+if [[ ${PN} == "mysql-cluster" ]] ; then
+ # JRE at runtime, JDK at build time for the cluster Java connector.
+ mysql_version_is_at_least "7.2.9" && RDEPEND="${RDEPEND} java? ( >=virtual/jre-1.6 )" && \
+ DEPEND="${DEPEND} java? ( >=virtual/jdk-1.6 )"
+fi
+
+DEPEND="${DEPEND}
+ virtual/yacc
+"
+
+DEPEND="${DEPEND} static? ( sys-libs/ncurses[static-libs] )"
+
+# compile-time-only
+DEPEND="${DEPEND} >=dev-util/cmake-2.4.3"
+
+# compile-time-only
+if mysql_version_is_at_least "5.5.8" ; then
+ DEPEND="${DEPEND} >=dev-util/cmake-2.6.3"
+fi
+
+# dev-perl/DBD-mysql is needed by some scripts installed by MySQL
+PDEPEND="perl? ( >=dev-perl/DBD-mysql-2.9004 )"
+
+# For other stuff to bring us in
+PDEPEND="${PDEPEND} ~virtual/mysql-${MYSQL_PV_MAJOR}"
+
+#
+# External patches
+#
+
+# MariaDB has integrated PBXT until it was dropped in version 5.5.33
+# PBXT_VERSION means that we have a PBXT patch for this PV
+# PBXT was only introduced after 5.1.12
+# True (returns 0) when a PBXT patch exists for this version: PBXT_VERSION
+# is set and the flavour is not MariaDB (which bundles PBXT itself).
+pbxt_patch_available() {
+ [[ ${PN} != "mariadb" && ${PN} != "mariadb-galera" && ( -n "${PBXT_VERSION}" ) ]]
+ return $?
+}
+
+# True when PBXT can be enabled at all: either via an external patch, or
+# built in for MariaDB 5.1 - 5.5.32 (it was dropped in 5.5.33).
+# NOTE(review): `a || b && c` parses as `(a || b) && c` in shell, so the
+# version-range check also applies to the patch case — presumably the intent
+# was `a || (b && c)`; confirm before relying on this for non-MariaDB.
+pbxt_available() {
+ pbxt_patch_available || [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] && mysql_check_version_range "5.1 to 5.5.32"
+ return $?
+}
+
+# Get the percona tarball if XTRADB_VER and PERCONA_VER are both set
+# MariaDB has integrated XtraDB
+# XTRADB_VERS means that we have a XTRADB patch for this PV
+# XTRADB was only introduced after 5.1.26
+# True (returns 0) when an XtraDB patch exists for this version: both
+# XTRADB_VER and PERCONA_VER are set and the flavour is not MariaDB
+# (MariaDB ships XtraDB already).
+xtradb_patch_available() {
+ [[ ${PN} != "mariadb" && ${PN} != "mariadb-galera"
+ && ( -n "${XTRADB_VER}" ) && ( -n "${PERCONA_VER}" ) ]]
+ return $?
+}
+
+# Wire up SRC_URI / IUSE / REQUIRED_USE for the optional PBXT and XtraDB
+# patch tarballs, based on the availability helpers above.
+if pbxt_patch_available; then
+
+ PBXT_P="pbxt-${PBXT_VERSION}"
+ PBXT_SRC_URI="http://www.primebase.org/download/${PBXT_P}.tar.gz mirror://sourceforge/pbxt/${PBXT_P}.tar.gz"
+ SRC_URI="${SRC_URI} pbxt? ( ${PBXT_SRC_URI} )"
+fi
+
+# PBXT_NEWSTYLE means pbxt is in storage/ and gets enabled as other plugins
+# vs. built outside the dir
+if pbxt_available; then
+
+ IUSE="${IUSE} pbxt"
+ PBXT_NEWSTYLE=1
+ REQUIRED_USE="${REQUIRED_USE} pbxt? ( !embedded ) "
+fi
+
+if xtradb_patch_available; then
+ # Three historical Percona download locations are tried for the tarball.
+ XTRADB_P="percona-xtradb-${XTRADB_VER}"
+ XTRADB_SRC_URI_COMMON="${PERCONA_VER}/source/${XTRADB_P}.tar.gz"
+ XTRADB_SRC_B1="http://www.percona.com/"
+ XTRADB_SRC_B2="${XTRADB_SRC_B1}/percona-builds/"
+ XTRADB_SRC_URI1="${XTRADB_SRC_B2}/Percona-Server/Percona-Server-${XTRADB_SRC_URI_COMMON}"
+ XTRADB_SRC_URI2="${XTRADB_SRC_B2}/xtradb/${XTRADB_SRC_URI_COMMON}"
+ XTRADB_SRC_URI3="${XTRADB_SRC_B1}/${PN}/xtradb/${XTRADB_SRC_URI_COMMON}"
+ SRC_URI="${SRC_URI} xtradb? ( ${XTRADB_SRC_URI1} ${XTRADB_SRC_URI2} ${XTRADB_SRC_URI3} )"
+ IUSE="${IUSE} xtradb"
+ REQUIRED_USE="${REQUIRED_USE} xtradb? ( !embedded ) "
+fi
+
+#
+# HELPER FUNCTIONS:
+#
+
+# @FUNCTION: mysql-v2_disable_test
+# @DESCRIPTION:
+# Helper function to disable specific tests.
+# Delegates to the build-system-specific eclass (${BUILD_INHERIT} is
+# presumably "mysql-autotools" or "mysql-cmake" — set outside this view).
+mysql-v2_disable_test() {
+ ${BUILD_INHERIT}_disable_test "$@"
+}
+
+# @FUNCTION: mysql-v2_configure_minimal
+# @DESCRIPTION:
+# Helper function to configure minimal build
+# NOTE(review): the @FUNCTION tag says mysql-v2_configure_minimal but the
+# function is actually named configure_minimal — confirm which is intended.
+configure_minimal() {
+ ${BUILD_INHERIT}_configure_minimal "$@"
+}
+
+# @FUNCTION: mysql-v2_configure_common
+# @DESCRIPTION:
+# Helper function to configure common builds
+# NOTE(review): the @FUNCTION tag says mysql-v2_configure_common but the
+# function is actually named configure_common — confirm which is intended.
+configure_common() {
+ ${BUILD_INHERIT}_configure_common "$@"
+}
+
+#
+# EBUILD FUNCTIONS
+#
+
+# @FUNCTION: mysql-v2_pkg_setup
+# @DESCRIPTION:
+# Perform some basic tests and tasks during pkg_setup phase:
+# die if FEATURES="test", USE="-minimal" and not using FEATURES="userpriv"
+# check for conflicting use flags
+# create new user and group for mysql
+# warn about deprecated features
+mysql-v2_pkg_setup() {
+
+ # Upstream's test suite refuses to run as root; only warns here (no die).
+ if has test ${FEATURES} ; then
+ if ! use minimal ; then
+ if ! has userpriv ${FEATURES} ; then
+ eerror "Testing with FEATURES=-userpriv is no longer supported by upstream. Tests MUST be run as non-root."
+ fi
+ fi
+ fi
+
+ # Check for USE flag problems in pkg_setup
+ if ! mysql_version_is_at_least "5.2" && use debug ; then
+ # Also in package.use.mask
+ die "Bug #344885: Upstream has broken USE=debug for 5.1 series >=5.1.51"
+ fi
+
+ # This should come after all of the die statements
+ enewgroup mysql 60 || die "problem adding 'mysql' group"
+ enewuser mysql 60 -1 /dev/null mysql || die "problem adding 'mysql' user"
+
+ if use cluster && [[ "${PN}" != "mysql-cluster" ]]; then
+ ewarn "Upstream has noted that the NDB cluster support in the 5.0 and"
+ ewarn "5.1 series should NOT be put into production. In the near"
+ ewarn "future, it will be disabled from building."
+ fi
+
+ # Java setup only applies to cluster versions that ship the Java connector.
+ if [[ ${PN} == "mysql-cluster" ]] ; then
+ mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_pkg_setup
+ fi
+
+ # TokuDB requires C++11-era compiler support (gcc >= 4.7).
+ if use_if_iuse tokudb && [[ $(gcc-major-version) -lt 4 || $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 7 ]] ; then
+ eerror "${PN} with tokudb needs to be built with gcc-4.7 or later."
+ eerror "Please use gcc-config to switch to gcc-4.7 or later version."
+ die
+ fi
+
+}
+
+# @FUNCTION: mysql-v2_src_unpack
+# @DESCRIPTION:
+# Unpack the source code
+mysql-v2_src_unpack() {
+
+ # Initialize the proper variables first
+ mysql_init_vars
+
+ unpack ${A}
+ # Grab the patches (live mysql-extras come from git instead of a tarball)
+ [[ "${MY_EXTRAS_VER}" == "live" ]] && S="${WORKDIR}/mysql-extras" git-r3_src_unpack
+
+ # Rename the unpacked tree to the expected ${S} location.
+ mv -f "${WORKDIR}/${MY_SOURCEDIR}" "${S}"
+}
+
+# @FUNCTION: mysql-v2_src_prepare
+# @DESCRIPTION:
+# Apply patches to the source code and remove unneeded bundled libs.
+# Delegates to the build-system eclass, then runs the optional Java prepare
+# step for mysql-cluster >= 7.2.9.
+mysql-v2_src_prepare() {
+ ${BUILD_INHERIT}_src_prepare "$@"
+ if [[ ${PN} == "mysql-cluster" ]] ; then
+ mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_src_prepare
+ fi
+}
+
+# @FUNCTION: mysql-v2_src_configure
+# @DESCRIPTION:
+# Configure mysql to build the code for Gentoo respecting the use flags.
+# Thin dispatch to the build-system-specific implementation.
+mysql-v2_src_configure() {
+ ${BUILD_INHERIT}_src_configure "$@"
+}
+
+# @FUNCTION: mysql-v2_src_compile
+# @DESCRIPTION:
+# Compile the mysql code.
+# Thin dispatch to the build-system-specific implementation.
+mysql-v2_src_compile() {
+ ${BUILD_INHERIT}_src_compile "$@"
+}
+
+# @FUNCTION: mysql-v2_src_install
+# @DESCRIPTION:
+# Install mysql.
+# Thin dispatch to the build-system-specific implementation.
+mysql-v2_src_install() {
+ ${BUILD_INHERIT}_src_install "$@"
+}
+
+# @FUNCTION: mysql-v2_pkg_preinst
+# @DESCRIPTION:
+# Create the user and groups for mysql - die if that fails.
+# Re-creates them here (in addition to pkg_setup) so binary-package merges
+# also get the mysql user/group.
+mysql-v2_pkg_preinst() {
+ if [[ ${PN} == "mysql-cluster" ]] ; then
+ mysql_version_is_at_least "7.2.9" && java-pkg-opt-2_pkg_preinst
+ fi
+ enewgroup mysql 60 || die "problem adding 'mysql' group"
+ enewuser mysql 60 -1 /dev/null mysql || die "problem adding 'mysql' user"
+}
+
+# @FUNCTION: mysql-v2_pkg_postinst
+# @DESCRIPTION:
+# Run post-installation tasks:
+# create the dir for logfiles if non-existent
+# touch the logfiles and secure them
+# install scripts
+# issue required steps for optional features
+# issue deprecation warnings
mysql-v2_pkg_postinst() {
+
+ # Make sure the vars are correctly initialized
+ mysql_init_vars
+
+ # Check FEATURES="collision-protect" before removing this
+ [[ -d "${ROOT}${MY_LOGDIR}" ]] || install -d -m0750 -o mysql -g mysql "${ROOT}${MY_LOGDIR}"
+
+ # Secure the logfiles
+ touch "${ROOT}${MY_LOGDIR}"/mysql.{log,err}
+ chown mysql:mysql "${ROOT}${MY_LOGDIR}"/mysql*
+ chmod 0660 "${ROOT}${MY_LOGDIR}"/mysql*
+
+ # Minimal builds don't have the MySQL server
+ if ! use minimal ; then
+ # Install sample configs and helper docs only when present in ${S}.
+ docinto "support-files"
+ for script in \
+ support-files/my-*.cnf \
+ support-files/magic \
+ support-files/ndb-config-2-node.ini
+ do
+ [[ -f "${script}" ]] \
+ && dodoc "${script}"
+ done
+
+ docinto "scripts"
+ for script in scripts/mysql* ; do
+ # Skip *.sh files; only plain mysql* scripts are documented.
+ if [[ -f "${script}" && "${script%.sh}" == "${script}" ]]; then
+ dodoc "${script}"
+ fi
+ done
+
+ if [[ ${PN} == "mariadb" || ${PN} == "mariadb-galera" ]] ; then
+ if use_if_iuse pam ; then
+ einfo
+ elog "This install includes the PAM authentication plugin."
+ elog "To activate and configure the PAM plugin, please read:"
+ elog "https://kb.askmonty.org/en/pam-authentication-plugin/"
+ einfo
+ fi
+ fi
+
+ einfo
+ elog "You might want to run:"
+ elog "\"emerge --config =${CATEGORY}/${PF}\""
+ elog "if this is a new install."
+ einfo
+
+ einfo
+ elog "If you are upgrading major versions, you should run the"
+ elog "mysql_upgrade tool."
+ einfo
+
+ if [[ ${PN} == "mariadb-galera" ]] ; then
+ einfo
+ elog "Be sure to edit the my.cnf file to activate your cluster settings."
+ elog "This should be done after running \"emerge --config =${CATEGORY}/${PF}\""
+ elog "The first time the cluster is activated, you should add"
+ elog "--wsrep-new-cluster to the options in /etc/conf.d/mysql for one node."
+ elog "This option should then be removed for subsequent starts."
+ einfo
+ fi
+ fi
+
+ if use_if_iuse pbxt ; then
+ elog "Note: PBXT is now statically built when enabled."
+ elog ""
+ elog "If, you previously installed as a plugin and "
+ elog "you cannot start the MySQL server,"
+ elog "remove the ${MY_DATADIR}/mysql/plugin.* files, then"
+ elog "use the MySQL upgrade script to restore the table"
+ elog "or execute the following SQL command:"
+ elog " CREATE TABLE IF NOT EXISTS plugin ("
+ elog " name char(64) binary DEFAULT '' NOT NULL,"
+ elog " dl char(128) DEFAULT '' NOT NULL,"
+ elog " PRIMARY KEY (name)"
+ elog " ) CHARACTER SET utf8 COLLATE utf8_bin;"
+ fi
+}
+
+# @FUNCTION: mysql-v2_getopt
+# @DESCRIPTION:
+# Use my_print_defaults to extract specific config options
+# $1 = config section(s), $2 = option name; prints the whole --opt=value line.
+mysql-v2_getopt() {
+ local mypd="${EROOT}"/usr/bin/my_print_defaults
+ section="$1"
+ flag="--${2}="
+ "${mypd}" $section | sed -n "/^${flag}/p"
+}
+
+# @FUNCTION: mysql-v2_getoptval
+# @DESCRIPTION:
+# Use my_print_defaults to extract specific config options
+# Like mysql-v2_getopt, but strips the "--opt=" prefix and prints the value.
+mysql-v2_getoptval() {
+ local mypd="${EROOT}"/usr/bin/my_print_defaults
+ section="$1"
+ flag="--${2}="
+ "${mypd}" $section | sed -n "/^${flag}/s,${flag},,gp"
+}
+
+# @FUNCTION: mysql-v2_pkg_config
+# @DESCRIPTION:
+# Configure mysql environment.
+# Interactive "emerge --config" step: relocates MY_DATADIR if it changed,
+# prompts for/reads the root password, runs mysql_install_db, loads
+# timezone and help tables through a temporary server, then shuts it down.
+mysql-v2_pkg_config() {
+
+ local old_MY_DATADIR="${MY_DATADIR}"
+ local old_HOME="${HOME}"
+ # my_print_defaults needs to read stuff in $HOME/.my.cnf
+ export HOME=${EPREFIX}/root
+
+ # Make sure the vars are correctly initialized
+ mysql_init_vars
+
+ [[ -z "${MY_DATADIR}" ]] && die "Sorry, unable to find MY_DATADIR"
+
+ if built_with_use ${CATEGORY}/${PN} minimal ; then
+ die "Minimal builds do NOT include the MySQL server"
+ fi
+
+ # If the configured datadir moved since install, migrate (or reuse) it.
+ if [[ ( -n "${MY_DATADIR}" ) && ( "${MY_DATADIR}" != "${old_MY_DATADIR}" ) ]]; then
+ local MY_DATADIR_s="${ROOT}/${MY_DATADIR}"
+ MY_DATADIR_s="${MY_DATADIR_s%%/}"
+ local old_MY_DATADIR_s="${ROOT}/${old_MY_DATADIR}"
+ old_MY_DATADIR_s="${old_MY_DATADIR_s%%/}"
+
+ if [[ ( -d "${old_MY_DATADIR_s}" ) && ( "${old_MY_DATADIR_s}" != / ) ]]; then
+ if [[ -d "${MY_DATADIR_s}" ]]; then
+ ewarn "Both ${old_MY_DATADIR_s} and ${MY_DATADIR_s} exist"
+ ewarn "Attempting to use ${MY_DATADIR_s} and preserving ${old_MY_DATADIR_s}"
+ else
+ elog "Moving MY_DATADIR from ${old_MY_DATADIR_s} to ${MY_DATADIR_s}"
+ mv --strip-trailing-slashes -T "${old_MY_DATADIR_s}" "${MY_DATADIR_s}" \
+ || die "Moving MY_DATADIR failed"
+ fi
+ else
+ ewarn "Previous MY_DATADIR (${old_MY_DATADIR_s}) does not exist"
+ if [[ -d "${MY_DATADIR_s}" ]]; then
+ ewarn "Attempting to use ${MY_DATADIR_s}"
+ else
+ eerror "New MY_DATADIR (${MY_DATADIR_s}) does not exist"
+ die "Configuration Failed! Please reinstall ${CATEGORY}/${PN}"
+ fi
+ fi
+ fi
+
+ local pwd1="a"
+ local pwd2="b"
+ local maxtry=15
+
+ # Prefer a password already present in the [client]/[mysql] config sections.
+ if [ -z "${MYSQL_ROOT_PASSWORD}" ]; then
+ MYSQL_ROOT_PASSWORD="$(mysql-v2_getoptval 'client mysql' password)"
+ fi
+ MYSQL_TMPDIR="$(mysql-v2_getoptval mysqld tmpdir)"
+ # These are dir+prefix
+ MYSQL_RELAY_LOG="$(mysql-v2_getoptval mysqld relay-log)"
+ MYSQL_RELAY_LOG=${MYSQL_RELAY_LOG%/*}
+ MYSQL_LOG_BIN="$(mysql-v2_getoptval mysqld log-bin)"
+ MYSQL_LOG_BIN=${MYSQL_LOG_BIN%/*}
+
+ if [[ ! -d "${ROOT}"/$MYSQL_TMPDIR ]]; then
+ einfo "Creating MySQL tmpdir $MYSQL_TMPDIR"
+ install -d -m 770 -o mysql -g mysql "${ROOT}"/$MYSQL_TMPDIR
+ fi
+ if [[ ! -d "${ROOT}"/$MYSQL_LOG_BIN ]]; then
+ einfo "Creating MySQL log-bin directory $MYSQL_LOG_BIN"
+ install -d -m 770 -o mysql -g mysql "${ROOT}"/$MYSQL_LOG_BIN
+ fi
+ if [[ ! -d "${EROOT}"/$MYSQL_RELAY_LOG ]]; then
+ einfo "Creating MySQL relay-log directory $MYSQL_RELAY_LOG"
+ install -d -m 770 -o mysql -g mysql "${EROOT}"/$MYSQL_RELAY_LOG
+ fi
+
+ # Refuse to clobber an existing database.
+ if [[ -d "${ROOT}/${MY_DATADIR}/mysql" ]] ; then
+ ewarn "You have already a MySQL database in place."
+ ewarn "(${ROOT}/${MY_DATADIR}/*)"
+ ewarn "Please rename or delete it if you wish to replace it."
+ die "MySQL database already exists!"
+ fi
+
+ # Bug #213475 - MySQL _will_ object strenuously if your machine is named
+ # localhost. Also causes weird failures.
+ [[ "${HOSTNAME}" == "localhost" ]] && die "Your machine must NOT be named localhost"
+
+ # Interactively prompt for a root password when none was configured.
+ if [ -z "${MYSQL_ROOT_PASSWORD}" ]; then
+
+ einfo "Please provide a password for the mysql 'root' user now, in the"
+ einfo "MYSQL_ROOT_PASSWORD env var or through the ${HOME}/.my.cnf file."
+ ewarn "Avoid [\"'\\_%] characters in the password"
+ read -rsp " >" pwd1 ; echo
+
+ einfo "Retype the password"
+ read -rsp " >" pwd2 ; echo
+
+ if [[ "x$pwd1" != "x$pwd2" ]] ; then
+ die "Passwords are not the same"
+ fi
+ MYSQL_ROOT_PASSWORD="${pwd1}"
+ unset pwd1 pwd2
+ fi
+
+ local options
+ local sqltmp="$(emktemp)"
+
+ # Fix bug 446200. Don't reference host my.cnf, needs to come first,
+ # see http://bugs.mysql.com/bug.php?id=31312
+ use prefix && options="${options} --defaults-file=${MY_SYSCONFDIR}/my.cnf"
+
+ local help_tables="${ROOT}${MY_SHAREDSTATEDIR}/fill_help_tables.sql"
+ [[ -r "${help_tables}" ]] \
+ && cp "${help_tables}" "${TMPDIR}/fill_help_tables.sql" \
+ || touch "${TMPDIR}/fill_help_tables.sql"
+ help_tables="${TMPDIR}/fill_help_tables.sql"
+
+ # Figure out which options we need to disable to do the setup
+ helpfile="${TMPDIR}/mysqld-help"
+ ${EROOT}/usr/sbin/mysqld --verbose --help >"${helpfile}" 2>/dev/null
+ for opt in grant-tables host-cache name-resolve networking slave-start \
+ federated ssl log-bin relay-log slow-query-log external-locking \
+ ndbcluster log-slave-updates \
+ ; do
+ optexp="--(skip-)?${opt}" optfull="--loose-skip-${opt}"
+ egrep -sq -- "${optexp}" "${helpfile}" && options="${options} ${optfull}"
+ done
+ # But some options changed names
+ egrep -sq external-locking "${helpfile}" && \
+ options="${options/skip-locking/skip-external-locking}"
+
+ use prefix || options="${options} --user=mysql"
+
+ # MySQL 5.6+ needs InnoDB
+ if [[ ${PN} == "mysql" || ${PN} == "percona-server" ]] ; then
+ mysql_version_is_at_least "5.6" || options="${options} --loose-skip-innodb"
+ fi
+
+ einfo "Creating the mysql database and setting proper permissions on it ..."
+
+ # Now that /var/run is a tmpfs mount point, we need to ensure it exists before using it
+ PID_DIR="${EROOT}/var/run/mysqld"
+ if [[ ! -d "${PID_DIR}" ]]; then
+ mkdir -p "${PID_DIR}" || die "Could not create pid directory"
+ chown mysql:mysql "${PID_DIR}" || die "Could not set ownership on pid directory"
+ chmod 755 "${PID_DIR}" || die "Could not set permissions on pid directory"
+ fi
+
+ pushd "${TMPDIR}" &>/dev/null
+ #cmd="'${EROOT}/usr/share/mysql/scripts/mysql_install_db' '--basedir=${EPREFIX}/usr' ${options}"
+ cmd=${EROOT}usr/share/mysql/scripts/mysql_install_db
+ [[ -f ${cmd} ]] || cmd=${EROOT}usr/bin/mysql_install_db
+ cmd="'$cmd' '--basedir=${EPREFIX}/usr' ${options} '--datadir=${ROOT}/${MY_DATADIR}' '--tmpdir=${ROOT}/${MYSQL_TMPDIR}'"
+ einfo "Command: $cmd"
+ eval $cmd \
+ >"${TMPDIR}"/mysql_install_db.log 2>&1
+ if [ $? -ne 0 ]; then
+ grep -B5 -A999 -i "ERROR" "${TMPDIR}"/mysql_install_db.log 1>&2
+ die "Failed to run mysql_install_db. Please review ${EPREFIX}/var/log/mysql/mysqld.err AND ${TMPDIR}/mysql_install_db.log"
+ fi
+ popd &>/dev/null
+ [[ -f "${ROOT}/${MY_DATADIR}/mysql/user.frm" ]] \
+ || die "MySQL databases not installed"
+ chown -R mysql:mysql "${ROOT}/${MY_DATADIR}" 2>/dev/null
+ chmod 0750 "${ROOT}/${MY_DATADIR}" 2>/dev/null
+
+ # Filling timezones, see
+ # http://dev.mysql.com/doc/mysql/en/time-zone-support.html
+ "${EROOT}/usr/bin/mysql_tzinfo_to_sql" "${EROOT}/usr/share/zoneinfo" > "${sqltmp}" 2>/dev/null
+
+ if [[ -r "${help_tables}" ]] ; then
+ cat "${help_tables}" >> "${sqltmp}"
+ fi
+
+ # Start a throwaway server on a random socket/pidfile to load the SQL.
+ local socket="${EROOT}/var/run/mysqld/mysqld${RANDOM}.sock"
+ local pidfile="${EROOT}/var/run/mysqld/mysqld${RANDOM}.pid"
+ local mysqld="${EROOT}/usr/sbin/mysqld \
+ ${options} \
+ $(use prefix || echo --user=mysql) \
+ --log-warnings=0 \
+ --basedir=${EROOT}/usr \
+ --datadir=${ROOT}/${MY_DATADIR} \
+ --max_allowed_packet=8M \
+ --net_buffer_length=16K \
+ --default-storage-engine=MyISAM \
+ --socket=${socket} \
+ --pid-file=${pidfile}
+ --tmpdir=${ROOT}/${MYSQL_TMPDIR}"
+ #einfo "About to start mysqld: ${mysqld}"
+ ebegin "Starting mysqld"
+ einfo "Command ${mysqld}"
+ ${mysqld} &
+ rc=$?
+ # Poll up to ${maxtry} seconds for the server socket to appear.
+ while ! [[ -S "${socket}" || "${maxtry}" -lt 1 ]] ; do
+ maxtry=$((${maxtry}-1))
+ echo -n "."
+ sleep 1
+ done
+ eend $rc
+
+ if ! [[ -S "${socket}" ]]; then
+ die "Completely failed to start up mysqld with: ${mysqld}"
+ fi
+
+ ebegin "Setting root password"
+ # Do this from memory, as we don't want clear text passwords in temp files
+ local sql="UPDATE mysql.user SET Password = PASSWORD('${MYSQL_ROOT_PASSWORD}') WHERE USER='root'"
+ "${EROOT}/usr/bin/mysql" \
+ --socket=${socket} \
+ -hlocalhost \
+ -e "${sql}"
+ eend $?
+
+ ebegin "Loading \"zoneinfo\", this step may require a few seconds"
+ "${EROOT}/usr/bin/mysql" \
+ --socket=${socket} \
+ -hlocalhost \
+ -uroot \
+ --password="${MYSQL_ROOT_PASSWORD}" \
+ mysql < "${sqltmp}"
+ rc=$?
+ eend $?
+ [[ $rc -ne 0 ]] && ewarn "Failed to load zoneinfo!"
+
+ # Stop the server and cleanup
+ einfo "Stopping the server ..."
+ kill $(< "${pidfile}" )
+ rm -f "${sqltmp}"
+ wait %1
+ einfo "Done"
+}
+
+# @FUNCTION: mysql-v2_pkg_postrm
+# @DESCRIPTION:
+# Remove mysql symlinks.
+# Currently a no-op; the symlink maintenance call is commented out.
+mysql-v2_pkg_postrm() {
+
+ : # mysql_lib_symlinks "${ED}"
+}
diff --git a/eclass/mysql_fx.eclass b/eclass/mysql_fx.eclass
new file mode 100644
index 000000000000..1c501cbeb9e5
--- /dev/null
+++ b/eclass/mysql_fx.eclass
@@ -0,0 +1,306 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Author: Francesco Riosa (Retired) <vivo@gentoo.org>
+# Maintainer:
+# - MySQL Team <mysql-bugs@gentoo.org>
+# - Luca Longinotti <chtekk@gentoo.org>
+
+inherit multilib
+
+#
+# Helper function, version (integer) may have sections separated by dots
+# for readability.
+# Converts a dotted version like "5.6.12" into a comparable integer using
+# positional weights 1000000/10000/100/1 (so exactly four components are
+# considered); a plain integer argument is passed through with leading
+# zeroes stripped. Prints the result on stdout.
+#
+stripdots() {
+ local dotver=${1:-"0"}
+ local v=""
+ local ret=0
+ if [[ "${dotver/./}" != "${dotver}" ]] ; then
+ # dotted version number
+ for i in 1000000 10000 100 1 ; do
+ v=${dotver%%\.*}
+ # remove leading zeroes
+ while [[ ${#v} -gt 1 ]] && [[ ${v:0:1} == "0" ]] ; do v=${v#0} ; done
+ # increment integer version number
+ ret=$(( ${v} * ${i} + ${ret} ))
+ if [[ "${dotver}" == "${dotver/\.}" ]] ; then
+ dotver=0
+ else
+ dotver=${dotver#*\.}
+ fi
+ done
+ echo "${ret}"
+ else
+ # already an integer
+ v=${dotver}
+ while [[ ${#v} -gt 1 ]] && [[ ${v:0:1} == "0" ]] ; do v=${v#0} ; done
+ echo "${v}"
+ fi
+}
+
+#
+# Check if a version number falls inside a given range.
+# The range includes the extremes and must be specified as
+# "low_version to high_version" i.e. "4.1.2 to 5.1.99.99".
+# Returns true if inside the range.
+# $1 = range string; $2 = version to test (defaults to MYSQL_VERSION_ID,
+# which is already in stripdots integer form).
+#
+mysql_check_version_range() {
+ local lbound="${1%% to *}" ; lbound=$(stripdots "${lbound}")
+ local rbound="${1#* to }" ; rbound=$(stripdots "${rbound}")
+ local my_ver="${2:-"${MYSQL_VERSION_ID}"}"
+ [[ ${lbound} -le ${my_ver} ]] && [[ ${my_ver} -le ${rbound} ]] && return 0
+ return 1
+}
+
+#
+# True if at least one applicable range is found for the patch.
+# $1 = bit flags accumulated by _mysql_mv_patches (1=patch seen, 2=bad
+# version range, 4=good version range, 8=PN mismatch, 16=PN match);
+# $2 = patch file name. On 1+4+16 == 21 the patch is symlinked into
+# EPATCH_SOURCE; otherwise a QA warning may be emitted for missing tags.
+#
+_mysql_test_patch_ver_pn() {
+ local allelements=", version, package name"
+ # So that it fails the directory test if none of them exist
+ local filesdir="/dev/null"
+ for d in "${WORKDIR}/mysql-extras-${MY_EXTRAS_VER}" \
+ "${WORKDIR}/mysql-extras" ; do
+ if [ -d "${d}" ]; then
+ filesdir="${d}"
+ break
+ fi
+ done
+
+ [[ -d "${filesdir}" ]] || die "Source dir must be a directory"
+ local flags=$1 pname=$2
+ if [[ $(( $flags & $(( 1 + 4 + 16 )) )) -eq 21 ]] ; then
+ einfo "using '${pname}'"
+ ln -sf "${filesdir}/${pname}" "${EPATCH_SOURCE}" || die "Couldn't move ${pname}"
+ return 0
+ fi
+
+ # Drop the elements that WERE present from the QA message.
+ [[ $(( $flags & $(( 2 + 4 )) )) -gt 0 ]] \
+ && allelements="${allelements//", version"}"
+
+ [[ $(( $flags & $(( 8 + 16 )) )) -gt 0 ]] \
+ && allelements="${allelements//", package name"}"
+
+ [[ -n "${allelements}" ]] && [[ "${flags}" -gt 0 ]] \
+ && ewarn "QA notice: ${allelements} missing in ${pname} patch"
+
+ return 1
+}
+
+#
+# Parse a "index_file" looking for patches to apply to the
+# current MySQL version.
+# If the patch applies, print its description.
+# $1 = optional index file (falls back to 0000_index.txt/000_index.txt in
+# the mysql-extras dir), $2 = version (default MYSQL_VERSION_ID),
+# $3 = per-patch test callback (default _mysql_test_patch_ver_pn).
+#
+mysql_mv_patches() {
+ # So that it fails the directory test if none of them exist
+ local filesdir="/dev/null"
+ if [[ -z "${1}" ]]; then
+ for d in "${WORKDIR}/mysql-extras-${MY_EXTRAS_VER}" \
+ "${WORKDIR}/mysql-extras" ; do
+ if [ -d "${d}" ]; then
+ filesdir="${d}"
+ break
+ fi
+ done
+ [[ -d "${filesdir}" ]] || die "No patches directory found!"
+ fi
+
+ # First existing candidate wins: explicit $1, then the default indexes.
+ for i in "$1" "${filesdir}/0000_index.txt" "${filesdir}/000_index.txt" ; do
+ if [ -n "$i" -a -f "$i" ]; then
+ local index_file="$i"
+ break
+ fi
+ done
+
+ local my_ver="${2:-"${MYSQL_VERSION_ID}"}"
+ local my_test_fx=${3:-"_mysql_test_patch_ver_pn"}
+ _mysql_mv_patches "${index_file}" "${my_ver}" "${my_test_fx}"
+}
+
+# Worker for mysql_mv_patches: stream the index file, accumulate flag bits
+# per @patch stanza (@ver version ranges, @pn package names, @@ description
+# lines) and hand each completed stanza to ${my_test_fx}.
+_mysql_mv_patches() {
+ local index_file="${1}"
+ local my_ver="${2}"
+ local my_test_fx="${3}"
+ local dsc ndsc=0 i
+ dsc=( )
+
+ # Values for flags are (2^x):
+ # 1 - one patch found
+ # 2 - at least one version range is wrong
+ # 4 - at least one version range is ok
+ # 8 - at least one ${PN} did not match
+ # 16 - at least one ${PN} has been matched
+ local flags=0 pname=""
+ while read row ; do
+ case "${row}" in
+ @patch\ *)
+ # A new stanza starts: flush the previous one first.
+ [[ -n "${pname}" ]] \
+ && ${my_test_fx} ${flags} "${pname}" \
+ && for (( i=0 ; $i < $ndsc ; i++ )) ; do einfo "> ${dsc[$i]}" ; done
+ flags=1 ; ndsc=0 ; dsc=( )
+ pname=${row#"@patch "}
+ ;;
+ @ver\ *)
+ if mysql_check_version_range "${row#"@ver "}" "${my_ver}" ; then
+ flags=$(( ${flags} | 4 ))
+ else
+ flags=$(( ${flags} | 2 ))
+ fi
+ ;;
+ @pn\ *)
+ if [[ ${row#"@pn "} == "${PN}" ]] ; then
+ flags=$(( ${flags} | 16 ))
+ else
+ flags=$(( ${flags} | 8 ))
+ fi
+ ;;
+ # @use\ *) ;;
+ @@\ *)
+ dsc[$ndsc]="${row#"@@ "}"
+ (( ++ndsc ))
+ ;;
+ esac
+ done < "${index_file}"
+
+ # Flush the final stanza (the loop only flushes on the NEXT @patch line).
+ ${my_test_fx} ${flags} "${pname}" \
+ && for (( i=0 ; $i < $ndsc ; i++ )) ; do einfo "> ${dsc[$i]}" ; done
+}
+
+#
+# Is $2 (defaults to $MYSQL_VERSION_ID) at least version $1?
+# (nice) idea from versionator.eclass
+# Both values are normalized through stripdots before the integer compare.
+#
+mysql_version_is_at_least() {
+ local want_s=$(stripdots "$1") have_s=$(stripdots "${2:-${MYSQL_VERSION_ID}}")
+ [[ -z "${want_s}" ]] && die "mysql_version_is_at_least missing value to check"
+ [[ ${want_s} -le ${have_s} ]] && return 0 || return 1
+}
+
+#
+# To be called on the live filesystem, reassigning symlinks of each MySQL
+# library to the best version available.
+# $1 = root directory to operate under (symlinks are created in
+# ${1}/usr/$(get_libdir), pointing at libs found in the mysql/ subdir).
+#
+mysql_lib_symlinks() {
+
+ local d dirlist maxdots libname libnameln libsuffix reldir
+ libsuffix=$(get_libname)
+
+ einfo "libsuffix = ${libsuffix}"
+ einfo "Updating MySQL libraries symlinks"
+
+ reldir="${1}"
+ pushd "${reldir}/usr/$(get_libdir)" &> /dev/null
+
+ # dirlist must contain the least significant directory left
+ dirlist="mysql"
+
+ # waste some time in removing and recreating symlinks
+ for d in $dirlist ; do
+ for libname in $( find "${d}" -mindepth 1 -maxdepth 1 -name "*${libsuffix}*" -and -not -type "l" 2>/dev/null ) ; do
+ # maxdot is a limit versus infinite loop
+ maxdots=0
+ libnameln=${libname##*/}
+ # loop in version of the library to link it, similar to how
+ # libtool works
+ if [[ ${CHOST} == *-darwin* ]] ; then
+ # macho: libname.x.y.z.dylib
+ local libbasename=${libnameln%%.*} # libname
+ local libver=${libnameln#${libbasename}} # .x.y.z.dylib
+ libver=${libver%${libsuffix}} # .x.y.z
+ while [[ -n ${libver} ]] && [[ ${maxdots} -lt 6 ]] ; do
+ libnameln="${libbasename}${libver}${libsuffix}"
+ rm -f "${libnameln}"
+ ln -s "${libname}" "${libnameln}"
+ (( ++maxdots ))
+ libver=${libver%.*}
+ done
+ libnameln="${libbasename}${libsuffix}"
+ rm -f "${libnameln}"
+ ln -s "${libname}" "${libnameln}"
+ else
+ # elf: libname.so.x.y.z
+ # NOTE(review): '${libsuffix}' is single-quoted, so the comparison
+ # is against that literal string, not the expanded suffix — the
+ # loop termination therefore relies on the maxdots counter alone.
+ # Confirm whether this should be "${libsuffix}".
+ while [[ ${libnameln:0-3} != '${libsuffix}' ]] && [[ ${maxdots} -lt 6 ]] ; do
+ rm -f "${libnameln}"
+ ln -s "${libname}" "${libnameln}"
+ (( ++maxdots ))
+ libnameln="${libnameln%.*}"
+ done
+ rm -f "${libnameln}"
+ ln -s "${libname}" "${libnameln}"
+ fi
+ done
+ done
+
+ popd &> /dev/null
+}
+
+# @FUNCTION: mysql_init_vars
+# @DESCRIPTION:
+# void mysql_init_vars()
+# Initialize global variables
+# Sets defaults for MY_* paths (preserving any caller-provided values),
+# derives MY_DATADIR from my.cnf/my_print_defaults when unset, and exports
+# the results for later phases.
+# 2005-11-19 <vivo@gentoo.org>
+mysql_init_vars() {
+ MY_SHAREDSTATEDIR=${MY_SHAREDSTATEDIR="${EPREFIX}/usr/share/mysql"}
+ MY_SYSCONFDIR=${MY_SYSCONFDIR="${EPREFIX}/etc/mysql"}
+ MY_LOCALSTATEDIR=${MY_LOCALSTATEDIR="${EPREFIX}/var/lib/mysql"}
+ MY_LOGDIR=${MY_LOGDIR="${EPREFIX}/var/log/mysql"}
+ MY_INCLUDEDIR=${MY_INCLUDEDIR="${EPREFIX}/usr/include/mysql"}
+ MY_LIBDIR=${MY_LIBDIR="${EPREFIX}/usr/$(get_libdir)/mysql"}
+
+ if [[ -z "${MY_DATADIR}" ]] ; then
+ MY_DATADIR=""
+ if [[ -f "${MY_SYSCONFDIR}/my.cnf" ]] ; then
+ # Prefer my_print_defaults; fall back to grepping my.cnf directly.
+ MY_DATADIR=`"my_print_defaults" mysqld 2>/dev/null \
+ | sed -ne '/datadir/s|^--datadir=||p' \
+ | tail -n1`
+ if [[ -z "${MY_DATADIR}" ]] ; then
+ MY_DATADIR=`grep ^datadir "${MY_SYSCONFDIR}/my.cnf" \
+ | sed -e 's/.*=\s*//' \
+ | tail -n1`
+ fi
+ fi
+ if [[ -z "${MY_DATADIR}" ]] ; then
+ MY_DATADIR="${MY_LOCALSTATEDIR}"
+ einfo "Using default MY_DATADIR"
+ fi
+ elog "MySQL MY_DATADIR is ${MY_DATADIR}"
+
+ if [[ -z "${PREVIOUS_DATADIR}" ]] ; then
+ if [[ -e "${MY_DATADIR}" ]] ; then
+ # If you get this and you're wondering about it, see bug #207636
+ elog "MySQL datadir found in ${MY_DATADIR}"
+ elog "A new one will not be created."
+ PREVIOUS_DATADIR="yes"
+ else
+ PREVIOUS_DATADIR="no"
+ fi
+ export PREVIOUS_DATADIR
+ fi
+ else
+ # During "emerge --config", re-read the config in case datadir changed.
+ if [[ ${EBUILD_PHASE} == "config" ]]; then
+ local new_MY_DATADIR
+ new_MY_DATADIR=`"my_print_defaults" mysqld 2>/dev/null \
+ | sed -ne '/datadir/s|^--datadir=||p' \
+ | tail -n1`
+
+ if [[ ( -n "${new_MY_DATADIR}" ) && ( "${new_MY_DATADIR}" != "${MY_DATADIR}" ) ]]; then
+ ewarn "MySQL MY_DATADIR has changed"
+ ewarn "from ${MY_DATADIR}"
+ ewarn "to ${new_MY_DATADIR}"
+ MY_DATADIR="${new_MY_DATADIR}"
+ fi
+ fi
+ fi
+
+ # Default source dir: tarball name from SERVER_URI minus the .tar* suffix.
+ if [ "${MY_SOURCEDIR:-unset}" == "unset" ]; then
+ MY_SOURCEDIR=${SERVER_URI##*/}
+ MY_SOURCEDIR=${MY_SOURCEDIR%.tar*}
+ fi
+
+ export MY_SHAREDSTATEDIR MY_SYSCONFDIR
+ export MY_LIBDIR MY_LOCALSTATEDIR MY_LOGDIR
+ export MY_INCLUDEDIR MY_DATADIR MY_SOURCEDIR
+}
diff --git a/eclass/mythtv-plugins.eclass b/eclass/mythtv-plugins.eclass
new file mode 100644
index 000000000000..0844fa576ba7
--- /dev/null
+++ b/eclass/mythtv-plugins.eclass
@@ -0,0 +1,129 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mythtv-plugins.eclass
+# @MAINTAINER:
+# Doug Goldstein <cardoe@gentoo.org>
+# @AUTHOR:
+# Doug Goldstein <cardoe@gentoo.org>
+# @BLURB: Installs MythTV plugins along with patches from the release-${PV}-fixes branch
+
+# NOTE: YOU MUST INHERIT EITHER qt3 or qt4 IN YOUR PLUGIN!
+
+inherit mythtv multilib versionator
+
+# Extra configure options to pass to econf
+MTVCONF=${MTVCONF:=""}
+
+SLOT="0"
+IUSE="${IUSE} debug mmx"
+
+# Plugins must match the installed media-tv/mythtv version exactly,
+# unless the ebuild opts out via MYTHTV_NODEPS.
+if [[ -z $MYTHTV_NODEPS ]] ; then
+RDEPEND="${RDEPEND}
+ =media-tv/mythtv-${MY_PV}*"
+DEPEND="${DEPEND}
+ =media-tv/mythtv-${MY_PV}*
+ >=sys-apps/sed-4"
+fi
+
+# bug 240325
+RESTRICT="strip"
+
+# Build the MYTHPLUGINS list of plugins shipped in the tarball for this
+# MythTV version: start from the 0.19-era set, then add/remove plugins at
+# the versions where upstream introduced or dropped them.
+mythtv-plugins_pkg_setup() {
+ # List of available plugins (needs to include ALL of them in the tarball)
+ MYTHPLUGINS="mythbrowser mythcontrols mythdvd mythflix mythgallery"
+ MYTHPLUGINS="${MYTHPLUGINS} mythgame mythmusic mythnews mythphone"
+ MYTHPLUGINS="${MYTHPLUGINS} mythvideo mythweather mythweb"
+
+ if version_is_at_least "0.20" ; then
+ MYTHPLUGINS="${MYTHPLUGINS} mytharchive"
+ fi
+
+ if version_is_at_least "0.21_beta" ; then
+ MYTHPLUGINS="${MYTHPLUGINS} mythzoneminder mythmovies"
+ MYTHPLUGINS="${MYTHPLUGINS/mythdvd/}"
+ fi
+
+ if version_is_at_least "0.22_beta" ; then
+ MYTHPLUGINS="${MYTHPLUGINS/mythcontrols/}"
+ MYTHPLUGINS="${MYTHPLUGINS/mythphone/}"
+ fi
+
+ if version_is_at_least "0.23_beta" ; then
+ MYTHPLUGINS="${MYTHPLUGINS/mythflix/}"
+ MYTHPLUGINS="${MYTHPLUGINS} mythnetvision"
+ fi
+}
+
+# Patch the qmake settings.pro: install PREFIX to /usr, inject the user's
+# C(XX)FLAGS, and rewrite lib paths to the multilib $(get_libdir) in every
+# .pro file.
+mythtv-plugins_src_prepare() {
+ sed -e 's!PREFIX = /usr/local!PREFIX = /usr!' \
+ -i 'settings.pro' || die "fixing PREFIX to /usr failed"
+
+ sed -e "s!QMAKE_CXXFLAGS_RELEASE = -O3 -march=pentiumpro -fomit-frame-pointer!QMAKE_CXXFLAGS_RELEASE = ${CXXFLAGS}!" \
+ -i 'settings.pro' || die "Fixing QMake's CXXFLAGS failed"
+
+ sed -e "s!QMAKE_CFLAGS_RELEASE = \$\${QMAKE_CXXFLAGS_RELEASE}!QMAKE_CFLAGS_RELEASE = ${CFLAGS}!" \
+ -i 'settings.pro' || die "Fixing Qmake's CFLAGS failed"
+
+ find "${S}" -name '*.pro' -exec sed -i \
+ -e "s:\$\${PREFIX}/lib/:\$\${PREFIX}/$(get_libdir)/:g" \
+ -e "s:\$\${PREFIX}/lib$:\$\${PREFIX}/$(get_libdir):g" \
+ {} \;
+}
+
+# Toggle debug/MMX in settings.pro, then run ./configure enabling only this
+# package's plugin (--enable-${PN}) and disabling every other known one.
+mythtv-plugins_src_configure() {
+ cd "${S}"
+
+ if use debug; then
+ sed -e 's!CONFIG += release!CONFIG += debug!' \
+ -i 'settings.pro' || die "switching to debug build failed"
+ fi
+
+# if ( use x86 && ! use mmx ) || ! use amd64 ; then
+ if ( ! use mmx ); then
+ sed -e 's!DEFINES += HAVE_MMX!DEFINES -= HAVE_MMX!' \
+ -i 'settings.pro' || die "disabling MMX failed"
+ fi
+
+ local myconf=""
+
+ if has ${PN} ${MYTHPLUGINS} ; then
+ for x in ${MYTHPLUGINS} ; do
+ if [[ ${PN} == ${x} ]] ; then
+ myconf="${myconf} --enable-${x}"
+ else
+ myconf="${myconf} --disable-${x}"
+ fi
+ done
+ else
+ die "Package ${PN} is unsupported"
+ fi
+
+ # The tarball's configure script may not be executable after unpack.
+ chmod +x configure
+ econf ${myconf} ${MTVCONF}
+}
+
+# Generate the Makefiles with the qmake wrapper matching the Qt major
+# version for this MythTV release (Qt4 for 0.22+, Qt3 before), then build.
+mythtv-plugins_src_compile() {
+ if version_is_at_least "0.22" ; then
+ eqmake4 mythplugins.pro || die "eqmake4 failed"
+ else
+ eqmake3 mythplugins.pro || die "eqmake3 failed"
+ fi
+ emake || die "make failed to compile"
+}
+
+# Install from this package's plugin subdirectory and pick up whatever
+# standard doc files it ships.
+mythtv-plugins_src_install() {
+ if has ${PN} ${MYTHPLUGINS} ; then
+ cd "${S}"/${PN}
+ else
+ die "Package ${PN} is unsupported"
+ fi
+
+ einstall INSTALL_ROOT="${D}"
+ for doc in AUTHORS COPYING FAQ UPGRADING ChangeLog README; do
+ test -e "${doc}" && dodoc ${doc}
+ done
+}
+
+EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_compile src_install
diff --git a/eclass/mythtv.eclass b/eclass/mythtv.eclass
new file mode 100644
index 000000000000..627c889fff4e
--- /dev/null
+++ b/eclass/mythtv.eclass
@@ -0,0 +1,48 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: mythtv.eclass
+# @MAINTAINER:
+# Doug Goldstein <cardoe@gentoo.org>
+# @AUTHOR:
+# Doug Goldstein <cardoe@gentoo.org>
+# @BLURB: Downloads the MythTV source packages and any patches from the fixes branch
+
+inherit versionator
+
+# temporary until all the packagers are fixed for bug #283798
+DEPEND="app-arch/unzip"
+
+# Release version
+MY_PV="${PV%_*}"
+
+# what product do we want
+case "${PN}" in
+ mythtv) MY_PN="mythtv";;
+ mythtv-themes) MY_PN="myththemes";;
+ mythtv-themes-extra) MY_PN="themes";;
+ *) MY_PN="mythplugins";;
+esac
+
+# _pre is from SVN trunk while _p and _beta are from SVN ${MY_PV}-fixes
+# TODO: probably ought to do something smart if the regex doesn't match anything
+[[ "${PV}" =~ (_alpha|_beta|_pre|_rc|_p)([0-9]+) ]] || {
+ eerror "Invalid version requested (_alpha|_beta|_pre|_rc|_p) only"
+ exit 1
+}
+
+REV_PREFIX="${BASH_REMATCH[1]}" # _alpha, _beta, _pre, _rc, or _p
+MYTHTV_REV="${BASH_REMATCH[2]}" # revision number
+
+case $REV_PREFIX in
+ _pre|_alpha) MYTHTV_REPO="trunk";;
+ _p|_beta|_rc) VER_COMP=( $(get_version_components ${MY_PV}) )
+ FIXES_VER="${VER_COMP[0]}-${VER_COMP[1]}"
+ MYTHTV_REPO="branches/release-${FIXES_VER}-fixes";;
+esac
+
+HOMEPAGE="http://www.mythtv.org"
+LICENSE="GPL-2"
+SRC_URI="http://svn.mythtv.org/trac/changeset/${MYTHTV_REV}/${MYTHTV_REPO}/${MY_PN}?old_path=%2F&format=zip -> ${MY_PN}-${PV}.zip"
+S="${WORKDIR}/${MYTHTV_REPO}/${MY_PN}"
diff --git a/eclass/netsurf.eclass b/eclass/netsurf.eclass
new file mode 100644
index 000000000000..2ad37169dec1
--- /dev/null
+++ b/eclass/netsurf.eclass
@@ -0,0 +1,178 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: netsurf.eclass
+# @MAINTAINER:
+# Michael Weber <xmw@gentoo.org>
+# @BLURB: Handle buildsystem of www.netsurf-browser.org components
+# @DESCRIPTION:
+# Handle unpacking and usage of separate buildsystem tarball and manage
+# multilib build, static-libs generation and debug building.
+#
+# Supports PATCHES and DOCS as in base.eclass
+
+case ${EAPI:-0} in
+ 0|1|2|3|4) die "this eclass doesn't support EAPI<5" ;;
+ *) ;;
+esac
+
+inherit eutils toolchain-funcs multilib-minimal
+
+EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install
+
+# @ECLASS-VARIABLE: NETSURF_BUILDSYSTEM
+# @DESCRIPTION:
+# Select version of buildsystem tarball to be used along the component
+# defaults to buildsystem-1.0
+NETSURF_BUILDSYSTEM="${NETSURF_BUILDSYSTEM:-buildsystem-1.0}"
+
+# @ECLASS-VARIABLE: NETSURF_BUILDSYSTEM_SRC_URI
+# @DESCRIPTION:
+# Download link for NETSURF_BUILDSYSTEM, add to SRC_URI iff set explicitly.
+NETSURF_BUILDSYSTEM_SRC_URI="http://download.netsurf-browser.org/libs/releases/${NETSURF_BUILDSYSTEM}.tar.gz -> netsurf-${NETSURF_BUILDSYSTEM}.tar.gz"
+
+# @ECLASS-VARIABLE: NETSURF_COMPONENT_TYPE
+# @DESCRIPTION:
+# Passed to buildsystem as COMPONENT_TYPE, valid values are
+# lib-shared, lib-static and binary. Defaults to "lib-static lib-shared"
+NETSURF_COMPONENT_TYPE="${NETSURF_COMPONENT_TYPE:-lib-static lib-shared}"
+
+# @ECLASS-VARIABLE: SRC_URI
+# @DESCRIPTION:
+# Defaults to http://download.netsurf-browser.org/libs/releases/${P}-src.tar.gz
+# and NETSURF_BUILDSYSTEM_SRC_URI.
+if [ -z "${SRC_URI}" ] ; then
+ SRC_URI="http://download.netsurf-browser.org/libs/releases/${P}-src.tar.gz
+ ${NETSURF_BUILDSYSTEM_SRC_URI}"
+fi
+
+IUSE="debug"
+if has lib-static ${NETSURF_COMPONENT_TYPE} ; then
+ IUSE+=" static-libs"
+fi
+
+DEPEND="virtual/pkgconfig"
+
+# @FUNCTION: netsurf_src_prepare
+# @DESCRIPTION:
+# Apply PATCHES and user patches, then multilib_copy_sources for the in-source build.
+netsurf_src_prepare() {
+	# ebuild-provided PATCHES first, then user patches
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+	debug-print "$FUNCNAME: applying user patches"
+	epatch_user
+
+	# in-source build: give every ABI its own copy of the tree
+	multilib_copy_sources
+}
+
+# @ECLASS-VARIABLE: netsurf_makeconf
+# @DESCRIPTION:
+# Configuration variable bash array to be passed to emake calls.
+# Defined at netsurf_src_configure and can be altered afterwards.
+
+# @FUNCTION: netsurf_src_configure
+# @DESCRIPTION:
+# Setup netsurf_makeconf and run multilib-minimal_src_configure.
+# A default multilib_src_configure is provided by this eclass.
+netsurf_src_configure() {
+	# make variables understood by the netsurf buildsystem; Q= disables
+	# the quiet command wrapper, and CCOPT/CCNOOPT/CCDBG/LDDBG are
+	# cleared (presumably so the user's *FLAGS are not overridden —
+	# confirm against the buildsystem's makefiles)
+	netsurf_makeconf=(
+		NSSHARED=${WORKDIR}/${NETSURF_BUILDSYSTEM}
+		Q=
+		HOST_CC="\$(CC)"
+		CCOPT=
+		CCNOOPT=
+		CCDBG=
+		LDDBG=
+		AR="$(tc-getAR)"
+		BUILD=$(usex debug debug release)
+		PREFIX="${EROOT}"usr
+	)
+
+	multilib-minimal_src_configure
+}
+
+# Per-ABI configure: rewrite hardcoded /lib paths to the active libdir
+# and avoid the buildsystem's hardcoded /bin/which path.
+multilib_src_configure() {
+	sed -e "/^INSTALL_ITEMS/s: /lib: /$(get_libdir):g" \
+		-i Makefile || die
+	if [ -f ${PN}.pc.in ] ; then
+		sed -e "/^libdir/s:/lib:/$(get_libdir):g" \
+			-i ${PN}.pc.in || die
+	fi
+	sed -e 's:/bin/which:which:' \
+		-i ../${NETSURF_BUILDSYSTEM}/makefiles/Makefile.tools || die
+}
+
+# @FUNCTION: netsurf_make
+# @DESCRIPTION:
+# Calls emake with netsurf_makeconf and toolchain CC/LD
+# as arguments for every NETSURF_COMPONENT_TYPE if activated.
+netsurf_make() {
+	for COMPONENT_TYPE in ${NETSURF_COMPONENT_TYPE} ; do
+		# static libraries are only built when USE=static-libs
+		if [ "${COMPONENT_TYPE}" == "lib-static" ] ; then
+			if ! use static-libs ; then
+				continue
+			fi
+		fi
+		emake CC="$(tc-getCC)" LD="$(tc-getLD)" "${netsurf_makeconf[@]}" \
+			COMPONENT_TYPE=${COMPONENT_TYPE} LIBDIR="$(get_libdir)" "$@"
+	done
+}
+
+# @FUNCTION: netsurf_src_compile
+# @DESCRIPTION:
+# Calls multilib-minimal_src_compile and netsurf_make doc if USE=doc.
+# A default multilib_src_compile is provided by this eclass.
+netsurf_src_compile() {
+	# Warn about Makefiles that hardcode -O*/-g flags; grep -E replaces
+	# the deprecated egrep alias (same extended-regex semantics).
+	local problems=$(grep -E -Hn -- ' (-O.?|-g)( |$)' \
+		$(find . -type f -name 'Makefile*'))
+	if [ -n "${problems}" ] ; then
+		elog "found bad flags:
+${problems}"
+	fi
+
+	multilib-minimal_src_compile "$@"
+
+	# build the documentation target when USE=doc is set
+	if has doc ${USE} ; then
+		netsurf_make "$@" docs
+	fi
+}
+
+# Per-ABI compile hook for multilib-minimal; delegates to netsurf_make.
+multilib_src_compile() {
+	netsurf_make "$@"
+}
+
+# @FUNCTION: netsurf_src_test
+# @DESCRIPTION:
+# Calls multilib-minimal_src_test.
+# A default multilib_src_test is provided by this eclass.
+netsurf_src_test() {
+	# per-ABI work happens in multilib_src_test
+	multilib-minimal_src_test "$@"
+}
+
+# Per-ABI test hook: run the buildsystem's "test" target.
+multilib_src_test() {
+	netsurf_make test "$@"
+}
+
+# @FUNCTION: netsurf_src_install
+# @DESCRIPTION:
+# Calls multilib-minimal_src_install.
+# A default multilib_src_install is provided by this eclass.
+netsurf_src_install() {
+	# per-ABI work happens in multilib_src_install
+	multilib-minimal_src_install "$@"
+}
+
+# Per-ABI install. DEFAULT_ABI may not be the last ABI built, so each
+# non-default ABI installs into its own staging dir, gets its binaries
+# suffixed with the ABI name, and is then merged back into ${D}.
+multilib_src_install() {
+	if [ "${ABI}" == "${DEFAULT_ABI}" ] ; then
+		netsurf_make DESTDIR="${D}" install "$@"
+	else
+		netsurf_make DESTDIR="${D}"${ABI} install "$@"
+		# suffix binaries so they don't collide with the default
+		# ABI's files once moved back (the inner ABI check the old
+		# code did here was always true in this branch)
+		find "${D}"${ABI}/usr/bin -type f -exec mv {} {}.${ABI} \;
+		mv "${D}"${ABI}/* "${D}" || die
+		rmdir "${D}"${ABI} || die
+	fi
+}
diff --git a/eclass/nsplugins.eclass b/eclass/nsplugins.eclass
new file mode 100644
index 000000000000..bd9b3fb05985
--- /dev/null
+++ b/eclass/nsplugins.eclass
@@ -0,0 +1,80 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# @ECLASS: nsplugins.eclass
+# @MAINTAINER:
+# Mozilla Team <mozilla@gentoo.org>
+# @AUTHOR:
+# Original Author: Martin Schlemmer <azarah@gentoo.org>
+# @BLURB: reusable functions for netscape/moz plugin sharing
+# @DESCRIPTION:
+# Reusable functions that promote sharing of netscape/moz plugins, also provides
+# share_plugins_dir function for mozilla applications.
+
+inherit eutils multilib versionator mozextension
+
+PLUGINS_DIR="nsbrowser/plugins"
+
+# This function moves the plugin dir in src_install() to
+# ${D}/usr/$(get_libdir)/${PLUGIN_DIR}. The first argument should be
+# the full path (without $D) to the old plugin dir.
+src_mv_plugins() {
+	# EAPIs 0-2 have no prefix support and thus no ED; fall back to D
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
+
+	# Move plugins dir. We use keepdir so that it might not be unmerged
+	# by mistake ...
+	keepdir /usr/$(get_libdir)/${PLUGINS_DIR}
+	cp -a "${ED}"/$1/* "${ED}"/usr/$(get_libdir)/${PLUGINS_DIR}
+	rm -rf "${ED}"/$1
+	# leave a symlink behind so the old path keeps working
+	dosym /usr/$(get_libdir)/${PLUGINS_DIR} $1
+}
+
+# This function moves plugins in pkg_preinst() from the old dir to
+# ${ROOT}/usr/$(get_libdir)/${PLUGIN_DIR}. The first argument should be
+# the full path (without $ROOT) to the old plugin dir.
+pkg_mv_plugins() {
+	# NOTE(review): this assigns ED, yet the body below only uses
+	# EROOT — looks inconsistent; confirm against prefix handling.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${ROOT}"
+
+	# Move old plugins dir (only if it is a real dir, not our symlink)
+	if [ -d "${ROOT}/$1" -a ! -L "${ROOT}/$1" ]
+	then
+		mkdir -p "${EROOT}"/usr/$(get_libdir)/${PLUGINS_DIR}
+		cp -a "${EROOT}"/$1/* "${EROOT}"/usr/$(get_libdir)/${PLUGINS_DIR}
+		rm -rf "${EROOT}"/$1
+	fi
+}
+
+# This function installs a plugin with dosym to PLUGINS_DIR.
+# First argument should be the plugin file.
+inst_plugin() {
+	# NOTE(review): this only catches an *empty* argument, while the
+	# error text claims the file does not exist — consider also
+	# checking [[ ! -e ${1} ]] or rewording the message.
+	if [[ -z "${1}" ]]; then
+		eerror "The plugin file \"${1}\" does not exist."
+		die "No such file or directory."
+	fi
+
+	dodir /usr/$(get_libdir)/${PLUGINS_DIR}
+	dosym ${1} /usr/$(get_libdir)/${PLUGINS_DIR}/$(basename ${1})
+}
+
+# This function ensures we use the proper plugin path for Gentoo.
+# This should only be used by mozilla packages.
+# ${MOZILLA_FIVE_HOME} must be defined in src_install before calling this.
+share_plugins_dir() {
+	# seamonkey ships its own plugins dir; remove it before symlinking
+	if [[ ${PN} == seamonkey ]] ; then
+		rm -rf "${D}"${MOZILLA_FIVE_HOME}/plugins \
+			|| die "failed to remove existing plugins dir"
+	fi
+
+	# binary packages get an absolute base path, source builds a
+	# relative one
+	if [[ ${PN} == *-bin ]] ; then
+		PLUGIN_BASE_PATH="/usr/$(get_libdir)"
+	else
+		PLUGIN_BASE_PATH=".."
+	fi
+
+	# mozversion_extension_location decides whether the link belongs
+	# under browser/ for this mozilla version
+	if $(mozversion_extension_location) ; then
+		dosym "${PLUGIN_BASE_PATH}/nsbrowser/plugins" "${MOZILLA_FIVE_HOME}/browser/plugins"
+	else
+		dosym "${PLUGIN_BASE_PATH}/nsbrowser/plugins" "${MOZILLA_FIVE_HOME}/plugins"
+	fi
+}
diff --git a/eclass/nvidia-driver.eclass b/eclass/nvidia-driver.eclass
new file mode 100644
index 000000000000..44ef003fd136
--- /dev/null
+++ b/eclass/nvidia-driver.eclass
@@ -0,0 +1,188 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: nvidia-driver.eclass
+# @MAINTAINER:
+# Tony Vroon <chainsaw@gentoo.org>
+# Jeroen Roovers <jer@gentoo.org>
+# @AUTHOR:
+# Original author: Doug Goldstein <cardoe@gentoo.org>
+# @BLURB: Provide useful messages for nvidia-drivers based on currently installed Nvidia card
+# @DESCRIPTION:
+# Provide useful messages for nvidia-drivers based on currently installed Nvidia
+# card. It inherits versionator.
+
+inherit readme.gentoo versionator
+
+DEPEND="sys-apps/pciutils"
+
+# Variables for readme.gentoo.eclass:
+DISABLE_AUTOFORMATTING="yes"
+DOC_CONTENTS="You must be in the video group to use the NVIDIA device
+For more info, read the docs at
+http://www.gentoo.org/doc/en/nvidia-guide.xml#doc_chap3_sect6
+
+This ebuild installs a kernel module and X driver. Both must
+match explicitly in their version. This means, if you restart
+X, you must modprobe -r nvidia before starting it back up
+
+To use the NVIDIA GLX, run \"eselect opengl set nvidia\"
+
+To use the NVIDIA CUDA/OpenCL, run \"eselect opencl set nvidia\"
+
+NVIDIA has requested that any bug reports submitted have the
+output of nvidia-bug-report.sh included.
+"
+
+# the data below is derived from
+# http://us.download.nvidia.com/XFree86/Linux-x86_64/319.12/README/supportedchips.html
+
+drv_71xx="
+ 0020 0028 0029 002c 002d 00a0 0100 0101 0103 0150 0151 0152 0153
+"
+
+drv_96xx="
+ 0110 0111 0112 0113 0170 0171 0172 0173 0174 0175 0176 0177 0178 0179 017a
+ 017c 017d 0181 0182 0183 0185 0188 018a 018b 018c 01a0 01f0 0200 0201 0202
+ 0203 0250 0251 0253 0258 0259 025b 0280 0281 0282 0286 0288 0289 028c
+"
+
+drv_173x="
+ 00fa 00fb 00fc 00fd 00fe 0301 0302 0308 0309 0311 0312 0314 031a 031b 031c
+ 0320 0321 0322 0323 0324 0325 0326 0327 0328 032a 032b 032c 032d 0330 0331
+ 0332 0333 0334 0338 033f 0341 0342 0343 0344 0347 0348 034c 034e
+"
+
+drv_304x="
+ 0040 0041 0042 0043 0044 0045 0046 0047 0048 004e 0090 0091 0092 0093 0095
+ 0098 0099 009d 00c0 00c1 00c2 00c3 00c8 00c9 00cc 00cd 00ce 00f1 00f2 00f3
+ 00f4 00f5 00f6 00f8 00f9 0140 0141 0142 0143 0144 0145 0146 0147 0148 0149
+ 014a 014c 014d 014e 014f 0160 0161 0162 0163 0164 0165 0166 0167 0168 0169
+ 016a 01d0 01d1 01d2 01d3 01d6 01d7 01d8 01da 01db 01dc 01dd 01de 01df 0211
+ 0212 0215 0218 0221 0222 0240 0241 0242 0244 0245 0247 0290 0291 0292 0293
+ 0294 0295 0297 0298 0299 029a 029b 029c 029d 029e 029f 02e0 02e1 02e2 02e3
+ 02e4 038b 0390 0391 0392 0393 0394 0395 0397 0398 0399 039c 039e 03d0 03d1
+ 03d2 03d5 03d6 0531 0533 053a 053b 053e 07e0 07e1 07e2 07e3 07e5
+"
+
+drv_340x="
+ 0191 0193 0194 0197 019d 019e 0400 0401 0402 0403 0404 0405 0406 0407 0408
+ 0409 040a 040b 040c 040d 040e 040f 0410 0420 0421 0422 0423 0424 0425 0426
+ 0427 0428 0429 042a 042b 042c 042d 042e 042f 05e0 05e1 05e2 05e3 05e6 05e7
+ 05e7 05e7 05e7 05e7 05e7 05ea 05eb 05ed 05f8 05f9 05fd 05fe 05ff 0600 0601
+ 0602 0603 0604 0605 0606 0607 0608 0609 0609 060a 060b 060c 060d 060f 0610
+ 0611 0612 0613 0614 0615 0617 0618 0619 061a 061b 061c 061d 061e 061f 0621
+ 0622 0623 0625 0626 0627 0628 062a 062b 062c 062d 062e 062e 0630 0631 0632
+ 0635 0637 0638 063a 0640 0641 0643 0644 0645 0646 0647 0648 0649 0649 064a
+ 064b 064c 0651 0652 0652 0653 0654 0654 0654 0655 0656 0658 0659 065a 065b
+ 065c 06e0 06e1 06e2 06e3 06e4 06e5 06e6 06e7 06e8 06e8 06e9 06ea 06eb 06ec
+ 06ef 06f1 06f8 06f9 06f9 06fa 06fb 06fd 06ff 06ff 0840 0844 0845 0846 0847
+ 0848 0849 084a 084b 084c 084d 084f 0860 0861 0862 0863 0864 0865 0866 0866
+ 0867 0868 0869 086a 086c 086d 086e 086f 0870 0871 0872 0872 0873 0873 0874
+ 0876 087a 087d 087e 087f 08a0 08a2 08a3 08a4 08a5 0a20 0a22 0a23 0a26 0a27
+ 0a28 0a29 0a2a 0a2b 0a2c 0a2d 0a32 0a34 0a35 0a38 0a3c 0a60 0a62 0a63 0a64
+ 0a65 0a66 0a67 0a68 0a69 0a6a 0a6c 0a6e 0a6e 0a6f 0a70 0a70 0a70 0a71 0a72
+ 0a73 0a73 0a73 0a74 0a74 0a75 0a75 0a76 0a78 0a7a 0a7a 0a7a 0a7a 0a7a 0a7a
+ 0a7a 0a7a 0a7a 0a7a 0a7a 0a7c 0ca0 0ca2 0ca3 0ca4 0ca5 0ca7 0ca8 0ca9 0cac
+ 0caf 0cb0 0cb1 0cbc 10c0 10c3 10c5 10d8
+"
+
+mask_71xx=">=x11-drivers/nvidia-drivers-72.0.0"
+mask_96xx=">=x11-drivers/nvidia-drivers-97.0.0"
+mask_173x=">=x11-drivers/nvidia-drivers-177.0.0"
+mask_304x=">=x11-drivers/nvidia-drivers-305.0.0"
+mask_340x=">=x11-drivers/nvidia-drivers-341.0.0"
+
+# @FUNCTION: nvidia-driver-get-card
+# @DESCRIPTION:
+# Retrieve the PCI device ID for each Nvidia video card you have
+nvidia-driver-get-card() {
+	# lspci -d 10de: -n lists NVIDIA (vendor 0x10de) devices; the awk
+	# filter keeps class 0300 (VGA controller) rows and prints the
+	# PCI device ID field
+	local NVIDIA_CARD=$(
+		[ -x /usr/sbin/lspci ] && /usr/sbin/lspci -d 10de: -n \
+		| awk -F'[: ]' '/ 0300: /{print $6}'
+	)
+
+	if [ -n "${NVIDIA_CARD}" ]; then
+		echo "${NVIDIA_CARD}"
+	else
+		# no card found (or lspci missing): return a sentinel ID
+		echo 0000
+	fi
+}
+
+nvidia-driver-get-mask() {
+ local NVIDIA_CARDS="$(nvidia-driver-get-card)"
+ local card drv
+
+ for card in ${NVIDIA_CARDS}; do
+ for drv in ${drv_71xx}; do
+ if [ "x${card}" = "x${drv}" ]; then
+ echo "${mask_71xx}"
+ return 0
+ fi
+ done
+
+ for drv in ${drv_96xx}; do
+ if [ "x${card}" = "x${drv}" ]; then
+ echo "${mask_96xx}"
+ return 0
+ fi
+ done
+
+ for drv in ${drv_173x}; do
+ if [ "x${card}" = "x${drv}" ]; then
+ echo "${mask_173x}"
+ return 0
+ fi
+ done
+
+ for drv in ${drv_304x}; do
+ if [ "x${card}" = "x${drv}" ]; then
+ echo "${mask_304x}"
+ return 0
+ fi
+ done
+
+ for drv in ${drv_340x}; do
+ if [ "x${card}" = "x${drv}" ]; then
+ echo "${mask_340x}"
+ return 0
+ fi
+ done
+ done
+
+ echo ''
+ return 1
+}
+
+# @FUNCTION: nvidia-driver-check-warning
+# @DESCRIPTION:
+# Prints out a warning if the driver does not work w/ the installed video card
+nvidia-driver-check-warning() {
+	local NVIDIA_MASK="$(nvidia-driver-get-mask)"
+
+	if [ -n "${NVIDIA_MASK}" ]; then
+		# compare the mask's version (text after the last '-') with
+		# the version being installed; warn when version_compare
+		# returns 1 (presumably mask version < PV — confirm against
+		# versionator.eclass)
+		version_compare "${NVIDIA_MASK##*-}" "${PV}"
+		if [ x"${?}" = x1 ]; then
+			ewarn "***** WARNING *****"
+			ewarn
+			ewarn "You are currently installing a version of nvidia-drivers that is"
+			ewarn "known not to work with a video card you have installed on your"
+			ewarn "system. If this is intentional, please ignore this. If it is not"
+			ewarn "please perform the following steps:"
+			ewarn
+			ewarn "Add the following mask entry to /etc/portage/package.mask by"
+			if [ -d "${ROOT}/etc/portage/package.mask" ]; then
+				ewarn "echo \"${NVIDIA_MASK}\" > /etc/portage/package.mask/nvidia-drivers"
+			else
+				ewarn "echo \"${NVIDIA_MASK}\" >> /etc/portage/package.mask"
+			fi
+			ewarn
+			ewarn "Failure to perform the steps above could result in a non-working"
+			ewarn "X setup."
+			ewarn
+			ewarn "For more information please read:"
+			ewarn "http://www.nvidia.com/object/IO_32667.html"
+		fi
+	fi
+}
diff --git a/eclass/oasis.eclass b/eclass/oasis.eclass
new file mode 100644
index 000000000000..7dc86f3633cf
--- /dev/null
+++ b/eclass/oasis.eclass
@@ -0,0 +1,121 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: oasis.eclass
+# @MAINTAINER:
+# ml@gentoo.org
+# @AUTHOR:
+# Original Author: Alexis Ballier <aballier@gentoo.org>
+# @BLURB: Provides common ebuild phases for oasis-based packages.
+# @DESCRIPTION:
+# Provides common ebuild phases for oasis-based packages.
+# Most of these packages will just have to inherit the eclass, set their
+# dependencies and the DOCS variable for base.eclass to install it and be done.
+#
+# It inherits multilib, findlib, eutils and base eclasses.
+# Ebuilds using oasis.eclass must be EAPI>=3.
+
+# @ECLASS-VARIABLE: OASIS_BUILD_DOCS
+# @DESCRIPTION:
+# Will make oasis_src_compile build the documentation if this variable is
+# defined and the doc useflag is enabled.
+# The eclass takes care of setting doc in IUSE but the ebuild should take care
+# of the extra dependencies it may need.
+# Set before inheriting the eclass.
+
+# @ECLASS-VARIABLE: OASIS_BUILD_TESTS
+# @DESCRIPTION:
+# Will make oasis_src_configure enable building the tests if the test useflag is
+# enabled. oasis_src_test will then run them.
+# Note that you sometimes need to enable this for src_test to be useful,
+# sometimes not. It has to be enabled on a per-case basis.
+# The eclass takes care of setting test in IUSE but the ebuild should take care
+# of the extra dependencies it may need.
+# Set before inheriting the eclass.
+
+
+# @ECLASS-VARIABLE: OASIS_NO_DEBUG
+# @DESCRIPTION:
+# Disable debug useflag usage. Old oasis versions did not support it so we allow
+# disabling it in those cases.
+# The eclass takes care of setting debug in IUSE.
+# Set before inheriting the eclass.
+
+inherit multilib findlib eutils base
+
+case ${EAPI:-0} in
+ 0|1|2) die "You need at least EAPI-3 to use oasis.eclass";;
+ 3|4) RDEPEND=">=dev-lang/ocaml-3.12[ocamlopt?]";;
+ *) RDEPEND=">=dev-lang/ocaml-3.12:=[ocamlopt?]";;
+esac
+
+IUSE="+ocamlopt"
+[ -n "${OASIS_NO_DEBUG}" ] || IUSE="${IUSE} debug"
+[ -n "${OASIS_BUILD_DOCS}" ] && IUSE="${IUSE} doc"
+[ -n "${OASIS_BUILD_TESTS}" ] && IUSE="${IUSE} test"
+
+DEPEND="${RDEPEND}"
+
+# @FUNCTION: oasis_use_enable
+# @USAGE: < useflag > < variable >
+# @DESCRIPTION:
+# A use_enable-like function for oasis configure variables.
+# Outputs '--override variable (true|false)', whether useflag is enabled or
+# not.
+# Typical usage: $(oasis_use_enable ocamlopt is_native) as an oasis configure
+# argument.
+oasis_use_enable() {
+	# Emit "--override <var> true|false" depending on the useflag.
+	local flag=$1 var=$2
+	local state
+	state=$(usex "${flag}" true false)
+	echo "--override ${var} ${state}"
+}
+
+# @FUNCTION: oasis_src_configure
+# @DESCRIPTION:
+# src_configure phase shared by oasis-based packages.
+# Extra arguments may be passed via oasis_configure_opts.
+oasis_src_configure() {
+	local confargs=""
+	# tests/debug toggles are only emitted when the ebuild opted in
+	# via OASIS_BUILD_TESTS / absence of OASIS_NO_DEBUG
+	[ -n "${OASIS_BUILD_TESTS}" ] && confargs="${confargs} $(use_enable test tests)"
+	[ -n "${OASIS_NO_DEBUG}" ] || confargs="${confargs} $(oasis_use_enable debug debug)"
+	${OASIS_SETUP_COMMAND:-ocaml setup.ml} -configure \
+		--prefix "${EPREFIX}/usr" \
+		--libdir "${EPREFIX}/usr/$(get_libdir)" \
+		--docdir "${EPREFIX}/usr/share/doc/${PF}/html" \
+		--destdir "${D}" \
+		$(oasis_use_enable ocamlopt is_native) \
+		${confargs} \
+		${oasis_configure_opts} \
+		|| die
+}
+
+# @FUNCTION: oasis_src_compile
+# @DESCRIPTION:
+# Builds an oasis-based package.
+# Will build documentation if OASIS_BUILD_DOCS is defined and the doc useflag is
+# enabled.
+oasis_src_compile() {
+	${OASIS_SETUP_COMMAND:-ocaml setup.ml} -build || die
+	if [ -n "${OASIS_BUILD_DOCS}" ] && use doc; then
+		# honor OASIS_SETUP_COMMAND here too, for consistency with
+		# the other phases (was hardcoded "ocaml setup.ml" before)
+		${OASIS_SETUP_COMMAND:-ocaml setup.ml} -doc || die
+	fi
+}
+
+# @FUNCTION: oasis_src_test
+# @DESCRIPTION:
+# Runs the testsuite of an oasis-based package.
+oasis_src_test() {
+	# point the loader at the freshly built libraries
+	LD_LIBRARY_PATH="${S}/_build/lib" ${OASIS_SETUP_COMMAND:-ocaml setup.ml} -test || die
+}
+
+# @FUNCTION: oasis_src_install
+# @DESCRIPTION:
+# Installs an oasis-based package.
+# It calls base_src_install_docs, so will install documentation declared in the
+# DOCS variable.
+oasis_src_install() {
+	# set up findlib destdir handling before installing
+	findlib_src_preinst
+	${OASIS_SETUP_COMMAND:-ocaml setup.ml} -install || die
+	base_src_install_docs
+}
+
+EXPORT_FUNCTIONS src_configure src_compile src_test src_install
diff --git a/eclass/obs-download.eclass b/eclass/obs-download.eclass
new file mode 100644
index 000000000000..39aada35c663
--- /dev/null
+++ b/eclass/obs-download.eclass
@@ -0,0 +1,43 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: obs-download.eclass
+# @MAINTAINER:
+# suse@gentoo.org
+# @BLURB: Simplifies downloading from openSUSE Build Service.
+# @DESCRIPTION:
+# This eclass constructs OBS_URI based on provided project in openSUSE Build
+# Service and package name. It can be used by packages/eclasses to download
+# actual files.
+#
+# All you need to do in order to use it is set OBS_PROJECT and OBS_PACKAGE and
+# inherit this eclass. It will provide OBS_URI in return which you will prepend
+# to your files and use it in SRC_URI. Alternatively you can just set
+# OPENSUSE_RELEASE and OBS_PACKAGE and it will give you back OBS_URI for
+# downloading files from obs project corresponding to the specified openSUSE
+# release.
+
+# @ECLASS-VARIABLE: OPENSUSE_RELEASE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# From which openSUSE release to take files.
+# Eg.: 12.1, 12.2, Factory
+
+# @ECLASS-VARIABLE: OBS_PROJECT
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# In which OBS project the package is.
+# This variable does not have to be set and is overridden if
+# OPENSUSE_RELEASE is provided.
+
+# @ECLASS-VARIABLE: OBS_PACKAGE
+# @DESCRIPTION:
+# Name of the package we want to take files from.
+# By default taken from ${PN}.
+
+[[ -z ${OPENSUSE_RELEASE} ]] || OBS_PROJECT="openSUSE:${OPENSUSE_RELEASE}"
+[[ -n ${OBS_PROJECT} ]] || die "OBS_PROJECT not set!"
+[[ -n ${OBS_PACKAGE} ]] || OBS_PACKAGE="${PN}"
+
+OBS_URI="https://api.opensuse.org/public/source/${OBS_PROJECT}/${OBS_PACKAGE}"
diff --git a/eclass/obs-service.eclass b/eclass/obs-service.eclass
new file mode 100644
index 000000000000..d53e3b328e7f
--- /dev/null
+++ b/eclass/obs-service.eclass
@@ -0,0 +1,112 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: obs-service.eclass
+# @MAINTAINER:
+# suse@gentoo.org
+# @BLURB: Reduces code duplication in the Open Build Service services.
+# @DESCRIPTION:
+# This eclass makes it easier to package Open Build Service services. Based on
+# provided information it will set all needed variables and takes care of
+# installation.
+#
+# @EXAMPLE:
+# Typical ebuild using obs-service.eclass:
+#
+# @CODE
+# EAPI=4
+#
+# inherit obs-service
+#
+# KEYWORDS=""
+#
+# DEPEND=""
+# RDEPEND="${DEPEND}"
+#
+# @CODE
+
+# @ECLASS-VARIABLE: OBS_SERVICE_NAME
+# @DESCRIPTION:
+# Name of the service. If not set, it is taken from ${PN}.
+
+# @ECLASS-VARIABLE: ADDITIONAL_FILES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If any additional files are needed.
+
+case "${EAPI:-0}" in
+ 4|5) : ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+HOMEPAGE="http://en.opensuse.org/openSUSE:OSC"
+LICENSE="GPL-2"
+SLOT="0"
+IUSE=""
+
+RDEPEND="
+ dev-util/osc
+ dev-util/suse-build
+"
+
+[[ -n ${OBS_SERVICE_NAME} ]] || OBS_SERVICE_NAME=${PN/obs-service-/}
+OBS_PROJECT="openSUSE:Tools"
+
+DESCRIPTION="Open Build Service client module - ${OBS_SERVICE_NAME} service"
+
+inherit obs-download
+
+# As it isn't versioned at all, use arrows to deal with it
+SRC_URI="${OBS_URI}/${OBS_SERVICE_NAME} -> ${OBS_SERVICE_NAME}-${PV}"
+SRC_URI+=" ${OBS_URI}/${OBS_SERVICE_NAME}.service -> ${OBS_SERVICE_NAME}-${PV}.service"
+
+for i in ${ADDITIONAL_FILES}; do
+ SRC_URI+=" ${OBS_URI}/${i} -> ${i}-${PV}"
+done
+
+# @FUNCTION: obs-service_src_unpack
+# @DESCRIPTION:
+# Just copy files. Files are not compressed.
+# Just copy files. Files are not compressed, so no real unpacking is
+# needed. cd/mkdir/cp failures now abort instead of being ignored.
+obs-service_src_unpack() {
+	debug-print-function ${FUNCNAME} "$@"
+	cd "${DISTDIR}" || die
+	mkdir -p "${S}" || die
+	# ${A} is deliberately unquoted: it is a space-separated list of
+	# distfiles which are copied verbatim
+	cp ${A} "${S}" || die
+}
+
+# @FUNCTION: obs-service_src_prepare
+# @DESCRIPTION:
+# Replaces all /usr/lib/build directories with /usr/libexec/suse-build to
+# reflect where suse-build is installed in Gentoo.
+obs-service_src_prepare() {
+	debug-print-function ${FUNCNAME} "$@"
+	debug-print "Replacing all paths to find suse-build in Gentoo"
+	# suse-build lives in /usr/libexec/suse-build on Gentoo
+	find "${S}" -type f -exec \
+		sed -i 's|/usr/lib/build|/usr/libexec/suse-build|g' {} +
+	debug-print "Replacing all paths from hardcoded suse libexec"
+	find "${S}" -type f -exec \
+		sed -i 's|/usr/lib/obs|/usr/libexec/obs|g' {} +
+}
+
+# @FUNCTION: obs-service_src_install
+# @DESCRIPTION:
+# Does the installation of the downloaded files.
+obs-service_src_install() {
+	debug-print-function ${FUNCNAME} "$@"
+	debug-print "Installing service \"${OBS_SERVICE_NAME}\""
+	# the service script itself must be executable
+	exeinto /usr/libexec/obs/service
+	newexe "${S}"/${OBS_SERVICE_NAME}-${PV} ${OBS_SERVICE_NAME}
+	# the .service metadata file is installed as plain data
+	insinto /usr/libexec/obs/service
+	newins "${S}"/${OBS_SERVICE_NAME}-${PV}.service ${OBS_SERVICE_NAME}.service
+	if [[ -n ${ADDITIONAL_FILES} ]]; then
+		debug-print "Installing following additional files:"
+		debug-print "  ${ADDITIONAL_FILES}"
+		exeinto /usr/libexec/obs/service/${OBS_SERVICE_NAME}.files
+		for i in ${ADDITIONAL_FILES}; do
+			newexe "${S}"/${i}-${PV} ${i}
+		done
+	fi
+}
+
+EXPORT_FUNCTIONS src_install src_prepare src_unpack
diff --git a/eclass/office-ext-r1.eclass b/eclass/office-ext-r1.eclass
new file mode 100644
index 000000000000..a7afb15e4d84
--- /dev/null
+++ b/eclass/office-ext-r1.eclass
@@ -0,0 +1,230 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: office-ext-r1.eclass
+# @MAINTAINER:
+# The office team <openoffice@gentoo.org>
+# @AUTHOR:
+# Tomáš Chvátal <scarabeus@gentoo.org>
+# @BLURB: Eclass for installing libreoffice/openoffice extensions
+# @DESCRIPTION:
+# Eclass for easing maintenance of libreoffice/openoffice extensions.
+
+case "${EAPI:-0}" in
+ 5) OEXT_EXPORTED_FUNCTIONS="src_unpack src_install pkg_postinst pkg_prerm" ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+inherit eutils multilib
+
+# @ECLASS-VARIABLE: OFFICE_REQ_USE
+# @DESCRIPTION:
+# Useflags required on office implementation for the extension.
+#
+# Example:
+# @CODE
+# OFFICE_REQ_USE="java,jemalloc(-)?"
+# @CODE
+if [[ ${OFFICE_REQ_USE} ]]; then
+	# Append the brackets for the depend below
+ OFFICE_REQ_USE="[${OFFICE_REQ_USE}]"
+fi
+
+# @ECLASS-VARIABLE: OFFICE_IMPLEMENTATIONS
+# @DESCRIPTION:
+# List of implementations supported by the extension.
+# Some work only for libreoffice and vice versa.
+# Default value is all implementations.
+#
+# Example:
+# @CODE
+# OFFICE_IMPLEMENTATIONS=( "libreoffice" "openoffice" )
+# @CODE
+[[ -z ${OFFICE_IMPLEMENTATIONS} ]] && OFFICE_IMPLEMENTATIONS=( "libreoffice" "openoffice" )
+
+# @ECLASS-VARIABLE: OFFICE_EXTENSIONS
+# @REQUIRED
+# @DESCRIPTION:
+# Array containing list of extensions to install.
+#
+# Example:
+# @CODE
+# OFFICE_EXTENSIONS=( ${PN}_${PV}.oxt )
+# @CODE
+[[ -z ${OFFICE_EXTENSIONS} ]] && die "OFFICE_EXTENSIONS variable is unset."
+if [[ "$(declare -p OFFICE_EXTENSIONS 2>/dev/null 2>&1)" != "declare -a"* ]]; then
+ die "OFFICE_EXTENSIONS variable is not an array."
+fi
+
+# @ECLASS-VARIABLE: OFFICE_EXTENSIONS_LOCATION
+# @DESCRIPTION:
+# Path to the extensions location. Defaults to ${DISTDIR}.
+#
+# Example:
+# @CODE
+# OFFICE_EXTENSIONS_LOCATION="${S}/unpacked/"
+# @CODE
+: ${OFFICE_EXTENSIONS_LOCATION:=${DISTDIR}}
+
+IUSE=""
+RDEPEND=""
+
+for i in ${OFFICE_IMPLEMENTATIONS[@]}; do
+ IUSE+=" office_implementation_${i}"
+ RDEPEND+="
+ office_implementation_${i}? (
+ || (
+ app-office/${i}${OFFICE_REQ_USE}
+ app-office/${i}-bin${OFFICE_REQ_USE}
+ )
+ )
+ "
+done
+
+REQUIRED_USE="|| ( "
+for i in ${OFFICE_IMPLEMENTATIONS[@]}; do
+ REQUIRED_USE+=" office_implementation_${i} "
+done
+REQUIRED_USE+=" )"
+
+DEPEND="${RDEPEND}
+ app-arch/unzip
+"
+
+# Most projects actually do not provide any relevant sourcedir as they are oxt.
+S="${WORKDIR}"
+
+# @FUNCTION: office-ext-r1_src_unpack
+# @DESCRIPTION:
+# Unpack the extension files, unzipping .oxt archives into ${WORKDIR}.
+office-ext-r1_src_unpack() {
+	debug-print-function ${FUNCNAME} "$@"
+	local i
+
+	default
+
+	for i in ${OFFICE_EXTENSIONS[@]}; do
+		# Unpack the extensions where required and add case for oxt
+		# which should be most common case for the extensions.
+		if [[ -f "${OFFICE_EXTENSIONS_LOCATION}/${i}" ]] ; then
+			case ${i} in
+				*.oxt)
+					# .oxt is a zip archive, so unzip it manually
+					# into its own subdir of ${WORKDIR}
+					mkdir -p "${WORKDIR}/${i}/"
+					pushd "${WORKDIR}/${i}/" > /dev/null
+					echo ">>> Unpacking "${OFFICE_EXTENSIONS_LOCATION}/${i}" to ${PWD}"
+					unzip -qo ${OFFICE_EXTENSIONS_LOCATION}/${i}
+					assert "failed unpacking ${OFFICE_EXTENSIONS_LOCATION}/${i}"
+					popd > /dev/null
+					;;
+				*) unpack ${i} ;;
+			esac
+		fi
+	done
+}
+
+# @FUNCTION: office-ext-r1_src_install
+# @DESCRIPTION:
+# Install the extension source to the proper location.
+office-ext-r1_src_install() {
+	debug-print-function ${FUNCNAME} "$@"
+	debug-print "Extensions: ${OFFICE_EXTENSIONS[@]}"
+
+	local i j
+
+	for i in ${OFFICE_IMPLEMENTATIONS[@]}; do
+		if use office_implementation_${i}; then
+			if [[ ${i} == openoffice ]]; then
+				# OOO needs to use uno because direct deployment segfaults.
+				# This is bug by their side, but i don't want to waste time
+				# fixing it myself.
+				insinto /usr/$(get_libdir)/${i}/share/extension/install
+				for j in ${OFFICE_EXTENSIONS[@]}; do
+					doins ${OFFICE_EXTENSIONS_LOCATION}/${j}
+				done
+			else
+				# libreoffice: install the unpacked extension tree
+				for j in ${OFFICE_EXTENSIONS[@]}; do
+					pushd "${WORKDIR}/${j}/" > /dev/null
+					insinto /usr/$(get_libdir)/${i}/share/extensions/${j/.oxt/}
+					doins -r *
+					popd > /dev/null
+				done
+			fi
+		fi
+	done
+}
+
+#### OPENOFFICE COMPAT CODE
+
+UNOPKG_BINARY="/usr/lib64/openoffice/program/unopkg"
+
+# @FUNCTION: office-ext-r1_add_extension
+# @DESCRIPTION:
+# Install the extension into the libreoffice/openoffice.
+office-ext-r1_add_extension() {
+	debug-print-function ${FUNCNAME} "$@"
+	local ext=$1
+	# throwaway user profile so unopkg doesn't touch a real one
+	local tmpdir=$(mktemp -d --tmpdir="${T}")
+
+	debug-print "${FUNCNAME}: ${UNOPKG_BINARY} add --shared \"${ext}\""
+	ebegin "Adding office extension: \"${ext}\""
+	${UNOPKG_BINARY} add --suppress-license \
+		--shared "${ext}" \
+		"-env:UserInstallation=file:///${tmpdir}" \
+		"-env:JFW_PLUGIN_DO_NOT_CHECK_ACCESSIBILITY=1"
+	eend $?
+	# presumably refreshes the shared extension registry — confirm
+	${UNOPKG_BINARY} list --shared > /dev/null
+	rm -rf "${tmpdir}"
+}
+
+# @FUNCTION: office-ext-r1_remove_extension
+# @DESCRIPTION:
+# Remove the extension from the libreoffice/openoffice.
+office-ext-r1_remove_extension() {
+	debug-print-function ${FUNCNAME} "$@"
+	local ext=$1
+	# throwaway user profile so unopkg doesn't touch a real one
+	local tmpdir=$(mktemp -d --tmpdir="${T}")
+
+	debug-print "${FUNCNAME}: ${UNOPKG_BINARY} remove --shared \"${ext}\""
+	ebegin "Removing office extension: \"${ext}\""
+	${UNOPKG_BINARY} remove --suppress-license \
+		--shared "${ext}" \
+		"-env:UserInstallation=file:///${tmpdir}" \
+		"-env:JFW_PLUGIN_DO_NOT_CHECK_ACCESSIBILITY=1"
+	eend $?
+	# presumably refreshes the shared extension registry — confirm
+	${UNOPKG_BINARY} list --shared > /dev/null
+	rm -rf "${tmpdir}"
+}
+
+# @FUNCTION: office-ext-r1_pkg_postinst
+# @DESCRIPTION:
+# Add the extensions to the openoffice.
+office-ext-r1_pkg_postinst() {
+	# only openoffice needs post-merge registration via unopkg
+	if in_iuse office_implementation_openoffice && use office_implementation_openoffice; then
+		debug-print-function ${FUNCNAME} "$@"
+		debug-print "Extensions: ${OFFICE_EXTENSIONS[@]}"
+		local i
+
+		# NOTE(review): path hardcodes lib64 while src_install uses
+		# $(get_libdir) — confirm this is intentional
+		for i in ${OFFICE_EXTENSIONS[@]}; do
+			office-ext-r1_add_extension "/usr/lib64/openoffice/share/extension/install/${i}"
+		done
+	fi
+}
+
+# @FUNCTION: office-ext-r1_pkg_prerm
+# @DESCRIPTION:
+# Remove the extensions from the openoffice.
+office-ext-r1_pkg_prerm() {
+	# unregister extensions from openoffice before unmerge
+	if in_iuse office_implementation_openoffice && use office_implementation_openoffice; then
+		debug-print-function ${FUNCNAME} "$@"
+		debug-print "Extensions: ${OFFICE_EXTENSIONS[@]}"
+		local i
+
+		for i in ${OFFICE_EXTENSIONS[@]}; do
+			office-ext-r1_remove_extension "${i}"
+		done
+	fi
+}
+
+EXPORT_FUNCTIONS ${OEXT_EXPORTED_FUNCTIONS}
+unset OEXT_EXPORTED_FUNCTIONS
diff --git a/eclass/openib.eclass b/eclass/openib.eclass
new file mode 100644
index 000000000000..1b2102ae0403
--- /dev/null
+++ b/eclass/openib.eclass
@@ -0,0 +1,155 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: openib.eclass
+# @AUTHOR:
+# Original Author: Alexey Shvetsov <alexxy@gentoo.org>
+# @BLURB: Simplify working with OFED packages
+
+inherit eutils rpm versionator
+
+EXPORT_FUNCTIONS src_unpack
+
+HOMEPAGE="http://www.openfabrics.org/"
+LICENSE="|| ( GPL-2 BSD-2 )"
+
+# @ECLASS-VARIABLE: OFED_VER
+# @DESCRIPTION:
+# Defines OFED version eg 1.4 or 1.4.0.1
+
+# @ECLASS-VARIABLE: OFED_RC
+# @DESCRIPTION:
+# Sets if this version is RC
+
+# @ECLASS-VARIABLE: OFED_RC_VER
+# @DESCRIPTION:
+# Sets RC version
+
+
+# @ECLASS-VARIABLE: OFED_SUFFIX
+# @DESCRIPTION:
+# Defines OFED package suffix eg -1.ofed1.4
+
+# @ECLASS-VARIABLE: OFED_SNAPSHOT
+# @DESCRIPTION:
+# Defines if src tarball is git snapshot
+
+SLOT="${OFED_VER}"
+
+# @ECLASS-VARIABLE: OFED_VERSIONS
+# @DESCRIPTION:
+# Defines array of ofed version supported by eclass
+
+OFED_VERSIONS=(
+ "1.5.4.1"
+ "3.5"
+ "3.12"
+ )
+
+# @FUNCTION: block_other_ofed_versions
+# @DESCRIPTION:
+# function that creates blockers list for ofed
+block_other_ofed_versions() {
+	# Appends to RDEPEND a blocker for the unslotted (:0) package and for
+	# every eclass-supported OFED slot other than the current SLOT.
+	local slot
+	RDEPEND="${RDEPEND} !sys-infiniband/${PN}:0"
+	# Quote the array expansion so version strings are immune to word
+	# splitting and accidental globbing (SC2068).
+	for slot in "${OFED_VERSIONS[@]}"; do
+		if [[ ${slot} != ${SLOT} ]]; then
+			RDEPEND+=" !sys-infiniband/${PN}:${slot}"
+		fi
+	done
+}
+
+# Derive the x.y.z base version for the SRC_URI download directory.
+OFED_BASE_VER=$(get_version_component_range 1-3 ${OFED_VER})
+
+# NOTE(review): ${OFED_RC} is unquoted inside [ ]; works for the
+# set/unset cases used here, but quoting would be safer.
+if [ -z $OFED_RC ] ; then
+	SRC_URI="https://www.openfabrics.org/downloads/OFED/ofed-${OFED_BASE_VER}/OFED-${OFED_VER}.tgz"
+else
+	SRC_URI="https://www.openfabrics.org/downloads/OFED/ofed-${OFED_BASE_VER}/OFED-${OFED_VER}-rc${OFED_RC_VER}.tgz"
+fi
+
+# Map the ebuild name to the tarball name inside the OFED bundle; the
+# kernel part was renamed ofa_kernel -> compat-rdma after the 1.5 series.
+case ${PN} in
+	ofed)
+		case ${PV} in
+			1.5.*|1.5.*.*)
+				MY_PN="ofa_kernel"
+				;;
+			*)
+				MY_PN="compat-rdma"
+				;;
+		esac
+		;;
+	*)
+		MY_PN="${PN}"
+		;;
+esac
+
+# Upstream versions do not carry the Gentoo _pN marker.
+case ${PV} in
+	*p*)
+		MY_PV="${PV/p/}"
+		;;
+	*)
+		MY_PV="${PV}"
+		;;
+esac
+
+case ${MY_PN} in
+	ofa_kernel|compat-rdma)
+		EXT="tgz"
+		;;
+	*)
+		EXT="tar.gz"
+		;;
+esac
+
+# NOTE(review): this tests OFED_SRC_SNAPSHOT, but the variable documented
+# above and used by openib_src_unpack is OFED_SNAPSHOT — looks like a
+# typo, so the snapshot S path may never be selected; confirm.
+if [ -z ${OFED_SRC_SNAPSHOT} ]; then
+	S="${WORKDIR}/${MY_PN}-${MY_PV}"
+else
+	S="${WORKDIR}/${MY_PN}-${MY_PV}-${OFED_SUFFIX}"
+fi
+
+
+# @FUNCTION: openib_src_unpack
+# @DESCRIPTION:
+# This function will unpack OFED packages
+openib_src_unpack() {
+	# Unpack the outer OFED bundle, then the src.rpm for this package out
+	# of its SRPMS directory, then the source tarball contained in that rpm.
+	unpack ${A}
+	# NOTE(review): ${OFED_RC} is unquoted inside [ ]; fine for set/unset,
+	# fragile for values with spaces.
+	if [ -z ${OFED_RC} ]; then
+		case ${PN} in
+			ofed)
+				# The kernel package is versioned by OFED_VER, not MY_PV.
+				rpm_unpack "./OFED-${OFED_VER}/SRPMS/${MY_PN}-${OFED_VER}-${OFED_SUFFIX}.src.rpm"
+				;;
+			*)
+				rpm_unpack "./OFED-${OFED_VER}/SRPMS/${MY_PN}-${MY_PV}-${OFED_SUFFIX}.src.rpm"
+				;;
+		esac
+	else
+		# Release candidates live in a -rcN bundle directory.
+		case ${PN} in
+			ofed)
+				rpm_unpack "./OFED-${OFED_VER}-rc${OFED_RC_VER}/SRPMS/${MY_PN}-${OFED_VER}-${OFED_SUFFIX}.src.rpm"
+				;;
+			*)
+				rpm_unpack "./OFED-${OFED_VER}-rc${OFED_RC_VER}/SRPMS/${MY_PN}-${MY_PV}-${OFED_SUFFIX}.src.rpm"
+				;;
+		esac
+	fi
+	# Snapshot tarballs carry the OFED_SUFFIX in their name.
+	if [ -z ${OFED_SNAPSHOT} ]; then
+		case ${PN} in
+			ofed)
+				unpack ./${MY_PN}-${OFED_VER}.${EXT}
+				;;
+			*)
+				unpack ./${MY_PN}-${MY_PV}.${EXT}
+				;;
+		esac
+	else
+		case ${PN} in
+			ofed)
+				unpack ./${MY_PN}-${OFED_VER}-${OFED_SUFFIX}.${EXT}
+				;;
+			*)
+				unpack ./${MY_PN}-${MY_PV}-${OFED_SUFFIX}.${EXT}
+				;;
+		esac
+	fi
+}
diff --git a/eclass/pam.eclass b/eclass/pam.eclass
new file mode 100644
index 000000000000..a690e00a1034
--- /dev/null
+++ b/eclass/pam.eclass
@@ -0,0 +1,262 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License, v2 or later
+# $Id$
+#
+
+# @ECLASS: pam.eclass
+# @MAINTAINER:
+# pam-bugs@gentoo.org
+# @AUTHOR:
+# Diego Pettenò <flameeyes@gentoo.org>
+# @BLURB: Handles pam related tasks
+# @DESCRIPTION:
+# This eclass contains functions to install pamd configuration files and
+# pam modules.
+
+if [[ -z ${_PAM_ECLASS} ]]; then
+_PAM_ECLASS=1
+
+inherit flag-o-matic multilib
+
+# @FUNCTION: dopamd
+# @USAGE: <file> [more files]
+# @DESCRIPTION:
+# Install pam auth config file in /etc/pam.d
+dopamd() {
+	[[ -z $1 ]] && die "dopamd requires at least one argument"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	( # dont want to pollute calling env
+		insinto /etc/pam.d
+		insopts -m 0644
+		doins "$@"
+	) || die "failed to install $@"
+	# Comment out modules the target system may not have (see cleanpamd).
+	cleanpamd "$@"
+}
+
+# @FUNCTION: newpamd
+# @USAGE: <old name> <new name>
+# @DESCRIPTION:
+# Install pam file <old name> as <new name> in /etc/pam.d
+newpamd() {
+	[[ $# -ne 2 ]] && die "newpamd requires two arguments"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	( # dont want to pollute calling env
+		insinto /etc/pam.d
+		insopts -m 0644
+		newins "$1" "$2"
+	) || die "failed to install $1 as $2"
+	# Post-process the installed name ($2), not the source file.
+	cleanpamd $2
+}
+
+# @FUNCTION: dopamsecurity
+# @USAGE: <section> <file> [more files]
+# @DESCRIPTION:
+# Installs the config files in /etc/security/<section>/
+dopamsecurity() {
+	[[ $# -lt 2 ]] && die "dopamsecurity requires at least two arguments"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0
+	fi
+
+	( # dont want to pollute calling env
+		insinto /etc/security/$1
+		insopts -m 0644
+		doins "${@:2}"
+	) || die "failed to install ${@:2}"
+}
+
+# @FUNCTION: newpamsecurity
+# @USAGE: <section> <old name> <new name>
+# @DESCRIPTION:
+# Installs the config file <old name> as <new name> in /etc/security/<section>/
+newpamsecurity() {
+	[[ $# -ne 3 ]] && die "newpamsecurity requires three arguments"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	( # dont want to pollute calling env
+		insinto /etc/security/$1
+		insopts -m 0644
+		newins "$2" "$3"
+	) || die "failed to install $2 as $3"
+}
+
+# @FUNCTION: getpam_mod_dir
+# @DESCRIPTION:
+# Returns the pam modules' directory for current implementation
+getpam_mod_dir() {
+ if has_version sys-libs/pam || has_version sys-libs/openpam; then
+ PAM_MOD_DIR=/$(get_libdir)/security
+ else
+ # Unable to find PAM implementation... defaulting
+ PAM_MOD_DIR=/$(get_libdir)/security
+ fi
+
+ echo ${PAM_MOD_DIR}
+}
+
+# @FUNCTION: pammod_hide_symbols
+# @DESCRIPTION:
+# Hide all non-PAM-used symbols from the module; this function creates a
+# simple ld version script that hides all the symbols that are not
+# necessary for PAM to load the module, then uses append-flags to make
+# sure that it gets used.
+pammod_hide_symbols() {
+	# Version script exporting only the pam_sm_* service entry points;
+	# everything else becomes local to the module.
+	cat - > "${T}"/pam-eclass-pam_symbols.ver <<EOF
+{
+	global: pam_sm_*;
+	local: *;
+};
+EOF
+
+	append-ldflags -Wl,--version-script="${T}"/pam-eclass-pam_symbols.ver
+}
+
+# @FUNCTION: dopammod
+# @USAGE: <file> [more files]
+# @DESCRIPTION:
+# Install pam module file in the pam modules' dir for current implementation
+dopammod() {
+	[[ -z $1 ]] && die "dopammod requires at least one argument"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	exeinto $(getpam_mod_dir)
+	doexe "$@" || die "failed to install $@"
+}
+
+# @FUNCTION: newpammod
+# @USAGE: <old name> <new name>
+# @DESCRIPTION:
+# Install pam module file <old name> as <new name> in the pam
+# modules' dir for current implementation
+newpammod() {
+	# Error message typo fixed: "arguements" -> "arguments".
+	[[ $# -ne 2 ]] && die "newpammod requires two arguments"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	exeinto $(getpam_mod_dir)
+	newexe "$1" "$2" || die "failed to install $1 as $2"
+}
+
+# @FUNCTION: pamd_mimic_system
+# @USAGE: <pamd file> [auth levels]
+# @DESCRIPTION:
+# This function creates a pamd file which mimics system-auth file
+# for the given levels in the /etc/pam.d directory.
+pamd_mimic_system() {
+	# Error message typo fixed: "argments" -> "arguments".
+	[[ $# -lt 2 ]] && die "pamd_mimic_system requires at least two arguments"
+	# Thin wrapper: mimic the system-auth stack for the given file/levels.
+	pamd_mimic system-auth "$@"
+}
+
+# @FUNCTION: pamd_mimic
+# @USAGE: <stack> <pamd file> [auth levels]
+# @DESCRIPTION:
+# This function creates a pamd file which mimics the given stack
+# for the given levels in the /etc/pam.d directory.
+pamd_mimic() {
+	# Error message typo fixed: "argments" -> "arguments".
+	[[ $# -lt 3 ]] && die "pamd_mimic requires at least three arguments"
+
+	# No-op when the package can be built without PAM support and the
+	# pam USE flag is disabled.
+	if has pam ${IUSE} && ! use pam; then
+		return 0;
+	fi
+
+	dodir /etc/pam.d
+	# NOTE: pamdfile/originalstack/authlevels/mimic stay non-local to
+	# preserve the eclass' historical (global) behaviour.
+	pamdfile=${D}/etc/pam.d/$2
+	echo -e "# File autogenerated by pamd_mimic in pam eclass\n\n" >> \
+		$pamdfile
+
+	originalstack=$1
+	authlevels="auth account password session"
+
+	# pam_stack.so was the pre-0.78 mechanism; newer pam uses "include".
+	if has_version '<sys-libs/pam-0.78'; then
+		mimic="\trequired\t\tpam_stack.so service=${originalstack}"
+	else
+		mimic="\tinclude\t\t${originalstack}"
+	fi
+
+	# Drop <stack> and <pamd file>; the rest are the auth levels to emit.
+	shift; shift
+
+	while [[ -n $1 ]]; do
+		has $1 ${authlevels} || die "unknown level type"
+
+		echo -e "$1${mimic}" >> ${pamdfile}
+
+		shift
+	done
+}
+
+# @FUNCTION: cleanpamd
+# @USAGE: <pamd file>
+# @DESCRIPTION:
+# Cleans a pam.d file from modules that might not be present on the system
+# where it's going to be installed
+cleanpamd() {
+	# For each installed pam.d file, comment out lines referencing modules
+	# (pam_shells, pam_console) that only exist with sys-libs/pam.
+	while [[ -n $1 ]]; do
+		if ! has_version sys-libs/pam; then
+			sed -i -e '/pam_shells\|pam_console/s:^:#:' "${D}/etc/pam.d/$1"
+		fi
+
+		shift
+	done
+}
+
+# @FUNCTION: pam_epam_expand
+# @USAGE: <pamd file>
+# @DESCRIPTION:
+# Steer clear, deprecated, don't use, bad experiment
+pam_epam_expand() {
+	# Scan the given files for "#%EPAM-<condition>:<parameter>%#" markers,
+	# then either delete the marked lines (condition not met) or strip the
+	# marker so the line takes effect (condition met).
+	sed -n -e 's|#%EPAM-\([[:alpha:]-]\+\):\([-+<>=/.![:alnum:]]\+\)%#.*|\1 \2|p' \
+		"$@" | sort -u | while read condition parameter; do
+
+		disable="yes"
+
+		case "$condition" in
+			If-Has)
+				message="This can be used only if you have ${parameter} installed"
+				has_version "$parameter" && disable="no"
+				;;
+			Use-Flag)
+				message="This can be used only if you enabled the ${parameter} USE flag"
+				use "$parameter" && disable="no"
+				;;
+			*)
+				eerror "Unknown EPAM condition '${condition}' ('${parameter}')"
+				die "Unknown EPAM condition '${condition}' ('${parameter}')"
+				;;
+		esac
+
+		# Escape '/' in the parameter so it survives as a sed address.
+		if [ "${disable}" = "yes" ]; then
+			sed -i -e "/#%EPAM-${condition}:${parameter/\//\\/}%#/d" "$@"
+		else
+			sed -i -e "s|#%EPAM-${condition}:${parameter}%#||" "$@"
+		fi
+
+	done
+}
+
+# Think about it before uncommenting this one, for now run it by hand
+# pam_pkg_preinst() {
+# eshopts_push -o noglob # so that bash doen't expand "*"
+#
+# pam_epam_expand "${D}"/etc/pam.d/*
+#
+# eshopts_pop # reset old shell opts
+# }
+
+fi
diff --git a/eclass/pax-utils.eclass b/eclass/pax-utils.eclass
new file mode 100644
index 000000000000..e72a04f1fe56
--- /dev/null
+++ b/eclass/pax-utils.eclass
@@ -0,0 +1,189 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: pax-utils.eclass
+# @MAINTAINER:
+# The Gentoo Linux Hardened Team <hardened@gentoo.org>
+# @AUTHOR:
+# Original Author: Kevin F. Quinn <kevquinn@gentoo.org>
+# Modifications for bugs #365825, #431092, #520198, @ ECLASS markup: Anthony G. Basile <blueness@gentoo.org>
+# @BLURB: functions to provide pax markings
+# @DESCRIPTION:
+#
+# This eclass provides support for manipulating PaX markings on ELF binaries,
+# whether the system is using legacy PT_PAX markings or the newer XATTR_PAX.
+# The eclass wraps the use of paxctl-ng, paxctl, set/getattr and scanelf utilities,
+# deciding which to use depending on what's installed on the build host, and
+# whether we're working with PT_PAX, XATTR_PAX or both.
+#
+# To control what markings are made, set PAX_MARKINGS in /etc/portage/make.conf
+# to contain either "PT", "XT" or "none". The default is to apply
+# PT_PAX markings only (see PAX_MARKINGS below).
+
+if [[ -z ${_PAX_UTILS_ECLASS} ]]; then
+_PAX_UTILS_ECLASS=1
+
+# @ECLASS-VARIABLE: PAX_MARKINGS
+# @DESCRIPTION:
+# Control which markings are made:
+# PT = PT_PAX markings, XT = XATTR_PAX markings
+# Default to PT markings.
+PAX_MARKINGS=${PAX_MARKINGS:="PT"}
+
+# @FUNCTION: pax-mark
+# @USAGE: <flags> {<ELF files>}
+# @RETURN: Shell true if we succeed, shell false otherwise
+# @DESCRIPTION:
+# Marks <ELF files> with provided PaX <flags>
+#
+# Flags are passed directly to the utilities unchanged
+#
+# p: disable PAGEEXEC P: enable PAGEEXEC
+# e: disable EMUTRAMP E: enable EMUTRAMP
+# m: disable MPROTECT M: enable MPROTECT
+# r: disable RANDMMAP R: enable RANDMMAP
+# s: disable SEGMEXEC S: enable SEGMEXEC
+#
+# Default flags are 'PeMRS', which are the most restrictive settings. Refer
+# to http://pax.grsecurity.net/ for details on what these flags are all about.
+#
+# Please confirm any relaxation of restrictions with the Gentoo Hardened team.
+# Either ask on the gentoo-hardened mailing list, or CC/assign hardened@g.o on
+# the bug report.
+pax-mark() {
+
+	local f		# loop over paxables
+	local flags	# pax flags
+	local ret=0	# overall return code of this function
+
+	# Only the actual PaX flags and z are accepted
+	# 1. The leading '-' is optional
+	# 2. -C -c only make sense for paxctl, but are unnecessary
+	#    because we progressively do -q -qc -qC
+	# 3. z is allowed for the default
+
+	flags="${1//[!zPpEeMmRrSs]}"
+	[[ "${flags}" ]] || return 0
+	shift
+
+	# z = default. For XATTR_PAX, the default is no xattr field at all
+	local dodefault=""
+	[[ "${flags//[!z]}" ]] && dodefault="yes"
+
+	if has PT ${PAX_MARKINGS}; then
+		_pax_list_files einfo "$@"
+		for f in "$@"; do
+
+			#First try paxctl -> this might try to create/convert program headers
+			if type -p paxctl > /dev/null; then
+				einfo "PT PaX marking -${flags} ${f} with paxctl"
+				# First, try modifying the existing PAX_FLAGS header
+				paxctl -q${flags} "${f}" && continue
+				# Second, try creating a PT_PAX header (works on ET_EXEC)
+				# Even though this is less safe, most exes need it, eg bug #463170
+				paxctl -qC${flags} "${f}" && continue
+				# Third, try stealing the (unused under PaX) PT_GNU_STACK header
+				paxctl -qc${flags} "${f}" && continue
+			fi
+
+			#Next try paxctl-ng -> this will not create/convert any program headers
+			if type -p paxctl-ng > /dev/null && paxctl-ng -L ; then
+				einfo "PT PaX marking -${flags} ${f} with paxctl-ng"
+				flags="${flags//z}"
+				[[ ${dodefault} == "yes" ]] && paxctl-ng -L -z "${f}"
+				[[ "${flags}" ]] || continue
+				paxctl-ng -L -${flags} "${f}" && continue
+			fi
+
+			#Finally fall back on scanelf
+			if type -p scanelf > /dev/null && [[ ${PAX_MARKINGS} != "none" ]]; then
+				scanelf -Xxz ${flags} "$f"
+			#We failed to set PT_PAX flags
+			elif [[ ${PAX_MARKINGS} != "none" ]]; then
+				elog "Failed to set PT_PAX markings -${flags} ${f}."
+				ret=1
+			fi
+		done
+	fi
+
+	if has XT ${PAX_MARKINGS}; then
+		_pax_list_files einfo "$@"
+		# 'z' has no meaning for xattr markings; drop it up front.
+		flags="${flags//z}"
+		for f in "$@"; do
+
+			#First try paxctl-ng
+			if type -p paxctl-ng > /dev/null && paxctl-ng -l ; then
+				einfo "XT PaX marking -${flags} ${f} with paxctl-ng"
+				[[ ${dodefault} == "yes" ]] && paxctl-ng -d "${f}"
+				[[ "${flags}" ]] || continue
+				paxctl-ng -l -${flags} "${f}" && continue
+			fi
+
+			#Next try setfattr
+			if type -p setfattr > /dev/null; then
+				[[ "${flags//[!Ee]}" ]] || flags+="e" # bug 447150
+				einfo "XT PaX marking -${flags} ${f} with setfattr"
+				[[ ${dodefault} == "yes" ]] && setfattr -x "user.pax.flags" "${f}"
+				setfattr -n "user.pax.flags" -v "${flags}" "${f}" && continue
+			fi
+
+			#We failed to set XATTR_PAX flags
+			if [[ ${PAX_MARKINGS} != "none" ]]; then
+				elog "Failed to set XATTR_PAX markings -${flags} ${f}."
+				ret=1
+			fi
+		done
+	fi
+
+	# [[ ${ret} == 1 ]] && elog "Executables may be killed by PaX kernels."
+
+	return ${ret}
+}
+
+# @FUNCTION: list-paxables
+# @USAGE: {<files>}
+# @RETURN: Subset of {<files>} which are ELF executables or shared objects
+# @DESCRIPTION:
+# Print to stdout all of the <files> that are suitable to have PaX flag
+# markings, i.e., filter out the ELF executables or shared objects from a list
+# of files. This is useful for passing wild-card lists to pax-mark, although
+# in general it is preferable for ebuilds to list precisely which ELFS are to
+# be marked. Often not all the ELF installed by a package need remarking.
+# @EXAMPLE:
+# pax-mark -m $(list-paxables ${S}/{,usr/}bin/*)
+list-paxables() {
+	# Emit only the names whose file(1) type mentions an ELF executable or
+	# shared object; "name: type" is trimmed back to the bare name.
+	file "$@" 2> /dev/null | grep -E 'ELF.*(executable|shared object)' | sed -e 's/: .*$//'
+}
+
+# @FUNCTION: host-is-pax
+# @RETURN: Shell true if the build process is PaX enabled, shell false otherwise
+# @DESCRIPTION:
+# This is intended for use where the build process must be modified conditionally
+# depending on whether the host is PaX enabled or not. It is not intended to
+# determine whether the final binaries need PaX markings. Note: if procfs is
+# not mounted on /proc, this returns shell false (e.g. Gentoo/FBSD).
+host-is-pax() {
+	# -s keeps grep quiet when /proc/self/status is absent (e.g. no procfs),
+	# which then correctly reports "not PaX".
+	grep -qs ^PaX: /proc/self/status
+}
+
+
+# INTERNAL FUNCTIONS
+# ------------------
+#
+# These functions are for use internally by the eclass - do not use
+# them elsewhere as they are not supported (i.e. they may be removed
+# or their function may change arbitrarily).
+
+# Display a list of things, one per line, indented a bit, using the
+# display command in $1.
+_pax_list_files() {
+	# $1 = display command (einfo/elog/...), rest = items to print,
+	# one per line, indented.
+	local f cmd
+	cmd=$1
+	shift
+	for f in "$@"; do
+		${cmd} "	${f}"
+	done
+}
+
+fi
diff --git a/eclass/perl-app.eclass b/eclass/perl-app.eclass
new file mode 100644
index 000000000000..eb109f9c67aa
--- /dev/null
+++ b/eclass/perl-app.eclass
@@ -0,0 +1,35 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Author: Michael Cummings <mcummings@gentoo.org>
+# Maintained by the Perl herd <perl@gentoo.org>
+
+# If the ebuild doesn't override this, ensure we do not depend on the perl subslot value
+: ${GENTOO_DEPEND_ON_PERL_SUBSLOT:="no"}
+inherit perl-module
+
+# @FUNCTION: perl-app_src_prep
+# @USAGE: perl-app_src_prep
+# @DESCRIPTION:
+# This is a wrapper function to perl-app_src_configure().
+perl-app_src_prep() {
+	# Historical alias kept for backwards compatibility; defers entirely
+	# to perl-app_src_configure().
+	perl-app_src_configure
+}
+
+# @FUNCTION: perl-app_src_configure
+# @USAGE: perl-app_src_configure
+# @DESCRIPTION:
+# This is a wrapper function to perl-module_src_configure().
+perl-app_src_configure() {
+	# Thin wrapper over the perl-module eclass configure phase.
+	perl-module_src_configure
+}
+
+# @FUNCTION: perl-app_src_compile
+# @USAGE: perl-app_src_compile
+# @DESCRIPTION:
+# This is a wrapper function to perl-module_src_compile().
+perl-app_src_compile() {
+	# EAPI 0/1 had no src_configure phase, so configuration must happen
+	# here before compiling.
+	has "${EAPI:-0}" 0 1 && perl-app_src_prep
+	perl-module_src_compile
+}
diff --git a/eclass/perl-module.eclass b/eclass/perl-module.eclass
new file mode 100644
index 000000000000..341fa891668f
--- /dev/null
+++ b/eclass/perl-module.eclass
@@ -0,0 +1,547 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: perl-module.eclass
+# @MAINTAINER:
+# perl@gentoo.org
+# @AUTHOR:
+# Seemant Kulleen <seemant@gentoo.org>
+# @BLURB: eclass for perl modules
+# @DESCRIPTION:
+# The perl-module eclass is designed to allow easier installation of perl
+# modules, and their incorporation into the Gentoo Linux system.
+
+inherit eutils multiprocessing unpacker
+[[ ${CATEGORY} == "perl-core" ]] && inherit alternatives
+
+# Phase functions this eclass exports by default (see EXPORT_FUNCTIONS below).
+PERL_EXPF="src_unpack src_prepare src_configure src_compile src_test src_install"
+
+case "${EAPI:-0}" in
+	5)
+		# perl-core packages also need the dual-life symlink handling.
+		[[ ${CATEGORY} == "perl-core" ]] && \
+			PERL_EXPF+=" pkg_postinst pkg_postrm"
+
+		case "${GENTOO_DEPEND_ON_PERL:-yes}" in
+			yes)
+				# Subslot dep (:=) rebuilds consumers on perl upgrades.
+				case "${GENTOO_DEPEND_ON_PERL_SUBSLOT:-yes}" in
+					yes)
+						DEPEND="dev-lang/perl:=[-build(-)]"
+						;;
+					*)
+						DEPEND="dev-lang/perl[-build(-)]"
+						;;
+				esac
+				RDEPEND="${DEPEND}"
+				;;
+		esac
+		;;
+	*)
+		die "EAPI=${EAPI} is not supported by perl-module.eclass"
+		;;
+esac
+
+case "${PERL_EXPORT_PHASE_FUNCTIONS:-yes}" in
+	yes)
+		EXPORT_FUNCTIONS ${PERL_EXPF}
+		;;
+	no)
+		debug-print "PERL_EXPORT_PHASE_FUNCTIONS=no"
+		;;
+	*)
+		die "PERL_EXPORT_PHASE_FUNCTIONS=${PERL_EXPORT_PHASE_FUNCTIONS} is not supported by perl-module.eclass"
+		;;
+esac
+
+# The standard Perl dual license, unless the ebuild overrides it.
+LICENSE="${LICENSE:-|| ( Artistic GPL-1+ )}"
+
+# Derive the upstream distribution name/version and working directory
+# from MY_PN/MY_PV/MODULE_VERSION when any of them is set.
+if [[ -n ${MY_PN} || -n ${MY_PV} || -n ${MODULE_VERSION} ]] ; then
+	: ${MY_P:=${MY_PN:-${PN}}-${MY_PV:-${MODULE_VERSION:-${PV}}}}
+	S=${MY_S:-${WORKDIR}/${MY_P}}
+fi
+
+# Default CPAN SRC_URI/HOMEPAGE when the ebuild supplies MODULE_AUTHOR.
+[[ -z "${SRC_URI}" && -z "${MODULE_A}" ]] && \
+	MODULE_A="${MY_P:-${P}}.${MODULE_A_EXT:-tar.gz}"
+[[ -z "${SRC_URI}" && -n "${MODULE_AUTHOR}" ]] && \
+	SRC_URI="mirror://cpan/authors/id/${MODULE_AUTHOR:0:1}/${MODULE_AUTHOR:0:2}/${MODULE_AUTHOR}/${MODULE_SECTION:+${MODULE_SECTION}/}${MODULE_A}"
+[[ -z "${HOMEPAGE}" ]] && \
+	HOMEPAGE="http://search.cpan.org/dist/${MY_PN:-${PN}}/"
+
+# Eclass defaults; ebuilds may override after inherit.
+SRC_PREP="no"
+SRC_TEST="skip"
+PREFER_BUILDPL="yes"
+
+pm_echovar=""
+# Guard so perl_set_version only queries the interpreter once.
+perlinfo_done=false
+
+# @FUNCTION: perl-module_src_unpack
+# @USAGE: perl-module_src_unpack
+# @DESCRIPTION:
+# Unpack the ebuild tarball(s).
+# This function is to be called during the ebuild src_unpack() phase.
+perl-module_src_unpack() {
+	debug-print-function $FUNCNAME "$@"
+
+	# unpacker.eclass handles the various archive formats CPAN dists use.
+	unpacker_src_unpack
+}
+
+# @FUNCTION: perl-module_src_prepare
+# @USAGE: perl-module_src_prepare
+# @DESCRIPTION:
+# Get the ebuild sources ready.
+# This function is to be called during the ebuild src_prepare() phase.
+perl-module_src_prepare() {
+	debug-print-function $FUNCNAME "$@"
+	# Ebuild-supplied patches first, then user patches from /etc/portage.
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+	debug-print "$FUNCNAME: applying user patches"
+	epatch_user
+	if [[ ${PERL_RM_FILES[@]} ]]; then
+		debug-print "$FUNCNAME: stripping unneeded files"
+		perl_rm_files "${PERL_RM_FILES[@]}"
+	fi
+	# Strip AppleDouble ._* droppings and stray VCS directories.
+	perl_fix_osx_extra
+	esvn_clean
+}
+
+# @FUNCTION: perl-module_src_configure
+# @USAGE: perl-module_src_configure
+# @DESCRIPTION:
+# Configure the ebuild sources.
+# This function is to be called during the ebuild src_configure() phase.
+perl-module_src_configure() {
+	debug-print-function $FUNCNAME "$@"
+
+	# Guard: only configure once (also reachable via perl-app_src_prep).
+	[[ ${SRC_PREP} = yes ]] && return 0
+	SRC_PREP="yes"
+
+	perl_check_env
+
+	perl_set_version
+
+	# Keep Makefile.PL/Build.PL from prompting interactively.
+	[[ -z ${pm_echovar} ]] && export PERL_MM_USE_DEFAULT=1
+	# Disable ExtUtils::AutoInstall from prompting
+	export PERL_EXTUTILS_AUTOINSTALL="--skipdeps"
+
+	# Accept myconf both as a plain string and as a bash array.
+	if [[ $(declare -p myconf 2>&-) != "declare -a myconf="* ]]; then
+		local myconf_local=(${myconf})
+	else
+		local myconf_local=("${myconf[@]}")
+	fi
+
+	# Prefer Build.PL over Makefile.PL unless PREFER_BUILDPL=no.
+	if [[ ( ${PREFER_BUILDPL} == yes || ! -f Makefile.PL ) && -f Build.PL ]] ; then
+		if grep -q '\(use\|require\)\s*Module::Build::Tiny' Build.PL ; then
+			einfo "Using Module::Build::Tiny"
+			if [[ ${DEPEND} != *dev-perl/Module-Build-Tiny* && ${PN} != Module-Build-Tiny ]]; then
+				eqawarn "QA Notice: The ebuild uses Module::Build::Tiny but doesn't depend on it."
+				eqawarn "           Add dev-perl/Module-Build-Tiny to DEPEND!"
+				if [[ -n ${PERLQAFATAL} ]]; then
+					eerror "Bailing out due to PERLQAFATAL=1";
+					die
+				fi
+			fi
+		else
+			einfo "Using Module::Build"
+			if [[ ${DEPEND} != *virtual/perl-Module-Build* && ${DEPEND} != *dev-perl/Module-Build* && ${PN} != Module-Build ]] ; then
+				eqawarn "QA Notice: The ebuild uses Module::Build but doesn't depend on it."
+				eqawarn "           Add dev-perl/Module-Build to DEPEND!"
+				if [[ -n ${PERLQAFATAL} ]]; then
+					eerror "Bailing out due to PERLQAFATAL=1";
+					die
+				fi
+			fi
+		fi
+		set -- \
+			--installdirs=vendor \
+			--libdoc= \
+			--destdir="${D}" \
+			--create_packlist=0 \
+			"${myconf_local[@]}"
+		einfo "perl Build.PL" "$@"
+		perl Build.PL "$@" <<< "${pm_echovar}" \
+			|| die "Unable to build!"
+	elif [[ -f Makefile.PL ]] ; then
+		einfo "Using ExtUtils::MakeMaker"
+		set -- \
+			PREFIX=${EPREFIX}/usr \
+			INSTALLDIRS=vendor \
+			INSTALLMAN3DIR='none' \
+			DESTDIR="${D}" \
+			"${myconf_local[@]}"
+		einfo "perl Makefile.PL" "$@"
+		perl Makefile.PL "$@" <<< "${pm_echovar}" \
+			|| die "Unable to build!"
+	fi
+	if [[ ! -f Build.PL && ! -f Makefile.PL ]] ; then
+		einfo "No Make or Build file detected..."
+		return
+	fi
+}
+
+# @FUNCTION: perl-module_src_compile
+# @USAGE: perl-module_src_compile
+# @DESCRIPTION:
+# Compile the ebuild sources.
+# This function is to be called during the ebuild src_compile() phase.
+perl-module_src_compile() {
+	debug-print-function $FUNCNAME "$@"
+	perl_set_version
+
+	# Accept mymake both as a plain string and as a bash array.
+	if [[ $(declare -p mymake 2>&-) != "declare -a mymake="* ]]; then
+		local mymake_local=(${mymake})
+	else
+		local mymake_local=("${mymake[@]}")
+	fi
+
+	# Module::Build-based dists have a generated ./Build script;
+	# MakeMaker-based ones have a Makefile.
+	if [[ -f Build ]] ; then
+		./Build build \
+			|| die "Compilation failed"
+	elif [[ -f Makefile ]] ; then
+		set -- \
+			OTHERLDFLAGS="${LDFLAGS}" \
+			"${mymake_local[@]}"
+		einfo "emake" "$@"
+		emake "$@" \
+			|| die "Compilation failed"
+#			OPTIMIZE="${CFLAGS}" \
+	fi
+}
+
+# @FUNCTION: perl-module_src-test
+# @USAGE: perl-module_src_test()
+# @DESCRIPTION:
+# This code attempts to work out your threadingness from MAKEOPTS
+# and apply them to Test::Harness.
+#
+# If you want more verbose testing, set TEST_VERBOSE=1
+# in your bashrc | /etc/portage/make.conf | ENV
+#
+# or ebuild writers:
+# If you wish to enable default tests w/ 'make test' ,
+#
+# SRC_TEST="do"
+#
+# If you wish to have threads run in parallel ( using the users makeopts )
+# all of the following have been tested to work.
+#
+# SRC_TEST="do parallel"
+# SRC_TEST="parallel"
+# SRC_TEST="parallel do"
+# SRC_TEST=parallel
+#
+perl-module_src_test() {
+	debug-print-function $FUNCNAME "$@"
+	# Only run tests when the ebuild opted in via SRC_TEST="do"/"parallel".
+	if has 'do' ${SRC_TEST} || has 'parallel' ${SRC_TEST} ; then
+		# Parallel harness only when not in verbose mode (verbose output
+		# from parallel jobs would interleave).
+		if has "${TEST_VERBOSE:-0}" 0 && has 'parallel' ${SRC_TEST} ; then
+			export HARNESS_OPTIONS=j$(makeopts_jobs)
+			einfo "Test::Harness Jobs=$(makeopts_jobs)"
+		fi
+		${perlinfo_done} || perl_set_version
+		if [[ -f Build ]] ; then
+			./Build test verbose=${TEST_VERBOSE:-0} || die "test failed"
+		elif [[ -f Makefile ]] ; then
+			emake test TEST_VERBOSE=${TEST_VERBOSE:-0} || die "test failed"
+		fi
+	fi
+}
+
+# @FUNCTION: perl-module_src_install
+# @USAGE: perl-module_src_install
+# @DESCRIPTION:
+# Install a Perl ebuild.
+# This function is to be called during the ebuild src_install() phase.
+perl-module_src_install() {
+	debug-print-function $FUNCNAME "$@"
+
+	perl_set_version
+
+	local f
+
+	if [[ -f Build ]]; then
+		mytargets="${mytargets:-install}"
+		mbparams="${mbparams:---pure}"
+		einfo "./Build ${mytargets} ${mbparams}"
+		./Build ${mytargets} ${mbparams} \
+			|| die "./Build ${mytargets} ${mbparams} failed"
+	elif [[ -f Makefile ]]; then
+		# pure_install skips perllocal.pod bookkeeping for dual-life dists.
+		case "${CATEGORY}" in
+			dev-perl|perl-core) mytargets="pure_install" ;;
+			*) mytargets="install" ;;
+		esac
+		# Accept myinst both as a plain string and as a bash array.
+		if [[ $(declare -p myinst 2>&-) != "declare -a myinst="* ]]; then
+			local myinst_local=(${myinst})
+		else
+			local myinst_local=("${myinst[@]}")
+		fi
+		emake "${myinst_local[@]}" ${mytargets} \
+			|| die "emake ${myinst_local[@]} ${mytargets} failed"
+	fi
+
+	# Clean up installer droppings before merging to the live FS.
+	perl_delete_module_manpages
+	perl_delete_localpod
+	perl_delete_packlist
+	perl_remove_temppath
+
+	# Install standard docs only when non-empty (-s).
+	for f in Change* CHANGES README* TODO FAQ ${mydoc}; do
+		[[ -s ${f} ]] && dodoc ${f}
+	done
+
+	perl_link_duallife_scripts
+}
+
+# @FUNCTION: perl-module_pkg_postinst
+# @USAGE: perl-module_pkg_postinst
+# @DESCRIPTION:
+# This function is to be called during the pkg_postinst() phase. It only does
+# useful things for the perl-core category, where it handles the file renaming and symbolic
+# links that prevent file collisions for dual-life packages installing scripts.
+# In any other category it immediately exits.
+perl-module_pkg_postinst() {
+	debug-print-function $FUNCNAME "$@"
+	# Only perl-core packages need dual-life symlink maintenance.
+	if [[ ${CATEGORY} != perl-core ]] ; then
+		eqawarn "perl-module.eclass: You are calling perl-module_pkg_postinst outside the perl-core category."
+		eqawarn "   This does not do anything; the call can be safely removed."
+		return 0
+	fi
+	perl_link_duallife_scripts
+}
+
+# @FUNCTION: perl-module_pkg_postrm
+# @USAGE: perl-module_pkg_postrm
+# @DESCRIPTION:
+# This function is to be called during the pkg_postrm() phase. It only does
+# useful things for the perl-core category, where it handles the file renaming and symbolic
+# links that prevent file collisions for dual-life packages installing scripts.
+# In any other category it immediately exits.
+perl-module_pkg_postrm() {
+	debug-print-function $FUNCNAME "$@"
+	# Only perl-core packages need dual-life symlink maintenance.
+	if [[ ${CATEGORY} != perl-core ]] ; then
+		eqawarn "perl-module.eclass: You are calling perl-module_pkg_postrm outside the perl-core category."
+		eqawarn "   This does not do anything; the call can be safely removed."
+		return 0
+	fi
+	perl_link_duallife_scripts
+}
+
+# @FUNCTION: perl_set_version
+# @USAGE: perl_set_version
+# @DESCRIPTION:
+# Extract version information and installation paths from the current Perl
+# interpreter.
+#
+# This sets the following variables: PERL_VERSION, SITE_ARCH, SITE_LIB,
+# ARCH_LIB, VENDOR_LIB, VENDOR_ARCH
+#
+# This function used to be called perlinfo as well.
+perl_set_version() {
+	debug-print-function $FUNCNAME "$@"
+	debug-print "$FUNCNAME: perlinfo_done=${perlinfo_done}"
+	# Query the interpreter only once per ebuild environment.
+	${perlinfo_done} && return 0
+	perlinfo_done=true
+
+	# perl -V:name prints shell-eval-able name='value' assignments; the
+	# brace expansion below asks for version + the install* path config keys.
+	local f version install{{site,vendor}{arch,lib},archlib}
+	eval "$(perl -V:{version,install{{site,vendor}{arch,lib},archlib}} )"
+	PERL_VERSION=${version}
+	SITE_ARCH=${installsitearch}
+	SITE_LIB=${installsitelib}
+	ARCH_LIB=${installarchlib}
+	VENDOR_LIB=${installvendorlib}
+	VENDOR_ARCH=${installvendorarch}
+}
+
+# @FUNCTION: perl_delete_localpod
+# @USAGE: perl_delete_localpod
+# @DESCRIPTION:
+# Remove stray perllocal.pod files in the temporary install directory D.
+#
+# This function used to be called fixlocalpod as well.
+perl_delete_localpod() {
+	debug-print-function $FUNCNAME "$@"
+
+	# Drop stray perllocal.pod files, then prune directories left empty.
+	find "${D}" -type f -name perllocal.pod -delete
+	find "${D}" -depth -mindepth 1 -type d -empty -delete
+}
+
+# @FUNCTION: perl_fix_osx_extra
+# @USAGE: perl_fix_osx_extra
+# @DESCRIPTION:
+# Look through ${S} for AppleDouble encoded files and get rid of them.
+perl_fix_osx_extra() {
+	debug-print-function $FUNCNAME "$@"
+
+	# ._* files are AppleDouble resource-fork droppings from OS X tarballs.
+	local f
+	find "${S}" -type f -name "._*" -print0 | while read -rd '' f ; do
+		einfo "Removing AppleDouble encoded Macintosh file: ${f#${S}/}"
+		rm -f "${f}"
+		f=${f#${S}/}
+		# NOTE(review): f is already stripped above, so the second
+		# ${f#${S}/} in the elog below is a no-op.
+		grep -q "${f}" "${S}"/MANIFEST && \
+			elog "AppleDouble encoded Macintosh file in MANIFEST: ${f#${S}/}"
+	done
+}
+
+# @FUNCTION: perl_delete_module_manpages
+# @USAGE: perl_delete_module_manpages
+# @DESCRIPTION:
+# Bump off manpages installed by the current module such as *.3pm files as well
+# as empty directories.
+perl_delete_module_manpages() {
+	debug-print-function $FUNCNAME "$@"
+
+	# Remove section-3pm module manpages and any directories emptied by that.
+	if [[ -d "${ED}"/usr/share/man ]] ; then
+		find "${ED}"/usr/share/man -type f -name "*.3pm" -delete
+		find "${ED}"/usr/share/man -depth -type d -empty -delete
+	fi
+}
+
+# @FUNCTION: perl_delete_packlist
+# @USAGE: perl_delete_packlist
+# @DESCRIPTION:
+# Look through ${D} for .packlist files, empty .bs files and empty directories,
+# and get rid of items found.
+perl_delete_packlist() {
+	debug-print-function $FUNCNAME "$@"
+	perl_set_version
+	# Remove .packlist files and empty bootstrap (.bs) stubs under the
+	# vendor arch dir, then prune directories left empty.
+	if [[ -d ${D}/${VENDOR_ARCH} ]] ; then
+		find "${D}/${VENDOR_ARCH}" -type f -a \( -name .packlist \
+			-o \( -name '*.bs' -a -empty \) \) -delete
+		find "${D}" -depth -mindepth 1 -type d -empty -delete
+	fi
+}
+
+# @FUNCTION: perl_remove_temppath
+# @USAGE: perl_remove_temppath
+# @DESCRIPTION:
+# Look through ${D} for text files containing the temporary installation
+# folder (i.e. ${D}). If the pattern is found (i.e. " text"), replace it with `/'.
+perl_remove_temppath() {
+	debug-print-function $FUNCNAME "$@"
+
+	# Scrub the temporary image path out of installed text files so no
+	# /var/tmp/portage/... path leaks into the live system.
+	# NOTE(review): the sed expression uses ':' as delimiter — it would
+	# misbehave if ${D} ever contained a ':'.
+	find "${D}" -type f -not -name '*.so' -print0 | while read -rd '' f ; do
+		if file "${f}" | grep -q -i " text" ; then
+			grep -q "${D}" "${f}" && ewarn "QA: File contains a temporary path ${f}"
+			sed -i -e "s:${D}:/:g" "${f}"
+		fi
+	done
+}
+
+# @FUNCTION: perl_rm_files
+# @USAGE: perl_rm_files "file_1" "file_2"
+# @DESCRIPTION:
+# Remove certain files from a Perl release and remove them from the MANIFEST
+# while we're there.
+#
+# Most useful in src_prepare for nuking bad tests, and is highly recommended
+# for any tests like 'pod.t', 'pod-coverage.t' or 'kwalitee.t', as what they
+# test is completely irrelevant to end users, and frequently fail simply
+# because the authors of Test::Pod... changed their recommendations, and thus
+# failures are only useful feedback to Authors, not users.
+#
+# Removing from MANIFEST also avoids needless log messages warning
+# users about files "missing from their kit".
+perl_rm_files() {
+	debug-print-function $FUNCNAME "$@"
+	local skipfile="${T}/.gentoo_makefile_skip"
+	local manifile="${S}/MANIFEST"
+	local manitemp="${T}/.gentoo_manifest_temp"
+	local oldifs="$IFS"
+	local filename
+	# $'\n' is an actual newline; the previous IFS="\n" set IFS to the two
+	# characters backslash and 'n'.
+	IFS=$'\n'
+	for filename in "$@"; do
+		# The loop body previously referenced broken $(unknown)
+		# placeholders, which would have executed a nonexistent command;
+		# operate on the loop variable instead.
+		einfo "Removing un-needed ${filename}";
+		# Remove the file
+		rm -f "${S}/${filename}"
+		# Remember it so MANIFEST can be fixed up afterwards
+		[[ -e "${manifile}" ]] && echo "${filename}" >> "${skipfile}"
+	done
+	if [[ -e "${manifile}" && -e "${skipfile}" ]]; then
+		einfo "Fixing Manifest"
+		grep -v -F -f "${skipfile}" "${manifile}" > "${manitemp}"
+		mv -f -- "${manitemp}" "${manifile}"
+		rm -- "${skipfile}";
+	fi
+	IFS="$oldifs"
+}
+
+# @FUNCTION: perl_link_duallife_scripts
+# @USAGE: perl_link_duallife_scripts
+# @DESCRIPTION:
+# Moves files and generates symlinks so dual-life packages installing scripts do not
+# lead to file collisions. Mainly for use in pkg_postinst and pkg_postrm, and makes
+# only sense for perl-core packages.
+perl_link_duallife_scripts() {
+	debug-print-function $FUNCNAME "$@"
+	# Only meaningful for perl-core with a perl new enough to dual-life.
+	if [[ ${CATEGORY} != perl-core ]] || ! has_version ">=dev-lang/perl-5.8.8-r8" ; then
+		return 0
+	fi
+
+	local i ff
+	# In postinst/postrm: (re)point the alternatives symlinks at the
+	# versioned names recorded during src_install.
+	if has "${EBUILD_PHASE:-none}" "postinst" "postrm" ; then
+		for i in "${DUALLIFESCRIPTS[@]}" ; do
+			alternatives_auto_makesym "/${i}" "/${i}-[0-9]*"
+		done
+		for i in "${DUALLIFEMAN[@]}" ; do
+			# Glob for the installed manpage to pick up any compression
+			# suffix appended after ".1".
+			ff=`echo "${EROOT}"/${i%.1}-${PV}-${P}.1*`
+			ff=${ff##*.1}
+			alternatives_auto_makesym "/${i}${ff}" "/${i%.1}-[0-9]*"
+		done
+	else
+		# During src_install: rename scripts/manpages to versioned names
+		# and record them for the postinst/postrm branch above.
+		pushd "${ED}" > /dev/null
+		for i in $(find usr/bin -maxdepth 1 -type f 2>/dev/null) ; do
+			mv ${i}{,-${PV}-${P}} || die
+			#DUALLIFESCRIPTS[${#DUALLIFESCRIPTS[*]}]=${i##*/}
+			DUALLIFESCRIPTS[${#DUALLIFESCRIPTS[*]}]=${i}
+		done
+		for i in $(find usr/share/man/man1 -maxdepth 1 -type f 2>/dev/null) ; do
+			mv ${i} ${i%.1}-${PV}-${P}.1 || die
+			DUALLIFEMAN[${#DUALLIFEMAN[*]}]=${i}
+		done
+		popd > /dev/null
+	fi
+}
+
+# @FUNCTION: perl_check_env
+# @USAGE: perl_check_env
+# @DESCRIPTION:
+# Checks a blacklist of known-suspect ENV values that can be accidentally set by users
+# doing personal perl work, which may accidentally leak into portage and break the
+# system perl installation.
+# Dies if any of the suspect fields are found, and tell the user what needs to be unset.
+# There's a workaround, but you'll have to read the code for it.
+perl_check_env() {
+	# i made local as well, so the loop variable no longer leaks into the
+	# calling environment.
+	local errored value i
+
+	for i in PERL_MM_OPT PERL5LIB PERL5OPT PERL_MB_OPT PERL_CORE PERLPREFIX; do
+		# Next unless match
+		[ -v $i ] || continue;
+
+		# Warn only once, and warn only when one of the bad values are set.
+		# record failure here.
+		if [ ${errored:-0} == 0 ]; then
+			if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+				elog "perl-module.eclass: Suspicious environment values found.";
+			else
+				eerror "perl-module.eclass: Suspicious environment values found.";
+			fi
+		fi
+		errored=1
+
+		# Read ENV value via bash indirect expansion; safer than the
+		# previous eval-based construction.
+		value=${!i}
+
+		# Print ENV name/value pair
+		if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+			elog "    $i=\"$value\"";
+		else
+			eerror "    $i=\"$value\"";
+		fi
+	done
+
+	# Return if there were no failures
+	[ ${errored:-0} == 0 ] && return;
+
+	# Return if user knows what they're doing
+	if [ -n "${I_KNOW_WHAT_I_AM_DOING}" ]; then
+		elog "Continuing anyway, seems you know what you're doing."
+		return
+	fi
+
+	eerror "Your environment settings may lead to undefined behavior and/or build failures."
+	die "Please fix your environment ( ~/.bashrc, package.env, ... ), see above for details."
+}
diff --git a/eclass/php-ext-pecl-r2.eclass b/eclass/php-ext-pecl-r2.eclass
new file mode 100644
index 000000000000..1357a09f5767
--- /dev/null
+++ b/eclass/php-ext-pecl-r2.eclass
@@ -0,0 +1,105 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: php-ext-pecl-r2.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Tal Peer <coredumb@gentoo.org>
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# Author: Jakub Moc <jakub@gentoo.org>
+# @BLURB: A uniform way of installing PECL extensions
+# @DESCRIPTION:
+# This eclass should be used by all dev-php[45]/pecl-* ebuilds
+# as a uniform way of installing PECL extensions.
+# For more information about PECL, see http://pecl.php.net/
+
+# @ECLASS-VARIABLE: PHP_EXT_PECL_PKG
+# @DESCRIPTION:
+# Set in ebuild before inheriting this eclass if the tarball name
+# differs from ${PN/pecl-/} so that SRC_URI and HOMEPAGE gets set
+# correctly by the eclass.
+#
+# Setting this variable manually also affects PHP_EXT_NAME and ${S}
+# unless you override those in ebuild. Also see PHP_EXT_PECL_FILENAME
+# if this is not desired for whatever reason.
+
+# @ECLASS-VARIABLE: PHP_EXT_PECL_FILENAME
+# @DESCRIPTION:
+# Set in ebuild before inheriting this eclass if the tarball name
+# differs from ${PN/pecl-/} so that SRC_URI gets set correctly by
+# the eclass.
+#
+# Unlike PHP_EXT_PECL_PKG, setting this variable does not affect
+# HOMEPAGE, PHP_EXT_NAME or ${S}.
+
+
+# Derive the upstream PECL package name from ${PN} unless overridden.
+[[ -z "${PHP_EXT_PECL_PKG}" ]] && PHP_EXT_PECL_PKG="${PN/pecl-/}"
+
+
+# Upstream version defaults to ${PV}; '_' (e.g. _rc) is stripped below.
+[[ -z ${MY_PV} ]] && MY_PV=${PV}
+
+PECL_PKG="${PHP_EXT_PECL_PKG}"
+MY_PV="${MY_PV/_/}"
+PECL_PKG_V="${PECL_PKG}-${MY_PV}"
+
+# Extension name defaults to the PECL package name.
+[[ -z "${PHP_EXT_NAME}" ]] && PHP_EXT_NAME="${PECL_PKG}"
+
+S="${WORKDIR}/${PECL_PKG_V}"
+
+inherit php-ext-source-r2
+
+EXPORT_FUNCTIONS src_compile src_install src_test
+
+# Tarball name: either an explicit PHP_EXT_PECL_FILENAME or <pkg>-<ver>.tgz
+if [[ -n "${PHP_EXT_PECL_FILENAME}" ]] ; then
+	FILENAME="${PHP_EXT_PECL_FILENAME}-${MY_PV}.tgz"
+else
+	FILENAME="${PECL_PKG_V}.tgz"
+fi
+
+SRC_URI="http://pecl.php.net/get/${FILENAME}"
+HOMEPAGE="http://pecl.php.net/${PECL_PKG}"
+
+
+# @FUNCTION: php-ext-pecl-r2_src_compile
+# @DESCRIPTION:
+# Takes care of standard compile for PECL packages.
+# Thin wrapper: delegates directly to php-ext-source-r2.
+php-ext-pecl-r2_src_compile() {
+	php-ext-source-r2_src_compile
+}
+
+# @FUNCTION: php-ext-pecl-r2_src_install
+# @DESCRIPTION:
+# Takes care of standard install for PECL packages.
+# You can also simply add examples to IUSE to automagically install
+# examples supplied with the package.
+
+# @VARIABLE: DOCS
+# @DESCRIPTION:
+# Set in ebuild if you wish to install additional, package-specific documentation.
+php-ext-pecl-r2_src_install() {
+	php-ext-source-r2_src_install
+
+	# package.xml is unpacked next to ${S}; install it plus any DOCS/CREDITS
+	for doc in ${DOCS} "${WORKDIR}"/package.xml CREDITS ; do
+		[[ -s ${doc} ]] && dodoc ${doc}
+	done
+
+	# NOTE(review): a '+examples' default entry in IUSE would not match this
+	# literal 'has examples' check — confirm callers use plain 'examples'.
+	if has examples ${IUSE} && use examples ; then
+		insinto /usr/share/doc/${CATEGORY}/${PF}/examples
+		doins -r examples/*
+	fi
+}
+
+
+# @FUNCTION: php-ext-pecl-r2_src_test
+# @DESCRIPTION:
+# Takes care of running any tests delivered with the PECL package.
+# Standard phpize generates a run-tests.php file that is executed in make test
+# We only need to force it to non-interactive mode
+php-ext-pecl-r2_src_test() {
+	# Use $( ) instead of deprecated backticks; 'slot' is intentionally
+	# not local — php_init_slot_env also reads the caller's ${slot}.
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+		# NO_INTERACTION makes run-tests.php skip its interactive prompts
+		NO_INTERACTION="yes" emake test || die "emake test failed for slot ${slot}"
+	done
+}
diff --git a/eclass/php-ext-source-r2.eclass b/eclass/php-ext-source-r2.eclass
new file mode 100644
index 000000000000..17073a9962f7
--- /dev/null
+++ b/eclass/php-ext-source-r2.eclass
@@ -0,0 +1,377 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: php-ext-source-r2.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Tal Peer <coredumb@gentoo.org>
+# Author: Stuart Herbert <stuart@gentoo.org>
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# Author: Jakub Moc <jakub@gentoo.org> (documentation)
+# Author: Ole Markus With <olemarkus@gentoo.org>
+# @BLURB: A unified interface for compiling and installing standalone PHP extensions.
+# @DESCRIPTION:
+# This eclass provides a unified interface for compiling and installing standalone
+# PHP extensions (modules).
+
+inherit flag-o-matic autotools multilib eutils
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install
+
+# Build-time tooling needed to regenerate the phpize/autotools build system
+DEPEND=">=sys-devel/m4-1.4.3
+	>=sys-devel/libtool-1.5.18"
+RDEPEND=""
+
+# Only EAPI 4 and 5 are supported (USE dependencies and REQUIRED_USE are needed)
+case ${EAPI} in
+	4|5) ;;
+	*)
+		die "php-ext-source-r2 is not compatible with EAPI=${EAPI}"
+esac
+
+# @ECLASS-VARIABLE: PHP_EXT_NAME
+# @REQUIRED
+# @DESCRIPTION:
+# The extension name. This must be set, otherwise the eclass dies.
+# Only automagically set by php-ext-pecl-r2.eclass, so unless your ebuild
+# inherits that eclass, you must set this manually before inherit.
+[[ -z "${PHP_EXT_NAME}" ]] && die "No module name specified for the php-ext-source-r2 eclass"
+
+# @ECLASS-VARIABLE: PHP_EXT_INI
+# @DESCRIPTION:
+# Controls whether or not to add a line to php.ini for the extension.
+# Defaults to "yes" and should not be changed in most cases.
+[[ -z "${PHP_EXT_INI}" ]] && PHP_EXT_INI="yes"
+
+# @ECLASS-VARIABLE: PHP_EXT_ZENDEXT
+# @DESCRIPTION:
+# Controls whether the extension is a ZendEngine extension or not.
+# Defaults to "no" and if you don't know what is it, you don't need it.
+[[ -z "${PHP_EXT_ZENDEXT}" ]] && PHP_EXT_ZENDEXT="no"
+
+# @ECLASS-VARIABLE: USE_PHP
+# @REQUIRED
+# @DESCRIPTION:
+# Lists the PHP slots compatibile the extension is compatibile with
+# Example:
+# @CODE
+# USE_PHP="php5-5 php5-6"
+# @CODE
+[[ -z "${USE_PHP}" ]] && die "USE_PHP is not set for the php-ext-source-r2 eclass"
+
+# @ECLASS-VARIABLE: PHP_EXT_OPTIONAL_USE
+# @DESCRIPTION:
+# If set, this is the USE flag that the PHP dependencies are behind
+# Most commonly set as PHP_EXT_OPTIONAL_USE=php to get the dependencies behind
+# USE=php.
+
+# @ECLASS-VARIABLE: PHP_EXT_S
+# @DESCRIPTION:
+# The relative location of the temporary build directory for the PHP extension within
+# the source package. This is useful for packages that bundle the PHP extension.
+# Defaults to ${S}
+[[ -z "${PHP_EXT_S}" ]] && PHP_EXT_S="${S}"
+
+# Build IUSE/REQUIRED_USE/PHPDEPEND from USE_PHP: at least one PHP target
+# must be selected.  When PHP_EXT_OPTIONAL_USE is set, the whole constraint
+# and the PHP dependencies are wrapped in that flag's conditional.
+REQUIRED_USE="${PHP_EXT_OPTIONAL_USE}${PHP_EXT_OPTIONAL_USE:+? ( }|| ( "
+for target in ${USE_PHP}; do
+	IUSE="${IUSE} php_targets_${target}"
+	target=${target/+}
+	REQUIRED_USE+="php_targets_${target} "
+	# Map a target such as php5-6 onto the dev-lang/php slot 5.6
+	slot=${target/php}
+	slot=${slot/-/.}
+	PHPDEPEND="${PHPDEPEND}
+	php_targets_${target}? ( dev-lang/php:${slot} )"
+done
+REQUIRED_USE+=") ${PHP_EXT_OPTIONAL_USE:+ )}"
+
+RDEPEND="${RDEPEND}
+	${PHP_EXT_OPTIONAL_USE}${PHP_EXT_OPTIONAL_USE:+? ( }
+	${PHPDEPEND}
+	${PHP_EXT_OPTIONAL_USE:+ )}"
+
+DEPEND="${DEPEND}
+	${PHP_EXT_OPTIONAL_USE}${PHP_EXT_OPTIONAL_USE:+? ( }
+	${PHPDEPEND}
+	${PHP_EXT_OPTIONAL_USE:+ )}
+"
+
+# @FUNCTION: php-ext-source-r2_src_unpack
+# @DESCRIPTION:
+# runs standard src_unpack + _phpize
+
+# @ECLASS-VARIABLE: PHP_EXT_SKIP_PHPIZE
+# @DESCRIPTION:
+# phpize will be run by default for all ebuilds that use
+# php-ext-source-r2_src_unpack
+# Set PHP_EXT_SKIP_PHPIZE="yes" in your ebuild if you do not want to run phpize.
+
+php-ext-source-r2_src_unpack() {
+	unpack ${A}
+
+	# Give every enabled PHP slot its own pristine copy of the sources
+	local src_dir="${PHP_EXT_S}"
+	local slot
+	for slot in $(php_get_slots); do
+		cp -r "${src_dir}" "${WORKDIR}/${slot}" || die "Failed to copy source ${src_dir} to PHP target directory"
+	done
+}
+
+# Run phpize/autotools preparation once per enabled PHP slot.
+php-ext-source-r2_src_prepare() {
+	# orig_s was declared here but never used; dropped.
+	local slot
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+		php-ext-source-r2_phpize
+	done
+}
+
+# @FUNCTION: php-ext-source-r2_phpize
+# @DESCRIPTION:
+# Runs phpize and autotools in addition to the standard src_unpack
+php-ext-source-r2_phpize() {
+	if [[ "${PHP_EXT_SKIP_PHPIZE}" != 'yes' ]] ; then
+		# Create configure out of config.m4
+		# I wish I could run this to solve #329071, but I cannot
+		#autotools_run_tool ${PHPIZE}
+		${PHPIZE}
+		# force run of libtoolize and regeneration of related autotools
+		# files (bug 220519); -f avoids a spurious error when phpize
+		# did not generate an aclocal.m4
+		rm -f aclocal.m4
+		eautoreconf
+	fi
+}
+
+# @FUNCTION: php-ext-source-r2_src_configure
+# @DESCRIPTION:
+# Takes care of standard configure for PHP extensions (modules).
+
+# @ECLASS-VARIABLE: my_conf
+# @DESCRIPTION:
+# Set this in the ebuild to pass configure options to econf.
+
+# Configure the extension once per enabled PHP slot, pointing the build at
+# that slot's php-config.  'my_conf' may be set by the ebuild.
+php-ext-source-r2_src_configure() {
+	# net-snmp creates this file #385403
+	addpredict /usr/share/snmp/mibs/.index
+	addpredict /var/lib/net-snmp/mib_indexes
+
+	local slot
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+		# Set the correct config options
+		econf --with-php-config=${PHPCONFIG} ${my_conf} || die "Unable to configure code to compile"
+	done
+}
+
+# @FUNCTION: php-ext-source-r2_src_compile
+# @DESCRIPTION:
+# Takes care of standard compile for PHP extensions (modules).
+# Build the extension once per enabled PHP slot.
+php-ext-source-r2_src_compile() {
+	# net-snmp creates this file #324739
+	addpredict /usr/share/snmp/mibs/.index
+	addpredict /var/lib/net-snmp/mib_indexes
+
+	# shm extension creates a semaphore file #173574
+	addpredict /session_mm_cli0.sem
+	local slot
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+		emake || die "Unable to make code"
+
+	done
+}
+
+# @FUNCTION: php-ext-source-r2_src_install
+# @DESCRIPTION:
+# Takes care of standard install for PHP extensions (modules).
+
+# @ECLASS-VARIABLE: DOCS
+# @DESCRIPTION:
+# Set in ebuild if you wish to install additional, package-specific documentation.
+# Install the built .so, optional DOCS and headers for every enabled slot,
+# then generate the php.ini snippets.
+php-ext-source-r2_src_install() {
+	local slot
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+
+		# Let's put the default module away; EXT_DIR is set by
+		# php_init_slot_env from this slot's php-config.
+		insinto "${EXT_DIR}"
+		newins "modules/${PHP_EXT_NAME}.so" "${PHP_EXT_NAME}.so" || die "Unable to install extension"
+
+		local doc
+		for doc in ${DOCS} ; do
+			[[ -s ${doc} ]] && dodoc ${doc}
+		done
+
+		INSTALL_ROOT="${D}" emake install-headers
+	done
+	php-ext-source-r2_createinifiles
+}
+
+
+php_get_slots() {
+ local s slot
+ for slot in ${USE_PHP}; do
+ use php_targets_${slot} && s+=" ${slot/-/.}"
+ done
+ echo $s
+}
+
+php_init_slot_env() {
+ libdir=$(get_libdir)
+
+ PHPIZE="/usr/${libdir}/${1}/bin/phpize"
+ PHPCONFIG="/usr/${libdir}/${1}/bin/php-config"
+ PHPCLI="/usr/${libdir}/${1}/bin/php"
+ PHPCGI="/usr/${libdir}/${1}/bin/php-cgi"
+ PHP_PKG="$(best_version =dev-lang/php-${1:3}*)"
+ PHPPREFIX="/usr/${libdir}/${slot}"
+ EXT_DIR="$(${PHPCONFIG} --extension-dir 2>/dev/null)"
+ PHP_CURRENTSLOT=${1:3}
+
+ PHP_EXT_S="${WORKDIR}/${1}"
+ cd "${PHP_EXT_S}"
+}
+
+# Build PHPINIFILELIST (per-slot) and append it to PHPFULLINIFILELIST
+# (cumulative) for slot $1, one <ext>.ini path per installed SAPI.
+# NOTE(review): probes /etc/php on the live filesystem, not ${ROOT} — confirm.
+php-ext-source-r2_buildinilist() {
+	# Work out the list of <ext>.ini files to edit/add to
+	if [[ -z "${PHPSAPILIST}" ]] ; then
+		PHPSAPILIST="apache2 cli cgi fpm embed"
+	fi
+
+	PHPINIFILELIST=""
+	local x
+	for x in ${PHPSAPILIST} ; do
+		if [[ -f "/etc/php/${x}-${1}/php.ini" ]] ; then
+			PHPINIFILELIST="${PHPINIFILELIST} etc/php/${x}-${1}/ext/${PHP_EXT_NAME}.ini"
+		fi
+	done
+	PHPFULLINIFILELIST="${PHPFULLINIFILELIST} ${PHPINIFILELIST}"
+}
+
+# @FUNCTION: php-ext-source-r2_createinifiles
+# @DESCRIPTION:
+# Builds ini files for every enabled slot and SAPI
+php-ext-source-r2_createinifiles() {
+	local slot
+	for slot in $(php_get_slots); do
+		php_init_slot_env ${slot}
+		# Pull in the PHP settings
+
+		# Build the list of <ext>.ini files to edit/add to
+		php-ext-source-r2_buildinilist ${slot}
+
+
+		# Add the needed lines to the <ext>.ini files
+		local file
+		if [[ "${PHP_EXT_INI}" = "yes" ]] ; then
+			for file in ${PHPINIFILELIST}; do
+				php-ext-source-r2_addextension "${PHP_EXT_NAME}.so" "${file}"
+			done
+		fi
+
+		# Symlink the <ext>.ini files from ext/ to ext-active/
+		local inifile
+		for inifile in ${PHPINIFILELIST} ; do
+			# Optionally append the ebuild-supplied ini fragment first
+			if [[ -n "${PHP_EXT_INIFILE}" ]]; then
+				cat "${FILESDIR}/${PHP_EXT_INIFILE}" >> "${ED}/${inifile}"
+				einfo "Added content of ${FILESDIR}/${PHP_EXT_INIFILE} to ${inifile}"
+			fi
+			# inidir is intentionally not local; derive .../ext-active/ path
+			inidir="${inifile/${PHP_EXT_NAME}.ini/}"
+			inidir="${inidir/ext/ext-active}"
+			dodir "/${inidir}"
+			dosym "/${inifile}" "/${inifile/ext/ext-active}"
+		done
+
+		# Add support for installing PHP files into a version dependent directory
+		PHP_EXT_SHARED_DIR="/usr/share/php/${PHP_EXT_NAME}"
+	done
+}
+
+# Append the right 'extension=' / 'zend_extension*=' directive for module $1
+# to ini file $2.  ext_type/ext_file are globals passed on to addtoinifile.
+php-ext-source-r2_addextension() {
+	if [[ "${PHP_EXT_ZENDEXT}" = "yes" ]] ; then
+		# We need the full path for ZendEngine extensions
+		# and we need to check for debugging enabled!
+		if has_version "dev-lang/php:${PHP_CURRENTSLOT}[threads]" ; then
+			if has_version "dev-lang/php:${PHP_CURRENTSLOT}[debug]" ; then
+				ext_type="zend_extension_debug_ts"
+			else
+				ext_type="zend_extension_ts"
+			fi
+			ext_file="${EXT_DIR}/${1}"
+		else
+			if has_version "dev-lang/php:${PHP_CURRENTSLOT}[debug]"; then
+				ext_type="zend_extension_debug"
+			else
+				ext_type="zend_extension"
+			fi
+			ext_file="${EXT_DIR}/${1}"
+		fi
+
+		# php-5.3 unifies zend_extension loading and just requires the
+		# zend_extension keyword with no suffix
+		# TODO: drop previous code and this check once <php-5.3 support is
+		# discontinued
+		if has_version '>=dev-lang/php-5.3' ; then
+			ext_type="zend_extension"
+		fi
+	else
+		# We don't need the full path for normal extensions!
+		ext_type="extension"
+		ext_file="${1}"
+	fi
+
+	php-ext-source-r2_addtoinifile "${ext_type}" "${ext_file}" "${2}" "Extension added"
+}
+
+# $1 - Setting name
+# $2 - Setting value
+# $3 - File to add to
+# $4 - Sanitized text to output
+php-ext-source-r2_addtoinifile() {
+ local inifile="${WORKDIR}/${3}"
+ if [[ ! -d $(dirname ${inifile}) ]] ; then
+ mkdir -p $(dirname ${inifile})
+ fi
+
+ # Are we adding the name of a section?
+ if [[ ${1:0:1} == "[" ]] ; then
+ echo "${1}" >> "${inifile}"
+ my_added="${1}"
+ else
+ echo "${1}=${2}" >> "${inifile}"
+ my_added="${1}=${2}"
+ fi
+
+ if [[ -z "${4}" ]] ; then
+ einfo "Added '${my_added}' to /${3}"
+ else
+ einfo "${4} to /${3}"
+ fi
+
+ insinto /$(dirname ${3})
+ doins "${inifile}"
+}
+
+# @FUNCTION: php-ext-source-r2_addtoinifiles
+# @USAGE: <setting name> <setting value> [message to output]; or just [section name]
+# @DESCRIPTION:
+# Add value settings to php.ini file installed by the extension (module).
+# You can also add a [section], see examples below.
+#
+# @CODE
+# Add some settings for the extension:
+#
+# php-ext-source-r2_addtoinifiles "zend_optimizer.optimization_level" "15"
+# php-ext-source-r2_addtoinifiles "zend_optimizer.enable_loader" "0"
+# php-ext-source-r2_addtoinifiles "zend_optimizer.disable_licensing" "0"
+#
+# Adding values to a section in php.ini file installed by the extension:
+#
+# php-ext-source-r2_addtoinifiles "[Debugger]"
+# php-ext-source-r2_addtoinifiles "debugger.enabled" "on"
+# php-ext-source-r2_addtoinifiles "debugger.profiler_enabled" "on"
+# @CODE
+php-ext-source-r2_addtoinifiles() {
+	# Apply the same setting to every ini file collected so far
+	local target_ini
+	for target_ini in ${PHPFULLINIFILELIST} ; do
+		php-ext-source-r2_addtoinifile "${1}" "${2}" "${target_ini}" "${3}"
+	done
+}
diff --git a/eclass/php-lib-r1.eclass b/eclass/php-lib-r1.eclass
new file mode 100644
index 000000000000..fdbb47a32295
--- /dev/null
+++ b/eclass/php-lib-r1.eclass
@@ -0,0 +1,61 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: php-lib-r1.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Stuart Herbert <stuart@gentoo.org>
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# @BLURB: A unified interface for adding new PHP libraries.
+# @DESCRIPTION:
+# This eclass provides a unified interface for adding new PHP libraries.
+# PHP libraries are PHP scripts designed for reuse inside other PHP scripts.
+
+EXPORT_FUNCTIONS src_install
+# Silence repoman warnings: the ':*' slot operator only exists in EAPI 5+
+case "${EAPI:-0}" in
+	0|1|2|3|4)
+		DEPEND="dev-lang/php"
+		;;
+	*)
+		DEPEND="dev-lang/php:*"
+		;;
+esac
+
+RDEPEND="${DEPEND}"
+
+# @ECLASS-VARIABLE: PHP_LIB_NAME
+# @DESCRIPTION:
+# Defaults to ${PN} unless set manually in the ebuild.
+[[ -z "${PHP_LIB_NAME}" ]] && PHP_LIB_NAME="${PN}"
+
+# @FUNCTION: php-lib-r1_src_install
+# @USAGE: <directory to install from> <list of files>
+# @DESCRIPTION:
+# Takes care of install for PHP libraries.
+# You have to pass in a list of the PHP files to install.
+
+# @VARIABLE: DOCS
+# @DESCRIPTION:
+# Set in ebuild if you wish to install additional, package-specific documentation.
+
+# $1 - directory in ${S} to insert from
+# $2 ... - list of files to install, relative to $1
+php-lib-r1_src_install() {
+	local x doc
+
+	S_DIR="$1"
+	shift
+
+	# "$@" (quoted) keeps filenames containing whitespace intact
+	for x in "$@" ; do
+		SUBDIR="$(dirname "${x}")"
+		insinto "/usr/share/php/${PHP_LIB_NAME}/${SUBDIR}"
+		doins "${S_DIR}/${x}"
+	done
+
+	for doc in ${DOCS} ; do
+		[[ -s ${doc} ]] && dodoc ${doc}
+	done
+}
diff --git a/eclass/php-pear-lib-r1.eclass b/eclass/php-pear-lib-r1.eclass
new file mode 100644
index 000000000000..03440c8abe1a
--- /dev/null
+++ b/eclass/php-pear-lib-r1.eclass
@@ -0,0 +1,97 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: php-pear-lib-r1.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# @BLURB: Provides means for an easy installation of PEAR-based libraries.
+# @DESCRIPTION:
+# This class provides means for an easy installation of PEAR-based libraries,
+# such as Creole, Jargon, Phing etc., while retaining the functionality to put
+# the libraries into version-dependent directories.
+
+inherit multilib
+
+EXPORT_FUNCTIONS pkg_setup src_install
+
+# Silence repoman warnings: the ':*' slot operator only exists in EAPI 5
+case "${EAPI:-0}" in
+	5)
+		PHP_DEPEND="dev-lang/php:*"
+		;;
+	*)
+		PHP_DEPEND="dev-lang/php"
+		;;
+esac
+
+DEPEND="${PHP_DEPEND}
+	>=dev-php/pear-1.9.0"
+RDEPEND="${DEPEND}"
+
+# When a PEAR channel file is declared, derive the upstream package name,
+# ${S} and SRC_URI from PHP_PEAR_PN/PHP_PEAR_PV/PHP_PEAR_URI.
+if [[ -n $PHP_PEAR_CHANNEL ]] ; then
+	PHP_PEAR_PV=${PV/_rc/RC}
+	[[ -z ${PHP_PEAR_PN} ]] && die "Missing PHP_PEAR_PN. Please notify the maintainer"
+	PHP_PEAR_P=${PHP_PEAR_PN}-${PHP_PEAR_PV}
+
+	S="${WORKDIR}/${PHP_PEAR_P}"
+
+	SRC_URI="http://${PHP_PEAR_URI}/get/${PHP_PEAR_P}.tgz"
+fi
+
+
+# @FUNCTION: php-pear-lib-r1_pkg_setup
+# @DESCRIPTION:
+# Adds required PEAR channel if necessary
+php-pear-lib-r1_pkg_setup() {
+	# Nothing to do unless the ebuild declared a channel file
+	[[ -n $PHP_PEAR_CHANNEL ]] || return 0
+
+	if [[ ! -f $PHP_PEAR_CHANNEL ]]; then
+		die "Could not find channel file $PHP_PEAR_CHANNEL"
+	fi
+	pear channel-add $PHP_PEAR_CHANNEL || einfo "Ignore any errors about existing channels"
+}
+
+
+# @FUNCTION: php-pear-lib-r1_src_install
+# @DESCRIPTION:
+# Takes care of standard install for PEAR-based libraries.
+php-pear-lib-r1_src_install() {
+ # SNMP support
+ addpredict /usr/share/snmp/mibs/.index
+ addpredict /var/lib/net-snmp/
+ addpredict /var/lib/net-snmp/mib_indexes
+ addpredict /session_mm_cli0.sem
+
+ PHP_BIN="/usr/bin/php"
+
+ cd "${S}"
+
+ if [[ -f "${WORKDIR}"/package2.xml ]] ; then
+ mv -f "${WORKDIR}/package2.xml" "${S}"
+ local WWW_DIR="/usr/share/webapps/${PN}/${PVR}/htdocs"
+ peardev -d php_bin="${PHP_BIN}" -d www_dir="${WWW_DIR}" \
+ install --force --loose --nodeps --offline --packagingroot="${D}" \
+ "${S}/package2.xml" || die "Unable to install PEAR package"
+ else
+ mv -f "${WORKDIR}/package.xml" "${S}"
+ local WWW_DIR="/usr/share/webapps/${PN}/${PVR}/htdocs"
+ peardev -d php_bin="${PHP_BIN}" -d www_dir="${WWW_DIR}" \
+ install --force --loose --nodeps --offline --packagingroot="${D}" \
+ "${S}/package.xml" || die "Unable to install PEAR package"
+ fi
+
+ rm -Rf "${D}/usr/share/php/.channels" \
+ "${D}/usr/share/php/.depdblock" \
+ "${D}/usr/share/php/.depdb" \
+ "${D}/usr/share/php/.filemap" \
+ "${D}/usr/share/php/.lock" \
+ "${D}/usr/share/php/.registry"
+
+ einfo
+ einfo "Installing to /usr/share/php ..."
+ einfo
+}
diff --git a/eclass/php-pear-r1.eclass b/eclass/php-pear-r1.eclass
new file mode 100644
index 000000000000..c2610307c713
--- /dev/null
+++ b/eclass/php-pear-r1.eclass
@@ -0,0 +1,118 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: php-pear-r1.eclass
+# @MAINTAINER:
+# Gentoo PHP Team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Author: Tal Peer <coredumb@gentoo.org>
+# Author: Luca Longinotti <chtekk@gentoo.org>
+# @BLURB: Provides means for an easy installation of PEAR packages.
+# @DESCRIPTION:
+# This eclass provides means for an easy installation of PEAR packages.
+# For more information on PEAR, see http://pear.php.net/
+# Note that this eclass doesn't handle dependencies of PEAR packages
+# on purpose; please use (R)DEPEND to define them correctly!
+
+inherit multilib
+
+EXPORT_FUNCTIONS pkg_setup src_install
+
+# Silence repoman warnings: the ':*' slot operator only exists in EAPI 5
+case "${EAPI:-0}" in
+	5)
+		PHP_DEPEND="dev-lang/php:*"
+		;;
+	*)
+		PHP_DEPEND="dev-lang/php"
+		;;
+esac
+
+DEPEND="${PHP_DEPEND}
+	>=dev-php/pear-1.8.1"
+RDEPEND="${DEPEND}"
+
+# @ECLASS-VARIABLE: PHP_PEAR_PKG_NAME
+# @DESCRIPTION:
+# Set this if the PEAR package name differs from ${PN/PEAR-/}
+# (generally shouldn't be the case).
+[[ -z "${PHP_PEAR_PKG_NAME}" ]] && PHP_PEAR_PKG_NAME="${PN/PEAR-/}"
+
+# Derive PEAR_PV from ${PV}: drop the '_' separator and convert the Gentoo
+# suffixes rc/beta/alpha to the upstream PEAR spelling RC/b/a.
+fix_PEAR_PV() {
+	# 'tmp' was previously global and leaked into the ebuild environment
+	local tmp="${PV}"
+	tmp="${tmp/_/}"
+	tmp="${tmp/rc/RC}"
+	tmp="${tmp/beta/b}"
+	tmp="${tmp/alpha/a}"
+	PEAR_PV="${tmp}"
+}
+
+# @ECLASS-VARIABLE: PEAR_PV
+# @DESCRIPTION:
+# Set in ebuild if the eclass ${PV} mangling breaks SRC_URI for alpha/beta/rc versions
+[[ -z "${PEAR_PV}" ]] && fix_PEAR_PV
+
+PEAR_PN="${PHP_PEAR_PKG_NAME}-${PEAR_PV}"
+: ${PHP_PEAR_URI:=pear.php.net}
+: ${PHP_PEAR_CHANNEL:=${FILESDIR}/channel.xml}
+
+[[ -z "${SRC_URI}" ]] && SRC_URI="http://${PHP_PEAR_URI}/get/${PEAR_PN}.tgz"
+[[ -z "${HOMEPAGE}" ]] && HOMEPAGE="http://${PHP_PEAR_URI}/${PHP_PEAR_PKG_NAME}"
+
+S="${WORKDIR}/${PEAR_PN}"
+
+# @FUNCTION: php-pear-r1_pkg_setup
+# @DESCRIPTION:
+# Adds required PEAR channel if necessary
+php-pear-r1_pkg_setup() {
+	if [[ -f $PHP_PEAR_CHANNEL ]]; then
+		pear channel-add $PHP_PEAR_CHANNEL || einfo "Ignore any errors about existing channels"
+	fi
+}
+
+# @FUNCTION: php-pear-r1_src_install
+# @DESCRIPTION:
+# Takes care of standard install for PEAR packages.
+php-pear-r1_src_install() {
+	# SNMP support
+	addpredict /usr/share/snmp/mibs/.index
+	addpredict /var/lib/net-snmp/
+	addpredict /var/lib/net-snmp/mib_indexes
+	addpredict /session_mm_cli0.sem
+
+	PHP_BIN="/usr/bin/php"
+
+	cd "${S}"
+
+	# PEAR ships either package2.xml (descriptor v2) or package.xml;
+	# prefer the v2 file.  Factoring this out removes two previously
+	# duplicated install branches.
+	local pkg_xml=package.xml
+	[[ -f "${WORKDIR}"/package2.xml ]] && pkg_xml=package2.xml
+	mv -f "${WORKDIR}/${pkg_xml}" "${S}"
+
+	if has_version '>=dev-php/PEAR-PEAR-1.7.0' ; then
+		# Newer PEAR understands www_dir for web-accessible files
+		local WWW_DIR="/usr/share/webapps/${PN}/${PVR}/htdocs"
+		peardev -d php_bin="${PHP_BIN}" -d www_dir="${WWW_DIR}" \
+			install --force --loose --nodeps --offline --packagingroot="${D}" \
+			"${S}/${pkg_xml}" || die "Unable to install PEAR package"
+	else
+		peardev -d php_bin="${PHP_BIN}" install --force --loose --nodeps --offline --packagingroot="${D}" \
+			"${S}/${pkg_xml}" || die "Unable to install PEAR package"
+	fi
+
+	# Strip PEAR registry/bookkeeping files from the image
+	rm -Rf "${D}/usr/share/php/.channels" \
+		"${D}/usr/share/php/.depdblock" \
+		"${D}/usr/share/php/.depdb" \
+		"${D}/usr/share/php/.filemap" \
+		"${D}/usr/share/php/.lock" \
+		"${D}/usr/share/php/.registry"
+}
diff --git a/eclass/phpconfutils.eclass b/eclass/phpconfutils.eclass
new file mode 100644
index 000000000000..d416ce9fdf95
--- /dev/null
+++ b/eclass/phpconfutils.eclass
@@ -0,0 +1,458 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: phpconfutils.eclass
+# @MAINTAINER:
+# Gentoo PHP team <php-bugs@gentoo.org>
+# @AUTHOR:
+# Based on Stuart's work on the original confutils eclass
+# Luca Longinotti <chtekk@gentoo.org>
+# @BLURB: Provides utility functions to help with configuring PHP.
+# @DESCRIPTION:
+# This eclass provides utility functions to help with configuring PHP.
+# It is only used by other php eclasses currently and the functions
+# are not generally intended for direct use in ebuilds.
+
+
+# ========================================================================
+# List of USE flags that need deps that aren't yet in Portage
+# or that can't be (fex. certain commercial apps)
+#
+# You must define PHPCONFUTILS_MISSING_DEPS if you need this
+
+# ========================================================================
+# phpconfutils_sort_flags()
+#
+# Sort and remove duplicates of the auto-enabled USE flags
+#
+
+phpconfutils_sort_flags() {
+	# Normalise the auto-enabled USE flag list: one flag per line via tr,
+	# then sort and drop duplicates before storing it back.
+	local sorted
+	sorted="$(echo ${PHPCONFUTILS_AUTO_USE} | tr '\040\010' '\012\012' | sort -u)"
+	PHPCONFUTILS_AUTO_USE="${sorted}"
+}
+
+# ========================================================================
+# phpconfutils_init()
+#
+# Call this function from your src_compile() function to initialise
+# this eclass first
+#
+
+# Initialise the eclass state: set the (global) 'shared' suffix used when
+# building extensions as shared objects, then de-duplicate the flag list.
+phpconfutils_init() {
+	# Define whether we shall support shared extensions or not
+	if use "sharedext" ; then
+		shared="=shared"
+	else
+		shared=""
+	fi
+
+	phpconfutils_sort_flags
+}
+
+# ========================================================================
+# phpconfutils_usecheck()
+#
+# Check if the USE flag we want enabled is part of the auto-magical ones
+#
+
+# Return 0 if $1 is one of the auto-enabled USE flags, 1 otherwise.
+phpconfutils_usecheck() {
+	local candidate flag="$1"
+
+	for candidate in ${PHPCONFUTILS_AUTO_USE} ; do
+		if [[ "${candidate}" == "${flag}" ]] ; then
+			return 0
+		fi
+	done
+
+	# Not among the auto-enabled flags
+	return 1
+}
+
+# ========================================================================
+# phpconfutils_require_any()
+#
+# Use this function to ensure one or more of the specified USE flags have
+# been enabled and output the results
+#
+# $1 - message to output everytime a flag is found
+# $2 - message to output everytime a flag is not found
+# $3 .. - flags to check
+#
+
+# Ensure at least one of the listed USE flags is enabled; otherwise warn
+# and auto-enable the first listed flag (recorded in PHPCONFUTILS_AUTO_USE).
+# $1 success message, $2 failure message, $3.. flags (first is the default).
+phpconfutils_require_any() {
+	local success_msg="$1"
+	shift
+	local fail_msg="$1"
+	shift
+
+	local required_flags="$@"
+	# First remaining flag doubles as the auto-enabled default
+	local default_flag="$1"
+	local success="0"
+
+	while [[ -n "$1" ]] ; do
+		if use "$1" ; then
+			einfo "${success_msg} $1"
+			success="1"
+		else
+			einfo "${fail_msg} $1"
+		fi
+		shift
+	done
+
+	# Did we find what we are looking for?
+	if [[ "${success}" == "1" ]] ; then
+		return
+	fi
+
+	# If we get here, then none of the required USE flags were enabled
+	eerror
+	eerror "You should enable one or more of the following USE flags:"
+	eerror "  ${required_flags}"
+	eerror
+	eerror "You can do this by enabling these flags in /etc/portage/package.use:"
+	eerror "  =${CATEGORY}/${PN}-${PVR} ${required_flags}"
+	eerror
+	eerror "The ${default_flag} USE flag was automatically enabled now."
+	eerror
+	PHPCONFUTILS_AUTO_USE="${PHPCONFUTILS_AUTO_USE} ${default_flag}"
+}
+
+# ========================================================================
+# phpconfutils_use_conflict()
+#
+# Use this function to automatically complain to the user if USE flags
+# that directly conflict have been enabled
+#
+# $1 - flag that conflicts with other flags
+# $2 .. - flags that conflict
+#
+
+# Die if flag $1 is enabled together with any of the conflicting flags $2..
+phpconfutils_use_conflict() {
+	phpconfutils_sort_flags
+
+	# Nothing to check unless $1 is enabled (explicitly or automatically)
+	if ! use "$1" && ! phpconfutils_usecheck "$1" ; then
+		return
+	fi
+
+	local my_flag="$1"
+	shift
+
+	local my_present=""
+	local my_remove=""
+
+	# "$1+" != "+" is an empty-test that also works for unset $1
+	while [[ "$1+" != "+" ]] ; do
+		if use "$1" || phpconfutils_usecheck "$1" ; then
+			my_present="${my_present} $1"
+			my_remove="${my_remove} -$1"
+		fi
+		shift
+	done
+
+	if [[ -n "${my_present}" ]] ; then
+		eerror
+		eerror "USE flag '${my_flag}' conflicts with these USE flag(s):"
+		eerror "  ${my_present}"
+		eerror
+		eerror "You must disable these conflicting flags before you can emerge this package."
+		eerror "You can do this by disabling these flags in /etc/portage/package.use:"
+		eerror "  =${CATEGORY}/${PN}-${PVR} ${my_remove}"
+		eerror
+		die "Conflicting USE flags found"
+	fi
+}
+
+# ========================================================================
+# phpconfutils_use_depend_all()
+#
+# Use this function to specify USE flags that depend on eachother,
+# they will be automatically enabled and used for checks later
+#
+# $1 - flag that depends on other flags
+# $2 .. - the flags that must be set for $1 to be valid
+#
+
+# If flag $1 is enabled, auto-enable every missing flag in $2.. that it
+# requires, and warn the user about the implicit additions.
+phpconfutils_use_depend_all() {
+	phpconfutils_sort_flags
+
+	# Nothing to check unless $1 is enabled (explicitly or automatically)
+	if ! use "$1" && ! phpconfutils_usecheck "$1" ; then
+		return
+	fi
+
+	local my_flag="$1"
+	shift
+
+	local my_missing=""
+
+	while [[ "$1+" != "+" ]] ; do
+		if ! use "$1" && ! phpconfutils_usecheck "$1" ; then
+			my_missing="${my_missing} $1"
+		fi
+		shift
+	done
+
+	if [[ -n "${my_missing}" ]] ; then
+		PHPCONFUTILS_AUTO_USE="${PHPCONFUTILS_AUTO_USE} ${my_missing}"
+		ewarn
+		ewarn "USE flag '${my_flag}' needs these additional flag(s) set:"
+		ewarn "  ${my_missing}"
+		ewarn
+		ewarn "'${my_missing}' was automatically enabled and the required extensions will be"
+		ewarn "built. In any case it is recommended to enable those flags for"
+		ewarn "future reference, by adding the following to /etc/portage/package.use:"
+		ewarn "  =${CATEGORY}/${PN}-${PVR} ${my_missing}"
+		ewarn
+	fi
+}
+
+# ========================================================================
+# phpconfutils_use_depend_any()
+#
+# Use this function to automatically complain to the user if a USE flag
+# depends on another USE flag that hasn't been enabled
+#
+# $1 - flag that depends on other flags
+# $2 - flag that is used as default if none is enabled
+# $3 .. - flags that must be set for $1 to be valid
+#
+
+# If flag $1 is enabled but none of the flags $3.. are, auto-enable the
+# default flag $2 and warn the user.
+phpconfutils_use_depend_any() {
+	phpconfutils_sort_flags
+
+	# Nothing to check unless $1 is enabled (explicitly or automatically)
+	if ! use "$1" && ! phpconfutils_usecheck "$1" ; then
+		return
+	fi
+
+	local my_flag="$1"
+	shift
+
+	local my_default_flag="$1"
+	shift
+
+	local my_found=""
+	local my_missing=""
+
+	while [[ "$1+" != "+" ]] ; do
+		if use "$1" || phpconfutils_usecheck "$1" ; then
+			my_found="${my_found} $1"
+		else
+			my_missing="${my_missing} $1"
+		fi
+		shift
+	done
+
+	if [[ -z "${my_found}" ]] ; then
+		PHPCONFUTILS_AUTO_USE="${PHPCONFUTILS_AUTO_USE} ${my_default_flag}"
+		ewarn
+		ewarn "USE flag '${my_flag}' needs one of these additional flag(s) set:"
+		ewarn "  ${my_missing}"
+		ewarn
+		ewarn "'${my_default_flag}' was automatically selected and enabled."
+		ewarn "You can change that by enabling/disabling those flags accordingly"
+		ewarn "in /etc/portage/package.use."
+		ewarn
+	fi
+}
+
+# ========================================================================
+# phpconfutils_extension_disable()
+#
+# Use this function to disable an extension that is enabled by default.
+# This is provided for those rare configure scripts that don't support
+# a --enable for the corresponding --disable
+#
+# $1 - extension name
+# $2 - USE flag
+# $3 - optional message to einfo() to the user
+#
+
+phpconfutils_extension_disable() {
+	# Append --disable-$1 to my_conf only when USE flag $2 is off;
+	# the optional $3 enables the informational output.
+	if use "$2" || phpconfutils_usecheck "$2" ; then
+		[[ -n "$3" ]] && einfo " Enabling $1"
+	else
+		my_conf="${my_conf} --disable-$1"
+		[[ -n "$3" ]] && einfo " Disabling $1"
+	fi
+}
+
+# ========================================================================
+# phpconfutils_extension_enable()
+#
+# This function is like use_enable(), except that it knows about
+# enabling modules as shared libraries, and it supports passing
+# additional data with the switch
+#
+# $1 - extension name
+# $2 - USE flag
+# $3 - 1 = support shared, 0 = never support shared
+# $4 - additional setting for configure
+# $5 - additional message to einfo out to the user
+#
+
+# use_enable()-style helper that also understands '=shared' builds and an
+# extra configure payload.  $1 ext, $2 USE flag, $3 shared-allowed (1/0),
+# $4 extra configure data, $5 optional message.
+phpconfutils_extension_enable() {
+	local my_shared
+
+	if [[ "$3" == "1" ]] ; then
+		# 'shared' is the global set up by phpconfutils_init
+		if [[ "${shared}+" != "+" ]] ; then
+			my_shared="${shared}"
+			if [[ "$4+" != "+" ]] ; then
+				my_shared="${my_shared},$4"
+			fi
+		elif [[ "$4+" != "+" ]] ; then
+			my_shared="=$4"
+		fi
+	else
+		if [[ "$4+" != "+" ]] ; then
+			my_shared="=$4"
+		fi
+	fi
+
+	if use "$2" || phpconfutils_usecheck "$2" ; then
+		my_conf="${my_conf} --enable-$1${my_shared}"
+		einfo " Enabling $1"
+	else
+		my_conf="${my_conf} --disable-$1"
+		einfo " Disabling $1"
+	fi
+}
+
+# ========================================================================
+# phpconfutils_extension_without()
+#
+# Use this function to disable an extension that is enabled by default
+# This function is provided for those rare configure scripts that support
+# --without but not the corresponding --with
+#
+# $1 - extension name
+# $2 - USE flag
+# $3 - optional message to einfo() to the user
+#
+
+phpconfutils_extension_without() {
+	# Append --without-$1 to my_conf only when USE flag $2 is off
+	if use "$2" || phpconfutils_usecheck "$2" ; then
+		einfo " Enabling $1"
+	else
+		my_conf="${my_conf} --without-$1"
+		einfo " Disabling $1"
+	fi
+}
+
+# ========================================================================
+# phpconfutils_extension_with()
+#
+# This function is a replacement for use_with. It supports building
+# extensions as shared libraries,
+#
+# $1 - extension name
+# $2 - USE flag
+# $3 - 1 = support shared, 0 = never support shared
+# $4 - additional setting for configure
+# $5 - optional message to einfo() out to the user
+#
+
+# use_with()-style helper mirroring phpconfutils_extension_enable():
+# $1 ext, $2 USE flag, $3 shared-allowed (1/0), $4 extra data, $5 message.
+phpconfutils_extension_with() {
+	local my_shared
+
+	if [[ "$3" == "1" ]] ; then
+		# 'shared' is the global set up by phpconfutils_init
+		if [[ "${shared}+" != "+" ]] ; then
+			my_shared="${shared}"
+			if [[ "$4+" != "+" ]] ; then
+				my_shared="${my_shared},$4"
+			fi
+		elif [[ "$4+" != "+" ]] ; then
+			my_shared="=$4"
+		fi
+	else
+		if [[ "$4+" != "+" ]] ; then
+			my_shared="=$4"
+		fi
+	fi
+
+	if use "$2" || phpconfutils_usecheck "$2" ; then
+		my_conf="${my_conf} --with-$1${my_shared}"
+		einfo " Enabling $1"
+	else
+		my_conf="${my_conf} --without-$1"
+		einfo " Disabling $1"
+	fi
+}
+
+# ========================================================================
+# phpconfutils_warn_about_external_deps()
+#
+# This will output a warning to the user if he enables commercial or other
+# software not currently present in Portage
+#
+
+# Warn (and beep) when any flag from PHPCONFUTILS_MISSING_DEPS is enabled,
+# since those pull in software not available from Portage.
+phpconfutils_warn_about_external_deps() {
+	phpconfutils_sort_flags
+
+	local x
+	local my_found="0"
+
+	for x in ${PHPCONFUTILS_MISSING_DEPS} ; do
+		if use "${x}" || phpconfutils_usecheck "${x}" ; then
+			ewarn "USE flag ${x} enables support for software not present in Portage!"
+			my_found="1"
+		fi
+	done
+
+	if [[ "${my_found}" == "1" ]] ; then
+		ewarn
+		ewarn "This ebuild will continue, but if you haven't already installed the"
+		ewarn "software required to satisfy the list above, this package will probably"
+		ewarn "fail to compile later on."
+		ewarn "*DO NOT* file bugs about compile failures or issues you're having"
+		ewarn "when using one of those flags, as we aren't able to support them."
+		ewarn "|=|=|=|=|=|=| You are on your own if you use them! |=|=|=|=|=|=|"
+		ewarn
+		ebeep 5
+	fi
+}
+
+# ========================================================================
+# phpconfutils_built_with_use()
+#
+# Substitute for built_with_use() to support the magically enabled USE flags
+#
+
+# built_with_use() replacement that consults the PHP_USEFILE written by
+# phpconfutils_generate_usefile().  Optional leading -a (all, default) or
+# -o (any); then a package atom; then the USE flags to check.
+phpconfutils_built_with_use() {
+	local opt="$1"
+	# Consume the option only if one was given; default to -a (all)
+	[[ ${opt:0:1} = "-" ]] && shift || opt="-a"
+
+	local PHP_PKG=$(best_version $1)
+	shift
+
+	local PHP_USEFILE="${ROOT}/var/lib/php-pkg/${PHP_PKG}/PHP_USEFILE"
+
+	# NOTE(review): a missing use-file is treated as "built with everything"
+	[[ ! -e "${PHP_USEFILE}" ]] && return 0
+
+	local PHP_USE_BUILT=$(<${PHP_USEFILE})
+	while [[ $# -gt 0 ]] ; do
+		if [[ ${opt} = "-o" ]] ; then
+			has $1 ${PHP_USE_BUILT} && return 0
+		else
+			has $1 ${PHP_USE_BUILT} || return 1
+		fi
+		shift
+	done
+	# -a with all flags present succeeds; -o with none present fails
+	[[ ${opt} = "-a" ]]
+}
+
+# ========================================================================
+# phpconfutils_generate_usefile()
+#
+# Generate the file used by phpconfutils_built_with_use() to check its
+# USE flags
+#
+
+phpconfutils_generate_usefile() {
+	phpconfutils_sort_flags
+
+	# Persist the auto-enabled USE flags in the image for later lookup
+	local use_record="${D}/var/lib/php-pkg/${CATEGORY}/${PN}-${PVR}/PHP_USEFILE"
+
+	dodir "/var/lib/php-pkg/${CATEGORY}/${PN}-${PVR}/"
+	echo "${PHPCONFUTILS_AUTO_USE}" > "${use_record}"
+}
diff --git a/eclass/portability.eclass b/eclass/portability.eclass
new file mode 100644
index 000000000000..2e4e01d51432
--- /dev/null
+++ b/eclass/portability.eclass
@@ -0,0 +1,156 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: portability.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @AUTHOR:
+# Diego Pettenò <flameeyes@gentoo.org>
+# @BLURB: This eclass is created to avoid using non-portable GNUisms inside ebuilds
+
+if [[ -z ${_PORTABILITY_ECLASS} ]]; then
+_PORTABILITY_ECLASS=1
+
+# @FUNCTION: treecopy
+# @USAGE: <orig1> [orig2 orig3 ....] <dest>
+# @RETURN:
+# @DESCRIPTION:
+# mimic cp --parents copy, but working on BSD userland as well
+treecopy() {
+	# The last argument is the destination; everything before it is a source.
+	local dest=${!#}
+	# NOTE(review): files_count is assigned but never used below.
+	local files_count=$#
+
+	while (( $# > 1 )); do
+		# Recreate each source's parent directory under ${dest} before copying,
+		# mimicking GNU `cp --parents`.
+		local dirstruct=$(dirname "$1")
+		mkdir -p "${dest}/${dirstruct}" || die
+		cp -pPR "$1" "${dest}/${dirstruct}" || die
+
+		shift
+	done
+}
+
+# @FUNCTION: seq
+# @USAGE: [min] <max> [step]
+# @RETURN: sequence from min to max regardless of seq command being present on system
+# @DESCRIPTION:
+# compatibility function that mimics the seq command if not available
+seq() {
+	# First try `seq`
+	local p=$(type -P seq)
+	if [[ -n ${p} ]] ; then
+		"${p}" "$@" || die
+		return $?
+	fi
+
+	# Normalize the 1/2/3-argument call forms into min/max/step.
+	local min max step
+	case $# in
+		1) min=1 max=$1 step=1 ;;
+		2) min=$1 max=$2 step=1 ;;
+		3) min=$1 max=$3 step=$2 ;;
+		*) die "seq called with wrong number of arguments" ;;
+	esac
+
+	# Then try `jot`
+	p=$(type -P jot)
+	if [[ -n ${p} ]] ; then
+		local reps
+		# BSD userland
+		# jot takes a repetition count rather than a stop value, so derive it.
+		if [[ ${step} != 0 ]] ; then
+			reps=$(( (max - min) / step + 1 ))
+		else
+			reps=0
+		fi
+
+		jot $reps $min $max $step || die
+		return $?
+	fi
+
+	# Screw it, do the output ourselves
+	# NOTE(review): `<` / `>` inside [[ ]] compare as *strings*, so the stop
+	# condition is lexicographic (e.g. "10" sorts before "9") -- confirm that
+	# callers only pass ranges where this happens to behave numerically.
+	while :; do
+		[[ $max < $min && $step > 0 ]] && break
+		[[ $min < $max && $step < 0 ]] && break
+		echo $min
+		: $(( min += step ))
+	done
+	return 0
+}
+
+# @FUNCTION: dlopen_lib
+# @USAGE:
+# @RETURN: linker flag if needed
+# @DESCRIPTION:
+# Gets the linker flag to link to dlopen() function
+dlopen_lib() {
+	# - Solaris needs nothing
+	# - Darwin needs nothing
+	# - *BSD needs nothing
+	# - Linux needs -ldl (glibc and uclibc)
+	# - Interix needs -ldl
+	# Prints nothing for platforms where dlopen() lives in libc.
+	case "${CHOST}" in
+		*-linux-gnu*|*-linux-uclibc|*-interix*)
+			echo "-ldl"
+			;;
+	esac
+}
+
+# @FUNCTION: get_bmake
+# @USAGE:
+# @RETURN: system version of make
+# @DESCRIPTION:
+# Gets the name of the BSD-ish make command (pmake from NetBSD)
+#
+# This will return make (provided by system packages) for BSD userlands,
+# or bsdmake for Darwin userlands and pmake for the rest of userlands,
+# both of which are provided by sys-devel/pmake package.
+#
+# Note: the bsdmake name for the Darwin userland is chosen for compatibility
+# with the Mac OS X default name.
+get_bmake() {
+	# BSD userlands ship a native BSD make; Darwin uses the bsdmake name;
+	# everything else gets pmake (both provided by sys-devel/pmake).
+	if [[ ${USERLAND} == *BSD ]]; then
+		echo make
+	elif [[ ${USERLAND} == "Darwin" ]]; then
+		echo bsdmake
+	else
+		echo pmake
+	fi
+}
+
+# @FUNCTION: get_mounts
+# @USAGE:
+# @RETURN: table of mounts in form "point node fs opts"
+# @MAINTAINER:
+# @DESCRIPTION:
+# Portable method of getting mount names and points.
+# Returns as "point node fs options"
+# Remember to convert 040 back to a space.
+get_mounts() {
+	local point= node= fs= opts= foo=
+
+	# Linux has /proc/mounts which should always exist
+	if [[ $(uname -s) == "Linux" ]] ; then
+		# /proc/mounts lines are "node point fs opts ..."; reorder them to the
+		# documented "point node fs opts" output format.
+		while read node point fs opts foo ; do
+			echo "${point} ${node} ${fs} ${opts}"
+		done < /proc/mounts
+		return
+	fi
+
+	# OK, pray we have a -p option that outputs mounts in fstab format
+	# using tabs as the separator.
+	# Then pray that there are no tabs in the output either.
+	# Currently only FreeBSD supports this and the other BSDs will
+	# have to be patched.
+	# Although the BSDs may support /proc, they do NOT put \040 in place
+	# of the spaces and we should not force a /proc either.
+	local IFS=$'\t'
+	LC_ALL=C mount -p | while read node point fs foo ; do
+		# The third field may carry trailing mount options; split them off.
+		opts=${fs#* }
+		fs=${fs%% *}
+		echo "${point// /\040} ${node// /\040} ${fs%% *} ${opts// /\040}"
+	done
+}
+
+_dead_portability_user_funcs() { die "if you really need this, please file a bug for base-system@gentoo.org"; }
+is-login-disabled() { _dead_portability_user_funcs; }
+
+fi
diff --git a/eclass/prefix.eclass b/eclass/prefix.eclass
new file mode 100644
index 000000000000..1f3c4b0c4a5b
--- /dev/null
+++ b/eclass/prefix.eclass
@@ -0,0 +1,52 @@
+# Copyright 1999-2009 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: prefix.eclass
+# @MAINTAINER:
+# Feel free to contact the Prefix team through <prefix@gentoo.org> if
+# you have problems, suggestions or questions.
+# @BLURB: Eclass to provide Prefix functionality
+# @DESCRIPTION:
+# Gentoo Prefix allows users to install into a self defined offset
+# located somewhere in the filesystem. Prefix ebuilds require
+# additional functions and variables which are defined by this eclass.
+
+# @ECLASS-VARIABLE: EPREFIX
+# @DESCRIPTION:
+# The offset prefix of a Gentoo Prefix installation. When Gentoo Prefix
+# is not used, ${EPREFIX} should be "". Prefix Portage sets EPREFIX,
+# hence this eclass has nothing to do here in that case.
+# Note that setting EPREFIX in the environment with Prefix Portage sets
+# Portage into cross-prefix mode.
+if [[ ! ${EPREFIX+set} ]]; then
+ export EPREFIX=''
+fi
+
+
+# @FUNCTION: eprefixify
+# @USAGE: <list of to be eprefixified files>
+# @DESCRIPTION:
+# replaces @GENTOO_PORTAGE_EPREFIX@ with ${EPREFIX} for the given files,
+# dies if no arguments are given, a file does not exist, or changing a
+# file failed.
+eprefixify() {
+	[[ $# -lt 1 ]] && die "at least one argument required"
+
+	einfo "Adjusting to prefix ${EPREFIX:-/}"
+	local x
+	for x in "$@" ; do
+		if [[ -e ${x} ]] ; then
+			# Rewrite the placeholder in place; eend propagates sed's status
+			# and we die on any failure.
+			ebegin "  ${x##*/}"
+			sed -i -e "s|@GENTOO_PORTAGE_EPREFIX@|${EPREFIX}|g" "${x}"
+			eend $? || die "failed to eprefixify ${x}"
+		else
+			die "${x} does not exist"
+		fi
+	done
+
+	return 0
+}
+
+
+# vim: tw=72:
diff --git a/eclass/python-any-r1.eclass b/eclass/python-any-r1.eclass
new file mode 100644
index 000000000000..0f544d9baac1
--- /dev/null
+++ b/eclass/python-any-r1.eclass
@@ -0,0 +1,326 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: python-any-r1
+# @MAINTAINER:
+# Python team <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
+# @BLURB: An eclass for packages having build-time dependency on Python.
+# @DESCRIPTION:
+# A minimal eclass for packages which need any Python interpreter
+# installed without a need for explicit choice and invariability.
+# This usually involves packages requiring Python at build-time
+# but having no other relevance to it.
+#
+# This eclass provides a minimal PYTHON_DEPS variable with a dependency
+# string on any of the supported Python implementations. It also exports
+# pkg_setup() which finds the best supported implementation and sets it
+# as the active one.
+#
+# Optionally, you can define a python_check_deps() function. It will
+# be called by the eclass with EPYTHON set to each matching Python
+# implementation and it is expected to check whether the implementation
+# fulfills the package requirements. You can use the locally exported
+# PYTHON_USEDEP to check USE-dependencies of relevant packages. It
+# should return a true value (0) if the Python implementation fulfills
+# the requirements, a false value (non-zero) otherwise.
+#
+# Please note that python-any-r1 will always inherit python-utils-r1
+# as well. Thus, all the functions defined there can be used in the
+# packages using python-any-r1, and there is no need ever to inherit
+# both.
+#
+# For more information, please see the wiki:
+# https://wiki.gentoo.org/wiki/Project:Python/python-any-r1
+
+case "${EAPI:-0}" in
+ 0|1|2|3|4|5)
+ # EAPI=4 needed by python-r1
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ! ${_PYTHON_ANY_R1} ]]; then
+
+if [[ ${_PYTHON_R1} ]]; then
+ die 'python-any-r1.eclass can not be used with python-r1.eclass.'
+elif [[ ${_PYTHON_SINGLE_R1} ]]; then
+ die 'python-any-r1.eclass can not be used with python-single-r1.eclass.'
+fi
+
+inherit python-utils-r1
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
+
+if [[ ! ${_PYTHON_ANY_R1} ]]; then
+
+# @ECLASS-VARIABLE: PYTHON_COMPAT
+# @REQUIRED
+# @DESCRIPTION:
+# This variable contains a list of Python implementations the package
+# supports. It must be set before the `inherit' call. It has to be
+# an array.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_5,2_6,2_7} )
+# @CODE
+if ! declare -p PYTHON_COMPAT &>/dev/null; then
+ die 'PYTHON_COMPAT not declared.'
+fi
+
+# @ECLASS-VARIABLE: PYTHON_REQ_USE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The list of USEflags required to be enabled on the Python
+# implementations, formed as a USE-dependency string. It should be valid
+# for all implementations in PYTHON_COMPAT, so it may be necessary to
+# use USE defaults.
+#
+# Example:
+# @CODE
+# PYTHON_REQ_USE="gdbm,ncurses(-)?"
+# @CODE
+#
+# It will cause the Python dependencies to look like:
+# @CODE
+# || ( dev-lang/python:X.Y[gdbm,ncurses(-)?] ... )
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_DEPS
+# @DESCRIPTION:
+# This is an eclass-generated Python dependency string for all
+# implementations listed in PYTHON_COMPAT.
+#
+# Any of the supported interpreters will satisfy the dependency.
+#
+# Example use:
+# @CODE
+# DEPEND="${RDEPEND}
+# ${PYTHON_DEPS}"
+# @CODE
+#
+# Example value:
+# @CODE
+# || ( dev-lang/python:2.7[gdbm]
+# dev-lang/python:2.6[gdbm] )
+# @CODE
+
+_python_build_set_globals() {
+	# Build the any-of PYTHON_DEPS dependency string from PYTHON_COMPAT.
+	local usestr i PYTHON_PKG_DEP
+	# NOTE(review): usestr is computed here but not referenced below --
+	# presumably python_export applies PYTHON_REQ_USE itself; verify.
+	[[ ${PYTHON_REQ_USE} ]] && usestr="[${PYTHON_REQ_USE}]"
+
+	# check for invalid PYTHON_COMPAT
+	for i in "${PYTHON_COMPAT[@]}"; do
+		# the function simply dies on invalid impl
+		_python_impl_supported "${i}"
+	done
+
+	PYTHON_DEPS=
+	for i in "${_PYTHON_ALL_IMPLS[@]}"; do
+		has "${i}" "${PYTHON_COMPAT[@]}" || continue
+
+		python_export "${i}" PYTHON_PKG_DEP
+
+		# Prepend, so the || group lists impls in reverse _PYTHON_ALL_IMPLS order.
+		PYTHON_DEPS="${PYTHON_PKG_DEP} ${PYTHON_DEPS}"
+	done
+	PYTHON_DEPS="|| ( ${PYTHON_DEPS})"
+}
+_python_build_set_globals
+
+# @ECLASS-VARIABLE: PYTHON_USEDEP
+# @DESCRIPTION:
+# An eclass-generated USE-dependency string for the currently tested
+# implementation. It is set locally for python_check_deps() call.
+#
+# The generated USE-flag list is compatible with packages using python-r1,
+# python-single-r1 and python-distutils-ng eclasses. It must not be used
+# on packages using python.eclass.
+#
+# Example use:
+# @CODE
+# python_check_deps() {
+# has_version "dev-python/foo[${PYTHON_USEDEP}]"
+# }
+# @CODE
+#
+# Example value:
+# @CODE
+# python_targets_python2_7(-)?,python_single_target_python2_7(+)?
+# @CODE
+
+# @FUNCTION: python_gen_any_dep
+# @USAGE: <dependency-block>
+# @DESCRIPTION:
+# Generate an any-of dependency that enforces a version match between
+# the Python interpreter and Python packages. <dependency-block> needs
+# to list one or more dependencies with verbatim '${PYTHON_USEDEP}'
+# references (quoted!) that will get expanded inside the function.
+#
+# This should be used along with an appropriate python_check_deps()
+# that checks which of the any-of blocks were matched.
+#
+# Example use:
+# @CODE
+# DEPEND="$(python_gen_any_dep '
+# dev-python/foo[${PYTHON_USEDEP}]
+# || ( dev-python/bar[${PYTHON_USEDEP}]
+# dev-python/baz[${PYTHON_USEDEP}] )')"
+#
+# python_check_deps() {
+# has_version "dev-python/foo[${PYTHON_USEDEP}]" \
+# && { has_version "dev-python/bar[${PYTHON_USEDEP}]" \
+# || has_version "dev-python/baz[${PYTHON_USEDEP}]"; }
+# }
+# @CODE
+#
+# Example value:
+# @CODE
+# || (
+# (
+# dev-lang/python:2.7
+# dev-python/foo[python_targets_python2_7(-)?,python_single_target_python2_7(+)?]
+# || ( dev-python/bar[python_targets_python2_7(-)?,python_single_target_python2_7(+)?]
+# dev-python/baz[python_targets_python2_7(-)?,python_single_target_python2_7(+)?] )
+# )
+# (
+# dev-lang/python:2.6
+# dev-python/foo[python_targets_python2_6(-)?,python_single_target_python2_6(+)?]
+# || ( dev-python/bar[python_targets_python2_6(-)?,python_single_target_python2_6(+)?]
+# dev-python/baz[python_targets_python2_6(-)?,python_single_target_python2_6(+)?] )
+# )
+# )
+# @CODE
+python_gen_any_dep() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local depstr=${1}
+	[[ ${depstr} ]] || die "No dependency string provided"
+
+	# NOTE(review): loop variable `i` is not declared local and will leak
+	# into the caller's scope.
+	local PYTHON_PKG_DEP out=
+	for i in "${_PYTHON_ALL_IMPLS[@]}"; do
+		has "${i}" "${PYTHON_COMPAT[@]}" || continue
+
+		# Per-implementation USE-dependency used to expand the verbatim
+		# '${PYTHON_USEDEP}' placeholders in the caller's dependency block.
+		local PYTHON_USEDEP="python_targets_${i}(-),python_single_target_${i}(+)"
+		python_export "${i}" PYTHON_PKG_DEP
+
+		local i_depstr=${depstr//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}
+		# Prepend, so the || group lists impls in reverse _PYTHON_ALL_IMPLS order.
+		out="( ${PYTHON_PKG_DEP} ${i_depstr} ) ${out}"
+	done
+	echo "|| ( ${out})"
+}
+
+# @FUNCTION: _python_EPYTHON_supported
+# @USAGE: <epython>
+# @INTERNAL
+# @DESCRIPTION:
+# Check whether the specified implementation is supported by package
+# (specified in PYTHON_COMPAT). Calls python_check_deps() if declared.
+_python_EPYTHON_supported() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local EPYTHON=${1}
+	# Map the "python2.7"-style EPYTHON value to the "python2_7" impl name.
+	local i=${EPYTHON/./_}
+
+	case "${i}" in
+		python*|jython*|pypy*)
+			;;
+		*)
+			ewarn "Invalid EPYTHON: ${EPYTHON}"
+			return 1
+			;;
+	esac
+
+	if has "${i}" "${PYTHON_COMPAT[@]}"; then
+		if python_is_installed "${i}"; then
+			if declare -f python_check_deps >/dev/null; then
+				# Expose PYTHON_USEDEP locally for the ebuild's dependency check.
+				local PYTHON_USEDEP="python_targets_${i}(-),python_single_target_${i}(+)"
+				python_check_deps
+				return ${?}
+			fi
+
+			return 0
+		fi
+	elif ! has "${i}" "${_PYTHON_ALL_IMPLS[@]}"; then
+		# Unknown to the eclass entirely, not merely unsupported by the package.
+		ewarn "Invalid EPYTHON: ${EPYTHON}"
+	fi
+	return 1
+}
+
+# @FUNCTION: python_setup
+# @DESCRIPTION:
+# Determine what the best installed (and supported) Python
+# implementation is, and set the Python build environment up for it.
+#
+# This function will call python_check_deps() if defined.
+python_setup() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# first, try ${EPYTHON}... maybe it's good enough for us.
+	if [[ ${EPYTHON} ]]; then
+		if _python_EPYTHON_supported "${EPYTHON}"; then
+			python_export EPYTHON PYTHON
+			python_wrapper_setup
+			return
+		fi
+	fi
+
+	# then, try eselect-python
+	local variant i
+	for variant in '' '--python2' '--python3'; do
+		i=$(eselect python --show ${variant} 2>/dev/null)
+
+		if [[ ! ${i} ]]; then
+			# no eselect-python?
+			break
+		elif _python_EPYTHON_supported "${i}"; then
+			python_export "${i}" EPYTHON PYTHON
+			python_wrapper_setup
+			return
+		fi
+	done
+
+	# fallback to best installed impl.
+	# Collect supported impls in reverse _PYTHON_ALL_IMPLS order by prepending.
+	local rev_impls=()
+	for i in "${_PYTHON_ALL_IMPLS[@]}"; do
+		if has "${i}" "${PYTHON_COMPAT[@]}"; then
+			rev_impls=( "${i}" "${rev_impls[@]}" )
+		fi
+	done
+
+	# Take the first candidate that is installed and passes the deps check.
+	for i in "${rev_impls[@]}"; do
+		python_export "${i}" EPYTHON PYTHON
+		if _python_EPYTHON_supported "${EPYTHON}"; then
+			python_wrapper_setup
+			return
+		fi
+	done
+
+	eerror "No Python implementation found for the build. This is usually"
+	eerror "a bug in the ebuild. Please report it to bugs.gentoo.org"
+	eerror "along with the build log."
+	echo
+	die "No supported Python implementation installed."
+}
+
+# @FUNCTION: python-any-r1_pkg_setup
+# @DESCRIPTION:
+# Runs python_setup during from-source installs.
+#
+# In a binary package installs is a no-op. If you need Python in pkg_*
+# phases of a binary package, call python_setup directly.
+python-any-r1_pkg_setup() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Binary merges do not build anything, so interpreter setup is skipped.
+	[[ ${MERGE_TYPE} != binary ]] && python_setup
+}
+
+_PYTHON_ANY_R1=1
+fi
diff --git a/eclass/python-r1.eclass b/eclass/python-r1.eclass
new file mode 100644
index 000000000000..4f9163c263ba
--- /dev/null
+++ b/eclass/python-r1.eclass
@@ -0,0 +1,656 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: python-r1
+# @MAINTAINER:
+# Python team <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
+# @BLURB: A common, simple eclass for Python packages.
+# @DESCRIPTION:
+# A common eclass providing helper functions to build and install
+# packages supporting being installed for multiple Python
+# implementations.
+#
+# This eclass sets correct IUSE. Modification of REQUIRED_USE has to
+# be done by the author of the ebuild (but PYTHON_REQUIRED_USE is
+# provided for convenience, see below). python-r1 exports PYTHON_DEPS
+# and PYTHON_USEDEP so you can create correct dependencies for your
+# package easily. It also provides methods to easily run a command for
+# each enabled Python implementation and duplicate the sources for them.
+#
+# Please note that python-r1 will always inherit python-utils-r1 as
+# well. Thus, all the functions defined there can be used
+# in the packages using python-r1, and there is no need ever to inherit
+# both.
+#
+# For more information, please see the wiki:
+# https://wiki.gentoo.org/wiki/Project:Python/python-r1
+
+case "${EAPI:-0}" in
+ 0|1|2|3)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 4)
+ # EAPI=4 is only allowed on legacy packages
+ if [[ ${CATEGORY}/${P} == dev-python/pyelftools-0.2[123] ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-apps/file-5.22 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-apps/i2c-tools-3.1.1 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-libs/cracklib-2.9.[12] ]]; then
+ :
+ else
+ die "Unsupported EAPI=${EAPI:-4} (too old, allowed only on restricted set of packages) for ${ECLASS}"
+ fi
+ ;;
+ 5)
+ # EAPI=5 is required for sane USE_EXPAND dependencies
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ! ${_PYTHON_R1} ]]; then
+
+if [[ ${_PYTHON_SINGLE_R1} ]]; then
+ die 'python-r1.eclass can not be used with python-single-r1.eclass.'
+elif [[ ${_PYTHON_ANY_R1} ]]; then
+ die 'python-r1.eclass can not be used with python-any-r1.eclass.'
+fi
+
+inherit multibuild python-utils-r1
+
+# @ECLASS-VARIABLE: PYTHON_COMPAT
+# @REQUIRED
+# @DESCRIPTION:
+# This variable contains a list of Python implementations the package
+# supports. It must be set before the `inherit' call. It has to be
+# an array.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python2_5 python2_6 python2_7 )
+# @CODE
+#
+# Please note that you can also use bash brace expansion if you like:
+# @CODE
+# PYTHON_COMPAT=( python{2_5,2_6,2_7} )
+# @CODE
+if ! declare -p PYTHON_COMPAT &>/dev/null; then
+ die 'PYTHON_COMPAT not declared.'
+fi
+
+# @ECLASS-VARIABLE: PYTHON_COMPAT_OVERRIDE
+# @INTERNAL
+# @DESCRIPTION:
+# This variable can be used when working with ebuilds to override
+# the in-ebuild PYTHON_COMPAT. It is a string listing all
+# the implementations which package will be built for. It need be
+# specified in the calling environment, and not in ebuilds.
+#
+# It should be noted that in order to preserve metadata immutability,
+# PYTHON_COMPAT_OVERRIDE does not affect IUSE nor dependencies.
+# The state of PYTHON_TARGETS is ignored, and all the implementations
+# in PYTHON_COMPAT_OVERRIDE are built. Dependencies need to be satisfied
+# manually.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT_OVERRIDE='pypy python3_3' emerge -1v dev-python/foo
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_REQ_USE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The list of USEflags required to be enabled on the chosen Python
+# implementations, formed as a USE-dependency string. It should be valid
+# for all implementations in PYTHON_COMPAT, so it may be necessary to
+# use USE defaults.
+#
+# This should be set before calling `inherit'.
+#
+# Example:
+# @CODE
+# PYTHON_REQ_USE="gdbm,ncurses(-)?"
+# @CODE
+#
+# It will cause the Python dependencies to look like:
+# @CODE
+# python_targets_pythonX_Y? ( dev-lang/python:X.Y[gdbm,ncurses(-)?] )
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_DEPS
+# @DESCRIPTION:
+# This is an eclass-generated Python dependency string for all
+# implementations listed in PYTHON_COMPAT.
+#
+# Example use:
+# @CODE
+# RDEPEND="${PYTHON_DEPS}
+# dev-foo/mydep"
+# DEPEND="${RDEPEND}"
+# @CODE
+#
+# Example value:
+# @CODE
+# dev-lang/python-exec:=
+# python_targets_python2_6? ( dev-lang/python:2.6[gdbm] )
+# python_targets_python2_7? ( dev-lang/python:2.7[gdbm] )
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_USEDEP
+# @DESCRIPTION:
+# This is an eclass-generated USE-dependency string which can be used to
+# depend on another Python package being built for the same Python
+# implementations.
+#
+# The generated USE-flag list is compatible with packages using python-r1
+# and python-distutils-ng eclasses. It must not be used on packages
+# using python.eclass.
+#
+# Example use:
+# @CODE
+# RDEPEND="dev-python/foo[${PYTHON_USEDEP}]"
+# @CODE
+#
+# Example value:
+# @CODE
+# python_targets_python2_6(-)?,python_targets_python2_7(-)?
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_REQUIRED_USE
+# @DESCRIPTION:
+# This is an eclass-generated required-use expression which ensures at
+# least one Python implementation has been enabled.
+#
+# This expression should be utilized in an ebuild by including it in
+# REQUIRED_USE, optionally behind a use flag.
+#
+# Example use:
+# @CODE
+# REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+# @CODE
+#
+# Example value:
+# @CODE
+# || ( python_targets_python2_6 python_targets_python2_7 )
+# @CODE
+
+_python_set_globals() {
+	# Derive IUSE, PYTHON_DEPS, PYTHON_USEDEP and PYTHON_REQUIRED_USE from
+	# PYTHON_COMPAT; dies if no supported implementation remains.
+	local impls=()
+
+	PYTHON_DEPS=
+	local i PYTHON_PKG_DEP
+	for i in "${PYTHON_COMPAT[@]}"; do
+		_python_impl_supported "${i}" || continue
+
+		python_export "${i}" PYTHON_PKG_DEP
+		PYTHON_DEPS+="python_targets_${i}? ( ${PYTHON_PKG_DEP} ) "
+
+		impls+=( "${i}" )
+	done
+
+	if [[ ${#impls[@]} -eq 0 ]]; then
+		die "No supported implementation in PYTHON_COMPAT."
+	fi
+
+	local flags=( "${impls[@]/#/python_targets_}" )
+	# Append a "(-)?" default to every flag for the USE-dependency string.
+	local optflags=${flags[@]/%/(-)?}
+
+	# A nice QA trick here. Since a python-single-r1 package has to have
+	# at least one PYTHON_SINGLE_TARGET enabled (REQUIRED_USE),
+	# the following check will always fail on those packages. Therefore,
+	# it should prevent developers from mistakenly depending on packages
+	# not supporting multiple Python implementations.
+
+	local flags_st=( "${impls[@]/#/-python_single_target_}" )
+	optflags+=,${flags_st[@]/%/(-)}
+
+	IUSE=${flags[*]}
+	PYTHON_REQUIRED_USE="|| ( ${flags[*]} )"
+	PYTHON_USEDEP=${optflags// /,}
+
+	# 1) well, python-exec would suffice as an RDEP
+	# but no point in making this overcomplex, BDEP doesn't hurt anyone
+	# 2) python-exec should be built with all targets forced anyway
+	# but if new targets were added, we may need to force a rebuild
+	# 3) use whichever python-exec slot installed in EAPI 5. For EAPI 4,
+	# just fix :2 since := deps are not supported.
+	if [[ ${_PYTHON_WANT_PYTHON_EXEC2} == 0 ]]; then
+		die "python-exec:0 is no longer supported, please fix your ebuild to work with python-exec:2"
+	elif [[ ${EAPI} != 4 ]]; then
+		PYTHON_DEPS+=">=dev-lang/python-exec-2:=[${PYTHON_USEDEP}]"
+	else
+		PYTHON_DEPS+="dev-lang/python-exec:2[${PYTHON_USEDEP}]"
+	fi
+}
+_python_set_globals
+
+# @FUNCTION: _python_validate_useflags
+# @INTERNAL
+# @DESCRIPTION:
+# Enforce the proper setting of PYTHON_TARGETS.
+_python_validate_useflags() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local i
+
+	# Succeed as soon as any supported python_targets_* flag is enabled.
+	for i in "${PYTHON_COMPAT[@]}"; do
+		_python_impl_supported "${i}" || continue
+
+		use "python_targets_${i}" && return 0
+	done
+
+	eerror "No Python implementation selected for the build. Please add one"
+	eerror "of the following values to your PYTHON_TARGETS (in make.conf):"
+	eerror
+	eerror "${PYTHON_COMPAT[@]}"
+	echo
+	die "No supported Python implementation in PYTHON_TARGETS."
+}
+
+# @FUNCTION: python_gen_usedep
+# @USAGE: <pattern> [...]
+# @DESCRIPTION:
+# Output a USE dependency string for Python implementations which
+# are both in PYTHON_COMPAT and match any of the patterns passed
+# as parameters to the function.
+#
+# Remember to escape or quote the patterns to prevent shell filename
+# expansion.
+#
+# When all implementations are requested, please use ${PYTHON_USEDEP}
+# instead. Please also remember to set an appropriate REQUIRED_USE
+# to avoid ineffective USE flags.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_7,3_4} )
+# DEPEND="doc? ( dev-python/epydoc[$(python_gen_usedep 'python2*')] )"
+# @CODE
+#
+# It will cause the dependency to look like:
+# @CODE
+# DEPEND="doc? ( dev-python/epydoc[python_targets_python2_7?] )"
+# @CODE
+python_gen_usedep() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local impl pattern
+	local matches=()
+
+	for impl in "${PYTHON_COMPAT[@]}"; do
+		_python_impl_supported "${impl}" || continue
+
+		# Collect USE-deps for every impl matching at least one glob pattern.
+		for pattern; do
+			if [[ ${impl} == ${pattern} ]]; then
+				matches+=(
+					"python_targets_${impl}(-)?"
+					"-python_single_target_${impl}(-)"
+				)
+				break
+			fi
+		done
+	done
+
+	[[ ${matches[@]} ]] || die "No supported implementations match python_gen_usedep patterns: ${@}"
+
+	# Join the collected flags with commas into a single USE-dep string.
+	local out=${matches[@]}
+	echo "${out// /,}"
+}
+
+# @FUNCTION: python_gen_useflags
+# @USAGE: <pattern> [...]
+# @DESCRIPTION:
+# Output a list of USE flags for Python implementations which
+# are both in PYTHON_COMPAT and match any of the patterns passed
+# as parameters to the function.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_7,3_4} )
+# REQUIRED_USE="doc? ( || ( $(python_gen_useflags python2*) ) )"
+# @CODE
+#
+# It will cause the variable to look like:
+# @CODE
+# REQUIRED_USE="doc? ( || ( python_targets_python2_7 ) )"
+# @CODE
+python_gen_useflags() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local impl pattern
+	local matches=()
+
+	for impl in "${PYTHON_COMPAT[@]}"; do
+		_python_impl_supported "${impl}" || continue
+
+		# Emit the python_targets_* flag for each impl matching any pattern.
+		for pattern; do
+			if [[ ${impl} == ${pattern} ]]; then
+				matches+=( "python_targets_${impl}" )
+				break
+			fi
+		done
+	done
+
+	echo "${matches[@]}"
+}
+
+# @FUNCTION: python_gen_cond_dep
+# @USAGE: <dependency> <pattern> [...]
+# @DESCRIPTION:
+# Output a list of <dependency>-ies made conditional to USE flags
+# of Python implementations which are both in PYTHON_COMPAT and match
+# any of the patterns passed as the remaining parameters.
+#
+# In order to enforce USE constraints on the packages, verbatim
+# '${PYTHON_USEDEP}' (quoted!) may be placed in the dependency
+# specification. It will get expanded within the function into a proper
+# USE dependency string.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_5,2_6,2_7} )
+# RDEPEND="$(python_gen_cond_dep \
+# 'dev-python/unittest2[${PYTHON_USEDEP}]' python{2_5,2_6})"
+# @CODE
+#
+# It will cause the variable to look like:
+# @CODE
+# RDEPEND="python_targets_python2_5? (
+# dev-python/unittest2[python_targets_python2_5?] )
+# python_targets_python2_6? (
+# dev-python/unittest2[python_targets_python2_6?] )"
+# @CODE
+python_gen_cond_dep() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local impl pattern
+	local matches=()
+
+	# First argument is the dependency template; the rest are impl patterns.
+	local dep=${1}
+	shift
+
+	for impl in "${PYTHON_COMPAT[@]}"; do
+		_python_impl_supported "${impl}" || continue
+
+		for pattern; do
+			if [[ ${impl} == ${pattern} ]]; then
+				# substitute ${PYTHON_USEDEP} if used
+				# (since python_gen_usedep() will not return ${PYTHON_USEDEP}
+				# the code is run at most once)
+				if [[ ${dep} == *'${PYTHON_USEDEP}'* ]]; then
+					local PYTHON_USEDEP=$(python_gen_usedep "${@}")
+					dep=${dep//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}
+				fi
+
+				# Wrap the dependency in a USE-conditional for this impl.
+				matches+=( "python_targets_${impl}? ( ${dep} )" )
+				break
+			fi
+		done
+	done
+
+	echo "${matches[@]}"
+}
+
+# @ECLASS-VARIABLE: BUILD_DIR
+# @DESCRIPTION:
+# The current build directory. In global scope, it is supposed to
+# contain an initial build directory; if unset, it defaults to ${S}.
+#
+# In functions run by python_foreach_impl(), the BUILD_DIR is locally
+# set to an implementation-specific build directory. That path is
+# created through appending a hyphen and the implementation name
+# to the final component of the initial BUILD_DIR.
+#
+# Example value:
+# @CODE
+# ${WORKDIR}/foo-1.3-python2_6
+# @CODE
+
+# @FUNCTION: python_copy_sources
+# @DESCRIPTION:
+# Create a single copy of the package sources for each enabled Python
+# implementation.
+#
+# The sources are always copied from initial BUILD_DIR (or S if unset)
+# to implementation-specific build directory matching BUILD_DIR used by
+# python_foreach_abi().
+python_copy_sources() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Fill MULTIBUILD_VARIANTS with the enabled impls, then let multibuild
+	# duplicate the source tree once per variant.
+	local MULTIBUILD_VARIANTS
+	_python_obtain_impls
+
+	multibuild_copy_sources
+}
+
+# @FUNCTION: _python_obtain_impls
+# @INTERNAL
+# @DESCRIPTION:
+# Set up the enabled implementation list.
+_python_obtain_impls() {
+	# PYTHON_COMPAT_OVERRIDE wins outright; warn once per ebuild run.
+	if [[ ${PYTHON_COMPAT_OVERRIDE} ]]; then
+		if [[ ! ${_PYTHON_COMPAT_OVERRIDE_WARNED} ]]; then
+			ewarn "WARNING: PYTHON_COMPAT_OVERRIDE in effect. The following Python"
+			ewarn "implementations will be enabled:"
+			ewarn
+			ewarn "	${PYTHON_COMPAT_OVERRIDE}"
+			ewarn
+			ewarn "Dependencies won't be satisfied, and PYTHON_TARGETS will be ignored."
+			_PYTHON_COMPAT_OVERRIDE_WARNED=1
+		fi
+
+		MULTIBUILD_VARIANTS=( ${PYTHON_COMPAT_OVERRIDE} )
+		return
+	fi
+
+	_python_validate_useflags
+
+	MULTIBUILD_VARIANTS=()
+
+	# NOTE(review): `impl` is not declared local and leaks into the caller.
+	# Iterate _PYTHON_ALL_IMPLS so the variant order is the eclass order.
+	for impl in "${_PYTHON_ALL_IMPLS[@]}"; do
+		if has "${impl}" "${PYTHON_COMPAT[@]}" \
+				&& use "python_targets_${impl}"
+		then
+			MULTIBUILD_VARIANTS+=( "${impl}" )
+		fi
+	done
+}
+
+# @FUNCTION: _python_multibuild_wrapper
+# @USAGE: <command> [<args>...]
+# @INTERNAL
+# @DESCRIPTION:
+# Initialize the environment for Python implementation selected
+# for multibuild.
+_python_multibuild_wrapper() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Localize and export the per-variant environment so changes made by
+	# python_wrapper_setup do not leak out of this invocation.
+	local -x EPYTHON PYTHON
+	local -x PATH=${PATH} PKG_CONFIG_PATH=${PKG_CONFIG_PATH}
+	python_export "${MULTIBUILD_VARIANT}" EPYTHON PYTHON
+	python_wrapper_setup
+
+	"${@}"
+}
+
+# @FUNCTION: python_foreach_impl
+# @USAGE: <command> [<args>...]
+# @DESCRIPTION:
+# Run the given command for each of the enabled Python implementations.
+# If additional parameters are passed, they will be passed through
+# to the command.
+#
+# The function will return 0 status if all invocations succeed.
+# Otherwise, the return code from first failing invocation will
+# be returned.
+#
+# For each command being run, EPYTHON, PYTHON and BUILD_DIR are set
+# locally, and the former two are exported to the command environment.
+python_foreach_impl() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Resolve enabled impls, then run the command once per variant inside
+	# the per-implementation environment wrapper.
+	local MULTIBUILD_VARIANTS
+	_python_obtain_impls
+
+	multibuild_foreach_variant _python_multibuild_wrapper "${@}"
+}
+
+# @FUNCTION: python_parallel_foreach_impl
+# @USAGE: <command> [<args>...]
+# @DESCRIPTION:
+# Run the given command for each of the enabled Python implementations.
+# If additional parameters are passed, they will be passed through
+# to the command.
+#
+# The function will return 0 status if all invocations succeed.
+# Otherwise, the return code from first failing invocation will
+# be returned.
+#
+# For each command being run, EPYTHON, PYTHON and BUILD_DIR are set
+# locally, and the former two are exported to the command environment.
+#
+# This command used to be the parallel variant of python_foreach_impl.
+# However, the parallel run support has been removed to simplify
+# the eclasses and make them more predictable and therefore it is now
+# only a deprecated alias to python_foreach_impl.
+python_parallel_foreach_impl() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Deprecated alias: warn once, then behave exactly like python_foreach_impl.
+	if [[ ! ${_PYTHON_PARALLEL_WARNED} ]]; then
+		eqawarn "python_parallel_foreach_impl() is no longer meaningful. All runs"
+		eqawarn "are non-parallel now. Please replace the call with python_foreach_impl."
+
+		_PYTHON_PARALLEL_WARNED=1
+	fi
+
+	local MULTIBUILD_VARIANTS
+	_python_obtain_impls
+	multibuild_foreach_variant _python_multibuild_wrapper "${@}"
+}
+
+# @FUNCTION: python_setup
+# @USAGE: [<impl-pattern>...]
+# @DESCRIPTION:
+# Find the best (most preferred) Python implementation that is enabled
+# and matches at least one of the patterns passed (or '*' if no patterns
+# passed). Set the Python build environment up for that implementation.
+#
+# This function needs to be used when Python is being called outside
+# of python_foreach_impl calls (e.g. for shared processes like doc
+# building). python_foreach_impl sets up the build environment itself.
+#
+# If the specific commands support only a subset of Python
+# implementations, patterns need to be passed to restrict the allowed
+# implementations.
+#
+# Example:
+# @CODE
+# DEPEND="doc? ( dev-python/epydoc[$(python_gen_usedep 'python2*')] )"
+#
+# src_compile() {
+# #...
+# if use doc; then
+# python_setup 'python2*'
+# make doc
+# fi
+# }
+# @CODE
+python_setup() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# With no arguments, match every enabled implementation ('*').
+	local best_impl patterns=( "${@-*}" )
+	_python_try_impl() {
+		# Runs once per enabled impl via python_foreach_impl; the last
+		# matching impl wins, so iteration order decides the "best" one.
+		local pattern
+		for pattern in "${patterns[@]}"; do
+			if [[ ${EPYTHON} == ${pattern} ]]; then
+				best_impl=${EPYTHON}
+			fi
+		done
+	}
+	python_foreach_impl _python_try_impl
+
+	if [[ ! ${best_impl} ]]; then
+		eerror "${FUNCNAME}: none of the enabled implementation matched the patterns."
+		eerror "  patterns: ${@-'(*)'}"
+		eerror "Likely a REQUIRED_USE constraint (possibly USE-conditional) is missing."
+		eerror "  suggested: || ( \$(python_gen_useflags ${@}) )"
+		eerror "(remember to quote all the patterns with '')"
+		die "${FUNCNAME}: no enabled implementation satisfy requirements"
+	fi
+
+	python_export "${best_impl}" EPYTHON PYTHON
+	python_wrapper_setup
+}
+
+# @FUNCTION: python_export_best
+# @USAGE: [<variable>...]
+# @DESCRIPTION:
+# Find the best (most preferred) Python implementation enabled
+# and export given variables for it. If no variables are provided,
+# EPYTHON & PYTHON will be exported.
+python_export_best() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	eqawarn "python_export_best() is deprecated. Please use python_setup instead,"
+	eqawarn "combined with python_export if necessary."
+
+	# Default to exporting EPYTHON and PYTHON when no variables are named.
+	[[ ${#} -gt 0 ]] || set -- EPYTHON PYTHON
+
+	local best MULTIBUILD_VARIANTS
+	_python_obtain_impls
+
+	# multibuild picks the preferred variant; we just record its name.
+	_python_set_best() {
+		best=${MULTIBUILD_VARIANT}
+	}
+	multibuild_for_best_variant _python_set_best
+
+	debug-print "${FUNCNAME}: Best implementation is: ${best}"
+	python_export "${best}" "${@}"
+	python_wrapper_setup
+}
+
+# @FUNCTION: python_replicate_script
+# @USAGE: <path>...
+# @DESCRIPTION:
+# Copy the given script to variants for all enabled Python
+# implementations, then replace it with a symlink to the wrapper.
+#
+# All specified files must start with a 'python' shebang. A file not
+# having a matching shebang will be refused.
+python_replicate_script() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	_python_replicate_script() {
+		local _PYTHON_FIX_SHEBANG_QUIET=1
+
+		local PYTHON_SCRIPTDIR
+		python_export PYTHON_SCRIPTDIR
+
+		# Subshell keeps exeinto's install-path change from leaking out.
+		(
+			exeinto "${PYTHON_SCRIPTDIR#${EPREFIX}}"
+			doexe "${files[@]}"
+		)
+
+		# Point each installed copy's shebang at the variant's interpreter.
+		python_fix_shebang -q \
+			"${files[@]/*\//${D%/}/${PYTHON_SCRIPTDIR}/}"
+	}
+
+	local files=( "${@}" )
+	python_foreach_impl _python_replicate_script
+
+	# install the wrappers
+	# Replace each original path with a symlink to the python-exec wrapper.
+	local f
+	for f; do
+		_python_ln_rel "${ED%/}/usr/lib/python-exec/python-exec2" "${f}" || die
+	done
+}
+
+_PYTHON_R1=1
+fi
diff --git a/eclass/python-single-r1.eclass b/eclass/python-single-r1.eclass
new file mode 100644
index 000000000000..0f21fb7e354b
--- /dev/null
+++ b/eclass/python-single-r1.eclass
@@ -0,0 +1,468 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: python-single-r1
+# @MAINTAINER:
+# Python team <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
+# @BLURB: An eclass for Python packages not installed for multiple implementations.
+# @DESCRIPTION:
+# An extension of the python-r1 eclass suite for packages which
+# don't support being installed for multiple Python implementations.
+# This mostly includes tools embedding Python.
+#
+# This eclass extends the IUSE and REQUIRED_USE set by python-r1
+# to request the PYTHON_SINGLE_TARGET when the inheriting ebuild
+# can be supported by more than one Python implementation. It also
+# replaces PYTHON_USEDEP and PYTHON_DEPS with a more suitable form.
+#
+# Please note that packages supporting multiple Python implementations
+# (using python-r1 eclass) can not depend on packages not supporting
+# them (using this eclass).
+#
+# Please note that python-single-r1 will always inherit python-utils-r1
+# as well. Thus, all the functions defined there can be used
+# in the packages using python-single-r1, and there is no need ever
+# to inherit both.
+#
+# For more information, please see the wiki:
+# https://wiki.gentoo.org/wiki/Project:Python/python-single-r1
+
+case "${EAPI:-0}" in
+ 0|1|2|3)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 4)
+ # EAPI=4 is only allowed on legacy packages
+ if [[ ${CATEGORY}/${P} == app-arch/threadzip-1.2 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == media-libs/lv2-1.8.0 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == media-libs/lv2-1.10.0 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-apps/paludis-1* ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-apps/paludis-2.[02].0 ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == sys-apps/util-linux-2.2[456]* ]]; then
+ :
+ elif [[ ${CATEGORY}/${P} == */gdb-7.[78]* ]]; then
+ :
+ else
+ die "Unsupported EAPI=${EAPI:-4} (too old, allowed only on restricted set of packages) for ${ECLASS}"
+ fi
+ ;;
+ 5)
+ # EAPI=5 is required for sane USE_EXPAND dependencies
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ! ${_PYTHON_SINGLE_R1} ]]; then
+
+if [[ ${_PYTHON_R1} ]]; then
+ die 'python-single-r1.eclass can not be used with python-r1.eclass.'
+elif [[ ${_PYTHON_ANY_R1} ]]; then
+ die 'python-single-r1.eclass can not be used with python-any-r1.eclass.'
+fi
+
+inherit python-utils-r1
+
+fi
+
+EXPORT_FUNCTIONS pkg_setup
+
+if [[ ! ${_PYTHON_SINGLE_R1} ]]; then
+
+# @ECLASS-VARIABLE: PYTHON_COMPAT
+# @REQUIRED
+# @DESCRIPTION:
+# This variable contains a list of Python implementations the package
+# supports. It must be set before the `inherit' call. It has to be
+# an array.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_5,2_6,2_7} )
+# @CODE
+if ! declare -p PYTHON_COMPAT &>/dev/null; then
+ die 'PYTHON_COMPAT not declared.'
+fi
+
+# @ECLASS-VARIABLE: PYTHON_REQ_USE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The list of USEflags required to be enabled on the chosen Python
+# implementations, formed as a USE-dependency string. It should be valid
+# for all implementations in PYTHON_COMPAT, so it may be necessary to
+# use USE defaults.
+#
+# This should be set before calling `inherit'.
+#
+# Example:
+# @CODE
+# PYTHON_REQ_USE="gdbm,ncurses(-)?"
+# @CODE
+#
+# It will cause the Python dependencies to look like:
+# @CODE
+# python_single_target_pythonX_Y? ( dev-lang/python:X.Y[gdbm,ncurses(-)?] )
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_DEPS
+# @DESCRIPTION:
+# This is an eclass-generated Python dependency string for all
+# implementations listed in PYTHON_COMPAT.
+#
+# The dependency string is conditional on PYTHON_SINGLE_TARGET.
+#
+# Example use:
+# @CODE
+# RDEPEND="${PYTHON_DEPS}
+# dev-foo/mydep"
+# DEPEND="${RDEPEND}"
+# @CODE
+#
+# Example value:
+# @CODE
+# dev-lang/python-exec:=
+# python_single_target_python2_6? ( dev-lang/python:2.6[gdbm] )
+# python_single_target_python2_7? ( dev-lang/python:2.7[gdbm] )
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_USEDEP
+# @DESCRIPTION:
+# This is an eclass-generated USE-dependency string which can be used to
+# depend on another Python package being built for the same Python
+# implementations.
+#
+# The generated USE-flag list is compatible with packages using python-r1,
+# python-single-r1 and python-distutils-ng eclasses. It must not be used
+# on packages using python.eclass.
+#
+# Example use:
+# @CODE
+# RDEPEND="dev-python/foo[${PYTHON_USEDEP}]"
+# @CODE
+#
+# Example value:
+# @CODE
+# python_targets_python2_7(-)?,python_single_target_python2_7(+)?
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_REQUIRED_USE
+# @DESCRIPTION:
+# This is an eclass-generated required-use expression which ensures the following
+# when more than one python implementation is possible:
+# 1. Exactly one PYTHON_SINGLE_TARGET value has been enabled.
+# 2. The selected PYTHON_SINGLE_TARGET value is enabled in PYTHON_TARGETS.
+#
+# This expression should be utilized in an ebuild by including it in
+# REQUIRED_USE, optionally behind a use flag.
+#
+# Example use:
+# @CODE
+# REQUIRED_USE="python? ( ${PYTHON_REQUIRED_USE} )"
+# @CODE
+#
+# Example value:
+# @CODE
+# python_single_target_python2_6? ( python_targets_python2_6 )
+# python_single_target_python2_7? ( python_targets_python2_7 )
+# ^^ ( python_single_target_python2_6 python_single_target_python2_7 )
+# @CODE
+
+_python_single_set_globals() {
+ local impls=()
+ local unimpls=()
+
+ PYTHON_DEPS=
+ local i PYTHON_PKG_DEP
+ for i in "${_PYTHON_ALL_IMPLS[@]}"; do
+ has "${i}" "${PYTHON_COMPAT[@]}" \
+ && impls+=( "${i}" ) \
+ || unimpls+=( "${i}" )
+ done
+
+ if [[ ${#impls[@]} -eq 0 ]]; then
+ die "No supported implementation in PYTHON_COMPAT."
+ fi
+
+ local flags_mt=( "${impls[@]/#/python_targets_}" )
+ local flags=( "${impls[@]/#/python_single_target_}" )
+ local unflags=( "${unimpls[@]/#/-python_single_target_}" )
+
+ local optflags=${flags_mt[@]/%/(-)?},${unflags[@]/%/(-)}
+
+ IUSE="${flags_mt[*]}"
+
+ if [[ ${#impls[@]} -eq 1 ]]; then
+ # There is only one supported implementation; set IUSE and other
+ # variables without PYTHON_SINGLE_TARGET.
+ PYTHON_REQUIRED_USE="${flags_mt[*]}"
+ python_export "${impls[0]}" PYTHON_PKG_DEP
+ PYTHON_DEPS="${PYTHON_PKG_DEP} "
+ # Force on the python_single_target_* flag for this impl, so
+ # that any dependencies that inherit python-single-r1 and
+ # happen to have multiple implementations will still need
+	# to be bound by the implementation used by this package.
+ optflags+=,${flags[0]/%/(+)}
+ else
+ # Multiple supported implementations; honor PYTHON_SINGLE_TARGET.
+ IUSE+=" ${flags[*]}"
+ PYTHON_REQUIRED_USE="^^ ( ${flags[*]} )"
+ # Ensure deps honor the same python_single_target_* flag as is set
+ # on this package.
+ optflags+=,${flags[@]/%/(+)?}
+
+ for i in "${impls[@]}"; do
+ # The chosen targets need to be in PYTHON_TARGETS as well.
+ # This is in order to enforce correct dependencies on packages
+ # supporting multiple implementations.
+ PYTHON_REQUIRED_USE+=" python_single_target_${i}? ( python_targets_${i} )"
+
+ python_export "${i}" PYTHON_PKG_DEP
+ PYTHON_DEPS+="python_single_target_${i}? ( ${PYTHON_PKG_DEP} ) "
+ done
+ fi
+ PYTHON_USEDEP=${optflags// /,}
+
+ # 1) well, python-exec would suffice as an RDEP
+ # but no point in making this overcomplex, BDEP doesn't hurt anyone
+ # 2) python-exec should be built with all targets forced anyway
+ # but if new targets were added, we may need to force a rebuild
+ # 3) use whichever python-exec slot installed in EAPI 5. For EAPI 4,
+ # just fix :2 since := deps are not supported.
+ if [[ ${_PYTHON_WANT_PYTHON_EXEC2} == 0 ]]; then
+ die "python-exec:0 is no longer supported, please fix your ebuild to work with python-exec:2"
+ elif [[ ${EAPI} != 4 ]]; then
+ PYTHON_DEPS+=">=dev-lang/python-exec-2:=[${PYTHON_USEDEP}]"
+ else
+ PYTHON_DEPS+="dev-lang/python-exec:2[${PYTHON_USEDEP}]"
+ fi
+}
+_python_single_set_globals
+
+# @FUNCTION: python_gen_usedep
+# @USAGE: <pattern> [...]
+# @DESCRIPTION:
+# Output a USE dependency string for Python implementations which
+# are both in PYTHON_COMPAT and match any of the patterns passed
+# as parameters to the function.
+#
+# Remember to escape or quote the patterns to prevent shell filename
+# expansion.
+#
+# When all implementations are requested, please use ${PYTHON_USEDEP}
+# instead. Please also remember to set an appropriate REQUIRED_USE
+# to avoid ineffective USE flags.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_7,3_4} )
+# DEPEND="doc? ( dev-python/epydoc[$(python_gen_usedep 'python2*')] )"
+# @CODE
+#
+# It will cause the dependency to look like:
+# @CODE
+# DEPEND="doc? ( dev-python/epydoc[python_targets_python2_7(-)?,...] )"
+# @CODE
+python_gen_usedep() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local impl pattern
+ local matches=()
+
+ for impl in "${PYTHON_COMPAT[@]}"; do
+ _python_impl_supported "${impl}" || continue
+
+ for pattern; do
+ if [[ ${impl} == ${pattern} ]]; then
+ matches+=(
+ "python_targets_${impl}(-)?"
+ "python_single_target_${impl}(+)?"
+ )
+ break
+ fi
+ done
+ done
+
+ [[ ${matches[@]} ]] || die "No supported implementations match python_gen_usedep patterns: ${@}"
+
+ local out=${matches[@]}
+ echo "${out// /,}"
+}
+
+# @FUNCTION: python_gen_useflags
+# @USAGE: <pattern> [...]
+# @DESCRIPTION:
+# Output a list of USE flags for Python implementations which
+# are both in PYTHON_COMPAT and match any of the patterns passed
+# as parameters to the function.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_7,3_4} )
+# REQUIRED_USE="doc? ( ^^ ( $(python_gen_useflags 'python2*') ) )"
+# @CODE
+#
+# It will cause the variable to look like:
+# @CODE
+# REQUIRED_USE="doc? ( ^^ ( python_single_target_python2_7 ) )"
+# @CODE
+python_gen_useflags() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local impl pattern
+ local matches=()
+
+ for impl in "${PYTHON_COMPAT[@]}"; do
+ _python_impl_supported "${impl}" || continue
+
+ for pattern; do
+ if [[ ${impl} == ${pattern} ]]; then
+ matches+=( "python_single_target_${impl}" )
+ break
+ fi
+ done
+ done
+
+ echo "${matches[@]}"
+}
+
+# @FUNCTION: python_gen_cond_dep
+# @USAGE: <dependency> <pattern> [...]
+# @DESCRIPTION:
+# Output a list of <dependency>-ies made conditional to USE flags
+# of Python implementations which are both in PYTHON_COMPAT and match
+# any of the patterns passed as the remaining parameters.
+#
+# In order to enforce USE constraints on the packages, verbatim
+# '${PYTHON_USEDEP}' (quoted!) may be placed in the dependency
+# specification. It will get expanded within the function into a proper
+# USE dependency string.
+#
+# Example:
+# @CODE
+# PYTHON_COMPAT=( python{2_5,2_6,2_7} )
+# RDEPEND="$(python_gen_cond_dep \
+# 'dev-python/unittest2[${PYTHON_USEDEP}]' python{2_5,2_6})"
+# @CODE
+#
+# It will cause the variable to look like:
+# @CODE
+# RDEPEND="python_single_target_python2_5? (
+# dev-python/unittest2[python_targets_python2_5(-)?,...] )
+# python_single_target_python2_6? (
+# dev-python/unittest2[python_targets_python2_6(-)?,...] )"
+# @CODE
+python_gen_cond_dep() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local impl pattern
+ local matches=()
+
+ local dep=${1}
+ shift
+
+ for impl in "${PYTHON_COMPAT[@]}"; do
+ _python_impl_supported "${impl}" || continue
+
+ for pattern; do
+ if [[ ${impl} == ${pattern} ]]; then
+ # substitute ${PYTHON_USEDEP} if used
+ # (since python_gen_usedep() will not return ${PYTHON_USEDEP}
+ # the code is run at most once)
+ if [[ ${dep} == *'${PYTHON_USEDEP}'* ]]; then
+ local PYTHON_USEDEP=$(python_gen_usedep "${@}")
+ dep=${dep//\$\{PYTHON_USEDEP\}/${PYTHON_USEDEP}}
+ fi
+
+ matches+=( "python_single_target_${impl}? ( ${dep} )" )
+ break
+ fi
+ done
+ done
+
+ echo "${matches[@]}"
+}
+
+# @FUNCTION: python_setup
+# @DESCRIPTION:
+# Determine what the selected Python implementation is and set
+# the Python build environment up for it.
+python_setup() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ unset EPYTHON
+
+ local impl impls=()
+ for impl in "${PYTHON_COMPAT[@]}"; do
+ _python_impl_supported "${impl}" || continue
+ impls+=( "${impl}" )
+ done
+
+ if [[ ${#impls[@]} -eq 1 ]]; then
+ if use "python_targets_${impls[0]}"; then
+ # Only one supported implementation, enable it explicitly
+ python_export "${impls[0]}" EPYTHON PYTHON
+ python_wrapper_setup
+ fi
+ else
+ for impl in "${impls[@]}"; do
+ if use "python_single_target_${impl}"; then
+ if [[ ${EPYTHON} ]]; then
+ eerror "Your PYTHON_SINGLE_TARGET setting lists more than a single Python"
+ eerror "implementation. Please set it to just one value. If you need"
+ eerror "to override the value for a single package, please use package.env"
+ eerror "or an equivalent solution (man 5 portage)."
+ echo
+ die "More than one implementation in PYTHON_SINGLE_TARGET."
+ fi
+
+ if ! use "python_targets_${impl}"; then
+ eerror "The implementation chosen as PYTHON_SINGLE_TARGET must be added"
+ eerror "to PYTHON_TARGETS as well. This is in order to ensure that"
+ eerror "dependencies are satisfied correctly. We're sorry"
+ eerror "for the inconvenience."
+ echo
+ die "Build target (${impl}) not in PYTHON_TARGETS."
+ fi
+
+ python_export "${impl}" EPYTHON PYTHON
+ python_wrapper_setup
+ fi
+ done
+ fi
+
+ if [[ ! ${EPYTHON} ]]; then
+ eerror "No Python implementation selected for the build. Please set"
+ if [[ ${#impls[@]} -eq 1 ]]; then
+ eerror "the PYTHON_TARGETS variable in your make.conf to include one"
+ else
+ eerror "the PYTHON_SINGLE_TARGET variable in your make.conf to one"
+ fi
+ eerror "of the following values:"
+ eerror
+ eerror "${impls[@]}"
+ echo
+ die "No supported Python implementation in PYTHON_SINGLE_TARGET/PYTHON_TARGETS."
+ fi
+}
+
+# @FUNCTION: python-single-r1_pkg_setup
+# @DESCRIPTION:
+# Runs python_setup.
+python-single-r1_pkg_setup() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${MERGE_TYPE} != binary ]] && python_setup
+}
+
+_PYTHON_SINGLE_R1=1
+fi
diff --git a/eclass/python-utils-r1.eclass b/eclass/python-utils-r1.eclass
new file mode 100644
index 000000000000..544d2ef62ef3
--- /dev/null
+++ b/eclass/python-utils-r1.eclass
@@ -0,0 +1,1315 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: python-utils-r1
+# @MAINTAINER:
+# Python team <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Based on work of: Krzysztof Pawlik <nelchael@gentoo.org>
+# @BLURB: Utility functions for packages with Python parts.
+# @DESCRIPTION:
+# A utility eclass providing functions to query Python implementations,
+# install Python modules and scripts.
+#
+# This eclass does not set any metadata variables nor export any phase
+# functions. It can be inherited safely.
+#
+# For more information, please see the wiki:
+# https://wiki.gentoo.org/wiki/Project:Python/python-utils-r1
+
+case "${EAPI:-0}" in
+ 0|1|2|3|4|5)
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ${_PYTHON_ECLASS_INHERITED} ]]; then
+ die 'python-r1 suite eclasses can not be used with python.eclass.'
+fi
+
+if [[ ! ${_PYTHON_UTILS_R1} ]]; then
+
+inherit eutils multilib toolchain-funcs
+
+# @ECLASS-VARIABLE: _PYTHON_ALL_IMPLS
+# @INTERNAL
+# @DESCRIPTION:
+# All supported Python implementations, most preferred last.
+_PYTHON_ALL_IMPLS=(
+ jython2_5 jython2_7
+ pypy pypy3
+ python3_3 python3_4
+ python2_7
+)
+
+# @FUNCTION: _python_impl_supported
+# @USAGE: <impl>
+# @INTERNAL
+# @DESCRIPTION:
+# Check whether the implementation <impl> (PYTHON_COMPAT-form)
+# is still supported.
+#
+# Returns 0 if the implementation is valid and supported. If it is
+# unsupported, returns 1 -- and the caller should ignore the entry.
+# If it is invalid, dies with an appropriate error message.
+_python_impl_supported() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${#} -eq 1 ]] || die "${FUNCNAME}: takes exactly 1 argument (impl)."
+
+ local impl=${1}
+
+ # keep in sync with _PYTHON_ALL_IMPLS!
+ # (not using that list because inline patterns shall be faster)
+ case "${impl}" in
+ python2_7|python3_[34]|jython2_[57])
+ return 0
+ ;;
+ pypy1_[89]|pypy2_0|python2_[56]|python3_[12])
+ return 1
+ ;;
+ pypy|pypy3)
+ if [[ ${EAPI:-0} == [01234] ]]; then
+ die "PyPy is supported in EAPI 5 and newer only."
+ fi
+ ;;
+ *)
+ die "Invalid implementation in PYTHON_COMPAT: ${impl}"
+ esac
+}
+
+# @ECLASS-VARIABLE: PYTHON
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The absolute path to the current Python interpreter.
+#
+# This variable is set automatically in the following contexts:
+#
+# python-r1: Set in functions called by python_foreach_impl() or after
+# calling python_export_best().
+#
+# python-single-r1: Set after calling python-single-r1_pkg_setup().
+#
+# distutils-r1: Set within any of the python sub-phase functions.
+#
+# Example value:
+# @CODE
+# /usr/bin/python2.7
+# @CODE
+
+# @ECLASS-VARIABLE: EPYTHON
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The executable name of the current Python interpreter.
+#
+# This variable is set automatically in the following contexts:
+#
+# python-r1: Set in functions called by python_foreach_impl() or after
+# calling python_export_best().
+#
+# python-single-r1: Set after calling python-single-r1_pkg_setup().
+#
+# distutils-r1: Set within any of the python sub-phase functions.
+#
+# Example value:
+# @CODE
+# python2.7
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_SITEDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The path to Python site-packages directory.
+#
+# Set and exported on request using python_export().
+#
+# Example value:
+# @CODE
+# /usr/lib64/python2.7/site-packages
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_INCLUDEDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The path to Python include directory.
+#
+# Set and exported on request using python_export().
+#
+# Example value:
+# @CODE
+# /usr/include/python2.7
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_LIBPATH
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The path to Python library.
+#
+# Set and exported on request using python_export().
+# Valid only for CPython.
+#
+# Example value:
+# @CODE
+# /usr/lib64/libpython2.7.so
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_CFLAGS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Proper C compiler flags for building against Python. Obtained from
+# pkg-config or python-config.
+#
+# Set and exported on request using python_export().
+# Valid only for CPython. Requires a proper build-time dependency
+# on the Python implementation and on pkg-config.
+#
+# Example value:
+# @CODE
+# -I/usr/include/python2.7
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_LIBS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Proper C compiler flags for linking against Python. Obtained from
+# pkg-config or python-config.
+#
+# Set and exported on request using python_export().
+# Valid only for CPython. Requires a proper build-time dependency
+# on the Python implementation and on pkg-config.
+#
+# Example value:
+# @CODE
+# -lpython2.7
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_PKG_DEP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The complete dependency on a particular Python package as a string.
+#
+# Set and exported on request using python_export().
+#
+# Example value:
+# @CODE
+# dev-lang/python:2.7[xml]
+# @CODE
+
+# @ECLASS-VARIABLE: PYTHON_SCRIPTDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The location where Python scripts must be installed for current impl.
+#
+# Set and exported on request using python_export().
+#
+# Example value:
+# @CODE
+# /usr/lib/python-exec/python2.7
+# @CODE
+
+# @FUNCTION: python_export
+# @USAGE: [<impl>] <variables>...
+# @DESCRIPTION:
+# Set and export the Python implementation-relevant variables passed
+# as parameters.
+#
+# The optional first parameter may specify the requested Python
+# implementation (either as PYTHON_TARGETS value, e.g. python2_7,
+# or an EPYTHON one, e.g. python2.7). If no implementation passed,
+# the current one will be obtained from ${EPYTHON}.
+#
+# The variables which can be exported are: PYTHON, EPYTHON,
+# PYTHON_SITEDIR. They are described more completely in the eclass
+# variable documentation.
+python_export() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local impl var
+
+ case "${1}" in
+ python*|jython*)
+ impl=${1/_/.}
+ shift
+ ;;
+ pypy|pypy3)
+ impl=${1}
+ shift
+ ;;
+ *)
+ impl=${EPYTHON}
+ if [[ -z ${impl} ]]; then
+ die "python_export called without a python implementation and EPYTHON is unset"
+ fi
+ ;;
+ esac
+ debug-print "${FUNCNAME}: implementation: ${impl}"
+
+ for var; do
+ case "${var}" in
+ EPYTHON)
+ export EPYTHON=${impl}
+ debug-print "${FUNCNAME}: EPYTHON = ${EPYTHON}"
+ ;;
+ PYTHON)
+ export PYTHON=${EPREFIX}/usr/bin/${impl}
+ debug-print "${FUNCNAME}: PYTHON = ${PYTHON}"
+ ;;
+ PYTHON_SITEDIR)
+ local dir
+ case "${impl}" in
+ python*|pypy|pypy3)
+ dir=/usr/$(get_libdir)/${impl}
+ ;;
+ jython*)
+ dir=/usr/share/${impl/n/n-}/Lib
+ ;;
+ esac
+
+ export PYTHON_SITEDIR=${EPREFIX}${dir}/site-packages
+ debug-print "${FUNCNAME}: PYTHON_SITEDIR = ${PYTHON_SITEDIR}"
+ ;;
+ PYTHON_INCLUDEDIR)
+ local dir
+ case "${impl}" in
+ python*)
+ dir=/usr/include/${impl}
+ ;;
+ pypy|pypy3)
+ dir=/usr/$(get_libdir)/${impl}/include
+ ;;
+ *)
+ die "${impl} lacks header files"
+ ;;
+ esac
+
+ export PYTHON_INCLUDEDIR=${EPREFIX}${dir}
+ debug-print "${FUNCNAME}: PYTHON_INCLUDEDIR = ${PYTHON_INCLUDEDIR}"
+ ;;
+ PYTHON_LIBPATH)
+ local libname
+ case "${impl}" in
+ python*)
+ libname=lib${impl}
+ ;;
+ *)
+ die "${impl} lacks a dynamic library"
+ ;;
+ esac
+
+ local path=${EPREFIX}/usr/$(get_libdir)
+
+ export PYTHON_LIBPATH=${path}/${libname}$(get_libname)
+ debug-print "${FUNCNAME}: PYTHON_LIBPATH = ${PYTHON_LIBPATH}"
+ ;;
+ PYTHON_CFLAGS)
+ local val
+
+ case "${impl}" in
+ python*)
+ # python-2.7, python-3.2, etc.
+ val=$($(tc-getPKG_CONFIG) --cflags ${impl/n/n-})
+ ;;
+ *)
+ die "${impl}: obtaining ${var} not supported"
+ ;;
+ esac
+
+ export PYTHON_CFLAGS=${val}
+ debug-print "${FUNCNAME}: PYTHON_CFLAGS = ${PYTHON_CFLAGS}"
+ ;;
+ PYTHON_LIBS)
+ local val
+
+ case "${impl}" in
+ python*)
+ # python-2.7, python-3.2, etc.
+ val=$($(tc-getPKG_CONFIG) --libs ${impl/n/n-})
+ ;;
+ *)
+ die "${impl}: obtaining ${var} not supported"
+ ;;
+ esac
+
+ export PYTHON_LIBS=${val}
+ debug-print "${FUNCNAME}: PYTHON_LIBS = ${PYTHON_LIBS}"
+ ;;
+ PYTHON_PKG_DEP)
+ local d
+ case ${impl} in
+ python2.7)
+ PYTHON_PKG_DEP='>=dev-lang/python-2.7.5-r2:2.7';;
+ python3.3)
+ PYTHON_PKG_DEP='>=dev-lang/python-3.3.2-r2:3.3';;
+ python*)
+ PYTHON_PKG_DEP="dev-lang/python:${impl#python}";;
+ pypy)
+ PYTHON_PKG_DEP='virtual/pypy:0=';;
+ pypy3)
+ PYTHON_PKG_DEP='virtual/pypy3:0=';;
+ jython2.5)
+ PYTHON_PKG_DEP='>=dev-java/jython-2.5.3-r2:2.5';;
+ jython2.7)
+ PYTHON_PKG_DEP='dev-java/jython:2.7';;
+ *)
+ die "Invalid implementation: ${impl}"
+ esac
+
+ # use-dep
+ if [[ ${PYTHON_REQ_USE} ]]; then
+ PYTHON_PKG_DEP+=[${PYTHON_REQ_USE}]
+ fi
+
+ export PYTHON_PKG_DEP
+ debug-print "${FUNCNAME}: PYTHON_PKG_DEP = ${PYTHON_PKG_DEP}"
+ ;;
+ PYTHON_SCRIPTDIR)
+ local dir
+ export PYTHON_SCRIPTDIR=${EPREFIX}/usr/lib/python-exec/${impl}
+ debug-print "${FUNCNAME}: PYTHON_SCRIPTDIR = ${PYTHON_SCRIPTDIR}"
+ ;;
+ *)
+ die "python_export: unknown variable ${var}"
+ esac
+ done
+}
+
+# @FUNCTION: python_get_sitedir
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the 'site-packages' path for the given
+# implementation. If no implementation is provided, ${EPYTHON} will
+# be used.
+#
+# If you just need to have PYTHON_SITEDIR set (and exported), then it is
+# better to use python_export() directly instead.
+python_get_sitedir() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_SITEDIR
+ echo "${PYTHON_SITEDIR}"
+}
+
+# @FUNCTION: python_get_includedir
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the include path for the given implementation. If no
+# implementation is provided, ${EPYTHON} will be used.
+#
+# If you just need to have PYTHON_INCLUDEDIR set (and exported), then it
+# is better to use python_export() directly instead.
+python_get_includedir() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_INCLUDEDIR
+ echo "${PYTHON_INCLUDEDIR}"
+}
+
+# @FUNCTION: python_get_library_path
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the Python library path for the given implementation.
+# If no implementation is provided, ${EPYTHON} will be used.
+#
+# Please note that this function can be used with CPython only. Use
+# in another implementation will result in a fatal failure.
+python_get_library_path() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_LIBPATH
+ echo "${PYTHON_LIBPATH}"
+}
+
+# @FUNCTION: python_get_CFLAGS
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the compiler flags for building against Python,
+# for the given implementation. If no implementation is provided,
+# ${EPYTHON} will be used.
+#
+# Please note that this function can be used with CPython only.
+# It requires Python and pkg-config installed, and therefore proper
+# build-time dependencies need to be added to the ebuild.
+python_get_CFLAGS() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_CFLAGS
+ echo "${PYTHON_CFLAGS}"
+}
+
+# @FUNCTION: python_get_LIBS
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the compiler flags for linking against Python,
+# for the given implementation. If no implementation is provided,
+# ${EPYTHON} will be used.
+#
+# Please note that this function can be used with CPython only.
+# It requires Python and pkg-config installed, and therefore proper
+# build-time dependencies need to be added to the ebuild.
+python_get_LIBS() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_LIBS
+ echo "${PYTHON_LIBS}"
+}
+
+# @FUNCTION: python_get_scriptdir
+# @USAGE: [<impl>]
+# @DESCRIPTION:
+# Obtain and print the script install path for the given
+# implementation. If no implementation is provided, ${EPYTHON} will
+# be used.
+python_get_scriptdir() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_export "${@}" PYTHON_SCRIPTDIR
+ echo "${PYTHON_SCRIPTDIR}"
+}
+
+# @FUNCTION: _python_ln_rel
+# @USAGE: <from> <to>
+# @INTERNAL
+# @DESCRIPTION:
+# Create a relative symlink.
+_python_ln_rel() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local target=${1}
+ local symname=${2}
+
+ local tgpath=${target%/*}/
+ local sympath=${symname%/*}/
+ local rel_target=
+
+ while [[ ${sympath} ]]; do
+ local tgseg= symseg=
+
+ while [[ ! ${tgseg} && ${tgpath} ]]; do
+ tgseg=${tgpath%%/*}
+ tgpath=${tgpath#${tgseg}/}
+ done
+
+ while [[ ! ${symseg} && ${sympath} ]]; do
+ symseg=${sympath%%/*}
+ sympath=${sympath#${symseg}/}
+ done
+
+ if [[ ${tgseg} != ${symseg} ]]; then
+ rel_target=../${rel_target}${tgseg:+${tgseg}/}
+ fi
+ done
+ rel_target+=${tgpath}${target##*/}
+
+ debug-print "${FUNCNAME}: ${symname} -> ${target}"
+ debug-print "${FUNCNAME}: rel_target = ${rel_target}"
+
+ ln -fs "${rel_target}" "${symname}"
+}
+
+# @FUNCTION: python_optimize
+# @USAGE: [<directory>...]
+# @DESCRIPTION:
+# Compile and optimize Python modules in specified directories (absolute
+# paths). If no directories are provided, the default system paths
+# are used (prepended with ${D}).
+python_optimize() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ if [[ ${EBUILD_PHASE} == pre* || ${EBUILD_PHASE} == post* ]]; then
+ eerror "The new Python eclasses expect the compiled Python files to"
+ eerror "be controlled by the Package Manager. For this reason,"
+ eerror "the python_optimize function can be used only during src_* phases"
+ eerror "(src_install most commonly) and not during pkg_* phases."
+ echo
+ die "python_optimize is not to be used in pre/post* phases"
+ fi
+
+ [[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'
+
+ local PYTHON=${PYTHON}
+ [[ ${PYTHON} ]] || python_export PYTHON
+
+ # Note: python2.6 can't handle passing files to compileall...
+ # TODO: we do not support 2.6 any longer
+
+ # default to sys.path
+ if [[ ${#} -eq 0 ]]; then
+ local f
+ while IFS= read -r -d '' f; do
+ # 1) accept only absolute paths
+ # (i.e. skip '', '.' or anything like that)
+ # 2) skip paths which do not exist
+ # (python2.6 complains about them verbosely)
+
+ if [[ ${f} == /* && -d ${D}${f} ]]; then
+ set -- "${D}${f}" "${@}"
+ fi
+ done < <("${PYTHON}" -c 'import sys; print("\0".join(sys.path))')
+
+ debug-print "${FUNCNAME}: using sys.path: ${*/%/;}"
+ fi
+
+ local d
+ for d; do
+ # make sure to get a nice path without //
+ local instpath=${d#${D}}
+ instpath=/${instpath##/}
+
+ case "${EPYTHON}" in
+ python*)
+ "${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}"
+ "${PYTHON}" -OO -m compileall -q -f -d "${instpath}" "${d}"
+ ;;
+ *)
+ "${PYTHON}" -m compileall -q -f -d "${instpath}" "${d}"
+ ;;
+ esac
+ done
+}
+
+# @ECLASS-VARIABLE: python_scriptroot
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The current script destination for python_doscript(). The path
+# is relative to the installation root (${ED}).
+#
+# When unset, ${DESTTREE}/bin (/usr/bin by default) will be used.
+#
+# Can be set indirectly through the python_scriptinto() function.
+#
+# Example:
+# @CODE
+# src_install() {
+# local python_scriptroot=${GAMES_BINDIR}
+# python_foreach_impl python_doscript foo
+# }
+# @CODE
+
+# @FUNCTION: python_scriptinto
+# @USAGE: <new-path>
+# @DESCRIPTION:
+# Set the current scriptroot. The new value will be stored
+# in the 'python_scriptroot' environment variable. The new value need
+# be relative to the installation root (${ED}).
+#
+# Alternatively, you can set the variable directly.
+python_scriptinto() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ python_scriptroot=${1}
+}
+
+# @FUNCTION: python_doexe
+# @USAGE: <files>...
+# @DESCRIPTION:
+# Install the given executables into current python_scriptroot,
+# for the current Python implementation (${EPYTHON}).
+#
+# The executable will be wrapped properly for the Python implementation,
+# though no shebang mangling will be performed.
+python_doexe() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local f
+ for f; do
+ python_newexe "${f}" "${f##*/}"
+ done
+}
+
+# @FUNCTION: python_newexe
+# @USAGE: <path> <new-name>
+# @DESCRIPTION:
+# Install the given executable into current python_scriptroot,
+# for the current Python implementation (${EPYTHON}).
+#
+# The executable will be wrapped properly for the Python implementation,
+# though no shebang mangling will be performed. It will be renamed
+# to <new-name>.
+python_newexe() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ [[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'
+ [[ ${#} -eq 2 ]] || die "Usage: ${FUNCNAME} <path> <new-name>"
+
+ local wrapd=${python_scriptroot:-${DESTTREE}/bin}
+
+ local f=${1}
+ local newfn=${2}
+
+ local PYTHON_SCRIPTDIR d
+ python_export PYTHON_SCRIPTDIR
+ d=${PYTHON_SCRIPTDIR#${EPREFIX}}
+
+ (
+ dodir "${wrapd}"
+ exeinto "${d}"
+ newexe "${f}" "${newfn}" || die
+ )
+
+ # install the wrapper
+ _python_ln_rel "${ED%/}"/usr/lib/python-exec/python-exec2 \
+ "${ED%/}/${wrapd}/${newfn}" || die
+
+ # don't use this at home, just call python_doscript() instead
+ if [[ ${_PYTHON_REWRITE_SHEBANG} ]]; then
+ python_fix_shebang -q "${ED%/}/${d}/${newfn}"
+ fi
+}
+
+# @FUNCTION: python_doscript
+# @USAGE: <files>...
+# @DESCRIPTION:
+# Install the given scripts into current python_scriptroot,
+# for the current Python implementation (${EPYTHON}).
+#
+# All specified files must start with a 'python' shebang. The shebang
+# will be converted, and the files will be wrapped properly
+# for the Python implementation.
+#
+# Example:
+# @CODE
+# src_install() {
+# python_foreach_impl python_doscript ${PN}
+# }
+# @CODE
+python_doscript() {
+ debug-print-function ${FUNCNAME} "${@}"
+
+ local _PYTHON_REWRITE_SHEBANG=1
+ python_doexe "${@}"
+}
+
# @FUNCTION: python_newscript
# @USAGE: <path> <new-name>
# @DESCRIPTION:
# Install the given script into current python_scriptroot
# for the current Python implementation (${EPYTHON}), and name it
# <new-name>.
#
# The file must start with a 'python' shebang. The shebang will be
# converted, and the file will be wrapped properly for the Python
# implementation. It will be renamed to <new-name>.
#
# Example:
# @CODE
# src_install() {
#   python_foreach_impl python_newscript foo.py foo
# }
# @CODE
python_newscript() {
	debug-print-function ${FUNCNAME} "${@}"

	# Dynamically-scoped flag read by python_newexe() to trigger
	# shebang rewriting.
	local _PYTHON_REWRITE_SHEBANG=1
	python_newexe "${@}"
}
+
+# @ECLASS-VARIABLE: python_moduleroot
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The current module root for python_domodule(). The path can be either
+# an absolute system path (it must start with a slash, and ${ED} will be
+# prepended to it) or relative to the implementation's site-packages directory
+# (then it must start with a non-slash character).
+#
+# When unset, the modules will be installed in the site-packages root.
+#
+# Can be set indirectly through the python_moduleinto() function.
+#
+# Example:
+# @CODE
+# src_install() {
+# local python_moduleroot=bar
+# # installs ${PYTHON_SITEDIR}/bar/baz.py
+# python_foreach_impl python_domodule baz.py
+# }
+# @CODE
+
# @FUNCTION: python_moduleinto
# @USAGE: <new-path>
# @DESCRIPTION:
# Set the current module root. The new value will be stored
# in the 'python_moduleroot' environment variable. The new value needs
# to be relative to the site-packages root.
#
# Alternatively, you can set the variable directly.
python_moduleinto() {
	debug-print-function ${FUNCNAME} "${@}"

	# Deliberately not 'local': python_domodule() reads this later.
	python_moduleroot=${1}
}
+
# @FUNCTION: python_domodule
# @USAGE: <files>...
# @DESCRIPTION:
# Install the given modules (or packages) into the current
# python_moduleroot. The list can mention both modules (files)
# and packages (directories). All listed files will be installed
# for all enabled implementations, and compiled afterwards.
#
# Example:
# @CODE
# src_install() {
#   # (${PN} being a directory)
#   python_foreach_impl python_domodule ${PN}
# }
# @CODE
python_domodule() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'

	local d
	if [[ ${python_moduleroot} == /* ]]; then
		# absolute path
		d=${python_moduleroot}
	else
		# relative to site-packages
		# Localize while preserving any caller-provided value, so the
		# python_export lookup is skipped when already known.
		local PYTHON_SITEDIR=${PYTHON_SITEDIR}
		[[ ${PYTHON_SITEDIR} ]] || python_export PYTHON_SITEDIR

		# Strip ${EPREFIX}: insinto paths are relative to ${ED}.
		d=${PYTHON_SITEDIR#${EPREFIX}}/${python_moduleroot}
	fi

	# Localize INSDESTTREE so insinto's destination change
	# does not leak to the caller.
	local INSDESTTREE

	insinto "${d}"
	doins -r "${@}" || die

	# Byte-compile the freshly installed modules for ${EPYTHON}.
	python_optimize "${ED}/${d}"
}
+
# @FUNCTION: python_doheader
# @USAGE: <files>...
# @DESCRIPTION:
# Install the given headers into the implementation-specific include
# directory. This function is unconditionally recursive, i.e. you can
# pass directories instead of files.
#
# Example:
# @CODE
# src_install() {
#   python_foreach_impl python_doheader foo.h bar.h
# }
# @CODE
python_doheader() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${EPYTHON} ]] || die 'No Python implementation set (EPYTHON is null).'

	# Localize while preserving any caller-provided value, so the
	# python_export lookup is skipped when already known.
	local d PYTHON_INCLUDEDIR=${PYTHON_INCLUDEDIR}
	[[ ${PYTHON_INCLUDEDIR} ]] || python_export PYTHON_INCLUDEDIR

	# Strip ${EPREFIX}: insinto paths are relative to ${ED}.
	d=${PYTHON_INCLUDEDIR#${EPREFIX}}

	# Localize INSDESTTREE so insinto's destination change
	# does not leak to the caller.
	local INSDESTTREE

	insinto "${d}"
	doins -r "${@}" || die
}
+
# @FUNCTION: python_wrapper_setup
# @USAGE: [<path> [<impl>]]
# @DESCRIPTION:
# Create proper 'python' executable and pkg-config wrappers
# (if available) in the directory named by <path>. Set up PATH
# and PKG_CONFIG_PATH appropriately. <path> defaults to ${T}/${EPYTHON}.
#
# The wrappers will be created for implementation named by <impl>,
# or for one named by ${EPYTHON} if no <impl> passed.
#
# If the named directory contains a python symlink already, it will
# be assumed to contain proper wrappers already and only environment
# setup will be done. If wrapper update is requested, the directory
# shall be removed first.
python_wrapper_setup() {
	debug-print-function ${FUNCNAME} "${@}"

	local workdir=${1:-${T}/${EPYTHON}}
	local impl=${2:-${EPYTHON}}

	[[ ${workdir} ]] || die "${FUNCNAME}: no workdir specified."
	[[ ${impl} ]] || die "${FUNCNAME}: no impl nor EPYTHON specified."

	# An executable 'python' marks an already-populated wrapper dir;
	# in that case only the environment setup at the bottom runs.
	if [[ ! -x ${workdir}/bin/python ]]; then
		_python_check_dead_variables

		mkdir -p "${workdir}"/{bin,pkgconfig} || die

		# Clean up, in case we were supposed to do a cheap update.
		rm -f "${workdir}"/bin/python{,2,3,-config} || die
		rm -f "${workdir}"/bin/2to3 || die
		rm -f "${workdir}"/pkgconfig/python{,2,3}.pc || die

		# Shadow the globals so the wrappers target <impl>.
		local EPYTHON PYTHON
		python_export "${impl}" EPYTHON PYTHON

		local pyver pyother
		if python_is_python3; then
			pyver=3
			pyother=2
		else
			pyver=2
			pyother=3
		fi

		# Python interpreter
		# note: we don't use symlinks because python likes to do some
		# symlink reading magic that breaks stuff
		# https://bugs.gentoo.org/show_bug.cgi?id=555752
		cat > "${workdir}/bin/python" <<-_EOF_
			#!/bin/sh
			exec "${PYTHON}" "\${@}"
		_EOF_
		cp "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die
		chmod +x "${workdir}/bin/python" "${workdir}/bin/python${pyver}" || die

		local nonsupp=( "python${pyother}" "python${pyother}-config" )

		# CPython-specific
		if [[ ${EPYTHON} == python* ]]; then
			cat > "${workdir}/bin/python-config" <<-_EOF_
				#!/bin/sh
				exec "${PYTHON}-config" "\${@}"
			_EOF_
			cp "${workdir}/bin/python-config" \
				"${workdir}/bin/python${pyver}-config" || die
			chmod +x "${workdir}/bin/python-config" \
				"${workdir}/bin/python${pyver}-config" || die

			# Python 2.6+.
			ln -s "${PYTHON/python/2to3-}" "${workdir}"/bin/2to3 || die

			# Python 2.7+.
			# ${EPYTHON/n/n-} turns e.g. python2.7 into python-2.7 (.pc name).
			ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${EPYTHON/n/n-}.pc \
				"${workdir}"/pkgconfig/python.pc || die
			ln -s python.pc "${workdir}"/pkgconfig/python${pyver}.pc || die
		else
			nonsupp+=( 2to3 python-config "python${pyver}-config" )
		fi

		# Stubs for tools the implementation does not provide.
		local x
		for x in "${nonsupp[@]}"; do
			cat >"${workdir}"/bin/${x} <<__EOF__
#!/bin/sh
echo "${x} is not supported by ${EPYTHON}" >&2
exit 127
__EOF__
			chmod +x "${workdir}"/bin/${x} || die
		done

		# Now, set the environment.
		# But note that ${workdir} may be shared with something else,
		# and thus already on top of PATH.
		# NOTE(review): ${PATH##:*} strips the whole value only when it
		# begins with ':'; a first-entry test would be ${PATH%%:*} —
		# verify intent upstream before changing.
		if [[ ${PATH##:*} != ${workdir}/bin ]]; then
			PATH=${workdir}/bin${PATH:+:${PATH}}
		fi
		if [[ ${PKG_CONFIG_PATH##:*} != ${workdir}/pkgconfig ]]; then
			PKG_CONFIG_PATH=${workdir}/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}}
		fi
		export PATH PKG_CONFIG_PATH
	fi
}
+
# @FUNCTION: python_is_python3
# @USAGE: [<impl>]
# @DESCRIPTION:
# Check whether <impl> (or ${EPYTHON}) is a Python3k variant
# (i.e. uses syntax and stdlib of Python 3.*).
#
# Returns 0 (true) if it is, 1 (false) otherwise.
python_is_python3() {
	local impl=${1:-${EPYTHON}}
	[[ ${impl} ]] || die "python_is_python3: no impl nor EPYTHON"

	case "${impl}" in
		python3*|pypy3)
			return 0
			;;
		*)
			return 1
			;;
	esac
}
+
# @FUNCTION: python_is_installed
# @USAGE: [<impl>]
# @DESCRIPTION:
# Check whether the interpreter for <impl> (or ${EPYTHON}) is installed.
# Uses has_version with a proper dependency string.
#
# Returns 0 (true) if it is, 1 (false) otherwise.
python_is_installed() {
	local impl=${1:-${EPYTHON}}
	[[ ${impl} ]] || die "${FUNCNAME}: no impl nor EPYTHON"

	# for has_version; exported (-x) so the lookup targets the host root
	local -x ROOT=/
	case "${impl}" in
		pypy|pypy3)
			local append=
			if [[ ${PYTHON_REQ_USE} ]]; then
				append=[${PYTHON_REQ_USE}]
			fi

			# be happy with just the interpreter, no need for the virtual
			has_version "dev-python/${impl}${append}" \
				|| has_version "dev-python/${impl}-bin${append}"
			;;
		*)
			local PYTHON_PKG_DEP
			python_export "${impl}" PYTHON_PKG_DEP
			has_version "${PYTHON_PKG_DEP}"
			;;
	esac
}
+
# @FUNCTION: python_fix_shebang
# @USAGE: [-f|--force] [-q|--quiet] <path>...
# @DESCRIPTION:
# Replace the shebang in Python scripts with the current Python
# implementation (EPYTHON). If a directory is passed, works recursively
# on all Python scripts.
#
# Only files having a 'python*' shebang will be modified. Files with
# other shebang will either be skipped when working recursively
# on a directory or treated as error when specified explicitly.
#
# Shebangs matching explicitly current Python version will be left
# unmodified. Shebangs requesting another Python version will be treated
# as fatal error, unless --force is given.
#
# --force causes the function to replace even shebangs that require
# incompatible Python version. --quiet causes the function not to list
# modified files verbosely.
python_fix_shebang() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${EPYTHON} ]] || die "${FUNCNAME}: EPYTHON unset (pkg_setup not called?)"

	# Option parsing; first non-option argument ends the loop.
	local force quiet
	while [[ ${@} ]]; do
		case "${1}" in
			-f|--force) force=1; shift;;
			-q|--quiet) quiet=1; shift;;
			--) shift; break;;
			*) break;;
		esac
	done

	[[ ${1} ]] || die "${FUNCNAME}: no paths given"

	local path f
	for path; do
		# 'local' here re-declares per path, resetting the flags.
		local any_correct any_fixed is_recursive

		[[ -d ${path} ]] && is_recursive=1

		# NUL-delimited walk; for a plain file find emits just that file.
		while IFS= read -r -d '' f; do
			local shebang i
			local error= from=

			# Only the first line of the file matters.
			IFS= read -r shebang <"${f}"

			# First, check if it's shebang at all...
			if [[ ${shebang} == '#!'* ]]; then
				local split_shebang=()
				read -r -a split_shebang <<<${shebang}

				# Match left-to-right in a loop, to avoid matching random
				# repetitions like 'python2.7 python2'.
				for i in "${split_shebang[@]}"; do
					case "${i}" in
						*"${EPYTHON}")
							debug-print "${FUNCNAME}: in file ${f#${D}}"
							debug-print "${FUNCNAME}: shebang matches EPYTHON: ${shebang}"

							# Nothing to do, move along.
							any_correct=1
							from=${EPYTHON}
							break
							;;
						*python|*python[23])
							debug-print "${FUNCNAME}: in file ${f#${D}}"
							debug-print "${FUNCNAME}: rewriting shebang: ${shebang}"

							if [[ ${i} == *python2 ]]; then
								from=python2
								if [[ ! ${force} ]]; then
									python_is_python3 "${EPYTHON}" && error=1
								fi
							elif [[ ${i} == *python3 ]]; then
								from=python3
								if [[ ! ${force} ]]; then
									python_is_python3 "${EPYTHON}" || error=1
								fi
							else
								from=python
							fi
							break
							;;
						*python[23].[0123456789]|*pypy|*pypy3|*jython[23].[0123456789])
							# Explicit mismatch.
							if [[ ! ${force} ]]; then
								error=1
							else
								# 'from' holds a sed pattern here, not a
								# literal — it must match the old version.
								case "${i}" in
									*python[23].[0123456789])
										from="python[23].[0123456789]";;
									*pypy)
										from="pypy";;
									*pypy3)
										from="pypy3";;
									*jython[23].[0123456789])
										from="jython[23].[0123456789]";;
									*)
										die "${FUNCNAME}: internal error in 2nd pattern match";;
								esac
							fi
							break
							;;
					esac
				done
			fi

			if [[ ! ${error} && ! ${from} ]]; then
				# Non-Python shebang. Allowed in recursive mode,
				# disallowed when specifying file explicitly.
				[[ ${is_recursive} ]] && continue
				error=1
			fi

			# NOTE(review): this prints "Fixing shebang" even when the
			# error branch below dies — consider moving it into the
			# success branch.
			if [[ ! ${quiet} ]]; then
				einfo "Fixing shebang in ${f#${D}}."
			fi

			if [[ ! ${error} ]]; then
				# We either want to match ${from} followed by space
				# or at end-of-string.
				if [[ ${shebang} == *${from}" "* ]]; then
					sed -i -e "1s:${from} :${EPYTHON} :" "${f}" || die
				else
					sed -i -e "1s:${from}$:${EPYTHON}:" "${f}" || die
				fi
				any_fixed=1
			else
				eerror "The file has incompatible shebang:"
				eerror "  file: ${f#${D}}"
				eerror "  current shebang: ${shebang}"
				eerror "  requested impl: ${EPYTHON}"
				die "${FUNCNAME}: conversion of incompatible shebang requested"
			fi
		done < <(find "${path}" -type f -print0)

		if [[ ! ${any_fixed} ]]; then
			eqawarn "QA warning: ${FUNCNAME}, ${path#${D}} did not match any fixable files."
			if [[ ${any_correct} ]]; then
				eqawarn "All files have ${EPYTHON} shebang already."
			else
				eqawarn "There are no Python files in specified directory."
			fi
		fi
	done
}
+
# @FUNCTION: python_export_utf8_locale
# @RETURN: 0 on success, 1 on failure.
# @DESCRIPTION:
# Attempts to export a usable UTF-8 locale in the LC_CTYPE variable. Does
# nothing if LC_ALL is defined, or if the current locale uses a UTF-8 charmap.
# This may be used to work around the quirky open() behavior of python3.
python_export_utf8_locale() {
	debug-print-function ${FUNCNAME} "${@}"

	if [[ $(locale charmap) != UTF-8 ]]; then
		# LC_ALL overrides LC_CTYPE, so exporting LC_CTYPE would be
		# pointless; warn and give up.
		if [[ -n ${LC_ALL} ]]; then
			ewarn "LC_ALL is set to a locale with a charmap other than UTF-8."
			ewarn "This may trigger build failures in some python packages."
			return 1
		fi

		# Try English first, then everything else.
		local lang locales="en_US.UTF-8 $(locale -a)"

		for lang in ${locales}; do
			if [[ $(LC_CTYPE=${lang} locale charmap 2>/dev/null) == UTF-8 ]]; then
				export LC_CTYPE=${lang}
				return 0
			fi
		done

		ewarn "Could not find a UTF-8 locale. This may trigger build failures in"
		ewarn "some python packages. Please ensure that a UTF-8 locale is listed in"
		ewarn "/etc/locale.gen and run locale-gen."
		return 1
	fi

	return 0
}
+
+# -- python.eclass functions --
+
# Die if the ebuild sets any variable belonging to the dead
# python.eclass / pre-r1 distutils APIs, pointing the developer at the
# relevant section of the conversion guide.
_python_check_dead_variables() {
	local v

	for v in PYTHON_DEPEND PYTHON_USE_WITH{,_OR,_OPT} {RESTRICT,SUPPORT}_PYTHON_ABIS
	do
		if [[ ${!v} ]]; then
			die "${v} is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#Ebuild_head"
		fi
	done

	for v in PYTHON_{CPPFLAGS,CFLAGS,CXXFLAGS,LDFLAGS}
	do
		if [[ ${!v} ]]; then
			die "${v} is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#PYTHON_CFLAGS"
		fi
	done

	for v in PYTHON_TESTS_RESTRICTED_ABIS PYTHON_EXPORT_PHASE_FUNCTIONS \
		PYTHON_VERSIONED_{SCRIPTS,EXECUTABLES} PYTHON_NONVERSIONED_EXECUTABLES
	do
		if [[ ${!v} ]]; then
			die "${v} is invalid for python-r1 suite"
		fi
	done

	for v in DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES DISTUTILS_SETUP_FILES \
		DISTUTILS_GLOBAL_OPTIONS DISTUTILS_SRC_TEST PYTHON_MODNAME
	do
		if [[ ${!v} ]]; then
			die "${v} is invalid for distutils-r1, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#${v}"
		fi
	done

	# Fix: the original interpolated ${v} here, but at this point v
	# still holds the last value of the loop above (PYTHON_MODNAME),
	# so the error named the wrong variable. Use the literal name.
	if [[ ${DISTUTILS_DISABLE_TEST_DEPENDENCY} ]]; then
		die "DISTUTILS_DISABLE_TEST_DEPENDENCY is invalid for distutils-r1, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#DISTUTILS_SRC_TEST"
	fi

	# python.eclass::progress
	for v in PYTHON_BDEPEND PYTHON_MULTIPLE_ABIS PYTHON_ABI_TYPE \
		PYTHON_RESTRICTED_ABIS PYTHON_TESTS_FAILURES_TOLERANT_ABIS \
		PYTHON_CFFI_MODULES_GENERATION_COMMANDS
	do
		if [[ ${!v} ]]; then
			die "${v} is invalid for python-r1 suite"
		fi
	done
}
+
# Banned python.eclass API: each stub below replaces a dead function
# and dies with a pointer to the python-r1 conversion guide, so stale
# callers fail loudly instead of misbehaving.
python_pkg_setup() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#pkg_setup"
}

python_convert_shebangs() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#python_convert_shebangs"
}

python_clean_py-compile_files() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_clean_installation_image() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_execute_function() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#python_execute_function"
}

python_generate_wrapper_scripts() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_merge_intermediate_installation_images() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_set_active_version() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#pkg_setup"
}

python_need_rebuild() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

PYTHON() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#.24.28PYTHON.29.2C_.24.7BEPYTHON.7D"
}

python_get_implementation() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_implementational_package() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_libdir() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_library() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_version() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_implementation_and_version() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_execute_nosetests() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_execute_py.test() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_execute_trial() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_enable_pyc() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_disable_pyc() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_mod_optimize() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#Python_byte-code_compilation"
}

python_mod_cleanup() {
	die "${FUNCNAME}() is invalid for python-r1 suite, please take a look @ https://wiki.gentoo.org/wiki/Project:Python/Python.eclass_conversion#Python_byte-code_compilation"
}
+
# python.eclass::progress
# Stubs for the python.eclass "progress" branch API; all banned under
# the python-r1 suite.

python_abi_depend() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_install_executables() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_get_extension_module_suffix() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_byte-compile_modules() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_clean_byte-compiled_modules() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}

python_generate_cffi_modules() {
	die "${FUNCNAME}() is invalid for python-r1 suite"
}
+
+_PYTHON_UTILS_R1=1
+fi
diff --git a/eclass/python.eclass b/eclass/python.eclass
new file mode 100644
index 000000000000..c3634a004861
--- /dev/null
+++ b/eclass/python.eclass
@@ -0,0 +1,3181 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: python.eclass
+# @MAINTAINER:
+# Gentoo Python Project <python@gentoo.org>
+# @BLURB: Eclass for Python packages
+# @DESCRIPTION:
+# The python eclass contains miscellaneous, useful functions for Python packages.
+#
+# This eclass is DEPRECATED. Please use python-r1, python-single-r1
+# or python-any-r1 instead.
+
# The legacy python.eclass and the python-r1 suite are mutually exclusive.
if [[ ${_PYTHON_UTILS_R1} ]]; then
	die 'python.eclass can not be used with python-r1 suite eclasses.'
fi

# Must call inherit before EXPORT_FUNCTIONS to avoid QA warning.
if [[ -z "${_PYTHON_ECLASS_INHERITED}" ]]; then
	inherit multilib
fi

# Export pkg_setup every time to avoid issues with eclass inheritance order.
if ! has "${EAPI:-0}" 0 1 2 3 || { has "${EAPI:-0}" 2 3 && [[ -n "${PYTHON_USE_WITH}" || -n "${PYTHON_USE_WITH_OR}" ]]; }; then
	EXPORT_FUNCTIONS pkg_setup
fi

# Avoid processing this eclass more than once.
# NOTE: the matching 'fi' for this guard closes at the end of the
# eclass, outside this view.
if [[ -z "${_PYTHON_ECLASS_INHERITED}" ]]; then
_PYTHON_ECLASS_INHERITED="1"

if ! has "${EAPI:-0}" 0 1 2 3 4 5; then
	die "API of python.eclass in EAPI=\"${EAPI}\" not established"
fi

# Please do not add any new versions of Python here! Instead, please
# focus on converting packages to use the new eclasses.

# Python ABIs recognized per implementation family.
_CPYTHON2_GLOBALLY_SUPPORTED_ABIS=(2.4 2.5 2.6 2.7)
_CPYTHON3_GLOBALLY_SUPPORTED_ABIS=(3.1 3.2 3.3)
_JYTHON_GLOBALLY_SUPPORTED_ABIS=(2.5-jython 2.7-jython)
_PYPY_GLOBALLY_SUPPORTED_ABIS=(2.7-pypy-1.7 2.7-pypy-1.8 2.7-pypy-1.9 2.7-pypy-2.0)
_PYTHON_GLOBALLY_SUPPORTED_ABIS=(${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]} ${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]} ${_JYTHON_GLOBALLY_SUPPORTED_ABIS[@]} ${_PYPY_GLOBALLY_SUPPORTED_ABIS[@]})

# ================================================================================================
# ===================================== HANDLING OF METADATA =====================================
# ================================================================================================

# Character class making up a single Python ABI pattern token.
_PYTHON_ABI_PATTERN_REGEX="([[:alnum:]]|\.|-|\*|\[|\])+"
+
# Check whether a Python ABI (e.g. "2.7", "2.7-jython", "2.7-pypy-2.0")
# matches a pattern ("2.7", "2.*-cpython", "2.7-jython", ...) or, with
# --patterns-list, any pattern in a space-separated list.
# Returns 0 on match, 1 otherwise.
# Note: the unquoted right-hand sides of '==' in [[ ]] deliberately
# enable glob matching against the pattern.
_python_check_python_abi_matching() {
	local pattern patterns patterns_list="0" PYTHON_ABI

	while (($#)); do
		case "$1" in
			--patterns-list)
				patterns_list="1"
				;;
			--)
				shift
				break
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				break
				;;
		esac
		shift
	done

	if [[ "$#" -ne 2 ]]; then
		die "${FUNCNAME}() requires 2 arguments"
	fi

	PYTHON_ABI="$1"

	if [[ "${patterns_list}" == "0" ]]; then
		pattern="$2"

		if [[ "${pattern}" == *"-cpython" ]]; then
			# "-cpython" restricts the match to plain X.Y ABIs.
			[[ "${PYTHON_ABI}" =~ ^[[:digit:]]+\.[[:digit:]]+$ && "${PYTHON_ABI}" == ${pattern%-cpython} ]]
		elif [[ "${pattern}" == *"-jython" ]]; then
			[[ "${PYTHON_ABI}" == ${pattern} ]]
		elif [[ "${pattern}" == *"-pypy-"* ]]; then
			[[ "${PYTHON_ABI}" == ${pattern} ]]
		else
			# Bare version pattern: strip the implementation suffix
			# from the ABI before comparing.
			if [[ "${PYTHON_ABI}" =~ ^[[:digit:]]+\.[[:digit:]]+$ ]]; then
				[[ "${PYTHON_ABI}" == ${pattern} ]]
			elif [[ "${PYTHON_ABI}" =~ ^[[:digit:]]+\.[[:digit:]]+-jython$ ]]; then
				[[ "${PYTHON_ABI%-jython}" == ${pattern} ]]
			elif [[ "${PYTHON_ABI}" =~ ^[[:digit:]]+\.[[:digit:]]+-pypy-[[:digit:]]+\.[[:digit:]]+$ ]]; then
				[[ "${PYTHON_ABI%-pypy-*}" == ${pattern} ]]
			else
				die "${FUNCNAME}(): Unrecognized Python ABI '${PYTHON_ABI}'"
			fi
		fi
	else
		# List mode: recurse on each whitespace-separated pattern.
		patterns="${2// /$'\n'}"

		while read pattern; do
			if _python_check_python_abi_matching "${PYTHON_ABI}" "${pattern}"; then
				return 0
			fi
		done <<< "${patterns}"

		return 1
	fi
}
+
# True when the current package is itself a Python implementation
# (CPython, Jython or the PyPy virtual).
_python_implementation() {
	case "${CATEGORY}/${PN}" in
		dev-lang/python|dev-java/jython|virtual/pypy)
			return 0
			;;
		*)
			return 1
			;;
	esac
}
+
# True when the ebuild opted into multi-ABI installation by setting
# SUPPORT_PYTHON_ABIS to a non-empty value.
_python_package_supporting_installation_for_multiple_python_abis() {
	[[ ${SUPPORT_PYTHON_ABIS} ]]
}
+
# @ECLASS-VARIABLE: PYTHON_DEPEND
# @DESCRIPTION:
# Specification of dependency on dev-lang/python.
# Syntax:
#   PYTHON_DEPEND: [[!]USE_flag? ]<version_components_group>[ version_components_group]
#   version_components_group: <major_version[:[minimal_version][:maximal_version]]>
#   major_version: <2|3|*>
#   minimal_version: <minimal_major_version.minimal_minor_version>
#   maximal_version: <maximal_major_version.maximal_minor_version>

# Parse ${PYTHON_DEPEND} into _PYTHON_ATOMS and append the resulting
# dependency string (optionally USE-conditional) to DEPEND and RDEPEND.
# Dies on any syntax error.
_python_parse_PYTHON_DEPEND() {
	local major_version maximal_version minimal_version python_all="0" python_maximal_version python_minimal_version python_versions=() python2="0" python2_maximal_version python2_minimal_version python3="0" python3_maximal_version python3_minimal_version USE_flag= version_components_group version_components_group_regex version_components_groups

	version_components_group_regex="(2|3|\*)(:([[:digit:]]+\.[[:digit:]]+)?(:([[:digit:]]+\.[[:digit:]]+)?)?)?"
	version_components_groups="${PYTHON_DEPEND}"

	if [[ "${version_components_groups}" =~ ^((\!)?[[:alnum:]_-]+\?\ )?${version_components_group_regex}(\ ${version_components_group_regex})?$ ]]; then
		# Split off the optional "[!]USE_flag? " prefix.
		if [[ "${version_components_groups}" =~ ^(\!)?[[:alnum:]_-]+\? ]]; then
			USE_flag="${version_components_groups%\? *}"
			version_components_groups="${version_components_groups#* }"
		fi
		# Reject '*' combined with other groups, and duplicate majors.
		if [[ "${version_components_groups}" =~ ("*".*" "|" *"|^2.*\ (2|\*)|^3.*\ (3|\*)) ]]; then
			die "Invalid syntax of PYTHON_DEPEND: Incorrectly specified groups of versions"
		fi

		# One group per line for the read loop below.
		version_components_groups="${version_components_groups// /$'\n'}"
		while read version_components_group; do
			# Layout is "X[:min[:max]]": char 0 is the major, chars from
			# 2 up to the next ':' the minimum, the rest the maximum.
			major_version="${version_components_group:0:1}"
			minimal_version="${version_components_group:2}"
			minimal_version="${minimal_version%:*}"
			maximal_version="${version_components_group:$((3 + ${#minimal_version}))}"

			if [[ "${major_version}" =~ ^(2|3)$ ]]; then
				if [[ -n "${minimal_version}" && "${major_version}" != "${minimal_version:0:1}" ]]; then
					die "Invalid syntax of PYTHON_DEPEND: Minimal version '${minimal_version}' not in specified group of versions"
				fi
				if [[ -n "${maximal_version}" && "${major_version}" != "${maximal_version:0:1}" ]]; then
					die "Invalid syntax of PYTHON_DEPEND: Maximal version '${maximal_version}' not in specified group of versions"
				fi
			fi

			if [[ "${major_version}" == "2" ]]; then
				python2="1"
				python_versions=("${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}")
				python2_minimal_version="${minimal_version}"
				python2_maximal_version="${maximal_version}"
			elif [[ "${major_version}" == "3" ]]; then
				python3="1"
				python_versions=("${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}")
				python3_minimal_version="${minimal_version}"
				python3_maximal_version="${maximal_version}"
			else
				python_all="1"
				python_versions=("${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}" "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}")
				python_minimal_version="${minimal_version}"
				python_maximal_version="${maximal_version}"
			fi

			if [[ -n "${minimal_version}" ]] && ! has "${minimal_version}" "${python_versions[@]}"; then
				die "Invalid syntax of PYTHON_DEPEND: Unrecognized minimal version '${minimal_version}'"
			fi
			if [[ -n "${maximal_version}" ]] && ! has "${maximal_version}" "${python_versions[@]}"; then
				die "Invalid syntax of PYTHON_DEPEND: Unrecognized maximal version '${maximal_version}'"
			fi

			if [[ -n "${minimal_version}" && -n "${maximal_version}" && "${minimal_version}" > "${maximal_version}" ]]; then
				die "Invalid syntax of PYTHON_DEPEND: Minimal version '${minimal_version}' greater than maximal version '${maximal_version}'"
			fi
		done <<< "${version_components_groups}"

		_PYTHON_ATOMS=()

		# Append "=dev-lang/python-X.Y*" atoms for every version in the
		# [python_minimal_version, python_maximal_version] range,
		# iterating high-to-low over python_versions.
		# NOTE(review): the loop starts at ${#python_versions[@]}, one
		# past the last index; that extra iteration compares an unset
		# element and appears harmless — verify intent upstream.
		_append_accepted_versions_range() {
			local accepted_version="0" i
			for ((i = "${#python_versions[@]}"; i >= 0; i--)); do
				if [[ "${python_versions[${i}]}" == "${python_maximal_version}" ]]; then
					accepted_version="1"
				fi
				if [[ "${accepted_version}" == "1" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-${python_versions[${i}]}*")
				fi
				if [[ "${python_versions[${i}]}" == "${python_minimal_version}" ]]; then
					accepted_version="0"
				fi
			done
		}

		if [[ "${python_all}" == "1" ]]; then
			if [[ -z "${python_minimal_version}" && -z "${python_maximal_version}" ]]; then
				_PYTHON_ATOMS+=("dev-lang/python")
			else
				python_versions=("${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}" "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}")
				python_minimal_version="${python_minimal_version:-${python_versions[0]}}"
				python_maximal_version="${python_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
				_append_accepted_versions_range
			fi
		else
			if [[ "${python3}" == "1" ]]; then
				if [[ -z "${python3_minimal_version}" && -z "${python3_maximal_version}" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-3*")
				else
					python_versions=("${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}")
					python_minimal_version="${python3_minimal_version:-${python_versions[0]}}"
					python_maximal_version="${python3_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
					_append_accepted_versions_range
				fi
			fi
			if [[ "${python2}" == "1" ]]; then
				if [[ -z "${python2_minimal_version}" && -z "${python2_maximal_version}" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-2*")
				else
					python_versions=("${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}")
					python_minimal_version="${python2_minimal_version:-${python_versions[0]}}"
					python_maximal_version="${python2_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
					_append_accepted_versions_range
				fi
			fi
		fi

		unset -f _append_accepted_versions_range

		# Emit "|| ( ... )" only when there is more than one atom;
		# wrap in "USE_flag? ( ... )" when a USE prefix was given.
		if [[ "${#_PYTHON_ATOMS[@]}" -gt 1 ]]; then
			DEPEND+="${DEPEND:+ }${USE_flag}${USE_flag:+? ( }|| ( ${_PYTHON_ATOMS[@]} )${USE_flag:+ )}"
			RDEPEND+="${RDEPEND:+ }${USE_flag}${USE_flag:+? ( }|| ( ${_PYTHON_ATOMS[@]} )${USE_flag:+ )}"
		else
			DEPEND+="${DEPEND:+ }${USE_flag}${USE_flag:+? ( }${_PYTHON_ATOMS[@]}${USE_flag:+ )}"
			RDEPEND+="${RDEPEND:+ }${USE_flag}${USE_flag:+? ( }${_PYTHON_ATOMS[@]}${USE_flag:+ )}"
		fi
	else
		die "Invalid syntax of PYTHON_DEPEND"
	fi
}
+
# Python implementations themselves depend on eselect-python and the
# updater tool.
if _python_implementation; then
	DEPEND=">=app-eselect/eselect-python-20091230"
	RDEPEND="${DEPEND}"
	PDEPEND="app-admin/python-updater"
fi

# Populate _PYTHON_ATOMS either from PYTHON_DEPEND or with the
# unrestricted default atom.
if [[ -n "${PYTHON_DEPEND}" ]]; then
	_python_parse_PYTHON_DEPEND
else
	_PYTHON_ATOMS=("dev-lang/python")
fi
unset -f _python_parse_PYTHON_DEPEND

if [[ -n "${NEED_PYTHON}" ]]; then
	eerror "Use PYTHON_DEPEND variable instead of NEED_PYTHON variable."
	die "NEED_PYTHON variable is banned"
fi

# @ECLASS-VARIABLE: PYTHON_USE_WITH
# @DESCRIPTION:
# Set this to a space separated list of USE flags the Python slot in use must be built with.

# @ECLASS-VARIABLE: PYTHON_USE_WITH_OR
# @DESCRIPTION:
# Set this to a space separated list of USE flags of which one must be turned on for the slot in use.

# @ECLASS-VARIABLE: PYTHON_USE_WITH_OPT
# @DESCRIPTION:
# Set this to a name of a USE flag if you need to make either PYTHON_USE_WITH or
# PYTHON_USE_WITH_OR atoms conditional under a USE flag.

# Decorate the computed atoms with USE requirements (EAPI >=2 only).
if ! has "${EAPI:-0}" 0 1 && [[ -n ${PYTHON_USE_WITH} || -n ${PYTHON_USE_WITH_OR} ]]; then
	_PYTHON_USE_WITH_ATOMS_ARRAY=()
	if [[ -n "${PYTHON_USE_WITH}" ]]; then
		# All listed flags required at once: one USE-dep per atom.
		for _PYTHON_ATOM in "${_PYTHON_ATOMS[@]}"; do
			_PYTHON_USE_WITH_ATOMS_ARRAY+=("${_PYTHON_ATOM}[${PYTHON_USE_WITH// /,}]")
		done
	elif [[ -n "${PYTHON_USE_WITH_OR}" ]]; then
		# Any single flag suffices: one atom variant per flag.
		for _USE_flag in ${PYTHON_USE_WITH_OR}; do
			for _PYTHON_ATOM in "${_PYTHON_ATOMS[@]}"; do
				_PYTHON_USE_WITH_ATOMS_ARRAY+=("${_PYTHON_ATOM}[${_USE_flag}]")
			done
		done
		unset _USE_flag
	fi
	if [[ "${#_PYTHON_USE_WITH_ATOMS_ARRAY[@]}" -gt 1 ]]; then
		_PYTHON_USE_WITH_ATOMS="|| ( ${_PYTHON_USE_WITH_ATOMS_ARRAY[@]} )"
	else
		_PYTHON_USE_WITH_ATOMS="${_PYTHON_USE_WITH_ATOMS_ARRAY[@]}"
	fi
	if [[ -n "${PYTHON_USE_WITH_OPT}" ]]; then
		_PYTHON_USE_WITH_ATOMS="${PYTHON_USE_WITH_OPT}? ( ${_PYTHON_USE_WITH_ATOMS} )"
	fi
	DEPEND+="${DEPEND:+ }${_PYTHON_USE_WITH_ATOMS}"
	RDEPEND+="${RDEPEND:+ }${_PYTHON_USE_WITH_ATOMS}"
	unset _PYTHON_ATOM _PYTHON_USE_WITH_ATOMS _PYTHON_USE_WITH_ATOMS_ARRAY
fi

unset _PYTHON_ATOMS
+
+# ================================================================================================
+# =================================== MISCELLANEOUS FUNCTIONS ====================================
+# ================================================================================================
+
# True when some caller further up the stack (FUNCNAME[@]:2 skips this
# helper and its direct caller) is one of the functions that iterate
# over Python ABIs.
_python_abi-specific_local_scope() {
	[[ " ${FUNCNAME[@]:2} " =~ " "(_python_final_sanity_checks|python_execute_function|python_mod_optimize|python_mod_cleanup)" " ]]
}
+
# Derive the prefix-aware EROOT/ED from ROOT/D + EPREFIX in EAPIs 0-2,
# which do not define those variables themselves.
_python_initialize_prefix_variables() {
	if has "${EAPI:-0}" 0 1 2; then
		if [[ -n "${ROOT}" && -z "${EROOT}" ]]; then
			EROOT="${ROOT%/}${EPREFIX}/"
		fi
		if [[ -n "${D}" && -z "${ED}" ]]; then
			ED="${D%/}${EPREFIX}/"
		fi
	fi
}
+
unset PYTHON_SANITY_CHECKS_EXECUTED PYTHON_SKIP_SANITY_CHECKS

# Verify that the eselect-python managed 'python'/'python-config'
# wrappers are intact. Skipped once PYTHON_SANITY_CHECKS_EXECUTED is
# set (unless called from python_set_active_version()/python_pkg_setup()
# with PYTHON_SKIP_SANITY_CHECKS unset).
_python_initial_sanity_checks() {
	if [[ "$(declare -p PYTHON_SANITY_CHECKS_EXECUTED 2> /dev/null)" != "declare -- PYTHON_SANITY_CHECKS_EXECUTED="* || " ${FUNCNAME[@]:1} " =~ " "(python_set_active_version|python_pkg_setup)" " && -z "${PYTHON_SKIP_SANITY_CHECKS}" ]]; then
		# Ensure that /usr/bin/python and /usr/bin/python-config are valid.
		if [[ "$(readlink "${EPREFIX}/usr/bin/python")" != "python-wrapper" ]]; then
			eerror "'${EPREFIX}/usr/bin/python' is not a valid symlink."
			eerror "Use \`eselect python set \${python_interpreter}\` to fix this problem."
			die "'${EPREFIX}/usr/bin/python' is not a valid symlink"
		fi
		if [[ "$(<"${EPREFIX}/usr/bin/python-config")" != *"Gentoo python-config wrapper script"* ]]; then
			eerror "'${EPREFIX}/usr/bin/python-config' is not a valid script"
			eerror "Use \`eselect python set \${python_interpreter}\` to fix this problem."
			die "'${EPREFIX}/usr/bin/python-config' is not a valid script"
		fi
	fi
}
+
# For every selected Python ABI, verify the implementation package is
# installed and that the 'python' wrapper honours EPYTHON. Marks
# completion via PYTHON_SANITY_CHECKS_EXECUTED. Skipped entirely when
# building a Python implementation itself.
_python_final_sanity_checks() {
	if ! _python_implementation && [[ "$(declare -p PYTHON_SANITY_CHECKS_EXECUTED 2> /dev/null)" != "declare -- PYTHON_SANITY_CHECKS_EXECUTED="* || " ${FUNCNAME[@]:1} " =~ " "(python_set_active_version|python_pkg_setup)" " && -z "${PYTHON_SKIP_SANITY_CHECKS}" ]]; then
		local PYTHON_ABI="${PYTHON_ABI}"
		# Fall back to the single active ABI when PYTHON_ABIS is unset.
		for PYTHON_ABI in ${PYTHON_ABIS-${PYTHON_ABI}}; do
			# Ensure that appropriate version of Python is installed.
			if ! has_version "$(python_get_implementational_package)"; then
				die "$(python_get_implementational_package) is not installed"
			fi

			# Ensure that EPYTHON variable is respected.
			if [[ "$(EPYTHON="$(PYTHON)" python -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")" != "${PYTHON_ABI}" ]]; then
				eerror "Path to 'python': '$(type -p python)'"
				eerror "ABI: '${ABI}'"
				eerror "DEFAULT_ABI: '${DEFAULT_ABI}'"
				eerror "EPYTHON: '$(PYTHON)'"
				eerror "PYTHON_ABI: '${PYTHON_ABI}'"
				eerror "Locally active version of Python: '$(EPYTHON="$(PYTHON)" python -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")'"
				die "'python' does not respect EPYTHON variable"
			fi
		done
	fi
	PYTHON_SANITY_CHECKS_EXECUTED="1"
}
+
+# @ECLASS-VARIABLE: PYTHON_COLORS
+# @DESCRIPTION:
+# User-configurable colored output.
+PYTHON_COLORS="${PYTHON_COLORS:-0}"
+
# Populate the _BOLD/_RED/_GREEN/_BLUE/_CYAN/_NORMAL escape-sequence variables.
# Colors are emitted only when PYTHON_COLORS is enabled (non-"0") and NOCOLOR
# is "false"/"no" (or unset); otherwise all variables are set to empty strings.
_python_set_color_variables() {
	local colors_enabled="0"

	if [[ "${PYTHON_COLORS}" != "0" ]]; then
		case "${NOCOLOR:-false}" in
			false|no)
				colors_enabled="1"
				;;
		esac
	fi

	if [[ "${colors_enabled}" == "1" ]]; then
		_BOLD=$'\e[1m'
		_RED=$'\e[1;31m'
		_GREEN=$'\e[1;32m'
		_BLUE=$'\e[1;34m'
		_CYAN=$'\e[1;36m'
		_NORMAL=$'\e[0m'
	else
		_BOLD=""
		_RED=""
		_GREEN=""
		_BLUE=""
		_CYAN=""
		_NORMAL=""
	fi
}
+
# Die if python_pkg_setup() has not been run yet in an EAPI where it is
# mandatory (EAPI >=4). Returns immediately when called (directly or
# indirectly) from python_set_active_version() or python_pkg_setup() itself.
_python_check_python_pkg_setup_execution() {
	local caller
	for caller in "${FUNCNAME[@]:1}"; do
		case "${caller}" in
			python_set_active_version|python_pkg_setup)
				return 0
				;;
		esac
	done

	case "${EAPI:-0}" in
		0|1|2|3)
			# python_pkg_setup() is optional in these EAPIs.
			;;
		*)
			if [[ -z "${PYTHON_PKG_SETUP_EXECUTED}" ]]; then
				die "python_pkg_setup() not called"
			fi
			;;
	esac
}
+
# @FUNCTION: python_pkg_setup
# @DESCRIPTION:
# Perform sanity checks and initialize environment.
#
# This function is exported in EAPI 2 and 3 when PYTHON_USE_WITH or PYTHON_USE_WITH_OR variable
# is set and always in EAPI >=4. Calling of this function is mandatory in EAPI >=4.
#
# Sets EPYTHON (multi-ABI packages) or PYTHON_ABI (single-ABI packages), and
# verifies that the installed Python implementation(s) were built with the USE
# flags listed in PYTHON_USE_WITH (all required) / PYTHON_USE_WITH_OR (at least
# one required). PYTHON_USE_WITH_OPT names a USE flag of this package that
# gates those checks (a leading "!" inverts the gate).
python_pkg_setup() {
	if [[ "${EBUILD_PHASE}" != "setup" ]]; then
		die "${FUNCNAME}() can be used only in pkg_setup() phase"
	fi

	if [[ "$#" -ne 0 ]]; then
		die "${FUNCNAME}() does not accept arguments"
	fi

	# Keep Jython's class cache inside a sandbox-writable system location.
	export JYTHON_SYSTEM_CACHEDIR="1"
	addwrite "${EPREFIX}/var/cache/jython"

	if _python_package_supporting_installation_for_multiple_python_abis; then
		_python_calculate_PYTHON_ABIS
		# EPYTHON defaults to the interpreter of the final (preferred) ABI.
		export EPYTHON="$(PYTHON -f)"
	else
		PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
	fi

	if ! has "${EAPI:-0}" 0 1 && [[ -n "${PYTHON_USE_WITH}" || -n "${PYTHON_USE_WITH_OR}" ]]; then
		if [[ "${PYTHON_USE_WITH_OPT}" ]]; then
			# Skip the USE-flag checks when the gating flag says they do not apply.
			if [[ "${PYTHON_USE_WITH_OPT}" == !* ]]; then
				use ${PYTHON_USE_WITH_OPT#!} && return
			else
				use !${PYTHON_USE_WITH_OPT} && return
			fi
		fi

		python_pkg_setup_check_USE_flags() {
			local python_atom USE_flag
			python_atom="$(python_get_implementational_package)"

			# Every flag in PYTHON_USE_WITH must be enabled on the installed interpreter.
			for USE_flag in ${PYTHON_USE_WITH}; do
				if ! has_version "${python_atom}[${USE_flag}]"; then
					eerror "Please rebuild ${python_atom} with the following USE flags enabled: ${PYTHON_USE_WITH}"
					die "Please rebuild ${python_atom} with the following USE flags enabled: ${PYTHON_USE_WITH}"
				fi
			done

			# At least one flag in PYTHON_USE_WITH_OR must be enabled.
			for USE_flag in ${PYTHON_USE_WITH_OR}; do
				if has_version "${python_atom}[${USE_flag}]"; then
					return
				fi
			done

			if [[ ${PYTHON_USE_WITH_OR} ]]; then
				eerror "Please rebuild ${python_atom} with at least one of the following USE flags enabled: ${PYTHON_USE_WITH_OR}"
				die "Please rebuild ${python_atom} with at least one of the following USE flags enabled: ${PYTHON_USE_WITH_OR}"
			fi
		}

		if _python_package_supporting_installation_for_multiple_python_abis; then
			# Check every enabled ABI; sanity checks already ran above.
			PYTHON_SKIP_SANITY_CHECKS="1" python_execute_function -q python_pkg_setup_check_USE_flags
		else
			python_pkg_setup_check_USE_flags
		fi

		unset -f python_pkg_setup_check_USE_flags
	fi

	PYTHON_PKG_SETUP_EXECUTED="1"
}
+
+_PYTHON_SHEBANG_BASE_PART_REGEX='^#![[:space:]]*([^[:space:]]*/usr/bin/env[[:space:]]+)?([^[:space:]]*/)?(jython|pypy-c|python)'
+
# @FUNCTION: python_convert_shebangs
# @USAGE: [-q|--quiet] [-r|--recursive] [-x|--only-executables] [--] <Python_ABI|Python_version> <file|directory> [files|directories]
# @DESCRIPTION:
# Convert shebangs in specified files. Directories can be specified only with --recursive option.
# The first positional argument is either a Python ABI (resolved through PYTHON())
# or a bare version suffix (producing "python<version>"). Only the first line of
# each file is rewritten; files whose second line marks them as wrapper scripts
# generated by python_generate_wrapper_scripts() are left untouched.
# Warns if no Python script was found among the given files.
python_convert_shebangs() {
	_python_check_python_pkg_setup_execution

	local argument file files=() only_executables="0" python_interpreter quiet="0" recursive="0" shebangs_converted="0"

	while (($#)); do
		case "$1" in
			-r|--recursive)
				recursive="1"
				;;
			-q|--quiet)
				quiet="1"
				;;
			-x|--only-executables)
				only_executables="1"
				;;
			--)
				shift
				break
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				break
				;;
		esac
		shift
	done

	if [[ "$#" -eq 0 ]]; then
		die "${FUNCNAME}(): Missing Python version and files or directories"
	elif [[ "$#" -eq 1 ]]; then
		die "${FUNCNAME}(): Missing files or directories"
	fi

	# A recognized Python ABI is mapped to its interpreter name; anything else
	# is treated as a version suffix for CPython ("python$1").
	if [[ -n "$(_python_get_implementation --ignore-invalid "$1")" ]]; then
		python_interpreter="$(PYTHON "$1")"
	else
		python_interpreter="python$1"
	fi
	shift

	for argument in "$@"; do
		if [[ ! -e "${argument}" ]]; then
			die "${FUNCNAME}(): '${argument}' does not exist"
		elif [[ -f "${argument}" ]]; then
			files+=("${argument}")
		elif [[ -d "${argument}" ]]; then
			if [[ "${recursive}" == "1" ]]; then
				# With -x, restrict the find to files with any execute bit set.
				while read -d $'\0' -r file; do
					files+=("${file}")
				done < <(find "${argument}" $([[ "${only_executables}" == "1" ]] && echo -perm /111) -type f -print0)
			else
				die "${FUNCNAME}(): '${argument}' is not a regular file"
			fi
		else
			die "${FUNCNAME}(): '${argument}' is not a regular file or a directory"
		fi
	done

	for file in "${files[@]}"; do
		file="${file#./}"
		# Explicitly-listed files also honor -x.
		[[ "${only_executables}" == "1" && ! -x "${file}" ]] && continue

		if [[ "$(head -n1 "${file}")" =~ ${_PYTHON_SHEBANG_BASE_PART_REGEX} ]]; then
			# Skip scripts generated by python_generate_wrapper_scripts().
			[[ "$(sed -ne "2p" "${file}")" =~ ^"# Gentoo '".*"' wrapper script generated by python_generate_wrapper_scripts()"$ ]] && continue

			shebangs_converted="1"

			if [[ "${quiet}" == "0" ]]; then
				einfo "Converting shebang in '${file}'"
			fi

			# Rewrite line 1: keep an optional "env" prefix (\1) and path prefix (\2),
			# replace interpreter name and optional version, keep trailing options (\6).
			sed -e "1s:^#![[:space:]]*\([^[:space:]]*/usr/bin/env[[:space:]]\)\?[[:space:]]*\([^[:space:]]*/\)\?\(jython\|pypy-c\|python\)\([[:digit:]]\+\(\.[[:digit:]]\+\)\?\)\?\(\$\|[[:space:]].*\):#!\1\2${python_interpreter}\6:" -i "${file}" || die "Conversion of shebang in '${file}' failed"
		fi
	done

	if [[ "${shebangs_converted}" == "0" ]]; then
		ewarn "${FUNCNAME}(): Python scripts not found"
	fi
}
+
# @FUNCTION: python_clean_py-compile_files
# @USAGE: [-q|--quiet]
# @DESCRIPTION:
# Clean py-compile files to disable byte-compilation.
# Every "py-compile" file found below the current directory is replaced with a
# no-op "#!/bin/sh" stub.
python_clean_py-compile_files() {
	_python_check_python_pkg_setup_execution

	local path quiet="0"

	while (($#)); do
		case "$1" in
			-q|--quiet)
				quiet="1"
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				die "${FUNCNAME}(): Invalid usage"
				;;
		esac
		shift
	done

	# Neutralize each py-compile helper in place.
	while read -d $'\0' -r path; do
		path="${path#./}"
		if [[ "${quiet}" == "0" ]]; then
			einfo "Cleaning '${path}' file"
		fi
		echo "#!/bin/sh" > "${path}"
	done < <(find -name py-compile -type f -print0)
}
+
# @FUNCTION: python_clean_installation_image
# @USAGE: [-q|--quiet]
# @DESCRIPTION:
# Delete needless files in installation image.
#
# This function can be used only in src_install() phase.
#
# Removes byte-compiled modules (*.pyc, *.pyo, *$py.class) that the build
# system installed into ${ED}, prunes emptied __pycache__ directories, and
# deletes stray *.c/*.h/*.la files from every Python site-packages directory.
python_clean_installation_image() {
	if [[ "${EBUILD_PHASE}" != "install" ]]; then
		die "${FUNCNAME}() can be used only in src_install() phase"
	fi

	_python_check_python_pkg_setup_execution
	_python_initialize_prefix_variables

	local file files=() quiet="0"

	while (($#)); do
		case "$1" in
			-q|--quiet)
				quiet="1"
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				die "${FUNCNAME}(): Invalid usage"
				;;
		esac
		shift
	done

	# Collect all byte-compiled files installed into the image.
	while read -d $'\0' -r file; do
		files+=("${file}")
	done < <(find "${ED}" "(" -name "*.py[co]" -o -name "*\$py.class" ")" -type f -print0)

	if [[ "${#files[@]}" -gt 0 ]]; then
		if [[ "${quiet}" == "0" ]]; then
			ewarn "Deleting byte-compiled Python modules needlessly generated by build system:"
		fi
		for file in "${files[@]}"; do
			if [[ "${quiet}" == "0" ]]; then
				ewarn " ${file}"
			fi
			rm -f "${file}"

			# Delete empty __pycache__ directories.
			if [[ "${file%/*}" == *"/__pycache__" ]]; then
				rmdir "${file%/*}" 2> /dev/null
			fi
		done
	fi

	# Per-ABI cleanup of site-packages (sources/headers/libtool archives).
	python_clean_sitedirs() {
		if [[ -d "${ED}$(python_get_sitedir)" ]]; then
			find "${ED}$(python_get_sitedir)" "(" -name "*.c" -o -name "*.h" -o -name "*.la" ")" -type f -print0 | xargs -0 rm -f
		fi
	}
	if _python_package_supporting_installation_for_multiple_python_abis; then
		python_execute_function -q python_clean_sitedirs
	else
		python_clean_sitedirs
	fi

	unset -f python_clean_sitedirs
}
+
+# ================================================================================================
+# =========== FUNCTIONS FOR PACKAGES SUPPORTING INSTALLATION FOR MULTIPLE PYTHON ABIS ============
+# ================================================================================================
+
+# @ECLASS-VARIABLE: SUPPORT_PYTHON_ABIS
+# @DESCRIPTION:
+# Set this in EAPI <= 4 to indicate that current package supports installation for
+# multiple Python ABIs.
+
+# @ECLASS-VARIABLE: PYTHON_TESTS_RESTRICTED_ABIS
+# @DESCRIPTION:
+# Space-separated list of Python ABI patterns. Testing in Python ABIs matching any Python ABI
+# patterns specified in this list is skipped.
+
+# @ECLASS-VARIABLE: PYTHON_EXPORT_PHASE_FUNCTIONS
+# @DESCRIPTION:
+# Set this to export phase functions for the following ebuild phases:
+# src_prepare(), src_configure(), src_compile(), src_test(), src_install().
# Phase functions for multi-ABI packages are available in EAPI >=2 only.
if ! has "${EAPI:-0}" 0 1; then
	# Copies the unpacked sources once per enabled Python ABI.
	python_src_prepare() {
		if [[ "${EBUILD_PHASE}" != "prepare" ]]; then
			die "${FUNCNAME}() can be used only in src_prepare() phase"
		fi

		if ! _python_package_supporting_installation_for_multiple_python_abis; then
			die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
		fi

		_python_check_python_pkg_setup_execution

		if [[ "$#" -ne 0 ]]; then
			die "${FUNCNAME}() does not accept arguments"
		fi

		python_copy_sources
	}

	# Generate python_src_configure()/python_src_compile()/python_src_test():
	# each runs the phase's default function (-d) in the per-ABI build
	# directories (-s). The eval template expands ${python_default_function}
	# at definition time; \"-escaped expansions are deferred to run time.
	for python_default_function in src_configure src_compile src_test; do
		eval "python_${python_default_function}() {
			if [[ \"\${EBUILD_PHASE}\" != \"${python_default_function#src_}\" ]]; then
				die \"\${FUNCNAME}() can be used only in ${python_default_function}() phase\"
			fi

			if ! _python_package_supporting_installation_for_multiple_python_abis; then
				die \"\${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs\"
			fi

			_python_check_python_pkg_setup_execution

			python_execute_function -d -s -- \"\$@\"
		}"
	done
	unset python_default_function

	python_src_install() {
		if [[ "${EBUILD_PHASE}" != "install" ]]; then
			die "${FUNCNAME}() can be used only in src_install() phase"
		fi

		if ! _python_package_supporting_installation_for_multiple_python_abis; then
			die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
		fi

		_python_check_python_pkg_setup_execution

		if has "${EAPI:-0}" 0 1 2 3; then
			python_execute_function -d -s -- "$@"
		else
			# EAPI >=4: install each ABI into its own intermediate image,
			# then merge the images into ${D}.
			python_installation() {
				emake DESTDIR="${T}/images/${PYTHON_ABI}" install "$@"
			}
			python_execute_function -s python_installation "$@"
			unset python_installation

			python_merge_intermediate_installation_images "${T}/images"
		fi
	}

	if [[ -n "${PYTHON_EXPORT_PHASE_FUNCTIONS}" ]]; then
		EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
	fi
fi
+
# PYTHON_ABIS is computed lazily; start from a clean slate.
unset PYTHON_ABIS

# Compute and export PYTHON_ABIS, the space-separated list of Python ABIs the
# package will be built for. If USE_PYTHON is set, it is validated against the
# globally supported ABIs and filtered by RESTRICT_PYTHON_ABIS; otherwise the
# list is derived from the active CPython 2 and CPython 3 interpreters.
# The result is cached (recomputed only while PYTHON_ABIS is not exported).
_python_calculate_PYTHON_ABIS() {
	if ! _python_package_supporting_installation_for_multiple_python_abis; then
		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
	fi

	_python_initial_sanity_checks

	# Only recompute when PYTHON_ABIS has not been exported yet.
	if [[ "$(declare -p PYTHON_ABIS 2> /dev/null)" != "declare -x PYTHON_ABIS="* ]]; then
		local PYTHON_ABI

		if [[ "$(declare -p USE_PYTHON 2> /dev/null)" == "declare -x USE_PYTHON="* ]]; then
			# User selected ABIs explicitly via USE_PYTHON.
			local cpython_enabled="0"

			if [[ -z "${USE_PYTHON}" ]]; then
				die "USE_PYTHON variable is empty"
			fi

			for PYTHON_ABI in ${USE_PYTHON}; do
				if ! has "${PYTHON_ABI}" "${_PYTHON_GLOBALLY_SUPPORTED_ABIS[@]}"; then
					die "USE_PYTHON variable contains invalid value '${PYTHON_ABI}'"
				fi

				# At least one CPython ABI must remain enabled overall.
				if has "${PYTHON_ABI}" "${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}" "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}"; then
					cpython_enabled="1"
				fi

				# Drop ABIs restricted by the package.
				if ! _python_check_python_abi_matching --patterns-list "${PYTHON_ABI}" "${RESTRICT_PYTHON_ABIS}"; then
					export PYTHON_ABIS+="${PYTHON_ABIS:+ }${PYTHON_ABI}"
				fi
			done

			if [[ -z "${PYTHON_ABIS//[${IFS}]/}" ]]; then
				die "USE_PYTHON variable does not enable any Python ABI supported by ${CATEGORY}/${PF}"
			fi

			if [[ "${cpython_enabled}" == "0" ]]; then
				die "USE_PYTHON variable does not enable any CPython ABI"
			fi
		else
			# No USE_PYTHON: fall back to the active CPython 2/3 versions.
			local python_version python2_version python3_version support_python_major_version

			if ! has_version "dev-lang/python"; then
				die "${FUNCNAME}(): 'dev-lang/python' is not installed"
			fi

			python_version="$("${EPREFIX}/usr/bin/python" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

			if has_version "=dev-lang/python-2*"; then
				if [[ "$(readlink "${EPREFIX}/usr/bin/python2")" != "python2."* ]]; then
					die "'${EPREFIX}/usr/bin/python2' is not a valid symlink"
				fi

				python2_version="$("${EPREFIX}/usr/bin/python2" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

				# Use CPython 2 only if the package supports some 2.x ABI...
				support_python_major_version="0"
				for PYTHON_ABI in "${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}"; do
					if ! _python_check_python_abi_matching --patterns-list "${PYTHON_ABI}" "${RESTRICT_PYTHON_ABIS}"; then
						support_python_major_version="1"
						break
					fi
				done
				# ...and the *active* 2.x version is among them.
				if [[ "${support_python_major_version}" == "1" ]]; then
					if _python_check_python_abi_matching --patterns-list "${python2_version}" "${RESTRICT_PYTHON_ABIS}"; then
						die "Active version of CPython 2 is not supported by ${CATEGORY}/${PF}"
					fi
				else
					python2_version=""
				fi
			fi

			if has_version "=dev-lang/python-3*"; then
				if [[ "$(readlink "${EPREFIX}/usr/bin/python3")" != "python3."* ]]; then
					die "'${EPREFIX}/usr/bin/python3' is not a valid symlink"
				fi

				python3_version="$("${EPREFIX}/usr/bin/python3" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

				# Same check for CPython 3.
				support_python_major_version="0"
				for PYTHON_ABI in "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}"; do
					if ! _python_check_python_abi_matching --patterns-list "${PYTHON_ABI}" "${RESTRICT_PYTHON_ABIS}"; then
						support_python_major_version="1"
						break
					fi
				done
				if [[ "${support_python_major_version}" == "1" ]]; then
					if _python_check_python_abi_matching --patterns-list "${python3_version}" "${RESTRICT_PYTHON_ABIS}"; then
						die "Active version of CPython 3 is not supported by ${CATEGORY}/${PF}"
					fi
				else
					python3_version=""
				fi
			fi

			if [[ -z "${python2_version}" && -z "${python3_version}" ]]; then
				eerror "${CATEGORY}/${PF} requires at least one of the following packages:"
				for PYTHON_ABI in "${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}" "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}"; do
					if ! _python_check_python_abi_matching --patterns-list "${PYTHON_ABI}" "${RESTRICT_PYTHON_ABIS}"; then
						eerror "        dev-lang/python:${PYTHON_ABI}"
					fi
				done
				die "No supported version of CPython installed"
			fi

			# The main python wrapper must agree with the python2/python3 symlinks.
			if [[ -n "${python2_version}" && "${python_version}" == "2."* && "${python_version}" != "${python2_version}" ]]; then
				eerror "Python wrapper is configured incorrectly or '${EPREFIX}/usr/bin/python2' symlink"
				eerror "is set incorrectly. Use \`eselect python\` to fix configuration."
				die "Incorrect configuration of Python"
			fi
			if [[ -n "${python3_version}" && "${python_version}" == "3."* && "${python_version}" != "${python3_version}" ]]; then
				eerror "Python wrapper is configured incorrectly or '${EPREFIX}/usr/bin/python3' symlink"
				eerror "is set incorrectly. Use \`eselect python\` to fix configuration."
				die "Incorrect configuration of Python"
			fi

			# Join, trimming the separator when either version is empty.
			PYTHON_ABIS="${python2_version} ${python3_version}"
			PYTHON_ABIS="${PYTHON_ABIS# }"
			export PYTHON_ABIS="${PYTHON_ABIS% }"
		fi
	fi

	_python_final_sanity_checks
}
+
# Apply per-ABI compiler/linker flag adjustments before running an ABI-specific
# function. For each of CPPFLAGS/CFLAGS/CXXFLAGS/LDFLAGS, the current value is
# saved in _PYTHON_SAVED_<variable> (restored by _python_restore_flags()), and
# the PYTHON_USER_<variable> / PYTHON_<variable> arrays are processed. Each
# array element has the form "<ABI_pattern> <+|-> <flags>": "+" appends the
# flags, "-" removes matching flags, for ABIs matching the pattern.
_python_prepare_flags() {
	local array=() deleted_flag element flags new_value old_flag old_value operator pattern prefix variable

	for variable in CPPFLAGS CFLAGS CXXFLAGS LDFLAGS; do
		# Save the original value for _python_restore_flags().
		eval "_PYTHON_SAVED_${variable}=\"\${!variable}\""
		for prefix in PYTHON_USER_ PYTHON_; do
			if [[ "$(declare -p ${prefix}${variable} 2> /dev/null)" == "declare -a ${prefix}${variable}="* ]]; then
				eval "array=(\"\${${prefix}${variable}[@]}\")"
				for element in "${array[@]}"; do
					if [[ "${element}" =~ ^${_PYTHON_ABI_PATTERN_REGEX}\ (\+|-)\ .+ ]]; then
						pattern="${element%% *}"
						element="${element#* }"
						operator="${element%% *}"
						flags="${element#* }"
						if _python_check_python_abi_matching "${PYTHON_ABI}" "${pattern}"; then
							if [[ "${operator}" == "+" ]]; then
								# Append the flags, inserting a separator space only
								# when the target variable already has a value.
								# (Fixed: previously tested the literal name "variable"
								# via ${variable:+ }, which is always non-empty and thus
								# always prepended a space.)
								eval "export ${variable}+=\"\${${variable}:+ }${flags}\""
							elif [[ "${operator}" == "-" ]]; then
								# Remove every existing flag matched by any of the
								# (glob) patterns listed in ${flags}.
								flags="${flags// /$'\n'}"
								old_value="${!variable// /$'\n'}"
								new_value=""
								while read old_flag; do
									while read deleted_flag; do
										# Unquoted RHS: deleted_flag acts as a glob pattern.
										if [[ "${old_flag}" == ${deleted_flag} ]]; then
											continue 2
										fi
									done <<< "${flags}"
									new_value+="${new_value:+ }${old_flag}"
								done <<< "${old_value}"
								eval "export ${variable}=\"\${new_value}\""
							fi
						fi
					else
						die "Element '${element}' of ${prefix}${variable} array has invalid syntax"
					fi
				done
			elif [[ -n "$(declare -p ${prefix}${variable} 2> /dev/null)" ]]; then
				die "${prefix}${variable} should be indexed array"
			fi
		done
	done
}
+
# Undo the adjustments made by _python_prepare_flags(): restore each of
# CPPFLAGS/CFLAGS/CXXFLAGS/LDFLAGS from its _PYTHON_SAVED_<variable> copy and
# discard the saved copy.
_python_restore_flags() {
	local variable saved_name

	for variable in CPPFLAGS CFLAGS CXXFLAGS LDFLAGS; do
		saved_name="_PYTHON_SAVED_${variable}"
		printf -v "${variable}" '%s' "${!saved_name}"
		unset "${saved_name}"
	done
}
+
# @FUNCTION: python_execute_function
# @USAGE: [--action-message message] [-d|--default-function] [--failure-message message] [-f|--final-ABI] [--nonfatal] [-q|--quiet] [-s|--separate-build-dirs] [--source-dir source_directory] [--] <function> [arguments]
# @DESCRIPTION:
# Execute specified function for each value of PYTHON_ABIS, optionally passing additional
# arguments. The specified function can use PYTHON_ABI and BUILDDIR variables.
#
# --action-message / --failure-message take templates which are eval'ed per
#   iteration (so they may reference e.g. ${PYTHON_ABI}).
# -d runs a phase-appropriate default function instead of a named one (EAPI >=2).
# -f runs the function only for the final (preferred) Python ABI.
# --nonfatal reports failures as warnings instead of dying.
# -s executes inside the per-ABI build directories created by python_copy_sources().
python_execute_function() {
	if ! _python_package_supporting_installation_for_multiple_python_abis; then
		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
	fi

	_python_check_python_pkg_setup_execution
	_python_set_color_variables

	local action action_message action_message_template default_function="0" failure_message failure_message_template final_ABI="0" function iterated_PYTHON_ABIS nonfatal="0" previous_directory previous_directory_stack previous_directory_stack_length PYTHON_ABI quiet="0" return_code separate_build_dirs="0" source_dir

	while (($#)); do
		case "$1" in
			--action-message)
				action_message_template="$2"
				shift
				;;
			-d|--default-function)
				default_function="1"
				;;
			--failure-message)
				failure_message_template="$2"
				shift
				;;
			-f|--final-ABI)
				final_ABI="1"
				;;
			--nonfatal)
				nonfatal="1"
				;;
			-q|--quiet)
				quiet="1"
				;;
			-s|--separate-build-dirs)
				separate_build_dirs="1"
				;;
			--source-dir)
				source_dir="$2"
				shift
				;;
			--)
				shift
				break
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				break
				;;
		esac
		shift
	done

	if [[ -n "${source_dir}" && "${separate_build_dirs}" == 0 ]]; then
		die "${FUNCNAME}(): '--source-dir' option can be specified only with '--separate-build-dirs' option"
	fi

	if [[ "${default_function}" == "0" ]]; then
		# A named function must be supplied and already defined.
		if [[ "$#" -eq 0 ]]; then
			die "${FUNCNAME}(): Missing function name"
		fi
		function="$1"
		shift

		if [[ -z "$(type -t "${function}")" ]]; then
			die "${FUNCNAME}(): '${function}' function is not defined"
		fi
	else
		# -d: synthesize a default function appropriate to the current phase.
		if has "${EAPI:-0}" 0 1; then
			die "${FUNCNAME}(): '--default-function' option cannot be used in this EAPI"
		fi

		if [[ "${EBUILD_PHASE}" == "configure" ]]; then
			if has "${EAPI}" 2 3; then
				python_default_function() {
					econf "$@"
				}
			else
				# EAPI >=4: econf dies on its own; wrap in nonfatal so the
				# per-ABI failure handling below stays in control.
				python_default_function() {
					nonfatal econf "$@"
				}
			fi
		elif [[ "${EBUILD_PHASE}" == "compile" ]]; then
			python_default_function() {
				emake "$@"
			}
		elif [[ "${EBUILD_PHASE}" == "test" ]]; then
			python_default_function() {
				# Stolen from portage's _eapi0_src_test()
				local emake_cmd="${MAKE:-make} ${MAKEOPTS} ${EXTRA_EMAKE}"
				if ${emake_cmd} -j1 -n check &> /dev/null; then
					${emake_cmd} -j1 check "$@"
				elif ${emake_cmd} -j1 -n test &> /dev/null; then
					${emake_cmd} -j1 test "$@"
				fi
			}
		elif [[ "${EBUILD_PHASE}" == "install" ]]; then
			python_default_function() {
				emake DESTDIR="${D}" install "$@"
			}
		else
			die "${FUNCNAME}(): '--default-function' option cannot be used in this ebuild phase"
		fi
		function="python_default_function"
	fi

	# Ensure that python_execute_function() cannot be directly or indirectly called by python_execute_function().
	if _python_abi-specific_local_scope; then
		die "${FUNCNAME}(): Invalid call stack"
	fi

	if [[ "${quiet}" == "0" ]]; then
		# Human-readable verb for the progress message, per ebuild phase.
		[[ "${EBUILD_PHASE}" == "setup" ]] && action="Setting up"
		[[ "${EBUILD_PHASE}" == "unpack" ]] && action="Unpacking"
		[[ "${EBUILD_PHASE}" == "prepare" ]] && action="Preparation"
		[[ "${EBUILD_PHASE}" == "configure" ]] && action="Configuration"
		[[ "${EBUILD_PHASE}" == "compile" ]] && action="Building"
		[[ "${EBUILD_PHASE}" == "test" ]] && action="Testing"
		[[ "${EBUILD_PHASE}" == "install" ]] && action="Installation"
		[[ "${EBUILD_PHASE}" == "preinst" ]] && action="Preinstallation"
		[[ "${EBUILD_PHASE}" == "postinst" ]] && action="Postinstallation"
		[[ "${EBUILD_PHASE}" == "prerm" ]] && action="Preuninstallation"
		[[ "${EBUILD_PHASE}" == "postrm" ]] && action="Postuninstallation"
	fi

	_python_calculate_PYTHON_ABIS
	if [[ "${final_ABI}" == "1" ]]; then
		iterated_PYTHON_ABIS="$(PYTHON -f --ABI)"
	else
		iterated_PYTHON_ABIS="${PYTHON_ABIS}"
	fi
	for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do
		# Honor per-package test restrictions (PYTHON_TESTS_RESTRICTED_ABIS).
		if [[ "${EBUILD_PHASE}" == "test" ]] && _python_check_python_abi_matching --patterns-list "${PYTHON_ABI}" "${PYTHON_TESTS_RESTRICTED_ABIS}"; then
			if [[ "${quiet}" == "0" ]]; then
				echo " ${_GREEN}*${_NORMAL} ${_BLUE}Testing of ${CATEGORY}/${PF} with $(python_get_implementation_and_version) skipped${_NORMAL}"
			fi
			continue
		fi

		# Apply per-ABI C/CXX/CPP/LD flag adjustments (undone after the call).
		_python_prepare_flags

		if [[ "${quiet}" == "0" ]]; then
			if [[ -n "${action_message_template}" ]]; then
				eval "action_message=\"${action_message_template}\""
			else
				action_message="${action} of ${CATEGORY}/${PF} with $(python_get_implementation_and_version)..."
			fi
			echo " ${_GREEN}*${_NORMAL} ${_BLUE}${action_message}${_NORMAL}"
		fi

		if [[ "${separate_build_dirs}" == "1" ]]; then
			if [[ -n "${source_dir}" ]]; then
				export BUILDDIR="${S}/${source_dir}-${PYTHON_ABI}"
			else
				export BUILDDIR="${S}-${PYTHON_ABI}"
			fi
			pushd "${BUILDDIR}" > /dev/null || die "pushd failed"
		else
			export BUILDDIR="${S}"
		fi

		# Snapshot the working directory and directory stack so that misuse of
		# cd/pushd/popd inside the function can be detected and undone.
		previous_directory="$(pwd)"
		previous_directory_stack="$(dirs -p)"
		previous_directory_stack_length="$(dirs -p | wc -l)"

		# In EAPI >=4, ABIs listed in FAILURE_TOLERANT_PYTHON_ABIS run under
		# portage's nonfatal so die() inside the function becomes non-fatal.
		if ! has "${EAPI}" 0 1 2 3 && has "${PYTHON_ABI}" ${FAILURE_TOLERANT_PYTHON_ABIS}; then
			EPYTHON="$(PYTHON)" nonfatal "${function}" "$@"
		else
			EPYTHON="$(PYTHON)" "${function}" "$@"
		fi

		return_code="$?"

		_python_restore_flags

		if [[ "${return_code}" -ne 0 ]]; then
			if [[ -n "${failure_message_template}" ]]; then
				eval "failure_message=\"${failure_message_template}\""
			else
				failure_message="${action} failed with $(python_get_implementation_and_version) in ${function}() function"
			fi

			if [[ "${nonfatal}" == "1" ]]; then
				if [[ "${quiet}" == "0" ]]; then
					ewarn "${failure_message}"
				fi
			elif [[ "${final_ABI}" == "0" ]] && has "${PYTHON_ABI}" ${FAILURE_TOLERANT_PYTHON_ABIS}; then
				# A failure-tolerant ABI is dropped from PYTHON_ABIS (except when
				# test failures are allowed to continue) instead of aborting.
				if [[ "${EBUILD_PHASE}" != "test" ]] || ! has test-fail-continue ${FEATURES}; then
					local enabled_PYTHON_ABIS= other_PYTHON_ABI
					for other_PYTHON_ABI in ${PYTHON_ABIS}; do
						[[ "${other_PYTHON_ABI}" != "${PYTHON_ABI}" ]] && enabled_PYTHON_ABIS+="${enabled_PYTHON_ABIS:+ }${other_PYTHON_ABI}"
					done
					export PYTHON_ABIS="${enabled_PYTHON_ABIS}"
				fi
				if [[ "${quiet}" == "0" ]]; then
					ewarn "${failure_message}"
				fi
				if [[ -z "${PYTHON_ABIS}" ]]; then
					die "${function}() function failed with all enabled Python ABIs"
				fi
			else
				die "${failure_message}"
			fi
		fi

		# Ensure that directory stack has not been decreased.
		if [[ "$(dirs -p | wc -l)" -lt "${previous_directory_stack_length}" ]]; then
			die "Directory stack decreased illegally"
		fi

		# Avoid side effects of earlier returning from the specified function.
		while [[ "$(dirs -p | wc -l)" -gt "${previous_directory_stack_length}" ]]; do
			popd > /dev/null || die "popd failed"
		done

		# Ensure that the bottom part of directory stack has not been changed. Restore
		# previous directory (from before running of the specified function) before
		# comparison of directory stacks to avoid mismatch of directory stacks after
		# potential using of 'cd' to change current directory. Restoration of previous
		# directory allows to safely use 'cd' to change current directory in the
		# specified function without changing it back to original directory.
		cd "${previous_directory}"
		if [[ "$(dirs -p)" != "${previous_directory_stack}" ]]; then
			die "Directory stack changed illegally"
		fi

		if [[ "${separate_build_dirs}" == "1" ]]; then
			popd > /dev/null || die "popd failed"
		fi
		unset BUILDDIR
	done

	if [[ "${default_function}" == "1" ]]; then
		unset -f python_default_function
	fi
}
+
# @FUNCTION: python_copy_sources
# @USAGE: <directory="${S}"> [directory]
# @DESCRIPTION:
# Copy unpacked sources of current package to separate build directory for each Python ABI.
# With no arguments, ${S} is copied; each directory "dir" is duplicated as
# "dir-${PYTHON_ABI}" for every ABI in PYTHON_ABIS.
python_copy_sources() {
	if ! _python_package_supporting_installation_for_multiple_python_abis; then
		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
	fi

	_python_check_python_pkg_setup_execution

	local directory directories=() PYTHON_ABI

	if [[ "$#" -eq 0 ]]; then
		# Copying ${WORKDIR} itself into per-ABI siblings would be recursive.
		if [[ "${WORKDIR}" == "${S}" ]]; then
			die "${FUNCNAME}() cannot be used with current value of S variable"
		fi
		directories=("${S%/}")
	else
		directories=("$@")
	fi

	_python_calculate_PYTHON_ABIS
	for PYTHON_ABI in ${PYTHON_ABIS}; do
		for directory in "${directories[@]}"; do
			# -p preserves timestamps/permissions so rebuilds stay consistent.
			cp -pr "${directory}" "${directory}-${PYTHON_ABI}" > /dev/null || die "Copying of sources failed"
		done
	done
}
+
+# @FUNCTION: python_generate_wrapper_scripts
+# @USAGE: [-E|--respect-EPYTHON] [-f|--force] [-q|--quiet] [--] <file> [files]
+# @DESCRIPTION:
+# Generate wrapper scripts. Existing files are overwritten only with --force option.
+# If --respect-EPYTHON option is specified, then generated wrapper scripts will
+# respect EPYTHON variable at run time.
+#
+# This function can be used only in src_install() phase.
+python_generate_wrapper_scripts() {
+ if [[ "${EBUILD_PHASE}" != "install" ]]; then
+ die "${FUNCNAME}() can be used only in src_install() phase"
+ fi
+
+ if ! _python_package_supporting_installation_for_multiple_python_abis; then
+ die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+ fi
+
+ _python_check_python_pkg_setup_execution
+ _python_initialize_prefix_variables
+
+ local eselect_python_option file force="0" quiet="0" PYTHON_ABI PYTHON_ABIS_list python2_enabled="0" python3_enabled="0" respect_EPYTHON="0"
+
+ while (($#)); do
+ case "$1" in
+ -E|--respect-EPYTHON)
+ respect_EPYTHON="1"
+ ;;
+ -f|--force)
+ force="1"
+ ;;
+ -q|--quiet)
+ quiet="1"
+ ;;
+ --)
+ shift
+ break
+ ;;
+ -*)
+ die "${FUNCNAME}(): Unrecognized option '$1'"
+ ;;
+ *)
+ break
+ ;;
+ esac
+ shift
+ done
+
+ if [[ "$#" -eq 0 ]]; then
+ die "${FUNCNAME}(): Missing arguments"
+ fi
+
+ _python_calculate_PYTHON_ABIS
+ for PYTHON_ABI in "${_CPYTHON2_GLOBALLY_SUPPORTED_ABIS[@]}"; do
+ if has "${PYTHON_ABI}" ${PYTHON_ABIS}; then
+ python2_enabled="1"
+ fi
+ done
+ for PYTHON_ABI in "${_CPYTHON3_GLOBALLY_SUPPORTED_ABIS[@]}"; do
+ if has "${PYTHON_ABI}" ${PYTHON_ABIS}; then
+ python3_enabled="1"
+ fi
+ done
+
+ if [[ "${python2_enabled}" == "1" && "${python3_enabled}" == "1" ]]; then
+ eselect_python_option=
+ elif [[ "${python2_enabled}" == "1" && "${python3_enabled}" == "0" ]]; then
+ eselect_python_option="--python2"
+ elif [[ "${python2_enabled}" == "0" && "${python3_enabled}" == "1" ]]; then
+ eselect_python_option="--python3"
+ else
+ die "${FUNCNAME}(): Unsupported environment"
+ fi
+
+ PYTHON_ABIS_list="$("$(PYTHON -f)" -c "print(', '.join('\"%s\"' % x for x in reversed('${PYTHON_ABIS}'.split())))")"
+
+ for file in "$@"; do
+ if [[ -f "${file}" && "${force}" == "0" ]]; then
+ die "${FUNCNAME}(): '${file}' already exists"
+ fi
+
+ if [[ "${quiet}" == "0" ]]; then
+ einfo "Generating '${file#${ED%/}}' wrapper script"
+ fi
+
+ cat << EOF > "${file}"
+#!/usr/bin/env python
+# Gentoo '${file##*/}' wrapper script generated by python_generate_wrapper_scripts()
+
+import os
+import re
+import subprocess
+import sys
+
+cpython_ABI_re = re.compile(r"^(\d+\.\d+)$")
+jython_ABI_re = re.compile(r"^(\d+\.\d+)-jython$")
+pypy_ABI_re = re.compile(r"^\d+\.\d+-pypy-(\d+\.\d+)$")
+cpython_interpreter_re = re.compile(r"^python(\d+\.\d+)$")
+jython_interpreter_re = re.compile(r"^jython(\d+\.\d+)$")
+pypy_interpreter_re = re.compile(r"^pypy-c(\d+\.\d+)$")
+cpython_shebang_re = re.compile(r"^#![ \t]*(?:${EPREFIX}/usr/bin/python|(?:${EPREFIX})?/usr/bin/env[ \t]+(?:${EPREFIX}/usr/bin/)?python)")
+python_shebang_options_re = re.compile(r"^#![ \t]*${EPREFIX}/usr/bin/(?:jython|pypy-c|python)(?:\d+(?:\.\d+)?)?[ \t]+(-\S)")
+python_verification_output_re = re.compile("^GENTOO_PYTHON_TARGET_SCRIPT_PATH supported\n$")
+
+#pypy_versions_mapping = {
+# "1.5": "2.7",
+# "1.6": "2.7",
+# "1.7": "2.7",
+# "1.8": "2.7",
+# "1.9": "2.7",
+# "2.0": "2.7",
+#}
+
+def get_PYTHON_ABI(python_interpreter):
+ cpython_matched = cpython_interpreter_re.match(python_interpreter)
+ jython_matched = jython_interpreter_re.match(python_interpreter)
+ pypy_matched = pypy_interpreter_re.match(python_interpreter)
+ if cpython_matched is not None:
+ PYTHON_ABI = cpython_matched.group(1)
+ elif jython_matched is not None:
+ PYTHON_ABI = jython_matched.group(1) + "-jython"
+ elif pypy_matched is not None:
+ #PYTHON_ABI = pypy_versions_mapping[pypy_matched.group(1)] + "-pypy-" + pypy_matched.group(1)
+ PYTHON_ABI = "2.7-pypy-" + pypy_matched.group(1)
+ else:
+ PYTHON_ABI = None
+ return PYTHON_ABI
+
+def get_python_interpreter(PYTHON_ABI):
+ cpython_matched = cpython_ABI_re.match(PYTHON_ABI)
+ jython_matched = jython_ABI_re.match(PYTHON_ABI)
+ pypy_matched = pypy_ABI_re.match(PYTHON_ABI)
+ if cpython_matched is not None:
+ python_interpreter = "python" + cpython_matched.group(1)
+ elif jython_matched is not None:
+ python_interpreter = "jython" + jython_matched.group(1)
+ elif pypy_matched is not None:
+ python_interpreter = "pypy-c" + pypy_matched.group(1)
+ else:
+ python_interpreter = None
+ return python_interpreter
+
+EOF
+ if [[ "$?" != "0" ]]; then
+ die "${FUNCNAME}(): Generation of '$1' failed"
+ fi
+ if [[ "${respect_EPYTHON}" == "1" ]]; then
+ cat << EOF >> "${file}"
+python_interpreter = os.environ.get("EPYTHON")
+if python_interpreter:
+ PYTHON_ABI = get_PYTHON_ABI(python_interpreter)
+ if PYTHON_ABI is None:
+ sys.stderr.write("%s: EPYTHON variable has unrecognized value '%s'\n" % (sys.argv[0], python_interpreter))
+ sys.exit(1)
+else:
+ try:
+ environment = os.environ.copy()
+ environment["ROOT"] = "/"
+ eselect_process = subprocess.Popen(["${EPREFIX}/usr/bin/eselect", "python", "show"${eselect_python_option:+, $(echo "\"")}${eselect_python_option}${eselect_python_option:+$(echo "\"")}], env=environment, stdout=subprocess.PIPE)
+ if eselect_process.wait() != 0:
+ raise ValueError
+ except (OSError, ValueError):
+ sys.stderr.write("%s: Execution of 'eselect python show${eselect_python_option:+ }${eselect_python_option}' failed\n" % sys.argv[0])
+ sys.exit(1)
+
+ python_interpreter = eselect_process.stdout.read()
+ if not isinstance(python_interpreter, str):
+ # Python 3
+ python_interpreter = python_interpreter.decode()
+ python_interpreter = python_interpreter.rstrip("\n")
+
+ PYTHON_ABI = get_PYTHON_ABI(python_interpreter)
+ if PYTHON_ABI is None:
+ sys.stderr.write("%s: 'eselect python show${eselect_python_option:+ }${eselect_python_option}' printed unrecognized value '%s'\n" % (sys.argv[0], python_interpreter))
+ sys.exit(1)
+
+wrapper_script_path = os.path.realpath(sys.argv[0])
+target_executable_path = "%s-%s" % (wrapper_script_path, PYTHON_ABI)
+if not os.path.exists(target_executable_path):
+ sys.stderr.write("%s: '%s' does not exist\n" % (sys.argv[0], target_executable_path))
+ sys.exit(1)
+EOF
+ if [[ "$?" != "0" ]]; then
+ die "${FUNCNAME}(): Generation of '$1' failed"
+ fi
+ else
+ cat << EOF >> "${file}"
+try:
+ environment = os.environ.copy()
+ environment["ROOT"] = "/"
+ eselect_process = subprocess.Popen(["${EPREFIX}/usr/bin/eselect", "python", "show"${eselect_python_option:+, $(echo "\"")}${eselect_python_option}${eselect_python_option:+$(echo "\"")}], env=environment, stdout=subprocess.PIPE)
+ if eselect_process.wait() != 0:
+ raise ValueError
+except (OSError, ValueError):
+ sys.stderr.write("%s: Execution of 'eselect python show${eselect_python_option:+ }${eselect_python_option}' failed\n" % sys.argv[0])
+ sys.exit(1)
+
+python_interpreter = eselect_process.stdout.read()
+if not isinstance(python_interpreter, str):
+ # Python 3
+ python_interpreter = python_interpreter.decode()
+python_interpreter = python_interpreter.rstrip("\n")
+
+PYTHON_ABI = get_PYTHON_ABI(python_interpreter)
+if PYTHON_ABI is None:
+ sys.stderr.write("%s: 'eselect python show${eselect_python_option:+ }${eselect_python_option}' printed unrecognized value '%s'\n" % (sys.argv[0], python_interpreter))
+ sys.exit(1)
+
+wrapper_script_path = os.path.realpath(sys.argv[0])
+for PYTHON_ABI in [PYTHON_ABI, ${PYTHON_ABIS_list}]:
+ target_executable_path = "%s-%s" % (wrapper_script_path, PYTHON_ABI)
+ if os.path.exists(target_executable_path):
+ break
+else:
+ sys.stderr.write("%s: No target script exists for '%s'\n" % (sys.argv[0], wrapper_script_path))
+ sys.exit(1)
+
+python_interpreter = get_python_interpreter(PYTHON_ABI)
+if python_interpreter is None:
+ sys.stderr.write("%s: Unrecognized Python ABI '%s'\n" % (sys.argv[0], PYTHON_ABI))
+ sys.exit(1)
+EOF
+ if [[ "$?" != "0" ]]; then
+ die "${FUNCNAME}(): Generation of '$1' failed"
+ fi
+ fi
+ cat << EOF >> "${file}"
+
+target_executable = open(target_executable_path, "rb")
+target_executable_first_line = target_executable.readline()
+target_executable.close()
+if not isinstance(target_executable_first_line, str):
+ # Python 3
+ target_executable_first_line = target_executable_first_line.decode("utf_8", "replace")
+
+options = []
+python_shebang_options_matched = python_shebang_options_re.match(target_executable_first_line)
+if python_shebang_options_matched is not None:
+ options = [python_shebang_options_matched.group(1)]
+
+cpython_shebang_matched = cpython_shebang_re.match(target_executable_first_line)
+
+if cpython_shebang_matched is not None:
+ try:
+ python_interpreter_path = "${EPREFIX}/usr/bin/%s" % python_interpreter
+ os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"] = "1"
+ python_verification_process = subprocess.Popen([python_interpreter_path, "-c", "pass"], stdout=subprocess.PIPE)
+ del os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"]
+ if python_verification_process.wait() != 0:
+ raise ValueError
+
+ python_verification_output = python_verification_process.stdout.read()
+ if not isinstance(python_verification_output, str):
+ # Python 3
+ python_verification_output = python_verification_output.decode()
+
+ if not python_verification_output_re.match(python_verification_output):
+ raise ValueError
+
+ if cpython_interpreter_re.match(python_interpreter) is not None:
+ os.environ["GENTOO_PYTHON_PROCESS_NAME"] = os.path.basename(sys.argv[0])
+ os.environ["GENTOO_PYTHON_WRAPPER_SCRIPT_PATH"] = sys.argv[0]
+ os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH"] = target_executable_path
+
+ if hasattr(os, "execv"):
+ os.execv(python_interpreter_path, [python_interpreter_path] + options + sys.argv)
+ else:
+ sys.exit(subprocess.Popen([python_interpreter_path] + options + sys.argv).wait())
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ pass
+ for variable in ("GENTOO_PYTHON_PROCESS_NAME", "GENTOO_PYTHON_WRAPPER_SCRIPT_PATH", "GENTOO_PYTHON_TARGET_SCRIPT_PATH", "GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"):
+ if variable in os.environ:
+ del os.environ[variable]
+
+if hasattr(os, "execv"):
+ os.execv(target_executable_path, sys.argv)
+else:
+ sys.exit(subprocess.Popen([target_executable_path] + sys.argv[1:]).wait())
+EOF
+ if [[ "$?" != "0" ]]; then
+ die "${FUNCNAME}(): Generation of '$1' failed"
+ fi
+ fperms +x "${file#${ED%/}}" || die "fperms '${file}' failed"
+ done
+}
+
+# @ECLASS-VARIABLE: PYTHON_VERSIONED_SCRIPTS
+# @DESCRIPTION:
+# Array of regular expressions of paths to versioned Python scripts.
+# Python scripts in /usr/bin and /usr/sbin are versioned by default.
+
+# @ECLASS-VARIABLE: PYTHON_VERSIONED_EXECUTABLES
+# @DESCRIPTION:
+# Array of regular expressions of paths to versioned executables (including Python scripts).
+
+# @ECLASS-VARIABLE: PYTHON_NONVERSIONED_EXECUTABLES
+# @DESCRIPTION:
+# Array of regular expressions of paths to nonversioned executables (including Python scripts).
+
+# @FUNCTION: python_merge_intermediate_installation_images
+# @USAGE: [-q|--quiet] [--] <intermediate_installation_images_directory>
+# @DESCRIPTION:
+# Merge intermediate installation images into installation image.
+#
+# This function can be used only in src_install() phase.
+python_merge_intermediate_installation_images() {
+	if [[ "${EBUILD_PHASE}" != "install" ]]; then
+		die "${FUNCNAME}() can be used only in src_install() phase"
+	fi
+
+	if ! _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+
+	local absolute_file b file files=() intermediate_installation_images_directory PYTHON_ABI quiet="0" regex shebang version_executable wrapper_scripts=() wrapper_scripts_set=()
+
+	# Option parsing: only -q/--quiet and "--" are accepted before the
+	# single positional argument.
+	while (($#)); do
+		case "$1" in
+			-q|--quiet)
+				quiet="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	if [[ "$#" -ne 1 ]]; then
+		die "${FUNCNAME}() requires 1 argument"
+	fi
+
+	intermediate_installation_images_directory="$1"
+
+	if [[ ! -d "${intermediate_installation_images_directory}" ]]; then
+		die "${FUNCNAME}(): Intermediate installation images directory '${intermediate_installation_images_directory}' does not exist"
+	fi
+
+	_python_calculate_PYTHON_ABIS
+	# When the final ABI is Python 3, the string literals embedded in the
+	# helper code below need a b'' prefix to stay byte strings.
+	if [[ "$(PYTHON -f --ABI)" == 3.* ]]; then
+		b="b"
+	fi
+
+	# Collect the union of relative file paths present in any per-ABI image,
+	# NUL-delimited so arbitrary filenames (spaces, newlines) survive.
+	while read -d $'\0' -r file; do
+		files+=("${file}")
+	done < <("$(PYTHON -f)" -c \
+"import os
+import sys
+
+if hasattr(sys.stdout, 'buffer'):
+	# Python 3
+	stdout = sys.stdout.buffer
+else:
+	# Python 2
+	stdout = sys.stdout
+
+files_set = set()
+
+os.chdir(${b}'${intermediate_installation_images_directory}')
+
+for PYTHON_ABI in ${b}'${PYTHON_ABIS}'.split():
+	for root, dirs, files in os.walk(PYTHON_ABI + ${b}'${EPREFIX}'):
+		root = root[len(PYTHON_ABI + ${b}'${EPREFIX}')+1:]
+		files_set.update(root + ${b}'/' + file for file in files)
+
+for file in sorted(files_set):
+	stdout.write(file)
+	stdout.write(${b}'\x00')" || die "${FUNCNAME}(): Failure of extraction of files in intermediate installation images")
+
+	for PYTHON_ABI in ${PYTHON_ABIS}; do
+		if [[ ! -d "${intermediate_installation_images_directory}/${PYTHON_ABI}" ]]; then
+			die "${FUNCNAME}(): Intermediate installation image for Python ABI '${PYTHON_ABI}' does not exist"
+		fi
+
+		pushd "${intermediate_installation_images_directory}/${PYTHON_ABI}${EPREFIX}" > /dev/null || die "pushd failed"
+
+		for file in "${files[@]}"; do
+			# version_executable: 0 = do not version, 1 = version only if it
+			# has a Python shebang, 2 = version unconditionally.
+			version_executable="0"
+			for regex in "/usr/bin/.*" "/usr/sbin/.*" "${PYTHON_VERSIONED_SCRIPTS[@]}"; do
+				if [[ "/${file}" =~ ^${regex}$ ]]; then
+					version_executable="1"
+					break
+				fi
+			done
+			for regex in "${PYTHON_VERSIONED_EXECUTABLES[@]}"; do
+				if [[ "/${file}" =~ ^${regex}$ ]]; then
+					version_executable="2"
+					break
+				fi
+			done
+			# PYTHON_NONVERSIONED_EXECUTABLES overrides both lists above.
+			if [[ "${version_executable}" != "0" ]]; then
+				for regex in "${PYTHON_NONVERSIONED_EXECUTABLES[@]}"; do
+					if [[ "/${file}" =~ ^${regex}$ ]]; then
+						version_executable="0"
+						break
+					fi
+				done
+			fi
+
+			[[ "${version_executable}" == "0" ]] && continue
+
+			# Resolve symlinks to an absolute path inside this ABI's image so
+			# the shebang check below inspects the link target, not the link.
+			if [[ -L "${file}" ]]; then
+				absolute_file="$(readlink "${file}")"
+				if [[ "${absolute_file}" == /* ]]; then
+					absolute_file="${intermediate_installation_images_directory}/${PYTHON_ABI}${EPREFIX}/${absolute_file##/}"
+				else
+					if [[ "${file}" == */* ]]; then
+						absolute_file="${intermediate_installation_images_directory}/${PYTHON_ABI}${EPREFIX}/${file%/*}/${absolute_file}"
+					else
+						absolute_file="${intermediate_installation_images_directory}/${PYTHON_ABI}${EPREFIX}/${absolute_file}"
+					fi
+				fi
+			else
+				absolute_file="${intermediate_installation_images_directory}/${PYTHON_ABI}${EPREFIX}/${file}"
+			fi
+
+			# Only executables are candidates for versioning.
+			[[ ! -x "${absolute_file}" ]] && continue
+
+			shebang="$(head -n1 "${absolute_file}")" || die "Extraction of shebang from '${absolute_file}' failed"
+
+			if [[ "${version_executable}" == "2" ]]; then
+				wrapper_scripts+=("${ED}${file}")
+			elif [[ "${version_executable}" == "1" ]]; then
+				if [[ "${shebang}" =~ ${_PYTHON_SHEBANG_BASE_PART_REGEX}([[:digit:]]+(\.[[:digit:]]+)?)?($|[[:space:]]+) ]]; then
+					wrapper_scripts+=("${ED}${file}")
+				else
+					version_executable="0"
+				fi
+			fi
+
+			[[ "${version_executable}" == "0" ]] && continue
+
+			if [[ -e "${file}-${PYTHON_ABI}" ]]; then
+				die "${FUNCNAME}(): '${EPREFIX}/${file}-${PYTHON_ABI}' already exists"
+			fi
+
+			# Rename to the ABI-suffixed name; a wrapper script generated
+			# later dispatches the unsuffixed name to the right suffix.
+			mv "${file}" "${file}-${PYTHON_ABI}" || die "Renaming of '${file}' failed"
+
+			if [[ "${shebang}" =~ ${_PYTHON_SHEBANG_BASE_PART_REGEX}[[:digit:]]*($|[[:space:]]+) ]]; then
+				if [[ -L "${file}-${PYTHON_ABI}" ]]; then
+					python_convert_shebangs $([[ "${quiet}" == "1" ]] && echo --quiet) "${PYTHON_ABI}" "${absolute_file}"
+				else
+					python_convert_shebangs $([[ "${quiet}" == "1" ]] && echo --quiet) "${PYTHON_ABI}" "${file}-${PYTHON_ABI}"
+				fi
+			fi
+		done
+
+		popd > /dev/null || die "popd failed"
+
+		# This is per bug #390691, without the duplication refactor, and with
+		# the 3-way structure per comment #6. This enables users with old
+		# coreutils to upgrade a lot easier (you need to upgrade python+portage
+		# before coreutils can be upgraded).
+		# NOTE(review): the three die messages below are missing the closing
+		# "'" after ${PYTHON_ABI} — cosmetic only, but worth fixing upstream.
+		if ROOT="/" has_version '>=sys-apps/coreutils-6.9.90'; then
+			cp -fr --preserve=all --no-preserve=context "${intermediate_installation_images_directory}/${PYTHON_ABI}/"* "${D}" || die "Merging of intermediate installation image for Python ABI '${PYTHON_ABI} into installation image failed"
+		elif ROOT="/" has_version sys-apps/coreutils; then
+			cp -fr --preserve=all "${intermediate_installation_images_directory}/${PYTHON_ABI}/"* "${D}" || die "Merging of intermediate installation image for Python ABI '${PYTHON_ABI} into installation image failed"
+		else
+			cp -fpr "${intermediate_installation_images_directory}/${PYTHON_ABI}/"* "${D}" || die "Merging of intermediate installation image for Python ABI '${PYTHON_ABI} into installation image failed"
+		fi
+	done
+
+	rm -fr "${intermediate_installation_images_directory}"
+
+	# Deduplicate the wrapper-script list (sorted, NUL-delimited) and
+	# generate the dispatching wrapper scripts for the versioned files.
+	if [[ "${#wrapper_scripts[@]}" -ge 1 ]]; then
+		rm -f "${T}/python_wrapper_scripts"
+
+		for file in "${wrapper_scripts[@]}"; do
+			echo -n "${file}" >> "${T}/python_wrapper_scripts"
+			echo -en "\x00" >> "${T}/python_wrapper_scripts"
+		done
+
+		while read -d $'\0' -r file; do
+			wrapper_scripts_set+=("${file}")
+		done < <("$(PYTHON -f)" -c \
+"import sys
+
+if hasattr(sys.stdout, 'buffer'):
+	# Python 3
+	stdout = sys.stdout.buffer
+else:
+	# Python 2
+	stdout = sys.stdout
+
+python_wrapper_scripts_file = open('${T}/python_wrapper_scripts', 'rb')
+files = set(python_wrapper_scripts_file.read().rstrip(${b}'\x00').split(${b}'\x00'))
+python_wrapper_scripts_file.close()
+
+for file in sorted(files):
+	stdout.write(file)
+	stdout.write(${b}'\x00')" || die "${FUNCNAME}(): Failure of extraction of set of wrapper scripts")
+
+		python_generate_wrapper_scripts $([[ "${quiet}" == "1" ]] && echo --quiet) "${wrapper_scripts_set[@]}"
+	fi
+}
+
+# ================================================================================================
+# ========= FUNCTIONS FOR PACKAGES NOT SUPPORTING INSTALLATION FOR MULTIPLE PYTHON ABIS ==========
+# ================================================================================================
+
+unset EPYTHON PYTHON_ABI
+
+# @FUNCTION: python_set_active_version
+# @USAGE: <Python_ABI|2|3>
+# @DESCRIPTION:
+# Set locally active version of Python.
+# If Python_ABI argument is specified, then version of Python corresponding to Python_ABI is used.
+# If 2 argument is specified, then active version of CPython 2 is used.
+# If 3 argument is specified, then active version of CPython 3 is used.
+#
+# This function can be used only in pkg_setup() phase.
+python_set_active_version() {
+	if [[ "${EBUILD_PHASE}" != "setup" ]]; then
+		die "${FUNCNAME}() can be used only in pkg_setup() phase"
+	fi
+
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}() cannot be used in ebuilds of packages supporting installation for multiple Python ABIs"
+	fi
+
+	if [[ "$#" -ne 1 ]]; then
+		die "${FUNCNAME}() requires 1 argument"
+	fi
+
+	_python_initial_sanity_checks
+
+	# Respect a PYTHON_ABI already present in the environment; only derive
+	# EPYTHON/PYTHON_ABI from the argument when it is unset.
+	if [[ -z "${PYTHON_ABI}" ]]; then
+		if [[ -n "$(_python_get_implementation --ignore-invalid "$1")" ]]; then
+			# Argument is a concrete Python ABI (e.g. "2.7", "2.5-jython").
+			# PYTHON_ABI variable is intended to be used only in ebuilds/eclasses,
+			# so it does not need to be exported to subprocesses.
+			PYTHON_ABI="$1"
+			if ! _python_implementation && ! has_version "$(python_get_implementational_package)"; then
+				die "${FUNCNAME}(): '$(python_get_implementational_package)' is not installed"
+			fi
+			export EPYTHON="$(PYTHON "$1")"
+		elif [[ "$1" == "2" ]]; then
+			# "2" selects the eselect-active CPython 2 interpreter.
+			if ! _python_implementation && ! has_version "=dev-lang/python-2*"; then
+				die "${FUNCNAME}(): '=dev-lang/python-2*' is not installed"
+			fi
+			export EPYTHON="$(PYTHON -2)"
+			# Derive the ABI from the interpreter name, e.g. "python2.7" -> "2.7".
+			PYTHON_ABI="${EPYTHON#python}"
+			PYTHON_ABI="${PYTHON_ABI%%-*}"
+		elif [[ "$1" == "3" ]]; then
+			# "3" selects the eselect-active CPython 3 interpreter.
+			if ! _python_implementation && ! has_version "=dev-lang/python-3*"; then
+				die "${FUNCNAME}(): '=dev-lang/python-3*' is not installed"
+			fi
+			export EPYTHON="$(PYTHON -3)"
+			PYTHON_ABI="${EPYTHON#python}"
+			PYTHON_ABI="${PYTHON_ABI%%-*}"
+		else
+			die "${FUNCNAME}(): Unrecognized argument '$1'"
+		fi
+	fi
+
+	_python_final_sanity_checks
+
+	# python-updater checks PYTHON_REQUESTED_ACTIVE_VERSION variable.
+	PYTHON_REQUESTED_ACTIVE_VERSION="$1"
+}
+
+# @FUNCTION: python_need_rebuild
+# @DESCRIPTION:
+# Mark current package for rebuilding by python-updater after
+# switching of active version of Python.
+python_need_rebuild() {
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}() cannot be used in ebuilds of packages supporting installation for multiple Python ABIs"
+	fi
+
+	_python_check_python_pkg_setup_execution
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# Record the ABI this build used; python-updater compares it against the
+	# newly active ABI to decide whether the package must be rebuilt.
+	export PYTHON_NEED_REBUILD="$(PYTHON --ABI)"
+}
+
+# ================================================================================================
+# ======================================= GETTER FUNCTIONS =======================================
+# ================================================================================================
+
+_PYTHON_ABI_EXTRACTION_COMMAND=\
+'import platform
+import sys
+sys.stdout.write(".".join(str(x) for x in sys.version_info[:2]))
+if platform.system()[:4] == "Java":
+ sys.stdout.write("-jython")
+elif hasattr(platform, "python_implementation") and platform.python_implementation() == "PyPy":
+ sys.stdout.write("-pypy-" + ".".join(str(x) for x in sys.pypy_version_info[:2]))'
+
+# Print the implementation name ("CPython", "Jython" or "PyPy") for a given
+# Python ABI string. With --ignore-invalid, an unrecognized ABI prints
+# nothing instead of dying.
+_python_get_implementation() {
+	local ignore_invalid="0"
+
+	while (($#)); do
+		case "$1" in
+			--ignore-invalid)
+				ignore_invalid="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	if [[ "$#" -ne 1 ]]; then
+		die "${FUNCNAME}() requires 1 argument"
+	fi
+
+	# ABI grammar: "X.Y" (CPython), "X.Y-jython" (Jython),
+	# "X.Y-pypy-A.B" (PyPy).
+	if [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+$ ]]; then
+		echo "CPython"
+	elif [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+-jython$ ]]; then
+		echo "Jython"
+	elif [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+-pypy-[[:digit:]]+\.[[:digit:]]+$ ]]; then
+		echo "PyPy"
+	else
+		if [[ "${ignore_invalid}" == "0" ]]; then
+			die "${FUNCNAME}(): Unrecognized Python ABI '$1'"
+		fi
+	fi
+}
+
+# @FUNCTION: PYTHON
+# @USAGE: [-2] [-3] [--ABI] [-a|--absolute-path] [-f|--final-ABI] [--] <Python_ABI="${PYTHON_ABI}">
+# @DESCRIPTION:
+# Print filename of Python interpreter for specified Python ABI. If Python_ABI argument
+# is omitted, then PYTHON_ABI environment variable must be set and is used.
+# If -2 option is specified, then active version of CPython 2 is used.
+# If -3 option is specified, then active version of CPython 3 is used.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+# -2, -3 and --final-ABI options and Python_ABI argument cannot be specified simultaneously.
+# If --ABI option is specified, then only specified Python ABI is printed instead of
+# filename of Python interpreter.
+# If --absolute-path option is specified, then absolute path to Python interpreter is printed.
+# --ABI and --absolute-path options cannot be specified simultaneously.
+PYTHON() {
+	_python_check_python_pkg_setup_execution
+
+	# PYTHON_ABI is shadowed locally so option handling cannot leak a value
+	# back into the caller's scope.
+	local ABI_output="0" absolute_path_output="0" final_ABI="0" PYTHON_ABI="${PYTHON_ABI}" python_interpreter python2="0" python3="0"
+
+	while (($#)); do
+		case "$1" in
+			-2)
+				python2="1"
+				;;
+			-3)
+				python3="1"
+				;;
+			--ABI)
+				ABI_output="1"
+				;;
+			-a|--absolute-path)
+				absolute_path_output="1"
+				;;
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	if [[ "${ABI_output}" == "1" && "${absolute_path_output}" == "1" ]]; then
+		die "${FUNCNAME}(): '--ABI' and '--absolute-path' options cannot be specified simultaneously"
+	fi
+
+	# -2, -3 and --final-ABI are mutually exclusive selectors.
+	if [[ "$((${python2} + ${python3} + ${final_ABI}))" -gt 1 ]]; then
+		die "${FUNCNAME}(): '-2', '-3' or '--final-ABI' options cannot be specified simultaneously"
+	fi
+
+	if [[ "$#" -eq 0 ]]; then
+		# No explicit ABI argument: determine it from the selected option,
+		# the ABI-specific local scope, or the locally active interpreter.
+		if [[ "${final_ABI}" == "1" ]]; then
+			if ! _python_package_supporting_installation_for_multiple_python_abis; then
+				die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+			fi
+			_python_calculate_PYTHON_ABIS
+			# PYTHON_ABIS is space-separated; take its last element.
+			PYTHON_ABI="${PYTHON_ABIS##* }"
+		elif [[ "${python2}" == "1" ]]; then
+			PYTHON_ABI="$(ROOT="/" eselect python show --python2 --ABI)"
+			if [[ -z "${PYTHON_ABI}" ]]; then
+				die "${FUNCNAME}(): Active version of CPython 2 not set"
+			elif [[ "${PYTHON_ABI}" != "2."* ]]; then
+				die "${FUNCNAME}(): Internal error in \`eselect python show --python2\`"
+			fi
+		elif [[ "${python3}" == "1" ]]; then
+			PYTHON_ABI="$(ROOT="/" eselect python show --python3 --ABI)"
+			if [[ -z "${PYTHON_ABI}" ]]; then
+				die "${FUNCNAME}(): Active version of CPython 3 not set"
+			elif [[ "${PYTHON_ABI}" != "3."* ]]; then
+				die "${FUNCNAME}(): Internal error in \`eselect python show --python3\`"
+			fi
+		elif _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			# Single-ABI package: ask the system interpreter for its ABI.
+			PYTHON_ABI="$("${EPREFIX}/usr/bin/python" -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")"
+			if [[ -z "${PYTHON_ABI}" ]]; then
+				die "${FUNCNAME}(): Failure of extraction of locally active version of Python"
+			fi
+		fi
+	elif [[ "$#" -eq 1 ]]; then
+		if [[ "${final_ABI}" == "1" ]]; then
+			die "${FUNCNAME}(): '--final-ABI' option and Python ABI cannot be specified simultaneously"
+		fi
+		if [[ "${python2}" == "1" ]]; then
+			die "${FUNCNAME}(): '-2' option and Python ABI cannot be specified simultaneously"
+		fi
+		if [[ "${python3}" == "1" ]]; then
+			die "${FUNCNAME}(): '-3' option and Python ABI cannot be specified simultaneously"
+		fi
+		PYTHON_ABI="$1"
+	else
+		die "${FUNCNAME}(): Invalid usage"
+	fi
+
+	if [[ "${ABI_output}" == "1" ]]; then
+		echo -n "${PYTHON_ABI}"
+		return
+	else
+		# Map the ABI to the interpreter's executable name.
+		if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+			python_interpreter="python${PYTHON_ABI}"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+			python_interpreter="jython${PYTHON_ABI%-jython}"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+			python_interpreter="pypy-c${PYTHON_ABI#*-pypy-}"
+		fi
+
+		if [[ "${absolute_path_output}" == "1" ]]; then
+			echo -n "${EPREFIX}/usr/bin/${python_interpreter}"
+		else
+			echo -n "${python_interpreter}"
+		fi
+	fi
+
+	# NOTE(review): ABI/DEFAULT_ABI appear to be the multilib ABI variables,
+	# appending e.g. "-x86" for non-default multilib ABIs — confirm.
+	if [[ -n "${ABI}" && "${ABI}" != "${DEFAULT_ABI}" && "${DEFAULT_ABI}" != "default" ]]; then
+		echo -n "-${ABI}"
+	fi
+}
+
+# @FUNCTION: python_get_implementation
+# @USAGE: [-f|--final-ABI]
+# @DESCRIPTION:
+# Print name of Python implementation.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_implementation() {
+	_python_check_python_pkg_setup_execution
+
+	local final_ABI="0" PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	# Resolve the ABI to report: --final-ABI -> last enabled ABI; otherwise
+	# the ABI of the current ABI-specific scope or the active interpreter.
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	echo "$(_python_get_implementation "${PYTHON_ABI}")"
+}
+
+# @FUNCTION: python_get_implementational_package
+# @USAGE: [-f|--final-ABI]
+# @DESCRIPTION:
+# Print category, name and slot of package providing Python implementation.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_implementational_package() {
+	_python_check_python_pkg_setup_execution
+
+	local final_ABI="0" PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	# EAPI 0 has no slot dependencies, so emit "=cat/pkg-ver*" atoms there;
+	# newer EAPIs use "cat/pkg:slot" atoms.
+	if [[ "${EAPI:-0}" == "0" ]]; then
+		if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+			echo "=dev-lang/python-${PYTHON_ABI}*"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+			echo "=dev-java/jython-${PYTHON_ABI%-jython}*"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+			echo "=virtual/pypy-${PYTHON_ABI#*-pypy-}*"
+		fi
+	else
+		if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+			echo "dev-lang/python:${PYTHON_ABI}"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+			echo "dev-java/jython:${PYTHON_ABI%-jython}"
+		elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+			echo "virtual/pypy:${PYTHON_ABI#*-pypy-}"
+		fi
+	fi
+}
+
+# @FUNCTION: python_get_includedir
+# @USAGE: [-b|--base-path] [-f|--final-ABI]
+# @DESCRIPTION:
+# Print path to Python include directory.
+# If --base-path option is specified, then path not prefixed with "/" is printed.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_includedir() {
+	_python_check_python_pkg_setup_execution
+
+	local base_path="0" final_ABI="0" prefix PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-b|--base-path)
+				base_path="1"
+				;;
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	# With --base-path, prefix stays empty so a relative path is printed.
+	if [[ "${base_path}" == "0" ]]; then
+		prefix="/"
+	fi
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+		echo "${prefix}usr/include/python${PYTHON_ABI}"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+		echo "${prefix}usr/share/jython-${PYTHON_ABI%-jython}/Include"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+		echo "${prefix}usr/$(get_libdir)/pypy${PYTHON_ABI#*-pypy-}/include"
+	fi
+}
+
+# @FUNCTION: python_get_libdir
+# @USAGE: [-b|--base-path] [-f|--final-ABI]
+# @DESCRIPTION:
+# Print path to Python standard library directory.
+# If --base-path option is specified, then path not prefixed with "/" is printed.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_libdir() {
+	_python_check_python_pkg_setup_execution
+
+	local base_path="0" final_ABI="0" prefix PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-b|--base-path)
+				base_path="1"
+				;;
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	# With --base-path, prefix stays empty so a relative path is printed.
+	if [[ "${base_path}" == "0" ]]; then
+		prefix="/"
+	fi
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+		echo "${prefix}usr/$(get_libdir)/python${PYTHON_ABI}"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+		echo "${prefix}usr/share/jython-${PYTHON_ABI%-jython}/Lib"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+		# PyPy splits its stdlib across several directories, so there is no
+		# single answer for this query.
+		die "${FUNCNAME}(): PyPy has multiple standard library directories"
+	fi
+}
+
+# @FUNCTION: python_get_sitedir
+# @USAGE: [-b|--base-path] [-f|--final-ABI]
+# @DESCRIPTION:
+# Print path to Python site-packages directory.
+# If --base-path option is specified, then path not prefixed with "/" is printed.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_sitedir() {
+	_python_check_python_pkg_setup_execution
+
+	local base_path="0" final_ABI="0" prefix PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-b|--base-path)
+				base_path="1"
+				;;
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	# With --base-path, prefix stays empty so a relative path is printed.
+	if [[ "${base_path}" == "0" ]]; then
+		prefix="/"
+	fi
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+		echo "${prefix}usr/$(get_libdir)/python${PYTHON_ABI}/site-packages"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+		echo "${prefix}usr/share/jython-${PYTHON_ABI%-jython}/Lib/site-packages"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+		echo "${prefix}usr/$(get_libdir)/pypy${PYTHON_ABI#*-pypy-}/site-packages"
+	fi
+}
+
+# @FUNCTION: python_get_library
+# @USAGE: [-b|--base-path] [-f|--final-ABI] [-l|--linker-option]
+# @DESCRIPTION:
+# Print path to Python library.
+# If --base-path option is specified, then path not prefixed with "/" is printed.
+# If --linker-option is specified, then "-l${library}" linker option is printed.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_library() {
+	_python_check_python_pkg_setup_execution
+
+	local base_path="0" final_ABI="0" linker_option="0" prefix PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-b|--base-path)
+				base_path="1"
+				;;
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-l|--linker-option)
+				linker_option="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	# With --base-path, prefix stays empty so a relative path is printed.
+	if [[ "${base_path}" == "0" ]]; then
+		prefix="/"
+	fi
+
+	# --linker-option prints "-lpython...", not a path, so --base-path
+	# would be meaningless with it.
+	if [[ "${base_path}" == "1" && "${linker_option}" == "1" ]]; then
+		die "${FUNCNAME}(): '--base-path' and '--linker-option' options cannot be specified simultaneously"
+	fi
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	# Only CPython ships a shared libpython; Jython and PyPy have none.
+	if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+		if [[ "${linker_option}" == "1" ]]; then
+			echo "-lpython${PYTHON_ABI}"
+		else
+			echo "${prefix}usr/$(get_libdir)/libpython${PYTHON_ABI}$(get_libname)"
+		fi
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+		die "${FUNCNAME}(): Jython does not have shared library"
+	elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+		die "${FUNCNAME}(): PyPy does not have shared library"
+	fi
+}
+
+# @FUNCTION: python_get_version
+# @USAGE: [-f|--final-ABI] [-l|--language] [--full] [--major] [--minor] [--micro]
+# @DESCRIPTION:
+# Print version of Python implementation.
+# --full, --major, --minor and --micro options cannot be specified simultaneously.
+# If --full, --major, --minor and --micro options are not specified, then "${major_version}.${minor_version}" is printed.
+# If --language option is specified, then version of Python language is printed.
+# --language and --full options cannot be specified simultaneously.
+# --language and --micro options cannot be specified simultaneously.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_version() {
+	_python_check_python_pkg_setup_execution
+
+	local final_ABI="0" language="0" language_version full="0" major="0" minor="0" micro="0" PYTHON_ABI="${PYTHON_ABI}" python_command
+
+	while (($#)); do
+		case "$1" in
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-l|--language)
+				language="1"
+				;;
+			--full)
+				full="1"
+				;;
+			--major)
+				major="1"
+				;;
+			--minor)
+				minor="1"
+				;;
+			--micro)
+				micro="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis && ! _python_abi-specific_local_scope; then
+			die "${FUNCNAME}() should be used in ABI-specific local scope"
+		fi
+	fi
+
+	# At most one of --full / --major / --minor / --micro may be requested.
+	if [[ "$((${full} + ${major} + ${minor} + ${micro}))" -gt 1 ]]; then
+		die "${FUNCNAME}(): '--full', '--major', '--minor' or '--micro' options cannot be specified simultaneously"
+	fi
+
+	if [[ "${language}" == "1" ]]; then
+		if [[ "${final_ABI}" == "1" ]]; then
+			PYTHON_ABI="$(PYTHON -f --ABI)"
+		elif [[ -z "${PYTHON_ABI}" ]]; then
+			PYTHON_ABI="$(PYTHON --ABI)"
+		fi
+		# The language version is the leading "X.Y" component of the ABI string.
+		language_version="${PYTHON_ABI%%-*}"
+		if [[ "${full}" == "1" ]]; then
+			die "${FUNCNAME}(): '--language' and '--full' options cannot be specified simultaneously"
+		elif [[ "${major}" == "1" ]]; then
+			echo "${language_version%.*}"
+		elif [[ "${minor}" == "1" ]]; then
+			echo "${language_version#*.}"
+		elif [[ "${micro}" == "1" ]]; then
+			die "${FUNCNAME}(): '--language' and '--micro' options cannot be specified simultaneously"
+		else
+			echo "${language_version}"
+		fi
+	else
+		# Implementation version: PyPy reports it via sys.pypy_version_info,
+		# other implementations via sys.version_info.
+		if [[ "${full}" == "1" ]]; then
+			python_command="import sys; print('.'.join(str(x) for x in getattr(sys, 'pypy_version_info', sys.version_info)[:3]))"
+		elif [[ "${major}" == "1" ]]; then
+			python_command="import sys; print(getattr(sys, 'pypy_version_info', sys.version_info)[0])"
+		elif [[ "${minor}" == "1" ]]; then
+			python_command="import sys; print(getattr(sys, 'pypy_version_info', sys.version_info)[1])"
+		elif [[ "${micro}" == "1" ]]; then
+			python_command="import sys; print(getattr(sys, 'pypy_version_info', sys.version_info)[2])"
+		else
+			if [[ -n "${PYTHON_ABI}" && "${final_ABI}" == "0" ]]; then
+				# Fast path: derive "X.Y" from PYTHON_ABI without spawning the interpreter.
+				if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then
+					echo "${PYTHON_ABI}"
+				elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then
+					echo "${PYTHON_ABI%-jython}"
+				elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "PyPy" ]]; then
+					echo "${PYTHON_ABI#*-pypy-}"
+				fi
+				return
+			fi
+			python_command="from sys import version_info; print('.'.join(str(x) for x in version_info[:2]))"
+		fi
+
+		if [[ "${final_ABI}" == "1" ]]; then
+			"$(PYTHON -f)" -c "${python_command}"
+		else
+			"$(PYTHON ${PYTHON_ABI})" -c "${python_command}"
+		fi
+	fi
+}
+
+# @FUNCTION: python_get_implementation_and_version
+# @USAGE: [-f|--final-ABI]
+# @DESCRIPTION:
+# Print name and version of Python implementation.
+# If version of Python implementation is not bound to version of Python language, then
+# version of Python language is additionally printed.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+python_get_implementation_and_version() {
+	_python_check_python_pkg_setup_execution
+
+	local final_ABI="0" PYTHON_ABI="${PYTHON_ABI}"
+
+	while (($#)); do
+		case "$1" in
+			-f|--final-ABI)
+				final_ABI="1"
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				die "${FUNCNAME}(): Invalid usage"
+				;;
+		esac
+		shift
+	done
+
+	if [[ "${final_ABI}" == "1" ]]; then
+		if ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+		PYTHON_ABI="$(PYTHON -f --ABI)"
+	else
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if ! _python_abi-specific_local_scope; then
+				die "${FUNCNAME}() should be used in ABI-specific local scope"
+			fi
+		else
+			PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		fi
+	fi
+
+	# ABIs of the form "X.Y-impl-A.B" carry an implementation version distinct
+	# from the language version, so both are printed in that case.
+	if [[ "${PYTHON_ABI}" =~ ^[[:digit:]]+\.[[:digit:]]+-[[:alnum:]]+-[[:digit:]]+\.[[:digit:]]+$ ]]; then
+		echo "$(_python_get_implementation "${PYTHON_ABI}") ${PYTHON_ABI##*-} (Python ${PYTHON_ABI%%-*})"
+	else
+		echo "$(_python_get_implementation "${PYTHON_ABI}") ${PYTHON_ABI%%-*}"
+	fi
+}
+
+# ================================================================================================
+# ================================ FUNCTIONS FOR RUNNING OF TESTS ================================
+# ================================================================================================
+
+# @ECLASS-VARIABLE: PYTHON_TEST_VERBOSITY
+# @DESCRIPTION:
+# User-configurable verbosity of tests of Python modules.
+# Supported values: 0, 1, 2, 3, 4.
+PYTHON_TEST_VERBOSITY="${PYTHON_TEST_VERBOSITY:-1}"
+
+# Internal helper: run "${_PYTHON_TEST_FUNCTION}_$1_hook" ($1 being "pre" or "post"),
+# if such a function is defined and the package supports installation for
+# multiple Python ABIs. _PYTHON_TEST_FUNCTION is set by the caller.
+_python_test_hook() {
+	if [[ "$#" -ne 1 ]]; then
+		die "${FUNCNAME}() requires 1 argument"
+	fi
+
+	if _python_package_supporting_installation_for_multiple_python_abis && [[ "$(type -t "${_PYTHON_TEST_FUNCTION}_$1_hook")" == "function" ]]; then
+		"${_PYTHON_TEST_FUNCTION}_$1_hook"
+	fi
+}
+
+# @FUNCTION: python_execute_nosetests
+# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments]
+# @DESCRIPTION:
+# Execute nosetests for all enabled Python ABIs.
+# In ebuilds of packages supporting installation for multiple Python ABIs, this function calls
+# python_execute_nosetests_pre_hook() and python_execute_nosetests_post_hook(), if they are defined.
+python_execute_nosetests() {
+	_python_check_python_pkg_setup_execution
+	_python_set_color_variables
+
+	local PYTHONPATH_template separate_build_dirs
+
+	while (($#)); do
+		case "$1" in
+			-P|--PYTHONPATH)
+				PYTHONPATH_template="$2"
+				shift
+				;;
+			-s|--separate-build-dirs)
+				separate_build_dirs="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	python_test_function() {
+		local evaluated_PYTHONPATH
+
+		# The template may reference per-ABI variables; evaluate it in the
+		# scope of the ABI currently being tested.
+		eval "evaluated_PYTHONPATH=\"${PYTHONPATH_template}\""
+
+		_PYTHON_TEST_FUNCTION="python_execute_nosetests" _python_test_hook pre
+
+		if [[ -n "${evaluated_PYTHONPATH}" ]]; then
+			echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@"${_NORMAL}
+			PYTHONPATH="${evaluated_PYTHONPATH}" nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@" || return "$?"
+		else
+			echo ${_BOLD}nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@"${_NORMAL}
+			nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@" || return "$?"
+		fi
+
+		_PYTHON_TEST_FUNCTION="python_execute_nosetests" _python_test_hook post
+	}
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		python_execute_function ${separate_build_dirs:+-s} python_test_function "$@"
+	else
+		# --separate-build-dirs only makes sense when iterating over multiple ABIs.
+		if [[ -n "${separate_build_dirs}" ]]; then
+			die "${FUNCNAME}(): Invalid usage"
+		fi
+		python_test_function "$@" || die "Testing failed"
+	fi
+
+	unset -f python_test_function
+}
+
+# @FUNCTION: python_execute_py.test
+# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments]
+# @DESCRIPTION:
+# Execute py.test for all enabled Python ABIs.
+# py.test is run with -v when PYTHON_TEST_VERBOSITY >= 2.
+# In ebuilds of packages supporting installation for multiple Python ABIs, this function calls
+# python_execute_py.test_pre_hook() and python_execute_py.test_post_hook(), if they are defined.
+python_execute_py.test() {
+	_python_check_python_pkg_setup_execution
+	_python_set_color_variables
+
+	local PYTHONPATH_template separate_build_dirs
+
+	while (($#)); do
+		case "$1" in
+			-P|--PYTHONPATH)
+				PYTHONPATH_template="$2"
+				shift
+				;;
+			-s|--separate-build-dirs)
+				separate_build_dirs="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	python_test_function() {
+		local evaluated_PYTHONPATH
+
+		# The template may reference per-ABI variables; evaluate it in the
+		# scope of the ABI currently being tested.
+		eval "evaluated_PYTHONPATH=\"${PYTHONPATH_template}\""
+
+		_PYTHON_TEST_FUNCTION="python_execute_py.test" _python_test_hook pre
+
+		# Both branches use the same verbosity threshold (>= 2) for -v.
+		if [[ -n "${evaluated_PYTHONPATH}" ]]; then
+			echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@"${_NORMAL}
+			PYTHONPATH="${evaluated_PYTHONPATH}" py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@" || return "$?"
+		else
+			echo ${_BOLD}py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@"${_NORMAL}
+			py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@" || return "$?"
+		fi
+
+		_PYTHON_TEST_FUNCTION="python_execute_py.test" _python_test_hook post
+	}
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		python_execute_function ${separate_build_dirs:+-s} python_test_function "$@"
+	else
+		# --separate-build-dirs only makes sense when iterating over multiple ABIs.
+		if [[ -n "${separate_build_dirs}" ]]; then
+			die "${FUNCNAME}(): Invalid usage"
+		fi
+		python_test_function "$@" || die "Testing failed"
+	fi
+
+	unset -f python_test_function
+}
+
+# @FUNCTION: python_execute_trial
+# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments]
+# @DESCRIPTION:
+# Execute trial for all enabled Python ABIs.
+# trial is run with --spew when PYTHON_TEST_VERBOSITY >= 4.
+# In ebuilds of packages supporting installation for multiple Python ABIs, this function
+# calls python_execute_trial_pre_hook() and python_execute_trial_post_hook(), if they are defined.
+python_execute_trial() {
+	_python_check_python_pkg_setup_execution
+	_python_set_color_variables
+
+	local PYTHONPATH_template separate_build_dirs
+
+	while (($#)); do
+		case "$1" in
+			-P|--PYTHONPATH)
+				PYTHONPATH_template="$2"
+				shift
+				;;
+			-s|--separate-build-dirs)
+				separate_build_dirs="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	python_test_function() {
+		local evaluated_PYTHONPATH
+
+		# The template may reference per-ABI variables; evaluate it in the
+		# scope of the ABI currently being tested.
+		eval "evaluated_PYTHONPATH=\"${PYTHONPATH_template}\""
+
+		_PYTHON_TEST_FUNCTION="python_execute_trial" _python_test_hook pre
+
+		if [[ -n "${evaluated_PYTHONPATH}" ]]; then
+			echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@"${_NORMAL}
+			PYTHONPATH="${evaluated_PYTHONPATH}" trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@" || return "$?"
+		else
+			echo ${_BOLD}trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@"${_NORMAL}
+			trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@" || return "$?"
+		fi
+
+		_PYTHON_TEST_FUNCTION="python_execute_trial" _python_test_hook post
+	}
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		python_execute_function ${separate_build_dirs:+-s} python_test_function "$@"
+	else
+		# --separate-build-dirs only makes sense when iterating over multiple ABIs.
+		if [[ -n "${separate_build_dirs}" ]]; then
+			die "${FUNCNAME}(): Invalid usage"
+		fi
+		python_test_function "$@" || die "Testing failed"
+	fi
+
+	unset -f python_test_function
+}
+
+# ================================================================================================
+# ======================= FUNCTIONS FOR HANDLING OF BYTE-COMPILED MODULES ========================
+# ================================================================================================
+
+# @FUNCTION: python_enable_pyc
+# @DESCRIPTION:
+# Tell Python to automatically recompile modules to .pyc/.pyo if the
+# timestamps/version stamps have changed.
+python_enable_pyc() {
+	_python_check_python_pkg_setup_execution
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# Python skips writing bytecode while PYTHONDONTWRITEBYTECODE is set.
+	unset PYTHONDONTWRITEBYTECODE
+}
+
+# @FUNCTION: python_disable_pyc
+# @DESCRIPTION:
+# Tell Python not to automatically recompile modules to .pyc/.pyo
+# even if the timestamps/version stamps do not match. This is done
+# to protect sandbox.
+python_disable_pyc() {
+	_python_check_python_pkg_setup_execution
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	export PYTHONDONTWRITEBYTECODE="1"
+}
+
+# Internal helper: echo arguments only when PORTAGE_VERBOSE is set.
+_python_vecho() {
+	[[ -z ${PORTAGE_VERBOSE} ]] || echo "$@"
+}
+
+# Internal helper: delete orphaned / stale byte-compiled files (*.py[co],
+# *$py.class, __pycache__ entries) under the given paths. Arguments may be
+# directories (scanned recursively) or individual .py files. Callable only
+# from python_mod_optimize() / python_mod_cleanup().
+# In postinst phase, compiled files newer than their source are kept;
+# otherwise only compiled files whose source still exists are kept.
+_python_clean_compiled_modules() {
+	_python_initialize_prefix_variables
+	_python_set_color_variables
+
+	[[ "${FUNCNAME[1]}" =~ ^(python_mod_optimize|python_mod_cleanup)$ ]] || die "${FUNCNAME}(): Invalid usage"
+
+	local base_module_name compiled_file compiled_files=() dir path py_file root
+
+	# Strip trailing slash from EROOT.
+	root="${EROOT%/}"
+
+	for path in "$@"; do
+		compiled_files=()
+		if [[ -d "${path}" ]]; then
+			# NUL-delimited find so paths with whitespace survive.
+			while read -d $'\0' -r compiled_file; do
+				compiled_files+=("${compiled_file}")
+			done < <(find "${path}" "(" -name "*.py[co]" -o -name "*\$py.class" ")" -print0)
+
+			if [[ "${EBUILD_PHASE}" == "postrm" ]]; then
+				# Delete empty child directories.
+				find "${path}" -type d | sort -r | while read -r dir; do
+					if rmdir "${dir}" 2> /dev/null; then
+						_python_vecho "<<< ${dir}"
+					fi
+				done
+			fi
+		elif [[ "${path}" == *.py ]]; then
+			base_module_name="${path##*/}"
+			base_module_name="${base_module_name%.py}"
+			if [[ -d "${path%/*}/__pycache__" ]]; then
+				while read -d $'\0' -r compiled_file; do
+					compiled_files+=("${compiled_file}")
+				done < <(find "${path%/*}/__pycache__" "(" -name "${base_module_name}.*.py[co]" -o -name "${base_module_name}\$py.class" ")" -print0)
+			fi
+			# Also consider legacy same-directory compiled forms.
+			compiled_files+=("${path}c" "${path}o" "${path%.py}\$py.class")
+		fi
+
+		for compiled_file in "${compiled_files[@]}"; do
+			[[ ! -f "${compiled_file}" ]] && continue
+			dir="${compiled_file%/*}"
+			dir="${dir##*/}"
+			if [[ "${compiled_file}" == *.py[co] ]]; then
+				if [[ "${dir}" == "__pycache__" ]]; then
+					# PEP 3147 layout: pkg/__pycache__/mod.<tag>.pyc -> pkg/mod.py
+					base_module_name="${compiled_file##*/}"
+					base_module_name="${base_module_name%.*py[co]}"
+					base_module_name="${base_module_name%.*}"
+					py_file="${compiled_file%__pycache__/*}${base_module_name}.py"
+				else
+					py_file="${compiled_file%[co]}"
+				fi
+				if [[ "${EBUILD_PHASE}" == "postinst" ]]; then
+					[[ -f "${py_file}" && "${compiled_file}" -nt "${py_file}" ]] && continue
+				else
+					[[ -f "${py_file}" ]] && continue
+				fi
+				_python_vecho "<<< ${compiled_file%[co]}[co]"
+				rm -f "${compiled_file%[co]}"[co]
+			elif [[ "${compiled_file}" == *\$py.class ]]; then
+				if [[ "${dir}" == "__pycache__" ]]; then
+					base_module_name="${compiled_file##*/}"
+					base_module_name="${base_module_name%\$py.class}"
+					py_file="${compiled_file%__pycache__/*}${base_module_name}.py"
+				else
+					py_file="${compiled_file%\$py.class}.py"
+				fi
+				if [[ "${EBUILD_PHASE}" == "postinst" ]]; then
+					[[ -f "${py_file}" && "${compiled_file}" -nt "${py_file}" ]] && continue
+				else
+					[[ -f "${py_file}" ]] && continue
+				fi
+				_python_vecho "<<< ${compiled_file}"
+				rm -f "${compiled_file}"
+			else
+				die "${FUNCNAME}(): Unrecognized file type: '${compiled_file}'"
+			fi
+
+			# Delete empty parent directories.
+			dir="${compiled_file%/*}"
+			while [[ "${dir}" != "${root}" ]]; do
+				if rmdir "${dir}" 2> /dev/null; then
+					_python_vecho "<<< ${dir}"
+				else
+					break
+				fi
+				dir="${dir%/*}"
+			done
+		done
+	done
+}
+
+# @FUNCTION: python_mod_optimize
+# @USAGE: [--allow-evaluated-non-sitedir-paths] [-d directory] [-f] [-l] [-q] [-x regular_expression] [--] <file|directory> [files|directories]
+# @DESCRIPTION:
+# Byte-compile specified Python modules.
+# -d, -f, -l, -q and -x options passed to this function are passed to compileall.py.
+#
+# This function can be used only in pkg_postinst() phase.
+python_mod_optimize() {
+	if [[ "${EBUILD_PHASE}" != "postinst" ]]; then
+		die "${FUNCNAME}() can be used only in pkg_postinst() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+
+	# Modern branch: EAPI >= 3, multi-ABI packages, Python implementations
+	# themselves, and portage. Anything else falls into the deprecated branch.
+	if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis || _python_implementation || [[ "${CATEGORY}/${PN}" == "sys-apps/portage" ]]; then
+		# PYTHON_ABI variable cannot be local in packages not supporting installation for multiple Python ABIs.
+		local allow_evaluated_non_sitedir_paths="0" dir dirs=() evaluated_dirs=() evaluated_files=() file files=() iterated_PYTHON_ABIS options=() other_dirs=() other_files=() previous_PYTHON_ABI="${PYTHON_ABI}" return_code root site_packages_dirs=() site_packages_files=() stderr stderr_line
+
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			if has "${EAPI:-0}" 0 1 2 3 && [[ -z "${PYTHON_ABIS}" ]]; then
+				die "${FUNCNAME}(): python_pkg_setup() or python_execute_function() not called"
+			fi
+			iterated_PYTHON_ABIS="${PYTHON_ABIS}"
+		else
+			if has "${EAPI:-0}" 0 1 2 3; then
+				iterated_PYTHON_ABIS="${PYTHON_ABI:=$(PYTHON --ABI)}"
+			else
+				iterated_PYTHON_ABIS="${PYTHON_ABI}"
+			fi
+		fi
+
+		# Strip trailing slash from EROOT.
+		root="${EROOT%/}"
+
+		while (($#)); do
+			case "$1" in
+				--allow-evaluated-non-sitedir-paths)
+					allow_evaluated_non_sitedir_paths="1"
+					;;
+				-l|-f|-q)
+					options+=("$1")
+					;;
+				-d|-x)
+					options+=("$1" "$2")
+					shift
+					;;
+				--)
+					shift
+					break
+					;;
+				-*)
+					die "${FUNCNAME}(): Unrecognized option '$1'"
+					;;
+				*)
+					break
+					;;
+			esac
+			shift
+		done
+
+		if [[ "${allow_evaluated_non_sitedir_paths}" == "1" ]] && ! _python_package_supporting_installation_for_multiple_python_abis; then
+			die "${FUNCNAME}(): '--allow-evaluated-non-sitedir-paths' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+		fi
+
+		if [[ "$#" -eq 0 ]]; then
+			die "${FUNCNAME}(): Missing files or directories"
+		fi
+
+		# Classify each argument: evaluated (contains $, expanded per ABI),
+		# absolute non-sitedir, or relative to the per-ABI site-packages dir.
+		while (($#)); do
+			if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then
+				die "${FUNCNAME}(): Invalid argument '$1'"
+			elif ! _python_implementation && [[ "$1" =~ ^/usr/lib(32|64)?/python[[:digit:]]+\.[[:digit:]]+ ]]; then
+				die "${FUNCNAME}(): Paths of directories / files in site-packages directories must be relative to site-packages directories"
+			elif [[ "$1" =~ ^/ ]]; then
+				if _python_package_supporting_installation_for_multiple_python_abis; then
+					if [[ "${allow_evaluated_non_sitedir_paths}" != "1" ]]; then
+						die "${FUNCNAME}(): Absolute paths cannot be used in ebuilds of packages supporting installation for multiple Python ABIs"
+					fi
+					if [[ "$1" != *\$* ]]; then
+						die "${FUNCNAME}(): '$1' has invalid syntax"
+					fi
+					if [[ "$1" == *.py ]]; then
+						evaluated_files+=("$1")
+					else
+						evaluated_dirs+=("$1")
+					fi
+				else
+					if [[ -d "${root}$1" ]]; then
+						other_dirs+=("${root}$1")
+					elif [[ -f "${root}$1" ]]; then
+						other_files+=("${root}$1")
+					elif [[ -e "${root}$1" ]]; then
+						eerror "${FUNCNAME}(): '${root}$1' is not a regular file or a directory"
+					else
+						eerror "${FUNCNAME}(): '${root}$1' does not exist"
+					fi
+				fi
+			else
+				for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do
+					if [[ -d "${root}$(python_get_sitedir)/$1" ]]; then
+						site_packages_dirs+=("$1")
+						break
+					elif [[ -f "${root}$(python_get_sitedir)/$1" ]]; then
+						site_packages_files+=("$1")
+						break
+					elif [[ -e "${root}$(python_get_sitedir)/$1" ]]; then
+						eerror "${FUNCNAME}(): '$1' is not a regular file or a directory"
+					else
+						eerror "${FUNCNAME}(): '$1' does not exist"
+					fi
+				done
+			fi
+			shift
+		done
+
+		# Set additional options.
+		options+=("-q")
+
+		for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do
+			if ((${#site_packages_dirs[@]})) || ((${#site_packages_files[@]})) || ((${#evaluated_dirs[@]})) || ((${#evaluated_files[@]})); then
+				return_code="0"
+				stderr=""
+				ebegin "Compilation and optimization of Python modules for $(python_get_implementation_and_version)"
+				if ((${#site_packages_dirs[@]})) || ((${#evaluated_dirs[@]})); then
+					for dir in "${site_packages_dirs[@]}"; do
+						dirs+=("${root}$(python_get_sitedir)/${dir}")
+					done
+					for dir in "${evaluated_dirs[@]}"; do
+						eval "dirs+=(\"\${root}${dir}\")"
+					done
+					stderr+="${stderr:+$'\n'}$("$(PYTHON)" -m compileall "${options[@]}" "${dirs[@]}" 2>&1)" || return_code="1"
+					# Second pass with -O produces .pyo files (not supported by Jython/PyPy).
+					if ! has "$(_python_get_implementation "${PYTHON_ABI}")" Jython PyPy; then
+						"$(PYTHON)" -O -m compileall "${options[@]}" "${dirs[@]}" &> /dev/null || return_code="1"
+					fi
+					_python_clean_compiled_modules "${dirs[@]}"
+				fi
+				if ((${#site_packages_files[@]})) || ((${#evaluated_files[@]})); then
+					for file in "${site_packages_files[@]}"; do
+						files+=("${root}$(python_get_sitedir)/${file}")
+					done
+					for file in "${evaluated_files[@]}"; do
+						eval "files+=(\"\${root}${file}\")"
+					done
+					stderr+="${stderr:+$'\n'}$("$(PYTHON)" -m py_compile "${files[@]}" 2>&1)" || return_code="1"
+					if ! has "$(_python_get_implementation "${PYTHON_ABI}")" Jython PyPy; then
+						"$(PYTHON)" -O -m py_compile "${files[@]}" &> /dev/null || return_code="1"
+					fi
+					_python_clean_compiled_modules "${files[@]}"
+				fi
+				eend "${return_code}"
+				if [[ -n "${stderr}" ]]; then
+					eerror "Syntax errors / warnings in Python modules for $(python_get_implementation_and_version):" &> /dev/null
+					while read stderr_line; do
+						eerror "    ${stderr_line}"
+					done <<< "${stderr}"
+				fi
+			fi
+			unset dirs files
+		done
+
+		if _python_package_supporting_installation_for_multiple_python_abis; then
+			# Restore previous value of PYTHON_ABI.
+			if [[ -n "${previous_PYTHON_ABI}" ]]; then
+				PYTHON_ABI="${previous_PYTHON_ABI}"
+			else
+				unset PYTHON_ABI
+			fi
+		fi
+
+		if ((${#other_dirs[@]})) || ((${#other_files[@]})); then
+			return_code="0"
+			stderr=""
+			ebegin "Compilation and optimization of Python modules placed outside of site-packages directories for $(python_get_implementation_and_version)"
+			if ((${#other_dirs[@]})); then
+				stderr+="${stderr:+$'\n'}$("$(PYTHON ${PYTHON_ABI})" -m compileall "${options[@]}" "${other_dirs[@]}" 2>&1)" || return_code="1"
+				if ! has "$(_python_get_implementation "${PYTHON_ABI}")" Jython PyPy; then
+					"$(PYTHON ${PYTHON_ABI})" -O -m compileall "${options[@]}" "${other_dirs[@]}" &> /dev/null || return_code="1"
+				fi
+				_python_clean_compiled_modules "${other_dirs[@]}"
+			fi
+			if ((${#other_files[@]})); then
+				stderr+="${stderr:+$'\n'}$("$(PYTHON ${PYTHON_ABI})" -m py_compile "${other_files[@]}" 2>&1)" || return_code="1"
+				if ! has "$(_python_get_implementation "${PYTHON_ABI}")" Jython PyPy; then
+					"$(PYTHON ${PYTHON_ABI})" -O -m py_compile "${other_files[@]}" &> /dev/null || return_code="1"
+				fi
+				_python_clean_compiled_modules "${other_files[@]}"
+			fi
+			eend "${return_code}"
+			if [[ -n "${stderr}" ]]; then
+				eerror "Syntax errors / warnings in Python modules placed outside of site-packages directories for $(python_get_implementation_and_version):" &> /dev/null
+				while read stderr_line; do
+					eerror "    ${stderr_line}"
+				done <<< "${stderr}"
+			fi
+		fi
+	else
+		# Deprecated part of python_mod_optimize()
+
+		local myroot mydirs=() myfiles=() myopts=() return_code="0"
+
+		# strip trailing slash
+		myroot="${EROOT%/}"
+
+		# respect EROOT and options passed to compileall.py
+		while (($#)); do
+			case "$1" in
+				-l|-f|-q)
+					myopts+=("$1")
+					;;
+				-d|-x)
+					myopts+=("$1" "$2")
+					shift
+					;;
+				--)
+					shift
+					break
+					;;
+				-*)
+					die "${FUNCNAME}(): Unrecognized option '$1'"
+					;;
+				*)
+					break
+					;;
+			esac
+			shift
+		done
+
+		if [[ "$#" -eq 0 ]]; then
+			die "${FUNCNAME}(): Missing files or directories"
+		fi
+
+		while (($#)); do
+			if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then
+				die "${FUNCNAME}(): Invalid argument '$1'"
+			elif [[ -d "${myroot}/${1#/}" ]]; then
+				mydirs+=("${myroot}/${1#/}")
+			elif [[ -f "${myroot}/${1#/}" ]]; then
+				myfiles+=("${myroot}/${1#/}")
+			elif [[ -e "${myroot}/${1#/}" ]]; then
+				eerror "${FUNCNAME}(): ${myroot}/${1#/} is not a regular file or directory"
+			else
+				eerror "${FUNCNAME}(): ${myroot}/${1#/} does not exist"
+			fi
+			shift
+		done
+
+		# set additional opts
+		myopts+=(-q)
+
+		PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
+
+		ebegin "Compilation and optimization of Python modules for $(python_get_implementation) $(python_get_version)"
+		if ((${#mydirs[@]})); then
+			"$(PYTHON ${PYTHON_ABI})" "${myroot}$(python_get_libdir)/compileall.py" "${myopts[@]}" "${mydirs[@]}" || return_code="1"
+			"$(PYTHON ${PYTHON_ABI})" -O "${myroot}$(python_get_libdir)/compileall.py" "${myopts[@]}" "${mydirs[@]}" &> /dev/null || return_code="1"
+			_python_clean_compiled_modules "${mydirs[@]}"
+		fi
+
+		if ((${#myfiles[@]})); then
+			"$(PYTHON ${PYTHON_ABI})" "${myroot}$(python_get_libdir)/py_compile.py" "${myfiles[@]}" || return_code="1"
+			"$(PYTHON ${PYTHON_ABI})" -O "${myroot}$(python_get_libdir)/py_compile.py" "${myfiles[@]}" &> /dev/null || return_code="1"
+			_python_clean_compiled_modules "${myfiles[@]}"
+		fi
+
+		eend "${return_code}"
+	fi
+}
+
+# @FUNCTION: python_mod_cleanup
+# @USAGE: [--allow-evaluated-non-sitedir-paths] [--] <file|directory> [files|directories]
+# @DESCRIPTION:
+# Delete orphaned byte-compiled Python modules corresponding to specified Python modules.
+#
+# This function can be used only in pkg_postrm() phase.
+python_mod_cleanup() {
+	if [[ "${EBUILD_PHASE}" != "postrm" ]]; then
+		die "${FUNCNAME}() can be used only in pkg_postrm() phase"
+	fi
+
+	_python_check_python_pkg_setup_execution
+	_python_initialize_prefix_variables
+
+	local allow_evaluated_non_sitedir_paths="0" dir iterated_PYTHON_ABIS PYTHON_ABI="${PYTHON_ABI}" root search_paths=() sitedir
+
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		if has "${EAPI:-0}" 0 1 2 3 && [[ -z "${PYTHON_ABIS}" ]]; then
+			die "${FUNCNAME}(): python_pkg_setup() or python_execute_function() not called"
+		fi
+		iterated_PYTHON_ABIS="${PYTHON_ABIS}"
+	else
+		if has "${EAPI:-0}" 0 1 2 3; then
+			iterated_PYTHON_ABIS="${PYTHON_ABI:-$(PYTHON --ABI)}"
+		else
+			iterated_PYTHON_ABIS="${PYTHON_ABI}"
+		fi
+	fi
+
+	# Strip trailing slash from EROOT.
+	root="${EROOT%/}"
+
+	while (($#)); do
+		case "$1" in
+			--allow-evaluated-non-sitedir-paths)
+				allow_evaluated_non_sitedir_paths="1"
+				;;
+			--)
+				shift
+				break
+				;;
+			-*)
+				die "${FUNCNAME}(): Unrecognized option '$1'"
+				;;
+			*)
+				break
+				;;
+		esac
+		shift
+	done
+
+	if [[ "${allow_evaluated_non_sitedir_paths}" == "1" ]] && ! _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}(): '--allow-evaluated-non-sitedir-paths' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
+	fi
+
+	if [[ "$#" -eq 0 ]]; then
+		die "${FUNCNAME}(): Missing files or directories"
+	fi
+
+	# Modern branch mirrors the argument classification of python_mod_optimize().
+	if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis || _python_implementation || [[ "${CATEGORY}/${PN}" == "sys-apps/portage" ]]; then
+		while (($#)); do
+			if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then
+				die "${FUNCNAME}(): Invalid argument '$1'"
+			elif ! _python_implementation && [[ "$1" =~ ^/usr/lib(32|64)?/python[[:digit:]]+\.[[:digit:]]+ ]]; then
+				die "${FUNCNAME}(): Paths of directories / files in site-packages directories must be relative to site-packages directories"
+			elif [[ "$1" =~ ^/ ]]; then
+				if _python_package_supporting_installation_for_multiple_python_abis; then
+					if [[ "${allow_evaluated_non_sitedir_paths}" != "1" ]]; then
+						die "${FUNCNAME}(): Absolute paths cannot be used in ebuilds of packages supporting installation for multiple Python ABIs"
+					fi
+					if [[ "$1" != *\$* ]]; then
+						die "${FUNCNAME}(): '$1' has invalid syntax"
+					fi
+					# Evaluated paths are expanded once per enabled ABI.
+					for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do
+						eval "search_paths+=(\"\${root}$1\")"
+					done
+				else
+					search_paths+=("${root}$1")
+				fi
+			else
+				for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do
+					search_paths+=("${root}$(python_get_sitedir)/$1")
+				done
+			fi
+			shift
+		done
+	else
+		# Deprecated part of python_mod_cleanup()
+
+		search_paths=("${@#/}")
+		search_paths=("${search_paths[@]/#/${root}/}")
+	fi
+
+	_python_clean_compiled_modules "${search_paths[@]}"
+}
+
+# ================================================================================================
+# ===================================== DEPRECATED FUNCTIONS =====================================
+# ================================================================================================
+
+fi # _PYTHON_ECLASS_INHERITED
diff --git a/eclass/qmail.eclass b/eclass/qmail.eclass
new file mode 100644
index 000000000000..eb6e0122998f
--- /dev/null
+++ b/eclass/qmail.eclass
@@ -0,0 +1,536 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qmail.eclass
+# @MAINTAINER:
+# qmail-bugs@gentoo.org
+# @BLURB: common qmail functions
+
+inherit flag-o-matic toolchain-funcs fixheadtails user
+
+# hardcoded paths
+QMAIL_HOME="/var/qmail"
+TCPRULES_DIR="/etc/tcprules.d"
+SUPERVISE_DIR="/var/qmail/supervise"
+
+# source files and directories
+GENQMAIL_F=genqmail-${GENQMAIL_PV}.tar.bz2
+GENQMAIL_S="${WORKDIR}"/genqmail-${GENQMAIL_PV}
+
+QMAIL_SPP_F=qmail-spp-${QMAIL_SPP_PV}.tar.gz
+QMAIL_SPP_S="${WORKDIR}"/qmail-spp-${QMAIL_SPP_PV}
+
+# @FUNCTION: primes
+# @USAGE: <min> <max>
+# @DESCRIPTION:
+# Prints a list of primes between min and max inclusive
+# Note: this function gets very slow when used with large numbers.
+primes() {
+ local min=${1} max=${2}
+ local result= primelist=2 i p
+
+ [[ ${min} -le 2 ]] && result="${result} 2"
+
+ for ((i = 3; i <= max; i += 2))
+ do
+ for p in ${primelist}
+ do
+ [[ $[i % p] == 0 || $[p * p] -gt ${i} ]] && \
+ break
+ done
+ if [[ $[i % p] != 0 ]]
+ then
+ primelist="${primelist} ${i}"
+ [[ ${i} -ge ${min} ]] && \
+ result="${result} ${i}"
+ fi
+ done
+
+ echo ${result}
+}
+
+# @FUNCTION: is_prime
+# @USAGE: <number>
+# @DESCRIPTION:
+# Checks whether a number is a prime number
+is_prime() {
+ local number=${1} i
+ for i in $(primes ${number} ${number})
+ do
+ [[ ${i} == ${number} ]] && return 0
+ done
+ return 1
+}
+
+dospp() {
+ insinto "${QMAIL_HOME}"/plugins/
+ insopts -o root -g "$GROUP_ROOT" -m 0755
+ newins $1 ${2:-$(basename $1)}
+}
+
+# @FUNCTION: dosupervise
+# @USAGE: <service> [<runfile> <logfile>]
+# @DESCRIPTION:
+# Install runfiles for services and logging to supervise directory
+dosupervise() {
+ local service=$1
+ local runfile=${2:-${service}} logfile=${3:-${service}-log}
+ [[ -z "${service}" ]] && die "no service given"
+
+ insopts -o root -g "$GROUP_ROOT" -m 0755
+ diropts -o root -g "$GROUP_ROOT" -m 0755
+
+ dodir ${SUPERVISE_DIR}/${service}{,/log}
+ fperms +t ${SUPERVISE_DIR}/${service}{,/log}
+
+ insinto ${SUPERVISE_DIR}/${service}
+ newins ${runfile} run
+
+ insinto ${SUPERVISE_DIR}/${service}/log
+ newins ${logfile} run
+}
+
+# @FUNCTION: qmail_set_cc
+# @DESCRIPTION:
+# The following commands patch the conf-{cc,ld} files to use the user's
+# specified CFLAGS and LDFLAGS. These rather complex commands are needed
+# because a user supplied patch might apply changes to these files, too.
+# See bug #165981.
+qmail_set_cc() {
+ local cc=$(head -n 1 ./conf-cc | sed -e "s#^g\?cc\s\+\(-O2\)\?#$(tc-getCC) #")
+ local ld=$(head -n 1 ./conf-ld | sed -e "s#^g\?cc\s\+\(-s\)\?#$(tc-getCC) #")
+
+ echo "${cc} ${CFLAGS} ${CPPFLAGS}" > ./conf-cc || die 'Patching conf-cc failed.'
+ echo "${ld} ${LDFLAGS}" > ./conf-ld || die 'Patching conf-ld failed.'
+}
+
+# @FUNCTION: qmail_create_groups
+# @DESCRIPTION:
+# Keep qmail groups in sync across ebuilds
+qmail_create_groups() {
+ einfo "Creating qmail groups"
+ enewgroup nofiles 200
+ enewgroup qmail 201
+}
+
+# @FUNCTION: qmail_create_users
+# @DESCRIPTION:
+# Keep qmail users in sync across ebuilds
+qmail_create_users() {
+ qmail_create_groups
+
+ einfo "Creating qmail users"
+ enewuser alias 200 -1 "${QMAIL_HOME}"/alias 200
+ enewuser qmaild 201 -1 "${QMAIL_HOME}" 200
+ enewuser qmaill 202 -1 "${QMAIL_HOME}" 200
+ enewuser qmailp 203 -1 "${QMAIL_HOME}" 200
+ enewuser qmailq 204 -1 "${QMAIL_HOME}" 201
+ enewuser qmailr 205 -1 "${QMAIL_HOME}" 201
+ enewuser qmails 206 -1 "${QMAIL_HOME}" 201
+}
+
+genqmail_src_unpack() {
+ cd "${WORKDIR}"
+ [[ -n ${GENQMAIL_PV} ]] && unpack "${GENQMAIL_F}"
+}
+
+qmail_spp_src_unpack() {
+ cd "${WORKDIR}"
+ [[ -n ${QMAIL_SPP_PV} ]] && unpack "${QMAIL_SPP_F}"
+}
+
+# @FUNCTION: qmail_src_postunpack
+# @DESCRIPTION:
+# Unpack common config files, apply custom patches if supplied and
+# set the build configuration (CFLAGS, LDFLAGS, etc.)
+qmail_src_postunpack() {
+ cd "${S}"
+
+ qmail_set_cc
+
+ mysplit=${QMAIL_CONF_SPLIT:-23}
+ is_prime ${mysplit} || die "QMAIL_CONF_SPLIT is not a prime number."
+ einfo "Using conf-split value of ${mysplit}."
+ echo -n ${mysplit} > "${S}"/conf-split
+}
+
+qmail_src_compile() {
+ cd "${S}"
+ emake it man "$@" || die "make failed"
+}
+
+qmail_spp_src_compile() {
+ cd "${GENQMAIL_S}"/spp/
+ emake || die "make spp failed"
+}
+
+qmail_base_install() {
+ einfo "Setting up basic directory hierarchy"
+ diropts -o root -g qmail -m 755
+ keepdir "${QMAIL_HOME}"/{,bin,control}
+
+ einfo "Installing basic qmail software"
+ insinto "${QMAIL_HOME}"/bin
+
+ insopts -o root -g qmail -m 755
+ doins datemail elq forward maildir2mbox maildirmake \
+ maildirwatch mailsubj pinq predate qail \
+ qmail-{inject,qmqpc,showctl} sendmail
+
+ einfo "Adding env.d entry for qmail"
+ doenvd "${GENQMAIL_S}"/conf/99qmail
+
+ declare -F qmail_base_install_hook >/dev/null && \
+ qmail_base_install_hook
+}
+
+qmail_full_install() {
+ einfo "Setting up full directory hierarchy"
+ keepdir "${QMAIL_HOME}"/users
+ diropts -o alias -g qmail -m 755
+ keepdir "${QMAIL_HOME}"/alias
+
+ einfo "Installing all qmail software"
+ insopts -o root -g qmail -m 755
+ doins bouncesaying condredirect config-fast except preline qbiff \
+ qmail-{pop3d,qmqpd,qmtpd,qread,qstat,smtpd,tcpok,tcpto} \
+ qreceipt qsmhook tcp-env
+
+ insopts -o root -g qmail -m 711
+ doins qmail-{clean,getpw,local,popup,pw2u,remote,rspawn,send} splogger
+
+ insopts -o root -g qmail -m 700
+ doins qmail-{lspawn,newmrh,newu,start}
+
+ insopts -o qmailq -g qmail -m 4711
+ doins qmail-queue
+
+ declare -F qmail_full_install_hook >/dev/null && \
+ qmail_full_install_hook
+}
+
+qmail_config_install() {
+ einfo "Installing stock configuration files"
+ insinto "${QMAIL_HOME}"/control
+ insopts -o root -g "$GROUP_ROOT" -m 644
+ doins "${GENQMAIL_S}"/control/{conf-*,defaultdelivery}
+
+ einfo "Installing configuration sanity checker and launcher"
+ insinto "${QMAIL_HOME}"/bin
+ insopts -o root -g "$GROUP_ROOT" -m 644
+ doins "${GENQMAIL_S}"/control/qmail-config-system
+
+ declare -F qmail_config_install_hook >/dev/null && \
+ qmail_config_install_hook
+}
+
+qmail_man_install() {
+ einfo "Installing manpages and documentation"
+
+ # those are tagged for section 8 but named for
+ # section 9 (which does not exist anyway)
+ for i in *.9; do
+ mv ${i} ${i/.9/.8}
+ done
+
+ into /usr
+ doman *.[1578]
+ dodoc BLURB* CHANGES FAQ INSTALL* PIC* README* REMOVE* SECURITY \
+ SENDMAIL SYSDEPS TEST* THANKS* THOUGHTS TODO* \
+ UPGRADE VERSION*
+
+ declare -F qmail_man_install_hook >/dev/null && \
+ qmail_man_install_hook
+}
+
+qmail_sendmail_install() {
+ einfo "Installing sendmail replacement"
+ diropts -m 755
+ dodir /usr/sbin /usr/lib
+
+ dosym "${QMAIL_HOME}"/bin/sendmail /usr/sbin/sendmail
+ dosym "${QMAIL_HOME}"/bin/sendmail /usr/lib/sendmail
+
+ declare -F qmail_sendmail_install_hook >/dev/null && \
+ qmail_sendmail_install_hook
+}
+
+qmail_maildir_install() {
+ # use the correct maildirmake
+ # the courier-imap one has some extensions that are nicer
+ MAILDIRMAKE="${D}${QMAIL_HOME}/bin/maildirmake"
+ [[ -e /usr/bin/maildirmake ]] && \
+ MAILDIRMAKE="/usr/bin/maildirmake"
+
+ einfo "Setting up the default aliases"
+ diropts -o alias -g qmail -m 700
+ "${MAILDIRMAKE}" "${D}${QMAIL_HOME}"/alias/.maildir
+ keepdir "${QMAIL_HOME}"/alias/.maildir/{cur,new,tmp}
+
+ for i in "${QMAIL_HOME}"/alias/.qmail-{mailer-daemon,postmaster,root}; do
+ if [[ ! -f "${ROOT}${i}" ]]; then
+ touch "${D}${i}"
+ fowners alias:qmail "${i}"
+ fi
+ done
+
+ einfo "Setting up default maildirs in the account skeleton"
+ diropts -o root -g "$GROUP_ROOT" -m 755
+ insinto /etc/skel
+ insopts -o root -g "$GROUP_ROOT" -m 644
+ newins "${GENQMAIL_S}"/control/defaultdelivery .qmail.sample
+ "${MAILDIRMAKE}" "${D}"/etc/skel/.maildir
+ keepdir /etc/skel/.maildir/{cur,new,tmp}
+
+ declare -F qmail_maildir_install_hook >/dev/null && \
+ qmail_maildir_install_hook
+}
+
+qmail_tcprules_install() {
+ dodir "${TCPRULES_DIR}"
+ insinto "${TCPRULES_DIR}"
+ insopts -o root -g "$GROUP_ROOT" -m 0644
+ doins "${GENQMAIL_S}"/tcprules/Makefile.qmail
+ doins "${GENQMAIL_S}"/tcprules/tcp.qmail-*
+ use ssl || rm -f "${D}${TCPRULES_DIR}"/tcp.qmail-pop3sd
+}
+
+qmail_supervise_install() {
+ einfo "Installing supervise scripts"
+
+ cd "${GENQMAIL_S}"/supervise
+
+ for i in qmail-{send,smtpd,qmtpd,qmqpd,pop3d}; do
+ dosupervise ${i}
+ diropts -o qmaill -g "$GROUP_ROOT" -m 755
+ keepdir /var/log/qmail/${i}
+ done
+
+ if use ssl; then
+ dosupervise qmail-pop3sd
+ diropts -o qmaill -g "$GROUP_ROOT" -m 755
+ keepdir /var/log/qmail/qmail-pop3sd
+ fi
+
+ declare -F qmail_supervise_install_hook >/dev/null && \
+ qmail_supervise_install_hook
+}
+
+qmail_spp_install() {
+ einfo "Installing qmail-spp configuration files"
+ insinto "${QMAIL_HOME}"/control/
+ insopts -o root -g "$GROUP_ROOT" -m 0644
+ doins "${GENQMAIL_S}"/spp/smtpplugins
+
+ einfo "Installing qmail-spp plugins"
+ keepdir "${QMAIL_HOME}"/plugins/
+ for i in authlog mfdnscheck ifauthnext tarpit; do
+ dospp "${GENQMAIL_S}"/spp/${i}
+ done
+
+ declare -F qmail_spp_install_hook >/dev/null && \
+ qmail_spp_install_hook
+}
+
+qmail_ssl_install() {
+ use gencertdaily && \
+ CRON_FOLDER=cron.daily || \
+ CRON_FOLDER=cron.hourly
+
+ einfo "Installing SSL Certificate creation script"
+ insinto "${QMAIL_HOME}"/control
+ insopts -o root -g "$GROUP_ROOT" -m 0644
+ doins "${GENQMAIL_S}"/ssl/servercert.cnf
+
+ insinto "${QMAIL_HOME}"/bin
+ insopts -o root -g "$GROUP_ROOT" -m 0755
+ doins "${GENQMAIL_S}"/ssl/mkservercert
+
+ einfo "Installing RSA key generation cronjob"
+ insinto /etc/${CRON_FOLDER}
+ insopts -o root -g "$GROUP_ROOT" -m 0755
+ doins "${GENQMAIL_S}"/ssl/qmail-genrsacert.sh
+
+ keepdir "${QMAIL_HOME}"/control/tlshosts
+
+ declare -F qmail_ssl_install_hook >/dev/null && \
+ qmail_ssl_install_hook
+}
+
+qmail_src_install() {
+ export GROUP_ROOT="$(id -gn root)"
+ qmail_base_install
+ qmail_full_install
+ qmail_config_install
+ qmail_man_install
+ qmail_sendmail_install
+ qmail_maildir_install
+ qmail_tcprules_install
+ qmail_supervise_install
+
+ use qmail-spp && qmail_spp_install
+ use ssl && qmail_ssl_install
+}
+
+qmail_queue_setup() {
+ if use highvolume; then
+ myconf="--bigtodo"
+ else
+ myconf="--no-bigtodo"
+ fi
+
+ mysplit=${QMAIL_CONF_SPLIT:-23}
+ is_prime ${mysplit} || die "QMAIL_CONF_SPLIT is not a prime number."
+
+ einfo "Setting up the message queue hierarchy"
+ /usr/bin/queue-repair.py --create ${myconf} \
+ --split ${mysplit} \
+ "${ROOT}${QMAIL_HOME}" >/dev/null || \
+ die 'queue-repair failed'
+}
+
+qmail_rootmail_fixup() {
+ local TMPCMD="ln -sf ${QMAIL_HOME}/alias/.maildir/ ${ROOT}/root/.maildir"
+
+ if [[ -d "${ROOT}"/root/.maildir && ! -L "${ROOT}"/root/.maildir ]] ; then
+ elog "Previously the qmail ebuilds created /root/.maildir/ but not"
+ elog "every mail was delivered there. If the directory does not"
+ elog "contain any mail, please delete it and run:"
+ elog "${TMPCMD}"
+ else
+ ${TMPCMD}
+ fi
+
+ chown -R alias:qmail "${ROOT}${QMAIL_HOME}"/alias/.maildir 2>/dev/null
+}
+
+qmail_tcprules_fixup() {
+ mkdir -p "${TCPRULES_DIR}"
+ for f in {smtp,qmtp,qmqp,pop3}{,.cdb}; do
+ old="/etc/tcp.${f}"
+ new="${TCPRULES_DIR}/tcp.qmail-${f}"
+ fail=0
+ if [[ -f "${old}" && ! -f "${new}" ]]; then
+ einfo "Moving ${old} to ${new}"
+ cp "${old}" "${new}" || fail=1
+ else
+ fail=1
+ fi
+ if [[ "${fail}" = 1 && -f "${old}" ]]; then
+ eerror "Error moving ${old} to ${new}, be sure to check the"
+ eerror "configuration! You may have already moved the files,"
+ eerror "in which case you can delete ${old}"
+ fi
+ done
+}
+
+qmail_tcprules_build() {
+ for f in tcp.qmail-{smtp,qmtp,qmqp,pop3,pop3s}; do
+ # please note that we don't check if it exists
+ # as we want it to make the cdb files anyway!
+ src="${ROOT}${TCPRULES_DIR}/${f}"
+ cdb="${ROOT}${TCPRULES_DIR}/${f}.cdb"
+ tmp="${ROOT}${TCPRULES_DIR}/.${f}.tmp"
+ [[ -e "${src}" ]] && tcprules "${cdb}" "${tmp}" < "${src}"
+ done
+}
+
+qmail_config_notice() {
+ elog
+ elog "To setup ${PN} to run out-of-the-box on your system, run:"
+ elog "emerge --config =${CATEGORY}/${PF}"
+}
+
+qmail_supervise_config_notice() {
+ elog
+ elog "To start qmail at boot you have to add svscan to your startup"
+ elog "and create the following links:"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-send /service/qmail-send"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-smtpd /service/qmail-smtpd"
+ elog
+ elog "To start the pop3 server as well, create the following link:"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-pop3d /service/qmail-pop3d"
+ elog
+ if use ssl; then
+ elog "To start the pop3s server as well, create the following link:"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-pop3sd /service/qmail-pop3sd"
+ elog
+ fi
+ elog "Additionally, the QMTP and QMQP protocols are supported, "
+ elog "and can be started as:"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-qmtpd /service/qmail-qmtpd"
+ elog "ln -s ${SUPERVISE_DIR}/qmail-qmqpd /service/qmail-qmqpd"
+ elog
+ elog "Additionally, if you wish to run qmail right now, you should "
+ elog "run this before anything else:"
+ elog "source /etc/profile"
+}
+
+qmail_config_fast() {
+ if [[ ${ROOT} = / ]]; then
+ local host=$(hostname --fqdn)
+
+ if [[ -z "${host}" ]]; then
+ eerror
+ eerror "Cannot determine your fully-qualified hostname"
+ eerror "Please setup your /etc/hosts as described in"
+ eerror "http://www.gentoo.org/doc/en/handbook/handbook-x86.xml?part=1&chap=8#doc_chap2_sect4"
+ eerror
+ die "cannot determine FQDN"
+ fi
+
+ if [[ ! -f "${ROOT}${QMAIL_HOME}"/control/me ]]; then
+ "${ROOT}${QMAIL_HOME}"/bin/config-fast ${host}
+ fi
+ else
+ ewarn "Skipping some configuration as it MUST be run on the final host"
+ fi
+}
+
+qmail_tcprules_config() {
+ local localips ip tcpstring line proto f
+
+ einfo "Accepting relaying by default from all ips configured on this machine."
+
+ # Start with iproute2 as ifconfig is deprecated, and ifconfig does not handle
+ # additional addresses added via iproute2.
+# Note: we have to strip off the packed netmask, e.g. 192.168.0.2/24
+ localips=$(ip address show 2>/dev/null | awk '$1 == "inet" {print $2}' | sed 's:/.*::')
+ if [[ -z ${localips} ]] ; then
+ # Hello old friend. Maybe you can tell us at least something.
+ localips=$(ifconfig | awk '$1 == "inet" {print $2}')
+ fi
+
+ tcpstring=':allow,RELAYCLIENT="",RBLSMTPD=""'
+
+ for ip in ${localips}; do
+ line="${ip}${tcpstring}"
+ for proto in smtp qmtp qmqp; do
+ f="${EROOT}${TCPRULES_DIR}/tcp.qmail-${proto}"
+ egrep -qs "${line}" "${f}" || echo "${line}" >> "${f}"
+ done
+ done
+}
+
+qmail_ssl_generate() {
+ CRON_FOLDER=cron.hourly
+ use gencertdaily && CRON_FOLDER=cron.daily
+
+ ebegin "Generating RSA keys for SSL/TLS, this can take some time"
+ "${ROOT}"/etc/${CRON_FOLDER}/qmail-genrsacert.sh
+ eend $?
+
+ einfo "Creating a self-signed ssl-certificate:"
+ "${ROOT}${QMAIL_HOME}"/bin/mkservercert
+
+ einfo "If you want to have a properly signed certificate "
+ einfo "instead, do the following:"
+ # space at the end of the string because of the current implementation
+ # of einfo
+ einfo "openssl req -new -nodes -out req.pem \\ "
+ einfo " -config ${QMAIL_HOME}/control/servercert.cnf \\ "
+ einfo " -keyout ${QMAIL_HOME}/control/servercert.pem"
+ einfo "Send req.pem to your CA to obtain signed_req.pem, and do:"
+ einfo "cat signed_req.pem >> ${QMAIL_HOME}/control/servercert.pem"
+}
diff --git a/eclass/qmake-utils.eclass b/eclass/qmake-utils.eclass
new file mode 100644
index 000000000000..a5d37566ad15
--- /dev/null
+++ b/eclass/qmake-utils.eclass
@@ -0,0 +1,323 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qmake-utils.eclass
+# @MAINTAINER:
+# Qt herd <qt@gentoo.org>
+# @AUTHOR:
+# Davide Pesavento <pesa@gentoo.org>
+# @BLURB: Common functions for qmake-based packages.
+# @DESCRIPTION:
+# Utility eclass providing wrapper functions for Qt4 and Qt5 qmake.
+#
+# This eclass does not set any metadata variables nor export any phase
+# functions. It can be inherited safely.
+
+if [[ -z ${_QMAKE_UTILS_ECLASS} ]]; then
+_QMAKE_UTILS_ECLASS=1
+
+inherit eutils multilib toolchain-funcs
+
+# @FUNCTION: qt4_get_bindir
+# @DESCRIPTION:
+# Echoes the directory where Qt4 binaries are installed.
+# EPREFIX is already prepended to the returned path.
+qt4_get_bindir() {
+ has "${EAPI:-0}" 0 1 2 && use !prefix && EPREFIX=
+
+ local qtbindir=${EPREFIX}$(qt4_get_libdir)/bin
+ if [[ -d ${qtbindir} ]]; then
+ echo ${qtbindir}
+ else
+ echo ${EPREFIX}/usr/bin
+ fi
+}
+
+# @FUNCTION: qt4_get_headerdir
+# @DESCRIPTION:
+# Echoes the directory where Qt4 headers are installed.
+qt4_get_headerdir() {
+ echo /usr/include/qt4
+}
+
+# @FUNCTION: qt4_get_libdir
+# @DESCRIPTION:
+# Echoes the directory where Qt4 libraries are installed.
+qt4_get_libdir() {
+ echo /usr/$(get_libdir)/qt4
+}
+
+# @FUNCTION: qt4_get_mkspecsdir
+# @DESCRIPTION:
+# Echoes the directory where Qt4 mkspecs are installed.
+qt4_get_mkspecsdir() {
+ echo /usr/share/qt4/mkspecs
+}
+
+# @FUNCTION: qt4_get_plugindir
+# @DESCRIPTION:
+# Echoes the directory where Qt4 plugins are installed.
+qt4_get_plugindir() {
+ echo $(qt4_get_libdir)/plugins
+}
+
+# @FUNCTION: qt5_get_bindir
+# @DESCRIPTION:
+# Echoes the directory where Qt5 binaries are installed.
+# EPREFIX is already prepended to the returned path.
+qt5_get_bindir() {
+ has "${EAPI:-0}" 0 1 2 && use !prefix && EPREFIX=
+
+ echo ${EPREFIX}$(qt5_get_libdir)/qt5/bin
+}
+
+# @FUNCTION: qt5_get_headerdir
+# @DESCRIPTION:
+# Echoes the directory where Qt5 headers are installed.
+qt5_get_headerdir() {
+ echo /usr/include/qt5
+}
+
+# @FUNCTION: qt5_get_libdir
+# @DESCRIPTION:
+# Echoes the directory where Qt5 libraries are installed.
+qt5_get_libdir() {
+ echo /usr/$(get_libdir)
+}
+
+# @FUNCTION: qt5_get_mkspecsdir
+# @DESCRIPTION:
+# Echoes the directory where Qt5 mkspecs are installed.
+qt5_get_mkspecsdir() {
+ echo $(qt5_get_libdir)/qt5/mkspecs
+}
+
+# @FUNCTION: qt5_get_plugindir
+# @DESCRIPTION:
+# Echoes the directory where Qt5 plugins are installed.
+qt5_get_plugindir() {
+ echo $(qt5_get_libdir)/qt5/plugins
+}
+
+# @FUNCTION: qmake-utils_find_pro_file
+# @RETURN: zero or one qmake .pro file names
+# @INTERNAL
+# @DESCRIPTION:
+# Outputs a project file name that can be passed to eqmake.
+# 0 *.pro files found --> outputs null string;
+# 1 *.pro file found --> outputs its name;
+# 2 or more *.pro files found --> if "${PN}.pro" or
+# "$(basename ${S}).pro" are there, outputs one of them.
+qmake-utils_find_pro_file() {
+ local dir_name=$(basename "${S}")
+
+ # set nullglob to avoid expanding *.pro to the literal
+ # string "*.pro" when there are no matching files
+ eshopts_push -s nullglob
+ local pro_files=(*.pro)
+ eshopts_pop
+
+ case ${#pro_files[@]} in
+ 0)
+ : ;;
+ 1)
+ echo "${pro_files}"
+ ;;
+ *)
+ for pro_file in "${pro_files[@]}"; do
+ if [[ ${pro_file%.pro} == ${dir_name} || ${pro_file%.pro} == ${PN} ]]; then
+ echo "${pro_file}"
+ break
+ fi
+ done
+ ;;
+ esac
+}
+
+# @VARIABLE: EQMAKE4_EXCLUDE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of files to be excluded from eqmake4 CONFIG processing.
+# Paths are relative to the current working directory (usually ${S}).
+#
+# Example: EQMAKE4_EXCLUDE="ignore/me.pro foo/*"
+
+# @FUNCTION: eqmake4
+# @USAGE: [project_file] [parameters to qmake]
+# @DESCRIPTION:
+# Wrapper for Qt4's qmake. If project_file is not specified, eqmake4 looks
+# for one in the current directory (non-recursively). If multiple project
+# files are found, then ${PN}.pro is used, if it exists, otherwise eqmake4
+# will not be able to continue.
+#
+# All other arguments are appended unmodified to qmake command line.
+#
+# For recursive build systems, i.e. those based on the subdirs template,
+# you should run eqmake4 on the top-level project file only, unless you
+# have a valid reason to do otherwise. During the building, qmake will
+# be automatically re-invoked with the right arguments on every directory
+# specified inside the top-level project file.
+eqmake4() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ has "${EAPI:-0}" 0 1 2 && use !prefix && EPREFIX=
+
+ ebegin "Running qmake"
+
+ local qmake_args=("$@")
+
+ # Check if the project file name was passed as first argument. If not, look for candidates.
+ local regexp='.*\.pro'
+ if ! [[ ${1} =~ ${regexp} ]]; then
+ local project_file=$(qmake-utils_find_pro_file)
+ if [[ -z ${project_file} ]]; then
+ echo
+ eerror "No project files found in '${PWD}'"
+ eerror "This shouldn't happen - please send a bug report to https://bugs.gentoo.org/"
+ echo
+ die "eqmake4 failed"
+ fi
+ qmake_args+=("${project_file}")
+ fi
+
+ # Make sure the CONFIG variable is correctly set for both release and debug builds.
+ local config_add=release
+ local config_remove=debug
+ if use_if_iuse debug; then
+ config_add=debug
+ config_remove=release
+ fi
+
+ local awkscript='BEGIN {
+ printf "### eqmake4 was here ###\n" > file;
+ printf "CONFIG -= debug_and_release %s\n", remove >> file;
+ printf "CONFIG += %s\n\n", add >> file;
+ fixed=0;
+ }
+ /^[[:blank:]]*CONFIG[[:blank:]]*[\+\*]?=/ {
+ if (gsub("\\<((" remove ")|(debug_and_release))\\>", "") > 0) {
+ fixed=1;
+ }
+ }
+ /^[[:blank:]]*CONFIG[[:blank:]]*-=/ {
+ if (gsub("\\<" add "\\>", "") > 0) {
+ fixed=1;
+ }
+ }
+ {
+ print >> file;
+ }
+ END {
+ print fixed;
+ }'
+
+ [[ -n ${EQMAKE4_EXCLUDE} ]] && eshopts_push -o noglob
+
+ local file
+ while read file; do
+ local excl
+ for excl in ${EQMAKE4_EXCLUDE}; do
+ [[ ${file} == ${excl} ]] && continue 2
+ done
+ grep -q '^### eqmake4 was here ###$' "${file}" && continue
+
+ local retval=$({
+ rm -f "${file}" || echo FAIL
+ awk -v file="${file}" \
+ -v add=${config_add} \
+ -v remove=${config_remove} \
+ -- "${awkscript}" || echo FAIL
+ } < "${file}")
+
+ if [[ ${retval} == 1 ]]; then
+ einfo " - fixed CONFIG in ${file}"
+ elif [[ ${retval} != 0 ]]; then
+ eerror " - error while processing ${file}"
+ die "eqmake4 failed to process ${file}"
+ fi
+ done < <(find . -type f -name '*.pr[io]' -printf '%P\n' 2>/dev/null)
+
+ [[ -n ${EQMAKE4_EXCLUDE} ]] && eshopts_pop
+
+ "$(qt4_get_bindir)"/qmake \
+ -makefile \
+ QMAKE_AR="$(tc-getAR) cqs" \
+ QMAKE_CC="$(tc-getCC)" \
+ QMAKE_CXX="$(tc-getCXX)" \
+ QMAKE_LINK="$(tc-getCXX)" \
+ QMAKE_LINK_C="$(tc-getCC)" \
+ QMAKE_OBJCOPY="$(tc-getOBJCOPY)" \
+ QMAKE_RANLIB= \
+ QMAKE_STRIP= \
+ QMAKE_CFLAGS="${CFLAGS}" \
+ QMAKE_CFLAGS_RELEASE= \
+ QMAKE_CFLAGS_DEBUG= \
+ QMAKE_CXXFLAGS="${CXXFLAGS}" \
+ QMAKE_CXXFLAGS_RELEASE= \
+ QMAKE_CXXFLAGS_DEBUG= \
+ QMAKE_LFLAGS="${LDFLAGS}" \
+ QMAKE_LFLAGS_RELEASE= \
+ QMAKE_LFLAGS_DEBUG= \
+ QMAKE_LIBDIR_QT="${EPREFIX}$(qt4_get_libdir)" \
+ QMAKE_LIBDIR_X11="${EPREFIX}/usr/$(get_libdir)" \
+ QMAKE_LIBDIR_OPENGL="${EPREFIX}/usr/$(get_libdir)" \
+ "${qmake_args[@]}"
+
+ if ! eend $? ; then
+ echo
+ eerror "Running qmake has failed! (see above for details)"
+ eerror "This shouldn't happen - please send a bug report to https://bugs.gentoo.org/"
+ echo
+ die "eqmake4 failed"
+ fi
+}
+
+# @FUNCTION: eqmake5
+# @USAGE: [arguments for qmake]
+# @DESCRIPTION:
+# Wrapper for Qt5's qmake. All arguments are passed to qmake.
+#
+# For recursive build systems, i.e. those based on the subdirs template,
+# you should run eqmake5 on the top-level project file only, unless you
+# have a valid reason to do otherwise. During the building, qmake will
+# be automatically re-invoked with the right arguments on every directory
+# specified inside the top-level project file.
+eqmake5() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ ebegin "Running qmake"
+
+ "$(qt5_get_bindir)"/qmake \
+ -makefile \
+ QMAKE_AR="$(tc-getAR) cqs" \
+ QMAKE_CC="$(tc-getCC)" \
+ QMAKE_LINK_C="$(tc-getCC)" \
+ QMAKE_LINK_C_SHLIB="$(tc-getCC)" \
+ QMAKE_CXX="$(tc-getCXX)" \
+ QMAKE_LINK="$(tc-getCXX)" \
+ QMAKE_LINK_SHLIB="$(tc-getCXX)" \
+ QMAKE_OBJCOPY="$(tc-getOBJCOPY)" \
+ QMAKE_RANLIB= \
+ QMAKE_STRIP= \
+ QMAKE_CFLAGS="${CFLAGS}" \
+ QMAKE_CFLAGS_RELEASE= \
+ QMAKE_CFLAGS_DEBUG= \
+ QMAKE_CXXFLAGS="${CXXFLAGS}" \
+ QMAKE_CXXFLAGS_RELEASE= \
+ QMAKE_CXXFLAGS_DEBUG= \
+ QMAKE_LFLAGS="${LDFLAGS}" \
+ QMAKE_LFLAGS_RELEASE= \
+ QMAKE_LFLAGS_DEBUG= \
+ "$@"
+
+ if ! eend $? ; then
+ echo
+ eerror "Running qmake has failed! (see above for details)"
+ eerror "This shouldn't happen - please send a bug report to https://bugs.gentoo.org/"
+ echo
+ die "eqmake5 failed"
+ fi
+}
+
+fi
diff --git a/eclass/qt4-build-multilib.eclass b/eclass/qt4-build-multilib.eclass
new file mode 100644
index 000000000000..6cc4ea3643d8
--- /dev/null
+++ b/eclass/qt4-build-multilib.eclass
@@ -0,0 +1,835 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qt4-build-multilib.eclass
+# @MAINTAINER:
+# Qt herd <qt@gentoo.org>
+# @AUTHOR:
+# Davide Pesavento <pesa@gentoo.org>
+# @BLURB: Eclass for Qt4 split ebuilds with multilib support.
+# @DESCRIPTION:
+# This eclass contains various functions that are used when building Qt4.
+# Requires EAPI 5.
+
+case ${EAPI} in
+ 5) : ;;
+ *) die "qt4-build-multilib.eclass: unsupported EAPI=${EAPI:-0}" ;;
+esac
+
+inherit eutils flag-o-matic multilib multilib-minimal toolchain-funcs
+
+HOMEPAGE="https://www.qt.io/"
+LICENSE="|| ( LGPL-2.1 LGPL-3 GPL-3 ) FDL-1.3"
+SLOT="4"
+
+case ${PV} in
+ 4.?.9999)
+ # git stable branch
+ QT4_BUILD_TYPE="live"
+ EGIT_BRANCH=${PV%.9999}
+ ;;
+ *)
+ # official stable release
+ QT4_BUILD_TYPE="release"
+ MY_P=qt-everywhere-opensource-src-${PV/_/-}
+ SRC_URI="http://download.qt.io/official_releases/qt/${PV%.*}/${PV}/${MY_P}.tar.gz"
+ S=${WORKDIR}/${MY_P}
+ ;;
+esac
+
+EGIT_REPO_URI=(
+ "git://code.qt.io/qt/qt.git"
+ "https://code.qt.io/git/qt/qt.git"
+ "https://github.com/qtproject/qt.git"
+)
+[[ ${QT4_BUILD_TYPE} == live ]] && inherit git-r3
+
+if [[ ${PN} != qttranslations ]]; then
+ IUSE="aqua debug pch"
+ [[ ${PN} != qtxmlpatterns ]] && IUSE+=" +exceptions"
+fi
+
+DEPEND="
+ dev-lang/perl
+ virtual/pkgconfig[${MULTILIB_USEDEP}]
+"
+RDEPEND="
+ dev-qt/qtchooser
+ abi_x86_32? ( !app-emulation/emul-linux-x86-qtlibs[-abi_x86_32(-)] )
+"
+
+
+# src_{configure,compile,test,install} are inherited from multilib-minimal
+EXPORT_FUNCTIONS src_unpack src_prepare pkg_postinst pkg_postrm
+
+multilib_src_configure() { qt4_multilib_src_configure; }
+multilib_src_compile() { qt4_multilib_src_compile; }
+multilib_src_test() { qt4_multilib_src_test; }
+multilib_src_install() { qt4_multilib_src_install; }
+multilib_src_install_all() { qt4_multilib_src_install_all; }
+
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing all the patches to be applied. This variable
+# is expected to be defined in the global scope of ebuilds. Make sure to
+# specify the full path. This variable is used in src_prepare phase.
+#
+# Example:
+# @CODE
+# PATCHES=(
+# "${FILESDIR}/mypatch.patch"
+# "${FILESDIR}/mypatch2.patch"
+# )
+# @CODE
+
+# @ECLASS-VARIABLE: QT4_TARGET_DIRECTORIES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space-separated list of directories that will be configured,
+# compiled, and installed. All paths must be relative to ${S}.
+
+# @ECLASS-VARIABLE: QCONFIG_ADD
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of options that must be added to QT_CONFIG in qconfig.pri
+
+# @ECLASS-VARIABLE: QCONFIG_REMOVE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of options that must be removed from QT_CONFIG in qconfig.pri
+
+# @ECLASS-VARIABLE: QCONFIG_DEFINE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of macros that must be defined in QtCore/qconfig.h
+
+
+###### Phase functions ######
+
+# @FUNCTION: qt4-build-multilib_src_unpack
+# @DESCRIPTION:
+# Unpacks the sources.
+qt4-build-multilib_src_unpack() {
+ if [[ $(gcc-major-version) -lt 4 ]] || [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 4 ]]; then
+ ewarn
+ ewarn "Using a GCC version lower than 4.4 is not supported."
+ ewarn
+ fi
+
+ if [[ ${PN} == qtwebkit ]]; then
+ eshopts_push -s extglob
+ if is-flagq '-g?(gdb)?([1-9])'; then
+ ewarn
+ ewarn "You have enabled debug info (probably have -g or -ggdb in your CFLAGS/CXXFLAGS)."
+ ewarn "You may experience really long compilation times and/or increased memory usage."
+ ewarn "If compilation fails, please try removing -g/-ggdb before reporting a bug."
+ ewarn "For more info check out https://bugs.gentoo.org/307861"
+ ewarn
+ fi
+ eshopts_pop
+ fi
+
+ case ${QT4_BUILD_TYPE} in
+ live) git-r3_src_unpack ;;
+ release) default ;;
+ esac
+}
+
+# @FUNCTION: qt4-build-multilib_src_prepare
+# @DESCRIPTION:
+# Prepare the sources before the configure phase. Strip CFLAGS if necessary, and fix
+# the build system in order to respect CFLAGS/CXXFLAGS/LDFLAGS specified in make.conf.
+qt4-build-multilib_src_prepare() {
+ if [[ ${PN} != qtcore ]]; then
+ # avoid unnecessary qmake recompilations
+ sed -i -e 's/^if true;/if false;/' configure \
+ || die "sed failed (skip qmake bootstrap)"
+ fi
+
+ # skip X11 tests in non-gui packages to avoid spurious dependencies
+ if has ${PN} qtbearer qtcore qtdbus qtscript qtsql qttest qttranslations qtxmlpatterns; then
+ sed -i -e '/^if.*PLATFORM_X11.*CFG_GUI/,/^fi$/d' configure \
+ || die "sed failed (skip X11 tests)"
+ fi
+
+ if [[ ${PN} == qtcore ]]; then
+ # Bug 373061
+ # qmake bus errors with -O2 or -O3 but -O1 works
+ if [[ ${CHOST} == *86*-apple-darwin* ]]; then
+ replace-flags -O[23] -O1
+ fi
+
+ # Bug 503500
+ # undefined reference with -Os and --as-needed
+ if use x86 || use_if_iuse abi_x86_32; then
+ replace-flags -Os -O2
+ fi
+ fi
+
+ if [[ ${PN} == qtdeclarative ]]; then
+ # Bug 551560
+ # gcc-4.8 ICE with -Os, fixed in 4.9
+ if use x86 && [[ $(gcc-version) == 4.8 ]]; then
+ replace-flags -Os -O2
+ fi
+ fi
+
+ if [[ ${PN} == qtwebkit ]]; then
+ # Bug 550780
+ # various ICEs with graphite-related flags, gcc-5 works
+ if [[ $(gcc-major-version) -lt 5 ]]; then
+ filter-flags -fgraphite-identity -floop-strip-mine
+ fi
+ fi
+
+ # Bug 261632
+ if use ppc64; then
+ append-flags -mminimal-toc
+ fi
+
+ # Read also AR from the environment
+ sed -i -e 's/^SYSTEM_VARIABLES="/&AR /' \
+ configure || die "sed SYSTEM_VARIABLES failed"
+
+ # Reset QMAKE_*FLAGS_{RELEASE,DEBUG} variables,
+ # or they will override the user's flags (via .qmake.cache)
+ sed -i -e '/^SYSTEM_VARIABLES=/ i \
+ QMakeVar set QMAKE_CFLAGS_RELEASE\
+ QMakeVar set QMAKE_CFLAGS_DEBUG\
+ QMakeVar set QMAKE_CXXFLAGS_RELEASE\
+ QMakeVar set QMAKE_CXXFLAGS_DEBUG\
+ QMakeVar set QMAKE_LFLAGS_RELEASE\
+ QMakeVar set QMAKE_LFLAGS_DEBUG\n' \
+ configure || die "sed QMAKE_*FLAGS_{RELEASE,DEBUG} failed"
+
+ # Drop -nocache from qmake invocation in all configure tests, to ensure that the
+ # correct toolchain and build flags are picked up from config.tests/.qmake.cache
+ find config.tests/unix -name '*.test' -type f -execdir \
+ sed -i -e '/bin\/qmake/s/-nocache//' '{}' + || die "sed -nocache failed"
+
+ # compile.test needs additional patching so that it doesn't create another cache file
+ # inside the test subdir, which would incorrectly override config.tests/.qmake.cache
+ sed -i -e '/echo.*QT_BUILD_TREE.*\.qmake\.cache/d' \
+ -e '/bin\/qmake/s/ "$SRCDIR/ "QT_BUILD_TREE=$OUTDIR"&/' \
+ config.tests/unix/compile.test || die "sed compile.test failed"
+
+ # Delete references to the obsolete /usr/X11R6 directory
+ # On prefix, this also prevents looking at non-prefix stuff
+ sed -i -re '/^QMAKE_(LIB|INC)DIR(_X11|_OPENGL|)\s+/ s/=.*/=/' \
+ mkspecs/common/linux.conf \
+ mkspecs/$(qt4_get_mkspec)/qmake.conf \
+ || die "sed QMAKE_(LIB|INC)DIR failed"
+
+ if use_if_iuse aqua; then
+ sed -i \
+ -e '/^CONFIG/s:app_bundle::' \
+ -e '/^CONFIG/s:plugin_no_soname:plugin_with_soname absolute_library_soname:' \
+ mkspecs/$(qt4_get_mkspec)/qmake.conf \
+ || die "sed failed (aqua)"
+
+ # we are crazy and build cocoa + qt3support
+ if { ! in_iuse qt3support || use qt3support; } && [[ ${CHOST##*-darwin} -ge 9 ]]; then
+ sed -i -e "/case \"\$PLATFORM,\$CFG_MAC_COCOA\" in/,/;;/ s|CFG_QT3SUPPORT=\"no\"|CFG_QT3SUPPORT=\"yes\"|" \
+ configure || die "sed failed (cocoa + qt3support)"
+ fi
+ fi
+
+ if [[ ${CHOST} == *-darwin* ]]; then
+ # Set FLAGS and remove -arch, since our gcc-apple is multilib crippled (by design)
+ sed -i \
+ -e "s:QMAKE_CFLAGS_RELEASE.*=.*:QMAKE_CFLAGS_RELEASE=${CFLAGS}:" \
+ -e "s:QMAKE_CXXFLAGS_RELEASE.*=.*:QMAKE_CXXFLAGS_RELEASE=${CXXFLAGS}:" \
+ -e "s:QMAKE_LFLAGS_RELEASE.*=.*:QMAKE_LFLAGS_RELEASE=-headerpad_max_install_names ${LDFLAGS}:" \
+ -e "s:-arch\s\w*::g" \
+ mkspecs/common/g++-macx.conf \
+ || die "sed g++-macx.conf failed"
+
+ # Fix configure's -arch settings that appear in qmake/Makefile and also
+ # fix arch handling (automagically duplicates our -arch arg and breaks
+ # pch). Additionally disable Xarch support.
+ sed -i \
+ -e "s:-arch i386::" \
+ -e "s:-arch ppc::" \
+ -e "s:-arch x86_64::" \
+ -e "s:-arch ppc64::" \
+ -e "s:-arch \$i::" \
+ -e "/if \[ ! -z \"\$NATIVE_64_ARCH\" \]; then/,/fi/ d" \
+ -e "s:CFG_MAC_XARCH=yes:CFG_MAC_XARCH=no:g" \
+ -e "s:-Xarch_x86_64::g" \
+ -e "s:-Xarch_ppc64::g" \
+ configure mkspecs/common/gcc-base-macx.conf mkspecs/common/g++-macx.conf \
+ || die "sed -arch/-Xarch failed"
+
+ # On Snow Leopard don't fall back to 10.5 deployment target.
+ if [[ ${CHOST} == *-apple-darwin10 ]]; then
+ sed -i \
+ -e "s:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET.*:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET 10.6:g" \
+ -e "s:-mmacosx-version-min=10.[0-9]:-mmacosx-version-min=10.6:g" \
+ configure mkspecs/common/g++-macx.conf \
+ || die "sed deployment target failed"
+ fi
+ fi
+
+ if [[ ${CHOST} == *-solaris* ]]; then
+ sed -i -e '/^QMAKE_LFLAGS_THREAD/a QMAKE_LFLAGS_DYNAMIC_LIST = -Wl,--dynamic-list,' \
+ mkspecs/$(qt4_get_mkspec)/qmake.conf || die
+ fi
+
+ # apply patches
+ [[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+ epatch_user
+}
+
+# Configure phase for a single multilib ABI: prepares the environment,
+# exports toolchain variables, assembles the configure argument list,
+# runs configure, then qmake in every target subdirectory.
+qt4_multilib_src_configure() {
+	qt4_prepare_env
+
+	qt4_symlink_tools_to_build_dir
+
+	# toolchain setup ('local -x' because of bug 532510)
+	local -x \
+		AR="$(tc-getAR) cqs" \
+		CC=$(tc-getCC) \
+		CXX=$(tc-getCXX) \
+		LD=$(tc-getCXX) \
+		MAKEFLAGS=${MAKEOPTS} \
+		OBJCOPY=$(tc-getOBJCOPY) \
+		OBJDUMP=$(tc-getOBJDUMP) \
+		STRIP=$(tc-getSTRIP)
+
+	# convert tc-arch to the values supported by Qt
+	local arch=$(tc-arch)
+	case ${arch} in
+		amd64|x64-*) arch=x86_64 ;;
+		arm64|hppa) arch=generic ;;
+		ppc*-macos) arch=ppc ;;
+		ppc*) arch=powerpc ;;
+		sparc*) arch=sparc ;;
+		x86-macos) arch=x86 ;;
+		x86*) arch=i386 ;;
+	esac
+
+	# configure arguments
+	local conf=(
+		# installation paths
+		-prefix "${QT4_PREFIX}"
+		-bindir "${QT4_BINDIR}"
+		-libdir "${QT4_LIBDIR}"
+		-docdir "${QT4_DOCDIR}"
+		-headerdir "${QT4_HEADERDIR}"
+		-plugindir "${QT4_PLUGINDIR}"
+		-importdir "${QT4_IMPORTDIR}"
+		-datadir "${QT4_DATADIR}"
+		-translationdir "${QT4_TRANSLATIONDIR}"
+		-sysconfdir "${QT4_SYSCONFDIR}"
+		-examplesdir "${QT4_EXAMPLESDIR}"
+		-demosdir "${QT4_DEMOSDIR}"
+
+		# debug/release
+		$(use_if_iuse debug && echo -debug || echo -release)
+		-no-separate-debug-info
+
+		# licensing stuff
+		-opensource -confirm-license
+
+		# build shared libraries
+		-shared
+
+		# skip recursive processing of .pro files at the end of configure
+		# (we run qmake by ourselves), thus saving quite a bit of time
+		-dont-process
+
+		# always enable large file support
+		-largefile
+
+		# exceptions USE flag
+		$(in_iuse exceptions && qt_use exceptions || echo -exceptions)
+
+		# build STL support
+		-stl
+
+		# architecture/platform (mkspec)
+		-arch ${arch}
+		-platform $(qt4_get_mkspec)
+
+		# instruction set support
+		$(is-flagq -mno-mmx && echo -no-mmx)
+		$(is-flagq -mno-3dnow && echo -no-3dnow)
+		$(is-flagq -mno-sse && echo -no-sse)
+		$(is-flagq -mno-sse2 && echo -no-sse2)
+		$(is-flagq -mno-sse3 && echo -no-sse3)
+		$(is-flagq -mno-ssse3 && echo -no-ssse3)
+		$(is-flagq -mno-sse4.1 && echo -no-sse4.1)
+		$(is-flagq -mno-sse4.2 && echo -no-sse4.2)
+		$(is-flagq -mno-avx && echo -no-avx)
+		# disable NEON only when an -mfpu= is set and it is not a neon variant
+		$(is-flagq -mfpu=* && ! is-flagq -mfpu=*neon* && echo -no-neon)
+
+		# bug 367045
+		$([[ ${CHOST} == *86*-apple-darwin* ]] && echo -no-ssse3)
+
+		# prefer system libraries
+		-system-zlib
+
+		# exclude examples and demos from default build
+		-nomake examples
+		-nomake demos
+
+		# disable rpath on non-prefix (bugs 380415 and 417169)
+		$(usex prefix '' -no-rpath)
+
+		# print verbose information about each configure test
+		-verbose
+
+		# precompiled headers don't work on hardened, where the flag is masked
+		$(in_iuse pch && qt_use pch || echo -no-pch)
+
+		# enable linker optimizations to reduce relocations, except on Solaris
+		# where this flag seems to introduce major breakage to applications,
+		# mostly to be seen as a core dump with the message:
+		# "QPixmap: Must construct a QApplication before a QPaintDevice"
+		$([[ ${CHOST} != *-solaris* ]] && echo -reduce-relocations)
+	)
+
+	if use_if_iuse aqua; then
+		if [[ ${CHOST##*-darwin} -ge 9 ]]; then
+			conf+=(
+				# on (snow) leopard use the new (frameworked) cocoa code
+				-cocoa -framework
+				# add hint for the framework location
+				-F"${QT4_LIBDIR}"
+			)
+		else
+			conf+=(-no-framework)
+		fi
+	fi
+
+	conf+=(
+		# module-specific options
+		"${myconf[@]}"
+	)
+
+	einfo "Configuring with: ${conf[@]}"
+	"${S}"/configure "${conf[@]}" || die "configure failed"
+
+	# configure is stupid and assigns QMAKE_LFLAGS twice,
+	# thus the previous -rpath-link flag gets overwritten
+	# and some packages (e.g. qthelp) fail to link
+	sed -i -e '/^QMAKE_LFLAGS =/ s:$: $$QMAKE_LFLAGS:' \
+		.qmake.cache || die "sed .qmake.cache failed"
+
+	qt4_qmake
+	qt4_foreach_target_subdir qt4_qmake
+}
+
+# Compile phase for a single multilib ABI: runs emake in each target subdir.
+qt4_multilib_src_compile() {
+	qt4_prepare_env
+
+	qt4_foreach_target_subdir emake
+}
+
+# Test phase for a single multilib ABI: 'make check' is run serially (-j1)
+# in each target subdir.
+qt4_multilib_src_test() {
+	qt4_prepare_env
+
+	qt4_foreach_target_subdir emake -j1 check
+}
+
+# Install phase for a single multilib ABI. For qtcore it additionally
+# installs mkspecs/qmake, an env.d file, and the qtchooser configuration.
+qt4_multilib_src_install() {
+	qt4_prepare_env
+
+	qt4_foreach_target_subdir emake INSTALL_ROOT="${D}" install
+
+	if [[ ${PN} == qtcore ]]; then
+		# use 'set --' so the exact command can be logged via "$*"
+		set -- emake INSTALL_ROOT="${D}" install_{mkspecs,qmake}
+		einfo "Running $*"
+		"$@"
+
+		# install env.d file
+		cat > "${T}/44qt4-${CHOST}" <<-_EOF_
+			LDPATH="${QT4_LIBDIR}"
+		_EOF_
+		doenvd "${T}/44qt4-${CHOST}"
+
+		# install qtchooser configuration file
+		cat > "${T}/qt4-${CHOST}.conf" <<-_EOF_
+			${QT4_BINDIR}
+			${QT4_LIBDIR}
+		_EOF_
+
+		(
+			insinto /etc/xdg/qtchooser
+			doins "${T}/qt4-${CHOST}.conf"
+		)
+
+		if multilib_is_native_abi; then
+			# convenience symlinks
+			dosym qt4-"${CHOST}".conf /etc/xdg/qtchooser/4.conf
+			dosym qt4-"${CHOST}".conf /etc/xdg/qtchooser/qt4.conf
+			# TODO bug 522646: write an eselect module to manage default.conf
+			dosym qt4.conf /etc/xdg/qtchooser/default.conf
+		fi
+	fi
+
+	# move pkgconfig directory to the correct location
+	if [[ -d ${D}${QT4_LIBDIR}/pkgconfig ]]; then
+		# NOTE(review): source path uses ${D} while destination uses ${ED} —
+		# relies on EAPI 4/5 ${ED} carrying the trailing slash; verify if EAPI changes
+		mv "${D}${QT4_LIBDIR}"/pkgconfig "${ED}usr/$(get_libdir)" || die
+	fi
+
+	qt4_install_module_qconfigs
+	qt4_symlink_framework_headers
+}
+
+# ABI-independent install tasks: hook gentoo-qconfig.h into qconfig.h,
+# relocate qconfig.pri into the gentoo mkspecs dir, and install the
+# private headers of selected modules.
+qt4_multilib_src_install_all() {
+	if [[ ${PN} == qtcore ]]; then
+		# include gentoo-qconfig.h at the beginning of Qt{,Core}/qconfig.h
+		if use aqua && [[ ${CHOST#*-darwin} -ge 9 ]]; then
+			sed -i -e '1i #include <QtCore/Gentoo/gentoo-qconfig.h>\n' \
+				"${D}${QT4_LIBDIR}"/QtCore.framework/Headers/qconfig.h \
+				|| die "sed failed (qconfig.h)"
+			dosym "${QT4_HEADERDIR#${EPREFIX}}"/Gentoo \
+				"${QT4_LIBDIR#${EPREFIX}}"/QtCore.framework/Headers/Gentoo
+		else
+			sed -i -e '1i #include <Gentoo/gentoo-qconfig.h>\n' \
+				"${D}${QT4_HEADERDIR}"/Qt{,Core}/qconfig.h \
+				|| die "sed failed (qconfig.h)"
+		fi
+
+		dodir "${QT4_DATADIR#${EPREFIX}}"/mkspecs/gentoo
+		mv "${D}${QT4_DATADIR}"/mkspecs/{qconfig.pri,gentoo/} || die
+	fi
+
+	# install private headers of a few modules
+	if has ${PN} qtcore qtdeclarative qtgui qtscript; then
+		local moduledir=${PN#qt}
+		# e.g. "core" -> "QtCore" (capitalize first letter, prefix with Qt)
+		local modulename=Qt$(tr 'a-z' 'A-Z' <<< ${moduledir:0:1})${moduledir:1}
+		[[ ${moduledir} == core ]] && moduledir=corelib
+
+		einfo "Installing private headers into ${QT4_HEADERDIR}/${modulename}/private"
+		insinto "${QT4_HEADERDIR#${EPREFIX}}"/${modulename}/private
+		find "${S}"/src/${moduledir} -type f -name '*_p.h' -exec doins '{}' + || die
+	fi
+
+	prune_libtool_files
+}
+
+# @FUNCTION: qt4-build-multilib_pkg_postinst
+# @DESCRIPTION:
+# Regenerate configuration after installation or upgrade/downgrade.
+qt4-build-multilib_pkg_postinst() {
+	# rebuild the merged qconfig.{h,pri} from all installed modules
+	qt4_regenerate_global_qconfigs
+}
+
+# @FUNCTION: qt4-build-multilib_pkg_postrm
+# @DESCRIPTION:
+# Regenerate configuration when a module is completely removed.
+qt4-build-multilib_pkg_postrm() {
+	# rebuild (or clean up) the merged qconfig.{h,pri}
+	qt4_regenerate_global_qconfigs
+}
+
+
+###### Public helpers ######
+
+# @FUNCTION: qt_use
+# @USAGE: <flag> [feature] [enableval]
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE.
+#
+# Outputs "-${enableval}-${feature}" if <flag> is enabled, "-no-${feature}"
+# otherwise. If [feature] is not specified, <flag> is used in its place.
+# If [enableval] is not specified, the "-${enableval}" prefix is omitted.
+qt_use() {
+	[[ $# -ge 1 ]] || die "${FUNCNAME}() requires at least one argument"
+
+	# usex: flag on -> "[-$3]-${2:-$1}", flag off -> "-no-${2:-$1}"
+	usex "$1" "${3:+-$3}-${2:-$1}" "-no-${2:-$1}"
+}
+
+# @FUNCTION: qt_native_use
+# @USAGE: <flag> [feature] [enableval]
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE.
+#
+# Outputs "-${enableval}-${feature}" if <flag> is enabled and we are currently
+# building for the native ABI, "-no-${feature}" otherwise. If [feature] is not
+# specified, <flag> is used in its place. If [enableval] is not specified,
+# the "-${enableval}" prefix is omitted.
+qt_native_use() {
+	[[ $# -ge 1 ]] || die "${FUNCNAME}() requires at least one argument"
+
+	# non-native ABIs always get the feature disabled
+	multilib_is_native_abi && qt_use "$@" || echo "-no-${2:-$1}"
+}
+
+
+###### Internal functions ######
+
+# @FUNCTION: qt4_prepare_env
+# @INTERNAL
+# @DESCRIPTION:
+# Prepares the environment for building Qt.
+qt4_prepare_env() {
+	# setup installation directories
+	# note: keep paths in sync with qmake-utils.eclass
+	QT4_PREFIX=${EPREFIX}/usr
+	QT4_HEADERDIR=${QT4_PREFIX}/include/qt4
+	QT4_LIBDIR=${QT4_PREFIX}/$(get_libdir)/qt4
+	QT4_BINDIR=${QT4_LIBDIR}/bin
+	QT4_PLUGINDIR=${QT4_LIBDIR}/plugins
+	QT4_IMPORTDIR=${QT4_LIBDIR}/imports
+	QT4_DATADIR=${QT4_PREFIX}/share/qt4
+	QT4_DOCDIR=${QT4_PREFIX}/share/doc/qt-${PV}
+	QT4_TRANSLATIONDIR=${QT4_DATADIR}/translations
+	QT4_EXAMPLESDIR=${QT4_DATADIR}/examples
+	QT4_DEMOSDIR=${QT4_DATADIR}/demos
+	QT4_SYSCONFDIR=${EPREFIX}/etc/qt4
+	QMAKE_LIBDIR_QT=${QT4_LIBDIR}
+
+	# redirect XDG config lookups into the build's temp dir
+	export XDG_CONFIG_HOME="${T}"
+}
+
+# @FUNCTION: qt4_foreach_target_subdir
+# @INTERNAL
+# @DESCRIPTION:
+# Executes the given command inside each directory listed in QT4_TARGET_DIRECTORIES.
+qt4_foreach_target_subdir() {
+	local ret=0 subdir=
+	for subdir in ${QT4_TARGET_DIRECTORIES}; do
+		mkdir -p "${subdir}" || die
+		pushd "${subdir}" >/dev/null || die
+
+		einfo "Running $* ${subdir:+in ${subdir}}"
+		"$@"
+		# accumulate exit statuses; any failure yields a non-zero return
+		((ret+=$?))
+
+		popd >/dev/null || die
+	done
+
+	return ${ret}
+}
+
+# @FUNCTION: qt4_symlink_tools_to_build_dir
+# @INTERNAL
+# @DESCRIPTION:
+# Symlinks qtcore tools to BUILD_DIR,
+# so that they can be used when building other modules.
+qt4_symlink_tools_to_build_dir() {
+	local tool= tools=()
+	# qtcore provides these tools itself; other modules link to the installed copies
+	if [[ ${PN} != qtcore ]]; then
+		tools+=(qmake moc rcc uic)
+	fi
+
+	mkdir -p "${BUILD_DIR}"/bin || die
+	pushd "${BUILD_DIR}"/bin >/dev/null || die
+
+	for tool in "${tools[@]}"; do
+		[[ -e ${QT4_BINDIR}/${tool} ]] || continue
+		ln -s "${QT4_BINDIR}/${tool}" . || die "failed to symlink ${tool}"
+	done
+
+	popd >/dev/null || die
+}
+
+# @FUNCTION: qt4_qmake
+# @INTERNAL
+# @DESCRIPTION:
+# Helper function that runs qmake in the current target subdir.
+# Intended to be called by qt4_foreach_target_subdir().
+qt4_qmake() {
+	# translate the current build dir back into the corresponding source dir
+	local projectdir=${PWD/#${BUILD_DIR}/${S}}
+
+	"${BUILD_DIR}"/bin/qmake \
+		CONFIG+=nostrip \
+		LIBS+=-L"${QT4_LIBDIR}" \
+		"${projectdir}" \
+		|| die "qmake failed (${projectdir})"
+}
+
+# @FUNCTION: qt4_install_module_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Creates and installs gentoo-specific ${PN}-qconfig.{h,pri} files.
+qt4_install_module_qconfigs() {
+	local x
+	if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} ]]; then
+		for x in QCONFIG_ADD QCONFIG_REMOVE; do
+			# ${!x} is indirect expansion: the value of QCONFIG_ADD / QCONFIG_REMOVE
+			[[ -n ${!x} ]] && echo ${x}=${!x} >> "${BUILD_DIR}"/${PN}-qconfig.pri
+		done
+		insinto ${QT4_DATADIR#${EPREFIX}}/mkspecs/gentoo
+		doins "${BUILD_DIR}"/${PN}-qconfig.pri
+	fi
+
+	if [[ -n ${QCONFIG_DEFINE} ]]; then
+		for x in ${QCONFIG_DEFINE}; do
+			echo "#define ${x}" >> "${BUILD_DIR}"/gentoo-${PN}-qconfig.h
+		done
+		insinto ${QT4_HEADERDIR#${EPREFIX}}/Gentoo
+		doins "${BUILD_DIR}"/gentoo-${PN}-qconfig.h
+	fi
+}
+
+# @FUNCTION: qt4_regenerate_global_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Generates Gentoo-specific qconfig.{h,pri} according to the build configuration.
+# Don't call die here because dying in pkg_post{inst,rm} only makes things worse.
+qt4_regenerate_global_qconfigs() {
+	if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} || -n ${QCONFIG_DEFINE} || ${PN} == qtcore ]]; then
+		local x qconfig_add qconfig_remove qconfig_new
+		# gather QCONFIG_ADD/QCONFIG_REMOVE entries from every installed module
+		for x in "${ROOT}${QT4_DATADIR}"/mkspecs/gentoo/*-qconfig.pri; do
+			[[ -f ${x} ]] || continue
+			qconfig_add+=" $(sed -n 's/^QCONFIG_ADD=//p' "${x}")"
+			qconfig_remove+=" $(sed -n 's/^QCONFIG_REMOVE=//p' "${x}")"
+		done
+
+		if [[ -e "${ROOT}${QT4_DATADIR}"/mkspecs/gentoo/qconfig.pri ]]; then
+			# start with the qconfig.pri that qtcore installed
+			if ! cp "${ROOT}${QT4_DATADIR}"/mkspecs/gentoo/qconfig.pri \
+				"${ROOT}${QT4_DATADIR}"/mkspecs/qconfig.pri; then
+				eerror "cp qconfig failed."
+				return 1
+			fi
+
+			# generate list of QT_CONFIG entries from the existing list
+			# including qconfig_add and excluding qconfig_remove
+			for x in $(sed -n 's/^QT_CONFIG +=//p' \
+				"${ROOT}${QT4_DATADIR}"/mkspecs/qconfig.pri) ${qconfig_add}; do
+					has ${x} ${qconfig_remove} || qconfig_new+=" ${x}"
+			done
+
+			# replace the existing QT_CONFIG list with qconfig_new
+			if ! sed -i -e "s/QT_CONFIG +=.*/QT_CONFIG += ${qconfig_new}/" \
+				"${ROOT}${QT4_DATADIR}"/mkspecs/qconfig.pri; then
+				eerror "Sed for QT_CONFIG failed"
+				return 1
+			fi
+
+			# create Gentoo/qconfig.h
+			if [[ ! -e ${ROOT}${QT4_HEADERDIR}/Gentoo ]]; then
+				if ! mkdir -p "${ROOT}${QT4_HEADERDIR}"/Gentoo; then
+					eerror "mkdir ${QT4_HEADERDIR}/Gentoo failed"
+					return 1
+				fi
+			fi
+			# concatenate all per-module headers into a single gentoo-qconfig.h
+			: > "${ROOT}${QT4_HEADERDIR}"/Gentoo/gentoo-qconfig.h
+			for x in "${ROOT}${QT4_HEADERDIR}"/Gentoo/gentoo-*-qconfig.h; do
+				[[ -f ${x} ]] || continue
+				cat "${x}" >> "${ROOT}${QT4_HEADERDIR}"/Gentoo/gentoo-qconfig.h
+			done
+		else
+			# qtcore's qconfig.pri is gone: remove the generated files and
+			# (best-effort) the now-empty directories
+			rm -f "${ROOT}${QT4_DATADIR}"/mkspecs/qconfig.pri
+			rm -f "${ROOT}${QT4_HEADERDIR}"/Gentoo/gentoo-qconfig.h
+			rmdir "${ROOT}${QT4_DATADIR}"/mkspecs \
+				"${ROOT}${QT4_DATADIR}" \
+				"${ROOT}${QT4_HEADERDIR}"/Gentoo \
+				"${ROOT}${QT4_HEADERDIR}" 2>/dev/null
+		fi
+	fi
+}
+
+# @FUNCTION: qt4_symlink_framework_headers
+# @DESCRIPTION:
+# On OS X we need to add some symlinks when frameworks are being
+# used, to avoid complications with some more or less stupid packages.
+qt4_symlink_framework_headers() {
+	if use_if_iuse aqua && [[ ${CHOST##*-darwin} -ge 9 ]]; then
+		local frw dest f h rdir
+		# Some packages tend to include <Qt/...>
+		dodir "${QT4_HEADERDIR#${EPREFIX}}"/Qt
+
+		# Fake normal headers when frameworks are installed... eases life later
+		# on, make sure we use relative links though, as some ebuilds assume
+		# these dirs exist in src_install to add additional files
+		# Compute rdir: the relative path from QT4_HEADERDIR to QT4_LIBDIR.
+		# First strip the common path prefix...
+		f=${QT4_HEADERDIR}
+		h=${QT4_LIBDIR}
+		while [[ -n ${f} && ${f%%/*} == ${h%%/*} ]] ; do
+			f=${f#*/}
+			h=${h#*/}
+		done
+		# ...then prepend one "../" per remaining component
+		rdir=${h}
+		f="../"
+		while [[ ${h} == */* ]] ; do
+			f="${f}../"
+			h=${h#*/}
+		done
+		rdir="${f}${rdir}"
+
+		for frw in "${D}${QT4_LIBDIR}"/*.framework; do
+			[[ -e "${frw}"/Headers ]] || continue
+			f=$(basename ${frw})
+			dest="${QT4_HEADERDIR#${EPREFIX}}"/${f%.framework}
+			dosym "${rdir}"/${f}/Headers "${dest}"
+
+			# Link normal headers as well.
+			for hdr in "${D}${QT4_LIBDIR}/${f}"/Headers/*; do
+				h=$(basename ${hdr})
+				dosym "../${rdir}"/${f}/Headers/${h} \
+					"${QT4_HEADERDIR#${EPREFIX}}"/Qt/${h}
+			done
+		done
+	fi
+}
+
+# @FUNCTION: qt4_get_mkspec
+# @INTERNAL
+# @DESCRIPTION:
+# Returns the right mkspec for the current CHOST/CXX combination.
+qt4_get_mkspec() {
+	local spec=
+
+	# platform part, derived from CHOST
+	case ${CHOST} in
+		*-linux*)
+			spec=linux ;;
+		*-darwin*)
+			use_if_iuse aqua &&
+				spec=macx ||	# mac with carbon/cocoa
+				spec=darwin ;;	# darwin/mac with X11
+		*-freebsd*|*-dragonfly*)
+			spec=freebsd ;;
+		*-netbsd*)
+			spec=netbsd ;;
+		*-openbsd*)
+			spec=openbsd ;;
+		*-aix*)
+			spec=aix ;;
+		hppa*-hpux*)
+			spec=hpux ;;
+		ia64*-hpux*)
+			spec=hpuxi ;;
+		*-solaris*)
+			spec=solaris ;;
+		*)
+			die "qt4-build-multilib.eclass: unsupported CHOST '${CHOST}'" ;;
+	esac
+
+	# compiler part, appended to the platform part
+	case $(tc-getCXX) in
+		*g++*)
+			spec+=-g++ ;;
+		*clang*)
+			if [[ -d ${S}/mkspecs/unsupported/${spec}-clang ]]; then
+				spec=unsupported/${spec}-clang
+			else
+				ewarn "${spec}-clang mkspec does not exist, falling back to ${spec}-g++"
+				spec+=-g++
+			fi ;;
+		*icpc*)
+			if [[ -d ${S}/mkspecs/${spec}-icc ]]; then
+				spec+=-icc
+			else
+				ewarn "${spec}-icc mkspec does not exist, falling back to ${spec}-g++"
+				spec+=-g++
+			fi ;;
+		*)
+			die "qt4-build-multilib.eclass: unsupported compiler '$(tc-getCXX)'" ;;
+	esac
+
+	# Add -64 for 64-bit prefix profiles
+	if use amd64-linux || use ia64-linux || use ppc64-linux ||
+		use x64-macos ||
+		use sparc64-freebsd || use x64-freebsd || use x64-openbsd ||
+		use ia64-hpux ||
+		use sparc64-solaris || use x64-solaris
+	then
+		[[ -d ${S}/mkspecs/${spec}-64 ]] && spec+=-64
+	fi
+
+	echo ${spec}
+}
diff --git a/eclass/qt4-build.eclass b/eclass/qt4-build.eclass
new file mode 100644
index 000000000000..28e13c336db0
--- /dev/null
+++ b/eclass/qt4-build.eclass
@@ -0,0 +1,804 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qt4-build.eclass
+# @MAINTAINER:
+# Qt herd <qt@gentoo.org>
+# @BLURB: Eclass for Qt4 split ebuilds.
+# @DESCRIPTION:
+# This eclass contains various functions that are used when building Qt4.
+
+# Sanity-check the EAPI before doing anything else.
+case ${EAPI} in
+	4|5) : ;;
+	*) die "qt4-build.eclass: unsupported EAPI=${EAPI:-0}" ;;
+esac
+
+inherit eutils flag-o-matic multilib toolchain-funcs
+
+HOMEPAGE="https://www.qt.io/"
+LICENSE="|| ( LGPL-2.1 GPL-3 )"
+SLOT="4"
+
+# Live ebuilds (4.x.9999) build from git; everything else uses the
+# official release tarball.
+case ${PV} in
+	4.?.9999)
+		QT4_BUILD_TYPE="live"
+		EGIT_REPO_URI=(
+			"git://code.qt.io/qt/qt.git"
+			"https://code.qt.io/git/qt/qt.git"
+			"https://github.com/qtproject/qt.git"
+		)
+		EGIT_BRANCH=${PV%.9999}
+		inherit git-r3
+		;;
+	*)
+		QT4_BUILD_TYPE="release"
+		MY_P=qt-everywhere-opensource-src-${PV/_/-}
+		SRC_URI="http://download.qt.io/archive/qt/${PV%.*}/${PV}/${MY_P}.tar.gz"
+		S=${WORKDIR}/${MY_P}
+		;;
+esac
+
+IUSE="aqua debug pch"
+[[ ${PN} != qtxmlpatterns ]] && IUSE+=" +exceptions"
+
+DEPEND="virtual/pkgconfig"
+if [[ ${QT4_BUILD_TYPE} == live ]]; then
+	DEPEND+=" dev-lang/perl"
+fi
+
+# @FUNCTION: qt4-build_pkg_setup
+# @DESCRIPTION:
+# Sets up PATH and LD_LIBRARY_PATH.
+qt4-build_pkg_setup() {
+	# Warn users of possible breakage when downgrading to a previous release.
+	# Downgrading revisions within the same release is safe.
+	if has_version ">${CATEGORY}/${P}-r9999:4"; then
+		ewarn
+		ewarn "Downgrading Qt is completely unsupported and can break your system!"
+		ewarn
+	fi
+
+	# prepend the in-tree tools/libs so the build uses them
+	PATH="${S}/bin${PATH:+:}${PATH}"
+	if [[ ${CHOST} != *-darwin* ]]; then
+		LD_LIBRARY_PATH="${S}/lib${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
+	else
+		DYLD_LIBRARY_PATH="${S}/lib${DYLD_LIBRARY_PATH:+:}${DYLD_LIBRARY_PATH}"
+		# On MacOS we *need* at least src/gui/kernel/qapplication_mac.mm for
+		# platform detection. Note: needs to come before any directories to
+		# avoid extract failure.
+		[[ ${CHOST} == *-apple-darwin* ]] && \
+			QT4_EXTRACT_DIRECTORIES="src/gui/kernel/qapplication_mac.mm
+				${QT4_EXTRACT_DIRECTORIES}"
+	fi
+}
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing all the patches to be applied. This variable
+# is expected to be defined in the global scope of ebuilds. Make sure to
+# specify the full path. This variable is used in src_prepare phase.
+#
+# Example:
+# @CODE
+# PATCHES=(
+# "${FILESDIR}/mypatch.patch"
+# "${FILESDIR}/patches_folder/"
+# )
+# @CODE
+
+# @ECLASS-VARIABLE: QT4_EXTRACT_DIRECTORIES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space-separated list of directories that will be extracted
+# from Qt tarball.
+
+# @ECLASS-VARIABLE: QT4_TARGET_DIRECTORIES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Space-separated list of directories that will be configured,
+# compiled, and installed. All paths must be relative to ${S}.
+
+# @FUNCTION: qt4-build_src_unpack
+# @DESCRIPTION:
+# Unpacks the sources.
+qt4-build_src_unpack() {
+	setqtenv
+
+	# warn on GCC < 4.4
+	if [[ $(gcc-major-version) -lt 4 ]] || [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 4 ]]; then
+		ewarn
+		ewarn "Using a GCC version lower than 4.4 is not supported."
+		ewarn
+	fi
+
+	if [[ ${PN} == qtwebkit ]]; then
+		eshopts_push -s extglob
+		if is-flagq '-g?(gdb)?([1-9])'; then
+			ewarn
+			ewarn "You have enabled debug info (probably have -g or -ggdb in your CFLAGS/CXXFLAGS)."
+			ewarn "You may experience really long compilation times and/or increased memory usage."
+			ewarn "If compilation fails, please try removing -g/-ggdb before reporting a bug."
+			ewarn "For more info check out https://bugs.gentoo.org/307861"
+			ewarn
+		fi
+		eshopts_pop
+	fi
+
+	case ${QT4_BUILD_TYPE} in
+		live)
+			git-r3_src_unpack
+			;;
+		release)
+			# extract only the parts of the tarball that are actually needed
+			local tarball="${MY_P}.tar.gz" target= targets=
+			for target in configure LICENSE.GPL3 LICENSE.LGPL projects.pro \
+				src/{qbase,qt_targets,qt_install}.pri bin config.tests \
+				mkspecs qmake ${QT4_EXTRACT_DIRECTORIES}
+			do
+				targets+="${MY_P}/${target} "
+			done
+
+			ebegin "Unpacking parts of ${tarball}:" ${targets//${MY_P}\/}
+			tar -xzf "${DISTDIR}/${tarball}" ${targets}
+			eend $? || die "failed to unpack"
+			;;
+	esac
+}
+
+# @FUNCTION: qt4-build_src_prepare
+# @DESCRIPTION:
+# Prepare the sources before the configure phase. Strip CFLAGS if necessary, and fix
+# the build system in order to respect CFLAGS/CXXFLAGS/LDFLAGS specified in make.conf.
+qt4-build_src_prepare() {
+	setqtenv
+
+	if [[ ${QT4_BUILD_TYPE} == live ]]; then
+		QTDIR="." ./bin/syncqt || die "syncqt failed"
+	fi
+
+	# avoid X11 dependency in non-gui packages
+	local nolibx11_pkgs="qtbearer qtcore qtdbus qtscript qtsql qttest qtxmlpatterns"
+	has ${PN} ${nolibx11_pkgs} && qt_nolibx11
+
+	if use aqua; then
+		# provide a proper macx-g++-64
+		# NOTE(review): ln -s result is unchecked (no || die); a failure here
+		# would be silent
+		use x64-macos && ln -s macx-g++ mkspecs/$(qt_mkspecs_dir)
+
+		sed -e '/^CONFIG/s:app_bundle::' \
+			-e '/^CONFIG/s:plugin_no_soname:plugin_with_soname absolute_library_soname:' \
+			-i mkspecs/$(qt_mkspecs_dir)/qmake.conf || die
+	fi
+
+	if [[ ${CATEGORY}/${PN} != dev-qt/qtcore ]]; then
+		skip_qmake_build
+		skip_project_generation
+		symlink_binaries_to_buildtree
+	else
+		# Bug 373061
+		# qmake bus errors with -O2 or -O3 but -O1 works
+		if [[ ${CHOST} == *86*-apple-darwin* ]]; then
+			replace-flags -O[23] -O1
+		fi
+
+		# Bug 503500
+		# undefined reference with -Os and --as-needed
+		if use x86; then
+			replace-flags -Os -O2
+		fi
+	fi
+
+	# Bug 261632
+	if use ppc64; then
+		append-flags -mminimal-toc
+	fi
+
+	# Bug 417105
+	# graphite on gcc 4.7 causes miscompilations
+	if [[ $(gcc-version) == "4.7" ]]; then
+		filter-flags -fgraphite-identity
+	fi
+
+	if use_if_iuse c++0x; then
+		append-cxxflags -std=c++0x
+	fi
+
+	# Respect CC, CXX, {C,CXX,LD}FLAGS in .qmake.cache
+	sed -e "/^SYSTEM_VARIABLES=/i \
+		CC='$(tc-getCC)'\n\
+		CXX='$(tc-getCXX)'\n\
+		CFLAGS='${CFLAGS}'\n\
+		CXXFLAGS='${CXXFLAGS}'\n\
+		LDFLAGS='${LDFLAGS}'\n\
+		QMakeVar set QMAKE_CFLAGS_RELEASE\n\
+		QMakeVar set QMAKE_CFLAGS_DEBUG\n\
+		QMakeVar set QMAKE_CXXFLAGS_RELEASE\n\
+		QMakeVar set QMAKE_CXXFLAGS_DEBUG\n\
+		QMakeVar set QMAKE_LFLAGS_RELEASE\n\
+		QMakeVar set QMAKE_LFLAGS_DEBUG\n"\
+		-i configure \
+		|| die "sed SYSTEM_VARIABLES failed"
+
+	# Respect CC, CXX, LINK and *FLAGS in config.tests
+	find config.tests/unix -name '*.test' -type f -print0 | xargs -0 \
+		sed -i -e "/bin\/qmake/ s: \"\$SRCDIR/: \
+			'QMAKE_CC=$(tc-getCC)' 'QMAKE_CXX=$(tc-getCXX)' 'QMAKE_LINK=$(tc-getCXX)' \
+			'QMAKE_CFLAGS+=${CFLAGS}' 'QMAKE_CXXFLAGS+=${CXXFLAGS}' 'QMAKE_LFLAGS+=${LDFLAGS}'&:" \
+		|| die "sed config.tests failed"
+
+	# Bug 172219
+	sed -e 's:/X11R6/:/:' -i mkspecs/$(qt_mkspecs_dir)/qmake.conf || die
+
+	if [[ ${CHOST} == *-darwin* ]]; then
+		# Set FLAGS *and* remove -arch, since our gcc-apple is multilib
+		# crippled (by design) :/
+		local mac_gpp_conf=
+		if [[ -f mkspecs/common/mac-g++.conf ]]; then
+			# qt < 4.8 has mac-g++.conf
+			mac_gpp_conf="mkspecs/common/mac-g++.conf"
+		elif [[ -f mkspecs/common/g++-macx.conf ]]; then
+			# qt >= 4.8 has g++-macx.conf
+			mac_gpp_conf="mkspecs/common/g++-macx.conf"
+		else
+			die "no known conf file for mac found"
+		fi
+		sed \
+			-e "s:QMAKE_CFLAGS_RELEASE.*=.*:QMAKE_CFLAGS_RELEASE=${CFLAGS}:" \
+			-e "s:QMAKE_CXXFLAGS_RELEASE.*=.*:QMAKE_CXXFLAGS_RELEASE=${CXXFLAGS}:" \
+			-e "s:QMAKE_LFLAGS_RELEASE.*=.*:QMAKE_LFLAGS_RELEASE=-headerpad_max_install_names ${LDFLAGS}:" \
+			-e "s:-arch\s\w*::g" \
+			-i ${mac_gpp_conf} \
+			|| die "sed ${mac_gpp_conf} failed"
+
+		# Fix configure's -arch settings that appear in qmake/Makefile and also
+		# fix arch handling (automagically duplicates our -arch arg and breaks
+		# pch). Additionally disable Xarch support.
+		local mac_gcc_confs="${mac_gpp_conf}"
+		if [[ -f mkspecs/common/gcc-base-macx.conf ]]; then
+			mac_gcc_confs+=" mkspecs/common/gcc-base-macx.conf"
+		fi
+		sed \
+			-e "s:-arch i386::" \
+			-e "s:-arch ppc::" \
+			-e "s:-arch x86_64::" \
+			-e "s:-arch ppc64::" \
+			-e "s:-arch \$i::" \
+			-e "/if \[ ! -z \"\$NATIVE_64_ARCH\" \]; then/,/fi/ d" \
+			-e "s:CFG_MAC_XARCH=yes:CFG_MAC_XARCH=no:g" \
+			-e "s:-Xarch_x86_64::g" \
+			-e "s:-Xarch_ppc64::g" \
+			-i configure ${mac_gcc_confs} \
+			|| die "sed -arch/-Xarch failed"
+
+		# On Snow Leopard don't fall back to 10.5 deployment target.
+		if [[ ${CHOST} == *-apple-darwin10 ]]; then
+			sed -e "s:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET.*:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET 10.6:g" \
+				-e "s:-mmacosx-version-min=10.[0-9]:-mmacosx-version-min=10.6:g" \
+				-i configure ${mac_gpp_conf} \
+				|| die "sed deployment target failed"
+		fi
+	fi
+
+	# this one is needed for all systems with a separate -liconv, apart from
+	# Darwin, for which the sources already cater for -liconv
+	if use !elibc_glibc && [[ ${CHOST} != *-darwin* ]]; then
+		sed -e 's|mac:\(LIBS += -liconv\)|\1|g' \
+			-i config.tests/unix/iconv/iconv.pro \
+			|| die "sed iconv.pro failed"
+	fi
+
+	# we need some patches for Solaris
+	# NOTE(review): unlike qt4-build-multilib (which guards on *-solaris*),
+	# these seds run on every platform
+	sed -i -e '/^QMAKE_LFLAGS_THREAD/a\QMAKE_LFLAGS_DYNAMIC_LIST = -Wl,--dynamic-list,' \
+		mkspecs/$(qt_mkspecs_dir)/qmake.conf || die
+	# use GCC over SunStudio
+	sed -i -e '/PLATFORM=solaris-cc/s/cc/g++/' configure || die
+	# do not flirt with non-Prefix stuff, we're quite possessive
+	sed -i -e '/^QMAKE_\(LIB\|INC\)DIR\(_X11\|_OPENGL\|\)\t/s/=.*$/=/' \
+		mkspecs/$(qt_mkspecs_dir)/qmake.conf || die
+
+	# apply patches
+	[[ -n ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+	epatch_user
+}
+
+# @FUNCTION: qt4-build_src_configure
+# @DESCRIPTION:
+# Default configure phase
+qt4-build_src_configure() {
+	setqtenv
+
+	# NOTE(review): conf is a whitespace-joined string (word-splitting applies
+	# on use), unlike the array used by qt4-build-multilib.eclass
+	local conf="
+		-prefix ${QTPREFIXDIR}
+		-bindir ${QTBINDIR}
+		-libdir ${QTLIBDIR}
+		-docdir ${QTDOCDIR}
+		-headerdir ${QTHEADERDIR}
+		-plugindir ${QTPLUGINDIR}
+		-importdir ${QTIMPORTDIR}
+		-datadir ${QTDATADIR}
+		-translationdir ${QTTRANSDIR}
+		-sysconfdir ${QTSYSCONFDIR}
+		-examplesdir ${QTEXAMPLESDIR}
+		-demosdir ${QTDEMOSDIR}
+		-opensource -confirm-license
+		-shared -fast -largefile -stl -verbose
+		-nomake examples -nomake demos"
+
+	# ARCH is set on Gentoo. Qt now falls back to generic on an unsupported
+	# $(tc-arch). Therefore we convert it to supported values.
+	case "$(tc-arch)" in
+		amd64|x64-*) conf+=" -arch x86_64" ;;
+		ppc-macos) conf+=" -arch ppc" ;;
+		ppc|ppc64|ppc-*) conf+=" -arch powerpc" ;;
+		sparc|sparc-*|sparc64-*) conf+=" -arch sparc" ;;
+		x86-macos) conf+=" -arch x86" ;;
+		x86|x86-*) conf+=" -arch i386" ;;
+		alpha|arm|ia64|mips|s390) conf+=" -arch $(tc-arch)" ;;
+		arm64|hppa|sh) conf+=" -arch generic" ;;
+		*) die "$(tc-arch) is unsupported by this eclass. Please file a bug." ;;
+	esac
+
+	conf+=" -platform $(qt_mkspecs_dir)"
+
+	[[ $(get_libdir) != lib ]] && conf+=" -L${EPREFIX}/usr/$(get_libdir)"
+
+	# debug/release
+	if use debug; then
+		conf+=" -debug"
+	else
+		conf+=" -release"
+	fi
+	conf+=" -no-separate-debug-info"
+
+	# exceptions USE flag
+	conf+=" $(in_iuse exceptions && qt_use exceptions || echo -exceptions)"
+
+	# disable rpath (bug 380415), except on prefix (bug 417169)
+	use prefix || conf+=" -no-rpath"
+
+	# precompiled headers don't work on hardened, where the flag is masked
+	conf+=" $(qt_use pch)"
+
+	# -reduce-relocations
+	# This flag seems to introduce major breakage to applications,
+	# mostly to be seen as a core dump with the message "QPixmap: Must
+	# construct a QApplication before a QPaintDevice" on Solaris.
+	#   -- Daniel Vergien
+	[[ ${CHOST} != *-solaris* ]] && conf+=" -reduce-relocations"
+
+	# this one is needed for all systems with a separate -liconv, apart from
+	# Darwin, for which the sources already cater for -liconv
+	if use !elibc_glibc && [[ ${CHOST} != *-darwin* ]]; then
+		conf+=" -liconv"
+	fi
+
+	if use_if_iuse glib; then
+		local glibflags="$(pkg-config --cflags --libs glib-2.0 gthread-2.0)"
+		# avoid the -pthread argument
+		conf+=" ${glibflags//-pthread}"
+		unset glibflags
+	fi
+
+	if use aqua; then
+		# On (snow) leopard use the new (frameworked) cocoa code.
+		if [[ ${CHOST##*-darwin} -ge 9 ]]; then
+			conf+=" -cocoa -framework"
+			# We need the source's headers, not the installed ones.
+			conf+=" -I${S}/include"
+			# Add hint for the framework location.
+			conf+=" -F${QTLIBDIR}"
+
+			# We are crazy and build cocoa + qt3support :-)
+			if use qt3support; then
+				sed -e "/case \"\$PLATFORM,\$CFG_MAC_COCOA\" in/,/;;/ s|CFG_QT3SUPPORT=\"no\"|CFG_QT3SUPPORT=\"yes\"|" \
+					-i configure || die
+			fi
+		else
+			conf+=" -no-framework"
+		fi
+	else
+		# freetype2 include dir is non-standard, thus pass it to configure
+		conf+=" $(pkg-config --cflags-only-I freetype2)"
+	fi
+
+	conf+=" ${myconf}"
+	myconf=
+
+	echo ./configure ${conf}
+	./configure ${conf} || die "./configure failed"
+
+	prepare_directories ${QT4_TARGET_DIRECTORIES}
+}
+
+# @FUNCTION: qt4-build_src_compile
+# @DESCRIPTION:
+# Actual compile phase
+qt4-build_src_compile() {
+	setqtenv
+
+	build_directories ${QT4_TARGET_DIRECTORIES}
+}
+
+# @FUNCTION: qt4-build_src_test
+# @DESCRIPTION:
+# Runs tests only in target directories.
+qt4-build_src_test() {
+	# QtMultimedia does not have any test suite (bug #332299)
+	[[ ${CATEGORY}/${PN} == dev-qt/qtmultimedia ]] && return
+
+	# NOTE(review): 'dir' is not declared local and leaks into global scope
+	for dir in ${QT4_TARGET_DIRECTORIES}; do
+		emake -j1 check -C ${dir}
+	done
+}
+
+# @FUNCTION: fix_includes
+# @DESCRIPTION:
+# For MacOS X we need to add some symlinks when frameworks are
+# being used, to avoid complications with some more or less stupid packages.
+fix_includes() {
+	if use aqua && [[ ${CHOST##*-darwin} -ge 9 ]]; then
+		local frw dest f h rdir
+		# Some packages tend to include <Qt/...>
+		dodir "${QTHEADERDIR#${EPREFIX}}"/Qt
+
+		# Fake normal headers when frameworks are installed... eases life later
+		# on, make sure we use relative links though, as some ebuilds assume
+		# these dirs exist in src_install to add additional files
+		# Compute rdir: the relative path from QTHEADERDIR to QTLIBDIR.
+		# First strip the common path prefix...
+		f=${QTHEADERDIR}
+		h=${QTLIBDIR}
+		while [[ -n ${f} && ${f%%/*} == ${h%%/*} ]] ; do
+			f=${f#*/}
+			h=${h#*/}
+		done
+		# ...then prepend one "../" per remaining component
+		rdir=${h}
+		f="../"
+		while [[ ${h} == */* ]] ; do
+			f="${f}../"
+			h=${h#*/}
+		done
+		rdir="${f}${rdir}"
+
+		for frw in "${D}${QTLIBDIR}"/*.framework; do
+			[[ -e "${frw}"/Headers ]] || continue
+			f=$(basename ${frw})
+			dest="${QTHEADERDIR#${EPREFIX}}"/${f%.framework}
+			dosym "${rdir}"/${f}/Headers "${dest}"
+
+			# Link normal headers as well.
+			for hdr in "${D}/${QTLIBDIR}/${f}"/Headers/*; do
+				h=$(basename ${hdr})
+				dosym "../${rdir}"/${f}/Headers/${h} \
+					"${QTHEADERDIR#${EPREFIX}}"/Qt/${h}
+			done
+		done
+	fi
+}
+
+# @FUNCTION: qt4-build_src_install
+# @DESCRIPTION:
+# Perform the actual installation including some library fixes.
+qt4-build_src_install() {
+	setqtenv
+
+	install_directories ${QT4_TARGET_DIRECTORIES}
+	install_qconfigs
+	fix_library_files
+	fix_includes
+
+	# remove .la files since we are building only shared libraries
+	prune_libtool_files
+}
+
+# @FUNCTION: setqtenv
+# @INTERNAL
+# @DESCRIPTION:
+# Sets up the Qt4 installation directory variables used throughout the
+# eclass. Must be re-run in every phase function since exported state
+# does not persist between phases.
+setqtenv() {
+	# Set up installation directories
+	QTPREFIXDIR=${EPREFIX}/usr
+	QTBINDIR=${QTPREFIXDIR}/bin
+	QTLIBDIR=${QTPREFIXDIR}/$(get_libdir)/qt4
+	QTPCDIR=${QTPREFIXDIR}/$(get_libdir)/pkgconfig
+	QTDOCDIR=${QTPREFIXDIR}/share/doc/qt-${PV}
+	QTHEADERDIR=${QTPREFIXDIR}/include/qt4
+	QTPLUGINDIR=${QTLIBDIR}/plugins
+	QTIMPORTDIR=${QTLIBDIR}/imports
+	QTDATADIR=${QTPREFIXDIR}/share/qt4
+	QTTRANSDIR=${QTDATADIR}/translations
+	QTSYSCONFDIR=${EPREFIX}/etc/qt4
+	QTEXAMPLESDIR=${QTDATADIR}/examples
+	QTDEMOSDIR=${QTDATADIR}/demos
+	QMAKE_LIBDIR_QT=${QTLIBDIR}
+
+	# mkspecs platform for the current CHOST/compiler
+	PLATFORM=$(qt_mkspecs_dir)
+	# make sure a user-set QMAKESPEC cannot override our choice
+	unset QMAKESPEC
+
+	# keep qt tools from writing into the user's real config dir
+	export XDG_CONFIG_HOME="${T}"
+}
+
+# @FUNCTION: prepare_directories
+# @USAGE: < directories >
+# @INTERNAL
+# @DESCRIPTION:
+# Generates Makefiles for the given list of directories by running the
+# in-tree qmake in each one. Dies on any qmake or directory failure.
+prepare_directories() {
+	# declare the loop variable local so it does not leak into the
+	# ebuild's global environment (it previously did)
+	local x
+	for x in "$@"; do
+		pushd "${S}/${x}" >/dev/null || die
+		einfo "Running qmake in: ${x}"
+		# link against the qt4 libdir and never let qmake strip
+		# binaries — stripping is portage's job
+		"${S}"/bin/qmake \
+			"LIBS+=-L${QTLIBDIR}" \
+			"CONFIG+=nostrip" \
+			|| die "qmake failed"
+		popd >/dev/null || die
+	done
+}
+
+# @FUNCTION: build_directories
+# @USAGE: < directories >
+# @INTERNAL
+# @DESCRIPTION:
+# Compiles the code in the given list of directories, forcing the
+# portage toolchain and disabling stripping/ranlib (handled by portage).
+build_directories() {
+	# declare the loop variable local so it does not leak into the
+	# ebuild's global environment (it previously did)
+	local x
+	for x in "$@"; do
+		pushd "${S}/${x}" >/dev/null || die
+		emake \
+			AR="$(tc-getAR) cqs" \
+			CC="$(tc-getCC)" \
+			CXX="$(tc-getCXX)" \
+			LINK="$(tc-getCXX)" \
+			RANLIB=":" \
+			STRIP=":"
+		popd >/dev/null || die
+	done
+}
+
+# @FUNCTION: install_directories
+# @USAGE: < directories >
+# @INTERNAL
+# @DESCRIPTION:
+# Runs emake install in the given directories, which are separated by spaces.
+# Installation is redirected into the image directory via INSTALL_ROOT.
+install_directories() {
+	# declare the loop variable local so it does not leak into the
+	# ebuild's global environment (it previously did)
+	local x
+	for x in "$@"; do
+		pushd "${S}/${x}" >/dev/null || die
+		emake INSTALL_ROOT="${D}" install
+		popd >/dev/null || die
+	done
+}
+
+# @ECLASS-VARIABLE: QCONFIG_ADD
+# @DESCRIPTION:
+# List options that need to be added to QT_CONFIG in qconfig.pri
+: ${QCONFIG_ADD:=}
+
+# @ECLASS-VARIABLE: QCONFIG_REMOVE
+# @DESCRIPTION:
+# List options that need to be removed from QT_CONFIG in qconfig.pri
+: ${QCONFIG_REMOVE:=}
+
+# @ECLASS-VARIABLE: QCONFIG_DEFINE
+# @DESCRIPTION:
+# List variables that should be defined at the top of QtCore/qconfig.h
+: ${QCONFIG_DEFINE:=}
+
+# @FUNCTION: install_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Install gentoo-specific mkspecs configurations. Writes the per-package
+# QCONFIG_{ADD,REMOVE} values into ${PN}-qconfig.pri and the
+# QCONFIG_DEFINE macros into gentoo-${PN}-qconfig.h; these fragments are
+# merged system-wide later by generate_qconfigs() in pkg_post{inst,rm}.
+install_qconfigs() {
+	local x
+	if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} ]]; then
+		# indirect expansion: ${!x} is the value of the variable named by x
+		for x in QCONFIG_ADD QCONFIG_REMOVE; do
+			[[ -n ${!x} ]] && echo ${x}=${!x} >> "${T}"/${PN}-qconfig.pri
+		done
+		insinto ${QTDATADIR#${EPREFIX}}/mkspecs/gentoo
+		doins "${T}"/${PN}-qconfig.pri
+	fi
+
+	if [[ -n ${QCONFIG_DEFINE} ]]; then
+		# intentional word-splitting: one #define per list entry
+		for x in ${QCONFIG_DEFINE}; do
+			echo "#define ${x}" >> "${T}"/gentoo-${PN}-qconfig.h
+		done
+		insinto ${QTHEADERDIR#${EPREFIX}}/Gentoo
+		doins "${T}"/gentoo-${PN}-qconfig.h
+	fi
+}
+
+# @FUNCTION: generate_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Generates gentoo-specific qconfig.{h,pri} by merging the fragments that
+# every installed Qt4 package placed under mkspecs/gentoo and
+# QTHEADERDIR/Gentoo. Runs from pkg_post{inst,rm}, so errors are reported
+# with eerror/return instead of die.
+generate_qconfigs() {
+	if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} || -n ${QCONFIG_DEFINE} || ${CATEGORY}/${PN} == dev-qt/qtcore ]]; then
+		local x qconfig_add qconfig_remove qconfig_new
+		# collect ADD/REMOVE lists from every installed package fragment
+		for x in "${ROOT}${QTDATADIR}"/mkspecs/gentoo/*-qconfig.pri; do
+			[[ -f ${x} ]] || continue
+			qconfig_add+=" $(sed -n 's/^QCONFIG_ADD=//p' "${x}")"
+			qconfig_remove+=" $(sed -n 's/^QCONFIG_REMOVE=//p' "${x}")"
+		done
+
+		# these error checks do not use die because dying in pkg_post{inst,rm}
+		# just makes things worse.
+		if [[ -e "${ROOT}${QTDATADIR}"/mkspecs/gentoo/qconfig.pri ]]; then
+			# start with the qconfig.pri that qtcore installed
+			if ! cp "${ROOT}${QTDATADIR}"/mkspecs/gentoo/qconfig.pri \
+				"${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri; then
+				eerror "cp qconfig failed."
+				return 1
+			fi
+
+			# generate list of QT_CONFIG entries from the existing list
+			# including qconfig_add and excluding qconfig_remove
+			for x in $(sed -n 's/^QT_CONFIG +=//p' \
+				"${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri) ${qconfig_add}; do
+				has ${x} ${qconfig_remove} || qconfig_new+=" ${x}"
+			done
+
+			# replace the existing QT_CONFIG list with qconfig_new
+			if ! sed -i -e "s/QT_CONFIG +=.*/QT_CONFIG += ${qconfig_new}/" \
+				"${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri; then
+				eerror "Sed for QT_CONFIG failed"
+				return 1
+			fi
+
+			# create Gentoo/qconfig.h
+			if [[ ! -e ${ROOT}${QTHEADERDIR}/Gentoo ]]; then
+				if ! mkdir -p "${ROOT}${QTHEADERDIR}"/Gentoo; then
+					eerror "mkdir ${QTHEADERDIR}/Gentoo failed"
+					return 1
+				fi
+			fi
+			# truncate, then concatenate every per-package header fragment
+			: > "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h
+			for x in "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-*-qconfig.h; do
+				[[ -f ${x} ]] || continue
+				cat "${x}" >> "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h
+			done
+		else
+			# qtcore was removed: drop the merged files and best-effort
+			# remove the now-empty directories (rmdir failure is fine)
+			rm -f "${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri
+			rm -f "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h
+			rmdir "${ROOT}${QTDATADIR}"/mkspecs \
+				"${ROOT}${QTDATADIR}" \
+				"${ROOT}${QTHEADERDIR}"/Gentoo \
+				"${ROOT}${QTHEADERDIR}" 2>/dev/null
+		fi
+	fi
+}
+
+# @FUNCTION: qt4-build_pkg_postrm
+# @DESCRIPTION:
+# Regenerate configuration when the package is completely removed.
+# Delegates to generate_qconfigs, which rebuilds the merged
+# qconfig.{h,pri} from the remaining installed packages.
+qt4-build_pkg_postrm() {
+	generate_qconfigs
+}
+
+# @FUNCTION: qt4-build_pkg_postinst
+# @DESCRIPTION:
+# Regenerate configuration, plus throw a message about possible
+# breakages and proposed solutions.
+qt4-build_pkg_postinst() {
+	generate_qconfigs
+}
+
+# @FUNCTION: skip_qmake_build
+# @INTERNAL
+# @DESCRIPTION:
+# Patches configure to skip qmake compilation, as it's already installed by qtcore.
+# Works by flipping the "if true" guard around the qmake bootstrap to "if false".
+skip_qmake_build() {
+	sed -i -e "s:if true:if false:g" "${S}"/configure || die
+}
+
+# @FUNCTION: skip_project_generation
+# @INTERNAL
+# @DESCRIPTION:
+# Exit the script early by throwing in an exit before all of the .pro files are scanned.
+# The "exit 0" is inserted right before the "Finding project files" step of configure.
+skip_project_generation() {
+	sed -i -e "s:echo \"Finding:exit 0\n\necho \"Finding:g" "${S}"/configure || die
+}
+
+# @FUNCTION: symlink_binaries_to_buildtree
+# @INTERNAL
+# @DESCRIPTION:
+# Symlinks generated binaries to buildtree, so they can be used during
+# compilation time. The tools come from the already-installed qtcore.
+symlink_binaries_to_buildtree() {
+	# declare the loop variable local so it does not leak into the
+	# ebuild's global environment (it previously did)
+	local bin
+	for bin in qmake moc uic rcc; do
+		ln -s "${QTBINDIR}/${bin}" "${S}"/bin/ || die "symlinking ${bin} to ${S}/bin failed"
+	done
+}
+
+# @FUNCTION: fix_library_files
+# @INTERNAL
+# @DESCRIPTION:
+# Fixes the paths in *.la, *.prl, *.pc, as they are wrong due to sandbox and
+# moves the *.pc files into the pkgconfig directory.
+fix_library_files() {
+	local libfile
+	# rewrite build-dir library paths to the final install location
+	for libfile in "${D}"/${QTLIBDIR}/{*.la,*.prl,pkgconfig/*.pc}; do
+		if [[ -e ${libfile} ]]; then
+			sed -i -e "s:${S}/lib:${QTLIBDIR}:g" ${libfile} || die "sed on ${libfile} failed"
+		fi
+	done
+
+	# pkgconfig files refer to WORKDIR/bin as the moc and uic locations
+	for libfile in "${D}"/${QTLIBDIR}/pkgconfig/*.pc; do
+		if [[ -e ${libfile} ]]; then
+			sed -i -e "s:${S}/bin:${QTBINDIR}:g" ${libfile} || die "sed on ${libfile} failed"
+
+			# Move .pc files into the pkgconfig directory
+			dodir ${QTPCDIR#${EPREFIX}}
+			mv ${libfile} "${D}"/${QTPCDIR}/ || die "moving ${libfile} to ${D}/${QTPCDIR}/ failed"
+		fi
+	done
+
+	# Don't install an empty directory
+	# (deliberately not fatal: rmdir fails harmlessly if files remain)
+	rmdir "${D}"/${QTLIBDIR}/pkgconfig
+}
+
+# @FUNCTION: qt_use
+# @USAGE: < flag > [ feature ] [ enableval ]
+# @DESCRIPTION:
+# This will echo "-${enableval}-${feature}" if <flag> is enabled, or
+# "-no-${feature}" if it's disabled. If [feature] is not specified, <flag>
+# will be used for that. If [enableval] is not specified, it omits the
+# "-${enableval}" part.
+qt_use() {
+ use "$1" && echo "${3:+-$3}-${2:-$1}" || echo "-no-${2:-$1}"
+}
+
+# @FUNCTION: qt_mkspecs_dir
+# @RETURN: the specs-directory w/o path
+# @DESCRIPTION:
+# Allows us to define which mkspecs dir we want to use.
+# Builds the name as <platform>-<compiler>[-64] from CHOST, the active
+# C++ compiler, and the profile; dies on anything unrecognized.
+qt_mkspecs_dir() {
+	local spec=
+
+	# platform part, derived from CHOST
+	case "${CHOST}" in
+		*-freebsd*|*-dragonfly*)
+			spec=freebsd ;;
+		*-openbsd*)
+			spec=openbsd ;;
+		*-netbsd*)
+			spec=netbsd ;;
+		*-darwin*)
+			if use aqua; then
+				# mac with carbon/cocoa
+				spec=macx
+			else
+				# darwin/mac with x11
+				spec=darwin
+			fi
+			;;
+		*-solaris*)
+			spec=solaris ;;
+		*-linux-*|*-linux)
+			spec=linux ;;
+		*)
+			die "${FUNCNAME}(): Unknown CHOST '${CHOST}'" ;;
+	esac
+
+	# compiler part; only g++ and the Intel compiler are recognized
+	case "$(tc-getCXX)" in
+		*g++*)
+			spec+=-g++ ;;
+		*icpc*)
+			spec+=-icc ;;
+		*)
+			die "${FUNCNAME}(): Unknown compiler '$(tc-getCXX)'" ;;
+	esac
+
+	# Add -64 for 64bit profiles
+	if use x64-freebsd ||
+		use amd64-linux ||
+		use x64-macos ||
+		use x64-solaris ||
+		use sparc64-solaris
+	then
+		spec+=-64
+	fi
+
+	echo "${spec}"
+}
+
+# @FUNCTION: qt_nolibx11
+# @INTERNAL
+# @DESCRIPTION:
+# Skip X11 tests for packages that don't need X libraries installed.
+# Deletes the whole PLATFORM_X11/CFG_GUI check block from configure.
+qt_nolibx11() {
+	sed -i -e '/^if.*PLATFORM_X11.*CFG_GUI/,/^fi$/d' "${S}"/configure || die
+}
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_configure src_compile src_install src_test pkg_postrm pkg_postinst
diff --git a/eclass/qt4-r2.eclass b/eclass/qt4-r2.eclass
new file mode 100644
index 000000000000..964233bf64c4
--- /dev/null
+++ b/eclass/qt4-r2.eclass
@@ -0,0 +1,138 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qt4-r2.eclass
+# @MAINTAINER:
+# Qt herd <qt@gentoo.org>
+# @BLURB: Eclass for Qt4-based packages, second edition.
+# @DESCRIPTION:
+# This eclass contains various functions that may be useful when
+# dealing with packages using Qt4 libraries. Requires EAPI=2 or later.
+
+# Only EAPIs 2 through 5 are supported; anything else is a fatal error.
+case ${EAPI} in
+	2|3|4|5) : ;;
+	*) die "qt4-r2.eclass: unsupported EAPI=${EAPI:-0}" ;;
+esac
+
+inherit base eutils qmake-utils
+
+# Redirect the qt tools' configuration writes into the temp dir
+# instead of the user's home directory (sandbox safety).
+export XDG_CONFIG_HOME="${T}"
+
+# @ECLASS-VARIABLE: DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array containing documents passed to dodoc command.
+# Paths can be absolute or relative to ${S}.
+#
+# Example: DOCS=( ChangeLog README "${WORKDIR}/doc_folder/" )
+
+# @ECLASS-VARIABLE: HTML_DOCS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array containing documents passed to dohtml command.
+# Paths can be absolute or relative to ${S}.
+#
+# Example: HTML_DOCS=( "doc/document.html" "${WORKDIR}/html_folder/" )
+
+# @ECLASS-VARIABLE: LANGS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# In case your Qt4 application provides various translations, use this variable
+# to specify them in order to populate "linguas_*" IUSE automatically. Make sure
+# that you set this variable before inheriting qt4-r2 eclass.
+#
+# Example: LANGS="de el it ja"
+# Populate IUSE with one linguas_* flag per language listed in LANGS.
+for x in ${LANGS}; do
+	IUSE+=" linguas_${x}"
+done
+
+# @ECLASS-VARIABLE: LANGSLONG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Same as LANGS, but this variable is for LINGUAS that must be in long format.
+# Remember to set this variable before inheriting qt4-r2 eclass.
+# Look at ${PORTDIR}/profiles/desc/linguas.desc for details.
+#
+# Example: LANGSLONG="en_GB ru_RU"
+for x in ${LANGSLONG}; do
+	# strip the country suffix (en_GB -> en) for the flag name
+	IUSE+=" linguas_${x%_*}"
+done
+# don't leak the loop variable into the ebuild environment
+unset x
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing all the patches to be applied. This variable
+# is expected to be defined in the global scope of ebuilds. Make sure to
+# specify the full path. This variable is used in src_prepare phase.
+#
+# Example:
+# @CODE
+# PATCHES=(
+# "${FILESDIR}/mypatch.patch"
+# "${FILESDIR}/mypatch2.patch"
+# )
+# @CODE
+
+# @FUNCTION: qt4-r2_src_unpack
+# @DESCRIPTION:
+# Default src_unpack function for packages that depend on qt4. If you have to
+# override src_unpack in your ebuild (probably you don't need to), call
+# qt4-r2_src_unpack in it. Thin wrapper around base_src_unpack.
+qt4-r2_src_unpack() {
+	debug-print-function $FUNCNAME "$@"
+
+	base_src_unpack "$@"
+}
+
+# @FUNCTION: qt4-r2_src_prepare
+# @DESCRIPTION:
+# Default src_prepare function for packages that depend on qt4. If you have to
+# override src_prepare in your ebuild, you should call qt4-r2_src_prepare in it,
+# otherwise autopatcher will not work! base_src_prepare applies PATCHES
+# and user patches.
+qt4-r2_src_prepare() {
+	debug-print-function $FUNCNAME "$@"
+
+	base_src_prepare "$@"
+}
+
+# @FUNCTION: qt4-r2_src_configure
+# @DESCRIPTION:
+# Default src_configure function for packages that depend on qt4. If you have to
+# override src_configure in your ebuild, call qt4-r2_src_configure in it.
+# Runs eqmake4 on the detected project file, falling back to
+# base_src_configure when none is found.
+qt4-r2_src_configure() {
+	debug-print-function $FUNCNAME "$@"
+
+	# split declaration from assignment so 'local' does not mask the
+	# command substitution's exit status (shellcheck SC2155)
+	local project_file
+	project_file=$(qmake-utils_find_pro_file)
+
+	if [[ -n ${project_file} ]]; then
+		eqmake4 "${project_file}"
+	else
+		base_src_configure "$@"
+	fi
+}
+
+# @FUNCTION: qt4-r2_src_compile
+# @DESCRIPTION:
+# Default src_compile function for packages that depend on qt4. If you have to
+# override src_compile in your ebuild (probably you don't need to), call
+# qt4-r2_src_compile in it. Thin wrapper around base_src_compile.
+qt4-r2_src_compile() {
+	debug-print-function $FUNCNAME "$@"
+
+	base_src_compile "$@"
+}
+
+# @FUNCTION: qt4-r2_src_install
+# @DESCRIPTION:
+# Default src_install function for qt4-based packages. Installs compiled code
+# into the image dir via INSTALL_ROOT, and documentation (via DOCS and
+# HTML_DOCS variables).
+qt4-r2_src_install() {
+	debug-print-function $FUNCNAME "$@"
+
+	base_src_install INSTALL_ROOT="${D}" "$@"
+	einstalldocs
+}
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install
diff --git a/eclass/qt5-build.eclass b/eclass/qt5-build.eclass
new file mode 100644
index 000000000000..b26d56022409
--- /dev/null
+++ b/eclass/qt5-build.eclass
@@ -0,0 +1,787 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: qt5-build.eclass
+# @MAINTAINER:
+# Qt herd <qt@gentoo.org>
+# @AUTHOR:
+# Davide Pesavento <pesa@gentoo.org>
+# @BLURB: Eclass for Qt5 split ebuilds.
+# @DESCRIPTION:
+# This eclass contains various functions that are used when building Qt5.
+# Requires EAPI 5.
+
+# Only EAPI 5 is supported; anything else is a fatal error.
+case ${EAPI} in
+	5) : ;;
+	*) die "qt5-build.eclass: unsupported EAPI=${EAPI:-0}" ;;
+esac
+
+inherit eutils flag-o-matic multilib toolchain-funcs virtualx
+
+# Minor version (y in x.y.z), used for version-gated configure flags below.
+QT5_MINOR_VERSION=${PV#*.}
+QT5_MINOR_VERSION=${QT5_MINOR_VERSION%%.*}
+
+HOMEPAGE="https://www.qt.io/"
+LICENSE="|| ( LGPL-2.1 LGPL-3 ) FDL-1.3"
+SLOT="5"
+
+# @ECLASS-VARIABLE: QT5_MODULE
+# @DESCRIPTION:
+# The upstream name of the module this package belongs to. Used for
+# SRC_URI and EGIT_REPO_URI. Must be defined before inheriting the eclass.
+: ${QT5_MODULE:=${PN}}
+
+# Derive the build type (live git checkout vs. release tarball) and the
+# source location from the package version.
+case ${PV} in
+	5.9999)
+		# git dev branch
+		QT5_BUILD_TYPE="live"
+		EGIT_BRANCH="dev"
+		;;
+	5.?.9999)
+		# git stable branch
+		QT5_BUILD_TYPE="live"
+		EGIT_BRANCH=${PV%.9999}
+		;;
+	*_alpha*|*_beta*|*_rc*)
+		# development release
+		QT5_BUILD_TYPE="release"
+		MY_P=${QT5_MODULE}-opensource-src-${PV/_/-}
+		SRC_URI="http://download.qt.io/development_releases/qt/${PV%.*}/${PV/_/-}/submodules/${MY_P}.tar.xz"
+		S=${WORKDIR}/${MY_P}
+		;;
+	*)
+		# official stable release
+		QT5_BUILD_TYPE="release"
+		MY_P=${QT5_MODULE}-opensource-src-${PV}
+		SRC_URI="http://download.qt.io/official_releases/qt/${PV%.*}/${PV}/submodules/${MY_P}.tar.xz"
+		S=${WORKDIR}/${MY_P}
+		;;
+esac
+
+# Mirror list for live checkouts, tried in order by git-r3.
+EGIT_REPO_URI=(
+	"git://code.qt.io/qt/${QT5_MODULE}.git"
+	"https://code.qt.io/git/qt/${QT5_MODULE}.git"
+	"https://github.com/qtproject/${QT5_MODULE}.git"
+)
+[[ ${QT5_BUILD_TYPE} == live ]] && inherit git-r3
+
+IUSE="debug test"
+
+[[ ${PN} == qtwebkit ]] && RESTRICT+=" mirror" # bug 524584
+[[ ${QT5_BUILD_TYPE} == release ]] && RESTRICT+=" test" # bug 457182
+
+DEPEND="
+	dev-lang/perl
+	virtual/pkgconfig
+"
+# every module except qttest itself needs qttest to build its test suite
+if [[ ${PN} != qttest ]]; then
+	if [[ ${QT5_MODULE} == qtbase ]]; then
+		DEPEND+=" test? ( ~dev-qt/qttest-${PV} )"
+	else
+		DEPEND+=" test? ( >=dev-qt/qttest-${PV}:5 )"
+	fi
+fi
+RDEPEND="
+	dev-qt/qtchooser
+"
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install src_test pkg_postinst pkg_postrm
+
+
+# @ECLASS-VARIABLE: PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing all the patches to be applied. This variable
+# is expected to be defined in the global scope of ebuilds. Make sure to
+# specify the full path. This variable is used in src_prepare phase.
+#
+# Example:
+# @CODE
+# PATCHES=(
+# "${FILESDIR}/mypatch.patch"
+# "${FILESDIR}/mypatch2.patch"
+# )
+# @CODE
+
+# @ECLASS-VARIABLE: QT5_TARGET_SUBDIRS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array variable containing the source directories that should be built.
+# All paths must be relative to ${S}.
+
+# @ECLASS-VARIABLE: QT5_BUILD_DIR
+# @DESCRIPTION:
+# Build directory for out-of-source builds.
+case ${QT5_BUILD_TYPE} in
+	live)    : ${QT5_BUILD_DIR:=${S}_build} ;;
+	release) : ${QT5_BUILD_DIR:=${S}} ;; # workaround for bug 497312
+esac
+
+# @ECLASS-VARIABLE: QT5_GENTOO_CONFIG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Array of <useflag:feature:macro> triplets that are evaluated in src_install
+# to generate the per-package list of enabled QT_CONFIG features and macro
+# definitions, which are then merged together with all other Qt5 packages
+# installed on the system to obtain the global qconfig.{h,pri} files.
+
+# @ECLASS-VARIABLE: VIRTUALX_REQUIRED
+# @DESCRIPTION:
+# For proper description see virtualx.eclass man page.
+# Here we redefine default value to be manual, if your package needs virtualx
+# for tests you should proceed with setting VIRTUALX_REQUIRED=test.
+: ${VIRTUALX_REQUIRED:=manual}
+
+
+###### Phase functions ######
+
+# @FUNCTION: qt5-build_src_unpack
+# @DESCRIPTION:
+# Unpacks the sources. Warns (non-fatally) about unsupported GCC versions
+# and about debug-info flags for qtwebkit, then unpacks via git-r3 or the
+# default tarball handler depending on QT5_BUILD_TYPE.
+qt5-build_src_unpack() {
+	if [[ $(gcc-major-version) -lt 4 ]] || [[ $(gcc-major-version) -eq 4 && $(gcc-minor-version) -lt 5 ]]; then
+		ewarn
+		ewarn "Using a GCC version lower than 4.5 is not supported."
+		ewarn
+	fi
+
+	if [[ ${PN} == qtwebkit ]]; then
+		# extglob is needed for the '-g?(gdb)?([1-9])' pattern below;
+		# push/pop so the shell option does not leak out
+		eshopts_push -s extglob
+		if is-flagq '-g?(gdb)?([1-9])'; then
+			ewarn
+			ewarn "You have enabled debug info (probably have -g or -ggdb in your CFLAGS/CXXFLAGS)."
+			ewarn "You may experience really long compilation times and/or increased memory usage."
+			ewarn "If compilation fails, please try removing -g/-ggdb before reporting a bug."
+			ewarn "For more info check out https://bugs.gentoo.org/307861"
+			ewarn
+		fi
+		eshopts_pop
+	fi
+
+	case ${QT5_BUILD_TYPE} in
+		live)    git-r3_src_unpack ;;
+		release) default ;;
+	esac
+}
+
+# @FUNCTION: qt5-build_src_prepare
+# @DESCRIPTION:
+# Prepares the environment and patches the sources if necessary.
+# For qtbase modules this rewrites upstream's configure script so that
+# the bundled qmake bootstrap respects the portage toolchain and flags;
+# the sed expressions are tightly coupled to upstream's script layout.
+qt5-build_src_prepare() {
+	qt5_prepare_env
+
+	if [[ ${QT5_MODULE} == qtbase ]]; then
+		qt5_symlink_tools_to_build_dir
+
+		# Avoid unnecessary qmake recompilations
+		sed -i -re "s|^if true;.*(\[ '\!').*(\"\\\$outpath/bin/qmake\".*)|if \1 -e \2 then|" \
+			configure || die "sed failed (skip qmake bootstrap)"
+
+		# Respect CC, CXX, *FLAGS, MAKEOPTS and EXTRA_EMAKE when bootstrapping qmake
+		sed -i -e "/outpath\/qmake\".*\"\$MAKE\")/ s:): \
+			${MAKEOPTS} ${EXTRA_EMAKE} 'CC=$(tc-getCC)' 'CXX=$(tc-getCXX)' \
+			'QMAKE_CFLAGS=${CFLAGS}' 'QMAKE_CXXFLAGS=${CXXFLAGS}' 'QMAKE_LFLAGS=${LDFLAGS}'&:" \
+			-e '/"$CFG_RELEASE_QMAKE"/,/^\s\+fi$/ d' \
+			configure || die "sed failed (respect env for qmake build)"
+		sed -i -e '/^CPPFLAGS\s*=/ s/-g //' \
+			qmake/Makefile.unix || die "sed failed (CPPFLAGS for qmake build)"
+
+		# Respect CXX in {bsymbolic_functions,fvisibility,precomp}.test
+		sed -i -e "/^QMAKE_CONF_COMPILER=/ s:=.*:=\"$(tc-getCXX)\":" \
+			configure || die "sed failed (QMAKE_CONF_COMPILER)"
+
+		# Respect toolchain and flags in config.tests
+		find config.tests/unix -name '*.test' -type f \
+			-execdir sed -i -e '/bin\/qmake/ s/-nocache //' '{}' + \
+			|| die "sed failed (config.tests)"
+
+		# Don't add -O3 to CXXFLAGS (bug 549140)
+		sed -i -e '/CONFIG\s*+=/ s/optimize_full//' \
+			src/{corelib/corelib,gui/gui}.pro || die "sed failed (optimize_full)"
+
+		# Don't force sse2 on x86 (bug 552942)
+		sed -i -e 's/^sse2:/false:&/' \
+			mkspecs/features/qt_module.prf || die "sed failed (sse2)"
+	fi
+
+	# apply patches
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+	epatch_user
+}
+
+# @FUNCTION: qt5-build_src_configure
+# @DESCRIPTION:
+# Runs qmake in the target directories. For packages
+# in qtbase, ./configure is also run before qmake.
+qt5-build_src_configure() {
+	if [[ ${QT5_MODULE} == qtbase ]]; then
+		qt5_base_configure
+	fi
+
+	# run qmake once per QT5_TARGET_SUBDIRS entry
+	qt5_foreach_target_subdir qt5_qmake
+}
+
+# @FUNCTION: qt5-build_src_compile
+# @DESCRIPTION:
+# Runs emake in the target directories (inside QT5_BUILD_DIR).
+qt5-build_src_compile() {
+	qt5_foreach_target_subdir emake
+}
+
+# @FUNCTION: qt5-build_src_test
+# @DESCRIPTION:
+# Runs tests in the target directories: re-runs qmake/emake for the
+# tests/auto subtrees, then executes "make check" through a wrapper
+# script that points the dynamic linker at the just-built libraries.
+qt5-build_src_test() {
+	# disable broken cmake tests (bug 474004)
+	local myqmakeargs=("${myqmakeargs[@]}" -after SUBDIRS-=cmake SUBDIRS-=installed_cmake)
+
+	qt5_foreach_target_subdir qt5_qmake
+	qt5_foreach_target_subdir emake
+
+	# create a custom testrunner script that correctly sets
+	# {,DY}LD_LIBRARY_PATH before executing the given test
+	local testrunner=${QT5_BUILD_DIR}/gentoo-testrunner
+	cat <<-EOF > "${testrunner}"
+	#!/bin/sh
+	export LD_LIBRARY_PATH="${QT5_BUILD_DIR}/lib:${QT5_LIBDIR}"
+	export DYLD_LIBRARY_PATH="${QT5_BUILD_DIR}/lib:${QT5_LIBDIR}"
+	"\$@"
+	EOF
+	chmod +x "${testrunner}"
+
+	_qt5_test_runner() {
+		qt5_foreach_target_subdir emake TESTRUNNER="'${testrunner}'" check
+	}
+
+	# run under a virtual X server only when the ebuild asked for it
+	if [[ ${VIRTUALX_REQUIRED} == test ]]; then
+		VIRTUALX_COMMAND="_qt5_test_runner" virtualmake
+	else
+		_qt5_test_runner
+	fi
+}
+
+# @FUNCTION: qt5-build_src_install
+# @DESCRIPTION:
+# Runs emake install in the target directories. For qtcore it also
+# installs the global docs/mkspecs/qmake/syncqt targets, the empty
+# Gentoo/gentoo-qconfig.h placeholder, and the qtchooser configuration.
+qt5-build_src_install() {
+	qt5_foreach_target_subdir emake INSTALL_ROOT="${D}" install
+
+	if [[ ${PN} == qtcore ]]; then
+		pushd "${QT5_BUILD_DIR}" >/dev/null || die
+
+		set -- emake INSTALL_ROOT="${D}" install_{global_docs,mkspecs,qmake,syncqt}
+		einfo "Running $*"
+		"$@"
+
+		popd >/dev/null || die
+
+		# keep the global docs uncompressed so qt tools can read them
+		docompress -x "${QT5_DOCDIR#${EPREFIX}}"/global
+
+		# install an empty Gentoo/gentoo-qconfig.h in ${D}
+		# so that it's placed under package manager control
+		> "${T}"/gentoo-qconfig.h
+		(
+			insinto "${QT5_HEADERDIR#${EPREFIX}}"/Gentoo
+			doins "${T}"/gentoo-qconfig.h
+		)
+
+		# include gentoo-qconfig.h at the beginning of QtCore/qconfig.h
+		sed -i -e '1i #include <Gentoo/gentoo-qconfig.h>\n' \
+			"${D}${QT5_HEADERDIR}"/QtCore/qconfig.h \
+			|| die "sed failed (qconfig.h)"
+
+		# install qtchooser configuration file
+		cat > "${T}/qt5-${CHOST}.conf" <<-_EOF_
+		${QT5_BINDIR}
+		${QT5_LIBDIR}
+		_EOF_
+
+		(
+			insinto /etc/xdg/qtchooser
+			doins "${T}/qt5-${CHOST}.conf"
+		)
+
+		# convenience symlinks
+		dosym qt5-"${CHOST}".conf /etc/xdg/qtchooser/5.conf
+		dosym qt5-"${CHOST}".conf /etc/xdg/qtchooser/qt5.conf
+	fi
+
+	qt5_install_module_qconfigs
+	# remove .la files since we are building only shared libraries
+	prune_libtool_files
+}
+
+# @FUNCTION: qt5-build_pkg_postinst
+# @DESCRIPTION:
+# Regenerate configuration after installation or upgrade/downgrade,
+# merging the qconfig fragments of all installed Qt5 modules.
+qt5-build_pkg_postinst() {
+	qt5_regenerate_global_qconfigs
+}
+
+# @FUNCTION: qt5-build_pkg_postrm
+# @DESCRIPTION:
+# Regenerate configuration when a module is completely removed.
+# Skipped on upgrades (REPLACED_BY_VERSION set) and for qtcore itself,
+# whose removal takes the whole merged configuration with it.
+qt5-build_pkg_postrm() {
+	if [[ -z ${REPLACED_BY_VERSION} && ${PN} != qtcore ]]; then
+		qt5_regenerate_global_qconfigs
+	fi
+}
+
+
+###### Public helpers ######
+
+# @FUNCTION: qt_use
+# @USAGE: <flag> [feature] [enableval]
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE.
+#
+# Outputs "-${enableval}-${feature}" if <flag> is enabled, "-no-${feature}"
+# otherwise. If [feature] is not specified, <flag> is used in its place.
+# If [enableval] is not specified, the "-${enableval}" prefix is omitted.
+qt_use() {
+ [[ $# -ge 1 ]] || die "${FUNCNAME}() requires at least one argument"
+
+ usex "$1" "${3:+-$3}-${2:-$1}" "-no-${2:-$1}"
+}
+
+# @FUNCTION: qt_use_compile_test
+# @USAGE: <flag> [config]
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE.
+# [config] is the argument of qtCompileTest, defaults to <flag>.
+#
+# This function is useful to disable optional dependencies that are checked
+# at qmake-time using the qtCompileTest() function. If <flag> is disabled,
+# the compile test is skipped and the dependency is assumed to be unavailable,
+# i.e. the corresponding feature will be disabled. Note that all invocations
+# of this function must happen before calling qt5-build_src_configure.
+qt_use_compile_test() {
+	[[ $# -ge 1 ]] || die "${FUNCNAME}() requires at least one argument"
+
+	if ! use "$1"; then
+		# marking the test "done" in .qmake.cache makes qmake skip it
+		# and treat the dependency as absent
+		mkdir -p "${QT5_BUILD_DIR}" || die
+		echo "CONFIG += done_config_${2:-$1}" >> "${QT5_BUILD_DIR}"/.qmake.cache || die
+	fi
+}
+
+# @FUNCTION: qt_use_disable_mod
+# @USAGE: <flag> <module> <files...>
+# @DESCRIPTION:
+# <flag> is the name of a flag in IUSE.
+# <module> is the (lowercase) name of a Qt5 module.
+# <files...> is a list of one or more qmake project files.
+#
+# This function patches <files> to treat <module> as not installed
+# when <flag> is disabled, otherwise it does nothing.
+# This can be useful to avoid an automagic dependency when the module
+# is present on the system but the corresponding USE flag is disabled.
+qt_use_disable_mod() {
+	[[ $# -ge 3 ]] || die "${FUNCNAME}() requires at least three arguments"
+
+	local flag=$1
+	local module=$2
+	shift 2
+
+	if ! use "${flag}"; then
+		# run sed on the files directly instead of echo|xargs, which
+		# would split file names containing whitespace
+		sed -i -e "s/qtHaveModule(${module})/false/g" -- "$@" || die
+	fi
+}
+
+
+###### Internal functions ######
+
+# @FUNCTION: qt5_prepare_env
+# @INTERNAL
+# @DESCRIPTION:
+# Prepares the environment for building Qt: defines all QT5_* install
+# path variables and, for qtbase, exports QMAKEMODULES so that qmake
+# finds both the in-tree and the installed module .pri files.
+qt5_prepare_env() {
+	# setup installation directories
+	# note: keep paths in sync with qmake-utils.eclass
+	QT5_PREFIX=${EPREFIX}/usr
+	QT5_HEADERDIR=${QT5_PREFIX}/include/qt5
+	QT5_LIBDIR=${QT5_PREFIX}/$(get_libdir)
+	QT5_ARCHDATADIR=${QT5_PREFIX}/$(get_libdir)/qt5
+	QT5_BINDIR=${QT5_ARCHDATADIR}/bin
+	QT5_PLUGINDIR=${QT5_ARCHDATADIR}/plugins
+	QT5_LIBEXECDIR=${QT5_ARCHDATADIR}/libexec
+	QT5_IMPORTDIR=${QT5_ARCHDATADIR}/imports
+	QT5_QMLDIR=${QT5_ARCHDATADIR}/qml
+	QT5_DATADIR=${QT5_PREFIX}/share/qt5
+	QT5_DOCDIR=${QT5_PREFIX}/share/doc/qt-${PV}
+	QT5_TRANSLATIONDIR=${QT5_DATADIR}/translations
+	QT5_EXAMPLESDIR=${QT5_DATADIR}/examples
+	QT5_TESTSDIR=${QT5_DATADIR}/tests
+	QT5_SYSCONFDIR=${EPREFIX}/etc/xdg
+
+	if [[ ${QT5_MODULE} == qtbase ]]; then
+		# see mkspecs/features/qt_config.prf
+		export QMAKEMODULES="${QT5_BUILD_DIR}/mkspecs/modules:${S}/mkspecs/modules:${QT5_ARCHDATADIR}/mkspecs/modules"
+	fi
+}
+
+# @FUNCTION: qt5_foreach_target_subdir
+# @INTERNAL
+# @DESCRIPTION:
+# Executes the arguments inside each directory listed in QT5_TARGET_SUBDIRS.
+# During src_test the src/ subdirs are remapped to tests/auto/. The exit
+# statuses of all invocations are accumulated and returned.
+qt5_foreach_target_subdir() {
+	# an empty element makes the loop run once in the build root
+	[[ -z ${QT5_TARGET_SUBDIRS[@]} ]] && QT5_TARGET_SUBDIRS=("")
+
+	local ret=0 subdir=
+	for subdir in "${QT5_TARGET_SUBDIRS[@]}"; do
+		if [[ ${EBUILD_PHASE} == test ]]; then
+			# map src/foo -> tests/auto/foo; skip subdirs without tests
+			subdir=tests/auto${subdir#src}
+			[[ -d ${S}/${subdir} ]] || continue
+		fi
+
+		mkdir -p "${QT5_BUILD_DIR}/${subdir}" || die
+		pushd "${QT5_BUILD_DIR}/${subdir}" >/dev/null || die
+
+		einfo "Running $* ${subdir:+in ${subdir}}"
+		"$@"
+		# accumulate failures instead of aborting on the first one
+		((ret+=$?))
+
+		popd >/dev/null || die
+	done
+
+	return ${ret}
+}
+
+# @FUNCTION: qt5_symlink_tools_to_build_dir
+# @INTERNAL
+# @DESCRIPTION:
+# Symlinks qmake and a few other tools to QT5_BUILD_DIR,
+# so that they can be used when building other modules.
+# Tools provided by the package being built are excluded, since the
+# freshly built versions must be used instead of the installed ones.
+qt5_symlink_tools_to_build_dir() {
+	local tool= tools=()
+	if [[ ${PN} != qtcore ]]; then
+		tools+=(qmake moc rcc qlalr)
+		[[ ${PN} != qdoc ]] && tools+=(qdoc)
+		[[ ${PN} != qtdbus ]] && tools+=(qdbuscpp2xml qdbusxml2cpp)
+		[[ ${PN} != qtwidgets ]] && tools+=(uic)
+	fi
+
+	mkdir -p "${QT5_BUILD_DIR}"/bin || die
+	pushd "${QT5_BUILD_DIR}"/bin >/dev/null || die
+
+	for tool in "${tools[@]}"; do
+		# tools may legitimately be absent (not yet installed)
+		[[ -e ${QT5_BINDIR}/${tool} ]] || continue
+		ln -s "${QT5_BINDIR}/${tool}" . || die "failed to symlink ${tool}"
+	done
+
+	popd >/dev/null || die
+}
+
+# @FUNCTION: qt5_base_configure
+# @INTERNAL
+# @DESCRIPTION:
+# Runs ./configure for modules belonging to qtbase.
+# Builds the full argument list (install paths, feature toggles, and
+# dependency opt-outs that individual module ebuilds override via
+# the myconf array) and executes upstream's configure in QT5_BUILD_DIR.
+qt5_base_configure() {
+	# setup toolchain variables used by configure
+	tc-export AR CC CXX OBJDUMP RANLIB STRIP
+	# Qt links C++ objects, so the C++ driver must act as the linker
+	export LD="$(tc-getCXX)"
+
+	# configure arguments
+	local conf=(
+		# installation paths
+		-prefix "${QT5_PREFIX}"
+		-bindir "${QT5_BINDIR}"
+		-headerdir "${QT5_HEADERDIR}"
+		-libdir "${QT5_LIBDIR}"
+		-archdatadir "${QT5_ARCHDATADIR}"
+		-plugindir "${QT5_PLUGINDIR}"
+		-libexecdir "${QT5_LIBEXECDIR}"
+		-importdir "${QT5_IMPORTDIR}"
+		-qmldir "${QT5_QMLDIR}"
+		-datadir "${QT5_DATADIR}"
+		-docdir "${QT5_DOCDIR}"
+		-translationdir "${QT5_TRANSLATIONDIR}"
+		-sysconfdir "${QT5_SYSCONFDIR}"
+		-examplesdir "${QT5_EXAMPLESDIR}"
+		-testsdir "${QT5_TESTSDIR}"
+
+		# configure in release mode by default,
+		# override via the CONFIG qmake variable
+		-release
+		-no-separate-debug-info
+
+		# licensing stuff
+		-opensource -confirm-license
+
+		# let configure automatically figure out if C++11 is supported
+		#-c++11
+
+		# build shared libraries
+		-shared
+
+		# always enable large file support
+		-largefile
+
+		# disabling accessibility is not recommended by upstream, as
+		# it will break QStyle and may break other internal parts of Qt
+		-accessibility
+
+		# disable all SQL drivers by default, override in qtsql
+		-no-sql-db2 -no-sql-ibase -no-sql-mysql -no-sql-oci -no-sql-odbc
+		-no-sql-psql -no-sql-sqlite -no-sql-sqlite2 -no-sql-tds
+
+		# obsolete flag, does nothing
+		#-qml-debug
+
+		# instruction set support: only opt out of what the user's
+		# CFLAGS explicitly disable
+		$(is-flagq -mno-sse2 && echo -no-sse2)
+		$(is-flagq -mno-sse3 && echo -no-sse3)
+		$(is-flagq -mno-ssse3 && echo -no-ssse3)
+		$(is-flagq -mno-sse4.1 && echo -no-sse4.1)
+		$(is-flagq -mno-sse4.2 && echo -no-sse4.2)
+		$(is-flagq -mno-avx && echo -no-avx)
+		$(is-flagq -mno-avx2 && echo -no-avx2)
+		$(is-flagq -mno-dsp && echo -no-mips_dsp)
+		$(is-flagq -mno-dspr2 && echo -no-mips_dspr2)
+
+		# use pkg-config to detect include and library paths
+		-pkg-config
+
+		# prefer system libraries (only common deps here)
+		-system-zlib
+		-system-pcre
+
+		# disable everything to prevent automagic deps (part 1)
+		-no-mtdev
+		-no-journald
+		-no-libpng -no-libjpeg
+		-no-freetype -no-harfbuzz
+		-no-openssl
+		$([[ ${QT5_MINOR_VERSION} -ge 5 ]] && echo -no-libproxy)
+		$([[ ${QT5_MINOR_VERSION} -ge 5 ]] && echo -no-xkbcommon-{x11,evdev})
+		-no-xinput2 -no-xcb-xlib
+
+		# don't specify -no-gif because there is no way to override it later
+		#-no-gif
+
+		# always enable glib event loop support
+		-glib
+
+		# disable everything to prevent automagic deps (part 2)
+		-no-pulseaudio -no-alsa
+
+		# override in qtgui and qtwidgets where x11-libs/cairo[qt4] is blocked
+		# to avoid adding qt4 include paths (bug 433826)
+		-no-gtkstyle
+
+		# exclude examples and tests from default build
+		-nomake examples
+		-nomake tests
+		-no-compile-examples
+
+		# disable rpath on non-prefix (bugs 380415 and 417169)
+		$(usex prefix '' -no-rpath)
+
+		# print verbose information about each configure test
+		-verbose
+
+		# doesn't actually matter since we override CXXFLAGS
+		#-no-optimized-qmake
+
+		# obsolete flag, does nothing
+		#-nis
+
+		# always enable iconv support
+		-iconv
+
+		# disable everything to prevent automagic deps (part 3)
+		-no-cups -no-evdev
+		$([[ ${QT5_MINOR_VERSION} -ge 5 ]] && echo -no-tslib)
+		-no-icu -no-fontconfig
+		-no-dbus
+
+		# don't strip
+		-no-strip
+
+		# precompiled headers are not that useful for us
+		# and cause problems on hardened, so turn them off
+		-no-pch
+
+		# reduced relocations cause major breakage on at least arm and ppc, so
+		# don't specify anything and let the configure figure out if they are
+		# supported; see also https://bugreports.qt.io/browse/QTBUG-36129
+		#-reduce-relocations
+
+		# let configure automatically detect if GNU gold is available
+		#-use-gold-linker
+
+		# disable all platform plugins by default, override in qtgui
+		-no-xcb -no-eglfs -no-directfb -no-linuxfb -no-kms
+
+		# disable undocumented X11-related flags, override in qtgui
+		# (not shown in ./configure -help output)
+		-no-xkb -no-xrender
+
+		# disable obsolete/unused X11-related flags
+		# (not shown in ./configure -help output)
+		-no-mitshm -no-xcursor -no-xfixes -no-xinerama -no-xinput
+		-no-xrandr -no-xshape -no-xsync -no-xvideo
+
+		# always enable session management support: it doesn't need extra deps
+		# at configure time and turning it off is dangerous, see bug 518262
+		-sm
+
+		# typedef qreal to double (warning: changing this flag breaks the ABI)
+		-qreal double
+
+		# disable OpenGL and EGL support by default, override in qtgui,
+		# qtopengl, qtprintsupport and qtwidgets
+		-no-opengl -no-egl
+
+		# disable libinput-based generic plugin by default, override in qtgui
+		$([[ ${QT5_MINOR_VERSION} -ge 5 ]] && echo -no-libinput)
+
+		# disable gstreamer by default, override in qtmultimedia
+		$([[ ${QT5_MINOR_VERSION} -ge 5 ]] && echo -no-gstreamer)
+
+		# use upstream default
+		#-no-system-proxies
+
+		# do not build with -Werror
+		-no-warnings-are-errors
+
+		# module-specific options
+		"${myconf[@]}"
+	)
+
+	pushd "${QT5_BUILD_DIR}" >/dev/null || die
+
+	einfo "Configuring with: ${conf[@]}"
+	"${S}"/configure "${conf[@]}" || die "configure failed"
+
+	popd >/dev/null || die
+}
+
+# @FUNCTION: qt5_qmake
+# @INTERNAL
+# @DESCRIPTION:
+# Helper function that runs qmake in the current target subdir.
+# Intended to be called by qt5_foreach_target_subdir().
+qt5_qmake() {
+	# map the current build directory back to the matching source
+	# directory to obtain the project path to hand to qmake
+	local projectdir=${PWD/#${QT5_BUILD_DIR}/${S}}
+	local qmakepath=
+	if [[ ${QT5_MODULE} == qtbase ]]; then
+		# qtbase bootstraps its own qmake inside the build dir
+		qmakepath=${QT5_BUILD_DIR}/bin
+	else
+		# other modules use the already-installed qmake
+		qmakepath=${QT5_BINDIR}
+	fi
+
+	# Force the Gentoo toolchain and user flags; the *_RELEASE/*_DEBUG
+	# variants are cleared so that only {C,CXX,LD}FLAGS take effect.
+	"${qmakepath}"/qmake \
+		"${projectdir}" \
+		CONFIG+=$(usex debug debug release) \
+		CONFIG-=$(usex debug release debug) \
+		QMAKE_AR="$(tc-getAR) cqs" \
+		QMAKE_CC="$(tc-getCC)" \
+		QMAKE_LINK_C="$(tc-getCC)" \
+		QMAKE_LINK_C_SHLIB="$(tc-getCC)" \
+		QMAKE_CXX="$(tc-getCXX)" \
+		QMAKE_LINK="$(tc-getCXX)" \
+		QMAKE_LINK_SHLIB="$(tc-getCXX)" \
+		QMAKE_OBJCOPY="$(tc-getOBJCOPY)" \
+		QMAKE_RANLIB= \
+		QMAKE_STRIP="$(tc-getSTRIP)" \
+		QMAKE_CFLAGS="${CFLAGS}" \
+		QMAKE_CFLAGS_RELEASE= \
+		QMAKE_CFLAGS_DEBUG= \
+		QMAKE_CXXFLAGS="${CXXFLAGS}" \
+		QMAKE_CXXFLAGS_RELEASE= \
+		QMAKE_CXXFLAGS_DEBUG= \
+		QMAKE_LFLAGS="${LDFLAGS}" \
+		QMAKE_LFLAGS_RELEASE= \
+		QMAKE_LFLAGS_DEBUG= \
+		"${myqmakeargs[@]}" \
+		|| die "qmake failed (${projectdir#${S}/})"
+}
+
+# @FUNCTION: qt5_install_module_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Creates and installs gentoo-specific ${PN}-qconfig.{h,pri} files.
+qt5_install_module_qconfigs() {
+	local x qconfig_add= qconfig_remove=
+
+	# start from empty files
+	> "${T}"/${PN}-qconfig.h
+	> "${T}"/${PN}-qconfig.pri
+
+	# generate qconfig_{add,remove} and ${PN}-qconfig.h
+	# Each QT5_GENTOO_CONFIG entry has the form "flag:feature:macro".
+	for x in "${QT5_GENTOO_CONFIG[@]}"; do
+		local flag=${x%%:*}
+		x=${x#${flag}:}
+		local feature=${x%%:*}
+		x=${x#${feature}:}
+		local macro=${x}
+		macro=$(tr 'a-z-' 'A-Z_' <<< "${macro}")
+
+		# an empty flag is treated as always enabled; a literal '!'
+		# flag as always disabled
+		if [[ -z ${flag} ]] || { [[ ${flag} != '!' ]] && use ${flag}; }; then
+			[[ -n ${feature} ]] && qconfig_add+=" ${feature}"
+			[[ -n ${macro} ]] && echo "#define QT_${macro}" >> "${T}"/${PN}-qconfig.h
+		else
+			[[ -n ${feature} ]] && qconfig_remove+=" ${feature}"
+			[[ -n ${macro} ]] && echo "#define QT_NO_${macro}" >> "${T}"/${PN}-qconfig.h
+		fi
+	done
+
+	# install ${PN}-qconfig.h only if non-empty
+	[[ -s ${T}/${PN}-qconfig.h ]] && (
+		insinto "${QT5_HEADERDIR#${EPREFIX}}"/Gentoo
+		doins "${T}"/${PN}-qconfig.h
+	)
+
+	# generate and install ${PN}-qconfig.pri only if non-empty
+	[[ -n ${qconfig_add} ]] && echo "QCONFIG_ADD=${qconfig_add}" >> "${T}"/${PN}-qconfig.pri
+	[[ -n ${qconfig_remove} ]] && echo "QCONFIG_REMOVE=${qconfig_remove}" >> "${T}"/${PN}-qconfig.pri
+	[[ -s ${T}/${PN}-qconfig.pri ]] && (
+		insinto "${QT5_ARCHDATADIR#${EPREFIX}}"/mkspecs/gentoo
+		doins "${T}"/${PN}-qconfig.pri
+	)
+}
+
+# @FUNCTION: qt5_regenerate_global_qconfigs
+# @INTERNAL
+# @DESCRIPTION:
+# Generates Gentoo-specific qconfig.{h,pri} according to the build configuration.
+# Don't call die here because dying in pkg_post{inst,rm} only makes things worse.
+qt5_regenerate_global_qconfigs() {
+	einfo "Regenerating gentoo-qconfig.h"
+
+	# concatenate all per-package qconfig headers into one
+	# deduplicated gentoo-qconfig.h
+	find "${ROOT%/}${QT5_HEADERDIR}"/Gentoo \
+		-name '*-qconfig.h' -a \! -name 'gentoo-qconfig.h' -type f \
+		-execdir cat '{}' + | sort -u > "${T}"/gentoo-qconfig.h
+
+	[[ -s ${T}/gentoo-qconfig.h ]] || ewarn "Generated gentoo-qconfig.h is empty"
+	mv -f "${T}"/gentoo-qconfig.h "${ROOT%/}${QT5_HEADERDIR}"/Gentoo/gentoo-qconfig.h \
+		|| eerror "Failed to install new gentoo-qconfig.h"
+
+	einfo "Updating QT_CONFIG in qconfig.pri"
+
+	local qconfig_pri=${ROOT%/}${QT5_ARCHDATADIR}/mkspecs/qconfig.pri
+	if [[ -f ${qconfig_pri} ]]; then
+		local x qconfig_add= qconfig_remove=
+		local qt_config=$(sed -n 's/^QT_CONFIG\s*+=\s*//p' "${qconfig_pri}")
+		local new_qt_config=
+
+		# generate list of QT_CONFIG entries from the existing list,
+		# appending QCONFIG_ADD and excluding QCONFIG_REMOVE
+		eshopts_push -s nullglob
+		for x in "${ROOT%/}${QT5_ARCHDATADIR}"/mkspecs/gentoo/*-qconfig.pri; do
+			qconfig_add+=" $(sed -n 's/^QCONFIG_ADD=\s*//p' "${x}")"
+			qconfig_remove+=" $(sed -n 's/^QCONFIG_REMOVE=\s*//p' "${x}")"
+		done
+		eshopts_pop
+		# the has() check also keeps new_qt_config free of duplicates
+		for x in ${qt_config} ${qconfig_add}; do
+			if ! has "${x}" ${new_qt_config} ${qconfig_remove}; then
+				new_qt_config+=" ${x}"
+			fi
+		done
+
+		# now replace the existing QT_CONFIG with the generated list
+		sed -i -e "s/^QT_CONFIG\s*+=.*/QT_CONFIG +=${new_qt_config}/" \
+			"${qconfig_pri}" || eerror "Failed to sed QT_CONFIG in ${qconfig_pri}"
+	else
+		ewarn "${qconfig_pri} does not exist or is not a regular file"
+	fi
+}
diff --git a/eclass/readme.gentoo.eclass b/eclass/readme.gentoo.eclass
new file mode 100644
index 000000000000..4947bb55c2a4
--- /dev/null
+++ b/eclass/readme.gentoo.eclass
@@ -0,0 +1,130 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: readme.gentoo
+# @MAINTAINER:
+# Pacho Ramos <pacho@gentoo.org>
+# @AUTHOR:
+# Author: Pacho Ramos <pacho@gentoo.org>
+# @BLURB: An eclass for installing a README.gentoo doc file recording tips
+# shown via elog messages.
+# @DESCRIPTION:
+# An eclass for installing a README.gentoo doc file recording tips
+# shown via elog messages. With this eclass, those elog messages will only be
+# shown at first package installation and a file for later reviewing will be
+# installed under /usr/share/doc/${PF}
+
+# inclusion guard: make the eclass safe to inherit more than once
+if [[ -z ${_README_GENTOO_ECLASS} ]]; then
+_README_GENTOO_ECLASS=1
+
+inherit eutils
+
+case "${EAPI:-0}" in
+	0|1|2|3)
+		die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+		;;
+	4|5)
+		# EAPI>=4 is required for REPLACING_VERSIONS preventing us
+		# from needing to export another pkg_preinst phase to save has_version
+		# result. Also relies on EAPI >=4 default src_install phase.
+		;;
+	*)
+		die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+		;;
+esac
+
+EXPORT_FUNCTIONS src_install pkg_postinst
+
+# @ECLASS-VARIABLE: DISABLE_AUTOFORMATTING
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty, DOC_CONTENTS information will be strictly respected,
+# not getting it automatically formatted by fmt. If empty, it will
+# rely on fmt for formatting and 'echo -e' options to tweak lines a bit.
+
+# @ECLASS-VARIABLE: FORCE_PRINT_ELOG
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If non-empty this variable forces elog messages to be printed.
+
+# @ECLASS-VARIABLE: README_GENTOO_SUFFIX
+# @DESCRIPTION:
+# If you want to specify a suffix for README.gentoo file please export it.
+: ${README_GENTOO_SUFFIX:=""}
+
+# @FUNCTION: readme.gentoo_create_doc
+# @DESCRIPTION:
+# Create doc file with ${DOC_CONTENTS} variable (preferred) and, if not set,
+# look for "${FILESDIR}/README.gentoo" contents. You can use
+# ${FILESDIR}/README.gentoo-${SLOT} also.
+# Usually called at src_install phase.
+readme.gentoo_create_doc() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	if [[ -n "${DOC_CONTENTS}" ]]; then
+		# set -f disables globbing so that '*' and friends inside
+		# DOC_CONTENTS are not expanded while echoing it
+		eshopts_push
+		set -f
+		if [[ -n "${DISABLE_AUTOFORMATTING}" ]]; then
+			echo "${DOC_CONTENTS}" > "${T}"/README.gentoo
+		else
+			# rewrap to at most 70 columns, strip trailing whitespace
+			echo -e ${DOC_CONTENTS} | fold -s -w 70 \
+				| sed 's/[[:space:]]*$//' > "${T}"/README.gentoo
+		fi
+		eshopts_pop
+	elif [[ -f "${FILESDIR}/README.gentoo-${SLOT%/*}" ]]; then
+		cp "${FILESDIR}/README.gentoo-${SLOT%/*}" "${T}"/README.gentoo || die
+	elif [[ -f "${FILESDIR}/README.gentoo${README_GENTOO_SUFFIX}" ]]; then
+		cp "${FILESDIR}/README.gentoo${README_GENTOO_SUFFIX}" "${T}"/README.gentoo || die
+	else
+		die "You are not specifying README.gentoo contents!"
+	fi
+
+	dodoc "${T}"/README.gentoo
+	# remember the contents for readme.gentoo_print_elog()
+	README_GENTOO_DOC_VALUE=$(< "${T}/README.gentoo")
+}
+
+# @FUNCTION: readme.gentoo_print_elog
+# @DESCRIPTION:
+# Print elog messages with "${T}"/README.gentoo contents. They will be
+# shown only when package is installed at first time.
+# Usually called at pkg_postinst phase.
+#
+# If you want to show them always, please set FORCE_PRINT_ELOG to a non empty
+# value in your ebuild before this function is called.
+# This can be useful when, for example, DOC_CONTENTS is modified, then, you can
+# rely on specific REPLACING_VERSIONS handling in your ebuild to print messages
+# when people update from versions still providing old message.
+readme.gentoo_print_elog() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	if [[ -z "${README_GENTOO_DOC_VALUE}" ]]; then
+		die "readme.gentoo_print_elog invoked without matching readme.gentoo_create_doc call!"
+	elif ! [[ -n "${REPLACING_VERSIONS}" ]] || [[ -n "${FORCE_PRINT_ELOG}" ]]; then
+		# print only on first install (no versions being replaced),
+		# unless FORCE_PRINT_ELOG requests it unconditionally
+		echo -e "${README_GENTOO_DOC_VALUE}" | while read -r ELINE; do elog "${ELINE}"; done
+		elog ""
+		elog "(Note: Above message is only printed the first time package is"
+		elog "installed. Please look at ${EPREFIX}/usr/share/doc/${PF}/README.gentoo*"
+		elog "for future reference)"
+	fi
+}
+
+
+# @FUNCTION: readme.gentoo_src_install
+# @DESCRIPTION:
+# Install generated doc file automatically.
+readme.gentoo_src_install() {
+	debug-print-function ${FUNCNAME} "${@}"
+	# run the default src_install first, then add README.gentoo
+	default
+	readme.gentoo_create_doc
+}
+
+# @FUNCTION: readme.gentoo_pkg_postinst
+# @DESCRIPTION:
+# Show elog messages from the just generated doc file.
+readme.gentoo_pkg_postinst() {
+	debug-print-function ${FUNCNAME} "${@}"
+	readme.gentoo_print_elog
+}
+
+fi
diff --git a/eclass/rpm.eclass b/eclass/rpm.eclass
new file mode 100644
index 000000000000..b646206b50d2
--- /dev/null
+++ b/eclass/rpm.eclass
@@ -0,0 +1,127 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: rpm.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: convenience class for extracting RPMs
+
+inherit eutils
+
+# rpm2targz provides the rpm2tar tool used by rpm_unpack()
+DEPEND=">=app-arch/rpm2targz-9.0.0.3g"
+
+# @FUNCTION: rpm_unpack
+# @USAGE: <rpms>
+# @DESCRIPTION:
+# Unpack the contents of the specified rpms like the unpack() function.
+rpm_unpack() {
+	# default to all of ${A} when no arguments are given
+	[[ $# -eq 0 ]] && set -- ${A}
+	local a
+	for a in "$@" ; do
+		echo ">>> Unpacking ${a} to ${PWD}"
+		if [[ ${a} == ./* ]] ; then
+			: nothing to do -- path is local
+		elif [[ ${a} == ${DISTDIR}/* ]] ; then
+			ewarn 'QA: do not use ${DISTDIR} with rpm_unpack -- it is added for you'
+		elif [[ ${a} == /* ]] ; then
+			ewarn 'QA: do not use full paths with rpm_unpack -- use ./ paths instead'
+		else
+			a="${DISTDIR}/${a}"
+		fi
+		# convert the rpm payload to a tar stream and extract it here
+		rpm2tar -O "${a}" | tar xf - || die "failure unpacking ${a}"
+	done
+}
+
+# @FUNCTION: srcrpm_unpack
+# @USAGE: <rpms>
+# @DESCRIPTION:
+# Unpack the contents of the specified rpms like the unpack() function as well
+# as any archives that it might contain. Note that the secondary archive
+# unpack isn't perfect in that it simply unpacks all archives in the working
+# directory (with the assumption that there weren't any to start with).
+srcrpm_unpack() {
+	[[ $# -eq 0 ]] && set -- ${A}
+	rpm_unpack "$@"
+
+	# no .src.rpm files, then nothing to do
+	[[ "$* " != *".src.rpm " ]] && return 0
+
+	# nullglob: unmatched patterns below expand to nothing
+	eshopts_push -s nullglob
+
+	# unpack everything
+	local a
+	for a in *.tar.{gz,bz2} *.t{gz,bz2} *.zip *.ZIP ; do
+		unpack "./${a}"
+		rm -f "${a}"
+	done
+
+	eshopts_pop
+
+	return 0
+}
+
+# @FUNCTION: rpm_src_unpack
+# @DESCRIPTION:
+# Automatically unpack all archives in ${A} including rpms. If one of the
+# archives is a source rpm, then the sub archives will be unpacked as well.
+rpm_src_unpack() {
+	local a
+	for a in ${A} ; do
+		case ${a} in
+		*.rpm) srcrpm_unpack "${a}" ;;
+		*) unpack "${a}" ;;
+		esac
+	done
+}
+
+# @FUNCTION: rpm_spec_epatch
+# @USAGE: [spec]
+# @DESCRIPTION:
+# Read the specified spec (defaults to ${PN}.spec) and attempt to apply
+# all the patches listed in it. If the spec does funky things like moving
+# files around, well this won't handle that.
+rpm_spec_epatch() {
+	local p spec=$1
+	local dir
+
+	if [[ -z ${spec} ]] ; then
+		# search likely places for the spec file
+		for spec in "${PWD}" "${S}" "${WORKDIR}" ; do
+			spec+="/${PN}.spec"
+			[[ -e ${spec} ]] && break
+		done
+	fi
+	# directory the spec lives in; patches are resolved relative to it
+	[[ ${spec} == */* ]] \
+		&& dir=${spec%/*} \
+		|| dir=
+
+	ebegin "Applying patches from ${spec}"
+
+	grep '^%patch' "${spec}" | \
+	while read -r line ; do
+		# -r keeps backslashes in spec lines intact; expand the %patch line
+		set -- ${line}
+		p=$1
+		shift
+
+		# process the %patch arguments
+		local arg
+		EPATCH_OPTS=
+		for arg in "$@" ; do
+			case ${arg} in
+			-b) EPATCH_OPTS+=" --suffix" ;;
+			*) EPATCH_OPTS+=" ${arg}" ;;
+			esac
+		done
+
+		# extract the patch name from the Patch# line
+		set -- $(grep "^P${p#%p}: " "${spec}")
+		shift
+		epatch "${dir:+${dir}/}$*"
+	done
+
+	eend
+}
+
+EXPORT_FUNCTIONS src_unpack
diff --git a/eclass/ruby-fakegem.eclass b/eclass/ruby-fakegem.eclass
new file mode 100644
index 000000000000..ed392b59fe81
--- /dev/null
+++ b/eclass/ruby-fakegem.eclass
@@ -0,0 +1,532 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ruby-fakegem.eclass
+# @MAINTAINER:
+# Ruby herd <ruby@gentoo.org>
+# @AUTHOR:
+# Author: Diego E. Pettenò <flameeyes@gentoo.org>
+# Author: Alex Legler <a3li@gentoo.org>
+# @BLURB: An eclass for installing Ruby packages to behave like RubyGems.
+# @DESCRIPTION:
+# This eclass allows to install arbitrary Ruby libraries (including Gems),
+# providing integration into the RubyGems system even for "regular" packages.
+
+inherit ruby-ng
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_NAME
+# @DESCRIPTION:
+# Sets the Gem name for the generated fake gemspec.
+# This variable MUST be set before inheriting the eclass.
+# RUBY_FAKEGEM_NAME="${PN}"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_VERSION
+# @DESCRIPTION:
+# Sets the Gem version for the generated fake gemspec.
+# This variable MUST be set before inheriting the eclass.
+# RUBY_FAKEGEM_VERSION="${PV}"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_TASK_DOC
+# @DESCRIPTION:
+# Specify the rake(1) task to run to generate documentation.
+# RUBY_FAKEGEM_TASK_DOC="rdoc"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_RECIPE_TEST
+# @DESCRIPTION:
+# Specify one of the default testing function for ruby-fakegem:
+# - rake (default; see also RUBY_FAKEGEM_TASK_TEST)
+# - rspec (calls ruby-ng_rspec, adds dev-ruby/rspec:2 to the dependencies)
+# - rspec3 (calls ruby-ng_rspec, adds dev-ruby/rspec:3 to the dependencies)
+# - cucumber (calls ruby-ng_cucumber, adds dev-util/cucumber to the
+# dependencies; does not work on JRuby).
+# - none
+# RUBY_FAKEGEM_RECIPE_TEST="rake"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_TASK_TEST
+# @DESCRIPTION:
+# Specify the rake(1) task used for executing tests. Only valid
+# if RUBY_FAKEGEM_RECIPE_TEST is set to "rake" (the default).
+# RUBY_FAKEGEM_TASK_TEST="test"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_RECIPE_DOC
+# @DESCRIPTION:
+# Specify one of the default API doc building function for ruby-fakegem:
+# - rake (default; see also RUBY_FAKEGEM_TASK_DOC)
+# - rdoc (calls `rdoc-2`, adds dev-ruby/rdoc to the dependencies);
+# - yard (calls `yard`, adds dev-ruby/yard to the dependencies);
+# - none
+# RUBY_FAKEGEM_RECIPE_DOC="rake"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_DOCDIR
+# @DESCRIPTION:
+# Specify the directory under which the documentation is built;
+# if empty no documentation will be installed automatically.
+# Note: if RUBY_FAKEGEM_RECIPE_DOC is set to `rdoc`, this variable is
+# hardwired to `doc`.
+# RUBY_FAKEGEM_DOCDIR=""
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_EXTRADOC
+# @DESCRIPTION:
+# Extra documentation to install (readme, changelogs, …).
+# RUBY_FAKEGEM_EXTRADOC=""
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_DOC_SOURCES
+# @DESCRIPTION:
+# Allow settings defined sources to scan for documentation.
+# This only applies if RUBY_FAKEGEM_DOC_TASK is set to `rdoc`.
+# RUBY_FAKEGEM_DOC_SOURCES="lib"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_BINWRAP
+# @DESCRIPTION:
+# Binaries to wrap around (relative to the bin/ directory)
+# RUBY_FAKEGEM_BINWRAP="*"
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_REQUIRE_PATHS
+# @DESCRIPTION:
+# Extra require paths (beside lib) to add to the specification
+# RUBY_FAKEGEM_REQUIRE_PATHS=""
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_GEMSPEC
+# @DESCRIPTION:
+# Filename of .gemspec file to install instead of generating a generic one.
+# RUBY_FAKEGEM_GEMSPEC=""
+
+# @ECLASS-VARIABLE: RUBY_FAKEGEM_EXTRAINSTALL
+# @DESCRIPTION:
+# List of files and directories relative to the top directory that also
+# get installed. Some gems provide extra files such as version information,
+# Rails generators, or data that needs to be installed as well.
+# RUBY_FAKEGEM_EXTRAINSTALL=""
+
+RUBY_FAKEGEM_NAME="${RUBY_FAKEGEM_NAME:-${PN}}"
+RUBY_FAKEGEM_VERSION="${RUBY_FAKEGEM_VERSION:-${PV/_pre/.pre}}"
+RUBY_FAKEGEM_SUFFIX="${RUBY_FAKEGEM_SUFFIX:-}"
+
+RUBY_FAKEGEM_RECIPE_DOC="${RUBY_FAKEGEM_RECIPE_DOC-rake}"
+RUBY_FAKEGEM_TASK_DOC="${RUBY_FAKEGEM_TASK_DOC-rdoc}"
+RUBY_FAKEGEM_DOC_SOURCES="${RUBY_FAKEGEM_DOC_SOURCES-lib}"
+
+RUBY_FAKEGEM_RECIPE_TEST="${RUBY_FAKEGEM_RECIPE_TEST-rake}"
+RUBY_FAKEGEM_TASK_TEST="${RUBY_FAKEGEM_TASK_TEST-test}"
+
+RUBY_FAKEGEM_BINWRAP="${RUBY_FAKEGEM_BINWRAP-*}"
+
+[[ ${RUBY_FAKEGEM_TASK_DOC} == "" ]] && RUBY_FAKEGEM_RECIPE_DOC="none"
+
+# translate the doc recipe into IUSE, build deps and the doc output dir
+case ${RUBY_FAKEGEM_RECIPE_DOC} in
+	rake)
+		IUSE+=" doc"
+		ruby_add_bdepend "doc? ( dev-ruby/rake )"
+		RUBY_FAKEGEM_DOCDIR="doc"
+		;;
+	rdoc)
+		IUSE+=" doc"
+		ruby_add_bdepend "doc? ( dev-ruby/rdoc )"
+		RUBY_FAKEGEM_DOCDIR="doc"
+		;;
+	yard)
+		# the leading space is required: IUSE is appended to, and a
+		# missing separator would fuse "doc" onto the previous flag
+		IUSE+=" doc"
+		ruby_add_bdepend "doc? ( dev-ruby/yard )"
+		RUBY_FAKEGEM_DOCDIR="doc"
+		;;
+	none)
+		[[ -n ${RUBY_FAKEGEM_DOCDIR} ]] && IUSE+=" doc"
+		;;
+esac
+
+# an empty test task implies no test recipe at all
+[[ ${RUBY_FAKEGEM_TASK_TEST} == "" ]] && RUBY_FAKEGEM_RECIPE_TEST="none"
+
+case ${RUBY_FAKEGEM_RECIPE_TEST} in
+	rake)
+		IUSE+=" test"
+		ruby_add_bdepend "test? ( dev-ruby/rake )"
+		;;
+	rspec)
+		IUSE+=" test"
+		# Also require a new enough rspec-core version that installs the
+		# rspec-2 wrapper.
+		ruby_add_bdepend "test? ( dev-ruby/rspec:2 >=dev-ruby/rspec-core-2.14.8-r2 )"
+		;;
+	rspec3)
+		IUSE+=" test"
+		ruby_add_bdepend "test? ( dev-ruby/rspec:3 )"
+		;;
+	cucumber)
+		IUSE+=" test"
+		# Unfortunately as of August 2012, cucumber is not supported on
+		# JRuby. We work it around here to avoid repeating the same
+		# code over and over again.
+		USE_RUBY="${USE_RUBY/jruby/}" ruby_add_bdepend "test? ( dev-util/cucumber )"
+		;;
+	*)
+		# unknown recipe: disable the test phase entirely
+		RUBY_FAKEGEM_RECIPE_TEST="none"
+		;;
+esac
+
+SRC_URI="mirror://rubygems/${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}${RUBY_FAKEGEM_SUFFIX:+-${RUBY_FAKEGEM_SUFFIX}}.gem"
+
+# rubygems is needed at both build and run time for gem handling
+ruby_add_bdepend virtual/rubygems
+ruby_add_rdepend virtual/rubygems
+
+# @FUNCTION: ruby_fakegem_gemsdir
+# @RETURN: Returns the gem data directory
+# @DESCRIPTION:
+# This function returns the gems data directory for the ruby
+# implementation in question.
+ruby_fakegem_gemsdir() {
+	# EAPI 2 without prefix support: ignore EPREFIX entirely
+	has "${EAPI}" 2 && ! use prefix && EPREFIX=
+
+	# derive the gems dir from sitelibdir:
+	# .../site_ruby/... -> .../gems/..., with EPREFIX stripped
+	local _gemsitedir=$(ruby_rbconfig_value 'sitelibdir')
+	_gemsitedir=${_gemsitedir//site_ruby/gems}
+	_gemsitedir=${_gemsitedir#${EPREFIX}}
+
+	[[ -z ${_gemsitedir} ]] && {
+		eerror "Unable to find the gems dir"
+		die "Unable to find the gems dir"
+	}
+
+	echo "${_gemsitedir}"
+}
+
+# @FUNCTION: ruby_fakegem_doins
+# @USAGE: file [file...]
+# @DESCRIPTION:
+# Installs the specified file(s) into the gems directory.
+ruby_fakegem_doins() {
+	# subshell so the insinto destination does not leak to the caller
+	(
+		insinto $(ruby_fakegem_gemsdir)/gems/${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}
+		doins "$@"
+	) || die "failed $0 $@"
+}
+
+# @FUNCTION: ruby_fakegem_newins
+# @USAGE: file filename
+# @DESCRIPTION:
+# Installs the specified file into the gems directory using the provided filename.
+ruby_fakegem_newins() {
+	(
+		# Since newins does not accept full paths but just basenames
+		# for the target file, we want to extend it here.
+		local newdirname=/$(dirname "$2")
+		[[ ${newdirname} == "/." ]] && newdirname=
+
+		local newbasename=$(basename "$2")
+
+		insinto $(ruby_fakegem_gemsdir)/gems/${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}${newdirname}
+		newins "$1" ${newbasename}
+	) || die "failed $0 $@"
+}
+
+# @FUNCTION: ruby_fakegem_install_gemspec
+# @DESCRIPTION:
+# Install a .gemspec file for this package. Either use the file indicated
+# by the RUBY_FAKEGEM_GEMSPEC variable, or generate one using
+# ruby_fakegem_genspec.
+ruby_fakegem_install_gemspec() {
+	# one intermediate gemspec per implementation to avoid clashes
+	local gemspec="${T}"/${RUBY_FAKEGEM_NAME}-${_ruby_implementation}
+
+	(
+		if [[ ${RUBY_FAKEGEM_GEMSPEC} != "" ]]; then
+			ruby_fakegem_gemspec_gemspec ${RUBY_FAKEGEM_GEMSPEC} ${gemspec}
+		else
+			local metadata="${WORKDIR}"/${_ruby_implementation}/metadata
+
+			# prefer the gem-provided metadata over a generated spec
+			if [[ -e ${metadata} ]]; then
+				ruby_fakegem_metadata_gemspec ${metadata} ${gemspec}
+			else
+				ruby_fakegem_genspec ${gemspec}
+			fi
+		fi
+	) || die "Unable to generate gemspec file."
+
+	insinto $(ruby_fakegem_gemsdir)/specifications
+	newins ${gemspec} ${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}.gemspec || die "Unable to install gemspec file."
+}
+
+# @FUNCTION: ruby_fakegem_gemspec_gemspec
+# @USAGE: gemspec-input gemspec-output
+# @DESCRIPTION:
+# Generates an installable version of the specification indicated by
+# RUBY_FAKEGEM_GEMSPEC. This file is eval'ed to produce a final specification
+# in a way similar to packaging the gemspec file.
+ruby_fakegem_gemspec_gemspec() {
+	# eval the Ruby gemspec and serialize the result back to Ruby code
+	${RUBY} -e "puts eval(File::open('$1').read).to_ruby" > $2
+}
+
+# @FUNCTION: ruby_fakegem_metadata_gemspec
+# @USAGE: gemspec-metadata gemspec-output
+# @DESCRIPTION:
+# Generates an installable version of the specification indicated by
+# the metadata distributed by the gem itself. This is similar to how
+# rubygems creates an installation from a .gem file.
+ruby_fakegem_metadata_gemspec() {
+	case ${RUBY} in
+		*jruby)
+			${RUBY} -r yaml -e "puts Gem::Specification.from_yaml(File::open('$1').read).to_ruby" > $2
+			;;
+		*)
+			# MRI needs an explicit encoding to read UTF-8 metadata
+			${RUBY} -r yaml -e "puts Gem::Specification.from_yaml(File::open('$1', :encoding => 'UTF-8').read).to_ruby" > $2
+			;;
+	esac
+}
+
+# @FUNCTION: ruby_fakegem_genspec
+# @USAGE: output-gemspec
+# @DESCRIPTION:
+# Generates a gemspec for the package and places it into the "specifications"
+# directory of RubyGems.
+# If the metadata normally distributed with a gem is present then that is
+# used to generate the gemspec file.
+#
+# As a fallback we can generate our own version.
+# In the gemspec, the following values are set: name, version, summary,
+# homepage, and require_paths=["lib"].
+# See RUBY_FAKEGEM_NAME and RUBY_FAKEGEM_VERSION for setting name and version.
+# See RUBY_FAKEGEM_REQUIRE_PATHS for setting extra require paths.
+ruby_fakegem_genspec() {
+	# build the Ruby array literal for require_paths: 'lib' plus extras
+	local required_paths="'lib'"
+	for path in ${RUBY_FAKEGEM_REQUIRE_PATHS}; do
+		required_paths="${required_paths}, '${path}'"
+	done
+
+	# We use the _ruby_implementation variable to avoid having stray
+	# copies with different implementations; while for now we're using
+	# the same exact content, we might have differences in the future,
+	# so better taking this into consideration.
+	local quoted_description=${DESCRIPTION//\"/\\\"}
+	cat - > $1 <<EOF
+# generated by ruby-fakegem.eclass $Revision: 1.45 $
+Gem::Specification.new do |s|
+  s.name = "${RUBY_FAKEGEM_NAME}"
+  s.version = "${RUBY_FAKEGEM_VERSION}"
+  s.summary = "${quoted_description}"
+  s.homepage = "${HOMEPAGE}"
+  s.require_paths = [${required_paths}]
+end
+EOF
+}
+
+# @FUNCTION: ruby_fakegem_binwrapper
+# @USAGE: command [path] [content]
+# @DESCRIPTION:
+# Creates a new binary wrapper for a command installed by the RubyGem.
+# path defaults to /usr/bin/$command content is optional and can be used
+# to inject additional ruby code into the wrapper. This may be useful to
+# e.g. force a specific version using the gem command.
+ruby_fakegem_binwrapper() {
+	(
+		local gembinary=$1
+		local newbinary=${2:-/usr/bin/$gembinary}
+		local content=$3
+		local relativegembinary=${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}/bin/${gembinary}
+		local binpath=$(dirname $newbinary)
+		[[ ${binpath} = . ]] && binpath=/usr/bin
+
+		# Try to find out whether the package is going to install for
+		# one or multiple implementations; if we're installing for a
+		# *single* implementation, no need to use “/usr/bin/env ruby”
+		# in the shebang, and we can actually avoid errors when
+		# calling the script by default (see for instance the
+		# JRuby-specific commands).
+		local implementation rubycmd=
+		for implementation in ${USE_RUBY}; do
+			# ignore non-enabled implementations
+			use ruby_targets_${implementation} || continue
+			# quoted [[ ]] test: the old unquoted `[ -z $rubycmd ]`
+			# misbehaves once rubycmd holds a multi-word value
+			if [[ -z ${rubycmd} ]]; then
+				# if no other implementation was set before, set it.
+				rubycmd="$(ruby_implementation_command ${implementation})"
+			else
+				# if another implementation already arrived, then make
+				# it generic and break out of the loop. This ensures
+				# that we do at most two iterations.
+				rubycmd="/usr/bin/env ruby"
+				break
+			fi
+		done
+
+		cat - > "${T}"/gembin-wrapper-${gembinary} <<EOF
+#!${rubycmd}
+# This is a simplified version of the RubyGems wrapper
+#
+# Generated by ruby-fakegem.eclass $Revision: 1.45 $
+
+require 'rubygems'
+
+${content}
+load Gem::default_path[-1] + "/gems/${relativegembinary}"
+
+EOF
+
+		exeinto ${binpath:-/usr/bin}
+		newexe "${T}"/gembin-wrapper-${gembinary} $(basename $newbinary)
+	) || die "Unable to create fakegem wrapper"
+}
+
+# @FUNCTION: all_fakegem_compile
+# @DESCRIPTION:
+# Build documentation for the package if indicated by the doc USE flag
+# and if there is a documentation task defined.
+all_fakegem_compile() {
+	if [[ -n ${RUBY_FAKEGEM_DOCDIR} ]] && use doc; then
+		case ${RUBY_FAKEGEM_RECIPE_DOC} in
+			rake)
+				rake ${RUBY_FAKEGEM_TASK_DOC} || die "failed to (re)build documentation"
+				;;
+			rdoc)
+				rdoc ${RUBY_FAKEGEM_DOC_SOURCES} || die "failed to (re)build documentation"
+				;;
+			yard)
+				yard doc ${RUBY_FAKEGEM_DOC_SOURCES} || die "failed to (re)build documentation"
+				;;
+		esac
+	fi
+}
+
+# @FUNCTION: all_ruby_unpack
+# @DESCRIPTION:
+# Unpack the source archive, including support for unpacking gems.
+all_ruby_unpack() {
+	# Special support for extracting .gem files; the file need to be
+	# extracted twice and the mtime from the archive _has_ to be
+	# ignored (it's always set to epoch 0).
+	for archive in ${A}; do
+		case "${archive}" in
+			*.gem)
+				# Make sure that we're not running unpack for more than
+				# one .gem file, since we won't support that at all.
+				[[ -d "${S}" ]] && die "Unable to unpack ${archive}, ${S} exists"
+
+				# tar -m ignores the stored mtimes (always epoch 0)
+				ebegin "Unpacking .gem file..."
+				tar -mxf "${DISTDIR}"/${archive} || die
+				eend $?
+
+				ebegin "Uncompressing metadata"
+				gunzip metadata.gz || die
+				eend $?
+
+				mkdir "${S}"
+				pushd "${S}" &>/dev/null
+
+				# NOTE(review): my_WORKDIR is presumably set by ruby-ng
+				# before this phase runs -- confirm against ruby-ng.eclass
+				ebegin "Unpacking data.tar.gz"
+				tar -mxf "${my_WORKDIR}"/data.tar.gz || die
+				eend $?
+
+				popd &>/dev/null
+				;;
+			*.patch.bz2)
+				# We apply the patches with RUBY_PATCHES directly from DISTDIR,
+				# as the WORKDIR variable changes value between the global-scope
+				# and the time all_ruby_unpack/_prepare are called. Since we can
+				# simply decompress them when applying, this is much easier to
+				# deal with for us.
+				einfo "Keeping ${archive} as-is"
+				;;
+			*)
+				unpack ${archive}
+				;;
+		esac
+	done
+}
+
+# @FUNCTION: all_ruby_compile
+# @DESCRIPTION:
+# Compile the package.
+all_ruby_compile() {
+	# only documentation needs building for fake gems
+	all_fakegem_compile
+}
+
+# @FUNCTION: each_fakegem_test
+# @DESCRIPTION:
+# Run tests for the package for each ruby target if the test task is defined.
+each_fakegem_test() {
+	case ${RUBY_FAKEGEM_RECIPE_TEST} in
+		rake)
+			${RUBY} -S rake ${RUBY_FAKEGEM_TASK_TEST} || die "tests failed"
+			;;
+		rspec)
+			# RSPEC_VERSION selects the wrapper used by ruby-ng_rspec
+			RSPEC_VERSION=2 ruby-ng_rspec
+			;;
+		rspec3)
+			RSPEC_VERSION=3 ruby-ng_rspec
+			;;
+		cucumber)
+			ruby-ng_cucumber
+			;;
+		none)
+			ewarn "each_fakegem_test called, but \${RUBY_FAKEGEM_RECIPE_TEST} is 'none'"
+			;;
+	esac
+}
+
+# only define a test phase when a test recipe is configured, so that
+# packages without tests do not provide each_ruby_test at all
+if [[ ${RUBY_FAKEGEM_RECIPE_TEST} != none ]]; then
+	# @FUNCTION: each_ruby_test
+	# @DESCRIPTION:
+	# Run the tests for this package.
+	each_ruby_test() {
+		each_fakegem_test
+	}
+fi
+
+# @FUNCTION: each_fakegem_install
+# @DESCRIPTION:
+# Install the package for each ruby target.
+each_fakegem_install() {
+	ruby_fakegem_install_gemspec
+
+	# install bin/ and lib/ when present, plus any extra paths
+	local _gemlibdirs="${RUBY_FAKEGEM_EXTRAINSTALL}"
+	for directory in bin lib; do
+		[[ -d ${directory} ]] && _gemlibdirs="${_gemlibdirs} ${directory}"
+	done
+
+	[[ -n ${_gemlibdirs} ]] && \
+		ruby_fakegem_doins -r ${_gemlibdirs}
+}
+
+# @FUNCTION: each_ruby_install
+# @DESCRIPTION:
+# Install the package for each target.
+each_ruby_install() {
+	# default per-implementation install is the fakegem one
+	each_fakegem_install
+}
+
+# @FUNCTION: all_fakegem_install
+# @DESCRIPTION:
+# Install files common to all ruby targets.
+all_fakegem_install() {
+	# install the built API documentation, if any
+	if [[ -n ${RUBY_FAKEGEM_DOCDIR} ]] && use doc; then
+		for dir in ${RUBY_FAKEGEM_DOCDIR}; do
+			[[ -d ${dir} ]] || continue
+
+			pushd ${dir} &>/dev/null
+			dohtml -r * || die "failed to install documentation"
+			popd &>/dev/null
+		done
+	fi
+
+	if [[ -n ${RUBY_FAKEGEM_EXTRADOC} ]]; then
+		dodoc ${RUBY_FAKEGEM_EXTRADOC} || die "failed to install further documentation"
+	fi
+
+	# binary wrappers; we assume that all the implementations get the
+	# same binaries, or something is wrong anyway, so...
+	if [[ -n ${RUBY_FAKEGEM_BINWRAP} ]]; then
+		local bindir=$(find "${D}" -type d -path "*/gems/${RUBY_FAKEGEM_NAME}-${RUBY_FAKEGEM_VERSION}/bin" -print -quit)
+
+		if [[ -d "${bindir}" ]]; then
+			pushd "${bindir}" &>/dev/null
+			# eval so that globs in RUBY_FAKEGEM_BINWRAP expand here
+			local binaries=$(eval ls ${RUBY_FAKEGEM_BINWRAP})
+			for binary in $binaries; do
+				ruby_fakegem_binwrapper $binary
+			done
+			popd &>/dev/null
+		fi
+	fi
+}
+
+# @FUNCTION: all_ruby_install
+# @DESCRIPTION:
+# Install files common to all ruby targets.
+all_ruby_install() {
+	# delegate to the fakegem implementation
+	all_fakegem_install
+}
diff --git a/eclass/ruby-ng-gnome2.eclass b/eclass/ruby-ng-gnome2.eclass
new file mode 100644
index 000000000000..87965feec4eb
--- /dev/null
+++ b/eclass/ruby-ng-gnome2.eclass
@@ -0,0 +1,93 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ruby-ng-gnome2.eclass
+# @MAINTAINER:
+# Ruby herd <ruby@gentoo.org>
+# @AUTHOR:
+# Author: Hans de Graaff <graaff@gentoo.org>
+# @BLURB: An eclass to simplify handling of various ruby-gnome2 parts.
+# @DESCRIPTION:
+# This eclass simplifies installation of the various pieces of
+# ruby-gnome2 since they share a very common installation procedure.
+
+# gem name defaults to the binding name without the "ruby-" prefix
+RUBY_FAKEGEM_NAME="${RUBY_FAKEGEM_NAME:-${PN#ruby-}}"
+# disable the rake-based doc/test tasks; phases are defined below instead
+RUBY_FAKEGEM_TASK_TEST=""
+RUBY_FAKEGEM_TASK_DOC=""
+
+inherit ruby-fakegem multilib versionator
+
+IUSE=""
+
+# Define EPREFIX if needed
+has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+# subdirectory of the ruby-gnome2-all tarball holding this binding
+subbinding=${PN#ruby-}
+if [ $(get_version_component_range "1-2") == "0.19" ]; then
+	subbinding=${subbinding/%2}
+else
+	subbinding=${subbinding/-/_}
+	DEPEND="virtual/pkgconfig"
+	ruby_add_bdepend "dev-ruby/pkg-config"
+fi
+if has "${EAPI:-0}" 0 1 2 3 ; then
+	S=${WORKDIR}/ruby-gnome2-all-${PV}/${subbinding}
+else
+	RUBY_S=ruby-gnome2-all-${PV}/${subbinding}
+fi
+SRC_URI="mirror://sourceforge/ruby-gnome2/ruby-gnome2-all-${PV}.tar.gz"
+HOMEPAGE="http://ruby-gnome2.sourceforge.jp/"
+LICENSE="Ruby"
+SLOT="0"
+
+# @FUNCTION: each_ruby_configure
+# @DESCRIPTION:
+# Run the configure script in the subbinding for each specific ruby target.
+each_ruby_configure() {
+	# extconf.rb generates the Makefile for this implementation
+	${RUBY} extconf.rb || die "extconf.rb failed"
+}
+
+# @FUNCTION: each_ruby_compile
+# @DESCRIPTION:
+# Compile the C bindings in the subbinding for each specific ruby target.
+each_ruby_compile() {
+	# We have injected --no-undefined in Ruby as a safety precaution
+	# against broken ebuilds, but the Ruby-Gnome bindings
+	# unfortunately rely on the lazy load of other extensions; see bug
+	# #320545.
+	# The second expression also injects user LDFLAGS into the Makefiles.
+	find . -name Makefile -print0 | xargs -0 \
+		sed -i -e 's:-Wl,--no-undefined ::' \
+			-e "s/^ldflags = /ldflags = $\(LDFLAGS\) /" \
+		|| die "--no-undefined removal failed"
+
+	emake V=1 || die "emake failed"
+}
+
+# @FUNCTION: each_ruby_install
+# @DESCRIPTION:
+# Install the files in the subbinding for each specific ruby target.
+each_ruby_install() {
+	# Create the directories, or the package will create them as files.
+	local archdir=$(ruby_rbconfig_value "sitearchdir")
+	dodir ${archdir#${EPREFIX}} /usr/$(get_libdir)/pkgconfig
+
+	emake DESTDIR="${D}" install || die "make install failed"
+
+	# also perform the usual fakegem install steps
+	each_fakegem_install
+}
+
+# @FUNCTION: all_ruby_install
+# @DESCRIPTION:
+# Install the files common to all ruby targets.
+all_ruby_install() {
+	# shared docs live one directory up in the combined tarball
+	for doc in ../AUTHORS ../NEWS ChangeLog README; do
+		[ -s "$doc" ] && dodoc $doc
+	done
+	if [[ -d sample ]]; then
+		insinto /usr/share/doc/${PF}
+		doins -r sample || die "sample install failed"
+	fi
+
+	all_fakegem_install
+}
diff --git a/eclass/ruby-ng.eclass b/eclass/ruby-ng.eclass
new file mode 100644
index 000000000000..4b99b399e6c6
--- /dev/null
+++ b/eclass/ruby-ng.eclass
@@ -0,0 +1,724 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ruby-ng.eclass
+# @MAINTAINER:
+# Ruby herd <ruby@gentoo.org>
+# @AUTHOR:
+# Author: Diego E. Pettenò <flameeyes@gentoo.org>
+# Author: Alex Legler <a3li@gentoo.org>
+# Author: Hans de Graaff <graaff@gentoo.org>
+# @BLURB: An eclass for installing Ruby packages with proper support for multiple Ruby slots.
+# @DESCRIPTION:
+# The Ruby eclass is designed to allow an easier installation of Ruby packages
+# and their incorporation into the Gentoo Linux system.
+#
+# Currently available targets are:
+# * ruby18 - Ruby (MRI) 1.8.x
+# * ruby19 - Ruby (MRI) 1.9.x
+# * ruby20 - Ruby (MRI) 2.0.x
+# * ruby21 - Ruby (MRI) 2.1.x
+# * ruby22 - Ruby (MRI) 2.2.x
+# * ree18 - Ruby Enterprise Edition 1.8.x
+# * jruby - JRuby
+# * rbx - Rubinius
+#
+# This eclass does not define the implementation of the configure,
+# compile, test, or install phases. Instead, the default phases are
+# used. Specific implementations of these phases can be provided in
+# the ebuild either to be run for each Ruby implementation, or for all
+# Ruby implementations, as follows:
+#
+# * each_ruby_configure
+# * all_ruby_configure
+
+# @ECLASS-VARIABLE: USE_RUBY
+# @DEFAULT_UNSET
+# @REQUIRED
+# @DESCRIPTION:
+# This variable contains a space-separated list of targets (see above) that a
+# package is compatible with. It must be set before the `inherit' call. There
+# is no default. All ebuilds are expected to set this variable.
+
+# @ECLASS-VARIABLE: RUBY_PATCHES
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A String or Array of filenames of patches to apply to all implementations.
+
+# @ECLASS-VARIABLE: RUBY_OPTIONAL
+# @DESCRIPTION:
+# Set the value to "yes" to make the dependency on a Ruby interpreter
+# optional and then ruby_implementations_depend() to help populate
+# DEPEND and RDEPEND.
+
+# @ECLASS-VARIABLE: RUBY_S
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If defined this variable determines the source directory name after
+# unpacking. This defaults to the name of the package. Note that this
+# variable supports a wildcard mechanism to help with github tarballs
+# that contain the commit hash as part of the directory name.
+
+# @ECLASS-VARIABLE: RUBY_QA_ALLOWED_LIBS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If defined this variable contains a whitelist of shared objects that
+# are allowed to exist even if they don't link to libruby. This avoids
+# the QA check that makes this mandatory. This is most likely not what
+# you are looking for if you get the related "Missing links" QA warning,
+# since the proper fix is almost always to make sure the shared object
+# is linked against libruby. There are cases were this is not the case
+# and the shared object is generic code to be used in some other way
+# (e.g. selenium's firefox driver extension). When set this argument is
+# passed to "grep -E" to remove reporting of these shared objects.
+
+inherit eutils java-utils-2 multilib toolchain-funcs ruby-utils
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_test src_install pkg_setup
+
+# Gate the eclass on known EAPIs.
+case ${EAPI} in
+	0|1)
+		die "Unsupported EAPI=${EAPI} (too old) for ruby-ng.eclass" ;;
+	2|3) ;;
+	4|5)
+		# S is no longer automatically assigned when it doesn't exist.
+		S="${WORKDIR}"
+		;;
+	*)
+		die "Unknown EAPI=${EAPI} for ruby-ng.eclass"
+esac
+
+# @FUNCTION: ruby_implementation_depend
+# @USAGE: target [comparator [version]]
+# @RETURN: Package atom of a Ruby implementation to be used in dependencies.
+# @DESCRIPTION:
+# This function returns the formal package atom for a Ruby implementation.
+#
+# `target' has to be one of the valid values for USE_RUBY (see above)
+#
+# Set `comparator' and `version' to include a comparator (=, >=, etc.) and a
+# version string to the returned string
+ruby_implementation_depend() {
+	# Forward the optional comparator ($2) and version ($3) arguments
+	# too: previously only the target was passed on, silently dropping
+	# the version restriction documented above even though
+	# _ruby_implementation_depend() consumes $2 and $3.
+	_ruby_implementation_depend $1 $2 $3
+}
+
+# @FUNCTION: ruby_samelib
+# @RETURN: use flag string with current ruby implementations
+# @DESCRIPTION:
+# Convenience function to output the use dependency part of a
+# dependency. Used as a building block for ruby_add_rdepend() and
+# ruby_add_bdepend(), but may also be useful in an ebuild to specify
+# more complex dependencies.
+ruby_samelib() {
+	local res=
+	# Any argument of the form -<target> excludes that target from the
+	# generated list; every remaining USE_RUBY target contributes a
+	# ruby_targets_<target>? use-dependency.
+	for _ruby_implementation in $USE_RUBY; do
+		has -${_ruby_implementation} $@ || \
+			res="${res}ruby_targets_${_ruby_implementation}?,"
+	done
+
+	# Trim the trailing comma and wrap in brackets, e.g.
+	# [ruby_targets_ruby20?,ruby_targets_ruby21?]
+	echo "[${res%,}]"
+}
+
+# Wrap the given dependency specification in a RUBYTARGET?
+# use-conditional group and add [RUBYTARGET] to every atom's
+# use-dependency list.  RUBYTARGET is a placeholder that
+# _ruby_atoms_samelib() later substitutes per real ruby target.
+_ruby_atoms_samelib_generic() {
+	# noglob: atoms carry [ and ] which must not be glob-expanded.
+	eshopts_push -o noglob
+	echo "RUBYTARGET? ("
+	for token in $*; do
+		case "$token" in
+			"||" | "(" | ")" | *"?")
+				# Group syntax and USE-conditionals pass through as-is.
+				echo "${token}" ;;
+			*])
+				# Atom already has a use-dep block: splice RUBYTARGET
+				# in as its first entry.
+				echo "${token%[*}[RUBYTARGET,${token/*[}" ;;
+			*)
+				echo "${token}[RUBYTARGET]" ;;
+		esac
+	done
+	echo ")"
+	eshopts_pop
+}
+
+# @FUNCTION: ruby_implementation_command
+# @RETURN: the path to the given ruby implementation
+# @DESCRIPTION:
+# Not all implementations have the same command basename as the
+# target; namely Ruby Enterprise 1.8 uses the target name ree18 but
+# the command rubyee18. This function translates between the two.
+ruby_implementation_command() {
+ local _ruby_name=$1
+
+ # Add all USE_RUBY values where the flag name diverts from the binary here
+ case $1 in
+ ree18)
+ _ruby_name=rubyee18
+ ;;
+ esac
+
+ echo $(type -p ${_ruby_name} 2>/dev/null)
+}
+
+# Expand the RUBYTARGET placeholder produced by
+# _ruby_atoms_samelib_generic() once per target in USE_RUBY, yielding
+# one ruby_targets_<target>? conditional group per implementation.
+_ruby_atoms_samelib() {
+	local atoms=$(_ruby_atoms_samelib_generic "$*")
+
+	for _ruby_implementation in $USE_RUBY; do
+		echo "${atoms//RUBYTARGET/ruby_targets_${_ruby_implementation}}"
+	done
+}
+
+_ruby_wrap_conditions() {
+ local conditions="$1"
+ local atoms="$2"
+
+ for condition in $conditions; do
+ atoms="${condition}? ( ${atoms} )"
+ done
+
+ echo "$atoms"
+}
+
+# @FUNCTION: ruby_add_rdepend
+# @USAGE: dependencies
+# @DESCRIPTION:
+# Adds the specified dependencies, with use condition(s) to RDEPEND,
+# taking the current set of ruby targets into account. This makes sure
+# that all ruby dependencies of the package are installed for the same
+# ruby targets. Use this function for all ruby dependencies instead of
+# setting RDEPEND yourself. The list of atoms uses the same syntax as
+# normal dependencies.
+#
+# Note: runtime dependencies are also added as build-time test
+# dependencies.
+ruby_add_rdepend() {
+	case $# in
+		1) ;;
+		2)
+			# Legacy two-argument form: $1 is a list of USE
+			# conditions, $2 the dependencies.  Rewrite into the
+			# single-argument syntax and recurse.
+			[[ "${GENTOO_DEV}" == "yes" ]] && eqawarn "You can now use the usual syntax in ruby_add_rdepend for $CATEGORY/$PF"
+			ruby_add_rdepend "$(_ruby_wrap_conditions "$1" "$2")"
+			return
+			;;
+		*)
+			die "bad number of arguments to $0"
+			;;
+	esac
+
+	local dependency=$(_ruby_atoms_samelib "$1")
+
+	RDEPEND="${RDEPEND} $dependency"
+
+	# Add the dependency as a test-dependency since we're going to
+	# execute the code during test phase.
+	DEPEND="${DEPEND} test? ( ${dependency} )"
+	has test "$IUSE" || IUSE="${IUSE} test"
+}
+
+# @FUNCTION: ruby_add_bdepend
+# @USAGE: dependencies
+# @DESCRIPTION:
+# Adds the specified dependencies, with use condition(s) to DEPEND,
+# taking the current set of ruby targets into account. This makes sure
+# that all ruby dependencies of the package are installed for the same
+# ruby targets. Use this function for all ruby dependencies instead of
+# setting DEPEND yourself. The list of atoms uses the same syntax as
+# normal dependencies.
+ruby_add_bdepend() {
+	case $# in
+		1) ;;
+		2)
+			# Legacy two-argument form, handled as in
+			# ruby_add_rdepend: wrap and recurse.
+			[[ "${GENTOO_DEV}" == "yes" ]] && eqawarn "You can now use the usual syntax in ruby_add_bdepend for $CATEGORY/$PF"
+			ruby_add_bdepend "$(_ruby_wrap_conditions "$1" "$2")"
+			return
+			;;
+		*)
+			die "bad number of arguments to $0"
+			;;
+	esac
+
+	local dependency=$(_ruby_atoms_samelib "$1")
+
+	DEPEND="${DEPEND} $dependency"
+	# NOTE(review): self-assignment is a functional no-op — presumably
+	# kept so RDEPEND is marked as set by the eclass; confirm before
+	# removing.
+	RDEPEND="${RDEPEND}"
+}
+
+# @FUNCTION: ruby_get_use_implementations
+# @DESCRIPTION:
+# Gets an array of ruby use targets enabled by the user
+ruby_get_use_implementations() {
+	local i implementation
+	# Collect only the USE_RUBY targets the user actually enabled.
+	for implementation in ${USE_RUBY}; do
+		use ruby_targets_${implementation} && i+=" ${implementation}"
+	done
+	# Unquoted on purpose: drops the leading separator space.
+	echo $i
+}
+
+# @FUNCTION: ruby_get_use_targets
+# @DESCRIPTION:
+# Gets an array of ruby use targets that the ebuild sets
+ruby_get_use_targets() {
+	# Map every USE_RUBY entry to its ruby_targets_* flag name.
+	local targets impl
+	for impl in ${USE_RUBY}; do
+		targets+=" ruby_targets_${impl}"
+	done
+	# Unquoted on purpose: drops the leading separator space.
+	echo $targets
+}
+
+# @FUNCTION: ruby_implementations_depend
+# @RETURN: Dependencies suitable for injection into DEPEND and RDEPEND.
+# @DESCRIPTION:
+# Produces the dependency string for the various implementations of ruby
+# which the package is being built against. This should not be used when
+# RUBY_OPTIONAL is unset but must be used if RUBY_OPTIONAL=yes. Do not
+# confuse this function with ruby_implementation_depend().
+#
+# @EXAMPLE:
+# EAPI=4
+# RUBY_OPTIONAL=yes
+#
+# inherit ruby-ng
+# ...
+# DEPEND="ruby? ( $(ruby_implementations_depend) )"
+# RDEPEND="${DEPEND}"
+ruby_implementations_depend() {
+	local depend
+	for _ruby_implementation in ${USE_RUBY}; do
+		# ${depend+ } expands to a separating space only once depend
+		# is already non-empty (i.e. from the second entry onwards).
+		depend="${depend}${depend+ }ruby_targets_${_ruby_implementation}? ( $(ruby_implementation_depend $_ruby_implementation) )"
+	done
+	echo "${depend}"
+}
+
+IUSE+=" $(ruby_get_use_targets)"
+# If you specify RUBY_OPTIONAL you also need to take care of
+# ruby useflag and dependency.
+if [[ ${RUBY_OPTIONAL} != yes ]]; then
+	DEPEND="${DEPEND} $(ruby_implementations_depend)"
+	RDEPEND="${RDEPEND} $(ruby_implementations_depend)"
+
+	case ${EAPI:-0} in
+		4|5)
+			# REQUIRED_USE exists from EAPI 4 on: require at least one
+			# enabled target.
+			REQUIRED_USE+=" || ( $(ruby_get_use_targets) )"
+			;;
+	esac
+fi
+
+# Run the given command ($2...) inside the environment directory named
+# by $1 (e.g. "all" or a ruby target), temporarily repointing S into
+# that copy of the sources and restoring it afterwards.
+_ruby_invoke_environment() {
+	old_S=${S}
+	# Derive the source subdirectory relative to the environment dir.
+	case ${EAPI} in
+		4|5)
+			if [ -z "${RUBY_S}" ]; then
+				sub_S=${P}
+			else
+				sub_S=${RUBY_S}
+			fi
+			;;
+		*)
+			sub_S=${S#${WORKDIR}/}
+			;;
+	esac
+
+	# Special case, for the always-lovely GitHub fetches. With this,
+	# we allow the star glob to just expand to whatever directory it's
+	# called.
+	if [[ "${sub_S}" = *"*"* ]]; then
+		case ${EAPI} in
+			2|3)
+				# The old method of setting S depends on undefined package
+				# manager behaviour, so encourage upgrading to EAPI=4.
+				eqawarn "Using * expansion of S is deprecated. Use EAPI and RUBY_S instead."
+				;;
+		esac
+		pushd "${WORKDIR}"/all &>/dev/null
+		sub_S=$(eval ls -d "${sub_S}" 2>/dev/null)
+		popd &>/dev/null
+	fi
+
+	environment=$1; shift
+
+	my_WORKDIR="${WORKDIR}"/${environment}
+	S="${my_WORKDIR}"/"${sub_S}"
+
+	# Fall back to the environment dir, then WORKDIR, when the source
+	# subdirectory does not (yet) exist.
+	if [[ -d "${S}" ]]; then
+		pushd "$S" &>/dev/null
+	elif [[ -d "${my_WORKDIR}" ]]; then
+		pushd "${my_WORKDIR}" &>/dev/null
+	else
+		pushd "${WORKDIR}" &>/dev/null
+	fi
+
+	# _PHASE lets internal callers label the ebegin output.
+	ebegin "Running ${_PHASE:-${EBUILD_PHASE}} phase for $environment"
+	"$@"
+	popd &>/dev/null
+
+	S=${old_S}
+}
+
+# Execute the given command once per ruby target that is both listed in
+# USE_RUBY and enabled by the user, with RUBY set to that target's
+# interpreter.  Called with no arguments it only validates that at
+# least one compatible target is selected, and dies otherwise.
+_ruby_each_implementation() {
+	local invoked=no
+	for _ruby_implementation in ${USE_RUBY}; do
+		# only proceed if it's requested
+		use ruby_targets_${_ruby_implementation} || continue
+
+		RUBY=$(ruby_implementation_command ${_ruby_implementation})
+		invoked=yes
+
+		if [[ -n "$1" ]]; then
+			_ruby_invoke_environment ${_ruby_implementation} "$@"
+		fi
+
+		unset RUBY
+	done
+
+	if [[ ${invoked} == "no" ]]; then
+		eerror "You need to select at least one compatible Ruby installation target via RUBY_TARGETS in make.conf."
+		eerror "Compatible targets for this package are: ${USE_RUBY}"
+		eerror
+		eerror "See http://www.gentoo.org/proj/en/prog_lang/ruby/index.xml#doc_chap3 for more information."
+		eerror
+		die "No compatible Ruby target selected."
+	fi
+}
+
+# @FUNCTION: ruby-ng_pkg_setup
+# @DESCRIPTION:
+# Check whether at least one ruby target implementation is present.
+ruby-ng_pkg_setup() {
+	# This only checks that at least one implementation is present
+	# before doing anything; by leaving the parameters empty we know
+	# it's a special case.
+	_ruby_each_implementation
+
+	# JRuby needs a configured JVM; set one up when that target is on.
+	has ruby_targets_jruby ${IUSE} && use ruby_targets_jruby && java-pkg_setup-vm
+}
+
+# @FUNCTION: ruby-ng_src_unpack
+# @DESCRIPTION:
+# Unpack the source archive.
+ruby-ng_src_unpack() {
+	# Unpack into WORKDIR/all: this master copy is later cloned once
+	# per enabled implementation (see _ruby_source_copy).
+	mkdir "${WORKDIR}"/all
+	pushd "${WORKDIR}"/all &>/dev/null
+
+	# We don't support an each-unpack, it's either all or nothing!
+	if type all_ruby_unpack &>/dev/null; then
+		_ruby_invoke_environment all all_ruby_unpack
+	else
+		[[ -n ${A} ]] && unpack ${A}
+	fi
+
+	popd &>/dev/null
+}
+
+# Apply each entry of RUBY_PATCHES, looked up first as given and then
+# relative to ${FILESDIR}, then run the ebuild's all_ruby_prepare hook.
+_ruby_apply_patches() {
+	for patch in "${RUBY_PATCHES[@]}"; do
+		if [ -f "${patch}" ]; then
+			epatch "${patch}"
+		elif [ -f "${FILESDIR}/${patch}" ]; then
+			epatch "${FILESDIR}/${patch}"
+		else
+			die "Cannot find patch ${patch}"
+		fi
+	done
+
+	# This is a special case: instead of executing just in the special
+	# "all" environment, this will actually copy the effects on _all_
+	# the other environments, and is thus executed before the copy
+	type all_ruby_prepare &>/dev/null && all_ruby_prepare
+}
+
+# Clone the prepared "all" tree into the per-implementation directory
+# (named by the current ${_ruby_implementation}).
+_ruby_source_copy() {
+	# Until we actually find a reason not to, we use hardlinks, this
+	# should reduce the amount of disk space that is wasted by this.
+	cp -prlP all ${_ruby_implementation} \
+		|| die "Unable to copy ${_ruby_implementation} environment"
+}
+
+# @FUNCTION: ruby-ng_src_prepare
+# @DESCRIPTION:
+# Apply patches and prepare versions for each ruby target
+# implementation. Also carry out common clean up tasks.
+ruby-ng_src_prepare() {
+	# Way too many Ruby packages are prepared on OSX without removing
+	# the extra data forks, we do it here to avoid repeating it for
+	# almost every other ebuild.
+	find . -name '._*' -delete
+
+	# Patch once in "all", then clone the patched tree for every
+	# enabled implementation.
+	_ruby_invoke_environment all _ruby_apply_patches
+
+	_PHASE="source copy" \
+		_ruby_each_implementation _ruby_source_copy
+
+	if type each_ruby_prepare &>/dev/null; then
+		_ruby_each_implementation each_ruby_prepare
+	fi
+}
+
+# @FUNCTION: ruby-ng_src_configure
+# @DESCRIPTION:
+# Configure the package.
+ruby-ng_src_configure() {
+	# Per-implementation hook first, then the shared "all" hook.
+	if type each_ruby_configure &>/dev/null; then
+		_ruby_each_implementation each_ruby_configure
+	fi
+
+	# NOTE(review): when all_ruby_configure is undefined this trailing
+	# && chain makes the phase function return non-zero — apparently
+	# tolerated, but confirm before reusing the pattern elsewhere.
+	type all_ruby_configure &>/dev/null && \
+		_ruby_invoke_environment all all_ruby_configure
+}
+
+# @FUNCTION: ruby-ng_src_compile
+# @DESCRIPTION:
+# Compile the package.
+ruby-ng_src_compile() {
+	# Per-implementation hook first, then the shared "all" hook.
+	if type each_ruby_compile &>/dev/null; then
+		_ruby_each_implementation each_ruby_compile
+	fi
+
+	type all_ruby_compile &>/dev/null && \
+		_ruby_invoke_environment all all_ruby_compile
+}
+
+# @FUNCTION: ruby-ng_src_test
+# @DESCRIPTION:
+# Run tests for the package.
+ruby-ng_src_test() {
+	# Per-implementation hook first, then the shared "all" hook.
+	if type each_ruby_test &>/dev/null; then
+		_ruby_each_implementation each_ruby_test
+	fi
+
+	type all_ruby_test &>/dev/null && \
+		_ruby_invoke_environment all all_ruby_test
+}
+
+# QA check: scan the installed extensions of the current implementation
+# and die if any shared object fails to link against libruby (unless
+# whitelisted through RUBY_QA_ALLOWED_LIBS).
+_each_ruby_check_install() {
+	local scancmd=scanelf
+	# we have a Mach-O object here
+	[[ ${CHOST} == *-darwin ]] && scancmd=scanmacho
+
+	# EAPI 2 predates EPREFIX support; clear it on non-prefix systems.
+	has "${EAPI}" 2 && ! use prefix && EPREFIX=
+
+	local libruby_basename=$(${RUBY} -rrbconfig -e 'puts RbConfig::CONFIG["LIBRUBY_SO"]')
+	local libruby_soname=$(basename $(${scancmd} -F "%S#F" -qS "${EPREFIX}/usr/$(get_libdir)/${libruby_basename}") 2>/dev/null)
+	local sitedir=$(${RUBY} -rrbconfig -e 'puts RbConfig::CONFIG["sitedir"]')
+	local sitelibdir=$(${RUBY} -rrbconfig -e 'puts RbConfig::CONFIG["sitelibdir"]')
+
+	# Look for wrong files in sitedir
+	# if [[ -d "${D}${sitedir}" ]]; then
+	#	local f=$(find "${D}${sitedir}" -mindepth 1 -maxdepth 1 -not -wholename "${D}${sitelibdir}")
+	#	if [[ -n ${f} ]]; then
+	#		eerror "Found files in sitedir, outside sitelibdir:"
+	#		eerror "${f}"
+	#		die "Misplaced files in sitedir"
+	#	fi
+	# fi
+
+	# The current implementation lacks libruby (i.e.: jruby)
+	[[ -z ${libruby_soname} ]] && return 0
+
+	# Check also the gems directory, since we could be installing compiled
+	# extensions via ruby-fakegem; make sure to check only in sitelibdir, since
+	# that's what changes between two implementations (otherwise you'd get false
+	# positives now that Ruby 1.9.2 installs with the same sitedir as 1.8)
+	${scancmd} -qnR "${D}${sitelibdir}" "${D}${sitelibdir/site_ruby/gems}" \
+		| fgrep -v "${libruby_soname}" \
+		| grep -E -v "${RUBY_QA_ALLOWED_LIBS}" \
+		> "${T}"/ruby-ng-${_ruby_implementation}-mislink.log
+
+	if [[ -s "${T}"/ruby-ng-${_ruby_implementation}-mislink.log ]]; then
+		ewarn "Extensions installed for ${_ruby_implementation} with missing links to ${libruby_soname}"
+		ewarn $(< "${T}"/ruby-ng-${_ruby_implementation}-mislink.log )
+		die "Missing links to ${libruby_soname}"
+	fi
+}
+
+# @FUNCTION: ruby-ng_src_install
+# @DESCRIPTION:
+# Install the package for each ruby target implementation.
+ruby-ng_src_install() {
+	# Per-implementation hook first, then the shared "all" hook.
+	if type each_ruby_install &>/dev/null; then
+		_ruby_each_implementation each_ruby_install
+	fi
+
+	type all_ruby_install &>/dev/null && \
+		_ruby_invoke_environment all all_ruby_install
+
+	# Finally QA-check the linkage of what was just installed.
+	_PHASE="check install" \
+		_ruby_each_implementation _each_ruby_check_install
+}
+
+# @FUNCTION: ruby_rbconfig_value
+# @USAGE: rbconfig item
+# @RETURN: Returns the value of the given rbconfig item of the Ruby interpreter in ${RUBY}.
+ruby_rbconfig_value() {
+	# Query RbConfig of the interpreter in ${RUBY}; the unquoted echo
+	# intentionally collapses any stray whitespace in the output.
+	local value
+	value=$(${RUBY} -rrbconfig -e "puts RbConfig::CONFIG['$1']")
+	echo ${value}
+}
+
+# @FUNCTION: doruby
+# @USAGE: file [file...]
+# @DESCRIPTION:
+# Installs the specified file(s) into the sitelibdir of the Ruby interpreter in ${RUBY}.
+doruby() {
+	[[ -z ${RUBY} ]] && die "\$RUBY is not set"
+	# EAPI 2 predates EPREFIX support; clear it on non-prefix systems.
+	has "${EAPI}" 2 && ! use prefix && EPREFIX=
+	( # don't want to pollute calling env
+		sitelibdir=$(ruby_rbconfig_value 'sitelibdir')
+		# Strip ${EPREFIX} since insinto prepends it again.
+		insinto ${sitelibdir#${EPREFIX}}
+		insopts -m 0644
+		doins "$@"
+	) || die "failed to install $@"
+}
+
+# @FUNCTION: ruby_get_libruby
+# @RETURN: The location of libruby*.so belonging to the Ruby interpreter in ${RUBY}.
+ruby_get_libruby() {
+	# Ask the interpreter itself where its libruby lives.
+	${RUBY} -rrbconfig -e 'puts File.join(RbConfig::CONFIG["libdir"], RbConfig::CONFIG["LIBRUBY"])'
+}
+
+# @FUNCTION: ruby_get_hdrdir
+# @RETURN: The location of the header files belonging to the Ruby interpreter in ${RUBY}.
+ruby_get_hdrdir() {
+	local hdrdir=$(ruby_rbconfig_value 'rubyhdrdir')
+
+	# Implementations without a rubyhdrdir entry yield the literal
+	# string "nil"; their headers live in archdir instead.
+	if [[ "${hdrdir}" == "nil" ]]; then
+		hdrdir=$(ruby_rbconfig_value 'archdir')
+	fi
+
+	echo "${hdrdir}"
+}
+
+# @FUNCTION: ruby_get_version
+# @RETURN: The version of the Ruby interpreter in ${RUBY}, or what 'ruby' points to.
+ruby_get_version() {
+	# Fall back to whatever `ruby` resolves to when ${RUBY} is unset.
+	local interpreter=${RUBY:-$(type -p ruby 2>/dev/null)}
+
+	echo $(${interpreter} -e 'puts RUBY_VERSION')
+}
+
+# @FUNCTION: ruby_get_implementation
+# @RETURN: The implementation of the Ruby interpreter in ${RUBY}, or what 'ruby' points to.
+ruby_get_implementation() {
+	# Fall back to whatever `ruby` resolves to when ${RUBY} is unset.
+	local ruby=${RUBY:-$(type -p ruby 2>/dev/null)}
+
+	# Classify by the interpreter's --version banner.
+	case $(${ruby} --version) in
+		*Enterprise*)
+			echo "ree"
+			;;
+		*jruby*)
+			echo "jruby"
+			;;
+		*rubinius*)
+			echo "rbx"
+			;;
+		*)
+			echo "mri"
+			;;
+	esac
+}
+
+# @FUNCTION: ruby-ng_rspec <arguments>
+# @DESCRIPTION:
+# This is simply a wrapper around the rspec command (executed by ${RUBY})
+# which also respects TEST_VERBOSE and NOCOLOR environment variables.
+# Optionally takes arguments to pass on to the rspec invocation. The
+# environment variable RSPEC_VERSION can be used to control the specific
+# rspec version that must be executed. It defaults to 2 for historical
+# compatibility.
+ruby-ng_rspec() {
+	local version=${RSPEC_VERSION-2}
+	local files="$@"
+
+	# Explicitly pass the expected spec directory since the versioned
+	# rspec wrappers don't handle this automatically.
+	if [ ${#@} -eq 0 ]; then
+		files="spec"
+	fi
+
+	# QA: the ebuild must actually depend on rspec for this to work.
+	if [[ ${DEPEND} != *"dev-ruby/rspec"* ]]; then
+		ewarn "Missing dev-ruby/rspec in \${DEPEND}"
+	fi
+
+	# Honour the user's NOCOLOR / TEST_VERBOSE settings.
+	local rspec_params=
+	case ${NOCOLOR} in
+		1|yes|true)
+			rspec_params+=" --no-color"
+			;;
+		*)
+			rspec_params+=" --color"
+			;;
+	esac
+
+	case ${TEST_VERBOSE} in
+		1|yes|true)
+			rspec_params+=" --format documentation"
+			;;
+		*)
+			rspec_params+=" --format progress"
+			;;
+	esac
+
+	${RUBY} -S rspec-${version} ${rspec_params} ${files} || die "rspec failed"
+}
+
+# @FUNCTION: ruby-ng_cucumber
+# @DESCRIPTION:
+# This is simply a wrapper around the cucumber command (executed by ${RUBY})
+# which also respects TEST_VERBOSE and NOCOLOR environment variables.
+ruby-ng_cucumber() {
+	# QA: the ebuild must actually depend on cucumber for this to work.
+	if [[ ${DEPEND} != *"dev-util/cucumber"* ]]; then
+		ewarn "Missing dev-util/cucumber in \${DEPEND}"
+	fi
+
+	# Honour the user's NOCOLOR / TEST_VERBOSE settings.
+	local cucumber_params=
+	case ${NOCOLOR} in
+		1|yes|true)
+			cucumber_params+=" --no-color"
+			;;
+		*)
+			cucumber_params+=" --color"
+			;;
+	esac
+
+	case ${TEST_VERBOSE} in
+		1|yes|true)
+			cucumber_params+=" --format pretty"
+			;;
+		*)
+			cucumber_params+=" --format progress"
+			;;
+	esac
+
+	# Cucumber is not run on JRuby; bail out before the exec.
+	if [[ ${RUBY} == *jruby ]]; then
+		ewarn "Skipping cucumber tests on JRuby (unsupported)."
+		return 0
+	fi
+
+	${RUBY} -S cucumber ${cucumber_params} "$@" || die "cucumber failed"
+}
+
+# @FUNCTION: ruby-ng_testrb-2
+# @DESCRIPTION:
+# This is simply a replacement for the testrb command that loads the test
+# files and executes them, with test-unit 2.x. This actually requires
+# either an old test-unit-2 version or 2.5.1-r1 or later, as they remove
+# their script and we installed a broken wrapper for a while.
+# This also respects TEST_VERBOSE and NOCOLOR environment variables.
+ruby-ng_testrb-2() {
+	# QA: the ebuild must actually depend on test-unit for this to work.
+	if [[ ${DEPEND} != *"dev-ruby/test-unit"* ]]; then
+		ewarn "Missing dev-ruby/test-unit in \${DEPEND}"
+	fi
+
+	# Honour the user's NOCOLOR / TEST_VERBOSE settings.
+	local testrb_params=
+	case ${NOCOLOR} in
+		1|yes|true)
+			testrb_params+=" --no-use-color"
+			;;
+		*)
+			testrb_params+=" --use-color=auto"
+			;;
+	esac
+
+	case ${TEST_VERBOSE} in
+		1|yes|true)
+			testrb_params+=" --verbose=verbose"
+			;;
+		*)
+			testrb_params+=" --verbose=normal"
+			;;
+	esac
+
+	${RUBY} -S testrb-2 ${testrb_params} "$@" || die "testrb-2 failed"
+}
diff --git a/eclass/ruby-single.eclass b/eclass/ruby-single.eclass
new file mode 100644
index 000000000000..8dd1d82b618f
--- /dev/null
+++ b/eclass/ruby-single.eclass
@@ -0,0 +1,90 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ruby-single
+# @MAINTAINER:
+# Ruby team <ruby@gentoo.org>
+# @AUTHOR:
+# Author: Hans de Graaff <graaff@gentoo.org>
+# Based on python-single-r1 by: Michał Górny <mgorny@gentoo.org>
+# @BLURB: An eclass for Ruby packages not installed for multiple implementations.
+# @DESCRIPTION:
+# An eclass for packages which don't support being installed for
+# multiple Ruby implementations. This mostly includes ruby-based
+# scripts. Set USE_RUBY to include all the ruby targets that have been
+# verified to work and include the eclass. RUBY_DEPS is now available to
+# pull in the dependency on the requested ruby targets.
+#
+# @CODE
+# USE_RUBY="ruby20 ruby21"
+# inherit ruby-single
+# RDEPEND="${RUBY_DEPS}"
+# @CODE
+
+# Gate the eclass on known EAPIs.
+case "${EAPI:-0}" in
+	0|1|2|3)
+		die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+		;;
+	4|5)
+		;;
+	*)
+		die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+		;;
+esac
+
+if [[ ! ${_RUBY_SINGLE} ]]; then
+
+inherit ruby-utils
+
+# @ECLASS-VARIABLE: USE_RUBY
+# @DEFAULT_UNSET
+# @REQUIRED
+# @DESCRIPTION:
+# This variable contains a space-separated list of targets (see above) that a
+# package is compatible with. It must be set before the `inherit' call. There
+# is no default. All ebuilds are expected to set this variable.
+
+
+# @ECLASS-VARIABLE: RUBY_DEPS
+# @DESCRIPTION:
+#
+# This is an eclass-generated Ruby dependency string for all
+# implementations listed in USE_RUBY. Any one of the supported ruby
+# targets will satisfy this dependency. A dependency on
+# virtual/rubygems is also added to ensure that this is installed
+# in time for the package to use it.
+#
+# Example use:
+# @CODE
+# RDEPEND="${RUBY_DEPS}
+# dev-foo/mydep"
+# BDEPEND="${RDEPEND}"
+# @CODE
+#
+# Example value:
+# @CODE
+# || ( dev-lang/ruby:2.0 dev-lang/ruby:1.9 ) virtual/rubygems
+# @CODE
+#
+# The order of dependencies will change over time to best match the
+# current state of ruby targets, e.g. stable version first.
+
+_ruby_single_implementations_depend() {
+ local depend
+ for _ruby_implementation in ${RUBY_TARGETS_PREFERENCE}; do
+ if [[ ${USE_RUBY} =~ ${_ruby_implementation} ]]; then
+ depend="${depend} $(_ruby_implementation_depend $_ruby_implementation)"
+ fi
+ done
+ echo "|| ( ${depend} ) virtual/rubygems"
+}
+
+_ruby_single_set_globals() {
+	# Compute RUBY_DEPS once, at inherit time.
+	RUBY_DEPS="$(_ruby_single_implementations_depend)"
+}
+_ruby_single_set_globals
+
+
+_RUBY_SINGLE=1
+fi
diff --git a/eclass/ruby-utils.eclass b/eclass/ruby-utils.eclass
new file mode 100644
index 000000000000..d365272c26e3
--- /dev/null
+++ b/eclass/ruby-utils.eclass
@@ -0,0 +1,83 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ruby-utils.eclass
+# @MAINTAINER:
+# Ruby team <ruby@gentoo.org>
+# @AUTHOR:
+# Author: Hans de Graaff <graaff@gentoo.org>
+# @BLURB: An eclass for supporting ruby scripts and bindings in non-ruby packages
+# @DESCRIPTION:
+# The ruby-utils eclass is designed to allow an easier installation of
+# Ruby scripts and bindings for non-ruby packages.
+#
+# This eclass does not set any metadata variables nor export any phase
+# functions. It can be inherited safely.
+
+
+if [[ ! ${_RUBY_UTILS} ]]; then
+
+
+# @ECLASS-VARIABLE: RUBY_TARGETS_PREFERENCE
+# @DESCRIPTION:
+# This variable lists all the known ruby targets in preference of use as
+# determined by the ruby team. By using this ordering rather than the
+# USE_RUBY mandated ordering we have more control over which ruby
+# implementation will be installed first (and thus eselected). This will
+# provide for a better first installation experience.
+
+# All RUBY_TARGETS
+RUBY_TARGETS_PREFERENCE="ruby20 ruby19 "
+
+# All other active ruby targets
+RUBY_TARGETS_PREFERENCE+="ruby21 ruby22 "
+
+
+# Print the dependency atom for the ruby target $1.  $2 is prepended
+# before the package name and $3 is inserted between package name and
+# slot; callers use these for a comparator (e.g. >=) and a version
+# string respectively.  Dies on unknown targets.
+_ruby_implementation_depend() {
+	local rubypn=
+	local rubyslot=
+
+	case $1 in
+		ruby18)
+			rubypn="dev-lang/ruby"
+			rubyslot=":1.8"
+			;;
+		ruby19)
+			rubypn="dev-lang/ruby"
+			rubyslot=":1.9"
+			;;
+		ruby20)
+			rubypn="dev-lang/ruby"
+			rubyslot=":2.0"
+			;;
+		ruby21)
+			rubypn="dev-lang/ruby"
+			rubyslot=":2.1"
+			;;
+		ruby22)
+			rubypn="dev-lang/ruby"
+			rubyslot=":2.2"
+			;;
+		ree18)
+			rubypn="dev-lang/ruby-enterprise"
+			rubyslot=":1.8"
+			;;
+		jruby)
+			rubypn="dev-java/jruby"
+			rubyslot=""
+			;;
+		rbx)
+			rubypn="dev-lang/rubinius"
+			rubyslot=""
+			;;
+		*) die "$1: unknown Ruby implementation"
+	esac
+
+	echo "$2${rubypn}$3${rubyslot}"
+}
+
+
+
+_RUBY_UTILS=1
+fi
diff --git a/eclass/s6.eclass b/eclass/s6.eclass
new file mode 100644
index 000000000000..464fb50b1103
--- /dev/null
+++ b/eclass/s6.eclass
@@ -0,0 +1,119 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: s6.eclass
+# @MAINTAINER:
+# William Hubbs <williamh@gentoo.org>
+# @BLURB: helper functions to install s6 services
+# @DESCRIPTION:
+# This eclass provides helpers to install s6 services.
+# @EXAMPLE:
+#
+# @CODE
+# inherit s6
+#
+# src_install() {
+# ...
+# s6_install_service myservice "${FILESDIR}"/run-s6 "${FILESDIR}"/finish-s6
+# ...
+# If you want a service to be logged, install the log service as
+# shown here.
+# s6_install_service myservice/log "${FILESDIR}"/log-run-s6 \
+# "${FILESDIR}"/log-finish-s6
+# ...
+# }
+# @CODE
+
+# Only EAPI 5 has been vetted for this eclass so far.
+case ${EAPI:-0} in
+	5) ;;
+	*) die "${ECLASS}.eclass: API in EAPI ${EAPI} not yet established" ;;
+esac
+
+# @FUNCTION: _s6_get_servicedir
+# @INTERNAL
+# @DESCRIPTION:
+# Get unprefixed servicedir.
+_s6_get_servicedir() {
+	# Single source of truth for the service directory path; the
+	# public s6_get_servicedir() adds ${EPREFIX} on top of this.
+	echo /var/svc.d
+}
+
+# @FUNCTION: s6_get_servicedir
+# @DESCRIPTION:
+# Output the path for the s6 service directory (not including ${D}).
+s6_get_servicedir() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Prefix-aware variant of the internal helper.
+	echo "${EPREFIX}$(_s6_get_servicedir)"
+}
+
+# @FUNCTION: s6_install_service
+# @USAGE: servicename run finish
+# @DESCRIPTION:
+# Install an s6 service.
+# servicename is the name of the service.
+# run is the run script for the service.
+# finish is the optional finish script for the service.
+s6_install_service() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local name="$1"
+	local run="$2"
+	local finish="$3"
+
+	[[ $name ]] ||
+		die "${ECLASS}.eclass: you must specify the s6 service name"
+	[[ $run ]] ||
+		die "${ECLASS}.eclass: you must specify the s6 service run script"
+
+	( # subshell: keep exeinto's install-path change local
+		local servicepath="$(_s6_get_servicedir)/$name"
+		exeinto "$servicepath"
+		newexe "$run" run
+		# The finish script is optional; use an explicit if so the
+		# function no longer returns non-zero when it is omitted, as
+		# the previous trailing `[[ $finish ]] && ...` did.
+		if [[ $finish ]]; then
+			newexe "$finish" finish
+		fi
+	)
+}
+
+# @FUNCTION: s6_service_down
+# @USAGE: servicename
+# @DESCRIPTION:
+# Install the "down" flag so this service will not be started by
+# default.
+# servicename is the name of the service.
+s6_service_down() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local name="$1"
+
+	[[ $name ]] ||
+		die "${ECLASS}.eclass: you must specify the s6 service name"
+
+	( # subshell: keep insinto's install-path change local
+		touch "$T"/down || die
+		local servicepath="$(_s6_get_servicedir)/$name"
+		insinto "$servicepath"
+		doins "$T"/down
+	)
+}
+
+# @FUNCTION: s6_service_nosetsid
+# @USAGE: servicename
+# @DESCRIPTION:
+# Install the "nosetsid" flag so this service will not be made a session
+# leader.
+# servicename is the name of the service.
+s6_service_nosetsid() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local svcname="$1"
+
+	[[ $svcname ]] ||
+		die "${ECLASS}.eclass: you must specify the s6 service name"
+
+	( # subshell: keep insinto's install-path change local
+		touch "$T"/nosetsid || die
+		insinto "$(_s6_get_servicedir)/$svcname"
+		doins "$T"/nosetsid
+	)
+}
diff --git a/eclass/savedconfig.eclass b/eclass/savedconfig.eclass
new file mode 100644
index 000000000000..b8f9415eefa9
--- /dev/null
+++ b/eclass/savedconfig.eclass
@@ -0,0 +1,155 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: savedconfig.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: common API for saving/restoring complex configuration files
+# @DESCRIPTION:
+# It is not uncommon to come across a package which has a very fine
+# grained level of configuration options that go way beyond what
+# USE flags can properly describe. For this purpose, a common API
+# of saving and restoring the configuration files was developed
+# so users can modify these config files and the ebuild will take it
+# into account as needed.
+#
+# @ROFF .nr step 1 1
+# Typically you can create your own configuration files quickly by
+# doing:
+# @ROFF .IP \n[step] 3
+# Build the package with FEATURES=noclean USE=savedconfig.
+# @ROFF .IP \n+[step]
+# Go into the build dir and edit the relevant configuration system
+# (e.g. `make menuconfig` or `nano config-header.h`). You can look
+# at the files in /etc/portage/savedconfig/ to see what files get
+# loaded/restored.
+# @ROFF .IP \n+[step]
+# Copy the modified configuration files out of the workdir and to
+# the paths in /etc/portage/savedconfig/.
+# @ROFF .IP \n+[step]
+# Emerge the package with just USE=savedconfig to get the custom build.
+
+inherit portability
+
+IUSE="savedconfig"
+
+# @FUNCTION: save_config
+# @USAGE: <config files to save>
+# @DESCRIPTION:
+# Use this function to save the package's configuration file into the
+# right location. You may specify any number of configuration files,
+# but just make sure you call save_config with all of them at the same
+# time in order for things to work properly.
+save_config() {
+	# Only valid in src_install: files are copied under ${ED}.
+	if [[ ${EBUILD_PHASE} != "install" ]]; then
+		die "Bad package! save_config only for use in src_install functions!"
+	fi
+	[[ $# -eq 0 ]] && die "Usage: save_config <files>"
+
+	# Be lazy in our EAPI compat
+	# (older EAPIs have no ED; fall back to D)
+	: ${ED:=${D}}
+
+	local dest="/etc/portage/savedconfig/${CATEGORY}"
+	if [[ $# -eq 1 && -f $1 ]] ; then
+		# Just one file, so have the ${PF} be that config file
+		dodir "${dest}"
+		cp "$@" "${ED}/${dest}/${PF}" || die "failed to save $*"
+	else
+		# A dir, or multiple files, so have the ${PF} be a dir
+		# with all the saved stuff below it
+		dodir "${dest}/${PF}"
+		treecopy "$@" "${ED}/${dest}/${PF}" || die "failed to save $*"
+	fi
+
+	# Single quotes below are deliberate: the ${...} tokens are shown
+	# literally to the user as a path template, not expanded.
+	elog "Your configuration for ${CATEGORY}/${PF} has been saved in "
+	elog "/etc/portage/savedconfig/${CATEGORY}/${PF} for your editing pleasure."
+	elog "You can edit these files by hand and remerge this package with"
+	elog "USE=savedconfig to customise the configuration."
+	elog "You can rename this file/directory to one of the following for"
+	elog "its configuration to apply to multiple versions:"
+	elog '${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/'
+	elog '[${CTARGET}|${CHOST}|""]/${CATEGORY}/[${PF}|${P}|${PN}]'
+}
+
+# @FUNCTION: restore_config
+# @USAGE: <config files to restore>
+# @DESCRIPTION:
+# Restores the configuration the ebuild saved previously, potentially with user edits.
+# You can restore a single file or a whole bunch, just make sure you call
+# restore_config with all of the files to restore at the same time.
+#
+# Config files can be laid out as:
+# @CODE
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CTARGET}/${CATEGORY}/${PF}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CHOST}/${CATEGORY}/${PF}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CATEGORY}/${PF}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CTARGET}/${CATEGORY}/${P}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CHOST}/${CATEGORY}/${P}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CATEGORY}/${P}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CTARGET}/${CATEGORY}/${PN}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CHOST}/${CATEGORY}/${PN}
+# ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CATEGORY}/${PN}
+# @CODE
+restore_config() {
+	# Only usable in the src_* phases that run before install.
+	case ${EBUILD_PHASE} in
+	unpack|compile|configure|prepare) ;;
+	*) die "Bad package! restore_config only for use in src_{unpack,compile,configure,prepare} functions!" ;;
+	esac
+
+	use savedconfig || return
+
+	local found check configfile
+	local base=${PORTAGE_CONFIGROOT}/etc/portage/savedconfig
+	# Search most-specific to least-specific: PF, then P, then PN;
+	# within each, prefer CTARGET over CHOST over the plain path.
+	for check in {${CATEGORY}/${PF},${CATEGORY}/${P},${CATEGORY}/${PN}}; do
+		configfile=${base}/${CTARGET}/${check}
+		[[ -r ${configfile} ]] || configfile=${base}/${CHOST}/${check}
+		[[ -r ${configfile} ]] || configfile=${base}/${check}
+		einfo "Checking existence of ${configfile} ..."
+		if [[ -r "${configfile}" ]]; then
+			einfo "found ${configfile}"
+			found=${configfile};
+			break;
+		fi
+	done
+	if [[ -f ${found} ]]; then
+		elog "Building using saved configfile ${found}"
+		if [ $# -gt 0 ]; then
+			# -pPR: preserve mode/timestamps, no symlink deref, recursive.
+			cp -pPR "${found}" "$1" || die "Failed to restore ${found} to $1"
+		else
+			die "need to know the restoration filename"
+		fi
+	elif [[ -d ${found} ]]; then
+		# Whole saved directory: replicate its tree into the current dir.
+		# NOTE(review): the die message mentions $1 but the destination
+		# here is ${dest} (the original ${PWD}) -- message may mislead.
+		elog "Building using saved config directory ${found}"
+		local dest=${PWD}
+		pushd "${found}" > /dev/null
+		treecopy . "${dest}" || die "Failed to restore ${found} to $1"
+		popd > /dev/null
+	else
+		# maybe the user is screwing around with perms they shouldn't #289168
+		if [[ ! -r ${base} ]] ; then
+			eerror "Unable to read ${base} -- please check its permissions."
+			die "Reading config files failed"
+		fi
+		ewarn "No saved config to restore - please remove USE=savedconfig or"
+		ewarn "provide a configuration file in ${PORTAGE_CONFIGROOT}/etc/portage/savedconfig/${CATEGORY}/${PN}"
+		ewarn "Your config file(s) will not be used this time"
+	fi
+}
+
+savedconfig_pkg_postinst() {
+	# If the user has USE=savedconfig, then chances are they
+	# are modifying these files, so keep them around.  #396169
+	# This might lead to cruft build up, but the alternatives
+	# are worse :/.
+
+	if use savedconfig ; then
+		# Be lazy in our EAPI compat
+		# (older EAPIs have no EROOT; fall back to ROOT)
+		: ${EROOT:=${ROOT}}
+
+		# Refresh mtimes so the saved config is not treated as stale
+		# orphaned cruft; errors (e.g. path absent) are ignored.
+		find "${EROOT}/etc/portage/savedconfig/${CATEGORY}/${PF}" \
+			-exec touch {} + 2>/dev/null
+	fi
+}
+
+EXPORT_FUNCTIONS pkg_postinst
diff --git a/eclass/scons-utils.eclass b/eclass/scons-utils.eclass
new file mode 100644
index 000000000000..a2a6884e55d3
--- /dev/null
+++ b/eclass/scons-utils.eclass
@@ -0,0 +1,235 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: scons-utils.eclass
+# @MAINTAINER:
+# mgorny@gentoo.org
+# @BLURB: helper functions to deal with SCons buildsystem
+# @DESCRIPTION:
+# This eclass provides a set of function to help developers sanely call
+# dev-util/scons and pass parameters to it.
+# @EXAMPLE:
+#
+# @CODE
+# inherit scons-utils toolchain-funcs
+#
+# EAPI=4
+#
+# src_configure() {
+# myesconsargs=(
+# CC="$(tc-getCC)"
+# $(use_scons nls ENABLE_NLS)
+# )
+# }
+#
+# src_compile() {
+# escons
+# }
+#
+# src_install() {
+# # note: this can be DESTDIR, INSTALL_ROOT, ... depending on package
+# escons DESTDIR="${D}" install
+# }
+# @CODE
+
+# -- public variables --
+
+# @ECLASS-VARIABLE: SCONS_MIN_VERSION
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The minimal version of SCons required for the build to work.
+
+# @VARIABLE: myesconsargs
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# List of package-specific options to pass to all SCons calls. Supposed to be
+# set in src_configure().
+
+# @ECLASS-VARIABLE: SCONSOPTS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The default set of options to pass to scons. Similar to MAKEOPTS,
+# supposed to be set in make.conf. If unset, escons() will use cleaned
+# up MAKEOPTS instead.
+
+# @ECLASS-VARIABLE: EXTRA_ESCONS
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# The additional parameters to pass to SCons whenever escons() is used.
+# Much like EXTRA_EMAKE, this is supposed to be used in make.conf
+# and not in ebuilds!
+
+# @ECLASS-VARIABLE: USE_SCONS_TRUE
+# @DESCRIPTION:
+# The default value for truth in scons-use() (1 by default).
+: ${USE_SCONS_TRUE:=1}
+
+# @ECLASS-VARIABLE: USE_SCONS_FALSE
+# @DESCRIPTION:
+# The default value for false in scons-use() (0 by default).
+: ${USE_SCONS_FALSE:=0}
+
+# -- EAPI support check --
+
+case ${EAPI:-0} in
+ 0|1|2|3|4|5) ;;
+ *) die "EAPI ${EAPI} unsupported."
+esac
+
+# -- ebuild variables setup --
+
+if [[ -n ${SCONS_MIN_VERSION} ]]; then
+ DEPEND=">=dev-util/scons-${SCONS_MIN_VERSION}"
+else
+ DEPEND="dev-util/scons"
+fi
+
+# -- public functions --
+
+# @FUNCTION: escons
+# @USAGE: [scons-arg] ...
+# @DESCRIPTION:
+# Call scons, passing the supplied arguments, ${myesconsargs[@]},
+# filtered ${MAKEOPTS}, ${EXTRA_ESCONS}. Similar to emake. Like emake,
+# this function does die on failure in EAPI 4 (unless called nonfatal).
+escons() {
+	local ret
+
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# if SCONSOPTS are _unset_, use cleaned MAKEOPTS
+	# (${VAR-default} substitutes only when VAR is unset, so an
+	# explicitly empty SCONSOPTS is respected).
+	set -- scons ${SCONSOPTS-$(scons_clean_makeopts)} ${EXTRA_ESCONS} \
+		"${myesconsargs[@]}" "${@}"
+	# Echo the full command line to stderr before running it.
+	echo "${@}" >&2
+	"${@}"
+	ret=${?}
+
+	# emake-like semantics: die on failure in EAPI 4/5 only.
+	[[ ${ret} -ne 0 ]] && has "${EAPI:-0}" 4 5 && die "escons failed."
+	return ${ret}
+}
+
+# @FUNCTION: scons_clean_makeopts
+# @USAGE: [makeflags] [...]
+# @DESCRIPTION:
+# Strip the supplied makeflags (or ${MAKEOPTS} if called without
+# an argument) of options not supported by SCons and make sure --jobs
+# gets an argument. Output the resulting flag list (suitable
+# for an assignment to SCONSOPTS).
+scons_clean_makeopts() {
+	local new_makeopts
+
+	debug-print-function ${FUNCNAME} "${@}"
+
+	if [[ ${#} -eq 0 ]]; then
+		debug-print "Using MAKEOPTS: [${MAKEOPTS}]"
+		set -- ${MAKEOPTS}
+	else
+		# unquote if necessary
+		set -- ${*}
+	fi
+
+	# empty MAKEOPTS give out empty SCONSOPTS
+	# thus, we do need to worry about the initial setup
+	# Memoize: if the input matches the cached MAKEOPTS, emit the
+	# previously computed SCONSOPTS and skip re-parsing.
+	if [[ ${*} = ${_SCONS_CACHE_MAKEOPTS} ]]; then
+		set -- ${_SCONS_CACHE_SCONSOPTS}
+		debug-print "Cache hit: [${*}]"
+		echo ${*}
+		return
+	fi
+	export _SCONS_CACHE_MAKEOPTS=${*}
+
+	# Walk the flags one word at a time, keeping only what SCons groks.
+	while [[ ${#} -gt 0 ]]; do
+		case ${1} in
+			# clean, simple to check -- we like that
+			--jobs=*|--keep-going)
+				new_makeopts=${new_makeopts+${new_makeopts} }${1}
+				;;
+			# need to take a look at the next arg and guess
+			--jobs)
+				if [[ ${#} -gt 1 && ${2} =~ ^[0-9]+$ ]]; then
+					# --jobs with a numeric argument: keep both words.
+					new_makeopts="${new_makeopts+${new_makeopts} }${1} ${2}"
+					shift
+				else
+					# no value means no limit, let's pass a random int
+					new_makeopts=${new_makeopts+${new_makeopts} }${1}=5
+				fi
+				;;
+			# strip other long options
+			--*)
+				;;
+			# short option hell
+			-*)
+				# Rebuild the bundled short-option cluster char by char,
+				# keeping only -k and -j (with argument handling).
+				local str new_optstr
+				new_optstr=
+				str=${1#-}
+
+				while [[ -n ${str} ]]; do
+					case ${str} in
+						k*)
+							new_optstr=${new_optstr}k
+							;;
+						# -j needs to come last
+						j)
+							if [[ ${#} -gt 1 && ${2} =~ ^[0-9]+$ ]]; then
+								new_optstr="${new_optstr}j ${2}"
+								shift
+							else
+								new_optstr="${new_optstr}j 5"
+							fi
+							;;
+						# otherwise, everything after -j is treated as an arg
+						j*)
+							new_optstr=${new_optstr}${str}
+							break
+							;;
+					esac
+					# Drop the first character and keep scanning.
+					str=${str#?}
+				done
+
+				if [[ -n ${new_optstr} ]]; then
+					new_makeopts=${new_makeopts+${new_makeopts} }-${new_optstr}
+				fi
+				;;
+		esac
+		shift
+	done
+
+	# Cache and emit the cleaned flag list.
+	set -- ${new_makeopts}
+	export _SCONS_CACHE_SCONSOPTS=${*}
+	debug-print "New SCONSOPTS: [${*}]"
+	echo ${*}
+}
+
+# @FUNCTION: use_scons
+# @USAGE: <use-flag> [var-name] [var-opt-true] [var-opt-false]
+# @DESCRIPTION:
+# Output a SCons parameter with value depending on the USE flag state.
+# If the USE flag is set, output <var-name>=<var-opt-true>; otherwise
+# <var-name>=<var-opt-false>.
+#
+# If <var-name> is omitted, <use-flag> will be used instead. However,
+# if <use-flag> starts with an exclamation mark (!flag), 'no' will be
+# prepended to the name (e.g. noflag).
+#
+# If <var-opt-true> and/or <var-opt-false> are omitted,
+# ${USE_SCONS_TRUE} and/or ${USE_SCONS_FALSE} will be used instead.
+use_scons() {
+	local flag=${1}
+	# ${flag/\!/no}: replace a literal '!' with 'no' for the default name.
+	local varname=${2:-${flag/\!/no}}
+	local vartrue=${3:-${USE_SCONS_TRUE}}
+	local varfalse=${4:-${USE_SCONS_FALSE}}
+
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# NOTE(review): the usage/die text still refers to the legacy name
+	# 'scons-use'; the function is use_scons.
+	if [[ ${#} -eq 0 ]]; then
+		eerror "Usage: scons-use <use-flag> [var-name] [var-opt-true] [var-opt-false]"
+		die 'scons-use(): not enough arguments'
+	fi
+
+	if use "${flag}"; then
+		echo "${varname}=${vartrue}"
+	else
+		echo "${varname}=${varfalse}"
+	fi
+}
diff --git a/eclass/scsh.eclass b/eclass/scsh.eclass
new file mode 100644
index 000000000000..7c199d84a9d6
--- /dev/null
+++ b/eclass/scsh.eclass
@@ -0,0 +1,73 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+
+inherit eutils multilib
+
+SLOT="0"
+IUSE="scsh"
+
+# Output the scsh installation path for the active libdir
+# (e.g. /usr/lib64/scsh).
+scsh_scsh_path() {
+	echo /usr/$(get_libdir)/scsh
+}
+
+# Pick the install layout from USE=scsh: "scsh" when the flag is set,
+# otherwise fall back to FHS. Exports SCSH_LAYOUT for later phases.
+set_layout() {
+	if use scsh; then
+		SCSH_LAYOUT=scsh
+	else
+		ewarn "No layout was specified via USE, defaulting to FHS."
+		SCSH_LAYOUT=fhs
+	fi
+	export SCSH_LAYOUT
+}
+
+# Derive and export the scsh path variables (SCSH_VERSION, SCSH_MV,
+# SCSH_PREFIX, SCSH_MODULES_PATH, SCSH_LIB_DIRS) from the installed
+# app-shells/scsh version and the chosen SCSH_LAYOUT.
+set_path_variables() {
+	SCSH_VERSION="$(best_version 'app-shells/scsh')"
+	# Strip the trailing version component, then the category/package
+	# prefix, leaving the major version (e.g. app-shells/scsh-0.6.7 -> 0.6).
+	SCSH_MV="${SCSH_VERSION%*.*}"
+	SCSH_MV="${SCSH_MV//app-shells\/scsh-}"
+	export SCSH_VERSION SCSH_MV
+
+	case ${SCSH_LAYOUT} in
+		fhs)
+			SCSH_PREFIX=/usr
+			SCSH_MODULES_PATH=/usr/share/scsh-${SCSH_MV}/modules
+			;;
+		scsh)
+			SCSH_PREFIX=/usr/$(get_libdir)/scsh/modules
+			SCSH_MODULES_PATH=/usr/$(get_libdir)/scsh/modules/${SCSH_MV}
+			;;
+	esac
+	export SCSH_PREFIX SCSH_MODULES_PATH
+
+	# Build a scheme-style list of double-quoted load paths:
+	# "<modules>" "<scsh path>" "."
+	SCSH_LIB_DIRS='"'${SCSH_MODULES_PATH}'"'" "'"'$(scsh_scsh_path)'"'" "'"'.'"'
+	export SCSH_LIB_DIRS
+}
+
+# src_unpack: resolve the layout and path variables before unpacking
+# the distfiles, so later phases can rely on them.
+scsh_src_unpack() {
+	set_layout
+	set_path_variables
+	einfo "Using ${SCSH_LAYOUT} layout"
+	unpack ${A}
+}
+
+# Assemble and export SCSH_LAYOUT_CONF: the argument string passed to
+# scsh-install-pkg (build host, layout, prefix, and ${D} as dest-dir).
+scsh_get_layout_conf() {
+	SCSH_LAYOUT_CONF=" --build ${CHOST}
+	--force
+	--layout ${SCSH_LAYOUT}
+	--prefix ${SCSH_PREFIX}
+	--no-user-defaults
+	--dest-dir ${D}"
+	export SCSH_LAYOUT_CONF
+}
+
+# src_compile: nothing to build; just prepare the install arguments.
+scsh_src_compile() {
+	scsh_get_layout_conf
+}
+
+# src_install: create the modules dir and hand off to the package's
+# own scsh-install-pkg script with the assembled layout arguments.
+scsh_src_install() {
+	dodir ${SCSH_MODULES_PATH}
+	scsh-install-pkg ${SCSH_LAYOUT_CONF} || die "./scsh-install-pkg failed"
+}
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
diff --git a/eclass/selinux-policy-2.eclass b/eclass/selinux-policy-2.eclass
new file mode 100644
index 000000000000..18492a9388fa
--- /dev/null
+++ b/eclass/selinux-policy-2.eclass
@@ -0,0 +1,357 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Eclass for installing SELinux policy, and optionally
+# reloading the reference-policy based modules.
+
+# @ECLASS: selinux-policy-2.eclass
+# @MAINTAINER:
+# selinux@gentoo.org
+# @BLURB: This eclass supports the deployment of the various SELinux modules in sec-policy
+# @DESCRIPTION:
+# The selinux-policy-2.eclass supports deployment of the various SELinux modules
+# defined in the sec-policy category. It is responsible for extracting the
+# specific bits necessary for single-module deployment (instead of full-blown
+# policy rebuilds) and applying the necessary patches.
+#
+# Also, it supports for bundling patches to make the whole thing just a bit more
+# manageable.
+
+# @ECLASS-VARIABLE: MODS
+# @DESCRIPTION:
+# This variable contains the (upstream) module name for the SELinux module.
+# This name is only the module name, not the category!
+: ${MODS:="_illegal"}
+
+# @ECLASS-VARIABLE: BASEPOL
+# @DESCRIPTION:
+# This variable contains the version string of the selinux-base-policy package
+# that this module build depends on. It is used to patch with the appropriate
+# patch bundle(s) that are part of selinux-base-policy.
+: ${BASEPOL:=${PVR}}
+
+# @ECLASS-VARIABLE: POLICY_PATCH
+# @DESCRIPTION:
+# This variable contains the additional patch(es) that need to be applied on top
+# of the patchset already contained within the BASEPOL variable. The variable
+# can be both a simple string (space-separated) or a bash array.
+: ${POLICY_PATCH:=""}
+
+# @ECLASS-VARIABLE: POLICY_FILES
+# @DESCRIPTION:
+# When defined, this contains the files (located in the ebuilds' files/
+# directory) which should be copied as policy module files into the store.
+# Generally, users would want to include at least a .te and .fc file, but .if
+# files are supported as well. The variable can be both a simple string
+# (space-separated) or a bash array.
+: ${POLICY_FILES:=""}
+
+# @ECLASS-VARIABLE: POLICY_TYPES
+# @DESCRIPTION:
+# This variable informs the eclass for which SELinux policies the module should
+# be built. Currently, Gentoo supports targeted, strict, mcs and mls.
+# This variable is the same POLICY_TYPES variable that we tell SELinux
+# users to set in make.conf. Therefore, it is not the module that should
+# override it, but the user.
+: ${POLICY_TYPES:="targeted strict mcs mls"}
+
+# @ECLASS-VARIABLE: SELINUX_GIT_REPO
+# @DESCRIPTION:
+# When defined, this variable overrides the default repository URL as used by
+# this eclass. It allows end users to point to a different policy repository
+# using a single variable, rather than having to set the packagename_LIVE_REPO
+# variable for each and every SELinux policy module package they want to install.
+# The default value is Gentoo's hardened-refpolicy repository.
+: ${SELINUX_GIT_REPO:="git://anongit.gentoo.org/proj/hardened-refpolicy.git https://anongit.gentoo.org/git/proj/hardened-refpolicy.git"};
+
+# @ECLASS-VARIABLE: SELINUX_GIT_BRANCH
+# @DESCRIPTION:
+# When defined, this variable sets the Git branch to use of the repository. This
+# allows for users and developers to use a different branch for the entire set of
+# SELinux policy packages, rather than having to override them one by one with the
+# packagename_LIVE_BRANCH variable.
+# The default value is the 'master' branch.
+: ${SELINUX_GIT_BRANCH:="master"};
+
+extra_eclass=""
+case ${BASEPOL} in
+ 9999) extra_eclass="git-r3";
+ EGIT_REPO_URI="${SELINUX_GIT_REPO}";
+ EGIT_BRANCH="${SELINUX_GIT_BRANCH}";
+ EGIT_CHECKOUT_DIR="${WORKDIR}/refpolicy";;
+esac
+
+inherit eutils ${extra_eclass}
+
+IUSE=""
+
+HOMEPAGE="https://wiki.gentoo.org/wiki/Project:SELinux"
+if [[ -n ${BASEPOL} ]] && [[ "${BASEPOL}" != "9999" ]];
+then
+ SRC_URI="https://raw.githubusercontent.com/wiki/TresysTechnology/refpolicy/files/refpolicy-${PV}.tar.bz2
+ http://dev.gentoo.org/~swift/patches/selinux-base-policy/patchbundle-selinux-base-policy-${BASEPOL}.tar.bz2"
+elif [[ "${BASEPOL}" != "9999" ]];
+then
+ SRC_URI="https://raw.githubusercontent.com/wiki/TresysTechnology/refpolicy/files/refpolicy-${PV}.tar.bz2"
+else
+ SRC_URI=""
+fi
+
+LICENSE="GPL-2"
+SLOT="0"
+S="${WORKDIR}/"
+PATCHBUNDLE="${DISTDIR}/patchbundle-selinux-base-policy-${BASEPOL}.tar.bz2"
+
+# Modules should always depend on at least the first release of the
+# selinux-base-policy for which they are generated.
+if [[ -n ${BASEPOL} ]];
+then
+ RDEPEND=">=sys-apps/policycoreutils-2.0.82
+ >=sec-policy/selinux-base-policy-${BASEPOL}"
+else
+ RDEPEND=">=sys-apps/policycoreutils-2.0.82
+ >=sec-policy/selinux-base-policy-${PV}"
+fi
+DEPEND="${RDEPEND}
+ sys-devel/m4
+ >=sys-apps/checkpolicy-2.0.21"
+
+case "${EAPI:-0}" in
+ 0|1|2|3|4) die "EAPI<5 is not supported";;
+ *) : ;;
+esac
+
+EXPORT_FUNCTIONS "src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm"
+
+# @FUNCTION: selinux-policy-2_src_unpack
+# @DESCRIPTION:
+# Unpack the policy sources as offered by upstream (refpolicy).
+selinux-policy-2_src_unpack() {
+	# Release builds unpack the distfiles; the 9999 live ebuild
+	# checks out the git repository instead (see git-r3 inherit).
+	if [[ "${BASEPOL}" != "9999" ]];
+	then
+		unpack ${A}
+	else
+		git-r3_src_unpack
+	fi
+}
+
+# @FUNCTION: selinux-policy-2_src_prepare
+# @DESCRIPTION:
+# Patch the reference policy sources with our set of enhancements. Start with
+# the base patchbundle referred to by the ebuilds through the BASEPOL variable,
+# then apply the additional patches as offered by the ebuild.
+#
+# Next, extract only those files needed for this particular module (i.e. the .te
+# and .fc files for the given module in the MODS variable).
+#
+# Finally, prepare the build environments for each of the supported SELinux
+# types (such as targeted or strict), depending on the POLICY_TYPES variable
+# content.
+selinux-policy-2_src_prepare() {
+	local modfiles
+	local add_interfaces=0;
+
+	# Create 3rd_party location for user-contributed policies
+	cd "${S}/refpolicy/policy/modules" && mkdir 3rd_party;
+
+	# Patch the sources with the base patchbundle
+	if [[ -n ${BASEPOL} ]] && [[ "${BASEPOL}" != "9999" ]];
+	then
+		cd "${S}"
+		EPATCH_MULTI_MSG="Applying SELinux policy updates ... " \
+		EPATCH_SUFFIX="patch" \
+		EPATCH_SOURCE="${WORKDIR}" \
+		EPATCH_FORCE="yes" \
+		epatch
+	fi
+
+	# Call in epatch_user. We do this early on as we start moving
+	# files left and right hereafter.
+	epatch_user
+
+	# Copy additional files to the 3rd_party/ location
+	# Detect whether POLICY_FILES is a bash array or a non-empty string.
+	# NOTE(review): '2>/dev/null 2>&1' -- the second redirect overrides
+	# the first, so stderr ends up in the command substitution; likely
+	# meant '> /dev/null 2>&1' or just '2>/dev/null'.
+	if [[ "$(declare -p POLICY_FILES 2>/dev/null 2>&1)" == "declare -a"* ]] ||
+	   [[ -n ${POLICY_FILES} ]];
+	then
+		add_interfaces=1;
+		cd "${S}/refpolicy/policy/modules"
+		for POLFILE in ${POLICY_FILES[@]};
+		do
+			cp "${FILESDIR}/${POLFILE}" 3rd_party/ || die "Could not copy ${POLFILE} to 3rd_party/ location";
+		done
+	fi
+
+	# Apply the additional patches referred to by the module ebuild.
+	# But first some magic to differentiate between bash arrays and strings
+	if [[ "$(declare -p POLICY_PATCH 2>/dev/null 2>&1)" == "declare -a"* ]] ||
+	   [[ -n ${POLICY_PATCH} ]];
+	then
+		cd "${S}/refpolicy/policy/modules"
+		for POLPATCH in ${POLICY_PATCH[@]};
+		do
+			epatch "${POLPATCH}"
+		done
+	fi
+
+	# Collect only those files needed for this particular module
+	# (.te/.fc always; .if only when extra policy files were added).
+	for i in ${MODS}; do
+		modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.te) $modfiles"
+		modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.fc) $modfiles"
+		if [ ${add_interfaces} -eq 1 ];
+		then
+			modfiles="$(find ${S}/refpolicy/policy/modules -iname $i.if) $modfiles"
+		fi
+	done
+
+	# One build directory per policy type, each seeded with the example
+	# Makefile and this module's files.
+	for i in ${POLICY_TYPES}; do
+		mkdir "${S}"/${i} || die "Failed to create directory ${S}/${i}"
+		cp "${S}"/refpolicy/doc/Makefile.example "${S}"/${i}/Makefile \
+			|| die "Failed to copy Makefile.example to ${S}/${i}/Makefile"
+
+		cp ${modfiles} "${S}"/${i} \
+			|| die "Failed to copy the module files to ${S}/${i}"
+	done
+}
+
+# @FUNCTION: selinux-policy-2_src_compile
+# @DESCRIPTION:
+# Build the SELinux policy module (.pp file) for just the selected module, and
+# this for each SELinux policy mentioned in POLICY_TYPES
+selinux-policy-2_src_compile() {
+	# Translate each enabled USE flag into an m4 -D use_<flag> define.
+	local makeuse=""
+	for useflag in ${IUSE};
+	do
+		use ${useflag} && makeuse="${makeuse} -D use_${useflag}"
+	done
+
+	for i in ${POLICY_TYPES}; do
+		# Support USE flags in builds
+		export M4PARAM="${makeuse}"
+		if [[ ${BASEPOL} == 2.20140311* ]]; then
+			# Parallel builds are broken in 2.20140311-r7 and earlier, bug 530178
+			emake -j1 NAME=$i -C "${S}"/${i} || die "${i} compile failed"
+		else
+			emake NAME=$i -C "${S}"/${i} || die "${i} compile failed"
+		fi
+	done
+}
+
+# @FUNCTION: selinux-policy-2_src_install
+# @DESCRIPTION:
+# Install the built .pp files in the correct subdirectory within
+# /usr/share/selinux.
+selinux-policy-2_src_install() {
+	local BASEDIR="/usr/share/selinux"
+
+	for i in ${POLICY_TYPES}; do
+		for j in ${MODS}; do
+			einfo "Installing ${i} ${j} policy package"
+			insinto ${BASEDIR}/${i}
+			doins "${S}"/${i}/${j}.pp || die "Failed to add ${j}.pp to ${i}"
+
+			# Ship the interface file too when POLICY_FILES provided one.
+			if [[ "${POLICY_FILES[@]}" == *"${j}.if"* ]];
+			then
+				insinto ${BASEDIR}/${i}/include/3rd_party
+				doins "${S}"/${i}/${j}.if || die "Failed to add ${j}.if to ${i}"
+			fi
+		done
+	done
+}
+
+# @FUNCTION: selinux-policy-2_pkg_postinst
+# @DESCRIPTION:
+# Install the built .pp files in the SELinux policy stores, effectively
+# activating the policy on the system.
+selinux-policy-2_pkg_postinst() {
+	# build up the command in the case of multiple modules
+	local COMMAND
+	for i in ${MODS}; do
+		COMMAND="-i ${i}.pp ${COMMAND}"
+	done
+
+	for i in ${POLICY_TYPES}; do
+		# The unconfined module has no place in the strict store.
+		if [ "${i}" == "strict" ] && [ "${MODS}" = "unconfined" ];
+		then
+			einfo "Ignoring loading of unconfined module in strict module store.";
+			continue;
+		fi
+		einfo "Inserting the following modules into the $i module store: ${MODS}"
+
+		cd /usr/share/selinux/${i} || die "Could not enter /usr/share/selinux/${i}"
+		semodule -s ${i} ${COMMAND}
+		# Incremental load failed: fall back to reloading the base
+		# policy plus every installed module for this store.
+		if [ $? -ne 0 ];
+		then
+			ewarn "SELinux module load failed. Trying full reload...";
+			if [ "${i}" == "targeted" ];
+			then
+				semodule -s ${i} -b base.pp -i $(ls *.pp | grep -v base.pp);
+			else
+				semodule -s ${i} -b base.pp -i $(ls *.pp | grep -v base.pp | grep -v unconfined.pp);
+			fi
+			if [ $? -ne 0 ];
+			then
+				ewarn "Failed to reload SELinux policies."
+				ewarn ""
+				ewarn "If this is *not* the last SELinux module package being installed,"
+				ewarn "then you can safely ignore this as the reloads will be retried"
+				ewarn "with other, recent modules."
+				ewarn ""
+				ewarn "If it is the last SELinux module package being installed however,"
+				ewarn "then it is advised to look at the error above and take appropriate"
+				ewarn "action since the new SELinux policies are not loaded until the"
+				ewarn "command finished succesfully."
+				ewarn ""
+				ewarn "To reload, run the following command from within /usr/share/selinux/${i}:"
+				ewarn "  semodule -b base.pp -i \$(ls *.pp | grep -v base.pp)"
+				ewarn "or"
+				ewarn "  semodule -b base.pp -i \$(ls *.pp | grep -v base.pp | grep -v unconfined.pp)"
+				ewarn "depending on if you need the unconfined domain loaded as well or not."
+			else
+				einfo "SELinux modules reloaded succesfully."
+			fi
+		else
+			einfo "SELinux modules loaded succesfully."
+		fi
+	done
+
+	# Relabel depending packages
+	# Find reverse dependencies of this policy package (excluding other
+	# sec-policy/selinux-* packages) and rerun rlpkg on them, using
+	# whichever query tool happens to be installed.
+	PKGSET="";
+	if [ -x /usr/bin/qdepends ] ; then
+		PKGSET=$(/usr/bin/qdepends -Cq -r -Q ${CATEGORY}/${PN} | grep -v "sec-policy/selinux-");
+	elif [ -x /usr/bin/equery ] ; then
+		PKGSET=$(/usr/bin/equery -Cq depends ${CATEGORY}/${PN} | grep -v "sec-policy/selinux-");
+	fi
+	if [ -n "${PKGSET}" ] ; then
+		rlpkg ${PKGSET};
+	fi
+}
+
+# @FUNCTION: selinux-policy-2_pkg_postrm
+# @DESCRIPTION:
+# Uninstall the module(s) from the SELinux policy stores, effectively
+# deactivating the policy on the system.
+selinux-policy-2_pkg_postrm() {
+	# Only if we are not upgrading
+	# (REPLACED_BY_VERSION is set when a newer version replaces us,
+	# in which case the modules must stay loaded).
+	if [[ -z "${REPLACED_BY_VERSION}" ]];
+	then
+		# build up the command in the case of multiple modules
+		local COMMAND
+		for i in ${MODS}; do
+			COMMAND="-r ${i} ${COMMAND}"
+		done
+
+		for i in ${POLICY_TYPES}; do
+			einfo "Removing the following modules from the $i module store: ${MODS}"
+
+			semodule -s ${i} ${COMMAND}
+			# Removal failure is non-fatal during uninstall; warn only.
+			if [ $? -ne 0 ];
+			then
+				ewarn "SELinux module unload failed.";
+			else
+				einfo "SELinux modules unloaded succesfully."
+			fi
+		done
+	fi
+}
+
diff --git a/eclass/sgml-catalog.eclass b/eclass/sgml-catalog.eclass
new file mode 100644
index 000000000000..9b5215d6690b
--- /dev/null
+++ b/eclass/sgml-catalog.eclass
@@ -0,0 +1,101 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: sgml-catalog.eclass
+# @MAINTAINER:
+# SGML Herd <sgml@gentoo.org>
+# @AUTHOR:
+# Author Matthew Turk <satai@gentoo.org>
+# @BLURB: Functions for installing SGML catalogs
+
+inherit base
+
+DEPEND=">=app-text/sgml-common-0.6.3-r2"
+
+# @ECLASS-VARIABLE: SGML_TOINSTALL
+# @DESCRIPTION:
+# An array of catalogs, arranged in pairs.
+# Each pair consists of a centralized catalog followed by an ordinary catalog.
+SGML_TOINSTALL=()
+
+# @FUNCTION: sgml-catalog_cat_include
+# @USAGE: <centralized catalog> <ordinary catalog>
+# @DESCRIPTION:
+# Appends a catalog pair to the SGML_TOINSTALL array.
+sgml-catalog_cat_include() {
+	debug-print function $FUNCNAME $*
+	# Pairs are stored flat; consumers walk the array two at a time.
+	SGML_TOINSTALL+=("$1" "$2")
+}
+
+# @FUNCTION: sgml-catalog_cat_doinstall
+# @USAGE: <centralized catalog> <ordinary catalog>
+# @DESCRIPTION:
+# Adds an ordinary catalog to a centralized catalog.
+sgml-catalog_cat_doinstall() {
+	debug-print function $FUNCNAME $*
+	# EAPI 0-2 without USE=prefix: no prefix support, force EPREFIX empty.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	"${EPREFIX}"/usr/bin/install-catalog --add "${EPREFIX}$1" "${EPREFIX}$2" &>/dev/null
+}
+
+# @FUNCTION: sgml-catalog_cat_doremove
+# @USAGE: <centralized catalog> <ordinary catalog>
+# @DESCRIPTION:
+# Removes an ordinary catalog from a centralized catalog.
+sgml-catalog_cat_doremove() {
+	debug-print function $FUNCNAME $*
+	# EAPI 0-2 without USE=prefix: no prefix support, force EPREFIX empty.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	"${EPREFIX}"/usr/bin/install-catalog --remove "${EPREFIX}$1" "${EPREFIX}$2" &>/dev/null
+}
+
+# pkg_postinst: register every catalog pair queued in SGML_TOINSTALL,
+# then regenerate the SGML environment files.
+sgml-catalog_pkg_postinst() {
+	debug-print function $FUNCNAME $*
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+	# Walk the flat pair array via the positional parameters.
+	set -- "${SGML_TOINSTALL[@]}"
+
+	while (( $# )); do
+		# Skip pairs whose ordinary catalog was never installed.
+		if [[ ! -e "${EPREFIX}$2" ]]; then
+			ewarn "${EPREFIX}$2 doesn't appear to exist, although it ought to!"
+			shift 2
+			continue
+		fi
+		einfo "Now adding ${EPREFIX}$2 to ${EPREFIX}$1 and ${EPREFIX}/etc/sgml/catalog"
+		sgml-catalog_cat_doinstall "$1" "$2"
+		shift 2
+	done
+	sgml-catalog_cleanup
+}
+
+# pkg_prerm: refresh the SGML environment before the files go away.
+sgml-catalog_pkg_prerm() {
+	sgml-catalog_cleanup
+}
+
+# pkg_postrm: deregister every catalog pair queued in SGML_TOINSTALL.
+sgml-catalog_pkg_postrm() {
+	debug-print function $FUNCNAME $*
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+
+	# Walk the flat pair array via the positional parameters.
+	set -- "${SGML_TOINSTALL[@]}"
+
+	while (( $# )); do
+		einfo "Now removing ${EPREFIX}$2 from ${EPREFIX}$1 and ${EPREFIX}/etc/sgml/catalog"
+		sgml-catalog_cat_doremove "$1" "$2"
+		shift 2
+	done
+}
+
+# Regenerate the SGML environment files (sgml.env and the env.d entry)
+# when app-text/sgml-common's gensgmlenv tool is available.
+sgml-catalog_cleanup() {
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	if [ -e "${EPREFIX}/usr/bin/gensgmlenv" ]
+	then
+		einfo Regenerating SGML environment variables ...
+		gensgmlenv
+		# env.d wants plain VAR=value lines, so drop the 'export' lines.
+		grep -v export "${EPREFIX}/etc/sgml/sgml.env" > "${EPREFIX}/etc/env.d/93sgmltools-lite"
+	fi
+}
+
+# src_compile: catalog-only packages have nothing to build.
+sgml-catalog_src_compile() {
+	return
+}
+
+EXPORT_FUNCTIONS pkg_postrm pkg_postinst src_compile pkg_prerm
diff --git a/eclass/ssl-cert.eclass b/eclass/ssl-cert.eclass
new file mode 100644
index 000000000000..33bee2f8ded8
--- /dev/null
+++ b/eclass/ssl-cert.eclass
@@ -0,0 +1,250 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: ssl-cert.eclass
+# @MAINTAINER:
+# @AUTHOR:
+# Max Kalika <max@gentoo.org>
+# @BLURB: Eclass for SSL certificates
+# @DESCRIPTION:
+# This eclass implements a standard installation procedure for installing
+# self-signed SSL certificates.
+# @EXAMPLE:
+# "install_cert /foo/bar" installs ${ROOT}/foo/bar.{key,csr,crt,pem}
+
+# @ECLASS-VARIABLE: SSL_CERT_MANDATORY
+# @DESCRIPTION:
+# Set to non zero if ssl-cert is mandatory for ebuild.
+: ${SSL_CERT_MANDATORY:=0}
+
+# @ECLASS-VARIABLE: SSL_CERT_USE
+# @DESCRIPTION:
+# Use flag to append dependency to.
+: ${SSL_CERT_USE:=ssl}
+
+# Depend on OpenSSL unconditionally when the ebuild declared certificates
+# mandatory; otherwise only behind the (configurable) USE flag.
+if [[ "${SSL_CERT_MANDATORY}" == "0" ]]; then
+	DEPEND="${SSL_CERT_USE}? ( dev-libs/openssl )"
+	IUSE="${SSL_CERT_USE}"
+else
+	DEPEND="dev-libs/openssl"
+fi
+
+# @FUNCTION: gen_cnf
+# @USAGE:
+# @DESCRIPTION:
+# Initializes variables and generates the needed
+# OpenSSL configuration file and a CA serial file
+#
+# Access: private
+gen_cnf() {
+	# Location of the config file; ${$} (the ebuild's PID) keeps parallel
+	# merges from clobbering each other's scratch files in ${T}.
+	SSL_CONF="${T}/${$}ssl.cnf"
+	# Location of the CA serial file
+	SSL_SERIAL="${T}/${$}ca.ser"
+	# Location of some random files OpenSSL can use: don't use
+	# /dev/u?random here -- doesn't work properly on all platforms
+	SSL_RANDOM="${T}/environment:${T}/eclass-debug.log:/etc/resolv.conf"
+
+	# These can be overridden in the ebuild
+	# NOTE(review): 1024-bit RSA is weak by current standards; consider
+	# raising the default (ebuilds can already override SSL_BITS).
+	SSL_DAYS="${SSL_DAYS:-730}"
+	SSL_BITS="${SSL_BITS:-1024}"
+	SSL_COUNTRY="${SSL_COUNTRY:-US}"
+	SSL_STATE="${SSL_STATE:-California}"
+	SSL_LOCALITY="${SSL_LOCALITY:-Santa Barbara}"
+	SSL_ORGANIZATION="${SSL_ORGANIZATION:-SSL Server}"
+	SSL_UNIT="${SSL_UNIT:-For Testing Purposes Only}"
+	SSL_COMMONNAME="${SSL_COMMONNAME:-localhost}"
+	SSL_EMAIL="${SSL_EMAIL:-root@localhost}"
+
+	# Create the CA serial file
+	echo "01" > "${SSL_SERIAL}"
+
+	# Create the config file; "${1:+ for CA}" appends the suffix only when
+	# an argument was passed (i.e. this is the CA's configuration).
+	ebegin "Generating OpenSSL configuration${1:+ for CA}"
+	cat <<-EOF > "${SSL_CONF}"
+		[ req ]
+		prompt = no
+		default_bits = ${SSL_BITS}
+		distinguished_name = req_dn
+		[ req_dn ]
+		C = ${SSL_COUNTRY}
+		ST = ${SSL_STATE}
+		L = ${SSL_LOCALITY}
+		O = ${SSL_ORGANIZATION}
+		OU = ${SSL_UNIT}
+		CN = ${SSL_COMMONNAME}${1:+ CA}
+		emailAddress = ${SSL_EMAIL}
+	EOF
+	eend $?
+
+	return $?
+}
+
+# @FUNCTION: get_base
+# @USAGE: [if_ca]
+# @RETURN: <base path>
+# @DESCRIPTION:
+# Echoes the scratch-file base path: the CA base when any argument is
+# given, the server base otherwise.
+#
+# Access: private
+get_base() {
+	local suffix=server
+	[ "${1}" ] && suffix=ca
+	echo "${T}/${$}${suffix}"
+}
+
+# @FUNCTION: gen_key
+# @USAGE: <base path>
+# @DESCRIPTION:
+# Generates an RSA key
+#
+# Access: private
+gen_key() {
+	local base=$(get_base "$1")
+	ebegin "Generating ${SSL_BITS} bit RSA key${1:+ for CA}"
+	# -rand seeds the PRNG from the colon-separated file list in SSL_RANDOM.
+	openssl genrsa -rand "${SSL_RANDOM}" \
+		-out "${base}.key" "${SSL_BITS}" &> /dev/null
+	eend $?
+
+	return $?
+}
+
+# @FUNCTION: gen_csr
+# @USAGE: <base path>
+# @DESCRIPTION:
+# Generates a certificate signing request using
+# the key made by gen_key()
+#
+# Access: private
+gen_csr() {
+	local base=$(get_base "$1")
+	ebegin "Generating Certificate Signing Request${1:+ for CA}"
+	# Subject fields come non-interactively from the gen_cnf() config.
+	openssl req -config "${SSL_CONF}" -new \
+		-key "${base}.key" -out "${base}.csr" &>/dev/null
+	eend $?
+
+	return $?
+}
+
+# @FUNCTION: gen_crt
+# @USAGE: <base path>
+# @DESCRIPTION:
+# Generates either a self-signed CA certificate using
+# the csr and key made by gen_csr() and gen_key() or
+# a signed server certificate using the CA cert previously
+# created by gen_crt()
+#
+# Access: private
+gen_crt() {
+	local base=$(get_base "$1")
+	if [ "${1}" ] ; then
+		# CA case: self-sign with the CA's own key.
+		ebegin "Generating self-signed X.509 Certificate for CA"
+		openssl x509 -extfile "${SSL_CONF}" \
+			-days ${SSL_DAYS} -req -signkey "${base}.key" \
+			-in "${base}.csr" -out "${base}.crt" &>/dev/null
+	else
+		# Server case: sign with the CA generated earlier; the serial
+		# file from gen_cnf() tracks issued certificates.
+		local ca=$(get_base 1)
+		ebegin "Generating authority-signed X.509 Certificate"
+		openssl x509 -extfile "${SSL_CONF}" \
+			-days ${SSL_DAYS} -req -CAserial "${SSL_SERIAL}" \
+			-CAkey "${ca}.key" -CA "${ca}.crt" \
+			-in "${base}.csr" -out "${base}.crt" &>/dev/null
+	fi
+	eend $?
+
+	return $?
+}
+
+# @FUNCTION: gen_pem
+# @USAGE: <base path>
+# @DESCRIPTION:
+# Generates a PEM file by concatenating the key
+# and cert file created by gen_key() and gen_crt()
+#
+# Access: private
+gen_pem() {
+	local base=$(get_base "$1")
+	ebegin "Generating PEM Certificate"
+	# The subshell groups the three writes into a single redirection.
+	(cat "${base}.key"; echo; cat "${base}.crt") > "${base}.pem"
+	eend $?
+
+	return $?
+}
+
+# @FUNCTION: install_cert
+# @USAGE: <certificates>
+# @DESCRIPTION:
+# Uses all the private functions above to generate and install the
+# requested certificates.
+# <certificates> are full pathnames relative to ROOT, without extension.
+#
+# Example: "install_cert /foo/bar" installs ${ROOT}/foo/bar.{key,csr,crt,pem}
+#
+# Access: public
+install_cert() {
+	if [ $# -lt 1 ] ; then
+		eerror "At least one argument needed"
+		return 1;
+	fi
+
+	# Certificates are written to the live filesystem, so only the pkg_*
+	# phases are acceptable callers.
+	case ${EBUILD_PHASE} in
+		unpack|prepare|configure|compile|test|install)
+			die "install_cert cannot be called in ${EBUILD_PHASE}"
+			;;
+	esac
+
+	# Generate a CA environment #164601
+	gen_cnf 1 || return 1
+	gen_key 1 || return 1
+	gen_csr 1 || return 1
+	gen_crt 1 || return 1
+	echo
+
+	# Plain (non-CA) config reused for every requested certificate below.
+	gen_cnf || return 1
+	echo
+
+	local count=0
+	for cert in "$@" ; do
+		# Reject arguments ending in '/': there is no basename to install.
+		if [ -z "${cert##*/}" ] ; then
+			ewarn "Invalid certification requested, skipping"
+			continue
+		fi
+
+		# Check for previous existence of generated files;
+		# 'continue 2' resumes the outer per-certificate loop.
+		for type in key csr crt pem ; do
+			if [ -e "${ROOT}${cert}.${type}" ] ; then
+				ewarn "${ROOT}${cert}.${type}: exists, skipping"
+				continue 2
+			fi
+		done
+
+		# Generate the requested files
+		gen_key || continue
+		gen_csr || continue
+		gen_crt || continue
+		gen_pem || continue
+		echo
+
+		# Install the generated files and set sane permissions
+		local base=$(get_base)
+		install -d "${ROOT}${cert%/*}"
+		install -m0400 "${base}.key" "${ROOT}${cert}.key"
+		install -m0444 "${base}.csr" "${ROOT}${cert}.csr"
+		install -m0444 "${base}.crt" "${ROOT}${cert}.crt"
+		install -m0400 "${base}.pem" "${ROOT}${cert}.pem"
+		# ':' discards the expansion; only the ++ side effect is wanted.
+		: $(( ++count ))
+	done
+
+	# Resulting status
+	if [ ${count} = 0 ] ; then
+		eerror "No certificates were generated"
+		return 1
+	elif [ ${count} != ${#} ] ; then
+		ewarn "Some requested certificates were not generated"
+	fi
+}
diff --git a/eclass/stardict.eclass b/eclass/stardict.eclass
new file mode 100644
index 000000000000..255a87f957ee
--- /dev/null
+++ b/eclass/stardict.eclass
@@ -0,0 +1,60 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Author : Alastair Tse <liquidx@gentoo.org>
+#
+# Convenience class to do stardict dictionary installations.
+#
+# Usage:
+# - Variables to set :
+# * FROM_LANG - From this language
+# * TO_LANG - To this language
+# * DICT_PREFIX - SRC_URI prefix, like "dictd_www.mova.org_"
+# * DICT_SUFFIX - SRC_URI after the prefix.
+
+RESTRICT="strip"
+
+[ -z "${DICT_SUFFIX}" ] && DICT_SUFFIX=${PN#stardict-[[:lower:]]*-}
+[ -z "${DICT_P}" ] && DICT_P=stardict-${DICT_PREFIX}${DICT_SUFFIX}-${PV}
+
+if [ -n "${FROM_LANG}" -a -n "${TO_LANG}" ]; then
+ DESCRIPTION="Stardict Dictionary ${FROM_LANG} to ${TO_LANG}"
+elif [ -z "${DESCRIPTION}" ]; then
+ DESCRIPTION="Another Stardict Dictionary"
+fi
+
+HOMEPAGE="http://stardict.sourceforge.net/"
+SRC_URI="mirror://sourceforge/stardict/${DICT_P}.tar.bz2"
+
+IUSE="gzip"
+SLOT="0"
+LICENSE="GPL-2"
+
+DEPEND="|| ( >=app-text/stardict-2.4.2
+ app-text/sdcv
+ app-text/goldendict )
+ gzip? ( app-arch/gzip
+ app-text/dictd )"
+
+S=${WORKDIR}/${DICT_P}
+
+# Optionally compress the dictionary data: gzip the index, and dictzip the
+# data file so it stays seekable for dict clients.
+stardict_src_compile() {
+	if use gzip; then
+		# 'local' keeps the loop variable from leaking into the ebuild's
+		# global scope; quoting protects filenames containing whitespace.
+		local file
+		for file in *.idx; do
+			[[ -f ${file} ]] && gzip "${file}"
+		done
+		for file in *.dict; do
+			[[ -f ${file} ]] && dictzip "${file}"
+		done
+	fi
+}
+
+stardict_src_install() {
+	# Install the (possibly dictzip'd) data, index and metadata files
+	# into the shared stardict dictionary directory.
+	insinto /usr/share/stardict/dic
+	doins *.dict.dz*
+	doins *.idx*
+	doins *.ifo
+}
+
+EXPORT_FUNCTIONS src_compile src_install
diff --git a/eclass/subversion.eclass b/eclass/subversion.eclass
new file mode 100644
index 000000000000..f0f33791bca3
--- /dev/null
+++ b/eclass/subversion.eclass
@@ -0,0 +1,525 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: subversion.eclass
+# @MAINTAINER:
+# Akinori Hattori <hattya@gentoo.org>
+# @AUTHOR:
+# Original Author: Akinori Hattori <hattya@gentoo.org>
+# @BLURB: The subversion eclass is written to fetch software sources from subversion repositories
+# @DESCRIPTION:
+# The subversion eclass provides functions to fetch, patch and bootstrap
+# software sources from subversion repositories.
+
+inherit eutils
+
+ESVN="${ECLASS}"
+
+case "${EAPI:-0}" in
+ 0|1)
+ EXPORT_FUNCTIONS src_unpack pkg_preinst
+ DEPEND="dev-vcs/subversion"
+ ;;
+ *)
+ EXPORT_FUNCTIONS src_unpack src_prepare pkg_preinst
+ DEPEND="|| ( dev-vcs/subversion[http] dev-vcs/subversion[webdav-neon] dev-vcs/subversion[webdav-serf] )"
+ ;;
+esac
+
+DEPEND+=" net-misc/rsync"
+
+# @ECLASS-VARIABLE: ESVN_STORE_DIR
+# @DESCRIPTION:
+# subversion sources store directory. Users may override this in /etc/portage/make.conf
+[[ -z ${ESVN_STORE_DIR} ]] && ESVN_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/svn-src"
+
+# @ECLASS-VARIABLE: ESVN_FETCH_CMD
+# @DESCRIPTION:
+# subversion checkout command
+ESVN_FETCH_CMD="svn checkout"
+
+# @ECLASS-VARIABLE: ESVN_UPDATE_CMD
+# @DESCRIPTION:
+# subversion update command
+ESVN_UPDATE_CMD="svn update"
+
+# @ECLASS-VARIABLE: ESVN_SWITCH_CMD
+# @DESCRIPTION:
+# subversion switch command
+ESVN_SWITCH_CMD="svn switch"
+
+# @ECLASS-VARIABLE: ESVN_OPTIONS
+# @DESCRIPTION:
+# the options passed to checkout or update. If you want a specific revision see
+# ESVN_REPO_URI instead of using -rREV.
+ESVN_OPTIONS="${ESVN_OPTIONS:-}"
+
+# @ECLASS-VARIABLE: ESVN_REPO_URI
+# @DESCRIPTION:
+# repository uri
+#
+# e.g. http://foo/trunk, svn://bar/trunk, svn://bar/branch/foo@1234
+#
+# supported URI schemes:
+# http://
+# https://
+# svn://
+# svn+ssh://
+# file://
+#
+# to peg to a specific revision, append @REV to the repo's uri
+ESVN_REPO_URI="${ESVN_REPO_URI:-}"
+
+# @ECLASS-VARIABLE: ESVN_REVISION
+# @DESCRIPTION:
+# User configurable revision checkout or update to from the repository
+#
+# Useful for live svn or trunk svn ebuilds allowing the user to peg
+# to a specific revision
+#
+# Note: This should never be set in an ebuild!
+ESVN_REVISION="${ESVN_REVISION:-}"
+
+# @ECLASS-VARIABLE: ESVN_USER
+# @DESCRIPTION:
+# User name
+ESVN_USER="${ESVN_USER:-}"
+
+# @ECLASS-VARIABLE: ESVN_PASSWORD
+# @DESCRIPTION:
+# Password
+ESVN_PASSWORD="${ESVN_PASSWORD:-}"
+
+# @ECLASS-VARIABLE: ESVN_PROJECT
+# @DESCRIPTION:
+# project name of your ebuild (= name space)
+#
+# subversion eclass will check out the subversion repository like:
+#
+# ${ESVN_STORE_DIR}/${ESVN_PROJECT}/${ESVN_REPO_URI##*/}
+#
+# so if you define ESVN_REPO_URI as http://svn.collab.net/repo/svn/trunk or
+# http://svn.collab.net/repo/svn/trunk/. and PN is subversion-svn.
+# it will check out like:
+#
+# ${ESVN_STORE_DIR}/subversion/trunk
+#
+# this is not used in order to declare the name of the upstream project.
+# so that you can declare this like:
+#
+# # jakarta commons-logging
+# ESVN_PROJECT=commons/logging
+#
+# default: ${PN/-svn}.
+ESVN_PROJECT="${ESVN_PROJECT:-${PN/-svn}}"
+
+# @ECLASS-VARIABLE: ESVN_BOOTSTRAP
+# @DESCRIPTION:
+# bootstrap script or command like autogen.sh or etc..
+ESVN_BOOTSTRAP="${ESVN_BOOTSTRAP:-}"
+
+# @ECLASS-VARIABLE: ESVN_PATCHES
+# @DESCRIPTION:
+# subversion eclass can apply patches in subversion_bootstrap().
+# you can use regexp in this variable like *.diff or *.patch or etc.
+# NOTE: patches will be applied before ESVN_BOOTSTRAP is processed.
+#
+# Patches are searched both in ${PWD} and ${FILESDIR}, if not found in either
+# location, the installation dies.
+ESVN_PATCHES="${ESVN_PATCHES:-}"
+
+# @ECLASS-VARIABLE: ESVN_RESTRICT
+# @DESCRIPTION:
+# this should be a space delimited list of subversion eclass features to
+# restrict.
+# export)
+# don't export the working copy to S.
+ESVN_RESTRICT="${ESVN_RESTRICT:-}"
+
+# @ECLASS-VARIABLE: ESVN_OFFLINE
+# @DESCRIPTION:
+# Set this variable to a non-empty value to disable the automatic updating of
+# an svn source tree. This is intended to be set outside the subversion source
+# tree by users.
+ESVN_OFFLINE="${ESVN_OFFLINE:-${EVCS_OFFLINE}}"
+
+# @ECLASS-VARIABLE: ESVN_UMASK
+# @DESCRIPTION:
+# Set this variable to a custom umask. This is intended to be set by users.
+# By setting this to something like 002, it can make life easier for people
+# who do development as non-root (but are in the portage group), and then
+# switch over to building with FEATURES=userpriv. Or vice-versa. Shouldn't
+# be a security issue here as anyone who has portage group write access
+# already can screw the system over in more creative ways.
+ESVN_UMASK="${ESVN_UMASK:-${EVCS_UMASK}}"
+
+# @ECLASS-VARIABLE: ESVN_UP_FREQ
+# @DESCRIPTION:
+# Set the minimum number of hours between svn up'ing in any given svn module. This is particularly
+# useful for split KDE ebuilds where we want to ensure that all submodules are compiled for the same
+# revision. It should also be kept user overrideable.
+ESVN_UP_FREQ="${ESVN_UP_FREQ:=}"
+
+# @ECLASS-VARIABLE: ESCM_LOGDIR
+# @DESCRIPTION:
+# User configuration variable. If set to a path such as e.g. /var/log/scm any
+# package inheriting from subversion.eclass will record svn revision to
+# ${CATEGORY}/${PN}.log in that path in pkg_preinst. This is not supposed to be
+# set by ebuilds/eclasses. It defaults to empty so users need to opt in.
+ESCM_LOGDIR="${ESCM_LOGDIR:=}"
+
+# @FUNCTION: subversion_fetch
+# @USAGE: [repo_uri] [destination]
+# @DESCRIPTION:
+# Wrapper function to fetch sources from subversion via svn checkout or svn update,
+# depending on whether there is an existing working copy in ${ESVN_STORE_DIR}.
+#
+# Can take two optional parameters:
+#   repo_uri    - a repository URI. default is ESVN_REPO_URI.
+#   destination - a check out path in S.
+subversion_fetch() {
+	local repo_uri="$(subversion__get_repository_uri "${1:-${ESVN_REPO_URI}}")"
+	local revision="$(subversion__get_peg_revision "${1:-${ESVN_REPO_URI}}")"
+	local S_dest="${2}"
+
+	if [[ -z ${repo_uri} ]]; then
+		die "${ESVN}: ESVN_REPO_URI (or specified URI) is empty."
+	fi
+
+	# A user-set ESVN_REVISION overrides any peg revision from the URI.
+	[[ -n "${ESVN_REVISION}" ]] && revision="${ESVN_REVISION}"
+
+	# check for the scheme
+	local scheme="${repo_uri%%:*}"
+	case "${scheme}" in
+		http|https)
+			;;
+		svn|svn+ssh)
+			;;
+		file)
+			;;
+		*)
+			die "${ESVN}: fetch from '${scheme}' is not yet implemented."
+			;;
+	esac
+
+	addread "/etc/subversion"
+	addwrite "${ESVN_STORE_DIR}"
+
+	if [[ -n "${ESVN_UMASK}" ]]; then
+		eumask_push "${ESVN_UMASK}"
+	fi
+
+	if [[ ! -d ${ESVN_STORE_DIR} ]]; then
+		debug-print "${FUNCNAME}: initial checkout. creating subversion directory"
+		mkdir -m 775 -p "${ESVN_STORE_DIR}" || die "${ESVN}: can't mkdir ${ESVN_STORE_DIR}."
+	fi
+
+	pushd "${ESVN_STORE_DIR}" >/dev/null || die "${ESVN}: can't chdir to ${ESVN_STORE_DIR}"
+
+	local wc_path="$(subversion__get_wc_path "${repo_uri}")"
+	local options="${ESVN_OPTIONS} --config-dir ${ESVN_STORE_DIR}/.subversion"
+
+	[[ -n "${revision}" ]] && options="${options} -r ${revision}"
+
+	if [[ "${ESVN_OPTIONS}" = *-r* ]]; then
+		ewarn "\${ESVN_OPTIONS} contains -r, this usage is unsupported. Please"
+		ewarn "see \${ESVN_REPO_URI}"
+	fi
+
+	# Disable external password stores (empty password-stores value).
+	if has_version ">=dev-vcs/subversion-1.6.0"; then
+		options="${options} --config-option=config:auth:password-stores="
+	fi
+
+	debug-print "${FUNCNAME}: wc_path = \"${wc_path}\""
+	debug-print "${FUNCNAME}: ESVN_OPTIONS = \"${ESVN_OPTIONS}\""
+	debug-print "${FUNCNAME}: options = \"${options}\""
+
+	if [[ ! -d ${wc_path}/.svn ]]; then
+		if [[ -n ${ESVN_OFFLINE} ]]; then
+			ewarn "ESVN_OFFLINE cannot be used when there is no existing checkout."
+		fi
+		# first check out
+		einfo "subversion check out start -->"
+		einfo " repository: ${repo_uri}${revision:+@}${revision}"
+
+		debug-print "${FUNCNAME}: ${ESVN_FETCH_CMD} ${options} ${repo_uri}"
+
+		mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ESVN}: can't mkdir ${ESVN_PROJECT}."
+		cd "${ESVN_PROJECT}" || die "${ESVN}: can't chdir to ${ESVN_PROJECT}"
+		if [[ -n "${ESVN_USER}" ]]; then
+			${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
+		else
+			${ESVN_FETCH_CMD} ${options} "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
+		fi
+
+	elif [[ -n ${ESVN_OFFLINE} ]]; then
+		# Offline mode: never touch the network; just validate the local copy.
+		svn upgrade "${wc_path}" &>/dev/null
+		svn cleanup "${wc_path}" &>/dev/null
+		subversion_wc_info "${repo_uri}" || die "${ESVN}: unknown problem occurred while accessing working copy."
+
+		if [[ -n ${ESVN_REVISION} && ${ESVN_REVISION} != ${ESVN_WC_REVISION} ]]; then
+			die "${ESVN}: You requested off-line updating and revision ${ESVN_REVISION} but only revision ${ESVN_WC_REVISION} is available locally."
+		fi
+		einfo "Fetching disabled: Using existing repository copy at revision ${ESVN_WC_REVISION}."
+	else
+		svn upgrade "${wc_path}" &>/dev/null
+		svn cleanup "${wc_path}" &>/dev/null
+		subversion_wc_info "${repo_uri}" || die "${ESVN}: unknown problem occurred while accessing working copy."
+
+		# Honour the user's minimum interval between network updates.
+		local esvn_up_freq=
+		if [[ -n ${ESVN_UP_FREQ} ]]; then
+			if [[ -n ${ESVN_UP_FREQ//[[:digit:]]} ]]; then
+				die "${ESVN}: ESVN_UP_FREQ must be an integer value corresponding to the minimum number of hours between svn up."
+			elif [[ -z $(find "${wc_path}/.svn/entries" -mmin "+$((ESVN_UP_FREQ*60))") ]]; then
+				einfo "Fetching disabled since ${ESVN_UP_FREQ} hours has not passed since last update."
+				einfo "Using existing repository copy at revision ${ESVN_WC_REVISION}."
+				esvn_up_freq=no_update
+			fi
+		fi
+
+		if [[ -z ${esvn_up_freq} ]]; then
+			if [[ ${ESVN_WC_UUID} != $(subversion__svn_info "${repo_uri}" "Repository UUID") ]]; then
+				# UUID mismatch. Delete working copy and check out it again.
+				einfo "subversion recheck out start -->"
+				einfo " old UUID: ${ESVN_WC_UUID}"
+				einfo " new UUID: $(subversion__svn_info "${repo_uri}" "Repository UUID")"
+				einfo " repository: ${repo_uri}${revision:+@}${revision}"
+
+				rm -fr "${ESVN_PROJECT}" || die
+
+				debug-print "${FUNCNAME}: ${ESVN_FETCH_CMD} ${options} ${repo_uri}"
+
+				mkdir -m 775 -p "${ESVN_PROJECT}" || die "${ESVN}: can't mkdir ${ESVN_PROJECT}."
+				cd "${ESVN_PROJECT}" || die "${ESVN}: can't chdir to ${ESVN_PROJECT}"
+				if [[ -n "${ESVN_USER}" ]]; then
+					${ESVN_FETCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
+				else
+					${ESVN_FETCH_CMD} ${options} "${repo_uri}" || die "${ESVN}: can't fetch to ${wc_path} from ${repo_uri}."
+				fi
+			elif [[ ${ESVN_WC_URL} != $(subversion__get_repository_uri "${repo_uri}") ]]; then
+				# Same repository, different URL: switch instead of refetching.
+				einfo "subversion switch start -->"
+				einfo " old repository: ${ESVN_WC_URL}@${ESVN_WC_REVISION}"
+				einfo " new repository: ${repo_uri}${revision:+@}${revision}"
+
+				debug-print "${FUNCNAME}: ${ESVN_SWITCH_CMD} ${options} ${repo_uri}"
+
+				cd "${wc_path}" || die "${ESVN}: can't chdir to ${wc_path}"
+				if [[ -n "${ESVN_USER}" ]]; then
+					${ESVN_SWITCH_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" ${repo_uri} || die "${ESVN}: can't update ${wc_path} from ${repo_uri}."
+				else
+					${ESVN_SWITCH_CMD} ${options} ${repo_uri} || die "${ESVN}: can't update ${wc_path} from ${repo_uri}."
+				fi
+			else
+				# update working copy
+				einfo "subversion update start -->"
+				einfo " repository: ${repo_uri}${revision:+@}${revision}"
+
+				debug-print "${FUNCNAME}: ${ESVN_UPDATE_CMD} ${options}"
+
+				cd "${wc_path}" || die "${ESVN}: can't chdir to ${wc_path}"
+				if [[ -n "${ESVN_USER}" ]]; then
+					${ESVN_UPDATE_CMD} ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" || die "${ESVN}: can't update ${wc_path} from ${repo_uri}."
+				else
+					${ESVN_UPDATE_CMD} ${options} || die "${ESVN}: can't update ${wc_path} from ${repo_uri}."
+				fi
+			fi
+
+			# export updated information for the working copy
+			subversion_wc_info "${repo_uri}" || die "${ESVN}: unknown problem occurred while accessing working copy."
+		fi
+	fi
+
+	if [[ -n "${ESVN_UMASK}" ]]; then
+		eumask_pop
+	fi
+
+	einfo " working copy: ${wc_path}"
+
+	if ! has "export" ${ESVN_RESTRICT}; then
+		cd "${wc_path}" || die "${ESVN}: can't chdir to ${wc_path}"
+
+		local S="${S}/${S_dest}"
+		mkdir -p "${S}"
+
+		# export to the ${WORKDIR}
+		#* "svn export" has a bug. see http://bugs.gentoo.org/119236
+		#* svn export . "${S}" || die "${ESVN}: can't export to ${S}."
+		rsync -rlpgo --exclude=".svn/" . "${S}" || die "${ESVN}: can't export to ${S}."
+	fi
+
+	popd >/dev/null
+	echo
+}
+
+# @FUNCTION: subversion_bootstrap
+# @DESCRIPTION:
+# Apply patches in ${ESVN_PATCHES} and run ${ESVN_BOOTSTRAP} if specified.
+subversion_bootstrap() {
+	if has "export" ${ESVN_RESTRICT}; then
+		return
+	fi
+
+	cd "${S}"
+
+	if [[ -n ${ESVN_PATCHES} ]]; then
+		local patch fpatch
+		einfo "apply patches -->"
+		# Entries may be globs; try ${PWD} first, then fall back to
+		# ${FILESDIR}.  A patch that matches in neither location is fatal.
+		for patch in ${ESVN_PATCHES}; do
+			if [[ -f ${patch} ]]; then
+				epatch "${patch}"
+			else
+				for fpatch in ${FILESDIR}/${patch}; do
+					if [[ -f ${fpatch} ]]; then
+						epatch "${fpatch}"
+					else
+						die "${ESVN}: ${patch} not found"
+					fi
+				done
+			fi
+		done
+		echo
+	fi
+
+	if [[ -n ${ESVN_BOOTSTRAP} ]]; then
+		einfo "begin bootstrap -->"
+		# NOTE: eval'ing ESVN_BOOTSTRAP is acceptable only because it is
+		# set by the ebuild itself, never from untrusted input.
+		if [[ -f ${ESVN_BOOTSTRAP} && -x ${ESVN_BOOTSTRAP} ]]; then
+			einfo " bootstrap with a file: ${ESVN_BOOTSTRAP}"
+			eval "./${ESVN_BOOTSTRAP}" || die "${ESVN}: can't execute ESVN_BOOTSTRAP."
+		else
+			einfo " bootstrap with command: ${ESVN_BOOTSTRAP}"
+			eval "${ESVN_BOOTSTRAP}" || die "${ESVN}: can't eval ESVN_BOOTSTRAP."
+		fi
+	fi
+}
+
+# @FUNCTION: subversion_wc_info
+# @USAGE: [repo_uri]
+# @RETURN: ESVN_WC_URL, ESVN_WC_ROOT, ESVN_WC_UUID, ESVN_WC_REVISION and ESVN_WC_PATH
+# @DESCRIPTION:
+# Get svn info for the specified repo_uri. The default repo_uri is ESVN_REPO_URI.
+#
+# The working copy information on the specified repository URI are set to
+# ESVN_WC_* variables.
+subversion_wc_info() {
+	local repo_uri="$(subversion__get_repository_uri "${1:-${ESVN_REPO_URI}}")"
+	local wc_path="$(subversion__get_wc_path "${repo_uri}")"
+
+	debug-print "${FUNCNAME}: repo_uri = ${repo_uri}"
+	debug-print "${FUNCNAME}: wc_path = ${wc_path}"
+
+	# No working copy checked out yet -- nothing to report.
+	if [[ ! -d ${wc_path} ]]; then
+		return 1
+	fi
+
+	export ESVN_WC_URL="$(subversion__svn_info "${wc_path}" "URL")"
+	export ESVN_WC_ROOT="$(subversion__svn_info "${wc_path}" "Repository Root")"
+	export ESVN_WC_UUID="$(subversion__svn_info "${wc_path}" "Repository UUID")"
+	export ESVN_WC_REVISION="$(subversion__svn_info "${wc_path}" "Revision")"
+	export ESVN_WC_PATH="${wc_path}"
+}
+
+# @FUNCTION: subversion_src_unpack
+# @DESCRIPTION:
+# Default src_unpack. Fetch and, in older EAPIs, bootstrap.
+subversion_src_unpack() {
+	subversion_fetch || die "${ESVN}: unknown problem occurred in subversion_fetch."
+	# EAPI 0/1 have no src_prepare phase, so bootstrap here instead.
+	if has "${EAPI:-0}" 0 1; then
+		subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
+	fi
+}
+
+# @FUNCTION: subversion_src_prepare
+# @DESCRIPTION:
+# Default src_prepare. Bootstrap (patching + optional bootstrap command).
+subversion_src_prepare() {
+	subversion_bootstrap || die "${ESVN}: unknown problem occurred in subversion_bootstrap."
+}
+
+# @FUNCTION: subversion_pkg_preinst
+# @USAGE: [repo_uri]
+# @DESCRIPTION:
+# Log the svn revision of source code. Doing this in pkg_preinst because we
+# want the logs to stick around if packages are uninstalled without messing with
+# config protection.
+subversion_pkg_preinst() {
+	# EROOT appeared with prefix support; fall back to ROOT in old EAPIs.
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local pkgdate=$(date "+%Y%m%d %H:%M:%S")
+	# Logging is strictly opt-in via the user-set ESCM_LOGDIR.
+	if [[ -n ${ESCM_LOGDIR} ]]; then
+		local dir="${EROOT}/${ESCM_LOGDIR}/${CATEGORY}"
+		if [[ ! -d ${dir} ]]; then
+			mkdir -p "${dir}" || eerror "Failed to create '${dir}' for logging svn revision"
+		fi
+		local logmessage="svn: ${pkgdate} - ${PF}:${SLOT} was merged at revision ${ESVN_WC_REVISION}"
+		# Re-test the directory: mkdir above may have failed non-fatally.
+		if [[ -d ${dir} ]]; then
+			echo "${logmessage}" >>"${dir}/${PN}.log"
+		else
+			eerror "Could not log the message '${logmessage}' to '${dir}/${PN}.log'"
+		fi
+	fi
+}
+
+## -- Private Functions
+
+## -- subversion__svn_info() ------------------------------------------------- #
+#
+# param $1 - a target.
+# param $2 - a key name.
+#
+# Prints the value of the given 'svn info' field for the target.
+# NOTE(review): ${options} is never set in this function -- it leaks in from
+# subversion_fetch's scope when called from there and expands to nothing
+# otherwise; presumably intentional, but worth confirming.
+subversion__svn_info() {
+	local target="${1}"
+	local key="${2}"
+
+	# LC_ALL=C keeps svn's field names untranslated for the grep below.
+	env LC_ALL=C svn info ${options} --username "${ESVN_USER}" --password "${ESVN_PASSWORD}" "${target}" \
+		| grep -i "^${key}" \
+		| cut -d" " -f2-
+}
+
+## -- subversion__get_repository_uri() --------------------------------------- #
+#
+# param $1 - a repository URI.
+# Normalizes the URI: dies if empty, strips one trailing slash and any
+# peg-revision (@REV) suffix, then echoes the result.
+subversion__get_repository_uri() {
+	local uri=${1}
+
+	debug-print "${FUNCNAME}: repo_uri = ${uri}"
+	[[ -n ${uri} ]] || die "${ESVN}: ESVN_REPO_URI (or specified URI) is empty."
+	# strip a single trailing slash, if present
+	[[ ${uri} == */ ]] && uri=${uri%/}
+	# drop the peg revision suffix, if any
+	uri=${uri%@*}
+
+	echo "${uri}"
+}
+
+## -- subversion__get_wc_path() ---------------------------------------------- #
+#
+# param $1 - a repository URI.
+# Echoes the local working-copy path for the given repository URI.
+subversion__get_wc_path() {
+	local uri
+	uri="$(subversion__get_repository_uri "${1}")"
+
+	debug-print "${FUNCNAME}: repo_uri = ${uri}"
+
+	# store layout: ${ESVN_STORE_DIR}/<project>/<last URI component>
+	echo "${ESVN_STORE_DIR}/${ESVN_PROJECT}/${uri##*/}"
+}
+
+## -- subversion__get_peg_revision() ----------------------------------------- #
+#
+# param $1 - a repository URI.
+# Echoes the peg revision (the part after the last '@'), or nothing if the
+# URI carries no peg revision.
+subversion__get_peg_revision() {
+	local uri=${1}
+	local rev=""
+
+	debug-print "${FUNCNAME}: repo_uri = ${uri}"
+	case ${uri} in
+	*@*)
+		rev=${uri##*@}
+		debug-print "${FUNCNAME}: peg_rev = ${rev}"
+		;;
+	*)
+		debug-print "${FUNCNAME}: repo_uri does not have a peg revision."
+		;;
+	esac
+
+	echo "${rev}"
+}
diff --git a/eclass/sword-module.eclass b/eclass/sword-module.eclass
new file mode 100644
index 000000000000..2af91a40d546
--- /dev/null
+++ b/eclass/sword-module.eclass
@@ -0,0 +1,34 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+#
+# eclass to simplify installation of Sword modules
+# Bugs to theology@gentoo.org
+#
+
+HOMEPAGE="http://www.crosswire.org/sword/modules/"
+
+# Sword packages are generally released as FooBar.zip in their 'rawzip' form
+# The files are also unversioned, so the packager will need to rename the
+# original file to something else and host it somewhere to avoid breaking
+# the digest when new versions are released.
+
+SRC_URI="mirror://gentoo/${SWORD_MODULE}-${PV}.zip"
+
+SLOT="0"
+IUSE=""
+
+S="${WORKDIR}"
+
+RDEPEND="app-text/sword"
+DEPEND="app-arch/unzip"
+
+sword-module_src_install() {
+	# A 'rawzip' module unpacks to modules/ (data) and mods.d/ (conf);
+	# install both into the system-wide Sword tree.
+	insinto /usr/share/sword/modules
+	doins -r "${S}"/modules/*
+	insinto /usr/share/sword/mods.d
+	doins "${S}"/mods.d/*
+}
+
+EXPORT_FUNCTIONS src_install
diff --git a/eclass/systemd.eclass b/eclass/systemd.eclass
new file mode 100644
index 000000000000..278d319c7c25
--- /dev/null
+++ b/eclass/systemd.eclass
@@ -0,0 +1,387 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: systemd.eclass
+# @MAINTAINER:
+# systemd@gentoo.org
+# @BLURB: helper functions to install systemd units
+# @DESCRIPTION:
+# This eclass provides a set of functions to install unit files for
+# sys-apps/systemd within ebuilds.
+# @EXAMPLE:
+#
+# @CODE
+# inherit systemd
+#
+# src_configure() {
+# local myeconfargs=(
+# --enable-foo
+# --disable-bar
+# "$(systemd_with_unitdir)"
+# )
+#
+# econf "${myeconfargs[@]}"
+# }
+# @CODE
+
# toolchain-funcs provides tc-getPKG_CONFIG; eutils provides eqawarn.
inherit eutils toolchain-funcs

# Only EAPIs 0-5 are known to this eclass; refuse anything newer until
# the eclass has been reviewed for it.
case ${EAPI:-0} in
	0|1|2|3|4|5) ;;
	*) die "${ECLASS}.eclass API in EAPI ${EAPI} not yet established."
esac

# pkg-config is queried at build time for systemd's install directories.
DEPEND="virtual/pkgconfig"
+
# @FUNCTION: _systemd_get_unitdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed unitdir.
_systemd_get_unitdir() {
	local pkg_config=$(tc-getPKG_CONFIG)

	# Prefer the directory advertised by systemd's pkg-config file;
	# fall back to the upstream default when systemd is not installed.
	if ${pkg_config} --exists systemd; then
		${pkg_config} --variable=systemdsystemunitdir systemd
	else
		echo /usr/lib/systemd/system
	fi
}
+
# @FUNCTION: systemd_get_unitdir
# @DESCRIPTION:
# Output the path for the systemd unit directory (not including ${D}).
# This function always succeeds, even if systemd is not installed.
systemd_get_unitdir() {
	# EAPIs 0-2 predate prefix support; clear EPREFIX there on non-prefix
	# systems so the concatenation below stays correct.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	debug-print-function ${FUNCNAME} "${@}"

	echo "${EPREFIX}$(_systemd_get_unitdir)"
}
+
# @FUNCTION: _systemd_get_userunitdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed userunitdir.
_systemd_get_userunitdir() {
	local pkg_config=$(tc-getPKG_CONFIG)

	# Ask systemd's pkg-config file when available, otherwise use the
	# upstream default location.
	if ${pkg_config} --exists systemd; then
		${pkg_config} --variable=systemduserunitdir systemd
	else
		echo /usr/lib/systemd/user
	fi
}
+
# @FUNCTION: systemd_get_userunitdir
# @DESCRIPTION:
# Output the path for the systemd user unit directory (not including
# ${D}). This function always succeeds, even if systemd is not
# installed.
systemd_get_userunitdir() {
	# EAPIs 0-2 predate prefix support; clear EPREFIX there on non-prefix
	# systems so the concatenation below stays correct.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	debug-print-function ${FUNCNAME} "${@}"

	echo "${EPREFIX}$(_systemd_get_userunitdir)"
}
+
# @FUNCTION: _systemd_get_utildir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed utildir.
_systemd_get_utildir() {
	local pkg_config=$(tc-getPKG_CONFIG)

	# Ask systemd's pkg-config file when available, otherwise use the
	# upstream default location.
	if ${pkg_config} --exists systemd; then
		${pkg_config} --variable=systemdutildir systemd
	else
		echo /usr/lib/systemd
	fi
}
+
# @FUNCTION: systemd_get_utildir
# @DESCRIPTION:
# Output the path for the systemd utility directory (not including
# ${D}). This function always succeeds, even if systemd is not
# installed.
systemd_get_utildir() {
	# EAPIs 0-2 predate prefix support; clear EPREFIX there on non-prefix
	# systems so the concatenation below stays correct.
	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
	debug-print-function ${FUNCNAME} "${@}"

	echo "${EPREFIX}$(_systemd_get_utildir)"
}
+
# @FUNCTION: systemd_dounit
# @USAGE: <unit>...
# @DESCRIPTION:
# Install systemd unit(s). Uses doins, thus it is fatal in EAPI 4
# and non-fatal in earlier EAPIs.
systemd_dounit() {
	debug-print-function ${FUNCNAME} "${@}"

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "$(_systemd_get_unitdir)"
		doins "${@}"
	)
}
+
# @FUNCTION: systemd_newunit
# @USAGE: <old-name> <new-name>
# @DESCRIPTION:
# Install systemd unit with a new name. Uses newins, thus it is fatal
# in EAPI 4 and non-fatal in earlier EAPIs.
systemd_newunit() {
	debug-print-function ${FUNCNAME} "${@}"

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "$(_systemd_get_unitdir)"
		newins "${@}"
	)
}
+
# @FUNCTION: systemd_douserunit
# @USAGE: <unit>...
# @DESCRIPTION:
# Install systemd user unit(s). Uses doins, thus it is fatal in EAPI 4
# and non-fatal in earlier EAPIs.
systemd_douserunit() {
	debug-print-function ${FUNCNAME} "${@}"

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "$(_systemd_get_userunitdir)"
		doins "${@}"
	)
}
+
# @FUNCTION: systemd_newuserunit
# @USAGE: <old-name> <new-name>
# @DESCRIPTION:
# Install systemd user unit with a new name. Uses newins, thus it
# is fatal in EAPI 4 and non-fatal in earlier EAPIs.
systemd_newuserunit() {
	debug-print-function ${FUNCNAME} "${@}"

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "$(_systemd_get_userunitdir)"
		newins "${@}"
	)
}
+
# @FUNCTION: systemd_install_serviced
# @USAGE: <conf-file> [<service.d>]
# @DESCRIPTION:
# Install the file <conf-file> as service.d/00gentoo.conf template.
# The <service.d> argument specifies the configured service name.
# If not specified, the configuration file name will be used with .conf
# suffix stripped (e.g. foo.service.conf -> foo.service).
systemd_install_serviced() {
	debug-print-function ${FUNCNAME} "${@}"

	local conf_file=${1}
	local unit=${2}

	[[ ${conf_file} ]] || die "No file specified"

	if [[ ! ${unit} ]]; then
		# derive the unit name from the file name:
		# foo.service.conf -> foo.service
		[[ ${conf_file} == *.conf ]] || die "Source file needs .conf suffix"
		unit=${conf_file##*/}
		unit=${unit%.conf}
	fi
	# avoid potentially common mistake (foo.service.d passed instead
	# of foo.service)
	[[ ${unit} == *.d ]] && die "Service must not have .d suffix"

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "/etc/systemd/system/${unit}.d"
		newins "${conf_file}" 00gentoo.conf
	)
}
+
# @FUNCTION: systemd_dotmpfilesd
# @USAGE: <tmpfilesd>...
# @DESCRIPTION:
# Install systemd tmpfiles.d files. Uses doins, thus it is fatal
# in EAPI 4 and non-fatal in earlier EAPIs.
systemd_dotmpfilesd() {
	debug-print-function ${FUNCNAME} "${@}"

	local conf
	for conf in "${@}"; do
		# snippets without a .conf suffix are ignored by systemd
		[[ ${conf} == *.conf ]] \
			|| die 'tmpfiles.d files need to have .conf suffix.'
	done

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto /usr/lib/tmpfiles.d/
		doins "${@}"
	)
}
+
# @FUNCTION: systemd_newtmpfilesd
# @USAGE: <old-name> <new-name>.conf
# @DESCRIPTION:
# Install systemd tmpfiles.d file under a new name. Uses newins, thus it
# is fatal in EAPI 4 and non-fatal in earlier EAPIs.
systemd_newtmpfilesd() {
	debug-print-function ${FUNCNAME} "${@}"

	# the installed name must keep the .conf suffix to be picked up
	if [[ ${2} != *.conf ]]; then
		die 'tmpfiles.d files need to have .conf suffix.'
	fi

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto /usr/lib/tmpfiles.d/
		newins "${@}"
	)
}
+
# @FUNCTION: systemd_enable_service
# @USAGE: <target> <service>
# @DESCRIPTION:
# Enable service in desired target, e.g. install a symlink for it.
# Uses dosym, thus it is fatal in EAPI 4 and non-fatal in earlier
# EAPIs.
systemd_enable_service() {
	debug-print-function ${FUNCNAME} "${@}"

	[[ ${#} -eq 2 ]] || die "Synopsis: systemd_enable_service target service"

	local target=${1}
	local service=${2}
	local ud=$(_systemd_get_unitdir)
	# the symlink is named after the unit file, sans any directory prefix
	local destname=${service##*/}

	# mirror what `systemctl enable` would do: a symlink in <target>.wants/
	dodir "${ud}"/"${target}".wants && \
	dosym ../"${service}" "${ud}"/"${target}".wants/"${destname}"
}
+
# @FUNCTION: systemd_enable_ntpunit
# @USAGE: <NN-name> <service>...
# @DESCRIPTION:
# Add an NTP service provider to the list of implementations
# in timedated. <NN-name> defines the newly-created ntp-units.d priority
# and name, while the remaining arguments list service units that will
# be added to that file.
#
# Uses doins, thus it is fatal in EAPI 4 and non-fatal in earlier
# EAPIs.
#
# Doc: http://www.freedesktop.org/wiki/Software/systemd/timedated/
systemd_enable_ntpunit() {
	debug-print-function ${FUNCNAME} "${@}"
	if [[ ${#} -lt 2 ]]; then
		die "Usage: systemd_enable_ntpunit <NN-name> <service>..."
	fi

	local ntpunit_name=${1}
	local services=( "${@:2}" )

	# enforce the NN-name convention required by ntp-units.d ordering
	if [[ ${ntpunit_name} != [0-9][0-9]-* ]]; then
		die "ntpunit.d file must be named NN-name where NN are digits."
	elif [[ ${ntpunit_name} == *.list ]]; then
		die "The .list suffix is appended implicitly to ntpunit.d name."
	fi

	# every listed unit must already be staged into the image (${D})
	local unitdir=$(systemd_get_unitdir)
	local s
	for s in "${services[@]}"; do
		if [[ ! -f "${D}${unitdir}/${s}" ]]; then
			die "ntp-units.d provider ${s} not installed (yet?) in \${D}."
		fi
		# build the .list file in ${T}, one unit name per line
		echo "${s}" >> "${T}"/${ntpunit_name}.list
	done

	(
		# subshell keeps the insinto change from leaking to the caller
		insinto "$(_systemd_get_utildir)"/ntp-units.d
		doins "${T}"/${ntpunit_name}.list
	)
	local ret=${?}

	# the temp file must not survive into the next invocation
	rm "${T}"/${ntpunit_name}.list || die

	return ${ret}
}
+
# @FUNCTION: systemd_with_unitdir
# @USAGE: [<configure-option-name>]
# @DESCRIPTION:
# Output '--with-systemdsystemunitdir' as expected by systemd-aware configure
# scripts. This function always succeeds. Its output may be quoted in order
# to preserve whitespace in paths. systemd_to_myeconfargs() is preferred over
# this function.
#
# If upstream does use invalid configure option to handle installing systemd
# units (e.g. `--with-systemdunitdir'), you can pass the 'suffix' as an optional
# argument to this function (`$(systemd_with_unitdir systemdunitdir)'). Please
# remember to report a bug upstream as well.
systemd_with_unitdir() {
	debug-print-function ${FUNCNAME} "${@}"

	# allow overriding the option name for packages using a non-standard one
	local opt_name=${1:-systemdsystemunitdir}

	printf -- '--with-%s=%s\n' "${opt_name}" "$(systemd_get_unitdir)"
}
+
# @FUNCTION: systemd_with_utildir
# @DESCRIPTION:
# Output '--with-systemdsystemutildir' as used by some packages to install
# systemd helpers. This function always succeeds. Its output may be quoted
# in order to preserve whitespace in paths.
systemd_with_utildir() {
	debug-print-function ${FUNCNAME} "${@}"

	printf -- '--with-systemdutildir=%s\n' "$(systemd_get_utildir)"
}
+
# @FUNCTION: systemd_to_myeconfargs
# @DESCRIPTION:
# Add '--with-systemdsystemunitdir' as expected by systemd-aware configure
# scripts to the myeconfargs variable used by autotools-utils eclass. Handles
# quoting automatically.
systemd_to_myeconfargs() {
	debug-print-function ${FUNCNAME} "${@}"

	eqawarn 'systemd_to_myeconfargs() is deprecated and will be removed on 2013-10-11.'
	eqawarn 'Please use $(systemd_with_unitdir) instead.'

	# append to the array consumed by autotools-utils.eclass
	myeconfargs+=(
		--with-systemdsystemunitdir="$(systemd_get_unitdir)"
	)
}
+
# @FUNCTION: systemd_update_catalog
# @DESCRIPTION:
# Update the journald catalog. This needs to be called after installing
# or removing catalog files.
#
# If systemd is not installed, no operation will be done. The catalog
# will be (re)built once systemd is installed.
#
# See: http://www.freedesktop.org/wiki/Software/systemd/catalog
systemd_update_catalog() {
	debug-print-function ${FUNCNAME} "${@}"

	# Make sure to work on the correct system.

	local journalctl=${EPREFIX}/usr/bin/journalctl
	if [[ -x ${journalctl} ]]; then
		ebegin "Updating systemd journal catalogs"
		# Bug fix: invoke the EPREFIX-qualified binary that was just
		# tested for, not whatever 'journalctl' PATH happens to find.
		"${journalctl}" --update-catalog
		eend $?
	else
		debug-print "${FUNCNAME}: journalctl not found."
	fi
}
+
# @FUNCTION: systemd_is_booted
# @DESCRIPTION:
# Check whether the system was booted using systemd.
#
# This should be used purely for informational purposes, e.g. warning
# user that he needs to use systemd. Installed files or application
# behavior *must not* rely on this. Please remember to check MERGE_TYPE
# to not trigger the check on binary package build hosts!
#
# Returns 0 if systemd is used to boot the system, 1 otherwise.
#
# See: man sd_booted
systemd_is_booted() {
	debug-print-function ${FUNCNAME} "${@}"

	# same check sd_booted(3) performs
	local ret=0
	[[ -d /run/systemd/system ]] || ret=${?}

	debug-print "${FUNCNAME}: [[ -d /run/systemd/system ]] -> ${ret}"
	return ${ret}
}
diff --git a/eclass/tests/autotools_eaclocal_amflags.sh b/eclass/tests/autotools_eaclocal_amflags.sh
new file mode 100755
index 000000000000..e164796f0916
--- /dev/null
+++ b/eclass/tests/autotools_eaclocal_amflags.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit autotools
+
# Run eaclocal_amflags against a one-line Makefile.am and compare its
# output with the expected value.
# $1 - test label, $2 - ACLOCAL_AMFLAGS value (%b-expanded by printf),
# $3 - expected output (defaults to $2).
test-it() {
	tbegin "eaclocal_amflags $1: $2"
	printf "ACLOCAL_AMFLAGS = %b\n" "$2" > Makefile.am
	local flags=$(eaclocal_amflags) exp=${3:-$2}
	[[ "${flags}" == "${exp}" ]]
	if ! tend $? ; then
		printf '### INPUT:\n%s\n' "$2"
		printf '### FILE:\n%s\n' "$(<Makefile.am)"
		printf '### EXPECTED:\n%s\n' "${exp}"
		printf '### ACTUAL:\n%s\n' "${flags}"
	fi
	rm Makefile.am
}
+
+test-it simple "-Im4"
+test-it simple "-I m4 -I lakdjfladsfj /////"
+
+test-it shell-exec '`echo hi`' "hi"
+test-it shell-exec '`echo {0..3}`' "0 1 2 3"
+
+test-it multiline '-I oneline \\\n\t-I twoline' "-I oneline -I twoline"
+
+texit
diff --git a/eclass/tests/distutils-r1.sh b/eclass/tests/distutils-r1.sh
new file mode 100755
index 000000000000..c5501bdcde74
--- /dev/null
+++ b/eclass/tests/distutils-r1.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+EAPI=5
+PYTHON_COMPAT=( python2_7 )
+source tests-common.sh
+
# Die if a function with the given name (or its _all variant) is
# defined; these names are reserved for distutils-r1 sub-phases.
# $1 - base phase function name.
test-phase_name_free() {
	local ph=${1}

	if declare -f "${ph}"; then
		die "${ph} function declared while name reserved for phase!"
	fi
	if declare -f "${ph}_all"; then
		die "${ph}_all function declared while name reserved for phase!"
	fi
}
+
+inherit distutils-r1
+
+tbegin "sane function names"
+
+test-phase_name_free python_prepare
+test-phase_name_free python_configure
+test-phase_name_free python_compile
+test-phase_name_free python_test
+test-phase_name_free python_install
+
+tend ${failed}
+
+texit
diff --git a/eclass/tests/eutils_eshopts.sh b/eclass/tests/eutils_eshopts.sh
new file mode 100755
index 000000000000..65dc89c9f258
--- /dev/null
+++ b/eclass/tests/eutils_eshopts.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+
+source tests-common.sh
+
+inherit eutils
+
# Verify that eshopts_push applies the requested shopt change and that
# eshopts_pop restores the previous shell option state exactly.
test-it() {
	local s0 s1 s2

	tbegin "push/pop '$*'"
	s0=$(shopt -p)	# snapshot before push
	t eshopts_push $*
	s1=$(shopt -p)	# snapshot with the change applied
	t eshopts_pop
	s2=$(shopt -p)	# snapshot after pop; must match s0
	[[ ${s0} == "${s2}" ]] && \
	[[ ${s1} == *"shopt $*"* ]]
	tend $?
}
+
+# should handle bug #395025
+for arg in nullglob dotglob extglob ; do
+ for flag in s u ; do
+ test-it -${flag} ${arg}
+ done
+done
+
+tbegin "multi push/pop"
+s0=$(shopt -p)
+t eshopts_push -s dotglob
+t eshopts_push -u dotglob
+t eshopts_push -s extglob
+t eshopts_push -u dotglob
+t eshopts_push -s dotglob
+t eshopts_pop
+t eshopts_pop
+t eshopts_pop
+t eshopts_pop
+t eshopts_pop
+s1=$(shopt -p)
+[[ ${s0} == "${s1}" ]]
+tend $?
+
+texit
diff --git a/eclass/tests/eutils_estack.sh b/eclass/tests/eutils_estack.sh
new file mode 100755
index 000000000000..c029c9b1dae3
--- /dev/null
+++ b/eclass/tests/eutils_estack.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit eutils
+
+tbegin "initial stack state"
+estack_pop teststack
+# Should be empty and thus return 1
+[[ $? -eq 1 ]]
+tend $?
+
+tbegin "simple push/pop"
+estack_push ttt 1
+pu=$?
+estack_pop ttt
+po=$?
+[[ ${pu}${po} == "00" ]]
+tend $?
+
+tbegin "simple push/pop var"
+estack_push xxx "boo ga boo"
+pu=$?
+estack_pop xxx i
+po=$?
+[[ ${pu}${po} == "00" ]] && [[ ${i} == "boo ga boo" ]]
+tend $?
+
+tbegin "multi push/pop"
+estack_push yyy {1..10}
+pu=$?
+i=0
+while estack_pop yyy ; do
+ : $(( i++ ))
+done
+[[ ${pu} -eq 0 && ${i} -eq 10 ]]
+tend $?
+
+tbegin "umask push/pop"
+u0=$(umask)
+eumask_push 0000
+pu=$?
+u1=$(umask)
+eumask_pop
+po=$?
+u2=$(umask)
+[[ ${pu}${po}:${u0}:${u1}:${u2} == "00:${u0}:0000:${u0}" ]]
+tend $?
+
+texit
diff --git a/eclass/tests/eutils_evar.sh b/eclass/tests/eutils_evar.sh
new file mode 100755
index 000000000000..1e53a873e901
--- /dev/null
+++ b/eclass/tests/eutils_evar.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit eutils
+
+tbegin "simple push/pop"
+VAR=1
+evar_push VAR
+pu=$?
+VAR=2
+evar_pop
+po=$?
+[[ ${pu}${po}${VAR} == "001" ]]
+tend $?
+
+tbegin "unset push/pop"
+unset VAR
+evar_push VAR
+pu=$?
+VAR=2
+evar_pop
+po=$?
+[[ ${pu}${po}${VAR+set} == "00" ]]
+tend $?
+
+tbegin "empty push/pop"
+VAR=
+evar_push VAR
+pu=$?
+VAR=2
+evar_pop
+po=$?
+[[ ${pu}${po}${VAR+set}${VAR} == "00set" ]]
+tend $?
+
+tbegin "export push/pop"
+export VAR=exported
+evar_push VAR
+pu=$?
+VAR=2
+evar_pop
+po=$?
+var=$(bash -c 'echo ${VAR}')
+[[ ${pu}${po}${var} == "00exported" ]]
+tend $?
+
+tbegin "unexport push/pop"
+unset VAR
+VAR=not-exported
+evar_push VAR
+pu=$?
+VAR=2
+evar_pop
+po=$?
+var=$(bash -c 'echo ${VAR+set}')
+[[ ${pu}${po}${VAR}${var} == "00not-exported" ]]
+tend $?
+
+tbegin "multi push/pop"
+A=a B=b C=c
+evar_push A B C
+pu=$?
+A=A B=B C=C
+evar_pop 1
+po1=$?
+[[ ${A}${B}${C} == "ABc" ]]
+po2=$?
+evar_pop 2
+po3=$?
+var=$(bash -c 'echo ${VAR+set}')
+[[ ${pu}${po1}${po2}${po3}${A}${B}${C} == "0000abc" ]]
+tend $?
+
+tbegin "simple push_set/pop"
+VAR=1
+evar_push_set VAR 2
+pu=$?
+[[ ${VAR} == "2" ]]
+po1=$?
+evar_pop
+po2=$?
+[[ ${pu}${po1}${po2}${VAR} == "0001" ]]
+tend $?
+
+tbegin "unset push_set/pop"
+VAR=1
+evar_push_set VAR
+pu=$?
+[[ ${VAR+set} != "set" ]]
+po1=$?
+evar_pop
+po2=$?
+[[ ${pu}${po1}${po2}${VAR} == "0001" ]]
+tend $?
+
+texit
diff --git a/eclass/tests/eutils_path_exists.sh b/eclass/tests/eutils_path_exists.sh
new file mode 100755
index 000000000000..a3d1f993f7d1
--- /dev/null
+++ b/eclass/tests/eutils_path_exists.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit eutils
+
# Check that path_exists exits with the expected status.
# $1 - expected exit code, rest - arguments forwarded to path_exists.
test-path_exists() {
	local exp=$1; shift
	tbegin "path_exists($*) == ${exp}"
	path_exists "$@"
	[[ ${exp} -eq $? ]]
	tend $?
}
+
+test-path_exists 1
+test-path_exists 1 -a
+test-path_exists 1 -o
+
+good="/ . tests-common.sh /bin/bash"
+test-path_exists 0 ${good}
+test-path_exists 0 -a ${good}
+test-path_exists 0 -o ${good}
+
+bad="/asjdkfljasdlfkja jlakjdsflkasjdflkasdjflkasdjflaskdjf"
+test-path_exists 1 ${bad}
+test-path_exists 1 -a ${bad}
+test-path_exists 1 -o ${bad}
+
+test-path_exists 1 ${good} ${bad}
+test-path_exists 1 -a ${good} ${bad}
+test-path_exists 0 -o ${good} ${bad}
+
+texit
diff --git a/eclass/tests/flag-o-matic.sh b/eclass/tests/flag-o-matic.sh
new file mode 100755
index 000000000000..762a5f74d94d
--- /dev/null
+++ b/eclass/tests/flag-o-matic.sh
@@ -0,0 +1,149 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit flag-o-matic
+
+CFLAGS="-a -b -c=1"
+CXXFLAGS="-x -y -z=2"
+LDFLAGS="-l -m -n=3"
# tend wrapper that, on failure, reports the current value of every
# *FLAGS variable to ease debugging. Must be called right after the
# tested command so that $? is still its exit status.
ftend() {
	local ret=$?
	local msg="Failed; flags are:"
	local flag
	for flag in $(all-flag-vars) ; do
		msg+=$'\n\t'"${flag}=${!flag}"
	done
	tend ${ret} "${msg}"
}
+
+tbegin "is-flag"
+! (is-flag 1 2 3) 2>/dev/null
+ftend
+
+tbegin "is-ldflag"
+! (is-ldflag 1 2 3) 2>/dev/null
+ftend
+
+while read exp flag ; do
+ [[ -z ${exp}${flag} ]] && continue
+
+ tbegin "is-flagq ${flag}"
+ is-flagq ${flag}
+ [[ ${exp} -eq $? ]]
+ ftend
+done <<<"
+ 1 -L
+ 0 -a
+ 0 -x
+"
+
+while read exp flag ; do
+ [[ -z ${exp}${flag} ]] && continue
+
+ tbegin "is-ldflagq ${flag}"
+ is-ldflagq "${flag}"
+ [[ ${exp} -eq $? ]]
+ ftend
+done <<<"
+ 1 -a
+ 0 -n=*
+ 1 -n
+"
+
+tbegin "strip-unsupported-flags"
+strip-unsupported-flags
+[[ ${CFLAGS} == "" ]] && [[ ${CXXFLAGS} == "-z=2" ]]
+ftend
+
+for var in $(all-flag-vars) ; do
+ eval ${var}=\"-filter -filter-glob -foo-${var%FLAGS}\"
+done
+
+tbegin "filter-flags basic"
+filter-flags -filter
+(
+for var in $(all-flag-vars) ; do
+ val=${!var}
+ [[ ${val} == "-filter-glob -foo-${var%FLAGS}" ]] || exit 1
+done
+)
+ftend
+
+tbegin "filter-flags glob"
+filter-flags '-filter-*'
+(
+for var in $(all-flag-vars) ; do
+ val=${!var}
+ [[ ${val} == "-foo-${var%FLAGS}" ]] || exit 1
+done
+)
+ftend
+
+tbegin "strip-flags basic"
+CXXFLAGS+=" -O999 "
+strip-flags
+[[ -z ${CFLAGS}${LDFLAGS}${CPPFLAGS} && ${CXXFLAGS} == "-O2" ]]
+ftend
+
+tbegin "replace-flags basic"
+CFLAGS="-O0 -foo"
+replace-flags -O0 -O1
+[[ ${CFLAGS} == "-O1 -foo" ]]
+ftend
+
+tbegin "replace-flags glob"
+CXXFLAGS="-O0 -mcpu=bad -cow"
+replace-flags '-mcpu=*' -mcpu=good
+[[ ${CXXFLAGS} == "-O0 -mcpu=good -cow" ]]
+ftend
+
+tbegin "append-cflags basic"
+CFLAGS=
+append-cflags -O0
+[[ ${CFLAGS} == " -O0" ]]
+ftend
+
+tbegin "append-cflags -DFOO='a b c'"
+CFLAGS=
+append-cflags '-DFOO="a b c"'
+[[ ${CFLAGS} == ' -DFOO="a b c"' ]]
+ftend
+
+tbegin "raw-ldflags"
+LDFLAGS='-Wl,-O1 -Wl,--as-needed -Wl,-z,now -flto'
+LDFLAGS=$(raw-ldflags)
+[[ ${LDFLAGS} == '-O1 --as-needed -z now' ]]
+ftend
+
+tbegin "test-flags-CC (valid flags)"
+out=$(test-flags-CC -O3)
+[[ $? -eq 0 && ${out} == "-O3" ]]
+ftend
+
+tbegin "test-flags-CC (invalid flags)"
+out=$(test-flags-CC -finvalid-flag)
+[[ $? -ne 0 && -z ${out} ]]
+ftend
+
+if type -P clang >/dev/null ; then
+tbegin "test-flags-CC (valid flags w/clang)"
+out=$(CC=clang test-flags-CC -O3)
+[[ $? -eq 0 && ${out} == "-O3" ]]
+ftend
+
+tbegin "test-flags-CC (invalid flags w/clang)"
+out=$(CC=clang test-flags-CC -finvalid-flag)
+[[ $? -ne 0 && -z ${out} ]]
+ftend
+
+tbegin "test-flags-CC (gcc-valid but clang-invalid flags)"
+out=$(CC=clang test-flags-CC -finline-limit=1200)
+[[ $? -ne 0 && -z ${out} ]]
+ftend
+fi
+
+texit
diff --git a/eclass/tests/git-r3.sh b/eclass/tests/git-r3.sh
new file mode 100755
index 000000000000..3de6f059752f
--- /dev/null
+++ b/eclass/tests/git-r3.sh
@@ -0,0 +1,204 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit git-r3
+
+testdir=${pkg_root}/git
+mkdir "${testdir}" || die "unable to mkdir testdir"
+cd "${testdir}" || die "unable to cd to testdir"
+
+EGIT3_STORE_DIR=store
+mkdir "${EGIT3_STORE_DIR}" || die "unable to mkdir store"
+
# Check that file ${1} exists and that its content matches ${2}
# (unquoted, so ${2} may be a glob pattern). Prints a diagnostic via
# eerror and returns 1 on any mismatch.
test_file() {
	local path=${1}
	local want=${2}

	if [[ ! -f ${path} ]]; then
		eerror "${path} does not exist (not checked out?)"
		return 1
	fi

	local content=$(<"${path}")
	if [[ ${content} != ${want} ]]; then
		eerror "${path}, expected: ${want}, got: ${content}"
		return 1
	fi
	return 0
}
+
# Succeed only if file ${1} does not exist; report and return 1 when it
# unexpectedly does.
test_no_file() {
	local path=${1}

	[[ ! -f ${path} ]] && return 0

	eerror "${path} exists (wtf?!)"
	return 1
}
+
# Create a fresh two-commit repository and verify that git-r3 fetches
# both committed files.
test_repo_clean() {
	local P=${P}_${FUNCNAME#test_}

	(
		mkdir repo
		cd repo
		git init -q
		echo test > file
		git add file
		git commit -m 1 -q
		echo other-text > file2
		git add file2
		git commit -m 2 -q
	) || die "unable to prepare repo"

	# we need to use an array to preserve whitespace
	local EGIT_REPO_URI=(
		"ext::git daemon --export-all --base-path=. --inetd %G/repo"
	)

	tbegin "fetching from a simple repo"
	(
		git-r3_src_unpack
		test_file "${WORKDIR}/${P}/file" test && \
		test_file "${WORKDIR}/${P}/file2" other-text
	) &>fetch.log

	eend ${?} || cat fetch.log
}
+
# Revert the repository's first commit and verify the fetched tree
# reflects the removal of 'file' while keeping 'file2'.
test_repo_revert() {
	local P=${P}_${FUNCNAME#test_}

	(
		cd repo
		git revert -n HEAD^
		git commit -m r1 -q
	) || die "unable to prepare repo"

	# we need to use an array to preserve whitespace
	local EGIT_REPO_URI=(
		"ext::git daemon --export-all --base-path=. --inetd %G/repo"
	)

	tbegin "fetching revert"
	(
		git-r3_src_unpack
		test_no_file "${WORKDIR}/${P}/file" && \
		test_file "${WORKDIR}/${P}/file2" other-text
	) &>fetch.log

	eend ${?} || cat fetch.log
}
+
# Create a side branch with an extra file and verify that fetching with
# EGIT_BRANCH set checks out that branch's tree.
test_repo_branch() {
	local P=${P}_${FUNCNAME#test_}

	(
		cd repo
		git branch -q other-branch HEAD^
		git checkout -q other-branch
		echo one-more > file3
		git add file3
		git commit -m 3 -q
		git checkout -q master
	) || die "unable to prepare repo"

	# we need to use an array to preserve whitespace
	local EGIT_REPO_URI=(
		"ext::git daemon --export-all --base-path=. --inetd %G/repo"
	)
	local EGIT_BRANCH=other-branch

	tbegin "switching branches"
	(
		git-r3_src_unpack
		test_file "${WORKDIR}/${P}/file" test && \
		test_file "${WORKDIR}/${P}/file2" other-text && \
		test_file "${WORKDIR}/${P}/file3" one-more
	) &>fetch.log

	eend ${?} || cat fetch.log
}
+
# Create and merge a side branch on master, then verify the merge
# commit's tree is fetched correctly.
test_repo_merge() {
	local P=${P}_${FUNCNAME#test_}

	(
		cd repo
		git branch -q one-more-branch HEAD^
		git checkout -q one-more-branch
		echo foobarbaz > file3
		git add file3
		git commit -m 3b -q
		git checkout -q master
		git merge -m 4 -q one-more-branch
	) || die "unable to prepare repo"

	# we need to use an array to preserve whitespace
	local EGIT_REPO_URI=(
		"ext::git daemon --export-all --base-path=. --inetd %G/repo"
	)

	tbegin "fetching a merge commit"
	(
		git-r3_src_unpack
		test_no_file "${WORKDIR}/${P}/file" && \
		test_file "${WORKDIR}/${P}/file2" other-text && \
		test_file "${WORKDIR}/${P}/file3" foobarbaz
	) &>fetch.log

	eend ${?} || cat fetch.log
}
+
# Exercise fetching a revert of a merge commit: merge a branch, make an
# extra commit, then revert the merge and verify the final tree.
test_repo_revert_merge() {
	local P=${P}_${FUNCNAME#test_}

	(
		cd repo
		git branch -q to-be-reverted
		git checkout -q to-be-reverted
		echo trrm > file3
		git add file3
		git commit -m 5b -q
		git checkout -q master
		echo trrm > file2
		git add file2
		git commit -m 5 -q
		git merge -m 6 -q to-be-reverted
		echo trrm > file
		git add file
		git commit -m 7 -q
		# revert the merge commit (mainline parent 1)
		git revert -m 1 -n HEAD^
		git commit -m 7r -q
	) || die "unable to prepare repo"

	# we need to use an array to preserve whitespace
	local EGIT_REPO_URI=(
		"ext::git daemon --export-all --base-path=. --inetd %G/repo"
	)

	tbegin "fetching a revert of a merge commit"
	(
		git-r3_src_unpack
		test_file "${WORKDIR}/${P}/file" trrm && \
		test_file "${WORKDIR}/${P}/file2" trrm && \
		test_file "${WORKDIR}/${P}/file3" foobarbaz
	) &>fetch.log

	eend ${?} || cat fetch.log
}
+
+test_repo_clean
+test_repo_revert
+test_repo_branch
+test_repo_merge
+test_repo_revert_merge
+
+texit
diff --git a/eclass/tests/git-r3_GIT_DIR.sh b/eclass/tests/git-r3_GIT_DIR.sh
new file mode 100755
index 000000000000..38ed723c8a83
--- /dev/null
+++ b/eclass/tests/git-r3_GIT_DIR.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit git-r3
+
+testdir=${pkg_root}/git
+mkdir "${testdir}" || die "unable to mkdir testdir"
+cd "${testdir}" || die "unable to cd to testdir"
+
+EGIT3_STORE_DIR=store
+mkdir "${EGIT3_STORE_DIR}" || die "unable to mkdir store"
+
# Test cleaning up canonical repo URI
# $1 - repository URI, $2 - expected GIT_DIR path relative to the store
# (pattern match, unquoted RHS).
test_repouri() {
	local uri=${1}
	local expect=${2}
	local -x GIT_DIR

	tbegin "GIT_DIR for ${uri}"

	_git-r3_set_gitdir "${uri}" &>/dev/null
	# strip the store prefix so only the repo-derived part is compared
	local got=${GIT_DIR#${EGIT3_STORE_DIR}/}

	[[ ${expect} == ${got} ]]
	tend ${?} || eerror "Expected: ${expect}, got: ${got}"
}
+
+test_repouri git://git.overlays.gentoo.org/proj/portage.git proj_portage.git
+test_repouri https://git.overlays.gentoo.org/gitroot/proj/portage.git proj_portage.git
+test_repouri git+ssh://git@git.overlays.gentoo.org/proj/portage.git proj_portage.git
+
+test_repouri git://anongit.freedesktop.org/mesa/mesa mesa_mesa.git
+test_repouri ssh://git.freedesktop.org/git/mesa/mesa mesa_mesa.git
+test_repouri http://anongit.freedesktop.org/git/mesa/mesa.git mesa_mesa.git
+test_repouri http://cgit.freedesktop.org/mesa/mesa/ mesa_mesa.git
+
+test_repouri https://code.google.com/p/snakeoil/ snakeoil.git
+
+test_repouri git://git.code.sf.net/p/xournal/code xournal_code.git
+test_repouri http://git.code.sf.net/p/xournal/code xournal_code.git
+
+test_repouri git://git.gnome.org/glibmm glibmm.git
+test_repouri https://git.gnome.org/browse/glibmm glibmm.git
+test_repouri ssh://USERNAME@git.gnome.org/git/glibmm glibmm.git
+
+test_repouri git://git.kernel.org/pub/scm/git/git.git git_git.git
+test_repouri http://git.kernel.org/pub/scm/git/git.git git_git.git
+test_repouri https://git.kernel.org/pub/scm/git/git.git git_git.git
+test_repouri https://git.kernel.org/cgit/git/git.git/ git_git.git
+
+#test_repouri git@github.com:gentoo/identity.gentoo.org.git gentoo_identity.gentoo.org.git
+test_repouri https://github.com/gentoo/identity.gentoo.org.git gentoo_identity.gentoo.org.git
+
+#test_repouri git@bitbucket.org:mgorny/python-exec.git mgorny_python-exec.git
+test_repouri https://mgorny@bitbucket.org/mgorny/python-exec.git mgorny_python-exec.git
+
+texit
diff --git a/eclass/tests/git-r3_subrepos.sh b/eclass/tests/git-r3_subrepos.sh
new file mode 100755
index 000000000000..4a66a1c3707b
--- /dev/null
+++ b/eclass/tests/git-r3_subrepos.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit git-r3
+
# Test getting submodule URIs
# $1 - submodule URI (absolute or relative), $2... - expected resolved
# URIs, one per parent URI in the global 'repos' array.
test_subrepos() {
	local suburi=${1}
	local expect=( "${@:2}" )

	tbegin "subrepos for ${suburi} -> ${expect[0]}${expect[1]+...}"

	# _git-r3_set_subrepos fills the 'subrepos' array
	local subrepos
	_git-r3_set_subrepos "${suburi}" "${repos[@]}"

	[[ ${expect[@]} == ${subrepos[@]} ]]
	tend ${?} || eerror "Expected: ${expect[@]}, got: ${subrepos[@]}"
}
+
+# parent repos
+repos=( http://foohub/fooman/foo.git git://foohub/fooman/foo.git )
+
+# absolute URI
+test_subrepos http://foo/bar http://foo/bar
+test_subrepos /foo/bar /foo/bar
+
+# plain relative URI
+test_subrepos ./baz http://foohub/fooman/foo.git/baz git://foohub/fooman/foo.git/baz
+
+# backward relative URIs
+test_subrepos ../baz.git http://foohub/fooman/baz.git git://foohub/fooman/baz.git
+test_subrepos ../../bazman/baz.git http://foohub/bazman/baz.git git://foohub/bazman/baz.git
+
+texit
diff --git a/eclass/tests/linux-info_get_running_version.sh b/eclass/tests/linux-info_get_running_version.sh
new file mode 100755
index 000000000000..d5b36a5d8902
--- /dev/null
+++ b/eclass/tests/linux-info_get_running_version.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit linux-info
+
# Check that get_running_version splits a kernel release string into
# KV_MAJOR/KV_MINOR/KV_PATCH/KV_EXTRA as expected.
# $1 - simulated `uname -r` output, $2-$5 - expected components.
test_get_running_version() {
	local test_kv=$1 major=$2 minor=$3 patch=$4 extra=$5
	tbegin "get_running_version ${test_kv}"
	# shadow uname so linux-info sees our fake kernel version
	uname() { echo "${test_kv}" ; }
	ROOT=/:/:/:/: get_running_version
	local r=$?
	[[ ${r} -eq 0 &&
	${major} == "${KV_MAJOR}" &&
	${minor} == "${KV_MINOR}" &&
	${patch} == "${KV_PATCH}" &&
	${extra} == "${KV_EXTRA}" ]]
	tend $? "FAIL: {ret: ${r}==0} {major: ${major}==${KV_MAJOR}} {minor: ${minor}==${KV_MINOR}} {patch: ${patch}==${KV_PATCH}} {extra: ${extra}==${KV_EXTRA}}"
}
+
+tests=(
+ # KV_FULL MAJOR MINOR PATCH EXTRA
+ 1.2.3 1 2 3 ''
+ 1.2.3.4 1 2 3 .4
+ 1.2.3-ver+1.4 1 2 3 -ver+1.4
+ 1.2-kern.3 1 2 0 -kern.3
+ 1.2+kern.5 1 2 0 +kern.5
+ 1.2.3_blah 1 2 3 _blah
+ 3.2.1-zen-vs2.3.2.5+ 3 2 1 -zen-vs2.3.2.5+
+)
+
+for (( i = 0; i < ${#tests[@]}; i += 5 )) ; do
+ test_get_running_version "${tests[@]:i:5}"
+done
+
+texit
diff --git a/eclass/tests/multiprocessing.sh b/eclass/tests/multiprocessing.sh
new file mode 100755
index 000000000000..d20cb6ea08d1
--- /dev/null
+++ b/eclass/tests/multiprocessing.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit multiprocessing
+
+tbegin "simple"
+MAKEOPTS="-j1" multijob_init
+multijob_child_init ls -d / >/dev/null || die "fail!"
+multijob_finish
+tend $?
+
+tbegin "less simple"
+multijob_init -j3
+multijob_child_init true || die "fail!"
+multijob_child_init false || die "fail!"
+multijob_child_init true || die "fail!"
+multijob_finish
+tend $(( $? == 1 ? 0 : 1 ))
+
+tbegin "less less simple"
+multijob_init -j1
+multijob_child_init true || die "fail!"
+multijob_child_init false || die "fail!"
+multijob_child_init true && die "fail!"
+multijob_finish
+tend $?
+
+tbegin "less less less simple"
+multijob_init -j10
+multijob_child_init true || die "fail!"
+multijob_finish_one || die "fail!"
+multijob_child_init false || die "fail!"
+multijob_finish_one && die "fail!"
+multijob_child_init true || die "fail!"
+multijob_finish_one || die "fail!"
+multijob_finish
+tend $?
+
+texit
diff --git a/eclass/tests/multiprocessing_makeopts_jobs.sh b/eclass/tests/multiprocessing_makeopts_jobs.sh
new file mode 100755
index 000000000000..017d491156a0
--- /dev/null
+++ b/eclass/tests/multiprocessing_makeopts_jobs.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit multiprocessing
+
# Compare makeopts_jobs output for a MAKEOPTS string against the
# expected job count. $1 - expected value, rest - MAKEOPTS to parse.
test-makeopts_jobs() {
	local exp=$1; shift
	tbegin "makeopts_jobs($*) == ${exp}"
	local act=$(makeopts_jobs "$@")
	[[ ${act} == "${exp}" ]]
	tend $? "Got back: ${act}"
}
+
+tests=(
+ 999 "-j"
+ 999 "--jobs"
+ 1 ""
+ 1 "-l9 -w"
+ 1 "-l9 -w-j4"
+ 1 "-l9--jobs=3"
+ 1 "-l9--jobs=8"
+ 2 "-j2"
+ 3 "-j 3"
+ 4 "-l3 -j 4 -w"
+ 5 "--jobs=5"
+ 6 "--jobs 6"
+ 7 "-l3 --jobs 7 -w"
+ 4 "-j1 -j 2 --jobs 3 --jobs=4"
+ 8 " -j 8 "
+)
+for (( i = 0; i < ${#tests[@]}; i += 2 )) ; do
+ test-makeopts_jobs "${tests[i]}" "${tests[i+1]}"
+done
+
+texit
diff --git a/eclass/tests/python-utils-r1.sh b/eclass/tests/python-utils-r1.sh
new file mode 100755
index 000000000000..e49636a6111a
--- /dev/null
+++ b/eclass/tests/python-utils-r1.sh
@@ -0,0 +1,170 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+EAPI=5
+source tests-common.sh
+
+test_var() {
+ local var=${1}
+ local impl=${2}
+ local expect=${3}
+
+ tbegin "${var} for ${impl}"
+
+ local ${var}
+ python_export ${impl} ${var}
+ [[ ${!var} == ${expect} ]] || eerror "(${impl}: ${var}: ${!var} != ${expect}"
+
+ tend ${?}
+}
+
+test_is() {
+ local func=${1}
+ local expect=${2}
+
+ tbegin "${func} (expecting: ${expect})"
+
+ ${func}
+ [[ ${?} == ${expect} ]]
+
+ tend ${?}
+}
+
+test_fix_shebang() {
+ local from=${1}
+ local to=${2}
+ local expect=${3}
+ local args=( "${@:4}" )
+
+ tbegin "python_fix_shebang${args[@]+ ${args[*]}} from ${from} to ${to} (exp: ${expect})"
+
+ echo "${from}" > "${tmpfile}"
+ output=$( EPYTHON=${to} python_fix_shebang "${args[@]}" -q "${tmpfile}" 2>&1 )
+
+ if [[ ${?} != 0 ]]; then
+ if [[ ${expect} != FAIL ]]; then
+ echo "${output}"
+ tend 1
+ else
+ tend 0
+ fi
+ else
+ [[ $(<"${tmpfile}") == ${expect} ]] \
+ || eerror "${from} -> ${to}: $(<"${tmpfile}") != ${expect}"
+ tend ${?}
+ fi
+}
+
+tmpfile=$(mktemp)
+
+inherit python-utils-r1
+
+test_var EPYTHON python2_7 python2.7
+test_var PYTHON python2_7 /usr/bin/python2.7
+test_var PYTHON_SITEDIR python2_7 /usr/lib/python2.7/site-packages
+test_var PYTHON_INCLUDEDIR python2_7 /usr/include/python2.7
+test_var PYTHON_LIBPATH python2_7 /usr/lib/libpython2.7$(get_libname)
+test_var PYTHON_PKG_DEP python2_7 '*dev-lang/python*:2.7'
+test_var PYTHON_SCRIPTDIR python2_7 /usr/lib/python-exec/python2.7
+
+test_var EPYTHON python3_3 python3.3
+test_var PYTHON python3_3 /usr/bin/python3.3
+test_var PYTHON_SITEDIR python3_3 /usr/lib/python3.3/site-packages
+test_var PYTHON_INCLUDEDIR python3_3 /usr/include/python3.3
+test_var PYTHON_LIBPATH python3_3 /usr/lib/libpython3.3$(get_libname)
+test_var PYTHON_PKG_DEP python3_3 '*dev-lang/python*:3.3'
+test_var PYTHON_SCRIPTDIR python3_3 /usr/lib/python-exec/python3.3
+
+test_var EPYTHON jython2_7 jython2.7
+test_var PYTHON jython2_7 /usr/bin/jython2.7
+test_var PYTHON_SITEDIR jython2_7 /usr/share/jython-2.7/Lib/site-packages
+test_var PYTHON_PKG_DEP jython2_7 '*dev-java/jython*:2.7'
+test_var PYTHON_SCRIPTDIR jython2_7 /usr/lib/python-exec/jython2.7
+
+test_var EPYTHON pypy pypy
+test_var PYTHON pypy /usr/bin/pypy
+test_var PYTHON_SITEDIR pypy /usr/lib/pypy/site-packages
+test_var PYTHON_INCLUDEDIR pypy /usr/lib/pypy/include
+test_var PYTHON_PKG_DEP pypy '*virtual/pypy*:0='
+test_var PYTHON_SCRIPTDIR pypy /usr/lib/python-exec/pypy
+
+test_var EPYTHON pypy3 pypy3
+test_var PYTHON pypy3 /usr/bin/pypy3
+test_var PYTHON_SITEDIR pypy3 /usr/lib/pypy3/site-packages
+test_var PYTHON_INCLUDEDIR pypy3 /usr/lib/pypy3/include
+test_var PYTHON_PKG_DEP pypy3 '*virtual/pypy3*:0='
+test_var PYTHON_SCRIPTDIR pypy3 /usr/lib/python-exec/pypy3
+
+test_is "python_is_python3 python2.7" 1
+test_is "python_is_python3 python3.2" 0
+test_is "python_is_python3 jython2.7" 1
+test_is "python_is_python3 pypy" 1
+test_is "python_is_python3 pypy3" 0
+
+# generic shebangs
+test_fix_shebang '#!/usr/bin/python' python2.7 '#!/usr/bin/python2.7'
+test_fix_shebang '#!/usr/bin/python' python3.4 '#!/usr/bin/python3.4'
+test_fix_shebang '#!/usr/bin/python' pypy '#!/usr/bin/pypy'
+test_fix_shebang '#!/usr/bin/python' pypy3 '#!/usr/bin/pypy3'
+test_fix_shebang '#!/usr/bin/python' jython2.7 '#!/usr/bin/jython2.7'
+
+# python2/python3 matching
+test_fix_shebang '#!/usr/bin/python2' python2.7 '#!/usr/bin/python2.7'
+test_fix_shebang '#!/usr/bin/python3' python2.7 FAIL
+test_fix_shebang '#!/usr/bin/python3' python2.7 '#!/usr/bin/python2.7' --force
+test_fix_shebang '#!/usr/bin/python3' python3.4 '#!/usr/bin/python3.4'
+test_fix_shebang '#!/usr/bin/python2' python3.4 FAIL
+test_fix_shebang '#!/usr/bin/python2' python3.4 '#!/usr/bin/python3.4' --force
+
+# pythonX.Y matching (those mostly test the patterns)
+test_fix_shebang '#!/usr/bin/python2.7' python2.7 '#!/usr/bin/python2.7'
+test_fix_shebang '#!/usr/bin/python2.7' python3.2 FAIL
+test_fix_shebang '#!/usr/bin/python2.7' python3.2 '#!/usr/bin/python3.2' --force
+test_fix_shebang '#!/usr/bin/python3.2' python3.2 '#!/usr/bin/python3.2'
+test_fix_shebang '#!/usr/bin/python3.2' python2.7 FAIL
+test_fix_shebang '#!/usr/bin/python3.2' python2.7 '#!/usr/bin/python2.7' --force
+test_fix_shebang '#!/usr/bin/pypy' pypy '#!/usr/bin/pypy'
+test_fix_shebang '#!/usr/bin/pypy' python2.7 FAIL
+test_fix_shebang '#!/usr/bin/pypy' python2.7 '#!/usr/bin/python2.7' --force
+test_fix_shebang '#!/usr/bin/jython2.7' jython2.7 '#!/usr/bin/jython2.7'
+test_fix_shebang '#!/usr/bin/jython2.7' jython3.2 FAIL
+test_fix_shebang '#!/usr/bin/jython2.7' jython3.2 '#!/usr/bin/jython3.2' --force
+
+# fancy path handling
+test_fix_shebang '#!/mnt/python2/usr/bin/python' python3.4 \
+ '#!/mnt/python2/usr/bin/python3.4'
+test_fix_shebang '#!/mnt/python2/usr/bin/python2' python2.7 \
+ '#!/mnt/python2/usr/bin/python2.7'
+test_fix_shebang '#!/mnt/python2/usr/bin/env python' python2.7 \
+ '#!/mnt/python2/usr/bin/env python2.7'
+test_fix_shebang '#!/mnt/python2/usr/bin/python2 python2' python2.7 \
+ '#!/mnt/python2/usr/bin/python2.7 python2'
+test_fix_shebang '#!/mnt/python2/usr/bin/python3 python2' python2.7 FAIL
+test_fix_shebang '#!/mnt/python2/usr/bin/python3 python2' python2.7 \
+ '#!/mnt/python2/usr/bin/python2.7 python2' --force
+test_fix_shebang '#!/usr/bin/foo' python2.7 FAIL
+
+# regression test for bug #522080
+test_fix_shebang '#!/usr/bin/python ' python2.7 '#!/usr/bin/python2.7 '
+
+# make sure we don't break pattern matching
+test_is "_python_impl_supported python2_5" 1
+test_is "_python_impl_supported python2_6" 1
+test_is "_python_impl_supported python2_7" 0
+test_is "_python_impl_supported python3_1" 1
+test_is "_python_impl_supported python3_2" 1
+test_is "_python_impl_supported python3_3" 0
+test_is "_python_impl_supported python3_4" 0
+test_is "_python_impl_supported pypy1_8" 1
+test_is "_python_impl_supported pypy1_9" 1
+test_is "_python_impl_supported pypy2_0" 1
+test_is "_python_impl_supported pypy" 0
+test_is "_python_impl_supported pypy3" 0
+test_is "_python_impl_supported jython2_5" 0
+test_is "_python_impl_supported jython2_7" 0
+
+rm "${tmpfile}"
+
+texit
diff --git a/eclass/tests/savedconfig.sh b/eclass/tests/savedconfig.sh
new file mode 100755
index 000000000000..c4a3ce824fdc
--- /dev/null
+++ b/eclass/tests/savedconfig.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit savedconfig
+
+quiet() {
+ local out ret
+ out=$("$@" 2>&1)
+ ret=$?
+ [[ ${ret} -eq 0 ]] || echo "${out}"
+ return ${ret}
+}
+sc() { EBUILD_PHASE=install quiet save_config "$@" ; }
+rc() { EBUILD_PHASE=prepare quiet restore_config "$@" ; }
+
+cleanup() { rm -rf "${ED}"/* "${T}"/* "${WORKDIR}"/* ; }
+test-it() {
+ local ret=0
+ tbegin "$@"
+ mkdir -p "${ED}"/etc/portage/savedconfig
+ : $(( ret |= $? ))
+ pushd "${WORKDIR}" >/dev/null
+ : $(( ret |= $? ))
+ test
+ : $(( ret |= $? ))
+ popd >/dev/null
+ : $(( ret |= $? ))
+ tend ${ret}
+ cleanup
+}
+
+test() {
+ touch f || return 1
+ sc f || return 1
+ [[ -f ${ED}/etc/portage/savedconfig/${CATEGORY}/${PF} ]]
+}
+test-it "simple save_config"
+
+test() {
+ touch a b c || return 1
+ sc a b c || return 1
+ [[ -d ${ED}/etc/portage/savedconfig/${CATEGORY}/${PF} ]]
+}
+test-it "multi save_config"
+
+test() {
+ mkdir dir || return 1
+ touch dir/{a,b,c} || return 1
+ sc dir || return 1
+ [[ -d ${ED}/etc/portage/savedconfig/${CATEGORY}/${PF} ]]
+}
+test-it "dir save_config"
+
+PORTAGE_CONFIGROOT=${D}
+
+test() {
+ echo "ggg" > f || return 1
+ rc f || return 1
+ [[ $(<f) == "ggg" ]]
+}
+test-it "simple restore_config"
+
+test() {
+ echo "ggg" > f || return 1
+ rc f || return 1
+ [[ $(<f) == "ggg" ]] || return 1
+ sc f || return 1
+
+ echo "hhh" > f || return 1
+ rc f || return 1
+ [[ $(<f) == "ggg" ]]
+}
+test-it "simple restore+save config"
+
+texit
diff --git a/eclass/tests/scons-utils.sh b/eclass/tests/scons-utils.sh
new file mode 100755
index 000000000000..5a65fbe63ffc
--- /dev/null
+++ b/eclass/tests/scons-utils.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit scons-utils
+
+test-scons_clean_makeopts() {
+ local sconsopts=$(scons_clean_makeopts ${1})
+
+ if [[ ${sconsopts} != ${2-${1}} ]]; then
+ eerror "Self-test failed:"
+ eindent
+ eerror "MAKEOPTS: ${1}"
+ eerror "Expected: ${2-${1}}"
+ eerror "Actual: ${sconsopts}"
+ eoutdent
+ (( ++failed ))
+ return 1
+ fi
+
+ return 0
+}
+
+# jobcount expected for non-specified state
+jc=5
+# failed test counter
+failed=0
+
+tbegin "scons_clean_makeopts()"
+
+# sane MAKEOPTS
+test-scons_clean_makeopts '--jobs=14 -k'
+test-scons_clean_makeopts '--jobs=14 -k'
+test-scons_clean_makeopts '--jobs 15 -k'
+test-scons_clean_makeopts '--jobs=16 --keep-going'
+test-scons_clean_makeopts '-j17 --keep-going'
+test-scons_clean_makeopts '-j 18 --keep-going'
+
+# needing cleaning
+test-scons_clean_makeopts '--jobs -k' "--jobs=${jc} -k"
+test-scons_clean_makeopts '--jobs --keep-going' "--jobs=${jc} --keep-going"
+test-scons_clean_makeopts '-kj' "-kj ${jc}"
+
+# broken by definition (but passed as it breaks make as well)
+test-scons_clean_makeopts '-jk'
+test-scons_clean_makeopts '--jobs=randum'
+test-scons_clean_makeopts '-kjrandum'
+
+# needing stripping
+test-scons_clean_makeopts '--load-average=25 -kj16' '-kj16'
+test-scons_clean_makeopts '--load-average 25 -k -j17' '-k -j17'
+test-scons_clean_makeopts '-j2 HOME=/tmp' '-j2'
+test-scons_clean_makeopts '--jobs funnystuff -k' "--jobs=${jc} -k"
+
+# bug #388961
+test-scons_clean_makeopts '--jobs -l3' "--jobs=${jc}"
+test-scons_clean_makeopts '-j -l3' "-j ${jc}"
+
+tend ${failed}
+
+texit
diff --git a/eclass/tests/tests-common.sh b/eclass/tests/tests-common.sh
new file mode 100644
index 000000000000..d3095e5fc350
--- /dev/null
+++ b/eclass/tests/tests-common.sh
@@ -0,0 +1,147 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+if ! source /lib/gentoo/functions.sh ; then
+ echo "Missing functions.sh. Please install sys-apps/gentoo-functions!" 1>&2
+ exit 1
+fi
+
+# Let overlays override this so they can add their own testsuites.
+TESTS_ECLASS_SEARCH_PATHS=( .. )
+
+inherit() {
+ local e path
+ for e in "$@" ; do
+ for path in "${TESTS_ECLASS_SEARCH_PATHS[@]}" ; do
+ local eclass=${path}/${e}.eclass
+ if [[ -e "${eclass}" ]] ; then
+ source "${eclass}"
+ return 0
+ fi
+ done
+ done
+ die "could not find ${eclass}"
+}
+EXPORT_FUNCTIONS() { :; }
+
+debug-print() {
+ [[ ${#} -eq 0 ]] && return
+
+ if [[ ${ECLASS_DEBUG_OUTPUT} == on ]]; then
+ printf 'debug: %s\n' "${@}" >&2
+ elif [[ -n ${ECLASS_DEBUG_OUTPUT} ]]; then
+ printf 'debug: %s\n' "${@}" >> "${ECLASS_DEBUG_OUTPUT}"
+ fi
+}
+
+debug-print-function() {
+ debug-print "${1}, parameters: ${*:2}"
+}
+
+debug-print-section() {
+ debug-print "now in section ${*}"
+}
+
+has() {
+ local needle=$1
+ shift
+
+ local x
+ for x in "$@"; do
+ [ "${x}" = "${needle}" ] && return 0
+ done
+ return 1
+}
+use() { has "$1" ${IUSE} ; }
+
+die() {
+ echo "die: $*" 1>&2
+ exit 1
+}
+
+has_version() {
+ portageq has_version / "$@"
+}
+
+KV_major() {
+ [[ -z $1 ]] && return 1
+
+ local KV=$@
+ echo "${KV%%.*}"
+}
+
+KV_minor() {
+ [[ -z $1 ]] && return 1
+
+ local KV=$@
+ KV=${KV#*.}
+ echo "${KV%%.*}"
+}
+
+KV_micro() {
+ [[ -z $1 ]] && return 1
+
+ local KV=$@
+ KV=${KV#*.*.}
+ echo "${KV%%[^[:digit:]]*}"
+}
+
+KV_to_int() {
+ [[ -z $1 ]] && return 1
+
+ local KV_MAJOR=$(KV_major "$1")
+ local KV_MINOR=$(KV_minor "$1")
+ local KV_MICRO=$(KV_micro "$1")
+ local KV_int=$(( KV_MAJOR * 65536 + KV_MINOR * 256 + KV_MICRO ))
+
+ # We make version 2.2.0 the minimum version we will handle as
+	# a sanity check ... if it's less, we fail ...
+ if [[ ${KV_int} -ge 131584 ]] ; then
+ echo "${KV_int}"
+ return 0
+ fi
+
+ return 1
+}
+
+tret=0
+tbegin() {
+ ebegin "Testing $*"
+}
+texit() {
+ rm -rf "${tmpdir}"
+ exit ${tret}
+}
+tend() {
+ t eend "$@"
+}
+t() {
+ "$@"
+ local ret=$?
+ : $(( tret |= ${ret} ))
+ return ${ret}
+}
+
+tmpdir="${PWD}/tmp"
+pkg_root="${tmpdir}/$0/${RANDOM}"
+T="${pkg_root}/temp"
+D="${pkg_root}/image"
+WORKDIR="${pkg_root}/work"
+ED=${D}
+mkdir -p "${D}" "${T}" "${WORKDIR}"
+
+dodir() {
+ mkdir -p "${@/#/${ED}/}"
+}
+
+elog() { einfo "$@" ; }
+
+IUSE=""
+CATEGORY="dev-eclass"
+PN="tests"
+PV="0"
+P="${PN}-${PV}"
+PF=${P}
+SLOT=0
diff --git a/eclass/tests/toolchain-funcs.sh b/eclass/tests/toolchain-funcs.sh
new file mode 100755
index 000000000000..41c1ae59306b
--- /dev/null
+++ b/eclass/tests/toolchain-funcs.sh
@@ -0,0 +1,115 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit toolchain-funcs
+
+#
+# TEST: tc-arch-kernel
+#
+test-tc-arch-kernel() {
+ local ret=0
+ KV=$1 ; shift
+ for CHOST in "$@" ; do
+ exp=${CHOST##*:}
+ CHOST=${CHOST%%:*}
+ actual=$(tc-arch-kernel)
+
+ if [[ ${actual} != ${exp:-${CHOST}} ]] ; then
+ eerror "Failure for CHOST: ${CHOST} Expected: ${exp} != Actual: ${actual}"
+ ((++ret))
+ fi
+ done
+ return ${ret}
+}
+tbegin "tc-arch-kernel() (KV=2.6.0)"
+test-tc-arch-kernel 2.6.0 \
+ alpha arm{,eb}:arm avr32 bfin:blackfin cris hppa:parisc \
+ i{3..6}86:i386 ia64 m68k mips{,eb}:mips nios2 powerpc:ppc powerpc64:ppc64 \
+ s390{,x}:s390 sh{1..4}{,eb}:sh sparc{,64} vax x86_64 \
+ i{3..6}86-gentoo-freebsd:i386
+tend $?
+tbegin "tc-arch-kernel() (KV=2.6.30)"
+test-tc-arch-kernel 2.6.30 \
+ i{3..6}86:x86 x86_64:x86 \
+ powerpc{,64}:powerpc i{3..6}86-gentoo-freebsd:i386
+tend $?
+
+#
+# TEST: tc-arch
+#
+tbegin "tc-arch"
+ret=0
+for CHOST in \
+ alpha arm{,eb}:arm avr32:avr bfin cris hppa i{3..6}86:x86 ia64 m68k \
+ mips{,eb}:mips nios2 powerpc:ppc powerpc64:ppc64 s390{,x}:s390 \
+ sh{1..4}{,eb}:sh sparc{,64}:sparc vax x86_64:amd64
+do
+ exp=${CHOST##*:}
+ CHOST=${CHOST%%:*}
+ actual=$(tc-arch)
+
+ if [[ ${actual} != ${exp:-${CHOST}} ]] ; then
+ eerror "Failure for CHOST: ${CHOST} Expected: ${exp} != Actual: ${actual}"
+ : $((++ret))
+ fi
+done
+tend ${ret}
+
+#
+# TEST: tc-ld-is-gold
+#
+tbegin "tc-ld-is-gold (bfd selected)"
+LD=ld.bfd tc-ld-is-gold && ret=1 || ret=0
+tend ${ret}
+
+tbegin "tc-ld-is-gold (gold selected)"
+LD=ld.gold tc-ld-is-gold
+ret=$?
+tend ${ret}
+
+tbegin "tc-ld-is-gold (bfd selected via flags)"
+LD=ld.gold LDFLAGS=-fuse-ld=bfd tc-ld-is-gold
+ret=$?
+tend ${ret}
+
+tbegin "tc-ld-is-gold (gold selected via flags)"
+LD=ld.bfd LDFLAGS=-fuse-ld=gold tc-ld-is-gold
+ret=$?
+tend ${ret}
+
+#
+# TEST: tc-ld-disable-gold
+#
+tbegin "tc-ld-disable-gold (bfd selected)"
+(
+export LD=ld.bfd LDFLAGS=
+ewarn() { :; }
+tc-ld-disable-gold
+[[ ${LD} == "ld.bfd" && -z ${LDFLAGS} ]]
+)
+tend $?
+
+tbegin "tc-ld-disable-gold (gold selected)"
+(
+export LD=ld.gold LDFLAGS=
+ewarn() { :; }
+tc-ld-disable-gold
+[[ ${LD} == "ld.bfd" || ${LDFLAGS} == *"-fuse-ld=bfd"* ]]
+)
+tend $?
+
+tbegin "tc-ld-disable-gold (gold selected via flags)"
+(
+export LD= LDFLAGS="-fuse-ld=gold"
+ewarn() { :; }
+tc-ld-disable-gold
+[[ ${LD} == *"/ld.bfd" || ${LDFLAGS} == "-fuse-ld=gold -fuse-ld=bfd" ]]
+)
+tend $?
+
+
+texit
diff --git a/eclass/tests/toolchain.sh b/eclass/tests/toolchain.sh
new file mode 100755
index 000000000000..7dc5e7330c3c
--- /dev/null
+++ b/eclass/tests/toolchain.sh
@@ -0,0 +1,83 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit toolchain
+
+test_downgrade_arch_flags() {
+ local exp msg ret=0 ver
+
+ ver=${1}
+ exp=${2}
+ shift 2
+ CFLAGS=${@}
+
+ tbegin "${ver} ${CFLAGS} => ${exp}"
+
+ CHOST=x86_64 # needed for tc-arch
+ downgrade_arch_flags ${ver}
+
+ if [[ ${CFLAGS} != ${exp} ]]; then
+ msg="Failure - Expected: \"${exp}\" Got: \"${CFLAGS}\""
+ ret=1
+ fi
+ tend ${ret} ${msg}
+}
+
+# ver expected given
+test_downgrade_arch_flags 4.9 "-march=haswell" "-march=haswell"
+test_downgrade_arch_flags 4.8 "-march=core-avx2" "-march=haswell"
+test_downgrade_arch_flags 4.7 "-march=core-avx2" "-march=haswell"
+test_downgrade_arch_flags 4.6 "-march=core-avx-i" "-march=haswell"
+test_downgrade_arch_flags 4.5 "-march=core2" "-march=haswell"
+test_downgrade_arch_flags 4.4 "-march=core2" "-march=haswell"
+test_downgrade_arch_flags 4.3 "-march=core2" "-march=haswell"
+test_downgrade_arch_flags 4.2 "-march=nocona" "-march=haswell"
+test_downgrade_arch_flags 4.1 "-march=nocona" "-march=haswell"
+test_downgrade_arch_flags 4.0 "-march=nocona" "-march=haswell"
+test_downgrade_arch_flags 3.4 "-march=nocona" "-march=haswell"
+test_downgrade_arch_flags 3.3 "-march=nocona" "-march=haswell"
+
+test_downgrade_arch_flags 4.9 "-march=bdver4" "-march=bdver4"
+test_downgrade_arch_flags 4.8 "-march=bdver3" "-march=bdver4"
+test_downgrade_arch_flags 4.7 "-march=bdver2" "-march=bdver4"
+test_downgrade_arch_flags 4.6 "-march=bdver1" "-march=bdver4"
+test_downgrade_arch_flags 4.5 "-march=amdfam10" "-march=bdver4"
+test_downgrade_arch_flags 4.4 "-march=amdfam10" "-march=bdver4"
+test_downgrade_arch_flags 4.3 "-march=amdfam10" "-march=bdver4"
+test_downgrade_arch_flags 4.2 "-march=k8" "-march=bdver4"
+test_downgrade_arch_flags 4.1 "-march=k8" "-march=bdver4"
+test_downgrade_arch_flags 4.0 "-march=k8" "-march=bdver4"
+test_downgrade_arch_flags 3.4 "-march=k8" "-march=bdver4"
+test_downgrade_arch_flags 3.3 "-march=x86-64" "-march=bdver4"
+
+test_downgrade_arch_flags 3.4 "-march=c3-2" "-march=c3-2"
+test_downgrade_arch_flags 3.3 "-march=c3" "-march=c3-2"
+
+test_downgrade_arch_flags 4.5 "-march=garbage" "-march=garbage"
+
+test_downgrade_arch_flags 4.9 "-mtune=intel" "-mtune=intel"
+test_downgrade_arch_flags 4.8 "-mtune=generic" "-mtune=intel"
+test_downgrade_arch_flags 3.4 "" "-mtune=generic"
+test_downgrade_arch_flags 3.4 "" "-mtune=x86-64"
+test_downgrade_arch_flags 3.3 "" "-mtune=anything"
+
+test_downgrade_arch_flags 4.5 "-march=amdfam10 -mtune=generic" "-march=btver2 -mtune=generic"
+test_downgrade_arch_flags 3.3 "-march=k6-2" "-march=geode -mtune=barcelona"
+test_downgrade_arch_flags 3.4 "-march=k8" "-march=btver2 -mtune=generic"
+
+test_downgrade_arch_flags 4.2 "-march=native" "-march=native"
+test_downgrade_arch_flags 4.1 "-march=nocona" "-march=native"
+
+test_downgrade_arch_flags 4.9 "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+test_downgrade_arch_flags 4.8 "-march=foo -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+test_downgrade_arch_flags 4.7 "-march=foo -mno-avx2 -mno-avx -mno-sse4.1" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+test_downgrade_arch_flags 4.6 "-march=foo -mno-avx -mno-sse4.1" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+test_downgrade_arch_flags 4.3 "-march=foo -mno-sse4.1" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+test_downgrade_arch_flags 4.2 "-march=foo" "-march=foo -mno-sha -mno-rtm -mno-avx2 -mno-avx -mno-sse4.1"
+
+test_downgrade_arch_flags 4.4 "-O2 -march=core2 -ffoo -fblah" "-O2 -march=atom -mno-sha -ffoo -mno-rtm -fblah"
+texit
diff --git a/eclass/tests/versionator_version_compare.sh b/eclass/tests/versionator_version_compare.sh
new file mode 100755
index 000000000000..633ba26dc2e6
--- /dev/null
+++ b/eclass/tests/versionator_version_compare.sh
@@ -0,0 +1,200 @@
+#!/bin/bash
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+source tests-common.sh
+
+inherit versionator
+
+eshopts_push -s extglob
+ver=( "" "lt" "eq" "gt" )
+lt=1 eq=2 gt=3
+
+test_version_compare() {
+ tbegin "version_compare ${1} -${ver[${2}]} ${3}"
+ version_compare "${1}" "${3}"
+ local r=$?
+ [[ ${r} -eq ${2} ]]
+ tend $? "FAIL: ${@} (got ${r} exp ${2})"
+}
+
+echo "
+ 0 $lt 1
+ 1 $lt 2
+ 2 $gt 1
+ 2 $eq 2
+ 0 $eq 0
+ 10 $lt 20
+ 68 $eq 068
+ 068 $gt 67
+ 068 $lt 69
+
+ 1.0 $lt 2.0
+ 2.0 $eq 2.0
+ 2.0 $gt 1.0
+
+ 1.0 $gt 0.0
+ 0.0 $eq 0.0
+ 0.0 $lt 1.0
+
+ 0.1 $lt 0.2
+ 0.2 $eq 0.2
+ 0.3 $gt 0.2
+
+ 1.2 $lt 2.1
+ 2.1 $gt 1.2
+
+ 1.2.3 $lt 1.2.4
+ 1.2.4 $gt 1.2.3
+
+ 1.2.0 $gt 1.2
+ 1.2.1 $gt 1.2
+ 1.2 $lt 1.2.1
+
+ 1.2b $eq 1.2b
+ 1.2b $lt 1.2c
+ 1.2b $gt 1.2a
+ 1.2b $gt 1.2
+ 1.2 $lt 1.2a
+
+ 1.3 $gt 1.2a
+ 1.3 $lt 1.3a
+
+ 1.0_alpha7 $lt 1.0_beta7
+ 1.0_beta $lt 1.0_pre
+ 1.0_pre5 $lt 1.0_rc2
+ 1.0_rc2 $lt 1.0
+
+ 1.0_p1 $gt 1.0
+ 1.0_p1-r1 $gt 1.0_p1
+
+ 1.0_alpha6-r1 $gt 1.0_alpha6
+ 1.0_beta6-r1 $gt 1.0_alpha6-r2
+
+ 1.0_pre1 $lt 1.0_p1
+
+ 1.0p $gt 1.0_p1
+ 1.0r $gt 1.0-r1
+ 1.6.15 $gt 1.6.10-r2
+ 1.6.10-r2 $lt 1.6.15
+
+" | while read a b c ; do
+ [[ -z "${a}${b}${c}" ]] && continue
+ test_version_compare "${a}" "${b}" "${c}"
+done
+
+
+for q in "alpha beta pre rc=${lt};${gt}" "p=${gt};${lt}" ; do
+ for p in ${q%%=*} ; do
+ c=${q##*=}
+ alt=${c%%;*} agt=${c##*;}
+ test_version_compare "1.0" $agt "1.0_${p}"
+ test_version_compare "1.0" $agt "1.0_${p}1"
+ test_version_compare "1.0" $agt "1.0_${p}068"
+
+ test_version_compare "2.0_${p}" $alt "2.0"
+ test_version_compare "2.0_${p}1" $alt "2.0"
+ test_version_compare "2.0_${p}068" $alt "2.0"
+
+ test_version_compare "1.0_${p}" $eq "1.0_${p}"
+ test_version_compare "0.0_${p}" $lt "0.0_${p}1"
+ test_version_compare "666_${p}3" $gt "666_${p}"
+
+ test_version_compare "1_${p}7" $lt "1_${p}8"
+ test_version_compare "1_${p}7" $eq "1_${p}7"
+ test_version_compare "1_${p}7" $gt "1_${p}6"
+ test_version_compare "1_${p}09" $eq "1_${p}9"
+
+ test_version_compare "1_${p}7-r0" $eq "1_${p}7"
+ test_version_compare "1_${p}7-r0" $lt "1_${p}7-r1"
+ test_version_compare "1_${p}7-r0" $lt "1_${p}7-r01"
+ test_version_compare "1_${p}7-r01" $eq "1_${p}7-r1"
+ test_version_compare "1_${p}8-r1" $gt "1_${p}7-r100"
+
+ test_version_compare "1_${p}_alpha" $lt "1_${p}_beta"
+ done
+done
+
+for p in "-r" "_p" ; do
+ test_version_compare "7.2${p}1" $lt "7.2${p}2"
+ test_version_compare "7.2${p}2" $gt "7.2${p}1"
+ test_version_compare "7.2${p}3" $gt "7.2${p}2"
+ test_version_compare "7.2${p}2" $lt "7.2${p}3"
+done
+
+# The following tests all come from portage's test cases:
+test_version_compare "6.0" $gt "5.0"
+test_version_compare "5.0" $gt "5"
+test_version_compare "1.0-r1" $gt "1.0-r0"
+test_version_compare "1.0-r1" $gt "1.0"
+test_version_compare "999999999999999999999999999999" $gt "999999999999999999999999999998"
+test_version_compare "1.0.0" $gt "1.0"
+test_version_compare "1.0.0" $gt "1.0b"
+test_version_compare "1b" $gt "1"
+test_version_compare "1b_p1" $gt "1_p1"
+test_version_compare "1.1b" $gt "1.1"
+test_version_compare "12.2.5" $gt "12.2b"
+
+test_version_compare "4.0" $lt "5.0"
+test_version_compare "5" $lt "5.0"
+test_version_compare "1.0_pre2" $lt "1.0_p2"
+test_version_compare "1.0_alpha2" $lt "1.0_p2"
+test_version_compare "1.0_alpha1" $lt "1.0_beta1"
+test_version_compare "1.0_beta3" $lt "1.0_rc3"
+test_version_compare "1.001000000000000000001" $lt "1.001000000000000000002"
+test_version_compare "1.00100000000" $lt "1.0010000000000000001"
+test_version_compare "999999999999999999999999999998" $lt "999999999999999999999999999999"
+test_version_compare "1.01" $lt "1.1"
+test_version_compare "1.0-r0" $lt "1.0-r1"
+test_version_compare "1.0" $lt "1.0-r1"
+test_version_compare "1.0" $lt "1.0.0"
+test_version_compare "1.0b" $lt "1.0.0"
+test_version_compare "1_p1" $lt "1b_p1"
+test_version_compare "1" $lt "1b"
+test_version_compare "1.1" $lt "1.1b"
+test_version_compare "12.2b" $lt "12.2.5"
+
+test_version_compare "4.0" $eq "4.0"
+test_version_compare "1.0" $eq "1.0"
+test_version_compare "1.0-r0" $eq "1.0"
+test_version_compare "1.0" $eq "1.0-r0"
+test_version_compare "1.0-r0" $eq "1.0-r0"
+test_version_compare "1.0-r1" $eq "1.0-r1"
+
+# The following were just tests for != in portage, we need something a bit
+# more precise
+test_version_compare "1" $lt "2"
+test_version_compare "1.0_alpha" $lt "1.0_pre"
+test_version_compare "1.0_beta" $gt "1.0_alpha"
+test_version_compare "0" $lt "0.0"
+test_version_compare "1.0-r0" $lt "1.0-r1"
+test_version_compare "1.0-r1" $gt "1.0-r0"
+test_version_compare "1.0" $lt "1.0-r1"
+test_version_compare "1.0-r1" $gt "1.0"
+test_version_compare "1_p1" $lt "1b_p1"
+test_version_compare "1b" $gt "1"
+test_version_compare "1.1b" $gt "1.1"
+test_version_compare "12.2b" $gt "12.2"
+
+# The following tests all come from paludis's test cases:
+test_version_compare "1.0" $gt "1"
+test_version_compare "1" $lt "1.0"
+test_version_compare "1.0_alpha" $gt "1_alpha"
+test_version_compare "1.0_alpha" $gt "1"
+test_version_compare "1.0_alpha" $lt "1.0"
+test_version_compare "1.2.0.0_alpha7-r4" $gt "1.2_alpha7-r4"
+
+test_version_compare "0001" $eq "1"
+test_version_compare "01" $eq "001"
+test_version_compare "0001.1" $eq "1.1"
+test_version_compare "01.01" $eq "1.01"
+test_version_compare "1.010" $eq "1.01"
+test_version_compare "1.00" $eq "1.0"
+test_version_compare "1.0100" $eq "1.010"
+test_version_compare "1" $eq "1-r0"
+test_version_compare "1-r00" $eq "1-r0"
+
+eshopts_pop
+
+texit
diff --git a/eclass/texlive-common.eclass b/eclass/texlive-common.eclass
new file mode 100644
index 000000000000..e4798b3acd95
--- /dev/null
+++ b/eclass/texlive-common.eclass
@@ -0,0 +1,170 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: texlive-common.eclass
+# @MAINTAINER:
+# tex@gentoo.org
+# @AUTHOR:
+# Original Author: Alexis Ballier <aballier@gentoo.org>
+# @BLURB: Provide various functions used by both texlive-core and texlive modules
+# @DESCRIPTION:
+# Purpose: Provide various functions used by both texlive-core and texlive
+# modules.
+#
+# Note that this eclass *must* not assume the presence of any standard tex tool
+
+case "${EAPI:-0}" in
+ 0|1|2)
+ die "EAPI='${EAPI}' is not supported anymore"
+ ;;
+ *)
+ ;;
+esac
+
+TEXMF_PATH=/usr/share/texmf
+TEXMF_DIST_PATH=/usr/share/texmf-dist
+TEXMF_VAR_PATH=/var/lib/texmf
+
+# @FUNCTION: texlive-common_handle_config_files
+# @DESCRIPTION:
+# Has to be called in src_install after having installed the files in ${D}
+# This function will move the relevant files to /etc/texmf and symlink them
+# from their original location. This is to allow easy update of texlive's
+# configuration
+
+texlive-common_handle_config_files() {
+ # Handle config files properly
+ [ -d "${ED}${TEXMF_PATH}" ] || return
+ cd "${ED}${TEXMF_PATH}"
+ for f in $(find . -name '*.cnf' -type f -o -name '*.cfg' -type f | sed -e "s:\./::g") ; do
+ if [ "${f#*config}" != "${f}" -o "${f#doc}" != "${f}" -o "${f#source}" != "${f}" -o "${f#tex}" != "${f}" ] ; then
+ continue
+ fi
+ dodir /etc/texmf/$(dirname ${f}).d
+ einfo "Moving (and symlinking) ${EPREFIX}${TEXMF_PATH}/${f} to ${EPREFIX}/etc/texmf/$(dirname ${f}).d"
+ mv "${ED}/${TEXMF_PATH}/${f}" "${ED}/etc/texmf/$(dirname ${f}).d" || die "mv ${f} failed."
+ dosym /etc/texmf/$(dirname ${f}).d/$(basename ${f}) ${TEXMF_PATH}/${f}
+ done
+}
+
+# @FUNCTION: texlive-common_is_file_present_in_texmf
+# @DESCRIPTION:
+# Return true (0) if a file is present in the texmf tree
+# Call it from the directory containing texmf and texmf-dist
+
+texlive-common_is_file_present_in_texmf() {
+ local mark="${T}/$1.found"
+ [ -d texmf ] && find texmf -name $1 -exec touch "${mark}" \;
+ [ -d texmf-dist ] && find texmf-dist -name $1 -exec touch "${mark}" \;
+ [ -f "${mark}" ]
+}
+
+# @FUNCTION: texlive-common_do_symlinks
+# @USAGE: < src > < dest >
+# @DESCRIPTION:
+# Mimic the install_link function of texlinks
+#
+# Should have the same behavior as the one in /usr/bin/texlinks
+# except that it is under the control of the package manager
+# Note that $1 corresponds to $src and $2 to $dest in this function
+# ( Arguments are switched because texlinks main function sends them switched )
+# This function should not be called from an ebuild, prefer etexlinks that will
+# also do the fmtutil file parsing.
+
+texlive-common_do_symlinks() {
+ while [ $# != 0 ]; do
+ case $1 in
+ cont-??|metafun|mptopdf)
+ einfo "Symlink $1 skipped (special case)"
+ ;;
+ mf)
+ einfo "Symlink $1 -> $2 skipped (texlive-core takes care of it)"
+ ;;
+ *)
+ if [ $1 = $2 ];
+ then
+ einfo "Symlink $1 -> $2 skipped"
+ elif [ -e "${ED}/usr/bin/$1" ];
+ then
+ einfo "Symlink $1 skipped (file exists)"
+ else
+ einfo "Making symlink from $1 to $2"
+ dosym $2 /usr/bin/$1
+ fi
+ ;;
+ esac
+ shift; shift;
+ done
+}
+
+# @FUNCTION: etexlinks
+# @USAGE: < file >
+# @DESCRIPTION:
+# Mimic texlinks on a fmtutil format file
+#
+# $1 has to be a fmtutil format file like fmtutil.cnf
+# etexlinks foo will install the symlinks that texlinks --cnffile foo would have
+# created. We cannot use texlinks with portage as it is not DESTDIR aware.
+# (It would not fail but will not create the symlinks if the target is not in
+# the same dir as the source)
+# Also, as this eclass must not depend on a tex distribution to be installed we
+# cannot use texlinks from here.
+
+etexlinks() {
+ # Install symlinks from formats to engines
+ texlive-common_do_symlinks $(sed '/^[ ]*#/d; /^[ ]*$/d' "$1" | awk '{print $1, $2}')
+}
+
+# @FUNCTION: dobin_texmf_scripts
+# @USAGE: < file1 file2 ... >
+# @DESCRIPTION:
+# Symlinks a script from the texmf tree to /usr/bin. Requires permissions to be
+# correctly set for the file that it will point to.
+
+dobin_texmf_scripts() {
+ while [ $# -gt 0 ] ; do
+ local trg=$(basename ${1} | sed 's,\.[^/]*$,,' | tr '[:upper:]' '[:lower:]')
+ einfo "Installing ${1} as ${trg} bin wrapper"
+ [ -x "${ED}/usr/share/${1}" ] || die "Trying to install a non existing or non executable symlink to /usr/bin: ${1}"
+ dosym ../share/${1} /usr/bin/${trg} || die "failed to install ${1} as $trg"
+ shift
+ done
+}
+
+# @FUNCTION: etexmf-update
+# @USAGE: In ebuilds' pkg_postinst and pkg_postrm phases
+# @DESCRIPTION:
+# Runs texmf-update if it is available and prints a warning otherwise. This
+# function helps in factorizing some code.
+
+etexmf-update() {
+ if has_version 'app-text/texlive-core' ; then
+ if [ "$ROOT" = "/" ] && [ -x "${EPREFIX}"/usr/sbin/texmf-update ] ; then
+ "${EPREFIX}"/usr/sbin/texmf-update
+ else
+ ewarn "Cannot run texmf-update for some reason."
+ ewarn "Your texmf tree might be inconsistent with your configuration"
+ ewarn "Please try to figure what has happened"
+ fi
+ fi
+}
+
+# @FUNCTION: efmtutil-sys
+# @USAGE: In ebuilds' pkg_postinst to force a rebuild of TeX formats.
+# @DESCRIPTION:
+# Runs fmtutil-sys if it is available and prints a warning otherwise. This
+# function helps in factorizing some code.
+
+efmtutil-sys() {
+ if has_version 'app-text/texlive-core' ; then
+ if [ "$ROOT" = "/" ] && [ -x "${EPREFIX}"/usr/bin/fmtutil-sys ] ; then
+ einfo "Rebuilding formats"
+ "${EPREFIX}"/usr/bin/fmtutil-sys --all &> /dev/null
+ else
+ ewarn "Cannot run fmtutil-sys for some reason."
+ ewarn "Your formats might be inconsistent with your installed ${PN} version"
+ ewarn "Please try to figure what has happened"
+ fi
+ fi
+}
diff --git a/eclass/texlive-module.eclass b/eclass/texlive-module.eclass
new file mode 100644
index 000000000000..f78e00fefabd
--- /dev/null
+++ b/eclass/texlive-module.eclass
@@ -0,0 +1,401 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: texlive-module.eclass
+# @MAINTAINER:
+# tex@gentoo.org
+# @AUTHOR:
+# Original Author: Alexis Ballier <aballier@gentoo.org>
+# @BLURB: Provide generic install functions so that modular texlive's texmf ebuild will only have to inherit this eclass
+# @DESCRIPTION:
+# Purpose: Provide generic install functions so that modular texlive's texmf ebuilds will
+# only have to inherit this eclass.
+# Ebuilds have to provide TEXLIVE_MODULE_CONTENTS variable that contains the list
+# of packages that it will install. (See below)
+#
+# For TeX Live versions prior to 2009, the ebuild was supposed to unpack the
+# texmf and texmf-dist directories to ${WORKDIR} (which is what the default
+# src_unpack does).
+# Starting from TeX Live 2009, the eclass provides a src_unpack function taking
+# care of unpacking and relocating the files that need it.
+#
+# It inherits texlive-common. Patching is supported via the PATCHES
+# bash array.
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_CONTENTS
+# @DESCRIPTION:
+# The list of packages that will be installed. This variable will be expanded to
+# SRC_URI:
+# foo -> texlive-module-foo-${PV}.tar.xz
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_DOC_CONTENTS
+# @DESCRIPTION:
+# The list of packages that will be installed if the doc useflag is enabled.
+# Expansion to SRC_URI is the same as for TEXLIVE_MODULE_CONTENTS.
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_SRC_CONTENTS
+# @DESCRIPTION:
+# The list of packages that will be installed if the source useflag is enabled.
+# Expansion to SRC_URI is the same as for TEXLIVE_MODULE_CONTENTS.
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_BINSCRIPTS
+# @DESCRIPTION:
+# A space separated list of files that are in fact scripts installed in the
+# texmf tree and that we want to be available directly. They will be installed in
+# /usr/bin.
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_BINLINKS
+# @DESCRIPTION:
+# A space separated list of links to add for BINSCRIPTS.
+# The syntax is: foo:bar to create a symlink bar -> foo.
+
+# @ECLASS-VARIABLE: TL_PV
+# @DESCRIPTION:
+# Normally the module's PV reflects the TeXLive release it belongs to.
+# If this is not the case, TL_PV takes the version number for the
+# needed app-text/texlive-core.
+
+# @ECLASS-VARIABLE: TL_MODULE_INFORMATION
+# @DESCRIPTION:
+# Information to display about the package.
+# e.g. for enabling/disabling a feature
+
+# @ECLASS-VARIABLE: PATCHES
+# @DESCRIPTION:
+# Array variable specifying any patches to be applied.
+
+inherit texlive-common eutils
+
+case "${EAPI:-0}" in
+ 0|1|2)
+ die "EAPI='${EAPI}' is not supported anymore"
+ ;;
+ *)
+ ;;
+esac
+
+HOMEPAGE="http://www.tug.org/texlive/"
+
+# The module must match the texlive-core release it belongs to
+# (TL_PV overrides PV for out-of-cycle bumps, see @ECLASS-VARIABLE above).
+COMMON_DEPEND=">=app-text/texlive-core-${TL_PV:-${PV}}"
+
+IUSE="source"
+
+# Starting from TeX Live 2009, upstream provides .tar.xz modules.
+PKGEXT=tar.xz
+DEPEND="${COMMON_DEPEND}
+ app-arch/xz-utils"
+
+# Expand each listed package name into a distfile URI.
+for i in ${TEXLIVE_MODULE_CONTENTS}; do
+ SRC_URI="${SRC_URI} mirror://gentoo/texlive-module-${i}-${PV}.${PKGEXT}"
+done
+
+# Forge doc SRC_URI
+# ${PN##*documentation*} is non-empty iff PN does NOT contain "documentation":
+# only non-documentation packages get their docs behind USE=doc.
+[ -n "${PN##*documentation*}" ] && [ -n "${TEXLIVE_MODULE_DOC_CONTENTS}" ] && SRC_URI="${SRC_URI} doc? ("
+for i in ${TEXLIVE_MODULE_DOC_CONTENTS}; do
+ SRC_URI="${SRC_URI} mirror://gentoo/texlive-module-${i}-${PV}.${PKGEXT}"
+done
+[ -n "${PN##*documentation*}" ] && [ -n "${TEXLIVE_MODULE_DOC_CONTENTS}" ] && SRC_URI="${SRC_URI} )"
+
+# Forge source SRC_URI
+if [ -n "${TEXLIVE_MODULE_SRC_CONTENTS}" ] ; then
+ SRC_URI="${SRC_URI} source? ("
+ for i in ${TEXLIVE_MODULE_SRC_CONTENTS}; do
+ SRC_URI="${SRC_URI} mirror://gentoo/texlive-module-${i}-${PV}.${PKGEXT}"
+ done
+ SRC_URI="${SRC_URI} )"
+fi
+
+RDEPEND="${COMMON_DEPEND}"
+
+# doc USE flag only for packages that are not pure documentation packages.
+[ -z "${PN##*documentation*}" ] || IUSE="${IUSE} doc"
+
+# @ECLASS-VARIABLE: TEXLIVE_MODULE_OPTIONAL_ENGINE
+# @DESCRIPTION:
+# A space separated list of TeX engines that can be made optional.
+# e.g. "luatex luajittex"
+
+if [ -n "${TEXLIVE_MODULE_OPTIONAL_ENGINE}" ] ; then
+ for engine in ${TEXLIVE_MODULE_OPTIONAL_ENGINE} ; do
+ IUSE="${IUSE} +${engine}"
+ done
+fi
+
+S="${WORKDIR}"
+
+# @FUNCTION: texlive-module_src_unpack
+# @DESCRIPTION:
+# Only for TeX Live 2009 and later.
+# After unpacking, the files that need to be relocated are moved accordingly.
+
+# Relocatable packages ship files under a RELOC/ prefix in their tlpobj
+# metadata; they all belong under texmf-dist.
+RELOC_TARGET=texmf-dist
+
+texlive-module_src_unpack() {
+ unpack ${A}
+
+ # Collect the RELOC-prefixed paths from all tlpobj files, then recreate
+ # their directory structure under ${RELOC_TARGET} and move them there.
+ grep RELOC tlpkg/tlpobj/* | awk '{print $2}' | sed 's#^RELOC/##' > "${T}/reloclist"
+ { for i in $(<"${T}/reloclist"); do dirname $i; done; } | uniq > "${T}/dirlist"
+ for i in $(<"${T}/dirlist"); do
+ [ -d "${RELOC_TARGET}/${i}" ] || mkdir -p "${RELOC_TARGET}/${i}"
+ done
+ for i in $(<"${T}/reloclist"); do
+ mv "${i}" "${RELOC_TARGET}"/$(dirname "${i}") || die "failed to relocate ${i} to ${RELOC_TARGET}/$(dirname ${i})"
+ done
+}
+
+# @FUNCTION: texlive-module_src_prepare
+# @DESCRIPTION:
+# Apply patches from the PATCHES array and user patches, if any.
+
+texlive-module_src_prepare() {
+ # Eclass/ebuild patches first, then user patches from /etc/portage/patches.
+ [[ ${#PATCHES[@]} -gt 0 ]] && epatch "${PATCHES[@]}"
+ epatch_user
+}
+
+# @FUNCTION: texlive-module_add_format
+# @DESCRIPTION:
+# Creates/appends to a format.${PN}.cnf file for fmtutil.
+# It parses the AddFormat directive of tlpobj files to create it.
+# This will make fmtutil generate the formats when asked and allow the remaining
+# src_compile phase to build the formats.
+
+texlive-module_add_format() {
+ local name engine mode patterns options
+ # The arguments are the key=value pairs of a tlpobj AddFormat directive;
+ # eval assigns them into the locals declared above (trusted tree data).
+ eval $@
+ einfo "Appending to format.${PN}.cnf for $@"
+ [ -d texmf-dist/fmtutil ] || mkdir -p texmf-dist/fmtutil
+ [ -f texmf-dist/fmtutil/format.${PN}.cnf ] || { echo "# Generated for ${PN} by texlive-module.eclass" > texmf-dist/fmtutil/format.${PN}.cnf; }
+ # An optional engine that is USE-disabled gets its format line commented out.
+ [ -n "${TEXLIVE_MODULE_OPTIONAL_ENGINE}" ] && has ${engine} ${TEXLIVE_MODULE_OPTIONAL_ENGINE} && use !${engine} && mode="disabled"
+ if [ "${mode}" = "disabled" ]; then
+ printf "#! " >> texmf-dist/fmtutil/format.${PN}.cnf
+ fi
+ [ -z "${patterns}" ] && patterns="-"
+ printf "${name}\t${engine}\t${patterns}\t${options}\n" >> texmf-dist/fmtutil/format.${PN}.cnf
+}
+
+# @FUNCTION: texlive-module_make_language_def_lines
+# @DESCRIPTION:
+# Creates a language.${PN}.def entry to put in /etc/texmf/language.def.d.
+# It parses the AddHyphen directive of tlpobj files to create it.
+
+texlive-module_make_language_def_lines() {
+ local lefthyphenmin righthyphenmin synonyms name file file_patterns file_exceptions luaspecial
+ # Arguments are key=value pairs from a tlpobj AddHyphen directive.
+ eval $@
+ einfo "Generating language.def entry for $@"
+ # TeX defaults when the directive does not specify hyphenation minima.
+ [ -z "$lefthyphenmin" ] && lefthyphenmin="2"
+ [ -z "$righthyphenmin" ] && righthyphenmin="3"
+ echo "\\addlanguage{$name}{$file}{}{$lefthyphenmin}{$righthyphenmin}" >> "${S}/language.${PN}.def"
+ if [ -n "$synonyms" ] ; then
+ # Each comma-separated synonym gets its own \addlanguage line.
+ for i in $(echo $synonyms | tr ',' ' ') ; do
+ einfo "Generating language.def synonym $i for $@"
+ echo "\\addlanguage{$i}{$file}{}{$lefthyphenmin}{$righthyphenmin}" >> "${S}/language.${PN}.def"
+ done
+ fi
+}
+
+# @FUNCTION: texlive-module_make_language_dat_lines
+# @DESCRIPTION:
+# Creates a language.${PN}.dat entry to put in /etc/texmf/language.dat.d.
+# It parses the AddHyphen directive of tlpobj files to create it.
+
+texlive-module_make_language_dat_lines() {
+ local lefthyphenmin righthyphenmin synonyms name file file_patterns file_exceptions luaspecial
+ # Arguments are key=value pairs from a tlpobj AddHyphen directive.
+ eval $@
+ einfo "Generating language.dat entry for $@"
+ echo "$name $file" >> "${S}/language.${PN}.dat"
+ if [ -n "$synonyms" ] ; then
+ # language.dat synonym syntax: a line "=name" following the main entry.
+ for i in $(echo $synonyms | tr ',' ' ') ; do
+ einfo "Generating language.dat synonym $i for $@"
+ echo "=$i" >> "${S}/language.${PN}.dat"
+ done
+ fi
+}
+
+# @FUNCTION: texlive-module_synonyms_to_language_lua_line
+# @DESCRIPTION:
+# Helper function for texlive-module_make_language_lua_lines to generate a
+# correctly formatted synonyms entry for language.dat.lua.
+
+texlive-module_synonyms_to_language_lua_line() {
+ local prev=""
+ # Emit " 'a', 'b', 'c'" from a comma-separated list; ${prev} supplies the
+ # separating comma for every element after the first.
+ # NOTE(review): loop variable i is not declared local and leaks into the
+ # caller's scope — harmless for current callers, but worth confirming.
+ for i in $(echo $@ | tr ',' ' ') ; do
+ printf "${prev} '%s'" $i
+ prev=","
+ done
+}
+
+# @FUNCTION: texlive-module_make_language_lua_lines
+# @DESCRIPTION:
+# Only valid for TeXLive 2010 and later.
+# Creates a language.${PN}.dat.lua entry to put in
+# /etc/texmf/language.dat.lua.d.
+# It parses the AddHyphen directive of tlpobj files to create it.
+
+texlive-module_make_language_lua_lines() {
+ local lefthyphenmin righthyphenmin synonyms name file file_patterns file_exceptions luaspecial
+ local dest="${S}/language.${PN}.dat.lua"
+ # Arguments are key=value pairs from a tlpobj AddHyphen directive.
+ eval $@
+ [ -z "$lefthyphenmin" ] && lefthyphenmin="2"
+ [ -z "$righthyphenmin" ] && righthyphenmin="3"
+ einfo "Generating language.dat.lua entry for $@"
+ # Emit one Lua table entry; optional fields are only written when set.
+ printf "\t['%s'] = {\n" "$name" >> "$dest"
+ printf "\t\tloader = '%s',\n" "$file" >> "$dest"
+ printf "\t\tlefthyphenmin = %s,\n\t\trighthyphenmin = %s,\n" "$lefthyphenmin" "$righthyphenmin" >> "$dest"
+ printf "\t\tsynonyms = {%s },\n" "$(texlive-module_synonyms_to_language_lua_line "$synonyms")" >> "$dest"
+ [ -n "$file_patterns" ] && printf "\t\tpatterns = '%s',\n" "$file_patterns" >> "$dest"
+ [ -n "$file_exceptions" ] && printf "\t\thyphenation = '%s',\n" "$file_exceptions" >> "$dest"
+ [ -n "$luaspecial" ] && printf "\t\tspecial = '%s',\n" "$luaspecial" >> "$dest"
+ printf "\t},\n" >> "$dest"
+}
+
+# @FUNCTION: texlive-module_src_compile
+# @DESCRIPTION:
+# exported function:
+# Generates the config files that are to be installed in /etc/texmf;
+# texmf-update script will take care of merging the different config files for
+# different packages in a single one used by the whole tex installation.
+#
+# Once the config files are generated, we build the format files using fmtutil
+# (provided by texlive-core). The compiled format files will be sent to
+# texmf-var/web2c, like fmtutil defaults to but with some trick to stay in the
+# sandbox.
+
+texlive-module_src_compile() {
+ # Generate config files from the tlpobj files provided by TeX Live 2008 and
+ # later
+ # Each "execute" directive becomes one job; spaces/tabs are encoded as '#'
+ # so a job survives word-splitting as a single token.
+ for i in "${S}"/tlpkg/tlpobj/*;
+ do
+ grep '^execute ' "${i}" | sed -e 's/^execute //' | tr ' \t' '##' |sort|uniq >> "${T}/jobs"
+ done
+
+ for i in $(<"${T}/jobs");
+ do
+ # Decode the job back into "command parameter...".
+ j="$(echo $i | tr '#' ' ')"
+ command=${j%% *}
+ parameter=${j#* }
+ case "${command}" in
+ addMap)
+ echo "Map ${parameter}" >> "${S}/${PN}.cfg";;
+ addMixedMap)
+ echo "MixedMap ${parameter}" >> "${S}/${PN}.cfg";;
+ addKanjiMap)
+ echo "KanjiMap ${parameter}" >> "${S}/${PN}.cfg";;
+ addDvipsMap)
+ echo "p +${parameter}" >> "${S}/${PN}-config.ps";;
+ addDvipdfmMap)
+ echo "f ${parameter}" >> "${S}/${PN}-config";;
+ AddHyphen)
+ texlive-module_make_language_def_lines "$parameter"
+ texlive-module_make_language_dat_lines "$parameter"
+ texlive-module_make_language_lua_lines "$parameter"
+ ;;
+ AddFormat)
+ texlive-module_add_format "$parameter";;
+ BuildFormat)
+ einfo "Format $parameter already built.";;
+ BuildLanguageDat)
+ einfo "Language file $parameter already generated.";;
+ *)
+ # TODO(review): typo in user-visible message, "proccess" -> "process".
+ die "No rule to proccess ${command}. Please file a bug."
+ esac
+ done
+
+ # Build format files
+ for i in texmf-dist/fmtutil/format*.cnf; do
+ if [ -f "${i}" ]; then
+ einfo "Building format ${i}"
+ [ -d texmf-var ] || mkdir texmf-var
+ [ -d texmf-var/web2c ] || mkdir texmf-var/web2c
+ # Point the TeX search paths into ${S}/${T} so fmtutil stays inside
+ # the sandbox instead of touching the live system trees.
+ VARTEXFONTS="${T}/fonts" TEXMFHOME="${S}/texmf:${S}/texmf-dist:${S}/texmf-var"\
+ env -u TEXINPUTS fmtutil --cnffile "${i}" --fmtdir "${S}/texmf-var/web2c" --all\
+ || die "failed to build format ${i}"
+ fi
+ done
+
+ # Delete ls-R files, these should not be created but better be certain they
+ # do not end up being installed.
+ find . -name 'ls-R' -delete
+}
+
+# @FUNCTION: texlive-module_src_install
+# @DESCRIPTION:
+# exported function:
+# Installs texmf and config files to the system.
+
+texlive-module_src_install() {
+ # Install symlinks for the format engines listed in the generated cnf files.
+ for i in texmf-dist/fmtutil/format*.cnf; do
+ [ -f "${i}" ] && etexlinks "${i}"
+ done
+
+ dodir /usr/share
+ # Documentation packages (PN contains "documentation") always install docs;
+ # other packages only with USE=doc, otherwise the doc subtrees are pruned.
+ if [ -z "${PN##*documentation*}" ] || use doc; then
+ [ -d texmf-doc ] && cp -pR texmf-doc "${ED}/usr/share/"
+ else
+ [ -d texmf/doc ] && rm -rf texmf/doc
+ [ -d texmf-dist/doc ] && rm -rf texmf-dist/doc
+ fi
+
+ [ -d texmf ] && cp -pR texmf "${ED}/usr/share/"
+ [ -d texmf-dist ] && cp -pR texmf-dist "${ED}/usr/share/"
+ [ -d tlpkg ] && use source && cp -pR tlpkg "${ED}/usr/share/"
+
+ # Pre-built format files go to the writable tree used at runtime.
+ insinto /var/lib/texmf
+ [ -d texmf-var ] && doins -r texmf-var/*
+
+ # Config snippets generated in src_compile, merged later by texmf-update.
+ insinto /etc/texmf/updmap.d
+ [ -f "${S}/${PN}.cfg" ] && doins "${S}/${PN}.cfg"
+ insinto /etc/texmf/dvips.d
+ [ -f "${S}/${PN}-config.ps" ] && doins "${S}/${PN}-config.ps"
+ insinto /etc/texmf/dvipdfm/config
+ [ -f "${S}/${PN}-config" ] && doins "${S}/${PN}-config"
+
+ if [ -f "${S}/language.${PN}.def" ] ; then
+ insinto /etc/texmf/language.def.d
+ doins "${S}/language.${PN}.def"
+ fi
+
+ if [ -f "${S}/language.${PN}.dat" ] ; then
+ insinto /etc/texmf/language.dat.d
+ doins "${S}/language.${PN}.dat"
+ fi
+
+ if [ -f "${S}/language.${PN}.dat.lua" ] ; then
+ insinto /etc/texmf/language.dat.lua.d
+ doins "${S}/language.${PN}.dat.lua"
+ fi
+
+ [ -n "${TEXLIVE_MODULE_BINSCRIPTS}" ] && dobin_texmf_scripts ${TEXLIVE_MODULE_BINSCRIPTS}
+ if [ -n "${TEXLIVE_MODULE_BINLINKS}" ] ; then
+ # BINLINKS entries are "target:linkname"; the target must have been
+ # installed by dobin_texmf_scripts above.
+ for i in ${TEXLIVE_MODULE_BINLINKS} ; do
+ [ -f "${ED}/usr/bin/${i%:*}" ] || die "Trying to install an invalid BINLINK. This should not happen. Please file a bug."
+ dosym ${i%:*} /usr/bin/${i#*:}
+ done
+ fi
+
+ texlive-common_handle_config_files
+ TEXMF_PATH=${TEXMF_DIST_PATH} texlive-common_handle_config_files
+}
+
+# @FUNCTION: texlive-module_pkg_postinst
+# @DESCRIPTION:
+# exported function:
+# Run texmf-update to ensure the tex installation is consistent with the
+# installed texmf trees.
+
+texlive-module_pkg_postinst() {
+ etexmf-update
+ # Surface any module-specific post-install notes to the user.
+ [ -n "${TL_MODULE_INFORMATION}" ] && elog "${TL_MODULE_INFORMATION}"
+}
+
+# @FUNCTION: texlive-module_pkg_postrm
+# @DESCRIPTION:
+# exported function:
+# Run texmf-update to ensure the tex installation is consistent with the
+# installed texmf trees.
+
+texlive-module_pkg_postrm() {
+ etexmf-update
+}
+
+EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install \
+ pkg_postinst pkg_postrm
diff --git a/eclass/toolchain-binutils.eclass b/eclass/toolchain-binutils.eclass
new file mode 100644
index 000000000000..f13506085bc4
--- /dev/null
+++ b/eclass/toolchain-binutils.eclass
@@ -0,0 +1,498 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Maintainer: Toolchain Ninjas <toolchain@gentoo.org>
+#
+# We install binutils into CTARGET-VERSION specific directories. This lets
+# us easily merge multiple versions for multiple targets (if we wish) and
+# then switch the versions on the fly (with `binutils-config`).
+#
+# binutils-9999 -> live git
+# binutils-9999_preYYMMDD -> nightly snapshot date YYMMDD
+# binutils-# -> normal release
+
+# Determine the release type (git / snapshot / H.J. Lu / release) from PV
+# unless the ebuild forces it via BINUTILS_TYPE.
+if [[ -n ${BINUTILS_TYPE} ]] ; then
+ BTYPE=${BINUTILS_TYPE}
+else
+ case ${PV} in
+ 9999) BTYPE="git";;
+ 9999_pre*) BTYPE="snap";;
+ *.*.90) BTYPE="snap";;
+ *.*.*.*.*) BTYPE="hjlu";;
+ *) BTYPE="rel";;
+ esac
+fi
+
+# BVER is the upstream tarball version derived from BTYPE.
+case ${BTYPE} in
+git)
+ BVER="git"
+ EGIT_REPO_URI="git://sourceware.org/git/binutils-gdb.git"
+ inherit git-2
+ ;;
+snap)
+ BVER=${PV/9999_pre}
+ ;;
+*)
+ BVER=${BINUTILS_VER:-${PV}}
+ ;;
+esac
+
+inherit eutils libtool flag-o-matic gnuconfig multilib versionator unpacker
+# EAPI 0/1 lack a separate src_prepare/src_configure phase.
+case ${EAPI:-0} in
+0|1)
+ EXPORT_FUNCTIONS src_unpack src_compile src_test src_install pkg_postinst pkg_postrm ;;
+2|3|4|5)
+ EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_test src_install pkg_postinst pkg_postrm ;;
+*) die "unsupported EAPI ${EAPI}" ;;
+esac
+
+# Cross packages (category cross-*) derive CTARGET from the category name.
+export CTARGET=${CTARGET:-${CHOST}}
+if [[ ${CTARGET} == ${CHOST} ]] ; then
+ if [[ ${CATEGORY} == cross-* ]] ; then
+ export CTARGET=${CATEGORY#cross-}
+ fi
+fi
+is_cross() { [[ ${CHOST} != ${CTARGET} ]] ; }
+
+DESCRIPTION="Tools necessary to build programs"
+HOMEPAGE="http://sourceware.org/binutils/"
+
+case ${BTYPE} in
+ git) SRC_URI="" ;;
+ snap)
+ SRC_URI="ftp://gcc.gnu.org/pub/binutils/snapshots/binutils-${BVER}.tar.bz2
+ ftp://sourceware.org/pub/binutils/snapshots/binutils-${BVER}.tar.bz2" ;;
+ hjlu)
+ SRC_URI="mirror://kernel/linux/devel/binutils/binutils-${BVER}.tar."
+ version_is_at_least 2.21.51.0.5 && SRC_URI+="xz" || SRC_URI+="bz2" ;;
+ rel) SRC_URI="mirror://gnu/binutils/binutils-${BVER}.tar.bz2" ;;
+esac
+# add_src_uri <tarball-basename-without-compression-suffix> <version-guard>
+# Appends mirror + dev.gentoo.org URIs for an optional patchset tarball.
+# No-op when the guard variable ($2) is empty.
+add_src_uri() {
+ [[ -z $2 ]] && return
+ local a=$1
+ if version_is_at_least 2.22.52.0.2 ; then
+ a+=".xz"
+ else
+ a+=".bz2"
+ fi
+ # ${@/%//${a}} appends /${a} to each mirror base.
+ set -- mirror://gentoo http://dev.gentoo.org/~vapier/dist
+ SRC_URI="${SRC_URI} ${@/%//${a}}"
+}
+add_src_uri binutils-${BVER}-patches-${PATCHVER}.tar ${PATCHVER}
+add_src_uri binutils-${BVER}-uclibc-patches-${UCLIBC_PATCHVER}.tar ${UCLIBC_PATCHVER}
+add_src_uri elf2flt-${ELF2FLT_VER}.tar ${ELF2FLT_VER}
+
+# binutils relicensed to GPL-3 with the 2.18 release.
+if version_is_at_least 2.18 ; then
+ LICENSE="|| ( GPL-3 LGPL-3 )"
+else
+ LICENSE="|| ( GPL-2 LGPL-2 )"
+fi
+IUSE="cxx multislot multitarget nls static-libs test vanilla"
+if version_is_at_least 2.19 ; then
+ IUSE+=" zlib"
+fi
+# Modern native releases live in SLOT 0; old/cross/multislot installs get a
+# version-specific slot so multiple versions can coexist.
+if ! version_is_at_least 2.23.90 || [[ ${BTYPE} != "rel" ]] || is_cross || use multislot ; then
+ SLOT="${BVER}"
+else
+ SLOT="0"
+fi
+
+RDEPEND=">=sys-devel/binutils-config-3"
+in_iuse zlib && RDEPEND+=" zlib? ( sys-libs/zlib )"
+DEPEND="${RDEPEND}
+ test? ( dev-util/dejagnu )
+ nls? ( sys-devel/gettext )
+ sys-devel/flex
+ virtual/yacc"
+
+S=${WORKDIR}/binutils
+case ${BVER} in
+git) ;;
+*) S=${S}-${BVER} ;;
+esac
+
+# CTARGET/BVER-specific install locations so binutils-config can switch
+# between installed versions (see header comment).
+LIBPATH=/usr/$(get_libdir)/binutils/${CTARGET}/${BVER}
+INCPATH=${LIBPATH}/include
+DATAPATH=/usr/share/binutils-data/${CTARGET}/${BVER}
+MY_BUILDDIR=${WORKDIR}/build
+if is_cross ; then
+ BINPATH=/usr/${CHOST}/${CTARGET}/binutils-bin/${BVER}
+else
+ BINPATH=/usr/${CTARGET}/binutils-bin/${BVER}
+fi
+
+tc-binutils_unpack() {
+ case ${BTYPE} in
+ git) git-2_src_unpack ;;
+ *) unpacker ${A} ;;
+ esac
+ mkdir -p "${MY_BUILDDIR}"
+ # An (empty) patch/skip dir marks patches that should not be applied.
+ [[ -d ${WORKDIR}/patch ]] && mkdir "${WORKDIR}"/patch/skip
+}
+
+# In case the ebuild wants to add a few of their own.
+PATCHES=()
+
+tc-binutils_apply_patches() {
+ cd "${S}"
+
+ if ! use vanilla ; then
+ if [[ -n ${PATCHVER} ]] ; then
+ EPATCH_SOURCE=${WORKDIR}/patch
+ if [[ ${CTARGET} == mips* ]] ; then
+ # remove gnu-hash for mips (bug #233233)
+ EPATCH_EXCLUDE+=" 77_all_generate-gnu-hash.patch"
+ fi
+ # Patchsets may ship compressed or plain patches.
+ [[ -n $(ls "${EPATCH_SOURCE}"/*.bz2 2>/dev/null) ]] \
+ && EPATCH_SUFFIX="patch.bz2" \
+ || EPATCH_SUFFIX="patch"
+ epatch
+ fi
+ if [[ -n ${UCLIBC_PATCHVER} ]] ; then
+ EPATCH_SOURCE=${WORKDIR}/uclibc-patches
+ [[ -n $(ls "${EPATCH_SOURCE}"/*.bz2 2>/dev/null) ]] \
+ && EPATCH_SUFFIX="patch.bz2" \
+ || EPATCH_SUFFIX="patch"
+ EPATCH_MULTI_MSG="Applying uClibc fixes ..." \
+ epatch
+ elif [[ ${CTARGET} == *-uclibc* ]] ; then
+ # starting with binutils-2.17.50.0.17, we no longer need
+ # uClibc patchsets :D
+ if grep -qs 'linux-gnu' "${S}"/ltconfig ; then
+ die "sorry, but this binutils doesn't yet support uClibc :("
+ fi
+ fi
+ [[ ${#PATCHES[@]} -gt 0 ]] && epatch "${PATCHES[@]}"
+ epatch_user
+ fi
+
+ # fix locale issues if possible #122216
+ if [[ -e ${FILESDIR}/binutils-configure-LANG.patch ]] ; then
+ einfo "Fixing misc issues in configure files"
+ for f in $(find "${S}" -name configure -exec grep -l 'autoconf version 2.13' {} +) ; do
+ ebegin " Updating ${f/${S}\/}"
+ patch "${f}" "${FILESDIR}"/binutils-configure-LANG.patch >& "${T}"/configure-patch.log \
+ || eerror "Please file a bug about this"
+ eend $?
+ done
+ fi
+ # fix conflicts with newer glibc #272594
+ if [[ -e libiberty/testsuite/test-demangle.c ]] ; then
+ sed -i 's:\<getline\>:get_line:g' libiberty/testsuite/test-demangle.c
+ fi
+
+ # Fix po Makefile generators
+ sed -i \
+ -e '/^datadir = /s:$(prefix)/@DATADIRNAME@:@datadir@:' \
+ -e '/^gnulocaledir = /s:$(prefix)/share:$(datadir):' \
+ */po/Make-in || die "sed po's failed"
+
+ # Run misc portage update scripts
+ gnuconfig_update
+ elibtoolize --portage --no-uclibc
+}
+
+toolchain-binutils_src_unpack() {
+ tc-binutils_unpack
+ # EAPI 0/1 have no separate src_prepare phase, so run it from here.
+ case ${EAPI:-0} in
+ 0|1) toolchain-binutils_src_prepare ;;
+ esac
+}
+
+toolchain-binutils_src_prepare() {
+ tc-binutils_apply_patches
+}
+
+# Provide prefix variable fallbacks for EAPIs that predate EPREFIX/ED/EROOT.
+_eprefix_init() {
+ has "${EAPI:-0}" 0 1 2 && ED=${D} EPREFIX= EROOT=${ROOT}
+}
+
+# Intended for ebuilds to override to set their own versioning information.
+toolchain-binutils_bugurl() {
+ printf "http://bugs.gentoo.org/"
+}
+toolchain-binutils_pkgversion() {
+ # e.g. "Gentoo 2.25 p1.0" when a patchset is applied.
+ printf "Gentoo ${BVER}"
+ [[ -n ${PATCHVER} ]] && printf " p${PATCHVER}"
+}
+
+toolchain-binutils_src_configure() {
+ _eprefix_init
+
+ # make sure we filter $LINGUAS so that only ones that
+ # actually work make it through #42033
+ strip-linguas -u */po
+
+ # keep things sane
+ strip-flags
+
+ local x
+ echo
+ for x in CATEGORY CBUILD CHOST CTARGET CFLAGS LDFLAGS ; do
+ einfo "$(printf '%10s' ${x}:) ${!x}"
+ done
+ echo
+
+ cd "${MY_BUILDDIR}"
+ local myconf=()
+
+ # enable gold if available (installed as ld.gold)
+ # Probe configure's help text to pick the gold flag this version supports.
+ if use cxx ; then
+ if grep -q 'enable-gold=default' "${S}"/configure ; then
+ myconf+=( --enable-gold )
+ # old ways - remove when 2.21 is stable
+ elif grep -q 'enable-gold=both/ld' "${S}"/configure ; then
+ myconf+=( --enable-gold=both/ld )
+ elif grep -q 'enable-gold=both/bfd' "${S}"/configure ; then
+ myconf+=( --enable-gold=both/bfd )
+ fi
+ if grep -q -e '--enable-plugins' "${S}"/ld/configure ; then
+ myconf+=( --enable-plugins )
+ fi
+ fi
+
+ use nls \
+ && myconf+=( --without-included-gettext ) \
+ || myconf+=( --disable-nls )
+
+ if in_iuse zlib ; then
+ # older versions did not have an explicit configure flag
+ export ac_cv_search_zlibVersion=$(usex zlib -lz no)
+ myconf+=( $(use_with zlib) )
+ fi
+
+ # For bi-arch systems, enable a 64bit bfd. This matches
+ # the bi-arch logic in toolchain.eclass. #446946
+ # We used to do it for everyone, but it's slow on 32bit arches. #438522
+ case $(tc-arch) in
+ ppc|sparc|x86) myconf+=( --enable-64-bit-bfd ) ;;
+ esac
+
+ use multitarget && myconf+=( --enable-targets=all --enable-64-bit-bfd )
+ [[ -n ${CBUILD} ]] && myconf+=( --build=${CBUILD} )
+ is_cross && myconf+=(
+ --with-sysroot="${EPREFIX}"/usr/${CTARGET}
+ --enable-poison-system-directories
+ )
+
+ # glibc-2.3.6 lacks support for this ... so rather than force glibc-2.5+
+ # on everyone in alpha (for now), we'll just enable it when possible
+ # NOTE(review): the first check covers cross-*/glibc categories, the
+ # second the native sys-libs/glibc — presumably intentional; confirm.
+ has_version ">=${CATEGORY}/glibc-2.5" && myconf+=( --enable-secureplt )
+ has_version ">=sys-libs/glibc-2.5" && myconf+=( --enable-secureplt )
+
+ myconf+=(
+ --prefix="${EPREFIX}"/usr
+ --host=${CHOST}
+ --target=${CTARGET}
+ --datadir="${EPREFIX}"${DATAPATH}
+ --infodir="${EPREFIX}"${DATAPATH}/info
+ --mandir="${EPREFIX}"${DATAPATH}/man
+ --bindir="${EPREFIX}"${BINPATH}
+ --libdir="${EPREFIX}"${LIBPATH}
+ --libexecdir="${EPREFIX}"${LIBPATH}
+ --includedir="${EPREFIX}"${INCPATH}
+ --enable-obsolete
+ --enable-shared
+ --enable-threads
+ # Newer versions (>=2.24) make this an explicit option. #497268
+ --enable-install-libiberty
+ --disable-werror
+ --with-bugurl="$(toolchain-binutils_bugurl)"
+ --with-pkgversion="$(toolchain-binutils_pkgversion)"
+ $(use_enable static-libs static)
+ ${EXTRA_ECONF}
+ # Disable modules that are in a combined binutils/gdb tree. #490566
+ --disable-{gdb,libdecnumber,readline,sim}
+ # Strip out broken static link flags.
+ # https://gcc.gnu.org/PR56750
+ --without-stage1-ldflags
+ )
+ echo ./configure "${myconf[@]}"
+ "${S}"/configure "${myconf[@]}" || die
+
+ # Prevent makeinfo from running in releases. It may not always be
+ # installed, and older binutils may fail with newer texinfo.
+ # Besides, we never patch the doc files anyways, so regenerating
+ # in the first place is useless. #193364
+ # For older versions, it means we don't get any info pages at all.
+ # Oh well, tough luck. #294617
+ if [[ -e ${S}/gas/doc/as.info ]] || ! version_is_at_least 2.24 ; then
+ sed -i \
+ -e '/^MAKEINFO/s:=.*:= true:' \
+ Makefile || die
+ fi
+}
+
+toolchain-binutils_src_compile() {
+ _eprefix_init
+ # EAPI 0/1 have no separate src_configure phase, so run it from here.
+ case ${EAPI:-0} in
+ 0|1) toolchain-binutils_src_configure ;;
+ esac
+
+ cd "${MY_BUILDDIR}"
+ emake all || die "emake failed"
+
+ # only build info pages if the user wants them, and if
+ # we have makeinfo (may not exist when we bootstrap)
+ if type -p makeinfo > /dev/null ; then
+ emake info || die "make info failed"
+ fi
+ # we nuke the manpages when we're left with junk
+ # (like when we bootstrap, no perl -> no manpages)
+ find . -name '*.1' -a -size 0 -delete
+
+ # elf2flt only works on some arches / targets
+ if [[ -n ${ELF2FLT_VER} ]] && [[ ${CTARGET} == *linux* || ${CTARGET} == *-elf* ]] ; then
+ cd "${WORKDIR}"/elf2flt-${ELF2FLT_VER}
+
+ # Scrape the arches elf2flt.c knows about and see if CTARGET matches one.
+ local x supported_arches=$(sed -n '/defined(TARGET_/{s:^.*TARGET_::;s:)::;p}' elf2flt.c | sort -u)
+ for x in ${supported_arches} UNSUPPORTED ; do
+ [[ ${CTARGET} == ${x}* ]] && break
+ done
+
+ if [[ ${x} != "UNSUPPORTED" ]] ; then
+ append-flags -I"${S}"/include
+ # NOTE(review): myconf is not reset here — it extends the array left
+ # over from src_configure; presumably intentional, but worth confirming.
+ myconf+=(
+ --with-bfd-include-dir=${MY_BUILDDIR}/bfd
+ --with-libbfd=${MY_BUILDDIR}/bfd/libbfd.a
+ --with-libiberty=${MY_BUILDDIR}/libiberty/libiberty.a
+ --with-binutils-ldscript-dir="${EPREFIX}"${LIBPATH}/ldscripts
+ )
+ echo ./configure "${myconf[@]}"
+ ./configure "${myconf[@]}" || die
+ emake || die "make elf2flt failed"
+ fi
+ fi
+}
+
+toolchain-binutils_src_test() {
+ cd "${MY_BUILDDIR}"
+ # -k: keep going so all testsuite results are collected before failing.
+ emake -k check || die "check failed :("
+}
+
+toolchain-binutils_src_install() {
+ _eprefix_init
+ local x d
+
+ cd "${MY_BUILDDIR}"
+ emake DESTDIR="${D}" tooldir="${EPREFIX}${LIBPATH}" install || die
+ rm -rf "${ED}"/${LIBPATH}/bin
+ use static-libs || find "${ED}" -name '*.la' -delete
+
+ # Newer versions of binutils get fancy with ${LIBPATH} #171905
+ # Collapse any sibling version dirs into the current ${BVER} dir.
+ cd "${ED}"/${LIBPATH}
+ for d in ../* ; do
+ [[ ${d} == ../${BVER} ]] && continue
+ mv ${d}/* . || die
+ rmdir ${d} || die
+ done
+
+ # Now we collect everything into the proper SLOT-ed dirs
+ # When something is built to cross-compile, it installs into
+ # /usr/$CHOST/ by default ... we have to 'fix' that :)
+ if is_cross ; then
+ cd "${ED}"/${BINPATH}
+ for x in * ; do
+ # Drop the ${CTARGET}- prefix; the dir itself is target-specific.
+ mv ${x} ${x/${CTARGET}-}
+ done
+
+ if [[ -d ${ED}/usr/${CHOST}/${CTARGET} ]] ; then
+ mv "${ED}"/usr/${CHOST}/${CTARGET}/include "${ED}"/${INCPATH}
+ mv "${ED}"/usr/${CHOST}/${CTARGET}/lib/* "${ED}"/${LIBPATH}/
+ rm -r "${ED}"/usr/${CHOST}/{include,lib}
+ fi
+ fi
+ insinto ${INCPATH}
+ local libiberty_headers=(
+ # Not all the libiberty headers. See libiberty/Makefile.in:install_to_libdir.
+ demangle.h
+ dyn-string.h
+ fibheap.h
+ hashtab.h
+ libiberty.h
+ objalloc.h
+ splay-tree.h
+ )
+ doins "${libiberty_headers[@]/#/${S}/include/}" || die
+ if [[ -d ${ED}/${LIBPATH}/lib ]] ; then
+ mv "${ED}"/${LIBPATH}/lib/* "${ED}"/${LIBPATH}/
+ rm -r "${ED}"/${LIBPATH}/lib
+ fi
+
+ # Insert elf2flt where appropriate
+ # The real ld is kept as ld.real; the ld-elf2flt wrapper takes its place.
+ if [[ -x ${WORKDIR}/elf2flt-${ELF2FLT_VER}/elf2flt ]] ; then
+ cd "${WORKDIR}"/elf2flt-${ELF2FLT_VER}
+ insinto ${LIBPATH}/ldscripts
+ doins elf2flt.ld || die "doins elf2flt.ld failed"
+ exeinto ${BINPATH}
+ doexe elf2flt flthdr || die "doexe elf2flt flthdr failed"
+ mv "${ED}"/${BINPATH}/{ld,ld.real} || die
+ newexe ld-elf2flt ld || die "doexe ld-elf2flt failed"
+ newdoc README README.elf2flt
+ fi
+
+ # Generate an env.d entry for this binutils
+ insinto /etc/env.d/binutils
+ cat <<-EOF > "${T}"/env.d
+ TARGET="${CTARGET}"
+ VER="${BVER}"
+ LIBPATH="${EPREFIX}${LIBPATH}"
+ EOF
+ newins "${T}"/env.d ${CTARGET}-${BVER}
+
+ # Handle documentation
+ if ! is_cross ; then
+ cd "${S}"
+ dodoc README
+ docinto bfd
+ dodoc bfd/ChangeLog* bfd/README bfd/PORTING bfd/TODO
+ docinto binutils
+ dodoc binutils/ChangeLog binutils/NEWS binutils/README
+ docinto gas
+ dodoc gas/ChangeLog* gas/CONTRIBUTORS gas/NEWS gas/README*
+ docinto gprof
+ dodoc gprof/ChangeLog* gprof/TEST gprof/TODO gprof/bbconv.pl
+ docinto ld
+ dodoc ld/ChangeLog* ld/README ld/NEWS ld/TODO
+ docinto libiberty
+ dodoc libiberty/ChangeLog* libiberty/README
+ docinto opcodes
+ dodoc opcodes/ChangeLog*
+ fi
+ # Remove shared info pages
+ rm -f "${ED}"/${DATAPATH}/info/{dir,configure.info,standards.info}
+ # Trim all empty dirs
+ find "${ED}" -depth -type d -exec rmdir {} + 2>/dev/null
+}
+
+toolchain-binutils_pkg_postinst() {
+ _eprefix_init
+ # Make sure this ${CTARGET} has a binutils version selected
+ [[ -e ${EROOT}/etc/env.d/binutils/config-${CTARGET} ]] && return 0
+ binutils-config ${CTARGET}-${BVER}
+}
+
+toolchain-binutils_pkg_postrm() {
+ _eprefix_init
+ local current_profile=$(binutils-config -c ${CTARGET})
+
+ # If no other versions exist, then uninstall for this
+ # target ... otherwise, switch to the newest version
+ # Note: only do this if this version is unmerged. We
+ # rerun binutils-config if this is a remerge, as
+ # we want the mtimes on the symlinks updated (if
+ # it is the same as the current selected profile)
+ if [[ ! -e ${EPREFIX}${BINPATH}/ld ]] && [[ ${current_profile} == ${CTARGET}-${BVER} ]] ; then
+ # Flatten the profile list, then keep only the last entry
+ # (${choice/* } strips everything up to the final space).
+ local choice=$(binutils-config -l | grep ${CTARGET} | awk '{print $2}')
+ choice=${choice//$'\n'/ }
+ choice=${choice/* }
+ if [[ -z ${choice} ]] ; then
+ env -i ROOT="${ROOT}" binutils-config -u ${CTARGET}
+ else
+ binutils-config ${choice}
+ fi
+ elif [[ $(CHOST=${CTARGET} binutils-config -c) == ${CTARGET}-${BVER} ]] ; then
+ binutils-config ${CTARGET}-${BVER}
+ fi
+}
diff --git a/eclass/toolchain-funcs.eclass b/eclass/toolchain-funcs.eclass
new file mode 100644
index 000000000000..19a1cd8adb20
--- /dev/null
+++ b/eclass/toolchain-funcs.eclass
@@ -0,0 +1,839 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: toolchain-funcs.eclass
+# @MAINTAINER:
+# Toolchain Ninjas <toolchain@gentoo.org>
+# @BLURB: functions to query common info about the toolchain
+# @DESCRIPTION:
+# The toolchain-funcs aims to provide a complete suite of functions
+# for gleaning useful information about the toolchain and to simplify
+# ugly things like cross-compiling and multilib. All of this is done
+# in such a way that you can rely on the function always returning
+# something sane.
+
+if [[ -z ${_TOOLCHAIN_FUNCS_ECLASS} ]]; then
+_TOOLCHAIN_FUNCS_ECLASS=1
+
+inherit multilib
+
+# tc-getPROG <VAR [search vars]> <default> [tuple]
+# Internal resolver for toolchain program names.  The first non-empty
+# variable in ${vars} wins; otherwise fall back to ${prog}, preferring a
+# tuple-prefixed tool found in PATH.  Side effect: exports the primary
+# variable (first word of ${vars}) to the resolved value.
+_tc-getPROG() {
+	local tuple=$1
+	local v var vars=$2
+	local prog=$3
+
+	# ${var} is the primary variable (e.g. CC) that gets exported.
+	var=${vars%% *}
+	for v in ${vars} ; do
+		if [[ -n ${!v} ]] ; then
+			# User/profile-provided value takes precedence; re-export
+			# it under the primary name and echo it for the caller.
+			export ${var}="${!v}"
+			echo "${!v}"
+			return 0
+		fi
+	done
+
+	# No variable set: look for a tuple-prefixed tool in PATH,
+	# trying the explicit tuple ($4) before the one named by $1.
+	local search=
+	[[ -n $4 ]] && search=$(type -p "$4-${prog}")
+	[[ -z ${search} && -n ${!tuple} ]] && search=$(type -p "${!tuple}-${prog}")
+	[[ -n ${search} ]] && prog=${search##*/}
+
+	export ${var}=${prog}
+	echo "${!var}"
+}
+# Build-machine variant honors BUILD_CC, CC_FOR_BUILD, HOSTCC, etc.
+tc-getBUILD_PROG() { _tc-getPROG CBUILD "BUILD_$1 $1_FOR_BUILD HOST$1" "${@:2}"; }
+tc-getPROG() { _tc-getPROG CHOST "$@"; }
+
+# @FUNCTION: tc-getAR
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the archiver
+tc-getAR() { tc-getPROG AR ar "$@"; }
+# @FUNCTION: tc-getAS
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the assembler
+tc-getAS() { tc-getPROG AS as "$@"; }
+# @FUNCTION: tc-getCC
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C compiler
+tc-getCC() { tc-getPROG CC gcc "$@"; }
+# @FUNCTION: tc-getCPP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C preprocessor
+tc-getCPP() { tc-getPROG CPP cpp "$@"; }
+# @FUNCTION: tc-getCXX
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C++ compiler
+tc-getCXX() { tc-getPROG CXX g++ "$@"; }
+# @FUNCTION: tc-getLD
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the linker
+tc-getLD() { tc-getPROG LD ld "$@"; }
+# @FUNCTION: tc-getSTRIP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the strip program
+tc-getSTRIP() { tc-getPROG STRIP strip "$@"; }
+# @FUNCTION: tc-getNM
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the symbol/object thingy
+tc-getNM() { tc-getPROG NM nm "$@"; }
+# @FUNCTION: tc-getRANLIB
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the archiver indexer
+tc-getRANLIB() { tc-getPROG RANLIB ranlib "$@"; }
+# @FUNCTION: tc-getOBJCOPY
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the object copier
+tc-getOBJCOPY() { tc-getPROG OBJCOPY objcopy "$@"; }
+# @FUNCTION: tc-getOBJDUMP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the object dumper
+tc-getOBJDUMP() { tc-getPROG OBJDUMP objdump "$@"; }
+# @FUNCTION: tc-getF77
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the Fortran 77 compiler
+tc-getF77() { tc-getPROG F77 gfortran "$@"; }
+# @FUNCTION: tc-getFC
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the Fortran 90 compiler
+tc-getFC() { tc-getPROG FC gfortran "$@"; }
+# @FUNCTION: tc-getGCJ
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the java compiler
+tc-getGCJ() { tc-getPROG GCJ gcj "$@"; }
+# @FUNCTION: tc-getGO
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the Go compiler
+tc-getGO() { tc-getPROG GO gccgo "$@"; }
+# @FUNCTION: tc-getPKG_CONFIG
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the pkg-config tool
+tc-getPKG_CONFIG() { tc-getPROG PKG_CONFIG pkg-config "$@"; }
+# @FUNCTION: tc-getRC
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the Windows resource compiler
+tc-getRC() { tc-getPROG RC windres "$@"; }
+# @FUNCTION: tc-getDLLWRAP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the Windows dllwrap utility
+tc-getDLLWRAP() { tc-getPROG DLLWRAP dllwrap "$@"; }
+
+# @FUNCTION: tc-getBUILD_AR
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the archiver for building binaries to run on the build machine
+tc-getBUILD_AR() { tc-getBUILD_PROG AR ar "$@"; }
+# @FUNCTION: tc-getBUILD_AS
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the assembler for building binaries to run on the build machine
+tc-getBUILD_AS() { tc-getBUILD_PROG AS as "$@"; }
+# @FUNCTION: tc-getBUILD_CC
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C compiler for building binaries to run on the build machine
+tc-getBUILD_CC() { tc-getBUILD_PROG CC gcc "$@"; }
+# @FUNCTION: tc-getBUILD_CPP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C preprocessor for building binaries to run on the build machine
+tc-getBUILD_CPP() { tc-getBUILD_PROG CPP cpp "$@"; }
+# @FUNCTION: tc-getBUILD_CXX
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the C++ compiler for building binaries to run on the build machine
+tc-getBUILD_CXX() { tc-getBUILD_PROG CXX g++ "$@"; }
+# @FUNCTION: tc-getBUILD_LD
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the linker for building binaries to run on the build machine
+tc-getBUILD_LD() { tc-getBUILD_PROG LD ld "$@"; }
+# @FUNCTION: tc-getBUILD_STRIP
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the strip program for building binaries to run on the build machine
+tc-getBUILD_STRIP() { tc-getBUILD_PROG STRIP strip "$@"; }
+# @FUNCTION: tc-getBUILD_NM
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the symbol/object thingy for building binaries to run on the build machine
+tc-getBUILD_NM() { tc-getBUILD_PROG NM nm "$@"; }
+# @FUNCTION: tc-getBUILD_RANLIB
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the archiver indexer for building binaries to run on the build machine
+tc-getBUILD_RANLIB() { tc-getBUILD_PROG RANLIB ranlib "$@"; }
+# @FUNCTION: tc-getBUILD_OBJCOPY
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the object copier for building binaries to run on the build machine
+tc-getBUILD_OBJCOPY() { tc-getBUILD_PROG OBJCOPY objcopy "$@"; }
+# @FUNCTION: tc-getBUILD_PKG_CONFIG
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the pkg-config tool for building binaries to run on the build machine
+tc-getBUILD_PKG_CONFIG() { tc-getBUILD_PROG PKG_CONFIG pkg-config "$@"; }
+
+# @FUNCTION: tc-export
+# @USAGE: <list of toolchain variables>
+# @DESCRIPTION:
+# Quick way to export a bunch of compiler vars at once.
+# Dies on any name that has no matching tc-getXX accessor.
+tc-export() {
+	local var
+	for var in "$@" ; do
+		[[ $(type -t tc-get${var}) != "function" ]] && die "tc-export: invalid export variable '${var}'"
+		# The accessor exports the variable as a side effect; discard output.
+		eval tc-get${var} > /dev/null
+	done
+}
+
+# @FUNCTION: tc-is-cross-compiler
+# @RETURN: Shell true if we are using a cross-compiler, shell false otherwise
+tc-is-cross-compiler() {
+	# An unset CBUILD defaults to CHOST, i.e. "not cross".
+	[[ ${CBUILD:-${CHOST}} != ${CHOST} ]]
+}
+
+# @FUNCTION: tc-is-softfloat
+# @DESCRIPTION:
+# See if this toolchain is a softfloat based one.
+# @CODE
+# The possible return values:
+# - only: the target is always softfloat (never had fpu)
+# - yes: the target should support softfloat
+# - softfp: (arm specific) the target should use hardfloat insns, but softfloat calling convention
+# - no: the target doesn't support softfloat
+# @CODE
+# This allows us to react differently where packages accept
+# softfloat flags in the case where support is optional, but
+# rejects softfloat flags where the target always lacks an fpu.
+tc-is-softfloat() {
+	local CTARGET=${CTARGET:-${CHOST}}
+	case ${CTARGET} in
+		# These arches never had an FPU at all.
+		bfin*|h8300*)
+			echo "only" ;;
+		*)
+			# Normalize _ to - so both tuple spellings match.
+			if [[ ${CTARGET//_/-} == *-softfloat-* ]] ; then
+				echo "yes"
+			elif [[ ${CTARGET//_/-} == *-softfp-* ]] ; then
+				echo "softfp"
+			else
+				echo "no"
+			fi
+			;;
+	esac
+}
+
+# @FUNCTION: tc-is-static-only
+# @DESCRIPTION:
+# Return shell true if the target does not support shared libs, shell false
+# otherwise.
+tc-is-static-only() {
+	local host=${CTARGET:-${CHOST}}
+
+	# *MiNT doesn't have shared libraries, only platform so far
+	[[ ${host} == *-mint* ]]
+}
+
+# @FUNCTION: tc-export_build_env
+# @USAGE: [compiler variables]
+# @DESCRIPTION:
+# Export common build related compiler settings.  Any variables named in
+# the arguments are first exported via tc-export.
+tc-export_build_env() {
+	tc-export "$@"
+	# Some build envs will initialize vars like:
+	# : ${BUILD_LDFLAGS:-${LDFLAGS}}
+	# So make sure all variables are non-empty. #526734
+	# (CPP/LDFLAGS get a lone space so they test as "set".)
+	: ${BUILD_CFLAGS:=-O1 -pipe}
+	: ${BUILD_CXXFLAGS:=-O1 -pipe}
+	: ${BUILD_CPPFLAGS:= }
+	: ${BUILD_LDFLAGS:= }
+	export BUILD_{C,CXX,CPP,LD}FLAGS
+
+	# Some packages use XXX_FOR_BUILD.
+	local v
+	for v in BUILD_{C,CXX,CPP,LD}FLAGS ; do
+		export ${v#BUILD_}_FOR_BUILD="${!v}"
+	done
+}
+
+# @FUNCTION: tc-env_build
+# @USAGE: <command> [command args]
+# @INTERNAL
+# @DESCRIPTION:
+# Setup the compile environment to the build tools and then execute the
+# specified command. We use tc-getBUILD_XX here so that we work with
+# all of the semi-[non-]standard env vars like $BUILD_CC which often
+# the target build system does not check.
+tc-env_build() {
+	tc-env_build_env_note= # (no-op placeholder removed)
+	tc-export_build_env
+	# All overrides are scoped to the single command invocation below;
+	# the caller's environment is left untouched.
+	CFLAGS=${BUILD_CFLAGS} \
+	CXXFLAGS=${BUILD_CXXFLAGS} \
+	CPPFLAGS=${BUILD_CPPFLAGS} \
+	LDFLAGS=${BUILD_LDFLAGS} \
+	AR=$(tc-getBUILD_AR) \
+	AS=$(tc-getBUILD_AS) \
+	CC=$(tc-getBUILD_CC) \
+	CPP=$(tc-getBUILD_CPP) \
+	CXX=$(tc-getBUILD_CXX) \
+	LD=$(tc-getBUILD_LD) \
+	NM=$(tc-getBUILD_NM) \
+	PKG_CONFIG=$(tc-getBUILD_PKG_CONFIG) \
+	RANLIB=$(tc-getBUILD_RANLIB) \
+	"$@"
+}
+
+# @FUNCTION: econf_build
+# @USAGE: [econf flags]
+# @DESCRIPTION:
+# Sometimes we need to locally build up some tools to run on CBUILD because
+# the package has helper utils which are compiled+executed when compiling.
+# This won't work when cross-compiling as the CHOST is set to a target which
+# we cannot natively execute.
+#
+# For example, the python package will build up a local python binary using
+# a portable build system (configure+make), but then use that binary to run
+# local python scripts to build up other components of the overall python.
+# We cannot rely on the python binary in $PATH as that often times will be
+# a different version, or not even installed in the first place. Instead,
+# we compile the code in a different directory to run on CBUILD, and then
+# use that binary when compiling the main package to run on CHOST.
+#
+# For example, with newer EAPIs, you'd do something like:
+# @CODE
+# src_configure() {
+# ECONF_SOURCE=${S}
+# if tc-is-cross-compiler ; then
+# mkdir "${WORKDIR}"/${CBUILD}
+# pushd "${WORKDIR}"/${CBUILD} >/dev/null
+# econf_build --disable-some-unused-stuff
+# popd >/dev/null
+# fi
+# ... normal build paths ...
+# }
+# src_compile() {
+# if tc-is-cross-compiler ; then
+# pushd "${WORKDIR}"/${CBUILD} >/dev/null
+# emake one-or-two-build-tools
+# ln/mv build-tools to normal build paths in ${S}/
+# popd >/dev/null
+# fi
+# ... normal build paths ...
+# }
+# @CODE
+econf_build() {
+	local CBUILD=${CBUILD:-${CHOST}}
+	# Run econf with both --build and --host set to CBUILD so configure
+	# treats this as a native build for the build machine.
+	tc-env_build econf --build=${CBUILD} --host=${CBUILD} "$@"
+}
+
+# @FUNCTION: tc-ld-is-gold
+# @USAGE: [toolchain prefix]
+# @DESCRIPTION:
+# Return true if the current linker is set to gold.
+tc-ld-is-gold() {
+	local out
+
+	# First check the linker directly.
+	out=$($(tc-getLD "$@") --version 2>&1)
+	if [[ ${out} == *"GNU gold"* ]] ; then
+		return 0
+	fi
+
+	# Then see if they're selecting gold via compiler flags.
+	# Note: We're assuming they're using LDFLAGS to hold the
+	# options and not CFLAGS/CXXFLAGS.
+	# Do a trivial link with -Wl,--version and grep the output.
+	local base="${T}/test-tc-gold"
+	cat <<-EOF > "${base}.c"
+	int main() { return 0; }
+	EOF
+	out=$($(tc-getCC "$@") ${CFLAGS} ${CPPFLAGS} ${LDFLAGS} -Wl,--version "${base}.c" -o "${base}" 2>&1)
+	rm -f "${base}"*
+	if [[ ${out} == *"GNU gold"* ]] ; then
+		return 0
+	fi
+
+	# No gold here!
+	return 1
+}
+
+# @FUNCTION: tc-ld-disable-gold
+# @USAGE: [toolchain prefix]
+# @DESCRIPTION:
+# If the gold linker is currently selected, configure the compilation
+# settings so that we use the older bfd linker instead.
+tc-ld-disable-gold() {
+	if ! tc-ld-is-gold "$@" ; then
+		# They aren't using gold, so nothing to do!
+		return
+	fi
+
+	ewarn "Forcing usage of the BFD linker instead of GOLD"
+
+	# Set up LD to point directly to bfd if it's available.
+	# We need to extract the first word in case there are flags appended
+	# to its value (like multilib). #545218
+	local ld=$(tc-getLD "$@")
+	local bfd_ld="${ld%% *}.bfd"
+	local path_ld=$(which "${bfd_ld}" 2>/dev/null)
+	[[ -e ${path_ld} ]] && export LD=${bfd_ld}
+
+	# Set up LDFLAGS to select bfd based on the gcc version.
+	local major=$(gcc-major-version "$@")
+	local minor=$(gcc-minor-version "$@")
+	if [[ ${major} -lt 4 ]] || [[ ${major} -eq 4 && ${minor} -lt 8 ]] ; then
+		# <=gcc-4.7 requires some coercion. Only works if bfd exists.
+		# Create a private dir whose "ld" is the bfd linker and point
+		# the compiler at it via -B.
+		if [[ -e ${path_ld} ]] ; then
+			local d="${T}/bfd-linker"
+			mkdir -p "${d}"
+			ln -sf "${path_ld}" "${d}"/ld
+			export LDFLAGS="${LDFLAGS} -B${d}"
+		else
+			die "unable to locate a BFD linker to bypass gold"
+		fi
+	else
+		# gcc-4.8+ supports -fuse-ld directly.
+		export LDFLAGS="${LDFLAGS} -fuse-ld=bfd"
+	fi
+}
+
+# @FUNCTION: tc-has-openmp
+# @USAGE: [toolchain prefix]
+# @DESCRIPTION:
+# See if the toolchain supports OpenMP.  Compiles and links a small
+# -fopenmp test program; the exit status is the compiler's.
+tc-has-openmp() {
+	local base="${T}/test-tc-openmp"
+	cat <<-EOF > "${base}.c"
+	#include <omp.h>
+	int main() {
+	int nthreads, tid, ret = 0;
+	#pragma omp parallel private(nthreads, tid)
+	{
+	tid = omp_get_thread_num();
+	nthreads = omp_get_num_threads(); ret += tid + nthreads;
+	}
+	return ret;
+	}
+	EOF
+	$(tc-getCC "$@") -fopenmp "${base}.c" -o "${base}" >&/dev/null
+	local ret=$?
+	rm -f "${base}"*
+	return ${ret}
+}
+
+# @FUNCTION: tc-has-tls
+# @USAGE: [-s|-c|-l] [toolchain prefix]
+# @DESCRIPTION:
+# See if the toolchain supports thread local storage (TLS). Use -s to test the
+# compiler, -c to also test the assembler, and -l to also test the C library
+# (the default).
+tc-has-tls() {
+	local base="${T}/test-tc-tls"
+	cat <<-EOF > "${base}.c"
+	int foo(int *i) {
+	static __thread int j = 0;
+	return *i ? j : *i;
+	}
+	EOF
+	local flags
+	case $1 in
+		# -S stops after compilation, -c after assembly; default links
+		# a shared object so the C library/linker path is tested too.
+		-s) flags="-S";;
+		-c) flags="-c";;
+		-l) ;;
+		# NOTE(review): usage string omits the supported -s flag — confirm
+		# against @USAGE above before relying on this message.
+		-*) die "Usage: tc-has-tls [-c|-l] [toolchain prefix]";;
+	esac
+	: ${flags:=-fPIC -shared -Wl,-z,defs}
+	[[ $1 == -* ]] && shift
+	$(tc-getCC "$@") ${flags} "${base}.c" -o "${base}" >&/dev/null
+	local ret=$?
+	rm -f "${base}"*
+	return ${ret}
+}
+
+
+# Parse information from CBUILD/CHOST/CTARGET rather than
+# use external variables from the profile.
+# $1 is "kern" or "portage" (which namespace to answer in); $2 is an
+# optional tuple, defaulting to CTARGET/CHOST.
+tc-ninja_magic_to_arch() {
+# Helper: pick the kernel name ($1) or the portage name ($2).
+ninj() { [[ ${type} == "kern" ]] && echo $1 || echo $2 ; }
+
+	local type=$1
+	local host=$2
+	[[ -z ${host} ]] && host=${CTARGET:-${CHOST}}
+
+	local KV=${KV:-${KV_FULL}}
+	[[ ${type} == "kern" ]] && [[ -z ${KV} ]] && \
+		ewarn "QA: Kernel version could not be determined, please inherit kernel-2 or linux-info"
+
+	case ${host} in
+		aarch64*)	echo arm64;;
+		alpha*)		echo alpha;;
+		arm*)		echo arm;;
+		avr*)		ninj avr32 avr;;
+		bfin*)		ninj blackfin bfin;;
+		c6x*)		echo c6x;;
+		cris*)		echo cris;;
+		frv*)		echo frv;;
+		hexagon*)	echo hexagon;;
+		hppa*)		ninj parisc hppa;;
+		i?86*)
+			# Starting with linux-2.6.24, the 'x86_64' and 'i386'
+			# trees have been unified into 'x86'.
+			# FreeBSD still uses i386
+			if [[ ${type} == "kern" ]] && [[ $(KV_to_int ${KV}) -lt $(KV_to_int 2.6.24) || ${host} == *freebsd* ]] ; then
+				echo i386
+			else
+				echo x86
+			fi
+			;;
+		ia64*)		echo ia64;;
+		m68*)		echo m68k;;
+		metag*)		echo metag;;
+		microblaze*)	echo microblaze;;
+		mips*)		echo mips;;
+		nios2*)		echo nios2;;
+		nios*)		echo nios;;
+		or32*)		echo openrisc;;
+		powerpc*)
+			# Starting with linux-2.6.15, the 'ppc' and 'ppc64' trees
+			# have been unified into simply 'powerpc', but until 2.6.16,
+			# ppc32 is still using ARCH="ppc" as default
+			if [[ ${type} == "kern" ]] && [[ $(KV_to_int ${KV}) -ge $(KV_to_int 2.6.16) ]] ; then
+				echo powerpc
+			elif [[ ${type} == "kern" ]] && [[ $(KV_to_int ${KV}) -eq $(KV_to_int 2.6.15) ]] ; then
+				if [[ ${host} == powerpc64* ]] || [[ ${PROFILE_ARCH} == "ppc64" ]] ; then
+					echo powerpc
+				else
+					echo ppc
+				fi
+			elif [[ ${host} == powerpc64* ]] ; then
+				echo ppc64
+			elif [[ ${PROFILE_ARCH} == "ppc64" ]] ; then
+				ninj ppc64 ppc
+			else
+				echo ppc
+			fi
+			;;
+		riscv*)		echo riscv;;
+		s390*)		echo s390;;
+		score*)		echo score;;
+		sh64*)		ninj sh64 sh;;
+		sh*)		echo sh;;
+		sparc64*)	ninj sparc64 sparc;;
+		sparc*)		[[ ${PROFILE_ARCH} == "sparc64" ]] \
+					&& ninj sparc64 sparc \
+					|| echo sparc
+			;;
+		tile*)		echo tile;;
+		vax*)		echo vax;;
+		x86_64*freebsd*) echo amd64;;
+		x86_64*)
+			# Starting with linux-2.6.24, the 'x86_64' and 'i386'
+			# trees have been unified into 'x86'.
+			if [[ ${type} == "kern" ]] && [[ $(KV_to_int ${KV}) -ge $(KV_to_int 2.6.24) ]] ; then
+				echo x86
+			else
+				ninj x86_64 amd64
+			fi
+			;;
+		xtensa*)	echo xtensa;;
+
+		# since our usage of tc-arch is largely concerned with
+		# normalizing inputs for testing ${CTARGET}, let's filter
+		# other cross targets (mingw and such) into the unknown.
+		*)		echo unknown;;
+	esac
+}
+# @FUNCTION: tc-arch-kernel
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the kernel arch according to the compiler target
+tc-arch-kernel() {
+	tc-ninja_magic_to_arch kern "$@"
+}
+# @FUNCTION: tc-arch
+# @USAGE: [toolchain prefix]
+# @RETURN: name of the portage arch according to the compiler target
+tc-arch() {
+	tc-ninja_magic_to_arch portage "$@"
+}
+
+# tc-endian [tuple]
+# Echo "big" or "little" according to the CPU part of the given tuple
+# (defaults to CTARGET/CHOST); "wtf" for unrecognized CPUs.
+tc-endian() {
+	local host=$1
+	[[ -z ${host} ]] && host=${CTARGET:-${CHOST}}
+	# Keep only the CPU field of the tuple.
+	host=${host%%-*}
+
+	case ${host} in
+		aarch64*be)	echo big;;
+		aarch64)	echo little;;
+		alpha*)		echo big;;
+		arm*b*)		echo big;;
+		arm*)		echo little;;
+		cris*)		echo little;;
+		hppa*)		echo big;;
+		i?86*)		echo little;;
+		ia64*)		echo little;;
+		m68*)		echo big;;
+		mips*l*)	echo little;;
+		mips*)		echo big;;
+		powerpc*le)	echo little;;
+		powerpc*)	echo big;;
+		s390*)		echo big;;
+		sh*b*)		echo big;;
+		sh*)		echo little;;
+		sparc*)		echo big;;
+		x86_64*)	echo little;;
+		*)		echo wtf;;
+	esac
+}
+
+# Internal func. The first argument is the version info to expand.
+# Query the preprocessor to improve compatibility across different
+# compilers rather than maintaining a --version flag matrix. #335943
+# The CPP expands the three __GNUC*__ macros into $1 $2 $3, which the
+# caller's format string then selects from.
+_gcc_fullversion() {
+	local ver="$1"; shift
+	set -- `$(tc-getCPP "$@") -E -P - <<<"__GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__"`
+	eval echo "$ver"
+}
+
+# Convenience wrappers around _gcc_fullversion selecting version parts.
+# @FUNCTION: gcc-fullversion
+# @RETURN: compiler version (major.minor.micro: [3.4.6])
+gcc-fullversion() {
+	_gcc_fullversion '$1.$2.$3' "$@"
+}
+# @FUNCTION: gcc-version
+# @RETURN: compiler version (major.minor: [3.4].6)
+gcc-version() {
+	_gcc_fullversion '$1.$2' "$@"
+}
+# @FUNCTION: gcc-major-version
+# @RETURN: major compiler version (major: [3].4.6)
+gcc-major-version() {
+	_gcc_fullversion '$1' "$@"
+}
+# @FUNCTION: gcc-minor-version
+# @RETURN: minor compiler version (minor: 3.[4].6)
+gcc-minor-version() {
+	_gcc_fullversion '$2' "$@"
+}
+# @FUNCTION: gcc-micro-version
+# @RETURN: micro compiler version (micro: 3.4.[6])
+gcc-micro-version() {
+	_gcc_fullversion '$3' "$@"
+}
+
+# Returns the installation directory - internal toolchain
+# function for use by _gcc-specs-exists (for flag-o-matic).
+# Parses the "install:" line of `gcc -print-search-dirs`.
+_gcc-install-dir() {
+	echo "$(LC_ALL=C $(tc-getCC) -print-search-dirs 2> /dev/null |\
+		awk '$1=="install:" {print $2}')"
+}
+# Returns true if the indicated specs file exists - internal toolchain
+# function for use by flag-o-matic.
+_gcc-specs-exists() {
+	[[ -f $(_gcc-install-dir)/$1 ]]
+}
+
+# Returns requested gcc specs directive unprocessed - for use by
+# gcc-specs-directive()
+# Note; later specs normally overwrite earlier ones; however if a later
+# spec starts with '+' then it appends.
+# gcc -dumpspecs is parsed first, followed by files listed by "gcc -v"
+# as "Reading <file>", in order. Strictly speaking, if there's a
+# $(gcc_install_dir)/specs, the built-in specs aren't read, however by
+# the same token anything from 'gcc -dumpspecs' is overridden by
+# the contents of $(gcc_install_dir)/specs so the result is the
+# same either way.
+_gcc-specs-directive_raw() {
+	local cc=$(tc-getCC)
+	local specfiles=$(LC_ALL=C ${cc} -v 2>&1 | awk '$1=="Reading" {print $NF}')
+	# The awk program scans the concatenated specs: a "*directive:" header
+	# starts collecting lines until the next header; a body starting with
+	# '+' appends to the previously collected spec instead of replacing it.
+	${cc} -dumpspecs 2> /dev/null | cat - ${specfiles} | awk -v directive=$1 \
+'BEGIN	{ pspec=""; spec=""; outside=1 }
+$1=="*"directive":"	{ pspec=spec; spec=""; outside=0; next }
+	outside || NF==0 || ( substr($1,1,1)=="*" && substr($1,length($1),1)==":" ) { outside=1; next }
+	spec=="" && substr($0,1,1)=="+"	{ spec=pspec " " substr($0,2); next }
+	{ spec=spec $0 }
+END	{ print spec }'
+	return 0
+}
+
+# Return the requested gcc specs directive, with all included
+# specs expanded.
+# Note, it does not check for inclusion loops, which cause it
+# to never finish - but such loops are invalid for gcc and we're
+# assuming gcc is operational.
+gcc-specs-directive() {
+	local directive subdname subdirective
+	directive="$(_gcc-specs-directive_raw $1)"
+	# Repeatedly inline %(name) references until none remain.
+	while [[ ${directive} == *%\(*\)* ]]; do
+		subdname=${directive/*%\(}
+		subdname=${subdname/\)*}
+		subdirective="$(_gcc-specs-directive_raw ${subdname})"
+		directive="${directive//\%(${subdname})/${subdirective}}"
+	done
+	echo "${directive}"
+	return 0
+}
+
+# Predicates over the expanded gcc specs: each tests whether the relevant
+# directive contains a "{!no<feature>:" guard, i.e. the feature is on by
+# default unless explicitly disabled.
+# Returns true if gcc sets relro
+gcc-specs-relro() {
+	local directive
+	directive=$(gcc-specs-directive link_command)
+	[[ "${directive/\{!norelro:}" != "${directive}" ]]
+}
+# Returns true if gcc sets now
+gcc-specs-now() {
+	local directive
+	directive=$(gcc-specs-directive link_command)
+	[[ "${directive/\{!nonow:}" != "${directive}" ]]
+}
+# Returns true if gcc builds PIEs
+gcc-specs-pie() {
+	local directive
+	directive=$(gcc-specs-directive cc1)
+	[[ "${directive/\{!nopie:}" != "${directive}" ]]
+}
+# Returns true if gcc builds with the stack protector
+gcc-specs-ssp() {
+	local directive
+	directive=$(gcc-specs-directive cc1)
+	[[ "${directive/\{!fno-stack-protector:}" != "${directive}" ]]
+}
+# Returns true if gcc upgrades fstack-protector to fstack-protector-all
+gcc-specs-ssp-to-all() {
+	local directive
+	directive=$(gcc-specs-directive cc1)
+	[[ "${directive/\{!fno-stack-protector-all:}" != "${directive}" ]]
+}
+# Returns true if gcc builds with fno-strict-overflow
+gcc-specs-nostrict() {
+	local directive
+	directive=$(gcc-specs-directive cc1)
+	[[ "${directive/\{!fstrict-overflow:}" != "${directive}" ]]
+}
+# Returns true if gcc builds with fstack-check
+gcc-specs-stack-check() {
+	local directive
+	directive=$(gcc-specs-directive cc1)
+	[[ "${directive/\{!fno-stack-check:}" != "${directive}" ]]
+}
+
+
+# @FUNCTION: gen_usr_ldscript
+# @USAGE: [-a] <list of libs to create linker scripts for>
+# @DESCRIPTION:
+# This function generate linker scripts in /usr/lib for dynamic
+# libs in /lib. This is to fix linking problems when you have
+# the .so in /lib, and the .a in /usr/lib. What happens is that
+# in some cases when linking dynamic, the .a in /usr/lib is used
+# instead of the .so in /lib due to gcc/libtool tweaking ld's
+# library search path. This causes many builds to fail.
+# See bug #4411 for more info.
+#
+# Note that you should in general use the unversioned name of
+# the library (libfoo.so), as ldconfig should usually update it
+# correctly to point to the latest version of the library present.
+#
+# With -a, the named libs are first moved from /usr/lib to /lib
+# automatically; without it, they are expected to be in /lib already.
+gen_usr_ldscript() {
+	local lib libdir=$(get_libdir) output_format="" auto=false suffix=$(get_libname)
+	[[ -z ${ED+set} ]] && local ED=${D%/}${EPREFIX}/
+
+	tc-is-static-only && return
+
+	# Eventually we'd like to get rid of this func completely #417451
+	case ${CTARGET:-${CHOST}} in
+	*-darwin*) ;;
+	*linux*|*-freebsd*|*-openbsd*|*-netbsd*)
+		use prefix && return 0 ;;
+	*) return 0 ;;
+	esac
+
+	# Just make sure it exists
+	dodir /usr/${libdir}
+
+	if [[ $1 == "-a" ]] ; then
+		auto=true
+		shift
+		dodir /${libdir}
+	fi
+
+	# OUTPUT_FORMAT gives hints to the linker as to what binary format
+	# is referenced ... makes multilib saner
+	local flags=( ${CFLAGS} ${LDFLAGS} -Wl,--verbose )
+	if $(tc-getLD) --version | grep -q 'GNU gold' ; then
+		# If they're using gold, manually invoke the old bfd. #487696
+		local d="${T}/bfd-linker"
+		mkdir -p "${d}"
+		ln -sf $(which ${CHOST}-ld.bfd) "${d}"/ld
+		flags+=( -B"${d}" )
+	fi
+	# Extract the OUTPUT_FORMAT name from the linker's verbose script dump.
+	output_format=$($(tc-getCC) "${flags[@]}" 2>&1 | sed -n 's/^OUTPUT_FORMAT("\([^"]*\)",.*/\1/p')
+	[[ -n ${output_format} ]] && output_format="OUTPUT_FORMAT ( ${output_format} )"
+
+	for lib in "$@" ; do
+		local tlib
+		if ${auto} ; then
+			lib="lib${lib}${suffix}"
+		else
+			# Ensure /lib/${lib} exists to avoid dangling scripts/symlinks.
+			# This especially is for AIX where $(get_libname) can return ".a",
+			# so /lib/${lib} might be moved to /usr/lib/${lib} (by accident).
+			[[ -r ${ED}/${libdir}/${lib} ]] || continue
+			#TODO: better die here?
+		fi
+
+		case ${CTARGET:-${CHOST}} in
+		*-darwin*)
+			# Darwin: read the install_name instead of an ELF SONAME.
+			if ${auto} ; then
+				tlib=$(scanmacho -qF'%S#F' "${ED}"/usr/${libdir}/${lib})
+			else
+				tlib=$(scanmacho -qF'%S#F' "${ED}"/${libdir}/${lib})
+			fi
+			[[ -z ${tlib} ]] && die "unable to read install_name from ${lib}"
+			tlib=${tlib##*/}
+
+			if ${auto} ; then
+				mv "${ED}"/usr/${libdir}/${lib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die
+				# some install_names are funky: they encode a version
+				if [[ ${tlib} != ${lib%${suffix}}.*${suffix#.} ]] ; then
+					mv "${ED}"/usr/${libdir}/${tlib%${suffix}}.*${suffix#.} "${ED}"/${libdir}/ || die
+				fi
+				rm -f "${ED}"/${libdir}/${lib}
+			fi
+
+			# Mach-O files have an id, which is like a soname, it tells how
+			# another object linking against this lib should reference it.
+			# Since we moved the lib from usr/lib into lib this reference is
+			# wrong. Hence, we update it here. We don't configure with
+			# libdir=/lib because that messes up libtool files.
+			# Make sure we don't lose the specific version, so just modify the
+			# existing install_name
+			if [[ ! -w "${ED}/${libdir}/${tlib}" ]] ; then
+				chmod u+w "${ED}${libdir}/${tlib}" # needed to write to it
+				local nowrite=yes
+			fi
+			install_name_tool \
+				-id "${EPREFIX}"/${libdir}/${tlib} \
+				"${ED}"/${libdir}/${tlib} || die "install_name_tool failed"
+			[[ -n ${nowrite} ]] && chmod u-w "${ED}${libdir}/${tlib}"
+			# Now as we don't use GNU binutils and our linker doesn't
+			# understand linker scripts, just create a symlink.
+			pushd "${ED}/usr/${libdir}" > /dev/null
+			ln -snf "../../${libdir}/${tlib}" "${lib}"
+			popd > /dev/null
+			;;
+		*)
+			if ${auto} ; then
+				tlib=$(scanelf -qF'%S#F' "${ED}"/usr/${libdir}/${lib})
+				[[ -z ${tlib} ]] && die "unable to read SONAME from ${lib}"
+				mv "${ED}"/usr/${libdir}/${lib}* "${ED}"/${libdir}/ || die
+				# some SONAMEs are funky: they encode a version before the .so
+				if [[ ${tlib} != ${lib}* ]] ; then
+					mv "${ED}"/usr/${libdir}/${tlib}* "${ED}"/${libdir}/ || die
+				fi
+				rm -f "${ED}"/${libdir}/${lib}
+			else
+				tlib=${lib}
+			fi
+			# Replace the /usr/lib copy with a linker script that
+			# redirects to the real lib in /lib.
+			cat > "${ED}/usr/${libdir}/${lib}" <<-END_LDSCRIPT
+			/* GNU ld script
+			Since Gentoo has critical dynamic libraries in /lib, and the static versions
+			in /usr/lib, we need to have a "fake" dynamic lib in /usr/lib, otherwise we
+			run into linking problems. This "fake" dynamic lib is a linker script that
+			redirects the linker to the real lib. And yes, this works in the cross-
+			compiling scenario as the sysroot-ed linker will prepend the real path.
+
+			See bug http://bugs.gentoo.org/4411 for more info.
+			*/
+			${output_format}
+			GROUP ( ${EPREFIX}/${libdir}/${tlib} )
+			END_LDSCRIPT
+			;;
+		esac
+		fperms a+x "/usr/${libdir}/${lib}" || die "could not change perms on ${lib}"
+	done
+}
+
+fi
diff --git a/eclass/toolchain.eclass b/eclass/toolchain.eclass
new file mode 100644
index 000000000000..7b9ce945888f
--- /dev/null
+++ b/eclass/toolchain.eclass
@@ -0,0 +1,2263 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# Maintainer: Toolchain Ninjas <toolchain@gentoo.org>
+
+DESCRIPTION="The GNU Compiler Collection"
+HOMEPAGE="http://gcc.gnu.org/"
+RESTRICT="strip" # cross-compilers need controlled stripping
+
+inherit eutils fixheadtails flag-o-matic gnuconfig libtool multilib pax-utils toolchain-funcs versionator
+
+if [[ ${PV} == *_pre9999* ]] ; then
+ EGIT_REPO_URI="git://gcc.gnu.org/git/gcc.git"
+ # naming style:
+ # gcc-4.7.1_pre9999 -> gcc-4_7-branch
+ # Note that the micro version is required or lots of stuff will break.
+ # To checkout master set gcc_LIVE_BRANCH="master" in the ebuild before
+ # inheriting this eclass.
+ EGIT_BRANCH="${PN}-${PV%.?_pre9999}-branch"
+ EGIT_BRANCH=${EGIT_BRANCH//./_}
+ inherit git-2
+fi
+
+FEATURES=${FEATURES/multilib-strict/}
+
+EXPORTED_FUNCTIONS="pkg_setup src_unpack src_compile src_test src_install pkg_postinst pkg_postrm"
+case ${EAPI:-0} in
+ 0|1) die "Need to upgrade to at least EAPI=2";;
+ 2|3) EXPORTED_FUNCTIONS+=" src_prepare src_configure" ;;
+ 4*|5*) EXPORTED_FUNCTIONS+=" pkg_pretend src_prepare src_configure" ;;
+ *) die "I don't speak EAPI ${EAPI}."
+esac
+EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
+
+#---->> globals <<----
+
+export CTARGET=${CTARGET:-${CHOST}}
+if [[ ${CTARGET} = ${CHOST} ]] ; then
+ if [[ ${CATEGORY} == cross-* ]] ; then
+ export CTARGET=${CATEGORY#cross-}
+ fi
+fi
+: ${TARGET_ABI:=${ABI}}
+: ${TARGET_MULTILIB_ABIS:=${MULTILIB_ABIS}}
+: ${TARGET_DEFAULT_ABI:=${DEFAULT_ABI}}
+
+is_crosscompile() {
+ [[ ${CHOST} != ${CTARGET} ]]
+}
+
+# General purpose version check. Without a second arg matches up to minor version (x.x.x)
+tc_version_is_at_least() {
+ version_is_at_least "$1" "${2:-${GCC_RELEASE_VER}}"
+}
+
+# General purpose version range check
+# Note that it matches up to but NOT including the second version
+tc_version_is_between() {
+ tc_version_is_at_least "${1}" && ! tc_version_is_at_least "${2}"
+}
+
+GCC_PV=${TOOLCHAIN_GCC_PV:-${PV}}
+GCC_PVR=${GCC_PV}
+[[ ${PR} != "r0" ]] && GCC_PVR=${GCC_PVR}-${PR}
+GCC_RELEASE_VER=$(get_version_component_range 1-3 ${GCC_PV})
+GCC_BRANCH_VER=$(get_version_component_range 1-2 ${GCC_PV})
+GCCMAJOR=$(get_version_component_range 1 ${GCC_PV})
+GCCMINOR=$(get_version_component_range 2 ${GCC_PV})
+GCCMICRO=$(get_version_component_range 3 ${GCC_PV})
+[[ ${BRANCH_UPDATE-notset} == "notset" ]] && \
+ BRANCH_UPDATE=$(get_version_component_range 4 ${GCC_PV})
+
+# According to gcc/c-cppbuiltin.c, GCC_CONFIG_VER MUST match this regex.
+# ([^0-9]*-)?[0-9]+[.][0-9]+([.][0-9]+)?([- ].*)?
+GCC_CONFIG_VER=${GCC_CONFIG_VER:-$(replace_version_separator 3 '-' ${GCC_PV})}
+
+# Pre-release support
+if [[ ${GCC_PV} == *_pre* ]] ; then
+ PRERELEASE=${GCC_PV/_pre/-}
+elif [[ ${GCC_PV} == *_alpha* ]] ; then
+ SNAPSHOT=${GCC_BRANCH_VER}-${GCC_PV##*_alpha}
+elif [[ ${GCC_PV} == *_beta* ]] ; then
+ SNAPSHOT=${GCC_BRANCH_VER}-${GCC_PV##*_beta}
+elif [[ ${GCC_PV} == *_rc* ]] ; then
+ SNAPSHOT=${GCC_PV%_rc*}-RC-${GCC_PV##*_rc}
+fi
+
+if [[ ${SNAPSHOT} == [56789].0-* ]] ; then
+ # The gcc-5+ releases have dropped the .0 for some reason.
+ SNAPSHOT=${SNAPSHOT/.0}
+fi
+
+export GCC_FILESDIR=${GCC_FILESDIR:-${FILESDIR}}
+
+PREFIX=${TOOLCHAIN_PREFIX:-/usr}
+
+if tc_version_is_at_least 3.4.0 ; then
+ LIBPATH=${TOOLCHAIN_LIBPATH:-${PREFIX}/lib/gcc/${CTARGET}/${GCC_CONFIG_VER}}
+else
+ LIBPATH=${TOOLCHAIN_LIBPATH:-${PREFIX}/lib/gcc-lib/${CTARGET}/${GCC_CONFIG_VER}}
+fi
+INCLUDEPATH=${TOOLCHAIN_INCLUDEPATH:-${LIBPATH}/include}
+
+if is_crosscompile ; then
+ BINPATH=${TOOLCHAIN_BINPATH:-${PREFIX}/${CHOST}/${CTARGET}/gcc-bin/${GCC_CONFIG_VER}}
+else
+ BINPATH=${TOOLCHAIN_BINPATH:-${PREFIX}/${CTARGET}/gcc-bin/${GCC_CONFIG_VER}}
+fi
+
+DATAPATH=${TOOLCHAIN_DATAPATH:-${PREFIX}/share/gcc-data/${CTARGET}/${GCC_CONFIG_VER}}
+
+# Don't install in /usr/include/g++-v3/, but in gcc internal directory.
+# We will handle /usr/include/g++-v3/ with gcc-config ...
+STDCXX_INCDIR=${TOOLCHAIN_STDCXX_INCDIR:-${LIBPATH}/include/g++-v${GCC_BRANCH_VER/\.*/}}
+
+#---->> LICENSE+SLOT+IUSE logic <<----
+
+if tc_version_is_at_least 4.6 ; then
+ LICENSE="GPL-3+ LGPL-3+ || ( GPL-3+ libgcc libstdc++ gcc-runtime-library-exception-3.1 ) FDL-1.3+"
+elif tc_version_is_at_least 4.4 ; then
+ LICENSE="GPL-3+ LGPL-3+ || ( GPL-3+ libgcc libstdc++ gcc-runtime-library-exception-3.1 ) FDL-1.2+"
+elif tc_version_is_at_least 4.3 ; then
+ LICENSE="GPL-3+ LGPL-3+ || ( GPL-3+ libgcc libstdc++ ) FDL-1.2+"
+elif tc_version_is_at_least 4.2 ; then
+ LICENSE="GPL-3+ LGPL-2.1+ || ( GPL-3+ libgcc libstdc++ ) FDL-1.2+"
+elif tc_version_is_at_least 3.3 ; then
+ LICENSE="GPL-2+ LGPL-2.1+ FDL-1.2+"
+else
+ LICENSE="GPL-2+ LGPL-2.1+ FDL-1.1+"
+fi
+
+IUSE="multislot regression-test vanilla"
+IUSE_DEF=( nls nptl )
+
+if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then
+ IUSE+=" altivec debug"
+ IUSE_DEF+=( cxx fortran )
+ [[ -n ${PIE_VER} ]] && IUSE+=" nopie"
+ [[ -n ${HTB_VER} ]] && IUSE+=" boundschecking"
+ [[ -n ${D_VER} ]] && IUSE+=" d"
+ [[ -n ${SPECS_VER} ]] && IUSE+=" nossp"
+ tc_version_is_at_least 3 && IUSE+=" doc gcj awt hardened multilib objc"
+ tc_version_is_at_least 4.0 && IUSE+=" objc-gc"
+ tc_version_is_between 4.0 4.9 && IUSE+=" mudflap"
+ tc_version_is_at_least 4.1 && IUSE+=" libssp objc++"
+ tc_version_is_at_least 4.2 && IUSE_DEF+=( openmp )
+ tc_version_is_at_least 4.3 && IUSE+=" fixed-point"
+ tc_version_is_at_least 4.7 && IUSE+=" go"
+ # Note: while <=gcc-4.7 also supported graphite, it required forked ppl
+ # versions which we dropped. Since graphite was also experimental in
+ # the older versions, we don't want to bother supporting it. #448024
+ tc_version_is_at_least 4.8 && IUSE+=" graphite" IUSE_DEF+=( sanitize )
+ tc_version_is_at_least 4.9 && IUSE+=" cilk"
+ tc_version_is_at_least 6.0 && IUSE+=" pie"
+fi
+
+IUSE+=" ${IUSE_DEF[*]/#/+}"
+
+# Support upgrade paths here or people get pissed
+if ! tc_version_is_at_least 4.7 || is_crosscompile || use multislot || [[ ${GCC_PV} == *_alpha* ]] ; then
+ SLOT="${GCC_CONFIG_VER}"
+else
+ SLOT="${GCC_BRANCH_VER}"
+fi
+
+#---->> DEPEND <<----
+
+RDEPEND="sys-libs/zlib
+ nls? ( virtual/libintl )"
+
+tc_version_is_at_least 3 && RDEPEND+=" virtual/libiconv"
+
+if tc_version_is_at_least 4 ; then
+ GMP_MPFR_DEPS=">=dev-libs/gmp-4.3.2 >=dev-libs/mpfr-2.4.2"
+ if tc_version_is_at_least 4.3 ; then
+ RDEPEND+=" ${GMP_MPFR_DEPS}"
+ elif in_iuse fortran ; then
+ RDEPEND+=" fortran? ( ${GMP_MPFR_DEPS} )"
+ fi
+fi
+
+tc_version_is_at_least 4.5 && RDEPEND+=" >=dev-libs/mpc-0.8.1"
+
+if in_iuse graphite ; then
+ if tc_version_is_at_least 5.0 ; then
+ RDEPEND+=" graphite? ( >=dev-libs/isl-0.14 )"
+ elif tc_version_is_at_least 4.8 ; then
+ RDEPEND+="
+ graphite? (
+ >=dev-libs/cloog-0.18.0
+ >=dev-libs/isl-0.11.1
+ )"
+ fi
+fi
+
+DEPEND="${RDEPEND}
+ >=sys-devel/bison-1.875
+ >=sys-devel/flex-2.5.4
+ nls? ( sys-devel/gettext )
+ regression-test? (
+ >=dev-util/dejagnu-1.4.4
+ >=sys-devel/autogen-5.5.4
+ )"
+
+if in_iuse gcj ; then
+ GCJ_DEPS=">=media-libs/libart_lgpl-2.1"
+ GCJ_GTK_DEPS="
+ x11-libs/libXt
+ x11-libs/libX11
+ x11-libs/libXtst
+ x11-proto/xproto
+ x11-proto/xextproto
+ =x11-libs/gtk+-2*
+ virtual/pkgconfig
+ amd64? ( multilib? (
+ app-emulation/emul-linux-x86-gtklibs
+ app-emulation/emul-linux-x86-xlibs
+ ) )
+ "
+ tc_version_is_at_least 3.4 && GCJ_GTK_DEPS+=" x11-libs/pango"
+ tc_version_is_at_least 4.2 && GCJ_DEPS+=" app-arch/zip app-arch/unzip"
+ DEPEND+=" gcj? ( awt? ( ${GCJ_GTK_DEPS} ) ${GCJ_DEPS} )"
+fi
+
+PDEPEND=">=sys-devel/gcc-config-1.7"
+
+#---->> S + SRC_URI essentials <<----
+
+# Set the source directory depending on whether we're using
+# a prerelease, snapshot, or release tarball.
+S=$(
+ if [[ -n ${PRERELEASE} ]] ; then
+ echo ${WORKDIR}/gcc-${PRERELEASE}
+ elif [[ -n ${SNAPSHOT} ]] ; then
+ echo ${WORKDIR}/gcc-${SNAPSHOT}
+ else
+ echo ${WORKDIR}/gcc-${GCC_RELEASE_VER}
+ fi
+)
+
+gentoo_urls() {
+ local devspace="HTTP~vapier/dist/URI HTTP~rhill/dist/URI
+ HTTP~zorry/patches/gcc/URI HTTP~blueness/dist/URI"
+ devspace=${devspace//HTTP/http:\/\/dev.gentoo.org\/}
+ echo mirror://gentoo/$1 ${devspace//URI/$1}
+}
+
+# This function handles the basics of setting the SRC_URI for a gcc ebuild.
+# To use, set SRC_URI with:
+#
+# SRC_URI="$(get_gcc_src_uri)"
+#
+# Other than the variables normally set by portage, this function's behavior
+# can be altered by setting the following:
+#
+# SNAPSHOT
+# If set, this variable signals that we should be using a snapshot of
+# gcc. It is expected to be in the format "YYYY-MM-DD". Note that if
+# the ebuild has a _pre suffix, this variable is ignored and the
+# prerelease tarball is used instead.
+#
+# BRANCH_UPDATE
+# If set, this variable signals that we should be using the main
+# release tarball (determined by ebuild version) and applying a
+# CVS branch update patch against it. The location of this branch
+# update patch is assumed to be in ${GENTOO_TOOLCHAIN_BASE_URI}.
+# Just like with SNAPSHOT, this variable is ignored if the ebuild
+# has a _pre suffix.
+#
+# PATCH_VER
+# PATCH_GCC_VER
+# This should be set to the version of the gentoo patch tarball.
+# The resulting filename of this tarball will be:
+# gcc-${PATCH_GCC_VER:-${GCC_RELEASE_VER}}-patches-${PATCH_VER}.tar.bz2
+#
+# PIE_VER
+# PIE_GCC_VER
+# These variables control patching in various updates for the logic
+# controlling Position Independent Executables. PIE_VER is expected
+# to be the version of this patch, and PIE_GCC_VER the gcc version of
+# the patch:
+# An example:
+# PIE_VER="8.7.6.5"
+# PIE_GCC_VER="3.4.0"
+# The resulting filename of this tarball will be:
+# gcc-${PIE_GCC_VER:-${GCC_RELEASE_VER}}-piepatches-v${PIE_VER}.tar.bz2
+#
+# SPECS_VER
+# SPECS_GCC_VER
+# This is for the minispecs files included in the hardened gcc-4.x
+# The specs files for hardenedno*, vanilla and for building the "specs" file.
+# SPECS_VER is expected to be the version of this patch, SPECS_GCC_VER
+# the gcc version of the patch.
+# An example:
+# SPECS_VER="8.7.6.5"
+# SPECS_GCC_VER="3.4.0"
+# The resulting filename of this tarball will be:
+# gcc-${SPECS_GCC_VER:-${GCC_RELEASE_VER}}-specs-${SPECS_VER}.tar.bz2
+#
+# HTB_VER
+# HTB_GCC_VER
+# These variables control whether or not an ebuild supports Herman
+# ten Brugge's bounds-checking patches. If you want to use a patch
+# for an older gcc version with a new gcc, make sure you set
+# HTB_GCC_VER to that version of gcc.
+get_gcc_src_uri() {
+ export PATCH_GCC_VER=${PATCH_GCC_VER:-${GCC_RELEASE_VER}}
+ export UCLIBC_GCC_VER=${UCLIBC_GCC_VER:-${PATCH_GCC_VER}}
+ export PIE_GCC_VER=${PIE_GCC_VER:-${GCC_RELEASE_VER}}
+ export HTB_GCC_VER=${HTB_GCC_VER:-${GCC_RELEASE_VER}}
+ export SPECS_GCC_VER=${SPECS_GCC_VER:-${GCC_RELEASE_VER}}
+
+ # Set where to download gcc itself depending on whether we're using a
+ # prerelease, snapshot, or release tarball.
+ if [[ ${PV} == *9999* ]] ; then
+ # Nothing to do w/git snapshots.
+ :
+ elif [[ -n ${PRERELEASE} ]] ; then
+ GCC_SRC_URI="ftp://gcc.gnu.org/pub/gcc/prerelease-${PRERELEASE}/gcc-${PRERELEASE}.tar.bz2"
+ elif [[ -n ${SNAPSHOT} ]] ; then
+ GCC_SRC_URI="ftp://gcc.gnu.org/pub/gcc/snapshots/${SNAPSHOT}/gcc-${SNAPSHOT}.tar.bz2"
+ else
+ GCC_SRC_URI="mirror://gnu/gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.bz2"
+ # we want all branch updates to be against the main release
+ [[ -n ${BRANCH_UPDATE} ]] && \
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${GCC_RELEASE_VER}-branch-update-${BRANCH_UPDATE}.patch.bz2)"
+ fi
+
+ [[ -n ${UCLIBC_VER} ]] && \
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${UCLIBC_GCC_VER}-uclibc-patches-${UCLIBC_VER}.tar.bz2)"
+ [[ -n ${PATCH_VER} ]] && \
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.bz2)"
+
+ # strawberry pie, Cappuccino and a Gauloises (it's a good thing)
+ [[ -n ${PIE_VER} ]] && \
+ PIE_CORE=${PIE_CORE:-gcc-${PIE_GCC_VER}-piepatches-v${PIE_VER}.tar.bz2} && \
+ GCC_SRC_URI+=" $(gentoo_urls ${PIE_CORE})"
+
+ # gcc minispec for the hardened gcc 4 compiler
+ [[ -n ${SPECS_VER} ]] && \
+ GCC_SRC_URI+=" $(gentoo_urls gcc-${SPECS_GCC_VER}-specs-${SPECS_VER}.tar.bz2)"
+
+ # gcc bounds checking patch
+ if [[ -n ${HTB_VER} ]] ; then
+ local HTBFILE="bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch.bz2"
+ GCC_SRC_URI+="
+ boundschecking? (
+ mirror://sourceforge/boundschecking/${HTBFILE}
+ $(gentoo_urls ${HTBFILE})
+ )"
+ fi
+
+ [[ -n ${D_VER} ]] && \
+ GCC_SRC_URI+=" d? ( mirror://sourceforge/dgcc/gdc-${D_VER}-src.tar.bz2 )"
+
+ if in_iuse gcj ; then
+ if tc_version_is_at_least 4.5 ; then
+ GCC_SRC_URI+=" gcj? ( ftp://sourceware.org/pub/java/ecj-4.5.jar )"
+ elif tc_version_is_at_least 4.3 ; then
+ GCC_SRC_URI+=" gcj? ( ftp://sourceware.org/pub/java/ecj-4.3.jar )"
+ fi
+ fi
+
+ echo "${GCC_SRC_URI}"
+}
+
+SRC_URI=$(get_gcc_src_uri)
+
+#---->> pkg_pretend <<----
+
+toolchain_pkg_pretend() {
+ if [[ -n ${PRERELEASE}${SNAPSHOT} || ${PV} == *9999* ]] &&
+ [[ -z ${I_PROMISE_TO_SUPPLY_PATCHES_WITH_BUGS} ]] ; then
+ die "Please \`export I_PROMISE_TO_SUPPLY_PATCHES_WITH_BUGS=1\` or define it" \
+ "in your make.conf if you want to use this version."
+ fi
+
+ [[ -z ${UCLIBC_VER} ]] && [[ ${CTARGET} == *-uclibc* ]] && \
+ die "Sorry, this version does not support uClibc"
+
+ if ! use_if_iuse cxx ; then
+ use_if_iuse go && ewarn 'Go requires a C++ compiler, disabled due to USE="-cxx"'
+ use_if_iuse objc++ && ewarn 'Obj-C++ requires a C++ compiler, disabled due to USE="-cxx"'
+ use_if_iuse gcj && ewarn 'GCJ requires a C++ compiler, disabled due to USE="-cxx"'
+ fi
+
+ want_minispecs
+}
+
+#---->> pkg_setup <<----
+
+toolchain_pkg_setup() {
+ case ${EAPI} in
+ 2|3) toolchain_pkg_pretend ;;
+ esac
+
+	# we don't want to use the installed compiler's specs to build gcc
+ unset GCC_SPECS
+ unset LANGUAGES #265283
+}
+
+#---->> src_unpack <<----
+
+toolchain_src_unpack() {
+ if [[ ${PV} == *9999* ]]; then
+ git-2_src_unpack
+ else
+ gcc_quick_unpack
+ fi
+}
+
+gcc_quick_unpack() {
+ pushd "${WORKDIR}" > /dev/null
+ export PATCH_GCC_VER=${PATCH_GCC_VER:-${GCC_RELEASE_VER}}
+ export UCLIBC_GCC_VER=${UCLIBC_GCC_VER:-${PATCH_GCC_VER}}
+ export PIE_GCC_VER=${PIE_GCC_VER:-${GCC_RELEASE_VER}}
+ export HTB_GCC_VER=${HTB_GCC_VER:-${GCC_RELEASE_VER}}
+ export SPECS_GCC_VER=${SPECS_GCC_VER:-${GCC_RELEASE_VER}}
+
+ if [[ -n ${GCC_A_FAKEIT} ]] ; then
+ unpack ${GCC_A_FAKEIT}
+ elif [[ -n ${PRERELEASE} ]] ; then
+ unpack gcc-${PRERELEASE}.tar.bz2
+ elif [[ -n ${SNAPSHOT} ]] ; then
+ unpack gcc-${SNAPSHOT}.tar.bz2
+ elif [[ ${PV} != *9999* ]] ; then
+ unpack gcc-${GCC_RELEASE_VER}.tar.bz2
+ # We want branch updates to be against a release tarball
+ if [[ -n ${BRANCH_UPDATE} ]] ; then
+ pushd "${S}" > /dev/null
+ epatch "${DISTDIR}"/gcc-${GCC_RELEASE_VER}-branch-update-${BRANCH_UPDATE}.patch.bz2
+ popd > /dev/null
+ fi
+ fi
+
+ if [[ -n ${D_VER} ]] && use d ; then
+ pushd "${S}"/gcc > /dev/null
+ unpack gdc-${D_VER}-src.tar.bz2
+ cd ..
+ ebegin "Adding support for the D language"
+ ./gcc/d/setup-gcc.sh >& "${T}"/dgcc.log
+ if ! eend $? ; then
+ eerror "The D GCC package failed to apply"
+ eerror "Please include this log file when posting a bug report:"
+ eerror " ${T}/dgcc.log"
+ die "failed to include the D language"
+ fi
+ popd > /dev/null
+ fi
+
+ [[ -n ${PATCH_VER} ]] && \
+ unpack gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.bz2
+
+ [[ -n ${UCLIBC_VER} ]] && \
+ unpack gcc-${UCLIBC_GCC_VER}-uclibc-patches-${UCLIBC_VER}.tar.bz2
+
+ if want_pie ; then
+ if [[ -n ${PIE_CORE} ]] ; then
+ unpack ${PIE_CORE}
+ else
+ unpack gcc-${PIE_GCC_VER}-piepatches-v${PIE_VER}.tar.bz2
+ fi
+ [[ -n ${SPECS_VER} ]] && \
+ unpack gcc-${SPECS_GCC_VER}-specs-${SPECS_VER}.tar.bz2
+ fi
+
+ use_if_iuse boundschecking && unpack "bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch.bz2"
+
+ popd > /dev/null
+}
+
+#---->> src_prepare <<----
+
+toolchain_src_prepare() {
+ export BRANDING_GCC_PKGVERSION="Gentoo ${GCC_PVR}"
+ cd "${S}"
+
+ if ! use vanilla ; then
+ if [[ -n ${PATCH_VER} ]] ; then
+ guess_patch_type_in_dir "${WORKDIR}"/patch
+ EPATCH_MULTI_MSG="Applying Gentoo patches ..." \
+ epatch "${WORKDIR}"/patch
+ BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION} p${PATCH_VER}"
+ fi
+ if [[ -n ${UCLIBC_VER} ]] ; then
+ guess_patch_type_in_dir "${WORKDIR}"/uclibc
+ EPATCH_MULTI_MSG="Applying uClibc patches ..." \
+ epatch "${WORKDIR}"/uclibc
+ fi
+ fi
+ do_gcc_HTB_patches
+ do_gcc_PIE_patches
+ epatch_user
+
+ if ( tc_version_is_at_least 4.8.2 || use hardened ) && ! use vanilla ; then
+ make_gcc_hard
+ fi
+
+ # install the libstdc++ python into the right location
+ # http://gcc.gnu.org/PR51368
+ if tc_version_is_between 4.5 4.7 ; then
+ sed -i \
+ '/^pythondir =/s:=.*:= $(datadir)/python:' \
+ "${S}"/libstdc++-v3/python/Makefile.in || die
+ fi
+
+ # make sure the pkg config files install into multilib dirs.
+ # since we configure with just one --libdir, we can't use that
+ # (as gcc itself takes care of building multilibs). #435728
+ find "${S}" -name Makefile.in \
+ -exec sed -i '/^pkgconfigdir/s:=.*:=$(toolexeclibdir)/pkgconfig:' {} +
+
+ # No idea when this first started being fixed, but let's go with 4.3.x for now
+ if ! tc_version_is_at_least 4.3 ; then
+ fix_files=""
+ for x in contrib/test_summary libstdc++-v3/scripts/check_survey.in ; do
+ [[ -e ${x} ]] && fix_files="${fix_files} ${x}"
+ done
+ ht_fix_file ${fix_files} */configure *.sh */Makefile.in
+ fi
+
+ setup_multilib_osdirnames
+ gcc_version_patch
+
+ if tc_version_is_at_least 4.1 ; then
+ if [[ -n ${SNAPSHOT} || -n ${PRERELEASE} ]] ; then
+ # BASE-VER must be a three-digit version number
+ # followed by an optional -pre string
+ # eg. 4.5.1, 4.6.2-pre20120213, 4.7.0-pre9999
+ # If BASE-VER differs from ${PV/_/-} then libraries get installed in
+ # the wrong directory.
+ echo ${PV/_/-} > "${S}"/gcc/BASE-VER
+ fi
+ fi
+
+ # >= gcc-4.3 doesn't bundle ecj.jar, so copy it
+ if tc_version_is_at_least 4.3 && use gcj ; then
+ if tc_version_is_at_least 4.5 ; then
+ einfo "Copying ecj-4.5.jar"
+ cp -pPR "${DISTDIR}/ecj-4.5.jar" "${S}/ecj.jar" || die
+ else
+ einfo "Copying ecj-4.3.jar"
+ cp -pPR "${DISTDIR}/ecj-4.3.jar" "${S}/ecj.jar" || die
+ fi
+ fi
+
+ # disable --as-needed from being compiled into gcc specs
+ # natively when using a gcc version < 3.4.4
+ # http://gcc.gnu.org/PR14992
+ if ! tc_version_is_at_least 3.4.4 ; then
+ sed -i -e s/HAVE_LD_AS_NEEDED/USE_LD_AS_NEEDED/g "${S}"/gcc/config.in
+ fi
+
+ # In gcc 3.3.x and 3.4.x, rename the java bins to gcc-specific names
+ # in line with gcc-4.
+ if tc_version_is_between 3.3 4.0 ; then
+ do_gcc_rename_java_bins
+ fi
+
+ # Prevent libffi from being installed
+ if tc_version_is_between 3.0 4.8 ; then
+ sed -i -e 's/\(install.*:\) install-.*recursive/\1/' "${S}"/libffi/Makefile.in || die
+ sed -i -e 's/\(install-data-am:\).*/\1/' "${S}"/libffi/include/Makefile.in || die
+ fi
+
+ # Fixup libtool to correctly generate .la files with portage
+ elibtoolize --portage --shallow --no-uclibc
+
+ gnuconfig_update
+
+ # update configure files
+ local f
+ einfo "Fixing misc issues in configure files"
+ for f in $(grep -l 'autoconf version 2.13' $(find "${S}" -name configure)) ; do
+ ebegin " Updating ${f/${S}\/} [LANG]"
+ patch "${f}" "${GCC_FILESDIR}"/gcc-configure-LANG.patch >& "${T}"/configure-patch.log \
+ || eerror "Please file a bug about this"
+ eend $?
+ done
+ sed -i 's|A-Za-z0-9|[:alnum:]|g' "${S}"/gcc/*.awk #215828
+
+ # Prevent new texinfo from breaking old versions (see #198182, #464008)
+ tc_version_is_at_least 4.1 && epatch "${GCC_FILESDIR}"/gcc-configure-texinfo.patch
+
+ if [[ -x contrib/gcc_update ]] ; then
+ einfo "Touching generated files"
+ ./contrib/gcc_update --touch | \
+ while read f ; do
+ einfo " ${f%%...}"
+ done
+ fi
+}
+
+guess_patch_type_in_dir() {
+ [[ -n $(ls "$1"/*.bz2 2>/dev/null) ]] \
+ && EPATCH_SUFFIX="patch.bz2" \
+ || EPATCH_SUFFIX="patch"
+}
+
+do_gcc_HTB_patches() {
+ use_if_iuse boundschecking || return 0
+
+ # modify the bounds checking patch with a regression patch
+ epatch "${WORKDIR}/bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch"
+ BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, HTB-${HTB_GCC_VER}-${HTB_VER}"
+}
+
+do_gcc_PIE_patches() {
+ want_pie || return 0
+ use vanilla && return 0
+
+ if tc_version_is_at_least 4.3.2 ; then
+ guess_patch_type_in_dir "${WORKDIR}"/piepatch/
+ EPATCH_MULTI_MSG="Applying pie patches ..." \
+ epatch "${WORKDIR}"/piepatch/
+ else
+ guess_patch_type_in_dir "${WORKDIR}"/piepatch/upstream
+
+ # corrects startfile/endfile selection and shared/static/pie flag usage
+ EPATCH_MULTI_MSG="Applying upstream pie patches ..." \
+ epatch "${WORKDIR}"/piepatch/upstream
+ # adds non-default pie support (rs6000)
+ EPATCH_MULTI_MSG="Applying non-default pie patches ..." \
+ epatch "${WORKDIR}"/piepatch/nondef
+ # adds default pie support (rs6000 too) if DEFAULT_PIE[_SSP] is defined
+ EPATCH_MULTI_MSG="Applying default pie patches ..." \
+ epatch "${WORKDIR}"/piepatch/def
+ fi
+
+ BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, pie-${PIE_VER}"
+}
+
+# configure to build with the hardened GCC specs as the default
+make_gcc_hard() {
+ # we want to be able to control the pie patch logic via something other
+ # than ALL_CFLAGS...
+ sed -e '/^ALL_CFLAGS/iHARD_CFLAGS = ' \
+ -e 's|^ALL_CFLAGS = |ALL_CFLAGS = $(HARD_CFLAGS) |' \
+ -i "${S}"/gcc/Makefile.in
+ # Need to add HARD_CFLAGS to ALL_CXXFLAGS on >= 4.7
+ if tc_version_is_at_least 4.7 ; then
+ sed -e '/^ALL_CXXFLAGS/iHARD_CFLAGS = ' \
+ -e 's|^ALL_CXXFLAGS = |ALL_CXXFLAGS = $(HARD_CFLAGS) |' \
+ -i "${S}"/gcc/Makefile.in
+ fi
+
+ # defaults to enable for all toolchains
+ local gcc_hard_flags=""
+ if use hardened ; then
+ if hardened_gcc_works ; then
+ einfo "Updating gcc to use automatic PIE + SSP building ..."
+ gcc_hard_flags+=" -DEFAULT_PIE_SSP"
+ elif hardened_gcc_works pie ; then
+ einfo "Updating gcc to use automatic PIE building ..."
+ ewarn "SSP has not been enabled by default"
+ gcc_hard_flags+=" -DEFAULT_PIE"
+ elif hardened_gcc_works ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ ewarn "PIE has not been enabled by default"
+ gcc_hard_flags+=" -DEFAULT_SSP"
+ else
+			# do nothing if hardened isn't supported, but don't die either
+ ewarn "hardened is not supported for this arch in this gcc version"
+ return 0
+ fi
+ # rebrand to make bug reports easier
+ BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened}
+ else
+ if hardened_gcc_works ssp ; then
+ einfo "Updating gcc to use automatic SSP building ..."
+ gcc_hard_flags+=" -DEFAULT_SSP"
+ fi
+ fi
+
+ sed -i \
+ -e "/^HARD_CFLAGS = /s|=|= ${gcc_hard_flags} |" \
+ "${S}"/gcc/Makefile.in || die
+
+}
+
+# This is a historical wart. The original Gentoo/amd64 port used:
+# lib32 - 32bit binaries (x86)
+# lib64 - 64bit binaries (x86_64)
+# lib - "native" binaries (a symlink to lib64)
+# Most other distros use the logic (including mainline gcc):
+# lib - 32bit binaries (x86)
+# lib64 - 64bit binaries (x86_64)
+# Over time, Gentoo is migrating to the latter form.
+#
+# Unfortunately, due to distros picking the lib32 behavior, newer gcc
+# versions will dynamically detect whether to use lib or lib32 for its
+# 32bit multilib. So, to keep the automagic from getting things wrong
+# while people are transitioning from the old style to the new style,
+# we always set the MULTILIB_OSDIRNAMES var for relevant targets.
+setup_multilib_osdirnames() {
+ is_multilib || return 0
+
+ local config
+ local libdirs="../lib64 ../lib32"
+
+ # this only makes sense for some Linux targets
+ case ${CTARGET} in
+ x86_64*-linux*) config="i386" ;;
+ powerpc64*-linux*) config="rs6000" ;;
+ sparc64*-linux*) config="sparc" ;;
+ s390x*-linux*) config="s390" ;;
+ *) return 0 ;;
+ esac
+ config+="/t-linux64"
+
+ local sed_args=()
+ if tc_version_is_at_least 4.6 ; then
+ sed_args+=( -e 's:$[(]call if_multiarch[^)]*[)]::g' )
+ fi
+ if [[ ${SYMLINK_LIB} == "yes" ]] ; then
+ einfo "updating multilib directories to be: ${libdirs}"
+ if tc_version_is_at_least 4.6.4 || tc_version_is_at_least 4.7 ; then
+ sed_args+=( -e '/^MULTILIB_OSDIRNAMES.*lib32/s:[$][(]if.*):../lib32:' )
+ else
+ sed_args+=( -e "/^MULTILIB_OSDIRNAMES/s:=.*:= ${libdirs}:" )
+ fi
+ else
+ einfo "using upstream multilib; disabling lib32 autodetection"
+ sed_args+=( -r -e 's:[$][(]if.*,(.*)[)]:\1:' )
+ fi
+ sed -i "${sed_args[@]}" "${S}"/gcc/config/${config} || die
+}
+
+gcc_version_patch() {
+ # gcc-4.3+ has configure flags (whoo!)
+ tc_version_is_at_least 4.3 && return 0
+
+ local version_string=${GCC_CONFIG_VER}
+ [[ -n ${BRANCH_UPDATE} ]] && version_string+=" ${BRANCH_UPDATE}"
+
+ einfo "patching gcc version: ${version_string} (${BRANDING_GCC_PKGVERSION})"
+
+ local gcc_sed=( -e 's:gcc\.gnu\.org/bugs\.html:bugs\.gentoo\.org/:' )
+ if grep -qs VERSUFFIX "${S}"/gcc/version.c ; then
+ gcc_sed+=( -e "/VERSUFFIX \"\"/s:\"\":\" (${BRANDING_GCC_PKGVERSION})\":" )
+ else
+ version_string="${version_string} (${BRANDING_GCC_PKGVERSION})"
+ gcc_sed+=( -e "/const char version_string\[\] = /s:= \".*\":= \"${version_string}\":" )
+ fi
+ sed -i "${gcc_sed[@]}" "${S}"/gcc/version.c || die
+}
+
+do_gcc_rename_java_bins() {
+ # bug #139918 - conflict between gcc and java-config-2 for ownership of
+ # /usr/bin/rmi{c,registry}. Done with mv & sed rather than a patch
+ # because patches would be large (thanks to the rename of man files),
+ # and it's clear from the sed invocations that all that changes is the
+ # rmi{c,registry} names to grmi{c,registry} names.
+ # Kevin F. Quinn 2006-07-12
+ einfo "Renaming jdk executables rmic and rmiregistry to grmic and grmiregistry."
+ # 1) Move the man files if present (missing prior to gcc-3.4)
+ for manfile in rmic rmiregistry ; do
+ [[ -f ${S}/gcc/doc/${manfile}.1 ]] || continue
+ mv "${S}"/gcc/doc/${manfile}.1 "${S}"/gcc/doc/g${manfile}.1
+ done
+	# 2) Fixup references in the docs if present (missing prior to gcc-3.4)
+ for jfile in gcc/doc/gcj.info gcc/doc/grmic.1 gcc/doc/grmiregistry.1 gcc/java/gcj.texi ; do
+ [[ -f ${S}/${jfile} ]] || continue
+ sed -i -e 's:rmiregistry:grmiregistry:g' "${S}"/${jfile} ||
+ die "Failed to fixup file ${jfile} for rename to grmiregistry"
+ sed -i -e 's:rmic:grmic:g' "${S}"/${jfile} ||
+ die "Failed to fixup file ${jfile} for rename to grmic"
+ done
+ # 3) Fixup Makefiles to build the changed executable names
+ # These are present in all 3.x versions, and are the important bit
+ # to get gcc to build with the new names.
+ for jfile in libjava/Makefile.am libjava/Makefile.in gcc/java/Make-lang.in ; do
+ sed -i -e 's:rmiregistry:grmiregistry:g' "${S}"/${jfile} ||
+ die "Failed to fixup file ${jfile} for rename to grmiregistry"
+ # Careful with rmic on these files; it's also the name of a directory
+ # which should be left unchanged. Replace occurrences of 'rmic$',
+ # 'rmic_' and 'rmic '.
+ sed -i -e 's:rmic\([$_ ]\):grmic\1:g' "${S}"/${jfile} ||
+ die "Failed to fixup file ${jfile} for rename to grmic"
+ done
+}
+
+#---->> src_configure <<----
+
+toolchain_src_configure() {
+ downgrade_arch_flags
+ gcc_do_filter_flags
+
+ einfo "CFLAGS=\"${CFLAGS}\""
+ einfo "CXXFLAGS=\"${CXXFLAGS}\""
+ einfo "LDFLAGS=\"${LDFLAGS}\""
+
+ # Force internal zip based jar script to avoid random
+ # issues with 3rd party jar implementations. #384291
+ export JAR=no
+
+ # For hardened gcc 4.3 piepatchset to build the hardened specs
+ # file (build.specs) to use when building gcc.
+ if ! tc_version_is_at_least 4.4 && want_minispecs ; then
+ setup_minispecs_gcc_build_specs
+ fi
+
+ local confgcc=( --host=${CHOST} )
+
+ if is_crosscompile || tc-is-cross-compiler ; then
+ # Straight from the GCC install doc:
+ # "GCC has code to correctly determine the correct value for target
+ # for nearly all native systems. Therefore, we highly recommend you
+ # not provide a configure target when configuring a native compiler."
+ confgcc+=( --target=${CTARGET} )
+ fi
+ [[ -n ${CBUILD} ]] && confgcc+=( --build=${CBUILD} )
+
+ confgcc+=(
+ --prefix="${PREFIX}"
+ --bindir="${BINPATH}"
+ --includedir="${INCLUDEPATH}"
+ --datadir="${DATAPATH}"
+ --mandir="${DATAPATH}/man"
+ --infodir="${DATAPATH}/info"
+ --with-gxx-include-dir="${STDCXX_INCDIR}"
+ )
+
+ # Stick the python scripts in their own slotted directory (bug #279252)
+ #
+ # --with-python-dir=DIR
+ # Specifies where to install the Python modules used for aot-compile. DIR
+ # should not include the prefix used in installation. For example, if the
+ # Python modules are to be installed in /usr/lib/python2.5/site-packages,
+ # then --with-python-dir=/lib/python2.5/site-packages should be passed.
+ #
+ # This should translate into "/share/gcc-data/${CTARGET}/${GCC_CONFIG_VER}/python"
+ if tc_version_is_at_least 4.4 ; then
+ confgcc+=( --with-python-dir=${DATAPATH/$PREFIX/}/python )
+ fi
+
+ ### language options
+
+ local GCC_LANG="c"
+ is_cxx && GCC_LANG+=",c++"
+ is_d && GCC_LANG+=",d"
+ is_gcj && GCC_LANG+=",java"
+ is_go && GCC_LANG+=",go"
+ if is_objc || is_objcxx ; then
+ GCC_LANG+=",objc"
+ if tc_version_is_at_least 4 ; then
+ use objc-gc && confgcc+=( --enable-objc-gc )
+ fi
+ is_objcxx && GCC_LANG+=",obj-c++"
+ fi
+
+ # fortran support just got sillier! the lang value can be f77 for
+ # fortran77, f95 for fortran95, or just plain old fortran for the
+ # currently supported standard depending on gcc version.
+ is_fortran && GCC_LANG+=",fortran"
+ is_f77 && GCC_LANG+=",f77"
+ is_f95 && GCC_LANG+=",f95"
+
+ # We do NOT want 'ADA support' in here!
+ # is_ada && GCC_LANG+=",ada"
+
+ confgcc+=( --enable-languages=${GCC_LANG} )
+
+ ### general options
+
+ confgcc+=(
+ --enable-obsolete
+ --enable-secureplt
+ --disable-werror
+ --with-system-zlib
+ )
+
+ if use nls ; then
+ confgcc+=( --enable-nls --without-included-gettext )
+ else
+ confgcc+=( --disable-nls )
+ fi
+
+ tc_version_is_at_least 3.4 || confgcc+=( --disable-libunwind-exceptions )
+
+ # Use the default ("release") checking because upstream usually neglects
+ # to test "disabled" so it has a history of breaking. #317217
+ if tc_version_is_at_least 3.4 ; then
+ # The "release" keyword is new to 4.0. #551636
+ local off=$(tc_version_is_at_least 4.0 && echo release || echo no)
+ confgcc+=( --enable-checking="${GCC_CHECKS_LIST:-$(usex debug yes ${off})}" )
+ fi
+
+ # Branding
+ tc_version_is_at_least 4.3 && confgcc+=(
+ --with-bugurl=https://bugs.gentoo.org/
+ --with-pkgversion="${BRANDING_GCC_PKGVERSION}"
+ )
+
+ # If we want hardened support with the newer piepatchset for >=gcc 4.4
+ if tc_version_is_at_least 4.4 && want_minispecs ; then
+ confgcc+=( $(use_enable hardened esp) )
+ fi
+
+ # allow gcc to search for clock funcs in the main C lib.
+ # if it can't find them, then tough cookies -- we aren't
+ # going to link in -lrt to all C++ apps. #411681
+ if tc_version_is_at_least 4.4 && is_cxx ; then
+ confgcc+=( --enable-libstdcxx-time )
+ fi
+
+ # # Turn on the -Wl,--build-id flag by default for ELF targets. #525942
+ # # This helps with locating debug files.
+ # case ${CTARGET} in
+ # *-linux-*|*-elf|*-eabi)
+ # tc_version_is_at_least 4.5 && confgcc+=(
+ # --enable-linker-build-id
+ # )
+ # ;;
+ # esac
+
+ # newer gcc versions like to bootstrap themselves with C++,
+ # so we need to manually disable it ourselves
+ if tc_version_is_between 4.7 4.8 && ! is_cxx ; then
+ confgcc+=( --disable-build-with-cxx --disable-build-poststage1-with-cxx )
+ fi
+
+ ### Cross-compiler options
+ if is_crosscompile ; then
+ # Enable build warnings by default with cross-compilers when system
+ # paths are included (e.g. via -I flags).
+ confgcc+=( --enable-poison-system-directories )
+
+ # When building a stage1 cross-compiler (just C compiler), we have to
+ # disable a bunch of features or gcc goes boom
+ local needed_libc=""
+ case ${CTARGET} in
+ *-linux) needed_libc=no-fucking-clue;;
+ *-dietlibc) needed_libc=dietlibc;;
+ *-elf|*-eabi) needed_libc=newlib;;
+ *-freebsd*) needed_libc=freebsd-lib;;
+ *-gnu*) needed_libc=glibc;;
+ *-klibc) needed_libc=klibc;;
+ *-musl*) needed_libc=musl;;
+ *-uclibc*)
+ if ! echo '#include <features.h>' | \
+ $(tc-getCPP ${CTARGET}) -E -dD - 2>/dev/null | \
+ grep -q __HAVE_SHARED__
+ then #291870
+ confgcc+=( --disable-shared )
+ fi
+ needed_libc=uclibc
+ ;;
+ *-cygwin) needed_libc=cygwin;;
+ x86_64-*-mingw*|\
+ *-w64-mingw*) needed_libc=mingw64-runtime;;
+ mingw*|*-mingw*) needed_libc=mingw-runtime;;
+ avr) confgcc+=( --enable-shared --disable-threads );;
+ esac
+ if [[ -n ${needed_libc} ]] ; then
+ local confgcc_no_libc=( --disable-shared )
+ tc_version_is_at_least 4.8 && confgcc_no_libc+=( --disable-libatomic )
+ if ! has_version ${CATEGORY}/${needed_libc} ; then
+ confgcc+=(
+ "${confgcc_no_libc[@]}"
+ --disable-threads
+ --without-headers
+ )
+ elif built_with_use --hidden --missing false ${CATEGORY}/${needed_libc} crosscompile_opts_headers-only ; then
+ confgcc+=(
+ "${confgcc_no_libc[@]}"
+ --with-sysroot=${PREFIX}/${CTARGET}
+ )
+ else
+ confgcc+=( --with-sysroot=${PREFIX}/${CTARGET} )
+ fi
+ fi
+
+ tc_version_is_at_least 4.2 && confgcc+=( --disable-bootstrap )
+ else
+ if tc-is-static-only ; then
+ confgcc+=( --disable-shared )
+ else
+ confgcc+=( --enable-shared )
+ fi
+ case ${CHOST} in
+ mingw*|*-mingw*|*-cygwin)
+ confgcc+=( --enable-threads=win32 ) ;;
+ *)
+ confgcc+=( --enable-threads=posix ) ;;
+ esac
+ fi
+
+ # __cxa_atexit is "essential for fully standards-compliant handling of
+ # destructors", but apparently requires glibc.
+ case ${CTARGET} in
+ *-uclibc*)
+ confgcc+=(
+ --disable-__cxa_atexit
+ $(use_enable nptl tls)
+ )
+ tc_version_is_between 3.3 3.4 && confgcc+=( --enable-sjlj-exceptions )
+ if tc_version_is_between 3.4 4.3 ; then
+ confgcc+=( --enable-clocale=uclibc )
+ fi
+ ;;
+ *-elf|*-eabi)
+ confgcc+=( --with-newlib )
+ ;;
+ *-gnu*)
+ confgcc+=(
+ --enable-__cxa_atexit
+ --enable-clocale=gnu
+ )
+ ;;
+ *-freebsd*)
+ confgcc+=( --enable-__cxa_atexit )
+ ;;
+ *-solaris*)
+ confgcc+=( --enable-__cxa_atexit )
+ ;;
+ esac
+
+ ### arch options
+
+ gcc-multilib-configure
+
+ # ppc altivec support
+ confgcc+=( $(use_enable altivec) )
+
+ # gcc has fixed-point arithmetic support in 4.3 for mips targets that can
+ # significantly increase compile time by several hours. This will allow
+ # users to control this feature in the event they need the support.
+ tc_version_is_at_least 4.3 && confgcc+=( $(use_enable fixed-point) )
+
+ case $(tc-is-softfloat) in
+ yes) confgcc+=( --with-float=soft ) ;;
+ softfp) confgcc+=( --with-float=softfp ) ;;
+ *)
+ # If they've explicitly opt-ed in, do hardfloat,
+ # otherwise let the gcc default kick in.
+ case ${CTARGET//_/-} in
+ *-hardfloat-*|*eabihf) confgcc+=( --with-float=hard ) ;;
+ esac
+ esac
+
+ local with_abi_map=()
+ case $(tc-arch) in
+ arm) #264534 #414395
+ local a arm_arch=${CTARGET%%-*}
+ # Remove trailing endian variations first: eb el be bl b l
+ for a in e{b,l} {b,l}e b l ; do
+ if [[ ${arm_arch} == *${a} ]] ; then
+ arm_arch=${arm_arch%${a}}
+ break
+ fi
+ done
+ # Convert armv7{a,r,m} to armv7-{a,r,m}
+ [[ ${arm_arch} == armv7? ]] && arm_arch=${arm_arch/7/7-}
+ # See if this is a valid --with-arch flag
+ if (srcdir=${S}/gcc target=${CTARGET} with_arch=${arm_arch};
+ . "${srcdir}"/config.gcc) &>/dev/null
+ then
+ confgcc+=( --with-arch=${arm_arch} )
+ fi
+
+ # Make default mode thumb for microcontroller classes #418209
+ [[ ${arm_arch} == *-m ]] && confgcc+=( --with-mode=thumb )
+
+ # Enable hardvfp
+ if [[ $(tc-is-softfloat) == "no" ]] && \
+ [[ ${CTARGET} == armv[67]* ]] && \
+ tc_version_is_at_least 4.5
+ then
+ # Follow the new arm hardfp distro standard by default
+ confgcc+=( --with-float=hard )
+ case ${CTARGET} in
+ armv6*) confgcc+=( --with-fpu=vfp ) ;;
+ armv7*) confgcc+=( --with-fpu=vfpv3-d16 ) ;;
+ esac
+ fi
+ ;;
+ mips)
+ # Add --with-abi flags to set default ABI
+ confgcc+=( --with-abi=$(gcc-abi-map ${TARGET_DEFAULT_ABI}) )
+ ;;
+ amd64)
+			# drop the older/ABI checks once this gets merged into some
+ # version of gcc upstream
+ if tc_version_is_at_least 4.8 && has x32 $(get_all_abis TARGET) ; then
+ confgcc+=( --with-abi=$(gcc-abi-map ${TARGET_DEFAULT_ABI}) )
+ fi
+ ;;
+ x86)
+ # Default arch for x86 is normally i386, lets give it a bump
+ # since glibc will do so based on CTARGET anyways
+ confgcc+=( --with-arch=${CTARGET%%-*} )
+ ;;
+ hppa)
+ # Enable sjlj exceptions for backward compatibility on hppa
+ [[ ${GCCMAJOR} == "3" ]] && confgcc+=( --enable-sjlj-exceptions )
+ ;;
+ ppc)
+ # Set up defaults based on current CFLAGS
+ is-flagq -mfloat-gprs=double && confgcc+=( --enable-e500-double )
+ [[ ${CTARGET//_/-} == *-e500v2-* ]] && confgcc+=( --enable-e500-double )
+ ;;
+ esac
+
+ # if the target can do biarch (-m32/-m64), enable it. overhead should
+ # be small, and should simplify building of 64bit kernels in a 32bit
+ # userland by not needing sys-devel/kgcc64. #349405
+ case $(tc-arch) in
+ ppc|ppc64) tc_version_is_at_least 3.4 && confgcc+=( --enable-targets=all ) ;;
+ sparc) tc_version_is_at_least 4.4 && confgcc+=( --enable-targets=all ) ;;
+ amd64|x86) tc_version_is_at_least 4.3 && confgcc+=( --enable-targets=all ) ;;
+ esac
+
+ # On Darwin we need libdir to be set in order to get correct install names
+ # for things like libobjc-gnu, libgcj and libfortran. If we enable it on
+ # non-Darwin we screw up the behaviour this eclass relies on. We in
+ # particular need this over --libdir for bug #255315.
+ [[ ${CTARGET} == *-darwin* ]] && \
+ confgcc+=( --enable-version-specific-runtime-libs )
+
+ ### library options
+
+ if ! is_gcj ; then
+ confgcc+=( --disable-libgcj )
+ elif use awt ; then
+ confgcc+=( --enable-java-awt=gtk )
+ fi
+
+ if tc_version_is_at_least 4.2 ; then
+ if in_iuse openmp ; then
+ # Make sure target has pthreads support. #326757 #335883
+ # There shouldn't be a chicken & egg problem here as openmp won't
+ # build without a C library, and you can't build that w/out
+ # already having a compiler ...
+ if ! is_crosscompile || \
+ $(tc-getCPP ${CTARGET}) -E - <<<"#include <pthread.h>" >& /dev/null
+ then
+ confgcc+=( $(use_enable openmp libgomp) )
+ else
+ # Force disable as the configure script can be dumb #359855
+ confgcc+=( --disable-libgomp )
+ fi
+ else
+ # For gcc variants where we don't want openmp (e.g. kgcc)
+ confgcc+=( --disable-libgomp )
+ fi
+ fi
+
+ if tc_version_is_at_least 4.0 ; then
+ if in_iuse mudflap ; then
+ confgcc+=( $(use_enable mudflap libmudflap) )
+ else
+ confgcc+=( --disable-libmudflap )
+ fi
+
+ if use_if_iuse libssp ; then
+ confgcc+=( --enable-libssp )
+ else
+ if hardened_gcc_is_stable ssp; then
+ export gcc_cv_libc_provides_ssp=yes
+ fi
+ confgcc+=( --disable-libssp )
+ fi
+ fi
+
+ if in_iuse cilk ; then
+ confgcc+=( $(use_enable cilk libcilkrts) )
+ fi
+
+ # newer gcc's come with libquadmath, but only fortran uses
+ # it, so auto punt it when we don't care
+ if tc_version_is_at_least 4.6 && ! is_fortran ; then
+ confgcc+=( --disable-libquadmath )
+ fi
+
+ if tc_version_is_at_least 4.6 ; then
+ confgcc+=( --enable-lto )
+ elif tc_version_is_at_least 4.5 ; then
+ confgcc+=( --disable-lto )
+ fi
+
+ # graphite was added in 4.4 but we only support it in 4.8+ due to external
+ # library issues. #448024
+ if tc_version_is_at_least 5.0 ; then
+ confgcc+=( $(use_with graphite isl) )
+ use graphite && confgcc+=( --disable-isl-version-check )
+ elif tc_version_is_at_least 4.8 ; then
+ confgcc+=( $(use_with graphite cloog) )
+ use graphite && confgcc+=( --disable-isl-version-check )
+ elif tc_version_is_at_least 4.4 ; then
+ confgcc+=( --without-{cloog,ppl} )
+ fi
+
+ if tc_version_is_at_least 4.8 ; then
+ confgcc+=( $(use_enable sanitize libsanitizer) )
+ fi
+
+ if tc_version_is_at_least 6.0 ; then
+ confgcc+=( $(use_enable pie default-pie) )
+ fi
+
+ # Disable gcc info regeneration -- it ships with generated info pages
+ # already. Our custom version/urls/etc... trigger it. #464008
+ export gcc_cv_prog_makeinfo_modern=no
+
+ # Do not let the X detection get in our way. We know things can be found
+ # via system paths, so no need to hardcode things that'll break multilib.
+ # Older gcc versions will detect ac_x_libraries=/usr/lib64 which ends up
+ # killing the 32bit builds which want /usr/lib.
+ export ac_cv_have_x='have_x=yes ac_x_includes= ac_x_libraries='
+
+ confgcc+=( "$@" ${EXTRA_ECONF} )
+
+ # Nothing wrong with a good dose of verbosity
+ echo
+ einfo "PREFIX: ${PREFIX}"
+ einfo "BINPATH: ${BINPATH}"
+ einfo "LIBPATH: ${LIBPATH}"
+ einfo "DATAPATH: ${DATAPATH}"
+ einfo "STDCXX_INCDIR: ${STDCXX_INCDIR}"
+ echo
+ einfo "Languages: ${GCC_LANG}"
+ echo
+ einfo "Configuring GCC with: ${confgcc[@]//--/\n\t--}"
+ echo
+
+ # Build in a separate build tree
+ mkdir -p "${WORKDIR}"/build
+ pushd "${WORKDIR}"/build > /dev/null
+
+ # and now to do the actual configuration
+ addwrite /dev/zero
+ echo "${S}"/configure "${confgcc[@]}"
+	# Older gcc versions did not detect bash and re-exec themselves, so force
+	# the use of bash.  Newer ones will auto-detect, but this is not harmful.
+ CONFIG_SHELL="/bin/bash" \
+ bash "${S}"/configure "${confgcc[@]}" || die "failed to run configure"
+
+ # return to whatever directory we were in before
+ popd > /dev/null
+}
+
+# @FUNCTION: downgrade_arch_flags
+# @USAGE: [version to build; defaults to ${GCC_BRANCH_VER}]
+# @DESCRIPTION:
+# Replace -m flags unsupported by the version being built with the best
+# available equivalent.  Only amd64/x86 are handled; all other arches
+# return immediately.
+downgrade_arch_flags() {
+	local arch bver i isa myarch mytune rep ver
+
+	bver=${1:-${GCC_BRANCH_VER}}
+	# NOTE(review): '<' inside [[ ]] is a lexicographic string compare of
+	# version strings -- works for the 3.x/4.x series handled below.
+	[[ $(gcc-version) < ${bver} ]] && return 0
+	[[ $(tc-arch) != amd64 && $(tc-arch) != x86 ]] && return 0
+
+	myarch=$(get-flag march)
+	mytune=$(get-flag mtune)
+
+	# If -march=native isn't supported we have to tease out the actual arch
+	if [[ ${myarch} == native || ${mytune} == native ]] ; then
+		if [[ ${bver} < 4.2 ]] ; then
+			# ask the currently-installed compiler what "native" resolves to
+			arch=$($(tc-getCC) -march=native -v -E -P - </dev/null 2>&1 \
+				| sed -rn "/cc1.*-march/s:.*-march=([^ ']*).*:\1:p")
+			replace-cpu-flags native ${arch}
+		fi
+	fi
+
+	# Handle special -mtune flags
+	[[ ${mytune} == intel && ${bver} < 4.9 ]] && replace-cpu-flags intel generic
+	[[ ${mytune} == generic && ${bver} < 4.2 ]] && filter-flags '-mtune=*'
+	[[ ${mytune} == x86-64 ]] && filter-flags '-mtune=*'
+	[[ ${bver} < 3.4 ]] && filter-flags '-mtune=*'
+
+	# Triples of: gcc version the arch was "added" in, the "arch" name,
+	# and the "replacement" to downgrade to for older gccs.
+	local archlist=(
+		4.9 bdver4 bdver3
+		4.9 bonnell atom
+		4.9 broadwell core-avx2
+		4.9 haswell core-avx2
+		4.9 ivybridge core-avx-i
+		4.9 nehalem corei7
+		4.9 sandybridge corei7-avx
+		4.9 silvermont corei7
+		4.9 westmere corei7
+		4.8 bdver3 bdver2
+		4.8 btver2 btver1
+		4.7 bdver2 bdver1
+		4.7 core-avx2 core-avx-i
+		4.6 bdver1 amdfam10
+		4.6 btver1 amdfam10
+		4.6 core-avx-i core2
+		4.6 corei7 core2
+		4.6 corei7-avx core2
+		4.5 atom core2
+		4.3 amdfam10 k8
+		4.3 athlon64-sse3 k8
+		4.3 barcelona k8
+		4.3 core2 nocona
+		4.3 geode k6-2 # gcc.gnu.org/PR41989#c22
+		4.3 k8-sse3 k8
+		4.3 opteron-sse3 k8
+		3.4 athlon-fx x86-64
+		3.4 athlon64 x86-64
+		3.4 c3-2 c3
+		3.4 k8 x86-64
+		3.4 opteron x86-64
+		3.4 pentium-m pentium3
+		3.4 pentium3m pentium3
+		3.4 pentium4m pentium4
+	)
+
+	# Walk the (newest-first) table, repeatedly downgrading the current
+	# march/mtune until we reach an entry old enough for ${bver}.
+	for ((i = 0; i < ${#archlist[@]}; i += 3)) ; do
+		# re-read each iteration: a previous replacement may have changed them
+		myarch=$(get-flag march)
+		mytune=$(get-flag mtune)
+
+		ver=${archlist[i]}
+		arch=${archlist[i + 1]}
+		rep=${archlist[i + 2]}
+
+		[[ ${myarch} != ${arch} && ${mytune} != ${arch} ]] && continue
+
+		if [[ ${ver} > ${bver} ]] ; then
+			einfo "Replacing ${myarch} (added in gcc ${ver}) with ${rep}..."
+			[[ ${myarch} == ${arch} ]] && replace-cpu-flags ${myarch} ${rep}
+			[[ ${mytune} == ${arch} ]] && replace-cpu-flags ${mytune} ${rep}
+			continue
+		else
+			# table is sorted newest-first, so nothing further applies
+			break
+		fi
+	done
+
+	# we only check -mno* here since -m* get removed by strip-flags later on
+	local isalist=(
+		4.9 -mno-sha
+		4.9 -mno-avx512pf
+		4.9 -mno-avx512f
+		4.9 -mno-avx512er
+		4.9 -mno-avx512cd
+		4.8 -mno-xsaveopt
+		4.8 -mno-xsave
+		4.8 -mno-rtm
+		4.8 -mno-fxsr
+		4.7 -mno-lzcnt
+		4.7 -mno-bmi2
+		4.7 -mno-avx2
+		4.6 -mno-tbm
+		4.6 -mno-rdrnd
+		4.6 -mno-fsgsbase
+		4.6 -mno-f16c
+		4.6 -mno-bmi
+		4.5 -mno-xop
+		4.5 -mno-movbe
+		4.5 -mno-lwp
+		4.5 -mno-fma4
+		4.4 -mno-pclmul
+		4.4 -mno-fma
+		4.4 -mno-avx
+		4.4 -mno-aes
+		4.3 -mno-ssse3
+		4.3 -mno-sse4a
+		4.3 -mno-sse4
+		4.3 -mno-sse4.2
+		4.3 -mno-sse4.1
+		4.3 -mno-popcnt
+		4.3 -mno-abm
+	)
+
+	for ((i = 0; i < ${#isalist[@]}; i += 2)) ; do
+		ver=${isalist[i]}
+		isa=${isalist[i + 1]}
+		# drop ISA flags not known to the gcc version being built
+		[[ ${ver} > ${bver} ]] && filter-flags ${isa} ${isa/-m/-mno-}
+	done
+}
+
+# @FUNCTION: gcc_do_filter_flags
+# @DESCRIPTION:
+# Sanitize user CFLAGS/LDFLAGS before configuring gcc: strip flags that
+# are unsupported by, or dangerous to, the compiler being built, and set
+# sane defaults for cross-compiles.  Mutates CFLAGS & friends in place.
+gcc_do_filter_flags() {
+	strip-flags
+	replace-flags -O? -O2
+
+	# don't let ABI-changing user flags break the compiler build itself
+	filter-flags '-mabi*' -m31 -m32 -m64
+
+	filter-flags -frecord-gcc-switches # 490738
+	filter-flags -mno-rtm -mno-htm # 506202
+
+	if tc_version_is_between 3.2 3.4 ; then
+		# XXX: this is so outdated it's barely useful, but it don't hurt...
+		replace-cpu-flags G3 750
+		replace-cpu-flags G4 7400
+		replace-cpu-flags G5 7400
+
+		# XXX: should add a sed or something to query all supported flags
+		# from the gcc source and trim everything else ...
+		filter-flags -f{no-,}unit-at-a-time -f{no-,}web -mno-tls-direct-seg-refs
+		filter-flags -f{no-,}stack-protector{,-all}
+		filter-flags -fvisibility-inlines-hidden -fvisibility=hidden
+	fi
+
+	if tc_version_is_at_least 3.4 ; then
+		case $(tc-arch) in
+			amd64|x86)
+				filter-flags '-mcpu=*'
+
+				tc_version_is_between 4.4 4.5 && append-flags -mno-avx # 357287
+
+				if tc_version_is_between 4.6 4.7 ; then
+					# https://bugs.gentoo.org/411333
+					# https://bugs.gentoo.org/466454
+					replace-cpu-flags c3-2 pentium2 pentium3 pentium3m pentium-m i686
+				fi
+				;;
+			alpha)
+				# https://bugs.gentoo.org/454426
+				append-ldflags -Wl,--no-relax
+				;;
+			sparc)
+				# temporary workaround for random ICEs reproduced by multiple users
+				# https://bugs.gentoo.org/457062
+				tc_version_is_between 4.6 4.8 && MAKEOPTS+=" -j1"
+				;;
+			*-macos)
+				# http://gcc.gnu.org/PR25127
+				tc_version_is_between 4.0 4.2 && \
+					filter-flags '-mcpu=*' '-march=*' '-mtune=*'
+				;;
+		esac
+	fi
+
+	strip-unsupported-flags
+
+	# these are set here so we have something sane at configure time
+	if is_crosscompile ; then
+		# Set this to something sane for both native and target
+		CFLAGS="-O2 -pipe"
+		FFLAGS=${CFLAGS}
+		FCFLAGS=${CFLAGS}
+
+		# honor a per-target CFLAGS_<CTARGET> override for C++ if set
+		local VAR="CFLAGS_"${CTARGET//-/_}
+		CXXFLAGS=${!VAR}
+	fi
+
+	export GCJFLAGS=${GCJFLAGS:-${CFLAGS}}
+}
+
+# @FUNCTION: setup_minispecs_gcc_build_specs
+# @DESCRIPTION:
+# Assemble ${WORKDIR}/build.specs from the hardened minispecs pieces that
+# this gcc actually supports, and export GCC_SPECS so the build uses it.
+setup_minispecs_gcc_build_specs() {
+	# Setup the "build.specs" file for gcc 4.3 to use when building.
+	# NOTE(review): loop variable 's' is not declared local, so it leaks
+	# into the caller's scope.
+	if hardened_gcc_works pie ; then
+		cat "${WORKDIR}"/specs/pie.specs >> "${WORKDIR}"/build.specs
+	fi
+	if hardened_gcc_works ssp ; then
+		for s in ssp sspall ; do
+			cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs
+		done
+	fi
+	# nostrict/znow pieces are appended unconditionally
+	for s in nostrict znow ; do
+		cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs
+	done
+	export GCC_SPECS="${WORKDIR}"/build.specs
+}
+
+# @FUNCTION: gcc-multilib-configure
+# @DESCRIPTION:
+# Append --enable/--disable-multilib (and, where supported,
+# --with-multilib-list) to the caller's confgcc array based on the
+# target ABIs known to multilib.eclass.
+gcc-multilib-configure() {
+	if ! is_multilib ; then
+		confgcc+=( --disable-multilib )
+		# Fun times: if we are building for a target that has multiple
+		# possible ABI formats, and the user has told us to pick one
+		# that isn't the default, then not specifying it via the list
+		# below will break that on us.
+	else
+		confgcc+=( --enable-multilib )
+	fi
+
+	# translate our notion of multilibs into gcc's
+	local abi list
+	for abi in $(get_all_abis TARGET) ; do
+		local l=$(gcc-abi-map ${abi})
+		[[ -n ${l} ]] && list+=",${l}"
+	done
+	if [[ -n ${list} ]] ; then
+		case ${CTARGET} in
+		x86_64*)
+			# ${list:1} strips the leading comma built up above
+			tc_version_is_at_least 4.8 && confgcc+=( --with-multilib-list=${list:1} )
+			;;
+		esac
+	fi
+}
+
+# @FUNCTION: gcc-abi-map
+# @USAGE: <Gentoo ABI name>
+# @DESCRIPTION:
+# Convert the ABI name we use in Gentoo to what gcc uses; echoes the gcc
+# name, or nothing if CTARGET has no mapping or the ABI is unknown.
+gcc-abi-map() {
+	# Convert the ABI name we use in Gentoo to what gcc uses
+	local map=()
+	case ${CTARGET} in
+	mips*)   map=("o32 32" "n32 n32" "n64 64") ;;
+	x86_64*) map=("amd64 m64" "x86 m32" "x32 mx32") ;;
+	esac
+
+	# NOTE(review): 'l' is not declared local and leaks into the caller.
+	local m
+	for m in "${map[@]}" ; do
+		l=( ${m} )
+		[[ $1 == ${l[0]} ]] && echo ${l[1]} && break
+	done
+}
+
+#----> src_compile <----
+
+# @FUNCTION: toolchain_src_compile
+# @DESCRIPTION:
+# Default src_compile: work around missing perl (manpage generation) and
+# hand off to gcc_do_make with the configured make target.
+toolchain_src_compile() {
+	# pretend the gperf output is fresh so the build won't try to regenerate it
+	touch "${S}"/gcc/c-gperf.h
+
+	# Do not make manpages if we do not have perl ...
+	[[ ! -x /usr/bin/perl ]] \
+		&& find "${WORKDIR}"/build -name '*.[17]' | xargs touch
+
+	gcc_do_make ${GCC_MAKE_TARGET}
+}
+
+# @FUNCTION: gcc_do_make
+# @USAGE: [make target]
+# @DESCRIPTION:
+# Run the actual gcc build in ${WORKDIR}/build.
+gcc_do_make() {
+	# This function accepts one optional argument, the make target to be used.
+	# If omitted, gcc_do_make will try to guess whether it should use all,
+	# or bootstrap-lean depending on CTARGET and arch.
+	# An example of how to use this function:
+	#
+	#	gcc_do_make all-target-libstdc++-v3
+
+	[[ -n ${1} ]] && GCC_MAKE_TARGET=${1}
+
+	# default target
+	if is_crosscompile || tc-is-cross-compiler ; then
+		# 3 stage bootstrapping doesn't quite work when you can't run the
+		# resulting binaries natively ^^;
+		GCC_MAKE_TARGET=${GCC_MAKE_TARGET-all}
+	else
+		GCC_MAKE_TARGET=${GCC_MAKE_TARGET-bootstrap-lean}
+	fi
+
+	# Older versions of GCC could not do profiledbootstrap in parallel due to
+	# collisions with profiling info.
+	# boundschecking also seems to introduce parallel build issues.
+	if [[ ${GCC_MAKE_TARGET} == "profiledbootstrap" ]] || use_if_iuse boundschecking ; then
+		! tc_version_is_at_least 4.6 && export MAKEOPTS="${MAKEOPTS} -j1"
+	fi
+
+	if [[ ${GCC_MAKE_TARGET} == "all" ]] ; then
+		STAGE1_CFLAGS=${STAGE1_CFLAGS-"${CFLAGS}"}
+	elif [[ $(gcc-version) == "3.4" && ${GCC_BRANCH_VER} == "3.4" ]] && gcc-specs-ssp ; then
+		# See bug #79852
+		STAGE1_CFLAGS=${STAGE1_CFLAGS-"-O2"}
+	fi
+
+	if is_crosscompile; then
+		# In 3.4, BOOT_CFLAGS is never used on a crosscompile...
+		# but I'll leave this in anyways as someone might have had
+		# some reason for putting it in here... --eradicator
+		BOOT_CFLAGS=${BOOT_CFLAGS-"-O2"}
+	else
+		# we only want to use the system's CFLAGS if not building a
+		# cross-compiler.
+		BOOT_CFLAGS=${BOOT_CFLAGS-"$(get_abi_CFLAGS ${TARGET_DEFAULT_ABI}) ${CFLAGS}"}
+	fi
+
+	einfo "Compiling ${PN} (${GCC_MAKE_TARGET})..."
+
+	pushd "${WORKDIR}"/build >/dev/null
+
+	emake \
+		LDFLAGS="${LDFLAGS}" \
+		STAGE1_CFLAGS="${STAGE1_CFLAGS}" \
+		LIBPATH="${LIBPATH}" \
+		BOOT_CFLAGS="${BOOT_CFLAGS}" \
+		${GCC_MAKE_TARGET} \
+		|| die "emake failed with ${GCC_MAKE_TARGET}"
+
+	# optionally build the libstdc++ API manpages with doxygen
+	if ! is_crosscompile && use cxx && use_if_iuse doc ; then
+		if type -p doxygen > /dev/null ; then
+			if tc_version_is_at_least 4.3 ; then
+				cd "${CTARGET}"/libstdc++-v3/doc
+				emake doc-man-doxygen || ewarn "failed to make docs"
+			elif tc_version_is_at_least 3.0 ; then
+				cd "${CTARGET}"/libstdc++-v3
+				emake doxygen-man || ewarn "failed to make docs"
+			fi
+			# Clean bogus manpages. #113902
+			find -name '*_build_*' -delete
+			# Blow away generated directory references.  Newer versions of gcc
+			# have gotten better at this, but not perfect.  This is easier than
+			# backporting all of the various doxygen patches.  #486754
+			find -name '*_.3' -exec grep -l ' Directory Reference ' {} + | \
+				xargs rm -f
+		else
+			ewarn "Skipping libstdc++ manpage generation since you don't have doxygen installed"
+		fi
+	fi
+
+	popd >/dev/null
+}
+
+#---->> src_test <<----
+
+# @FUNCTION: toolchain_src_test
+# @DESCRIPTION:
+# Default src_test: run the gcc testsuite (-k so one failure doesn't
+# abort the run) when USE=regression-test is enabled.
+toolchain_src_test() {
+	if use regression-test ; then
+		cd "${WORKDIR}"/build
+		emake -k check
+	fi
+}
+
+#---->> src_install <<----
+
+# @FUNCTION: toolchain_src_install
+# @DESCRIPTION:
+# Default src_install: install gcc into ${D}, then fix up the image --
+# relocate libraries, create env.d/gcc entries and versioned binary
+# symlinks, strip, prune docs for cross-compilers, and pax-mark cc1.
+toolchain_src_install() {
+	cd "${WORKDIR}"/build
+
+	# Do allow symlinks in private gcc include dir as this can break the build
+	find gcc/include*/ -type l -delete
+
+	# Copy over the info pages.  We disabled their generation earlier, but the
+	# build system only expects to install out of the build dir, not the source.  #464008
+	mkdir -p gcc/doc
+	local x=
+	for x in "${S}"/gcc/doc/*.info* ; do
+		if [[ -f ${x} ]] ; then
+			cp "${x}" gcc/doc/ || die
+		fi
+	done
+
+	# We remove the generated fixincludes, as they can cause things to break
+	# (ncurses, openssl, etc).  We do not prevent them from being built, as
+	# in the following commit which we revert:
+	# http://sources.gentoo.org/cgi-bin/viewvc.cgi/gentoo-x86/eclass/toolchain.eclass?r1=1.647&r2=1.648
+	# This is because bsd userland needs fixedincludes to build gcc, while
+	# linux does not.  Both can dispose of them afterwards.
+	while read x ; do
+		grep -q 'It has been auto-edited by fixincludes from' "${x}" \
+			&& rm -f "${x}"
+	done < <(find gcc/include*/ -name '*.h')
+
+	# Do the 'make install' from the build directory
+	S="${WORKDIR}"/build emake -j1 DESTDIR="${D}" install || die
+
+	# Punt some tools which are really only useful while building gcc
+	find "${D}" -name install-tools -prune -type d -exec rm -rf "{}" \;
+	# This one comes with binutils
+	find "${D}" -name libiberty.a -delete
+
+	# Move the libraries to the proper location
+	gcc_movelibs
+
+	# Basic sanity check
+	if ! is_crosscompile ; then
+		local EXEEXT
+		# pull EXEEXT out of the configure log (e.g. ".exe" on Windows hosts)
+		eval $(grep ^EXEEXT= "${WORKDIR}"/build/gcc/config.log)
+		[[ -r ${D}${BINPATH}/gcc${EXEEXT} ]] || die "gcc not found in ${D}"
+	fi
+
+	dodir /etc/env.d/gcc
+	create_gcc_env_entry
+
+	# Setup the gcc_env_entry for hardened gcc 4 with minispecs
+	want_minispecs && copy_minispecs_gcc_specs
+
+	# Make sure we dont have stuff lying around that
+	# can nuke multiple versions of gcc
+	gcc_slot_java
+
+	dodir /usr/bin
+	cd "${D}"${BINPATH}
+	# Ugh: we really need to auto-detect this list.
+	#      It's constantly out of date.
+	for x in cpp gcc g++ c++ gcov g77 gcj gcjh gfortran gccgo ; do
+		# For some reason, g77 gets made instead of ${CTARGET}-g77...
+		# this should take care of that
+		[[ -f ${x} ]] && mv ${x} ${CTARGET}-${x}
+
+		if [[ -f ${CTARGET}-${x} ]] ; then
+			if ! is_crosscompile ; then
+				ln -sf ${CTARGET}-${x} ${x}
+				dosym ${BINPATH}/${CTARGET}-${x} \
+					/usr/bin/${x}-${GCC_CONFIG_VER}
+			fi
+			# Create versioned symlinks
+			dosym ${BINPATH}/${CTARGET}-${x} \
+				/usr/bin/${CTARGET}-${x}-${GCC_CONFIG_VER}
+		fi
+
+		# replace any hardlinked/copied versioned binary with a symlink
+		if [[ -f ${CTARGET}-${x}-${GCC_CONFIG_VER} ]] ; then
+			rm -f ${CTARGET}-${x}-${GCC_CONFIG_VER}
+			ln -sf ${CTARGET}-${x} ${CTARGET}-${x}-${GCC_CONFIG_VER}
+		fi
+	done
+
+	# Now do the fun stripping stuff
+	env RESTRICT="" CHOST=${CHOST} prepstrip "${D}${BINPATH}"
+	env RESTRICT="" CHOST=${CTARGET} prepstrip "${D}${LIBPATH}"
+	# gcc used to install helper binaries in lib/ but then moved to libexec/
+	[[ -d ${D}${PREFIX}/libexec/gcc ]] && \
+		env RESTRICT="" CHOST=${CHOST} prepstrip "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}"
+
+	cd "${S}"
+	if is_crosscompile; then
+		# cross toolchains don't need host docs
+		rm -rf "${D}"/usr/share/{man,info}
+		rm -rf "${D}"${DATAPATH}/{man,info}
+	else
+		if tc_version_is_at_least 3.0 ; then
+			local cxx_mandir=$(find "${WORKDIR}/build/${CTARGET}/libstdc++-v3" -name man)
+			if [[ -d ${cxx_mandir} ]] ; then
+				cp -r "${cxx_mandir}"/man? "${D}/${DATAPATH}"/man/
+			fi
+		fi
+		has noinfo ${FEATURES} \
+			&& rm -r "${D}/${DATAPATH}"/info \
+			|| prepinfo "${DATAPATH}"
+		has noman ${FEATURES} \
+			&& rm -r "${D}/${DATAPATH}"/man \
+			|| prepman "${DATAPATH}"
+	fi
+	# prune empty dirs left behind
+	find "${D}" -depth -type d -delete 2>/dev/null
+
+	# install testsuite results
+	if use regression-test; then
+		docinto testsuite
+		find "${WORKDIR}"/build -type f -name "*.sum" -print0 | xargs -0 dodoc
+		find "${WORKDIR}"/build -type f -path "*/testsuite/*.log" -print0 \
+			| xargs -0 dodoc
+	fi
+
+	# Rather install the script, else portage with changing $FILESDIR
+	# between binary and source package borks things ....
+	if ! is_crosscompile ; then
+		insinto "${DATAPATH}"
+		newins "${GCC_FILESDIR}"/awk/fixlafiles.awk-no_gcc_la fixlafiles.awk || die
+		find "${D}/${LIBPATH}" -name libstdc++.la -type f -delete
+		find "${D}/${LIBPATH}" -name 'lib*san.la' -type f -delete #487550 #546700
+		exeinto "${DATAPATH}"
+		doexe "${GCC_FILESDIR}"/fix_libtool_files.sh || die
+		doexe "${GCC_FILESDIR}"/c{89,99} || die
+	fi
+
+	# Use gid of 0 because some stupid ports don't have
+	# the group 'root' set to gid 0.  Send to /dev/null
+	# for people who are testing as non-root.
+	chown -R root:0 "${D}"${LIBPATH} 2>/dev/null
+
+	# Move pretty-printers to gdb datadir to shut ldconfig up
+	local py gdbdir=/usr/share/gdb/auto-load${LIBPATH/\/lib\//\/$(get_libdir)\/}
+	pushd "${D}"${LIBPATH} >/dev/null
+	for py in $(find . -name '*-gdb.py') ; do
+		local multidir=${py%/*}
+		insinto "${gdbdir}/${multidir}"
+		sed -i "/^libdir =/s:=.*:= '${LIBPATH}/${multidir}':" "${py}" || die #348128
+		doins "${py}" || die
+		rm "${py}" || die
+	done
+	popd >/dev/null
+
+	# Don't scan .gox files for executable stacks - false positives
+	export QA_EXECSTACK="usr/lib*/go/*/*.gox"
+	export QA_WX_LOAD="usr/lib*/go/*/*.gox"
+
+	# Disable RANDMMAP so PCH works. #301299
+	if tc_version_is_at_least 4.3 ; then
+		pax-mark -r "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1"
+		pax-mark -r "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}/cc1plus"
+	fi
+}
+
+# @FUNCTION: gcc_movelibs
+# @DESCRIPTION:
+# Move around the libs to the right location.  For some reason,
+# when installing gcc, it dumps internal libraries into /usr/lib
+# instead of the private gcc lib path.
+gcc_movelibs() {
+	# older versions of gcc did not support --print-multi-os-directory
+	tc_version_is_at_least 3.2 || return 0
+
+	local x multiarg removedirs=""
+	for multiarg in $($(XGCC) -print-multi-lib) ; do
+		# output lines are "dir;@flag@flag"; keep the flags, turn @ into " -"
+		multiarg=${multiarg#*;}
+		multiarg=${multiarg//@/ -}
+
+		local OS_MULTIDIR=$($(XGCC) ${multiarg} --print-multi-os-directory)
+		local MULTIDIR=$($(XGCC) ${multiarg} --print-multi-directory)
+		local TODIR=${D}${LIBPATH}/${MULTIDIR}
+		local FROMDIR=
+
+		[[ -d ${TODIR} ]] || mkdir -p ${TODIR}
+
+		# sweep every place gcc may have dumped this multilib's files
+		for FROMDIR in \
+			${LIBPATH}/${OS_MULTIDIR} \
+			${LIBPATH}/../${MULTIDIR} \
+			${PREFIX}/lib/${OS_MULTIDIR} \
+			${PREFIX}/${CTARGET}/lib/${OS_MULTIDIR}
+		do
+			removedirs="${removedirs} ${FROMDIR}"
+			FROMDIR=${D}${FROMDIR}
+			if [[ ${FROMDIR} != "${TODIR}" && -d ${FROMDIR} ]] ; then
+				local files=$(find "${FROMDIR}" -maxdepth 1 ! -type d 2>/dev/null)
+				if [[ -n ${files} ]] ; then
+					mv ${files} "${TODIR}" || die
+				fi
+			fi
+		done
+		fix_libtool_libdir_paths "${LIBPATH}/${MULTIDIR}"
+
+		# SLOT up libgcj.pc if it's available (and let gcc-config worry about links)
+		FROMDIR="${PREFIX}/lib/${OS_MULTIDIR}"
+		for x in "${D}${FROMDIR}"/pkgconfig/libgcj*.pc ; do
+			[[ -f ${x} ]] || continue
+			sed -i "/^libdir=/s:=.*:=${LIBPATH}/${MULTIDIR}:" "${x}" || die
+			mv "${x}" "${D}${FROMDIR}"/pkgconfig/libgcj-${GCC_PV}.pc || die
+		done
+	done
+
+	# We remove directories separately to avoid this case:
+	#	mv SRC/lib/../lib/*.o DEST
+	#	rmdir SRC/lib/../lib/
+	#	mv SRC/lib/../lib32/*.o DEST  # Bork
+	for FROMDIR in ${removedirs} ; do
+		rmdir "${D}"${FROMDIR} >& /dev/null
+	done
+	find "${D}" -type d | xargs rmdir >& /dev/null
+}
+
+# @FUNCTION: fix_libtool_libdir_paths
+# @USAGE: <libpath>
+# @DESCRIPTION:
+# Make sure the libtool archives have libdir set to where they actually
+# -are-, and not where they -used- to be.  Also, any dependencies we have
+# on our own .la files need to be updated.
+fix_libtool_libdir_paths() {
+	local libpath="$1"
+
+	pushd "${D}" >/dev/null
+
+	pushd "./${libpath}" >/dev/null
+	local dir="${PWD#${D%/}}"
+	# build a \(a.la\|b.la\|...\) alternation for the sed expressions below
+	local allarchives=$(echo *.la)
+	allarchives="\(${allarchives// /\\|}\)"
+	popd >/dev/null
+
+	# The libdir might not have any .la files. #548782
+	find "./${dir}" -maxdepth 1 -name '*.la' \
+		-exec sed -i -e "/^libdir=/s:=.*:='${dir}':" {} + || die
+	# Would be nice to combine these, but -maxdepth can not be specified
+	# on sub-expressions.
+	find "./${PREFIX}"/lib* -maxdepth 3 -name '*.la' \
+		-exec sed -i -e "/^dependency_libs=/s:/[^ ]*/${allarchives}:${libpath}/\1:g" {} + || die
+	find "./${dir}/" -maxdepth 1 -name '*.la' \
+		-exec sed -i -e "/^dependency_libs=/s:/[^ ]*/${allarchives}:${libpath}/\1:g" {} + || die
+
+	popd >/dev/null
+}
+
+# @FUNCTION: create_gcc_env_entry
+# @USAGE: [spec suffix, e.g. "vanilla" or "hardenednossp"]
+# @DESCRIPTION:
+# Write an /etc/env.d/gcc config file for this gcc install.  Without an
+# argument the default (no specs) entry is created; with one, a
+# specs-specific entry named <base>-<suffix> pointing at <suffix>.specs.
+create_gcc_env_entry() {
+	dodir /etc/env.d/gcc
+	local gcc_envd_base="/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}"
+
+	local gcc_specs_file
+	local gcc_envd_file="${D}${gcc_envd_base}"
+	if [[ -z $1 ]] ; then
+		# I'm leaving the following commented out to remind me that it
+		# was an insanely -bad- idea.  Stuff broke.  GCC_SPECS isnt unset
+		# on chroot or in non-toolchain.eclass gcc ebuilds!
+		#gcc_specs_file="${LIBPATH}/specs"
+		gcc_specs_file=""
+	else
+		gcc_envd_file+="-$1"
+		gcc_specs_file="${LIBPATH}/$1.specs"
+	fi
+
+	# We want to list the default ABI's LIBPATH first so libtool
+	# searches that directory first.  This is a temporary
+	# workaround for libtool being stupid and using .la's from
+	# conflicting ABIs by using the first one in the search path
+	local ldpaths mosdirs
+	if tc_version_is_at_least 3.2 ; then
+		local mdir mosdir abi ldpath
+		for abi in $(get_all_abis TARGET) ; do
+			mdir=$($(XGCC) $(get_abi_CFLAGS ${abi}) --print-multi-directory)
+			ldpath=${LIBPATH}
+			[[ ${mdir} != "." ]] && ldpath+="/${mdir}"
+			# prepend so the last (default) ABI ends up first
+			ldpaths="${ldpath}${ldpaths:+:${ldpaths}}"
+
+			mosdir=$($(XGCC) $(get_abi_CFLAGS ${abi}) -print-multi-os-directory)
+			mosdirs="${mosdir}${mosdirs:+:${mosdirs}}"
+		done
+	else
+		# Older gcc's didn't do multilib, so logic is simple.
+		ldpaths=${LIBPATH}
+	fi
+
+	cat <<-EOF > ${gcc_envd_file}
+	PATH="${BINPATH}"
+	ROOTPATH="${BINPATH}"
+	GCC_PATH="${BINPATH}"
+	LDPATH="${ldpaths}"
+	MANPATH="${DATAPATH}/man"
+	INFOPATH="${DATAPATH}/info"
+	STDCXX_INCDIR="${STDCXX_INCDIR##*/}"
+	CTARGET="${CTARGET}"
+	GCC_SPECS="${gcc_specs_file}"
+	MULTIOSDIRS="${mosdirs}"
+	EOF
+}
+
+# @FUNCTION: copy_minispecs_gcc_specs
+# @DESCRIPTION:
+# Install the hardened minispecs files into ${LIBPATH} and create the
+# matching specs-specific env.d entries for each mode this gcc supports.
+copy_minispecs_gcc_specs() {
+	# setup the hardenedno* specs files and the vanilla specs file.
+	if hardened_gcc_works ; then
+		create_gcc_env_entry hardenednopiessp
+	fi
+	if hardened_gcc_works pie ; then
+		create_gcc_env_entry hardenednopie
+	fi
+	if hardened_gcc_works ssp ; then
+		create_gcc_env_entry hardenednossp
+	fi
+	create_gcc_env_entry vanilla
+	insinto ${LIBPATH}
+	doins "${WORKDIR}"/specs/*.specs || die "failed to install specs"
+	# Build system specs file which, if it exists, must be a complete set of
+	# specs as it completely and unconditionally overrides the builtin specs.
+	if ! tc_version_is_at_least 4.4 ; then
+		$(XGCC) -dumpspecs > "${WORKDIR}"/specs/specs
+		cat "${WORKDIR}"/build.specs >> "${WORKDIR}"/specs/specs
+		doins "${WORKDIR}"/specs/specs || die "failed to install the specs file"
+	fi
+}
+
+# @FUNCTION: gcc_slot_java
+# @DESCRIPTION:
+# Relocate gcj/Java headers, security files and misc gcj files from the
+# shared ${PREFIX} locations into the slot-private ${LIBPATH} so multiple
+# gcc versions don't clobber each other.
+gcc_slot_java() {
+	local x
+
+	# Move Java headers to compiler-specific dir
+	for x in "${D}"${PREFIX}/include/gc*.h "${D}"${PREFIX}/include/j*.h ; do
+		[[ -f ${x} ]] && mv -f "${x}" "${D}"${LIBPATH}/include/
+	done
+	for x in gcj gnu java javax org ; do
+		if [[ -d ${D}${PREFIX}/include/${x} ]] ; then
+			dodir /${LIBPATH}/include/${x}
+			mv -f "${D}"${PREFIX}/include/${x}/* "${D}"${LIBPATH}/include/${x}/
+			rm -rf "${D}"${PREFIX}/include/${x}
+		fi
+	done
+
+	if [[ -d ${D}${PREFIX}/lib/security ]] || [[ -d ${D}${PREFIX}/$(get_libdir)/security ]] ; then
+		dodir /${LIBPATH}/security
+		mv -f "${D}"${PREFIX}/lib*/security/* "${D}"${LIBPATH}/security
+		rm -rf "${D}"${PREFIX}/lib*/security
+	fi
+
+	# Move random gcj files to compiler-specific directories
+	for x in libgcj.spec logging.properties ; do
+		x="${D}${PREFIX}/lib/${x}"
+		[[ -f ${x} ]] && mv -f "${x}" "${D}"${LIBPATH}/
+	done
+
+	# Rename jar because it could clash with Kaffe's jar if this gcc is
+	# primary compiler (aka don't have the -<version> extension)
+	cd "${D}"${BINPATH}
+	[[ -f jar ]] && mv -f jar gcj-jar
+}
+
+#---->> pkg_post* <<----
+
+# @FUNCTION: toolchain_pkg_postinst
+# @DESCRIPTION:
+# Default pkg_postinst: select the new gcc via gcc-config, install the
+# libtool-fixup helpers into system paths, and print upgrade notes.
+toolchain_pkg_postinst() {
+	do_gcc_config
+
+	if ! is_crosscompile ; then
+		echo
+		ewarn "If you have issues with packages unable to locate libstdc++.la,"
+		ewarn "then try running 'fix_libtool_files.sh' on the old gcc versions."
+		echo
+		ewarn "You might want to review the GCC upgrade guide when moving between"
+		ewarn "major versions (like 4.2 to 4.3):"
+		ewarn "http://www.gentoo.org/doc/en/gcc-upgrading.xml"
+		echo
+
+		# Clean up old paths
+		rm -f "${ROOT}"/*/rcscripts/awk/fixlafiles.awk "${ROOT}"/sbin/fix_libtool_files.sh
+		rmdir "${ROOT}"/*/rcscripts{/awk,} 2>/dev/null
+
+		mkdir -p "${ROOT}"/usr/{share/gcc-data,sbin,bin}
+		# DATAPATH has EPREFIX in it, so use ROOT with it
+		cp "${ROOT}/${DATAPATH}"/fixlafiles.awk "${ROOT}"/usr/share/gcc-data/ || die
+		cp "${ROOT}/${DATAPATH}"/fix_libtool_files.sh "${ROOT}"/usr/sbin/ || die
+
+		# Since these aren't critical files and portage sucks with
+		# handling of binpkgs, don't require these to be found
+		cp "${ROOT}/${DATAPATH}"/c{89,99} "${ROOT}"/usr/bin/ 2>/dev/null
+	fi
+
+	if use regression-test ; then
+		elog "Testsuite results have been installed into /usr/share/doc/${PF}/testsuite"
+		echo
+	fi
+
+	if [[ -n ${PRERELEASE}${SNAPSHOT} ]] ; then
+		einfo "This GCC ebuild is provided for your convenience, and the use"
+		einfo "of this compiler is not supported by the Gentoo Developers."
+		einfo "Please report bugs to upstream at http://gcc.gnu.org/bugzilla/"
+	fi
+}
+
+# @FUNCTION: toolchain_pkg_postrm
+# @DESCRIPTION:
+# Default pkg_postrm: clean up cross-compiler leftovers, and when the
+# last gcc in a slot is removed, re-run gcc-config and fix stale
+# libstdc++.la references in other packages.
+toolchain_pkg_postrm() {
+	# to make our lives easier (and saner), we do the fix_libtool stuff here.
+	# rather than checking SLOT's and trying in upgrade paths, we just see if
+	# the common libstdc++.la exists in the ${LIBPATH} of the gcc that we are
+	# unmerging.  if it does, that means this was a simple re-emerge.
+
+	# clean up the cruft left behind by cross-compilers
+	if is_crosscompile ; then
+		# only wipe the config if no other version for this CTARGET remains
+		if [[ -z $(ls "${ROOT}"/etc/env.d/gcc/${CTARGET}* 2>/dev/null) ]] ; then
+			rm -f "${ROOT}"/etc/env.d/gcc/config-${CTARGET}
+			rm -f "${ROOT}"/etc/env.d/??gcc-${CTARGET}
+			rm -f "${ROOT}"/usr/bin/${CTARGET}-{gcc,{g,c}++}{,32,64}
+		fi
+		return 0
+	fi
+
+	# ROOT isn't handled by the script
+	[[ ${ROOT} != "/" ]] && return 0
+
+	if [[ ! -e ${LIBPATH}/libstdc++.so ]] ; then
+		# make sure the profile is sane during same-slot upgrade #289403
+		do_gcc_config
+
+		einfo "Running 'fix_libtool_files.sh ${GCC_RELEASE_VER}'"
+		/usr/sbin/fix_libtool_files.sh ${GCC_RELEASE_VER}
+		if [[ -n ${BRANCH_UPDATE} ]] ; then
+			einfo "Running 'fix_libtool_files.sh ${GCC_RELEASE_VER}-${BRANCH_UPDATE}'"
+			/usr/sbin/fix_libtool_files.sh ${GCC_RELEASE_VER}-${BRANCH_UPDATE}
+		fi
+	fi
+
+	return 0
+}
+
+# Select a gcc-config profile for the freshly merged compiler, preserving
+# the user's specs-specific choice (e.g. hardened/vanilla) when it still
+# exists for the new version.
+do_gcc_config() {
+	if ! should_we_gcc_config ; then
+		# Keep whatever profile is currently active; --force refreshes
+		# the wrapper symlinks. env -i avoids leaking build-time vars.
+		env -i ROOT="${ROOT}" gcc-config --use-old --force
+		return 0
+	fi
+
+	local current_gcc_config="" current_specs="" use_specs=""
+
+	current_gcc_config=$(env -i ROOT="${ROOT}" gcc-config -c ${CTARGET} 2>/dev/null)
+	if [[ -n ${current_gcc_config} ]] ; then
+		# figure out which specs-specific config is active
+		current_specs=$(gcc-config -S ${current_gcc_config} | awk '{print $3}')
+		[[ -n ${current_specs} ]] && use_specs=-${current_specs}
+	fi
+	if [[ -n ${use_specs} ]] && \
+	   [[ ! -e ${ROOT}/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}${use_specs} ]]
+	then
+		ewarn "The currently selected specs-specific gcc config,"
+		ewarn "${current_specs}, doesn't exist anymore. This is usually"
+		ewarn "due to enabling/disabling hardened or switching to a version"
+		ewarn "of gcc that doesnt create multiple specs files. The default"
+		ewarn "config will be used, and the previous preference forgotten."
+		use_specs=""
+	fi
+
+	gcc-config ${CTARGET}-${GCC_CONFIG_VER}${use_specs}
+}
+
+# Decide whether do_gcc_config should switch the active profile to the
+# newly installed gcc.  Returns 0 (yes) when the current config is broken
+# or belongs to the same branch/SLOT; returns 1 (no) for a genuinely
+# different version, printing switch instructions instead.
+should_we_gcc_config() {
+	# if the current config is invalid, we definitely want a new one
+	# Note: due to bash quirkiness, the following must not be 1 line
+	local curr_config
+	curr_config=$(env -i ROOT="${ROOT}" gcc-config -c ${CTARGET} 2>&1) || return 0
+
+	# if the previously selected config has the same major.minor (branch) as
+	# the version we are installing, then it will probably be uninstalled
+	# for being in the same SLOT, make sure we run gcc-config.
+	local curr_config_ver=$(env -i ROOT="${ROOT}" gcc-config -S ${curr_config} | awk '{print $2}')
+
+	local curr_branch_ver=$(get_version_component_range 1-2 ${curr_config_ver})
+
+	# If we're using multislot, just run gcc-config if we're installing
+	# to the same profile as the current one.
+	use multislot && return $([[ ${curr_config_ver} == ${GCC_CONFIG_VER} ]])
+
+	if [[ ${curr_branch_ver} == ${GCC_BRANCH_VER} ]] ; then
+		return 0
+	else
+		# if we're installing a genuinely different compiler version,
+		# we should probably tell the user -how- to switch to the new
+		# gcc version, since we're not going to do it for him/her.
+		# We don't want to switch from say gcc-3.3 to gcc-3.4 right in
+		# the middle of an emerge operation (like an 'emerge -e world'
+		# which could install multiple gcc versions).
+		# Only warn if we're installing a pkg as we might be called from
+		# the pkg_{pre,post}rm steps. #446830
+		if [[ ${EBUILD_PHASE} == *"inst" ]] ; then
+			einfo "The current gcc config appears valid, so it will not be"
+			einfo "automatically switched for you. If you would like to"
+			einfo "switch to the newly installed gcc version, do the"
+			einfo "following:"
+			echo
+			einfo "gcc-config ${CTARGET}-${GCC_CONFIG_VER}"
+			einfo "source /etc/profile"
+			echo
+		fi
+		return 1
+	fi
+}
+
+#---->> support and misc functions <<----
+
+# This is to make sure we don't accidentally try to enable support for a
+# language that doesn't exist. GCC 3.4 supports f77, while 4.0 supports f95, etc.
+#
+# Also add a hook so special ebuilds (kgcc64) can control which languages
+# exactly get enabled
+gcc-lang-supported() {
+ grep ^language=\"${1}\" "${S}"/gcc/*/config-lang.in > /dev/null || return 1
+ [[ -z ${TOOLCHAIN_ALLOWED_LANGS} ]] && return 0
+ has $1 ${TOOLCHAIN_ALLOWED_LANGS}
+}
+
+is_ada() {
+ gcc-lang-supported ada || return 1
+ use ada
+}
+
+is_cxx() {
+ gcc-lang-supported 'c++' || return 1
+ ! is_crosscompile && tc_version_is_at_least 4.8 && return 0
+ use cxx
+}
+
+is_d() {
+ gcc-lang-supported d || return 1
+ use_if_iuse d
+}
+
+is_f77() {
+ gcc-lang-supported f77 || return 1
+ use fortran
+}
+
+is_f95() {
+ gcc-lang-supported f95 || return 1
+ use fortran
+}
+
+is_fortran() {
+ gcc-lang-supported fortran || return 1
+ use fortran
+}
+
+is_gcj() {
+ gcc-lang-supported java || return 1
+ use cxx && use_if_iuse gcj
+}
+
+is_go() {
+ gcc-lang-supported go || return 1
+ use cxx && use_if_iuse go
+}
+
+is_multilib() {
+ tc_version_is_at_least 3 || return 1
+ use multilib
+}
+
+is_objc() {
+ gcc-lang-supported objc || return 1
+ use_if_iuse objc
+}
+
+is_objcxx() {
+ gcc-lang-supported 'obj-c++' || return 1
+ use cxx && use_if_iuse objc++
+}
+
+# Grab a variable from the build system (taken from linux-info.eclass)
+get_make_var() {
+	local var=$1 makefile=${2:-${WORKDIR}/build/Makefile}
+	# Feed make a throwaway stub on stdin (-f -) whose target echoes the
+	# requested variable after including the real Makefile; `-s` keeps
+	# the output to just the value.
+	echo -e "e:\\n\\t@echo \$(${var})\\ninclude ${makefile}" | \
+	r=${makefile%/*} emake --no-print-directory -s -f - 2>/dev/null
+}
+
+XGCC() { get_make_var GCC_FOR_TARGET ; }
+
+# The gentoo piessp patches allow for 3 configurations:
+# 1) PIE+SSP by default
+# 2) PIE by default
+# 3) SSP by default
+# Check whether the hardened feature $1 ("pie", "ssp", or empty for both)
+# can actually be enabled on this target: patches present, USE flags not
+# opting out, and the arch marked stable for the feature.
+hardened_gcc_works() {
+	if [[ $1 == "pie" ]] ; then
+		# $gcc_cv_ld_pie is unreliable as it simply take the output of
+		# `ld --help | grep -- -pie`, that reports the option in all cases, also if
+		# the loader doesn't actually load the resulting executables.
+		# To avoid breakage, blacklist FreeBSD here at least
+		[[ ${CTARGET} == *-freebsd* ]] && return 1
+
+		want_pie || return 1
+		use_if_iuse nopie && return 1
+		hardened_gcc_is_stable pie
+		return $?
+	elif [[ $1 == "ssp" ]] ; then
+		# SSP needs the specs patchset to be applied.
+		[[ -n ${SPECS_VER} ]] || return 1
+		use_if_iuse nossp && return 1
+		hardened_gcc_is_stable ssp
+		return $?
+	else
+		# laziness ;)
+		hardened_gcc_works pie || return 1
+		hardened_gcc_works ssp || return 1
+		return 0
+	fi
+}
+
+hardened_gcc_is_stable() {
+ local tocheck
+ if [[ $1 == "pie" ]] ; then
+ if [[ ${CTARGET} == *-uclibc* ]] ; then
+ tocheck=${PIE_UCLIBC_STABLE}
+ else
+ tocheck=${PIE_GLIBC_STABLE}
+ fi
+ elif [[ $1 == "ssp" ]] ; then
+ if [[ ${CTARGET} == *-uclibc* ]] ; then
+ tocheck=${SSP_UCLIBC_STABLE}
+ elif [[ ${CTARGET} == *-gnu* ]] ; then
+ tocheck=${SSP_STABLE}
+ fi
+ else
+ die "hardened_gcc_stable needs to be called with pie or ssp"
+ fi
+
+ has $(tc-arch) ${tocheck} && return 0
+ return 1
+}
+
+# Decide whether to use the hardened minispecs machinery (gcc-4.3.2+ with
+# USE=hardened).  Note: returns 0 even when a warning fired, so hardened
+# setup proceeds in a degraded state rather than aborting.
+want_minispecs() {
+	if tc_version_is_at_least 4.3.2 && use hardened ; then
+		if ! want_pie ; then
+			ewarn "PIE_VER or SPECS_VER is not defined in the GCC ebuild."
+		elif use vanilla ; then
+			ewarn "You will not get hardened features if you have the vanilla USE-flag."
+		elif use nopie && use nossp ; then
+			ewarn "You will not get hardened features if you have the nopie and nossp USE-flag."
+		elif ! hardened_gcc_works ; then
+			ewarn "Your $(tc-arch) arch is not supported."
+		else
+			return 0
+		fi
+		ewarn "Hope you know what you are doing. Hardened will not work."
+		return 0
+	fi
+	return 1
+}
+
+# Decide whether the PIE patchset should be active.  The checks are
+# order-sensitive: USE=nopie only vetoes PIE for non-hardened builds,
+# and PIE_VER+SPECS_VER together force it on before the version gate.
+want_pie() {
+	! use hardened && [[ -n ${PIE_VER} ]] && use nopie && return 1
+	[[ -n ${PIE_VER} ]] && [[ -n ${SPECS_VER} ]] && return 0
+	# 4.3.2+ uses minispecs instead of the old-style PIE patch.
+	tc_version_is_at_least 4.3.2 && return 1
+	[[ -z ${PIE_VER} ]] && return 1
+	use !nopie && return 0
+	return 1
+}
+
+# Register a death hook (once) so build failures produce a log tarball.
+has toolchain_death_notice ${EBUILD_DEATH_HOOKS} || EBUILD_DEATH_HOOKS+=" toolchain_death_notice"
+# On die(), collect compiler info, config.log files and the build log
+# into ${WORKDIR}/gcc-build-logs.tar.bz2 for bug reports.
+toolchain_death_notice() {
+	if [[ -e "${WORKDIR}"/build ]] ; then
+		pushd "${WORKDIR}"/build >/dev/null
+		# Capture the compiler's own view of itself (-v on an empty input).
+		(echo '' | $(tc-getCC ${CTARGET}) ${CFLAGS} -v -E - 2>&1) > gccinfo.log
+		[[ -e "${T}"/build.log ]] && cp "${T}"/build.log .
+		tar jcf "${WORKDIR}"/gcc-build-logs.tar.bz2 \
+			gccinfo.log build.log $(find -name config.log)
+		rm gccinfo.log build.log
+		eerror
+		eerror "Please include ${WORKDIR}/gcc-build-logs.tar.bz2 in your bug report."
+		eerror
+		popd >/dev/null
+	fi
+}
diff --git a/eclass/twisted-r1.eclass b/eclass/twisted-r1.eclass
new file mode 100644
index 000000000000..2cb24e3f07a8
--- /dev/null
+++ b/eclass/twisted-r1.eclass
@@ -0,0 +1,236 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License, v2 or later
+# $Id$
+
+# @ECLASS: twisted-r1.eclass
+# @MAINTAINER:
+# Gentoo Python Project <python@gentoo.org>
+# @AUTHOR:
+# Author: Michał Górny <mgorny@gentoo.org>
+# Author: Jan Matejka <yac@gentoo.org>
+# @BLURB: Eclass for Twisted packages
+# @DESCRIPTION:
+# The twisted eclass defines phase functions for Twisted packages.
+
+case "${EAPI:-0}" in
+ 0|1|2|3)
+ die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
+ ;;
+ 4|5)
+ ;;
+ *)
+ die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
+ ;;
+esac
+
+if [[ ! ${_TWISTED_R1} ]]; then
+
+inherit distutils-r1 versionator
+
+fi # ! ${_TWISTED_R1}
+
+EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
+
+if [[ ! ${_TWISTED_R1} ]]; then
+
+# @FUNCTION: _twisted-r1_camelcase
+# @USAGE: <pn>
+# @DESCRIPTION:
+# Convert dash-separated <pn> to CamelCase name suitable for Twisted.
+# In pure bash, therefore safe for global scope execution.
+_twisted-r1_camelcase() {
+	local IFS=-
+
+	# IFS=- splits words by -.
+	local words=( ${1} )
+
+	# we can't keep '-' as it collides with [a-z] check
+	# and '' is used by bash-4 words[*], so let's just set globally
+	IFS=
+
+	# bash-4 can capitalize each array element natively (${arr[*]^}).
+	if [[ ${BASH_VERSINFO[0]} -ge 4 ]]; then
+		echo "${words[*]^}"
+		return
+	fi
+
+	# bash-3 fallback: map each word's first letter through a manual
+	# lowercase->uppercase table.
+	local w LC_COLLATE=C uc='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+
+	local out
+	for w in "${words[@]}"; do
+		local fl=${w:0:1}
+
+		# Danger: magic starts here. Please close your eyes.
+		# In base 36, a..z represents digits 10..35. We subtract 10
+		# and get array subscripts for uc.
+
+		[[ ${fl} == [a-z] ]] && fl=${uc:36#${fl} - 10:1}
+
+		out+=${fl}${w:1}
+	done
+
+	echo "${out}"
+}
+
+# @ECLASS-VARIABLE: TWISTED_PN
+# @DESCRIPTION:
+# The real package name. Default to camel-case conversion of ${PN}.
+#
+# Example: TwistedCore
+: ${TWISTED_PN:=$(_twisted-r1_camelcase ${PN})}
+
+# @ECLASS-VARIABLE: TWISTED_P
+# @DESCRIPTION:
+# The real package name with version appended.
+#
+# It is used to build the default SRC_URI and S values.
+#
+# Example: TwistedCore-1.2.3
+: ${TWISTED_P:=${TWISTED_PN}-${PV}}
+
+# @ECLASS-VARIABLE: TWISTED_RELEASE
+# @DESCRIPTION:
+# The 'release' of Twisted. Defaults to the major & minor version
+# number from ${PV}.
+#
+# It is used to build the default SRC_URI. It may be also used
+# in dependencies against other Twisted packages.
+#
+# Example: 1.2
+: ${TWISTED_RELEASE:=$(get_version_component_range 1-2 ${PV})}
+
+HOMEPAGE="http://www.twistedmatrix.com/"
+SRC_URI="http://twistedmatrix.com/Releases/${TWISTED_PN#Twisted}"
+SRC_URI="${SRC_URI}/${TWISTED_RELEASE}/${TWISTED_P}.tar.bz2"
+
+LICENSE="MIT"
+SLOT="0"
+IUSE=""
+
+S=${WORKDIR}/${TWISTED_P}
+
+# @ECLASS-VARIABLE: TWISTED_PLUGINS
+# @DESCRIPTION:
+# An array of Twisted plugins, whose cache is regenerated
+# in pkg_postinst() and pkg_postrm() phases.
+#
+# If no plugins are installed, set to empty array.
+declare -p TWISTED_PLUGINS &>/dev/null || TWISTED_PLUGINS=( twisted.plugins )
+
+# @FUNCTION: twisted-r1_python_test
+# @DESCRIPTION:
+# The common python_test() implementation that suffices for Twisted
+# packages.
+twisted-r1_python_test() {
+	local sitedir=$(python_get_sitedir)
+
+	# Copy modules of other Twisted packages from site-packages
+	# directory to the temporary directory.
+	local libdir=${BUILD_DIR}/test/lib
+	mkdir -p "${libdir}" || die
+	cp -r "${ROOT}${sitedir}"/twisted "${libdir}" || die
+	# Drop the installed module in case previous version conflicts with
+	# the new one somehow.  ${PN/-//} maps e.g. twisted-web -> twisted/web.
+	rm -fr "${libdir}/${PN/-//}" || die
+
+	distutils_install_for_testing || die
+
+	# Sanity check: distutils_install_for_testing is expected to set
+	# TEST_DIR to ${BUILD_DIR}/test; warn loudly if that assumption broke.
+	if [[ ${TEST_DIR} != ${BUILD_DIR}/test ]]; then
+		eqawarn "twisted-r1 integrity check failed."
+		eqawarn "TEST_DIR: ${TEST_DIR}"
+		eqawarn "expected: ${BUILD_DIR}/test"
+	fi
+
+	# ${PN/-/.} maps e.g. twisted-web -> twisted.web for trial.
+	cd "${TEST_DIR}"/lib || die
+	trial ${PN/-/.} || die "Tests fail with ${EPYTHON}"
+}
+
+# @FUNCTION: python_test
+# @DESCRIPTION:
+# Default python_test() for Twisted packages. If you need to override
+# it, you can access the original implementation
+# via twisted-r1_python_test.
+python_test() {
+	# Thin default wrapper; ebuilds may override and still call the
+	# eclass implementation directly.
+	twisted-r1_python_test
+}
+
+# @FUNCTION: twisted-r1_src_install
+# @DESCRIPTION:
+# Default src_install() for Twisted packages. Automatically handles HTML
+# docs (unless HTML_DOCS is set explicitly) and manpages in Twisted
+# packages.
+twisted-r1_src_install() {
+	# 'local' scopes the HTML_DOCS default to this call so it does not
+	# leak into the caller if the ebuild did not set HTML_DOCS itself.
+	[[ -d doc ]] && local HTML_DOCS=( "${HTML_DOCS[@]:-doc/.}" )
+	# Install any manpages shipped under doc/man (sections 1-9).
+	[[ -d doc/man ]] && doman doc/man/*.[[:digit:]]
+
+	distutils-r1_src_install
+}
+
+# @FUNCTION: _twisted-r1_create_caches
+# @USAGE: <packages>...
+# @DESCRIPTION:
+# Create dropin.cache for plugins in specified packages. The packages
+# are to be listed in standard dotted Python syntax.
+_twisted-r1_create_caches() {
+	# http://twistedmatrix.com/documents/current/core/howto/plugin.html
+	# Importing each plugin package and listing its IPlugin providers is
+	# what regenerates dropin.cache.  ImportError is only fatal during
+	# postinst; during postrm the modules may already be gone.
+	"${PYTHON}" -c \
+"import sys
+sys.path.insert(0, '${ROOT}$(python_get_sitedir)')
+
+fail = False
+
+try:
+	from twisted.plugin import getPlugins, IPlugin
+except ImportError as e:
+	if '${EBUILD_PHASE}' == 'postinst':
+		raise
+else:
+	for module in sys.argv[1:]:
+		try:
+			__import__(module, globals())
+		except ImportError as e:
+			if '${EBUILD_PHASE}' == 'postinst':
+				raise
+		else:
+			list(getPlugins(IPlugin, sys.modules[module]))
+" \
+		"${@}" || die "twisted plugin cache update failed"
+}
+
+# @FUNCTION: twisted-r1_update_plugin_cache
+# @DESCRIPTION:
+# Update and clean up plugin caches for packages listed
+# in TWISTED_PLUGINS.
+twisted-r1_update_plugin_cache() {
+	[[ ${TWISTED_PLUGINS[@]} ]] || return
+
+	# twisted.plugins -> twisted/plugins, then prefix with the live
+	# site-packages path and append /dropin.cache for each plugin dir.
+	local subdirs=( "${TWISTED_PLUGINS[@]//.//}" )
+	local paths=( "${subdirs[@]/#/${ROOT}$(python_get_sitedir)/}" )
+	local caches=( "${paths[@]/%//dropin.cache}" )
+
+	# First, delete existing (possibly stray) caches.
+	rm -f "${caches[@]}" || die
+
+	# Now, let's see which ones we can regenerate.
+	_twisted-r1_create_caches "${TWISTED_PLUGINS[@]}"
+
+	# Finally, drop empty parent directories.
+	rmdir -p "${paths[@]}" 2>/dev/null
+}
+
+# @FUNCTION: twisted-r1_pkg_postinst
+# @DESCRIPTION:
+# Post-installation hook for twisted-r1. Updates plugin caches.
+twisted-r1_pkg_postinst() {
+	# Regenerate dropin.cache for every enabled Python implementation.
+	_distutils-r1_run_foreach_impl twisted-r1_update_plugin_cache
+}
+
+# @FUNCTION: twisted-r1_pkg_postrm
+# @DESCRIPTION:
+# Post-removal hook for twisted-r1. Updates plugin caches.
+twisted-r1_pkg_postrm() {
+	# Rebuild (or drop) dropin.cache now that this package's plugins
+	# are gone, for every enabled Python implementation.
+	_distutils-r1_run_foreach_impl twisted-r1_update_plugin_cache
+}
+
+_TWISTED_R1=1
+
+fi # ! ${_TWISTED_R1}
diff --git a/eclass/udev.eclass b/eclass/udev.eclass
new file mode 100644
index 000000000000..203d1426c88a
--- /dev/null
+++ b/eclass/udev.eclass
@@ -0,0 +1,115 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: udev.eclass
+# @MAINTAINER:
+# udev-bugs@gentoo.org
+# @BLURB: Default eclass for determining udev directories.
+# @DESCRIPTION:
+# Default eclass for determining udev directories.
+# @EXAMPLE:
+#
+# @CODE
+# inherit udev
+#
+# # Example of the eclass usage:
+# RDEPEND="virtual/udev"
+# DEPEND="${RDEPEND}"
+#
+# src_configure() {
+# econf --with-rulesdir="$(get_udevdir)"/rules.d
+# }
+#
+# src_install() {
+# default
+# # udev_dorules contrib/99-foomatic
+# # udev_newrules contrib/98-foomatic 99-foomatic
+# }
+# @CODE
+
+inherit toolchain-funcs
+
+case ${EAPI:-0} in
+ 0|1|2|3|4|5) ;;
+ *) die "${ECLASS}.eclass API in EAPI ${EAPI} not yet established."
+esac
+
+RDEPEND=""
+DEPEND="virtual/pkgconfig"
+
+# @FUNCTION: _udev_get_udevdir
+# @INTERNAL
+# @DESCRIPTION:
+# Get unprefixed udevdir.
+# Get unprefixed udevdir: ask pkg-config, fall back to /lib/udev when no
+# udev.pc is installed.
+_udev_get_udevdir() {
+	# Run pkg-config directly instead of the old
+	# `if $($(tc-getPKG_CONFIG) --exists udev)` form, which executed the
+	# (empty) command output and only worked via bash's null-command
+	# exit-status rule.
+	if $(tc-getPKG_CONFIG) --exists udev ; then
+		echo "$($(tc-getPKG_CONFIG) --variable=udevdir udev)"
+	else
+		echo /lib/udev
+	fi
+}
+
+# @FUNCTION: udev_get_udevdir
+# @DESCRIPTION:
+# Use the short version $(get_udevdir) instead!
+udev_get_udevdir() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Deprecated stub: dies unconditionally to flush out old callers;
+	# use get_udevdir() instead.
+	eerror "This ebuild should be using the get_udevdir() function instead of the deprecated udev_get_udevdir()"
+	die "Deprecated function call: udev_get_udevdir(), please report to (overlay) maintainers."
+}
+
+# @FUNCTION: get_udevdir
+# @DESCRIPTION:
+# Output the path for the udev directory (not including ${D}).
+# This function always succeeds, even if udev is not installed.
+# The fallback value is set to /lib/udev
+get_udevdir() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Delegate to the internal helper, which already writes the
+	# (possibly fallback) path to stdout.
+	_udev_get_udevdir
+}
+
+# @FUNCTION: udev_dorules
+# @USAGE: rules [...]
+# @DESCRIPTION:
+# Install udev rule(s). Uses doins, thus it is fatal in EAPI 4
+# and non-fatal in earlier EAPIs.
+udev_dorules() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Subshell keeps insinto's destination change from leaking into the
+	# caller's environment.
+	(
+	insinto "$(_udev_get_udevdir)"/rules.d
+	doins "${@}"
+	)
+}
+
+# @FUNCTION: udev_newrules
+# @USAGE: oldname newname
+# @DESCRIPTION:
+# Install udev rule with a new name. Uses newins, thus it is fatal
+# in EAPI 4 and non-fatal in earlier EAPIs.
+udev_newrules() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	# Subshell keeps insinto's destination change from leaking into the
+	# caller's environment.
+	(
+	insinto "$(_udev_get_udevdir)"/rules.d
+	newins "${@}"
+	)
+}
+
+# @FUNCTION: udev_reload
+# @DESCRIPTION:
+# Run udevadm control --reload to refresh rules and databases
+udev_reload() {
+	# Only poke the live system; skip when installing into an alternate ROOT.
+	[[ -n ${ROOT} && ${ROOT} != "/" ]] && return 0
+
+	# A populated /run/udev means a udev daemon is actually running.
+	if [[ -d ${ROOT}/run/udev ]]; then
+		ebegin "Running udev control --reload for reloading rules and databases"
+		udevadm control --reload
+		eend $?
+	fi
+}
diff --git a/eclass/unpacker.eclass b/eclass/unpacker.eclass
new file mode 100644
index 000000000000..9aacf6548126
--- /dev/null
+++ b/eclass/unpacker.eclass
@@ -0,0 +1,462 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: unpacker.eclass
+# @MAINTAINER:
+# base-system@gentoo.org
+# @BLURB: helpers for extraneous file formats and consistent behavior across EAPIs
+# @DESCRIPTION:
+# Some extraneous file formats are not part of PMS, or are only in certain
+# EAPIs. Rather than worrying about that, support the crazy cruft here
+# and for all EAPI versions.
+
+# Possible todos:
+# - merge rpm unpacking
+# - support partial unpacks?
+
+if [[ -z ${_UNPACKER_ECLASS} ]]; then
+_UNPACKER_ECLASS=1
+
+# @ECLASS-VARIABLE: UNPACKER_BZ2
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Utility to use to decompress bzip2 files. Will dynamically pick between
+# `pbzip2` and `bzip2`. Make sure your choice accepts the "-dc" options.
+# Note: this is meant for users to set, not ebuilds.
+
+# @ECLASS-VARIABLE: UNPACKER_LZIP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Utility to use to decompress lzip files. Will dynamically pick between
+# `plzip`, `pdlzip` and `lzip`. Make sure your choice accepts the "-dc" options.
+# Note: this is meant for users to set, not ebuilds.
+
+# for internal use only (unpack_pdv and unpack_makeself)
+find_unpackable_file() {
+ local src=$1
+ if [[ -z ${src} ]] ; then
+ src=${DISTDIR}/${A}
+ else
+ if [[ ${src} == ./* ]] ; then
+ : # already what we want
+ elif [[ -e ${DISTDIR}/${src} ]] ; then
+ src=${DISTDIR}/${src}
+ elif [[ -e ${PWD}/${src} ]] ; then
+ src=${PWD}/${src}
+ elif [[ -e ${src} ]] ; then
+ src=${src}
+ fi
+ fi
+ [[ ! -e ${src} ]] && return 1
+ echo "${src}"
+}
+
+unpack_banner() {
+	# Show only the basename of the archive being extracted.
+	printf '>>> Unpacking %s to %s\n' "${1##*/}" "${PWD}"
+}
+
+# @FUNCTION: unpack_pdv
+# @USAGE: <file to unpack> <size of off_t>
+# @DESCRIPTION:
+# Unpack those pesky pdv generated files ...
+# They're self-unpacking programs with the binary package stuffed in
+# the middle of the archive. Valve seems to use it a lot ... too bad
+# it seems to like to segfault a lot :(. So lets take it apart ourselves.
+#
+# You have to specify the off_t size ... I have no idea how to extract that
+# information out of the binary executable myself. Basically you pass in
+# the size of the off_t type (in bytes) on the machine that built the pdv
+# archive.
+#
+# One way to determine this is by running the following commands:
+#
+# @CODE
+# strings <pdv archive> | grep lseek
+# strace -elseek <pdv archive>
+# @CODE
+#
+# Basically look for the first lseek command (we do the strings/grep because
+# sometimes the function call is _llseek or something) and steal the 2nd
+# parameter. Here is an example:
+#
+# @CODE
+# $ strings hldsupdatetool.bin | grep lseek
+# lseek
+# $ strace -elseek ./hldsupdatetool.bin
+# lseek(3, -4, SEEK_END) = 2981250
+# @CODE
+#
+# Thus we would pass in the value of '4' as the second parameter.
+unpack_pdv() {
+	local src=$(find_unpackable_file "$1")
+	local sizeoff_t=$2
+
+	[[ -z ${src} ]] && die "Could not locate source for '$1'"
+	[[ -z ${sizeoff_t} ]] && die "No idea what off_t size was used for this pdv :("
+
+	unpack_banner "${src}"
+
+	# The archive stores two little-endian off_t values at its tail:
+	# byte offsets (from the end) of the metadata and of the payload.
+	local metaskip=$(tail -c ${sizeoff_t} "${src}" | hexdump -e \"%i\")
+	local tailskip=$(tail -c $((${sizeoff_t}*2)) "${src}" | head -c ${sizeoff_t} | hexdump -e \"%i\")
+
+	# grab metadata for debug reasons
+	local metafile="${T}/${FUNCNAME}.meta"
+	tail -c +$((${metaskip}+1)) "${src}" > "${metafile}"
+
+	# rip out the final file name from the metadata
+	local datafile=$(tail -c +$((${metaskip}+1)) "${src}" | strings | head -n 1)
+	datafile=$(basename "${datafile}")
+
+	# now lets uncompress/untar the file if need be
+	# Sniff the payload type from its first 512 bytes via file(1).
+	local tmpfile="${T}/${FUNCNAME}"
+	tail -c +$((${tailskip}+1)) ${src} 2>/dev/null | head -c 512 > "${tmpfile}"
+
+	local iscompressed=$(file -b "${tmpfile}")
+	if [[ ${iscompressed:0:8} == "compress" ]] ; then
+		iscompressed=1
+		# Decompress the sample so the tar check below sees real content.
+		mv "${tmpfile}"{,.Z}
+		gunzip "${tmpfile}"
+	else
+		iscompressed=0
+	fi
+	local istar=$(file -b "${tmpfile}")
+	if [[ ${istar:0:9} == "POSIX tar" ]] ; then
+		istar=1
+	else
+		istar=0
+	fi
+
+	#for some reason gzip dies with this ... dd cant provide buffer fast enough ?
+	#dd if=${src} ibs=${metaskip} count=1 \
+	#	| dd ibs=${tailskip} skip=1 \
+	#	| gzip -dc \
+	#	> ${datafile}
+	# Extract the payload (the bytes between tailskip and metaskip) with
+	# whichever combination of gunzip/tar applies.
+	if [ ${iscompressed} -eq 1 ] ; then
+		if [ ${istar} -eq 1 ] ; then
+			tail -c +$((${tailskip}+1)) "${src}" 2>/dev/null \
+				| head -c $((${metaskip}-${tailskip})) \
+				| tar -xzf -
+		else
+			tail -c +$((${tailskip}+1)) "${src}" 2>/dev/null \
+				| head -c $((${metaskip}-${tailskip})) \
+				| gzip -dc \
+				> ${datafile}
+		fi
+	else
+		if [ ${istar} -eq 1 ] ; then
+			tail -c +$((${tailskip}+1)) "${src}" 2>/dev/null \
+				| head -c $((${metaskip}-${tailskip})) \
+				| tar --no-same-owner -xf -
+		else
+			tail -c +$((${tailskip}+1)) "${src}" 2>/dev/null \
+				| head -c $((${metaskip}-${tailskip})) \
+				> ${datafile}
+		fi
+	fi
+	# Deliberately ignore the pipeline status (see disabled asserts below).
+	true
+	#[ -s "${datafile}" ] || die "failure unpacking pdv ('${metaskip}' '${tailskip}' '${datafile}')"
+	#assert "failure unpacking pdv ('${metaskip}' '${tailskip}' '${datafile}')"
+}
+
+# @FUNCTION: unpack_makeself
+# @USAGE: [file to unpack] [offset] [tail|dd]
+# @DESCRIPTION:
+# Unpack those pesky makeself generated files ...
+# They're shell scripts with the binary package tagged onto
+# the end of the archive. Loki utilized the format as does
+# many other game companies.
+#
+# If the file is not specified, then ${A} is used. If the
+# offset is not specified then we will attempt to extract
+# the proper offset from the script itself.
+unpack_makeself() {
+	local src_input=${1:-${A}}
+	local src=$(find_unpackable_file "${src_input}")
+	local skip=$2
+	local exe=$3
+
+	[[ -z ${src} ]] && die "Could not locate source for '${src_input}'"
+
+	unpack_banner "${src}"
+
+	if [[ -z ${skip} ]] ; then
+		# Auto-detect the payload offset: each makeself version embeds
+		# it differently in the stub script.
+		local ver=$(grep -m1 -a '#.*Makeself' "${src}" | awk '{print $NF}')
+		local skip=0
+		exe=tail
+		case ${ver} in
+			1.5.*|1.6.0-nv*)	# tested 1.5.{3,4,5} ... guessing 1.5.x series is same
+				skip=$(grep -a ^skip= "${src}" | cut -d= -f2)
+				;;
+			2.0|2.0.1)
+				skip=$(grep -a ^$'\t'tail "${src}" | awk '{print $2}' | cut -b2-)
+				;;
+			2.1.1)
+				skip=$(grep -a ^offset= "${src}" | awk '{print $2}' | cut -b2-)
+				(( skip++ ))
+				;;
+			2.1.2)
+				skip=$(grep -a ^offset= "${src}" | awk '{print $3}' | head -n 1)
+				(( skip++ ))
+				;;
+			2.1.3)
+				skip=`grep -a ^offset= "${src}" | awk '{print $3}'`
+				(( skip++ ))
+				;;
+			2.1.4|2.1.5|2.1.6|2.2.0)
+				# 2.1.4+ stores a line count; convert it to bytes for dd.
+				skip=$(grep -a offset=.*head.*wc "${src}" | awk '{print $3}' | head -n 1)
+				skip=$(head -n ${skip} "${src}" | wc -c)
+				exe="dd"
+				;;
+			*)
+				eerror "I'm sorry, but I was unable to support the Makeself file."
+				eerror "The version I detected was '${ver}'."
+				eerror "Please file a bug about the file ${src##*/} at"
+				eerror "http://bugs.gentoo.org/ so that support can be added."
+				die "makeself version '${ver}' not supported"
+				;;
+		esac
+		debug-print "Detected Makeself version ${ver} ... using ${skip} as offset"
+	fi
+	# Build the command that streams the raw payload (everything after
+	# the stub script) to stdout.
+	case ${exe} in
+		tail)	exe="tail -n +${skip} '${src}'";;
+		dd)	exe="dd ibs=${skip} skip=1 if='${src}'";;
+		*)	die "makeself cant handle exe '${exe}'"
+	esac
+
+	# lets grab the first few bytes of the file to figure out what kind of archive it is
+	local filetype tmpfile="${T}/${FUNCNAME}"
+	eval ${exe} 2>/dev/null | head -c 512 > "${tmpfile}"
+	filetype=$(file -b "${tmpfile}") || die
+	case ${filetype} in
+		*tar\ archive*)
+			eval ${exe} | tar --no-same-owner -xf -
+			;;
+		bzip2*)
+			eval ${exe} | bzip2 -dc | tar --no-same-owner -xf -
+			;;
+		gzip*)
+			eval ${exe} | tar --no-same-owner -xzf -
+			;;
+		compress*)
+			eval ${exe} | gunzip | tar --no-same-owner -xf -
+			;;
+		XZ*)
+			eval ${exe} | unxz | tar --no-same-owner -xf -
+			;;
+		*)
+			eerror "Unknown filetype \"${filetype}\" ?"
+			false
+			;;
+	esac
+	assert "failure unpacking (${filetype}) makeself ${src##*/} ('${ver}' +${skip})"
+}
+
+# @FUNCTION: unpack_deb
+# @USAGE: <one deb to unpack>
+# @DESCRIPTION:
+# Unpack a Debian .deb archive in style.
+unpack_deb() {
+	[[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
+
+	local deb=$(find_unpackable_file "$1")
+
+	unpack_banner "${deb}"
+
+	# on AIX ar doesn't work out as their ar used a different format
+	# from what GNU ar (and thus what .deb files) produce
+	if [[ -n ${EPREFIX} ]] ; then
+		# Parse the ar(1) "common format" by hand: a global "!<arch>"
+		# header followed by per-member headers; we only keep data.tar*.
+		{
+			read # global header
+			[[ ${REPLY} = "!<arch>" ]] || die "${deb} does not seem to be a deb archive"
+			local f timestamp uid gid mode size magic
+			while read f timestamp uid gid mode size magic ; do
+				[[ -n ${f} && -n ${size} ]] || continue # ignore empty lines
+				if [[ ${f} = "data.tar"* ]] ; then
+					head -c "${size}" > "${f}"
+				else
+					head -c "${size}" > /dev/null # trash it
+				fi
+			done
+		} < "${deb}"
+	else
+		ar x "${deb}"
+	fi
+
+	# Recurse to handle whatever compression data.tar uses (gz/xz/...).
+	unpacker ./data.tar*
+
+	# Clean things up #458658.  No one seems to actually care about
+	# these, so wait until someone requests to do something else ...
+	rm -f debian-binary {control,data}.tar*
+}
+
+# @FUNCTION: unpack_cpio
+# @USAGE: <one cpio to unpack>
+# @DESCRIPTION:
+# Unpack a cpio archive, file "-" means stdin.
+unpack_cpio() {
+	[[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
+
+	# cpio only consumes stdin, so either pass our stdin straight
+	# through ("-") or redirect the resolved archive into it.
+	local cpio_cmd=( cpio --make-directories --extract --preserve-modification-time )
+	if [[ $1 != "-" ]] ; then
+		local cpio=$(find_unpackable_file "$1")
+		unpack_banner "${cpio}"
+		"${cpio_cmd[@]}" <"${cpio}"
+	else
+		unpack_banner "stdin"
+		"${cpio_cmd[@]}"
+	fi
+}
+
+# @FUNCTION: unpack_zip
+# @USAGE: <zip file>
+# @DESCRIPTION:
+# Unpack zip archives.
+# This function ignores all non-fatal errors (i.e. warnings).
+# That is useful for zip archives with extra crap attached
+# (e.g. self-extracting archives).
+unpack_zip() {
+ [[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
+
+ local zip=$(find_unpackable_file "$1")
+ unpack_banner "${zip}"
+ unzip -qo "${zip}"
+
+ [[ $? -le 1 ]] || die "unpacking ${zip} failed (arch=unpack_zip)"
+}
+
+# @FUNCTION: _unpacker
+# @USAGE: <one archive to unpack>
+# @INTERNAL
+# @DESCRIPTION:
+# Unpack the specified archive. We only operate on one archive here
+# to keep down on the looping logic (that is handled by `unpacker`).
+_unpacker() {
+	[[ $# -eq 1 ]] || die "Usage: ${FUNCNAME} <file>"
+
+	local a=$1
+	# Lower-cased name used only for suffix matching below.
+	local m=$(echo "${a}" | tr '[:upper:]' '[:lower:]')
+	a=$(find_unpackable_file "${a}")
+
+	# first figure out the decompression method
+	# NOTE(review): comp/arch are intentionally(?) not 'local' here and
+	# leak into the caller's scope — confirm before changing.
+	case ${m} in
+	*.bz2|*.tbz|*.tbz2)
+		local bzcmd=${PORTAGE_BZIP2_COMMAND:-$(type -P pbzip2 || type -P bzip2)}
+		local bzuncmd=${PORTAGE_BUNZIP2_COMMAND:-${bzcmd} -d}
+		: ${UNPACKER_BZ2:=${bzuncmd}}
+		comp="${UNPACKER_BZ2} -c"
+		;;
+	*.z|*.gz|*.tgz)
+		comp="gzip -dc" ;;
+	*.lzma|*.xz|*.txz)
+		comp="xz -dc" ;;
+	*.lz)
+		: ${UNPACKER_LZIP:=$(type -P plzip || type -P pdlzip || type -P lzip)}
+		comp="${UNPACKER_LZIP} -dc" ;;
+	*)	comp="" ;;
+	esac
+
+	# then figure out if there are any archiving aspects
+	arch=""
+	case ${m} in
+	*.tgz|*.tbz|*.tbz2|*.txz|*.tar.*|*.tar)
+		arch="tar --no-same-owner -xof" ;;
+	*.cpio.*|*.cpio)
+		arch="unpack_cpio" ;;
+	*.deb)
+		arch="unpack_deb" ;;
+	*.run)
+		arch="unpack_makeself" ;;
+	*.sh)
+		# Not all shell scripts are makeself
+		if head -n 30 "${a}" | grep -qs '#.*Makeself' ; then
+			arch="unpack_makeself"
+		fi
+		;;
+	*.bin)
+		# Makeself archives can be annoyingly named
+		if head -c 100 "${a}" | grep -qs '#.*Makeself' ; then
+			arch="unpack_makeself"
+		fi
+		;;
+	*.zip)
+		arch="unpack_zip" ;;
+	esac
+
+	# finally do the unpack
+	# Neither compressed nor a known archive: fall back to PMS unpack().
+	if [[ -z ${arch}${comp} ]] ; then
+		unpack "$1"
+		return $?
+	fi
+
+	# The unpack_* helpers print their own banner.
+	[[ ${arch} != unpack_* ]] && unpack_banner "${a}"
+
+	if [[ -z ${arch} ]] ; then
+		# Need to decompress the file into $PWD #408801
+		local _a=${a%.*}
+		${comp} "${a}" > "${_a##*/}"
+	elif [[ -z ${comp} ]] ; then
+		${arch} "${a}"
+	else
+		${comp} "${a}" | ${arch} -
+	fi
+
+	assert "unpacking ${a} failed (comp=${comp} arch=${arch})"
+}
+
+# @FUNCTION: unpacker
+# @USAGE: [archives to unpack]
+# @DESCRIPTION:
+# This works in the same way that `unpack` does. If you don't specify
+# any files, it will default to ${A}.
+unpacker() {
+	local f
+	# No arguments: unpack every archive listed in ${A}.
+	[[ $# -eq 0 ]] && set -- ${A}
+	for f in "$@" ; do
+		_unpacker "${f}"
+	done
+}
+
+# @FUNCTION: unpacker_src_unpack
+# @DESCRIPTION:
+# Run `unpacker` to unpack all our stuff.
+unpacker_src_unpack() {
+	# Unpack all of ${A} via unpacker().
+	unpacker
+}
+
+# @FUNCTION: unpacker_src_uri_depends
+# @USAGE: [archives that we will unpack]
+# @RETURN: Dependencies needed to unpack all the archives
+# @DESCRIPTION:
+# Walk all the specified files (defaults to $SRC_URI) and figure out the
+# dependencies that are needed to unpack things.
+#
+# Note: USE flags are not yet handled.
+unpacker_src_uri_depends() {
+ local uri deps d
+
+ [[ $# -eq 0 ]] && set -- ${SRC_URI}
+
+ for uri in "$@" ; do
+ case ${uri} in
+ *.cpio.*|*.cpio)
+ d="app-arch/cpio" ;;
+ *.rar|*.RAR)
+ d="app-arch/unrar" ;;
+ *.7z)
+ d="app-arch/p7zip" ;;
+ *.xz)
+ d="app-arch/xz-utils" ;;
+ *.zip)
+ d="app-arch/unzip" ;;
+ *.lz)
+ d="|| ( app-arch/plzip app-arch/pdlzip app-arch/lzip )" ;;
+ esac
+ deps+=" ${d}"
+ done
+
+ echo "${deps}"
+}
+
+EXPORT_FUNCTIONS src_unpack
+
+fi
diff --git a/eclass/user.eclass b/eclass/user.eclass
new file mode 100644
index 000000000000..29910b51221d
--- /dev/null
+++ b/eclass/user.eclass
@@ -0,0 +1,466 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: user.eclass
+# @MAINTAINER:
+# base-system@gentoo.org (Linux)
+# Joe Jezak <josejx@gmail.com> (OS X)
+# usata@gentoo.org (OS X)
+# Aaron Walker <ka0ttic@gentoo.org> (FreeBSD)
+# @BLURB: user management in ebuilds
+# @DESCRIPTION:
+# The user eclass contains a suite of functions that allow ebuilds
+# to quickly make sure users in the installed system are sane.
+
+if [[ -z ${_USER_ECLASS} ]]; then
+_USER_ECLASS=1
+
+# @FUNCTION: _assert_pkg_ebuild_phase
+# @INTERNAL
+# @USAGE: <calling func name>
+_assert_pkg_ebuild_phase() {
+	# Die unless we are in one of the pkg_* phases where touching the
+	# live system's user/group databases is acceptable.
+	case ${EBUILD_PHASE} in
+	setup|preinst|postinst) ;;
+	*)
+		eerror "'$1()' called from '${EBUILD_PHASE}' phase which is not OK:"
+		eerror "You may only call from pkg_{setup,preinst,postinst} functions."
+		eerror "Package fails at QA and at life. Please file a bug."
+		die "Bad package! $1 is only for use in some pkg_* functions!"
+	esac
+}
+
+# @FUNCTION: egetent
+# @USAGE: <database> <key>
+# @DESCRIPTION:
+# Small wrapper for getent (Linux), nidump (< Mac OS X 10.5),
+# dscl (Mac OS X 10.5), and pw (FreeBSD) used in enewuser()/enewgroup().
+#
+# Supported databases: group passwd
+egetent() {
+	local db=$1 key=$2
+
+	# NOTE(review): this only rejects 3+ arguments; a missing <key>
+	# is not caught and simply yields an empty lookup.
+	[[ $# -ge 3 ]] && die "usage: egetent <database> <key>"
+
+	case ${db} in
+	passwd|group) ;;
+	*) die "sorry, database '${db}' not yet supported; file a bug" ;;
+	esac
+
+	case ${CHOST} in
+	*-darwin[678])
+		# Mac OS X 10.4 and earlier: query the NetInfo database.
+		case ${key} in
+		*[!0-9]*) # Non numeric
+			nidump ${db} . | awk -F: "(\$1 ~ /^${key}\$/) {print;exit;}"
+			;;
+		*)	# Numeric
+			nidump ${db} . | awk -F: "(\$3 == ${key}) {print;exit;}"
+			;;
+		esac
+		;;
+	*-darwin*)
+		# Mac OS X 10.5+: query Directory Services via dscl.
+		local mykey
+		case ${db} in
+		passwd) db="Users" mykey="UniqueID" ;;
+		group) db="Groups" mykey="PrimaryGroupID" ;;
+		esac
+
+		case ${key} in
+		*[!0-9]*) # Non numeric
+			dscl . -read /${db}/${key} 2>/dev/null |grep RecordName
+			;;
+		*)	# Numeric
+			dscl . -search /${db} ${mykey} ${key} 2>/dev/null
+			;;
+		esac
+		;;
+	*-freebsd*|*-dragonfly*)
+		# pw(8) calls the passwd database "user"
+		case ${db} in
+		passwd) db="user" ;;
+		*) ;;
+		esac
+
+		# lookup by uid/gid
+		local opts
+		if [[ ${key} == [[:digit:]]* ]] ; then
+			[[ ${db} == "user" ]] && opts="-u" || opts="-g"
+		fi
+
+		pw show ${db} ${opts} "${key}" -q
+		;;
+	*-netbsd*|*-openbsd*)
+		grep "${key}:\*:" /etc/${db}
+		;;
+	*)
+		# ignore output if nscd doesn't exist, or we're not running as root
+		nscd -i "${db}" 2>/dev/null
+		getent "${db}" "${key}"
+		;;
+	esac
+}
+
+# @FUNCTION: enewuser
+# @USAGE: <user> [uid] [shell] [homedir] [groups]
+# @DESCRIPTION:
+# Same as enewgroup, you are not required to understand how to properly add
+# a user to the system. The only required parameter is the username.
+# Default uid is (pass -1 for this) next available, default shell is
+# /bin/false, default homedir is /dev/null, and there are no default groups.
+enewuser() {
+	_assert_pkg_ebuild_phase ${FUNCNAME}
+
+	# get the username
+	local euser=$1; shift
+	if [[ -z ${euser} ]] ; then
+		eerror "No username specified !"
+		die "Cannot call enewuser without a username"
+	fi
+
+	# lets see if the username already exists
+	if [[ -n $(egetent passwd "${euser}") ]] ; then
+		return 0
+	fi
+	einfo "Adding user '${euser}' to your system ..."
+
+	# options to pass to useradd
+	local opts=()
+
+	# handle uid
+	local euid=$1; shift
+	if [[ -n ${euid} && ${euid} != -1 ]] ; then
+		if [[ ${euid} -gt 0 ]] ; then
+			if [[ -n $(egetent passwd ${euid}) ]] ; then
+				euid="next"
+			fi
+		else
+			eerror "Userid given but is not greater than 0 !"
+			die "${euid} is not a valid UID"
+		fi
+	else
+		euid="next"
+	fi
+	if [[ ${euid} == "next" ]] ; then
+		# allocate the first free uid from the system id range
+		for ((euid = 101; euid <= 999; euid++)); do
+			[[ -z $(egetent passwd ${euid}) ]] && break
+		done
+	fi
+	opts+=( -u ${euid} )
+	einfo " - Userid: ${euid}"
+
+	# handle shell
+	local eshell=$1; shift
+	if [[ ! -z ${eshell} ]] && [[ ${eshell} != "-1" ]] ; then
+		if [[ ! -e ${ROOT}${eshell} ]] ; then
+			eerror "A shell was specified but it does not exist !"
+			die "${eshell} does not exist in ${ROOT}"
+		fi
+		if [[ ${eshell} == */false || ${eshell} == */nologin ]] ; then
+			eerror "Do not specify ${eshell} yourself, use -1"
+			die "Pass '-1' as the shell parameter"
+		fi
+	else
+		# pick the first nologin-style shell that exists in ${ROOT}
+		for eshell in /sbin/nologin /usr/sbin/nologin /bin/false /usr/bin/false /dev/null ; do
+			[[ -x ${ROOT}${eshell} ]] && break
+		done
+
+		if [[ ${eshell} == "/dev/null" ]] ; then
+			eerror "Unable to identify the shell to use, proceeding with userland default."
+			case ${USERLAND} in
+				GNU) eshell="/bin/false" ;;
+				BSD) eshell="/sbin/nologin" ;;
+				Darwin) eshell="/usr/sbin/nologin" ;;
+				*) die "Unable to identify the default shell for userland ${USERLAND}"
+			esac
+		fi
+	fi
+	einfo " - Shell: ${eshell}"
+	opts+=( -s "${eshell}" )
+
+	# handle homedir
+	local ehome=$1; shift
+	if [[ -z ${ehome} ]] || [[ ${ehome} == "-1" ]] ; then
+		ehome="/dev/null"
+	fi
+	einfo " - Home: ${ehome}"
+	opts+=( -d "${ehome}" )
+
+	# handle groups
+	local egroups=$1; shift
+	local g egroups_arr
+	IFS="," read -r -a egroups_arr <<<"${egroups}"
+	# FIX: a stray second `shift` here used to consume one argument too
+	# many (the groups argument was already shifted away above), silently
+	# swallowing the first "extra" argument and defeating the
+	# extra-arguments check below.
+	if [[ ${#egroups_arr[@]} -gt 0 ]] ; then
+		local defgroup exgroups
+		for g in "${egroups_arr[@]}" ; do
+			if [[ -z $(egetent group "${g}") ]] ; then
+				eerror "You must add group ${g} to the system first"
+				die "${g} is not a valid GID"
+			fi
+			# first group becomes the primary group (-g), the rest
+			# are collected as supplementary groups (-G)
+			if [[ -z ${defgroup} ]] ; then
+				defgroup=${g}
+			else
+				exgroups+=",${g}"
+			fi
+		done
+		opts+=( -g "${defgroup}" )
+		if [[ ! -z ${exgroups} ]] ; then
+			opts+=( -G "${exgroups:1}" )
+		fi
+	fi
+	einfo " - Groups: ${egroups:-(none)}"
+
+	# handle extra args
+	if [[ $# -gt 0 ]] ; then
+		die "extra arguments no longer supported; please file a bug"
+	else
+		local comment="added by portage for ${PN}"
+		opts+=( -c "${comment}" )
+		einfo " - GECOS: ${comment}"
+	fi
+
+	# add the user
+	case ${CHOST} in
+	*-darwin*)
+		### Make the user
+		dscl . create "/users/${euser}" uid ${euid}
+		dscl . create "/users/${euser}" shell "${eshell}"
+		dscl . create "/users/${euser}" home "${ehome}"
+		dscl . create "/users/${euser}" realname "added by portage for ${PN}"
+		### Add the user to the groups specified
+		for g in "${egroups_arr[@]}" ; do
+			dscl . merge "/groups/${g}" users "${euser}"
+		done
+		;;
+
+	*-freebsd*|*-dragonfly*)
+		pw useradd "${euser}" "${opts[@]}" || die
+		;;
+
+	*-netbsd*)
+		useradd "${opts[@]}" "${euser}" || die
+		;;
+
+	*-openbsd*)
+		# all ops the same, except the -g vs -g/-G ...
+		useradd -u ${euid} -s "${eshell}" \
+			-d "${ehome}" -g "${egroups}" "${euser}" || die
+		;;
+
+	*)
+		useradd -r "${opts[@]}" "${euser}" || die
+		;;
+	esac
+
+	if [[ ! -e ${ROOT}/${ehome} ]] ; then
+		einfo " - Creating ${ehome} in ${ROOT}"
+		mkdir -p "${ROOT}/${ehome}"
+		chown "${euser}" "${ROOT}/${ehome}"
+		chmod 755 "${ROOT}/${ehome}"
+	fi
+}
+
+# @FUNCTION: enewgroup
+# @USAGE: <group> [gid]
+# @DESCRIPTION:
+# This function does not require you to understand how to properly add a
+# group to the system. Just give it a group name to add and enewgroup will
+# do the rest. You may specify the gid for the group or allow the group to
+# allocate the next available one.
+enewgroup() {
+	_assert_pkg_ebuild_phase ${FUNCNAME}
+
+	# get the group
+	local egroup=$1; shift
+	if [[ -z ${egroup} ]] ; then
+		eerror "No group specified !"
+		die "Cannot call enewgroup without a group"
+	fi
+
+	# see if group already exists
+	if [[ -n $(egetent group "${egroup}") ]] ; then
+		return 0
+	fi
+	einfo "Adding group '${egroup}' to your system ..."
+
+	# handle gid
+	local egid=$1; shift
+	if [[ ! -z ${egid} ]] ; then
+		if [[ ${egid} -gt 0 ]] ; then
+			if [[ -n $(egetent group ${egid}) ]] ; then
+				# sentinel: any non-numeric value makes
+				# _enewgroup_next_gid / groupadd pick a free GID
+				egid="next available; requested gid taken"
+			fi
+		else
+			eerror "Groupid given but is not greater than 0 !"
+			die "${egid} is not a valid GID"
+		fi
+	else
+		egid="next available"
+	fi
+	einfo " - Groupid: ${egid}"
+
+	# handle extra
+	if [[ $# -gt 0 ]] ; then
+		die "extra arguments no longer supported; please file a bug"
+	fi
+
+	# Some targets need to find the next available GID manually
+	# (rewrites ${egid} in this function's scope when it is non-numeric)
+	_enewgroup_next_gid() {
+		if [[ ${egid} == *[!0-9]* ]] ; then
+			# Non numeric
+			for ((egid = 101; egid <= 999; egid++)) ; do
+				[[ -z $(egetent group ${egid}) ]] && break
+			done
+		fi
+	}
+
+	# add the group
+	case ${CHOST} in
+	*-darwin*)
+		_enewgroup_next_gid
+		dscl . create "/groups/${egroup}" gid ${egid}
+		dscl . create "/groups/${egroup}" passwd '*'
+		;;
+
+	*-freebsd*|*-dragonfly*)
+		_enewgroup_next_gid
+		pw groupadd "${egroup}" -g ${egid} || die
+		;;
+
+	*-netbsd*)
+		_enewgroup_next_gid
+		groupadd -g ${egid} "${egroup}" || die
+		;;
+
+	*)
+		local opts
+		if [[ ${egid} == *[!0-9]* ]] ; then
+			# Non numeric; let groupadd figure out a GID for us
+			opts=""
+		else
+			opts="-g ${egid}"
+		fi
+		# We specify -r so that we get a GID in the system range from login.defs
+		groupadd -r ${opts} "${egroup}" || die
+		;;
+	esac
+}
+
+# @FUNCTION: egethome
+# @USAGE: <user>
+# @DESCRIPTION:
+# Gets the home directory for the specified user.
+egethome() {
+	local field
+
+	[[ $# -eq 1 ]] || die "usage: egethome <user>"
+
+	# The passwd record layout differs per platform: the darwin/BSD
+	# output carries the home dir in field 9, the classic colon
+	# separated /etc/passwd format in field 6.
+	case ${CHOST} in
+	*-darwin*|*-freebsd*|*-dragonfly*) field=9 ;;
+	*) field=6 ;; # Linux, NetBSD, OpenBSD, etc...
+	esac
+
+	egetent passwd "$1" | cut -d: -f${field}
+}
+
+# @FUNCTION: egetshell
+# @USAGE: <user>
+# @DESCRIPTION:
+# Gets the shell for the specified user.
+egetshell() {
+	local field
+
+	[[ $# -eq 1 ]] || die "usage: egetshell <user>"
+
+	# The passwd record layout differs per platform: the darwin/BSD
+	# output carries the shell in field 10, the classic colon
+	# separated /etc/passwd format in field 7.
+	case ${CHOST} in
+	*-darwin*|*-freebsd*|*-dragonfly*) field=10 ;;
+	*) field=7 ;; # Linux, NetBSD, OpenBSD, etc...
+	esac
+
+	egetent passwd "$1" | cut -d: -f${field}
+}
+
+# @FUNCTION: esethome
+# @USAGE: <user> <homedir>
+# @DESCRIPTION:
+# Update the home directory in a platform-agnostic way.
+# Required parameters is the username and the new home directory.
+# Specify -1 if you want to set home to the enewuser default
+# of /dev/null.
+# If the new home directory does not exist, it is created.
+# Any previously existing home directory is NOT moved.
+esethome() {
+	_assert_pkg_ebuild_phase ${FUNCNAME}
+
+	# get the username
+	local euser=$1; shift
+	if [[ -z ${euser} ]] ; then
+		eerror "No username specified !"
+		die "Cannot call esethome without a username"
+	fi
+
+	# lets see if the username already exists
+	if [[ -z $(egetent passwd "${euser}") ]] ; then
+		ewarn "User does not exist, cannot set home dir -- skipping."
+		return 1
+	fi
+
+	# handle homedir
+	local ehome=$1; shift
+	if [[ -z ${ehome} ]] ; then
+		eerror "No home directory specified !"
+		die "Cannot call esethome without a home directory or '-1'"
+	fi
+
+	# -1 maps to the enewuser default home
+	if [[ ${ehome} == "-1" ]] ; then
+		ehome="/dev/null"
+	fi
+
+	# exit with no message if home dir is up to date
+	if [[ $(egethome "${euser}") == ${ehome} ]]; then
+		return 0
+	fi
+
+	einfo "Updating home for user '${euser}' ..."
+	einfo " - Home: ${ehome}"
+
+	# ensure home directory exists, otherwise update will fail
+	if [[ ! -e ${ROOT}/${ehome} ]] ; then
+		einfo " - Creating ${ehome} in ${ROOT}"
+		mkdir -p "${ROOT}/${ehome}"
+		chown "${euser}" "${ROOT}/${ehome}"
+		chmod 755 "${ROOT}/${ehome}"
+	fi
+
+	# update the home directory
+	case ${CHOST} in
+	*-darwin*)
+		dscl . change "/users/${euser}" home "${ehome}"
+		;;
+
+	*-freebsd*|*-dragonfly*)
+		pw usermod "${euser}" -d "${ehome}" && return 0
+		# exit status 8: user is in use (cf. the eerror message below)
+		[[ $? == 8 ]] && eerror "${euser} is in use, cannot update home"
+		eerror "There was an error when attempting to update the home directory for ${euser}"
+		eerror "Please update it manually on your system:"
+		eerror "\t pw usermod \"${euser}\" -d \"${ehome}\""
+		;;
+
+	*)
+		usermod -d "${ehome}" "${euser}" && return 0
+		# exit status 8: user is in use (cf. the eerror message below)
+		[[ $? == 8 ]] && eerror "${euser} is in use, cannot update home"
+		eerror "There was an error when attempting to update the home directory for ${euser}"
+		eerror "Please update it manually on your system (as root):"
+		eerror "\t usermod -d \"${ehome}\" \"${euser}\""
+		;;
+	esac
+}
+
+fi
diff --git a/eclass/vala.eclass b/eclass/vala.eclass
new file mode 100644
index 000000000000..bc147931d45d
--- /dev/null
+++ b/eclass/vala.eclass
@@ -0,0 +1,149 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: vala.eclass
+# @MAINTAINER:
+# gnome@gentoo.org
+# @AUTHOR:
+# Alexandre Rostovtsev <tetromino@gentoo.org>
+# @BLURB: Sets up the environment for using a specific version of vala.
+# @DESCRIPTION:
+# This eclass sets up commonly used environment variables for using a specific
+# version of dev-lang/vala to configure and build a package. It is needed for
+# packages whose build systems assume the existence of certain unversioned vala
+# executables, pkgconfig files, etc., which Gentoo does not provide.
+#
+# This eclass provides one phase function: src_prepare.
+
+inherit eutils multilib
+
+case "${EAPI:-0}" in
+ 0) die "EAPI=0 is not supported" ;;
+ 1) ;;
+ *) EXPORT_FUNCTIONS src_prepare ;;
+esac
+
+# @ECLASS-VARIABLE: VALA_MIN_API_VERSION
+# @DESCRIPTION:
+# Minimum vala API version (e.g. 0.20).
+VALA_MIN_API_VERSION=${VALA_MIN_API_VERSION:-0.20}
+
+# @ECLASS-VARIABLE: VALA_MAX_API_VERSION
+# @DESCRIPTION:
+# Maximum vala API version (e.g. 0.28).
+VALA_MAX_API_VERSION=${VALA_MAX_API_VERSION:-0.28}
+
+# @ECLASS-VARIABLE: VALA_USE_DEPEND
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# USE dependencies that vala must be built with (e.g. vapigen).
+
+# @FUNCTION: vala_api_versions
+# @DESCRIPTION:
+# Outputs a list of vala API versions from VALA_MAX_API_VERSION down to
+# VALA_MIN_API_VERSION.
+vala_api_versions() {
+	[[ ${VALA_MIN_API_VERSION} =~ ^0\.[[:digit:]]+$ ]] || die "Invalid syntax of VALA_MIN_API_VERSION"
+	[[ ${VALA_MAX_API_VERSION} =~ ^0\.[[:digit:]]+$ ]] || die "Invalid syntax of VALA_MAX_API_VERSION"
+
+	# Dependency atoms are not generated for Vala versions older
+	# than 0.${oldest_supported_minor}.
+	local oldest_supported_minor=20
+	local minor
+
+	# walk the 0.x minor numbers downwards in steps of two
+	for ((minor = ${VALA_MAX_API_VERSION#*.}; minor >= ${VALA_MIN_API_VERSION#*.}; minor -= 2)); do
+		if ((minor >= oldest_supported_minor)); then
+			echo "0.${minor}"
+		fi
+	done
+}
+
+# @FUNCTION: vala_depend
+# @DESCRIPTION:
+# Outputs a ||-dependency string on vala from VALA_MAX_API_VERSION down to
+# VALA_MIN_API_VERSION
+vala_depend() {
+	local use_dep=""
+	[[ ${VALA_USE_DEPEND} ]] && use_dep="[${VALA_USE_DEPEND}(+)]"
+
+	# emit one slotted atom per candidate API version inside a ||-group
+	local slot
+	echo -n "|| ("
+	for slot in $(vala_api_versions); do
+		echo -n " dev-lang/vala:${slot}${use_dep}"
+	done
+	echo " )"
+}
+
+# @FUNCTION: vala_best_api_version
+# @DESCRIPTION:
+# Returns the highest installed vala API version satisfying
+# VALA_MAX_API_VERSION, VALA_MIN_API_VERSION, and VALA_USE_DEPEND.
+vala_best_api_version() {
+	local u v
+	[[ ${VALA_USE_DEPEND} ]] && u="[${VALA_USE_DEPEND}(+)]"
+	# vala_api_versions lists versions highest-first, so the first
+	# has_version hit is the best installed one.
+	for v in $(vala_api_versions); do
+		has_version "dev-lang/vala:${v}${u}" && echo "${v}" && return
+	done
+}
+
+# @FUNCTION: vala_src_prepare
+# @USAGE: [--ignore-use] [--vala-api-version api_version]
+# @DESCRIPTION:
+# Sets up the environment variables and pkgconfig files for the
+# specified API version, or, if no version is specified, for the
+# highest installed vala API version satisfying
+# VALA_MAX_API_VERSION, VALA_MIN_API_VERSION, and VALA_USE_DEPEND.
+# Is a no-op if called without --ignore-use when USE=-vala.
+# Dies if the USE check is passed (or ignored) and a suitable vala
+# version is not available.
+vala_src_prepare() {
+	local p d valafoo version ignore_use
+
+	# parse options
+	while [[ $1 ]]; do
+		case $1 in
+			"--ignore-use" )
+				ignore_use=1 ;;
+			"--vala-api-version" )
+				shift
+				version=$1
+				[[ ${version} ]] || die "'--vala-api-version' option requires API version parameter."
+		esac
+		shift
+	done
+
+	if [[ -z ${ignore_use} ]]; then
+		in_iuse vala && ! use vala && return 0
+	fi
+
+	# resolve the API version to use, or die if none is installed
+	if [[ ${version} ]]; then
+		has_version "dev-lang/vala:${version}" || die "No installed vala:${version}"
+	else
+		version=$(vala_best_api_version)
+		[[ ${version} ]] || die "No installed vala in $(vala_depend)"
+	fi
+
+	# point the build at the versioned vala tools
+	export VALAC=$(type -P valac-${version})
+
+	valafoo=$(type -P vala-gen-introspect-${version})
+	[[ ${valafoo} ]] && export VALA_GEN_INTROSPECT="${valafoo}"
+
+	valafoo=$(type -P vapigen-${version})
+	[[ ${valafoo} ]] && export VAPIGEN="${valafoo}"
+
+	valafoo="${EPREFIX}/usr/share/vala/Makefile.vapigen"
+	[[ -e ${valafoo} ]] && export VAPIGEN_MAKEFILE="${valafoo}"
+
+	export VAPIGEN_VAPIDIR="${EPREFIX}/usr/share/vala/vapi"
+
+	# expose unversioned libvala/vapigen .pc names via symlinks in ${T}
+	# and put that directory first on PKG_CONFIG_PATH
+	mkdir -p "${T}/pkgconfig" || die "mkdir failed"
+	for p in libvala vapigen; do
+		for d in "${EPREFIX}/usr/$(get_libdir)/pkgconfig" "${EPREFIX}/usr/share/pkgconfig"; do
+			if [[ -e ${d}/${p}-${version}.pc ]]; then
+				ln -s "${d}/${p}-${version}.pc" "${T}/pkgconfig/${p}.pc" || die "ln failed"
+				break
+			fi
+		done
+	done
+	: ${PKG_CONFIG_PATH:="${EPREFIX}/usr/$(get_libdir)/pkgconfig:${EPREFIX}/usr/share/pkgconfig"}
+	export PKG_CONFIG_PATH="${T}/pkgconfig:${PKG_CONFIG_PATH}"
+}
diff --git a/eclass/vcs-snapshot.eclass b/eclass/vcs-snapshot.eclass
new file mode 100644
index 000000000000..7e0c3fe3d443
--- /dev/null
+++ b/eclass/vcs-snapshot.eclass
@@ -0,0 +1,78 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: vcs-snapshot.eclass
+# @MAINTAINER:
+# mgorny@gentoo.org
+# @BLURB: support eclass for unpacking VCS snapshot tarballs
+# @DESCRIPTION:
+# This eclass provides a convenience src_unpack() which does unpack all
+# the tarballs in SRC_URI to locations matching their (local) names,
+# discarding the original parent directory.
+#
+# The typical use case are VCS snapshots, coming from github, bitbucket
+# and similar services. They have hash appended to the directory name
+# which makes extracting them a painful experience. But if you just use
+# a SRC_URI arrow to rename it (which you likely have to do anyway),
+# vcs-snapshot will just extract it into a matching directory.
+#
+# Please note that this eclass handles only tarballs (.tar, .tar.gz,
+# .tar.bz2 & .tar.xz). For any other file format (or suffix) it will
+# fall back to regular unpack. Support for additional formats may be
+# added at some point so please keep your SRC_URIs clean.
+#
+# @EXAMPLE:
+#
+# @CODE
+# EAPI=4
+# AUTOTOOLS_AUTORECONF=1
+# inherit autotools-utils vcs-snapshot
+#
+# SRC_URI="http://github.com/example/${PN}/tarball/v${PV} -> ${P}.tar.gz"
+# @CODE
+#
+# and however the tarball was originally named, all files will appear
+# in ${WORKDIR}/${P}.
+
+case ${EAPI:-0} in
+ 0|1|2|3|4|5) ;;
+ *) die "vcs-snapshot.eclass API in EAPI ${EAPI} not yet established."
+esac
+
+EXPORT_FUNCTIONS src_unpack
+
+# @FUNCTION: vcs-snapshot_src_unpack
+# @DESCRIPTION:
+# Extract all the archives from ${A}. The .tar, .tar.gz, .tar.bz2
+# and .tar.xz archives will be unpacked to directories matching their
+# local names. Other archive types will be passed down to regular
+# unpack.
+vcs-snapshot_src_unpack() {
+	debug-print-function ${FUNCNAME} "${@}"
+
+	local archive
+
+	for archive in ${A}
+	do
+		case "${archive}" in
+			*.tar|*.tar.gz|*.tar.bz2|*.tar.xz)
+				# unpack into a directory named after the (local)
+				# tarball name, discarding the top-level directory
+				local target=${WORKDIR}/${archive%.tar*}
+
+				debug-print "${FUNCNAME}: unpacking ${archive} to ${target}"
+
+				# XXX: check whether the directory structure inside is
+				# fine? i.e. if the tarball has actually a parent dir.
+				mkdir "${target}" || die
+				tar -C "${target}" -x --strip-components 1 \
+					-f "${DISTDIR}/${archive}" || die
+				;;
+			*)
+				debug-print "${FUNCNAME}: falling back to unpack for ${archive}"
+
+				# fall back to the default method
+				unpack "${archive}"
+				;;
+		esac
+	done
+}
diff --git a/eclass/vdr-plugin-2.eclass b/eclass/vdr-plugin-2.eclass
new file mode 100644
index 000000000000..b3ca9b0f086c
--- /dev/null
+++ b/eclass/vdr-plugin-2.eclass
@@ -0,0 +1,656 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: vdr-plugin-2.eclass
+# @MAINTAINER:
+# Gentoo VDR Project <vdr@gentoo.org>
+# @AUTHOR:
+# Matthias Schwarzott <zzam@gentoo.org>
+# Joerg Bornkessel <hd_brummy@gentoo.org>
+# Christian Ruppert <idl0r@gentoo.org>
+# (undisclosed contributors)
+# @BLURB: common vdr plugin ebuild functions
+# @DESCRIPTION:
+# Eclass for easing maintenance of vdr plugin ebuilds
+
+# @ECLASS-VARIABLE: VDR_CONFD_FILE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# A plugin config file can be specified through the $VDR_CONFD_FILE variable, it
+# defaults to ${FILESDIR}/confd. Each config file will be installed as e.g.
+# ${D}/etc/conf.d/vdr.${VDRPLUGIN}
+
+# @ECLASS-VARIABLE: VDR_RCADDON_FILE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Installing rc-addon files is basically the same as for plugin config files
+# (see above), it's just using the $VDR_RCADDON_FILE variable instead.
+# The default value when $VDR_RCADDON_FILE is undefined is:
+# ${FILESDIR}/rc-addon.sh and will be installed as
+# ${VDR_RC_DIR}/plugin-${VDRPLUGIN}.sh
+#
+# The rc-addon files will be sourced by the startscript when the specific plugin
+# has been enabled.
+# rc-addon files may be used to prepare everything that is necessary for the
+# plugin start/stop, like passing extra command line options and so on.
+#
+# NOTE: rc-addon files must be valid shell scripts!
+
+# @ECLASS-VARIABLE: GENTOO_VDR_CONDITIONAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# This is a hack for ebuilds like vdr-xineliboutput that want to
+# conditionally install a vdr-plugin
+
+# @ECLASS-VARIABLE: PO_SUBDIR
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# By default, translations are found in "${S}"/po but this
+# default can be overridden by defining PO_SUBDIR.
+#
+# Example:
+# @CODE
+# PO_SUBDIR="bla foo/bla"
+# @CODE
+
+# @ECLASS-VARIABLE: VDR_MAINTAINER_MODE
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Output from the dev_check function if it is defined in the ebuild or eclass;
+# helpful for Gentoo ebuild developers.
+#
+# This will also install any debug files in /usr/share/vdr/maintainer-data
+#
+# This is intended to be set by user in make.conf. Ebuilds must not set
+# it.
+#
+# VDR_MAINTAINER_MODE=1
+
+# @FUNCTION: fix_vdr_libsi_include
+# @DESCRIPTION:
+# Plugins failed on compile with wrong path of libsi includes,
+# this can be fixed by 'function + space separated list of files'
+#
+# Example:
+# @CODE
+# fix_vdr_libsi_include bla.c foo.c
+# @CODE
+
+# @FUNCTION: remove_i18n_include
+# @DESCRIPTION:
+# Compile will fail if plugin still use the old i18n language handling,
+# most parts are fixed by vdr-plugin-2.eclass internal functions itself.
+# Remove unneeded i18n.h includes from files; if they are still wrong there,
+# this can be fixed by 'function + space separated list of files'.
+#
+# Example:
+# @CODE
+# remove_i18n_include bla.n foo.n
+# @CODE
+
+# Applying your own local/user patches:
+# This is done by using the epatch_user() function of the eutils.eclass.
+# Simply add your patches into one of these directories:
+# /etc/portage/patches/<CATEGORY>/<PF|P|PN>/
+# Quote: where the first of these three directories to exist will be the one to
+# use, ignoring any more general directories which might exist as well.
+#
+# For more details about it please take a look at the eutils.class.
+
+inherit eutils flag-o-matic multilib toolchain-funcs unpacker
+
+case ${EAPI:-0} in
+ 4|5) ;;
+ *) die "EAPI ${EAPI} unsupported."
+esac
+
+EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_compile src_install pkg_postinst pkg_postrm pkg_config
+
+IUSE=""
+
+# Name of the plugin stripped from all vdrplugin-, vdr- and -cvs pre- and postfixes
+VDRPLUGIN="${PN/#vdrplugin-/}"
+VDRPLUGIN="${VDRPLUGIN/#vdr-/}"
+VDRPLUGIN="${VDRPLUGIN/%-cvs/}"
+
+DESCRIPTION="vdr Plugin: ${VDRPLUGIN} (based on vdr-plugin-2.eclass)"
+
+# Works in most cases
+S="${WORKDIR}/${VDRPLUGIN}-${PV}"
+
+# depend on headers for DVB-driver
+COMMON_DEPEND=">=media-tv/gentoo-vdr-scripts-0.4.2"
+
+DEPEND="${COMMON_DEPEND}
+ virtual/linuxtv-dvb-headers"
+RDEPEND="${COMMON_DEPEND}
+ >=app-eselect/eselect-vdr-0.0.2"
+
+if [[ "${GENTOO_VDR_CONDITIONAL:-no}" = "yes" ]]; then
+ IUSE="${IUSE} vdr"
+ DEPEND="vdr? ( ${DEPEND} )"
+ RDEPEND="vdr? ( ${RDEPEND} )"
+fi
+
+# New method of storing plugindb
+# Called from src_install
+# file maintained by normal portage-methods
+create_plugindb_file() {
+	# NOTE(review): NEW_VDRPLUGINDB_DIR ends with '/' so DB_FILE
+	# contains a double slash -- harmless, but untidy.
+	local NEW_VDRPLUGINDB_DIR=/usr/share/vdr/vdrplugin-rebuild/
+	local DB_FILE="${NEW_VDRPLUGINDB_DIR}/${CATEGORY}-${PF}"
+	insinto "${NEW_VDRPLUGINDB_DIR}"
+
+#	BUG: portage-2.1.4_rc9 will delete the EBUILD= line, so we cannot use this code.
+#	cat <<-EOT > "${D}/${DB_FILE}"
+#		VDRPLUGIN_DB=1
+#		CREATOR=ECLASS
+#		EBUILD=${CATEGORY}/${PN}
+#		EBUILD_V=${PVR}
+#	EOT
+	{
+		echo "VDRPLUGIN_DB=1"
+		echo "CREATOR=ECLASS"
+		echo "EBUILD=${CATEGORY}/${PN}"
+		echo "EBUILD_V=${PVR}"
+		echo "PLUGINS=\"$@\""
+	} > "${D}/${DB_FILE}"
+}
+
+create_header_checksum_file() {
+	# Danger: Not using $ROOT here, as compile will also not use it !!!
+	# If vdr in $ROOT and / differ, plugins will not run anyway
+
+	local CHKSUM="header-md5-vdr"
+
+	# Re-use vdr's own checksum file when present; otherwise hash the
+	# installed headers ourselves (sorted by filename for stability).
+	if [[ -f ${VDR_CHECKSUM_DIR}/header-md5-vdr ]]; then
+		cp "${VDR_CHECKSUM_DIR}/header-md5-vdr" "${CHKSUM}"
+	elif type -p md5sum >/dev/null 2>&1; then
+		(
+			cd "${VDR_INCLUDE_DIR}"
+			md5sum *.h libsi/*.h|LC_ALL=C sort --key=2
+		) > "${CHKSUM}"
+	else
+		die "Could not create md5 checksum of headers"
+	fi
+
+	insinto "${VDR_CHECKSUM_DIR}"
+	local p_name
+	# install one copy of the checksum file per plugin name given
+	for p_name; do
+		newins "${CHKSUM}" "header-md5-${p_name}"
+	done
+}
+
+fix_vdr_libsi_include() {
+	# Rewrite #include lines in the given files so that libsi headers
+	# are pulled in as <vdr/libsi/...> instead of local/relative paths.
+	dev_check "Fixing include of libsi-headers"
+
+	local src
+	for src in "$@"; do
+		sed -i "${src}" \
+			-e '/#include/s:"\(.*libsi.*\)":<\1>:' \
+			-e '/#include/s:<.*\(libsi/.*\)>:<vdr/\1>:'
+	done
+}
+
+vdr_patchmakefile() {
+	einfo "Patching Makefile"
+	[[ -e Makefile ]] || die "Makefile of plugin can not be found!"
+	cp Makefile "${WORKDIR}"/Makefile.before
+
+	# plugin makefiles use VDRDIR in strange ways
+	# assumptions:
+	#   1. $(VDRDIR) contains Make.config
+	#   2. $(VDRDIR) contains config.h
+	#   3. $(VDRDIR)/include/vdr contains the headers
+	#   4. $(VDRDIR) contains main vdr Makefile
+	#   5. $(VDRDIR)/locale exists
+	#   6. $(VDRDIR) allows to access vdr source files
+	#
+	# We only have one directory (for now /usr/include/vdr),
+	# that contains vdr-headers and Make.config.
+	# To satisfy 1-3 we do this:
+	#   Set VDRDIR=/usr/include/vdr
+	#   Set VDRINCDIR=/usr/include
+	#   Change $(VDRDIR)/include to $(VDRINCDIR)
+
+	sed -i Makefile \
+		-e "s:^VDRDIR.*$:VDRDIR = ${VDR_INCLUDE_DIR}:" \
+		-e "/^VDRDIR/a VDRINCDIR = ${VDR_INCLUDE_DIR%/vdr}" \
+		-e '/VDRINCDIR.*=/!s:$(VDRDIR)/include:$(VDRINCDIR):' \
+		\
+		-e 's:-I$(DVBDIR)/include::' \
+		-e 's:-I$(DVBDIR)::'
+
+	# may be needed for multiproto:
+	#sed -i Makefile \
+	#	-e "s:^DVBDIR.*$:DVBDIR = ${DVB_INCLUDE_DIR}:" \
+	#	-e 's:-I$(DVBDIR)/include:-I$(DVBDIR):'
+
+	# Old plugins hardcode APIVERSION from VDRVERSION; derive it from
+	# vdr's config.h instead so the .so gets the right suffix.
+	if ! grep -q APIVERSION Makefile; then
+		ebegin "  Converting to APIVERSION"
+		sed -i Makefile \
+			-e 's:^APIVERSION = :APIVERSION ?= :' \
+			-e 's:$(LIBDIR)/$@.$(VDRVERSION):$(LIBDIR)/$@.$(APIVERSION):' \
+			-e '/VDRVERSION =/a\APIVERSION = $(shell sed -ne '"'"'/define APIVERSION/s/^.*"\\(.*\\)".*$$/\\1/p'"'"' $(VDRDIR)/config.h)'
+		eend $?
+	fi
+
+	# Correcting Compile-Flags
+	# Do not overwrite CXXFLAGS, add LDFLAGS if missing
+	sed -i Makefile \
+		-e '/^CXXFLAGS[[:space:]]*=/s/=/?=/' \
+		-e '/LDFLAGS/!s:-shared:$(LDFLAGS) -shared:'
+
+	# Disabling file stripping, the package manager takes care of it
+	sed -i Makefile \
+		-e '/@.*strip/d' \
+		-e '/strip \$(LIBDIR)\/\$@/d' \
+		-e 's/STRIP.*=.*$/STRIP = true/'
+
+	# Use a file instead of a variable as single-stepping via ebuild
+	# destroys environment.
+	touch "${WORKDIR}"/.vdr-plugin_makefile_patched
+}
+
+dev_check() {
+ # A lot useful debug infos
+ # set VDR_MAINTAINER_MODE="1" in make.conf
+ if [[ -n ${VDR_MAINTAINER_MODE} ]]; then
+ eerror "\t Gentoo Developer Debug: $@"
+ fi
+}
+
+gettext_missing() {
+	# Warn (in maintainer mode) when a plugin has not yet been
+	# converted to gettext message handling.
+	local xgettext_hits=$( grep xgettext Makefile )
+	if [[ -z ${xgettext_hits} ]]; then
+		dev_check "Plugin isn't converted to gettext handling \n"
+	fi
+}
+
+detect_po_dir() {
+	# helper function: collect every directory that may carry a po/
+	# subdir with translations into the (global) pofile_dir array.
+
+	# FIX: "po" is a directory, so it must be tested with -d; the old
+	# -f test could never succeed and ${S} was always skipped.
+	[[ -d po ]] && local po_dir="${S}"
+	local po_subdir=( ${S}/${PO_SUBDIR} )
+
+	pofile_dir=( ${po_dir} ${po_subdir[*]} )
+}
+
+linguas_support() {
+#	Patching Makefile for linguas support.
+#	Only locales, enabled through the LINGUAS (make.conf) variable will be
+#	"compiled" and installed.
+
+	einfo "Patching for Linguas support"
+	einfo "available Languages for ${P} are:"
+
+	# fills the pofile_dir array with candidate directories
+	detect_po_dir
+
+	for f in ${pofile_dir[*]}; do
+		# NOTE(review): PLUGIN_LINGUAS keeps only the value from the
+		# last po dir processed; confirm that is intended for plugins
+		# with several PO_SUBDIR entries.
+		PLUGIN_LINGUAS=$( ls ${f}/po --ignore="*.pot" | sed -e "s:.po::g" | cut -d_ -f1 | tr \\\012 ' ' )
+		einfo "LINGUAS=\"${PLUGIN_LINGUAS}\""
+
+		sed -i ${f}/Makefile \
+			-e 's:\$(wildcard[[:space:]]*\$(PODIR)/\*.po):\$(foreach dir,\$(LINGUAS),\$(wildcard \$(PODIR)\/\$(dir)\*.po)):' \
+			|| die "sed failed for Linguas"
+	done
+
+	strip-linguas ${PLUGIN_LINGUAS} en
+}
+
+vdr_i18n() {
+#	i18n handling was deprecated since >=media-video/vdr-1.5.9,
+#	finally with >=media-video/vdr-1.7.27 it has been dropped entirely and some
+#	plugins will fail to "compile" because they're still using the old variant.
+#	Simply remove the i18n.o object from Makefile (OBJECT) and
+#	remove "static const tI18nPhrase*" from i18n.h.
+#
+#	Plugins that are still using the old method will be pmasked until they're
+#	fixed or in case of maintainer timeout they'll be masked for removal.
+
+	gettext_missing
+
+	# drop the obsolete i18n.o object from the Makefile, if present
+	local I18N_OBJECT=$( grep i18n.o Makefile )
+	if [[ -n ${I18N_OBJECT} ]]; then
+
+		if [[ "${KEEP_I18NOBJECT:-no}" = "yes" ]]; then
+			dev_check "Forced to keep i18n.o"
+		else
+			sed -i "s:i18n.o::g" Makefile
+			dev_check "OBJECT i18n.o found"
+			dev_check "removed per sed \n"
+		fi
+
+	else
+		dev_check "OBJECT i18n.o not found in Makefile"
+		dev_check "all fine or manual review needed? \n"
+	fi
+
+	# comment out the obsolete tI18nPhrase declaration in i18n.h
+	local I18N_STRING=$( [[ -e i18n.h ]] && grep tI18nPhrase i18n.h )
+	if [[ -n ${I18N_STRING} ]]; then
+		sed -i "s:^extern[[:space:]]*const[[:space:]]*tI18nPhrase://static const tI18nPhrase:" i18n.h
+		dev_check "obsolete tI18nPhrase found"
+		dev_check "disabled per sed, please recheck \n"
+	else
+		dev_check "obsolete tI18nPhrase not found, fine..."
+		dev_check "please review, may be in subdir... \n"
+	fi
+}
+
+remove_i18n_include() {
+	# Comment out obsolete '#include "i18n.h"' lines in the given files.
+
+	local src_file
+	for src_file in "$@"; do
+		sed -i "${src_file}" \
+			-e "s:^#include[[:space:]]*\"i18n.h\"://:"
+	done
+
+	dev_check "removed i18n.h include in ${@}"
+}
+
+vdr-plugin-2_print_enable_command() {
+	# Tell the user how to activate the freshly installed plugin(s).
+	local plugin count=0 names=""
+	for plugin in ${vdr_plugin_list}; do
+		count=$(( count + 1 ))
+		names="${names} ${plugin#vdr-}"
+	done
+
+	elog
+	if [[ ${count} -eq 1 ]]; then
+		elog "Installed plugin${names}"
+	else
+		elog "Installed ${count} plugins:${names}"
+	fi
+	elog "To activate a plugin execute this command:"
+	elog "\teselect vdr-plugin enable <plugin_name> ..."
+	elog
+}
+
+has_vdr() {
+	# True when the VDR headers are installed, i.e. we can build against vdr.
+	[[ -f "${VDR_INCLUDE_DIR}"/config.h ]]
+}
+
+## exported functions
+
+vdr-plugin-2_pkg_setup() {
+	# missing ${chost}- tag
+	tc-export CC CXX
+
+	# -fPIC is needed for shared objects on some platforms (amd64 and others)
+	append-flags -fPIC
+
+	# Plugins need to be compiled with position independent code, otherwise linking
+	# VDR against it will fail
+	if has_version ">=media-video/vdr-1.7.13"; then
+		append-cxxflags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
+	fi
+
+	# Where should the plugins live in the filesystem
+	if has_version ">=media-video/vdr-1.7.34"; then
+		# ask vdr's pkg-config file for the canonical plugin dir
+		VDR_PLUGIN_DIR=$(pkg-config --variable=libdir vdr)
+	else
+		# obsolete, as we have only >=media-video/vdr-2
+		VDR_PLUGIN_DIR="/usr/$(get_libdir)/vdr/plugins"
+	fi
+
+	VDR_CHECKSUM_DIR="${VDR_PLUGIN_DIR%/plugins}/checksums"
+
+	# was /usr/lib/... some time ago
+	# since gentoo-vdr-scripts-0.3.6 it works with /usr/share/...
+	VDR_RC_DIR="/usr/share/vdr/rcscript"
+
+	# Pathes to includes
+	VDR_INCLUDE_DIR="/usr/include/vdr"
+	DVB_INCLUDE_DIR="/usr/include"
+
+	TMP_LOCALE_DIR="${WORKDIR}/tmp-locale"
+
+	if has_version ">=media-video/vdr-1.7.34"; then
+		LOCDIR=$(pkg-config --variable=locdir vdr)
+	else
+		# obsolete, as we have only >=media-video/vdr-2
+		LOCDIR="/usr/share/locale"
+	fi
+
+	if ! has_vdr; then
+		# set to invalid values to detect abuses
+		VDRVERSION="eclass_no_vdr_installed"
+		APIVERSION="eclass_no_vdr_installed"
+
+		# only a hard error when the plugin is unconditionally a
+		# vdr plugin (GENTOO_VDR_CONDITIONAL unset or USE=vdr set)
+		if [[ "${GENTOO_VDR_CONDITIONAL:-no}" = "yes" ]] && ! use vdr; then
+			einfo "VDR not found!"
+		else
+			# if vdr is required
+			die "VDR not found!"
+		fi
+		return
+	fi
+
+	if has_version ">=media-video/vdr-1.7.34"; then
+		VDRVERSION=$(awk -F'"' '/define VDRVERSION/ {print $2}' "${VDR_INCLUDE_DIR}"/config.h)
+		APIVERSION=$(pkg-config --variable=apiversion vdr)
+	else
+		# obsolete, as we have only >=media-video/vdr-2
+		VDRVERSION=$(awk -F'"' '/define VDRVERSION/ {print $2}' "${VDR_INCLUDE_DIR}"/config.h)
+		APIVERSION=$(awk -F'"' '/define APIVERSION/ {print $2}' "${VDR_INCLUDE_DIR}"/config.h)
+		[[ -z ${APIVERSION} ]] && APIVERSION="${VDRVERSION}"
+	fi
+
+	einfo "Compiling against"
+	einfo "\tvdr-${VDRVERSION} [API version ${APIVERSION}]"
+
+	if [[ -n "${VDR_LOCAL_PATCHES_DIR}" ]]; then
+		eerror "Using VDR_LOCAL_PATCHES_DIR is deprecated!"
+		eerror "Please move all your patches into"
+		eerror "${EROOT}/etc/portage/patches/${CATEGORY}/${P}"
+		eerror "and remove or unset the VDR_LOCAL_PATCHES_DIR variable."
+		die
+	fi
+}
+
+# vdr-plugin-2_src_util <step>...
+# Dispatcher for the individual unpack/prepare steps; each argument names
+# one step.  "all" and "prepare" expand recursively into the canonical
+# sequence of concrete steps.
+vdr-plugin-2_src_util() {
+	while [ "$1" ]; do
+		case "$1" in
+		all)
+			# full sequence: fetch sources, then all prepare steps
+			vdr-plugin-2_src_util unpack add_local_patch patchmakefile linguas_patch i18n
+			;;
+		prepare)
+			# prepare-only sequence (sources already unpacked)
+			vdr-plugin-2_src_util add_local_patch patchmakefile linguas_patch i18n
+			;;
+		unpack)
+			unpacker_src_unpack
+			;;
+		add_local_patch)
+			cd "${S}" || die "Could not change to plugin-source-directory!"
+			epatch_user
+			;;
+		patchmakefile)
+			cd "${S}" || die "Could not change to plugin-source-directory!"
+			vdr_patchmakefile
+			;;
+		i18n)
+			vdr_i18n
+			;;
+		linguas_patch)
+			linguas_support
+			;;
+		esac
+
+		shift
+	done
+}
+
+vdr-plugin-2_src_unpack() {
+ if [[ -z ${VDR_INCLUDE_DIR} ]]; then
+ eerror "Wrong use of vdr-plugin-2.eclass."
+ eerror "An ebuild for a vdr-plugin will not work without calling vdr-plugin-2_src_unpack."
+ echo
+ eerror "Please report this at bugs.gentoo.org."
+ die "vdr-plugin-2_src_unpack not called!"
+ fi
+
+ if [ -z "$1" ]; then
+ vdr-plugin-2_src_util unpack
+ else
+ vdr-plugin-2_src_util $@
+ fi
+}
+
+# vdr-plugin-2_src_prepare
+# src_prepare replacement: apply the ebuild's PATCHES array, then run the
+# standard prepare steps (user patches, Makefile patching, LINGUAS, i18n).
+vdr-plugin-2_src_prepare() {
+	if [[ -z ${VDR_INCLUDE_DIR} ]]; then
+		eerror "Wrong use of vdr-plugin-2.eclass."
+		eerror "An ebuild for a vdr-plugin will not work without calling vdr-plugin-2_src_prepare."
+		echo
+		eerror "Please report this at bugs.gentoo.org."
+		die "vdr-plugin-2_src_prepare not called!"
+	fi
+
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+	# NOTE(review): user patches are actually applied by the add_local_patch
+	# step inside vdr-plugin-2_src_util below, not by the epatch call above
+	debug-print "$FUNCNAME: applying user patches"
+
+	vdr-plugin-2_src_util prepare
+}
+
+# vdr-plugin-2_src_compile [step...]
+# src_compile replacement.  Without arguments the "compile" step is run,
+# which verifies that the Makefile was patched (marker file written by
+# vdr_patchmakefile) and then invokes emake with the eclass' dir overrides.
+vdr-plugin-2_src_compile() {
+	[ -z "$1" ] && vdr-plugin-2_src_compile compile
+
+	while [ "$1" ]; do
+		case "$1" in
+		compile)
+			# the marker is created when the Makefile was patched;
+			# without it the build would use upstream's install paths
+			if [[ ! -f ${WORKDIR}/.vdr-plugin_makefile_patched ]]; then
+				eerror "Wrong use of vdr-plugin-2.eclass."
+				eerror "An ebuild for a vdr-plugin will not work without"
+				eerror "calling vdr-plugin-2_src_compile to patch the Makefile."
+				echo
+				eerror "Please report this at bugs.gentoo.org."
+				die "vdr-plugin-2_src_compile not called!"
+			fi
+			cd "${S}"
+
+			# ebuilds may override the make target(s) via BUILD_TARGETS
+			# or the legacy VDRPLUGIN_MAKE_TARGET variable
+			BUILD_TARGETS=${BUILD_TARGETS:-${VDRPLUGIN_MAKE_TARGET:-all }}
+			emake ${BUILD_PARAMS} \
+				${BUILD_TARGETS} \
+				LOCALEDIR="${TMP_LOCALE_DIR}" \
+				LOCDIR="${TMP_LOCALE_DIR}" \
+				LIBDIR="${S}" \
+				TMPDIR="${T}" \
+				|| die "emake failed"
+			;;
+		esac
+
+		shift
+	done
+}
+
+# vdr-plugin-2_src_install
+# src_install replacement: install the plugin .so (via the Makefile's
+# install target when it defines SOFILE, else by copying), locales,
+# docs, conf.d file and rc-addon script; record maintainer Makefile
+# diffs, header checksums and the plugin DB entry.
+vdr-plugin-2_src_install() {
+	if [[ -z ${VDR_INCLUDE_DIR} ]]; then
+		eerror "Wrong use of vdr-plugin-2.eclass."
+		eerror "An ebuild for a vdr-plugin will not work without calling vdr-plugin-2_src_install."
+		echo
+		eerror "Please report this at bugs.gentoo.org."
+		die "vdr-plugin-2_src_install not called!"
+	fi
+
+	cd "${WORKDIR}"
+
+	# in maintainer mode, preserve before/after Makefiles plus their diff
+	# for inspection under /usr/share/vdr/maintainer-data
+	if [[ -n ${VDR_MAINTAINER_MODE} ]]; then
+		local mname="${P}-Makefile"
+		cp "${S}"/Makefile "${mname}.patched"
+		cp Makefile.before "${mname}.before"
+
+		diff -u "${mname}.before" "${mname}.patched" > "${mname}.diff"
+
+		insinto "/usr/share/vdr/maintainer-data/makefile-changes"
+		doins "${mname}.diff"
+
+		insinto "/usr/share/vdr/maintainer-data/makefile-before"
+		doins "${mname}.before"
+
+		insinto "/usr/share/vdr/maintainer-data/makefile-patched"
+		doins "${mname}.patched"
+
+	fi
+
+	cd "${S}"
+
+	# new-style Makefiles define SOFILE and provide an install target
+	local SOFILE_STRING=$(grep SOFILE Makefile)
+	if [[ -n ${SOFILE_STRING} ]]; then
+		BUILD_TARGETS=${BUILD_TARGETS:-${VDRPLUGIN_MAKE_TARGET:-install }}
+		einstall ${BUILD_PARAMS} \
+			${BUILD_TARGETS} \
+			TMPDIR="${T}" \
+			DESTDIR="${D}" \
+			|| die "einstall (makefile target) failed"
+	else
+		dev_check "Plugin use still the old Makefile handling"
+		insinto "${VDR_PLUGIN_DIR}"
+		doins libvdr-*.so.*
+	fi
+
+	if [[ -d ${TMP_LOCALE_DIR} ]]; then
+		einfo "Installing locales"
+		cd "${TMP_LOCALE_DIR}"
+
+		# NOTE(review): ${D} and ${linguas} are unquoted here; paths with
+		# spaces would break — verify before relying on this with such paths
+		local linguas
+		for linguas in ${LINGUAS[*]}; do
+			insinto "${LOCDIR}"
+			cp -r --parents ${linguas}* ${D}/${LOCDIR}
+		done
+	fi
+
+	# NOTE(review): this hardcodes the plugin path instead of reusing
+	# ${VDR_PLUGIN_DIR} computed in pkg_setup — confirm they always match
+	cd "${D}/usr/$(get_libdir)/vdr/plugins"
+
+	# create list of all created plugin libs
+	vdr_plugin_list=""
+	local p_name
+	for p in libvdr-*.so.*; do
+		# libvdr-foo.so.2.2.0 -> vdr-foo
+		p_name="${p%.so*}"
+		p_name="${p_name#lib}"
+		vdr_plugin_list="${vdr_plugin_list} ${p_name}"
+	done
+
+	cd "${S}"
+
+	create_header_checksum_file ${vdr_plugin_list}
+	create_plugindb_file ${vdr_plugin_list}
+
+	local docfile
+	for docfile in README* HISTORY CHANGELOG; do
+		[[ -f ${docfile} ]] && dodoc ${docfile}
+	done
+
+	# if VDR_CONFD_FILE is empty and ${FILESDIR}/confd exists take it
+	[[ -z ${VDR_CONFD_FILE} ]] && [[ -e ${FILESDIR}/confd ]] && VDR_CONFD_FILE=${FILESDIR}/confd
+
+	if [[ -n ${VDR_CONFD_FILE} ]]; then
+		newconfd "${VDR_CONFD_FILE}" vdr.${VDRPLUGIN}
+	fi
+
+	# if VDR_RCADDON_FILE is empty and ${FILESDIR}/rc-addon.sh exists take it
+	[[ -z ${VDR_RCADDON_FILE} ]] && [[ -e ${FILESDIR}/rc-addon.sh ]] && VDR_RCADDON_FILE=${FILESDIR}/rc-addon.sh
+
+	if [[ -n ${VDR_RCADDON_FILE} ]]; then
+		insinto "${VDR_RC_DIR}"
+		newins "${VDR_RCADDON_FILE}" plugin-${VDRPLUGIN}.sh
+	fi
+}
+
+vdr-plugin-2_pkg_postinst() {
+ vdr-plugin-2_print_enable_command
+
+ if [[ -n "${VDR_CONFD_FILE}" ]]; then
+ elog "Please have a look at the config-file"
+ elog "\t/etc/conf.d/vdr.${VDRPLUGIN}"
+ elog
+ fi
+}
+
+# vdr-plugin-2_pkg_postrm
+# Intentional no-op: nothing to clean up when a plugin is unmerged.
+vdr-plugin-2_pkg_postrm() {
+	return 0
+}
+
+# vdr-plugin-2_pkg_config
+# Intentional no-op: plugins have no interactive configuration step.
+vdr-plugin-2_pkg_config() {
+	return 0
+}
diff --git a/eclass/versionator.eclass b/eclass/versionator.eclass
new file mode 100644
index 000000000000..74e676ee4e80
--- /dev/null
+++ b/eclass/versionator.eclass
@@ -0,0 +1,510 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: versionator.eclass
+# @MAINTAINER:
+# Jonathan Callen <jcallen@gentoo.org>
+# base-system@gentoo.org
+# @BLURB: functions which simplify manipulation of ${PV} and similar version strings
+# @DESCRIPTION:
+# This eclass provides functions which simplify manipulating $PV and similar
+# variables. Most functions default to working with $PV, although other
+# values can be used.
+# @EXAMPLE:
+# Simple Example 1: $PV is 1.2.3b, we want 1_2.3b:
+# MY_PV=$(replace_version_separator 1 '_' )
+#
+# Simple Example 2: $PV is 1.4.5, we want 1:
+# MY_MAJORV=$(get_major_version )
+#
+# Rather than being a number, the index parameter can be a separator character
+# such as '-', '.' or '_'. In this case, the first separator of this kind is
+# selected.
+#
+# There's also:
+# version_is_at_least want have
+# which may be buggy, so use with caution.
+
+if [[ -z ${_VERSIONATOR_ECLASS} ]]; then
+_VERSIONATOR_ECLASS=1
+
+inherit eutils
+
+# @FUNCTION: get_all_version_components
+# @USAGE: [version]
+# @DESCRIPTION:
+# Split up a version string into its component parts. If no parameter is
+# supplied, defaults to $PV.
+# 0.8.3 -> 0 . 8 . 3
+# 7c -> 7 c
+# 3.0_p2 -> 3 . 0 _ p2
+# 20040905 -> 20040905
+# 3.0c-r1 -> 3 . 0 c - r1
+get_all_version_components() {
+	# extglob is needed for the +(...) / *(...) patterns below
+	eshopts_push -s extglob
+	local ver_str=${1:-${PV}} result
+	result=()
+
+	# sneaky cache trick cache to avoid having to parse the same thing several
+	# times.  The cache lives in two exported variables, so it survives
+	# across subshell invocations of this function.
+	if [[ ${VERSIONATOR_CACHE_VER_STR} == ${ver_str} ]] ; then
+		echo ${VERSIONATOR_CACHE_RESULT}
+		eshopts_pop
+		return
+	fi
+	export VERSIONATOR_CACHE_VER_STR=${ver_str}
+
+	# consume the string token by token, classifying by its first character
+	while [[ -n $ver_str ]] ; do
+		case "${ver_str::1}" in
+			# number: parse whilst we have a number
+			[[:digit:]])
+				result+=("${ver_str%%[^[:digit:]]*}")
+				ver_str=${ver_str##+([[:digit:]])}
+				;;
+
+			# separator: single character
+			[-_.])
+				result+=("${ver_str::1}")
+				ver_str=${ver_str:1}
+				;;
+
+			# letter: grab the letters plus any following numbers
+			[[:alpha:]])
+				local not_match=${ver_str##+([[:alpha:]])*([[:digit:]])}
+				# Can't say "${ver_str::-${#not_match}}" in Bash 3.2
+				result+=("${ver_str::${#ver_str} - ${#not_match}}")
+				ver_str=${not_match}
+				;;
+
+			# huh?
+			*)
+				result+=("${ver_str::1}")
+				ver_str=${ver_str:1}
+				;;
+		esac
+	done
+
+	export VERSIONATOR_CACHE_RESULT=${result[*]}
+	echo ${result[@]}
+	eshopts_pop
+}
+
+# @FUNCTION: get_version_components
+# @USAGE: [version]
+# @DESCRIPTION:
+# Print the meaningful version components, dropping the separators '.',
+# '-' and '_'.  Defaults to $PV if no parameter is supplied.
+# 0.8.3 -> 0 8 3
+# 7c -> 7 c
+# 3.0_p2 -> 3 0 p2
+# 20040905 -> 20040905
+# 3.0c-r1 -> 3 0 c r1
+get_version_components() {
+	local all
+	all=$(get_all_version_components "${1:-${PV}}")
+	# turning separators into spaces makes them vanish on word splitting
+	echo ${all//[-._]/ }
+}
+
+# @FUNCTION: get_major_version
+# @USAGE: [version]
+# @DESCRIPTION:
+# Print the first component of a version. Defaults to $PV if no parameter
+# is supplied.
+# 0.8.3 -> 0
+# 7c -> 7
+# 3.0_p2 -> 3
+# 20040905 -> 20040905
+# 3.0c-r1 -> 3
+get_major_version() {
+	local -a parts
+	parts=($(get_all_version_components "${1:-${PV}}"))
+	echo ${parts[0]}
+}
+
+# @FUNCTION: get_version_component_range
+# @USAGE: <range> [version]
+# @DESCRIPTION:
+# Get a particular component or range of components from the version. If no
+# version parameter is supplied, defaults to $PV.  A range is "N", "N-M",
+# or "N-" (through the end); components are counted from 1.
+# 1 1.2.3 -> 1
+# 1-2 1.2.3 -> 1.2
+# 2- 1.2.3 -> 2.3
+get_version_component_range() {
+	eshopts_push -s extglob
+	local c v="${2:-${PV}}" range="${1}" range_start range_end
+	# i indexes into the raw component array (incl. separators),
+	# j counts only the non-separator components seen so far
+	local -i i=-1 j=0
+	c=($(get_all_version_components "${v}"))
+	# "N" behaves as "N-N"; a missing start means 1, missing end means last
+	range_start=${range%-*}; range_start=${range_start:-1}
+	range_end=${range#*-}  ; range_end=${range_end:-${#c[@]}}
+
+	# advance i to the array slot of the range_start'th real component
+	while ((j < range_start)); do
+		i+=1
+		((i > ${#c[@]})) && eshopts_pop && return
+		[[ -n "${c[i]//[-._]}" ]] && j+=1
+	done
+
+	# emit slots (components and the separators between them) until the
+	# range_end'th component has been printed
+	while ((j <= range_end)); do
+		echo -n ${c[i]}
+		((i > ${#c[@]})) && eshopts_pop && return
+		[[ -n "${c[i]//[-._]}" ]] && j+=1
+		i+=1
+	done
+	eshopts_pop
+}
+
+# @FUNCTION: get_after_major_version
+# @USAGE: [version]
+# @DESCRIPTION:
+# Print everything after the major version and its separator (if present).
+# Defaults to $PV if no parameter is supplied.
+# 0.8.3 -> 8.3
+# 7c -> c
+# 3.0_p2 -> 0_p2
+# 20040905 -> (empty string)
+# 3.0c-r1 -> 0c-r1
+get_after_major_version() {
+	local rest
+	rest=$(get_version_component_range 2- "${1:-${PV}}")
+	echo ${rest}
+}
+
+# @FUNCTION: replace_version_separator
+# @USAGE: <search> <replacement> [subject]
+# @DESCRIPTION:
+# Replace the $1th separator with $2 in $3 (defaults to $PV if $3 is not
+# supplied). If there are fewer than $1 separators, don't change anything.
+# 1 '_' 1.2.3 -> 1_2.3
+# 2 '_' 1.2.3 -> 1.2_3
+# 1 '_' 1b-2.3 -> 1b_2.3
+# Rather than being a number, $1 can be a separator character such as '-', '.'
+# or '_'. In this case, the first separator of this kind is selected.
+replace_version_separator() {
+	eshopts_push -s extglob
+	local w c v="${3:-${PV}}"
+	declare -i i found=0
+	# w is the search key: an index (default 1) or a separator character
+	w=${1:-1}
+	c=($(get_all_version_components ${v}))
+	if [[ ${w} != *[[:digit:]]* ]] ; then
+		# it's a character, not an index
+		for ((i = 0; i < ${#c[@]}; i++)); do
+			if [[ ${c[i]} == ${w} ]]; then
+				c[i]=${2}
+				break
+			fi
+		done
+	else
+		# it's an index: count separators until the w'th one
+		for ((i = 0; i < ${#c[@]}; i++)); do
+			if [[ -n "${c[i]//[^-._]}" ]]; then
+				found+=1
+				if ((found == w)); then
+					c[i]=${2}
+					break
+				fi
+			fi
+		done
+	fi
+	# flatten the array (space-joined) and strip the join spaces
+	c=${c[*]}
+	echo ${c// }
+	eshopts_pop
+}
+
+# @FUNCTION: replace_all_version_separators
+# @USAGE: <replacement> [subject]
+# @DESCRIPTION:
+# Replace every version separator in $2 (defaults to $PV) with $1.
+# '_' 1b.2.3 -> 1b_2_3
+replace_all_version_separators() {
+	local -a comps
+	comps=($(get_all_version_components "${2:-${PV}}"))
+	# substitute per element, flatten with spaces, then drop the spaces
+	local joined=${comps[@]//[-._]/$1}
+	echo ${joined// }
+}
+
+# @FUNCTION: delete_version_separator
+# @USAGE: <search> [subject]
+# @DESCRIPTION:
+# Delete the $1th separator in $2 (defaults to $PV if $2 is not supplied). If
+# there are fewer than $1 separators, don't change anything.
+# 1 1.2.3 -> 12.3
+# 2 1.2.3 -> 1.23
+# 1 1b-2.3 -> 1b2.3
+# Rather than being a number, $1 can be a separator character such as '-', '.'
+# or '_'. In this case, the first separator of this kind is deleted.
+delete_version_separator() {
+	# deleting is just replacing with the empty string
+	replace_version_separator "$1" "" "$2"
+}
+
+# @FUNCTION: delete_all_version_separators
+# @USAGE: [subject]
+# @DESCRIPTION:
+# Delete every version separator in $1 (defaults to $PV).
+# 1b.2.3 -> 1b23
+delete_all_version_separators() {
+	# deletion is replacement with the empty string
+	replace_all_version_separators "" "$1"
+}
+
+# @FUNCTION: get_version_component_count
+# @USAGE: [version]
+# @DESCRIPTION:
+# Print the number of version components in $1 (defaults to $PV).
+# 1.0.1 -> 3
+# 3.0c-r1 -> 4
+get_version_component_count() {
+	local -a comps=($(get_version_components "${1:-${PV}}"))
+	echo ${#comps[@]}
+}
+
+# @FUNCTION: get_last_version_component_index
+# @USAGE: [version]
+# @DESCRIPTION:
+# Print the index of the last version component in $1 (defaults to $PV),
+# i.e. get_version_component_count - 1.
+# 1.0.1 -> 2
+# 3.0c-r1 -> 3
+get_last_version_component_index() {
+	local -i count
+	count=$(get_version_component_count "${1:-${PV}}")
+	echo $(( count - 1 ))
+}
+
+# @FUNCTION: version_is_at_least
+# @USAGE: <want> [have]
+# @DESCRIPTION:
+# Is $2 (defaults to $PVR) at least version $1? Intended for use in eclasses
+# only. May not be reliable, be sure to do very careful testing before actually
+# using this.
+version_is_at_least() {
+	local want_s="$1" have_s="${2:-${PVR}}" r
+	version_compare "${want_s}" "${have_s}"
+	r=$?
+	# version_compare: 1 = want < have, 2 = equal, 3 = want > have
+	case $r in
+		1|2)
+			return 0
+			;;
+		3)
+			return 1
+			;;
+		*)
+			# any other status indicates a bug in version_compare
+			die "versionator compare bug [atleast, ${want_s}, ${have_s}, ${r}]"
+			;;
+	esac
+}
+
+# @FUNCTION: version_compare
+# @USAGE: <A> <B>
+# @DESCRIPTION:
+# Takes two parameters (A, B) which are versions. If A is an earlier version
+# than B, returns 1. If A is identical to B, return 2. If A is later than B,
+# return 3. You probably want version_is_at_least rather than this function.
+# May not be very reliable. Test carefully before using this.
+# Comparison proceeds in stages: numeric parts, then the single trailing
+# letter, then _suffixes (alpha/beta/pre/rc/p), then the -rN revision.
+version_compare() {
+	eshopts_push -s extglob
+	local ver_a=${1} ver_b=${2} parts_a parts_b
+	local cur_tok_a cur_tok_b num_part_a num_part_b
+	local -i cur_idx_a=0 cur_idx_b=0 prev_idx_a prev_idx_b
+	parts_a=( $(get_all_version_components "${ver_a}" ) )
+	parts_b=( $(get_all_version_components "${ver_b}" ) )
+
+	### compare number parts.
+	local -i inf_loop=0
+	while true; do
+		# defensive bound: no sane version has > 20 numeric components
+		inf_loop+=1
+		((inf_loop > 20)) && \
+			die "versionator compare bug [numbers, ${ver_a}, ${ver_b}]"
+
+		# Store the current index to test later
+		# (integer attribute makes the bare name evaluate arithmetically)
+		prev_idx_a=cur_idx_a
+		prev_idx_b=cur_idx_b
+
+		# grab the current number components
+		cur_tok_a=${parts_a[cur_idx_a]}
+		cur_tok_b=${parts_b[cur_idx_b]}
+
+		# number?
+		if [[ -n ${cur_tok_a} ]] && [[ -z ${cur_tok_a//[[:digit:]]} ]] ; then
+			cur_idx_a+=1
+			[[ ${parts_a[cur_idx_a]} == . ]] \
+				&& cur_idx_a+=1
+		else
+			cur_tok_a=
+		fi
+
+		if [[ -n ${cur_tok_b} ]] && [[ -z ${cur_tok_b//[[:digit:]]} ]] ; then
+			cur_idx_b+=1
+			[[ ${parts_b[cur_idx_b]} == . ]] \
+				&& cur_idx_b+=1
+		else
+			cur_tok_b=
+		fi
+
+		# done with number components?
+		[[ -z ${cur_tok_a} && -z ${cur_tok_b} ]] && break
+
+		# if a component is blank, then it is the lesser value
+		[[ -z ${cur_tok_a} ]] && eshopts_pop && return 1
+		[[ -z ${cur_tok_b} ]] && eshopts_pop && return 3
+
+		# According to PMS, if we are *not* in the first number part, and either
+		# token begins with "0", then we use a different algorithm (that
+		# effectively does floating point comparison)
+		if (( prev_idx_a != 0 && prev_idx_b != 0 )) \
+				&& [[ ${cur_tok_a} == 0* || ${cur_tok_b} == 0* ]] ; then
+
+			# strip trailing zeros
+			cur_tok_a=${cur_tok_a%%+(0)}
+			cur_tok_b=${cur_tok_b%%+(0)}
+
+			# do a *string* comparison of the resulting values: 2 > 11
+			[[ ${cur_tok_a} < ${cur_tok_b} ]] && eshopts_pop && return 1
+			[[ ${cur_tok_a} > ${cur_tok_b} ]] && eshopts_pop && return 3
+		else
+			# to avoid going into octal mode, strip any leading zeros. otherwise
+			# bash will throw a hissy fit on versions like 6.3.068.
+			cur_tok_a=${cur_tok_a##+(0)}
+			cur_tok_b=${cur_tok_b##+(0)}
+
+			# now if a component is blank, it was originally 0 -- make it so
+			: ${cur_tok_a:=0}
+			: ${cur_tok_b:=0}
+
+			# compare
+			((cur_tok_a < cur_tok_b)) && eshopts_pop && return 1
+			((cur_tok_a > cur_tok_b)) && eshopts_pop && return 3
+		fi
+	done
+
+	### number parts equal. compare letter parts.
+	# a missing letter is mapped to '@', which sorts before [a-z]
+	local letter_a=
+	letter_a=${parts_a[cur_idx_a]}
+	if [[ ${#letter_a} -eq 1 && -z ${letter_a/[a-z]} ]] ; then
+		cur_idx_a+=1
+	else
+		letter_a=@
+	fi
+
+	local letter_b=
+	letter_b=${parts_b[cur_idx_b]}
+	if [[ ${#letter_b} -eq 1 && -z ${letter_b/[a-z]} ]] ; then
+		cur_idx_b+=1
+	else
+		letter_b=@
+	fi
+
+	# compare
+	[[ ${letter_a} < ${letter_b} ]] && eshopts_pop && return 1
+	[[ ${letter_a} > ${letter_b} ]] && eshopts_pop && return 3
+
+	### letter parts equal. compare suffixes in order.
+	inf_loop=0
+	while true ; do
+		inf_loop+=1
+		((inf_loop > 20)) && \
+			die "versionator compare bug [numbers, ${ver_a}, ${ver_b}]"
+		# skip over the '_' separator preceding each suffix
+		[[ ${parts_a[cur_idx_a]} == _ ]] && ((cur_idx_a++))
+		[[ ${parts_b[cur_idx_b]} == _ ]] && ((cur_idx_b++))
+
+		cur_tok_a=${parts_a[cur_idx_a]}
+		cur_tok_b=${parts_b[cur_idx_b]}
+		num_part_a=0
+		num_part_b=0
+
+		# split "rc3" into suffix name "rc" and numeric part 3
+		if has ${cur_tok_a%%+([0-9])} "alpha" "beta" "pre" "rc" "p"; then
+			cur_idx_a+=1
+			num_part_a=${cur_tok_a##+([a-z])}
+			# I don't like octal
+			num_part_a=${num_part_a##+(0)}
+			: ${num_part_a:=0}
+			cur_tok_a=${cur_tok_a%%+([0-9])}
+		else
+			cur_tok_a=
+		fi
+
+		if has ${cur_tok_b%%+([0-9])} alpha beta pre rc p; then
+			cur_idx_b+=1
+			num_part_b=${cur_tok_b##+([a-z])}
+			# I still don't like octal
+			num_part_b=${num_part_b##+(0)}
+			: ${num_part_b:=0}
+			cur_tok_b=${cur_tok_b%%+([0-9])}
+		else
+			cur_tok_b=
+		fi
+
+		if [[ ${cur_tok_a} != ${cur_tok_b} ]]; then
+			# suffix ordering: alpha < beta < pre < rc < (none) < p
+			local suffix
+			for suffix in alpha beta pre rc "" p; do
+				[[ ${cur_tok_a} == ${suffix} ]] && eshopts_pop && return 1
+				[[ ${cur_tok_b} == ${suffix} ]] && eshopts_pop && return 3
+			done
+		elif [[ -z ${cur_tok_a} && -z ${cur_tok_b} ]]; then
+			break
+		else
+			# same suffix name: compare the numeric parts
+			((num_part_a < num_part_b)) && eshopts_pop && return 1
+			((num_part_a > num_part_b)) && eshopts_pop && return 3
+		fi
+	done
+
+	# At this point, the only thing that should be left is the -r# part
+	[[ ${parts_a[cur_idx_a]} == - ]] && ((cur_idx_a++))
+	[[ ${parts_b[cur_idx_b]} == - ]] && ((cur_idx_b++))
+
+	# Sanity check
+	if [[ ${parts_a[cur_idx_a]/r+([0-9])} || ${parts_b[cur_idx_b]/r+([0-9])} ]]; then
+		die "versionator compare bug [revisions, ${ver_a}, ${ver_b}]"
+	fi
+
+	# missing revision counts as -r0 (leading zeros stripped to avoid octal)
+	num_part_a=${parts_a[cur_idx_a]#r}
+	num_part_a=${num_part_a##+(0)}
+	: ${num_part_a:=0}
+	num_part_b=${parts_b[cur_idx_b]#r}
+	num_part_b=${num_part_b##+(0)}
+	: ${num_part_b:=0}
+
+	((num_part_a < num_part_b)) && eshopts_pop && return 1
+	((num_part_a > num_part_b)) && eshopts_pop && return 3
+
+	### no differences.
+	eshopts_pop
+	return 2
+}
+
+# @FUNCTION: version_sort
+# @USAGE: <version> [more versions...]
+# @DESCRIPTION:
+# Returns its parameters sorted, highest version last. We're using a quadratic
+# algorithm (selection sort) for simplicity, so don't call it with more than a
+# few dozen items.  Uses version_compare, so be careful.
+version_sort() {
+	eshopts_push -s extglob
+	local items=
+	local -i left=0
+	items=("$@")
+	# selection sort: repeatedly move the smallest remaining item to 'left'
+	while ((left < ${#items[@]})); do
+		local -i lowest_idx=left
+		local -i idx=lowest_idx+1
+		while ((idx < ${#items[@]})); do
+			# status 3 means items[lowest_idx] > items[idx]
+			version_compare "${items[lowest_idx]}" "${items[idx]}"
+			[[ $? -eq 3 ]] && lowest_idx=idx
+			idx+=1
+		done
+		# swap the found minimum into position
+		local tmp=${items[lowest_idx]}
+		items[lowest_idx]=${items[left]}
+		items[left]=${tmp}
+		left+=1
+	done
+	echo ${items[@]}
+	eshopts_pop
+}
+
+# @FUNCTION: version_format_string
+# @USAGE: <format> [version]
+# @DESCRIPTION:
+# Reformat complicated version strings. The first argument is the string
+# to reformat with while the rest of the args are passed on to the
+# get_version_components function. You should make sure to single quote
+# the first argument since it'll have variables that get delayed expansions.
+# NOTE: the format string is passed through eval — never feed it
+# untrusted input.
+# @EXAMPLE:
+# P="cow-hat-1.2.3_p4"
+# MY_P=$(version_format_string '${PN}_source_$1_$2-$3_$4')
+# Now MY_P will be: cow-hat_source_1_2-3_p4
+version_format_string() {
+	local fstr=$1
+	shift
+	# expose the version components as $1, $2, ... for the format string
+	set -- $(get_version_components "$@")
+	eval echo "${fstr}"
+}
+
+fi
diff --git a/eclass/vim-doc.eclass b/eclass/vim-doc.eclass
new file mode 100644
index 000000000000..bad18833cc3e
--- /dev/null
+++ b/eclass/vim-doc.eclass
@@ -0,0 +1,73 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# This eclass is used by vim.eclass and vim-plugin.eclass to update
+# the documentation tags. This is necessary since vim doesn't look in
+# /usr/share/vim/vimfiles/doc for documentation; it only uses the
+# versioned directory, for example /usr/share/vim/vim62/doc
+#
+# We depend on vim being installed, which is satisfied by either the
+# DEPEND in vim-plugin or by whatever version of vim is being
+# installed by the eclass.
+
+
+# update_vim_helptags
+# Rebuild vim's help tags in every versioned /usr/share/vim/vimNN/doc
+# directory: symlink the txt docs from the version-independent vimfiles
+# tree into each versioned dir, then run ":helptags" with a vim binary.
+# Also prunes stale versioned dirs that contain nothing but an old tags
+# file.
+update_vim_helptags() {
+	# EAPIs 0-2 have no EROOT; fall back to ROOT outside prefix
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	local vimfiles vim d s
+
+	# This is where vim plugins are installed
+	vimfiles="${EROOT}"/usr/share/vim/vimfiles
+
+	if [[ $PN != vim-core ]]; then
+		# Find a suitable vim binary for updating tags :helptags
+		vim=$(type -P vim 2>/dev/null)
+		[[ -z "$vim" ]] && vim=$(type -P gvim 2>/dev/null)
+		[[ -z "$vim" ]] && vim=$(type -P kvim 2>/dev/null)
+		if [[ -z "$vim" ]]; then
+			ewarn "No suitable vim binary to rebuild documentation tags"
+		fi
+	fi
+
+	# Make vim not try to connect to X. See :help gui-x11-start
+	# in vim for how this evil trickery works.
+	# (running through a symlink named "tagvim" disables the gvim autodetect)
+	if [[ -n "${vim}" ]] ; then
+		ln -s "${vim}" "${T}/tagvim"
+		vim="${T}/tagvim"
+	fi
+
+	# Install the documentation symlinks into the versioned vim
+	# directory and run :helptags
+	for d in "${EROOT%/}"/usr/share/vim/vim[0-9]*; do
+		[[ -d "$d/doc" ]] || continue	# catch a failed glob
+
+		# Remove links, and possibly remove stale dirs
+		find $d/doc -name \*.txt -type l | while read s; do
+			[[ $(readlink "$s") = $vimfiles/* ]] && rm -f "$s"
+		done
+		if [[ -f "$d/doc/tags" && $(find "$d" | wc -l | tr -d ' ') = 3 ]]; then
+			# only the tags file is left, i.e. exactly these three entries:
+			# /usr/share/vim/vim61
+			# /usr/share/vim/vim61/doc
+			# /usr/share/vim/vim61/doc/tags
+			einfo "Removing $d"
+			rm -r "$d"
+			continue
+		fi
+
+		# Re-create / install new links
+		if [[ -d $vimfiles/doc ]]; then
+			ln -s $vimfiles/doc/*.txt $d/doc 2>/dev/null
+		fi
+
+		# Update tags; need a vim binary for this
+		if [[ -n "$vim" ]]; then
+			einfo "Updating documentation tags in $d"
+			# batch mode: no X, no config files, quit when done
+			DISPLAY= $vim -u NONE -U NONE -T xterm -X -n -f \
+				'+set nobackup nomore' \
+				"+helptags $d/doc" \
+				'+qa!' </dev/null &>/dev/null
+		fi
+	done
+
+	# clean up the temporary symlink
+	[[ -n "${vim}" && -f "${vim}" ]] && rm "${vim}"
+}
diff --git a/eclass/vim-plugin.eclass b/eclass/vim-plugin.eclass
new file mode 100644
index 000000000000..4da8bd3c9978
--- /dev/null
+++ b/eclass/vim-plugin.eclass
@@ -0,0 +1,157 @@
+# Copyright 1999-2013 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# This eclass simplifies installation of app-vim plugins into
+# /usr/share/vim/vimfiles. This is a version-independent directory
+# which is read automatically by vim. The only exception is
+# documentation, for which we make a special case via vim-doc.eclass
+
+inherit vim-doc
+EXPORT_FUNCTIONS src_install pkg_postinst pkg_postrm
+
+VIM_PLUGIN_VIM_VERSION="${VIM_PLUGIN_VIM_VERSION:-7.3}"
+
+DEPEND="|| ( >=app-editors/vim-${VIM_PLUGIN_VIM_VERSION}
+ >=app-editors/gvim-${VIM_PLUGIN_VIM_VERSION} )"
+RDEPEND="${DEPEND}"
+if [[ ${PV} != 9999* ]] ; then
+ SRC_URI="mirror://gentoo/${P}.tar.bz2
+ http://dev.gentoo.org/~radhermit/vim/${P}.tar.bz2"
+fi
+SLOT="0"
+
+# vim-plugin_src_install
+# Install the plugin: fix ownership/permissions when building as root,
+# strip unwanted files, install HTML/plain docs separately, then move the
+# whole remaining source tree to /usr/share/vim/vimfiles.
+vim-plugin_src_install() {
+	# EAPIs 0-2 have no ED; fall back to D outside prefix
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
+	local f
+
+	if use !prefix && [[ ${EUID} -eq 0 ]] ; then
+		ebegin "Fixing file permissions"
+		# Make sure perms are good
+		chmod -R a+rX "${S}" || die "chmod failed"
+		find "${S}" -user 'portage' -exec chown root '{}' \; || die "chown failed"
+		# BSD/darwin userlands use group wheel where GNU uses root
+		if use userland_BSD || [[ ${CHOST} == *-darwin* ]] ; then
+			find "${S}" -group 'portage' -exec chgrp wheel '{}' \; || die "chgrp failed"
+		else
+			find "${S}" -group 'portage' -exec chgrp root '{}' \; || die "chgrp failed"
+		fi
+		eend $?
+	fi
+
+	# Remove unwanted files that may exist
+	# (dotfiles like VCS leftovers, plus any Makefile*)
+	rm -rf .[^.] .??* Makefile*
+
+	# Install non-vim-help-docs
+	cd "${S}"
+	for f in *; do
+		[[ -f "${f}" ]] || continue
+		if [[ "${f}" = *.html ]]; then
+			dohtml "${f}"
+		else
+			dodoc "${f}"
+		fi
+		# remove after installing so it doesn't end up in vimfiles too
+		rm -f "${f}"
+	done
+
+	# Install remainder of plugin
+	cd "${WORKDIR}"
+	dodir /usr/share/vim
+	mv "${S}" "${ED}"/usr/share/vim/vimfiles
+
+	# Fix remaining bad permissions
+	chmod -R -x+X "${ED}"/usr/share/vim/vimfiles/ || die "chmod failed"
+}
+
+vim-plugin_pkg_postinst() {
+ update_vim_helptags # from vim-doc
+ update_vim_afterscripts # see below
+ display_vim_plugin_help # see below
+}
+
+# vim-plugin_pkg_postrm
+# Post-unmerge hook: refresh help tags and generated after/-scripts, then
+# prune now-empty directories from the vimfiles tree.
+vim-plugin_pkg_postrm() {
+	# EAPIs 0-2 have no EPREFIX; clear it outside prefix
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	update_vim_helptags	# from vim-doc
+	update_vim_afterscripts	# see below
+
+	# Remove empty dirs; this allows
+	# /usr/share/vim to be removed if vim-core is unmerged
+	# (rmdir only succeeds on empty dirs; errors are expected and ignored)
+	find "${EPREFIX}/usr/share/vim/vimfiles" -depth -type d -exec rmdir {} \; 2>/dev/null
+}
+
+# update_vim_afterscripts: create scripts in
+# /usr/share/vim/vimfiles/after/* comprised of the snippets in
+# /usr/share/vim/vimfiles/after/*/*.d, and remove generated scripts
+# whose snippet directory has gone away or become empty.
+update_vim_afterscripts() {
+	# EAPIs 0-2 have no EROOT/EPREFIX; fall back to ROOT / empty prefix
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
+	local d f afterdir="${EROOT}"/usr/share/vim/vimfiles/after
+
+	# Nothing to do if the dir isn't there
+	[ -d "${afterdir}" ] || return 0
+
+	einfo "Updating scripts in ${EPREFIX}/usr/share/vim/vimfiles/after"
+	find "${afterdir}" -type d -name \*.vim.d | \
+	while read d; do
+		echo '" Generated by update_vim_afterscripts' > "${d%.d}"
+		# -maxdepth is a global option and must precede the tests;
+		# placing it after -name/-type makes GNU find warn and is
+		# rejected by strictly POSIX find implementations
+		find "${d}" -maxdepth 1 -name \*.vim -type f -print0 | \
+			sort -z | xargs -0 cat >> "${d%.d}"
+	done
+
+	einfo "Removing dead scripts in ${EPREFIX}/usr/share/vim/vimfiles/after"
+	find "${afterdir}" -type f -name \*.vim | \
+	while read f; do
+		# only touch files we generated ourselves
+		[[ "$(head -n 1 ${f})" == '" Generated by update_vim_afterscripts' ]] \
+			|| continue
+		# This is a generated file, but might be abandoned. Check
+		# if there's no corresponding .d directory, or if the
+		# file's effectively empty
+		if [[ ! -d "${f}.d" || -z "$(grep -v '^"' "${f}")" ]]; then
+			rm -f "${f}"
+		fi
+	done
+}
+
+# Display a message with the plugin's help file if one is available. Uses the
+# VIM_PLUGIN_HELPFILES env var. If multiple help files are available, they
+# should be separated by spaces. If no help files are available, but the env
+# var VIM_PLUGIN_HELPTEXT is set, that is displayed instead. Finally, if we
+# have nothing else, display a link to VIM_PLUGIN_HELPURI. An extra message
+# regarding enabling filetype plugins is displayed if VIM_PLUGIN_MESSAGES
+# includes the word "filetype".
+# display_vim_plugin_help
+# On first install only (not on re-merge), print usage hints: the plugin's
+# vim help topics (VIM_PLUGIN_HELPFILES), or free-form text
+# (VIM_PLUGIN_HELPTEXT), or a documentation URL (VIM_PLUGIN_HELPURI) — in
+# that order of preference.  Additionally, mention filetype-plugin setup
+# when VIM_PLUGIN_MESSAGES contains "filetype".
+display_vim_plugin_help() {
+	local h
+
+	# has_version is false on the very first merge of this package
+	if ! has_version ${CATEGORY}/${PN} ; then
+		if [[ -n "${VIM_PLUGIN_HELPFILES}" ]] ; then
+			elog " "
+			elog "This plugin provides documentation via vim's help system. To"
+			elog "view it, use:"
+			for h in ${VIM_PLUGIN_HELPFILES} ; do
+				elog "    :help ${h}"
+			done
+			elog " "
+
+		elif [[ -n "${VIM_PLUGIN_HELPTEXT}" ]] ; then
+			elog " "
+			# print the help text line by line through elog
+			while read h ; do
+				elog "$h"
+			done <<<"${VIM_PLUGIN_HELPTEXT}"
+			elog " "
+
+		elif [[ -n "${VIM_PLUGIN_HELPURI}" ]] ; then
+			elog " "
+			elog "Documentation for this plugin is available online at:"
+			elog "    ${VIM_PLUGIN_HELPURI}"
+			elog " "
+		fi
+
+		if has "filetype" "${VIM_PLUGIN_MESSAGES}" ; then
+			elog "This plugin makes use of filetype settings. To enable these,"
+			elog "add lines like:"
+			elog "    filetype plugin on"
+			elog "    filetype indent on"
+			elog "to your ~/.vimrc file."
+			elog " "
+		fi
+	fi
+}
diff --git a/eclass/vim-spell.eclass b/eclass/vim-spell.eclass
new file mode 100644
index 000000000000..05c38dadc1cc
--- /dev/null
+++ b/eclass/vim-spell.eclass
@@ -0,0 +1,127 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+#
+# Original Author: Ciaran McCreesh <ciaranm@gentoo.org>
+# Maintainers: Vim Herd <vim@gentoo.org>
+# Purpose: Simplify installing spell files for vim7
+#
+
+# How to make a vim spell file package using prebuilt spell lists
+# from upstream (${CODE} is the language's two letter code):
+#
+# * Get the ${CODE}.*.spl, ${CODE}.*.sug (if your language has them) and
+# README_${CODE}.txt files. Currently they're at
+# ftp://ftp.vim.org/pub/vim/unstable/runtime/spell/ (except for English,
+# which should be taken from CVS instead).
+#
+# * Stick them in vim-spell-${CODE}-$(date --iso | tr -d - ).tar.bz2 . Make sure
+# that they're in the appropriately named subdirectory to avoid having to mess
+# with S=.
+#
+# * Upload the tarball to the Gentoo mirrors.
+#
+# * (for now) Add your spell file to package.mask next to the other vim7
+# things. The vim herd will handle unmasking your spell packages when vim7
+# comes out of package.mask.
+#
+# * Create the app-vim/vim-spell-${CODE} package. You should base your ebuild
+# upon app-vim/vim-spell-en. You will need to change VIM_SPELL_LANGUAGE,
+# KEYWORDS and LICENSE. Check the license carefully! The README will tell
+# you what it is.
+#
+# * Don't forget metadata.xml. You should list vim as the herd, and yourself
+# as the maintainer (there is no need to join the vim herd just for spell
+# files):
+#
+# <?xml version="1.0" encoding="UTF-8"?>
+# <!DOCTYPE pkgmetadata SYSTEM "http://www.gentoo.org/dtd/metadata.dtd">
+# <pkgmetadata>
+# <herd>vim</herd>
+# <maintainer>
+# <email>your-name@gentoo.org</email>
+# </maintainer>
+# <longdescription lang="en">
+# Vim spell files for French (fr). Supported character sets are
+# UTF-8 and latin1.
+# </longdescription>
+# </pkgmetadata>
+#
+# * Send an email to vim@gentoo.org to let us know.
+#
+# Don't forget to update your package as necessary.
+#
+# If there isn't an upstream-provided pregenerated spell file for your language
+# yet, read :help spell.txt from inside vim7 for instructions on how to create
+# spell files. It's best to let upstream know if you've generated spell files
+# for another language rather than keeping them Gentoo-specific.
+
+inherit eutils
+
+EXPORT_FUNCTIONS src_install pkg_postinst
+
+IUSE=""
+DEPEND="|| ( >=app-editors/vim-7_alpha
+ >=app-editors/gvim-7_alpha )"
+RDEPEND="${DEPEND}"
+SRC_URI="mirror://gentoo/${P}.tar.bz2"
+SLOT="0"
+
+if [[ -z "${VIM_SPELL_CODE}" ]] ; then
+ VIM_SPELL_CODE="${PN/vim-spell-/}"
+fi
+
+DESCRIPTION="vim spell files: ${VIM_SPELL_LANGUAGE} (${VIM_SPELL_CODE})"
+
+if [[ -z "${HOMEPAGE}" ]] ; then
+ HOMEPAGE="http://www.vim.org/"
+fi
+
+vim-spell_src_install() {
+ target="/usr/share/vim/vimfiles/spell/"
+ dodir "${target}"
+ insinto "${target}"
+
+ had_spell_file=
+ for f in *.spl ; do
+ if [[ -f "${f}" ]]; then
+ doins "${f}"
+ had_spell_file="yes"
+ fi
+ done
+
+ for f in *.sug ; do
+ if [[ -f "${f}" ]]; then
+ doins "${f}"
+ fi
+ done
+
+ for f in README* ; do
+ dodoc "${f}"
+ done
+
+ [[ -z "${had_spell_file}" ]] && die "Didn't install any spell files?"
+}
+
+# vim-spell_pkg_postinst
+# Tell the user how to enable spell checking for this language, and list
+# the vim-internal encodings for which spell files were installed
+# (derived from the <code>.<enc>.spl file names on disk).
+vim-spell_pkg_postinst() {
+	# EAPIs 0-2 have no EROOT; fall back to ROOT outside prefix
+	has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
+	target="/usr/share/vim/vimfiles/spell/"
+	echo
+	elog "To enable ${VIM_SPELL_LANGUAGE} spell checking, use"
+	elog "    :setlocal spell spelllang=${VIM_SPELL_CODE}"
+	echo
+	elog "The following (Vim internal, not file) encodings are supported for"
+	elog "this language:"
+	for f in "${EROOT}/${target}/${VIM_SPELL_CODE}".*.spl ; do
+		# extract the encoding between "<code>." and ".spl"
+		enc="${f##*/${VIM_SPELL_CODE}.}"
+		enc="${enc%.spl}"
+		[[ -z "${enc}" ]] && continue
+		elog "    ${enc}"
+	done
+	echo
+	elog "For further documentation, use:"
+	elog "    :help spell"
+	echo
+	epause
+}
diff --git a/eclass/virtualx.eclass b/eclass/virtualx.eclass
new file mode 100644
index 000000000000..69d9230b2df8
--- /dev/null
+++ b/eclass/virtualx.eclass
@@ -0,0 +1,199 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: virtualx.eclass
+# @MAINTAINER:
+# x11@gentoo.org
+# @AUTHOR:
+# Original author: Martin Schlemmer <azarah@gentoo.org>
+# @BLURB: This eclass can be used for packages that need a working X environment to build.
+
+# @ECLASS-VARIABLE: VIRTUALX_REQUIRED
+# @DESCRIPTION:
+# Variable specifying the dependency on xorg-server and xhost.
+# Possible special values are "always" and "manual", which specify
+# the dependency to be set unconditionally or not at all.
+# Any other value is taken as the name of a USE flag that controls
+# the dependency (e.g. VIRTUALX_REQUIRED="kde" will wrap the dependency
+# in "kde? ( )" and add kde to IUSE).
+: ${VIRTUALX_REQUIRED:=test}
+
+# @ECLASS-VARIABLE: VIRTUALX_DEPEND
+# @DESCRIPTION:
+# Dep string available for use outside of eclass, in case a more
+# complicated dep is needed.
+# You can specify the variable BEFORE inherit to add more dependencies.
+VIRTUALX_DEPEND="${VIRTUALX_DEPEND}
+	!prefix? ( x11-base/xorg-server[xvfb] )
+	x11-apps/xhost
+"
+
+# @ECLASS-VARIABLE: VIRTUALX_COMMAND
+# @DESCRIPTION:
+# Command (or eclass function call) to be run in the X11 environment
+# (within virtualmake function).
+: ${VIRTUALX_COMMAND:="emake"}
+
+# Bail out early on EAPIs this eclass does not support.
+has "${EAPI:-0}" 0 1 && die "virtualx eclass require EAPI=2 or newer."
+
+# Map VIRTUALX_REQUIRED onto the ebuild metadata variables.
+case ${VIRTUALX_REQUIRED} in
+	manual)
+		# The ebuild manages (R)DEPEND/IUSE itself.
+		;;
+	always)
+		# Unconditional build-time dependency.
+		DEPEND="${VIRTUALX_DEPEND}"
+		RDEPEND=""
+		;;
+	optional|tests)
+		# deprecated section YAY.
+		ewarn "QA: VIRTUALX_REQUIRED=optional and VIRTUALX_REQUIRED=tests are deprecated."
+		ewarn "QA: You can drop the variable definition completely from ebuild,"
+		ewarn "QA: because it is default behaviour."
+
+		if [[ -n ${VIRTUALX_USE} ]]; then
+			# so they like to specify the useflag
+			ewarn "QA: VIRTUALX_USE variable is deprecated."
+			ewarn "QA: Please read eclass manpage to find out how to use VIRTUALX_REQUIRED"
+			ewarn "QA: to achieve the same behaviour."
+		fi
+
+		# Historical default USE flag when none was specified.
+		[[ -z ${VIRTUALX_USE} ]] && VIRTUALX_USE="test"
+		DEPEND="${VIRTUALX_USE}? ( ${VIRTUALX_DEPEND} )"
+		RDEPEND=""
+		IUSE="${VIRTUALX_USE}"
+		;;
+	*)
+		# Any other value is treated as a USE flag name.
+		DEPEND="${VIRTUALX_REQUIRED}? ( ${VIRTUALX_DEPEND} )"
+		RDEPEND=""
+		IUSE="${VIRTUALX_REQUIRED}"
+		;;
+esac
+
+# @FUNCTION: virtualmake
+# @DESCRIPTION:
+# Function which attach to running X session or start new Xvfb session
+# where the VIRTUALX_COMMAND variable content gets executed.
+virtualmake() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	local i=0
+	local retval=0
+	local OLD_SANDBOX_ON="${SANDBOX_ON}"
+	local XVFB=$(type -p Xvfb)
+	local XHOST=$(type -p xhost)
+	local xvfbargs="-screen 0 1280x1024x24"
+
+	# backcompat for maketype
+	if [[ -n ${maketype} ]]; then
+		ewarn "QA: ebuild is exporting \$maketype=${maketype}"
+		ewarn "QA: Ebuild should be migrated to use VIRTUALX_COMMAND=${maketype} instead."
+		ewarn "QA: Setting VIRTUALX_COMMAND to \$maketype conveniently for now."
+		VIRTUALX_COMMAND=${maketype}
+	fi
+
+	# If $DISPLAY is not set, or xhost cannot connect to an X
+	# display, then do the Xvfb hack.
+	if [[ -n ${XVFB} && -n ${XHOST} ]] && \
+		( [[ -z ${DISPLAY} ]] || ! (${XHOST} &>/dev/null) ) ; then
+		debug-print "${FUNCNAME}: running Xvfb hack"
+		# Clear any inherited X authority file.
+		export XAUTHORITY=
+		# The following is derived from Mandrake's hack to allow
+		# compiling without the X display
+
+		einfo "Scanning for an open DISPLAY to start Xvfb ..."
+		# If we are in a chrooted environment, and there is already a
+		# X server started outside of the chroot, Xvfb will fail to start
+		# on the same display (most cases this is :0 ), so make sure
+		# Xvfb is started, else bump the display number
+		#
+		# Azarah - 5 May 2002
+		XDISPLAY=$(i=0; while [[ -f /tmp/.X${i}-lock ]] ; do ((i++));done; echo ${i})
+		debug-print "${FUNCNAME}: XDISPLAY=${XDISPLAY}"
+
+		# We really do not want SANDBOX enabled here
+		export SANDBOX_ON="0"
+
+		debug-print "${FUNCNAME}: ${XVFB} :${XDISPLAY} ${xvfbargs}"
+		${XVFB} :${XDISPLAY} ${xvfbargs} &>/dev/null &
+		sleep 2
+
+		# Keep bumping the display number until Xvfb creates its lock
+		# file.  NOTE(review): inherently racy - another process may
+		# claim a display between the scan above and the Xvfb start.
+		local start=${XDISPLAY}
+		while [[ ! -f /tmp/.X${XDISPLAY}-lock ]]; do
+			# Stop trying after 15 tries
+			if ((XDISPLAY - start > 15)) ; then
+				# Re-run Xvfb in the foreground so its error lands in the log.
+				eerror "'${XVFB} :${XDISPLAY} ${xvfbargs}' returns:"
+				echo
+				${XVFB} :${XDISPLAY} ${xvfbargs}
+				echo
+				eerror "If possible, correct the above error and try your emerge again."
+				die "Unable to start Xvfb"
+			fi
+
+			((XDISPLAY++))
+			debug-print "${FUNCNAME}: ${XVFB} :${XDISPLAY} ${xvfbargs}"
+			${XVFB} :${XDISPLAY} ${xvfbargs} &>/dev/null &
+			sleep 2
+		done
+
+		# Now enable SANDBOX again if needed.
+		export SANDBOX_ON="${OLD_SANDBOX_ON}"
+
+		einfo "Starting Xvfb on \$DISPLAY=${XDISPLAY} ..."
+
+		export DISPLAY=:${XDISPLAY}
+		# Do not break on error, but setup $retval, as we need
+		# to kill Xvfb
+		debug-print "${FUNCNAME}: ${VIRTUALX_COMMAND} \"$@\""
+		if has "${EAPI}" 2 3; then
+			# presumably nonfatal is unavailable in EAPI 2/3 - confirm
+			${VIRTUALX_COMMAND} "$@"
+			retval=$?
+		else
+			nonfatal ${VIRTUALX_COMMAND} "$@"
+			retval=$?
+		fi
+
+		# Now kill Xvfb
+		# NOTE(review): assumes the lock file contains the Xvfb pid - confirm.
+		kill $(cat /tmp/.X${XDISPLAY}-lock)
+	else
+		debug-print "${FUNCNAME}: attaching to running X display"
+		# Normal make if we can connect to an X display
+		debug-print "${FUNCNAME}: ${VIRTUALX_COMMAND} \"$@\""
+		${VIRTUALX_COMMAND} "$@"
+		retval=$?
+	fi
+
+	# die if our command failed
+	[[ ${retval} -ne 0 ]] && die "${FUNCNAME}: the ${VIRTUALX_COMMAND} failed."
+
+	return 0 # always return 0, it can be altered by failed kill for Xvfb
+}
+
+# @FUNCTION: Xmake
+# @DESCRIPTION:
+# Same as "make", but set up the Xvfb hack if needed.
+# Deprecated call.
+Xmake() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	ewarn "QA: you should not execute make directly"
+	ewarn "QA: rather execute Xemake -j1 if you have issues with parallel make"
+	# Delegate to virtualmake with the command pinned to serial emake.
+	local -x VIRTUALX_COMMAND="emake -j1"
+	virtualmake "$@"
+}
+
+# @FUNCTION: Xemake
+# @DESCRIPTION:
+# Same as "emake", but set up the Xvfb hack if needed.
+Xemake() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Run emake under the (possibly virtual) X display.
+	local -x VIRTUALX_COMMAND="emake"
+	virtualmake "$@"
+}
+
+# @FUNCTION: Xeconf
+# @DESCRIPTION:
+# Same as "econf", but set up the Xvfb hack if needed.
+Xeconf() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Run econf under the (possibly virtual) X display.
+	local -x VIRTUALX_COMMAND="econf"
+	virtualmake "$@"
+}
diff --git a/eclass/virtuoso.eclass b/eclass/virtuoso.eclass
new file mode 100644
index 000000000000..e0ac542a7c29
--- /dev/null
+++ b/eclass/virtuoso.eclass
@@ -0,0 +1,131 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: virtuoso.eclass
+# @MAINTAINER:
+# Maciej Mrozowski <reavertm@gentoo.org>
+# Chris Reffett <creffett@gentoo.org>
+#
+# @BLURB: Provides splitting functionality for Virtuoso
+# @DESCRIPTION:
+# This eclass provides common code for splitting Virtuoso OpenSource database
+
+# Supported EAPIs.
+case ${EAPI:-0} in
+	2|3|4|5) : ;;
+	*) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+inherit autotools multilib eutils
+
+MY_P="virtuoso-opensource-${PV}"
+
+# Live (CVS) versus release-tarball distribution.
+case ${PV} in
+	*9999*)
+		ECVS_SERVER="virtuoso.cvs.sourceforge.net:/cvsroot/virtuoso"
+		ECVS_PROJECT='virtuoso'
+		SRC_URI=""
+		inherit cvs
+		;;
+	*)
+		# Use this variable to determine distribution method (live or tarball)
+		TARBALL="${MY_P}.tar.gz"
+		SRC_URI="mirror://sourceforge/virtuoso/${TARBALL} mirror://gentoo/VOS-genpatches-${PV}.tar.bz2"
+		;;
+esac
+
+EXPORT_FUNCTIONS src_prepare src_configure
+
+# Set some defaults
+HOMEPAGE='http://virtuoso.openlinksw.com/wiki/main/Main/'
+LICENSE='GPL-2'
+SLOT='0'
+
+# libtool - presumably needed by eautoreconf in src_prepare; confirm.
+DEPEND='
+	>=sys-devel/libtool-2.2.6a
+'
+RDEPEND=''
+
+S="${WORKDIR}/${MY_P}"
+
+# @FUNCTION: virtuoso_src_prepare
+# @DESCRIPTION:
+# 1. Applies common release patches
+# 2. Applies package-specific patches (from ${FILESDIR}/, PATCHES can be used)
+# 3. Applies user patches from /etc/portage/patches/${CATEGORY}/${PN}/
+# 4. Modifies makefiles for split build. Uses VOS_EXTRACT
+# 5. eautoreconf
+virtuoso_src_prepare() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Apply the common release patchset (from the genpatches tarball).
+	EPATCH_SUFFIX='patch' EPATCH_FORCE='yes' epatch
+	# NOTE(review): pushd has no matching popd; the function deliberately(?)
+	# leaves ${S} as the working directory for eautoreconf below.
+	pushd "${S}" >/dev/null
+	[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
+
+	debug-print "$FUNCNAME: applying user patches"
+	epatch_user
+
+
+	# @ECLASS-VARIABLE: VOS_EXTRACT
+	# @DESCRIPTION:
+	# Lists any subdirectories that are required to be extracted
+	# and enabled in Makefile.am's for current package.
+	if [[ -n ${VOS_EXTRACT} ]]; then
+		# Comment out everything
+		find . -name Makefile.am -exec \
+			sed -e '/SUBDIRS\s*=/s/^/# DISABLED /g' -i {} + \
+			|| die 'failed to disable subdirs'
+
+		# Uncomment specified
+		local path
+		for path in ${VOS_EXTRACT}; do
+			if [[ -d "${path}" ]]; then
+				# Uncomment leaf
+				if [[ -f "${path}"/Makefile.am ]]; then
+					sed -e '/^# DISABLED \s*SUBDIRS\s*=/s/# DISABLED //g' \
+						-i "${path}"/Makefile.am || die "failed to uncomment leaf in ${path}/Makefile.am"
+				fi
+				# Process remaining path elements
+				while true; do
+					local subdir=`basename "${path}"`
+					path=`dirname "${path}"`
+					if [[ -f "${path}"/Makefile.am ]]; then
+						# Uncomment if necessary
+						# NOTE(review): unlike the sibling seds, this one has
+						# no || die, so a failure here goes unnoticed.
+						sed -e '/^# DISABLED \s*SUBDIRS\s*=/s/.*/SUBDIRS =/g' \
+							-i "${path}"/Makefile.am
+						# Append subdirs if not there already
+						if [[ -z `sed -ne "/SUBDIRS\s*=.*${subdir}\b/p" "${path}"/Makefile.am` ]]; then
+							sed -e "/^SUBDIRS\s*=/s|$| ${subdir}|" \
+								-i "${path}"/Makefile.am || die "failed to append ${subdir}"
+						fi
+					fi
+					[[ "${path}" = . ]] && break
+				done
+			fi
+		done
+	fi
+
+	eautoreconf
+}
+
+# @FUNCTION: virtuoso_src_configure
+# @DESCRIPTION:
+# Runs ./configure with common and user options specified via myconf variable
+virtuoso_src_configure() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Override some variables to make tests work
+	# (point ISQL/SERVER at the already-installed virtuoso-server tools).
+	if [[ ${PN} != virtuoso-server ]]; then
+		# EAPI 2 outside prefix has no EPREFIX; force it empty.
+		[[ ${EAPI} == 2 ]] && ! use prefix && EPREFIX=
+		export ISQL="${EPREFIX}"/usr/bin/isql-v
+		export SERVER="${EPREFIX}"/usr/bin/virtuoso-t
+	fi
+
+	# ${myconf} is intentionally unquoted so ebuilds can pass extra
+	# whitespace-separated configure options through it.
+	econf \
+		--with-layout=gentoo \
+		--localstatedir="${EPREFIX}"/var \
+		--enable-shared \
+		--with-pthreads \
+		--without-internal-zlib \
+		${myconf}
+}
diff --git a/eclass/vmware-bundle.eclass b/eclass/vmware-bundle.eclass
new file mode 100644
index 000000000000..6a897bb267ca
--- /dev/null
+++ b/eclass/vmware-bundle.eclass
@@ -0,0 +1,83 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: vmware-bundle.eclass
+# @MAINTAINER:
+# vmware@gentoo.org
+# @AUTHOR:
+# Matt Whitlock <matt@whitlock.name>
+# @BLURB: Provides extract functionality for vmware products bundles
+
+DEPEND="dev-libs/libxslt"
+
+# Extract one named component from a VMware product bundle.
+# $1 = bundle file, $2 = component name (glob allowed), $3 = dest dir (default $2).
+vmware-bundle_extract-bundle-component() {
+	local bundle=${1:?} component=${2:?} dest=${3:-${2}}
+	# Stylesheet that prints "offset size name" per component listed in
+	# the bundle's XML manifest.
+	cat > "${T}"/list-bundle-components.xsl <<-EOF
+		<?xml version="1.0" encoding="ISO-8859-1"?>
+		<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+		<xsl:output omit-xml-declaration="yes"/>
+		<xsl:template match="text()"/>
+		<xsl:template match="/bundle/components/component">
+			<xsl:value-of select="@offset"/>
+			<xsl:text> </xsl:text>
+			<xsl:value-of select="@size"/>
+			<xsl:text> </xsl:text>
+			<xsl:value-of select="@name"/>
+			<xsl:text>&#10;</xsl:text>
+		</xsl:template>
+		</xsl:stylesheet>
+	EOF
+	# The bundle footer stores manifest/data offsets and sizes as integers
+	# at fixed byte positions from the end of the file; read them with od.
+	local -i bundle_size=$(stat -L -c'%s' "${bundle}")
+	local -i bundle_manifestOffset=$(od -An -j$((bundle_size-36)) -N4 -tu4 "${bundle}")
+	local -i bundle_manifestSize=$(od -An -j$((bundle_size-40)) -N4 -tu4 "${bundle}")
+	local -i bundle_dataOffset=$(od -An -j$((bundle_size-44)) -N4 -tu4 "${bundle}")
+	local -i bundle_dataSize=$(od -An -j$((bundle_size-52)) -N8 -tu8 "${bundle}")
+	# Slice the manifest out of the bundle, list components, extract matches.
+	tail -c+$((bundle_manifestOffset+1)) "${bundle}" 2> /dev/null | head -c$((bundle_manifestSize)) |
+	xsltproc "${T}"/list-bundle-components.xsl - |
+	while read -r component_offset component_size component_name ; do
+		# Unquoted RHS: ${component} is matched as a pattern, not a literal.
+		# NOTE(review): this while runs in a pipeline subshell.
+		if [[ ${component_name} == ${component} ]] ; then
+			ebegin "Extracting '${component_name}' component from '$(basename "${bundle}")'"
+			vmware-bundle_extract-component "${bundle}" "${dest}" $((bundle_dataOffset+component_offset))
+			eend
+		fi
+	done
+}
+
+# Extract all files of one component.
+# $1 = bundle/component file, $2 = dest dir (default .), $3 = component offset.
+vmware-bundle_extract-component() {
+	local component=${1:?} dest=${2:-.}
+	local -i offset=${3}
+	# Stylesheet that prints "offset compressedSize uncompressedSize path"
+	# for every file in the component's XML manifest.
+	cat > "${T}"/list-component-files.xsl <<-EOF
+		<?xml version="1.0" encoding="ISO-8859-1"?>
+		<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
+		<xsl:output omit-xml-declaration="yes"/>
+		<xsl:template match="text()"/>
+		<xsl:template match="/component/fileset/file">
+			<xsl:value-of select="@offset"/>
+			<xsl:text> </xsl:text>
+			<xsl:value-of select="@compressedSize"/>
+			<xsl:text> </xsl:text>
+			<xsl:value-of select="@uncompressedSize"/>
+			<xsl:text> </xsl:text>
+			<xsl:value-of select="@path"/>
+			<xsl:text>&#10;</xsl:text>
+		</xsl:template>
+		</xsl:stylesheet>
+	EOF
+	# Header fields at fixed byte positions relative to ${offset}.
+	local -i component_manifestOffset=$(od -An -j$((offset+9)) -N4 -tu4 "${component}")
+	local -i component_manifestSize=$(od -An -j$((offset+13)) -N4 -tu4 "${component}")
+	local -i component_dataOffset=$(od -An -j$((offset+17)) -N4 -tu4 "${component}")
+	local -i component_dataSize=$(od -An -j$((offset+21)) -N8 -tu8 "${component}")
+	# Each file payload is a gzip stream inside the component data area.
+	tail -c+$((offset+component_manifestOffset+1)) "${component}" 2> /dev/null |
+	head -c$((component_manifestSize)) | xsltproc "${T}"/list-component-files.xsl - |
+	while read -r file_offset file_compressedSize file_uncompressedSize file_path ; do
+		if [[ ${file_path} ]] ; then
+			# Progress dot per extracted file.
+			echo -n '.'
+			file_path="${dest}/${file_path}"
+			mkdir -p "$(dirname "${file_path}")" || die
+			# NOTE(review): die inside this pipeline subshell only aborts
+			# the subshell, not the whole phase.
+			tail -c+$((offset+component_dataOffset+file_offset+1)) "${component}" 2> /dev/null |
+			head -c$((file_compressedSize)) | gzip -cd > "${file_path}" || die
+		fi
+	done
+	echo
+}
diff --git a/eclass/waf-utils.eclass b/eclass/waf-utils.eclass
new file mode 100644
index 000000000000..eb9642a68f1e
--- /dev/null
+++ b/eclass/waf-utils.eclass
@@ -0,0 +1,129 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: waf-utils.eclass
+# @MAINTAINER:
+# maintainer-needed@gentoo.org
+# @AUTHOR:
+# Original Author: Gilles Dartiguelongue <eva@gentoo.org>
+# Various improvements based on cmake-utils.eclass: Tomáš Chvátal <scarabeus@gentoo.org>
+# Proper prefix support: Jonathan Callen <jcallen@gentoo.org>
+# @BLURB: common ebuild functions for waf-based packages
+# @DESCRIPTION:
+# The waf-utils eclass contains functions that make creating ebuild for
+# waf-based packages much easier.
+# Its main features are support of common portage default settings.
+
+inherit eutils multilib toolchain-funcs multiprocessing
+
+# Only EAPIs 4 and 5 are supported.
+case ${EAPI:-0} in
+	4|5) EXPORT_FUNCTIONS src_configure src_compile src_install ;;
+	*) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+# Python with threads is required to run waf. We do not know which python slot
+# is being used as the system interpreter, so we are forced to block all
+# slots that have USE=-threads.
+DEPEND="${DEPEND}
+	dev-lang/python
+	!dev-lang/python[-threads]"
+
+# @ECLASS-VARIABLE: WAF_VERBOSE
+# @DESCRIPTION:
+# Set to an empty value to disable verbose messages during compilation.
+# (Note: any non-empty value -- including "OFF" -- still enables --verbose,
+# because waf-utils_src_compile only tests for non-emptiness.)
+# this is _not_ meant to be set in ebuilds
+: ${WAF_VERBOSE:=ON}
+
+# @FUNCTION: waf-utils_src_configure
+# @DESCRIPTION:
+# General function for configuring with waf.
+waf-utils_src_configure() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# QA nagging: waf needs a python interpreter, so ebuilds should use a
+	# python-r1 suite eclass and have EPYTHON set up before calling waf.
+	if [[ ! ${_PYTHON_ANY_R1} && ! ${_PYTHON_SINGLE_R1} && ! ${_PYTHON_R1} ]]; then
+		eqawarn "Using waf-utils.eclass without any python-r1 suite eclass is not supported"
+		eqawarn "and will be banned on 2015-01-24. Please make sure to configure and inherit"
+		eqawarn "appropriate -r1 eclass. For more information and examples, please see:"
+		eqawarn "	https://wiki.gentoo.org/wiki/Project:Python/waf-utils_integration"
+	else
+		if [[ ! ${EPYTHON} ]]; then
+			eqawarn "EPYTHON is unset while calling waf-utils. This most likely means that"
+			eqawarn "the ebuild did not call the appropriate eclass function before calling waf."
+			if [[ ${_PYTHON_ANY_R1} ]]; then
+				eqawarn "Please ensure that python-any-r1_pkg_setup is called in pkg_setup()."
+			elif [[ ${_PYTHON_SINGLE_R1} ]]; then
+				eqawarn "Please ensure that python-single-r1_pkg_setup is called in pkg_setup()."
+			else # python-r1
+				eqawarn "Please ensure that python_setup is called before waf-utils_src_configure(),"
+				eqawarn "or that the latter is used within python_foreach_impl as appropriate."
+			fi
+			eqawarn
+		fi
+
+		if [[ ${PYTHON_REQ_USE} != *threads* ]]; then
+			eqawarn "Waf requires threading support in Python. To accomodate this requirement,"
+			eqawarn "please add 'threads(+)' to PYTHON_REQ_USE variable (above inherit line)."
+			eqawarn "For more information and examples, please see:"
+			eqawarn "	https://wiki.gentoo.org/wiki/Project:Python/waf-utils_integration"
+		fi
+	fi
+
+	local libdir=""
+
+	# @ECLASS-VARIABLE: WAF_BINARY
+	# @DESCRIPTION:
+	# Eclass can use different waf executable. Usually it is located in "${S}/waf".
+	: ${WAF_BINARY:="${S}/waf"}
+
+	# @ECLASS-VARIABLE: NO_WAF_LIBDIR
+	# @DEFAULT_UNSET
+	# @DESCRIPTION:
+	# Variable specifying that you don't want to set the libdir for waf script.
+	# Some scripts do not allow setting it at all and die if they find it.
+	[[ -z ${NO_WAF_LIBDIR} ]] && libdir="--libdir=${EPREFIX}/usr/$(get_libdir)"
+
+	tc-export AR CC CPP CXX RANLIB
+	# Print the configure invocation for the build log.
+	echo "CCFLAGS=\"${CFLAGS}\" LINKFLAGS=\"${CFLAGS} ${LDFLAGS}\" \"${WAF_BINARY}\" --prefix=${EPREFIX}/usr ${libdir} $@ configure"
+
+	# This condition is required because waf takes even whitespace as function
+	# calls, awesome isn't it?
+	if [[ -z ${NO_WAF_LIBDIR} ]]; then
+		CCFLAGS="${CFLAGS}" LINKFLAGS="${CFLAGS} ${LDFLAGS}" "${WAF_BINARY}" \
+			"--prefix=${EPREFIX}/usr" \
+			"${libdir}" \
+			"$@" \
+			configure || die "configure failed"
+	else
+		CCFLAGS="${CFLAGS}" LINKFLAGS="${CFLAGS} ${LDFLAGS}" "${WAF_BINARY}" \
+			"--prefix=${EPREFIX}/usr" \
+			"$@" \
+			configure || die "configure failed"
+	fi
+}
+
+# @FUNCTION: waf-utils_src_compile
+# @DESCRIPTION:
+# General function for compiling with waf.
+waf-utils_src_compile() {
+	debug-print-function ${FUNCNAME} "$@"
+
+	# Any non-empty WAF_VERBOSE (the default "ON") enables --verbose.
+	local _mywafconfig
+	[[ "${WAF_VERBOSE}" ]] && _mywafconfig="--verbose"
+
+	local jobs="--jobs=$(makeopts_jobs)"
+	# Log the command that is actually executed.  The previous message
+	# advertised a "build" argument that was never passed (waf presumably
+	# falls back to its default build command here -- confirm).
+	echo "\"${WAF_BINARY}\" ${_mywafconfig} ${jobs}"
+	"${WAF_BINARY}" ${_mywafconfig} ${jobs} || die "build failed"
+}
+
+# @FUNCTION: waf-utils_src_install
+# @DESCRIPTION:
+# Function for installing the package.
+waf-utils_src_install() {
+ debug-print-function ${FUNCNAME} "$@"
+
+ echo "\"${WAF_BINARY}\" --destdir=\"${D}\" install"
+ "${WAF_BINARY}" --destdir="${D}" install || die "Make install failed"
+
+ # Manual document installation
+ einstalldocs
+}
diff --git a/eclass/webapp.eclass b/eclass/webapp.eclass
new file mode 100644
index 000000000000..1dddec1edaff
--- /dev/null
+++ b/eclass/webapp.eclass
@@ -0,0 +1,581 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: webapp.eclass
+# @MAINTAINER:
+# web-apps@gentoo.org
+# @BLURB: functions for installing applications to run under a web server
+# @DESCRIPTION:
+# The webapp eclass contains functions to handle web applications with
+# webapp-config. Part of the implementation of GLEP #11
+
+# @ECLASS-VARIABLE: WEBAPP_DEPEND
+# @DESCRIPTION:
+# An ebuild should use WEBAPP_DEPEND if a custom DEPEND needs to be built, most
+# notably in combination with WEBAPP_OPTIONAL.
+WEBAPP_DEPEND=">=app-admin/webapp-config-1.50.15"
+
+# @ECLASS-VARIABLE: WEBAPP_NO_AUTO_INSTALL
+# @DESCRIPTION:
+# An ebuild sets this to `yes' if an automatic installation and/or upgrade is
+# not possible. The ebuild should overwrite pkg_postinst() and explain the
+# reason for this BEFORE calling webapp_pkg_postinst().
+
+# @ECLASS-VARIABLE: WEBAPP_OPTIONAL
+# @DESCRIPTION:
+# An ebuild sets this to `yes' to make webapp support optional, in which case
+# you also need to take care of USE-flags and dependencies.
+
+# Unless webapp support is optional, set up the default metadata:
+# SLOT per PVR (so versions can be installed side by side) and the
+# vhosts USE flag (unless auto-install is declared impossible).
+if [[ "${WEBAPP_OPTIONAL}" != "yes" ]]; then
+	[[ "${WEBAPP_NO_AUTO_INSTALL}" == "yes" ]] || IUSE="vhosts"
+	SLOT="${PVR}"
+	DEPEND="${WEBAPP_DEPEND}"
+	RDEPEND="${DEPEND}"
+fi
+
+EXPORT_FUNCTIONS pkg_postinst pkg_setup src_install pkg_prerm
+
+# Default application directory component under the htdocs base.
+INSTALL_DIR="/${PN}"
+# Install-type flags maintained by webapp_getinstalltype().
+IS_UPGRADE=0
+IS_REPLACE=0
+
+# Marker files proving the ebuild called our setup/install hooks.
+INSTALL_CHECK_FILE="installed_by_webapp_eclass"
+SETUP_CHECK_FILE="setup_by_webapp_eclass"
+
+# webapp-config tool and configuration paths, relative to ${ROOT}.
+ETC_CONFIG="${ROOT}etc/vhosts/webapp-config"
+WEBAPP_CONFIG="${ROOT}usr/sbin/webapp-config"
+WEBAPP_CLEANER="${ROOT}usr/sbin/webapp-cleaner"
+
+# ==============================================================================
+# INTERNAL FUNCTIONS
+# ==============================================================================
+
+# Load the config file /etc/vhosts/webapp-config
+# Supports both the old bash version, and the new python version
+webapp_read_config() {
+	debug-print-function $FUNCNAME $*
+
+	if has_version '>=app-admin/webapp-config-1.50'; then
+		# New (python) webapp-config prints its settings; eval'ing the
+		# output defines the MY_*/VHOST_* variables used by this eclass.
+		ENVVAR=$(${WEBAPP_CONFIG} --query ${PN} ${PVR}) || die "Could not read settings from webapp-config!"
+		eval ${ENVVAR}
+	elif [[ "${WEBAPP_OPTIONAL}" != "yes" ]]; then
+		# ETC_CONFIG might not be available
+		. ${ETC_CONFIG} || die "Unable to read ${ETC_CONFIG}"
+	elif [[ -f "${ETC_CONFIG}" ]]; then
+		# WEBAPP_OPTIONAL is set to yes
+		# and this must run only if ETC_CONFIG actually exists
+		. ${ETC_CONFIG} || die "Unable to read ${ETC_CONFIG}"
+	fi
+}
+
+# Check whether a specified file exists in the given directory (`.' by default)
+webapp_checkfileexists() {
+ debug-print-function $FUNCNAME $*
+
+ local my_prefix=${2:+${2}/}
+
+ if [[ ! -e "${my_prefix}${1}" ]]; then
+ msg="ebuild fault: file '${1}' not found"
+ eerror "$msg"
+ eerror "Please report this as a bug at http://bugs.gentoo.org/"
+ die "$msg"
+ fi
+}
+
+# Ask webapp-config what is installed at ${INSTALL_DIR} on localhost.
+# Callers use both the output ("<pn> <pvr> ...") and the exit status.
+webapp_check_installedat() {
+	debug-print-function $FUNCNAME $*
+	${WEBAPP_CONFIG} --show-installed -h localhost -d "${INSTALL_DIR}" 2> /dev/null
+}
+
+# Strip the leading "${MY_APPDIR}/" prefix from the given path.
+webapp_strip_appdir() {
+	debug-print-function $FUNCNAME $*
+	local stripped="${1#${MY_APPDIR}/}"
+	echo "${stripped}"
+}
+
+# Strip the leading "${D}" image prefix from the given path.
+webapp_strip_d() {
+	debug-print-function $FUNCNAME $*
+	local stripped="${1#${D}}"
+	echo "${stripped}"
+}
+
+# Rewrite a leading "./" into "/" so the path is rooted.
+webapp_strip_cwd() {
+	debug-print-function $FUNCNAME $*
+	local rooted="${1/#.\///}"
+	echo "${rooted}"
+}
+
+# Decide whether this is a fresh install, an upgrade or a re-install by
+# querying webapp-config; sets IS_UPGRADE/IS_REPLACE/REMOVE_PKG globals.
+webapp_getinstalltype() {
+	debug-print-function $FUNCNAME $*
+
+	# Detection only matters for the auto-install (USE=-vhosts) case.
+	if ! has vhosts ${IUSE} || use vhosts; then
+		return
+	fi
+
+	local my_output
+	my_output="$(webapp_check_installedat)"
+
+	if [[ $? -eq 0 ]]; then
+		# something is already installed there
+		# make sure it isn't the same version
+
+		# webapp-config prints "<pn> <pvr> ..."; split the fields.
+		local my_pn="$(echo ${my_output} | awk '{ print $1 }')"
+		local my_pvr="$(echo ${my_output} | awk '{ print $2 }')"
+
+		REMOVE_PKG="${my_pn}-${my_pvr}"
+
+		if [[ "${my_pn}" == "${PN}" ]]; then
+			if [[ "${my_pvr}" != "${PVR}" ]]; then
+				elog "This is an upgrade"
+				IS_UPGRADE=1
+				# for binpkgs, reset status, var declared in global scope
+				IS_REPLACE=0
+			else
+				elog "This is a re-installation"
+				IS_REPLACE=1
+				# for binpkgs, reset status, var declared in global scope
+				IS_UPGRADE=0
+			fi
+		else
+			elog "${my_output} is installed there"
+		fi
+	else
+		# for binpkgs, reset status, var declared in global scope
+		IS_REPLACE=0
+		IS_UPGRADE=0
+		elog "This is an installation"
+	fi
+}
+
+# ==============================================================================
+# PUBLIC FUNCTIONS
+# ==============================================================================
+
+# @FUNCTION: need_httpd
+# @DESCRIPTION:
+# Call this function AFTER your ebuilds DEPEND line if any of the available
+# webservers are able to run this application.
+need_httpd() {
+	# Append a "any capable webserver" choice to DEPEND.
+	DEPEND+="
+	|| ( virtual/httpd-basic virtual/httpd-cgi virtual/httpd-fastcgi )"
+}
+
+# @FUNCTION: need_httpd_cgi
+# @DESCRIPTION:
+# Call this function AFTER your ebuilds DEPEND line if any of the available
+# CGI-capable webservers are able to run this application.
+need_httpd_cgi() {
+	# Append a choice of CGI-capable webservers to DEPEND.
+	DEPEND+="
+	|| ( virtual/httpd-cgi virtual/httpd-fastcgi )"
+}
+
+# @FUNCTION: need_httpd_fastcgi
+# @DESCRIPTION:
+# Call this function AFTER your ebuilds DEPEND line if any of the available
+# FastCGI-capable webservers are able to run this application.
+need_httpd_fastcgi() {
+	# Append the FastCGI webserver virtual to DEPEND.
+	DEPEND+="
+	virtual/httpd-fastcgi"
+}
+
+# @FUNCTION: webapp_configfile
+# @USAGE: <file> [more files ...]
+# @DESCRIPTION:
+# Mark a file config-protected for a web-based application.
+webapp_configfile() {
+	debug-print-function $FUNCNAME $*
+
+	local m
+	for m in "$@"; do
+		webapp_checkfileexists "${m}" "${D}"
+
+		# Record the path relative to the application dir, with any
+		# leading "./" rewritten.
+		local my_file="$(webapp_strip_appdir "${m}")"
+		my_file="$(webapp_strip_cwd "${my_file}")"
+
+		elog "(config) ${my_file}"
+		# Quote the redirection target (consistent with _webapp_serverowned).
+		echo "${my_file}" >> "${D}/${WA_CONFIGLIST}"
+	done
+}
+
+# @FUNCTION: webapp_hook_script
+# @USAGE: <file>
+# @DESCRIPTION:
+# Install a script that will run after a virtual copy is created, and
+# before a virtual copy has been removed.
+webapp_hook_script() {
+	debug-print-function $FUNCNAME $*
+
+	webapp_checkfileexists "${1}"
+
+	elog "(hook) ${1}"
+	cp "${1}" "${D}/${MY_HOOKSCRIPTSDIR}/$(basename "${1}")" || die "Unable to install ${1} into ${D}/${MY_HOOKSCRIPTSDIR}/"
+	# Read+execute only for everyone.
+	# NOTE(review): chmod result is unchecked, unlike the cp above.
+	chmod 555 "${D}/${MY_HOOKSCRIPTSDIR}/$(basename "${1}")"
+}
+
+# @FUNCTION: webapp_postinst_txt
+# @USAGE: <lang> <file>
+# @DESCRIPTION:
+# Install a text file containing post-installation instructions.
+webapp_postinst_txt() {
+	debug-print-function $FUNCNAME $*
+
+	webapp_checkfileexists "${2}"
+
+	elog "(info) ${2} (lang: ${1})"
+	# Fail loudly if the copy fails (consistent with webapp_hook_script).
+	cp "${2}" "${D}/${MY_APPDIR}/postinst-${1}.txt" || die "Unable to install ${2} into ${D}/${MY_APPDIR}/"
+}
+
+# @FUNCTION: webapp_postupgrade_txt
+# @USAGE: <lang> <file>
+# @DESCRIPTION:
+# Install a text file containing post-upgrade instructions.
+webapp_postupgrade_txt() {
+	debug-print-function $FUNCNAME $*
+
+	webapp_checkfileexists "${2}"
+
+	elog "(info) ${2} (lang: ${1})"
+	# Fail loudly if the copy fails (consistent with webapp_hook_script).
+	cp "${2}" "${D}/${MY_APPDIR}/postupgrade-${1}.txt" || die "Unable to install ${2} into ${D}/${MY_APPDIR}/"
+}
+
+# helper for webapp_serverowned()
+_webapp_serverowned() {
+ debug-print-function $FUNCNAME $*
+
+ webapp_checkfileexists "${1}" "${D}"
+ local my_file="$(webapp_strip_appdir "${1}")"
+ my_file="$(webapp_strip_cwd "${my_file}")"
+
+ echo "${my_file}" >> "${D}/${WA_SOLIST}"
+}
+
+# @FUNCTION: webapp_serverowned
+# @USAGE: [-R] <file> [more files ...]
+# @DESCRIPTION:
+# Identify a file which must be owned by the webserver's user:group settings.
+# The ownership of the file is NOT set until the application is installed using
+# the webapp-config tool. If -R is given directories are handled recursively.
+webapp_serverowned() {
+	debug-print-function $FUNCNAME $*
+
+	local a m
+	if [[ "${1}" == "-R" ]]; then
+		shift
+		for m in "$@"; do
+			# IFS=/-r keep whitespace and backslashes in filenames intact
+			# (plain 'read a' mangled both).
+			# NOTE(review): this while runs in a pipeline subshell, so a
+			# die inside it does not abort the whole phase.
+			find "${D}${m}" | while IFS= read -r a; do
+				a=$(webapp_strip_d "${a}")
+				_webapp_serverowned "${a}"
+			done
+		done
+	else
+		for m in "$@"; do
+			_webapp_serverowned "${m}"
+		done
+	fi
+}
+
+# @FUNCTION: webapp_server_configfile
+# @USAGE: <server> <file> [new name]
+# @DESCRIPTION:
+# Install a configuration file for the webserver. You need to specify a
+# webapp-config supported <server>. if no new name is given `basename $2' is
+# used by default. Note: this function will automagically prepend $1 to the
+# front of your config file's name.
+webapp_server_configfile() {
+	debug-print-function $FUNCNAME $*
+
+	webapp_checkfileexists "${2}"
+
+	# WARNING:
+	#
+	# do NOT change the naming convention used here without changing all
+	# the other scripts that also rely upon these names
+
+	# Final name is "<server>-<explicit $3, or basename of $2>".
+	local my_file="${1}-${3:-$(basename "${2}")}"
+
+	elog "(${1}) config file '${my_file}'"
+	# NOTE(review): cp result is unchecked, unlike webapp_hook_script.
+	cp "${2}" "${D}/${MY_SERVERCONFIGDIR}/${my_file}"
+}
+
+# @FUNCTION: webapp_sqlscript
+# @USAGE: <db> <file> [version]
+# @DESCRIPTION:
+# Install a SQL script that creates/upgrades a database schema for the web
+# application. Currently supported database engines are mysql and postgres.
+# If a version is given the script should upgrade the database schema from
+# the given version to $PVR.
+webapp_sqlscript() {
+	debug-print-function $FUNCNAME $*
+
+	webapp_checkfileexists "${2}"
+
+	dodir "${MY_SQLSCRIPTSDIR}/${1}"
+
+	# WARNING:
+	#
+	# do NOT change the naming convention used here without changing all
+	# the other scripts that also rely upon these names
+
+	if [[ -n "${3}" ]]; then
+		# Upgrade script: <oldversion>_to_<PVR>.sql
+		elog "(${1}) upgrade script for ${PN}-${3} to ${PVR}"
+		cp "${2}" "${D}${MY_SQLSCRIPTSDIR}/${1}/${3}_to_${PVR}.sql"
+		chmod 600 "${D}${MY_SQLSCRIPTSDIR}/${1}/${3}_to_${PVR}.sql"
+	else
+		# Create script: <PVR>_create.sql.  Path built as
+		# "${D}${MY_SQLSCRIPTSDIR}" to match the upgrade branch above
+		# (previously this branch used an inconsistent "${D}/..." form).
+		elog "(${1}) create script for ${PN}-${PVR}"
+		cp "${2}" "${D}${MY_SQLSCRIPTSDIR}/${1}/${PVR}_create.sql"
+		chmod 600 "${D}${MY_SQLSCRIPTSDIR}/${1}/${PVR}_create.sql"
+	fi
+}
+
+# @FUNCTION: webapp_src_preinst
+# @DESCRIPTION:
+# You need to call this function in src_install() BEFORE anything else has run.
+# For now we just create required webapp-config directories.
+webapp_src_preinst() {
+	debug-print-function $FUNCNAME $*
+
+	# sanity checks, to catch bugs in the ebuild
+	# (webapp_pkg_setup leaves this marker file behind when it runs)
+	if [[ ! -f "${T}/${SETUP_CHECK_FILE}" ]]; then
+		eerror
+		eerror "This ebuild did not call webapp_pkg_setup() at the beginning"
+		eerror "of the pkg_setup() function"
+		eerror
+		eerror "Please log a bug on http://bugs.gentoo.org"
+		eerror
+		eerror "You should use emerge -C to remove this package, as the"
+		eerror "installation is incomplete"
+		eerror
+		die "Ebuild did not call webapp_pkg_setup() - report to http://bugs.gentoo.org"
+	fi
+
+	# Hint, see the webapp_read_config() function to find where these are
+	# defined.
+	dodir "${MY_HTDOCSDIR}"
+	dodir "${MY_HOSTROOTDIR}"
+	dodir "${MY_CGIBINDIR}"
+	dodir "${MY_ICONSDIR}"
+	dodir "${MY_ERRORSDIR}"
+	dodir "${MY_SQLSCRIPTSDIR}"
+	dodir "${MY_HOOKSCRIPTSDIR}"
+	dodir "${MY_SERVERCONFIGDIR}"
+}
+
+# ==============================================================================
+# EXPORTED FUNCTIONS
+# ==============================================================================
+
+# @FUNCTION: webapp_pkg_setup
+# @DESCRIPTION:
+# The default pkg_setup() for this eclass. This will gather required variables
+# from webapp-config and check if there is an application installed to
+# `${ROOT}/var/www/localhost/htdocs/${PN}/' if USE=vhosts is not set.
+#
+# You need to call this function BEFORE anything else has run in your custom
+# pkg_setup().
+webapp_pkg_setup() {
+	debug-print-function $FUNCNAME $*
+
+	# to test whether or not the ebuild has correctly called this function
+	# we add an empty file to the filesystem
+	#
+	# we used to just set a variable in the shell script, but we can
+	# no longer rely on Portage calling both webapp_pkg_setup() and
+	# webapp_src_install() within the same shell process
+	touch "${T}/${SETUP_CHECK_FILE}"
+
+	# special case - some ebuilds *do* need to override the SLOT
+	# (the "+" suffix keeps the comparison well-formed for empty values)
+	if [[ "${SLOT}+" != "${PVR}+" && "${WEBAPP_MANUAL_SLOT}" != "yes" ]]; then
+		die "Set WEBAPP_MANUAL_SLOT=\"yes\" if you need to SLOT manually"
+	fi
+
+	# pull in the shared configuration file
+	G_HOSTNAME="localhost"
+	webapp_read_config
+
+	local my_dir="${ROOT}${VHOST_ROOT}/${MY_HTDOCSBASE}/${PN}"
+
+	# if USE=vhosts is enabled OR no application is installed we're done here
+	if ! has vhosts ${IUSE} || use vhosts || [[ ! -d "${my_dir}" ]]; then
+		return
+	fi
+
+	local my_output
+	my_output="$(webapp_check_installedat)"
+
+	if [[ $? -ne 0 ]]; then
+		# okay, whatever is there, it isn't webapp-config-compatible
+		echo
+		ewarn
+		ewarn "You already have something installed in ${my_dir}"
+		ewarn
+		ewarn "Whatever is in ${my_dir}, it's not"
+		ewarn "compatible with webapp-config."
+		ewarn
+		ewarn "This ebuild may be overwriting important files."
+		ewarn
+		echo
+		# beep only on EAPIs 0-2 (newer EAPIs dropped ebeep)
+		if has "${EAPI:-0}" 0 1 2; then
+			ebeep 10
+		fi
+	elif [[ "$(echo ${my_output} | awk '{ print $1 }')" != "${PN}" ]]; then
+		echo
+		eerror "You already have ${my_output} installed in ${my_dir}"
+		eerror
+		eerror "I cannot upgrade a different application"
+		eerror
+		echo
+		die "Cannot upgrade contents of ${my_dir}"
+	fi
+
+}
+
+# @FUNCTION: webapp_src_install
+# @DESCRIPTION:
+# This is the default src_install(). For now, we just make sure that root owns
+# everything, and that there are no setuid files.
+#
+# You need to call this function AFTER everything else has run in your custom
+# src_install().
+webapp_src_install() {
+	debug-print-function $FUNCNAME $*
+
+	# to test whether or not the ebuild has correctly called this function
+	# we add an empty file to the filesystem
+	#
+	# we used to just set a variable in the shell script, but we can
+	# no longer rely on Portage calling both webapp_src_install() and
+	# webapp_pkg_postinst() within the same shell process
+	touch "${D}/${MY_APPDIR}/${INSTALL_CHECK_FILE}"
+
+	# Hand the whole image to the vhost owner and strip setuid/setgid
+	# bits.  VHOST_DEFAULT_UID/GID come from webapp-config settings
+	# (see webapp_read_config).
+	chown -R "${VHOST_DEFAULT_UID}:${VHOST_DEFAULT_GID}" "${D}/"
+	chmod -R u-s "${D}/"
+	chmod -R g-s "${D}/"
+
+	# Persistent-data dir stays root-owned; presumably preserved across
+	# upgrades by webapp-config - confirm.
+	keepdir "${MY_PERSISTDIR}"
+	fowners "root:0" "${MY_PERSISTDIR}"
+	fperms 755 "${MY_PERSISTDIR}"
+}
+
+# @FUNCTION: webapp_pkg_postinst
+# @DESCRIPTION:
+# The default pkg_postinst() for this eclass. This installs the web application to
+# `${ROOT}/var/www/localhost/htdocs/${PN}/' if USE=vhosts is not set. Otherwise
+# display a short notice how to install this application with webapp-config.
+#
+# You need to call this function AFTER everything else has run in your custom
+# pkg_postinst().
+webapp_pkg_postinst() {
+ debug-print-function $FUNCNAME $*
+
+ webapp_read_config
+
+ # sanity checks, to catch bugs in the ebuild
+ if [[ ! -f "${ROOT}${MY_APPDIR}/${INSTALL_CHECK_FILE}" ]]; then
+ eerror
+ eerror "This ebuild did not call webapp_src_install() at the end"
+ eerror "of the src_install() function"
+ eerror
+ eerror "Please log a bug on http://bugs.gentoo.org"
+ eerror
+ eerror "You should use emerge -C to remove this package, as the"
+ eerror "installation is incomplete"
+ eerror
+ die "Ebuild did not call webapp_src_install() - report to http://bugs.gentoo.org"
+ fi
+
+ if has vhosts ${IUSE}; then
+ if ! use vhosts; then
+ echo
+ elog "vhosts USE flag not set - auto-installing using webapp-config"
+
+ G_HOSTNAME="localhost"
+ webapp_read_config
+
+ local my_mode=-I
+ webapp_getinstalltype
+
+ if [[ "${IS_REPLACE}" == "1" ]]; then
+ elog "${PN}-${PVR} is already installed - replacing"
+ my_mode=-I
+ elif [[ "${IS_UPGRADE}" == "1" ]]; then
+ elog "${REMOVE_PKG} is already installed - upgrading"
+ my_mode=-U
+ else
+ elog "${PN}-${PVR} is not installed - using install mode"
+ fi
+
+ my_cmd="${WEBAPP_CONFIG} -h localhost -u root -d ${INSTALL_DIR} ${my_mode} ${PN} ${PVR}"
+ elog "Running ${my_cmd}"
+ ${my_cmd}
+
+ echo
+ local cleaner="${WEBAPP_CLEANER} -p -C ${CATEGORY}/${PN}"
+ einfo "Running ${cleaner}"
+ ${cleaner}
+ else
+ elog
+ elog "The 'vhosts' USE flag is switched ON"
+ elog "This means that Portage will not automatically run webapp-config to"
+ elog "complete the installation."
+ elog
+ elog "To install ${PN}-${PVR} into a virtual host, run the following command:"
+ elog
+ elog " webapp-config -h <host> -d ${PN} -I ${PN} ${PVR}"
+ elog
+ elog "For more details, see the webapp-config(8) man page"
+ fi
+ else
+ elog
+ elog "This ebuild does not support the 'vhosts' USE flag."
+ elog "This means that Portage will not automatically run webapp-config to"
+ elog "complete the installation."
+ elog
+ elog "To install ${PN}-${PVR} into a virtual host, run the following command:"
+ elog
+ elog " webapp-config -h <host> -d ${PN} -I ${PN} ${PVR}"
+ elog
+ elog "For more details, see the webapp-config(8) man page"
+ fi
+}
+
+# @FUNCTION: webapp_pkg_prerm
+# @DESCRIPTION:
+# This is the default pkg_prerm() for this eclass. If USE=vhosts is not set
+# remove all installed copies of this web application. Otherwise instruct the
+# user to manually remove those copies. See bug #136959.
+webapp_pkg_prerm() {
+ debug-print-function $FUNCNAME $*
+
+ local my_output=
+ my_output="$(${WEBAPP_CONFIG} --list-installs ${PN} ${PVR})"
+ [[ $? -ne 0 ]] && return
+
+ local x
+ if has vhosts ${IUSE} && ! use vhosts; then
+ echo "${my_output}" | while read x; do
+ if [[ -f "${x}"/.webapp ]]; then
+ . "${x}"/.webapp
+ if [[ -n "${WEB_HOSTNAME}" && -n "${WEB_INSTALLDIR}" ]]; then
+ ${WEBAPP_CONFIG} -h ${WEB_HOSTNAME} -d ${WEB_INSTALLDIR} -C ${PN} ${PVR}
+ fi
+ else
+ ewarn "Cannot find file ${x}/.webapp"
+ fi
+ done
+ elif [[ "${my_output}" != "" ]]; then
+ echo
+ ewarn
+ ewarn "Don't forget to use webapp-config to remove any copies of"
+ ewarn "${PN}-${PVR} installed in"
+ ewarn
+
+ echo "${my_output}" | while read x; do
+ if [[ -f "${x}"/.webapp ]]; then
+ ewarn " ${x}"
+ else
+ ewarn "Cannot find file ${x}/.webapp"
+ fi
+ done
+
+ ewarn
+ echo
+ fi
+}
diff --git a/eclass/wxwidgets.eclass b/eclass/wxwidgets.eclass
new file mode 100644
index 000000000000..f26f13d50459
--- /dev/null
+++ b/eclass/wxwidgets.eclass
@@ -0,0 +1,145 @@
+# Copyright 1999-2014 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: wxwidgets.eclass
+# @MAINTAINER:
+# wxwidgets@gentoo.org
+# @BLURB: Manages build configuration for wxGTK-using packages.
+# @DESCRIPTION:
+# This eclass gives ebuilds the ability to build against a specific wxGTK
+# SLOT and profile without interfering with the system configuration. Any
+# ebuild with a x11-libs/wxGTK dependency must use this eclass.
+#
+# There are two ways to do it:
+#
+# - set WX_GTK_VER before inheriting the eclass
+# - set WX_GTK_VER and call need-wxwidgets from a phase function
+#
+# (where WX_GTK_VER is the SLOT you want)
+#
+# If your package has optional support for wxGTK (ie. by a USE flag) then
+# you should use need-wxwidgets. This is important because some packages
+# will force-enable wxGTK if they find WX_CONFIG set in the environment.
+#
+# @CODE
+# inherit wxwidgets
+#
+# IUSE="X wxwidgets"
+# DEPEND="wxwidgets? ( x11-libs/wxGTK:2.8[X?] )"
+#
+# src_configure() {
+# if use wxwidgets; then
+# WX_GTK_VER="2.8"
+# if use X; then
+# need-wxwidgets unicode
+# else
+# need-wxwidgets base-unicode
+# fi
+# fi
+# econf --with-wx-config="${WX_CONFIG}"
+# }
+# @CODE
+#
+# That's about as complicated as it gets. 99% of ebuilds can get away with:
+#
+# @CODE
+# inherit wxwidgets
+# DEPEND="wxwidgets? ( x11-libs/wxGTK:2.8[X] )
+# ...
+# WX_GTK_VER=2.8 need-wxwidgets unicode
+# @CODE
+#
+# Note: unless you know your package works with wxbase (which is very
+# doubtful), always depend on wxGTK[X].
+
+inherit eutils multilib
+
+# We do this in global scope so ebuilds can get sane defaults just by
+# inheriting.
+if [[ -z ${WX_CONFIG} ]]; then
+ if [[ -n ${WX_GTK_VER} ]]; then
+ for wxtoolkit in mac gtk2 base; do
+ # newer versions don't have a seperate debug profile
+ for wxdebug in xxx release- debug-; do
+ wxconf="${wxtoolkit}-unicode-${wxdebug/xxx/}${WX_GTK_VER}"
+
+ [[ -f ${EPREFIX}/usr/$(get_libdir)/wx/config/${wxconf} ]] || continue
+
+ WX_CONFIG="${EPREFIX}/usr/$(get_libdir)/wx/config/${wxconf}"
+ WX_ECLASS_CONFIG="${WX_CONFIG}"
+ break
+ done
+ [[ -n ${WX_CONFIG} ]] && break
+ done
+ [[ -n ${WX_CONFIG} ]] && export WX_CONFIG WX_ECLASS_CONFIG
+ fi
+fi
+
+# @FUNCTION: need-wxwidgets
+# @USAGE: <profile>
+# @DESCRIPTION:
+#
+# Available configurations are:
+#
+# unicode (USE="X")
+# base-unicode (USE="-X")
+
+need-wxwidgets() {
+ local wxtoolkit wxdebug wxconf
+
+ if [[ -z ${WX_GTK_VER} ]]; then
+ eerror "WX_GTK_VER must be set before calling $FUNCNAME."
+ echo
+ die
+ fi
+
+ if [[ ${WX_GTK_VER} != 2.8 && ${WX_GTK_VER} != 2.9 && ${WX_GTK_VER} != 3.0 ]]; then
+ eerror "Invalid WX_GTK_VER: ${WX_GTK_VER} - must be set to a valid wxGTK SLOT."
+ echo
+ die
+ fi
+
+ case $1 in
+ unicode|base-unicode) ;;
+ *) eerror "Invalid $FUNCNAME profile: $1"
+ echo
+ die
+ ;;
+ esac
+
+ # wxbase is provided by both gtk2 and base installations
+ if has_version "x11-libs/wxGTK:${WX_GTK_VER}[aqua]"; then
+ wxtoolkit="mac"
+ elif has_version "x11-libs/wxGTK:${WX_GTK_VER}[X]"; then
+ wxtoolkit="gtk2"
+ else
+ wxtoolkit="base"
+ fi
+
+ # 2.8 has a separate debug element
+ if [[ ${WX_GTK_VER} == 2.8 ]]; then
+ if has_version "x11-libs/wxGTK:${WX_GTK_VER}[debug]"; then
+ wxdebug="debug-"
+ else
+ wxdebug="release-"
+ fi
+ fi
+
+ wxconf="${wxtoolkit}-unicode-${wxdebug}${WX_GTK_VER}"
+
+ if [[ ! -f ${EPREFIX}/usr/$(get_libdir)/wx/config/${wxconf} ]]; then
+ echo
+ eerror "Failed to find configuration ${wxconf}"
+ echo
+ die
+ fi
+
+ export WX_CONFIG="${EPREFIX}/usr/$(get_libdir)/wx/config/${wxconf}"
+ export WX_ECLASS_CONFIG="${WX_CONFIG}"
+
+ echo
+ einfo "Requested wxWidgets: ${1} ${WX_GTK_VER}"
+ einfo "Using wxWidgets: ${wxconf}"
+ echo
+}
diff --git a/eclass/x-modular.eclass b/eclass/x-modular.eclass
new file mode 100644
index 000000000000..45aadab01183
--- /dev/null
+++ b/eclass/x-modular.eclass
@@ -0,0 +1,621 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# DEPRECATED
+# This eclass has been superseded by xorg-2
+# Please modify your ebuilds to use that instead
+#
+# @ECLASS: x-modular.eclass
+# @MAINTAINER:
+# Donnie Berkholz <dberkholz@gentoo.org>
+# x11@gentoo.org
+# @BLURB: Reduces code duplication in the modularized X11 ebuilds.
+# @DESCRIPTION:
+# This eclass makes trivial X ebuilds possible for apps, fonts, drivers,
+# and more. Many things that would normally be done in various functions
+# can be accessed by setting variables instead, such as patching,
+# running eautoreconf, passing options to configure and installing docs.
+#
+# All you need to do in a basic ebuild is inherit this eclass and set
+# DESCRIPTION, KEYWORDS and RDEPEND/DEPEND. If your package is hosted
+# with the other X packages, you don't need to set SRC_URI. Pretty much
+# everything else should be automatic.
+
+if [[ ${PV} = 9999* ]]; then
+ GIT_ECLASS="git"
+ SNAPSHOT="yes"
+ SRC_URI=""
+fi
+
+# If we're a font package, but not the font.alias one
+FONT_ECLASS=""
+if [[ "${PN/#font-}" != "${PN}" ]] \
+ && [[ "${CATEGORY}" = "media-fonts" ]] \
+ && [[ "${PN}" != "font-alias" ]] \
+ && [[ "${PN}" != "font-util" ]]; then
+ # Activate font code in the rest of the eclass
+ FONT="yes"
+
+ # Whether to inherit the font eclass
+ FONT_ECLASS="font"
+fi
+
+inherit eutils libtool multilib toolchain-funcs flag-o-matic autotools \
+ ${FONT_ECLASS} ${GIT_ECLASS}
+
+EXPORTED_FUNCTIONS="src_unpack src_compile src_install pkg_preinst pkg_postinst pkg_postrm"
+
+case "${EAPI:-0}" in
+ 0|1)
+ ;;
+ 2)
+ EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare src_configure"
+ ;;
+ *)
+ die "Unknown EAPI ${EAPI}"
+ ;;
+esac
+
+# exports must be ALWAYS after inherit
+EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
+
+# @ECLASS-VARIABLE: XDIR
+# @DESCRIPTION:
+# Directory prefix to use for everything. If you want to install to a
+# non-default prefix (e.g., /opt/xorg), change XDIR. This has not been
+# recently tested. You may need to uncomment the setting of datadir and
+# mandir in x-modular_src_install() or add it back in if it's no longer
+# there. You may also want to change the SLOT.
+XDIR="/usr"
+
+IUSE=""
+HOMEPAGE="http://xorg.freedesktop.org/"
+
+# @ECLASS-VARIABLE: SNAPSHOT
+# @DESCRIPTION:
+# If set to 'yes' and configure.ac exists, eautoreconf will run. Set
+# before inheriting this eclass.
+: ${SNAPSHOT:=no}
+
+# Set up SRC_URI for individual modular releases
+BASE_INDIVIDUAL_URI="http://xorg.freedesktop.org/releases/individual"
+# @ECLASS-VARIABLE: MODULE
+# @DESCRIPTION:
+# The subdirectory to download source from. Possible settings are app,
+# doc, data, util, driver, font, lib, proto, xserver. Set above the
+# inherit to override the default autoconfigured module.
+if [[ -z ${MODULE} ]]; then
+ case ${CATEGORY} in
+ app-doc) MODULE="doc" ;;
+ media-fonts) MODULE="font" ;;
+ x11-apps|x11-wm) MODULE="app" ;;
+ x11-misc|x11-themes) MODULE="util" ;;
+ x11-drivers) MODULE="driver" ;;
+ x11-base) MODULE="xserver" ;;
+ x11-proto) MODULE="proto" ;;
+ x11-libs) MODULE="lib" ;;
+ esac
+fi
+
+if [[ -n ${GIT_ECLASS} ]]; then
+ EGIT_REPO_URI="git://anongit.freedesktop.org/git/xorg/${MODULE}/${PN}"
+else
+ SRC_URI="${SRC_URI} ${BASE_INDIVIDUAL_URI}/${MODULE}/${P}.tar.bz2"
+fi
+
+SLOT="0"
+
+# Set the license for the package. This can be overridden by setting
+# LICENSE after the inherit. Nearly all FreeDesktop-hosted X packages
+# are under the MIT license. (This is what Red Hat does in their rpms)
+LICENSE="MIT"
+
+# Set up shared dependencies
+if [[ -n "${SNAPSHOT}" ]]; then
+# FIXME: What's the minimal libtool version supporting arbitrary versioning?
+ DEPEND="${DEPEND}
+ >=sys-devel/libtool-1.5
+ >=sys-devel/m4-1.4"
+ WANT_AUTOCONF="latest"
+ WANT_AUTOMAKE="latest"
+fi
+
+if [[ -n "${FONT}" ]]; then
+ RDEPEND="${RDEPEND}
+ media-fonts/encodings
+ x11-apps/mkfontscale
+ x11-apps/mkfontdir"
+ PDEPEND="${PDEPEND}
+ media-fonts/font-alias"
+
+ # Starting with 7.0RC3, we can specify the font directory
+ # But oddly, we can't do the same for encodings or font-alias
+
+# @ECLASS-VARIABLE: FONT_DIR
+# @DESCRIPTION:
+# If you're creating a font package and the suffix of PN is not equal to
+# the subdirectory of /usr/share/fonts/ it should install into, set
+# FONT_DIR to that directory or directories. Set before inheriting this
+# eclass.
+ : ${FONT_DIR:=${PN##*-}}
+
+ # Fix case of font directories
+ FONT_DIR=${FONT_DIR/ttf/TTF}
+ FONT_DIR=${FONT_DIR/otf/OTF}
+ FONT_DIR=${FONT_DIR/type1/Type1}
+ FONT_DIR=${FONT_DIR/speedo/Speedo}
+
+ # Set up configure options, wrapped so ebuilds can override if need be
+ if [[ -z ${FONT_OPTIONS} ]]; then
+ FONT_OPTIONS="--with-fontdir=\"/usr/share/fonts/${FONT_DIR}\""
+ fi
+
+ if [[ -n "${FONT}" ]]; then
+ if [[ ${PN##*-} = misc ]] || [[ ${PN##*-} = 75dpi ]] || [[ ${PN##*-} = 100dpi ]] || [[ ${PN##*-} = cyrillic ]]; then
+ IUSE="${IUSE} nls"
+ fi
+ fi
+fi
+
+# If we're a driver package
+if [[ "${PN/#xf86-video}" != "${PN}" ]] || [[ "${PN/#xf86-input}" != "${PN}" ]]; then
+ # Enable driver code in the rest of the eclass
+ DRIVER="yes"
+fi
+
+# Debugging -- ignore packages that can't be built with debugging
+if [[ -z "${FONT}" ]] \
+ && [[ "${CATEGORY/app-doc}" = "${CATEGORY}" ]] \
+ && [[ "${CATEGORY/x11-proto}" = "${CATEGORY}" ]] \
+ && [[ "${PN/util-macros}" = "${PN}" ]] \
+ && [[ "${PN/xbitmaps}" = "${PN}" ]] \
+ && [[ "${PN/xkbdata}" = "${PN}" ]] \
+ && [[ "${PN/xorg-cf-files}" = "${PN}" ]] \
+ && [[ "${PN/xcursor}" = "${PN}" ]] \
+ ; then
+ DEBUGGABLE="yes"
+ IUSE="${IUSE} debug"
+fi
+
+DEPEND="${DEPEND}
+ virtual/pkgconfig"
+
+if [[ "${PN/util-macros}" = "${PN}" ]]; then
+ DEPEND="${DEPEND}
+ >=x11-misc/util-macros-1.3.0"
+fi
+
+RDEPEND="${RDEPEND}
+ !<=x11-base/xorg-x11-6.9"
+# Provides virtual/x11 for temporary use until packages are ported
+# x11-base/x11-env"
+
+# @FUNCTION: x-modular_specs_check
+# @USAGE:
+# @DESCRIPTION:
+# Make any necessary changes related to gcc specs (generally hardened)
+x-modular_specs_check() {
+ if [[ ${PN:0:11} = "xorg-server" ]] || [[ -n "${DRIVER}" ]]; then
+ append-ldflags -Wl,-z,lazy
+ # (#116698) breaks loading
+ filter-ldflags -Wl,-z,now
+ fi
+}
+
+# @FUNCTION: x-modular_dri_check
+# @USAGE:
+# @DESCRIPTION:
+# Ensures the server supports DRI if building a driver with DRI support
+x-modular_dri_check() {
+ # (#120057) Enabling DRI in drivers requires that the server was built with
+ # support for it
+ # Starting with xorg-server 1.5.3, DRI support is always enabled unless
+ # USE=minimal is set (see bug #252084)
+ if [[ -n "${DRIVER}" ]]; then
+ if has dri ${IUSE} && use dri; then
+ einfo "Checking for direct rendering capabilities ..."
+ if has_version '>=x11-base/xorg-server-1.5.3'; then
+ if built_with_use x11-base/xorg-server minimal; then
+ die "You must build x11-base/xorg-server with USE=-minimal."
+ fi
+ else
+ if ! built_with_use x11-base/xorg-server dri; then
+ die "You must build x11-base/xorg-server with USE=dri."
+ fi
+ fi
+ fi
+ fi
+}
+
+# @FUNCTION: x-modular_server_supports_drivers_check
+# @USAGE:
+# @DESCRIPTION:
+# Ensures the server SDK is installed if a driver is being built
+x-modular_server_supports_drivers_check() {
+ # (#135873) Only certain servers will actually use or be capable of
+ # building external drivers, including binary drivers.
+ if [[ -n "${DRIVER}" ]]; then
+ if has_version '>=x11-base/xorg-server-1.1'; then
+ if ! built_with_use x11-base/xorg-server xorg; then
+ eerror "x11-base/xorg-server is not built with support for external drivers."
+ die "You must build x11-base/xorg-server with USE=xorg."
+ fi
+ fi
+ fi
+}
+
+# @FUNCTION: x-modular_unpack_source
+# @USAGE:
+# @DESCRIPTION:
+# Simply unpack source code. Nothing else.
+x-modular_unpack_source() {
+ if [[ -n ${GIT_ECLASS} ]]; then
+ git_src_unpack
+ else
+ unpack ${A}
+ fi
+ cd "${S}"
+
+ if [[ -n ${FONT_OPTIONS} ]]; then
+ einfo "Detected font directory: ${FONT_DIR}"
+ fi
+}
+
+# @FUNCTION: x-modular_patch_source
+# @USAGE:
+# @DESCRIPTION:
+# Apply all patches
+x-modular_patch_source() {
+ # Use standardized names and locations with bulk patching
+ # Patch directory is ${WORKDIR}/patch
+ # See epatch() in eutils.eclass for more documentation
+ if [[ -z "${EPATCH_SUFFIX}" ]] ; then
+ EPATCH_SUFFIX="patch"
+ fi
+
+# @VARIABLE: PATCHES
+# @DESCRIPTION:
+# If you have any patches to apply, set PATCHES to their locations and epatch
+# will apply them. It also handles epatch-style bulk patches, if you know how to
+# use them and set the correct variables. If you don't, read eutils.eclass.
+ if [[ ${#PATCHES[@]} -gt 1 ]]; then
+ for x in "${PATCHES[@]}"; do
+ epatch "${x}"
+ done
+ elif [[ -n "${PATCHES}" ]]; then
+ for x in ${PATCHES}; do
+ epatch "${x}"
+ done
+ # For non-default directory bulk patching
+ elif [[ -n "${PATCH_LOC}" ]] ; then
+ epatch ${PATCH_LOC}
+ # For standard bulk patching
+ elif [[ -d "${EPATCH_SOURCE}" ]] ; then
+ epatch
+ fi
+}
+
+# @FUNCTION: x-modular_reconf_source
+# @USAGE:
+# @DESCRIPTION:
+# Run eautoreconf if necessary, and run elibtoolize.
+x-modular_reconf_source() {
+ if [[ "${SNAPSHOT}" = "yes" ]]
+ then
+ # If possible, generate configure if it doesn't exist
+ if [ -f "./configure.ac" ]
+ then
+ eautoreconf
+ fi
+ fi
+
+ # Joshua Baergen - October 23, 2005
+ # Fix shared lib issues on MIPS, FBSD, etc etc
+ elibtoolize
+}
+
+# @FUNCTION: x-modular_src_prepare
+# @USAGE:
+# @DESCRIPTION:
+# Prepare a package after unpacking, performing all X-related tasks.
+x-modular_src_prepare() {
+ [[ -n ${GIT_ECLASS} ]] && has src_prepare ${EXPORTED_FUNCTIONS} \
+ && git_src_prepare
+ x-modular_patch_source
+ x-modular_reconf_source
+}
+
+# @FUNCTION: x-modular_src_unpack
+# @USAGE:
+# @DESCRIPTION:
+# Unpack a package, performing all X-related tasks.
+x-modular_src_unpack() {
+ x-modular_specs_check
+ x-modular_server_supports_drivers_check
+ x-modular_dri_check
+ x-modular_unpack_source
+ has src_prepare ${EXPORTED_FUNCTIONS} || x-modular_src_prepare
+}
+
+# @FUNCTION: x-modular_font_configure
+# @USAGE:
+# @DESCRIPTION:
+# If a font package, perform any necessary configuration steps
+x-modular_font_configure() {
+ if [[ -n "${FONT}" ]]; then
+ # Might be worth adding an option to configure your desired font
+ # and exclude all others. Also, should this USE be nls or minimal?
+ if has nls ${IUSE//+} && ! use nls; then
+ FONT_OPTIONS="${FONT_OPTIONS}
+ --disable-iso8859-2
+ --disable-iso8859-3
+ --disable-iso8859-4
+ --disable-iso8859-5
+ --disable-iso8859-6
+ --disable-iso8859-7
+ --disable-iso8859-8
+ --disable-iso8859-9
+ --disable-iso8859-10
+ --disable-iso8859-11
+ --disable-iso8859-12
+ --disable-iso8859-13
+ --disable-iso8859-14
+ --disable-iso8859-15
+ --disable-iso8859-16
+ --disable-jisx0201
+ --disable-koi8-r"
+ fi
+ fi
+}
+
+# @FUNCTION: x-modular_debug_setup
+# @USAGE:
+# @DESCRIPTION:
+# Set up CFLAGS for a debug build
+x-modular_debug_setup() {
+ if [[ -n "${DEBUGGABLE}" ]]; then
+ if use debug; then
+ strip-flags
+ append-flags -g
+ fi
+ fi
+}
+
+# @FUNCTION: x-modular_src_configure
+# @USAGE:
+# @DESCRIPTION:
+# Perform any necessary pre-configuration steps, then run configure
+x-modular_src_configure() {
+ x-modular_font_configure
+ x-modular_debug_setup
+
+# @VARIABLE: CONFIGURE_OPTIONS
+# @DESCRIPTION:
+# Any extra options to pass to configure
+
+ # If prefix isn't set here, .pc files cause problems
+ if [[ -x ${ECONF_SOURCE:-.}/configure ]]; then
+ econf --prefix=${XDIR} \
+ --datadir=${XDIR}/share \
+ ${FONT_OPTIONS} \
+ ${DRIVER_OPTIONS} \
+ ${CONFIGURE_OPTIONS}
+ fi
+}
+
+# @FUNCTION: x-modular_src_make
+# @USAGE:
+# @DESCRIPTION:
+# Run make.
+x-modular_src_make() {
+ emake || die "emake failed"
+}
+
+# @FUNCTION: x-modular_src_compile
+# @USAGE:
+# @DESCRIPTION:
+# Compile a package, performing all X-related tasks.
+x-modular_src_compile() {
+ has src_configure ${EXPORTED_FUNCTIONS} || x-modular_src_configure
+ x-modular_src_make
+}
+
+# @FUNCTION: x-modular_src_install
+# @USAGE:
+# @DESCRIPTION:
+# Install a built package to ${D}, performing any necessary steps.
+# Creates a ChangeLog from git if using live ebuilds.
+x-modular_src_install() {
+ # Install everything to ${XDIR}
+ if [[ ${CATEGORY} = x11-proto ]]; then
+ make \
+ ${PN/proto/}docdir=/usr/share/doc/${PF} \
+ DESTDIR="${D}" \
+ install \
+ || die
+ else
+ make \
+ docdir=/usr/share/doc/${PF} \
+ DESTDIR="${D}" \
+ install \
+ || die
+ fi
+# Shouldn't be necessary in XDIR=/usr
+# einstall forces datadir, so we need to re-force it
+# datadir=${XDIR}/share \
+# mandir=${XDIR}/share/man \
+
+ if [[ -n ${GIT_ECLASS} ]]; then
+ pushd "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}"
+ git log ${GIT_TREE} > "${S}"/ChangeLog
+ popd
+ fi
+
+ if [[ -e ${S}/ChangeLog ]]; then
+ dodoc "${S}"/ChangeLog
+ fi
+# @VARIABLE: DOCS
+# @DESCRIPTION:
+# Any documentation to install via dodoc
+ [[ -n ${DOCS} ]] && dodoc ${DOCS}
+
+ # Don't install libtool archives for server modules
+ if [[ -e ${D}/usr/$(get_libdir)/xorg/modules ]]; then
+ find "${D}"/usr/$(get_libdir)/xorg/modules -name '*.la' \
+ | xargs rm -f
+ fi
+
+ if [[ -n "${FONT}" ]]; then
+ remove_font_metadata
+ fi
+
+ if [[ -n "${DRIVER}" ]]; then
+ install_driver_hwdata
+ fi
+}
+
+# @FUNCTION: x-modular_pkg_preinst
+# @USAGE:
+# @DESCRIPTION:
+# This function doesn't do anything right now, but it may in the future.
+x-modular_pkg_preinst() {
+ # We no longer do anything here, but we can't remove it from the API
+ :
+}
+
+# @FUNCTION: x-modular_pkg_postinst
+# @USAGE:
+# @DESCRIPTION:
+# Run X-specific post-installation tasks on the live filesystem. The
+# only task right now is some setup for font packages.
+x-modular_pkg_postinst() {
+ if [[ -n "${FONT}" ]]; then
+ setup_fonts
+ fi
+}
+
+# @FUNCTION: x-modular_pkg_postrm
+# @USAGE:
+# @DESCRIPTION:
+# Run X-specific post-removal tasks on the live filesystem. The only
+# task right now is some cleanup for font packages.
+x-modular_pkg_postrm() {
+ if [[ -n "${FONT}" ]]; then
+ font_pkg_postrm
+ fi
+}
+
+# @FUNCTION: setup_fonts
+# @USAGE:
+# @DESCRIPTION:
+# Generates needed files for fonts and fixes font permissions
+setup_fonts() {
+ if [[ ! -n "${FONT_DIR}" ]]; then
+ msg="FONT_DIR is empty. The ebuild should set it to at least one subdir of /usr/share/fonts."
+ eerror "${msg}"
+ die "${msg}"
+ fi
+
+ create_fonts_scale
+ create_fonts_dir
+ create_font_cache
+}
+
+# @FUNCTION: remove_font_metadata
+# @USAGE:
+# @DESCRIPTION:
+# Don't let the package install generated font files that may overlap
+# with other packages. Instead, they're generated in pkg_postinst().
+remove_font_metadata() {
+ local DIR
+ for DIR in ${FONT_DIR}; do
+ if [[ "${DIR}" != "Speedo" ]] && \
+ [[ "${DIR}" != "CID" ]] ; then
+ # Delete font metadata files
+ # fonts.scale, fonts.dir, fonts.cache-1
+ rm -f "${D}"/usr/share/fonts/${DIR}/fonts.{scale,dir,cache-1}
+ fi
+ done
+}
+
+# @FUNCTION: install_driver_hwdata
+# @USAGE:
+# @DESCRIPTION:
+# Installs device-to-driver mappings for system-config-display and
+# anything else that uses hwdata.
+install_driver_hwdata() {
+ insinto /usr/share/hwdata/videoaliases
+ for i in "${FILESDIR}"/*.xinf; do
+ # We need this for the case when none exist,
+ # so *.xinf doesn't expand
+ if [[ -e $i ]]; then
+ doins $i
+ fi
+ done
+}
+
+# @FUNCTION: discover_font_dirs
+# @USAGE:
+# @DESCRIPTION:
+# Deprecated. Sets up the now-unused FONT_DIRS variable.
+discover_font_dirs() {
+ FONT_DIRS="${FONT_DIR}"
+}
+
+# @FUNCTION: create_fonts_scale
+# @USAGE:
+# @DESCRIPTION:
+# Create fonts.scale file, used by the old server-side fonts subsystem.
+create_fonts_scale() {
+ ebegin "Creating fonts.scale files"
+ local x
+ for DIR in ${FONT_DIR}; do
+ x=${ROOT}/usr/share/fonts/${DIR}
+ [[ -z "$(ls ${x}/)" ]] && continue
+ [[ "$(ls ${x}/)" = "fonts.cache-1" ]] && continue
+
+ # Only generate .scale files if truetype, opentype or type1
+ # fonts are present ...
+
+ # NOTE: There is no way to regenerate Speedo/CID fonts.scale
+ # <dberkholz@gentoo.org> 2 August 2004
+ if [[ "${x/encodings}" = "${x}" ]] \
+ && [[ -n "$(find ${x} -iname '*.[pot][ft][abcf]' -print)" ]]; then
+ mkfontscale \
+ -a "${ROOT}"/usr/share/fonts/encodings/encodings.dir \
+ -- ${x}
+ fi
+ done
+ eend 0
+}
+
+# @FUNCTION: create_fonts_dir
+# @USAGE:
+# @DESCRIPTION:
+# Create fonts.dir file, used by the old server-side fonts subsystem.
+create_fonts_dir() {
+ ebegin "Generating fonts.dir files"
+ for DIR in ${FONT_DIR}; do
+ x=${ROOT}/usr/share/fonts/${DIR}
+ [[ -z "$(ls ${x}/)" ]] && continue
+ [[ "$(ls ${x}/)" = "fonts.cache-1" ]] && continue
+
+ if [[ "${x/encodings}" = "${x}" ]]; then
+ mkfontdir \
+ -e "${ROOT}"/usr/share/fonts/encodings \
+ -e "${ROOT}"/usr/share/fonts/encodings/large \
+ -- ${x}
+ fi
+ done
+ eend 0
+}
+
+# @FUNCTION: create_font_cache
+# @USAGE:
+# @DESCRIPTION:
+# Create fonts.cache-1 files, used by the new client-side fonts
+# subsystem.
+create_font_cache() {
+ font_pkg_postinst
+}
diff --git a/eclass/xemacs-elisp-common.eclass b/eclass/xemacs-elisp-common.eclass
new file mode 100644
index 000000000000..67ca772bac8d
--- /dev/null
+++ b/eclass/xemacs-elisp-common.eclass
@@ -0,0 +1,311 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Copyright 2007-2011 Hans de Graaff <graaff@gentoo.org>
+#
+# Based on elisp-common.eclass:
+# Copyright 2007 Christian Faulhammer <opfer@gentoo.org>
+# Copyright 2002-2004 Matthew Kennedy <mkennedy@gentoo.org>
+# Copyright 2004-2005 Mamoru Komachi <usata@gentoo.org>
+# Copyright 2003 Jeremy Maitin-Shepard <jbms@attbi.com>
+# Copyright 2007 Ulrich Müller <ulm@gentoo.org>
+#
+# @ECLASS: xemacs-elisp-common.eclass
+# @MAINTAINER:
+# xemacs@gentoo.org
+# @BLURB: XEmacs-related installation utilities
+# @DESCRIPTION:
+#
+# Usually you want to use this eclass for (optional) XEmacs support of
+# your package. This is NOT for GNU Emacs!
+#
+# Many of the steps here are sometimes done by the build system of your
+# package (especially compilation), so this is mainly for standalone elisp
+# files you gathered from somewhere else.
+#
+# When relying on the xemacs USE flag, you need to add
+#
+# xemacs? ( app-editors/xemacs )
+#
+# to your DEPEND/RDEPEND line and use the functions provided here to bring
+# the files to the correct locations.
+#
+# @ROFF .SS
+# src_compile() usage:
+#
+# An elisp file is compiled by the xemacs-elisp-compile() function
+# defined here and simply takes the source files as arguments.
+#
+# xemacs-elisp-compile *.el
+#
+# In the case of interdependent elisp files, you can use the
+# xemacs-elisp-comp() function which makes sure all files are
+# loadable.
+#
+# xemacs-elisp-comp *.el
+#
+# Function xemacs-elisp-make-autoload-file() can be used to generate a
+# file with autoload definitions for the lisp functions. It takes a
+# list of directories (default: working directory) as its argument.
+# Use of this function requires that the elisp source files contain
+# magic ";;;###autoload" comments. See the XEmacs Lisp Reference Manual
+# (node "Autoload") for a detailed explanation.
+#
+# @ROFF .SS
+# src_install() usage:
+#
+# The resulting compiled files (.elc) should be put in a subdirectory
+# of /usr/lib/xemacs/site-lisp/ which is named after the first
+# argument of xemacs-elisp-install(). The following parameters are
+# the files to be put in that directory. Usually the subdirectory
+# should be ${PN}, but you can choose something else.
+#
+# xemacs-elisp-install ${PN} *.el *.elc
+#
+# To let the XEmacs support be activated by XEmacs on startup, you need
+# to provide a site file (shipped in ${FILESDIR}) which contains the
+# startup code (have a look in the documentation of your software).
+# Normally this would look like this:
+#
+# (add-to-list 'load-path "@SITELISP@")
+# (add-to-list 'auto-mode-alist '("\\.csv\\'" . csv-mode))
+# (autoload 'csv-mode "csv-mode" "Major mode for csv files." t)
+#
+# If your XEmacs support files are installed in a subdirectory of
+# /usr/share/xemacs/site-packages/ (which is strongly recommended), you need
+# to extend XEmacs' load-path as shown in the first non-comment line.
+# The xemacs-elisp-site-file-install() function of this eclass will replace
+# "@SITELISP@" by the actual path.
+#
+# The next line tells XEmacs to load the mode opening a file ending
+# with ".csv" and load functions depending on the context and needed
+# features. Be careful though. Commands as "load-library" or "require"
+# bloat the editor as they are loaded on every startup. When having
+# many XEmacs support files, users may be annoyed by the start-up time.
+# Also avoid keybindings as they might interfere with the user's
+# settings. Give a hint in pkg_postinst(), which should be enough.
+#
+# The naming scheme for this site-init file matches the shell pattern
+# "[1-8][0-9]*-gentoo*.el", where the two digits at the beginning define
+# the loading order (numbers below 10 or above 89 are reserved for
+# internal use). So if your initialisation depends on another XEmacs
+# package, your site file's number must be higher! If there are no such
+# interdependencies then the number should be 50. Otherwise, numbers
+# divisible by 10 are preferred.
+#
+# Best practice is to define a SITEFILE variable in the global scope of
+# your ebuild (e.g., right after S or RDEPEND):
+#
+# SITEFILE="50${PN}-gentoo.el"
+#
+# Which is then installed by
+#
+# xemacs-elisp-site-file-install "${FILESDIR}/${SITEFILE}" || die
+#
+# in src_install(). Any characters after the "-gentoo" part and before
+# the extension will be stripped from the destination file's name.
+# For example, a file "50${PN}-gentoo-${PV}.el" will be installed as
+# "50${PN}-gentoo.el". If your subdirectory is not named ${PN}, give
+# the differing name as second argument.
+
+# @ECLASS-VARIABLE: XEMACS_SITELISP
+# @DESCRIPTION:
+# Directory where packages install indivivual XEmacs Lisp files.
+XEMACS_SITELISP=/usr/share/xemacs/site-lisp
+
+# @ECLASS-VARIABLE: XEMACS_SITEPACKAGE
+# @DESCRIPTION:
+# Directory where packages install XEmacs Lisp packages.
+XEMACS_SITEPACKAGE=/usr/share/xemacs/site-packages
+
+# @ECLASS-VARIABLE: XEMACS
+# @DESCRIPTION:
+# Path of XEmacs executable.
+XEMACS=/usr/bin/xemacs
+
+# @ECLASS-VARIABLE: XEMACS_BATCH_CLEAN
+# @DESCRIPTION:
+# Invocation of XEMACS in batch mode.
+XEMACS_BATCH_CLEAN="${XEMACS} --batch --no-site-file --no-init-file"
+
+# @FUNCTION: xemacs-elisp-compile
+# @USAGE: <list of elisp files>
+# @DESCRIPTION:
+# Byte-compile elisp files with xemacs. This function will die when
+# there is a problem compiling the lisp files.
+xemacs-elisp-compile () {
+ {
+ ${XEMACS_BATCH_CLEAN} -f batch-byte-compile "$@"
+ xemacs-elisp-make-autoload-file "$@"
+ } || die "Compile lisp files failed"
+}
+
+xemacs-elisp-make-autoload-file () {
+ ${XEMACS_BATCH_CLEAN} \
+ -eval "(setq autoload-package-name \"${PN}\")" \
+ -eval "(setq generated-autoload-file \"${S}/auto-autoloads.el\")" \
+ -l autoload -f batch-update-autoloads "$@"
+}
+
+# @FUNCTION: xemacs-elisp-install
+# @USAGE: <subdirectory> <list of files>
+# @DESCRIPTION:
+# Install elisp source and byte-compiled files. All files are installed
+# in site-packages in their own directory, indicated by the first
+# argument to the function. This function will die if there is a problem
+# installing the list files.
+
+xemacs-elisp-install () {
+ local subdir="$1"
+ shift
+ ( # use sub-shell to avoid possible environment polution
+ dodir "${XEMACS_SITEPACKAGE}"/lisp/"${subdir}"
+ insinto "${XEMACS_SITEPACKAGE}"/lisp/"${subdir}"
+ doins "$@"
+ ) || die "Installing lisp files failed"
+}
+
+# @FUNCTION: xemacs-elisp-comp
+# @USAGE: <list of elisp files>
+# @DESCRIPTION:
+# Byte-compile interdependent XEmacs lisp files.
+# Originally taken from GNU autotools, but some configuration options
+# removed as they don't make sense with the current status of XEmacs
+# in Gentoo.
+
xemacs-elisp-comp() {
	# Byte-compile all `.el' files given as arguments with XEmacs and
	# put the resulting `.elc' files into the current directory,
	# disregarding the sources' original directories.  Everything is
	# copied into one scratch directory first and the parent is put on
	# load-path, so interdependent files can require/load-library one
	# another.  Based on a script by François Pinard, Copyright 1995
	# Free Software Foundation, Inc.
	#
	# Returns non-zero when called without arguments, when the scratch
	# directory cannot be set up, or when byte-compilation fails.
	test $# -gt 0 || return 1

	einfo "Compiling XEmacs Elisp files ..."

	# Scope the work variables locally; tempdir used to leak into the
	# caller's environment, and setup failures went unnoticed.
	local tempdir=elc.$$
	mkdir "${tempdir}" || return 1
	cp "$@" "${tempdir}" || return 1
	pushd "${tempdir}" >/dev/null || return 1

	echo "(add-to-list 'load-path \"../\")" > script
	${XEMACS_BATCH_CLEAN} -l script -f batch-byte-compile *.el
	local ret=$?
	mv *.elc ..

	popd >/dev/null
	rm -fr "${tempdir}"
	return ${ret}
}
+
+# @FUNCTION: xemacs-elisp-site-file-install
+# @USAGE: <site-init file> [subdirectory]
+# @DESCRIPTION:
+# Install XEmacs site-init file in XEMACS_SITELISP directory.
+# Automatically inserts a standard comment header with the name of the
+# package (unless it is already present). Token @SITELISP@ is replaced
+# by the path to the package's subdirectory in XEMACS_SITELISP.
+
xemacs-elisp-site-file-install() {
	# sf: basename of the site-init file; my_pn: subdirectory name under
	# XEMACS_SITELISP (defaults to ${PN}).
	local sf="${1##*/}" my_pn="${2:-${PN}}" ret
	local header=";;; ${PN} site-lisp configuration"

	# Site-init files must be named "NN${PN}-gentoo*.el" so they sort
	# predictably inside site-gentoo.d.
	[[ ${sf} == [0-9][0-9]*-gentoo*.el ]] \
		|| ewarn "xemacs-elisp-site-file-install: bad name of site-init file"
	# Normalise any "-gentoo<suffix>.el" name to plain "-gentoo.el" and
	# stage the file in ${T} for editing.
	sf="${T}/${sf/%-gentoo*.el/-gentoo.el}"
	ebegin "Installing site initialisation file for XEmacs"
	[[ $1 = "${sf}" ]] || cp "$1" "${sf}"
	# First sed expression: on the first non-blank line, prepend the
	# standard ${header} comment unless a comment naming ${PN} is
	# already present (case-insensitive).  Second expression: expand
	# the @SITELISP@ token to the package's site-lisp subdirectory.
	sed -i -e "1{:x;/^\$/{n;bx;};/^;.*${PN}/I!s:^:${header}\n\n:;1s:^:\n:;}" \
		-e "s:@SITELISP@:${EPREFIX}${XEMACS_SITELISP}/${my_pn}:g" "${sf}"
	( # subshell to avoid pollution of calling environment
		insinto "${XEMACS_SITELISP}/site-gentoo.d"
		doins "${sf}"
	)
	ret=$?
	rm -f "${sf}"
	eend ${ret} "xemacs-elisp-site-file-install: doins failed"
}
+
+# @FUNCTION: xemacs-elisp-site-regen
+# @DESCRIPTION:
+# Regenerate the site-gentoo.el file, based on packages' site
+# initialisation files in the /usr/share/xemacs/site-lisp/site-gentoo.d/
+# directory.
+
xemacs-elisp-site-regen() {
	local sitelisp=${ROOT}${EPREFIX}${XEMACS_SITELISP}
	local sf i line null="" page=$'\f'
	local -a sflist

	if [ ! -d "${sitelisp}" ]; then
		eerror "xemacs-elisp-site-regen: Directory ${sitelisp} does not exist"
		return 1
	fi

	if [ ! -d "${T}" ]; then
		eerror "xemacs-elisp-site-regen: Temporary directory ${T} does not exist"
		return 1
	fi

	einfon "Regenerating site-gentoo.el for XEmacs (${EBUILD_PHASE}) ..."

	# Collect all readable site-init files, ordered by basename.
	for sf in "${sitelisp}"/site-gentoo.d/[0-9][0-9]*.el
	do
		[ -r "${sf}" ] || continue
		# sort files by their basename. straight insertion sort.
		for ((i=${#sflist[@]}; i>0; i--)); do
			[[ ${sf##*/} < ${sflist[i-1]##*/} ]] || break
			sflist[i]=${sflist[i-1]}
		done
		sflist[i]=${sf}
	done

	# Assemble the candidate file in ${T}: header comment, the
	# concatenated site-init files, then a footer providing the
	# 'site-gentoo feature.
	cat <<-EOF >"${T}"/site-gentoo.el
	;;; site-gentoo.el --- site initialisation for Gentoo-installed packages

	;;; Commentary:
	;; Automatically generated by xemacs-elisp-common.eclass
	;; DO NOT EDIT THIS FILE

	;;; Code:
	EOF
	# Use sed instead of cat here, since files may miss a trailing newline.
	sed '$q' "${sflist[@]}" </dev/null >>"${T}"/site-gentoo.el
	# NOTE(review): ${null} splits the literal "Local Variables" cookie
	# in this eclass's own text — presumably so tools scanning the
	# eclass do not treat it as a local-variables section; the generated
	# file still gets the intact cookie.  TODO confirm.
	cat <<-EOF >>"${T}"/site-gentoo.el

	${page}
	(provide 'site-gentoo)

	;; Local ${null}Variables:
	;; no-byte-compile: t
	;; buffer-read-only: t
	;; End:

	;;; site-gentoo.el ends here
	EOF

	if cmp -s "${sitelisp}"/site-gentoo.el "${T}"/site-gentoo.el; then
		# This prevents outputting unnecessary text when there
		# was actually no change.
		# A case is a remerge where we have doubled output.
		rm -f "${T}"/site-gentoo.el
		echo " no changes."
	else
		mv "${T}"/site-gentoo.el "${sitelisp}"/site-gentoo.el
		echo
		case ${#sflist[@]} in
			0) ewarn "... Huh? No site initialisation files found." ;;
			1) einfo "... ${#sflist[@]} site initialisation file included." ;;
			*) einfo "... ${#sflist[@]} site initialisation files included." ;;
		esac
	fi

	return 0
}
diff --git a/eclass/xemacs-elisp.eclass b/eclass/xemacs-elisp.eclass
new file mode 100644
index 000000000000..1b08e5c48176
--- /dev/null
+++ b/eclass/xemacs-elisp.eclass
@@ -0,0 +1,55 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+#
+# Copyright 2007-2011 Hans de Graaff <graaff@gentoo.org>
+#
+# Based on elisp.eclass:
+# Copyright 2007 Christian Faulhammer <opfer@gentoo.org>
+# Copyright 2002-2003 Matthew Kennedy <mkennedy@gentoo.org>
+# Copyright 2003 Jeremy Maitin-Shepard <jbms@attbi.com>
+#
+# @ECLASS: xemacs-elisp.eclass
+# @MAINTAINER:
+# xemacs@gentoo.org
+# @BLURB: Eclass for XEmacs Lisp packages
+# @DESCRIPTION:
+# Emacs support for other than pure elisp packages is handled by
+# xemacs-elisp-common.eclass where you won't have a dependency on XEmacs
+# itself. All elisp-* functions are documented there.
+
+# @ECLASS-VARIABLE: SIMPLE_ELISP
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# Setting SIMPLE_ELISP=t in an ebuild means that the package's source
+# is a single (in whatever way) compressed elisp file with the file name
+# ${PN}-${PV}. This eclass will then redefine ${S}, and move
+# ${PN}-${PV}.el to ${PN}.el in src_unpack().
+
+inherit xemacs-elisp-common
+
+if [ "${SIMPLE_ELISP}" = 't' ]; then
+ S="${WORKDIR}/"
+fi
+
+
+DEPEND="app-editors/xemacs"
+IUSE=""
+
xemacs-elisp_src_unpack() {
	# Unpack the sources; for SIMPLE_ELISP packages additionally rename
	# the single ${P}.el source file to ${PN}.el.
	unpack ${A}
	if [ "${SIMPLE_ELISP}" = 't' ]; then
		# These failures used to be silently ignored (bare "cd && mv");
		# make them fatal so broken tarballs are caught at unpack time.
		cd "${S}" || die "cd to ${S} failed"
		mv "${P}.el" "${PN}.el" || die "renaming ${P}.el to ${PN}.el failed"
	fi
}
+
xemacs-elisp_src_compile() {
	# Byte-compile every Elisp file in the working directory.
	xemacs-elisp-compile *.el
}
+
xemacs-elisp_src_install () {
	# Install sources and byte-compiled files into the ${PN} package dir.
	xemacs-elisp-install "${PN}" *.el *.elc
}
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
diff --git a/eclass/xemacs-packages.eclass b/eclass/xemacs-packages.eclass
new file mode 100644
index 000000000000..2d453fc624c4
--- /dev/null
+++ b/eclass/xemacs-packages.eclass
@@ -0,0 +1,68 @@
+# Copyright 1999-2011 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: xemacs-packages.eclass
+# @MAINTAINER:
+# xemacs@gentoo.org
+# @BLURB: Eclass to support elisp packages distributed by XEmacs.
+# @DESCRIPTION:
+# This eclass supports ebuilds for packages distributed by XEmacs.
+
+EXPORT_FUNCTIONS src_unpack src_compile src_install
+
+RDEPEND="${RDEPEND} app-editors/xemacs"
+DEPEND="${DEPEND}"
+
+[ -z "$HOMEPAGE" ] && HOMEPAGE="http://xemacs.org/"
+[ -z "$LICENSE" ] && LICENSE="GPL-2"
+
+# @ECLASS-VARIABLE: PKG_CAT
+# @REQUIRED
+# @DESCRIPTION:
+# The package category that the package is in. Can be either standard,
+# mule, or contrib.
+
# Map the package category onto the XEmacs installation hierarchy:
# official "standard" and "mule" packages live under xemacs-packages/
# and mule-packages/ respectively, third-party "contrib" packages under
# site-packages/.  Anything else (including an unset PKG_CAT) is fatal.
case "${PKG_CAT}" in
	"standard" )
		MY_INSTALL_DIR="/usr/lib/xemacs/xemacs-packages" ;;

	"mule" )
		MY_INSTALL_DIR="/usr/lib/xemacs/mule-packages" ;;

	"contrib" )
		MY_INSTALL_DIR="/usr/lib/xemacs/site-packages" ;;
	*)
		die "Unsupported package category in PKG_CAT (or unset)" ;;
esac
[ -n "$DEBUG" ] && einfo "MY_INSTALL_DIR is ${MY_INSTALL_DIR}"
+
+# @ECLASS-VARIABLE: EXPERIMENTAL
+# @DEFAULT_UNSET
+# @DESCRIPTION:
+# If set then the package is downloaded from the experimental packages
+# repository, which is the staging area for packages upstream. Packages
+# in the experimental repository are auto-generated from XEmacs VCS, so
+# they may not be well-tested.
+
+if [ -n "$EXPERIMENTAL" ]
+then
+ [ -z "$SRC_URI" ] && SRC_URI="http://ftp.xemacs.org/pub/xemacs/beta/experimental/packages/${P}-pkg.tar.gz"
+else
+ [ -z "$SRC_URI" ] && SRC_URI="http://ftp.xemacs.org/pub/xemacs/packages/${P}-pkg.tar.gz"
+fi
+[ -n "$DEBUG" ] && einfo "SRC_URI is ${SRC_URI}"
+
xemacs-packages_src_unpack() {
	# Deliberate no-op: the tarball is unpacked straight into the image
	# directory during src_install instead.
	return 0
}
+
xemacs-packages_src_compile() {
	# Upstream XEmacs packages ship pre-byte-compiled; nothing to build.
	einfo "Nothing to compile"
}
+
xemacs-packages_src_install() {
	# Unpack the package tarball directly into the image directory
	# chosen from PKG_CAT.
	dodir "${MY_INSTALL_DIR}"
	# An unchecked cd would let unpack spill the tarball into whatever
	# the current directory happens to be; make failure fatal.
	cd "${D}${MY_INSTALL_DIR}" || die "cd to ${D}${MY_INSTALL_DIR} failed"
	unpack ${A}
}
diff --git a/eclass/xfconf.eclass b/eclass/xfconf.eclass
new file mode 100644
index 000000000000..b35964495714
--- /dev/null
+++ b/eclass/xfconf.eclass
@@ -0,0 +1,154 @@
+# Copyright 1999-2012 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: xfconf.eclass
+# @MAINTAINER:
+# XFCE maintainers <xfce@gentoo.org>
+# @BLURB: Default XFCE ebuild layout
+# @DESCRIPTION:
+# Default XFCE ebuild layout
+
+# @ECLASS-VARIABLE: EAUTORECONF
+# @DESCRIPTION:
+# Run eautoreconf instead of elibtoolize if the variable is set
+
+if [[ -n ${EAUTORECONF} ]] ; then
+ AUTOTOOLS_AUTO_DEPEND=yes
+else
+ : ${AUTOTOOLS_AUTO_DEPEND:=no}
+fi
+
+# @ECLASS-VARIABLE: XFCONF
+# @DESCRIPTION:
+# This should be an array defining arguments for econf
+
+unset _xfconf_live
+[[ $PV == *9999* ]] && _xfconf_live=git-2
+
+inherit ${_xfconf_live} autotools eutils fdo-mime gnome2-utils libtool
+
+EGIT_BOOTSTRAP=autogen.sh
+EGIT_REPO_URI="git://git.xfce.org/xfce/${MY_PN:-${PN}}"
+
+_xfconf_deps=""
+_xfconf_m4=">=dev-util/xfce4-dev-tools-4.10"
+
+[[ -n $_xfconf_live ]] && _xfconf_deps+=" dev-util/gtk-doc ${_xfconf_m4}"
+[[ -n $EAUTORECONF ]] && _xfconf_deps+=" ${_xfconf_m4}"
+
+RDEPEND=""
+DEPEND="${_xfconf_deps}"
+
+unset _xfconf_deps
+unset _xfconf_m4
+
+case ${EAPI:-0} in
+ 5) ;;
+ *) die "Unknown EAPI." ;;
+esac
+
+[[ -n $_xfconf_live ]] && _xfconf_live=src_unpack
+
+EXPORT_FUNCTIONS ${_xfconf_live} src_prepare src_configure src_install pkg_preinst pkg_postinst pkg_postrm
+
+# @FUNCTION: xfconf_use_debug
+# @DESCRIPTION:
+# If IUSE has debug, return --enable-debug=minimum.
+# If USE debug is enabled, return --enable-debug which is the same as --enable-debug=yes.
+# If USE debug is enabled and the XFCONF_FULL_DEBUG variable is set, return --enable-debug=full.
xfconf_use_debug() {
	# Map the "debug" USE flag onto xfconf's --enable-debug levels:
	# flag absent from IUSE -> warn; flag disabled -> minimum;
	# flag enabled -> yes, or full when XFCONF_FULL_DEBUG is set.
	if ! has debug ${IUSE}; then
		ewarn "${FUNCNAME} called without debug in IUSE"
		return
	fi

	if ! use debug; then
		echo "--enable-debug=minimum"
	elif [[ -n $XFCONF_FULL_DEBUG ]]; then
		echo "--enable-debug=full"
	else
		echo "--enable-debug"
	fi
}
+
+# @FUNCTION: xfconf_src_unpack
+# @DESCRIPTION:
+# Run git-2_src_unpack if required
xfconf_src_unpack() {
	# Delegate to git-2; NOCONFIGURE keeps autogen.sh from running
	# configure already at unpack time.
	NOCONFIGURE=1 git-2_src_unpack
}
+
+# @FUNCTION: xfconf_src_prepare
+# @DESCRIPTION:
+# Process PATCHES with epatch and run epatch_user followed by run of
+# elibtoolize, or eautoreconf if EAUTORECONF is set.
xfconf_src_prepare() {
	debug-print-function ${FUNCNAME} "$@"

	# Apply PATCHES (if any) and user patches first.
	if [[ ${PATCHES[@]} ]]; then
		epatch "${PATCHES[@]}"
	fi
	epatch_user

	# Full autoreconf only on request; plain elibtoolize otherwise.
	if [[ -n $EAUTORECONF ]]; then
		AT_M4DIR=${EPREFIX}/usr/share/xfce4/dev-tools/m4macros eautoreconf
	else
		elibtoolize
	fi
}
+
+# @FUNCTION: xfconf_src_configure
+# @DESCRIPTION:
+# Run econf with opts from the XFCONF array
xfconf_src_configure() {
	debug-print-function ${FUNCNAME} "$@"

	# Live checkouts need maintainer mode to regenerate build files.
	if [[ -n $_xfconf_live ]]; then
		XFCONF+=( --enable-maintainer-mode )
	fi
	econf "${XFCONF[@]}"
}
+
+# @FUNCTION: xfconf_src_install
+# @DESCRIPTION:
+# Run emake install to DESTDIR, einstalldocs to process DOCS and
+# prune_libtool_files --all to always remove libtool files (.la)
xfconf_src_install() {
	debug-print-function ${FUNCNAME} "$@"

	# FIXME: live checkouts ship no ChangeLog; create an empty one so
	# the install step has something to work with.
	[[ -n $_xfconf_live && ! -e ChangeLog ]] && touch ChangeLog

	emake DESTDIR="${D}" "$@" install
	einstalldocs
	prune_libtool_files --all
}
+
+# @FUNCTION: xfconf_pkg_preinst
+# @DESCRIPTION:
+# Run gnome2_icon_savelist
xfconf_pkg_preinst() {
	debug-print-function ${FUNCNAME} "$@"

	# Remember the currently installed icons for postinst/postrm.
	gnome2_icon_savelist
}
+
+# @FUNCTION: xfconf_pkg_postinst
+# @DESCRIPTION:
+# Run fdo-mime_{desktop,mime}_database_update and gnome2_icon_cache_update
xfconf_pkg_postinst() {
	debug-print-function ${FUNCNAME} "$@"

	# Refresh the desktop/MIME databases and the icon cache.
	fdo-mime_desktop_database_update
	fdo-mime_mime_database_update
	gnome2_icon_cache_update
}
+
+# @FUNCTION: xfconf_pkg_postrm
+# @DESCRIPTION:
+# Run fdo-mime_{desktop,mime}_database_update and gnome2_icon_cache_update
xfconf_pkg_postrm() {
	debug-print-function ${FUNCNAME} "$@"

	# Same refresh as postinst: desktop/MIME databases and icon cache.
	fdo-mime_desktop_database_update
	fdo-mime_mime_database_update
	gnome2_icon_cache_update
}
diff --git a/eclass/xorg-2.eclass b/eclass/xorg-2.eclass
new file mode 100644
index 000000000000..91b83aea0eed
--- /dev/null
+++ b/eclass/xorg-2.eclass
@@ -0,0 +1,605 @@
+# Copyright 1999-2015 Gentoo Foundation
+# Distributed under the terms of the GNU General Public License v2
+# $Id$
+
+# @ECLASS: xorg-2.eclass
+# @MAINTAINER:
+# x11@gentoo.org
+# @AUTHOR:
+# Author: Tomáš Chvátal <scarabeus@gentoo.org>
+# Author: Donnie Berkholz <dberkholz@gentoo.org>
+# @BLURB: Reduces code duplication in the modularized X11 ebuilds.
+# @DESCRIPTION:
+# This eclass makes trivial X ebuilds possible for apps, fonts, drivers,
+# and more. Many things that would normally be done in various functions
+# can be accessed by setting variables instead, such as patching,
+# running eautoreconf, passing options to configure and installing docs.
+#
+# All you need to do in a basic ebuild is inherit this eclass and set
+# DESCRIPTION, KEYWORDS and RDEPEND/DEPEND. If your package is hosted
+# with the other X packages, you don't need to set SRC_URI. Pretty much
+# everything else should be automatic.
+
+GIT_ECLASS=""
+if [[ ${PV} == *9999* ]]; then
+ GIT_ECLASS="git-r3"
+ XORG_EAUTORECONF="yes"
+fi
+
+# If we're a font package, but not the font.alias one
+FONT_ECLASS=""
+if [[ ${PN} == font* \
+ && ${CATEGORY} = media-fonts \
+ && ${PN} != font-alias \
+ && ${PN} != font-util ]]; then
+ # Activate font code in the rest of the eclass
+ FONT="yes"
+ FONT_ECLASS="font"
+fi
+
+# @ECLASS-VARIABLE: XORG_MULTILIB
+# @DESCRIPTION:
+# If set to 'yes', the multilib support for package will be enabled. Set
+# before inheriting this eclass.
+: ${XORG_MULTILIB:="no"}
+
+# we need to inherit autotools first to get the deps
+inherit autotools autotools-utils eutils libtool multilib toolchain-funcs \
+ flag-o-matic ${FONT_ECLASS} ${GIT_ECLASS}
+
+if [[ ${XORG_MULTILIB} == yes ]]; then
+ inherit autotools-multilib
+fi
+
+EXPORTED_FUNCTIONS="src_unpack src_compile src_install pkg_postinst pkg_postrm"
+case "${EAPI:-0}" in
+ 3|4|5) EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare src_configure" ;;
+ *) die "EAPI=${EAPI} is not supported" ;;
+esac
+
+# exports must be ALWAYS after inherit
+EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS}
+
+IUSE=""
+HOMEPAGE="http://xorg.freedesktop.org/"
+
+# @ECLASS-VARIABLE: XORG_EAUTORECONF
+# @DESCRIPTION:
+# If set to 'yes' and configure.ac exists, eautoreconf will run. Set
+# before inheriting this eclass.
+: ${XORG_EAUTORECONF:="no"}
+
+# @ECLASS-VARIABLE: XORG_BASE_INDIVIDUAL_URI
+# @DESCRIPTION:
+# Set up SRC_URI for individual modular releases. If set to an empty
+# string, no SRC_URI will be provided by the eclass.
+: ${XORG_BASE_INDIVIDUAL_URI="http://xorg.freedesktop.org/releases/individual"}
+
+# @ECLASS-VARIABLE: XORG_MODULE
+# @DESCRIPTION:
+# The subdirectory to download source from. Possible settings are app,
+# doc, data, util, driver, font, lib, proto, xserver. Set above the
+# inherit to override the default autoconfigured module.
+if [[ -z ${XORG_MODULE} ]]; then
+ case ${CATEGORY} in
+ app-doc) XORG_MODULE=doc/ ;;
+ media-fonts) XORG_MODULE=font/ ;;
+ x11-apps|x11-wm) XORG_MODULE=app/ ;;
+ x11-misc|x11-themes) XORG_MODULE=util/ ;;
+ x11-base) XORG_MODULE=xserver/ ;;
+ x11-drivers) XORG_MODULE=driver/ ;;
+ x11-proto) XORG_MODULE=proto/ ;;
+ x11-libs) XORG_MODULE=lib/ ;;
+ *) XORG_MODULE= ;;
+ esac
+fi
+
+# @ECLASS-VARIABLE: XORG_PACKAGE_NAME
+# @DESCRIPTION:
+# For git checkout the git repository might differ from package name.
+# This variable can be used for proper directory specification
+: ${XORG_PACKAGE_NAME:=${PN}}
+
+if [[ -n ${GIT_ECLASS} ]]; then
+ : ${EGIT_REPO_URI:="git://anongit.freedesktop.org/xorg/${XORG_MODULE}${XORG_PACKAGE_NAME} http://anongit.freedesktop.org/git/xorg/${XORG_MODULE}${XORG_PACKAGE_NAME}"}
+elif [[ -n ${XORG_BASE_INDIVIDUAL_URI} ]]; then
+ SRC_URI="${XORG_BASE_INDIVIDUAL_URI}/${XORG_MODULE}${P}.tar.bz2"
+fi
+
+: ${SLOT:=0}
+
+# Set the license for the package. This can be overridden by setting
+# LICENSE after the inherit. Nearly all FreeDesktop-hosted X packages
+# are under the MIT license. (This is what Red Hat does in their rpms)
+: ${LICENSE:=MIT}
+
+# Set up autotools shared dependencies
+# Remember that all versions here MUST be stable
+XORG_EAUTORECONF_ARCHES="x86-interix ppc-aix x86-winnt"
+EAUTORECONF_DEPEND+="
+ >=sys-devel/libtool-2.2.6a
+ sys-devel/m4"
+if [[ ${PN} != util-macros ]] ; then
+ EAUTORECONF_DEPEND+=" >=x11-misc/util-macros-1.18"
+ # Required even by xorg-server
+ [[ ${PN} == "font-util" ]] || EAUTORECONF_DEPEND+=" >=media-fonts/font-util-1.2.0"
+fi
+WANT_AUTOCONF="latest"
+WANT_AUTOMAKE="latest"
+for arch in ${XORG_EAUTORECONF_ARCHES}; do
+ EAUTORECONF_DEPENDS+=" ${arch}? ( ${EAUTORECONF_DEPEND} )"
+done
+DEPEND+=" ${EAUTORECONF_DEPENDS}"
+[[ ${XORG_EAUTORECONF} != no ]] && DEPEND+=" ${EAUTORECONF_DEPEND}"
+unset EAUTORECONF_DEPENDS
+unset EAUTORECONF_DEPEND
+
+if [[ ${FONT} == yes ]]; then
+ RDEPEND+=" media-fonts/encodings
+ x11-apps/mkfontscale
+ x11-apps/mkfontdir"
+ PDEPEND+=" media-fonts/font-alias"
+ DEPEND+=" >=media-fonts/font-util-1.2.0"
+
+ # @ECLASS-VARIABLE: FONT_DIR
+ # @DESCRIPTION:
+ # If you're creating a font package and the suffix of PN is not equal to
+ # the subdirectory of /usr/share/fonts/ it should install into, set
+ # FONT_DIR to that directory or directories. Set before inheriting this
+ # eclass.
+ [[ -z ${FONT_DIR} ]] && FONT_DIR=${PN##*-}
+
+ # Fix case of font directories
+ FONT_DIR=${FONT_DIR/ttf/TTF}
+ FONT_DIR=${FONT_DIR/otf/OTF}
+ FONT_DIR=${FONT_DIR/type1/Type1}
+ FONT_DIR=${FONT_DIR/speedo/Speedo}
+
+ # Set up configure options, wrapped so ebuilds can override if need be
+ [[ -z ${FONT_OPTIONS} ]] && FONT_OPTIONS="--with-fontdir=\"${EPREFIX}/usr/share/fonts/${FONT_DIR}\""
+
+ [[ ${PN##*-} = misc || ${PN##*-} = 75dpi || ${PN##*-} = 100dpi || ${PN##*-} = cyrillic ]] && IUSE+=" nls"
+fi
+
+# If we're a driver package, then enable DRIVER case
+[[ ${PN} == xf86-video-* || ${PN} == xf86-input-* ]] && DRIVER="yes"
+
+# @ECLASS-VARIABLE: XORG_STATIC
+# @DESCRIPTION:
+# Enables static-libs useflag. Set to no, if your package gets:
+#
+# QA: configure: WARNING: unrecognized options: --disable-static
+: ${XORG_STATIC:="yes"}
+
+# Add static-libs useflag where useful.
+if [[ ${XORG_STATIC} == yes \
+ && ${FONT} != yes \
+ && ${CATEGORY} != app-doc \
+ && ${CATEGORY} != x11-apps \
+ && ${CATEGORY} != x11-proto \
+ && ${CATEGORY} != x11-drivers \
+ && ${CATEGORY} != media-fonts \
+ && ${PN} != util-macros \
+ && ${PN} != xbitmaps \
+ && ${PN} != xorg-cf-files \
+ && ${PN/xcursor} = ${PN} ]]; then
+ IUSE+=" static-libs"
+fi
+
+DEPEND+=" virtual/pkgconfig"
+
+# @ECLASS-VARIABLE: XORG_DRI
+# @DESCRIPTION:
+# Possible values are "always" or the value of the useflag DRI capabilities
+# are required for. Default value is "no"
+#
+# Eg. XORG_DRI="opengl" will pull all DRI-dependent deps for the opengl useflag
+: ${XORG_DRI:="no"}
+
# Dependencies shared by every DRI-capable driver.
DRI_COMMON_DEPEND="
	x11-base/xorg-server[-minimal]
	x11-libs/libdrm
"
DRI_DEPEND="
	x11-proto/xf86driproto
	x11-proto/glproto
	x11-proto/dri2proto
"
# Wire the DRI deps up unconditionally ("always"), behind a USE flag
# (any other value), or not at all ("no").
case ${XORG_DRI} in
	no)
		;;
	always)
		COMMON_DEPEND+=" ${DRI_COMMON_DEPEND}"
		DEPEND+=" ${DRI_DEPEND}"
		;;
	*)
		COMMON_DEPEND+=" ${XORG_DRI}? ( ${DRI_COMMON_DEPEND} )"
		DEPEND+=" ${XORG_DRI}? ( ${DRI_DEPEND} )"
		IUSE+=" ${XORG_DRI}"
		;;
esac
unset DRI_DEPEND
# Fixed typo: this previously read "unset DRI_COMMONDEPEND", which left
# the DRI_COMMON_DEPEND helper variable set in the ebuild environment.
unset DRI_COMMON_DEPEND
+
+if [[ -n "${DRIVER}" ]]; then
+ COMMON_DEPEND+="
+ x11-base/xorg-server[xorg]
+ "
+fi
+if [[ -n "${DRIVER}" && ${PN} == xf86-input-* ]]; then
+ DEPEND+="
+ x11-proto/inputproto
+ x11-proto/kbproto
+ x11-proto/xproto
+ "
+fi
+if [[ -n "${DRIVER}" && ${PN} == xf86-video-* ]]; then
+ COMMON_DEPEND+="
+ x11-libs/libpciaccess
+ "
+	# we also need some protos and libs in all cases
+ DEPEND+="
+ x11-proto/fontsproto
+ x11-proto/randrproto
+ x11-proto/renderproto
+ x11-proto/videoproto
+ x11-proto/xextproto
+ x11-proto/xineramaproto
+ x11-proto/xproto
+ "
+fi
+
+# @ECLASS-VARIABLE: XORG_DOC
+# @DESCRIPTION:
+# Possible values are "always" or the value of the useflag doc packages
+# are required for. Default value is "no"
+#
+# Eg. XORG_DOC="manual" will pull all doc-dependent deps for the manual useflag
+: ${XORG_DOC:="no"}
+
+DOC_DEPEND="
+ doc? (
+ app-text/asciidoc
+ app-text/xmlto
+ app-doc/doxygen
+ app-text/docbook-xml-dtd:4.1.2
+ app-text/docbook-xml-dtd:4.2
+ app-text/docbook-xml-dtd:4.3
+ )
+"
+case ${XORG_DOC} in
+ no)
+ ;;
+ always)
+ DEPEND+=" ${DOC_DEPEND}"
+ ;;
+ *)
+ DEPEND+=" ${XORG_DOC}? ( ${DOC_DEPEND} )"
+ IUSE+=" ${XORG_DOC}"
+ ;;
+esac
+unset DOC_DEPEND
+
+# @ECLASS-VARIABLE: XORG_MODULE_REBUILD
+# @DESCRIPTION:
+# Describes whether a package contains modules that need to be rebuilt on
+# xorg-server upgrade. This has an effect only since EAPI=5.
+# Possible values are "yes" or "no". Default value is "yes" for packages which
+# are recognized as DRIVER by this eclass and "no" for all other packages.
+if [[ "${DRIVER}" == yes ]]; then
+ : ${XORG_MODULE_REBUILD:="yes"}
+else
+ : ${XORG_MODULE_REBUILD:="no"}
+fi
+
+if [[ ${XORG_MODULE_REBUILD} == yes ]]; then
+ case ${EAPI} in
+ 3|4)
+ ;;
+ *)
+ RDEPEND+=" x11-base/xorg-server:="
+ ;;
+ esac
+fi
+
+DEPEND+=" ${COMMON_DEPEND}"
+RDEPEND+=" ${COMMON_DEPEND}"
+unset COMMON_DEPEND
+
+if [[ ${XORG_MULTILIB} == yes ]]; then
+ RDEPEND+=" abi_x86_32? ( !app-emulation/emul-linux-x86-xlibs[-abi_x86_32(-)] )"
+fi
+
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: DEPEND=${DEPEND}"
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: RDEPEND=${RDEPEND}"
+debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: PDEPEND=${PDEPEND}"
+
+# @FUNCTION: xorg-2_pkg_setup
+# @DESCRIPTION:
+# Setup prefix compat
xorg-2_pkg_setup() {
	debug-print-function ${FUNCNAME} "$@"

	# Only font packages need extra setup (delegated to font.eclass).
	[[ ${FONT} == yes ]] && font_pkg_setup "$@"
}
+
+# @FUNCTION: xorg-2_src_unpack
+# @DESCRIPTION:
+# Simply unpack source code.
xorg-2_src_unpack() {
	debug-print-function ${FUNCNAME} "$@"

	# Regular releases unpack the tarball; live ebuilds fetch from git.
	if [[ -z ${GIT_ECLASS} ]]; then
		unpack ${A}
	else
		git-r3_src_unpack
	fi

	[[ -n ${FONT_OPTIONS} ]] && einfo "Detected font directory: ${FONT_DIR}"
}
+
+# @FUNCTION: xorg-2_patch_source
+# @DESCRIPTION:
+# Apply all patches
xorg-2_patch_source() {
	debug-print-function ${FUNCNAME} "$@"

	# Bulk patching via epatch: patches live in ${WORKDIR}/patch and
	# default to the "patch" suffix (see epatch in eutils.eclass).
	: ${EPATCH_SUFFIX:=patch}

	[[ -d "${EPATCH_SOURCE}" ]] && epatch
}
+
+# @FUNCTION: xorg-2_reconf_source
+# @DESCRIPTION:
+# Run eautoreconf if necessary, and run elibtoolize.
xorg-2_reconf_source() {
	debug-print-function ${FUNCNAME} "$@"

	# Decide whether a full eautoreconf is wanted; actual execution is
	# left to autotools-utils via AUTOTOOLS_AUTORECONF.
	local need=
	case ${CHOST} in
		*-interix*|*-aix*|*-winnt*)
			# these hosts always need a full eautoreconf
			need=always
			;;
		*)
			# elsewhere only on request; elibtoolize (for BSD) is
			# handled downstream regardless
			[[ ${XORG_EAUTORECONF} != no ]] && need=requested
			;;
	esac
	[[ -n ${need} && ( -e "./configure.ac" || -e "./configure.in" ) ]] \
		&& AUTOTOOLS_AUTORECONF=1
}
+
+# @FUNCTION: xorg-2_src_prepare
+# @DESCRIPTION:
+# Prepare a package after unpacking, performing all X-related tasks.
xorg-2_src_prepare() {
	debug-print-function ${FUNCNAME} "$@"

	# Patch, decide on autoreconf, then hand off to autotools-utils.
	xorg-2_patch_source
	xorg-2_reconf_source
	autotools-utils_src_prepare "$@"
}
+
+# @FUNCTION: xorg-2_font_configure
+# @DESCRIPTION:
+# If a font package, perform any necessary configuration steps
xorg-2_font_configure() {
	debug-print-function ${FUNCNAME} "$@"

	# Only relevant when the ebuild exposes an "nls" USE flag and the
	# user disabled it: strip the extra encodings from the font build.
	if has nls ${IUSE//+} && ! use nls; then
		if grep -q -s "disable-all-encodings" ${ECONF_SOURCE:-.}/configure; then
			# Newer font configure scripts have one switch for this.
			FONT_OPTIONS+="
				--disable-all-encodings"
		else
			# Older ones need every encoding disabled individually.
			FONT_OPTIONS+="
				--disable-iso8859-2
				--disable-iso8859-3
				--disable-iso8859-4
				--disable-iso8859-5
				--disable-iso8859-6
				--disable-iso8859-7
				--disable-iso8859-8
				--disable-iso8859-9
				--disable-iso8859-10
				--disable-iso8859-11
				--disable-iso8859-12
				--disable-iso8859-13
				--disable-iso8859-14
				--disable-iso8859-15
				--disable-iso8859-16
				--disable-jisx0201
				--disable-koi8-r"
		fi
	fi
}
+
+# @FUNCTION: xorg-2_flags_setup
+# @DESCRIPTION:
+# Set up CFLAGS for a debug build
xorg-2_flags_setup() {
	debug-print-function ${FUNCNAME} "$@"

	# Win32 require special define
	[[ ${CHOST} == *-winnt* ]] && append-cppflags -DWIN32 -D__STDC__
	# hardened ldflags: server and drivers get lazy binding
	[[ ${PN} = xorg-server || -n ${DRIVER} ]] && append-ldflags -Wl,-z,lazy

	# Quite a few libraries fail at runtime with -Bdirect; drop it when
	# the package builds static libs.
	if has static-libs ${IUSE//+}; then
		filter-flags -Wl,-Bdirect
		filter-ldflags -Bdirect
		filter-ldflags -Wl,-Bdirect
	fi
}
+
+# @FUNCTION: xorg-2_src_configure
+# @DESCRIPTION:
+# Perform any necessary pre-configuration steps, then run configure
xorg-2_src_configure() {
	debug-print-function ${FUNCNAME} "$@"

	xorg-2_flags_setup

	# @VARIABLE: XORG_CONFIGURE_OPTIONS
	# @DESCRIPTION:
	# Array of an additional options to pass to configure.
	# @DEFAULT_UNSET
	if [[ $(declare -p XORG_CONFIGURE_OPTIONS 2>&-) != "declare -a"* ]]; then
		# fallback to CONFIGURE_OPTIONS, deprecated.
		if [[ -n "${CONFIGURE_OPTIONS}" ]]; then
			eqawarn "CONFIGURE_OPTIONS are deprecated. Please migrate to XORG_CONFIGURE_OPTIONS"
			eqawarn "to preserve namespace."
		fi

		local xorgconfadd=(${CONFIGURE_OPTIONS} ${XORG_CONFIGURE_OPTIONS})
	else
		local xorgconfadd=("${XORG_CONFIGURE_OPTIONS[@]}")
	fi

	[[ -n "${FONT}" ]] && xorg-2_font_configure

	# Check if package supports disabling of dep tracking
	# Fixes warnings like:
	#    WARNING: unrecognized options: --disable-dependency-tracking
	# (Bug fix: the grep pattern previously read the misspelled
	# "disable-depencency-tracking", so it never matched and the
	# option was never passed.)
	if grep -q -s "disable-dependency-tracking" ${ECONF_SOURCE:-.}/configure; then
		local dep_track="--disable-dependency-tracking"
	fi

	local myeconfargs=(
		${dep_track}
		${FONT_OPTIONS}
		"${xorgconfadd[@]}"
	)

	if [[ ${XORG_MULTILIB} == yes ]]; then
		autotools-multilib_src_configure "$@"
	else
		autotools-utils_src_configure "$@"
	fi
}
+
+# @FUNCTION: xorg-2_src_compile
+# @DESCRIPTION:
+# Compile a package, performing all X-related tasks.
xorg-2_src_compile() {
	debug-print-function ${FUNCNAME} "$@"

	# Delegate to the multilib-aware wrapper when requested.
	if [[ ${XORG_MULTILIB} == yes ]]; then
		autotools-multilib_src_compile "$@"
	else
		autotools-utils_src_compile "$@"
	fi
}
+
+# @FUNCTION: xorg-2_src_install
+# @DESCRIPTION:
+# Install a built package to ${D}, performing any necessary steps.
+# Creates a ChangeLog from git if using live ebuilds.
xorg-2_src_install() {
	debug-print-function ${FUNCNAME} "$@"

	local install_args=( docdir="${EPREFIX}/usr/share/doc/${PF}" )

	# NOTE(review): proto packages appear to use a "<name>docdir" make
	# variable (e.g. "xextdocdir" for xextproto); point it at the same
	# doc location — confirm against the proto packages' build systems.
	if [[ ${CATEGORY} == x11-proto ]]; then
		install_args+=(
			${PN/proto/}docdir="${EPREFIX}/usr/share/doc/${PF}"
		)
	fi

	if [[ ${XORG_MULTILIB} == yes ]]; then
		autotools-multilib_src_install "${install_args[@]}"
	else
		autotools-utils_src_install "${install_args[@]}"
	fi

	# Live ebuilds: generate a ChangeLog from the git history of the
	# stored checkout so it can be installed below.
	if [[ -n ${GIT_ECLASS} ]]; then
		pushd "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}" > /dev/null
		git log ${EGIT_COMMIT} > "${S}"/ChangeLog
		popd > /dev/null
	fi

	if [[ -e "${S}"/ChangeLog ]]; then
		dodoc "${S}"/ChangeLog || die "dodoc failed"
	fi

	# Don't install libtool archives (even for modules)
	prune_libtool_files --all

	# Font packages drop generated index files; rebuilt in pkg_postinst.
	[[ -n ${FONT} ]] && remove_font_metadata
}
+
+# @FUNCTION: xorg-2_pkg_postinst
+# @DESCRIPTION:
+# Run X-specific post-installation tasks on the live filesystem. The
+# only task right now is some setup for font packages.
xorg-2_pkg_postinst() {
	debug-print-function ${FUNCNAME} "$@"

	# Font packages regenerate the server-side font indices on the
	# live filesystem.
	if [[ -n ${FONT} ]]; then
		create_fonts_scale
		create_fonts_dir
		font_pkg_postinst "$@"
	fi
}
+
+# @FUNCTION: xorg-2_pkg_postrm
+# @DESCRIPTION:
+# Run X-specific post-removal tasks on the live filesystem. The only
+# task right now is some cleanup for font packages.
xorg-2_pkg_postrm() {
	debug-print-function ${FUNCNAME} "$@"

	if [[ -n ${FONT} ]]; then
		# if we're doing an upgrade, postinst will do
		if [[ ${EAPI} -lt 4 || -z ${REPLACED_BY_VERSION} ]]; then
			create_fonts_scale
			create_fonts_dir
			font_pkg_postrm "$@"
		fi
	fi
}
+
+# @FUNCTION: remove_font_metadata
+# @DESCRIPTION:
+# Don't let the package install generated font files that may overlap
+# with other packages. Instead, they're generated in pkg_postinst().
remove_font_metadata() {
	debug-print-function ${FUNCNAME} "$@"

	# Speedo and CID fonts keep their metadata; everything else gets
	# the generated index files stripped (rebuilt in pkg_postinst).
	[[ ${FONT_DIR} == Speedo || ${FONT_DIR} == CID ]] && return

	einfo "Removing font metadata"
	rm -rf "${ED}"/usr/share/fonts/${FONT_DIR}/fonts.{scale,dir,cache-1}
}
+
+# @FUNCTION: create_fonts_scale
+# @DESCRIPTION:
+# Create fonts.scale file, used by the old server-side fonts subsystem.
create_fonts_scale() {
	debug-print-function ${FUNCNAME} "$@"

	# Speedo/CID font formats do not use fonts.scale; skip them.
	[[ ${FONT_DIR} == Speedo || ${FONT_DIR} == CID ]] && return

	ebegin "Generating fonts.scale"
	mkfontscale \
		-a "${EROOT}/usr/share/fonts/encodings/encodings.dir" \
		-- "${EROOT}/usr/share/fonts/${FONT_DIR}"
	eend $?
}
+
+# @FUNCTION: create_fonts_dir
+# @DESCRIPTION:
+# Create fonts.dir file, used by the old server-side fonts subsystem.
create_fonts_dir() {
	debug-print-function ${FUNCNAME} "$@"

	# Index the font directory against both encodings directories.
	ebegin "Generating fonts.dir"
	mkfontdir \
		-e "${EROOT}/usr/share/fonts/encodings" \
		-e "${EROOT}/usr/share/fonts/encodings/large" \
		-- "${EROOT}/usr/share/fonts/${FONT_DIR}"
	eend $?
}