[csw-devel] SF.net SVN: gar:[11764] csw/mgar/gar/v2-fortran
gadavis at users.sourceforge.net
Thu Dec 2 18:46:17 CET 2010
Revision: 11764
http://gar.svn.sourceforge.net/gar/?rev=11764&view=rev
Author: gadavis
Date: 2010-12-02 17:46:17 +0000 (Thu, 02 Dec 2010)
Log Message:
-----------
Merging changes from v2 into v2-fortran 10989:11763
Modified Paths:
--------------
csw/mgar/gar/v2-fortran/bin/analyze_module_results.py
csw/mgar/gar/v2-fortran/bin/checkpkg
csw/mgar/gar/v2-fortran/bin/checkpkg_collect_stats.py
csw/mgar/gar/v2-fortran/bin/upstream_watch
csw/mgar/gar/v2-fortran/categories/cpan/category.mk
csw/mgar/gar/v2-fortran/category.mk
csw/mgar/gar/v2-fortran/gar.conf.mk
csw/mgar/gar/v2-fortran/gar.lib.mk
csw/mgar/gar/v2-fortran/gar.mk
csw/mgar/gar/v2-fortran/gar.pkg.mk
csw/mgar/gar/v2-fortran/lib/python/README
csw/mgar/gar/v2-fortran/lib/python/checkpkg.py
csw/mgar/gar/v2-fortran/lib/python/checkpkg_test.py
csw/mgar/gar/v2-fortran/lib/python/dependency_checks.py
csw/mgar/gar/v2-fortran/lib/python/dependency_checks_test.py
csw/mgar/gar/v2-fortran/lib/python/opencsw.py
csw/mgar/gar/v2-fortran/lib/python/opencsw_test.py
csw/mgar/gar/v2-fortran/lib/python/package_checks.py
csw/mgar/gar/v2-fortran/lib/python/package_checks_test.py
csw/mgar/gar/v2-fortran/lib/python/pkg-review-template.html
csw/mgar/gar/v2-fortran/lib/python/submit_to_newpkgs.py
csw/mgar/gar/v2-fortran/lib/python/tag.py
csw/mgar/gar/v2-fortran/lib/python/tag_test.py
csw/mgar/gar/v2-fortran/lib/python/testdata/rpaths.py
csw/mgar/gar/v2-fortran/pkglib/csw_cpan_dyngspec.gspec
csw/mgar/gar/v2-fortran/tests/run_tests.py
Added Paths:
-----------
csw/mgar/gar/v2-fortran/.gitignore
csw/mgar/gar/v2-fortran/bin/ap2mod_build_scripts
csw/mgar/gar/v2-fortran/lib/python/catalog.py
csw/mgar/gar/v2-fortran/lib/python/catalog_test.py
csw/mgar/gar/v2-fortran/lib/python/compare_pkgs.py
csw/mgar/gar/v2-fortran/lib/python/compare_pkgs_test.py
csw/mgar/gar/v2-fortran/lib/python/database.py
csw/mgar/gar/v2-fortran/lib/python/inspective_package.py
csw/mgar/gar/v2-fortran/lib/python/package.py
csw/mgar/gar/v2-fortran/lib/python/package_stats.py
csw/mgar/gar/v2-fortran/lib/python/package_stats_test.py
csw/mgar/gar/v2-fortran/lib/python/package_test.py
csw/mgar/gar/v2-fortran/lib/python/patch_package.py
csw/mgar/gar/v2-fortran/lib/python/sharedlib_utils.py
csw/mgar/gar/v2-fortran/lib/python/sharedlib_utils_test.py
csw/mgar/gar/v2-fortran/lib/python/submit_to_newpkgs_test.py
csw/mgar/gar/v2-fortran/lib/python/testdata/bdb48_stats.py
csw/mgar/gar/v2-fortran/lib/python/testdata/libnet_stats.py
csw/mgar/gar/v2-fortran/lib/sh/run_full_cat.sh
Property Changed:
----------------
csw/mgar/gar/v2-fortran/
csw/mgar/gar/v2-fortran/lib/python/tag_test.py
csw/mgar/gar/v2-fortran/pkglib/csw/depend
Property changes on: csw/mgar/gar/v2-fortran
___________________________________________________________________
Modified: svn:mergeinfo
- /csw/mgar/gar/v2:4936-6678,10928-10988
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-sqlite:10434-10449
+ /csw/mgar/gar/v2:4936-6678,10928-10988,10990-11763
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-sqlite:10434-10449
Copied: csw/mgar/gar/v2-fortran/.gitignore (from rev 11763, csw/mgar/gar/v2/.gitignore)
===================================================================
--- csw/mgar/gar/v2-fortran/.gitignore (rev 0)
+++ csw/mgar/gar/v2-fortran/.gitignore 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,2 @@
+*.pyc
+*.swp
Modified: csw/mgar/gar/v2-fortran/bin/analyze_module_results.py
===================================================================
--- csw/mgar/gar/v2-fortran/bin/analyze_module_results.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/bin/analyze_module_results.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -17,6 +17,7 @@
sys.path.append(os.path.join(*path_list))
import checkpkg
import overrides
+import package_stats
BEFORE_OVERRIDES = """If any of the reported errors were false positives, you
can override them pasting the lines below to the GAR recipe."""
@@ -48,7 +49,7 @@
# It might be a good idea to store the error tags in the database and
# eliminate the need to access the directory with the error tag files.
- pkgstats = checkpkg.StatsListFromCatalog(filenames, options.catalog)
+ pkgstats = package_stats.StatsListFromCatalog(filenames, options.catalog)
overrides_list = [pkg.GetSavedOverrides() for pkg in pkgstats]
override_list = reduce(operator.add, overrides_list)
error_tags = reduce(operator.add, [stat.GetSavedErrorTags() for stat in pkgstats])
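
The override lines referred to by BEFORE_OVERRIDES are plain make assignments pasted into the package's build recipe. A sketch of what such a line looks like (package and tag names are hypothetical, not taken from this commit):

  # Silence a checkpkg error that was judged to be a false positive:
  CHECKPKG_OVERRIDES_CSWfoo += surplus-dependency|CSWbar
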
Copied: csw/mgar/gar/v2-fortran/bin/ap2mod_build_scripts (from rev 11763, csw/mgar/gar/v2/bin/ap2mod_build_scripts)
===================================================================
--- csw/mgar/gar/v2-fortran/bin/ap2mod_build_scripts (rev 0)
+++ csw/mgar/gar/v2-fortran/bin/ap2mod_build_scripts 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,71 @@
+#!/bin/bash
+
+MERGEDIR=$1; shift
+AP2MODS=opt/csw/apache2/ap2mod
+
+cd $MERGEDIR/
+
+[ -d $AP2MODS ] || mkdir -p $AP2MODS
+
+for a in $*; do
+ for f in $(find . -name "*$a*"); do
+ modfile=$(basename $f)
+ modname=$(echo $modfile | sed 's/mod_//; s/\.so$//')
+ ap2modfile=$AP2MODS/$modname
+ cat <<EOF > $ap2modfile
+
+PIR=\${PKG_INSTALL_ROOT:-'/'}
+AP2_PREFIX=/opt/csw/apache2
+AP2_BINDIR=\$AP2_PREFIX/sbin
+AP2_LIBEXEC=\$AP2_PREFIX/libexec
+AP2_CONFDIR=/etc/opt/csw/apache2
+AP2_CONFIG=\$AP2_CONFDIR/httpd.conf
+AP2_APXS=\$AP2_BINDIR/apxs
+
+if [ "\$1" = install ]; then
+
+echo "File created by \$PKGINST during install. Please do not remove." > \$PIR/$ap2modfile
+
+# Source csw.conf, if it exists
+if [ -f \$PIR/opt/csw/etc/csw.conf ] ; then
+ . \$PIR/opt/csw/etc/csw.conf
+fi
+if [ -f \$PIR/etc/opt/csw/csw.conf ] ; then
+ . \$PIR/etc/opt/csw/csw.conf
+fi
+
+# Modules will be enabled by default, but a global kill switch
+# is checked, followed by a module-specific value. Thus,
+# either ap2_enable_modules or ap2_enable_suexec (for example)
+# could disable activation of the suexec module.
+#
+# This value is passed to apxs (-a: add, enable; -A: add,
+# disable)
+enmod=-a
+
+if [ "\$ap2_enable_modules" = "no" ]; then
+ enmod=-A
+fi
+
+if [ "\$ap2_enable_$modname" = "no" ]; then
+ enmod=-A
+elif [ "$ap2_mod_specific" = "yes" ]; then
+ enmod=-a
+fi
+
+if [ -f "\$PIR/\$AP2_CONFIG" -a -x "\$PIR/\$AP2_APXS" ]; then
+ # Add the module module, optionally enabling it by default
+ chroot \$PIR \$AP2_APXS -e \$enmod -n $modname $modfile
+fi
+
+else
+
+rm \$PIR/$ap2modfile
+
+if [ -f "\$PIR/\$AP2_CONFIG" -a -x "\$PIR/\$AP2_APXS" ]; then
+ chroot \$PIR \$AP2_APXS -e -A -n $modname $modfile
+fi
+fi
+EOF
+ done
+done
Modified: csw/mgar/gar/v2-fortran/bin/checkpkg
===================================================================
--- csw/mgar/gar/v2-fortran/bin/checkpkg 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/bin/checkpkg 2010-12-02 17:46:17 UTC (rev 11764)
@@ -236,7 +236,7 @@
# "checkpkg-".
test_suite_ok=1
-checkpkg_module_dir=${command_basedir}/../lib/checkpkg.d
+checkpkg_module_dir="${command_basedir}/../lib/checkpkg.d"
checkpkg_module_tag="checkpkg-"
checkpkg_stats_basedir="${HOME}/.checkpkg/stats"
@@ -317,15 +317,14 @@
if [[ "${ANALYZE}" -eq 1 ]]; then
# Collecting errors and applying the overrides.
# This has to use the original files.
- echo "Applying the overrides and analyzing the results."
${command_basedir}/analyze_module_results.py \
${catalog_options} \
${quiet_options} \
"$@"
if [[ "$?" -ne 0 ]]; then
- errmsg "${RED}Modular checks are reporting errors.${COLOR_RESET}"
+ errmsg "${RED}Checkpkg has reported errors.${COLOR_RESET}"
else
- print "${GREEN}All modular tests were successful.${COLOR_RESET}"
+ print "${GREEN}Checkpkg reports no errors.${COLOR_RESET}"
fi
else
echo "Skipping result analysis."
Modified: csw/mgar/gar/v2-fortran/bin/checkpkg_collect_stats.py
===================================================================
--- csw/mgar/gar/v2-fortran/bin/checkpkg_collect_stats.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/bin/checkpkg_collect_stats.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -21,6 +21,7 @@
sys.path.append(os.path.join(*path_list))
import checkpkg
import opencsw
+import package_stats
def main():
parser = optparse.OptionParser()
@@ -43,7 +44,7 @@
args_display = args_display[:5] + ["...more..."]
file_list = args
logging.debug("Processing: %s, please be patient", args_display)
- stats_list = checkpkg.StatsListFromCatalog(
+ stats_list = package_stats.StatsListFromCatalog(
file_list, options.catalog, options.debug)
# Reversing the item order in the list, so that the pop() method can be used
# to get packages, and the order of processing still matches the one in the
Modified: csw/mgar/gar/v2-fortran/bin/upstream_watch
===================================================================
--- csw/mgar/gar/v2-fortran/bin/upstream_watch 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/bin/upstream_watch 2010-12-02 17:46:17 UTC (rev 11764)
@@ -89,8 +89,10 @@
$ftp_proxy_cmd = "set ftp:proxy $ENV{ftp_proxy};" if exists $ENV{ftp_proxy};
$http_proxy_cmd = "set http:proxy $ENV{HTTP_PROXY};" if exists $ENV{HTTP_PROXY};
$http_proxy_cmd = "set http:proxy $ENV{http_proxy};" if exists $ENV{http_proxy};
+ $https_proxy_cmd = "set https:proxy $ENV{HTTPS_PROXY};" if exists $ENV{HTTPS_PROXY};
+ $https_proxy_cmd = "set https:proxy $ENV{https_proxy};" if exists $ENV{https_proxy};
- open (FH, "lftp -q -c \"set net:timeout 30; set net:max-retries 16; $ftp_proxy_cmd $http_proxy_cmd open $url/ && ls\" 2>/dev/null |");
+ open (FH, "lftp -q -c \"set net:timeout 30; set net:max-retries 16; $ftp_proxy_cmd $http_proxy_cmd $https_proxy_cmd open $url/ && ls\" 2>/dev/null |");
while (my $line = <FH>) {
my @cols = split (/\s+/, $line);
Modified: csw/mgar/gar/v2-fortran/categories/cpan/category.mk
===================================================================
--- csw/mgar/gar/v2-fortran/categories/cpan/category.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/categories/cpan/category.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -31,7 +31,7 @@
UFILES_REGEX ?= $(GARNAME)-(\d+(?:\.\d+)*).tar.gz
USTREAM_MASTER_SITE ?= $(SPKG_SOURCEURL)
-_CATEGORY_SPKG_DESC = $(GARNAME): $(SPKG_DESC)
+$(foreach P,$(PACKAGES),$(eval _CATEGORY_SPKG_DESC_$P = $$(GARNAME): $$(or $$(SPKG_DESC_$P),$$(SPKG_DESC))))
_CATEGORY_PKGINFO = echo "PERL_MODULE_NAME=$(GARNAME)";
SPKG_SOURCEURL := $(SPKG_SOURCEURL)/$(GARNAME)
@@ -39,6 +39,8 @@
_MERGE_EXCLUDE_CATEGORY = .*/perllocal\.pod .*/\.packlist
_CATEGORY_GSPEC_INCLUDE ?= csw_cpan_dyngspec.gspec
+_CATEGORY_RUNTIME_DEP_PKGS ?= CSWperl
+
# Perl module dependencies can not be properly tracked right now
_CATEGORY_CHECKPKG_OVERRIDES = surplus-dependency
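
With _CATEGORY_RUNTIME_DEP_PKGS defaulting to CSWperl, cpan recipes no longer need to declare the interpreter dependency themselves. A minimal sketch, using a hypothetical module name:

  GARNAME = Foo-Bar
  GARVERSION = 1.23
  CATEGORIES = cpan
  AUTHOR_ID = SOMEAUTHOR
  DESCRIPTION = Hypothetical Foo::Bar Perl module
  # CSWperl is added automatically via _CATEGORY_RUNTIME_DEP_PKGS.
  include gar/category.mk
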
Modified: csw/mgar/gar/v2-fortran/category.mk
===================================================================
--- csw/mgar/gar/v2-fortran/category.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/category.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -1,9 +1,15 @@
# vim: ft=make ts=4 sw=4 noet
-# This makefile is to be included from Makefiles in each category
-# directory.
+#
+# This Makefile is the main entry point to GAR and is included by
+# each package build description. As such, the file name 'category.mk'
+# is slightly misleading and could be subject to future change.
+#
-ifeq (,$(wildcard gar/categories/$(CATEGORIES)/category.mk))
- $(error The category '$(CATEGORIES)' is invalid. Valid categories are $(patsubst gar/categories/%,%,$(wildcard gar/categories/*)))
+# Determine this file's directory, i.e. the GAR base directory
+GARDIR := $(dir $(lastword $(MAKEFILE_LIST)))
+
+ifeq (,$(wildcard $(GARDIR)/categories/$(CATEGORIES)/category.mk))
+ $(error The category '$(CATEGORIES)' is invalid. Valid categories are: $(patsubst $(GARDIR)/categories/%,%,$(wildcard $(GARDIR)/categories/*)))
endif
-include gar/categories/$(CATEGORIES)/category.mk
+include $(GARDIR)/categories/$(CATEGORIES)/category.mk
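
Since category.mk is now the single entry point, a build recipe still just includes it at the end; a minimal sketch with hypothetical values:

  GARNAME = foo
  GARVERSION = 1.0
  CATEGORIES = lib
  DESCRIPTION = Hypothetical foo library
  define BLURB
    Longer description of the hypothetical foo library.
  endef
  MASTER_SITES = http://example.com/downloads/
  DISTFILES = $(GARNAME)-$(GARVERSION).tar.gz
  include gar/category.mk

GARDIR is then derived from the location of category.mk itself via MAKEFILE_LIST, so recipes no longer depend on a hard-coded 'gar' path.
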
Modified: csw/mgar/gar/v2-fortran/gar.conf.mk
===================================================================
--- csw/mgar/gar/v2-fortran/gar.conf.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/gar.conf.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -145,6 +145,11 @@
perlcswlib ?= $(perllib)/csw
perlpackroot ?= $(perlcswlib)/auto
+# This variable is only used if you make your package relocatable
+# using ALLOW_RELOCATE; it allows you to change the default
+# directory for BASEDIR.
+RELOCATE_PREFIX ?= $(prefix)
+
# These variables are used to construct paths. If you temporarily reset the above
# variables for special install locations (like /opt/csw/bin/bdb44/) the definitions
# here make sure the binaries for the make process are still found.
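
A sketch of how a recipe could use this, with hypothetical values:

  # Make the package relocatable; BASEDIR in pkginfo will be set
  # from RELOCATE_PREFIX (any non-empty value enables it).
  ALLOW_RELOCATE = 1
  RELOCATE_PREFIX = /opt/csw
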
@@ -666,6 +671,11 @@
CPAN_MIRRORS = $(foreach S,$(CPAN_SITES),$(S)/authors/id/$(AUTHOR_ID)/)
CPAN_FIRST_MIRROR = $(firstword $(CPAN_SITES))/authors/id
+# Python Package Index
+PYPI_PROJECT ?= $(GARNAME)
+PYPI_SUBDIR = $(shell echo $(PYPI_PROJECT) | cut -c 1)
+PYPI_MIRROR = http://pypi.python.org/packages/source/$(PYPI_SUBDIR)/$(PYPI_PROJECT)/
+
# Package dir
GARPACKAGE = $(shell basename $(CURDIR))
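
A recipe fetching its sources from the Python Package Index could then be written roughly as follows (project name hypothetical):

  GARNAME = pyfoo
  GARVERSION = 0.5
  # PYPI_PROJECT only needs to be set when it differs from GARNAME.
  PYPI_PROJECT = PyFoo
  MASTER_SITES = $(PYPI_MIRROR)
  DISTFILES = $(PYPI_PROJECT)-$(GARVERSION).tar.gz
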
Modified: csw/mgar/gar/v2-fortran/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2-fortran/gar.lib.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/gar.lib.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -27,7 +27,7 @@
#################### FETCH RULES ####################
-URLS = $(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(ALLFILES_PATCHFILES)))
+URLS := $(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(ALLFILES_PATCHFILES)))
# if the caller has defined _postinstall, etc targets for a package, add
# these 'dynamic script' targets to our fetch list
@@ -339,6 +339,12 @@
@lzip -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
@$(MAKECOOKIE)
+# rule to extract files with tar and lzma
+tar-lzma-extract-%:
+ @echo " ==> Extracting $(DOWNLOADDIR)/$*"
+ @lzma -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
+ @$(MAKECOOKIE)
+
# extract compressed single files
gz-extract-%:
@echo " ==> Decompressing $(DOWNLOADDIR)/$*"
@@ -364,6 +370,12 @@
@lzip -d $(WORKDIR)/$*
@$(MAKECOOKIE)
+lzma-extract-%:
+ @echo " ==> Decompressing $(DOWNLOADDIR)/$*"
+ @cp $(DOWNLOADDIR)/$* $(WORKDIR)/
+ @lzma -d $(WORKDIR)/$*
+ @$(MAKECOOKIE)
+
# extra dependency rule for git repos, that will allow the user
# to supply an alternate target at their discretion
git-extract-%:
@@ -431,6 +443,9 @@
extract-archive-%.tar.lz: tar-lz-extract-%.tar.lz
@$(MAKECOOKIE)
+extract-archive-%.tar.lzma: tar-lzma-extract-%.tar.lzma
+ @$(MAKECOOKIE)
+
extract-archive-%.zip: zip-extract-%.zip
@$(MAKECOOKIE)
@@ -452,6 +467,9 @@
extract-archive-%.lz: lz-extract-%.lz
@$(MAKECOOKIE)
+extract-archive-%.lzma: lzma-extract-%.lzma
+ @$(MAKECOOKIE)
+
extract-archive-%.git: git-extract-%.git
@$(MAKECOOKIE)
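
Together with these extract-archive rules, lzma support means a recipe only has to list the distfile; a sketch with a hypothetical name:

  DISTFILES = $(GARNAME)-$(GARVERSION).tar.lzma
  # extract-archive-%.tar.lzma dispatches to tar-lzma-extract-%,
  # assuming the lzma utility is present on the build host.
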
@@ -498,7 +516,7 @@
# apply normal patches (git format-patch output or old-style diff -r)
normal-patch-%:
@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
- @( if ggrep -q 'Subject:' $(abspath $(DOWNLOADDIR)/$*); then \
+ @( if ggrep -q 'diff --git' $(abspath $(DOWNLOADDIR)/$*); then \
cd $(WORKSRC); git am --ignore-space-change --ignore-whitespace $(abspath $(DOWNLOADDIR)/$*); \
else \
echo Adding old-style patch...; \
Modified: csw/mgar/gar/v2-fortran/gar.mk
===================================================================
--- csw/mgar/gar/v2-fortran/gar.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/gar.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -11,20 +11,11 @@
# Comment this out to make much verbosity
#.SILENT:
-#ifeq ($(origin GARDIR), undefined)
-#GARDIR := $(CURDIR)/../..
-#endif
-
-#GARDIR ?= ../..
-#ifeq ($(origin GARDIR), undefined)
-#GARDIR := $(CURDIR)/../..
-#endif
-
ifneq ($(abspath /),/)
$(error Your version of 'make' is too old: $(MAKE_VERSION). Please make sure you are using at least 3.81)
endif
-GARDIR ?= gar
+# $(GARDIR) is pre-set by the top-level category.mk
GARBIN = $(GARDIR)/bin
DIRSTODOTS = $(subst . /,./,$(patsubst %,/..,$(subst /, ,/$(1))))
@@ -135,7 +126,7 @@
merge-$(2):
@echo "[===== Building modulation '$(2)' on host '$$(BUILDHOST)' =====]"
$$(if $$(and $$(BUILDHOST),$$(filter-out $$(THISHOST),$$(BUILDHOST))),\
- $(SSH) $$(BUILDHOST) "PATH=$$(PATH) $(MAKE) -C $$(CURDIR) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
+ $(SSH) $$(BUILDHOST) "PATH=$$(PATH) MAKEFLAGS=\"$(MAKEFLAGS)\" $(MAKE) -C $$(CURDIR) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
$(MAKE) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated\
)
@# The next line has intentionally been left blank to explicitly terminate this make rule
@@ -395,7 +386,7 @@
# We call an additional extract-modulated without resetting any variables so
# a complete unpacked set goes to the global dir for packaging (like gspec)
-extract: checksum $(COOKIEDIR) pre-extract pre-extract-git-check extract-modulated $(addprefix extract-,$(MODULATIONS)) post-extract
+extract: checksum $(COOKIEDIR) pre-extract $(if $(NOGITPATCH),,pre-extract-git-check) extract-modulated $(addprefix extract-,$(MODULATIONS)) post-extract
@$(DONADA)
extract-global: $(if $(filter global,$(MODULATION)),extract-modulated)
@@ -406,7 +397,7 @@
$(addprefix dep-$(GARDIR)/,$(EXTRACTDEPS)) \
announce-modulation \
pre-extract-modulated pre-extract-$(MODULATION) $(EXTRACT_TARGETS) post-extract-$(MODULATION) post-extract-modulated \
- $(if $(filter global,$(MODULATION)),,post-extract-gitsnap) \
+ $(if $(filter global,$(MODULATION)),,$(if $(NOGITPATCH),,post-extract-gitsnap)) \
$(foreach FILE,$(EXPANDVARS),expandvars-$(FILE))
@$(DONADA)
@@ -467,7 +458,7 @@
patch: pre-patch $(addprefix patch-,$(MODULATIONS)) post-patch
@$(DONADA)
-patch-modulated: extract-modulated $(WORKSRC) pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) $(if $(filter global,$(MODULATION)),,post-patch-gitsnap) post-patch-$(MODULATION) post-patch-modulated
+patch-modulated: extract-modulated $(WORKSRC) pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) $(if $(filter global,$(MODULATION)),,$(if $(NOGITPATCH),,post-patch-gitsnap)) post-patch-$(MODULATION) post-patch-modulated
@$(DONADA)
# returns true if patch has completed successfully, false
@@ -484,9 +475,13 @@
fi )
@$(MAKECOOKIE)
-makepatch: $(addprefix patch-,$(MODULATIONS)) $(addprefix makepatch-,$(MODULATIONS))
+makepatch: $(if $(NOGITPATCH),makepatch-nogit,$(addprefix patch-,$(MODULATIONS)) $(addprefix makepatch-,$(MODULATIONS)))
@$(DONADA)
+makepatch-nogit:
+	@echo "You set NOGITPATCH in your build recipe. I can't create a patch."
+ @$(DONADA)
+
# Allow generation of patches from modified work source.
makepatch-modulated: $(FILEDIR)
@( if [ -d "$(WORKSRC)/.git" ]; then \
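
For recipes where the git-snapshot workflow gets in the way, the flag can simply be set in the recipe (any non-empty value; the variable name is from this change, the value is a hypothetical example):

  # Skip git snapshots of the work directory; patches are applied
  # the old way and 'gmake makepatch' will refuse to run.
  NOGITPATCH = 1
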
@@ -805,7 +800,7 @@
# The basic merge merges the compiles for all ISAs on the current architecture
-merge: checksum pre-merge merge-do merge-license merge-classutils merge-checkpkgoverrides merge-alternatives $(if $(COMPILE_ELISP),compile-elisp) $(if $(NOSOURCEPACKAGE),,merge-src) post-merge
+merge: checksum pre-merge merge-do merge-license merge-classutils merge-checkpkgoverrides merge-alternatives $(if $(COMPILE_ELISP),compile-elisp) $(if $(NOSOURCEPACKAGE),,merge-src) $(if $(AP2_MODS),post-merge-ap2mod) post-merge
@$(DONADA)
merge-do: $(if $(PARALLELMODULATIONS),merge-parallel,merge-sequential)
@@ -833,6 +828,9 @@
echo "Building all ISAs in parallel. Please see the individual logfiles for details:";$(foreach M,$(MODULATIONS),echo "- $(WORKROOTDIR)/build-$M/build.log";)\
)
+post-merge-ap2mod:
+ $(GARBIN)/ap2mod_build_scripts $(PKGROOT) $(AP2_MODFILES)
+ @$(MAKECOOKIE)
# This merges the
merge-modulated: install-modulated pre-merge-modulated pre-merge-$(MODULATION) $(MERGE_TARGETS) post-merge-$(MODULATION) post-merge-modulated
@@ -840,14 +838,14 @@
# Copy the whole tree verbatim
merge-copy-all: $(PKGROOT) $(INSTALLISADIR)
- $(_DBG_MERGE)(cd $(INSTALLISADIR); umask 022 && pax -r -w -v $(_PAX_ARGS) \
+ $(_DBG_MERGE)(cd $(INSTALLISADIR)$(if $(ALLOW_RELOCATE),$(RELOCATE_PREFIX)); umask 022 && pax -r -w -v $(_PAX_ARGS) \
$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/,p") \
. $(PKGROOT))
@$(MAKECOOKIE)
# Copy only the merge directories
merge-copy-only: $(PKGROOT)
- $(_DBG_MERGE)(cd $(INSTALLISADIR); umask 022 && pax -r -w -v $(_PAX_ARGS) \
+ $(_DBG_MERGE)(cd $(INSTALLISADIR)$(if $(ALLOW_RELOCATE),$(RELOCATE_PREFIX)); umask 022 && pax -r -w -v $(_PAX_ARGS) \
$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/,p") -s ",.*,," \
. $(PKGROOT) \
)
@@ -855,7 +853,7 @@
# Copy the whole tree and relocate the directories in $(MERGE_DIRS)
merge-copy-relocate: $(PKGROOT) $(INSTALLISADIR)
- $(_DBG_MERGE)(cd $(INSTALLISADIR); umask 022 && pax -r -w -v $(_PAX_ARGS) \
+ $(_DBG_MERGE)(cd $(INSTALLISADIR)$(if $(ALLOW_RELOCATE),$(RELOCATE_PREFIX)); umask 022 && pax -r -w -v $(_PAX_ARGS) \
$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/$(ISA)/,p") \
. $(PKGROOT) \
)
@@ -863,7 +861,7 @@
# Copy only the relocated directories
merge-copy-relocated-only: $(PKGROOT) $(INSTALLISADIR)
- $(_DBG_MERGE)(cd $(INSTALLISADIR); umask 022 && pax -r -w -v $(_PAX_ARGS) \
+ $(_DBG_MERGE)(cd $(INSTALLISADIR)$(if $(ALLOW_RELOCATE),$(RELOCATE_PREFIX)); umask 022 && pax -r -w -v $(_PAX_ARGS) \
$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/$(ISA)/,p") -s ",.*,," \
. $(PKGROOT) \
)
@@ -871,7 +869,7 @@
# Copy
merge-copy-config-only:
- $(_DBG_MERGE)(cd $(INSTALLISADIR); umask 022 && pax -r -w -v $(_PAX_ARGS) \
+ $(_DBG_MERGE)(cd $(INSTALLISADIR)$(if $(ALLOW_RELOCATE),$(RELOCATE_PREFIX)); umask 022 && pax -r -w -v $(_PAX_ARGS) \
-s ",^\(\.$(bindir)/.*-config\)\$$,\1,p" \
-s ",.*,," \
. $(PKGROOT) \
@@ -881,7 +879,7 @@
.PHONY: remerge reset-merge reset-merge-modulated
remerge: reset-merge merge
-reset-merge: reset-package $(addprefix reset-merge-,$(MODULATIONS)) reset-merge-license reset-merge-classutils reset-merge-checkpkgoverrides reset-merge-alternatives reset-merge-src
+reset-merge: reset-package $(addprefix reset-merge-,$(MODULATIONS)) reset-merge-license reset-merge-classutils reset-merge-checkpkgoverrides reset-merge-alternatives reset-merge-ap2mod reset-merge-src
@rm -f $(COOKIEDIR)/pre-merge $(foreach M,$(MODULATIONS),$(COOKIEDIR)/merge-$M) $(COOKIEDIR)/merge $(COOKIEDIR)/post-merge
@rm -rf $(PKGROOT)
Modified: csw/mgar/gar/v2-fortran/gar.pkg.mk
===================================================================
--- csw/mgar/gar/v2-fortran/gar.pkg.mk 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/gar.pkg.mk 2010-12-02 17:46:17 UTC (rev 11764)
@@ -152,7 +152,10 @@
SPKG_DESC ?= $(DESCRIPTION)
SPKG_VERSION ?= $(GARVERSION)
SPKG_CATEGORY ?= application
-SPKG_SOURCEURL ?= $(firstword $(VENDOR_URL) $(MASTER_SITES) $(GIT_REPOS))
+SPKG_SOURCEURL ?= $(firstword $(VENDOR_URL) \
+ $(if $(filter $(GNU_MIRROR),$(MASTER_SITES)),http://www.gnu.org/software/$(GNU_PROJ)) \
+ $(MASTER_SITES) \
+ $(GIT_REPOS))
SPKG_VENDOR ?= $(SPKG_SOURCEURL) packaged for CSW by $(SPKG_PACKAGER)
SPKG_PSTAMP ?= $(LOGNAME)@$(shell hostname)-$(call _REVISION)-$(shell date '+%Y%m%d%H%M%S')
SPKG_BASEDIR ?= $(prefix)
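
For a package pulled from $(GNU_MIRROR) this means the source URL shown in pkginfo now points at the GNU project page; a hypothetical recipe needs nothing extra:

  GARNAME = foo
  GARVERSION = 1.0
  MASTER_SITES = $(GNU_MIRROR)
  DISTFILES = $(GARNAME)-$(GARVERSION).tar.gz
  # With GNU_MIRROR in MASTER_SITES, SPKG_SOURCEURL now defaults to
  # http://www.gnu.org/software/$(GNU_PROJ) unless VENDOR_URL is set.
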
@@ -185,6 +188,10 @@
# This is the default path for texinfo pages to be picked up. Extend or replace as necessary.
TEXINFO ?= $(infodir)/.*\.info(?:-\d+)? $(EXTRA_TEXINFO)
+# if AP2_MODS is set, files matching this shell glob (passed to find)
+# will have 'build' set as their class
+AP2_MODFILES ?= opt/csw/apache2/libexec/*so $(EXTRA_AP2_MODFILES)
+
# - set class for all config files
_CSWCLASS_FILTER = | perl -ane '\
$(foreach FILE,$(MIGRATECONF),$$F[1] = "cswmigrateconf" if( $$F[2] =~ m(^$(FILE)$$) );)\
@@ -197,6 +204,7 @@
$(foreach FILE,$(CRONTABS),$$F[1] = "cswcrontab" if( $$F[2] =~ m(^$(FILE)$$) );)\
$(if $(PYCOMPILE),$(foreach FILE,$(_PYCOMPILE_FILES),$$F[1] = "cswpycompile" if( $$F[2] =~ m(^$(FILE)$$) );))\
$(foreach FILE,$(TEXINFO),$$F[1] = "cswtexinfo" if( $$F[2] =~ m(^$(FILE)$$) );)\
+	$(if $(AP2_MODS),@F = ("e", "build", $$F[2], "?", "?", "?") if ($$F[2] =~ m(^/opt/csw/apache2/ap2mod/.*));) \
	print join(" ",@F),"\n";'
# If you add another filter above, also add the class to this list. It is used
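
An Apache 2 module recipe opts into this by setting AP2_MODS; a sketch with hypothetical names:

  # Generate ap2mod activation scripts for shipped Apache 2 modules.
  AP2_MODS = 1
  # The default AP2_MODFILES glob covers opt/csw/apache2/libexec/*so;
  # extend it via EXTRA_AP2_MODFILES if a module lives elsewhere.
  #EXTRA_AP2_MODFILES = opt/csw/apache2/libexec/mod_foo*.so

The generated files under /opt/csw/apache2/ap2mod then carry the 'build' class, so they are executed at install and remove time instead of being installed verbatim.
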
@@ -238,7 +246,7 @@
endif
# Where we find our mkpackage global templates
-PKGLIB = $(CURDIR)/$(GARDIR)/pkglib
+PKGLIB = $(GARDIR)/pkglib
PKG_EXPORTS = GARNAME GARVERSION DESCRIPTION CATEGORIES GARCH GARDIR GARBIN
PKG_EXPORTS += CURDIR WORKDIR WORKDIR_FIRSTMOD WORKSRC WORKSRC_FIRSTMOD PKGROOT
@@ -277,10 +285,11 @@
baseisadirs = $(1)/$(2) $(call isadirs,$(1),$(2))
# PKGFILES_RT selects files belonging to a runtime package
-PKGFILES_RT += $(call baseisadirs,$(libdir),[^/]*\.so(\.\d+)*)
+PKGFILES_RT += $(call baseisadirs,$(libdir),[^/]*\.so\.\d+(\.\d+)*)
# PKGFILES_DEVEL selects files belonging to a developer package
PKGFILES_DEVEL += $(call baseisadirs,$(bindir),[^/]*-config)
+PKGFILES_DEVEL += $(call baseisadirs,$(libdir),[^/]*\.so)
PKGFILES_DEVEL += $(call baseisadirs,$(libdir),[^/]*\.(a|la))
PKGFILES_DEVEL += $(call baseisadirs,$(libdir),pkgconfig(/.*)?)
PKGFILES_DEVEL += $(includedir)/.*
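
With the unversioned .so links now matched by PKGFILES_DEVEL, a library/devel split in a recipe can stay as simple as this sketch (package names hypothetical):

  PACKAGES = CSWlibfoo CSWlibfoo-dev
  # The -dev package takes headers, pkgconfig files and the bare
  # libfoo.so link; the versioned libfoo.so.N stays in CSWlibfoo.
  PKGFILES_CSWlibfoo-dev = $(PKGFILES_DEVEL)
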
@@ -376,12 +385,16 @@
# for distributing files to individual packages.
PROTOTYPE = $(WORKDIR)/prototype
+define dontrelocate
+ $(shell gsed -i -e 's,\(.\) .* \($(1)[\s/]*\),\1 norelocate /\2,g' $(2))
+endef
+
# Dynamic prototypes work like this:
# - A prototype from DISTFILES takes precedence over
# Pulled in from pkglib/csw_prototype.gspec
$(PROTOTYPE): $(WORKDIR) merge
- $(_DBG)cswproto -c $(GARDIR)/etc/commondirs-$(GARCH) -r $(PKGROOT) $(PKGROOT)=/ >$@
+ $(_DBG)cswproto -c $(GARDIR)/etc/commondirs-$(GARCH) -r $(PKGROOT) $(PKGROOT)=$(if $(ALLOW_RELOCATE),,'/') >$@
# pathfilter lives in bin/pathfilter and takes care of including/excluding paths from
# a prototype (see "perldoc bin/pathfilter"). We employ it here to:
@@ -422,6 +435,7 @@
else \
cat $(PROTOTYPE) $(call checkpkg_override_filter,$*) $(_CSWCLASS_FILTER) $(_PROTOTYPE_MODIFIERS) $(_PROTOTYPE_FILTER_$*) >$@; \
fi
+ $(if $(ALLOW_RELOCATE),$(call dontrelocate,opt,$(PROTOTYPE)))
$(WORKDIR)/%.prototype-$(GARCH): | $(WORKDIR)/%.prototype
$(_DBG)cat $(WORKDIR)/$*.prototype >$@
@@ -447,16 +461,19 @@
# The dependencies to CSWcswclassutils and CSWtexinfo are only added if there are files
# actually matching the _TEXINFO_FILTER. This is done at the prototype-level.
$(WORKDIR)/%.depend: $(WORKDIR)/$*.prototype
+$(WORKDIR)/%.depend: _EXTRA_GAR_PKGS += $(_CATEGORY_RUNTIME_DEP_PKGS)
$(WORKDIR)/%.depend: _EXTRA_GAR_PKGS += $(if $(strip $(shell cat $(WORKDIR)/$*.prototype | perl -ane 'print "yes" if( $$F[1] eq "cswalternatives")')),CSWalternatives)
$(WORKDIR)/%.depend: _EXTRA_GAR_PKGS += $(if $(strip $(shell cat $(WORKDIR)/$*.prototype | perl -ane '$(foreach C,$(_CSWCLASSES),print "$C\n" if( $$F[1] eq "$C");)')),CSWcswclassutils)
+$(WORKDIR)/%.depend: _DEP_PKGS=$(or $(RUNTIME_DEP_PKGS_ONLY_$*),$(RUNTIME_DEP_PKGS_ONLY),$(sort $(_EXTRA_GAR_PKGS)) $(or $(RUNTIME_DEP_PKGS_$*),$(RUNTIME_DEP_PKGS),$(DEP_PKGS_$*),$(DEP_PKGS)))
+$(WORKDIR)/%.depend: $(WORKDIR)
# The final "true" is for packages without dependencies to make the shell happy as "( )" is not allowed.
-$(WORKDIR)/%.depend: $(WORKDIR)
- $(_DBG)$(if $(_EXTRA_GAR_PKGS)$(RUNTIME_DEP_PKGS_$*)$(RUNTIME_DEP_PKGS)$(DEP_PKGS)$(DEP_PKGS_$*)$(INCOMPATIBLE_PKGS)$(INCOMPATIBLE_PKGS_$*), \
+$(WORKDIR)/%.depend:
+ $(_DBG)$(if $(_DEP_PKGS)$(INCOMPATIBLE_PKGS)$(INCOMPATIBLE_PKGS_$*), \
($(foreach PKG,$(INCOMPATIBLE_PKGS_$*) $(INCOMPATIBLE_PKGS),\
echo "I $(PKG)";\
)\
- $(foreach PKG,$(sort $(_EXTRA_GAR_PKGS)) $(or $(RUNTIME_DEP_PKGS_$*),$(RUNTIME_DEP_PKGS),$(DEP_PKGS_$*),$(DEP_PKGS)),\
+ $(foreach PKG,$(_DEP_PKGS),\
$(if $(SPKG_DESC_$(PKG)), \
echo "P $(PKG) $(call catalogname,$(PKG)) - $(SPKG_DESC_$(PKG))";, \
echo "$(shell (/usr/bin/pkginfo $(PKG) || echo "P $(PKG) - ") | $(GAWK) '{ $$1 = "P"; print } ')"; \
@@ -545,17 +562,20 @@
# Make sure every producable package contains specific descriptions.
# We explicitly ignore NOPACKAGE here to disallow circumventing the check.
+$(if $(filter-out $(firstword $(SPKG_SPECS)),$(SPKG_SPECS)),\
+ $(foreach P,$(SPKG_SPECS),\
+ $(if $(SPKG_DESC_$(P)),,$(error Multiple packages defined and SPKG_DESC_$(P) is not set.))))
+
$(foreach P,$(SPKG_SPECS),\
- $(foreach Q,$(filter-out $P,$(SPKG_SPECS)),\
- $(if $(shell if test "$(SPKG_DESC_$P)" = "$(SPKG_DESC_$Q)"; then echo ERROR; fi),\
- $(error The package descriptions for $P and $Q are identical, please make sure all package descriptions are unique by setting SPKG_DESC_<pkg> for each package) \
-)))
+ $(foreach Q,$(filter-out $(P),$(SPKG_SPECS)),\
+ $(if $(filter-out $(subst ,_,$(SPKG_DESC_$(P))),$(subst ,_,$(SPKG_DESC_$(Q)))),,$(error The package descriptions for $(P) [$(if $(SPKG_DESC_$(P)),$(SPKG_DESC_$(P)),<not set>)] and $(Q) [$(if $(SPKG_DESC_$(Q)),$(SPKG_DESC_$(Q)),<not set>)] are identical. Please make sure that all descriptions are unique by setting SPKG_DESC_<pkg> for each package.))))
.PRECIOUS: $(WORKDIR)/%.pkginfo
# The texinfo filter has been taken out of the normal filters as TEXINFO has a default.
$(WORKDIR)/%.pkginfo: $(WORKDIR)/%.prototype
$(WORKDIR)/%.pkginfo: SPKG_CLASSES += $(if $(strip $(shell cat $(WORKDIR)/$*.prototype | perl -ane 'print "yes" if( $$F[1] eq "cswalternatives")')),cswalternatives)
+$(WORKDIR)/%.pkginfo: SPKG_CLASSES += $(if $(strip $(shell cat $(WORKDIR)/$*.prototype | perl -ane 'print "yes" if( $$F[1] eq "build")')),build)
$(WORKDIR)/%.pkginfo: SPKG_CLASSES += $(shell cat $(WORKDIR)/$*.prototype | perl -e 'while(<>){@F=split;$$c{$$F[1]}++};$(foreach C,$(_CSWCLASSES),print "$C\n" if( $$c{$C});)')
$(WORKDIR)/%.pkginfo: $(WORKDIR)
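
In practice this means a multi-package recipe has to give every package its own, distinct description; a sketch with hypothetical names:

  PACKAGES = CSWfoo CSWfoo-dev
  SPKG_DESC_CSWfoo = Hypothetical foo runtime
  SPKG_DESC_CSWfoo-dev = Hypothetical foo header files and development links
  # Leaving one unset, or making the two identical, now stops the build
  # with the errors defined above.
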
@@ -567,7 +587,8 @@
echo "VENDOR=$(call pkgvar,SPKG_VENDOR,$*)"; \
echo "EMAIL=$(call pkgvar,SPKG_EMAIL,$*)"; \
echo "PSTAMP=$(LOGNAME)@$(shell hostname)-$(shell date '+%Y%m%d%H%M%S')"; \
- echo "CLASSES=$(call pkgvar,SPKG_CLASSES,$*)"; \
+ $(if $(ALLOW_RELOCATE),echo "CLASSES=$(call pkgvar,SPKG_CLASSES,$*) norelocate"; \
+ ,echo "CLASSES=$(call pkgvar,SPKG_CLASSES,$*)";) \
echo "HOTLINE=http://www.opencsw.org/bugtrack/"; \
echo "OPENCSW_CATALOGNAME=$(call catalogname,$*)"; \
echo "OPENCSW_MODE64=$(call mode64,$*)"; \
@@ -575,6 +596,7 @@
echo "OPENCSW_BUNDLE=$(BUNDLE)"; \
$(_CATEGORY_PKGINFO) \
) >$@
+ $(if $(ALLOW_RELOCATE),echo "BASEDIR=$(RELOCATE_PREFIX)" >>$@)
# findlicensefile - Find an existing file for a given license name
@@ -624,6 +646,9 @@
reset-merge-classutils: reset-merge-migrateconf reset-merge-usergroup reset-merge-inetdconf reset-merge-etcservices
+reset-merge-ap2mod:
+ @rm -f $(COOKIEDIR)/post-merge-ap2mod
+
merge-migrateconf: $(foreach S,$(SPKG_SPECS),$(if $(or $(MIGRATE_FILES_$S),$(MIGRATE_FILES)),merge-migrateconf-$S))
@$(MAKECOOKIE)
@@ -852,7 +877,7 @@
$(if $(PACKAGING_HOST_$P),\
$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
$(MAKE) GAR_PLATFORM=$P _package && ,\
- $(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -C $(CURDIR) GAR_PLATFORM=$P _package" && \
+ $(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin MAKEFLAGS=\"$(MAKEFLAGS)\" $(MAKE) -C $(CURDIR) GAR_PLATFORM=$P _package" && \
),\
$(error *** No host has been defined for platform $P)\
)\
@@ -867,7 +892,7 @@
echo " (built on this host)";\
$(MAKE) -s GAR_PLATFORM=$P _pkgshow;echo;,\
echo " (built on host '$(PACKAGING_HOST_$P)')";\
- $(SSH) $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -C $(CURDIR) -s GAR_PLATFORM=$P _pkgshow";echo;\
+ $(SSH) $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin MAKEFLAGS=\"$(MAKEFLAGS)\" $(MAKE) -C $(CURDIR) -s GAR_PLATFORM=$P _pkgshow";echo;\
)\
)
@$(MAKECOOKIE)
@@ -878,7 +903,7 @@
$(if $(PACKAGING_HOST_$P),\
$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
$(MAKE) -s GAR_PLATFORM=$P $* && ,\
- $(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -C $(CURDIR) GAR_PLATFORM=$P $*" && \
+ $(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin MAKEFLAGS=\"$(MAKEFLAGS)\" $(MAKE) -C $(CURDIR) GAR_PLATFORM=$P $*" && \
),\
$(error *** No host has been defined for platform $P)\
)\
Modified: csw/mgar/gar/v2-fortran/lib/python/README
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/README 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/README 2010-12-02 17:46:17 UTC (rev 11764)
@@ -13,3 +13,17 @@
Known problems:
- libmagic fails sometimes when processing the whole catalog
+
+
+Dependencies:
+
+It's possible to develop checkpkg on a non-Solaris platform, using unit
+tests as a means to run various bits of code. Here's the dependency list
+for Ubuntu.
+
+ python-cheetah
+ python-hachoir-parser
+ python-magic
+ python-mox
+ python-progressbar
+ python-yaml
Copied: csw/mgar/gar/v2-fortran/lib/python/catalog.py (from rev 11763, csw/mgar/gar/v2/lib/python/catalog.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/catalog.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/catalog.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,147 @@
+#!/usr/bin/env python2.6
+
+import os
+import re
+import logging
+
+
+class Error(Exception):
+ pass
+
+
+class CatalogLineParseError(Error):
+ pass
+
+
+class OpencswCatalogBuilder(object):
+
+ def __init__(self, product_dir, catalog_dir):
+ self.product_dir = product_dir
+ self.catalog_dir = catalog_dir
+
+ def Run(self):
+ pkg_dirs = os.listdir(self.product_dir)
+ for pkg_dir in pkg_dirs:
+ pkg_path = os.path.join(self.product_dir, pkg_dir)
+ pkginfo_path = os.path.join(pkg_path, "pkginfo")
+ if (os.path.isdir(pkg_path)
+ and
+ os.path.exists(pkginfo_path)):
+ if not self.Srv4Exists(pkg_path):
+ pkg = None
+ tmpdir = None
+ try:
+ tmpdir = tempfile.mkdtemp(prefix="sunw-pkg-")
+ logging.debug("Copying %s to %s", repr(pkg_path), repr(tmpdir))
+ tmp_pkg_dir = os.path.join(tmpdir, pkg_dir)
+ shutil.copytree(pkg_path, tmp_pkg_dir, symlinks=True)
+ pkg = DirectoryFormatPackage(tmp_pkg_dir)
+ # Replacing NAME= in the pkginfo, setting it to the catalog name.
+ pkg.ResetNameProperty()
+ pkg.ToSrv4(self.catalog_dir)
+ except IOError, e:
+ logging.warn("%s has failed: %s", pkg_path, e)
+ finally:
+ if pkg:
+ del(pkg)
+ if os.path.exists(tmpdir):
+ shutil.rmtree(tmpdir)
+ else:
+ logging.warn("srv4 file for %s already exists, skipping", pkg_path)
+ else:
+ logging.warn("%s is not a directory.", pkg_path)
+
+
+ def Srv4Exists(self, pkg_dir):
+ pkg = DirectoryFormatPackage(pkg_dir)
+ srv4_name = pkg.GetSrv4FileName()
+ srv4_name += ".gz"
+ srv4_path = os.path.join(self.catalog_dir, srv4_name)
+ result = os.path.exists(srv4_path)
+ logging.debug("Srv4Exists(%s) => %s, %s", pkg_dir, repr(srv4_path), result)
+ return result
+
+
+class OpencswCatalog(object):
+ """Represents a catalog file."""
+
+ def __init__(self, file_name):
+ self.file_name = file_name
+ self.by_basename = None
+ self.catalog_data = None
+
+ def _ParseCatalogLine(self, line):
+ cline_re_str_list = [
+ (
+ r"^"
+ # tmux
+ r"(?P<catalogname>\S+)"
+ r"\s+"
+ # 1.2,REV=2010.05.17
+ r"(?P<version>\S+)"
+ r"\s+"
+ # CSWtmux
+ r"(?P<pkgname>\S+)"
+ r"\s+"
+ # tmux-1.2,REV=2010.05.17-SunOS5.9-sparc-CSW.pkg.gz
+ r"(?P<file_basename>\S+)"
+ r"\s+"
+ # 145351cf6186fdcadcd169b66387f72f
+ r"(?P<md5sum>\S+)"
+ r"\s+"
+ # 214091
+ r"(?P<size>\S+)"
+ r"\s+"
+ # CSWcommon|CSWlibevent
+ r"(?P<deps>\S+)"
+ r"\s+"
+ # none
+ r"(?P<none_thing_1>\S+)"
+ # An optional empty field.
+ r"("
+ r"\s+"
+ # none\n'
+ r"(?P<none_thing_2>\S+)"
+ r")?"
+ r"$"
+ ),
+ ]
+ cline_re_list = [re.compile(x) for x in cline_re_str_list]
+ matched = False
+ d = None
+ for cline_re in cline_re_list:
+ m = cline_re.match(line)
+ if m:
+ d = m.groupdict()
+ matched = True
+ if not d:
+ raise CatalogLineParseError("Parsed %s data is empty" % repr(line))
+ if not matched:
+ raise CatalogLineParseError("No regexes matched %s" % repr(line))
+ return d
+
+ def _GetCatalogData(self, fd):
+ catalog_data = []
+ for line in fd:
+ try:
+ parsed = self._ParseCatalogLine(line)
+ catalog_data.append(parsed)
+ except CatalogLineParseError, e:
+ logging.debug("Could not parse %s, %s", repr(line), e)
+ return catalog_data
+
+ def GetCatalogData(self):
+ if not self.catalog_data:
+ fd = open(self.file_name, "r")
+ self.catalog_data = self._GetCatalogData(fd)
+ return self.catalog_data
+
+ def GetDataByBasename(self):
+ if not self.by_basename:
+ self.by_basename = {}
+ cd = self.GetCatalogData()
+ for d in cd:
+ if "file_basename" not in d:
+ logging.error("%s is missing the file_basename field", d)
+ self.by_basename[d["file_basename"]] = d
+ return self.by_basename
Copied: csw/mgar/gar/v2-fortran/lib/python/catalog_test.py (from rev 11763, csw/mgar/gar/v2/lib/python/catalog_test.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/catalog_test.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/catalog_test.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,29 @@
+#!/usr/bin/env python2.6
+
+import unittest
+import catalog
+
+class OpencswCatalogUnitTest(unittest.TestCase):
+
+ def test_ParseCatalogLine_1(self):
+ line = (
+ 'tmux 1.2,REV=2010.05.17 CSWtmux '
+ 'tmux-1.2,REV=2010.05.17-SunOS5.9-sparc-CSW.pkg.gz '
+ '145351cf6186fdcadcd169b66387f72f 214091 '
+ 'CSWcommon|CSWlibevent none none\n')
+ oc = catalog.OpencswCatalog(None)
+ parsed = oc._ParseCatalogLine(line)
+ expected = {'catalogname': 'tmux',
+ 'deps': 'CSWcommon|CSWlibevent',
+ 'file_basename': 'tmux-1.2,REV=2010.05.17-SunOS5.9-sparc-CSW.pkg.gz',
+ 'md5sum': '145351cf6186fdcadcd169b66387f72f',
+ 'none_thing_1': 'none',
+ 'none_thing_2': 'none',
+ 'pkgname': 'CSWtmux',
+ 'size': '214091',
+ 'version': '1.2,REV=2010.05.17'}
+ self.assertEquals(expected, parsed)
+
+
+if __name__ == '__main__':
+ unittest.main()
Modified: csw/mgar/gar/v2-fortran/lib/python/checkpkg.py
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/checkpkg.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/checkpkg.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -22,18 +22,19 @@
from sqlobject import sqlbuilder
import subprocess
import textwrap
-import yaml
from Cheetah import Template
-import opencsw
-import overrides
+import database
+
+import package
+import inspective_package
import package_checks
+import package_stats
import models as m
import configuration as c
import tag
+
DEBUG_BREAK_PKGMAP_AFTER = False
-DB_SCHEMA_VERSION = 5L
-PACKAGE_STATS_VERSION = 9L
SYSTEM_PKGMAP = "/var/sadm/install/contents"
NEEDED_SONAMES = "needed sonames"
RUNPATH = "runpath"
@@ -194,58 +195,7 @@
return m.group("username") if m else None
-class DatabaseClient(object):
-
- CHECKPKG_DIR = ".checkpkg"
- SQLITE3_DBNAME_TMPL = "checkpkg-db-%(fqdn)s"
- TABLES_THAT_NEED_UPDATES = (m.CswFile,)
- TABLES = TABLES_THAT_NEED_UPDATES + (
- m.Pkginst,
- m.CswConfig,
- m.Srv4FileStats,
- m.CheckpkgOverride,
- m.CheckpkgErrorTag,
- m.Architecture,
- m.OsRelease,
- m.Maintainer)
- sqo_conn = None
- db_path = None
-
- def __init__(self, debug=False):
- self.debug = debug
-
- @classmethod
- def GetDatabasePath(cls):
- if not cls.db_path:
- dbname_dict = {'fqdn': socket.getfqdn()}
- db_filename = cls.SQLITE3_DBNAME_TMPL % dbname_dict
- home_dir = os.environ["HOME"]
- cls.db_path = os.path.join(home_dir, cls.CHECKPKG_DIR, db_filename)
- return cls.db_path
-
- @classmethod
- def InitializeSqlobject(cls):
- """Establishes a database connection and stores it as a class member.
-
- The idea is to share the database connection between instances. It would
- be solved even better if the connection was passed to the class
- constructor.
- """
- if not cls.sqo_conn:
- db_path = cls.GetDatabasePath()
- cls.sqo_conn = sqlobject.connectionForURI('sqlite:%s' % db_path)
- sqlobject.sqlhub.processConnection = cls.sqo_conn
-
- def CreateTables(self):
- for table in self.TABLES:
- table.createTable(ifNotExists=True)
-
- def IsDatabaseGoodSchema(self):
- good_version = self.GetDatabaseSchemaVersion() >= DB_SCHEMA_VERSION
- return good_version
-
-
-class SystemPkgmap(DatabaseClient):
+class SystemPkgmap(database.DatabaseClient):
"""A class to hold and manipulate the /var/sadm/install/contents file."""
STOP_PKGS = ["SUNWbcp", "SUNWowbcp", "SUNWucb"]
@@ -457,10 +407,10 @@
try:
config_option = m.CswConfig.select(
m.CswConfig.q.option_key==CONFIG_DB_SCHEMA).getOne()
- config_option.int_value = DB_SCHEMA_VERSION
+ config_option.int_value = database.DB_SCHEMA_VERSION
except sqlobject.main.SQLObjectNotFound, e:
version = m.CswConfig(option_key=CONFIG_DB_SCHEMA,
- int_value=DB_SCHEMA_VERSION)
+ int_value=database.DB_SCHEMA_VERSION)
def GetPkgmapLineByBasename(self, filename):
"""Returns pkgmap lines by basename:
@@ -578,7 +528,7 @@
# subsequent checkpkg runs won't pick up the last change.
# I don't expect pkgadd to run under 1s.
fresh = f_mtime <= d_mtime
- good_version = self.GetDatabaseSchemaVersion() >= DB_SCHEMA_VERSION
+ good_version = self.GetDatabaseSchemaVersion() >= database.DB_SCHEMA_VERSION
logging.debug("IsDatabaseUpToDate: good_version=%s, fresh=%s",
repr(good_version), repr(fresh))
return fresh and good_version
@@ -773,7 +723,8 @@
self.packages = []
def GetPackageStatsList(self):
- return [PackageStats(None, self.stats_basedir, x) for x in self.md5sum_list]
+ return [package_stats.PackageStats(None, self.stats_basedir, x)
+ for x in self.md5sum_list]
def FormatReports(self, errors, messages, gar_lines):
namespace = {
@@ -800,7 +751,7 @@
else:
if "package-set" not in errors:
errors["package-set"] = []
- errors["package-set"].append(error)
+ errors["package-set"].append(tag)
return errors
def GetOptimizedAllStats(self, stats_obj_list):
@@ -1079,425 +1030,6 @@
return tuple(isalist)
-class PackageStats(DatabaseClient):
- """Collects stats about a package and saves it.
-
- TODO: Maintain a global database connection instead of creating one for each
- instantiated object.
- TODO: Store overrides in a separate table for performance.
- """
-
- def __init__(self, srv4_pkg, stats_basedir=None, md5sum=None, debug=False):
- super(PackageStats, self).__init__(debug=debug)
- self.srv4_pkg = srv4_pkg
- self.md5sum = md5sum
- self.dir_format_pkg = None
- self.all_stats = {}
- self.stats_basedir = stats_basedir
- self.db_pkg_stats = None
- if not self.stats_basedir:
- home = os.environ["HOME"]
- parts = [home, ".checkpkg", "stats"]
- self.stats_basedir = os.path.join(*parts)
- self.InitializeSqlobject()
-
- def GetPkgchkData(self):
- ret, stdout, stderr = self.srv4_pkg.GetPkgchkOutput()
- data = {
- 'return_code': ret,
- 'stdout_lines': stdout.splitlines(),
- 'stderr_lines': stderr.splitlines(),
- }
- return data
-
- def GetMd5sum(self):
- if not self.md5sum:
- self.md5sum = self.srv4_pkg.GetMd5sum()
- return self.md5sum
-
- def GetDbObject(self):
- if not self.db_pkg_stats:
- md5_sum = self.GetMd5sum()
- res = m.Srv4FileStats.select(m.Srv4FileStats.q.md5_sum==md5_sum)
- if not res.count():
- # TODO: Change this bit to throw an exception if the object is not
- # found.
- return None
- else:
- self.db_pkg_stats = res.getOne()
- return self.db_pkg_stats
-
-
- def StatsExist(self):
- """Checks if statistics of a package exist.
-
- Returns:
- bool
- """
- pkg_stats = self.GetDbObject()
- if not pkg_stats:
- return False
- if pkg_stats.stats_version != PACKAGE_STATS_VERSION:
- pkg_stats.destroySelf()
- else:
- return True
- return False
-
- def GetDirFormatPkg(self):
- if not self.dir_format_pkg:
- self.dir_format_pkg = self.srv4_pkg.GetDirFormatPkg()
- return self.dir_format_pkg
-
- def GetMtime(self):
- return self.srv4_pkg.GetMtime()
-
- def _MakeDirP(self, dir_path):
- """mkdir -p equivalent.
-
- http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
- """
- try:
- os.makedirs(dir_path)
- except OSError, e:
- if e.errno == errno.EEXIST:
- pass
- else:
- raise
-
- def GetBinaryDumpInfo(self):
- dir_pkg = self.GetDirFormatPkg()
- # Binaries. This could be split off to a separate function.
- # man ld.so.1 for more info on this hack
- env = copy.copy(os.environ)
- env["LD_NOAUXFLTR"] = "1"
- binaries_dump_info = []
- for binary in dir_pkg.ListBinaries():
- binary_abs_path = os.path.join(dir_pkg.directory, "root", binary)
- binary_base_name = os.path.basename(binary)
- args = [DUMP_BIN, "-Lv", binary_abs_path]
- dump_proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
- stdout, stderr = dump_proc.communicate()
- ret = dump_proc.wait()
- binary_data = ParseDumpOutput(stdout)
- binary_data["path"] = binary
- binary_data["base_name"] = binary_base_name
- binaries_dump_info.append(binary_data)
- return binaries_dump_info
-
- def GetBasicStats(self):
- dir_pkg = self.GetDirFormatPkg()
- basic_stats = {}
- basic_stats["stats_version"] = PACKAGE_STATS_VERSION
- basic_stats["pkg_path"] = self.srv4_pkg.pkg_path
- basic_stats["pkg_basename"] = os.path.basename(self.srv4_pkg.pkg_path)
- basic_stats["parsed_basename"] = opencsw.ParsePackageFileName(
- basic_stats["pkg_basename"])
- basic_stats["pkgname"] = dir_pkg.pkgname
- basic_stats["catalogname"] = dir_pkg.GetCatalogname()
- basic_stats["md5_sum"] = self.GetMd5sum()
- return basic_stats
-
- def GetOverrides(self):
- dir_pkg = self.GetDirFormatPkg()
- override_list = dir_pkg.GetOverrides()
- def OverrideToDict(override):
- return {
- "pkgname": override.pkgname,
- "tag_name": override.tag_name,
- "tag_info": override.tag_info,
- }
- overrides_simple = [OverrideToDict(x) for x in override_list]
- return overrides_simple
-
- def GetLddMinusRlines(self):
- """Returns ldd -r output."""
- dir_pkg = self.GetDirFormatPkg()
- binaries = dir_pkg.ListBinaries()
- ldd_output = {}
- for binary in binaries:
- binary_abspath = os.path.join(dir_pkg.directory, "root", binary)
- # this could be potentially moved into the DirectoryFormatPackage class.
- # ldd needs the binary to be executable
- os.chmod(binary_abspath, 0755)
- args = ["ldd", "-r", binary_abspath]
- ldd_proc = subprocess.Popen(
- args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = ldd_proc.communicate()
- retcode = ldd_proc.wait()
- if retcode:
- logging.error("%s returned an error: %s", args, stderr)
- ldd_info = []
- for line in stdout.splitlines():
- ldd_info.append(self._ParseLddDashRline(line))
- ldd_output[binary] = ldd_info
- return ldd_output
-
- def GetDefinedSymbols(self):
- """Returns text symbols (i.e. defined functions) for packaged ELF objects
-
- To do this we parse output lines from nm similar to the following. "T"s are
- the definitions which we are after.
-
- 0000104000 D _lib_version
- 0000986980 D _libiconv_version
- 0000000000 U abort
- 0000097616 T aliases_lookup
- """
- dir_pkg = self.GetDirFormatPkg()
- binaries = dir_pkg.ListBinaries()
- defined_symbols = {}
-
- for binary in binaries:
- binary_abspath = os.path.join(dir_pkg.directory, "root", binary)
- # Get parsable, ld.so.1 relevant SHT_DYNSYM symbol information
- args = ["/usr/ccs/bin/nm", "-p", "-D", binary_abspath]
- nm_proc = subprocess.Popen(
- args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = nm_proc.communicate()
- retcode = nm_proc.wait()
- if retcode:
- logging.error("%s returned an error: %s", args, stderr)
- continue
- nm_out = stdout.splitlines()
-
- defined_symbols[binary] = []
- for line in nm_out:
- sym = self._ParseNmSymLine(line)
- if not sym:
- continue
- if sym['type'] not in ("T", "D", "B"):
- continue
- defined_symbols[binary].append(sym['name'])
-
- return defined_symbols
-
- def _ParseNmSymLine(self, line):
- re_defined_symbol = re.compile('[0-9]+ [ABDFNSTU] \S+')
- m = re_defined_symbol.match(line)
- if not m:
- return None
- fields = line.split()
- sym = { 'address': fields[0], 'type': fields[1], 'name': fields[2] }
- return sym
-
- def CollectStats(self, force=False):
- """Lazy stats collection."""
- if force or not self.StatsExist():
- return self._CollectStats()
- return self.ReadSavedStats()
-
- def _CollectStats(self):
- """The list of variables needs to be synchronized with the one
- at the top of this class.
- """
- dir_pkg = self.GetDirFormatPkg()
- logging.debug("Collecting %s package statistics.", repr(dir_pkg.pkgname))
- override_dicts = self.GetOverrides()
- pkg_stats = {
- "binaries": dir_pkg.ListBinaries(),
- "binaries_dump_info": self.GetBinaryDumpInfo(),
- "depends": dir_pkg.GetDependencies(),
- "isalist": GetIsalist(),
- "overrides": override_dicts,
- "pkgchk": self.GetPkgchkData(),
- "pkginfo": dir_pkg.GetParsedPkginfo(),
- "pkgmap": dir_pkg.GetPkgmap().entries,
- "bad_paths": dir_pkg.GetFilesContaining(BAD_CONTENT_REGEXES),
- "basic_stats": self.GetBasicStats(),
- "files_metadata": dir_pkg.GetFilesMetadata(),
- "mtime": self.GetMtime(),
- }
- pkgname = pkg_stats["basic_stats"]["pkgname"]
- # Getting sqlobject representations.
- try:
- pkginst = m.Pkginst.select(m.Pkginst.q.pkgname==pkgname).getOne()
- except sqlobject.main.SQLObjectNotFound, e:
- logging.debug(e)
- pkginst = m.Pkginst(pkgname=pkgname)
- try:
- res = m.Architecture.select(
- m.Architecture.q.name==pkg_stats["pkginfo"]["ARCH"])
- arch = res.getOne()
- except sqlobject.main.SQLObjectNotFound, e:
- logging.debug(e)
- arch = m.Architecture(name=pkg_stats["pkginfo"]["ARCH"])
- parsed_basename = pkg_stats["basic_stats"]["parsed_basename"]
- os_rel_name = parsed_basename["osrel"]
- try:
- os_rel = m.OsRelease.select(
- m.OsRelease.q.short_name==os_rel_name).getOne()
- except sqlobject.main.SQLObjectNotFound, e:
- logging.debug(e)
- os_rel = m.OsRelease(short_name=os_rel_name, full_name=os_rel_name)
- try:
- maint_email = pkg_stats["pkginfo"]["EMAIL"]
- maintainer = m.Maintainer.select(
- m.Maintainer.q.email==maint_email).getOne()
- except sqlobject.main.SQLObjectNotFound, e:
- logging.debug(e)
- maintainer = m.Maintainer(email=maint_email)
-
- # If there are any previous records of the same pkginst, arch and os_rel,
- # we're marking them as not-latest.
- # This assumes that the packages are examined in a chronological order.
- res = m.Srv4FileStats.select(sqlobject.AND(
- m.Srv4FileStats.q.pkginst==pkginst,
- m.Srv4FileStats.q.arch==arch,
- m.Srv4FileStats.q.os_rel==os_rel))
- for obj in res:
- obj.latest = False
-
- rev=None
- if "revision_info" in parsed_basename:
- if "REV" in parsed_basename["revision_info"]:
- rev = parsed_basename["revision_info"]["REV"]
- # Creating the object in the database.
- db_pkg_stats = m.Srv4FileStats(
- md5_sum=self.GetMd5sum(),
- pkginst=pkginst,
- catalogname=pkg_stats["basic_stats"]["catalogname"],
- stats_version=PACKAGE_STATS_VERSION,
- os_rel=os_rel,
- arch=arch,
- basename=pkg_stats["basic_stats"]["pkg_basename"],
- maintainer=maintainer,
- latest=True,
- version_string=parsed_basename["full_version_string"],
- rev=rev,
- mtime=self.GetMtime(),
- data=cPickle.dumps(pkg_stats))
- # Inserting overrides as rows into the database
- for override_dict in override_dicts:
- o = m.CheckpkgOverride(srv4_file=db_pkg_stats,
- **override_dict)
-
- # The ldd -r reporting breaks on bigger packages during yaml saving.
- # It might work when yaml is disabled
- # self.DumpObject(self.GetLddMinusRlines(), "ldd_dash_r")
- # This check is currently disabled, let's save time by not collecting
- # these data.
- # self.DumpObject(self.GetDefinedSymbols(), "defined_symbols")
- # This one should be last, so that if the collection is interrupted
- # in one of the previous runs, the basic_stats.pickle file is not there
- # or not updated, and the collection is started again.
-
- logging.debug("Statistics of %s have been collected.", repr(dir_pkg.pkgname))
- return pkg_stats
-
- def GetAllStats(self):
- if not self.all_stats and self.StatsExist():
- self.all_stats = self.ReadSavedStats()
- elif not self.all_stats:
- self.all_stats = self.CollectStats()
- return self.all_stats
-
- def GetSavedOverrides(self):
- if not self.StatsExist():
- raise PackageError("Package stats not ready.")
- pkg_stats = self.GetDbObject()
- res = m.CheckpkgOverride.select(m.CheckpkgOverride.q.srv4_file==pkg_stats)
- override_list = []
- for db_override in res:
- d = {
- 'pkgname': db_override.pkgname,
- 'tag_name': db_override.tag_name,
- 'tag_info': db_override.tag_info,
- }
- override_list.append(overrides.Override(**d))
- return override_list
-
- def GetSavedErrorTags(self):
- pkg_stats = self.GetDbObject()
- res = m.CheckpkgErrorTag.select(m.CheckpkgErrorTag.q.srv4_file==pkg_stats)
- tag_list = [tag.CheckpkgTag(x.pkgname, x.tag_name, x.tag_info, x.msg)
- for x in res]
- return tag_list
-
- def ReadSavedStats(self):
- if not self.all_stats:
- md5_sum = self.GetMd5sum()
- res = m.Srv4FileStats.select(m.Srv4FileStats.q.md5_sum==md5_sum)
- self.all_stats = cPickle.loads(str(res.getOne().data))
- return self.all_stats
-
- def _ParseLddDashRline(self, line):
- found_re = r"^\t(?P<soname>\S+)\s+=>\s+(?P<path_found>\S+)"
- symbol_not_found_re = (r"^\tsymbol not found:\s(?P<symbol>\S+)\s+"
- r"\((?P<path_not_found>\S+)\)")
- only_so = r"^\t(?P<path_only>\S+)$"
- version_so = (r'^\t(?P<soname_version_not_found>\S+) '
- r'\((?P<lib_name>\S+)\) =>\t \(version not found\)')
- stv_protected = (r'^\trelocation \S+ symbol: (?P<relocation_symbol>\S+): '
- r'file (?P<relocation_path>\S+): '
- r'relocation bound to a symbol '
- r'with STV_PROTECTED visibility$')
- sizes_differ = (r'^\trelocation \S+ sizes differ: '
- r'(?P<sizes_differ_symbol>\S+)$')
- sizes_info = (r'^\t\t\(file (?P<sizediff_file1>\S+) size=(?P<size1>0x\w+); '
- r'file (?P<sizediff_file2>\S+) size=(?P<size2>0x\w+)\)$')
- sizes_one_used = (r'^\t\t(?P<sizediffused_file>\S+) size used; '
- r'possible insufficient data copied$')
- common_re = (r"(%s|%s|%s|%s|%s|%s|%s|%s)"
- % (found_re, symbol_not_found_re, only_so, version_so,
- stv_protected, sizes_differ, sizes_info, sizes_one_used))
- m = re.match(common_re, line)
- response = {}
- if m:
- d = m.groupdict()
- if "soname" in d and d["soname"]:
- # it was found
- response["state"] = "OK"
- response["soname"] = d["soname"]
- response["path"] = d["path_found"]
- response["symbol"] = None
- elif "symbol" in d and d["symbol"]:
- response["state"] = "symbol-not-found"
- response["soname"] = None
- response["path"] = d["path_not_found"]
- response["symbol"] = d["symbol"]
- elif d["path_only"]:
- response["state"] = "OK"
- response["soname"] = None
- response["path"] = d["path_only"]
- response["symbol"] = None
- elif d["soname_version_not_found"]:
- response["state"] = "version-not-found"
- response["soname"] = d["soname_version_not_found"]
- response["path"] = None
- response["symbol"] = None
- elif d["relocation_symbol"]:
- response["state"] = 'relocation-bound-to-a-symbol-with-STV_PROTECTED-visibility'
- response["soname"] = None
- response["path"] = d["relocation_path"]
- response["symbol"] = d["relocation_symbol"]
- elif d["sizes_differ_symbol"]:
- response["state"] = 'sizes-differ'
- response["soname"] = None
- response["path"] = None
- response["symbol"] = d["sizes_differ_symbol"]
- elif d["sizediff_file1"]:
- response["state"] = 'sizes-diff-info'
- response["soname"] = None
- response["path"] = "%s %s" % (d["sizediff_file1"], d["sizediff_file2"])
- response["symbol"] = None
- elif d["sizediffused_file"]:
- response["state"] = 'sizes-diff-one-used'
- response["soname"] = None
- response["path"] = "%s" % (d["sizediffused_file"])
- response["symbol"] = None
- else:
- raise StdoutSyntaxError("Could not parse %s with %s"
- % (repr(line), common_re))
- else:
- raise StdoutSyntaxError("Could not parse %s with %s"
- % (repr(line), common_re))
- return response
-
-
def ErrorTagsFromFile(file_name):
fd = open(file_name)
error_tags = []
@@ -1509,20 +1041,6 @@
return error_tags
-def StatsListFromCatalog(file_name_list, catalog_file_name=None, debug=False):
- packages = [opencsw.CswSrv4File(x, debug) for x in file_name_list]
- if catalog_file_name:
- catalog = opencsw.OpencswCatalog(catalog_file_name)
- md5s_by_basename = catalog.GetDataByBasename()
- for pkg in packages:
- basename = os.path.basename(pkg.pkg_path)
- # It might be the case that a file is present on disk, but missing from
- # the catalog file.
- if basename in md5s_by_basename:
- pkg.md5sum = md5s_by_basename[basename]["md5sum"]
- stats_list = [PackageStats(pkg) for pkg in packages]
- return stats_list
-
def SliceList(l, size):
"""Trasforms a list into a list of lists."""
idxes = xrange(0, len(l), size)
@@ -1542,17 +1060,17 @@
md5s.append(arg)
else:
filenames.append(arg)
- srv4_pkgs = [opencsw.CswSrv4File(x) for x in filenames]
+ srv4_pkgs = [inspective_package.InspectiveCswSrv4File(x) for x in filenames]
pkgstat_objs = []
bar = progressbar.ProgressBar()
bar.maxval = len(md5s) + len(srv4_pkgs)
bar.start()
counter = itertools.count()
for pkg in srv4_pkgs:
- pkgstat_objs.append(PackageStats(pkg, debug=debug))
+ pkgstat_objs.append(package_stats.PackageStats(pkg, debug=debug))
bar.update(counter.next())
for md5 in md5s:
- pkgstat_objs.append(PackageStats(None, md5sum=md5, debug=debug))
+ pkgstat_objs.append(package_stats.PackageStats(None, md5sum=md5, debug=debug))
bar.update(counter.next())
bar.finish()
return pkgstat_objs
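For reference, a minimal sketch (not part of this commit) of the stats-collection loop after the change above, assuming inspective_package.InspectiveCswSrv4File and package_stats.PackageStats keep the constructor signatures shown in the hunk; progress-bar handling is omitted:

import inspective_package
import package_stats

def collect_stats(filenames, md5s, debug=False):
  # Wrap each srv4 file; bare md5 sums are looked up without a file on disk.
  srv4_pkgs = [inspective_package.InspectiveCswSrv4File(f) for f in filenames]
  pkgstat_objs = [package_stats.PackageStats(p, debug=debug) for p in srv4_pkgs]
  pkgstat_objs += [package_stats.PackageStats(None, md5sum=m, debug=debug)
                   for m in md5s]
  return pkgstat_objs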
Modified: csw/mgar/gar/v2-fortran/lib/python/checkpkg_test.py
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/checkpkg_test.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/checkpkg_test.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -23,18 +23,6 @@
SELECT * FROM systempkgmap WHERE basename = 'libncursesw.so.5';
"""
-LDD_R_OUTPUT_1 = """\tlibc.so.1 => /lib/libc.so.1
-\tsymbol not found: check_encoding_conversion_args (/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so)
-\tsymbol not found: LocalToUtf (/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so)
-\tsymbol not found: UtfToLocal (/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so)
-\tlibm.so.2 => /lib/libm.so.2
-\t/usr/lib/secure/s8_preload.so.1
-\tlibXext.so.0 (SUNW_1.1) =>\t (version not found)
-\trelocation R_SPARC_COPY symbol: ASN1_OCTET_STRING_it: file /opt/csw/lib/sparcv8plus+vis/libcrypto.so.0.9.8: relocation bound to a symbol with STV_PROTECTED visibility
-\trelocation R_SPARC_COPY sizes differ: _ZTI7QWidget
-\t\t(file /tmp/pkg_GqCk0P/CSWkdeartworkgcc/root/opt/csw/kde-gcc/bin/kslideshow.kss size=0x28; file /opt/csw/kde-gcc/lib/libqt-mt.so.3 size=0x20)
-"""
-
class GetLinesBySonameUnitTest(unittest.TestCase):
def setUp(self):
@@ -279,116 +267,6 @@
self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
-class PackageStatsUnitTest(unittest.TestCase):
-
- def setUp(self):
- self.pkgstats = checkpkg.PackageStats(None)
-
- def test_ParseNmSymLineGoodLine(self):
- line = '0000097616 T aliases_lookup'
- expected = {
- 'address': '0000097616',
- 'type': 'T',
- 'name': 'aliases_lookup',
- }
- self.assertEqual(expected, self.pkgstats._ParseNmSymLine(line))
-
- def test_ParseNmSymLineBadLine(self):
- line = 'foo'
- self.assertEqual(None, self.pkgstats._ParseNmSymLine(line))
-
- def test_ParseLddDashRlineFound(self):
- line = '\tlibc.so.1 => /lib/libc.so.1'
- expected = {
- 'state': 'OK',
- 'soname': 'libc.so.1',
- 'path': '/lib/libc.so.1',
- 'symbol': None,
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLddDashRlineSymbolMissing(self):
- line = ('\tsymbol not found: check_encoding_conversion_args '
- '(/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so)')
- expected = {
- 'state': 'symbol-not-found',
- 'soname': None,
- 'path': '/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so',
- 'symbol': 'check_encoding_conversion_args',
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLddDashRlineFound(self):
- line = '\t/usr/lib/secure/s8_preload.so.1'
- expected = {
- 'state': 'OK',
- 'soname': None,
- 'path': '/usr/lib/secure/s8_preload.so.1',
- 'symbol': None,
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLdd_VersionNotFound(self):
- line = '\tlibXext.so.0 (SUNW_1.1) =>\t (version not found)'
- expected = {
- 'symbol': None,
- 'soname': 'libXext.so.0',
- 'path': None,
- 'state': 'version-not-found',
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLdd_StvProtectedVisibility(self):
- line = ('\trelocation R_SPARC_COPY symbol: ASN1_OCTET_STRING_it: '
- 'file /opt/csw/lib/sparcv8plus+vis/libcrypto.so.0.9.8: '
- 'relocation bound to a symbol with STV_PROTECTED visibility')
- expected = {
- 'symbol': 'ASN1_OCTET_STRING_it',
- 'soname': None,
- 'path': '/opt/csw/lib/sparcv8plus+vis/libcrypto.so.0.9.8',
- 'state': 'relocation-bound-to-a-symbol-with-STV_PROTECTED-visibility',
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLdd_SizesDiffer(self):
- line = '\trelocation R_SPARC_COPY sizes differ: _ZTI7QWidget'
- expected = {
- 'symbol': '_ZTI7QWidget',
- 'soname': None,
- 'path': None,
- 'state': 'sizes-differ',
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLdd_SizesDifferInfo(self):
- line = ('\t\t(file /tmp/pkg_GqCk0P/CSWkdeartworkgcc/root/opt/csw/kde-gcc/bin/'
- 'kslideshow.kss size=0x28; '
- 'file /opt/csw/kde-gcc/lib/libqt-mt.so.3 size=0x20)')
- expected = {
- 'symbol': None,
- 'path': ('/tmp/pkg_GqCk0P/CSWkdeartworkgcc/root/opt/csw/kde-gcc/'
- 'bin/kslideshow.kss /opt/csw/kde-gcc/lib/libqt-mt.so.3'),
- 'state': 'sizes-diff-info',
- 'soname': None,
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLdd_SizesDifferOneUsed(self):
- line = ('\t\t/opt/csw/kde-gcc/lib/libqt-mt.so.3 size used; '
- 'possible insufficient data copied')
- expected = {
- 'symbol': None,
- 'path': '/opt/csw/kde-gcc/lib/libqt-mt.so.3',
- 'state': 'sizes-diff-one-used',
- 'soname': None,
- }
- self.assertEqual(expected, self.pkgstats._ParseLddDashRline(line))
-
- def test_ParseLddDashRlineManyLines(self):
- for line in LDD_R_OUTPUT_1.splitlines():
- parsed = self.pkgstats._ParseLddDashRline(line)
-
-
class ExtractorsUnitTest(unittest.TestCase):
def testExtractDescriptionFromGoodData(self):
Copied: csw/mgar/gar/v2-fortran/lib/python/compare_pkgs.py (from rev 11763, csw/mgar/gar/v2/lib/python/compare_pkgs.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/compare_pkgs.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/compare_pkgs.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,69 @@
+#!/opt/csw/bin/python2.6
+# coding=utf-8
+# vim:set sw=2 ts=2 sts=2 expandtab:
+#
+# Copyright (c) 2009 Maciej Bliziński
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License version 2 as published by the
+# Free Software Foundation.
+
+"""Compares the contents of two svr4 packages.
+
+The needed opencsw.py library is now at:
+https://gar.svn.sourceforge.net/svnroot/gar/csw/mgar/gar/v2/lib/python/
+
+$Id: compare_pkgs.py 124 2010-02-18 07:28:10Z wahwah $
+"""
+
+import logging
+import optparse
+import opencsw
+
+USAGE = """Compares two packages with the same catalogname.
+
+To use, place packages (say, foo-1.0,REV=1898.09.25-SunOS5.9-sparc-CSW.pkg.gz
+and foo-1.0.1,REV=2010.09.25-SunOS5.9-sparc-CSW.pkg.gz) in two directories
+(say, /a and /b), and issue:
+
+ comparepkg --package-dir-a /a --package-dir-b /b --catalog-name foo
+"""
+
+def main():
+ parser = optparse.OptionParser(USAGE)
+ parser.add_option("-d", "--debug", dest="debug",
+ default=False, action="store_true")
+ parser.add_option("-a", "--package-dir-a", dest="package_dir_a",
+ help="Package directory A")
+ parser.add_option("-b", "--package-dir-b", dest="package_dir_b",
+ help="Package directory B")
+ parser.add_option("-c", "--catalog-name", dest="catalog_name",
+ help="Catalog name, for example 'cups'")
+ parser.add_option("-p", "--permissions", dest="permissions",
+ help="Whether to analyze permission bits",
+ default=False, action="store_true")
+ parser.add_option("", "--strip-a", dest="strip_a",
+ help="Strip from paths in a")
+ parser.add_option("", "--strip-b", dest="strip_b",
+ help="Strip from paths in b")
+ (options, args) = parser.parse_args()
+ if options.debug:
+ current_logging_level = logging.DEBUG
+ else:
+ current_logging_level = logging.INFO
+ logging.basicConfig(level=current_logging_level)
+ pkg_dir_a = opencsw.StagingDir(options.package_dir_a)
+ pkg_dir_b = opencsw.StagingDir(options.package_dir_b)
+ pkg_path_a = pkg_dir_a.GetLatest(options.catalog_name)[-1]
+ pkg_path_b = pkg_dir_b.GetLatest(options.catalog_name)[-1]
+ pc = opencsw.PackageComparator(
+ pkg_path_a,
+ pkg_path_b,
+ permissions=options.permissions,
+ strip_a=options.strip_a,
+ strip_b=options.strip_b)
+ pc.Run()
+
+
+if __name__ == '__main__':
+ main()
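A hedged sketch of driving the same comparison from Python instead of the command line, assuming opencsw.StagingDir, GetLatest and PackageComparator behave as used in main() above; the directories and catalog name are invented:

import opencsw

dir_a = opencsw.StagingDir("/a")
dir_b = opencsw.StagingDir("/b")
pkg_path_a = dir_a.GetLatest("foo")[-1]  # newest 'foo' package under /a
pkg_path_b = dir_b.GetLatest("foo")[-1]  # newest 'foo' package under /b
opencsw.PackageComparator(pkg_path_a, pkg_path_b,
                          permissions=False,
                          strip_a=None,
                          strip_b=None).Run()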
Copied: csw/mgar/gar/v2-fortran/lib/python/compare_pkgs_test.py (from rev 11763, csw/mgar/gar/v2/lib/python/compare_pkgs_test.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/compare_pkgs_test.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/compare_pkgs_test.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,66 @@
+#!/opt/csw/bin/python2.6
+# coding=utf-8
+# vim:set sw=2 ts=2 sts=2 expandtab:
+
+"""
+The needed opencsw.py library is now at:
+https://gar.svn.sourceforge.net/svnroot/gar/csw/mgar/gar/v2/lib/python/
+
+$Id: compare_pkgs_test.py 124 2010-02-18 07:28:10Z wahwah $
+"""
+
+import unittest
+import compare_pkgs as cpkg
+import opencsw
+
+PKGMAP_1 = """: 1 4407
+1 f none /etc/init.d/cswvncserver 0744 root sys 1152 21257 1048192898
+1 s none /etc/rc0.d/K36cswvncserver=../init.d/cswvncserver
+1 s none /etc/rc1.d/K36cswvncserver=../init.d/cswvncserver
+1 s none /etc/rc2.d/K36cswvncserver=../init.d/cswvncserver
+1 s none /etc/rc3.d/S92cswvncserver=../init.d/cswvncserver
+1 s none /etc/rcS.d/K36cswvncserver=../init.d/cswvncserver
+1 d none /opt/csw/bin 0755 root bin
+1 f none /opt/csw/bin/Xvnc 0755 root bin 1723040 56599 1048192381
+1 f none /opt/csw/bin/vncconnect 0755 root bin 5692 56567 1048192381
+1 f none /opt/csw/bin/vncpasswd 0755 root bin 15828 10990 1048192381
+1 d none /opt/csw/etc 0755 root bin
+1 d none /opt/csw/share 0755 root bin
+1 d none /opt/csw/share/man 0755 root bin
+1 d none /opt/csw/share/man/man1 0755 root bin
+1 f none /opt/csw/share/man/man1/Xvnc.1 0644 root bin 6000 15243 1028731374
+1 f none /opt/csw/share/man/man1/vncconnect.1 0644 root bin 1082 26168 1028731541
+1 f none /opt/csw/share/man/man1/vncpasswd.1 0644 root bin 2812 53713 1042812886
+1 f none /opt/csw/share/man/man1/vncserver.1 0644 root bin 3070 7365 1028731541
+1 d none /opt/csw/share/vnc 0755 root bin
+1 d none /opt/csw/share/vnc/classes 0755 root bin
+1 f none /opt/csw/share/vnc/classes/AuthPanel.class 0644 root bin 2458 21987 1048192130
+1 f none /opt/csw/share/vnc/classes/ButtonPanel.class 0644 root bin 3044 1240 1048192130
+1 f none /opt/csw/share/vnc/classes/ClipboardFrame.class 0644 root bin 2595 24223 1048192130
+1 f none /opt/csw/share/vnc/classes/DesCipher.class 0644 root bin 12745 33616 1048192130
+1 f none /opt/csw/share/vnc/classes/OptionsFrame.class 0644 root bin 6908 39588 1048192130
+1 f none /opt/csw/share/vnc/classes/RecordingFrame.class 0644 root bin 6101 7175 1048192130
+1 f none /opt/csw/share/vnc/classes/ReloginPanel.class 0644 root bin 1405 22871 1048192130
+1 f none /opt/csw/share/vnc/classes/RfbProto.class 0644 root bin 14186 29040 1048192130
+1 f none /opt/csw/share/vnc/classes/SessionRecorder.class 0644 root bin 2654 62139 1048192130
+1 f none /opt/csw/share/vnc/classes/SocketFactory.class 0644 root bin 342 23575 1048192130
+1 f none /opt/csw/share/vnc/classes/VncCanvas.class 0644 root bin 20927 18690 1048192130
+1 f none /opt/csw/share/vnc/classes/VncViewer.class 0644 root bin 13795 52263 1048192130
+1 f none /opt/csw/share/vnc/classes/VncViewer.jar 0644 root bin 47606 63577 1048192130
+1 f none /opt/csw/share/vnc/classes/index.vnc 0644 root bin 846 592 1048192130
+1 f none /opt/csw/share/vnc/vncserver.bin 0755 root bin 15190 2021 1048192092
+1 f none /opt/csw/share/vnc/vncservers.etc 0644 root sys 698 58245 1048192098
+1 i copyright 18000 30145 1048191525
+1 i depend 454 38987 1051394941
+1 i pkginfo 363 30834 1219230102
+1 i postinstall 827 2423 1048191525
+"""
+
+class PkgmapTest(unittest.TestCase):
+
+ def testPkgmap1(self):
+ lines = PKGMAP_1.splitlines()
+ p1 = opencsw.Pkgmap(lines)
+
+if __name__ == '__main__':
+ unittest.main()
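The PKGMAP_1 fixture above follows the standard SVR4 pkgmap layout (part, type, class, path, mode, owner, group, size, checksum, modtime for 'f' entries). A standalone, hypothetical sketch of extracting the interesting fields, not the opencsw.Pkgmap implementation:

def parse_pkgmap_file_entry(line):
  # part ftype class path mode owner group size cksum modtime
  fields = line.split()
  if len(fields) >= 10 and fields[1] == "f":
    return {"path": fields[3], "mode": fields[4],
            "owner": fields[5], "group": fields[6]}
  return None

# parse_pkgmap_file_entry(
#     "1 f none /opt/csw/bin/Xvnc 0755 root bin 1723040 56599 1048192381")
# -> {'path': '/opt/csw/bin/Xvnc', 'mode': '0755',
#     'owner': 'root', 'group': 'bin'}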
Copied: csw/mgar/gar/v2-fortran/lib/python/database.py (from rev 11763, csw/mgar/gar/v2/lib/python/database.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/database.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/database.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,57 @@
+import socket
+import os
+import sqlobject
+import models as m
+
+DB_SCHEMA_VERSION = 5L
+
+
+class DatabaseClient(object):
+
+ CHECKPKG_DIR = ".checkpkg"
+ SQLITE3_DBNAME_TMPL = "checkpkg-db-%(fqdn)s"
+ TABLES_THAT_NEED_UPDATES = (m.CswFile,)
+ TABLES = TABLES_THAT_NEED_UPDATES + (
+ m.Pkginst,
+ m.CswConfig,
+ m.Srv4FileStats,
+ m.CheckpkgOverride,
+ m.CheckpkgErrorTag,
+ m.Architecture,
+ m.OsRelease,
+ m.Maintainer)
+ sqo_conn = None
+ db_path = None
+
+ def __init__(self, debug=False):
+ self.debug = debug
+
+ @classmethod
+ def GetDatabasePath(cls):
+ if not cls.db_path:
+ dbname_dict = {'fqdn': socket.getfqdn()}
+ db_filename = cls.SQLITE3_DBNAME_TMPL % dbname_dict
+ home_dir = os.environ["HOME"]
+ cls.db_path = os.path.join(home_dir, cls.CHECKPKG_DIR, db_filename)
+ return cls.db_path
+
+ @classmethod
+ def InitializeSqlobject(cls):
+ """Establishes a database connection and stores it as a class member.
+
+ The idea is to share the database connection between instances. It would
+ be solved even better if the connection was passed to the class
+ constructor.
+ """
+ if not cls.sqo_conn:
+ db_path = cls.GetDatabasePath()
+ cls.sqo_conn = sqlobject.connectionForURI('sqlite:%s' % db_path)
+ sqlobject.sqlhub.processConnection = cls.sqo_conn
+
+ def CreateTables(self):
+ for table in self.TABLES:
+ table.createTable(ifNotExists=True)
+
+ def IsDatabaseGoodSchema(self):
+ good_version = self.GetDatabaseSchemaVersion() >= DB_SCHEMA_VERSION
+ return good_version
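A hedged usage sketch of the new DatabaseClient, assuming the interface shown above and a GetDatabaseSchemaVersion() method defined elsewhere in the class:

import database

client = database.DatabaseClient(debug=True)
client.InitializeSqlobject()  # class-level sqlite connection, shared by instances
client.CreateTables()         # createTable(ifNotExists=True), safe to repeat
if not client.IsDatabaseGoodSchema():
  raise RuntimeError("checkpkg database schema is older than %s"
                     % database.DB_SCHEMA_VERSION)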
Modified: csw/mgar/gar/v2-fortran/lib/python/dependency_checks.py
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/dependency_checks.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/dependency_checks.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -14,6 +14,9 @@
"Please use /opt/csw/mysql5/..."),
("/opt/csw/lib/mysql", "libmysqlclient.so.15",
"Please use /opt/csw/mysql5/..."),
+ ("/opt/csw/lib", "libnet.so",
+ "Please use -L/opt/csw/lib/libnet-new for linking. "
+ "See more at http://wiki.opencsw.org/project-libnet"),
)
DLOPEN_LIB_LOCATIONS = (
@@ -26,6 +29,7 @@
(r".*\.py$", (u"CSWpython",)),
(r".*\.rb$", (u"CSWruby",)),
(r".*\.elc?$", (u"CSWemacscommon",)),
+ (r"/opt/csw/apache2/", (u"CSWapache2",)),
)
PREFERRED_DIRECTORY_PROVIDERS = set([u"CSWcommon"])
Modified: csw/mgar/gar/v2-fortran/lib/python/dependency_checks_test.py
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/dependency_checks_test.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/dependency_checks_test.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -5,7 +5,7 @@
import mox
import unittest
import pprint
-import dependency_checks as depchecks
+import dependency_checks
from testdata import stubs
from testdata.tree_stats import pkgstats as tree_stats
from testdata.sudo_stats import pkgstats as sudo_stats
@@ -26,7 +26,7 @@
logger_stub = stubs.LoggerStub()
self.assertEqual(
expected,
- depchecks.GetPkgByFullPath(self.error_mgr_mock,
+ dependency_checks.GetPkgByFullPath(self.error_mgr_mock,
logger_stub,
path_list,
pkg_by_path))
@@ -54,7 +54,7 @@
'/opt/csw/lib/libfoo.so.1': ['CSWbar']}
self.assertEqual(
expected,
- depchecks.GetPkgByFullPath(self.error_mgr_mock,
+ dependency_checks.GetPkgByFullPath(self.error_mgr_mock,
logger_stub,
paths_to_verify,
pkg_by_path))
@@ -156,7 +156,7 @@
u'CSWlibxft2', u'CSWpango', u'CSWgtk2', u'CSWpkgutil'],
'/opt/csw/share/man/man1': ['CSWtree'],
'/opt/csw/share/man/man1/tree.1': ['CSWtree']}
- result = depchecks.ByDirectory(self.pkg_data,
+ result = dependency_checks.ByDirectory(self.pkg_data,
self.error_mgr_mock,
self.logger_stub,
self.messenger_stub,
@@ -182,7 +182,7 @@
'/opt/csw/share/man': [u'CSWcommon', u'CSWgnuplot'],
'/opt/csw/share/man/man1': ['CSWtree'],
'/opt/csw/share/man/man1/tree.1': ['CSWtree']}
- result = depchecks.ByDirectory(self.pkg_data,
+ result = dependency_checks.ByDirectory(self.pkg_data,
self.error_mgr_mock,
self.logger_stub,
self.messenger_stub,
@@ -220,7 +220,7 @@
'/opt/csw/share/man': [u'CSWcommon', u'CSWgnuplot'],
'/opt/csw/share/man/man1': ['CSWtree'],
'/opt/csw/share/man/man1/tree.1': ['CSWtree']}
- result = depchecks.Libraries(self.pkg_data,
+ result = dependency_checks.Libraries(self.pkg_data,
self.error_mgr_mock,
self.logger_stub,
self.messenger_stub,
@@ -290,9 +290,9 @@
[(u'SUNWlibC', u'provides /usr/lib/libCstd.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
[(u'SUNWlibC', u'provides /usr/lib/libCrun.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')]]
- # pkg_by_path is not important for depchecks.Libraries.
+ # pkg_by_path is not important for dependency_checks.Libraries.
pkg_by_path = {}
- result = depchecks.Libraries(self.pkg_data,
+ result = dependency_checks.Libraries(self.pkg_data,
self.error_mgr_mock,
self.logger_stub,
self.messenger_stub,
@@ -310,7 +310,7 @@
]
declared_deps = set([u"CSWfoo2"])
expected = [[u"CSWbar"]]
- result = depchecks.MissingDepsFromReasonGroups(
+ result = dependency_checks.MissingDepsFromReasonGroups(
reason_groups, declared_deps)
self.assertEqual(result, expected)
@@ -328,7 +328,29 @@
def testOne(self):
pass
+ def testByFilename(self):
+ self.pkg_data = tree_stats[0]
+ self.pkg_data["pkgmap"] = [
+ {'class': 'none',
+ 'line': 'not important',
+ 'mode': '0755',
+ 'path': '/opt/csw/apache2/bin/foo',
+ 'type': 'f',
+ 'group': 'bin',
+ 'user': 'root'}]
+ self.mocker.ReplayAll()
+ result = dependency_checks.ByFilename(
+ self.pkg_data,
+ self.error_mgr_mock,
+ self.logger_stub,
+ self.messenger_stub,
+ None, None)
+ self.mocker.VerifyAll()
+ expected = [[
+ (u'CSWapache2',
+ "found file(s) matching /opt/csw/apache2/, "
+ "e.g. '/opt/csw/apache2/bin/foo'")]]
+ self.assertEqual(expected, result)
-
if __name__ == '__main__':
unittest.main()
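The new testByFilename case above exercises the /opt/csw/apache2/ entry added to dependency_checks. A simplified, hypothetical illustration of such a filename-based rule (not the project's actual ByFilename code):

import re

# One (regex, package) rule, mirroring the entry added to dependency_checks.
FILENAME_RULES = ((r"/opt/csw/apache2/", u"CSWapache2"),)

def deps_by_filename(paths):
  reason_groups = []
  for regex, pkgname in FILENAME_RULES:
    matching = [p for p in paths if re.search(regex, p)]
    if matching:
      reason_groups.append(
          [(pkgname, "found file(s) matching %s, e.g. %s"
                     % (regex, repr(matching[0])))])
  return reason_groups

# deps_by_filename(["/opt/csw/apache2/bin/foo"]) yields one reason group
# naming CSWapache2, matching the expectation in testByFilename.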
Copied: csw/mgar/gar/v2-fortran/lib/python/inspective_package.py (from rev 11763, csw/mgar/gar/v2/lib/python/inspective_package.py)
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/inspective_package.py (rev 0)
+++ csw/mgar/gar/v2-fortran/lib/python/inspective_package.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -0,0 +1,146 @@
+import package
+import os
+import re
+import logging
+import hachoir_parser as hp
+import sharedlib_utils
+import magic
+
+"""This file isolates code dependent on hachoir parser.
+
+hachoir parser takes quite a while to import.
+"""
+
+# Suppress unhelpful warnings
+# http://bitbucket.org/haypo/hachoir/issue/23
+import hachoir_core.config
+hachoir_core.config.quiet = True
+
+class InspectivePackage(package.DirectoryFormatPackage):
+ """Extends DirectoryFormatPackage to allow package inspection."""
+
+ def GetFilesMetadata(self):
+ """Returns a data structure with all the files plus their metadata.
+
+ [
+ {
+ "path": ...,
+ "mime_type": ...,
+ },
+ ]
+ """
+ if not self.files_metadata:
+ self.CheckPkgpathExists()
+ self.files_metadata = []
+ files_root = os.path.join(self.directory, "root")
+ if not os.path.exists(files_root):
+ return self.files_metadata
+ all_files = self.GetAllFilePaths()
+ def StripRe(x, strip_re):
+ return re.sub(strip_re, "", x)
+ root_re = re.compile(r"^root/")
+ file_magic = FileMagic()
+ for file_path in all_files:
+ full_path = unicode(self.MakeAbsolutePath(file_path))
+ file_info = {
+ "path": StripRe(file_path, root_re),
+ "mime_type": file_magic.GetFileMimeType(full_path)
+ }
+ if not file_info["mime_type"]:
+ logging.error("Could not establish the mime type of %s",
+ full_path)
+ # We really don't want that, as it misses binaries.
+ raise package.PackageError("Could not establish the mime type of %s"
+ % full_path)
+ if sharedlib_utils.IsBinary(file_info):
+ parser = hp.createParser(full_path)
+ if not parser:
+ logging.warning("Can't parse file %s", file_path)
+ else:
+ file_info["mime_type_by_hachoir"] = parser.mime_type
+ machine_id = parser["/header/machine"].value
+ file_info["machine_id"] = machine_id
+ file_info["endian"] = parser["/header/endian"].display
+ self.files_metadata.append(file_info)
+ return self.files_metadata
+
+ def ListBinaries(self):
+ """Lists all the binaries from a given package.
+
+ Original checkpkg code:
+
+ #########################################
+ # find all executables and dynamic libs,and list their filenames.
+ listbinaries() {
+ if [ ! -d $1 ] ; then
+ print errmsg $1 not a directory
+ rm -rf $EXTRACTDIR
+ exit 1
+ fi
+ find $1 -print | xargs file |grep ELF |nawk -F: '{print $1}'
+ }
+
+ Returns a list of absolute paths.
+
+ Now that there are files_metadata, this function can safely go away, once
+ all its callers are modified to use files_metadata instead.
+ """
+ if self.binaries is None:
+ self.CheckPkgpathExists()
+ files_metadata = self.GetFilesMetadata()
+ self.binaries = []
+ # The nested for-loop looks inefficient.
+ for file_info in files_metadata:
+ if sharedlib_utils.IsBinary(file_info):
+ self.binaries.append(file_info["path"])
+ self.binaries.sort()
+ return self.binaries
+
+
+class FileMagic(object):
+ """Libmagic sometimes returns None, which I think is a bug.
+ Trying to come up with a way to work around that. It might not even be
+ very helpful, but at least detects the issue and tries to work around it.
+ """
+
+ def __init__(self):
+ self.cookie_count = 0
+ self.magic_cookie = None
+
+ def _GetCookie(self):
+ magic_cookie = magic.open(self.cookie_count)
+ self.cookie_count += 1
+ magic_cookie.load()
+ magic_cookie.setflags(magic.MAGIC_MIME)
+ return magic_cookie
+
+ def _LazyInit(self):
+ if not self.magic_cookie:
+ self.magic_cookie = self._GetCookie()
+
+ def GetFileMimeType(self, full_path):
+ """Trying to run magic.file() a few times, not accepting None."""
+ self._LazyInit()
+ mime = None
+ for i in xrange(10):
+ mime = self.magic_cookie.file(full_path)
+ if mime:
+      break
+ else:
+ # Returned mime is null. Re-initializing the cookie and trying again.
+ logging.error("magic_cookie.file(%s) returned None. Retrying.",
+ full_path)
+ self.magic_cookie = self._GetCookie()
+ return mime
+
+
+class InspectiveCswSrv4File(package.CswSrv4File):
+ """Allows to get the inspective version of the dir format pkg."""
+
+ # The presence of this method makes it explicit that we want an inspective
+ # version of the directory format package.
+ def GetInspectivePkg(self):
+ return self.GetDirFormatPkg()
+
+ def GetDirFormatClass(self):
+ return InspectivePackage
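A hedged sketch of how the new classes fit together, assuming the interfaces shown above; the package path is invented:

import inspective_package

srv4 = inspective_package.InspectiveCswSrv4File(
    "/tmp/foo-1.0,REV=2010.12.02-SunOS5.9-sparc-CSW.pkg.gz")
ipkg = srv4.GetInspectivePkg()  # an InspectivePackage in directory format
for file_info in ipkg.GetFilesMetadata():
  # Every entry carries "path" and "mime_type"; ELF files additionally get
  # "mime_type_by_hachoir", "machine_id" and "endian".
  print file_info["path"], file_info["mime_type"]
print ipkg.ListBinaries()  # paths of ELF binaries only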
Modified: csw/mgar/gar/v2-fortran/lib/python/opencsw.py
===================================================================
--- csw/mgar/gar/v2-fortran/lib/python/opencsw.py 2010-12-02 14:09:06 UTC (rev 11763)
+++ csw/mgar/gar/v2-fortran/lib/python/opencsw.py 2010-12-02 17:46:17 UTC (rev 11764)
@@ -14,28 +14,15 @@
import copy
import datetime
-import difflib
-import hachoir_parser as hp
-import hashlib
-import magic
import logging
import os
import os.path
import re
import shutil
-import subprocess
-import tempfile
-import time
import urllib2
-import overrides
-import configuration as c
from Cheetah import Template
+import sharedlib_utils as su
-# Suppress unhelpful warnings
-# http://bitbucket.org/haypo/hachoir/issue/23
-import hachoir_core.config
-hachoir_core.config.quiet = True
-
ARCH_SPARC = "sparc"
ARCH_i386 = "i386"
ARCH_ALL = "all"
@@ -51,28 +38,7 @@
REVISION_ADDED = "revision number added"
PKG_URL_TMPL = "http://www.opencsw.org/packages/%s"
CATALOG_URL = "http://mirror.opencsw.org/opencsw/current/i386/5.10/catalog"
-BIN_MIMETYPES = (
- 'application/x-executable',
- 'application/x-sharedlib',
-)
-ADMIN_FILE_CONTENT = """
-basedir=default
-runlevel=nocheck
-conflict=nocheck
-setuid=nocheck
-action=nocheck
-partial=nocheck
-instance=unique
-idepend=quit
-rdepend=quit
-space=quit
-authentication=nocheck
-networktimeout=10
-networkretries=5
-keystore=/var/sadm/security
-proxy=
-"""
-
+KNOWN_PKGNAME_PREFIXES = ["SUNW", "FJSV", "CSW"]
SUBMITPKG_TMPL = """From: $from
To: $to
#if $cc
@@ -108,10 +74,6 @@
pass
-class CatalogLineParseError(Error):
- pass
-
-
def ParsePackageFileName(p):
if p.endswith(".gz"):
p = p[:-3]
@@ -137,6 +99,32 @@
}
return data
+
+def ComposeVersionString(version, revision_info):
+ if revision_info:
+ version += ","
+ rev_lst = []
+ for key in sorted(revision_info.keys()):
+ rev_lst.append("%s=%s" % (key, revision_info[key]))
+ version += "_".join(rev_lst)
+ return version
+
+
+def ComposePackageFileName(parsed_filename):
+ """Composes package name, based on a parsed filename data structure.
+
+ Does not use the version_string property, but builds the version from
+ the basic version plus revision info.
+ """
+ tmpl = "%(catalogname)s-%(new_version)s-%(osrel)s-%(arch)s-%(vendortag)s.pkg"
+ version_string = parsed_filename["version"]
+ revision_info = parsed_filename["revision_info"]
+ version_string = ComposeVersionString(version_string, revision_info)
+ new_data = copy.copy(parsed_filename)
+ new_data["new_version"] = version_string
+ return tmpl % new_data
+
+
def ParseVersionString(s):
version_bits = re.split("_|,", s)
version_str = version_bits[0]
@@ -157,6 +145,10 @@
if not "extra_strings" in revision_info:
revision_info["extra_strings"] = []
revision_info["extra_strings"].append(version_bit)
+ # Bits of parsed version must be hashable; especially extra_strings in
+ # revision_info.
+ if "extra_strings" in revision_info:
+ revision_info["extra_strings"] = tuple(revision_info["extra_strings"])
return version_str, version_info, revision_info
@@ -407,183 +399,6 @@
return editor
-class ShellMixin(object):
-
- def ShellCommand(self, args, quiet=False):
- logging.debug("Calling: %s", repr(args))
- if quiet:
- process = subprocess.Popen(args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = process.communicate()
- retcode = process.wait()
- else:
- retcode = subprocess.call(args)
- if retcode:
- raise Error("Running %s has failed." % repr(args))
- return retcode
-
-
-class CswSrv4File(ShellMixin, object):
- """Represents a package in the srv4 format (pkg)."""
-
- def __init__(self, pkg_path, debug=False):
- self.pkg_path = pkg_path
- self.workdir = None
- self.gunzipped_path = None
- self.transformed = False
- self.dir_format_pkg = None
- self.debug = debug
- self.pkgname = None
- self.md5sum = None
- self.mtime = None
-
- def __repr__(self):
- return u"CswSrv4File(%s)" % repr(self.pkg_path)
-
- def GetWorkDir(self):
- if not self.workdir:
- self.workdir = tempfile.mkdtemp(prefix="pkg_")
- fd = open(os.path.join(self.workdir, "admin"), "w")
- fd.write(ADMIN_FILE_CONTENT)
- fd.close()
- return self.workdir
-
- def GetAdminFilePath(self):
- return os.path.join(self.GetWorkDir(), "admin")
-
- def GetGunzippedPath(self):
- if not self.gunzipped_path:
- gzip_suffix = ".gz"
- pkg_suffix = ".pkg"
- if self.pkg_path.endswith("%s%s" % (pkg_suffix, gzip_suffix)):
- # Causing the class to stat the .gz file. This call throws away the
- # result, but the result will be cached as a class instance member.
- self.GetMtime()
- base_name_gz = os.path.split(self.pkg_path)[1]
- shutil.copy(self.pkg_path, self.GetWorkDir())
- self.pkg_path = os.path.join(self.GetWorkDir(), base_name_gz)
- args = ["gunzip", "-f", self.pkg_path]
- unused_retcode = self.ShellCommand(args)
- self.gunzipped_path = self.pkg_path[:(-len(gzip_suffix))]
- elif self.pkg_path.endswith(pkg_suffix):
- self.gunzipped_path = self.pkg_path
- else:
- raise Error("The file name should end in either "
- "%s or %s." % (gzip_suffix, pkg_suffix))
- return self.gunzipped_path
-
- def Pkgtrans(self, src_file, destdir, pkgname):
- """A proxy for the pkgtrans command.
-
- This requires custom-pkgtrans to be available.
- """
- if not os.path.isdir(destdir):
- raise PackageError("%s doesn't exist or is not a directory" % destdir)
- args = [os.path.join(os.path.dirname(__file__), "custom-pkgtrans"),
- src_file,
- destdir,
- pkgname ]
- pkgtrans_proc = subprocess.Popen(args,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = pkgtrans_proc.communicate()
- ret = pkgtrans_proc.wait()
- if ret:
- logging.error(stdout)
- logging.error(stderr)
- logging.error("% has failed" % args)
-
- def GetPkgname(self):
- """It's necessary to figure out the pkgname from the .pkg file.
- # nawk 'NR == 2 {print $1; exit;} $f
- """
- if not self.pkgname:
- gunzipped_path = self.GetGunzippedPath()
- args = ["nawk", "NR == 2 {print $1; exit;}", gunzipped_path]
- nawk_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
- stdout, stderr = nawk_proc.communicate()
- ret_code = nawk_proc.wait()
- self.pkgname = stdout.strip()
- logging.debug("GetPkgname(): %s", repr(self.pkgname))
- return self.pkgname
-
- def GetMtime(self):
- if not self.mtime:
- # This fails if the file is not there.
- s = os.stat(self.pkg_path)
- t = time.gmtime(s.st_mtime)
- self.mtime = datetime.datetime(*t[:6])
- return self.mtime
-
- def TransformToDir(self):
- """Transforms the file to the directory format.
-
- This uses the Pkgtrans function at the top, because pkgtrans behaves
- differently on Solaris 8 and 10. Having our own implementation helps
- achieve consistent behavior.
- """
- if not self.transformed:
- gunzipped_path = self.GetGunzippedPath()
- pkgname = self.GetPkgname()
- args = [os.path.join(os.path.dirname(__file__),
- "..", "..", "bin", "custom-pkgtrans"),
- gunzipped_path, self.GetWorkDir(), pkgname]
- logging.debug("transforming: %s", args)
- unused_retcode = self.ShellCommand(args, quiet=(not self.debug))
- dirs = self.GetDirs()
- if len(dirs) != 1:
- raise Error("Need exactly one package in the package stream: "
- "%s." % (dirs))
- self.dir_format_pkg = DirectoryFormatPackage(dirs[0])
- self.transformed = True
-
- def GetDirFormatPkg(self):
- self.TransformToDir()
- return self.dir_format_pkg
-
- def GetDirs(self):
- paths = os.listdir(self.GetWorkDir())
- dirs = []
- for p in paths:
- abspath = os.path.join(self.GetWorkDir(), p)
- if os.path.isdir(abspath):
- dirs.append(abspath)
- return dirs
-
- def GetPkgmap(self, analyze_permissions, strip=None):
- dir_format_pkg = self.GetDirFormatPkg()
- return dir_format_pkg.GetPkgmap(analyze_permissions, strip)
-
- def GetMd5sum(self):
- if not self.md5sum:
- logging.debug("GetMd5sum() reading file %s", repr(self.pkg_path))
- fp = open(self.pkg_path)
- hash = hashlib.md5()
- hash.update(fp.read())
- fp.close()
- self.md5sum = hash.hexdigest()
- return self.md5sum
-
- def GetPkgchkOutput(self):
- """Returns: (exit code, stdout, stderr)."""
- args = ["pkgchk", "-d", self.GetGunzippedPath(), "all"]
- pkgchk_proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- stdout, stderr = pkgchk_proc.communicate()
- ret = pkgchk_proc.wait()
- return ret, stdout, stderr
-
- def GetFileMtime(self):
- if not self.mtime:
- self.mtime = os.stat(self.pkg_path).st_mtime
- return self.mtime
-
- def __del__(self):
- if self.workdir:
- logging.debug("Removing %s", repr(self.workdir))
- shutil.rmtree(self.workdir)
-
-
def ParsePkginfo(lines):
"""Parses a pkginfo data."""
d = {}
@@ -608,10 +423,9 @@
SUNWbashS --> sunw_bash_s
SUNWPython --> sunw_python
- Incomprehensible, but unit tested!
+ This function is incomprehensible, but unit tested!
"""
- known_prefixes = ["SUNW", "FJSV", "CSW"]
- for prefix in known_prefixes:
+ for prefix in KNOWN_PKGNAME_PREFIXES:
if pkgname.startswith(prefix):
unused, tmp_prefix, the_rest = pkgname.partition(prefix)
pkgname = tmp_prefix + "_" + the_rest
@@ -640,12 +454,7 @@
catalogname_list = copy.copy(catalogname_list)
if len(catalogname_list) == 1:
return catalogname_list[0]
- current_substring = catalogname_list.pop()
- while catalogname_list and current_substring:
- substring_set = LongestCommonSubstring(current_substring,
- catalogname_list.pop())
- if substring_set:
- current_substring = list(substring_set)[0]
+ current_substring = su.CollectionLongestCommonSubstring(catalogname_list)
# If it's something like foo_, make it foo.
while current_substring and not current_substring[-1].isalnum():
current_substring = current_substring[:-1]
@@ -654,27 +463,6 @@
return "various packages"
-def LongestCommonSubstring(S, T):
- """Stolen from Wikibooks
-
- http://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Longest_common_substring#Python"""
- m = len(S); n = len(T)
- L = [[0] * (n+1) for i in xrange(m+1)]
- LCS = set()
- longest = 0
- for i in xrange(m):
- for j in xrange(n):
- if S[i] == T[j]:
- v = L[i][j] + 1
- L[i+1][j+1] = v
- if v > longest:
- longest = v
- LCS = set()
- if v == longest:
- LCS.add(S[i-v+1:i+1])
- return LCS
-
-
def PkginfoToSrv4Name(pkginfo_dict):
SRV4_FN_TMPL = "%(catalog_name)s-%(version)s-%(osver)s-%(arch)s-%(tag)s.pkg"
fn_data = {}
@@ -689,304 +477,6 @@
return SRV4_FN_TMPL % fn_data
-class DirectoryFormatPackage(ShellMixin, object):
- """Represents a package in the directory format.
-
- Allows some read-write operations.
- """
- def __init__(self, directory):
- self.directory = directory
- self.pkgname = os.path.basename(directory)
- self.pkgpath = self.directory
- self.pkginfo_dict = None
- self.binaries = None
- self.file_paths = None
- self.files_metadata = None
-
- def GetCatalogname(self):
- """Returns the catalog name of the package.
-
- A bit hacky. Looks for the first word of the NAME field in the package.
- """
- pkginfo = self.GetParsedPkginfo()
- words = re.split(c.WS_RE, pkginfo["NAME"])
- return words[0]
-
- def GetParsedPkginfo(self):
- if not self.pkginfo_dict:
- pkginfo_fd = open(self.GetPkginfoFilename(), "r")
- self.pkginfo_dict = ParsePkginfo(pkginfo_fd)
- pkginfo_fd.close()
- return self.pkginfo_dict
-
- def GetSrv4FileName(self):
- """Guesses the Srv4FileName based on the package directory contents."""
- return PkginfoToSrv4Name(self.GetParsedPkginfo())
-
- def ToSrv4(self, target_dir):
- target_file_name = self.GetSrv4FileName()
- target_path = os.path.join(target_dir, target_file_name)
- if os.path.exists(target_path):
- return target_path
- pkg_container_dir, pkg_dir = os.path.split(self.directory)
- if not os.path.isdir(target_dir):
- os.makedirs(target_dir)
- args = ["pkgtrans", "-s", pkg_container_dir, target_path, pkg_dir]
- self.ShellCommand(args, quiet=True)
- args = ["gzip", "-f", target_path]
- self.ShellCommand(args, quiet=True)
- return target_path
-
- def GetPkgmap(self, analyze_permissions=False, strip=None):
- fd = open(os.path.join(self.directory, "pkgmap"), "r")
- return Pkgmap(fd, analyze_permissions, strip)
-
- def SetPkginfoEntry(self, key, value):
- pkginfo = self.GetParsedPkginfo()
- logging.debug("Setting %s to %s", repr(key), repr(value))
- pkginfo[key] = value
- self.WritePkginfo(pkginfo)
- pkgmap_path = os.path.join(self.directory, "pkgmap")
- pkgmap_fd = open(pkgmap_path, "r")
- new_pkgmap_lines = []
- pkginfo_re = re.compile("1 i pkginfo")
- ws_re = re.compile(r"\s+")
- for line in pkgmap_fd:
- if pkginfo_re.search(line):
- fields = ws_re.split(line)
- # 3: size
- # 4: sum
- pkginfo_path = os.path.join(self.directory, "pkginfo")
- args = ["cksum", pkginfo_path]
- cksum_process = subprocess.Popen(args, stdout=subprocess.PIPE)
- stdout, stderr = cksum_process.communicate()
- cksum_process.wait()
- size = ws_re.split(stdout)[1]
- args = ["sum", pkginfo_path]
- sum_process = subprocess.Popen(args, stdout=subprocess.PIPE)
- stdout, stderr = sum_process.communicate()
- sum_process.wait()
- sum_value = ws_re.split(stdout)[0]
- fields[3] = size
- fields[4] = sum_value
- logging.debug("New pkgmap line: %s", fields)
- line = " ".join(fields)
- new_pkgmap_lines.append(line.strip())
- pkgmap_fd.close()
- # Write that back
- pkgmap_path_new = pkgmap_path + ".new"
- logging.debug("Writing back to %s", pkgmap_path_new)
- pkgmap_fd = open(pkgmap_path_new, "w")
- pkgmap_fd.write("\n".join(new_pkgmap_lines))
- pkgmap_fd.close()
- shutil.move(pkgmap_path_new, pkgmap_path)
-
- # TODO(maciej): Need to update the relevant line on pkgmap too
-
- def GetPkginfoFilename(self):
- return os.path.join(self.directory, "pkginfo")
-
- def WritePkginfo(self, pkginfo_dict):
- # Some packages extract read-only. To be sure, change them to be
- # user-writable.
- args = ["chmod", "-R", "u+w", self.directory]
- self.ShellCommand(args)
- pkginfo_filename = self.GetPkginfoFilename()
- os.chmod(pkginfo_filename, 0644)
- pkginfo_fd = open(pkginfo_filename, "w")
- pkginfo_dict = self.GetParsedPkginfo()
- for k, v in pkginfo_dict.items():
- pkginfo_fd.write("%s=%s\n" % (k, pkginfo_dict[k]))
- pkginfo_fd.close()
-
- def ResetNameProperty(self):
- """Sometimes, NAME= contains useless data. This method resets them."""
- pkginfo_dict = self.GetParsedPkginfo()
- catalog_name = PkgnameToCatName(pkginfo_dict["PKG"])
- description = pkginfo_dict["DESC"]
- pkginfo_name = "%s - %s" % (catalog_name, description)
- self.SetPkginfoEntry("NAME", pkginfo_name)
-
- def GetDependencies(self):
- depends = []
- depend_file_path = os.path.join(self.directory, "install", "depend")
- if not os.path.exists(depend_file_path):
- return depends
- fd = open(os.path.join(self.directory, "install", "depend"), "r")
- # It needs to be a list because there might be duplicates and it's
- # necessary to carry that information.
- for line in fd:
- fields = re.split(c.WS_RE, line)
- if fields[0] == "P":
- pkgname = fields[1]
- pkg_desc = " ".join(fields[1:])
- depends.append((pkgname, pkg_desc))
- fd.close()
- return depends
-
- def CheckPkgpathExists(self):
- if not os.path.isdir(self.directory):
- raise PackageError("%s does not exist or is not a directory"
- % self.directory)
-
- def GetFilesMetadata(self):
- """Returns a data structure with all the files plus their metadata.
-
- [
- {
- "path": ...,
- "mime_type": ...,
- },
- ]
- """
- if not self.files_metadata:
- self.CheckPkgpathExists()
- self.files_metadata = []
- files_root = os.path.join(self.directory, "root")
- if not os.path.exists(files_root):
- return self.files_metadata
- all_files = self.GetAllFilePaths()
- def StripRe(x, strip_re):
- return re.sub(strip_re, "", x)
- root_re = re.compile(r"^root/")
- file_magic = FileMagic()
- for file_path in all_files:
- full_path = unicode(self.MakeAbsolutePath(file_path))
- file_info = {
- "path": StripRe(file_path, root_re),
- "mime_type": file_magic.GetFileMimeType(full_path)
- }
- if not file_info["mime_type"]:
- logging.error("Could not establish the mime type of %s",
- full_path)
- # We really don't want that, as it misses binaries.
- raise PackageError("Could not establish the mime type of %s"
- % full_path)
- if IsBinary(file_info):
- parser = hp.createParser(full_path)
- if not parser:
- logging.warning("Can't parse file %s", file_path)
- else:
- file_info["mime_type_by_hachoir"] = parser.mime_type
- machine_id = parser["/header/machine"].value
- file_info["machine_id"] = machine_id
- file_info["endian"] = parser["/header/endian"].display
- self.files_metadata.append(file_info)
@@ Diff output truncated at 100000 characters. @@