[csw-devel] SF.net SVN: gar:[20478] csw/mgar/gar/bts

dmichelsen at users.sourceforge.net dmichelsen at users.sourceforge.net
Mon Mar 18 17:06:22 CET 2013


Revision: 20478
          http://gar.svn.sourceforge.net/gar/?rev=20478&view=rev
Author:   dmichelsen
Date:     2013-03-18 16:06:19 +0000 (Mon, 18 Mar 2013)
Log Message:
-----------
mGAR bts: Merge in v2 changes

Modified Paths:
--------------
    csw/mgar/gar/bts/gar.conf.mk
    csw/mgar/gar/bts/gar.mk
    csw/mgar/gar/bts/gar.pkg.mk
    csw/mgar/gar/bts/lib/python/catalog_gc.py
    csw/mgar/gar/bts/lib/python/checkpkg2.py
    csw/mgar/gar/bts/lib/python/common_constants.py
    csw/mgar/gar/bts/lib/python/csw_upload_pkg.py
    csw/mgar/gar/bts/lib/python/database.py
    csw/mgar/gar/bts/lib/python/database_test.py
    csw/mgar/gar/bts/lib/python/dependency_checks.py
    csw/mgar/gar/bts/lib/python/inspective_package.py
    csw/mgar/gar/bts/lib/python/inspective_package_test.py
    csw/mgar/gar/bts/lib/python/models.py
    csw/mgar/gar/bts/lib/python/package.py
    csw/mgar/gar/bts/lib/python/package_checks.py
    csw/mgar/gar/bts/lib/python/package_checks_test.py
    csw/mgar/gar/bts/lib/python/package_stats.py
    csw/mgar/gar/bts/lib/python/package_stats_test.py
    csw/mgar/gar/bts/lib/python/pkgdb.py
    csw/mgar/gar/bts/lib/python/rest.py
    csw/mgar/gar/bts/lib/python/safe_remove_package.py
    csw/mgar/gar/bts/lib/python/sharedlib_utils.py
    csw/mgar/gar/bts/lib/python/shell.py
    csw/mgar/gar/bts/lib/python/system_pkgmap.py
    csw/mgar/gar/bts/lib/python/system_pkgmap_test.py
    csw/mgar/gar/bts/lib/python/testdata/apr_util_stats.py
    csw/mgar/gar/bts/lib/python/testdata/bdb48_stats.py
    csw/mgar/gar/bts/lib/python/testdata/checkpkg_test_data_CSWdjvulibrert.py
    csw/mgar/gar/bts/lib/python/testdata/ivtools_stats.py
    csw/mgar/gar/bts/lib/python/testdata/javasvn_stats.py
    csw/mgar/gar/bts/lib/python/testdata/libnet_stats.py
    csw/mgar/gar/bts/lib/python/testdata/mercurial_stats.py
    csw/mgar/gar/bts/lib/python/testdata/neon_stats.py
    csw/mgar/gar/bts/lib/python/testdata/rsync_pkg_stats.py
    csw/mgar/gar/bts/lib/python/testdata/sudo_stats.py
    csw/mgar/gar/bts/lib/python/testdata/tree_stats.py
    csw/mgar/gar/bts/lib/web/pkgdb_web.py
    csw/mgar/gar/bts/lib/web/releases_web.py
    csw/mgar/gar/bts/lib/web/templates/CatalogDetail.html
    csw/mgar/gar/bts/tests/run_tests.py

Added Paths:
-----------
    csw/mgar/gar/bts/lib/python/testdata/cadaver_stats.py
    csw/mgar/gar/bts/lib/python/testdata/vsftpd_stats.py

Removed Paths:
-------------
    csw/mgar/gar/bts/tests/static/example/gar
    csw/mgar/gar/bts/upload-application/

Property Changed:
----------------
    csw/mgar/gar/bts/


Property changes on: csw/mgar/gar/bts
___________________________________________________________________
Modified: svn:mergeinfo
   - /csw/mgar/gar/v2:4936-6678
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-defaultchange:13903-14022
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-fortran:10883-12516
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-raised-buildlevel:15906-15949
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-solaris11:18134-18236
/csw/mgar/gar/v2-sqlite:10434-10449
/csw/mgar/gar/v2-uwatch2:12141-13270
   + /csw/mgar/gar/v2:4936-6678,19813-20248
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-defaultchange:13903-14022
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-fortran:10883-12516
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-raised-buildlevel:15906-15949
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-solaris11:18134-18236
/csw/mgar/gar/v2-sqlite:10434-10449
/csw/mgar/gar/v2-uwatch2:12141-13270

Modified: csw/mgar/gar/bts/gar.conf.mk
===================================================================
--- csw/mgar/gar/bts/gar.conf.mk	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/gar.conf.mk	2013-03-18 16:06:19 UTC (rev 20478)
@@ -201,17 +201,20 @@
 
 # These are the core packages which must be installed for GAR to function correctly
 
-DEF_BASE_PKGS += CSWxz
 DEF_BASE_PKGS += CSWbzip2
+DEF_BASE_PKGS += CSWcoreutils
 DEF_BASE_PKGS += CSWdiffutils
 DEF_BASE_PKGS += CSWfindutils
 DEF_BASE_PKGS += CSWgawk
 DEF_BASE_PKGS += CSWgfile
 DEF_BASE_PKGS += CSWggrep
+DEF_BASE_PKGS += CSWgit
 DEF_BASE_PKGS += CSWgmake
 DEF_BASE_PKGS += CSWgsed
 DEF_BASE_PKGS += CSWgtar
 DEF_BASE_PKGS += CSWpy-cheetah
+DEF_BASE_PKGS += CSWpy-cjson
+DEF_BASE_PKGS += CSWpy-dateutil
 DEF_BASE_PKGS += CSWpy-hachoir-core
 DEF_BASE_PKGS += CSWpy-hachoir-parser
 DEF_BASE_PKGS += CSWpy-libmagic
@@ -219,9 +222,8 @@
 DEF_BASE_PKGS += CSWpy-sqlobject
 DEF_BASE_PKGS += CSWpy-yaml
 DEF_BASE_PKGS += CSWpython
-DEF_BASE_PKGS += CSWcoreutils
 DEF_BASE_PKGS += CSWwget
-DEF_BASE_PKGS += CSWgit
+DEF_BASE_PKGS += CSWxz
 
 ifdef GIT_REPOS
 # netcat and bash are for the gitproxy script.
@@ -716,7 +718,7 @@
 RUNPATH_LINKER_FLAGS ?= $(foreach D,$(RUNPATH_DIRS),$(addprefix -R,$(addsuffix /\$$ISALIST,$(filter $D,$(RUNPATH_ISALIST))) $(abspath $D/$(MM_LIBDIR)))) $(addprefix -R,$(filter-out $(RUNPATH_DIRS),$(RUNPATH_ISALIST))) $(EXTRA_RUNPATH_LINKER_FLAGS)
 endif
 
-LINKER_FLAGS ?= $(foreach ELIB,$(EXTRA_LIB) $(filter-out $(libpath_install),$(libdir_install)) $(libpath_install),-L$(abspath $(ELIB)/$(MM_LIBDIR))) $(EXTRA_LINKER_FLAGS)
+LINKER_FLAGS ?= $(PREPEND_LINKER_FLAGS) $(foreach ELIB,$(EXTRA_LIB) $(filter-out $(libpath_install),$(libdir_install)) $(libpath_install),-L$(abspath $(ELIB)/$(MM_LIBDIR))) $(EXTRA_LINKER_FLAGS)
 
 CC_HOME  = $($(GARCOMPILER)_CC_HOME)
 CC       = $($(GARCOMPILER)_CC)

Modified: csw/mgar/gar/bts/gar.mk
===================================================================
--- csw/mgar/gar/bts/gar.mk	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/gar.mk	2013-03-18 16:06:19 UTC (rev 20478)
@@ -157,7 +157,6 @@
 $(call _modulate_target_nocookie,reset-test,$(2),$(4))
 $(call _modulate_target,install,$(2),$(4))
 $(call _modulate_target_nocookie,reset-install,$(2),$(4))
-#$(call _modulate_target,merge,$(2),$(4))
 $(call _modulate_merge,,$(2),$(4))
 $(call _modulate_target_nocookie,reset-merge,$(2),$(4))
 $(call _modulate_target_nocookie,clean,$(2),$(4))
@@ -428,7 +427,7 @@
 
 # We call an additional extract-modulated without resetting any variables so
 # a complete unpacked set goes to the global dir for packaging (like gspec)
-extract: checksum $(COOKIEDIR) pre-extract $(if $(NOGITPATCH),,pre-extract-git-check) extract-modulated $(addprefix extract-,$(MODULATIONS)) post-extract
+extract: checksum $(COOKIEDIR) $(if $(NOGITPATCH),,pre-extract-git-check) extract-modulated $(addprefix extract-,$(MODULATIONS))
 	@$(DONADA)
 
 extract-global: $(if $(filter global,$(MODULATION)),extract-modulated)
@@ -438,7 +437,7 @@
 extract-modulated: checksum-modulated $(EXTRACTDIR) $(COOKIEDIR) \
 		$(addprefix dep-$(GARDIR)/,$(EXTRACTDEPS)) \
 		announce-modulation \
-		pre-extract-modulated pre-extract-$(MODULATION) $(EXTRACT_TARGETS) post-extract-$(MODULATION) post-extract-modulated \
+		$(if $(filter-out global,$(MODULATION)),pre-extract) pre-extract-modulated pre-extract-$(MODULATION) $(EXTRACT_TARGETS) post-extract-$(MODULATION) post-extract-modulated $(if $(filter-out global,$(MODULATION)),post-extract) \
 		$(if $(filter global,$(MODULATION)),,$(if $(NOGITPATCH),,post-extract-gitsnap)) \
 		$(foreach FILE,$(EXPANDVARS),expandvars-$(FILE)) \
 		$(foreach REINPLACEMENT,$(POSTEXTRACT_REINPLACEMENTS),\
@@ -510,10 +509,10 @@
 # patch			- Apply any provided patches to the source.
 PATCH_TARGETS = $(addprefix patch-extract-,$(PATCHFILES) $(PATCHFILES_$(MODULATION)))
 
-patch: pre-patch $(addprefix patch-,$(MODULATIONS)) post-patch
+patch: $(addprefix patch-,$(MODULATIONS))
 	@$(DONADA)
 
-patch-modulated: extract-modulated $(WORKSRC) pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) $(if $(filter global,$(MODULATION)),,$(if $(NOGITPATCH),,post-patch-gitsnap)) post-patch-$(MODULATION) post-patch-modulated
+patch-modulated: extract-modulated $(WORKSRC) pre-patch pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) $(if $(filter global,$(MODULATION)),,$(if $(NOGITPATCH),,post-patch-gitsnap)) post-patch-$(MODULATION) post-patch-modulated post-patch
 	@$(DONADA)
 
 # returns true if patch has completed successfully, false
@@ -605,12 +604,12 @@
 #CONFIGURE_BUILDDEPS = $(addprefix $(GARDIR)/,$(addsuffix /$(COOKIEROOTDIR)/build.d/install,$(BUILDDEPS)))
 endif
 
-configure: pre-configure $(addprefix configure-,$(MODULATIONS)) post-configure
+configure: $(addprefix configure-,$(MODULATIONS))
 	@$(DONADA)
 
 configure-modulated: verify-isa patch-modulated $(CONFIGURE_IMGDEPS) $(CONFIGURE_BUILDDEPS) $(CONFIGURE_DEPS) \
 		$(addprefix srcdep-$(GARDIR)/,$(SOURCEDEPS)) \
-		pre-configure-modulated pre-configure-$(MODULATION) $(CONFIGURE_TARGETS) post-configure-$(MODULATION) post-configure-modulated $(if $(STRIP_LIBTOOL),strip-libtool)
+		pre-configure pre-configure-modulated pre-configure-$(MODULATION) $(CONFIGURE_TARGETS) post-configure-$(MODULATION) post-configure-modulated post-configure $(if $(STRIP_LIBTOOL),strip-libtool)
 	@$(DONADA)
 
 strip-libtool:
@@ -626,7 +625,7 @@
 	@rm -f $(COOKIEDIR)/configure
 
 reset-configure-modulated:
-	@rm -f $(foreach C,pre-configure-modulated configure-modulated post-configure-modulated,$(COOKIEDIR)/$C)
+	@rm -f $(foreach C,pre-configure pre-configure-modulated configure-modulated post-configure-modulated post-configure,$(COOKIEDIR)/$C)
 	@rm -f $(COOKIEDIR)/pre-configure-$(MODULATION) $(COOKIEDIR)/post-configure-$(MODULATION)
 	@rm -f $(addprefix $(COOKIEDIR)/,$(CONFIGURE_TARGETS))
 
@@ -638,7 +637,7 @@
 # build			- Actually compile the sources.
 BUILD_TARGETS = $(addprefix build-,$(BUILD_CHECK_SCRIPTS)) $(addprefix build-,$(BUILD_SCRIPTS))
 
-build: pre-build $(addprefix build-,$(MODULATIONS)) post-build
+build: $(addprefix build-,$(MODULATIONS))
 	$(DONADA)
 
 # Build for a specific architecture
@@ -647,7 +646,7 @@
 		$(error Code for the architecture $* can not be produced with the compiler $(GARCOMPILER))      \
 	)
 
-build-modulated: verify-isa configure-modulated pre-build-modulated pre-build-$(MODULATION) $(BUILD_TARGETS) post-build-$(MODULATION) post-build-modulated
+build-modulated: verify-isa configure-modulated pre-build pre-build-modulated pre-build-$(MODULATION) $(BUILD_TARGETS) post-build-$(MODULATION) post-build-modulated post-build
 	@$(MAKECOOKIE)
 
 .PHONY: reset-build reset-build-modulated
@@ -656,9 +655,8 @@
 reset-build: $(addprefix reset-build-,$(MODULATIONS))
 	rm -f $(COOKIEDIR)/build
 
-# XXX: pre-*, post-*
 reset-build-modulated: $(patsubst build-%,clean-%,$(BUILD_TARGETS))
-	rm -f $(addprefix $(COOKIEDIR)/,pre-build-modulated $(BUILD_TAGRETS) post-build-modulated))
+	rm -f $(addprefix $(COOKIEDIR)/,pre-build pre-build-modulated $(BUILD_TAGRETS) post-build-modulated post-build))
 
 # returns true if build has completed successfully, false
 # otherwise
@@ -667,10 +665,10 @@
 
 TEST_TARGETS = $(addprefix test-,$(TEST_SCRIPTS))
 
-test: pre-test $(addprefix test-,$(MODULATIONS)) post-test
+test: $(addprefix test-,$(MODULATIONS))
 	$(DONADA)
 
-test-modulated: build-modulated pre-test-modulated pre-test-$(MODULATION) $(TEST_TARGETS) post-test-$(MODULATION) post-test-modulated
+test-modulated: build-modulated pre-test pre-test-modulated pre-test-$(MODULATION) $(TEST_TARGETS) post-test-$(MODULATION) post-test-modulated post-test
 	$(DONADA)
 
 # XXX: retest
@@ -704,13 +702,13 @@
 # install		- Test and install the results of a build.
 INSTALL_TARGETS = $(addprefix install-,$(INSTALL_SCRIPTS))
 
-install: pre-install $(addprefix install-,$(MODULATIONS)) post-install
+install: $(addprefix install-,$(MODULATIONS))
 	$(DONADA)
 
 install-modulated: build-modulated $(addprefix dep-$(GARDIR)/,$(INSTALLDEPS)) test-modulated $(INSTALL_DIRS) $(PRE_INSTALL_TARGETS) \
-		pre-install-modulated pre-install-$(MODULATION) \
+		pre-install pre-install-modulated pre-install-$(MODULATION) \
 		$(INSTALL_TARGETS) \
-		post-install-$(MODULATION) post-install-modulated \
+		post-install-$(MODULATION) post-install-modulated post-install \
 		$(POST_INSTALL_TARGETS) \
 		$(foreach REINPLACEMENT,$(POSTINSTALL_REINPLACEMENTS),\
 		  post-install-reinplace-$(REINPLACEMENT) \
@@ -741,7 +739,7 @@
 reset-install-modulated:
 	@$(call _pmod,Reset install state)
 	@rm -rf $(INSTALLISADIR) $(COOKIEDIR)/install-work
-	@rm -f $(foreach C,pre-install-modulated install-modulated post-install-modulated,$(COOKIEDIR)/$C)
+	@rm -f $(foreach C,pre-install pre-install-modulated install-modulated post-install-modulated post-install,$(COOKIEDIR)/$C)
 	@rm -f $(COOKIEDIR)/pre-install-$(MODULATION) $(COOKIEDIR)/post-install-$(MODULATION)
 	@rm -f $(COOKIEDIR)/strip
 	@rm -f $(foreach S,$(INSTALL_TARGETS),$(COOKIEDIR)/$S)
@@ -879,7 +877,7 @@
 
 
 # The basic merge merges the compiles for all ISAs on the current architecture
-merge: checksum pre-merge merge-do merge-license merge-classutils merge-checkpkgoverrides merge-alternatives $(if $(COMPILE_ELISP),compile-elisp) $(if $(NOSOURCEPACKAGE),,merge-src) merge-distfile-README.CSW merge-distfile-changelog.CSW merge-obsolete $(if $(AP2_MODS),post-merge-ap2mod) $(if $(PHP5_EXT),post-merge-php5ext) post-merge
+merge: checksum merge-do merge-license merge-classutils merge-checkpkgoverrides merge-alternatives $(if $(COMPILE_ELISP),compile-elisp) $(if $(NOSOURCEPACKAGE),,merge-src) merge-distfile-README.CSW merge-distfile-changelog.CSW merge-obsolete $(if $(AP2_MODS),post-merge-ap2mod) $(if $(PHP5_EXT),post-merge-php5ext)
 	@banner merge
 	@$(MAKECOOKIE)
 
@@ -921,7 +919,7 @@
 	@$(MAKECOOKIE)
 
 # This merges the 
-merge-modulated: install-modulated pre-merge-modulated pre-merge-$(MODULATION) $(MERGE_TARGETS) post-merge-$(MODULATION) post-merge-modulated
+merge-modulated: install-modulated pre-merge pre-merge-modulated pre-merge-$(MODULATION) $(MERGE_TARGETS) post-merge-$(MODULATION) post-merge-modulated post-merge
 	@$(MAKECOOKIE)
 
 # Copy the whole tree verbatim
@@ -979,13 +977,12 @@
 remerge: reset-merge merge
 
 reset-merge: reset-package $(addprefix reset-merge-,$(MODULATIONS)) reset-merge-license reset-merge-classutils reset-merge-checkpkgoverrides reset-merge-alternatives reset-merge-distfile-README.CSW reset-merge-distfile-changelog.CSW reset-merge-obsolete reset-merge-ap2mod reset-merge-php5ext reset-merge-src
-	rm -f $(COOKIEDIR)/pre-merge $(foreach M,$(MODULATIONS),$(COOKIEDIR)/merge-$M) $(COOKIEDIR)/merge $(COOKIEDIR)/post-merge
+	rm -f $(foreach M,$(MODULATIONS),$(COOKIEDIR)/merge-$M) $(COOKIEDIR)/merge
 	rm -rf $(PKGROOT)
 
 reset-merge-modulated:
 	@$(call _pmod,Reset merge state)
-	echo rm -f $(COOKIEDIR)/merge-*
-	rm -f $(COOKIEDIR)/merge-*
+	rm -f $(COOKIEDIR)/pre-merge $(COOKIEDIR)/pre-merge-* $(COOKIEDIR)/merge-* $(COOKIEDIR)/post-merge $(COOKIEDIR)/post-merge-*
 
 # The clean rule.  It must be run if you want to re-download a
 # file after a successful checksum (or just remove the checksum

Modified: csw/mgar/gar/bts/gar.pkg.mk
===================================================================
--- csw/mgar/gar/bts/gar.pkg.mk	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/gar.pkg.mk	2013-03-18 16:06:19 UTC (rev 20478)
@@ -31,7 +31,7 @@
 # SRCPACKAGE is the name of the package containing the sources
 
 ifeq ($(origin PACKAGES), undefined)
-PACKAGES        = $(if $(filter %.gspec,$(DISTFILES)),,$(PKG_STEM)$(NAME))
+PACKAGES        = $(if $(filter %.gspec,$(DISTFILES)),,$(PKG_STEM)$(subst _,-,$(NAME)))
 CATALOGNAME    ?= $(if $(filter %.gspec,$(DISTFILES)),,$(subst -,_,$(NAME)))
 SRCPACKAGE_BASE = $(firstword $(basename $(filter %.gspec,$(DISTFILES))) $(PACKAGES))
 SRCPACKAGE     ?= $(SRCPACKAGE_BASE)-src
@@ -208,7 +208,14 @@
 
 SPKG_SPOOLROOT ?= $(DESTROOT)
 SPKG_SPOOLDIR  ?= $(SPKG_SPOOLROOT)/spool.$(GAROSREL)-$(GARCH)
-SPKG_EXPORT    ?= $(HOME)/staging/build-$(shell date '+%d.%b.%Y')
+ifdef SPKG_EXPORT
+# The definition may include variable parts like a call to "date". This would lead to different directory names
+# for multiple invocation in longs builds and a failing checkpkg due to lookup in wrong directories, so fixate
+# once what we have.
+SPKG_EXPORT    := $(SPKG_EXPORT)
+else
+SPKG_EXPORT    := $(HOME)/staging/build-$(shell date '+%d.%b.%Y')
+endif
 SPKG_PKGROOT   ?= $(PKGROOT)
 SPKG_PKGBASE   ?= $(PKGROOT)
 SPKG_WORKDIR   ?= $(CURDIR)/$(WORKDIR)
@@ -216,8 +223,20 @@
 
 SPKG_DEPEND_DB  = $(GARDIR)/csw/depend.db
 
-SPKG_PKGFILE ?= %{bitname}-%{SPKG_VERSION},%{SPKG_REVSTAMP}-%{SPKG_OSNAME}-%{arch}-$(or $(filter $(call _REVISION),UNCOMMITTED NOTVERSIONED NOSVN),$(PKG_STEM)).pkg
+# These variables could change value transiently and need to be passed to subinvocations of GAR
+_PASS_GAR_SUBINVOCATION_EXPORTS += SPKG_EXPORT
+_PASS_GAR_ENV = $(foreach V,$(_PASS_GAR_SUBINVOCATION_EXPORTS),$V=$($V))
 
+# This is the old specification being evaluated during mkpackage. The expansion of the SPKG_REVSTAMP leads to
+# problems later on when need the filename for checkpkg again and too much time has passed. In the new approach
+# the packagename is directly put in the gspec.
+# SPKG_PKGFILE ?= %{bitname}-%{SPKG_VERSION},%{SPKG_REVSTAMP}-%{SPKG_OSNAME}-%{arch}-$(or $(filter $(call _REVISION),UNCOMMITTED NOTVERSIONED NOSVN),CSW).pkg
+
+# The filename for a package
+define _pkgfile
+$(call catalogname,$(1))-$(call pkgvar,SPKG_VERSION,$(1)),$(call pkgvar,SPKG_REVSTAMP,$(1))-$(call pkgvar,SPKG_OSNAME,$(1))-$(if $(or $(ARCHALL),$(ARCHALL_$(1))),all,$(GARCH))-$(or $(filter $(call _REVISION),UNCOMMITTED NOTVERSIONED NOSVN),CSW).pkg
+endef
+
 MIGRATECONF ?= $(strip $(foreach S,$(filter-out $(OBSOLETED_PKGS),$(SPKG_SPECS)),$(if $(or $(MIGRATE_FILES_$S),$(MIGRATE_FILES)),/etc/opt/csw/pkg/$S/cswmigrateconf)))
 
 # It is NOT sufficient to change the pathes here, they must be adjusted in merge-* also
@@ -315,6 +334,7 @@
 # Where we find our mkpackage global templates
 PKGLIB = $(GARDIR)/pkglib
 
+# These variables are for mkpackage and the gspec expansion
 PKG_EXPORTS  = NAME VERSION DESCRIPTION CATEGORIES GARCH GARDIR GARBIN
 PKG_EXPORTS += CURDIR WORKDIR WORKDIR_FIRSTMOD WORKSRC WORKSRC_FIRSTMOD PKGROOT
 PKG_EXPORTS += SPKG_REVSTAMP SPKG_PKGNAME SPKG_DESC SPKG_VERSION SPKG_CATEGORY
@@ -597,6 +617,7 @@
 	$(_DBG)$(if $(filter $*.gspec,$(DISTFILES)),,\
 		(echo "%var            bitname $(call catalogname,$*)"; \
 		echo "%var            pkgname $*"; \
+		echo "%var            pkgfile $(call _pkgfile,$*)"; \
 		$(if $(or $(ARCHALL),$(ARCHALL_$*)),echo "%var            arch all";) \
 		$(if $(_CATEGORY_GSPEC_INCLUDE),echo "%include        url file://%{PKGLIB}/$(_CATEGORY_GSPEC_INCLUDE)")) >$@\
 	)
@@ -704,6 +725,7 @@
 	echo "OPENCSW_BUNDLE=$(BUNDLE)"; \
 	echo "OPENCSW_OS_RELEASE=$(SPKG_OSNAME)"; \
 	echo "OPENCSW_OS_ARCH=$(GARCH)"; \
+	$(if $(OBSOLETED_BY_$*),echo "OPENCSW_OBSOLETES=$(OBSOLETED_BY_$*)";) \
 	$(_CATEGORY_PKGINFO) \
 	) >$@
 	$(if $(ALLOW_RELOCATE),echo "BASEDIR=$(RELOCATE_PREFIX)" >>$@)
@@ -950,7 +972,7 @@
 	@echo
 	@echo "The following packages have been built:"
 	@echo
-	@$(MAKE) -s GAR_PLATFORM=$(GAR_PLATFORM) _pkgshow
+	@$(MAKE) -s $(_PASS_GAR_ENV) GAR_PLATFORM=$(GAR_PLATFORM) _pkgshow
 	@echo
 	@$(DONADA)
 
@@ -959,7 +981,7 @@
 dirpackage: _package
 	@echo "The following packages have been built:"
 	@echo
-	@$(MAKE) -s GAR_PLATFORM=$(GAR_PLATFORM) _dirpkgshow
+	@$(MAKE) -s $(_PASS_GAR_ENV) GAR_PLATFORM=$(GAR_PLATFORM) _dirpkgshow
 	@echo
 	@$(DONADA)
 
@@ -1032,14 +1054,15 @@
 _PROPAGATE_ENV += PARALLELMFLAGS
 _PROPAGATE_ENV += PARALLELMODULATIONS
 _PROPAGATE_ENV += PATH
+_PROPAGATE_ENV += SKIPTEST
 
 platforms: _PACKAGING_PLATFORMS=$(if $(ARCHALL),$(firstword $(PACKAGING_PLATFORMS)),$(PACKAGING_PLATFORMS))
 platforms:
 	$(foreach P,$(_PACKAGING_PLATFORMS),\
 		$(if $(PACKAGING_HOST_$P),\
 			$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
-				$(MAKE) GAR_PLATFORM=$P _package && ,\
-				$(SSH) -t $(PACKAGING_HOST_$P) "$(foreach V,$(_PROPAGATE_ENV),$(if $($V),$V=$($V))) $(MAKE) -I $(GARDIR) -C $(CURDIR) GAR_PLATFORM=$P _package" && \
+				$(MAKE) $(_PASS_GAR_ENV) GAR_PLATFORM=$P _package && ,\
+				$(SSH) -t $(PACKAGING_HOST_$P) "$(foreach V,$(_PROPAGATE_ENV),$(if $($V),$V=$($V))) $(MAKE) -I $(GARDIR) -C $(CURDIR) $(_PASS_GAR_ENV) GAR_PLATFORM=$P _package" && \
 			),\
 			$(error *** No host has been defined for platform $P)\
 		)\
@@ -1052,9 +1075,9 @@
 		$(if $(ARCHALL),echo " (suitable for all architectures)\c";) \
 		$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
 			echo " (built on this host)";\
-			  $(MAKE) -s GAR_PLATFORM=$P _pkgshow;echo;,\
+			  $(MAKE) -s $(_PASS_GAR_ENV) GAR_PLATFORM=$P _pkgshow;echo;,\
 			echo " (built on host '$(PACKAGING_HOST_$P)')";\
-			  $(SSH) $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -I $(GARDIR) -C $(CURDIR) -s GAR_PLATFORM=$P _pkgshow";echo;\
+			  $(SSH) $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -I $(GARDIR) -C $(CURDIR) -s $(_PASS_GAR_ENV) GAR_PLATFORM=$P _pkgshow";echo;\
 		)\
 	)
 	@$(MAKECOOKIE)
@@ -1064,8 +1087,8 @@
 	$(foreach P,$(_PACKAGING_PLATFORMS),\
 		$(if $(PACKAGING_HOST_$P),\
 			$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
-				$(MAKE) -s GAR_PLATFORM=$P $* && ,\
-				$(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -I $(GARDIR) -C $(CURDIR) GAR_PLATFORM=$P $*" && \
+				$(MAKE) -s $(_PASS_GAR_ENV) GAR_PLATFORM=$P $* && ,\
+				$(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -I $(GARDIR) -C $(CURDIR) $(_PASS_GAR_ENV) GAR_PLATFORM=$P $*" && \
 			),\
 			$(error *** No host has been defined for platform $P)\
 		)\

Modified: csw/mgar/gar/bts/lib/python/catalog_gc.py
===================================================================
--- csw/mgar/gar/bts/lib/python/catalog_gc.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/catalog_gc.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -1,16 +1,29 @@
 #!/opt/csw/bin/python2.6
 
-"""Garbage-collecting for a catalog.
+"""Garbage-collecting for the catalog tree.
 
 The allpkgs directory may contain unused files.  They should be deleted.
 """
 
+import logging
 import optparse
-import logging
-import os.path
+import os
+import pipes
 import re
 import common_constants
+import rest
 
+USAGE = """%prog --catalog-tree /home/mirror/opencsw-official --dest_dir /home/mirror/gc > gc_01.sh
+less gc_01.sh
+
+# Looks good?
+
+bash gc_01.sh
+
+If everything is fine (catalog still generates, no files are missing that are
+necessary), you can remove files from /home/mirror/gc.
+"""
+
 class Error(Exception):
   """Base error."""
 
@@ -20,11 +33,12 @@
 
 class CatalogGarbageCollector(object):
 
-  ADDITIONAL_CATALOGS = ("current", "stable")
+  ADDITIONAL_CATALOGS = ("legacy",)
 
-  def __init__(self, d):
+  def __init__(self, d, dest_dir):
     logging.debug("CatalogGarbageCollector(%s)", repr(d))
     self.catalog_dir = d
+    self.dest_dir = dest_dir
 
   def GarbageCollect(self):
     allpkgs_path = os.path.join(self.catalog_dir, "allpkgs")
@@ -33,9 +47,15 @@
     catalogs_by_files = {}
     for p in os.listdir(allpkgs_path):
       allpkgs.add(p)
-    catalogs_to_check = (
-        tuple(common_constants.DEFAULT_CATALOG_RELEASES)
-        + self.ADDITIONAL_CATALOGS)
+    catalogs_to_check = tuple(common_constants.DEFAULT_CATALOG_RELEASES)
+    catalogs_to_check += self.ADDITIONAL_CATALOGS
+    rest_client = rest.RestClient()
+    catalog_triplet_list = rest_client.GetCatalogList()
+    catalogs_to_check += tuple(set([x[2] for x in catalog_triplet_list]))
+    catalogs_to_check = tuple(set(catalogs_to_check))
+    logging.info("Collecting packages from catalogs: %s",
+                 catalogs_to_check)
+    file_sizes = {}
     for catrel in catalogs_to_check:
       for arch in common_constants.PHYSICAL_ARCHITECTURES:
         for osrel_long in common_constants.OS_RELS:
@@ -49,25 +69,42 @@
           for p in os.listdir(catalog_path):
             if pkg_re.search(p):
               # It's a package
+              full_path = os.path.join(catalog_path, p)
               files_in_catalogs.add(p)
               l = catalogs_by_files.setdefault(p, [])
               l.append((catrel, arch, osrel_short))
-    for p in allpkgs.difference(files_in_catalogs):
-      logging.debug("File %s is not used by any catalogs.", p)
-      print "rm %s/%s" % (allpkgs_path, p)
+              if full_path not in file_sizes:
+                s = os.stat(full_path)
+                file_sizes[full_path] = s.st_size
+      logging.info(
+          "Collected from %r, found references to %d files (out of %d in allpkgs)",
+          catrel, len(files_in_catalogs), len(allpkgs))
+    to_remove = allpkgs.difference(files_in_catalogs)
+    logging.debug("Collecting file sizes.")
+    total_size = sum(os.stat(os.path.join(allpkgs_path, x)).st_size
+                     for x in to_remove)
+    logging.info("Found %d packages to remove, total size: %.1fMB.",
+                 len(to_remove), float(total_size) / 1024 ** 2)
+    for p in to_remove:
+      full_path = os.path.join(allpkgs_path, p)
+      print "mv", pipes.quote(full_path), pipes.quote(self.dest_dir)
 
 
 def main():
   parser = optparse.OptionParser()
-  parser.add_option("-c", "--catalog",
-      dest="catalog",
-      help="Catalog path")
+  parser.add_option("--catalog-tree",
+      dest="catalog_tree",
+      help=("Path to the catalog tree, that is the directory "
+            "containing subdirectories unstable, testing, etc."))
+  parser.add_option("--dest-dir",
+      dest="dest_dir",
+      help=("Move files out to this catalog."))
   options, args = parser.parse_args()
   logging.basicConfig(level=logging.DEBUG)
-  if not options.catalog:
+  if not options.catalog_tree or not options.dest_dir:
     parser.print_usage()
-    raise UsageError("Missing catalog option, see --help.")
-  gcg = CatalogGarbageCollector(options.catalog)
+    raise UsageError("Missing the catalog tree option, see --help.")
+  gcg = CatalogGarbageCollector(options.catalog_tree, options.dest_dir)
   gcg.GarbageCollect()
 
 

Modified: csw/mgar/gar/bts/lib/python/checkpkg2.py
===================================================================
--- csw/mgar/gar/bts/lib/python/checkpkg2.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/checkpkg2.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -134,7 +134,7 @@
   tags_for_all_osrels = []
   try:
     sqo_catrel = models.CatalogRelease.selectBy(name=options.catrel).getOne()
-  except sqlobject.main.SQLObjectNotFound, e:
+  except sqlobject.main.SQLObjectNotFound as e:
     logging.fatal("Fetching from the db has failed: catrel=%s",
                   repr(str(options.catrel)))
     logging.fatal("Available catalog releases:")
@@ -173,7 +173,7 @@
     tags_for_all_osrels.extend(tags_after_overrides)
     if not options.quiet:
       if tags_after_overrides:
-        print textwrap.fill(BEFORE_OVERRIDES, 80)
+        print(textwrap.fill(BEFORE_OVERRIDES, 80))
         for checkpkg_tag in tags_after_overrides:
           print checkpkg_tag.ToGarSyntax()
         print textwrap.fill(AFTER_OVERRIDES, 80)

Modified: csw/mgar/gar/bts/lib/python/common_constants.py
===================================================================
--- csw/mgar/gar/bts/lib/python/common_constants.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/common_constants.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -34,6 +34,7 @@
 
 DEFAULT_INSTALL_CONTENTS_FILE = "/var/sadm/install/contents"
 DUMP_BIN = "/usr/ccs/bin/dump"
+ELFDUMP_BIN = "/usr/ccs/bin/elfdump"
 
 OWN_PKGNAME_PREFIXES = frozenset(["CSW"])
 
@@ -73,6 +74,7 @@
     'dublin',
     'unstable',
     'legacy',
+    'kiel',
     ])
 
 # At some point, it was used to prevent people from linking against

Modified: csw/mgar/gar/bts/lib/python/csw_upload_pkg.py
===================================================================
--- csw/mgar/gar/bts/lib/python/csw_upload_pkg.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/csw_upload_pkg.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -22,8 +22,8 @@
 import file_set_checker
 import sys
 import getpass
+import urllib2
 
-
 BASE_URL = "http://buildfarm.opencsw.org"
 RELEASES_APP = "/releases"
 DEFAULT_CATREL = "unstable"
@@ -240,8 +240,11 @@
       for osrel in osrels:
         logging.debug("%s %s %s", catrel, arch, osrel)
         cat_key = (catrel, arch, osrel)
-        srv4_in_catalog = self._rest_client.Srv4ByCatalogAndCatalogname(
-            catrel, arch, osrel, catalogname)
+        try:
+          srv4_in_catalog = self._rest_client.Srv4ByCatalogAndCatalogname(
+              catrel, arch, osrel, catalogname)
+        except urllib2.HTTPError, e:
+          srv4_in_catalog = None
         if srv4_in_catalog:
           logging.debug("Catalog %s %s contains version %s of the %s package",
                         arch, osrel, srv4_in_catalog["osrel"], catalogname)

Modified: csw/mgar/gar/bts/lib/python/database.py
===================================================================
--- csw/mgar/gar/bts/lib/python/database.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/database.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -10,23 +10,28 @@
 import system_pkgmap
 
 CONFIG_DB_SCHEMA = "db_schema_version"
-DB_SCHEMA_VERSION = 7L
+DB_SCHEMA_VERSION = 9L
 TABLES_THAT_NEED_UPDATES = (m.CswFile,)
-TABLES = TABLES_THAT_NEED_UPDATES + (
-            m.Architecture,
-            m.CatalogRelease,
-            m.CatalogReleaseType,
-            m.CheckpkgErrorTag,
-            m.CheckpkgOverride,
-            m.CswConfig,
-            m.Host,
-            m.Maintainer,
-            m.OsRelease,
-            m.Pkginst,
-            m.Srv4DependsOn,
-            m.Srv4FileInCatalog,
-            m.Srv4FileStats,
-            m.Srv4FileStatsBlob)
+
+# This list of tables is sensitive to the order in which tables are created.
+# After you change the order here, you need to make sure that the tables can
+# still be created.
+TABLES = (m.Architecture,
+          m.CatalogReleaseType,
+          m.CatalogRelease,
+          m.CswConfig,
+          m.Host,
+          m.Maintainer,
+          m.OsRelease,
+          m.Pkginst,
+          m.Srv4FileStatsBlob,
+          m.Srv4FileStats,
+          m.CheckpkgErrorTag,
+) + TABLES_THAT_NEED_UPDATES + (
+          m.CheckpkgOverride, # needs Srv4FileStats
+          m.Srv4DependsOn,
+          m.Srv4FileInCatalog,
+)
 # Shouldn't this be in common_constants?
 SYSTEM_PKGMAP = "/var/sadm/install/contents"
 CONFIG_MTIME = "mtime"
@@ -87,7 +92,7 @@
             "the application expects: %s. "
             % (ldm.GetDatabaseSchemaVersion(), DB_SCHEMA_VERSION))
         if DB_SCHEMA_VERSION < ldm.GetDatabaseSchemaVersion():
-          msg += "Make sure your application sources are up to date."
+          msg += "When did you last run 'mgar up --all'?."
         elif DB_SCHEMA_VERSION > ldm.GetDatabaseSchemaVersion():
           msg += ("Make sure your database is up to date.  "
                   "Re-create it if necessary.")
@@ -153,8 +158,14 @@
 
   def CreateTables(self):
     for table in TABLES:
-      table.createTable(ifNotExists=True)
+      try:
+        logging.debug("Creating table %r", table)
+        table.createTable(ifNotExists=True)
+      except sqlobject.dberrors.OperationalError, e:
+        logging.error("Could not create table %r: %s", table, e)
+        raise
 
+
   def InitialDataImport(self):
     """Imports initial data into the db.
 
@@ -183,10 +194,6 @@
         pass
     self.SetDatabaseSchemaVersion()
 
-  def CreateTables(self):
-    for table in TABLES:
-      table.createTable(ifNotExists=True)
-
   def ClearTablesForUpdates(self):
     for table in TABLES_THAT_NEED_UPDATES:
       table.clearTable()
@@ -287,7 +294,7 @@
       logging.warning("Could not get file mtime: %s", e)
     d_mtime = time.gmtime(int(d_mtime_epoch))
     logging.debug("IsDatabaseUpToDate: f_mtime %s, d_time: %s", f_mtime, d_mtime)
-    # Rounding up to integer seconds.  There is a race condition: 
+    # Rounding up to integer seconds.  There is a race condition:
     # pkgadd finishes at 100.1
     # checkpkg reads /var/sadm/install/contents at 100.2
     # new pkgadd runs and finishes at 100.3

Modified: csw/mgar/gar/bts/lib/python/database_test.py
===================================================================
--- csw/mgar/gar/bts/lib/python/database_test.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/database_test.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -8,6 +8,18 @@
 
 class DatabaseManagerUnitTest(mox.MoxTestBase):
 
+  def testCanCreateTables(self):
+    # This test succeeds when run with sqlite, but fails with MySQL.
+    # We don't want to add a dependency on a running MySQL database to run tests, so
+    # we'll leave it unsolved for now.
+    dbc = database.CatalogDatabase(uri="sqlite:/:memory:")
+    dbc.CreateTables()
+
+  def testCanCreateTablesAndImportData(self):
+    dbc = database.CatalogDatabase(uri="sqlite:/:memory:")
+    dbc.CreateTables()
+    dbc.InitialDataImport()
+
   def testNoSystemFiles(self):
     # This test shows that stubbing out sqlite classes is quite laborious.
     saved_s = database.m.Srv4FileStats

Modified: csw/mgar/gar/bts/lib/python/dependency_checks.py
===================================================================
--- csw/mgar/gar/bts/lib/python/dependency_checks.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/dependency_checks.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -39,6 +39,29 @@
 
 PREFERRED_DIRECTORY_PROVIDERS = set([u"CSWcommon"])
 
+BASE_SOLARIS_LIBRARIES = set([
+     "libsocket.so.1", "libnsl.so.1", "libdl.so.1", "librt.so.1",
+     "libresolv.so.2", "libpthread.so.1",
+     # linked by default with C++, see "Default C++ Libraries"
+     # in Solaris Studio C++ User's Guide
+     "libCstd.so.1", "libCrun.so.1", "libm.so.1", "libm.so.2",
+     "libw.so.1", "libcx.so.1", "libc.so.1", "libC.so.3", "libC.so.5",
+])
+
+ALLOWED_VERSION_DEPENDENCIES = {
+    "libc.so.1": ['SYSVABI_1.3', 'SUNWprivate_1.1', 'SUNW_1.22.6',
+                  'SUNW_1.22.5', 'SUNW_1.22.4', 'SUNW_1.22.3', 'SUNW_1.22.2',
+                  'SUNW_1.22.1', 'SUNW_1.22', 'SUNW_1.21.3', 'SUNW_1.21.2',
+                  'SUNW_1.21.1', 'SUNW_1.21', 'SUNW_1.20.4', 'SUNW_1.20.1',
+                  'SUNW_1.20', 'SUNW_1.19', 'SUNW_1.18.1', 'SUNW_1.18',
+                  'SUNW_1.17', 'SUNW_1.16', 'SUNW_1.15', 'SUNW_1.14',
+                  'SUNW_1.13', 'SUNW_1.12', 'SUNW_1.11', 'SUNW_1.10',
+                  'SUNW_1.9', 'SUNW_1.8', 'SUNW_1.7', 'SUNW_1.6', 'SUNW_1.5',
+                  'SUNW_1.4', 'SUNW_1.3', 'SUNW_1.2', 'SUNW_1.1', 'SUNW_0.9',
+                  'SUNW_0.8', 'SUNW_0.7', 'SISCD_2.3'],
+}
+
+
 def ProcessSoname(
     ldd_emulator,
     soname, path_and_pkg_by_basename, binary_info, isalist, binary_path, logger,
@@ -147,6 +170,83 @@
           error_mgr,
           pkgname, messenger)
       orphan_sonames.extend(orphan_sonames_tmp)
+
+    sonames_unused = set()
+    ldd_info = pkg_data['ldd_info'][binary_info["path"]]
+    for ldd_response in ldd_info:
+      if (ldd_response['state'] == 'soname-unused'
+          and ldd_response['soname'] not in BASE_SOLARIS_LIBRARIES):
+        sonames_unused.add(ldd_response['soname'])
+        messenger.Message(
+          "Binary %s links to library %s but doesn't seem to use any"
+          " of its symbols. It usually happens because superfluous"
+          " libraries were added to the linker options, either because"
+          " of the configure script itself or because of the"
+          " \"pkg-config --libs\" output of one of the dependencies."
+          % ("/" + binary_info["path"], ldd_response['soname']))
+        error_mgr.ReportError(
+            pkgname, "soname-unused",
+            "%s is needed by %s but never used"
+             % (ldd_response['soname'], "/" + binary_info["path"]))
+
+    # Even when direct binding is enabled, some symbols might not be
+    # directly bound because the library explicitly requested that the
+    # symbol not be directly bound to.
+    # For example, libc.so.1 does it for symbol sigaction, free, malloc...
+    # So we consider that direct binding is enabled if at least one
+    # symbol is directly bound to because that definitely means that
+    # -B direct or -z direct was used.
+    binary_elf_info = pkg_data["binaries_elf_info"][binary_info["path"]]
+    libs = set(binary_info["needed sonames"])
+
+    # we skip the standard Solaris libraries: a lot of plugins only
+    # link to non directly bindable symbols of libc.so.1, librt.so.1
+    # which trigger false positives.
+    # Direct binding really matters for opencsw libraries so it's
+    # easier and risk-free to just skip these libraries
+    libs.difference_update(BASE_SOLARIS_LIBRARIES)
+
+    db_libs = set()
+    for syminfo in binary_elf_info['symbol table']:
+      if (syminfo['shndx'] == 'UNDEF' and syminfo['flags']
+          and 'D' in syminfo['flags'] and 'B' in syminfo['flags']):
+          db_libs.add(syminfo['soname'])
+    no_db_libs = libs.difference(db_libs)
+
+    # no symbol used means no way to detect if direct binding was
+    # enabled so we must ignore the libraries which were linked
+    # without being used
+    no_db_libs.difference_update(sonames_unused)
+
+    if no_db_libs:
+      messenger.Message(
+        "No symbol of binary %s is directly bound against the following"
+        " libraries: %s. Please make sure the binaries are compiled using"
+        " the \"-Bdirect\" linker option."
+        % ("/" + binary_info["path"], ", ".join(no_db_libs)))
+      for soname in no_db_libs:
+        error_mgr.ReportError(
+          pkgname, "no-direct-binding",
+          "%s is not directly bound to soname %s"
+           % ("/" + binary_info["path"], soname))
+
+
+    for version_dep in binary_elf_info['version needed']:
+      if (version_dep['soname'] in ALLOWED_VERSION_DEPENDENCIES and
+          not version_dep['version'] in
+          ALLOWED_VERSION_DEPENDENCIES[version_dep['soname']]):
+        messenger.Message(
+          "Binary %s requires interface version %s in library %s which is"
+          " only available in recent Solaris releases."
+          % ("/" + binary_info["path"], version_dep['version'],
+             version_dep['soname']))
+        error_mgr.ReportError(
+          pkgname, "forbidden-version-interface-dependencies",
+          "%s requires forbidden interface version %s in library %s"
+          % ("/" + binary_info["path"], version_dep['version'],
+             version_dep['soname']))
+
+
   orphan_sonames = set(orphan_sonames)
   for soname, binary_path in orphan_sonames:
     if soname not in ALLOWED_ORPHAN_SONAMES:

Modified: csw/mgar/gar/bts/lib/python/inspective_package.py
===================================================================
--- csw/mgar/gar/bts/lib/python/inspective_package.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/inspective_package.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -1,15 +1,17 @@
 import package
 import os
 import re
+import sys
 import logging
 import hachoir_parser
 import sharedlib_utils
 import magic
 import copy
 import common_constants
-import subprocess
 import ldd_emul
 import configuration as c
+import time
+import shell
 
 """This file isolates code dependent on hachoir parser.
 
@@ -53,15 +55,26 @@
         "You have to restart your process - it "
         "will probably finish successfully when you do that."
         % full_path)
-    raise package.PackageError(msg)
-  if sharedlib_utils.IsBinary(file_info):
+    if "/opt/csw/share" in full_path:
+    	file_info["mime_type"] = "application/octet-stream; fallback"
+    	logging.error(msg)
+    else:
+      raise package.PackageError(msg)
+  if sharedlib_utils.IsBinary(file_info, check_consistency=False):
     parser = hachoir_parser.createParser(full_path)
     if not parser:
       logging.warning("Can't parse file %s", file_path)
     else:
       try:
+        machine_id = parser["/header/machine"].value
+      except hachoir_core.field.field.MissingField, e:
+        logging.fatal(
+            "hachoir_parser failed to retrieve machine_id for %r. "
+            "checkpkg cannot continue.",
+            file_info)
+        raise
+      try:
         file_info["mime_type_by_hachoir"] = parser.mime_type
-        machine_id = parser["/header/machine"].value
         file_info["machine_id"] = machine_id
         file_info["endian"] = parser["/header/endian"].display
       except hachoir_core.field.field.MissingField, e:
@@ -69,7 +82,6 @@
             "Error in hachoir_parser processing %s: %r", file_path, e)
   return file_info
 
-
 class InspectivePackage(package.DirectoryFormatPackage):
   """Extends DirectoryFormatPackage to allow package inspection."""
 
@@ -167,24 +179,15 @@
     binaries_dump_info = []
     basedir = self.GetBasedir()
     for binary in self.ListBinaries():
-      # Relocatable packages complicate things. Binaries returns paths with
-      # the basedir, but files in reloc are in paths without the basedir, so
-      # we need to strip that bit.
-      binary_in_tmp_dir = binary
+      binary_abs_path = os.path.join(self.directory, self.GetFilesDir(), binary)
       if basedir:
-        binary_in_tmp_dir = binary_in_tmp_dir[len(basedir):]
-        binary_in_tmp_dir = binary_in_tmp_dir.lstrip("/")
-      binary_abs_path = os.path.join(self.directory, self.GetFilesDir(), binary_in_tmp_dir)
-      binary_base_name = os.path.basename(binary_in_tmp_dir)
+        binary = os.path.join(basedir, binary)
+      binary_base_name = os.path.basename(binary)
+
       args = [common_constants.DUMP_BIN, "-Lv", binary_abs_path]
-      logging.debug("Running: %s", args)
-      dump_proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
-      stdout, stderr = dump_proc.communicate()
-      ret = dump_proc.wait()
+      retcode, stdout, stderr = shell.ShellCommand(args, env)
       binary_data = ldd_emul.ParseDumpOutput(stdout)
       binary_data["path"] = binary
-      if basedir:
-        binary_data["path"] = os.path.join(basedir, binary_data["path"])
       binary_data["base_name"] = binary_base_name
       binaries_dump_info.append(binary_data)
     return binaries_dump_info
@@ -204,17 +207,13 @@
     defined_symbols = {}
 
     for binary in binaries:
-      binary_abspath = os.path.join(self.directory, "root", binary)
+      binary_abspath = os.path.join(self.directory, self.GetFilesDir(), binary)
       # Get parsable, ld.so.1 relevant SHT_DYNSYM symbol information
       args = ["/usr/ccs/bin/nm", "-p", "-D", binary_abspath]
-      nm_proc = subprocess.Popen(
-          args,
-          stdout=subprocess.PIPE,
-          stderr=subprocess.PIPE)
-      stdout, stderr = nm_proc.communicate()
-      retcode = nm_proc.wait()
+      retcode, stdout, stderr = shell.ShellCommand(args)
       if retcode:
         logging.error("%s returned an error: %s", args, stderr)
+      	# Should it just skip over an error?
         continue
       nm_out = stdout.splitlines()
 
@@ -229,29 +228,176 @@
 
     return defined_symbols
 
+  def GetBinaryElfInfo(self):
+    """Returns various information on symbols and versions in the ELF header
+
+    To do this we parse output lines from elfdump -syv, as it's the
+    only command that will give us all the information we need on
+    symbols and versions.
+
+    We will analyse 3 sections:
+     - version section: contains soname needed, version interface required
+                        for each soname, and version definition
+     - symbol table section: contains list of symbol and soname/version
+                             interface providing it
+     - syminfo section: contains special linking flags for each symbol
+    """
+    binaries = self.ListBinaries()
+    binaries_elf_info = {}
+    base_dir = self.GetBasedir()
+
+    for binary in binaries:
+      binary_abspath = os.path.join(self.directory, self.GetFilesDir(), binary)
+      if base_dir:
+        binary = os.path.join(base_dir, binary)
+      # elfdump is the only tool that gives us all the information
+      args = [common_constants.ELFDUMP_BIN, "-svy", binary_abspath]
+      retcode, stdout, stderr = shell.ShellCommand(args)
+      if retcode or stderr:
+        # we ignore for now these elfdump errors which can be caught
+        # later by check functions
+        ignored_error_re = re.compile(
+          r"""[^:]+:(\s\.((SUNW_l)?dynsym|symtab):\s
+           ((index\[\d+\]:\s)?
+            (suspicious\s(local|global)\ssymbol\sentry:\s[^:]+:\slies
+             \swithin\s(local|global)\ssymbol\srange\s\(index\s[<>=]+\s\d+\)
+
+            |bad\ssymbol\sentry:\s[^:]+:\ssection\[\d+\]\ssize:\s0(x[0-9a-f]+)?
+             :\s(symbol\s\(address\s0x[0-9a-f]+,\ssize\s0x[0-9a-f]+\)
+                 \slies\soutside\sof\scontaining\ssection
+                 |is\ssmaller\sthan\ssymbol\ssize:\s\d+)
+
+            |bad\ssymbol\sentry:\s:\sinvalid\sshndx:\s\d+
+            |)
+
+           |invalid\ssh_link:\s0)
+
+           |\smemory\soverlap\sbetween\ssection\[\d+\]:\s[^:]+:\s
+            [0-9a-f]+:[0-9a-f]+\sand\ssection\[\d+\]:\s[^:]+:
+            \s[0-9a-f]+:[0-9a-f]+)
+           \n""",
+          re.VERBOSE)
+
+        stderr = re.sub(ignored_error_re, "", stderr)
+        if stderr:
+          with open("/tmp/elfdump_stdout.log", "w") as fd:
+            fd.write(stdout)
+          with open("/tmp/elfdump_stderr.log", "w") as fd:
+            fd.write(stderr)
+          msg = ("%s returned one or more errors: %s" % (args, stderr) +
+                 "\n\n" +
+                 "ERROR: elfdump invocation failed. Please copy this message " +
+                 "and the above messages into your report and send them " +
+                 "as part of the error report. Logs are saved in " +
+                 "/tmp/elfdump_std(out|err).log for your inspection.")
+          raise package.Error(msg)
+      elfdump_out = stdout.splitlines()
+
+      symbols = {}
+      binary_info = {'version definition': [],
+                     'version needed': []}
+
+      cur_section = None
+      for line in elfdump_out:
+
+        try:
+          elf_info, cur_section = self._ParseElfdumpLine(line, cur_section)
+        except package.StdoutSyntaxError as e:
+          sys.stderr.write("elfdump out:\n")
+          sys.stderr.write(stdout)
+          raise
+
+        # header or blank line contains no information
+        if not elf_info:
+          continue
+
+        # symbol table and syminfo sections store various information
+        # about the same symbols, so we merge them in a dict
+        if cur_section in ('symbol table', 'syminfo'):
+          symbols.setdefault(elf_info['symbol'], {}).update(elf_info)
+        else:
+          binary_info[cur_section].append(elf_info)
+
+      # elfdump doesn't repeat the soname in the version section
+      # if it's the same on two contiguous line, e.g.:
+      #         libc.so.1            SUNW_1.1
+      #                              SUNWprivate_1.1
+      # so we have to make sure the information is present in each entry
+      for i, version in enumerate(binary_info['version needed'][1:]):
+        if not version['soname']:
+          version['soname'] = binary_info['version needed'][i]['soname']
+
+      # soname version needed are usually displayed sorted by index ...
+      # but that's not always the case :( so we have to reorder
+      # the list by index if they are present
+      if any ( v['index'] for v in binary_info['version needed'] ):
+        binary_info['version needed'].sort(key=lambda m: int(m['index']))
+        for version in binary_info['version needed']:
+          del version['index']
+
+      # if it exists, the first "version definition" entry is the base soname
+      # we don't need this information
+      if binary_info['version definition']:
+        binary_info['version definition'].pop(0)
+
+      binary_info['symbol table'] = symbols.values()
+      binary_info['symbol table'].sort(key=lambda m: m['symbol'])
+      # To avoid relying on the section order of elfdump output, we resolve
+      # symbol version information here after having parsed all the output
+      self._ResolveSymbolsVersionInfo(binary_info)
+
+      binaries_elf_info[binary] = binary_info
+
+    return binaries_elf_info
+
   def GetLddMinusRlines(self):
     """Returns ldd -r output."""
-    dir_pkg = self.GetInspectivePkg()
-    binaries = dir_pkg.ListBinaries()
+    binaries = self.ListBinaries()
+    base_dir = self.GetBasedir()
     ldd_output = {}
     for binary in binaries:
-      binary_abspath = os.path.join(dir_pkg.directory, "root", binary)
+      binary_abspath = os.path.join(self.directory, self.GetFilesDir(), binary)
+      if base_dir:
+        binary = os.path.join(base_dir, binary)
+
       # this could be potentially moved into the DirectoryFormatPackage class.
       # ldd needs the binary to be executable
       os.chmod(binary_abspath, 0755)
-      args = ["ldd", "-r", binary_abspath]
-      ldd_proc = subprocess.Popen(
-          args,
-          stdout=subprocess.PIPE,
-          stderr=subprocess.PIPE)
-      stdout, stderr = ldd_proc.communicate()
-      retcode = ldd_proc.wait()
+      args = ["ldd", "-Ur", binary_abspath]
+      # ldd can get stuck when run on some binaries, so we define
+      # a timeout (problem encountered with uconv)
+      retcode, stdout, stderr = shell.ShellCommand(args, timeout=10)
       if retcode:
-        logging.error("%s returned an error: %s", args, stderr)
+        # There are three cases where we will ignore an ldd error
+        #  - if we are trying to analyze a 64 bits binary on a Solaris 9 x86
+        #    solaris 9 exists only in 32 bits, so we can't do this
+        #    We ignore the error as it is likely that the ldd infos will be
+        #    the same on the 32 bits binaries
+        #  - if we are trying to analyze a binary from another architecture
+        #    we ignore this error as it will be caught by another checkpkg test
+        #  - if we are trying to analyze a statically linked binary
+        #    we care only about dynamic binaries so we ignore the error
+        #
+        uname_info = os.uname()
+        if ((uname_info[2] == '5.9' and uname_info[4] == 'i86pc' and
+             '/amd64/' in binary_abspath and
+             'has wrong class or data encoding' in stderr) or
+            re.search(r'ELF machine type: EM_\w+: '
+                      r'is incompatible with system', stderr)
+            or 'file is not a dynamic executable or shared object' in stderr):
+          ldd_output[binary] = []
+          continue
+
+        raise package.SystemUtilityError("%s returned an error: %s" % (args, stderr))
+
       ldd_info = []
       for line in stdout.splitlines():
-        ldd_info.append(self._ParseLddDashRline(line))
+        result = self._ParseLddDashRline(line, binary_abspath)
+        if result:
+          ldd_info.append(result)
+
       ldd_output[binary] = ldd_info
+
     return ldd_output
 
   def _ParseNmSymLine(self, line):
@@ -263,7 +409,111 @@
     sym = { 'address': fields[0], 'type': fields[1], 'name': fields[2] }
     return sym
 
-  def _ParseLddDashRline(self, line):
+  def _ResolveSymbolsVersionInfo(self, binary_info):
+
+    version_info = (binary_info['version definition']
+                    + binary_info['version needed'])
+
+    for sym_info in binary_info['symbol table']:
+      # sym_info version field is an 1-based index on the version
+      # information table
+      # we don't care about 0 and 1 values:
+      #  0 is for external symbol with no version information available
+      #  1 is for a symbol defined by the binary and not bound
+      #    to a version interface
+      version_index = int(sym_info['version']) - 2
+      if version_index >= 0:
+        version = version_info[version_index]
+        sym_info['version'] = version['version']
+        if 'soname' in version:
+          sym_info['soname'] = version['soname']
+      else:
+        sym_info['version'] = None
+
+      # we make sure these fields are present
+      # even if the syminfo section is not
+      sym_info.setdefault('soname')
+      sym_info.setdefault('flags')
+
+  def _ParseElfdumpLine(self, line, section=None):
+
+    headers_re = (
+      r"""
+       (?P<section>Version\sNeeded|Symbol\sTable  # Section header
+                  |Version\sDefinition|Syminfo)
+                   \sSection:
+        \s+(?:\.SUNW_version|\.gnu\.version_[rd]
+            |\.(SUNW_l)?dynsym|\.SUNW_syminfo|.symtab)\s*$
+
+       |\s*(?:index\s+)?version\s+dependency\s*$  # Version needed header
+
+       |\s*(?:index\s+)?file\s+version\s*$        # Version definition header
+
+       |\s*index\s*value\s+size\s+type\s+bind     # Symbol table header
+        \s+oth\s+ver\s+shndx\s+name\s*$
+
+       |\s*index\s+fla?gs\s+bound\sto\s+symbol\s*$ # Syminfo header
+
+       |\s*$                                      # There is always a blank
+                                                  # line before a new section
+       """)
+
+    re_by_section = {
+      'version definition': (r"""
+        \s*(?:\[\d+\]\s+)?                # index: might be not present if no
+                                          #        version binding is enabled
+        (?P<version>\S+)                  # version
+        (?:\s+(?P<dependency>\S+))?       # dependency
+        (?:\s+\[\s(?:BASE|WEAK)\s\])?\s*$
+                              """),
+      'version needed': (r"""
+        \s*(?:\[(?P<index>\d+)\]\s+)?     # index: might be not present if no
+                                          #        version binding is enabled
+        (?:(?P<soname>\S+)\s+             # file: can be absent if the same as
+         (?!\[\s(?:INFO|WEAK)\s\]))?      #       the previous line,
+                                          #       we make sure there is no
+                                          #       confusion with version
+        (?P<version>\S+)                  # version
+        (?:\s+\[\s(?:INFO|WEAK)\s\])?\s*$ #
+                          """),
+      'symbol table': (r"""
+         \s*\[\d+\]                       # index
+         \s+(?:0x[0-9a-f]+|REG_G\d+)      # value
+         \s+(?:0x[0-9a-f]+)               # size
+         \s+(?P<type>\S+)                 # type
+         \s+(?P<bind>\S+)                 # bind
+         \s+(?:\S+)                       # oth
+         \s+(?P<version>\S+)              # ver
+         \s+(?P<shndx>\S+)                # shndx
+         (?:\s+(?P<symbol>\S+))?\s*$      # name
+                        """),
+      'syminfo': (r"""
+         \s*(?:\[\d+\])                   # index
+         \s+(?P<flags>[ABCDFILNPS]+)      # flags
+
+         \s+(?:(?:\[\d+\]                 # bound to: contains either
+         \s+(?P<soname>\S+)|<self>)\s+)?  #  - library index and library name
+                                          #  -  <self> for non external symbols
+
+         (?P<symbol>\S+)\s*               # symbol
+                   """)}
+
+    elfdump_data = None
+    m = re.match(headers_re, line, re.VERBOSE)
+    if m:
+      if m.lastindex:
+        section = m.group('section').lower()
+    elif section:
+      m = re.match(re_by_section[section], line, re.VERBOSE)
+      if m:
+        elfdump_data = m.groupdict()
+
+    if not m:
+      raise package.StdoutSyntaxError("Could not parse %s" % (repr(line)))
+
+    return elfdump_data, section
+
+  def _ParseLddDashRline(self, line, binary=None):
     found_re = r"^\t(?P<soname>\S+)\s+=>\s+(?P<path_found>\S+)"
     symbol_not_found_re = (r"^\tsymbol not found:\s(?P<symbol>\S+)\s+"
                            r"\((?P<path_not_found>\S+)\)")
@@ -276,16 +526,38 @@
                      r'with STV_PROTECTED visibility$')
     sizes_differ = (r'^\trelocation \S+ sizes differ: '
                     r'(?P<sizes_differ_symbol>\S+)$')
-    sizes_info = (r'^\t\t\(file (?P<sizediff_file1>\S+) size=(?P<size1>0x\w+); '
+    sizes_info = (r'^\t\t\(file (?P<sizediff_file1>\S+)'
+                  r' size=(?P<size1>0x\w+); '
                   r'file (?P<sizediff_file2>\S+) size=(?P<size2>0x\w+)\)$')
     sizes_one_used = (r'^\t\t(?P<sizediffused_file>\S+) size used; '
                       r'possible insufficient data copied$')
-    common_re = (r"(%s|%s|%s|%s|%s|%s|%s|%s)"
+    unreferenced_object = (r'^\s*unreferenced object=(?P<object>.*);'
+                           r' unused dependency of (?P<binary>.*)$')
+    unused_object = (r'^\s*unused object=.*$')
+    unused_search_path = (r'^\s*unused search path=.*'
+                          r'  \(RUNPATH/RPATH from file .*\)$')
+    move_offset_error = (r'^\tmove (?P<move_index>\d+) offset invalid: '
+                         r'\(unknown\): offset=(?P<move_offset>0x[0-9a-f]+) '
+                         'lies outside memory image; move discarded')
+    relocation_error = (r'relocation R_(386|AMD64|X86_64|SPARC)_\w+ '
+                        r'sizes differ: (?P<reloc_symbol>.*)'
+                        r'|\t\t\(file .* size=0(?:x[0-9a-f]+)?; file .*'
+                        r'size=0x(?:[0-9a-f]+)?\)'
+                        r'|\t.* size used; possible data truncation')
+    copy_relocation_error = (r'\tsymbol (?P<copy_reloc_symbol>\S+):'
+                             r' file \S+: copy relocation symbol'
+                             r' may have been displacement relocated')
+    blank_line = (r'^\s*$')
+    common_re = (r"(%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s)"
                  % (found_re, symbol_not_found_re, only_so, version_so,
-                    stv_protected, sizes_differ, sizes_info, sizes_one_used))
+                    stv_protected, sizes_differ, sizes_info,
+                    sizes_one_used, unreferenced_object, unused_object,
+                    unused_search_path, blank_line, move_offset_error,
+                    relocation_error, copy_relocation_error))
     m = re.match(common_re, line)
-    response = {}
+    response = None
     if m:
+      response = {}
       d = m.groupdict()
       if "soname" in d and d["soname"]:
         # it was found
@@ -298,6 +570,11 @@
         response["soname"] = None
         response["path"] = d["path_not_found"]
         response["symbol"] = d["symbol"]
+      elif "binary" in d and d["binary"] and binary == d["binary"]:
+        response["state"] = "soname-unused"
+        response["soname"] = os.path.basename(d["object"])
+        response["path"] = None
+        response["symbol"] = None
       elif d["path_only"]:
         response["state"] = "OK"
         response["soname"] = None
@@ -328,12 +605,28 @@
         response["soname"] = None
         response["path"] = "%s" % (d["sizediffused_file"])
         response["symbol"] = None
-      else:
-        raise StdoutSyntaxError("Could not parse %s with %s"
-                                % (repr(line), common_re))
+      elif d["move_offset"]:
+        response["state"] = 'move-offset-error'
+        response["soname"] = None
+        response["path"] = None
+        response["symbol"] = None
+        response["move_offset"] = d['move_offset']
+        response["move_index"] = d['move_index']
+      elif d["reloc_symbol"]:
+        response["state"] = 'relocation-issue'
+        response["soname"] = None
+        response["path"] = None
+        response["symbol"] = d['reloc_symbol']
+      elif d["copy_reloc_symbol"]:
+        response["state"] = 'relocation-issue'
+        response["soname"] = None
+        response["path"] = None
+        response["symbol"] = d['copy_reloc_symbol']
+
     else:
-      raise StdoutSyntaxError("Could not parse %s with %s"
-                              % (repr(line), common_re))
+      raise package.StdoutSyntaxError("Could not parse %s with %s"
+                                      % (repr(line), common_re))
+
     return response
 
   def GetDependencies(self):
@@ -436,10 +729,11 @@
     """Trying to run magic.file() a few times, not accepting None."""
     self._LazyInit()
     mime = None
+    logging.debug("GetFileMimeType(%r)", full_path)
     for i in xrange(10):
       mime = self.magic_cookie.file(full_path)
       if mime:
-        break;
+        break
       else:
         # Returned mime is null. Re-initializing the cookie and trying again.
         logging.error("magic_cookie.file(%s) returned None. Retrying.",

Modified: csw/mgar/gar/bts/lib/python/inspective_package_test.py
===================================================================
--- csw/mgar/gar/bts/lib/python/inspective_package_test.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/inspective_package_test.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -1,11 +1,14 @@
-#!/usr/bin/env python2.6
+#!/opt/csw/bin/python2.6
 
 import unittest2 as unittest
 import inspective_package
+import package
+import shell
 import mox
 import hachoir_parser
 import magic
 import os
+import common_constants
 
 LDD_R_OUTPUT_1 =  """\tlibc.so.1 =>  /lib/libc.so.1
 \tsymbol not found: check_encoding_conversion_args    (/opt/csw/lib/postgresql/8.4/utf8_and_gbk.so)
@@ -19,6 +22,118 @@
 \t\t(file /tmp/pkg_GqCk0P/CSWkdeartworkgcc/root/opt/csw/kde-gcc/bin/kslideshow.kss size=0x28; file /opt/csw/kde-gcc/lib/libqt-mt.so.3 size=0x20)
 """
 
+DUMP_OUTPUT = '''
+  **** DYNAMIC SECTION INFORMATION ****
+.dynamic:
+[INDEX] Tag         Value
+[1]     NEEDED          libXext.so.0
+[2]     NEEDED          libX11.so.4
+[3]     NEEDED          libsocket.so.1
+[4]     NEEDED          libnsl.so.1
+[5]     NEEDED          libc.so.1
+[6]     INIT            0x80531e4
+[7]     FINI            0x8053200
+[8]     HASH            0x80500e8
+[9]     STRTAB          0x8050cb0
+[10]    STRSZ           0x511
+[11]    SYMTAB          0x80504e0
+[12]    SYMENT          0x10
+[13]    CHECKSUM        0x9e8
+[14]    VERNEED         0x80511c4
+[15]    VERNEEDNUM      0x2
+[16]    PLTSZ           0x1a0
+[17]    PLTREL          0x11
+[18]    JMPREL          0x8051224
+[19]    REL             0x8051214
+[20]    RELSZ           0x1b0
+[21]    RELENT          0x8
+[22]    DEBUG           0
+[23]    FEATURE_1       PARINIT
+[24]    FLAGS           0
+[25]    FLAGS_1         0
+[26]    PLTGOT          0x806359c
+'''
+
+BINARY_DUMP_INFO = {
+  'base_name': 'foo',
+  'RUNPATH RPATH the same': True,
+  'runpath': (),
+  'RPATH set': False,
+  'needed sonames': (
+    'libXext.so.0',
+    'libX11.so.4',
+    'libsocket.so.1',
+    'libnsl.so.1',
+    'libc.so.1'),
+  'path': 'opt/csw/bin/foo',
+  'RUNPATH set': False,
+  }
+
+ELFDUMP_OUTPUT = '''
+Version Definition Section:  .SUNW_version
+     index  version                     dependency
+       [1]  libssl.so.1.0.0                                  [ BASE ]
+       [2]  OPENSSL_1.0.0
+       [3]  OPENSSL_1.0.1               OPENSSL_1.0.0
+
+Version Needed Section:  .SUNW_version
+     index  file                        version
+       [4]  libcrypto.so.1.0.0          OPENSSL_1.0.0        [ INFO ]
+       [5]                              OPENSSL_1.0.1
+       [6]  libnsl.so.1                 SUNW_1.9.1
+
+Symbol Table Section:  .dynsym
+     index    value      size      type bind oth ver shndx          name
+       [0]  0x00000000 0x00000000  NOTY LOCL  D    0 UNDEF
+       [1]  0x00000000 0x00000000  FUNC GLOB  D    4 UNDEF          EVP_DigestSignFinal
+       [2]  0x0003ead4 0x000000dc  FUNC GLOB  P    2 .text          SSL_get_shared_ciphers
+       [3]  0x0004f8f8 0x00000014  FUNC GLOB  P    3 .text          SSL_CTX_set_srp_client_pwd_callback
+       [4]  0x00000000 0x00000000  FUNC GLOB  D    5 UNDEF          SRP_Calc_client_key
+       [5]  0x000661a0 0x00000000  OBJT GLOB  P    1 .got           _GLOBAL_OFFSET_TABLE_
+
+Syminfo Section:  .SUNW_syminfo
+     index  flags            bound to                 symbol
+       [1]  DBL          [1] libcrypto.so.1.0.0       EVP_DigestSignFinal
+       [2]  DB               <self>                   SSL_get_shared_ciphers
+       [3]  DB               <self>                   SSL_CTX_set_srp_client_pwd_callback
+       [4]  DBL          [1] libcrypto.so.1.0.0       SRP_Calc_client_key
+       [5]  DB               <self>                   _GLOBAL_OFFSET_TABLE_
+'''
+
+BINARY_ELFINFO = {'opt/csw/lib/libssl.so.1.0.0': {
+  'symbol table': [
+    {'shndx': 'UNDEF', 'soname': None, 'bind': 'LOCL',
+      'symbol': None, 'version': None, 'flags': None, 'type': 'NOTY'},
+    {'shndx': 'UNDEF', 'soname': 'libcrypto.so.1.0.0', 'bind': 'GLOB',
+      'symbol': 'EVP_DigestSignFinal', 'version': 'OPENSSL_1.0.0',
+      'flags': 'DBL', 'type': 'FUNC'},
+    {'shndx': 'UNDEF', 'soname': 'libcrypto.so.1.0.0', 'bind': 'GLOB',
+      'symbol': 'SRP_Calc_client_key', 'version': 'OPENSSL_1.0.1',
+      'flags': 'DBL', 'type': 'FUNC'},
+    {'shndx': '.text', 'soname': None, 'bind': 'GLOB',
+      'symbol': 'SSL_CTX_set_srp_client_pwd_callback',
+      'version': 'OPENSSL_1.0.1', 'flags': 'DB', 'type': 'FUNC'},
+    {'shndx': '.text', 'soname': None, 'bind': 'GLOB',
+      'symbol': 'SSL_get_shared_ciphers', 'version': 'OPENSSL_1.0.0',
+      'flags': 'DB', 'type': 'FUNC'},
+    {'shndx': '.got', 'soname': None, 'bind': 'GLOB',
+      'symbol': '_GLOBAL_OFFSET_TABLE_', 'version': None,
+      'flags': 'DB', 'type': 'OBJT'},
+    ],
+  'version definition': [
+    {'dependency': None, 'version': 'OPENSSL_1.0.0'},
+    {'dependency': 'OPENSSL_1.0.0', 'version': 'OPENSSL_1.0.1'},
+    ],
+  'version needed': [
+    {'version': 'OPENSSL_1.0.0', 'soname': 'libcrypto.so.1.0.0'},
+    {'version': 'OPENSSL_1.0.1', 'soname': 'libcrypto.so.1.0.0'},
+    {'version': 'SUNW_1.9.1', 'soname': 'libnsl.so.1'},
+    ]
+  }
+  }
+
+
+
 class InspectivePackageUnitTest(mox.MoxTestBase):
 
   def testListBinaries(self):
@@ -68,12 +183,250 @@
     }
     self.assertEqual([u'foo-file'], ip.ListBinaries())
 
+  def testGetBinaryDumpInfoRoot(self):
+    fake_binary = 'opt/csw/bin/foo'
+    fake_package_path = '/fake/path/CSWfoo'
 
+    ip = inspective_package.InspectivePackage(fake_package_path)
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    ip.ListBinaries().AndReturn([fake_binary])
+    ip.GetBasedir().AndReturn('')
+    ip.GetFilesDir().AndReturn('root')
+
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    args = [common_constants.DUMP_BIN,
+            '-Lv',
+            os.path.join(fake_package_path, "root", fake_binary)]
+    shell.ShellCommand(args, mox.IgnoreArg()).AndReturn((0, DUMP_OUTPUT, ""))
+    self.mox.ReplayAll()
+
+    self.assertEqual([BINARY_DUMP_INFO], ip.GetBinaryDumpInfo())
+
+  def testGetBinaryDumpInfoReloc(self):
+    fake_binary = 'bin/foo'
+    fake_package_path = '/fake/path/CSWfoo'
+
+    ip = inspective_package.InspectivePackage(fake_package_path)
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    ip.ListBinaries().AndReturn([fake_binary])
+    ip.GetBasedir().AndReturn('opt/csw')
+    ip.GetFilesDir().AndReturn('reloc')
+
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    args = [common_constants.DUMP_BIN,
+            '-Lv',
+            os.path.join(fake_package_path, "reloc", fake_binary)]
+    shell.ShellCommand(args, mox.IgnoreArg()).AndReturn((0, DUMP_OUTPUT, ""))
+    self.mox.ReplayAll()
+
+    self.assertEqual([BINARY_DUMP_INFO], ip.GetBinaryDumpInfo())
+
+
+  def testGetBinaryElfInfoRoot(self):
+    fake_binary = 'opt/csw/lib/libssl.so.1.0.0'
+    fake_package_path = '/fake/path/CSWfoo'
+
+    ip = inspective_package.InspectivePackage(fake_package_path)
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    ip.ListBinaries().AndReturn([fake_binary])
+    ip.GetBasedir().AndReturn('')
+    ip.GetFilesDir().AndReturn('root')
+
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    args = [common_constants.ELFDUMP_BIN,
+            '-svy',
+            os.path.join(fake_package_path, "root", fake_binary)]
+    shell.ShellCommand(args).AndReturn((0, ELFDUMP_OUTPUT, ""))
+    self.mox.ReplayAll()
+
+    self.assertEqual(BINARY_ELFINFO, ip.GetBinaryElfInfo())
+
+  def testGetBinaryElfInfoReloc(self):
+    fake_binary = 'lib/libssl.so.1.0.0'
+    fake_package_path = '/fake/path/CSWfoo'
+
+    ip = inspective_package.InspectivePackage(fake_package_path)
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    ip.ListBinaries().AndReturn([fake_binary])
+    ip.GetBasedir().AndReturn('opt/csw')
+    ip.GetFilesDir().AndReturn('reloc')
+
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    args = [common_constants.ELFDUMP_BIN,
+            '-svy',
+            os.path.join(fake_package_path, "reloc", fake_binary)]
+    shell.ShellCommand(args).AndReturn((0, ELFDUMP_OUTPUT, ""))
+    self.mox.ReplayAll()
+
+    self.assertEqual(BINARY_ELFINFO, ip.GetBinaryElfInfo())
+
+  def testGetBinaryElfInfoWithIgnoredErrors(self):
+    fake_binary = 'opt/csw/bin/foo'
+    fake_package_path = '/fake/path/CSWfoo'
+    fake_elfdump_output = '''
+Version Needed Section:  .SUNW_version
+     index  file                        version
+       [2]  libc.so.1                 SUNW_1.1
+
+Symbol Table Section:  .dynsym
+     index    value      size      type bind oth ver shndx          name
+       [1]  0x00000000 0x00000000  FUNC GLOB  D    2 UNDEF          fopen64
+
+Syminfo Section:  .SUNW_syminfo
+     index  flags            bound to                 symbol
+       [1]  DBL          [1] libc.so.1                fopen64
+'''
+    fake_elfdump_errors = '''
+/opt/csw/bin/foo: .dynsym: index[26]: bad symbol entry: : invalid shndx: 26
+/opt/csw/bin/foo: .dynsym: bad symbol entry: : invalid shndx: 23
+/opt/csw/bin/foo: .dynsym: index[108]: suspicious local symbol entry: _END_: lies within global symbol range (index >= 27)
+/opt/csw/bin/foo: .dynsym: index[4]: bad symbol entry: toto: section[24] size: 0: symbol (address 0x36b7fc, size 0x4) lies outside of containing section
+/opt/csw/bin/foo: .dynsym: bad symbol entry: Xt_app_con: section[28] size: 0: is smaller than symbol size: 4
+'''
+    fake_binary_elfinfo = {'opt/csw/bin/foo': {
+      'symbol table': [
+        {'shndx': 'UNDEF', 'soname': 'libc.so.1', 'bind': 'GLOB',
+          'symbol': 'fopen64', 'version': 'SUNW_1.1',
+          'flags': 'DBL', 'type': 'FUNC'},
+        ],
+      'version needed': [
+        {'version': 'SUNW_1.1', 'soname': 'libc.so.1'},
+        ],
+      'version definition': [],
+      }
+    }
+    ip = inspective_package.InspectivePackage(fake_package_path)
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    ip.ListBinaries().AndReturn([fake_binary])
+    ip.GetBasedir().AndReturn('')
+    ip.GetFilesDir().AndReturn('root')
+
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    args = [common_constants.ELFDUMP_BIN,
+            '-svy',
+            os.path.join(fake_package_path, "root", fake_binary)]
+    shell.ShellCommand(args).AndReturn((0, fake_elfdump_output, fake_elfdump_errors))
+    self.mox.ReplayAll()
+
+    self.assertEqual(fake_binary_elfinfo, ip.GetBinaryElfInfo())
+
+  def testGetLddMinusRlinesRoot(self):
+    ip = inspective_package.InspectivePackage("/tmp/CSWfake")
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    self.mox.StubOutWithMock(os, 'chmod')
+    self.mox.StubOutWithMock(os, 'uname')
+    ip.GetBasedir().AndReturn('')
+    os.chmod('/tmp/CSWfake/root/opt/csw/bin/foo', 0755)
+    ip.ListBinaries().AndReturn(['opt/csw/bin/foo'])
+    ip.GetFilesDir().AndReturn('root')
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    shell.ShellCommand(
+        ['ldd', '-Ur', '/tmp/CSWfake/root/opt/csw/bin/foo'],
+        timeout=10).AndReturn((0, "", ""))
+    self.mox.StubOutWithMock(ip, '_ParseLddDashRline')
+    self.mox.ReplayAll()
+    self.assertEqual({'opt/csw/bin/foo': []}, ip.GetLddMinusRlines())
+
+  def testGetLddMinusRlinesReloc(self):
+    ip = inspective_package.InspectivePackage("/tmp/CSWfake")
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    self.mox.StubOutWithMock(os, 'chmod')
+    self.mox.StubOutWithMock(os, 'uname')
+    ip.GetBasedir().AndReturn('opt/csw')
+    os.chmod('/tmp/CSWfake/reloc/bin/foo', 0755)
+    ip.ListBinaries().AndReturn(['bin/foo'])
+    ip.GetFilesDir().AndReturn('reloc')
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    shell.ShellCommand(
+        ['ldd', '-Ur', '/tmp/CSWfake/reloc/bin/foo'],
+        timeout=10).AndReturn((0, "", ""))
+    self.mox.StubOutWithMock(ip, '_ParseLddDashRline')
+    self.mox.ReplayAll()
+    self.assertEqual({'opt/csw/bin/foo': []}, ip.GetLddMinusRlines())
+
+  def testGetLddMinusRlinesThrows(self):
+    ip = inspective_package.InspectivePackage("/tmp/CSWfake")
+    self.mox.StubOutWithMock(ip, 'GetBasedir')
+    self.mox.StubOutWithMock(ip, 'ListBinaries')
+    self.mox.StubOutWithMock(ip, 'GetFilesDir')
+    self.mox.StubOutWithMock(os, 'chmod')
+    self.mox.StubOutWithMock(os, 'uname')
+    ip.GetBasedir().AndReturn('/')
+    os.chmod('/tmp/CSWfake/root/opt/csw/bin/foo', 0755)
+    os.uname().AndReturn('i86pc')
+    ip.GetFilesDir().AndReturn('root')
+    ip.ListBinaries().AndReturn(['opt/csw/bin/foo'])
+    self.mox.StubOutWithMock(shell, 'ShellCommand')
+    shell.ShellCommand(
+        ['ldd', '-Ur', '/tmp/CSWfake/root/opt/csw/bin/foo'],
+        timeout=10).AndReturn((1, "", "boo"))
+    self.mox.StubOutWithMock(ip, '_ParseLddDashRline')
+    self.mox.ReplayAll()
+    self.assertRaises(package.SystemUtilityError,
+                      ip.GetLddMinusRlines)
+
+
 class PackageStatsUnitTest(unittest.TestCase):
 
   def setUp(self):
     self.ip = inspective_package.InspectivePackage("/fake/path/CSWfoo")
 
+  def test_ParseElfdumpLineSectionHeader(self):
+    line = 'Symbol Table Section:  .dynsym'
+    self.assertEqual((None, "symbol table"), self.ip._ParseElfdumpLine(line, None))
+
+  def test_ParseElfdumpLineVersionNeeded(self):
+    line = '[13]                              SUNW_0.9             [ INFO ]'
+    expected = {
+      'index': '13',
+      'version': 'SUNW_0.9',
+      'soname': None
+    }
+    self.assertEqual((expected, "version needed"), self.ip._ParseElfdumpLine(line, 'version needed'))
+
+  def test_ParseElfdumpLineSymbolTable(self):
+    line = '    [9]  0x000224b8 0x0000001c  FUNC GLOB  D    1 .text          vsf_log_line'
+    expected = {
+      'bind': 'GLOB',
+      'shndx': '.text',
+      'symbol': 'vsf_log_line',
+      'version': '1',
+      'type': 'FUNC',
+    }
+    self.assertEqual((expected, 'symbol table'), self.ip._ParseElfdumpLine(line, 'symbol table'))
+
+  def test_ParseElfdumpLineNeededSymbol(self):
+    line = '      [152]  DB           [4] libc.so.1                strlen'
+    expected = {
+        'flags': 'DB',
+        'soname': 'libc.so.1',
+        'symbol': 'strlen',
+    }
+    self.assertEqual((expected, "syminfo"), self.ip._ParseElfdumpLine(line, "syminfo"))
+
+  def test_ParseElfdumpLineExportedSymbol(self):
+    line = '      [116]  DB               <self>                   environ'
+    expected = {
+        'flags': 'DB',
+        'soname': None,
+        'symbol': 'environ',
+    }
+    self.assertEqual((expected, "syminfo"), self.ip._ParseElfdumpLine(line, "syminfo"))
+
   def test_ParseNmSymLineGoodLine(self):
     line = '0000097616 T aliases_lookup'
     expected = {

Modified: csw/mgar/gar/bts/lib/python/models.py
===================================================================
--- csw/mgar/gar/bts/lib/python/models.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/models.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -229,7 +229,13 @@
         % (self.catalogname, self.version_string, self.arch.name))
 
   def GetUnicodeOrNone(self, s):
-    """Tries to decode UTF-8"""
+    """Tries to decode UTF-8.
+
+    If the object does not decode as UTF-8, it's forced to do so, while
+    ignoring any potential errors.
+
+    Returns: a unicode object or a None type.
+    """
     if s is None:
       return None
     if type(s) != unicode:

Modified: csw/mgar/gar/bts/lib/python/package.py
===================================================================
--- csw/mgar/gar/bts/lib/python/package.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/package.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -39,9 +39,14 @@
   pass
 
 
+class SystemUtilityError(Error):
+  """A problem occurred while running a system utility, e.g. ldd."""
+
 class PackageError(Error):
   pass
 
+class StdoutSyntaxError(Error):
+  pass
 
 class CswSrv4File(shell.ShellMixin, object):
   """Represents a package in the srv4 format (pkg)."""
@@ -65,10 +70,9 @@
 
   def GetWorkDir(self):
     if not self.workdir:
-      self.workdir = tempfile.mkdtemp(prefix="pkg_")
-      fd = open(os.path.join(self.workdir, "admin"), "w")
-      fd.write(ADMIN_FILE_CONTENT)
-      fd.close()
+      self.workdir = tempfile.mkdtemp(prefix="pkg_", dir="/var/tmp")
+      with open(os.path.join(self.workdir, "admin"), "w") as fd:
+        fd.write(ADMIN_FILE_CONTENT)
     return self.workdir
 
   def GetAdminFilePath(self):
@@ -108,11 +112,7 @@
             src_file,
             destdir,
             pkgname ]
-    pkgtrans_proc = subprocess.Popen(args,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE)
-    stdout, stderr = pkgtrans_proc.communicate()
-    ret = pkgtrans_proc.wait()
+    ret, stdout, stderr = shell.ShellCommand(args)
     if ret:
       logging.error(stdout)
       logging.error(stderr)
@@ -125,9 +125,7 @@
     if not self.pkgname:
       gunzipped_path = self.GetGunzippedPath()
       args = ["nawk", "NR == 2 {print $1; exit;}", gunzipped_path]
-      nawk_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-      stdout, stderr = nawk_proc.communicate()
-      ret_code = nawk_proc.wait()
+      ret_code, stdout, stderr = shell.ShellCommand(args)
       self.pkgname = stdout.strip()
       logging.debug("GetPkgname(): %s", repr(self.pkgname))
     return self.pkgname
@@ -138,6 +136,10 @@
     return self.stat
 
   def GetMtime(self):
+    """The mtime of the svr4 file.
+
+    Returns: a datetime.datetime object (not encodable with json!).
+    """
     if not self.mtime:
       s = self._Stat()
       t = time.gmtime(s.st_mtime)
@@ -145,8 +147,7 @@
     return self.mtime
 
   def GetSize(self):
-    s = self._Stat()
-    return s.st_size
+    return self._Stat().st_size
 
   def TransformToDir(self):
     """Transforms the file to the directory format.
@@ -190,21 +191,16 @@
   def GetMd5sum(self):
     if not self.md5sum:
       logging.debug("GetMd5sum() reading file %s", repr(self.pkg_path))
-      fp = open(self.pkg_path)
       hash = hashlib.md5()
-      hash.update(fp.read())
-      fp.close()
+      with open(self.pkg_path) as fp:
+        hash.update(fp.read())
       self.md5sum = hash.hexdigest()
     return self.md5sum
 
   def GetPkgchkOutput(self):
     """Returns: (exit code, stdout, stderr)."""
     args = ["/usr/sbin/pkgchk", "-d", self.GetGunzippedPath(), "all"]
-    pkgchk_proc = subprocess.Popen(
-        args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    stdout, stderr = pkgchk_proc.communicate()
-    ret = pkgchk_proc.wait()
-    return ret, stdout, stderr
+    return shell.ShellCommand(args)
 
   def __del__(self):
     if self.workdir:
@@ -304,13 +300,9 @@
         # 4: sum
         pkginfo_path = os.path.join(self.directory, "pkginfo")
         args = ["cksum", pkginfo_path]
-        cksum_process = subprocess.Popen(args, stdout=subprocess.PIPE)
-        stdout, stderr = cksum_process.communicate()
-        cksum_process.wait()
+        _, stdout, stderr = shell.ShellCommand(args)
         size = ws_re.split(stdout)[1]
         args = ["sum", pkginfo_path]
-        sum_process = subprocess.Popen(args, stdout=subprocess.PIPE)
-        stdout, stderr = sum_process.communicate()
-        sum_process.wait()
+        _, stdout, stderr = shell.ShellCommand(args)
         sum_value = ws_re.split(stdout)[0]
         fields[3] = size
@@ -474,7 +467,6 @@
     if not self.dir_pkg:
       self.dir_pkg = self.srv4.GetDirFormatPkg()
       logging.debug(repr(self.dir_pkg))
-      # subprocess.call(["tree", self.dir_pkg.directory])
 
   def Export(self, dest_dir):
     self.Transform()

Modified: csw/mgar/gar/bts/lib/python/package_checks.py
===================================================================
--- csw/mgar/gar/bts/lib/python/package_checks.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/package_checks.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -503,7 +503,10 @@
   pkgmap_paths = [x["path"] for x in pkgmap]
   for pkgmap_path in pkgmap_paths:
     try:
-      path_str = str(pkgmap_path)
+      if type(pkgmap_path) is unicode:
+        path_str = pkgmap_path.encode("utf-8")
+      else:
+        path_str = str(pkgmap_path)
       if re.search(ARCH_RE, path_str):
         reasons_to_be_arch_specific.append((
             "archall-with-arch-paths",

Modified: csw/mgar/gar/bts/lib/python/package_checks_test.py
===================================================================
--- csw/mgar/gar/bts/lib/python/package_checks_test.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/package_checks_test.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -8,7 +8,6 @@
 import package_checks as pc
 import checkpkg
 import checkpkg_lib
-import yaml
 import os.path
 import mox
 import logging
@@ -25,6 +24,8 @@
 from testdata.neon_stats import pkgstats as neon_stats
 from testdata.bdb48_stats import pkgstat_objs as bdb48_stats
 from testdata.mercurial_stats import pkgstat_objs as mercurial_stats
+from testdata.cadaver_stats import pkgstats as cadaver_stats
+from testdata.vsftpd_stats import pkgstats as vsftpd_stats
 from testdata import stubs
 
 DEFAULT_PKG_STATS = None
@@ -498,6 +499,13 @@
     binaries_dump_info[0]["needed sonames"] = ["libdb-4.7.so"]
     self.pkg_data["depends"] = (("CSWfoo", None),(u"CSWcommon", ""))
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["binaries_elf_info"]['opt/csw/bin/sparcv8/rsync'] = {
+	'version definition': [],
+	'version needed': [],
+	'symbol table': [
+		{ 'soname': 'libdb-4.7.so', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' }
+		]
+    }
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libdb-4.7.so').AndReturn({
        u'/opt/csw/lib': [u'CSWfoo'],
        u'/opt/csw/lib/sparcv9': [u'CSWfoo'],
@@ -526,6 +534,13 @@
     binaries_dump_info[0]["needed sonames"] = ["libdb-4.7.so"]
     self.pkg_data["depends"] = (("CSWbad", None),(u"CSWcommon", ""))
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["binaries_elf_info"]['opt/csw/bin/sparcv8/rsync'] = {
+	'version definition': [],
+	'version needed': [],
+	'symbol table': [
+		{ 'soname': 'libdb-4.7.so', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' }
+	]
+    }
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libdb-4.7.so').AndReturn({
        u'/opt/csw/bdb47/lib':         [u'CSWbad'],
        u'/opt/csw/bdb47lib/sparcv9': [u'CSWbad'],
@@ -554,6 +569,15 @@
     binaries_dump_info[0]["needed sonames"] = ["libdb-4.7.so"]
     self.pkg_data["depends"] = (("CSWbad", None),(u"CSWcommon", ""))
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["binaries_elf_info"]['opt/csw/bin/sparcv8/rsync'] = {
+        'version definition': [],
+	'version needed': [],
+	'symbol table': [{ 'symbol': 'foo',
+		           'soname': 'libdb-4.7.so',
+			   'bind': 'GLOB',
+			   'shndx': 'UNDEF',
+			   'flags': 'DBL' }],
+    }
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libdb-4.7.so').AndReturn({
        u'/opt/csw/bdb47/lib':         [u'CSWbad'],
        u'/opt/csw/bdb47lib/sparcv9': [u'CSWbad'],
@@ -591,6 +615,15 @@
     binaries_dump_info[0]["needed sonames"] = ["libm.so.2"]
     self.pkg_data["depends"] = ((u"CSWcommon", ""),)
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["binaries_elf_info"] = {
+	'opt/csw/bin/sparcv8/rsync': {
+		'version definition': [],
+		'version needed': [],
+		'symbol table': [
+			{ 'soname': 'libm.so.2', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' }
+			]
+		}
+	}
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libm.so.2').AndReturn({
     })
     self.error_mgr_mock.GetPkgByPath(
@@ -634,6 +667,15 @@
                                 }],
         'depends': (('CSWlibfoo', None),),
         'isalist': (),
+	'ldd_info': { 'opt/csw/bin/bar': [] },
+	'binaries_elf_info': { 'opt/csw/bin/bar': {
+		 		'version definition': [],
+				'version needed': [],
+				'symbol table': [
+					{ 'soname': 'libfoo.so.1', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' },
+					]
+				}
+			},
         'pkgmap': [],
         'files_metadata': [
                     {'endian': 'Little endian',
@@ -650,6 +692,7 @@
         'binaries_dump_info': [],
         'depends': [],
         'isalist': (),
+	       'ldd_info': {},
         'pkgmap': [],
       }
 
@@ -687,6 +730,20 @@
         # 'depends': (),
         'depends': ((u"CSWcommon", ""),),
         'isalist': ('foo'),
+	'ldd_info': { 'opt/csw/bin/bar': [], 'opt/csw/lib/libfoo.so.1': []},
+	'binaries_elf_info': { 'opt/csw/bin/bar': {
+		 		'version definition': [],
+				'version needed': [],
+				'symbol table': [
+					{ 'soname': 'libfoo.so.1', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' },
+					]
+				},
+		            'opt/csw/lib/libfoo.so.1': {
+			     		'version definition': [],
+					'version needed': [],
+					'symbol table': [],
+					}
+			    },
         'pkgmap': [
           { 'path': '/opt/csw/lib/libfoo.so.1', },
           { 'path': '/opt/csw/bin/bar', },
@@ -712,6 +769,16 @@
     binaries_dump_info[0]["path"] = 'opt/csw/lib/python/site-packages/foo.so'
     self.pkg_data["depends"] = ((u"CSWcommon", "This one provides directories"),)
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["ldd_info"] = { 'opt/csw/lib/python/site-packages/foo.so': [] }
+    self.pkg_data["binaries_elf_info"] = {
+	'opt/csw/lib/python/site-packages/foo.so': {
+		'version definition': [],
+		'version needed': [],
+		'symbol table': [
+			{ 'soname': 'libbar.so', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' }
+			]
+		}
+	}
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libbar.so').AndReturn({
        u'/opt/csw/lib': [u'CSWlibbar'],
        u'/opt/csw/lib/sparcv9': [u'CSWlibbar'],
@@ -738,6 +805,16 @@
     binaries_dump_info[0]["path"] = 'opt/csw/lib/foo.so'
     self.pkg_data["depends"] = ((u"CSWcommon","This is needed"),)
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
+    self.pkg_data["ldd_info"] = { 'opt/csw/lib/foo.so': [] }
+    self.pkg_data["binaries_elf_info"] = {
+	'opt/csw/lib/foo.so': {
+		'version definition': [],
+		'version needed': [],
+		'symbol table': [
+			{ 'soname': 'libnotfound.so', 'symbol': 'foo', 'flags': 'DBL', 'shndx': 'UNDEF', 'bind': 'GLOB' }
+			]
+		}
+	}
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename(
         'libnotfound.so').AndReturn({})
     self.error_mgr_mock.GetPkgByPath(
@@ -1333,7 +1410,174 @@
       self.error_mgr_mock.NeedFile(
           mox.IsA(str), mox.IsA(unicode), mox.IsA(str))
 
+class TestCheckUnusedSoname(CheckTestHelper, unittest.TestCase):
+  FUNCTION_NAME = 'SetCheckLibraries'
+  def testUnusedSoname(self):
+    self.pkg_data = cadaver_stats
 
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libc.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libcrypto.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libcurses.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libdl.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libexpat.so.1').AndReturn({
+      "/opt/csw/lib": [u'CSWexpat'], u'/opt/csw/lib/sparcv9': [u'CSWexpat']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libiconv.so.2').AndReturn({
+      "/opt/csw/lib": [u'CSWlibiconv2'], u'/opt/csw/lib/sparcv9': [u'CSWlibiconv2']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libintl.so.8').AndReturn({
+      "/opt/csw/lib": (u"CSWggettextrt",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libm.so.2').AndReturn(
+      {'/lib': [u'SUNWlibmsr'],
+       '/lib/sparcv9': [u'SUNWlibmsr'],
+       '/usr/lib': [u'SUNWlibms'],
+       '/usr/lib/sparcv9': [u'SUNWlibms']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libmd.so.1').AndReturn(
+      {'/lib': [u'SUNWclsr'],
+       '/lib/sparcv9': [u'SUNWclsr'],
+       '/usr/lib': [u'SUNWcls'],
+       '/usr/lib/sparcv9': [u'SUNWcls']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libmp.so.2').AndReturn(
+      {'/lib': [u'SUNWclsr'],
+       '/lib/sparcv9': [u'SUNWclsr'],
+       '/usr/lib': [u'SUNWcls'],
+       '/usr/lib/sparcv9': [u'SUNWcls']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libncurses.so.5').AndReturn({
+      "/opt/csw/lib": [u'CSWlibncurses5'], u'/opt/csw/lib/sparcv9': [u'CSWlibncurses5']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libneon.so.27').AndReturn({
+      "/opt/csw/lib": [u'CSWlibneon27'], u'/opt/csw/lib/sparcv9': [u'CSWlibneon27']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libnsl.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libreadline.so.6').AndReturn({
+      "/opt/csw/lib": [u'CSWlibreadline6'], u'/opt/csw/lib/sparcv9': [u'CSWlibreadline6']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libsocket.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libssl.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libz.so.1').AndReturn({
+      "/opt/csw/lib": (u"CSWlibz1",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibz1",),
+      "/usr/lib": (u"SUNWzlib",)})
+
+
+    for common_path in ["/opt/csw/share/locale/it/LC_MESSAGES", "/opt/csw/bin",
+		        "/opt/csw/share/locale/en at quot/LC_MESSAGES", "/opt/csw/share/man",
+			"/opt/csw/share/doc", "/opt/csw/share/locale/es/LC_MESSAGES"]:
+      self.error_mgr_mock.GetPkgByPath(common_path).AndReturn([u"CSWcommon"])
+
+    for i in range(21):
+      self.error_mgr_mock.NeedFile(
+          mox.IsA(str), mox.IsA(str), mox.IsA(str))
+
+    for soname in [ 'libcurses.so.1', 'libz.so.1', 'libssl.so.1.0.0',
+		    'libcrypto.so.1.0.0', 'libexpat.so.1' ]:
+      self.error_mgr_mock.ReportError(
+        'CSWcadaver', 'soname-unused',
+        soname + ' is needed by /opt/csw/bin/cadaver but never used')
+
+class TestCheckDirectBinding(CheckTestHelper, unittest.TestCase):
+  FUNCTION_NAME = 'SetCheckLibraries'
+  def testDirectBinding(self):
+    self.pkg_data = vsftpd_stats
+
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libc.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libcrypto.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libnsl.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libpam.so.1').AndReturn({
+      "/usr/dt/lib": (u"SUNWdtbas",),
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),
+    })
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('librt.so.1').AndReturn({
+      '/usr/lib': [u'SUNWcsl'],
+      '/usr/lib/sparcv9': [u'SUNWcslx']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libsendfile.so.1').AndReturn({
+      '/usr/lib': [u'SUNWcsl'],
+      '/usr/lib/sparcv9': [u'SUNWcslx']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libsocket.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libssl.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+
+    for common_path in ["/opt/csw/share/man", "/var/opt/csw", "/opt/csw/sbin",
+		        "/opt/csw/share/doc", "/etc/opt/csw"]:
+      self.error_mgr_mock.GetPkgByPath(common_path).AndReturn([u"CSWcommon"])
+
+    for soname in [ 'libnsl.so.1', 'libpam.so.1', 'libsocket.so.1', 'librt.so.1',
+		    'libsendfile.so.1', 'libssl.so.1.0.0', 'libcrypto.so.1.0.0',
+		    'libc.so.1' ]:
+      self.error_mgr_mock.NeedFile(
+          mox.IsA(str), mox.IsA(str), mox.IsA(str))
+
+    for soname in ['libssl.so.1.0.0']:
+      self.error_mgr_mock.ReportError(
+        'CSWvsftpd',
+        'no-direct-binding',
+        '/opt/csw/sbin/vsftpd is not directly bound to soname ' + soname)
+
+  def testDirectBindingNoSyminfo(self):
+    self.pkg_data = vsftpd_stats
+    self.pkg_data[0]['binaries_elf_info']['opt/csw/sbin/vsftpd'] = {
+     		'version definition': [],
+		'version needed': [],
+		'symbol table': [] }
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libc.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libcrypto.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libnsl.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libpam.so.1').AndReturn({
+      "/usr/dt/lib": (u"SUNWdtbas",),
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),
+    })
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('librt.so.1').AndReturn({
+      '/usr/lib': [u'SUNWcsl'],
+      '/usr/lib/sparcv9': [u'SUNWcslx']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libsendfile.so.1').AndReturn({
+      '/usr/lib': [u'SUNWcsl'],
+      '/usr/lib/sparcv9': [u'SUNWcslx']})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libsocket.so.1').AndReturn({
+      "/usr/lib": (u"SUNWcsl",),
+      "/usr/lib/sparcv9": (u"SUNWcslx",),})
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libssl.so.1.0.0').AndReturn({
+      "/opt/csw/lib": (u"CSWlibssl1-0-0",),
+      "/opt/csw/lib/sparcv9": (u"CSWlibssl1-0-0",)})
+
+    for common_path in ["/opt/csw/share/man", "/var/opt/csw", "/opt/csw/sbin",
+		        "/opt/csw/share/doc", "/etc/opt/csw"]:
+      self.error_mgr_mock.GetPkgByPath(common_path).AndReturn([u"CSWcommon"])
+
+    for soname in [ 'libnsl.so.1', 'libpam.so.1', 'libsocket.so.1', 'librt.so.1',
+		    'libsendfile.so.1', 'libssl.so.1.0.0', 'libcrypto.so.1.0.0',
+		    'libc.so.1' ]:
+      self.error_mgr_mock.NeedFile(
+          mox.IsA(str), mox.IsA(str), mox.IsA(str))
+
+    for soname in ['libsendfile.so.1', 'libssl.so.1.0.0', 'libcrypto.so.1.0.0',
+        'libpam.so.1']:
+      self.error_mgr_mock.ReportError(
+        'CSWvsftpd',
+        'no-direct-binding',
+        '/opt/csw/sbin/vsftpd is not directly bound to soname ' + soname)
+
+
 class TestCheckWrongArchitecture(CheckTestHelper, unittest.TestCase):
   FUNCTION_NAME = 'CheckWrongArchitecture'
   def testSparcBinariesInIntelPackage(self):

Modified: csw/mgar/gar/bts/lib/python/package_stats.py
===================================================================
--- csw/mgar/gar/bts/lib/python/package_stats.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/package_stats.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -6,6 +6,7 @@
 import logging
 import os
 import progressbar
+import mute_progressbar
 import re
 import sqlobject
 
@@ -35,19 +36,19 @@
 
 
 class Error(Exception):
-  pass
+  """Generic error."""
 
 
 class PackageError(Error):
-  pass
+  """Problem with the package file examined."""
 
 
 class DatabaseError(Error):
-  pass
+  """Problem with the database contents or schema."""
 
 
 class StdoutSyntaxError(Error):
-  pass
+  """A utility's output is bad, e.g. impossible to parse."""
 
 
 class PackageStatsMixin(object):
@@ -121,6 +122,10 @@
     return self.dir_format_pkg
 
   def GetMtime(self):
+    """Get svr4 file mtime value.
+
+    Returns: a datetime.datetime object.
+    """
     return self.srv4_pkg.GetMtime()
 
   def GetSize(self):
@@ -185,7 +190,7 @@
 
     """
     dir_pkg = self.GetInspectivePkg()
-    logging.debug("Collecting %s package statistics.", repr(dir_pkg.pkgname))
+    logging.debug("Collecting %r (%r) package statistics.", dir_pkg, dir_pkg.pkgname)
     override_dicts = self.GetOverrides()
     basic_stats = self.GetBasicStats()
     # This would be better inferred from pkginfo, and not from the filename, but
@@ -208,9 +213,12 @@
         "basic_stats": basic_stats,
         "files_metadata": dir_pkg.GetFilesMetadata(),
         "mtime": self.GetMtime(),
+        "ldd_info": dir_pkg.GetLddMinusRlines(),
+        "binaries_elf_info": dir_pkg.GetBinaryElfInfo(),
     }
     self.SaveStats(pkg_stats)
-    logging.debug("Statistics of %s have been collected.", repr(dir_pkg.pkgname))
+    logging.debug("Statistics of %s have been collected and saved in the db.",
+                  repr(dir_pkg.pkgname))
     return pkg_stats
 
   @classmethod
@@ -228,6 +236,7 @@
 
     Does not require an instance.
     """
+    logging.debug("SaveStats()")
     pkgname = pkg_stats["basic_stats"]["pkgname"]
     # Getting sqlobject representations.
     pkginst = cls.GetOrSetPkginst(pkgname)
@@ -246,7 +255,8 @@
     except sqlobject.main.SQLObjectNotFound, e:
       filename_arch = m.Architecture(
           name=pkg_stats["basic_stats"]["parsed_basename"]["arch"])
-    parsed_basename = pkg_stats["basic_stats"]["parsed_basename"]
+    basename = pkg_stats["basic_stats"]["parsed_basename"]
+    parsed_basename = basename
     os_rel_name = parsed_basename["osrel"]
     try:
       os_rel = m.OsRelease.select(
@@ -284,7 +294,8 @@
       logging.debug("Cleaning %s before saving it again", db_pkg_stats)
       db_pkg_stats.DeleteAllDependentObjects()
     except sqlobject.main.SQLObjectNotFound, e:
-      logging.debug("Package %s not present in the db, proceeding with insert.")
+      logging.debug("Package %s not present in the db, proceeding with insert.",
+                    basename)
       pass
     # Creating the object in the database.
     data_obj = m.Srv4FileStatsBlob(
@@ -335,15 +346,6 @@
     for override_dict in pkg_stats["overrides"]:
       o = m.CheckpkgOverride(srv4_file=db_pkg_stats,
                              **override_dict)
-    # The ldd -r reporting breaks on bigger packages during yaml saving.
-    # It might work when yaml is disabled
-    # self.DumpObject(self.GetLddMinusRlines(), "ldd_dash_r")
-    # This check is currently disabled, let's save time by not collecting
-    # these data.
-    # self.DumpObject(self.GetDefinedSymbols(), "defined_symbols")
-    # This one should be last, so that if the collection is interrupted
-    # in one of the previous runs, the basic_stats.pickle file is not there
-    # or not updated, and the collection is started again.
     return db_pkg_stats
 
   @classmethod
@@ -375,11 +377,16 @@
         line_u = pkgmap_entry["line"].decode("utf-8")
         f_path, basename = os.path.split(
             pkgmap_entry["path"].decode('utf-8'))
-      except UnicodeDecodeError, e:
+      except UnicodeDecodeError as e:
         line_u = pkgmap_entry["line"].decode("latin1")
         f_path, basename = os.path.split(
             pkgmap_entry["path"].decode('latin1'))
+      except UnicodeEncodeError as e:
+        # the line was already in unicode
+        line_u = pkgmap_entry['line']
+        f_path, basename = os.path.split(pkgmap_entry["path"])
         # If this fails too, code change will be needed.
+
       f = m.CswFile(
           basename=basename,
           path=f_path,
@@ -446,9 +453,11 @@
 
 
 def StatsListFromCatalog(file_name_list, catalog_file_name=None, debug=False):
-  packages = [inspective_package.InspectiveCswSrv4File(x, debug) for x in file_name_list]
+  packages = [inspective_package.InspectiveCswSrv4File(x, debug)
+              for x in file_name_list]
   if catalog_file_name:
-    catalog_obj = catalog.OpencswCatalog(open(catalog_file_name, "rb"))
+    with open(catalog_file_name, "rb") as fd:
+      catalog_obj = catalog.OpencswCatalog(fd)
     md5s_by_basename = catalog_obj.GetDataByBasename()
     for pkg in packages:
       basename = os.path.basename(pkg.pkg_path)
@@ -471,6 +480,7 @@
     self.debug = debug
 
   def CollectStatsFromFiles(self, file_list, catalog_file, force_unpack=False):
+    """Returns: A list of md5 sums of collected statistics."""
     args_display = file_list
     if len(args_display) > 5:
       args_display = args_display[:5] + ["...more..."]
@@ -487,9 +497,12 @@
       raise PackageError("The length of package list is zero.")
     counter = itertools.count(1)
     self.logger.info("Juicing the svr4 package stream files...")
-    pbar = progressbar.ProgressBar()
-    pbar.maxval = total_packages
-    pbar.start()
+    if not self.debug:
+      pbar = progressbar.ProgressBar()
+      pbar.maxval = total_packages
+      pbar.start()
+    else:
+      pbar = mute_progressbar.MuteProgressBar()
     while stats_list:
       # This way objects will get garbage collected as soon as they are removed
       # from the list by pop().  The destructor (__del__()) of the srv4 class

Modified: csw/mgar/gar/bts/lib/python/package_stats_test.py
===================================================================
--- csw/mgar/gar/bts/lib/python/package_stats_test.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/package_stats_test.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -60,6 +60,8 @@
     mock_dirpkg.GetFilesContaining(mox.IsA(tuple)).AndReturn([])
     mock_dirpkg.GetFilesMetadata().AndReturn([])
     mock_srv4.GetMtime().AndReturn(datetime.datetime(2010, 12, 8, 7, 52, 54))
+    mock_dirpkg.GetLddMinusRlines().AndReturn({})
+    mock_dirpkg.GetBinaryElfInfo().AndReturn({})
     pkgstats = package_stats.PackageStats(mock_srv4)
     self.mox.ReplayAll()
     data_structure = pkgstats._CollectStats(True)

Modified: csw/mgar/gar/bts/lib/python/pkgdb.py
===================================================================
--- csw/mgar/gar/bts/lib/python/pkgdb.py	2013-03-18 13:47:04 UTC (rev 20477)
+++ csw/mgar/gar/bts/lib/python/pkgdb.py	2013-03-18 16:06:19 UTC (rev 20478)
@@ -20,6 +20,7 @@
 import package_checks
 import package_stats
 import re
+import shell
 import socket
 import sqlobject
 import struct_util
@@ -82,9 +83,10 @@
   "dublin",
   "kiel",
   "bratislava",
+  "beanie",
 ])
 CATALOGS_ALLOWED_TO_BE_IMPORTED = frozenset([
-  "current",
+  "unstable",
 ])
 
 
@@ -213,7 +215,6 @@
       cat_entry_by_md5[catalog_entry["md5sum"]] = catalog_entry
       cat_entry_by_basename[catalog_entry["file_basename"]] = catalog_entry
     # - import all srv4 files that were not in the database so far
-    sqo_objects = set()
     entries_to_import = []
     logging.debug("Checking which srv4 files are already in the db.")
     for md5 in cat_entry_by_md5:
@@ -326,10 +327,18 @@
           "The catalog release %s is not one of the default releases.",

@@ Diff output truncated at 100000 characters. @@
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.



More information about the devel mailing list