[csw-devel] SF.net SVN: gar:[8232] csw/mgar/gar/v2-git

wahwah at users.sourceforge.net
Sat Jan 30 23:14:48 CET 2010


Revision: 8232
          http://gar.svn.sourceforge.net/gar/?rev=8232&view=rev
Author:   wahwah
Date:     2010-01-30 22:14:48 +0000 (Sat, 30 Jan 2010)

Log Message:
-----------
mGAR v2-git: Merging in the v2 branch

Modified Paths:
--------------
    csw/mgar/gar/v2-git/bin/checkpkg
    csw/mgar/gar/v2-git/categories/cpan/category.mk
    csw/mgar/gar/v2-git/categories/gnome/category.mk
    csw/mgar/gar/v2-git/categories/kde/category.mk
    csw/mgar/gar/v2-git/gar.conf.mk
    csw/mgar/gar/v2-git/gar.lib.mk
    csw/mgar/gar/v2-git/gar.mk
    csw/mgar/gar/v2-git/gar.pkg.mk

Added Paths:
-----------
    csw/mgar/gar/v2-git/bin/checkpkg.d/
    csw/mgar/gar/v2-git/bin/checkpkg.d/README
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py
    csw/mgar/gar/v2-git/categories/kde4/
    csw/mgar/gar/v2-git/categories/kde4/category.mk
    csw/mgar/gar/v2-git/lib/
    csw/mgar/gar/v2-git/lib/python/
    csw/mgar/gar/v2-git/lib/python/checkpkg.py
    csw/mgar/gar/v2-git/lib/python/checkpkg_test.py
    csw/mgar/gar/v2-git/lib/python/gartest.py
    csw/mgar/gar/v2-git/lib/python/opencsw.py
    csw/mgar/gar/v2-git/lib/python/opencsw_test.py
    csw/mgar/gar/v2-git/lib/python/testdata/
    csw/mgar/gar/v2-git/lib/python/testdata/README
    csw/mgar/gar/v2-git/lib/python/testdata/__init__.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWlibpq_84.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51client.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51devel.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51rt.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql5client_8x.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWpostfix.py
    csw/mgar/gar/v2-git/lib/python/testdata/dump_output_1.py
    csw/mgar/gar/v2-git/lib/python/testdata/dump_output_2.py
    csw/mgar/gar/v2-git/lib/python/testdata/example-catalog.txt
    csw/mgar/gar/v2-git/tests/
    csw/mgar/gar/v2-git/tests/example_test.py
    csw/mgar/gar/v2-git/tests/run_tests.py
    csw/mgar/gar/v2-git/tests/static/
    csw/mgar/gar/v2-git/tests/static/example/
    csw/mgar/gar/v2-git/tests/static/example/Makefile
    csw/mgar/gar/v2-git/tests/static/example/checksums
    csw/mgar/gar/v2-git/tests/static/example/gar

Removed Paths:
-------------
    csw/mgar/gar/v2-git/bin/checkpkg.d/README
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py
    csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py
    csw/mgar/gar/v2-git/categories/kde4/category.mk
    csw/mgar/gar/v2-git/lib/python/
    csw/mgar/gar/v2-git/lib/python/checkpkg.py
    csw/mgar/gar/v2-git/lib/python/checkpkg_test.py
    csw/mgar/gar/v2-git/lib/python/gartest.py
    csw/mgar/gar/v2-git/lib/python/opencsw.py
    csw/mgar/gar/v2-git/lib/python/opencsw_test.py
    csw/mgar/gar/v2-git/lib/python/testdata/
    csw/mgar/gar/v2-git/lib/python/testdata/README
    csw/mgar/gar/v2-git/lib/python/testdata/__init__.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWlibpq_84.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51client.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51devel.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql51rt.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWmysql5client_8x.py
    csw/mgar/gar/v2-git/lib/python/testdata/checkpkg_test_data_CSWpostfix.py
    csw/mgar/gar/v2-git/lib/python/testdata/dump_output_1.py
    csw/mgar/gar/v2-git/lib/python/testdata/dump_output_2.py
    csw/mgar/gar/v2-git/lib/python/testdata/example-catalog.txt
    csw/mgar/gar/v2-git/tests/example_test.py
    csw/mgar/gar/v2-git/tests/run_tests.py
    csw/mgar/gar/v2-git/tests/static/
    csw/mgar/gar/v2-git/tests/static/example/
    csw/mgar/gar/v2-git/tests/static/example/Makefile
    csw/mgar/gar/v2-git/tests/static/example/checksums
    csw/mgar/gar/v2-git/tests/static/example/gar

Property Changed:
----------------
    csw/mgar/gar/v2-git/
    csw/mgar/gar/v2-git/bin/checkpkg
    csw/mgar/gar/v2-git/pkglib/csw/depend


Property changes on: csw/mgar/gar/v2-git
___________________________________________________________________
Modified: svn:mergeinfo
   - /csw/mgar/gar/v2:4936-6678,6915-7553,7617
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-skayser:6087-6132
   + /csw/mgar/gar/v2:4936-6678,6915-8229
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-skayser:6087-6132

Modified: csw/mgar/gar/v2-git/bin/checkpkg
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/bin/checkpkg	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,6 +1,13 @@
 #!/bin/ksh -p
-
-# checkpkg 1.50 (diff to 1.46a: check multiple package files)
+# 
+# $Id$
+#
+# checkpkg 1.51
+#
+# diff to 1.46a
+#  - check multiple package files
+#  - checkpkg.d plugin support
+#
 # This script examines a package that has been put together
 # for submittal to the CSW archive at opencsw.org
 #
@@ -16,10 +23,29 @@
 
 
 PATH=$PATH:/usr/sbin
+readonly NAME_MAX_LENGTH=${NAME_MAX_LENGTH:-20}
 
 LOCAL_ARCH=`uname -p`
+if [[ -z "${CHECKPKG_TMPDIR}" ]]; then
+  readonly CHECKPKG_TMPDIR="/var/tmp"
+else
+  readonly CHECKPKG_TMPDIR
+fi
 
+# Colors only when running interactively
+if [[ -t 1 ]]; then
+	GREEN="\\033[0;32;40m"
+	RED="\\033[1;31;40m"
+	COLOR_RESET="\\033[00m"
+else
+	GREEN=""
+	RED=""
+	COLOR_RESET=""
+fi
 
+readonly selfpath="$0"
+readonly selfargs="$*"
+
 # always print out a warning message. (to stderr)
 # exit script, if quit_on_warn set
 
@@ -28,11 +54,17 @@
 	if [[ -d "$EXTRACTDIR" ]] ; then
 		rm -rf $EXTRACTDIR
 	fi
-	if [[ "$TMPARCHIVE" != "" ]] ; then
-		[ -f "$TMPARCHIVE" ] && rm $TMPARCHIVE
-	fi
+	cleantmparchives
 }
 
+cleantmparchives() {
+	for TMPARCHIVE in $tmparchives; do
+		if [[ "$TMPARCHIVE" != "" ]]; then
+			[ -f "$TMPARCHIVE" ] && rm $TMPARCHIVE
+		fi
+	done
+}
+
 cleanupset(){
     if [ "`echo $SETINF*`" != "$SETINF*" ]; then
 	rm $SETINF*
@@ -51,9 +83,40 @@
 	print ERROR: $* >/dev/fd/2
 	cleanup
 	cleanupset
+	print "To run checkpkg in the debug mode, add the '-d' flag:"
+	print "${selfpath} -d ${selfargs}"
 	exit 1
 }
 
+debugmsg() {
+	if [[ "${DEBUG}" != "" ]]; then
+		print "DEBUG: $*" > /dev/fd/2
+	fi
+}
+
+set_variables_for_individual_package_check() {
+	f=$1
+	file $f \
+	    | sed 's/^[^:]*://' \
+	    | grep gzip >/dev/null
+	if [ $? -eq 0 ] ; then
+		TMPARCHIVE=$CHECKPKG_TMPDIR/`basename $f`
+		if [[ -f $TMPARCHIVE ]] ; then
+			print ERROR: $TMPARCHIVE already exists
+			
+		fi
+		gzcat $f >$TMPARCHIVE || exit 1
+		f=$TMPARCHIVE
+	fi
+	pkgname=`nawk 'NR == 2 {print $1; exit;}' $f`
+	pkgnames="$pkgnames $pkgname"
+}
+
+if [[ "$1" == "-d" ]] ; then
+	DEBUG=1
+	shift
+fi
+
 if [[ "$1" == "-e" ]] ; then
 	quit_on_warn=1;
 	shift
@@ -65,11 +128,19 @@
 fi
 
 # a unique filename for the list of package deps and libs we see in a 'set'
-SETINF=/tmp/checkpkg.$$.`date +%Y%m%d%H%M%S`
+SETINF=$CHECKPKG_TMPDIR/checkpkg.$$.`date +%Y%m%d%H%M%S`
 SETLIBS=$SETINF.libs
 SETDEPS=$SETINF.deps
+pkgnames=""
+tmparchives=""
 
+EXTRACTDIR=$CHECKPKG_TMPDIR/dissect.$$
 
+if [ -d $EXTRACTDIR ] ; then
+	print ERROR: $EXTRACTDIR already exists
+	exit 1
+fi
+
 for f in "$@"
 do
 
@@ -82,9 +153,6 @@
 
 
 case $f in
-	cswutils-*)
-		:
-	;;
 	*)
 	print Examining $f
 
@@ -111,30 +179,13 @@
 	done
 esac
 
-print Extracting files for more detailed inspection...
+print Extracting all files for more detailed inspection...
 
-file $f |sed 's/^.*://' |grep gzip >/dev/null
-if [ $? -eq 0 ] ; then
-	TMPARCHIVE=/tmp/`basename $f`
-	if [[ -f $TMPARCHIVE ]] ; then
-		print ERROR: $TMPARCHIVE already exists
-		
-	fi
-	gzcat $f >$TMPARCHIVE || exit 1
-	f=$TMPARCHIVE
-fi
+set_variables_for_individual_package_check "$f"
 
-pkgname=`nawk 'NR == 2 {print $1; exit;}' $f`
-
-EXTRACTDIR=/tmp/dissect.$$
-
-if [ -d $EXTRACTDIR ] ; then
-	print ERROR: $EXTRACTDIR already exists
-	exit 1
-fi
-
 mkdir $EXTRACTDIR
 
+# FIXME: This doesn't support multiple packages
 TMPFILE=$EXTRACTDIR/pkginfo
 
 
@@ -158,8 +209,7 @@
 
 case $software in
      *[A-Z]*)
-	echo ERROR: $software must be all lowercase
-	exit 1
+	errmsg "$software must be all lowercase"
 	;;
 esac
 
@@ -213,8 +263,8 @@
 	print basedir="'$basedir'"
 fi
 
-if [[ ${#software} -gt 20 ]] ; then errmsg $f: software name greater than 20 chars ; fi
-if [[ ${#pkgname} -gt 20 ]] ; then errmsg $f: pkg name greater than 20 chars; fi
+if [[ ${#software} -gt ${NAME_MAX_LENGTH} ]] ; then errmsg $f: software name greater than 20 chars ; fi
+if [[ ${#pkgname} -gt ${NAME_MAX_LENGTH} ]] ; then errmsg $f: pkg name greater than 20 chars; fi
 
 if [ "$software" = "" ] ; then errmsg $f: software field not set properly in NAME ; fi
 if [ "$pkgname" = "" ] ; then errmsg $f: pkgname field blank ; fi
@@ -228,8 +278,7 @@
 
 case $version in
 	*-*)
-		print ERROR: VERSION field not allowed to have '"-"' in it
-		exit 1
+		errmsg "VERSION field not allowed to have \"-\" in it"
 	;;
 	*,REV=20[01][0-9].[0-9][0-9].[0-9][0-9]*)
 		:
@@ -252,6 +301,17 @@
 	exit 1
 esac
 
+case $f in
+  *${pkgarch}*)
+    print "f: $f, pkgarch: $pkgarch"
+    ;;
+  *)
+    print "The file name is '$f'."
+    print "The pkgarch is '$pkgarch'."
+    errmsg "ERROR: package file name and pkgarch don't match."
+    ;;
+esac
+
 goodarch=yes
 case $f in
 	*${LOCAL_ARCH}*)
@@ -304,7 +364,6 @@
 print Extracting pkg for examination of files...
 pkgtrans $f $EXTRACTDIR $pkgname
 
-
 #############################################################
 # We now have the package expanded, in "directory" form, in
 # $EXTRACTDIR/$pkgname
@@ -406,7 +465,7 @@
 	#cat $EXTRACTDIR/elflist| xargs ldd  2>/dev/null |fgrep  '.so' |
 	#              sed 's:^.*=>[^/]*::' | nawk '{print $1}' |sort -u >$EXTRACTDIR/liblist
 
-		cat $EXTRACTDIR/elflist| xargs dump -Lv |nawk '$2=="NEEDED"{print $3}' |
+		cat $EXTRACTDIR/elflist| xargs /usr/ccs/bin/dump -Lv |nawk '$2=="NEEDED"{print $3}' |
 			sort -u | egrep -v $EXTRACTDIR >$EXTRACTDIR/liblist
 
 			
@@ -439,17 +498,15 @@
 # sanity check against "depends on self"
 nawk '$2=="'$pkgname'" {exit 1}' $EXTRACTDIR/$pkgname/install/depend
 if [[ $? -ne 0 ]] ; then
-	print ERROR: $pkgname references self in depend file
-	exit 1
+	errmsg "$pkgname references self in depend file"
 fi
 
-# Verify that there are no double depends
+# Verify that there are no multiple depends
 repeated_depends="$(awk '{print $2}' $EXTRACTDIR/$pkgname/install/depend \
   | sort | uniq -c | awk '{print $1}' | sort | uniq | wc -l)"
 if [[ "$repeated_depends" -gt 1 ]]; then
         cat $EXTRACTDIR/$pkgname/install/depend
-        print ERROR: $pkgname has double depends
-        exit 1
+        errmsg "$pkgname has multiple depends"
 fi
 
 #to retain a record of all packages currently being examined from $@
@@ -474,47 +531,6 @@
     fi
 done
 
-egrep -v 'SUNWbcp|SUNWowbcp|SUNWucb' /var/sadm/install/contents |
-		fgrep -f $EXTRACTDIR/liblist >$EXTRACTDIR/shortcatalog
-
-
-
-for lib in `cat $EXTRACTDIR/liblist` ; do
-	grep "[/=]$lib[ =]" $EXTRACTDIR/$pkgname/pkgmap
-	if [[ $? -eq 0 ]] ; then
-		echo $lib provided by package itself
-		continue
-	else
-	    grep "[/=]$lib[ =]" $SETLIBS
-	    if [[ $? -eq 0 ]]; then
-		echo "$lib provided by package set being evaluated."
-		continue
-	    fi
-	fi
-
-	libpkg=`grep /$lib $EXTRACTDIR/shortcatalog |
-	      sed 's/^.* \([^ ]*\)$/\1/' |sort -u`
-
-	if [[ -z "$libpkg" ]] ; then
-		echo "$lib $pkgname" >> $SETLIBS.missing
-		print Cannot find package providing $lib.  Storing for delayed validation.
-	else
-		print $libpkg | fmt -1 >>$EXTRACTDIR/libpkgs
-	fi
-done
-
-sort -u $EXTRACTDIR/libpkgs >$EXTRACTDIR/libpkgs.x
-mv $EXTRACTDIR/libpkgs.x $EXTRACTDIR/libpkgs
-
-diff $EXTRACTDIR/deppkgs $EXTRACTDIR/libpkgs >/dev/null
-if [[ $? -ne 0 ]] ; then
-	print SUGGESTION: you may want to add some or all of the following as depends:
-	print '   (Feel free to ignore SUNW or SPRO packages)'
-	diff $EXTRACTDIR/deppkgs $EXTRACTDIR/libpkgs | fgrep '>'
-fi
-
-
-
 if [[ "$basedir" != "" ]] ; then
 	print
 	if [[ -f $EXTRACTDIR/elflist ]] ; then
@@ -530,13 +546,90 @@
 	fi
 fi
 
+# Plugin section.
+#
+# Plugins should live in the checkpkg.d subdirectory, in the same directory in
+# which checkpkg is.  Each plugin should be executable and its file name should
+# begin with "checkpkg-".
 
-cleanup
+tmparchives="$tmparchives $TMPARCHIVE"
+done
 
-print ""
+# All packages have been extracted.
 
+set_variables_for_individual_package_check "$f"
+
+test_suite_ok=1
+checkpkg_scriptname=`basename $0`
+checkpkg_basedir=${0%/${checkpkg_scriptname}}
+plugindir=${checkpkg_basedir}/checkpkg.d
+
+# Cleaning up old *.pyc files which can cause grief.  This is because of the
+# move of Python libraries.
+for pyc_file in ${plugindir}/opencsw.pyc \
+                ${plugindir}/checkpkg.pyc; do
+  if [ -f "${pyc_file}" ]; then
+    echo "Removing old pyc file: '${pyc_file}'"
+    rm "${pyc_file}"
+  fi
 done
 
+# /var/sadm/install/contents cache update
+${plugindir}/update_contents_cache.py
+
+if [[ "${DEBUG}" != "" ]]; then
+	extra_options="--debug"
+fi
+debugmsg "plugindir: '$plugindir'"
+log_files=""
+if [[ -d "$plugindir" ]]; then
+  echo "Running modular tests"
+	# echo plugin dir exists
+	for plugin in "${plugindir}"/checkpkg-*; do
+		if [[ -x "${plugin}" ]]; then
+			debugmsg "Executing: ${plugin} $extra_options -e \"${EXTRACTDIR}\" ${pkgnames}"
+			plugin_base_name=`basename ${plugin}`
+			plugin_log="${EXTRACTDIR}/${plugin_base_name}.log"
+			log_files="${log_files} ${plugin_log}"
+			printf "TEST: ${plugin} running..."
+			${plugin} $extra_options -e "${EXTRACTDIR}" ${pkgnames} > "${plugin_log}" 2>&1
+			if [[ "$?" -ne 0 ]]; then
+				printf "\rTEST: ${plugin} ${RED}[FAIL]${COLOR_RESET}        \\n"
+				test_suite_ok=0
+			else
+				printf "\rTEST: ${plugin} ${GREEN}[OK]${COLOR_RESET}        \\n"
+			fi
+		else
+			debugmsg "'${plugin}' is not executable"
+		fi
+	done
+else
+	debugmsg "plugin dir does not exist"
+fi
+
+echo
+for log_file in ${log_files}; do
+	if [[ -s "${log_file}" ]]; then
+		debugmsg ">> LOG START: ${log_file}"
+		cat "${log_file}"
+		debugmsg "<< LOG END: ${log_file}"
+  else
+  	debugmsg "-- LOG ${log_file} is empty"
+	fi
+done
+echo
+
+if [[ ${test_suite_ok} -ne 1 ]]; then
+	errmsg "One or more modular tests have failed."
+else
+	print "All modular tests were successful."
+fi
+
+print ""
+
+# Cleaning up after all packages
+cleanup
+
 if [ -s $SETDEPS.missing ]; then
     print "Doing late evaluations of package dependencies."
     while read mdep; do
@@ -551,19 +644,4 @@
     done < $SETDEPS.missing
 fi
 
-if [ -s $SETLIBS.missing ]; then
-    print "Doing late evaluations of package library dependencies."
-    while read ldep; do
-	lib=`echo $ldep | nawk '{print $1}'`
-        [ "$lib" = "libm.so.2" ] && continue
-	pkg=`echo $ldep | nawk '{print $2}'`
-	/usr/bin/grep "[/=]$lib[ =]" $SETLIBS >/dev/null
-	if [ $? -ne 0 ]; then
-	    errmsg "Couldn't find a package providing $lib"
-	else
-	    print "A package in the set being evaluated provides $lib"
-	fi
-    done < $SETLIBS.missing
-fi
-
 cleanupset


Property changes on: csw/mgar/gar/v2-git/bin/checkpkg
___________________________________________________________________
Added: svn:keywords
   + Id
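
The new knobs above (-d, CHECKPKG_TMPDIR, NAME_MAX_LENGTH) are all optional; a
rough sketch of invoking the reworked script, with placeholder package file
names in the usual OpenCSW style:

  # Keep scratch files in /tmp instead of the /var/tmp default, and run the
  # modular checks in debug mode (-d also passes --debug to every plugin).
  CHECKPKG_TMPDIR=/tmp ./checkpkg -d \
      foo-1.0,REV=2010.01.30-SunOS5.8-sparc-CSW.pkg.gz

  # Pre-existing behaviour: -e turns warnings into fatal errors.  The name
  # length limit can also be relaxed via the environment if ever needed.
  NAME_MAX_LENGTH=30 ./checkpkg -e \
      foo-1.0,REV=2010.01.30-SunOS5.8-sparc-CSW.pkg.gz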

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/README
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/README	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/README	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,11 +0,0 @@
-$Id$
-
-This directory contains modular checks.  Each check is an executable file,
-written in any language, accepting specific command line options and returning
-the result as a status exit code.
-
-To see the required flags, issue:
-
-./checkpkg-dummy.py -h
-
-Each test's file name must begin with "checkpkg-".

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/README (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/README)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/README	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/README	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,11 @@
+$Id$
+
+This directory contains modular checks.  Each check is an executable file,
+written in any language, accepting specific command line options and returning
+the result as a status exit code.
+
+To see the required flags, issue:
+
+./checkpkg-dummy.py -h
+
+Each test's file name must begin with "checkpkg-".
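
As the README says, a check does not have to use the Python helpers at all.
A minimal sketch of a plugin in plain shell, honouring the interface the
driver uses when it calls each check ("plugin [--debug] -e EXTRACTDIR
PKGNAME..."); the file name and the check itself are made up for illustration:

  #!/bin/sh
  # checkpkg-has-pkginfo: hypothetical check that every package in the set
  # ships a pkginfo file.  A non-zero exit marks the test as FAIL in checkpkg.
  EXTRACTDIR=""
  while [ $# -gt 0 ]; do
    case "$1" in
      --debug) shift ;;
      -e)      EXTRACTDIR="$2"; shift 2 ;;
      *)       break ;;
    esac
  done
  ret=0
  for pkgname in "$@"; do
    # checkpkg expands each package into $EXTRACTDIR/$pkgname with pkgtrans.
    if [ ! -f "$EXTRACTDIR/$pkgname/pkginfo" ]; then
      echo "ERROR: $pkgname has no pkginfo under $EXTRACTDIR"
      ret=1
    fi
  done
  exit $ret

Drop such a file into bin/checkpkg.d/ and make it executable; the driver only
runs executable files whose names start with "checkpkg-".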

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-actionclasses.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,59 +0,0 @@
-#!/opt/csw/bin/python2.6
-# $Id$
-
-"""This is a dummy check. You can use it as a boilerplate for your own checks.
-
-Copy it and modify.
-"""
-
-import logging
-import os.path
-import sys
-import re
-
-# The following bit of code sets the correct path to Python libraries
-# distributed with GAR.
-path_list = [os.path.dirname(__file__),
-             "..", "..", "lib", "python"]
-sys.path.append(os.path.join(*path_list))
-import checkpkg
-import opencsw
-
-
-def CheckActionClasses(pkg):
-  """Checks the consistency between classes in the prototype and pkginfo."""
-  errors = []
-  pkginfo = pkg.GetParsedPkginfo()
-  pkgmap = pkg.GetPkgmap()
-  pkginfo_classes = set(re.split(opencsw.WS_RE, pkginfo["CLASSES"]))
-  pkgmap_classes = pkgmap.GetClasses()
-  only_in_pkginfo = pkginfo_classes.difference(pkgmap_classes)
-  only_in_pkgmap = pkgmap_classes.difference(pkginfo_classes)
-  for cls in only_in_pkginfo:
-    print "Class %s of %s is only in pkginfo" % (repr(cls), pkg.pkgname)
-    print "This shouldn't cause any problems, but it might be not necessary."
-  for cls in only_in_pkgmap:
-    errors.append(
-        opencsw.PackageError("Class %s is only in pkgmap" % repr(cls)))
-  if only_in_pkginfo or only_in_pkgmap:
-    print ("pkginfo_classes: %s, pkgmap classes: %s"
-           % (pkginfo_classes, pkgmap_classes))
-  return errors
-
-
-def main():
-  options, args = checkpkg.GetOptions()
-  pkgnames = args
-  check_manager = checkpkg.CheckpkgManager(
-      "class action scripts / prototype integrity",
-      options.extractdir,
-      pkgnames,
-      options.debug)
-  check_manager.RegisterIndividualCheck(CheckActionClasses)
-  exit_code, report = check_manager.Run()
-  print report.strip()
-  sys.exit(exit_code)
-
-
-if __name__ == '__main__':
-  main()

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-actionclasses.py)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-actionclasses.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,59 @@
+#!/opt/csw/bin/python2.6
+# $Id$
+
+"""This is a dummy check. You can use it as a boilerplate for your own checks.
+
+Copy it and modify.
+"""
+
+import logging
+import os.path
+import sys
+import re
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+             "..", "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+import opencsw
+
+
+def CheckActionClasses(pkg):
+  """Checks the consistency between classes in the prototype and pkginfo."""
+  errors = []
+  pkginfo = pkg.GetParsedPkginfo()
+  pkgmap = pkg.GetPkgmap()
+  pkginfo_classes = set(re.split(opencsw.WS_RE, pkginfo["CLASSES"]))
+  pkgmap_classes = pkgmap.GetClasses()
+  only_in_pkginfo = pkginfo_classes.difference(pkgmap_classes)
+  only_in_pkgmap = pkgmap_classes.difference(pkginfo_classes)
+  for cls in only_in_pkginfo:
+    print "Class %s of %s is only in pkginfo" % (repr(cls), pkg.pkgname)
+    print "This shouldn't cause any problems, but it might be not necessary."
+  for cls in only_in_pkgmap:
+    errors.append(
+        opencsw.PackageError("Class %s is only in pkgmap" % repr(cls)))
+  if only_in_pkginfo or only_in_pkgmap:
+    print ("pkginfo_classes: %s, pkgmap classes: %s"
+           % (pkginfo_classes, pkgmap_classes))
+  return errors
+
+
+def main():
+  options, args = checkpkg.GetOptions()
+  pkgnames = args
+  check_manager = checkpkg.CheckpkgManager(
+      "class action scripts / prototype integrity",
+      options.extractdir,
+      pkgnames,
+      options.debug)
+  check_manager.RegisterIndividualCheck(CheckActionClasses)
+  exit_code, report = check_manager.Run()
+  print report.strip()
+  sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+  main()

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-libs.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,216 +0,0 @@
-#!/opt/csw/bin/python2.6
-#
-# $Id$
-#
-# A check for dependencies between shared libraries.
-#
-# This is currently more of a prototype than a mature program, but it has some
-# unit tests and it appears to be working.  The main problem is that it's not
-# divided into smaller testable sections.
-
-import os
-import os.path
-import copy
-import re
-import subprocess
-import logging
-import sys
-import textwrap
-
-# The following bit of code sets the correct path to Python libraries
-# distributed with GAR.
-path_list = [os.path.dirname(__file__),
-             "..", "..", "lib", "python"]
-sys.path.append(os.path.join(*path_list))
-import checkpkg
-import opencsw
-
-DUMP_BIN = "/usr/ccs/bin/dump"
-
-def GetIsalist():
-  args = ["isalist"]
-  isalist_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
-  stdout, stderr = isalist_proc.communicate()
-  ret = isalist_proc.wait()
-  if ret:
-    logging.error("Calling isalist has failed.")
-  isalist = re.split(r"\s+", stdout.strip())
-  return isalist
-
-
-def main():
-  result_ok = True
-  errors = []
-  options, args = checkpkg.GetOptions()
-  pkgnames = args
-  if options.debug:
-    logging.basicConfig(level=logging.DEBUG)
-  else:
-    logging.basicConfig(level=logging.INFO)
-  checkers = []
-  for pkgname in pkgnames:
-    checker = checkpkg.CheckpkgBase(options.extractdir, pkgname)
-    checkers.append(checker)
-  binaries = []
-  binaries_by_pkgname = {}
-  sonames_by_pkgname = {}
-  pkg_by_any_filename = {}
-  for checker in checkers:
-    pkg_binary_paths = checker.ListBinaries()
-    binaries_base = [os.path.split(x)[1] for x in pkg_binary_paths]
-    binaries_by_pkgname[checker.pkgname] = binaries_base
-    binaries.extend(pkg_binary_paths)
-    for filename in checker.GetAllFilenames():
-      pkg_by_any_filename[filename] = checker.pkgname
-  # Making the binaries unique
-  binaries = set(binaries)
-  ws_re = re.compile(r"\s+")
-
-  # man ld.so.1 for more info on this hack
-  env = copy.copy(os.environ)
-  env["LD_NOAUXFLTR"] = "1"
-  needed_sonames_by_binary = {}
-  filenames_by_soname = {}
-  # Assembling a data structure with the data about binaries.
-  # {
-  #   <binary1 name>: { checkpkg.NEEDED_SONAMES: [...],
-  #                     checkpkg.RUNPATH:        [...]},
-  #   <binary2 name>: ...,
-  #   ...
-  # }
-  #
-  for binary in binaries:
-    binary_base_name = binary.split("/")[-1]
-    args = [DUMP_BIN, "-Lv", binary]
-    dump_proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
-    stdout, stderr = dump_proc.communicate()
-    ret = dump_proc.wait()
-    binary_data = checkpkg.ParseDumpOutput(stdout)
-    needed_sonames_by_binary[binary_base_name] = binary_data
-    if checkpkg.SONAME not in binary_data:
-      logging.debug("The %s binary doesn't provide a SONAME. "
-                    "(It might be an executable)",
-                   binary_base_name)
-      # The shared library doesn't tell its SONAME.  We're guessing it's the
-      # same as the base file name.
-      binary_data[checkpkg.SONAME] = binary_base_name
-    filenames_by_soname[binary_data[checkpkg.SONAME]] = binary_base_name
-
-  isalist = GetIsalist()
-
-  # Building indexes by soname to simplify further processing
-  # These are indexes "by soname".
-  (needed_sonames,
-   binaries_by_soname,
-   runpath_by_needed_soname) = checkpkg.BuildIndexesBySoname(
-       needed_sonames_by_binary)
-
-  pkgmap = checkpkg.SystemPkgmap()
-  logging.debug("Determining the soname-package relationships.")
-  # lines by soname is an equivalent of $EXTRACTDIR/shortcatalog
-  lines_by_soname = checkpkg.GetLinesBySoname(
-      pkgmap, needed_sonames, runpath_by_needed_soname, isalist)
-
-  # Creating a map from files to packages.
-  pkgs_by_filename = {}
-  for soname, line in lines_by_soname.iteritems():
-    # TODO: Find all the packages, not just the last field.
-    fields = re.split(ws_re, line.strip())
-    # For now, we'll assume that the last field is the package.
-    pkgname = fields[-1]
-    pkgs_by_filename[soname] = pkgname
-
-  # A shared object dependency/provisioning report, plus checking.
-  #
-  # This section is somewhat overlapping with checkpkg.AnalyzeDependencies(),
-  # it has a different purpose: it reports the relationships between shared
-  # libraries, binaries using them and packages providing them.  Ideally, the
-  # same bit of code will do checking and reporting.
-  #
-  # TODO: Rewrite this using cheetah templates
-  if options.debug and needed_sonames:
-    print "Analysis of sonames needed by the package set:"
-    binaries_with_missing_sonames = set([])
-    for soname in needed_sonames:
-      logging.debug("Analyzing: %s", soname)
-      if soname in filenames_by_soname:
-        print "%s is provided by the package itself" % soname
-      elif soname in lines_by_soname:
-        print ("%s is provided by %s and required by:" 
-               % (soname,
-                  pkgs_by_filename[soname]))
-        filename_lines = " ".join(sorted(binaries_by_soname[soname]))
-        for line in textwrap.wrap(filename_lines, 70):
-          print " ", line
-      else:
-        print ("%s is required by %s, but we don't know what provides it."
-               % (soname, binaries_by_soname[soname]))
-        for binary in binaries_by_soname[soname]:
-          binaries_with_missing_sonames.add(binary)
-        if soname in checkpkg.ALLOWED_ORPHAN_SONAMES:
-          print "However, it's a whitelisted soname."
-        else:
-          errors.append(
-              checkpkg.Error("%s is required by %s, but "
-                             "we don't know what provides it."
-                             % (soname, binaries_by_soname[soname])))
-    if binaries_with_missing_sonames:
-      print "The following are binaries with missing sonames:"
-      binary_lines = " ".join(sorted(binaries_with_missing_sonames))
-      for line in textwrap.wrap(binary_lines, 70):
-        print " ", line
-    print
-
-  dependent_pkgs = {}
-  for checker in checkers:
-    pkgname = checker.pkgname
-    dir_format_pkg = opencsw.DirectoryFormatPackage(checker.pkgpath)
-    declared_dependencies = dir_format_pkg.GetDependencies()
-    if options.debug:
-      sanitized_pkgname = pkgname.replace("-", "_")
-      data_file_name = "/var/tmp/checkpkg_test_data_%s.py" % sanitized_pkgname
-      logging.warn("Saving test data to %s." % repr(data_file_name))
-      test_fd = open(data_file_name, "w")
-      print >>test_fd, "# Testing data for %s" % pkgname
-      print >>test_fd, "# $Id$"
-      print >>test_fd, "DATA_PKGNAME                  =", repr(pkgname)
-      print >>test_fd, "DATA_DECLARED_DEPENDENCIES    =", repr(declared_dependencies)
-      print >>test_fd, "DATA_BINARIES_BY_PKGNAME      =", repr(binaries_by_pkgname)
-      print >>test_fd, "DATA_NEEDED_SONAMES_BY_BINARY =", repr(needed_sonames_by_binary)
-      print >>test_fd, "DATA_PKGS_BY_FILENAME         =", repr(pkgs_by_filename)
-      print >>test_fd, "DATA_FILENAMES_BY_SONAME      =", repr(filenames_by_soname)
-      print >>test_fd, "DATA_PKG_BY_ANY_FILENAME      =", repr(pkg_by_any_filename)
-      print >>test_fd, "DATA_LINES_BY_SONAME          =", repr(lines_by_soname)
-      print >>test_fd, "DATA_PKGMAP_CACHE             =", repr(pkgmap.cache)
-      print >>test_fd, "DATA_BINARIES_BY_SONAME       =", repr(binaries_by_soname)
-      print >>test_fd, "DATA_ISALIST                  =", repr(isalist)
-      test_fd.close()
-
-    missing_deps, surplus_deps, orphan_sonames = checkpkg.AnalyzeDependencies(
-        pkgname,
-        declared_dependencies,
-        binaries_by_pkgname,
-        needed_sonames_by_binary,
-        pkgs_by_filename,
-        filenames_by_soname,
-        pkg_by_any_filename)
-    print checker.FormatDepsReport(missing_deps,
-                                   surplus_deps,
-                                   orphan_sonames)
-
-    for soname in orphan_sonames:
-      errors.append(checkpkg.Error("The following soname does't belong to "
-                                   "any package: %s" % soname))
-
-  if errors:
-    for error in errors:
-      logging.error(error)
-    sys.exit(1)
-  else:
-    sys.exit(0)
-
-
-if __name__ == '__main__':
-  main()
-
-# vim:set sw=2 ts=2 sts=2 expandtab:

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-libs.py)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-libs.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,216 @@
+#!/opt/csw/bin/python2.6
+#
+# $Id$
+#
+# A check for dependencies between shared libraries.
+#
+# This is currently more of a prototype than a mature program, but it has some
+# unit tests and it appears to be working.  The main problem is that it's not
+# divided into smaller testable sections.
+
+import os
+import os.path
+import copy
+import re
+import subprocess
+import logging
+import sys
+import textwrap
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+             "..", "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+import opencsw
+
+DUMP_BIN = "/usr/ccs/bin/dump"
+
+def GetIsalist():
+  args = ["isalist"]
+  isalist_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  stdout, stderr = isalist_proc.communicate()
+  ret = isalist_proc.wait()
+  if ret:
+    logging.error("Calling isalist has failed.")
+  isalist = re.split(r"\s+", stdout.strip())
+  return isalist
+
+
+def main():
+  result_ok = True
+  errors = []
+  options, args = checkpkg.GetOptions()
+  pkgnames = args
+  if options.debug:
+    logging.basicConfig(level=logging.DEBUG)
+  else:
+    logging.basicConfig(level=logging.INFO)
+  checkers = []
+  for pkgname in pkgnames:
+    checker = checkpkg.CheckpkgBase(options.extractdir, pkgname)
+    checkers.append(checker)
+  binaries = []
+  binaries_by_pkgname = {}
+  sonames_by_pkgname = {}
+  pkg_by_any_filename = {}
+  for checker in checkers:
+    pkg_binary_paths = checker.ListBinaries()
+    binaries_base = [os.path.split(x)[1] for x in pkg_binary_paths]
+    binaries_by_pkgname[checker.pkgname] = binaries_base
+    binaries.extend(pkg_binary_paths)
+    for filename in checker.GetAllFilenames():
+      pkg_by_any_filename[filename] = checker.pkgname
+  # Making the binaries unique
+  binaries = set(binaries)
+  ws_re = re.compile(r"\s+")
+
+  # man ld.so.1 for more info on this hack
+  env = copy.copy(os.environ)
+  env["LD_NOAUXFLTR"] = "1"
+  needed_sonames_by_binary = {}
+  filenames_by_soname = {}
+  # Assembling a data structure with the data about binaries.
+  # {
+  #   <binary1 name>: { checkpkg.NEEDED_SONAMES: [...],
+  #                     checkpkg.RUNPATH:        [...]},
+  #   <binary2 name>: ...,
+  #   ...
+  # }
+  #
+  for binary in binaries:
+    binary_base_name = binary.split("/")[-1]
+    args = [DUMP_BIN, "-Lv", binary]
+    dump_proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
+    stdout, stderr = dump_proc.communicate()
+    ret = dump_proc.wait()
+    binary_data = checkpkg.ParseDumpOutput(stdout)
+    needed_sonames_by_binary[binary_base_name] = binary_data
+    if checkpkg.SONAME not in binary_data:
+      logging.debug("The %s binary doesn't provide a SONAME. "
+                    "(It might be an executable)",
+                   binary_base_name)
+      # The shared library doesn't tell its SONAME.  We're guessing it's the
+      # same as the base file name.
+      binary_data[checkpkg.SONAME] = binary_base_name
+    filenames_by_soname[binary_data[checkpkg.SONAME]] = binary_base_name
+
+  isalist = GetIsalist()
+
+  # Building indexes by soname to simplify further processing
+  # These are indexes "by soname".
+  (needed_sonames,
+   binaries_by_soname,
+   runpath_by_needed_soname) = checkpkg.BuildIndexesBySoname(
+       needed_sonames_by_binary)
+
+  pkgmap = checkpkg.SystemPkgmap()
+  logging.debug("Determining the soname-package relationships.")
+  # lines by soname is an equivalent of $EXTRACTDIR/shortcatalog
+  lines_by_soname = checkpkg.GetLinesBySoname(
+      pkgmap, needed_sonames, runpath_by_needed_soname, isalist)
+
+  # Creating a map from files to packages.
+  pkgs_by_filename = {}
+  for soname, line in lines_by_soname.iteritems():
+    # TODO: Find all the packages, not just the last field.
+    fields = re.split(ws_re, line.strip())
+    # For now, we'll assume that the last field is the package.
+    pkgname = fields[-1]
+    pkgs_by_filename[soname] = pkgname
+
+  # A shared object dependency/provisioning report, plus checking.
+  #
+  # This section is somewhat overlapping with checkpkg.AnalyzeDependencies(),
+  # it has a different purpose: it reports the relationships between shared
+  # libraries, binaries using them and packages providing them.  Ideally, the
+  # same bit of code will do checking and reporting.
+  #
+  # TODO: Rewrite this using cheetah templates
+  if options.debug and needed_sonames:
+    print "Analysis of sonames needed by the package set:"
+    binaries_with_missing_sonames = set([])
+    for soname in needed_sonames:
+      logging.debug("Analyzing: %s", soname)
+      if soname in filenames_by_soname:
+        print "%s is provided by the package itself" % soname
+      elif soname in lines_by_soname:
+        print ("%s is provided by %s and required by:" 
+               % (soname,
+                  pkgs_by_filename[soname]))
+        filename_lines = " ".join(sorted(binaries_by_soname[soname]))
+        for line in textwrap.wrap(filename_lines, 70):
+          print " ", line
+      else:
+        print ("%s is required by %s, but we don't know what provides it."
+               % (soname, binaries_by_soname[soname]))
+        for binary in binaries_by_soname[soname]:
+          binaries_with_missing_sonames.add(binary)
+        if soname in checkpkg.ALLOWED_ORPHAN_SONAMES:
+          print "However, it's a whitelisted soname."
+        else:
+          errors.append(
+              checkpkg.Error("%s is required by %s, but "
+                             "we don't know what provides it."
+                             % (soname, binaries_by_soname[soname])))
+    if binaries_with_missing_sonames:
+      print "The following are binaries with missing sonames:"
+      binary_lines = " ".join(sorted(binaries_with_missing_sonames))
+      for line in textwrap.wrap(binary_lines, 70):
+        print " ", line
+    print
+
+  dependent_pkgs = {}
+  for checker in checkers:
+    pkgname = checker.pkgname
+    dir_format_pkg = opencsw.DirectoryFormatPackage(checker.pkgpath)
+    declared_dependencies = dir_format_pkg.GetDependencies()
+    if options.debug:
+      sanitized_pkgname = pkgname.replace("-", "_")
+      data_file_name = "/var/tmp/checkpkg_test_data_%s.py" % sanitized_pkgname
+      logging.warn("Saving test data to %s." % repr(data_file_name))
+      test_fd = open(data_file_name, "w")
+      print >>test_fd, "# Testing data for %s" % pkgname
+      print >>test_fd, "# $Id$"
+      print >>test_fd, "DATA_PKGNAME                  =", repr(pkgname)
+      print >>test_fd, "DATA_DECLARED_DEPENDENCIES    =", repr(declared_dependencies)
+      print >>test_fd, "DATA_BINARIES_BY_PKGNAME      =", repr(binaries_by_pkgname)
+      print >>test_fd, "DATA_NEEDED_SONAMES_BY_BINARY =", repr(needed_sonames_by_binary)
+      print >>test_fd, "DATA_PKGS_BY_FILENAME         =", repr(pkgs_by_filename)
+      print >>test_fd, "DATA_FILENAMES_BY_SONAME      =", repr(filenames_by_soname)
+      print >>test_fd, "DATA_PKG_BY_ANY_FILENAME      =", repr(pkg_by_any_filename)
+      print >>test_fd, "DATA_LINES_BY_SONAME          =", repr(lines_by_soname)
+      print >>test_fd, "DATA_PKGMAP_CACHE             =", repr(pkgmap.cache)
+      print >>test_fd, "DATA_BINARIES_BY_SONAME       =", repr(binaries_by_soname)
+      print >>test_fd, "DATA_ISALIST                  =", repr(isalist)
+      test_fd.close()
+
+    missing_deps, surplus_deps, orphan_sonames = checkpkg.AnalyzeDependencies(
+        pkgname,
+        declared_dependencies,
+        binaries_by_pkgname,
+        needed_sonames_by_binary,
+        pkgs_by_filename,
+        filenames_by_soname,
+        pkg_by_any_filename)
+    print checker.FormatDepsReport(missing_deps,
+                                   surplus_deps,
+                                   orphan_sonames)
+
+    for soname in orphan_sonames:
+      errors.append(checkpkg.Error("The following soname doesn't belong to "
+                                   "any package: %s" % soname))
+
+  if errors:
+    for error in errors:
+      logging.error(error)
+    sys.exit(1)
+  else:
+    sys.exit(0)
+
+
+if __name__ == '__main__':
+  main()
+
+# vim:set sw=2 ts=2 sts=2 expandtab:
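
What the loop above collects for every binary is essentially the SONAME,
NEEDED and runpath entries of the dynamic section.  A rough command-line
equivalent of a single iteration, using the same dump binary and the same
nawk pattern as the shell code in checkpkg itself; the library path is a
placeholder:

  # The plugin exports LD_NOAUXFLTR=1 before calling dump (see ld.so.1(1),
  # per the comment in the code above).
  LD_NOAUXFLTR=1 /usr/ccs/bin/dump -Lv /opt/csw/lib/libfoo.so.1 \
      | nawk '$2 == "SONAME"  { print "soname: ", $3 }
              $2 == "NEEDED"  { print "needs:  ", $3 }
              $2 == "RUNPATH" { print "runpath:", $3 }'

Roughly speaking, the plugin then inverts these per-binary records into
by-soname indexes and compares the packages that provide each needed soname
(via the cached /var/sadm/install/contents) against the dependencies declared
in the depend file.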

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-obsolete-deps.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,63 +0,0 @@
-#!/opt/csw/bin/python2.6
-# $Id$
-
-"""Makes sure that obsolete packages are not added as dependencies.
-"""
-
-import logging
-import os.path
-import sys
-
-# The following bit of code sets the correct path to Python libraries
-# distributed with GAR.
-path_list = [os.path.dirname(__file__),
-             "..", "..", "lib", "python"]
-sys.path.append(os.path.join(*path_list))
-import checkpkg
-
-OBSOLETE_DEPS = {
-    # "CSWfoo": {
-    #   "hint": "Do this...",
-    #   "url": "http://www.opencsw.org/bugtrack/view.php?id=..."
-    # },
-    "CSWpython-rt": {
-      "hint": "CSWpython-rt is deprecated, use CSWpython instead.",
-      "url": "http://www.opencsw.org/bugtrack/view.php?id=4031"
-    },
-}
-
-def CheckObsoleteDeps(pkg):
-  """Checks for obsolete dependencies."""
-  errors = []
-  deps = set(pkg.GetDependencies())
-  obsolete_pkg_deps = deps.intersection(set(OBSOLETE_DEPS))
-  if obsolete_pkg_deps:
-    for obsolete_pkg in obsolete_pkg_deps:
-      errors.append(
-          checkpkg.PackageError(
-            "Package %s should not depend on %s."
-             % (pkg.pkgname, obsolete_pkg)))
-      if "hint" in OBSOLETE_DEPS[obsolete_pkg]:
-        errors.append(
-            checkpkg.PackageError("Hint: %s" % OBSOLETE_DEPS[obsolete_pkg]["hint"]))
-      if "url" in OBSOLETE_DEPS[obsolete_pkg]:
-      	errors.append(
-      	    checkpkg.PackageError("URL: %s" % OBSOLETE_DEPS[obsolete_pkg]["url"]))
-  return errors
-
-
-def main():
-  options, args = checkpkg.GetOptions()
-  pkgnames = args
-  check_manager = checkpkg.CheckpkgManager("obsolete dependencies",
-                                           options.extractdir,
-                                           pkgnames,
-                                           options.debug)
-  check_manager.RegisterIndividualCheck(CheckObsoleteDeps)
-  exit_code, report = check_manager.Run()
-  print report.strip()
-  sys.exit(exit_code)
-
-
-if __name__ == '__main__':
-  main()

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-obsolete-deps.py)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-obsolete-deps.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,63 @@
+#!/opt/csw/bin/python2.6
+# $Id$
+
+"""Makes sure that obsolete packages are not added as dependencies.
+"""
+
+import logging
+import os.path
+import sys
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+             "..", "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+
+OBSOLETE_DEPS = {
+    # "CSWfoo": {
+    #   "hint": "Do this...",
+    #   "url": "http://www.opencsw.org/bugtrack/view.php?id=..."
+    # },
+    "CSWpython-rt": {
+      "hint": "CSWpython-rt is deprecated, use CSWpython instead.",
+      "url": "http://www.opencsw.org/bugtrack/view.php?id=4031"
+    },
+}
+
+def CheckObsoleteDeps(pkg):
+  """Checks for obsolete dependencies."""
+  errors = []
+  deps = set(pkg.GetDependencies())
+  obsolete_pkg_deps = deps.intersection(set(OBSOLETE_DEPS))
+  if obsolete_pkg_deps:
+    for obsolete_pkg in obsolete_pkg_deps:
+      errors.append(
+          checkpkg.PackageError(
+            "Package %s should not depend on %s."
+             % (pkg.pkgname, obsolete_pkg)))
+      if "hint" in OBSOLETE_DEPS[obsolete_pkg]:
+        errors.append(
+            checkpkg.PackageError("Hint: %s" % OBSOLETE_DEPS[obsolete_pkg]["hint"]))
+      if "url" in OBSOLETE_DEPS[obsolete_pkg]:
+      	errors.append(
+      	    checkpkg.PackageError("URL: %s" % OBSOLETE_DEPS[obsolete_pkg]["url"]))
+  return errors
+
+
+def main():
+  options, args = checkpkg.GetOptions()
+  pkgnames = args
+  check_manager = checkpkg.CheckpkgManager("obsolete dependencies",
+                                           options.extractdir,
+                                           pkgnames,
+                                           options.debug)
+  check_manager.RegisterIndividualCheck(CheckObsoleteDeps)
+  exit_code, report = check_manager.Run()
+  print report.strip()
+  sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+  main()
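
Like every other plugin, this one can also be run by hand against an already
extracted package set to see the report it produces; the extract directory
and package name below are placeholders:

  # checkpkg expands packages into $CHECKPKG_TMPDIR/dissect.$$; point the
  # check at such a directory and list the package names to examine.
  ./checkpkg-obsolete-deps.py -e /var/tmp/dissect.12345 CSWfoo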

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-you-can-write-your-own.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,74 +0,0 @@
-#!/opt/csw/bin/python2.6
-# $Id$
-
-"""This is a dummy module. You can use it as a boilerplate for your own modules.
-
-Copy it and modify.
-"""
-
-import os.path
-import sys
-
-# The following bit of code sets the correct path to Python libraries
-# distributed with GAR.
-path_list = [os.path.dirname(__file__),
-             "..", "..", "lib", "python"]
-sys.path.append(os.path.join(*path_list))
-import checkpkg
-
-# Defining checking functions.
-
-def MyCheckForAsinglePackage(pkg):
-  """Checks an individual package.
-  
-  Gets a DirectoryFormatPackage as an argument, and returns a list of errors.
-
-  Errors should be a list of checkpkg.PackageError objects:
-
-  errors.append(checkpkg.PackageError("There's something wrong."))
-  """
-  errors = []
-  # Checking code for an individual package goes here.  See the
-  # DirectoryFormatPackage class in lib/python/opencsw.py for the available
-  # APIs.
-
-  # Here's how to report an error:
-  something_is_wrong = False
-  if something_is_wrong:
-    errors.append(checkpkg.PackageError("There's something wrong."))
-  return errors
-
-
-def MyCheckForAsetOfPackages(pkgs):
-  """Checks a set of packages.
-
-  Sometimes individual checks aren't enough. If you need to write code which
-  needs to examine multiple packages at the same time, use this function.
-
-  Gets a list of packages, returns a list of errors.
-  """
-  errors = []
-  # Checking code goes here.
-  return errors
-
-
-def main():
-  options, args = checkpkg.GetOptions()
-  pkgnames = args
-  # CheckpkgManager class abstracts away things such as the collection of
-  # results.
-  check_manager = checkpkg.CheckpkgManager("a template of a checkpkg module",
-                                           options.extractdir,
-                                           pkgnames,
-                                           options.debug)
-  # Registering functions defined above.
-  check_manager.RegisterIndividualCheck(MyCheckForAsinglePackage)
-  check_manager.RegisterSetCheck(MyCheckForAsetOfPackages)
-  # Running the checks, reporting and exiting.
-  exit_code, report = check_manager.Run()
-  print report.strip()
-  sys.exit(exit_code)
-
-
-if __name__ == '__main__':
-  main()

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/checkpkg-you-can-write-your-own.py)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/checkpkg-you-can-write-your-own.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,74 @@
+#!/opt/csw/bin/python2.6
+# $Id$
+
+"""This is a dummy module. You can use it as a boilerplate for your own modules.
+
+Copy it and modify.
+"""
+
+import os.path
+import sys
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+             "..", "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+
+# Defining checking functions.
+
+def MyCheckForAsinglePackage(pkg):
+  """Checks an individual package.
+  
+  Gets a DirectoryFormatPackage as an argument, and returns a list of errors.
+
+  Errors should be a list of checkpkg.PackageError objects:
+
+  errors.append(checkpkg.PackageError("There's something wrong."))
+  """
+  errors = []
+  # Checking code for an individual package goes here.  See the
+  # DirectoryFormatPackage class in lib/python/opencsw.py for the available
+  # APIs.
+
+  # Here's how to report an error:
+  something_is_wrong = False
+  if something_is_wrong:
+    errors.append(checkpkg.PackageError("There's something wrong."))
+  return errors
+
+
+def MyCheckForAsetOfPackages(pkgs):
+  """Checks a set of packages.
+
+  Sometimes individual checks aren't enough. If you need to write code which
+  needs to examine multiple packages at the same time, use this function.
+
+  Gets a list of packages, returns a list of errors.
+  """
+  errors = []
+  # Checking code goes here.
+  return errors
+
+
+def main():
+  options, args = checkpkg.GetOptions()
+  pkgnames = args
+  # CheckpkgManager class abstracts away things such as the collection of
+  # results.
+  check_manager = checkpkg.CheckpkgManager("a template of a checkpkg module",
+                                           options.extractdir,
+                                           pkgnames,
+                                           options.debug)
+  # Registering functions defined above.
+  check_manager.RegisterIndividualCheck(MyCheckForAsinglePackage)
+  check_manager.RegisterSetCheck(MyCheckForAsetOfPackages)
+  # Running the checks, reporting and exiting.
+  exit_code, report = check_manager.Run()
+  print report.strip()
+  sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+  main()

Deleted: csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg.d/update_contents_cache.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,28 +0,0 @@
-#!/opt/csw/bin/python2.6
-#
-# $Id$
-#
-# This file only creates an instance of SystemPkgmap in order to update the
-# package cache (if necessary), and display the information about the update.
-
-import os
-import os.path
-import sys
-import logging
-
-# The following bit of code sets the correct path to Python libraries
-# distributed with GAR.
-path_list = [os.getcwd(),
-             os.path.split(sys.argv[0])[0],
-             "..", "..", "lib", "python"]
-sys.path.append(os.path.join(*path_list))
-import checkpkg
-
-def main():
-  print "Checking if the package cache is up to date."
-  logging.basicConfig(level=logging.INFO)
-  test_pkgmap = checkpkg.SystemPkgmap()
-
-
-if __name__ == '__main__':
-	main()

Copied: csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py (from rev 8229, csw/mgar/gar/v2/bin/checkpkg.d/update_contents_cache.py)
===================================================================
--- csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/bin/checkpkg.d/update_contents_cache.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,28 @@
+#!/opt/csw/bin/python2.6
+#
+# $Id$
+#
+# This file only creates an instance of SystemPkgmap in order to update the
+# package cache (if necessary), and display the information about the update.
+
+import os
+import os.path
+import sys
+import logging
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.getcwd(),
+             os.path.split(sys.argv[0])[0],
+             "..", "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+
+def main():
+  print "Checking if the package cache is up to date."
+  logging.basicConfig(level=logging.INFO)
+  test_pkgmap = checkpkg.SystemPkgmap()
+
+
+if __name__ == '__main__':
+	main()
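
checkpkg runs this script unconditionally before the modular checks; it can
also be run on its own to refresh the cached copy of
/var/sadm/install/contents ahead of a longer session (path relative to the
GAR checkout, as laid out above):

  # Only updates the SystemPkgmap cache if it is out of date.
  ./bin/checkpkg.d/update_contents_cache.py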

Modified: csw/mgar/gar/v2-git/categories/cpan/category.mk
===================================================================
--- csw/mgar/gar/v2-git/categories/cpan/category.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/categories/cpan/category.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -88,9 +88,10 @@
 	@( cd $* ; $(TEST_ENV) ./Build test )
 	@$(MAKECOOKIE)
 
+PERLBUILD_INSTALL_ARGS ?= destdir=$(DESTDIR) $(EXTRA_PERLBUILD_INSTALL_ARGS)
 install-%/Build:
 	@echo " ==> Running Build install in $*"
-	@( cd $* ; $(INSTALL_ENV) ./Build install destdir=$(DESTDIR) )
+	( cd $* ; $(INSTALL_ENV) ./Build install $(PERLBUILD_INSTALL_ARGS) )
 	@$(MAKECOOKIE)
 
 # Check for a CPAN module version update
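
The new PERLBUILD_INSTALL_ARGS variable makes the "./Build install" argument
list overridable: destdir=$(DESTDIR) stays the default and anything put into
EXTRA_PERLBUILD_INSTALL_ARGS is appended.  For example, from the command line
(uninst=1 is only an illustration of an argument Module::Build's install
action accepts):

  # Append an extra argument while keeping the default destdir:
  gmake install EXTRA_PERLBUILD_INSTALL_ARGS="uninst=1"

  # Or replace the whole argument list:
  gmake install PERLBUILD_INSTALL_ARGS="destdir=/tmp/stage"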

Modified: csw/mgar/gar/v2-git/categories/gnome/category.mk
===================================================================
--- csw/mgar/gar/v2-git/categories/gnome/category.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/categories/gnome/category.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1 +1,13 @@
+# pkg-config options
+EXTRA_PKG_CONFIG_PATH += /opt/csw/X11/lib/pkgconfig
+
+MSGFMT= /opt/csw/bin/gmsgfmt
+MSGMERGE= /opt/csw/bin/gmsgmerge
+XGETTEXT = /opt/csw/bin/gxgettext
+GETTEXT = /opt/csw/bin/ggettext
+export MSGMERGE
+export MSGFMT
+export XGETTEXT
+export GETTEXT
+
 include gar/gar.mk

Modified: csw/mgar/gar/v2-git/categories/kde/category.mk
===================================================================
--- csw/mgar/gar/v2-git/categories/kde/category.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/categories/kde/category.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,7 +1,7 @@
 
 # KDE
 KDE_ROOT      = http://download.kde.org
-KDE_VERSION   = 3.5.2
+KDE_VERSION   = 3.5.10
 KDE_DIST      = stable
 KDE_MIRROR    = $(KDE_ROOT)/$(KDE_DIST)/$(KDE_VERSION)/src/
 

Deleted: csw/mgar/gar/v2-git/categories/kde4/category.mk
===================================================================
--- csw/mgar/gar/v2/categories/kde4/category.mk	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/categories/kde4/category.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,16 +0,0 @@
-
-# KDE
-KDE_ROOT      = http://download.kde.org
-KDE_VERSION   = 4.3.4
-KDE_DIST      = stable
-KDE_MIRROR    = $(KDE_ROOT)/$(KDE_DIST)/$(KDE_VERSION)/src/
-
-MASTER_SITES ?= $(KDE_MIRROR)
-GARVERSION   ?= $(KDE_VERSION)
-PKGDIST      ?= $(DISTNAME)-$(GARVERSION).tar.bz2
-DISTFILES    += $(PKGDIST)
-
-# Compiler
-GARCOMPILER = GNU
-
-include gar/gar.mk

Copied: csw/mgar/gar/v2-git/categories/kde4/category.mk (from rev 8229, csw/mgar/gar/v2/categories/kde4/category.mk)
===================================================================
--- csw/mgar/gar/v2-git/categories/kde4/category.mk	                        (rev 0)
+++ csw/mgar/gar/v2-git/categories/kde4/category.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,16 @@
+
+# KDE
+KDE_ROOT      = http://download.kde.org
+KDE_VERSION   = 4.3.4
+KDE_DIST      = stable
+KDE_MIRROR    = $(KDE_ROOT)/$(KDE_DIST)/$(KDE_VERSION)/src/
+
+MASTER_SITES ?= $(KDE_MIRROR)
+GARVERSION   ?= $(KDE_VERSION)
+PKGDIST      ?= $(DISTNAME)-$(GARVERSION).tar.bz2
+DISTFILES    += $(PKGDIST)
+
+# Compiler
+GARCOMPILER = GNU
+
+include gar/gar.mk

Modified: csw/mgar/gar/v2-git/gar.conf.mk
===================================================================
--- csw/mgar/gar/v2-git/gar.conf.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/gar.conf.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -168,7 +168,21 @@
 COMMON_PKG_DEPENDS ?= CSWcommon
 
 # These are the core packages which must be installed for GAR to function correctly
-DEF_BASE_PKGS = CSWgmake CSWgtar CSWggrep CSWdiffutils CSWgfile CSWtextutils CSWwget CSWfindutils CSWgsed CSWgawk CSWbzip2
+
+DEF_BASE_PKGS += CSWbzip2
+DEF_BASE_PKGS += CSWdiffutils
+DEF_BASE_PKGS += CSWfindutils
+DEF_BASE_PKGS += CSWgawk
+DEF_BASE_PKGS += CSWgfile
+DEF_BASE_PKGS += CSWggrep
+DEF_BASE_PKGS += CSWgmake
+DEF_BASE_PKGS += CSWgsed
+DEF_BASE_PKGS += CSWgtar
+DEF_BASE_PKGS += CSWpy-cheetah
+DEF_BASE_PKGS += CSWpython
+DEF_BASE_PKGS += CSWtextutils
+DEF_BASE_PKGS += CSWwget
+
 ifdef GIT_REPOS
 # netcat and bash are for the gitproxy script.
 DEF_BASE_PKGS += CSWgit CSWnetcat
@@ -547,6 +561,10 @@
 GOOGLE_PROJECT ?= $(GARNAME)
 GOOGLE_MIRROR  ?= http://$(GOOGLE_PROJECT).googlecode.com/files/
 
+# Berlios
+BERLIOS_PROJECT ?= $(GARNAME)
+BERLIOS_MIRROR ?= http://download.berlios.de/$(BERLIOS_PROJECT)/ http://download2.berlios.de/$(BERLIOS_PROJECT)/
+
 # GNU
 GNU_SITE     = http://mirrors.kernel.org
 GNU_GNUROOT  = $(GNU_SITE)/gnu

Modified: csw/mgar/gar/v2-git/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2-git/gar.lib.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/gar.lib.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -509,17 +509,17 @@
 # build from a standard gnu-style makefile's default rule.
 build-%/Makefile:
 	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_VARS),$(TTT)="$(BUILD_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
 	@$(MAKECOOKIE)
 
 build-%/makefile:
 	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_VARS),$(TTT)="$(BUILD_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
 	@$(MAKECOOKIE)
 
 build-%/GNUmakefile:
 	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_VARS),$(TTT)="$(BUILD_OVERRIDE_VAR_$(TTT))")  $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
 	@$(MAKECOOKIE)
 
 build-%/Jamfile:
@@ -564,17 +564,17 @@
 # Run tests on pre-built sources
 test-%/Makefile:
 	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_VARS),$(TTT)="$(TEST_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
 	@$(MAKECOOKIE)
 
 test-%/makefile:
 	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_VARS),$(TTT)="$(TEST_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
 	@$(MAKECOOKIE)
 
 test-%/GNUmakefile:
 	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_VARS),$(TTT)="$(TEST_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
 	@$(MAKECOOKIE)
 
 # Ruby makefiles

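The build-%/ and test-%/ rules now also pass per-variable overrides: every name listed in BUILD_OVERRIDE_VARS (or TEST_OVERRIDE_VARS) is handed to the sub-make as NAME="value", with the value taken from the matching BUILD_OVERRIDE_VAR_<name> setting. A small sketch of what that foreach expands to, using hypothetical variable names and values:

    # Sketch: emulate the $(foreach ...) expansion that appends NAME="value"
    # arguments to the sub-make call.  CFLAGS/prefix and their values are
    # hypothetical; real recipes set BUILD_OVERRIDE_VARS and the matching
    # BUILD_OVERRIDE_VAR_<name> variables.
    BUILD_OVERRIDE_VARS = ["CFLAGS", "prefix"]
    BUILD_OVERRIDE_VAR = {"CFLAGS": "-O2 -pipe", "prefix": "/opt/csw"}

    override_args = ['%s="%s"' % (name, BUILD_OVERRIDE_VAR[name])
                     for name in BUILD_OVERRIDE_VARS]
    print("gmake " + " ".join(override_args) + " -C build-foo")
    # gmake CFLAGS="-O2 -pipe" prefix="/opt/csw" -C build-foo
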
Modified: csw/mgar/gar/v2-git/gar.mk
===================================================================
--- csw/mgar/gar/v2-git/gar.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/gar.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -356,9 +356,11 @@
 	@$(foreach COOKIEFILE,$(CHECKSUM_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
 
 # makesum		- Generate distinfo (only do this for your own ports!).
+GARCHIVE_TARGETS =  $(addprefix $(GARCHIVEDIR)/,$(filter-out $(ALLFILES_DYNSCRIPTS), $(ALLFILES)))
+
 MAKESUM_TARGETS =  $(filter-out $(_NOCHECKSUM) $(NOCHECKSUM),$(ALLFILES))
 
-makesum: fetch $(addprefix $(DOWNLOADDIR)/,$(MAKESUM_TARGETS))
+makesum: fetch $(addprefix $(DOWNLOADDIR)/,$(MAKESUM_TARGETS)) $(GARCHIVE_TARGETS)
 	@if test "x$(MAKESUM_TARGETS)" != "x "; then \
 		(cd $(DOWNLOADDIR) && gmd5sum $(MAKESUM_TARGETS)) > $(CHECKSUM_FILE) ; \
 		echo "Checksums made for $(MAKESUM_TARGETS)" ; \
@@ -368,8 +370,6 @@
 # I am always typing this by mistake
 makesums: makesum
 
-GARCHIVE_TARGETS =  $(addprefix $(GARCHIVEDIR)/,$(ALLFILES))
-
 garchive: checksum $(GARCHIVE_TARGETS) ;
 
 # extract		- Unpacks $(DISTFILES) into $(EXTRACTDIR) (patches are "zcatted" into the patch program)
@@ -631,7 +631,9 @@
 endif
 
 ifeq ($(NEEDED_ISAS),$(ISA_DEFAULT))
-MERGE_SCRIPTS_isa-$(ISA_DEFAULT) ?= copy-all $(EXTRA_MERGE_SCRIPTS_$(ISA_DEFAULT)) $(EXTRA_MERGE_SCRIPTS)
+MERGE_SCRIPTS_isa-default ?= copy-all $(EXTRA_MERGE_SCRIPTS_$(ISA_DEFAULT)) $(EXTRA_MERGE_SCRIPTS)
+MERGE_SCRIPTS_isa-$(ISA_DEFAULT) ?= $(MERGE_SCRIPTS_isa-default)
+MERGE_SCRIPTS_$(MODULATION) ?= $(MERGE_SCRIPTS_$(MODULATION_ISACOLLAPSED))
 else
 ISAEXEC_DIRS ?= $(if $(NO_ISAEXEC),,$(bindir) $(sbindir) $(libexecdir))
 MERGE_DIRS_isa-default ?= $(EXTRA_MERGE_DIRS) $(EXTRA_MERGE_DIRS_isa-$(ISA_DEFAULT))
@@ -887,7 +889,7 @@
 
 # Update inter-package depends
 makedepend:
-	@for gspec in `gfind $(CURDIR) -type f -name '*.gspec' | ggrep files`; do \
+	$(_DBG)for gspec in `gfind $(CURDIR) -type f -name '*.gspec' | ggrep files`; do \
 		pkgname=`basename $$gspec .gspec` ; \
 		pkgfiles=`dirname $$gspec` ; \
 		pkgdir=`dirname $$pkgfiles` ; \

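In gar.mk, GARCHIVE_TARGETS now excludes dynamically generated scripts (ALLFILES_DYNSCRIPTS), and makesum populates the archive as a side effect, so checksummed sources land in GARCHIVEDIR right away. A sketch of the filter, with hypothetical file names and archive directory:

    # Sketch: the effect of filtering ALLFILES_DYNSCRIPTS out of ALLFILES
    # before prefixing GARCHIVEDIR.  All names below are hypothetical.
    GARCHIVEDIR = "/home/src/garchive"
    ALLFILES = ["foo-1.0.tar.gz", "CSWfoo.gspec", "0001-fix-build.patch"]
    ALLFILES_DYNSCRIPTS = ["CSWfoo.gspec"]

    garchive_targets = ["%s/%s" % (GARCHIVEDIR, f)
                        for f in ALLFILES
                        if f not in ALLFILES_DYNSCRIPTS]
    print(garchive_targets)
    # ['/home/src/garchive/foo-1.0.tar.gz', '/home/src/garchive/0001-fix-build.patch']
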
Modified: csw/mgar/gar/v2-git/gar.pkg.mk
===================================================================
--- csw/mgar/gar/v2-git/gar.pkg.mk	2010-01-30 10:50:34 UTC (rev 8231)
+++ csw/mgar/gar/v2-git/gar.pkg.mk	2010-01-30 22:14:48 UTC (rev 8232)
@@ -141,7 +141,7 @@
 SPKG_DESC      ?= $(DESCRIPTION)
 SPKG_VERSION   ?= $(GARVERSION)
 SPKG_CATEGORY  ?= application
-SPKG_SOURCEURL ?= $(firstword $(MASTER_SITES))
+SPKG_SOURCEURL ?= $(firstword $(VENDOR_URL) $(MASTER_SITES) $(GIT_REPOS))
 SPKG_VENDOR    ?= $(SPKG_SOURCEURL) packaged for CSW by $(SPKG_PACKAGER)
 SPKG_PSTAMP    ?= $(LOGNAME)@$(shell hostname)-$(call _REVISION)-$(shell date '+%Y%m%d%H%M%S')
 SPKG_BASEDIR   ?= $(prefix)
@@ -154,17 +154,12 @@
 SPKG_PKGROOT   ?= $(PKGROOT)
 SPKG_PKGBASE   ?= $(PKGROOT)
 SPKG_WORKDIR   ?= $(CURDIR)/$(WORKDIR)
+SPKG_TMPDIR    ?= /tmp
 
 SPKG_DEPEND_DB  = $(GARDIR)/csw/depend.db
 
 SPKG_PKGFILE ?= %{bitname}-%{SPKG_VERSION},%{SPKG_REVSTAMP}-%{SPKG_OSNAME}-%{arch}-$(or $(filter $(call _REVISION),UNCOMMITTED NOTVERSIONED NOSVN),CSW).pkg
 
-# Handle cswclassutils
-# append $2 to SPKG_CLASSES if $1 is non-null
-define _spkg_cond_add
-$(SPKG_CLASSES) $(if $($(1)),$(if $(filter $(2),$(SPKG_CLASSES)),,$(2)))
-endef
-
 MIGRATECONF ?= $(strip $(foreach S,$(SPKG_SPECS),$(if $(or $(MIGRATE_FILES_$S),$(MIGRATE_FILES)),/etc/opt/csw/pkg/$S/cswmigrateconf)))
 
 # It is NOT sufficient to change the paths here; they must also be adjusted in merge-*
@@ -176,21 +171,6 @@
 INETDCONF += $(_INETDCONF_FILES)
 ETCSERVICES += $(_ETCSERVICES_FILES)
 
-# NOTE: Order _can_  be important here.  cswinitsmf and cswinetd should
-#	always be the last two added.  The reason for this is that
-#	you need to ensure any binaries and config files are already on disk
-#	and able to be consumed by a service that might be started.
-SPKG_CLASSES := $(call _spkg_cond_add,MIGRATECONF,cswmigrateconf)
-SPKG_CLASSES := $(call _spkg_cond_add,SAMPLECONF,cswcpsampleconf)
-SPKG_CLASSES := $(call _spkg_cond_add,PRESERVECONF,cswpreserveconf)
-SPKG_CLASSES := $(call _spkg_cond_add,ETCSERVICES,cswetcservices)
-SPKG_CLASSES := $(call _spkg_cond_add,USERGROUP,cswusergroup)
-SPKG_CLASSES := $(call _spkg_cond_add,CRONTABS,cswcrontab)
-SPKG_CLASSES := $(call _spkg_cond_add,PYCOMPILE,cswpycompile)
-SPKG_CLASSES := $(call _spkg_cond_add,INETDCONF,cswinetd)
-SPKG_CLASSES := $(call _spkg_cond_add,INITSMF,cswinitsmf)
-
-
 # This is the default path for texinfo pages to be picked up. Extend or replace as necessary.
 TEXINFO ?= $(infodir)/.*\.info(?:-\d+)? $(EXTRA_TEXINFO)
 
@@ -208,13 +188,27 @@
 		$(foreach FILE,$(TEXINFO),$$F[1] = "cswtexinfo" if( $$F[2] =~ m(^$(FILE)$$) );)\
 		print join(" ", at F),"\n";'
 
-# The TEXINFO dependency is handled dynamically by looking at the prototype for matching files
-ifneq ($(MIGRATECONF)$(SAMPLECONF)$(PRESERVECONF)$(ETCSERVICES)$(INETDCONF)$(INITSMF)$(USERGROUP)$(PYCOMPILE),)
-_EXTRA_GAR_PKGS += CSWcswclassutils
+# If you add another filter above, also add the class to this list. It is used
+# to detect if a package needs to depend on CSWcswclassutils by looking at
+# files belonging to one of these in the prototype.
+
+# NOTE: Order _can_  be important here.  cswinitsmf and cswinetd should
+#	always be the last two added.  The reason for this is that
+#	you need to ensure any binaries and config files are already on disk
+#	and able to be consumed by a service that might be started.
+
+_CSWCLASSES  = cswmigrateconf cswcpsampleconf cswpreserveconf
+_CSWCLASSES += cswetcservices
+_CSWCLASSES += cswusergroup ugfiles
+_CSWCLASSES += cswcrontab
+_CSWCLASSES += cswpycompile
+_CSWCLASSES += cswinetd
+_CSWCLASSES += cswinitsmf
+_CSWCLASSES += cswtexinfo
+
 # Make sure the configuration files always have a .CSW suffix and rename the
 # configuration files to this if necessary during merge.
 _EXTRA_PAX_ARGS += $(foreach FILE,$(SAMPLECONF:%\.CSW=%) $(PRESERVECONF:%\.CSW=%),-s ",^\.\($(FILE)\)$$,.\1\.CSW,p")
-endif
 
 PKGGET_DESTDIR ?=
 
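With the _spkg_cond_add chain gone, _CSWCLASSES is the single list of cswclassutils classes; the %.pkginfo and %.depend rules below scan the prototype for these class names to build SPKG_CLASSES and to decide whether a dependency on CSWcswclassutils is needed. A Python sketch of that scan (the real rules use perl one-liners; the prototype path is hypothetical):

    # Sketch: collect the cswclassutils classes used by a prototype file and
    # decide whether the package needs to depend on CSWcswclassutils.
    # "work/CSWfoo.prototype" is a hypothetical path.
    CSWCLASSES = ["cswmigrateconf", "cswcpsampleconf", "cswpreserveconf",
                  "cswetcservices", "cswusergroup", "ugfiles", "cswcrontab",
                  "cswpycompile", "cswinetd", "cswinitsmf", "cswtexinfo"]

    def classes_in_prototype(path):
      used = set()
      with open(path) as proto:
        for line in proto:
          fields = line.split()
          # prototype lines: <ftype> <class> <path> <mode> <owner> <group>
          if len(fields) > 1 and fields[1] in CSWCLASSES:
            used.add(fields[1])
      return used

    used = classes_in_prototype("work/CSWfoo.prototype")
    print("SPKG_CLASSES += " + " ".join(sorted(used)))
    print("needs CSWcswclassutils: %s" % bool(used))
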
@@ -372,7 +366,8 @@
 $(PROTOTYPE): $(WORKDIR) merge
 	$(_DBG)cswproto -c $(GARDIR)/etc/commondirs-$(GARCH) -r $(PKGROOT) $(PKGROOT)=/ >$@
 
-# The pathfilter rules are as follows:
+# pathfilter lives in bin/pathfilter and takes care of including/excluding paths from
+# a prototype (see "perldoc bin/pathfilter"). We employ it here to:
 # - include license for current package
 # - exclude licenses for all other packages
 # - if other includes are given, only include these files
@@ -427,7 +422,7 @@
 # The dependencies to CSWcswclassutils and CSWtexinfo are only added if there are files
 # actually matching the _TEXINFO_FILTER. This is done at the prototype-level.
 $(WORKDIR)/%.depend: $(WORKDIR)/$*.prototype
-$(WORKDIR)/%.depend: _EXTRA_GAR_PKGS += $(if $(shell cat $(WORKDIR)/$*.prototype | perl -ane '$(foreach FILE,$(TEXINFO),print "$$F[2]\n" if( $$F[2] =~ m(^$(FILE)$$) );)'),CSWcswclassutils)
+$(WORKDIR)/%.depend: _EXTRA_GAR_PKGS += $(if $(strip $(shell cat $(WORKDIR)/$*.prototype | perl -ane '$(foreach C,$(_CSWCLASSES),print "$C\n" if( $$F[1] eq "$C");)')),CSWcswclassutils)
 
 $(WORKDIR)/%.depend: $(WORKDIR)
 	$(_DBG)$(if $(_EXTRA_GAR_PKGS)$(REQUIRED_PKGS_$*)$(REQUIRED_PKGS)$(INCOMPATIBLE_PKGS)$(INCOMPATIBLE_PKGS_$*), \
@@ -532,7 +527,7 @@
 
 # The texinfo filter has been taken out of the normal filters as TEXINFO has a default.
 $(WORKDIR)/%.pkginfo: $(WORKDIR)/%.prototype
-$(WORKDIR)/%.pkginfo: SPKG_CLASSES += $(if $(shell cat $(WORKDIR)/$*.prototype | perl -ane '$(foreach FILE,$(TEXINFO),print "$$F[2]\n" if( $$F[2] =~ m(^$(FILE)$$) );)'),cswtexinfo)
+$(WORKDIR)/%.pkginfo: SPKG_CLASSES += $(shell cat $(WORKDIR)/$*.prototype | perl -e 'while(<>){@F=split;$$c{$$F[1]}++};$(foreach C,$(_CSWCLASSES),print "$C\n" if( $$c{$C});)')
 
 $(WORKDIR)/%.pkginfo: $(WORKDIR)
 	$(_DBG)(echo "PKG=$*"; \
@@ -710,20 +705,34 @@
 	@echo
 	@$(DONADA)
 
+dirpackage: _DIRPACKAGE=1
+dirpackage: ENABLE_CHECK=
+dirpackage: _package
+	@echo "The following packages have been built:"
+	@echo
+	@$(MAKE) -s PLATFORM=$(PLATFORM) _dirpkgshow
+	@echo
+	@$(DONADA)
+
+_dirpkgshow:
+	@$(foreach SPEC,$(_PKG_SPECS),echo "  $(SPKG_SPOOLDIR)/$(SPEC)";)
+
 _pkgshow:
 	@$(foreach SPEC,$(_PKG_SPECS),printf "  %-20s %s\n"  $(SPEC) $(SPKG_EXPORT)/$(shell $(call _PKG_ENV,$(SPEC)) $(GARBIN)/mkpackage -qs $(WORKDIR)/$(SPEC).gspec -D pkgfile).gz;)
 
 # The dynamic pkginfo is only generated for dynamic gspec-files
 package-%: $(WORKDIR)/%.gspec $(WORKDIR)/%.prototype-$(GARCH) $(WORKDIR)/%.depend $(if $(findstring %.gspec,$(DISTFILES)),,$(WORKDIR)/%.pkginfo)
 	@echo " ==> Processing $*.gspec"
-	$(_DBG)( $(call _PKG_ENV,$*) mkpackage --spec $(WORKDIR)/$*.gspec \
+	$(_DBG)( $(call _PKG_ENV,$*) mkpackage \
+						 --spec $(WORKDIR)/$*.gspec \
 						 --spooldir $(SPKG_SPOOLDIR) \
+						 --tmpdir   $(SPKG_TMPDIR)  \
 						 --destdir  $(SPKG_EXPORT) \
 						 --workdir  $(SPKG_WORKDIR) \
 						 --pkgbase  $(SPKG_PKGBASE) \
 						 --pkgroot  $(SPKG_PKGROOT) \
 						-v WORKDIR_FIRSTMOD=../build-$(firstword $(MODULATIONS)) \
-						 --compress \
+						 $(if $(_DIRPACKAGE),--notransfer --nocompress,--compress) \
 						 $(MKPACKAGE_ARGS) ) || exit 2
 	@$(MAKECOOKIE)
 
@@ -733,7 +742,7 @@
 # pkgcheck - check if the package is compliant
 #
 pkgcheck: $(foreach SPEC,$(_PKG_SPECS),package-$(SPEC))
-	$(_DBG)( LC_ALL=C $(GARBIN)/checkpkg $(foreach SPEC,$(_PKG_SPECS),$(SPKG_EXPORT)/`$(call _PKG_ENV,$(SPEC)) mkpackage -qs $(WORKDIR)/$(SPEC).gspec -D pkgfile`.gz ) || exit 2;)
+	$(_DBG)( LC_ALL=C $(GARBIN)/checkpkg $(foreach SPEC,$(_PKG_SPECS),$(SPKG_EXPORT)/`$(call _PKG_ENV,$(SPEC)) mkpackage --tmpdir $(SPKG_TMPDIR) -qs $(WORKDIR)/$(SPEC).gspec -D pkgfile`.gz ) || exit 2;)
 	@$(MAKECOOKIE)
 
 pkgcheck-p:
@@ -757,6 +766,8 @@
 
 repackage: pkgreset package
 
+redirpackage: pkgreset dirpackage
+
 # This rule automatically logs into every host where a package for this software should
 # be built. It is especially suited for automated build bots.
 platforms:
@@ -783,6 +794,17 @@
 	)
 	@$(MAKECOOKIE)
 
+platforms-%:
+	$(foreach P,$(PACKAGING_PLATFORMS),\
+		$(if $(PACKAGING_HOST_$P),\
+			$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
+				$(MAKE) PLATFORM=$P $* && ,\
+				$(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -C $(CURDIR) PLATFORM=$P $*" && \
+			),\
+			$(error *** No host has been defined for platform $P)\
+		)\
+	) true
+
 replatforms: spotless platforms
 
 # Print relevant information about the platform

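The new platforms-% pattern rule runs an arbitrary target for every entry in PACKAGING_PLATFORMS, locally when the current host is that platform's packaging host and over ssh otherwise. A rough Python equivalent of that dispatch, with hypothetical platform and host names:

    # Sketch of the dispatch behind platforms-%.  Platform and host names are
    # hypothetical; real values come from PACKAGING_PLATFORMS and the
    # per-platform PACKAGING_HOST_<platform> settings.
    import socket
    import subprocess

    PACKAGING_PLATFORMS = ["solaris8-sparc", "solaris8-i386"]
    PACKAGING_HOST = {"solaris8-sparc": "build8s", "solaris8-i386": "build8x"}
    THISHOST = socket.gethostname()

    def run_on_all_platforms(target, cwd="."):
      for platform in PACKAGING_PLATFORMS:
        host = PACKAGING_HOST.get(platform)
        if not host:
          raise SystemExit("*** No host has been defined for platform %s" % platform)
        if host == THISHOST:
          cmd = ["gmake", "PLATFORM=%s" % platform, target]
        else:
          cmd = ["ssh", "-t", host,
                 "PATH=$PATH:/opt/csw/bin gmake -C %s PLATFORM=%s %s"
                 % (cwd, platform, target)]
        subprocess.check_call(cmd)

    # e.g. the equivalent of "gmake platforms-package":
    # run_on_all_platforms("package")
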
Deleted: csw/mgar/gar/v2-git/lib/python/checkpkg.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg.py	2010-01-30 08:02:55 UTC (rev 8229)
+++ csw/mgar/gar/v2-git/lib/python/checkpkg.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -1,622 +0,0 @@
-# $Id$
-#
-# This is the checkpkg library, common for all checkpkg tests written in
-# Python.
-
-import itertools
-import logging
-import optparse
-import os
-import os.path
-import re
-import socket
-import sqlite3
-import subprocess
-from Cheetah import Template
-import opencsw
-
-SYSTEM_PKGMAP = "/var/sadm/install/contents"
-WS_RE = re.compile(r"\s+")
-NEEDED_SONAMES = "needed sonames"
-RUNPATH = "runpath"
-SONAME = "soname"
-CONFIG_MTIME = "mtime"
-DO_NOT_REPORT_SURPLUS = set([u"CSWcommon", u"CSWcswclassutils", u"CSWisaexec"])
-DO_NOT_REPORT_MISSING = set([u"SUNWlibC", u"SUNWcsl", u"SUNWlibms",
-                             u"*SUNWcslr", u"*SUNWlibC", u"*SUNWlibms",
-                             u"SUNWcslx"])
-SYSTEM_SYMLINKS = (
-    ("/opt/csw/bdb4", ["/opt/csw/bdb42"]),
-    ("/64", ["/amd64", "/sparcv9"]),
-    ("/opt/csw/lib/i386", ["/opt/csw/lib"]),
-)
-
-# This shared library is present on Solaris 10 on amd64, but it's missing on
-# Solaris 8 on i386.  It's okay if it's missing.
-ALLOWED_ORPHAN_SONAMES = set([u"libm.so.2"])
-DEPENDENCY_FILENAME_REGEXES = (
-    (r".*\.pl", u"CSWperl"),
-    (r".*\.pm", u"CSWperl"),
-    (r".*\.py", u"CSWpython"),
-    (r".*\.rb", u"CSWruby"),
-)
-
-REPORT_TMPL = u"""# $pkgname:
-#if $missing_deps
-# SUGGESTION: you may want to add some or all of the following as depends:
-#    (Feel free to ignore SUNW or SPRO packages)
-#for $pkg in $sorted($missing_deps)
-REQUIRED_PKGS_$pkgname += $pkg
-#end for
-#end if
-#if $surplus_deps
-# The following dependencies might be unnecessary:
-#for $pkg in $sorted($surplus_deps)
-# ? $pkg
-#end for
-#end if
-#if $orphan_sonames
-# The following required sonames would not be found at runtime:
-#for $soname in $sorted($orphan_sonames)
-# ! $soname
-#end for
-#end if
-#if not $missing_deps and not $surplus_deps and not $orphan_sonames
-# + Dependencies of $pkgname look good.
-#end if
-"""
-
-ERROR_REPORT_TMPL = u"""#if $errors
-ERROR: One or more errors have been found by $name.
-#for $pkgname in $errors
-$pkgname:
-#for $error in $errors[$pkgname]
-  $repr($error)
-#end for
-#end for
-#else
-#if $debug
-OK: $name found no problems.
-#end if
-#end if
-"""
-
-class Error(Exception):
-  pass
-
-
-class ConfigurationError(Error):
-  pass
-
-
-class PackageError(Error):
-  pass
-
-
-def GetOptions():
-  parser = optparse.OptionParser()
-  parser.add_option("-e", dest="extractdir",
-                    help="The directory into which the package has been extracted")
-  parser.add_option("-d", "--debug", dest="debug",
-                    default=False, action="store_true",
-                    help="Turn on debugging messages")
-  (options, args) = parser.parse_args()
-  if not options.extractdir:
-    raise ConfigurationError("ERROR: -e option is missing.")
-  # Using set() to make the arguments unique.
-  return options, set(args)
-
-
-class CheckpkgBase(object):
-  """This class has functionality overlapping with DirectoryFormatPackage
-  from the opencsw.py library. The classes should be merged.
-  """
-
-  def __init__(self, extractdir, pkgname):
-    self.extractdir = extractdir
-    self.pkgname = pkgname
-    self.pkgpath = os.path.join(self.extractdir, self.pkgname)
-
-  def CheckPkgpathExists(self):
-    if not os.path.isdir(self.pkgpath):
-      raise PackageError("%s does not exist or is not a directory"
-                         % self.pkgpath)
-
-  def ListBinaries(self):
-    """Shells out to list all the binaries from a given package.
-
-    Original checkpkg code:
-
-    # #########################################
-    # # find all executables and dynamic libs,and list their filenames.
-    # listbinaries() {
-    #   if [ ! -d $1 ] ; then
-    #     print errmsg $1 not a directory
-    #     rm -rf $EXTRACTDIR
-    #     exit 1
-    #   fi
-    # 
-    #   find $1 -print | xargs file |grep ELF |nawk -F: '{print $1}'
-    # }
-    """
-    self.CheckPkgpathExists()
-    find_tmpl = "find %s -print | xargs file | grep ELF | nawk -F: '{print $1}'"
-    find_proc = subprocess.Popen(find_tmpl % self.pkgpath,
-                                 shell=True, stdout=subprocess.PIPE)
-    stdout, stderr = find_proc.communicate()
-    ret = find_proc.wait()
-    if ret:
-      logging.error("The find command returned an error.")
-    return stdout.splitlines()
-
-  def GetAllFilenames(self):
-    self.CheckPkgpathExists()
-    file_basenames = []
-    for root, dirs, files in os.walk(self.pkgpath):
-      file_basenames.extend(files)
-    return file_basenames
-
-  def FormatDepsReport(self, missing_deps, surplus_deps, orphan_sonames):
-    """To be removed."""
-    namespace = {
-        "pkgname": self.pkgname,
-        "missing_deps": missing_deps,
-        "surplus_deps": surplus_deps,
-        "orphan_sonames": orphan_sonames,
-    }
-    t = Template.Template(REPORT_TMPL, searchList=[namespace])
-    return unicode(t)
-
-
-class SystemPkgmap(object):
-  """A class to hold and manipulate the /var/sadm/install/contents file.
-
-  TODO: Implement timestamp checking and refreshing the cache.
-  """
-  
-  STOP_PKGS = ["SUNWbcp", "SUNWowbcp", "SUNWucb"] 
-  CHECKPKG_DIR = ".checkpkg"
-  SQLITE3_DBNAME_TMPL = "var-sadm-install-contents-cache-%s"
-
-  def __init__(self):
-    """There is no need to re-parse it each time.
-
-    Read it slowly the first time and cache it for later."""
-    self.cache = {}
-    self.checkpkg_dir = os.path.join(os.environ["HOME"], self.CHECKPKG_DIR)
-    self.fqdn = socket.getfqdn()
-    self.db_path = os.path.join(self.checkpkg_dir,
-                                self.SQLITE3_DBNAME_TMPL % self.fqdn)
-    self.file_mtime = None
-    self.cache_mtime = None
-    if os.path.exists(self.db_path):
-      logging.debug("Connecting to the %s database.", self.db_path)
-      self.conn = sqlite3.connect(self.db_path)
-      if not self.IsDatabaseUpToDate():
-        logging.warning("Rebuilding the package cache, can take a few minutes.")
-        self.PurgeDatabase()
-        self.PopulateDatabase()
-    else:
-      print "Building a cache of /var/sadm/install/contents."
-      print "The cache will be kept in %s." % self.db_path
-      if not os.path.exists(self.checkpkg_dir):
-        logging.debug("Creating %s", self.checkpkg_dir)
-        os.mkdir(self.checkpkg_dir)
-      self.conn = sqlite3.connect(self.db_path)
-      c = self.conn.cursor()
-      c.execute("""
-          CREATE TABLE systempkgmap (
-            id INTEGER PRIMARY KEY,
-            basename TEXT,
-            path TEXT,
-            line TEXT
-          );
-      """)
-      logging.debug("Creating the config table.")
-      c.execute("""
-          CREATE TABLE config (
-            key VARCHAR(255) PRIMARY KEY,
-            float_value FLOAT,
-            str_value VARCHAR(255)
-          );
-      """)
-      self.PopulateDatabase()
-
-  def SymlinkDuringInstallation(self, p):
-    """Emulates the effect of some symlinks present during installations."""
-    p = p.replace("/opt/csw/lib/i386", "/opt/csw/lib")
-
-  def PopulateDatabase(self):
-    """Imports data into the database.
-
-    Original bit of code from checkpkg:
-    
-    egrep -v 'SUNWbcp|SUNWowbcp|SUNWucb' /var/sadm/install/contents |
-        fgrep -f $EXTRACTDIR/liblist >$EXTRACTDIR/shortcatalog
-    """
-
-    system_pkgmap_fd = open(SYSTEM_PKGMAP, "r")
-    stop_re = re.compile("(%s)" % "|".join(self.STOP_PKGS))
-    # Creating a data structure:
-    # soname - {<path1>: <line1>, <path2>: <line2>, ...}
-    logging.debug("Building sqlite3 cache db of the %s file",
-                  SYSTEM_PKGMAP)
-    c = self.conn.cursor()
-    count = itertools.count()
-    for line in system_pkgmap_fd:
-      i = count.next()
-      if not i % 1000:
-        print "\r%s" % i,
-      if stop_re.search(line):
-        continue
-      fields = re.split(WS_RE, line)
-      pkgmap_entry_path = fields[0].split("=")[0]
-      pkgmap_entry_dir, pkgmap_entry_base_name = os.path.split(pkgmap_entry_path)
-      sql = "INSERT INTO systempkgmap (basename, path, line) VALUES (?, ?, ?);"
-      c.execute(sql, (pkgmap_entry_base_name, pkgmap_entry_dir, line.strip()))
-    print
-    print "Creating the main database index."
-    sql = "CREATE INDEX basename_idx ON systempkgmap(basename);"
-    c.execute(sql)
-    self.SetDatabaseMtime()
-    self.conn.commit()
-
-  def SetDatabaseMtime(self):
-    c = self.conn.cursor()
-    sql = "DELETE FROM config WHERE key = ?;"
-    c.execute(sql, [CONFIG_MTIME])
-    mtime = self.GetFileMtime()
-    logging.debug("Inserting the mtime (%s) into the database.", mtime)
-    sql = """
-    INSERT INTO config (key, float_value)
-    VALUES (?, ?);
-    """
-    c.execute(sql, [CONFIG_MTIME, mtime])
-
-  def GetPkgmapLineByBasename(self, filename):
-    if filename in self.cache:
-      return self.cache[filename]
-    sql = "SELECT path, line FROM systempkgmap WHERE basename = ?;"
-    c = self.conn.cursor()
-    c.execute(sql, [filename])
-    lines = {}
-    for row in c:
-      lines[row[0]] = row[1]
-    if len(lines) == 0:
-      logging.debug("Cache doesn't contain filename %s", filename)
-    self.cache[filename] = lines
-    return lines
-
-  def GetDatabaseMtime(self):
-    if not self.cache_mtime:
-      sql = """
-      SELECT float_value FROM config
-      WHERE key = ?;
-      """
-      c = self.conn.cursor()
-      c.execute(sql, [CONFIG_MTIME])
-      row = c.fetchone()
-      if not row:
-        # raise ConfigurationError("Could not find the mtime setting")
-        self.cache_mtime = 1
-      else:
-        self.cache_mtime = row[0]
-    return self.cache_mtime
-
-  def GetFileMtime(self):
-    if not self.file_mtime:
-      stat_data = os.stat(SYSTEM_PKGMAP)
-      self.file_mtime = stat_data.st_mtime
-    return self.file_mtime
-
-  def IsDatabaseUpToDate(self):
-    f_mtime = self.GetFileMtime()
-    d_mtime = self.GetDatabaseMtime()
-    logging.debug("f_mtime %s, d_time: %s", f_mtime, d_mtime)
-    return self.GetFileMtime() <= self.GetDatabaseMtime()
-
-  def PurgeDatabase(self):
-    logging.info("Purging the cache database")
-    c = self.conn.cursor()
-    sql = "DELETE FROM config;"
-    c.execute(sql)
-    sql = "DELETE FROM systempkgmap;"
-    c.execute(sql)
-    sql = "DROP INDEX basename_idx;"
-    try:
-      c.execute(sql)
-    except sqlite3.OperationalError, e:
-      logging.warn(e)
-
-def SharedObjectDependencies(pkgname,
-                             binaries_by_pkgname,
-                             needed_sonames_by_binary,
-                             pkgs_by_soname,
-                             filenames_by_soname,
-                             pkg_by_any_filename):
-  """This is one of the more obscure and more important pieces of code.
-
-  I tried to make it simpler, but given that the operations here involve
-  whole sets of packages, it's not easy.
-  """
-  so_dependencies = set()
-  orphan_sonames = set()
-  self_provided = set()
-  for binary in binaries_by_pkgname[pkgname]:
-    needed_sonames = needed_sonames_by_binary[binary][NEEDED_SONAMES]
-    for soname in needed_sonames:
-      if soname in filenames_by_soname:
-        filename = filenames_by_soname[soname]
-        pkg = pkg_by_any_filename[filename]
-        self_provided.add(soname)
-        so_dependencies.add(pkg)
-      elif soname in pkgs_by_soname:
-        so_dependencies.add(pkgs_by_soname[soname])
-      else:
-        orphan_sonames.add(soname)
-  return so_dependencies, self_provided, orphan_sonames
-
-
-def GuessDepsByFilename(pkgname, pkg_by_any_filename):
-  """Guesses dependencies based on filename regexes."""
-  guessed_deps = set()
-  for pattern, dep_pkgname in DEPENDENCY_FILENAME_REGEXES:
-    # If any file name matches, add the dep, go to the next pattern/pkg
-    # combination.
-    pattern_re = re.compile("^%s$" % pattern)
-    for filename in pkg_by_any_filename:
-      if (re.match(pattern_re, filename)
-            and
-          pkgname == pkg_by_any_filename[filename]):
-        guessed_deps.add(dep_pkgname)
-        break
-  return guessed_deps
-
-
-def GuessDepsByPkgname(pkgname, pkg_by_any_filename):
-  # More guessed dependencies: If one package is a substring of another, it
-  # might be a hint. For example, CSWmysql51test should depend on CSWmysql51.
-  # However, the rt (runtime) packages should not want to depend on the main
-  # package.
-  guessed_deps = set()
-  all_other_pkgs = set(pkg_by_any_filename.values())
-  for other_pkg in all_other_pkgs:
-    other_pkg = unicode(other_pkg)
-    if pkgname == other_pkg:
-      continue
-    if pkgname.startswith(other_pkg):
-      endings = ["devel", "test", "bench", "dev"]
-      for ending in endings:
-        if pkgname.endswith(ending):
-          guessed_deps.add(other_pkg)
-  return guessed_deps
-
-
-def AnalyzeDependencies(pkgname,
-                        declared_dependencies,
-                        binaries_by_pkgname,
-                        needed_sonames_by_binary,
-                        pkgs_by_soname,
-                        filenames_by_soname,
-                        pkg_by_any_filename):
-  """Gathers and merges dependency results from other functions.
-
-  declared_dependencies: Dependencies that the package in question claims to
-                         have.
-
-  binaries_by_pkgname: A dictionary mapping pkgnames (CSWfoo) to binary names
-                       (without paths)
-
-  needed_sonames_by_binary: A dictionary mapping binary file name to
-                            a dictionary containing: "needed sonames",
-                            "soname", "rpath". Based on examining the binary
-                            files within the packages.
-
-  pkgs_by_soname: A dictionary mapping sonames to pkgnames, based on the
-                  contents of the system wide pkgmap
-                  (/var/sadm/install/contents)
-
-  filenames_by_soname: A dictionary mapping shared library sonames to filenames,
-                       based on files within packages
-
-  pkg_by_any_filename: Mapping from file names to packages names, based on the
-                       contents of the packages under examination.
-  """
-  declared_dependencies_set = set(declared_dependencies)
-
-  so_dependencies, self_provided, orphan_sonames = SharedObjectDependencies(
-      pkgname,
-      binaries_by_pkgname,
-      needed_sonames_by_binary,
-      pkgs_by_soname,
-      filenames_by_soname,
-      pkg_by_any_filename)
-  auto_dependencies = reduce(lambda x, y: x.union(y),
-      [
-        so_dependencies,
-        GuessDepsByFilename(pkgname, pkg_by_any_filename),
-        GuessDepsByPkgname(pkgname, pkg_by_any_filename),
-      ])
-  missing_deps = auto_dependencies.difference(declared_dependencies_set)
-  # Don't report itself as a suggested dependency.
-  missing_deps = missing_deps.difference(set([pkgname]))
-  missing_deps = missing_deps.difference(set(DO_NOT_REPORT_MISSING))
-  surplus_deps = declared_dependencies_set.difference(auto_dependencies)
-  surplus_deps = surplus_deps.difference(DO_NOT_REPORT_SURPLUS)
-  orphan_sonames = orphan_sonames.difference(ALLOWED_ORPHAN_SONAMES)
-  return missing_deps, surplus_deps, orphan_sonames
-
-
-def ExpandRunpath(runpath, isalist):
-  # Emulating $ISALIST expansion
-  if '$ISALIST' in runpath:
-    expanded_list = [runpath.replace('$ISALIST', isa) for isa in isalist]
-  else:
-    expanded_list = [runpath]
-  return expanded_list
-
-def ExpandSymlink(symlink, target, input_path):
-  symlink_re = re.compile(r"%s(/|$)" % symlink)
-  if re.search(symlink_re, input_path):
-    result = input_path.replace(symlink, target)
-  else:
-    result = input_path
-  return result
-
-def Emulate64BitSymlinks(runpath_list):
-  """Need to emulate the 64 -> amd64, 64 -> sparcv9 symlink
-
-  Since we don't know the architecture, we'll adding both amd64 and sparcv9.
-  It should be safe.
-  """
-  symlinked_list = []
-  for runpath in runpath_list:
-    for symlink, expansion_list in SYSTEM_SYMLINKS:
-      for target in expansion_list:
-        expanded = ExpandSymlink(symlink, target, runpath)
-        if expanded not in symlinked_list:
-          symlinked_list.append(expanded)
-  return symlinked_list
-
-
-def SanitizeRunpath(runpath):
-  while True:
-    if runpath.endswith("/"):
-      runpath = runpath[:-1]
-    elif "//" in runpath:
-      runpath = runpath.replace("//", "/")
-    else:
-      break
-  return runpath
-
-
-def GetLinesBySoname(pkgmap, needed_sonames, runpath_by_needed_soname, isalist):
-  """Works out which system pkgmap lines correspond to given sonames."""
-  lines_by_soname = {}
-  for soname in needed_sonames:
-    # This is the critical part of the algorithm: it iterates over the
-    # runpath and finds the first matching one.
-    runpath_found = False
-    for runpath in runpath_by_needed_soname[soname]:
-      runpath = SanitizeRunpath(runpath)
-      runpath_list = ExpandRunpath(runpath, isalist)
-      runpath_list = Emulate64BitSymlinks(runpath_list)
-      soname_runpath_data = pkgmap.GetPkgmapLineByBasename(soname)
-      # Emulating the install time symlinks, for instance, if the prototype contains
-      # /opt/csw/lib/i386/foo.so.0 and /opt/csw/lib/i386 is a symlink to ".",
-      # the shared library ends up in /opt/csw/lib/foo.so.0 and should be findable even when
-      # RPATH does not contain $ISALIST.
-      new_soname_runpath_data = {}
-      for p in soname_runpath_data:
-        expanded_p_list = Emulate64BitSymlinks([p])
-        for expanded_p in expanded_p_list:
-          new_soname_runpath_data[expanded_p] = soname_runpath_data[p]
-      soname_runpath_data = new_soname_runpath_data
-
-      logging.debug("%s: will be looking for %s in %s" %
-                    (soname, runpath_list, soname_runpath_data.keys()))
-      for runpath_expanded in runpath_list:
-        if runpath_expanded in soname_runpath_data:
-          lines_by_soname[soname] = soname_runpath_data[runpath_expanded]
-          runpath_found = True
-          # This break only goes out of the inner loop,
-          # need another one below to finish the outer loop.
-          break
-      if runpath_found:
-        break
-  return lines_by_soname
-
-
-def BuildIndexesBySoname(needed_sonames_by_binary):
-  """Builds data structures indexed by soname.
-
-  Building indexes
-  {"foo.so": ["/opt/csw/lib/gcc4", "/opt/csw/lib", ...],
-   ...
-  }
-  """
-  needed_sonames = set()
-  binaries_by_soname = {}
-  runpath_by_needed_soname = {}
-  for binary_name, data in needed_sonames_by_binary.iteritems():
-    for soname in data[NEEDED_SONAMES]:
-      needed_sonames.add(soname)
-      if soname not in runpath_by_needed_soname:
-        runpath_by_needed_soname[soname] = []
-      runpath_by_needed_soname[soname].extend(data[RUNPATH])
-      if soname not in binaries_by_soname:
-        binaries_by_soname[soname] = set()
-      binaries_by_soname[soname].add(binary_name)
-  return needed_sonames, binaries_by_soname, runpath_by_needed_soname
-
-
-def ParseDumpOutput(dump_output):
-  binary_data = {RUNPATH: [],
-                 NEEDED_SONAMES: []}
-  for line in dump_output.splitlines():
-    fields = re.split(WS_RE, line)
-    # TODO: Make it a unit test
-    # logging.debug("%s says: %s", DUMP_BIN, fields)
-    if len(fields) < 3:
-      continue
-    if fields[1] == "NEEDED":
-      binary_data[NEEDED_SONAMES].append(fields[2])
-    elif fields[1] == "RUNPATH":
-      binary_data[RUNPATH].extend(fields[2].split(":"))
-    elif fields[1] == "SONAME":
-      binary_data[SONAME] = fields[2]
-  # Adding the default runtime path search option.
-  binary_data[RUNPATH].append("/usr/lib/$ISALIST")
-  binary_data[RUNPATH].append("/usr/lib")
-  binary_data[RUNPATH].append("/lib/$ISALIST")
-  binary_data[RUNPATH].append("/lib")
-  return binary_data
-
-
-class CheckpkgManager(object):
-  """Takes care of calling checking functions"""
-
-  def __init__(self, name, extractdir, pkgname_list, debug=False):
-    self.debug = debug
-    self.name = name
-    self.extractdir = extractdir
-    self.pkgname_list = pkgname_list
-    self.errors = []
-    self.individual_checks = []
-    self.set_checks = []
-    self.packages = []
-
-  def RegisterIndividualCheck(self, function):
-    self.individual_checks.append(function)
-
-  def RegisterSetCheck(self, function):
-    self.set_checks.append(function)
-
-  def Run(self):
-    """Runs all the checks
-
-    Returns a tuple of an exit code and a report.
-    """
-    packages = []
-    errors = {}
-    for pkgname in self.pkgname_list:
-        pkg_path = os.path.join(self.extractdir, pkgname)
-        packages.append(opencsw.DirectoryFormatPackage(pkg_path))
-    for pkg in packages:
-      for function in self.individual_checks:
-        errors_for_pkg = function(pkg)
-        if errors_for_pkg:
-          errors[pkg.pkgname] = errors_for_pkg
-    # Set checks
-    for function in self.set_checks:
-      set_errors = function(packages)
-      if set_errors:
-        errors["The package set"] = set_errors
-    namespace = {
-        "name": self.name,
-        "errors": errors,
-        "debug": self.debug,
-    }
-    t = Template.Template(ERROR_REPORT_TMPL, searchList=[namespace])
-    exit_code = bool(errors)
-    return (exit_code, unicode(t))

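checkpkg.py is re-added below under the new lib/python path; the CheckpkgManager class above is the entry point that external check scripts drive. A minimal sketch of such a script, using only the API visible here (the pkgname-prefix check is invented purely for illustration):

    # Sketch: driving CheckpkgManager from a standalone check script.
    # The pkgname-prefix check is a toy example.
    import checkpkg

    def CheckPkgnamePrefix(pkg):
      """Individual check: receives one package, returns a list of errors."""
      errors = []
      if not pkg.pkgname.startswith("CSW"):
        errors.append("pkgname %s does not start with CSW" % pkg.pkgname)
      return errors

    def main():
      options, pkgnames = checkpkg.GetOptions()
      manager = checkpkg.CheckpkgManager("example-check",
                                         options.extractdir,
                                         pkgnames,
                                         debug=options.debug)
      manager.RegisterIndividualCheck(CheckPkgnamePrefix)
      exit_code, report = manager.Run()
      print(report)
      raise SystemExit(exit_code)

    if __name__ == "__main__":
      main()
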
Copied: csw/mgar/gar/v2-git/lib/python/checkpkg.py (from rev 8229, csw/mgar/gar/v2/lib/python/checkpkg.py)
===================================================================
--- csw/mgar/gar/v2-git/lib/python/checkpkg.py	                        (rev 0)
+++ csw/mgar/gar/v2-git/lib/python/checkpkg.py	2010-01-30 22:14:48 UTC (rev 8232)
@@ -0,0 +1,622 @@
+# $Id$
+#
+# This is the checkpkg library, common for all checkpkg tests written in
+# Python.
+
+import itertools
+import logging
+import optparse
+import os
+import os.path
+import re
+import socket
+import sqlite3
+import subprocess
+from Cheetah import Template
+import opencsw
+
+SYSTEM_PKGMAP = "/var/sadm/install/contents"
+WS_RE = re.compile(r"\s+")
+NEEDED_SONAMES = "needed sonames"
+RUNPATH = "runpath"
+SONAME = "soname"
+CONFIG_MTIME = "mtime"
+DO_NOT_REPORT_SURPLUS = set([u"CSWcommon", u"CSWcswclassutils", u"CSWisaexec"])
+DO_NOT_REPORT_MISSING = set([u"SUNWlibC", u"SUNWcsl", u"SUNWlibms",
+                             u"*SUNWcslr", u"*SUNWlibC", u"*SUNWlibms",
+                             u"SUNWcslx"])
+SYSTEM_SYMLINKS = (
+    ("/opt/csw/bdb4", ["/opt/csw/bdb42"]),
+    ("/64", ["/amd64", "/sparcv9"]),
+    ("/opt/csw/lib/i386", ["/opt/csw/lib"]),
+)
+
+# This shared library is present on Solaris 10 on amd64, but it's missing on
+# Solaris 8 on i386.  It's okay if it's missing.
+ALLOWED_ORPHAN_SONAMES = set([u"libm.so.2"])
+DEPENDENCY_FILENAME_REGEXES = (
+    (r".*\.pl", u"CSWperl"),
+    (r".*\.pm", u"CSWperl"),
+    (r".*\.py", u"CSWpython"),
+    (r".*\.rb", u"CSWruby"),
+)
+
+REPORT_TMPL = u"""# $pkgname:
+#if $missing_deps
+# SUGGESTION: you may want to add some or all of the following as depends:
+#    (Feel free to ignore SUNW or SPRO packages)
+#for $pkg in $sorted($missing_deps)
+REQUIRED_PKGS_$pkgname += $pkg
+#end for
+#end if
+#if $surplus_deps
+# The following dependencies might be unnecessary:
+#for $pkg in $sorted($surplus_deps)
+# ? $pkg
+#end for
+#end if
+#if $orphan_sonames
+# The following required sonames would not be found at runtime:
+#for $soname in $sorted($orphan_sonames)
+# ! $soname
+#end for
+#end if
+#if not $missing_deps and not $surplus_deps and not $orphan_sonames
+# + Dependencies of $pkgname look good.
+#end if
+"""
+
+ERROR_REPORT_TMPL = u"""#if $errors
+ERROR: One or more errors have been found by $name.
+#for $pkgname in $errors
+$pkgname:
+#for $error in $errors[$pkgname]
+  $repr($error)
+#end for
+#end for
+#else
+#if $debug
+OK: $name found no problems.
+#end if
+#end if
+"""
+
+class Error(Exception):
+  pass
+
+
+class ConfigurationError(Error):
+  pass
+
+
+class PackageError(Error):
+  pass
+
+
+def GetOptions():
+  parser = optparse.OptionParser()
+  parser.add_option("-e", dest="extractdir",
+                    help="The directory into which the package has been extracted")
+  parser.add_option("-d", "--debug", dest="debug",
+                    default=False, action="store_true",
+                    help="Turn on debugging messages")
+  (options, args) = parser.parse_args()
+  if not options.extractdir:
+    raise ConfigurationError("ERROR: -e option is missing.")
+  # Using set() to make the arguments unique.
+  return options, set(args)
+
+
+class CheckpkgBase(object):
+  """This class has functionality overlapping with DirectoryFormatPackage
+  from the opencsw.py library. The classes should be merged.
+  """
+
+  def __init__(self, extractdir, pkgname):
+    self.extractdir = extractdir
+    self.pkgname = pkgname
+    self.pkgpath = os.path.join(self.extractdir, self.pkgname)
+
+  def CheckPkgpathExists(self):
+    if not os.path.isdir(self.pkgpath):
+      raise PackageError("%s does not exist or is not a directory"
+                         % self.pkgpath)
+
+  def ListBinaries(self):
+    """Shells out to list all the binaries from a given package.
+
+    Original checkpkg code:
+
+    # #########################################
+    # # find all executables and dynamic libs,and list their filenames.
+    # listbinaries() {
+    #   if [ ! -d $1 ] ; then
+    #     print errmsg $1 not a directory
+    #     rm -rf $EXTRACTDIR
+    #     exit 1
+    #   fi
+    # 
+    #   find $1 -print | xargs file |grep ELF |nawk -F: '{print $1}'
+    # }
+    """
+    self.CheckPkgpathExists()
+    find_tmpl = "find %s -print | xargs file | grep ELF | nawk -F: '{print $1}'"
+    find_proc = subprocess.Popen(find_tmpl % self.pkgpath,
+                                 shell=True, stdout=subprocess.PIPE)
+    stdout, stderr = find_proc.communicate()
+    ret = find_proc.wait()
+    if ret:
+      logging.error("The find command returned an error.")
+    return stdout.splitlines()
+
+  def GetAllFilenames(self):
+    self.CheckPkgpathExists()
+    file_basenames = []
+    for root, dirs, files in os.walk(self.pkgpath):
+      file_basenames.extend(files)
+    return file_basenames
+
+  def FormatDepsReport(self, missing_deps, surplus_deps, orphan_sonames):
+    """To be removed."""
+    namespace = {
+        "pkgname": self.pkgname,
+        "missing_deps": missing_deps,
+        "surplus_deps": surplus_deps,
+        "orphan_sonames": orphan_sonames,
+    }
+    t = Template.Template(REPORT_TMPL, searchList=[namespace])
+    return unicode(t)
+
+
+class SystemPkgmap(object):
+  """A class to hold and manipulate the /var/sadm/install/contents file.
+
+  TODO: Implement timestamp checking and refreshing the cache.
+  """
+  
+  STOP_PKGS = ["SUNWbcp", "SUNWowbcp", "SUNWucb"] 
+  CHECKPKG_DIR = ".checkpkg"
+  SQLITE3_DBNAME_TMPL = "var-sadm-install-contents-cache-%s"
+
+  def __init__(self):
+    """There is no need to re-parse it each time.
+
+    Read it slowly the first time and cache it for later."""
+    self.cache = {}
+    self.checkpkg_dir = os.path.join(os.environ["HOME"], self.CHECKPKG_DIR)
+    self.fqdn = socket.getfqdn()
+    self.db_path = os.path.join(self.checkpkg_dir,
+                                self.SQLITE3_DBNAME_TMPL % self.fqdn)
+    self.file_mtime = None
+    self.cache_mtime = None
+    if os.path.exists(self.db_path):
+      logging.debug("Connecting to the %s database.", self.db_path)
+      self.conn = sqlite3.connect(self.db_path)
+      if not self.IsDatabaseUpToDate():
+        logging.warning("Rebuilding the package cache, can take a few minutes.")
+        self.PurgeDatabase()
+        self.PopulateDatabase()
+    else:
+      print "Building a cache of /var/sadm/install/contents."
+      print "The cache will be kept in %s." % self.db_path
+      if not os.path.exists(self.checkpkg_dir):
+        logging.debug("Creating %s", self.checkpkg_dir)
+        os.mkdir(self.checkpkg_dir)
+      self.conn = sqlite3.connect(self.db_path)
+      c = self.conn.cursor()
+      c.execute("""
+          CREATE TABLE systempkgmap (
+            id INTEGER PRIMARY KEY,
+            basename TEXT,
+            path TEXT,
+            line TEXT
+          );
+      """)
+      logging.debug("Creating the config table.")
+      c.execute("""
+          CREATE TABLE config (
+            key VARCHAR(255) PRIMARY KEY,
+            float_value FLOAT,
+            str_value VARCHAR(255)
+          );
+      """)
+      self.PopulateDatabase()
+
+  def SymlinkDuringInstallation(self, p):
+    """Emulates the effect of some symlinks present during installations."""
+    p = p.replace("/opt/csw/lib/i386", "/opt/csw/lib")
+
+  def PopulateDatabase(self):
+    """Imports data into the database.
+
+    Original bit of code from checkpkg:
+    
+    egrep -v 'SUNWbcp|SUNWowbcp|SUNWucb' /var/sadm/install/contents |
+        fgrep -f $EXTRACTDIR/liblist >$EXTRACTDIR/shortcatalog
+    """
+
+    system_pkgmap_fd = open(SYSTEM_PKGMAP, "r")
+    stop_re = re.compile("(%s)" % "|".join(self.STOP_PKGS))
+    # Creating a data structure:
+    # soname - {<path1>: <line1>, <path2>: <line2>, ...}
+    logging.debug("Building sqlite3 cache db of the %s file",
+                  SYSTEM_PKGMAP)
+    c = self.conn.cursor()
+    count = itertools.count()
+    for line in system_pkgmap_fd:
+      i = count.next()
+      if not i % 1000:
+        print "\r%s" % i,
+      if stop_re.search(line):
+        continue
+      fields = re.split(WS_RE, line)
+      pkgmap_entry_path = fields[0].split("=")[0]
+      pkgmap_entry_dir, pkgmap_entry_base_name = os.path.split(pkgmap_entry_path)
+      sql = "INSERT INTO systempkgmap (basename, path, line) VALUES (?, ?, ?);"
+      c.execute(sql, (pkgmap_entry_base_name, pkgmap_entry_dir, line.strip()))
+    print
+    print "Creating the main database index."
+    sql = "CREATE INDEX basename_idx ON systempkgmap(basename);"
+    c.execute(sql)
+    self.SetDatabaseMtime()
+    self.conn.commit()
+
+  def SetDatabaseMtime(self):
+    c = self.conn.cursor()
+    sql = "DELETE FROM config WHERE key = ?;"
+    c.execute(sql, [CONFIG_MTIME])
+    mtime = self.GetFileMtime()
+    logging.debug("Inserting the mtime (%s) into the database.", mtime)
+    sql = """
+    INSERT INTO config (key, float_value)
+    VALUES (?, ?);
+    """
+    c.execute(sql, [CONFIG_MTIME, mtime])
+
+  def GetPkgmapLineByBasename(self, filename):
+    if filename in self.cache:
+      return self.cache[filename]
+    sql = "SELECT path, line FROM systempkgmap WHERE basename = ?;"
+    c = self.conn.cursor()
+    c.execute(sql, [filename])
+    lines = {}
+    for row in c:
+      lines[row[0]] = row[1]
+    if len(lines) == 0:
+      logging.debug("Cache doesn't contain filename %s", filename)
+    self.cache[filename] = lines
+    return lines
+
+  def GetDatabaseMtime(self):
+    if not self.cache_mtime:
+      sql = """
+      SELECT float_value FROM config
+      WHERE key = ?;
+      """
+      c = self.conn.cursor()
+      c.execute(sql, [CONFIG_MTIME])
+      row = c.fetchone()
+      if not row:
+        # raise ConfigurationError("Could not find the mtime setting")
+        self.cache_mtime = 1
+      else:
+        self.cache_mtime = row[0]
+    return self.cache_mtime
+
+  def GetFileMtime(self):
+    if not self.file_mtime:
+      stat_data = os.stat(SYSTEM_PKGMAP)
+      self.file_mtime = stat_data.st_mtime
+    return self.file_mtime
+
+  def IsDatabaseUpToDate(self):
+    f_mtime = self.GetFileMtime()
+    d_mtime = self.GetDatabaseMtime()
+    logging.debug("f_mtime %s, d_time: %s", f_mtime, d_mtime)
+    return self.GetFileMtime() <= self.GetDatabaseMtime()
+
+  def PurgeDatabase(self):
+    logging.info("Purging the cache database")
+    c = self.conn.cursor()
+    sql = "DELETE FROM config;"
+    c.execute(sql)
+    sql = "DELETE FROM systempkgmap;"
+    c.execute(sql)
+    sql = "DROP INDEX basename_idx;"
+    try:
+      c.execute(sql)
+    except sqlite3.OperationalError, e:
+      logging.warn(e)
+
+def SharedObjectDependencies(pkgname,
+                             binaries_by_pkgname,
+                             needed_sonames_by_binary,
+                             pkgs_by_soname,
+                             filenames_by_soname,
+                             pkg_by_any_filename):
+  """This is one of the more obscure and more important pieces of code.
+
+  I tried to make it simpler, but given that the operations here involve
+  whole sets of packages, it's not easy.
+  """
+  so_dependencies = set()
+  orphan_sonames = set()
+  self_provided = set()
+  for binary in binaries_by_pkgname[pkgname]:
+    needed_sonames = needed_sonames_by_binary[binary][NEEDED_SONAMES]
+    for soname in needed_sonames:
+      if soname in filenames_by_soname:
+        filename = filenames_by_soname[soname]
+        pkg = pkg_by_any_filename[filename]
+        self_provided.add(soname)
+        so_dependencies.add(pkg)
+      elif soname in pkgs_by_soname:
+        so_dependencies.add(pkgs_by_soname[soname])
+      else:
+        orphan_sonames.add(soname)
+  return so_dependencies, self_provided, orphan_sonames
+
+
+def GuessDepsByFilename(pkgname, pkg_by_any_filename):
+  """Guesses dependencies based on filename regexes."""
+  guessed_deps = set()
+  for pattern, dep_pkgname in DEPENDENCY_FILENAME_REGEXES:
+    # If any file name matches, add the dep, go to the next pattern/pkg
+    # combination.
+    pattern_re = re.compile("^%s$" % pattern)
+    for filename in pkg_by_any_filename:
+      if (re.match(pattern_re, filename)
+            and
+          pkgname == pkg_by_any_filename[filename]):
+        guessed_deps.add(dep_pkgname)
+        break
+  return guessed_deps
+
+
+def GuessDepsByPkgname(pkgname, pkg_by_any_filename):
+  # More guessed dependencies: If one package is a substring of another, it
+  # might be a hint. For example, CSWmysql51test should depend on CSWmysql51.
+# However, the rt (runtime) packages should not be made to depend on the main
+  # package.
+  guessed_deps = set()
+  all_other_pkgs = set(pkg_by_any_filename.values())
+  for other_pkg in all_other_pkgs:
+    other_pkg = unicode(other_pkg)
+    if pkgname == other_pkg:
+      continue
+    if pkgname.startswith(other_pkg):
+      endings = ["devel", "test", "bench", "dev"]
+      for ending in endings:
+        if pkgname.endswith(ending):
+          guessed_deps.add(other_pkg)
+  return guessed_deps
+
+
+def AnalyzeDependencies(pkgname,
+                        declared_dependencies,
+                        binaries_by_pkgname,
+                        needed_sonames_by_binary,
+                        pkgs_by_soname,
+                        filenames_by_soname,
+                        pkg_by_any_filename):
+  """Gathers and merges dependency results from other functions.
+
+  declared_dependencies: Dependencies that the package in question claims to
+                         have.
+
+  binaries_by_pkgname: A dictionary mapping pkgnames (CSWfoo) to binary names
+                       (without paths)
+
+  needed_sonames_by_binary: A dictionary mapping binary file name to
+                            a dictionary containing: "needed sonames",
+                            "soname", "rpath". Based on examining the binary
+                            files within the packages.
+
+  pkgs_by_soname: A dictionary mapping sonames to pkgnames, based on the
+                  contents of the system wide pkgmap
+                  (/var/sadm/install/contents)
+
+  filenames_by_soname: A dictionary mapping shared library sonames to filenames,
+                       based on files within packages
+
+  pkg_by_any_filename: Mapping from file names to packages names, based on the
+                       contents of the packages under examination.
+  """
+  declared_dependencies_set = set(declared_dependencies)
+
+  so_dependencies, self_provided, orphan_sonames = SharedObjectDependencies(
+      pkgname,
+      binaries_by_pkgname,
+      needed_sonames_by_binary,
+      pkgs_by_soname,
+      filenames_by_soname,
+      pkg_by_any_filename)
+  auto_dependencies = reduce(lambda x, y: x.union(y),
+      [
+        so_dependencies,
+        GuessDepsByFilename(pkgname, pkg_by_any_filename),
+        GuessDepsByPkgname(pkgname, pkg_by_any_filename),
+      ])
+  missing_deps = auto_dependencies.difference(declared_dependencies_set)
+  # Don't report itself as a suggested dependency.
+  missing_deps = missing_deps.difference(set([pkgname]))
+  missing_deps = missing_deps.difference(set(DO_NOT_REPORT_MISSING))
+  surplus_deps = declared_dependencies_set.difference(auto_dependencies)
+  surplus_deps = surplus_deps.difference(DO_NOT_REPORT_SURPLUS)
+  orphan_sonames = orphan_sonames.difference(ALLOWED_ORPHAN_SONAMES)
+  return missing_deps, surplus_deps, orphan_sonames
+
+
+def ExpandRunpath(runpath, isalist):
+  # Emulating $ISALIST expansion
+  if '$ISALIST' in runpath:
+    expanded_list = [runpath.replace('$ISALIST', isa) for isa in isalist]
+  else:
+    expanded_list = [runpath]
+  return expanded_list
+
+def ExpandSymlink(symlink, target, input_path):
+  symlink_re = re.compile(r"%s(/|$)" % symlink)
+  if re.search(symlink_re, input_path):
+    result = input_path.replace(symlink, target)
+  else:
+    result = input_path
+  return result
+
+def Emulate64BitSymlinks(runpath_list):
+  """Need to emulate the 64 -> amd64, 64 -> sparcv9 symlink
+
+  Since we don't know the architecture, we'll add both amd64 and sparcv9.
+  It should be safe.
+  """
+  symlinked_list = []
+  for runpath in runpath_list:
+    for symlink, expansion_list in SYSTEM_SYMLINKS:
+      for target in expansion_list:
+        expanded = ExpandSymlink(symlink, target, runpath)
+        if expanded not in symlinked_list:
+          symlinked_list.append(expanded)
+  return symlinked_list
+
+
+def SanitizeRunpath(runpath):
+  while True:
+    if runpath.endswith("/"):
+      runpath = runpath[:-1]
+    elif "//" in runpath:
+      runpath = runpath.replace("//", "/")
+    else:
+      break
+  return runpath
+
+
+def GetLinesBySoname(pkgmap, needed_sonames, runpath_by_needed_soname, isalist):
+  """Works out which system pkgmap lines correspond to given sonames."""
+  lines_by_soname = {}
+  for soname in needed_sonames:

@@ Diff output truncated at 100000 characters. @@
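The truncation cuts off in the middle of GetLinesBySoname(); its full body is visible in the deleted copy above. For the helpers that did make it in, a short worked example of how a runpath is normalised and expanded before being matched against the cached /var/sadm/install/contents entries (hypothetical isalist; assumes the library and its Cheetah dependency are importable):

    # Sketch: runpath normalisation and expansion as done before pkgmap
    # matching.  The isalist value is hypothetical.
    import checkpkg

    runpath = checkpkg.SanitizeRunpath("/opt/csw/lib/$ISALIST/")
    candidates = checkpkg.ExpandRunpath(runpath, ["sparcv8plus", "sparcv8"])
    candidates = checkpkg.Emulate64BitSymlinks(candidates)
    print(candidates)
    # Expected along the lines of:
    # ['/opt/csw/lib/sparcv8plus', '/opt/csw/lib/sparcv8']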
